Example 36 with GenericRecordBuilder

Use of org.apache.avro.generic.GenericRecordBuilder in project secor by pinterest.

From class SecorSchemaRegistryClientTest, method testDecodeMessage.

@Test
public void testDecodeMessage() {
    Schema schemaV1 = SchemaBuilder.record("Foo").fields().name("data_field_1").type().intType().noDefault().name("timestamp").type().longType().noDefault().endRecord();
    // backward compatible schema change
    Schema schemaV2 = SchemaBuilder.record("Foo").fields().name("data_field_1").type().intType().noDefault().name("data_field_2").type().stringType().noDefault().name("timestamp").type().longType().noDefault().endRecord();
    GenericRecord record1 = new GenericRecordBuilder(schemaV1).set("data_field_1", 1).set("timestamp", 1467176315L).build();
    GenericRecord record2 = new GenericRecordBuilder(schemaV2).set("data_field_1", 1).set("data_field_2", "hello").set("timestamp", 1467176316L).build();
    GenericRecord output = secorSchemaRegistryClient.deserialize("test-avr-topic", avroSerializer.serialize("test-avr-topic", record1));
    assertEquals(secorSchemaRegistryClient.getSchema("test-avr-topic"), schemaV1);
    assertEquals(output.get("data_field_1"), 1);
    assertEquals(output.get("timestamp"), 1467176315L);
    output = secorSchemaRegistryClient.deserialize("test-avr-topic", avroSerializer.serialize("test-avr-topic", record2));
    assertEquals(secorSchemaRegistryClient.getSchema("test-avr-topic"), schemaV2);
    assertEquals(output.get("data_field_1"), 1);
    assertTrue(StringUtils.equals((output.get("data_field_2")).toString(), "hello"));
    assertEquals(output.get("timestamp"), 1467176316L);
    output = secorSchemaRegistryClient.deserialize("test-avr-topic", new byte[0]);
    assertNull(output);
}
Also used : Schema(org.apache.avro.Schema) GenericRecordBuilder(org.apache.avro.generic.GenericRecordBuilder) GenericRecord(org.apache.avro.generic.GenericRecord) Test(org.junit.Test)
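
The registry client above hides the actual Avro plumbing. As a minimal sketch of the round trip it performs, here is the same record serialized and deserialized with plain Avro classes; the schema-id header that a registry-aware serializer prepends is omitted for brevity, and the class name is illustrative.

import java.io.ByteArrayOutputStream;
import org.apache.avro.Schema;
import org.apache.avro.SchemaBuilder;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.generic.GenericRecordBuilder;
import org.apache.avro.io.BinaryEncoder;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.EncoderFactory;

public class AvroRoundTripSketch {
    public static void main(String[] args) throws Exception {
        Schema schema = SchemaBuilder.record("Foo").fields()
            .name("data_field_1").type().intType().noDefault()
            .name("timestamp").type().longType().noDefault()
            .endRecord();
        GenericRecord record = new GenericRecordBuilder(schema)
            .set("data_field_1", 1)
            .set("timestamp", 1467176315L)
            .build();

        // Serialize with the writer schema.
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(out, null);
        new GenericDatumWriter<GenericRecord>(schema).write(record, encoder);
        encoder.flush();

        // Deserialize with the same writer schema; a registry client would look
        // this schema up by the id embedded in the message instead.
        Decoder decoder = DecoderFactory.get().binaryDecoder(out.toByteArray(), null);
        GenericRecord output = new GenericDatumReader<GenericRecord>(schema).read(null, decoder);
        System.out.println(output.get("data_field_1") + " / " + output.get("timestamp"));
    }
}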

Example 37 with GenericRecordBuilder

Use of org.apache.avro.generic.GenericRecordBuilder in project schema-registry by pravega.

From class TestPravegaClientEndToEnd, method testAvroSchemaEvolution.

@Test
public void testAvroSchemaEvolution() {
    // create stream
    String scope = "scope";
    String stream = "avroevolution";
    String groupId = NameUtils.getScopedStreamName(scope, stream);
    try (StreamManager streamManager = new StreamManagerImpl(clientConfig)) {
        streamManager.createScope(scope);
        streamManager.createStream(scope, stream, StreamConfiguration.builder().scalingPolicy(ScalingPolicy.fixed(1)).build());
        SerializationFormat serializationFormat = SerializationFormat.Avro;
        AvroSchema<Object> schema1 = AvroSchema.of(SCHEMA1);
        AvroSchema<Object> schema2 = AvroSchema.of(SCHEMA2);
        AvroSchema<Object> schema3 = AvroSchema.of(SCHEMA3);
        SerializerConfig serializerConfig = SerializerConfig.builder().groupId(groupId).createGroup(serializationFormat, Compatibility.backward(), true).registerSchema(true).registryClient(client).build();
        try (EventStreamClientFactory clientFactory = EventStreamClientFactory.withScope(scope, clientConfig)) {
            // region writer with schema1
            Serializer<Object> serializer = AvroSerializerFactory.serializer(serializerConfig, schema1);
            EventStreamWriter<Object> writer = clientFactory.createEventWriter(stream, serializer, EventWriterConfig.builder().build());
            GenericRecord record = new GenericRecordBuilder(SCHEMA1).set("a", "test").build();
            writer.writeEvent(record).join();
            // endregion
            // region writer with schema2
            serializer = AvroSerializerFactory.serializer(serializerConfig, schema2);
            writer = clientFactory.createEventWriter(stream, serializer, EventWriterConfig.builder().build());
            record = new GenericRecordBuilder(SCHEMA2).set("a", "test").set("b", "value").build();
            writer.writeEvent(record).join();
            // endregion
            // region writer with schema3
            // this should throw an exception as the schema change is not backward compatible.
            AssertExtensions.assertThrows("", () -> AvroSerializerFactory.serializer(serializerConfig, schema3), ex -> Exceptions.unwrap(ex) instanceof RegistryExceptions.SchemaValidationFailedException);
            // endregion
            // region read into specific schema
            ReaderGroupManager readerGroupManager = new ReaderGroupManagerImpl(scope, clientConfig, new SocketConnectionFactoryImpl(clientConfig));
            String rg = "rg" + stream;
            readerGroupManager.createReaderGroup(rg, ReaderGroupConfig.builder().stream(NameUtils.getScopedStreamName(scope, stream)).disableAutomaticCheckpoints().build());
            AvroSchema<Object> readSchema = AvroSchema.of(SCHEMA2);
            Serializer<Object> deserializer = AvroSerializerFactory.genericDeserializer(serializerConfig, readSchema);
            EventStreamReader<Object> reader = clientFactory.createReader("r1", rg, deserializer, ReaderConfig.builder().build());
            // read two events successfully
            EventRead<Object> event = reader.readNextEvent(10000L);
            assertNotNull(event.getEvent());
            event = reader.readNextEvent(10000L);
            assertNotNull(event.getEvent());
            reader.close();
            // create new reader, this time with incompatible schema3
            String rg1 = "rg1" + stream;
            readerGroupManager.createReaderGroup(rg1, ReaderGroupConfig.builder().stream(NameUtils.getScopedStreamName(scope, stream)).disableAutomaticCheckpoints().build());
            AvroSchema<Object> readSchemaEx = AvroSchema.of(SCHEMA3);
            AssertExtensions.assertThrows("", () -> AvroSerializerFactory.genericDeserializer(serializerConfig, readSchemaEx), ex -> Exceptions.unwrap(ex) instanceof IllegalArgumentException);
            reader.close();
            // endregion
            // region read into writer schema
            String rg2 = "rg2" + stream;
            readerGroupManager.createReaderGroup(rg2, ReaderGroupConfig.builder().stream(NameUtils.getScopedStreamName(scope, stream)).disableAutomaticCheckpoints().build());
            deserializer = AvroSerializerFactory.genericDeserializer(serializerConfig, null);
            reader = clientFactory.createReader("r1", rg2, deserializer, ReaderConfig.builder().build());
            event = reader.readNextEvent(10000L);
            assertNotNull(event.getEvent());
            event = reader.readNextEvent(10000L);
            assertNotNull(event.getEvent());
            reader.close();
            readerGroupManager.close();
            // endregion
            client.removeGroup(groupId);
            streamManager.sealStream(scope, stream);
            streamManager.deleteStream(scope, stream);
        }
    }
}
Also used : ReaderGroupManager(io.pravega.client.admin.ReaderGroupManager) EventStreamClientFactory(io.pravega.client.EventStreamClientFactory) StreamManagerImpl(io.pravega.client.admin.impl.StreamManagerImpl) SocketConnectionFactoryImpl(io.pravega.client.connection.impl.SocketConnectionFactoryImpl) SerializerConfig(io.pravega.schemaregistry.serializer.shared.impl.SerializerConfig) SerializationFormat(io.pravega.schemaregistry.contract.data.SerializationFormat) StreamManager(io.pravega.client.admin.StreamManager) GenericRecordBuilder(org.apache.avro.generic.GenericRecordBuilder) GenericRecord(org.apache.avro.generic.GenericRecord) ReaderGroupManagerImpl(io.pravega.client.admin.impl.ReaderGroupManagerImpl) RegistryExceptions(io.pravega.schemaregistry.client.exceptions.RegistryExceptions) ProtobufTest(io.pravega.schemaregistry.serializer.protobuf.generated.ProtobufTest) Test(org.junit.Test)
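
The SCHEMA1, SCHEMA2, and SCHEMA3 constants are defined elsewhere in TestPravegaClientEndToEnd and are not shown in this excerpt. A plausible reconstruction, assuming evolution by field addition, shows why schema2 registers under Compatibility.backward() while schema3 is rejected; the record and field names here are hypothetical.

import org.apache.avro.Schema;
import org.apache.avro.SchemaBuilder;

// Hypothetical stand-ins for the schema constants used above.
class SchemasSketch {
    // Base schema: a single string field "a".
    static final Schema SCHEMA1 = SchemaBuilder.record("MyTest").fields()
        .name("a").type().stringType().noDefault()
        .endRecord();

    // Backward compatible: adds "b" WITH a default, so a reader using this
    // schema can still decode events written with SCHEMA1.
    static final Schema SCHEMA2 = SchemaBuilder.record("MyTest").fields()
        .name("a").type().stringType().noDefault()
        .name("b").type().stringType().stringDefault("")
        .endRecord();

    // NOT backward compatible: adds "c" with no default, so SCHEMA1/SCHEMA2
    // data cannot be resolved against it and registration fails under
    // Compatibility.backward().
    static final Schema SCHEMA3 = SchemaBuilder.record("MyTest").fields()
        .name("a").type().stringType().noDefault()
        .name("b").type().stringType().stringDefault("")
        .name("c").type().stringType().noDefault()
        .endRecord();
}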

Example 38 with GenericRecordBuilder

Use of org.apache.avro.generic.GenericRecordBuilder in project schema-registry by pravega.

From class TestPravegaClientEndToEnd, method testCodec.

@Test
public void testCodec() {
    // create stream
    String scope = "scope";
    String stream = "avrocodec";
    String groupId = NameUtils.getScopedStreamName(scope, stream);
    try (StreamManager streamManager = new StreamManagerImpl(clientConfig)) {
        streamManager.createScope(scope);
        streamManager.createStream(scope, stream, StreamConfiguration.builder().scalingPolicy(ScalingPolicy.fixed(1)).build());
        SerializationFormat serializationFormat = SerializationFormat.Avro;
        AvroSchema<Object> schema1 = AvroSchema.of(SCHEMA1);
        AvroSchema<Object> schema2 = AvroSchema.of(SCHEMA2);
        AvroSchema<Test1> schema3 = AvroSchema.of(Test1.class);
        SerializerConfig serializerConfig = SerializerConfig.builder().groupId(groupId).createGroup(serializationFormat, Compatibility.backward(), true).registerSchema(true).registerCodec(true).registryClient(client).build();
        try (EventStreamClientFactory clientFactory = EventStreamClientFactory.withScope(scope, clientConfig)) {
            // region writer with schema1
            Serializer<Object> serializer = AvroSerializerFactory.serializer(serializerConfig, schema1);
            EventStreamWriter<Object> writer = clientFactory.createEventWriter(stream, serializer, EventWriterConfig.builder().build());
            GenericRecord record = new GenericRecordBuilder(SCHEMA1).set("a", "test").build();
            writer.writeEvent(record).join();
            // endregion
            // region writer with schema2
            Serializer<Object> serializer2 = AvroSerializerFactory.serializer(serializerConfig, schema2);
            writer = clientFactory.createEventWriter(stream, serializer2, EventWriterConfig.builder().build());
            record = new GenericRecordBuilder(SCHEMA2).set("a", "test").set("b", "value").build();
            writer.writeEvent(record).join();
            // endregion
            // region writer with codec gzip
            serializerConfig = SerializerConfig.builder().groupId(groupId).registerSchema(true).registerCodec(true).encoder(Codecs.GzipCompressor.getCodec()).registryClient(client).build();
            Serializer<Test1> serializer3 = AvroSerializerFactory.serializer(serializerConfig, schema3);
            EventStreamWriter<Test1> writer3 = clientFactory.createEventWriter(stream, serializer3, EventWriterConfig.builder().build());
            String bigString = generateBigString(1);
            writer3.writeEvent(new Test1(bigString, 1)).join();
            List<CodecType> list = client.getCodecTypes(groupId);
            assertEquals(2, list.size());
            assertTrue(list.stream().anyMatch(x -> x.equals(Codecs.None.getCodec().getCodecType())));
            assertTrue(list.stream().anyMatch(x -> x.equals(Codecs.GzipCompressor.getCodec().getCodecType())));
            // endregion
            // region writer with codec snappy
            serializerConfig = SerializerConfig.builder().groupId(groupId).registerSchema(true).registerCodec(true).encoder(Codecs.SnappyCompressor.getCodec()).registryClient(client).build();
            Serializer<Test1> serializer4 = AvroSerializerFactory.serializer(serializerConfig, schema3);
            EventStreamWriter<Test1> writer4 = clientFactory.createEventWriter(stream, serializer4, EventWriterConfig.builder().build());
            String bigString2 = generateBigString(200);
            writer4.writeEvent(new Test1(bigString2, 1)).join();
            list = client.getCodecTypes(groupId);
            assertEquals(3, list.size());
            assertTrue(list.stream().anyMatch(x -> x.equals(Codecs.None.getCodec().getCodecType())));
            assertTrue(list.stream().anyMatch(x -> x.equals(Codecs.GzipCompressor.getCodec().getCodecType())));
            assertTrue(list.stream().anyMatch(x -> x.equals(Codecs.SnappyCompressor.getCodec().getCodecType())));
            // endregion
            // region reader
            serializerConfig = SerializerConfig.builder().groupId(groupId).registryClient(client).build();
            ReaderGroupManager readerGroupManager = new ReaderGroupManagerImpl(scope, clientConfig, new SocketConnectionFactoryImpl(clientConfig));
            String rg = "rg" + stream + System.currentTimeMillis();
            readerGroupManager.createReaderGroup(rg, ReaderGroupConfig.builder().stream(NameUtils.getScopedStreamName(scope, stream)).disableAutomaticCheckpoints().build());
            Serializer<Object> deserializer = AvroSerializerFactory.genericDeserializer(serializerConfig, null);
            EventStreamReader<Object> reader = clientFactory.createReader("r1", rg, deserializer, ReaderConfig.builder().build());
            EventRead<Object> event = reader.readNextEvent(10000L);
            while (event.isCheckpoint() || event.getEvent() != null) {
                // drain the stream; nothing is asserted about individual events here
                event = reader.readNextEvent(10000L);
            }
            // endregion
            // region writer with custom codec
            CodecType mycodec = new CodecType("mycodec");
            Codec myCodec = new Codec() {

                @Override
                public String getName() {
                    return mycodec.getName();
                }

                @Override
                public CodecType getCodecType() {
                    return mycodec;
                }

                @SneakyThrows
                @Override
                public void encode(ByteBuffer data, OutputStream bos) {
                    bos.write(data.array(), data.arrayOffset() + data.position(), data.remaining());
                }

                @SneakyThrows
                @Override
                public ByteBuffer decode(ByteBuffer data, Map<String, String> properties) {
                    return data;
                }
            };
            serializerConfig = SerializerConfig.builder().groupId(groupId).registerSchema(true).registerCodec(true).encoder(myCodec).registryClient(client).build();
            Serializer<Test1> serializer5 = AvroSerializerFactory.serializer(serializerConfig, schema3);
            EventStreamWriter<Test1> writer2 = clientFactory.createEventWriter(stream, serializer5, EventWriterConfig.builder().build());
            String bigString3 = generateBigString(300);
            writer2.writeEvent(new Test1(bigString3, 1)).join();
            // endregion
            list = client.getCodecTypes(groupId);
            assertEquals(4, list.size());
            assertTrue(list.stream().anyMatch(x -> x.equals(Codecs.None.getCodec().getCodecType())));
            assertTrue(list.stream().anyMatch(x -> x.equals(Codecs.GzipCompressor.getCodec().getCodecType())));
            assertTrue(list.stream().anyMatch(x -> x.equals(Codecs.SnappyCompressor.getCodec().getCodecType())));
            assertTrue(list.stream().anyMatch(x -> x.equals(mycodec)));
            reader.close();
            // region new reader with additional codec
            // add new decoder for custom serialization
            SerializerConfig serializerConfig2 = SerializerConfig.builder().groupId(groupId).decoder(myCodec.getName(), myCodec).registryClient(client).build();
            Serializer<Object> deserializer2 = AvroSerializerFactory.genericDeserializer(serializerConfig2, null);
            EventStreamReader<Object> reader2 = clientFactory.createReader("r2", rg, deserializer2, ReaderConfig.builder().build());
            event = reader2.readNextEvent(10000L);
            while (event.isCheckpoint() || event.getEvent() != null) {
                // drain the stream again, now with the custom decoder registered
                event = reader2.readNextEvent(10000L);
            }
            // endregion
            writer.close();
            writer2.close();
            writer3.close();
            writer4.close();
            reader.close();
            reader2.close();
            readerGroupManager.close();
            client.removeGroup(groupId);
            streamManager.sealStream(scope, stream);
            streamManager.deleteStream(scope, stream);
        }
    }
}
Also used : DescriptorProtos(com.google.protobuf.DescriptorProtos) GroupProperties(io.pravega.schemaregistry.contract.data.GroupProperties) EventStreamWriter(io.pravega.client.stream.EventStreamWriter) SneakyThrows(lombok.SneakyThrows) AssertExtensions(io.pravega.test.common.AssertExtensions) ReaderGroupManagerImpl(io.pravega.client.admin.impl.ReaderGroupManagerImpl) Random(java.util.Random) StreamConfiguration(io.pravega.client.stream.StreamConfiguration) Codecs(io.pravega.schemaregistry.serializer.shared.codec.Codecs) EventRead(io.pravega.client.stream.EventRead) ByteBuffer(java.nio.ByteBuffer) PravegaStandaloneUtils(io.pravega.schemaregistry.pravegastandalone.PravegaStandaloneUtils) SchemaStore(io.pravega.schemaregistry.storage.SchemaStore) SerializerFactory(io.pravega.schemaregistry.serializers.SerializerFactory) ReaderGroupManager(io.pravega.client.admin.ReaderGroupManager) After(org.junit.After) Map(java.util.Map) RegistryExceptions(io.pravega.schemaregistry.client.exceptions.RegistryExceptions) JsonNode(com.fasterxml.jackson.databind.JsonNode) URI(java.net.URI) RestServer(io.pravega.schemaregistry.server.rest.RestServer) CodecType(io.pravega.schemaregistry.contract.data.CodecType) Path(java.nio.file.Path) SchemaRegistryClientConfig(io.pravega.schemaregistry.client.SchemaRegistryClientConfig) Either(io.pravega.schemaregistry.common.Either) ReaderGroupConfig(io.pravega.client.stream.ReaderGroupConfig) SerializerConfig(io.pravega.schemaregistry.serializer.shared.impl.SerializerConfig) Schema(org.apache.avro.Schema) AvroSchema(io.pravega.schemaregistry.serializer.avro.schemas.AvroSchema) ImmutableMap(com.google.common.collect.ImmutableMap) SchemaRegistryClientFactory(io.pravega.schemaregistry.client.SchemaRegistryClientFactory) Executors(java.util.concurrent.Executors) Slf4j(lombok.extern.slf4j.Slf4j) Base64(java.util.Base64) List(java.util.List) StreamManagerImpl(io.pravega.client.admin.impl.StreamManagerImpl) SchemaRegistryService(io.pravega.schemaregistry.service.SchemaRegistryService) ProtobufSerializerFactory(io.pravega.schemaregistry.serializer.protobuf.impl.ProtobufSerializerFactory) TestUtils(io.pravega.test.common.TestUtils) SchemaInfo(io.pravega.schemaregistry.contract.data.SchemaInfo) ProtobufTest(io.pravega.schemaregistry.serializer.protobuf.generated.ProtobufTest) GenericRecordBuilder(org.apache.avro.generic.GenericRecordBuilder) DynamicMessage(com.google.protobuf.DynamicMessage) StreamManager(io.pravega.client.admin.StreamManager) Exceptions(io.pravega.common.Exceptions) JSONSchema(io.pravega.schemaregistry.serializer.json.schemas.JSONSchema) HashMap(java.util.HashMap) SerializationFormat(io.pravega.schemaregistry.contract.data.SerializationFormat) Codec(io.pravega.schemaregistry.serializer.shared.codec.Codec) Function(java.util.function.Function) Strings(com.google.common.base.Strings) ReflectData(org.apache.avro.reflect.ReflectData) Test1(io.pravega.schemaregistry.serializer.avro.testobjs.generated.Test1) Timeout(org.junit.rules.Timeout) ScheduledExecutorService(java.util.concurrent.ScheduledExecutorService) Test2(io.pravega.schemaregistry.serializer.avro.testobjs.generated.Test2) EventStreamClientFactory(io.pravega.client.EventStreamClientFactory) SocketConnectionFactoryImpl(io.pravega.client.connection.impl.SocketConnectionFactoryImpl) SchemaRegistryClient(io.pravega.schemaregistry.client.SchemaRegistryClient) Test3(io.pravega.schemaregistry.serializer.avro.testobjs.generated.Test3) EventWriterConfig(io.pravega.client.stream.EventWriterConfig) 
Serializer(io.pravega.client.stream.Serializer) Compatibility(io.pravega.schemaregistry.contract.data.Compatibility) OutputStream(java.io.OutputStream) NameUtils(io.pravega.shared.NameUtils) GenericRecord(org.apache.avro.generic.GenericRecord) ServiceConfig(io.pravega.schemaregistry.server.rest.ServiceConfig) Files(java.nio.file.Files) SpecificRecordBase(org.apache.avro.specific.SpecificRecordBase) EventStreamReader(io.pravega.client.stream.EventStreamReader) JsonSerializerFactory(io.pravega.schemaregistry.serializer.json.impl.JsonSerializerFactory) Test(org.junit.Test) IOException(java.io.IOException) AvroSerializerFactory(io.pravega.schemaregistry.serializer.avro.impl.AvroSerializerFactory) SchemaBuilder(org.apache.avro.SchemaBuilder) TimeUnit(java.util.concurrent.TimeUnit) SchemaStoreFactory(io.pravega.schemaregistry.storage.SchemaStoreFactory) WithSchema(io.pravega.schemaregistry.serializers.WithSchema) Rule(org.junit.Rule) Paths(java.nio.file.Paths) Data(lombok.Data) GeneratedMessageV3(com.google.protobuf.GeneratedMessageV3) ReaderConfig(io.pravega.client.stream.ReaderConfig) AllArgsConstructor(lombok.AllArgsConstructor) Assert(org.junit.Assert) ScalingPolicy(io.pravega.client.stream.ScalingPolicy) ProtobufSchema(io.pravega.schemaregistry.serializer.protobuf.schemas.ProtobufSchema) ClientConfig(io.pravega.client.ClientConfig) VersionInfo(io.pravega.schemaregistry.contract.data.VersionInfo) NoArgsConstructor(lombok.NoArgsConstructor)
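
For comparison with the pass-through "mycodec" above, here is a sketch of what a compressing codec analogous to Codecs.GzipCompressor might look like, written against the same Codec interface and method signatures visible in the test. The gzip plumbing is standard java.util.zip; this is an illustration under those assumptions, not Pravega's actual implementation, and the codec-type name is made up.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.util.Map;
import java.util.zip.GZIPInputStream;
import java.util.zip.GZIPOutputStream;
import io.pravega.schemaregistry.contract.data.CodecType;
import io.pravega.schemaregistry.serializer.shared.codec.Codec;
import lombok.SneakyThrows;

class GzipCodecSketch implements Codec {
    // Hypothetical codec-type name; the real Codecs.GzipCompressor uses its own.
    private final CodecType codecType = new CodecType("gzip-sketch");

    @Override
    public String getName() {
        return codecType.getName();
    }

    @Override
    public CodecType getCodecType() {
        return codecType;
    }

    @SneakyThrows
    @Override
    public void encode(ByteBuffer data, OutputStream bos) {
        // Compress the buffer contents into the caller's stream.
        GZIPOutputStream gzip = new GZIPOutputStream(bos);
        gzip.write(data.array(), data.arrayOffset() + data.position(), data.remaining());
        // finish() writes the gzip trailer without closing the caller's stream.
        gzip.finish();
    }

    @SneakyThrows
    @Override
    public ByteBuffer decode(ByteBuffer data, Map<String, String> properties) {
        // Inflate the compressed buffer back into a plain ByteBuffer.
        byte[] compressed = new byte[data.remaining()];
        data.get(compressed);
        try (GZIPInputStream gzip = new GZIPInputStream(new ByteArrayInputStream(compressed));
             ByteArrayOutputStream out = new ByteArrayOutputStream()) {
            byte[] buf = new byte[4096];
            int n;
            while ((n = gzip.read(buf)) > 0) {
                out.write(buf, 0, n);
            }
            return ByteBuffer.wrap(out.toByteArray());
        }
    }
}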

Example 39 with GenericRecordBuilder

Use of org.apache.avro.generic.GenericRecordBuilder in project JavaBigData by TALKDATA.

From class TestDatasetSink, method testCompatibleSchemas.

@Test
public void testCompatibleSchemas() throws EventDeliveryException {
    DatasetSink sink = sink(in, config);
    // add a compatible record that is missing the msg field
    GenericRecordBuilder compatBuilder = new GenericRecordBuilder(COMPATIBLE_SCHEMA);
    GenericData.Record compatibleRecord = compatBuilder.set("id", "0").build();
    // add the record to the incoming channel
    putToChannel(in, event(compatibleRecord, COMPATIBLE_SCHEMA, null, false));
    // the record will be read using the real schema, so create the expected
    // record using it, but without any data
    GenericRecordBuilder builder = new GenericRecordBuilder(RECORD_SCHEMA);
    GenericData.Record expectedRecord = builder.set("id", "0").build();
    expected.add(expectedRecord);
    // run the sink
    sink.start();
    sink.process();
    sink.stop();
    Assert.assertEquals(Sets.newHashSet(expected), read(Datasets.load(FILE_DATASET_URI)));
    Assert.assertEquals("Should have committed", 0, remaining(in));
}
Also used : GenericRecordBuilder(org.apache.avro.generic.GenericRecordBuilder) GenericData(org.apache.avro.generic.GenericData) Test(org.junit.Test)
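
The key mechanism here is Avro schema resolution: the sink reads data written with the narrower schema against the full dataset schema, and the missing field is filled from its default. A standalone sketch of that resolution, with two illustrative stand-in schemas for COMPATIBLE_SCHEMA and RECORD_SCHEMA (which are defined elsewhere in the test):

import java.io.ByteArrayOutputStream;
import org.apache.avro.Schema;
import org.apache.avro.SchemaBuilder;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.generic.GenericRecordBuilder;
import org.apache.avro.io.BinaryEncoder;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.EncoderFactory;

public class SchemaResolutionSketch {
    public static void main(String[] args) throws Exception {
        // Writer schema: only "id".
        Schema compatible = SchemaBuilder.record("Event").fields()
            .name("id").type().stringType().noDefault()
            .endRecord();
        // Reader schema: "id" plus "msg" with a default.
        Schema full = SchemaBuilder.record("Event").fields()
            .name("id").type().stringType().noDefault()
            .name("msg").type().stringType().stringDefault("")
            .endRecord();

        GenericData.Record written = new GenericRecordBuilder(compatible).set("id", "0").build();

        ByteArrayOutputStream out = new ByteArrayOutputStream();
        BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(out, null);
        new GenericDatumWriter<GenericData.Record>(compatible).write(written, encoder);
        encoder.flush();

        // GenericDatumReader(writerSchema, readerSchema) resolves the two schemas;
        // the absent "msg" field is populated from its default value.
        Decoder decoder = DecoderFactory.get().binaryDecoder(out.toByteArray(), null);
        GenericData.Record read =
            new GenericDatumReader<GenericData.Record>(compatible, full).read(null, decoder);
        System.out.println(read); // {"id": "0", "msg": ""}
    }
}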

Example 40 with GenericRecordBuilder

Use of org.apache.avro.generic.GenericRecordBuilder in project JavaBigData by TALKDATA.

From class TestDatasetSink, method testSerializedWithIncompatibleSchemas.

@Test
public void testSerializedWithIncompatibleSchemas() throws EventDeliveryException {
    final DatasetSink sink = sink(in, config);
    GenericRecordBuilder builder = new GenericRecordBuilder(INCOMPATIBLE_SCHEMA);
    GenericData.Record rec = builder.set("username", "koala").build();
    // We pass in a valid schema in the header, but an incompatible schema
    // was used to serialize the record
    putToChannel(in, event(rec, INCOMPATIBLE_SCHEMA, SCHEMA_FILE, true));
    // run the sink
    sink.start();
    assertThrows("Should fail", EventDeliveryException.class, new Callable() {

        @Override
        public Object call() throws EventDeliveryException {
            sink.process();
            return null;
        }
    });
    sink.stop();
    Assert.assertEquals("Should have rolled back", expected.size() + 1, remaining(in));
}
Also used : EventDeliveryException(org.apache.flume.EventDeliveryException) GenericRecordBuilder(org.apache.avro.generic.GenericRecordBuilder) GenericData(org.apache.avro.generic.GenericData) Callable(java.util.concurrent.Callable) Test(org.junit.Test)
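
The failure comes from decoding, not schema validation: the event's bytes were produced with one schema, while the reader is told they follow another, so Avro misparses the stream and typically throws (or yields garbage). A minimal sketch of that mismatch with illustrative stand-in schemas:

import java.io.ByteArrayOutputStream;
import org.apache.avro.Schema;
import org.apache.avro.SchemaBuilder;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.generic.GenericRecordBuilder;
import org.apache.avro.io.BinaryEncoder;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.EncoderFactory;

public class MismatchedSchemaSketch {
    public static void main(String[] args) throws Exception {
        Schema incompatible = SchemaBuilder.record("User").fields()
            .name("username").type().stringType().noDefault()
            .endRecord();
        Schema claimed = SchemaBuilder.record("Event").fields()
            .name("id").type().longType().noDefault()
            .name("msg").type().stringType().noDefault()
            .endRecord();

        // Serialize with the incompatible schema...
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(out, null);
        GenericData.Record rec = new GenericRecordBuilder(incompatible).set("username", "koala").build();
        new GenericDatumWriter<GenericData.Record>(incompatible).write(rec, encoder);
        encoder.flush();

        // ...then decode while claiming the bytes were written with "claimed".
        Decoder decoder = DecoderFactory.get().binaryDecoder(out.toByteArray(), null);
        try {
            new GenericDatumReader<GenericData.Record>(claimed).read(null, decoder);
        } catch (Exception e) {
            // The decoder misreads the stream (here it derives a negative string
            // length) and throws, which the sink surfaces as EventDeliveryException.
            System.out.println("decode failed as expected: " + e);
        }
    }
}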

Aggregations

GenericRecordBuilder (org.apache.avro.generic.GenericRecordBuilder): 399
GenericRecord (org.apache.avro.generic.GenericRecord): 263
Test (org.junit.Test): 263
Schema (org.apache.avro.Schema): 216
GenericData (org.apache.avro.generic.GenericData): 69
ArrayList (java.util.ArrayList): 45
EnumTest (foo.bar.EnumTest): 41
File (java.io.File): 41
IndexedRecord (org.apache.avro.generic.IndexedRecord): 39
Schema (org.apache.kafka.connect.data.Schema): 39
SchemaAndValue (org.apache.kafka.connect.data.SchemaAndValue): 35
Path (org.apache.hadoop.fs.Path): 33
List (java.util.List): 30
ByteBuffer (java.nio.ByteBuffer): 29
HashMap (java.util.HashMap): 29
AvroSchema (io.confluent.kafka.schemaregistry.avro.AvroSchema): 28
Struct (org.apache.kafka.connect.data.Struct): 28
ByteArrayOutputStream (java.io.ByteArrayOutputStream): 27
Record (org.apache.avro.generic.GenericData.Record): 25
SchemaBuilder (org.apache.avro.SchemaBuilder): 22