Use of org.apache.gobblin.kafka.serialize.LiAvroDeserializer in project incubator-gobblin by apache.
The class Kafka09DataWriterTest, method testAvroSerialization.
@Test
public void testAvroSerialization() throws IOException, InterruptedException, SchemaRegistryException {
  String topic = "testAvroSerialization08";
  _kafkaTestHelper.provisionTopic(topic);
  Properties props = new Properties();
  props.setProperty(KafkaWriterConfigurationKeys.KAFKA_TOPIC, topic);
  props.setProperty(KafkaWriterConfigurationKeys.KAFKA_PRODUCER_CONFIG_PREFIX + "bootstrap.servers",
      "localhost:" + _kafkaTestHelper.getKafkaServerPort());
  props.setProperty(KafkaWriterConfigurationKeys.KAFKA_PRODUCER_CONFIG_PREFIX + "value.serializer",
      LiAvroSerializer.class.getName());
  // Set up a mock schema registry.
  props.setProperty(KafkaWriterConfigurationKeys.KAFKA_PRODUCER_CONFIG_PREFIX
          + KafkaSchemaRegistryConfigurationKeys.KAFKA_SCHEMA_REGISTRY_CLASS,
      ConfigDrivenMd5SchemaRegistry.class.getCanonicalName());
  Kafka09DataWriter<GenericRecord> kafka09DataWriter = new Kafka09DataWriter<>(props);
  WriteCallback callback = mock(WriteCallback.class);
  GenericRecord record = TestUtils.generateRandomAvroRecord();
  try {
    kafka09DataWriter.write(record, callback);
  } finally {
    kafka09DataWriter.close();
  }
  verify(callback, times(1)).onSuccess(isA(WriteResponse.class));
  verify(callback, never()).onFailure(isA(Exception.class));

  // Read the message back from the topic and deserialize it with LiAvroDeserializer.
  byte[] message = _kafkaTestHelper.getIteratorForTopic(topic).next().message();
  ConfigDrivenMd5SchemaRegistry schemaReg = new ConfigDrivenMd5SchemaRegistry(topic, record.getSchema());
  LiAvroDeserializer deser = new LiAvroDeserializer(schemaReg);
  GenericRecord receivedRecord = deser.deserialize(topic, message);
  Assert.assertEquals(record.toString(), receivedRecord.toString());
}
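Outside the test harness, the same registry/deserializer pairing can be applied to records pulled with a plain Kafka consumer. The sketch below is illustrative only: the broker address, group id, the reader class name, and the package of ConfigDrivenMd5SchemaRegistry are assumptions; only the ConfigDrivenMd5SchemaRegistry constructor, the LiAvroDeserializer constructor, and the deserialize(topic, bytes) call are taken from the test above.

import java.util.Collections;
import java.util.Properties;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericRecord;
import org.apache.gobblin.kafka.schemareg.ConfigDrivenMd5SchemaRegistry;  // assumed package
import org.apache.gobblin.kafka.serialize.LiAvroDeserializer;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;

public class LiAvroTopicReader {

  // Sketch: read raw bytes from the topic and decode them with the same
  // LiAvroDeserializer/ConfigDrivenMd5SchemaRegistry pair used in the test's assertion block.
  public static void readBack(String topic, Schema expectedSchema) throws Exception {
    Properties consumerProps = new Properties();
    consumerProps.setProperty("bootstrap.servers", "localhost:9092");  // assumed broker address
    consumerProps.setProperty("group.id", "liavro-readback");          // assumed group id
    consumerProps.setProperty("auto.offset.reset", "earliest");
    consumerProps.setProperty("key.deserializer",
        "org.apache.kafka.common.serialization.ByteArrayDeserializer");
    consumerProps.setProperty("value.deserializer",
        "org.apache.kafka.common.serialization.ByteArrayDeserializer");

    ConfigDrivenMd5SchemaRegistry schemaReg = new ConfigDrivenMd5SchemaRegistry(topic, expectedSchema);
    LiAvroDeserializer deser = new LiAvroDeserializer(schemaReg);

    try (KafkaConsumer<byte[], byte[]> consumer = new KafkaConsumer<>(consumerProps)) {
      consumer.subscribe(Collections.singletonList(topic));
      ConsumerRecords<byte[], byte[]> records = consumer.poll(5000L);
      for (ConsumerRecord<byte[], byte[]> record : records) {
        // Decode the LiAvro payload back into a GenericRecord.
        GenericRecord value = deser.deserialize(topic, record.value());
        System.out.println(value);
      }
    }
  }
}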
Use of org.apache.gobblin.kafka.serialize.LiAvroDeserializer in project incubator-gobblin by apache.
The class Kafka08DataWriterTest, method testAvroSerialization.
@Test
public void testAvroSerialization() throws IOException, InterruptedException, SchemaRegistryException {
  String topic = "testAvroSerialization08";
  _kafkaTestHelper.provisionTopic(topic);
  Properties props = new Properties();
  props.setProperty(KafkaWriterConfigurationKeys.KAFKA_TOPIC, topic);
  props.setProperty(KafkaWriterConfigurationKeys.KAFKA_PRODUCER_CONFIG_PREFIX + "bootstrap.servers",
      "localhost:" + _kafkaTestHelper.getKafkaServerPort());
  props.setProperty(KafkaWriterConfigurationKeys.KAFKA_PRODUCER_CONFIG_PREFIX + "value.serializer",
      "org.apache.gobblin.kafka.serialize.LiAvroSerializer");
  // Set up a mock schema registry.
  props.setProperty(KafkaWriterConfigurationKeys.KAFKA_PRODUCER_CONFIG_PREFIX
          + KafkaSchemaRegistryConfigurationKeys.KAFKA_SCHEMA_REGISTRY_CLASS,
      ConfigDrivenMd5SchemaRegistry.class.getCanonicalName());
  Kafka08DataWriter<GenericRecord> kafka08DataWriter = new Kafka08DataWriter<>(props);
  WriteCallback callback = mock(WriteCallback.class);
  GenericRecord record = TestUtils.generateRandomAvroRecord();
  try {
    kafka08DataWriter.write(record, callback);
  } finally {
    kafka08DataWriter.close();
  }
  verify(callback, times(1)).onSuccess(isA(WriteResponse.class));
  verify(callback, never()).onFailure(isA(Exception.class));

  // Read the message back from the topic and deserialize it with LiAvroDeserializer.
  byte[] message = _kafkaTestHelper.getIteratorForTopic(topic).next().message();
  ConfigDrivenMd5SchemaRegistry schemaReg = new ConfigDrivenMd5SchemaRegistry(topic, record.getSchema());
  LiAvroDeserializer deser = new LiAvroDeserializer(schemaReg);
  GenericRecord receivedRecord = deser.deserialize(topic, message);
  Assert.assertEquals(record.toString(), receivedRecord.toString());
}
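Both tests attach raw producer settings ("bootstrap.servers", "value.serializer", the schema-registry class) behind KafkaWriterConfigurationKeys.KAFKA_PRODUCER_CONFIG_PREFIX. A minimal sketch of how such prefixed keys could be stripped back into a plain producer config is shown below; the helper class is hypothetical and the import path for KafkaWriterConfigurationKeys is an assumption, so this does not necessarily reflect the writer's actual extraction code.

import java.util.Properties;
import org.apache.gobblin.kafka.writer.KafkaWriterConfigurationKeys;  // assumed package

// Hypothetical helper, not part of Gobblin: strips the producer-config prefix so the
// remaining keys ("bootstrap.servers", "value.serializer", ...) can be handed to a producer.
public final class ProducerConfigUtils {

  private ProducerConfigUtils() {}

  public static Properties extractProducerConfig(Properties writerProps) {
    String prefix = KafkaWriterConfigurationKeys.KAFKA_PRODUCER_CONFIG_PREFIX;
    Properties producerProps = new Properties();
    for (String key : writerProps.stringPropertyNames()) {
      if (key.startsWith(prefix)) {
        // Drop the prefix to recover the native producer setting name.
        producerProps.setProperty(key.substring(prefix.length()), writerProps.getProperty(key));
      }
    }
    return producerProps;
  }
}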