Use of org.apache.gobblin.writer.WriteCallback in project incubator-gobblin by apache.
From the class Kafka08DataWriterTest, method testStringSerialization:
@Test
public void testStringSerialization() throws IOException, InterruptedException {
  String topic = "testStringSerialization08";
  _kafkaTestHelper.provisionTopic(topic);

  Properties props = new Properties();
  props.setProperty(KafkaWriterConfigurationKeys.KAFKA_TOPIC, topic);
  props.setProperty(KafkaWriterConfigurationKeys.KAFKA_PRODUCER_CONFIG_PREFIX + "bootstrap.servers",
      "localhost:" + _kafkaTestHelper.getKafkaServerPort());
  props.setProperty(KafkaWriterConfigurationKeys.KAFKA_PRODUCER_CONFIG_PREFIX + "value.serializer",
      "org.apache.kafka.common.serialization.StringSerializer");

  Kafka08DataWriter<String> kafka08DataWriter = new Kafka08DataWriter<>(props);
  String messageString = "foobar";
  WriteCallback callback = mock(WriteCallback.class);

  try {
    kafka08DataWriter.write(messageString, callback);
  } finally {
    kafka08DataWriter.close();
  }

  verify(callback, times(1)).onSuccess(isA(WriteResponse.class));
  verify(callback, never()).onFailure(isA(Exception.class));

  byte[] message = _kafkaTestHelper.getIteratorForTopic(topic).next().message();
  String messageReceived = new String(message);
  Assert.assertEquals(messageReceived, messageString);
}
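If the test also needed to inspect what the writer reported back, the same Mockito mock could be combined with an ArgumentCaptor. The following is a sketch, not part of the original test: it assumes only the onSuccess(WriteResponse) signature already exercised by the verification above, and it would require an additional import of org.mockito.ArgumentCaptor.

// Hypothetical variation on the verification above: capture the WriteResponse
// passed to onSuccess so its contents can be inspected, rather than only
// matching on its type.
ArgumentCaptor<WriteResponse> responseCaptor = ArgumentCaptor.forClass(WriteResponse.class);
verify(callback, times(1)).onSuccess(responseCaptor.capture());
verify(callback, never()).onFailure(isA(Exception.class));
Assert.assertNotNull(responseCaptor.getValue());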
Use of org.apache.gobblin.writer.WriteCallback in project incubator-gobblin by apache.
From the class Kafka08DataWriterTest, method testAvroSerialization:
@Test
public void testAvroSerialization() throws IOException, InterruptedException, SchemaRegistryException {
  String topic = "testAvroSerialization08";
  _kafkaTestHelper.provisionTopic(topic);

  Properties props = new Properties();
  props.setProperty(KafkaWriterConfigurationKeys.KAFKA_TOPIC, topic);
  props.setProperty(KafkaWriterConfigurationKeys.KAFKA_PRODUCER_CONFIG_PREFIX + "bootstrap.servers",
      "localhost:" + _kafkaTestHelper.getKafkaServerPort());
  props.setProperty(KafkaWriterConfigurationKeys.KAFKA_PRODUCER_CONFIG_PREFIX + "value.serializer",
      "org.apache.gobblin.kafka.serialize.LiAvroSerializer");

  // set up mock schema registry
  props.setProperty(KafkaWriterConfigurationKeys.KAFKA_PRODUCER_CONFIG_PREFIX
          + KafkaSchemaRegistryConfigurationKeys.KAFKA_SCHEMA_REGISTRY_CLASS,
      ConfigDrivenMd5SchemaRegistry.class.getCanonicalName());

  Kafka08DataWriter<GenericRecord> kafka08DataWriter = new Kafka08DataWriter<>(props);
  WriteCallback callback = mock(WriteCallback.class);
  GenericRecord record = TestUtils.generateRandomAvroRecord();

  try {
    kafka08DataWriter.write(record, callback);
  } finally {
    kafka08DataWriter.close();
  }

  verify(callback, times(1)).onSuccess(isA(WriteResponse.class));
  verify(callback, never()).onFailure(isA(Exception.class));

  byte[] message = _kafkaTestHelper.getIteratorForTopic(topic).next().message();
  ConfigDrivenMd5SchemaRegistry schemaReg = new ConfigDrivenMd5SchemaRegistry(topic, record.getSchema());
  LiAvroDeserializer deser = new LiAvroDeserializer(schemaReg);
  GenericRecord receivedRecord = deser.deserialize(topic, message);
  Assert.assertEquals(record.toString(), receivedRecord.toString());
}
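Both tests build the producer Properties the same way and differ only in the value serializer (plus the schema-registry entry in the Avro case). A small helper like the hypothetical producerProps below could factor that out; it is a sketch, not part of the test class, and the key names are taken directly from the snippets above.

// Hypothetical helper that centralizes the producer configuration both tests
// construct by hand; only the value serializer varies between the two tests.
private Properties producerProps(String topic, String valueSerializer) {
  Properties props = new Properties();
  props.setProperty(KafkaWriterConfigurationKeys.KAFKA_TOPIC, topic);
  props.setProperty(KafkaWriterConfigurationKeys.KAFKA_PRODUCER_CONFIG_PREFIX + "bootstrap.servers",
      "localhost:" + _kafkaTestHelper.getKafkaServerPort());
  props.setProperty(KafkaWriterConfigurationKeys.KAFKA_PRODUCER_CONFIG_PREFIX + "value.serializer",
      valueSerializer);
  return props;
}

The Avro test would then only need to add the KAFKA_SCHEMA_REGISTRY_CLASS property on top of the returned Properties before constructing the writer.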