Use of io.confluent.kafka.serializers.KafkaJsonDeserializer in project incubator-gobblin by Apache.
The example below is from the class KafkaDeserializerExtractorTest, method testConfluentJsonDeserializer.
@Test
public void testConfluentJsonDeserializer() throws IOException {
  WorkUnitState mockWorkUnitState = getMockWorkUnitState();
  // Tell the Confluent JSON (de)serializer which class to bind the JSON payload to.
  mockWorkUnitState.setProp("json.value.type", KafkaRecord.class.getName());

  KafkaRecord testKafkaRecord = new KafkaRecord("Hello World");

  // Serialize the test record the same way a producer using KafkaJsonSerializer would.
  Serializer<KafkaRecord> kafkaEncoder = new KafkaJsonSerializer<>();
  kafkaEncoder.configure(PropertiesUtils.propsToStringKeyMap(mockWorkUnitState.getProperties()), false);

  Deserializer<KafkaRecord> kafkaDecoder = new KafkaJsonDeserializer<>();
  kafkaDecoder.configure(PropertiesUtils.propsToStringKeyMap(mockWorkUnitState.getProperties()), false);

  ByteBuffer testKafkaRecordByteBuffer = ByteBuffer.wrap(kafkaEncoder.serialize(TEST_TOPIC_NAME, testKafkaRecord));

  // An unstubbed mock of the schema registry is enough for this test.
  KafkaSchemaRegistry<?, ?> mockKafkaSchemaRegistry = mock(KafkaSchemaRegistry.class);
  KafkaDeserializerExtractor kafkaDecoderExtractor = new KafkaDeserializerExtractor(mockWorkUnitState,
      Optional.fromNullable(Deserializers.CONFLUENT_JSON), kafkaDecoder, mockKafkaSchemaRegistry);

  ByteArrayBasedKafkaRecord mockMessageAndOffset = getMockMessageAndOffset(testKafkaRecordByteBuffer);
  Assert.assertEquals(kafkaDecoderExtractor.decodeRecord(mockMessageAndOffset), testKafkaRecord);
}
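
For context, here is a minimal, self-contained sketch of the KafkaJsonSerializer / KafkaJsonDeserializer round trip that the test above exercises. The Message POJO, the topic name, and the configuration map are illustrative assumptions and are not part of the Gobblin test.

import java.util.Map;

import io.confluent.kafka.serializers.KafkaJsonDeserializer;
import io.confluent.kafka.serializers.KafkaJsonSerializer;

public class KafkaJsonRoundTripSketch {

  // Hypothetical POJO standing in for Gobblin's KafkaRecord test type.
  public static class Message {
    public String value;                      // public field so Jackson can (de)serialize it
    public Message() { }                      // no-arg constructor required by Jackson
    public Message(String value) { this.value = value; }
  }

  public static void main(String[] args) {
    // "json.value.type" tells KafkaJsonDeserializer which class to bind the JSON to.
    Map<String, ?> config = Map.of("json.value.type", Message.class.getName());

    KafkaJsonSerializer<Message> serializer = new KafkaJsonSerializer<>();
    serializer.configure(config, false);      // false = configuring the value (de)serializer

    KafkaJsonDeserializer<Message> deserializer = new KafkaJsonDeserializer<>();
    deserializer.configure(config, false);

    byte[] payload = serializer.serialize("test-topic", new Message("Hello World"));
    Message roundTripped = deserializer.deserialize("test-topic", payload);

    System.out.println(roundTripped.value);   // prints "Hello World"
  }
}

The key piece is the "json.value.type" setting: it is the same property the test puts on the WorkUnitState so that the Confluent JSON deserializer knows which class to materialize from the raw bytes.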