Use of io.confluent.kafka.serializers.KafkaAvroSerializer in the project ksql by confluentinc.
From the class TopicStreamWriterFormatTest, the method shouldMatchAvroFormatter:
/**
 * Verifies that {@code TopicStreamWriter.Format.AVRO.isFormat} recognizes a
 * Confluent-Avro-encoded record, using a mocked schema registry client.
 */
@Test
public void shouldMatchAvroFormatter() throws Exception {
  // Build an Avro record with a single string field "str1".
  // (Schema JSON kept verbatim; Avro parsing is whitespace-insensitive.)
  String userSchema = "{\n" + " \"fields\": [\n" + " { \"name\": \"str1\", \"type\": \"string\" }\n" + " ],\n" + " \"name\": \"myrecord\",\n" + " \"type\": \"record\"\n" + "}";
  Schema schema = new Schema.Parser().parse(userSchema);
  GenericData.Record avroRecord = new GenericData.Record(schema);
  avroRecord.put("str1", "My first string");

  // Stub the registry: registration returns id 1, and lookups by id return the schema above.
  SchemaRegistryClient schemaRegistryClient = mock(SchemaRegistryClient.class);
  expect(schemaRegistryClient.register(anyString(), anyObject())).andReturn(1);
  expect(schemaRegistryClient.getById(anyInt())).andReturn(schema);
  replay(schemaRegistryClient);

  // The URL is required by the serializer config but never contacted — the client is mocked.
  Map<String, String> props = new HashMap<>();
  props.put("schema.registry.url", "localhost:9092");
  KafkaAvroSerializer avroSerializer = new KafkaAvroSerializer(schemaRegistryClient, props);

  // Serialize the record and wrap it in a ConsumerRecord as the formatter would receive it.
  byte[] testRecordBytes = avroSerializer.serialize("topic", avroRecord);
  ConsumerRecord<String, Bytes> record =
      new ConsumerRecord<>("topic", 1, 1, "key", new Bytes(testRecordBytes));

  assertTrue(TopicStreamWriter.Format.AVRO.isFormat("topic", record, schemaRegistryClient));
}
Use of io.confluent.kafka.serializers.KafkaAvroSerializer in the project ksql by confluentinc.
From the class KsqlGenericRowAvroDeserializerTest, the method getSerializedRow:
/**
 * Serializes a {@link GenericRow} into Confluent Avro wire format for the given topic.
 *
 * <p>Copies each column of the row into a {@link GenericRecord} positionally, using the
 * field order of {@code rowAvroSchema}, then serializes it with a
 * {@link KafkaAvroSerializer} backed by the supplied registry client.
 *
 * @param topicName topic used as the serialization subject
 * @param schemaRegistryClient registry client backing the serializer
 * @param rowAvroSchema Avro record schema whose fields align with the row's columns
 * @param genericRow row whose columns are copied into the Avro record
 * @return the serialized bytes in Confluent wire format
 */
private byte[] getSerializedRow(String topicName, SchemaRegistryClient schemaRegistryClient, Schema rowAvroSchema, GenericRow genericRow) {
  // Parameterized map instead of a raw type.
  Map<String, Object> map = new HashMap<>();
  // Automatically register the schema in the Schema Registry if it has not been registered.
  map.put(AbstractKafkaAvroSerDeConfig.AUTO_REGISTER_SCHEMAS, true);
  map.put(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, "");
  KafkaAvroSerializer kafkaAvroSerializer = new KafkaAvroSerializer(schemaRegistryClient, map);

  GenericRecord avroRecord = new GenericData.Record(rowAvroSchema);
  List<Schema.Field> fields = rowAvroSchema.getFields();
  for (int i = 0; i < genericRow.getColumns().size(); i++) {
    Schema.Field field = fields.get(i);
    Object column = genericRow.getColumns().get(i);
    if (field.schema().getType() == Schema.Type.ARRAY) {
      // Avro expects a java.util.List for ARRAY fields; the row stores a raw Object[].
      avroRecord.put(field.name(), Arrays.asList((Object[]) column));
    } else {
      avroRecord.put(field.name(), column);
    }
  }
  return kafkaAvroSerializer.serialize(topicName, avroRecord);
}
Use of io.confluent.kafka.serializers.KafkaAvroSerializer in the project incubator-gobblin by apache.
From the class KafkaDeserializerExtractorTest, the method testConfluentAvroDeserializerForSchemaEvolution:
// Encodes a record with schema v1 but decodes it while the extractor reports schema v2
// (v2 adds an optional string field), asserting the new field resolves to null.
@Test
public void testConfluentAvroDeserializerForSchemaEvolution() throws IOException, RestClientException, SchemaRegistryException {
WorkUnitState mockWorkUnitState = getMockWorkUnitState();
mockWorkUnitState.setProp("schema.registry.url", TEST_URL);
// v1: single required string field; v2: v1 plus an optional second string field.
Schema schemaV1 = SchemaBuilder.record(TEST_RECORD_NAME).namespace(TEST_NAMESPACE).fields().name(TEST_FIELD_NAME).type().stringType().noDefault().endRecord();
Schema schemaV2 = SchemaBuilder.record(TEST_RECORD_NAME).namespace(TEST_NAMESPACE).fields().name(TEST_FIELD_NAME).type().stringType().noDefault().optionalString(TEST_FIELD_NAME2).endRecord();
GenericRecord testGenericRecord = new GenericRecordBuilder(schemaV1).set(TEST_FIELD_NAME, "testValue").build();
// The registry mock serves v1 as the writer schema for any id.
SchemaRegistryClient mockSchemaRegistryClient = mock(SchemaRegistryClient.class);
when(mockSchemaRegistryClient.getByID(any(Integer.class))).thenReturn(schemaV1);
Serializer<Object> kafkaEncoder = new KafkaAvroSerializer(mockSchemaRegistryClient);
Deserializer<Object> kafkaDecoder = new KafkaAvroDeserializer(mockSchemaRegistryClient);
ByteBuffer testGenericRecordByteBuffer = ByteBuffer.wrap(kafkaEncoder.serialize(TEST_TOPIC_NAME, testGenericRecord));
KafkaSchemaRegistry<Integer, Schema> mockKafkaSchemaRegistry = mock(KafkaSchemaRegistry.class);
when(mockKafkaSchemaRegistry.getLatestSchemaByTopic(TEST_TOPIC_NAME)).thenReturn(schemaV2);
KafkaDeserializerExtractor kafkaDecoderExtractor = new KafkaDeserializerExtractor(mockWorkUnitState, Optional.fromNullable(Deserializers.CONFLUENT_AVRO), kafkaDecoder, mockKafkaSchemaRegistry);
// NOTE(review): when(...) is applied to kafkaDecoderExtractor, which was constructed
// directly rather than via mock()/spy() — Mockito normally rejects stubbing on a real
// object. Presumably getSchema() delegates to the mocked mockKafkaSchemaRegistry, or the
// extractor is spy-able in the full test class; verify against the original test source.
when(kafkaDecoderExtractor.getSchema()).thenReturn(schemaV2);
ByteArrayBasedKafkaRecord mockMessageAndOffset = getMockMessageAndOffset(testGenericRecordByteBuffer);
GenericRecord received = (GenericRecord) kafkaDecoderExtractor.decodeRecord(mockMessageAndOffset);
// The v2-only field was absent in the serialized bytes, so it reads back as null.
Assert.assertEquals(received.toString(), "{\"testField\": \"testValue\", \"testField2\": null}");
}
Use of io.confluent.kafka.serializers.KafkaAvroSerializer in the project registry by hortonworks.
From the class ConfluentRegistryCompatibleResourceTest, the method testConfluentSerDes:
/**
 * Round-trips a generic Avro record through the Confluent serializer and deserializer
 * against the registry's compatibility endpoint, then logs the decoded record.
 */
@Test
public void testConfluentSerDes() throws Exception {
  final org.apache.avro.Schema schema =
      new org.apache.avro.Schema.Parser().parse(GENERIC_TEST_RECORD_SCHEMA);
  final GenericRecord record = new GenericRecordBuilder(schema)
      .set("field1", "some value")
      .set("field2", "some other value")
      .build();

  // Point both SerDes at the test server's Confluent-compatible endpoint.
  final Map<String, Object> config = new HashMap<>();
  config.put(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, rootTarget.getUri().toString());

  final KafkaAvroSerializer serializer = new KafkaAvroSerializer();
  serializer.configure(config, false);
  final byte[] serialized = serializer.serialize("topic", record);

  final KafkaAvroDeserializer deserializer = new KafkaAvroDeserializer();
  deserializer.configure(config, false);
  final GenericRecord roundTripped = (GenericRecord) deserializer.deserialize("topic", serialized);
  LOG.info(roundTripped.toString());
}
Use of io.confluent.kafka.serializers.KafkaAvroSerializer in the project incubator-gobblin by apache.
From the class KafkaDeserializerExtractorTest, the method testConfluentAvroDeserializer:
/**
 * Encodes a record with the Confluent Avro serializer and checks that the
 * extractor's CONFLUENT_AVRO deserializer decodes it back to an equal record.
 */
@Test
public void testConfluentAvroDeserializer() throws IOException, RestClientException {
  WorkUnitState workUnitState = getMockWorkUnitState();
  workUnitState.setProp("schema.registry.url", TEST_URL);

  Schema recordSchema = SchemaBuilder.record(TEST_RECORD_NAME)
      .namespace(TEST_NAMESPACE)
      .fields()
      .name(TEST_FIELD_NAME).type().stringType().noDefault()
      .endRecord();
  GenericRecord expected = new GenericRecordBuilder(recordSchema)
      .set(TEST_FIELD_NAME, "testValue")
      .build();

  // The registry mock serves the schema above for any id the decoder asks for.
  SchemaRegistryClient registryClient = mock(SchemaRegistryClient.class);
  when(registryClient.getByID(any(Integer.class))).thenReturn(recordSchema);

  Serializer<Object> encoder = new KafkaAvroSerializer(registryClient);
  Deserializer<Object> decoder = new KafkaAvroDeserializer(registryClient);
  ByteBuffer payload = ByteBuffer.wrap(encoder.serialize(TEST_TOPIC_NAME, expected));

  KafkaSchemaRegistry<Integer, Schema> schemaRegistry = mock(KafkaSchemaRegistry.class);
  KafkaDeserializerExtractor extractor = new KafkaDeserializerExtractor(
      workUnitState, Optional.fromNullable(Deserializers.CONFLUENT_AVRO), decoder, schemaRegistry);
  ByteArrayBasedKafkaRecord messageAndOffset = getMockMessageAndOffset(payload);

  Assert.assertEquals(extractor.decodeRecord(messageAndOffset), expected);
}
Aggregations