Search in sources :

Example 1 with KafkaAvroSerializer

Use of io.confluent.kafka.serializers.KafkaAvroSerializer in the ksql project by confluentinc.

From the class TopicStreamWriterFormatTest, method shouldMatchAvroFormatter.

@Test
public void shouldMatchAvroFormatter() throws Exception {
    // Build an Avro message with a single string field.
    String userSchemaJson = "{\n" + "    \"fields\": [\n" + "        { \"name\": \"str1\", \"type\": \"string\" }\n" + "    ],\n" + "    \"name\": \"myrecord\",\n" + "    \"type\": \"record\"\n" + "}";
    Schema avroSchema = new Schema.Parser().parse(userSchemaJson);
    GenericData.Record message = new GenericData.Record(avroSchema);
    message.put("str1", "My first string");
    // Stub the schema registry so both serialization and format detection resolve the schema.
    SchemaRegistryClient registryClient = mock(SchemaRegistryClient.class);
    expect(registryClient.register(anyString(), anyObject())).andReturn(1);
    expect(registryClient.getById(anyInt())).andReturn(avroSchema);
    replay(registryClient);
    Map<String, String> serializerConfig = new HashMap<>();
    serializerConfig.put("schema.registry.url", "localhost:9092");
    KafkaAvroSerializer serializer = new KafkaAvroSerializer(registryClient, serializerConfig);
    // Serialize the record and wrap the bytes as a consumer record.
    byte[] serialized = serializer.serialize("topic", message);
    ConsumerRecord<String, Bytes> consumerRecord = new ConsumerRecord<String, Bytes>("topic", 1, 1, "key", new Bytes(serialized));
    // The AVRO formatter must recognize the Confluent-serialized payload.
    assertTrue(TopicStreamWriter.Format.AVRO.isFormat("topic", consumerRecord, registryClient));
}
Also used : HashMap(java.util.HashMap) Schema(org.apache.avro.Schema) KafkaAvroSerializer(io.confluent.kafka.serializers.KafkaAvroSerializer) GenericData(org.apache.avro.generic.GenericData) ConsumerRecord(org.apache.kafka.clients.consumer.ConsumerRecord) Bytes(org.apache.kafka.common.utils.Bytes) ConsumerRecord(org.apache.kafka.clients.consumer.ConsumerRecord) SchemaRegistryClient(io.confluent.kafka.schemaregistry.client.SchemaRegistryClient) Test(org.junit.Test)

Example 2 with KafkaAvroSerializer

Use of io.confluent.kafka.serializers.KafkaAvroSerializer in the ksql project by confluentinc.

From the class KsqlGenericRowAvroDeserializerTest, method getSerializedRow.

/**
 * Serializes a {@code GenericRow} into the Confluent Avro wire format for the given topic.
 *
 * @param topicName            topic whose subject the schema is registered under
 * @param schemaRegistryClient registry client backing the serializer
 * @param rowAvroSchema        Avro record schema; its fields are assumed to align
 *                             positionally with the row's columns
 * @param genericRow           row whose columns are copied into the Avro record
 * @return the serialized bytes in Confluent wire format
 */
private byte[] getSerializedRow(String topicName, SchemaRegistryClient schemaRegistryClient, Schema rowAvroSchema, GenericRow genericRow) {
    // Fix: parameterized map instead of the raw type (eliminates unchecked warnings).
    Map<String, Object> config = new HashMap<>();
    // Automatically register the schema in the Schema Registry if it has not been registered.
    config.put(AbstractKafkaAvroSerDeConfig.AUTO_REGISTER_SCHEMAS, true);
    config.put(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, "");
    KafkaAvroSerializer kafkaAvroSerializer = new KafkaAvroSerializer(schemaRegistryClient, config);
    GenericRecord avroRecord = new GenericData.Record(rowAvroSchema);
    List<Schema.Field> fields = rowAvroSchema.getFields();
    for (int i = 0; i < genericRow.getColumns().size(); i++) {
        Schema.Field field = fields.get(i);
        Object column = genericRow.getColumns().get(i);
        if (field.schema().getType() == Schema.Type.ARRAY) {
            // Avro expects array-typed fields as a List, not an Object[].
            avroRecord.put(field.name(), Arrays.asList((Object[]) column));
        } else {
            avroRecord.put(field.name(), column);
        }
    }
    return kafkaAvroSerializer.serialize(topicName, avroRecord);
}
Also used : HashMap(java.util.HashMap) KafkaAvroSerializer(io.confluent.kafka.serializers.KafkaAvroSerializer) GenericRecord(org.apache.avro.generic.GenericRecord) GenericRecord(org.apache.avro.generic.GenericRecord) HashMap(java.util.HashMap) Map(java.util.Map)

Example 3 with KafkaAvroSerializer

Use of io.confluent.kafka.serializers.KafkaAvroSerializer in the incubator-gobblin project by apache.

From the class KafkaDeserializerExtractorTest, method testConfluentAvroDeserializerForSchemaEvolution.

@Test
public void testConfluentAvroDeserializerForSchemaEvolution() throws IOException, RestClientException, SchemaRegistryException {
    // Verifies that a record written with schema v1 can be decoded against an
    // evolved schema v2 (v2 adds an optional string field, read as null).
    WorkUnitState mockWorkUnitState = getMockWorkUnitState();
    mockWorkUnitState.setProp("schema.registry.url", TEST_URL);
    // v1: a single required string field.
    Schema schemaV1 = SchemaBuilder.record(TEST_RECORD_NAME).namespace(TEST_NAMESPACE).fields().name(TEST_FIELD_NAME).type().stringType().noDefault().endRecord();
    // v2: same required field plus an optional second string field.
    Schema schemaV2 = SchemaBuilder.record(TEST_RECORD_NAME).namespace(TEST_NAMESPACE).fields().name(TEST_FIELD_NAME).type().stringType().noDefault().optionalString(TEST_FIELD_NAME2).endRecord();
    GenericRecord testGenericRecord = new GenericRecordBuilder(schemaV1).set(TEST_FIELD_NAME, "testValue").build();
    // Writer-side registry resolves every schema id to v1, so the payload is encoded with v1.
    SchemaRegistryClient mockSchemaRegistryClient = mock(SchemaRegistryClient.class);
    when(mockSchemaRegistryClient.getByID(any(Integer.class))).thenReturn(schemaV1);
    Serializer<Object> kafkaEncoder = new KafkaAvroSerializer(mockSchemaRegistryClient);
    Deserializer<Object> kafkaDecoder = new KafkaAvroDeserializer(mockSchemaRegistryClient);
    ByteBuffer testGenericRecordByteBuffer = ByteBuffer.wrap(kafkaEncoder.serialize(TEST_TOPIC_NAME, testGenericRecord));
    // Reader-side registry reports v2 as the latest schema for the topic.
    KafkaSchemaRegistry<Integer, Schema> mockKafkaSchemaRegistry = mock(KafkaSchemaRegistry.class);
    when(mockKafkaSchemaRegistry.getLatestSchemaByTopic(TEST_TOPIC_NAME)).thenReturn(schemaV2);
    KafkaDeserializerExtractor kafkaDecoderExtractor = new KafkaDeserializerExtractor(mockWorkUnitState, Optional.fromNullable(Deserializers.CONFLUENT_AVRO), kafkaDecoder, mockKafkaSchemaRegistry);
    // NOTE(review): kafkaDecoderExtractor is NOT a mock; this when(...) presumably
    // works by piggybacking on the last interaction getSchema() makes with
    // mockKafkaSchemaRegistry — confirm before restructuring this stubbing.
    when(kafkaDecoderExtractor.getSchema()).thenReturn(schemaV2);
    ByteArrayBasedKafkaRecord mockMessageAndOffset = getMockMessageAndOffset(testGenericRecordByteBuffer);
    GenericRecord received = (GenericRecord) kafkaDecoderExtractor.decodeRecord(mockMessageAndOffset);
    // The evolved field appears with its null default after decoding with v2.
    Assert.assertEquals(received.toString(), "{\"testField\": \"testValue\", \"testField2\": null}");
}
Also used : WorkUnitState(org.apache.gobblin.configuration.WorkUnitState) Schema(org.apache.avro.Schema) KafkaAvroSerializer(io.confluent.kafka.serializers.KafkaAvroSerializer) KafkaAvroDeserializer(io.confluent.kafka.serializers.KafkaAvroDeserializer) ByteBuffer(java.nio.ByteBuffer) ByteArrayBasedKafkaRecord(org.apache.gobblin.kafka.client.ByteArrayBasedKafkaRecord) GenericRecordBuilder(org.apache.avro.generic.GenericRecordBuilder) GenericRecord(org.apache.avro.generic.GenericRecord) SchemaRegistryClient(io.confluent.kafka.schemaregistry.client.SchemaRegistryClient) Test(org.testng.annotations.Test)

Example 4 with KafkaAvroSerializer

Use of io.confluent.kafka.serializers.KafkaAvroSerializer in the registry project by hortonworks.

From the class ConfluentRegistryCompatibleResourceTest, method testConfluentSerDes.

@Test
public void testConfluentSerDes() throws Exception {
    // Build a record with both fields populated from the generic test schema.
    org.apache.avro.Schema avroSchema = new org.apache.avro.Schema.Parser().parse(GENERIC_TEST_RECORD_SCHEMA);
    GenericRecord input = new GenericRecordBuilder(avroSchema).set("field1", "some value").set("field2", "some other value").build();
    // Point the Confluent serde pair at this test's registry endpoint.
    Map<String, Object> serdeConfig = new HashMap<>();
    serdeConfig.put(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, rootTarget.getUri().toString());
    KafkaAvroSerializer serializer = new KafkaAvroSerializer();
    serializer.configure(serdeConfig, false);
    byte[] serialized = serializer.serialize("topic", input);
    // Round-trip the bytes through the matching deserializer.
    KafkaAvroDeserializer deserializer = new KafkaAvroDeserializer();
    deserializer.configure(serdeConfig, false);
    GenericRecord roundTripped = (GenericRecord) deserializer.deserialize("topic", serialized);
    LOG.info(roundTripped.toString());
}
Also used : HashMap(java.util.HashMap) Schema(com.hortonworks.registries.schemaregistry.webservice.ConfluentSchemaRegistryCompatibleResource.Schema) KafkaAvroSerializer(io.confluent.kafka.serializers.KafkaAvroSerializer) KafkaAvroDeserializer(io.confluent.kafka.serializers.KafkaAvroDeserializer) SchemaString(com.hortonworks.registries.schemaregistry.webservice.ConfluentSchemaRegistryCompatibleResource.SchemaString) GenericRecordBuilder(org.apache.avro.generic.GenericRecordBuilder) GenericRecord(org.apache.avro.generic.GenericRecord) Test(org.junit.Test)

Example 5 with KafkaAvroSerializer

Use of io.confluent.kafka.serializers.KafkaAvroSerializer in the incubator-gobblin project by apache.

From the class KafkaDeserializerExtractorTest, method testConfluentAvroDeserializer.

@Test
public void testConfluentAvroDeserializer() throws IOException, RestClientException {
    WorkUnitState workUnitState = getMockWorkUnitState();
    workUnitState.setProp("schema.registry.url", TEST_URL);
    // Single-required-string-field record schema used for the round trip.
    Schema recordSchema = SchemaBuilder.record(TEST_RECORD_NAME).namespace(TEST_NAMESPACE).fields().name(TEST_FIELD_NAME).type().stringType().noDefault().endRecord();
    GenericRecord expectedRecord = new GenericRecordBuilder(recordSchema).set(TEST_FIELD_NAME, "testValue").build();
    // Mock registry client resolves every schema id to the test schema.
    SchemaRegistryClient registryClient = mock(SchemaRegistryClient.class);
    when(registryClient.getByID(any(Integer.class))).thenReturn(recordSchema);
    Serializer<Object> encoder = new KafkaAvroSerializer(registryClient);
    Deserializer<Object> decoder = new KafkaAvroDeserializer(registryClient);
    ByteBuffer payload = ByteBuffer.wrap(encoder.serialize(TEST_TOPIC_NAME, expectedRecord));
    KafkaSchemaRegistry<Integer, Schema> kafkaSchemaRegistry = mock(KafkaSchemaRegistry.class);
    KafkaDeserializerExtractor extractor = new KafkaDeserializerExtractor(workUnitState, Optional.fromNullable(Deserializers.CONFLUENT_AVRO), decoder, kafkaSchemaRegistry);
    ByteArrayBasedKafkaRecord kafkaRecord = getMockMessageAndOffset(payload);
    // Decoding the serialized payload must yield the original record.
    Assert.assertEquals(extractor.decodeRecord(kafkaRecord), expectedRecord);
}
Also used : WorkUnitState(org.apache.gobblin.configuration.WorkUnitState) Schema(org.apache.avro.Schema) KafkaAvroSerializer(io.confluent.kafka.serializers.KafkaAvroSerializer) KafkaAvroDeserializer(io.confluent.kafka.serializers.KafkaAvroDeserializer) ByteBuffer(java.nio.ByteBuffer) ByteArrayBasedKafkaRecord(org.apache.gobblin.kafka.client.ByteArrayBasedKafkaRecord) GenericRecordBuilder(org.apache.avro.generic.GenericRecordBuilder) GenericRecord(org.apache.avro.generic.GenericRecord) SchemaRegistryClient(io.confluent.kafka.schemaregistry.client.SchemaRegistryClient) Test(org.testng.annotations.Test)

Aggregations

KafkaAvroSerializer (io.confluent.kafka.serializers.KafkaAvroSerializer)5 GenericRecord (org.apache.avro.generic.GenericRecord)4 SchemaRegistryClient (io.confluent.kafka.schemaregistry.client.SchemaRegistryClient)3 KafkaAvroDeserializer (io.confluent.kafka.serializers.KafkaAvroDeserializer)3 HashMap (java.util.HashMap)3 Schema (org.apache.avro.Schema)3 GenericRecordBuilder (org.apache.avro.generic.GenericRecordBuilder)3 ByteBuffer (java.nio.ByteBuffer)2 WorkUnitState (org.apache.gobblin.configuration.WorkUnitState)2 ByteArrayBasedKafkaRecord (org.apache.gobblin.kafka.client.ByteArrayBasedKafkaRecord)2 Test (org.junit.Test)2 Test (org.testng.annotations.Test)2 Schema (com.hortonworks.registries.schemaregistry.webservice.ConfluentSchemaRegistryCompatibleResource.Schema)1 SchemaString (com.hortonworks.registries.schemaregistry.webservice.ConfluentSchemaRegistryCompatibleResource.SchemaString)1 Map (java.util.Map)1 GenericData (org.apache.avro.generic.GenericData)1 ConsumerRecord (org.apache.kafka.clients.consumer.ConsumerRecord)1 Bytes (org.apache.kafka.common.utils.Bytes)1