
Example 1 with KafkaAvroDeserializer

Use of com.hortonworks.registries.schemaregistry.serdes.avro.kafka.KafkaAvroDeserializer in the hortonworks/registry project.

From the class ConfluentProtocolCompatibleTest, method testRegistryProduceConfluentConsume.

@Test
public void testRegistryProduceConfluentConsume() throws Exception {
    String configPath = new File(Resources.getResource("schema-registry-test.yaml").toURI()).getAbsolutePath();
    LocalSchemaRegistryServer localSchemaRegistryServer = new LocalSchemaRegistryServer(configPath);
    try {
        localSchemaRegistryServer.start();
        final String confluentUrl = String.format("http://localhost:%d/api/v1/confluent", localSchemaRegistryServer.getLocalPort());
        final String registryUrl = String.format("http://localhost:%d/api/v1", localSchemaRegistryServer.getLocalPort());
        Map<String, Object> confluentConfig = new HashMap<>();
        confluentConfig.put(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, confluentUrl);
        Map<String, Object> registryConfig = new HashMap<>();
        registryConfig.put(SchemaRegistryClient.Configuration.SCHEMA_REGISTRY_URL.name(), registryUrl);
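        // Write payloads in the Confluent wire format so the Confluent deserializer below can read them.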
        registryConfig.put(SERDES_PROTOCOL_VERSION, SerDesProtocolHandlerRegistry.CONFLUENT_VERSION_PROTOCOL);
        Schema schema = new Schema.Parser().parse(GENERIC_TEST_RECORD_SCHEMA);
        GenericRecord record = new GenericRecordBuilder(schema).set("field1", "some value").set("field2", "some other value").build();
        KafkaAvroSerializer kafkaAvroSerializer = new KafkaAvroSerializer();
        kafkaAvroSerializer.configure(registryConfig, false);
        byte[] bytes = kafkaAvroSerializer.serialize("topic", record);
        KafkaAvroDeserializer kafkaAvroDeserializer = new KafkaAvroDeserializer();
        kafkaAvroDeserializer.configure(registryConfig, false);
        GenericRecord registryResult = (GenericRecord) kafkaAvroDeserializer.deserialize("topic", bytes);
        LOG.info(registryResult.toString());
        io.confluent.kafka.serializers.KafkaAvroDeserializer confluentKafkaAvroDeserializer = new io.confluent.kafka.serializers.KafkaAvroDeserializer();
        confluentKafkaAvroDeserializer.configure(confluentConfig, false);
        GenericRecord confluentResult = (GenericRecord) confluentKafkaAvroDeserializer.deserialize("topic", bytes);
        LOG.info(confluentResult.toString());
        Assert.assertEquals(record, registryResult);
        Assert.assertEquals(record, confluentResult);
        Assert.assertEquals(registryResult, confluentResult);
    } finally {
        localSchemaRegistryServer.stop();
    }
}
Also used: HashMap (java.util.HashMap), LocalSchemaRegistryServer (com.hortonworks.registries.schemaregistry.webservice.LocalSchemaRegistryServer), Schema (org.apache.avro.Schema), KafkaAvroSerializer (com.hortonworks.registries.schemaregistry.serdes.avro.kafka.KafkaAvroSerializer), KafkaAvroDeserializer (com.hortonworks.registries.schemaregistry.serdes.avro.kafka.KafkaAvroDeserializer), GenericRecordBuilder (org.apache.avro.generic.GenericRecordBuilder), GenericRecord (org.apache.avro.generic.GenericRecord), File (java.io.File), Test (org.junit.Test)
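
In the test above the serializer and deserializer are configured by hand. In a producer application the same two registry settings would normally travel through the Kafka producer configuration, which Kafka forwards to the serializer's configure() call. The fragment below is a minimal sketch under that assumption: the broker address, registry URL, and topic are placeholders, SERDES_PROTOCOL_VERSION is taken to be the same statically imported constant used in the test, and record is the GenericRecord built above.

// Sketch only: KafkaProducer and ProducerRecord come from org.apache.kafka.clients.producer.
Map<String, Object> producerConfig = new HashMap<>();
producerConfig.put("bootstrap.servers", "localhost:9092");
producerConfig.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
producerConfig.put("value.serializer", KafkaAvroSerializer.class.getName());
producerConfig.put(SchemaRegistryClient.Configuration.SCHEMA_REGISTRY_URL.name(), "http://localhost:9090/api/v1");
// Emit the Confluent wire format so Confluent consumers can read the records.
producerConfig.put(SERDES_PROTOCOL_VERSION, SerDesProtocolHandlerRegistry.CONFLUENT_VERSION_PROTOCOL);
try (KafkaProducer<String, GenericRecord> producer = new KafkaProducer<>(producerConfig)) {
    producer.send(new ProducerRecord<>("topic", record));
}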

Example 2 with KafkaAvroDeserializer

Use of com.hortonworks.registries.schemaregistry.serdes.avro.kafka.KafkaAvroDeserializer in the hortonworks/registry project.

From the class ConfluentProtocolCompatibleTest, method testConfluentProduceRegistryConsume.

@Test
public void testConfluentProduceRegistryConsume() throws Exception {
    String configPath = new File(Resources.getResource("schema-registry-test.yaml").toURI()).getAbsolutePath();
    LocalSchemaRegistryServer localSchemaRegistryServer = new LocalSchemaRegistryServer(configPath);
    try {
        localSchemaRegistryServer.start();
        final String confluentUrl = String.format("http://localhost:%d/api/v1/confluent", localSchemaRegistryServer.getLocalPort());
        final String registryUrl = String.format("http://localhost:%d/api/v1", localSchemaRegistryServer.getLocalPort());
        Map<String, Object> confluentConfig = new HashMap<>();
        confluentConfig.put(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, confluentUrl);
        Map<String, Object> registryConfig = new HashMap<>();
        registryConfig.put(SchemaRegistryClient.Configuration.SCHEMA_REGISTRY_URL.name(), registryUrl);
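        // No protocol override is set here; the registry deserializer still reads the Confluent-produced bytes below.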
        Schema schema = new Schema.Parser().parse(GENERIC_TEST_RECORD_SCHEMA);
        GenericRecord record = new GenericRecordBuilder(schema).set("field1", "some value").set("field2", "some other value").build();
        io.confluent.kafka.serializers.KafkaAvroSerializer kafkaAvroSerializer = new io.confluent.kafka.serializers.KafkaAvroSerializer();
        kafkaAvroSerializer.configure(confluentConfig, false);
        byte[] bytes = kafkaAvroSerializer.serialize("topic", record);
        io.confluent.kafka.serializers.KafkaAvroDeserializer confluentKafkaAvroDeserializer = new io.confluent.kafka.serializers.KafkaAvroDeserializer();
        confluentKafkaAvroDeserializer.configure(confluentConfig, false);
        GenericRecord confluentResult = (GenericRecord) confluentKafkaAvroDeserializer.deserialize("topic", bytes);
        LOG.info(confluentResult.toString());
        KafkaAvroDeserializer kafkaAvroDeserializer = new KafkaAvroDeserializer();
        kafkaAvroDeserializer.configure(registryConfig, false);
        GenericRecord registryResult = (GenericRecord) kafkaAvroDeserializer.deserialize("topic", bytes);
        LOG.info(registryResult.toString());
        Assert.assertEquals(record, registryResult);
        Assert.assertEquals(record, confluentResult);
        Assert.assertEquals(registryResult, confluentResult);
    } finally {
        localSchemaRegistryServer.stop();
    }
}
Also used: HashMap (java.util.HashMap), LocalSchemaRegistryServer (com.hortonworks.registries.schemaregistry.webservice.LocalSchemaRegistryServer), Schema (org.apache.avro.Schema), KafkaAvroSerializer (com.hortonworks.registries.schemaregistry.serdes.avro.kafka.KafkaAvroSerializer), KafkaAvroDeserializer (com.hortonworks.registries.schemaregistry.serdes.avro.kafka.KafkaAvroDeserializer), GenericRecordBuilder (org.apache.avro.generic.GenericRecordBuilder), GenericRecord (org.apache.avro.generic.GenericRecord), File (java.io.File), Test (org.junit.Test)
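
Going the other way, the registry deserializer in this test is configured with nothing but the registry URL and still reads the Confluent-produced bytes. On the producing side, a Confluent client would normally be wired through the Kafka configuration rather than constructed directly; the sketch below assumes that setup, with the Confluent serializer pointed at the registry's Confluent-compatible endpoint (broker, endpoint, and topic are placeholders, and record is the GenericRecord built above).

// Sketch only: KafkaProducer and ProducerRecord come from org.apache.kafka.clients.producer.
Map<String, Object> producerConfig = new HashMap<>();
producerConfig.put("bootstrap.servers", "localhost:9092");
producerConfig.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
producerConfig.put("value.serializer", io.confluent.kafka.serializers.KafkaAvroSerializer.class.getName());
// Point the Confluent client at the registry's Confluent-compatible REST path.
producerConfig.put(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, "http://localhost:9090/api/v1/confluent");
try (KafkaProducer<String, GenericRecord> producer = new KafkaProducer<>(producerConfig)) {
    producer.send(new ProducerRecord<>("topic", record));
}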

Example 3 with KafkaAvroDeserializer

Use of com.hortonworks.registries.schemaregistry.serdes.avro.kafka.KafkaAvroDeserializer in the hortonworks/registry project.

From the class KafkaAvroSerDesTest, method _testKafkaSerDes.

private void _testKafkaSerDes(String topic, boolean isKey, Object payload) {
    KafkaAvroSerializer avroSerializer = new KafkaAvroSerializer();
    Map<String, Object> schemaRegistryClientConf = SCHEMA_REGISTRY_TEST_SERVER_CLIENT_WRAPPER.exportClientConf(true);
    avroSerializer.configure(schemaRegistryClientConf, isKey);
    KafkaAvroDeserializer avroDeserializer = new KafkaAvroDeserializer();
    avroDeserializer.configure(schemaRegistryClientConf, isKey);
    byte[] serializedData = avroSerializer.serialize(topic, payload);
    Object deserializedObj = avroDeserializer.deserialize(topic, serializedData);
    if (payload instanceof byte[]) {
        Assert.assertArrayEquals((byte[]) payload, (byte[]) deserializedObj);
    } else {
        AvroSchemaRegistryClientUtil.assertAvroObjs(payload, deserializedObj);
    }
}
Also used: KafkaAvroSerializer (com.hortonworks.registries.schemaregistry.serdes.avro.kafka.KafkaAvroSerializer), KafkaAvroDeserializer (com.hortonworks.registries.schemaregistry.serdes.avro.kafka.KafkaAvroDeserializer)
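
The helper is private to KafkaAvroSerDesTest, so it is only reachable from tests in the same class. The callers below are hypothetical (method names, topic, and payloads are illustrative, not taken from the project) and only show the intended call shape: any Avro-compatible payload, with isKey selecting the key or value serde.

// Hypothetical callers; names and payloads are illustrative only.
@Test
public void testGenericRecordValueSerDes() {
    // genericRecord would be an Avro GenericRecord built elsewhere in the test class.
    _testKafkaSerDes("device-events", false, genericRecord);
}

@Test
public void testByteArrayKeySerDes() {
    _testKafkaSerDes("device-events", true, new byte[] {1, 2, 3});
}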

Aggregations

KafkaAvroDeserializer (com.hortonworks.registries.schemaregistry.serdes.avro.kafka.KafkaAvroDeserializer): 3 usages
KafkaAvroSerializer (com.hortonworks.registries.schemaregistry.serdes.avro.kafka.KafkaAvroSerializer): 3 usages
LocalSchemaRegistryServer (com.hortonworks.registries.schemaregistry.webservice.LocalSchemaRegistryServer): 2 usages
File (java.io.File): 2 usages
HashMap (java.util.HashMap): 2 usages
Schema (org.apache.avro.Schema): 2 usages
GenericRecord (org.apache.avro.generic.GenericRecord): 2 usages
GenericRecordBuilder (org.apache.avro.generic.GenericRecordBuilder): 2 usages
Test (org.junit.Test): 2 usages
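
In a consuming application these classes are usually not instantiated by hand at all: the registry KafkaAvroDeserializer can be given to the Kafka consumer by class name, and the consumer configuration (which Kafka forwards to the deserializer's configure() call) carries the registry URL. A minimal sketch under that assumption, with broker, group id, registry URL, and topic as placeholders:

// Sketch only: KafkaConsumer and ConsumerRecord come from org.apache.kafka.clients.consumer;
// Collections and Duration come from java.util and java.time.
Map<String, Object> consumerConfig = new HashMap<>();
consumerConfig.put("bootstrap.servers", "localhost:9092");
consumerConfig.put("group.id", "registry-avro-consumer");
consumerConfig.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
consumerConfig.put("value.deserializer", KafkaAvroDeserializer.class.getName());
consumerConfig.put(SchemaRegistryClient.Configuration.SCHEMA_REGISTRY_URL.name(), "http://localhost:9090/api/v1");
try (KafkaConsumer<String, Object> consumer = new KafkaConsumer<>(consumerConfig)) {
    consumer.subscribe(Collections.singletonList("topic"));
    for (ConsumerRecord<String, Object> consumed : consumer.poll(Duration.ofSeconds(1))) {
        // Values arrive as deserialized Avro objects (e.g. GenericRecord).
        System.out.println(consumed.value());
    }
}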