Example usage of com.hortonworks.registries.schemaregistry.serdes.avro.kafka.KafkaAvroDeserializer in the hortonworks/registry project.
From the class ConfluentProtocolCompatibleTest, method testRegistryProduceConfluentConsume:
/**
 * Verifies cross-compatibility in the registry-to-Confluent direction: a record
 * serialized with the registry's {@code KafkaAvroSerializer} (configured to emit the
 * Confluent wire protocol) must be readable both by the registry's own
 * {@code KafkaAvroDeserializer} and by Confluent's {@code KafkaAvroDeserializer}.
 *
 * @throws Exception if the local schema registry server fails to start or the
 *         test resource YAML cannot be resolved
 */
@Test
public void testRegistryProduceConfluentConsume() throws Exception {
    String configPath = new File(Resources.getResource("schema-registry-test.yaml").toURI()).getAbsolutePath();
    LocalSchemaRegistryServer localSchemaRegistryServer = new LocalSchemaRegistryServer(configPath);
    try {
        localSchemaRegistryServer.start();
        // Hoist the port lookup so both URLs are guaranteed to target the same server.
        final int port = localSchemaRegistryServer.getLocalPort();
        final String confluentUrl = String.format("http://localhost:%d/api/v1/confluent", port);
        final String registryUrl = String.format("http://localhost:%d/api/v1", port);

        Map<String, Object> confluentConfig = new HashMap<>();
        confluentConfig.put(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, confluentUrl);

        Map<String, Object> registryConfig = new HashMap<>();
        registryConfig.put(SchemaRegistryClient.Configuration.SCHEMA_REGISTRY_URL.name(), registryUrl);
        // Force the registry serializer to write the Confluent wire format so that
        // Confluent's deserializer can parse the payload.
        registryConfig.put(SERDES_PROTOCOL_VERSION, SerDesProtocolHandlerRegistry.CONFLUENT_VERSION_PROTOCOL);

        Schema schema = new Schema.Parser().parse(GENERIC_TEST_RECORD_SCHEMA);
        GenericRecord record = new GenericRecordBuilder(schema)
                .set("field1", "some value")
                .set("field2", "some other value")
                .build();

        // Produce with the registry serializer; close it afterwards to release
        // its underlying schema-registry client (Serializer is Closeable).
        KafkaAvroSerializer kafkaAvroSerializer = new KafkaAvroSerializer();
        kafkaAvroSerializer.configure(registryConfig, false);
        byte[] bytes = kafkaAvroSerializer.serialize("topic", record);
        kafkaAvroSerializer.close();

        // Consume with the registry's own deserializer.
        KafkaAvroDeserializer kafkaAvroDeserializer = new KafkaAvroDeserializer();
        kafkaAvroDeserializer.configure(registryConfig, false);
        GenericRecord registryResult = (GenericRecord) kafkaAvroDeserializer.deserialize("topic", bytes);
        LOG.info(registryResult.toString());
        kafkaAvroDeserializer.close();

        // Consume the same bytes with Confluent's deserializer.
        io.confluent.kafka.serializers.KafkaAvroDeserializer confluentKafkaAvroDeserializer =
                new io.confluent.kafka.serializers.KafkaAvroDeserializer();
        confluentKafkaAvroDeserializer.configure(confluentConfig, false);
        GenericRecord confluentResult = (GenericRecord) confluentKafkaAvroDeserializer.deserialize("topic", bytes);
        LOG.info(confluentResult.toString());
        confluentKafkaAvroDeserializer.close();

        Assert.assertEquals(record, registryResult);
        Assert.assertEquals(record, confluentResult);
        Assert.assertEquals(registryResult, confluentResult);
    } finally {
        localSchemaRegistryServer.stop();
    }
}
Example usage of com.hortonworks.registries.schemaregistry.serdes.avro.kafka.KafkaAvroDeserializer in the hortonworks/registry project.
From the class ConfluentProtocolCompatibleTest, method testConfluentProduceRegistryConsume:
/**
 * Verifies cross-compatibility in the Confluent-to-registry direction: a record
 * serialized with Confluent's {@code KafkaAvroSerializer} must be readable both by
 * Confluent's {@code KafkaAvroDeserializer} and by the registry's
 * {@code KafkaAvroDeserializer}.
 *
 * @throws Exception if the local schema registry server fails to start or the
 *         test resource YAML cannot be resolved
 */
@Test
public void testConfluentProduceRegistryConsume() throws Exception {
    String configPath = new File(Resources.getResource("schema-registry-test.yaml").toURI()).getAbsolutePath();
    LocalSchemaRegistryServer localSchemaRegistryServer = new LocalSchemaRegistryServer(configPath);
    try {
        localSchemaRegistryServer.start();
        // Hoist the port lookup so both URLs are guaranteed to target the same server.
        final int port = localSchemaRegistryServer.getLocalPort();
        final String confluentUrl = String.format("http://localhost:%d/api/v1/confluent", port);
        final String registryUrl = String.format("http://localhost:%d/api/v1", port);

        Map<String, Object> confluentConfig = new HashMap<>();
        confluentConfig.put(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, confluentUrl);

        Map<String, Object> registryConfig = new HashMap<>();
        registryConfig.put(SchemaRegistryClient.Configuration.SCHEMA_REGISTRY_URL.name(), registryUrl);

        Schema schema = new Schema.Parser().parse(GENERIC_TEST_RECORD_SCHEMA);
        GenericRecord record = new GenericRecordBuilder(schema)
                .set("field1", "some value")
                .set("field2", "some other value")
                .build();

        // Produce with Confluent's serializer; close it afterwards to release
        // its underlying schema-registry client (Serializer is Closeable).
        io.confluent.kafka.serializers.KafkaAvroSerializer kafkaAvroSerializer =
                new io.confluent.kafka.serializers.KafkaAvroSerializer();
        kafkaAvroSerializer.configure(confluentConfig, false);
        byte[] bytes = kafkaAvroSerializer.serialize("topic", record);
        kafkaAvroSerializer.close();

        // Consume with Confluent's own deserializer.
        io.confluent.kafka.serializers.KafkaAvroDeserializer confluentKafkaAvroDeserializer =
                new io.confluent.kafka.serializers.KafkaAvroDeserializer();
        confluentKafkaAvroDeserializer.configure(confluentConfig, false);
        GenericRecord confluentResult = (GenericRecord) confluentKafkaAvroDeserializer.deserialize("topic", bytes);
        LOG.info(confluentResult.toString());
        confluentKafkaAvroDeserializer.close();

        // Consume the same bytes with the registry's deserializer.
        KafkaAvroDeserializer kafkaAvroDeserializer = new KafkaAvroDeserializer();
        kafkaAvroDeserializer.configure(registryConfig, false);
        GenericRecord registryResult = (GenericRecord) kafkaAvroDeserializer.deserialize("topic", bytes);
        LOG.info(registryResult.toString());
        kafkaAvroDeserializer.close();

        Assert.assertEquals(record, registryResult);
        Assert.assertEquals(record, confluentResult);
        Assert.assertEquals(registryResult, confluentResult);
    } finally {
        localSchemaRegistryServer.stop();
    }
}
Example usage of com.hortonworks.registries.schemaregistry.serdes.avro.kafka.KafkaAvroDeserializer in the hortonworks/registry project.
From the class KafkaAvroSerDesTest, method _testKafkaSerDes:
/**
 * Round-trips {@code payload} through the registry's KafkaAvroSerializer and
 * KafkaAvroDeserializer and asserts the deserialized result equals the original.
 *
 * @param topic   Kafka topic name passed to the serde pair
 * @param isKey   whether the payload is treated as a record key during configure
 * @param payload Avro-compatible object (or byte[]) to serialize and read back
 */
private void _testKafkaSerDes(String topic, boolean isKey, Object payload) {
    Map<String, Object> schemaRegistryClientConf = SCHEMA_REGISTRY_TEST_SERVER_CLIENT_WRAPPER.exportClientConf(true);
    KafkaAvroSerializer avroSerializer = new KafkaAvroSerializer();
    avroSerializer.configure(schemaRegistryClientConf, isKey);
    KafkaAvroDeserializer avroDeserializer = new KafkaAvroDeserializer();
    avroDeserializer.configure(schemaRegistryClientConf, isKey);
    try {
        byte[] serializedData = avroSerializer.serialize(topic, payload);
        Object deserializedObj = avroDeserializer.deserialize(topic, serializedData);
        if (payload instanceof byte[]) {
            // byte[].equals() is identity-based, so compare array contents instead.
            Assert.assertArrayEquals((byte[]) payload, (byte[]) deserializedObj);
        } else {
            AvroSchemaRegistryClientUtil.assertAvroObjs(payload, deserializedObj);
        }
    } finally {
        // Serializer/Deserializer are Closeable; release their registry clients
        // even when an assertion fails.
        avroSerializer.close();
        avroDeserializer.close();
    }
}
Aggregations