Use of io.confluent.kafka.serializers.KafkaAvroDeserializer in project ksql by confluentinc:
the class KsqlGenericRowAvroDeserializerTest, method shouldCreateCorrectRow.
@Test
public void shouldCreateCorrectRow() {
    // Mock the inner Confluent deserializer so the test exercises only the
    // mapping from the returned Avro GenericRecord to a KSQL GenericRow.
    KafkaAvroDeserializer kafkaAvroDeserializer = EasyMock.mock(KafkaAvroDeserializer.class);
    EasyMock.expect(kafkaAvroDeserializer.deserialize(EasyMock.anyString(), EasyMock.anyObject()))
        .andReturn(genericRecord);
    EasyMock.replay(kafkaAvroDeserializer);
    KsqlGenericRowAvroDeserializer ksqlGenericRowAvroDeserializer =
        new KsqlGenericRowAvroDeserializer(schema, kafkaAvroDeserializer, false);
    // Topic name and payload are irrelevant here: the mocked deserializer ignores them.
    GenericRow genericRow = ksqlGenericRowAvroDeserializer.deserialize("", new byte[] {});
    assertThat("Column number does not match.", genericRow.getColumns().size(), equalTo(6));
    assertThat("Invalid column value.", genericRow.getColumns().get(0), equalTo(1511897796092L));
    assertThat("Invalid column value.", genericRow.getColumns().get(1), equalTo(1L));
    assertThat("Invalid column value.", ((Double[]) genericRow.getColumns().get(4))[0], equalTo(100.0));
    assertThat("Invalid column value.", ((Map<String, Double>) genericRow.getColumns().get(5)).get("key1"), equalTo(100.0));
}
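The schema and genericRecord fields come from the enclosing test class and are not shown here. A minimal sketch of what such a fixture could look like, assuming the six-column orders schema implied by the assertions above (and by the serializer test further down this page); names and values are illustrative, the real fixture may differ:

// Hypothetical fixture: an Avro record matching the six columns checked above.
Schema avroSchema = SchemaBuilder.record("KsqlOrder").fields()
    .requiredLong("ORDERTIME")
    .requiredLong("ORDERID")
    .requiredString("ITEMID")
    .requiredDouble("ORDERUNITS")
    .name("ARRAYCOL").type().array().items().doubleType().noDefault()
    .name("MAPCOL").type().map().values().doubleType().noDefault()
    .endRecord();
GenericRecord genericRecord = new GenericRecordBuilder(avroSchema)
    .set("ORDERTIME", 1511897796092L)
    .set("ORDERID", 1L)
    .set("ITEMID", "item_1")
    .set("ORDERUNITS", 10.0)
    .set("ARRAYCOL", Collections.singletonList(100.0))
    .set("MAPCOL", Collections.singletonMap("key1", 100.0))
    .build();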
Use of io.confluent.kafka.serializers.KafkaAvroDeserializer in project incubator-gobblin by apache:
the class KafkaDeserializerExtractorTest, method testConfluentAvroDeserializerForSchemaEvolution.
@Test
public void testConfluentAvroDeserializerForSchemaEvolution() throws IOException, RestClientException, SchemaRegistryException {
    WorkUnitState mockWorkUnitState = getMockWorkUnitState();
    mockWorkUnitState.setProp("schema.registry.url", TEST_URL);
    // Writer schema (v1) has a single string field; reader schema (v2) adds an
    // optional second field, exercising Avro schema evolution on read.
    Schema schemaV1 = SchemaBuilder.record(TEST_RECORD_NAME).namespace(TEST_NAMESPACE)
        .fields()
        .name(TEST_FIELD_NAME).type().stringType().noDefault()
        .endRecord();
    Schema schemaV2 = SchemaBuilder.record(TEST_RECORD_NAME).namespace(TEST_NAMESPACE)
        .fields()
        .name(TEST_FIELD_NAME).type().stringType().noDefault()
        .optionalString(TEST_FIELD_NAME2)
        .endRecord();
    GenericRecord testGenericRecord = new GenericRecordBuilder(schemaV1).set(TEST_FIELD_NAME, "testValue").build();
    SchemaRegistryClient mockSchemaRegistryClient = mock(SchemaRegistryClient.class);
    when(mockSchemaRegistryClient.getByID(any(Integer.class))).thenReturn(schemaV1);
    Serializer<Object> kafkaEncoder = new KafkaAvroSerializer(mockSchemaRegistryClient);
    Deserializer<Object> kafkaDecoder = new KafkaAvroDeserializer(mockSchemaRegistryClient);
    ByteBuffer testGenericRecordByteBuffer = ByteBuffer.wrap(kafkaEncoder.serialize(TEST_TOPIC_NAME, testGenericRecord));
    KafkaSchemaRegistry<Integer, Schema> mockKafkaSchemaRegistry = mock(KafkaSchemaRegistry.class);
    when(mockKafkaSchemaRegistry.getLatestSchemaByTopic(TEST_TOPIC_NAME)).thenReturn(schemaV2);
    KafkaDeserializerExtractor kafkaDecoderExtractor = new KafkaDeserializerExtractor(
        mockWorkUnitState, Optional.fromNullable(Deserializers.CONFLUENT_AVRO), kafkaDecoder, mockKafkaSchemaRegistry);
    // Have the extractor report the reader schema (v2).
    when(kafkaDecoderExtractor.getSchema()).thenReturn(schemaV2);
    ByteArrayBasedKafkaRecord mockMessageAndOffset = getMockMessageAndOffset(testGenericRecordByteBuffer);
    GenericRecord received = (GenericRecord) kafkaDecoderExtractor.decodeRecord(mockMessageAndOffset);
    // The record was written with v1 but read with v2, so the new optional
    // field is filled with its null default.
    Assert.assertEquals(received.toString(), "{\"testField\": \"testValue\", \"testField2\": null}");
}
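The bytes produced by KafkaAvroSerializer follow Confluent's wire format: a zero magic byte, a four-byte big-endian schema id, then the Avro binary payload. A quick sketch of peeling off that header from testGenericRecordByteBuffer, for illustration only (the extractor and deserializer above do this internally):

ByteBuffer buf = testGenericRecordByteBuffer.duplicate();
byte magic = buf.get();        // always 0 in the Confluent wire format
int schemaId = buf.getInt();   // id under which the writer schema was registered
byte[] avroPayload = new byte[buf.remaining()];
buf.get(avroPayload);          // raw Avro binary encoding of the record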
Use of io.confluent.kafka.serializers.KafkaAvroDeserializer in project flink by apache:
the class SQLClientSchemaRegistryITCase, method testWriting.
@Test
public void testWriting() throws Exception {
    String testUserBehaviorTopic = "test-user-behavior-" + UUID.randomUUID().toString();
    // Create the test topic with one partition and replication factor 1.
    kafkaClient.createTopic(1, 1, testUserBehaviorTopic);
    // The avro-confluent format registers value schemas under "<topic>-value".
    String behaviourSubject = testUserBehaviorTopic + "-value";
    List<String> sqlLines = Arrays.asList(
        "CREATE TABLE user_behavior (",
        " user_id BIGINT NOT NULL,",
        " item_id BIGINT,",
        " category_id BIGINT,",
        " behavior STRING,",
        " ts TIMESTAMP(3)",
        ") WITH (",
        " 'connector' = 'kafka',",
        " 'properties.bootstrap.servers' = '" + INTER_CONTAINER_KAFKA_ALIAS + ":9092',",
        " 'topic' = '" + testUserBehaviorTopic + "',",
        " 'format' = 'avro-confluent',",
        " 'avro-confluent.url' = 'http://" + INTER_CONTAINER_REGISTRY_ALIAS + ":8082'",
        ");",
        "",
        "INSERT INTO user_behavior VALUES (1, 1, 1, 'buy', TO_TIMESTAMP(FROM_UNIXTIME(1234)));");
    executeSqlStatements(sqlLines);
    // Writing through the table should have registered exactly one schema version.
    List<Integer> versions = getAllVersions(behaviourSubject);
    assertThat(versions.size(), equalTo(1));
    List<Object> userBehaviors = kafkaClient.readMessages(1, "test-group", testUserBehaviorTopic, new KafkaAvroDeserializer(registryClient));
    String schemaString = registryClient.getByVersion(behaviourSubject, versions.get(0), false).getSchema();
    Schema userBehaviorSchema = new Schema.Parser().parse(schemaString);
    GenericRecordBuilder recordBuilder = new GenericRecordBuilder(userBehaviorSchema);
    assertThat(userBehaviors, equalTo(Collections.singletonList(recordBuilder
        .set("user_id", 1L)
        .set("item_id", 1L)
        .set("category_id", 1L)
        .set("behavior", "buy")
        .set("ts", 1234000L)
        .build())));
}
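readMessages is a test utility of this IT case, but the same read can be done with a plain KafkaConsumer configured with KafkaAvroDeserializer. A minimal sketch; the bootstrap and registry addresses are placeholders, and values come back as GenericRecord since specific.avro.reader defaults to false:

Properties props = new Properties();
props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "kafka:9092");          // placeholder
props.put(ConsumerConfig.GROUP_ID_CONFIG, "test-group");
props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, KafkaAvroDeserializer.class.getName());
props.put(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, "http://registry:8082"); // placeholder
try (KafkaConsumer<String, GenericRecord> consumer = new KafkaConsumer<>(props)) {
    consumer.subscribe(Collections.singletonList(testUserBehaviorTopic));
    ConsumerRecords<String, GenericRecord> records = consumer.poll(Duration.ofSeconds(10));
    records.forEach(r -> System.out.println(r.value()));
}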
Use of io.confluent.kafka.serializers.KafkaAvroDeserializer in project registry by hortonworks:
the class ConfluentRegistryCompatibleResourceTest, method testConfluentSerDes.
@Test
public void testConfluentSerDes() throws Exception {
    org.apache.avro.Schema schema = new org.apache.avro.Schema.Parser().parse(GENERIC_TEST_RECORD_SCHEMA);
    GenericRecord record = new GenericRecordBuilder(schema).set("field1", "some value").set("field2", "some other value").build();
    // Point the stock Confluent SerDes at the registry's Confluent-compatible endpoint.
    Map<String, Object> config = new HashMap<>();
    config.put(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, rootTarget.getUri().toString());
    KafkaAvroSerializer kafkaAvroSerializer = new KafkaAvroSerializer();
    kafkaAvroSerializer.configure(config, false);
    byte[] bytes = kafkaAvroSerializer.serialize("topic", record);
    KafkaAvroDeserializer kafkaAvroDeserializer = new KafkaAvroDeserializer();
    kafkaAvroDeserializer.configure(config, false); // false = configure as a value (not key) deserializer
    GenericRecord result = (GenericRecord) kafkaAvroDeserializer.deserialize("topic", bytes);
    LOG.info(result.toString());
}
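The test only logs the round-tripped record. A natural strengthening, not in the original test, is to assert the round trip, comparing toString() output so that Avro's Utf8 representation of decoded string fields does not interfere:

Assert.assertEquals(record.toString(), result.toString());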
Use of io.confluent.kafka.serializers.KafkaAvroDeserializer in project ksql by confluentinc:
the class KsqlGenericRowAvroSerializerTest, method shouldSerializeRowCorrectly.
@Test
public void shouldSerializeRowCorrectly() {
    SchemaRegistryClient schemaRegistryClient = new MockSchemaRegistryClient();
    KsqlGenericRowAvroSerializer ksqlGenericRowAvroSerializer =
        new KsqlGenericRowAvroSerializer(schema, schemaRegistryClient, new KsqlConfig(new HashMap<>()));
    List<Object> columns = Arrays.asList(1511897796092L, 1L, "item_1", 10.0, new Double[] { 100.0 },
        Collections.singletonMap("key1", 100.0));
    GenericRow genericRow = new GenericRow(columns);
    byte[] serializedRow = ksqlGenericRowAvroSerializer.serialize("t1", genericRow);
    // Deserialize with the stock Confluent deserializer, backed by the same mock
    // registry, and check each field of the resulting Avro record.
    KafkaAvroDeserializer kafkaAvroDeserializer = new KafkaAvroDeserializer(schemaRegistryClient);
    GenericRecord genericRecord = (GenericRecord) kafkaAvroDeserializer.deserialize("t1", serializedRow);
    Assert.assertNotNull(genericRecord);
    assertThat("Incorrect serialization.", genericRecord.get("ordertime".toUpperCase()), equalTo(1511897796092L));
    assertThat("Incorrect serialization.", genericRecord.get("orderid".toUpperCase()), equalTo(1L));
    assertThat("Incorrect serialization.", genericRecord.get("itemid".toUpperCase()).toString(), equalTo("item_1"));
    assertThat("Incorrect serialization.", genericRecord.get("orderunits".toUpperCase()), equalTo(10.0));
    GenericData.Array array = (GenericData.Array) genericRecord.get("arraycol".toUpperCase());
    Map map = (Map) genericRecord.get("mapcol".toUpperCase());
    assertThat("Incorrect serialization.", array.size(), equalTo(1));
    assertThat("Incorrect serialization.", array.get(0), equalTo(100.0));
    assertThat("Incorrect serialization.", map.size(), equalTo(1));
    assertThat("Incorrect serialization.", map.get(new Utf8("key1")), equalTo(100.0));
}
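Note the map lookup with new Utf8("key1"): the Avro generic decoder materializes string keys as org.apache.avro.util.Utf8, not java.lang.String, so a plain map.get("key1") would return null. A small normalization step when String keys are wanted, illustrative only and not part of the test above:

Map<String, Double> byString = new HashMap<>();
for (Object entry : map.entrySet()) {
    Map.Entry<?, ?> e = (Map.Entry<?, ?>) entry;
    byString.put(e.getKey().toString(), (Double) e.getValue());
}
assertThat(byString.get("key1"), equalTo(100.0)); // now a plain String lookup works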