Search in sources:

Example 1 with KafkaAvroDeserializer

use of io.confluent.kafka.serializers.KafkaAvroDeserializer in project ksql by confluentinc.

From the class KsqlGenericRowAvroDeserializerTest, method shouldCreateCorrectRow.

@Test
public void shouldCreateCorrectRow() {
    // Mock the inner Confluent deserializer so this test exercises only the
    // KSQL row-mapping logic, not real Avro decoding.
    KafkaAvroDeserializer kafkaAvroDeserializer = EasyMock.mock(KafkaAvroDeserializer.class);
    // expect(...).andReturn(...) fully records this expectation. The original
    // trailing expectLastCall() was removed: that API is meant for recording
    // expectations on void methods, and after expect() has consumed the last
    // invocation it is redundant at best and an error at worst.
    EasyMock.expect(kafkaAvroDeserializer.deserialize(EasyMock.anyString(), EasyMock.anyObject())).andReturn(genericRecord);
    replay(kafkaAvroDeserializer);
    KsqlGenericRowAvroDeserializer ksqlGenericRowAvroDeserializer = new KsqlGenericRowAvroDeserializer(schema, kafkaAvroDeserializer, false);
    // Topic name and payload are irrelevant — the mocked deserializer always
    // returns the prepared genericRecord (a class-level fixture).
    GenericRow genericRow = ksqlGenericRowAvroDeserializer.deserialize("", new byte[] {});
    assertThat("Column number does not match.", genericRow.getColumns().size(), equalTo(6));
    assertThat("Invalid column value.", genericRow.getColumns().get(0), equalTo(1511897796092L));
    assertThat("Invalid column value.", genericRow.getColumns().get(1), equalTo(1L));
    assertThat("Invalid column value.", ((Double[]) genericRow.getColumns().get(4))[0], equalTo(100.0));
    assertThat("Invalid column value.", ((Map<String, Double>) genericRow.getColumns().get(5)).get("key1"), equalTo(100.0));
}
Also used : GenericRow(io.confluent.ksql.GenericRow) KafkaAvroDeserializer(io.confluent.kafka.serializers.KafkaAvroDeserializer) Test(org.junit.Test)

Example 2 with KafkaAvroDeserializer

use of io.confluent.kafka.serializers.KafkaAvroDeserializer in project incubator-gobblin by apache.

From the class KafkaDeserializerExtractorTest, method testConfluentAvroDeserializerForSchemaEvolution.

@Test
public void testConfluentAvroDeserializerForSchemaEvolution() throws IOException, RestClientException, SchemaRegistryException {
    // Verifies schema evolution: a record serialized with writer schema V1 is
    // decoded against reader schema V2, whose extra optional field must come
    // back as null.
    WorkUnitState mockWorkUnitState = getMockWorkUnitState();
    mockWorkUnitState.setProp("schema.registry.url", TEST_URL);
    // V1: one required string field. V2: same field plus an optional string.
    Schema schemaV1 = SchemaBuilder.record(TEST_RECORD_NAME).namespace(TEST_NAMESPACE).fields().name(TEST_FIELD_NAME).type().stringType().noDefault().endRecord();
    Schema schemaV2 = SchemaBuilder.record(TEST_RECORD_NAME).namespace(TEST_NAMESPACE).fields().name(TEST_FIELD_NAME).type().stringType().noDefault().optionalString(TEST_FIELD_NAME2).endRecord();
    GenericRecord testGenericRecord = new GenericRecordBuilder(schemaV1).set(TEST_FIELD_NAME, "testValue").build();
    SchemaRegistryClient mockSchemaRegistryClient = mock(SchemaRegistryClient.class);
    // The Confluent serde resolves every schema ID to V1 (the writer schema).
    when(mockSchemaRegistryClient.getByID(any(Integer.class))).thenReturn(schemaV1);
    Serializer<Object> kafkaEncoder = new KafkaAvroSerializer(mockSchemaRegistryClient);
    Deserializer<Object> kafkaDecoder = new KafkaAvroDeserializer(mockSchemaRegistryClient);
    ByteBuffer testGenericRecordByteBuffer = ByteBuffer.wrap(kafkaEncoder.serialize(TEST_TOPIC_NAME, testGenericRecord));
    // The Gobblin-side registry reports V2 as the latest (reader) schema.
    KafkaSchemaRegistry<Integer, Schema> mockKafkaSchemaRegistry = mock(KafkaSchemaRegistry.class);
    when(mockKafkaSchemaRegistry.getLatestSchemaByTopic(TEST_TOPIC_NAME)).thenReturn(schemaV2);
    KafkaDeserializerExtractor kafkaDecoderExtractor = new KafkaDeserializerExtractor(mockWorkUnitState, Optional.fromNullable(Deserializers.CONFLUENT_AVRO), kafkaDecoder, mockKafkaSchemaRegistry);
    // NOTE(review): kafkaDecoderExtractor is a real object, not a Mockito
    // mock, so this when(...) can only bind to the last invocation recorded on
    // a mock — presumably the mockKafkaSchemaRegistry call made inside
    // getSchema(). Fragile idiom; confirm this stubbing is intentional.
    when(kafkaDecoderExtractor.getSchema()).thenReturn(schemaV2);
    ByteArrayBasedKafkaRecord mockMessageAndOffset = getMockMessageAndOffset(testGenericRecordByteBuffer);
    GenericRecord received = (GenericRecord) kafkaDecoderExtractor.decodeRecord(mockMessageAndOffset);
    // The V2-only field appears with its default (null) in the decoded record.
    Assert.assertEquals(received.toString(), "{\"testField\": \"testValue\", \"testField2\": null}");
}
Also used : WorkUnitState(org.apache.gobblin.configuration.WorkUnitState) Schema(org.apache.avro.Schema) KafkaAvroSerializer(io.confluent.kafka.serializers.KafkaAvroSerializer) KafkaAvroDeserializer(io.confluent.kafka.serializers.KafkaAvroDeserializer) ByteBuffer(java.nio.ByteBuffer) ByteArrayBasedKafkaRecord(org.apache.gobblin.kafka.client.ByteArrayBasedKafkaRecord) GenericRecordBuilder(org.apache.avro.generic.GenericRecordBuilder) GenericRecord(org.apache.avro.generic.GenericRecord) SchemaRegistryClient(io.confluent.kafka.schemaregistry.client.SchemaRegistryClient) Test(org.testng.annotations.Test)

Example 3 with KafkaAvroDeserializer

use of io.confluent.kafka.serializers.KafkaAvroDeserializer in project flink by apache.

From the class SQLClientSchemaRegistryITCase, method testWriting.

@Test
public void testWriting() throws Exception {
    // Unique topic per run so repeated executions never collide.
    final String topic = "test-user-behavior-" + UUID.randomUUID().toString();
    kafkaClient.createTopic(1, 1, topic);
    final String valueSubject = topic + "-value";
    // DDL plus a single INSERT, executed through the SQL client; the sink
    // writes avro-confluent payloads registered under <topic>-value.
    final List<String> statements = Arrays.asList(
            "CREATE TABLE user_behavior (",
            " user_id BIGINT NOT NULL,",
            " item_id BIGINT,",
            " category_id BIGINT,",
            " behavior STRING,",
            " ts TIMESTAMP(3)",
            ") WITH (",
            " 'connector' = 'kafka',",
            " 'properties.bootstrap.servers' = '" + INTER_CONTAINER_KAFKA_ALIAS + ":9092',",
            " 'topic' = '" + topic + "',",
            " 'format' = 'avro-confluent',",
            " 'avro-confluent.url' = 'http://" + INTER_CONTAINER_REGISTRY_ALIAS + ":8082'",
            ");",
            "",
            "INSERT INTO user_behavior VALUES (1, 1, 1, 'buy', TO_TIMESTAMP(FROM_UNIXTIME(1234)));");
    executeSqlStatements(statements);
    // Exactly one schema version must have been registered for the subject.
    final List<Integer> versions = getAllVersions(valueSubject);
    assertThat(versions.size(), equalTo(1));
    // Read the produced message back and rebuild the expected record from the
    // schema that was actually registered.
    final List<Object> consumed = kafkaClient.readMessages(1, "test-group", topic, new KafkaAvroDeserializer(registryClient));
    final String schemaText = registryClient.getByVersion(valueSubject, versions.get(0), false).getSchema();
    final Schema registeredSchema = new Schema.Parser().parse(schemaText);
    final GenericRecordBuilder expectedRecord = new GenericRecordBuilder(registeredSchema);
    expectedRecord.set("user_id", 1L);
    expectedRecord.set("item_id", 1L);
    expectedRecord.set("category_id", 1L);
    expectedRecord.set("behavior", "buy");
    expectedRecord.set("ts", 1234000L);
    assertThat(consumed, equalTo(Collections.singletonList(expectedRecord.build())));
}
Also used : AvroSchema(io.confluent.kafka.schemaregistry.avro.AvroSchema) Schema(org.apache.avro.Schema) GenericRecordBuilder(org.apache.avro.generic.GenericRecordBuilder) KafkaAvroDeserializer(io.confluent.kafka.serializers.KafkaAvroDeserializer) Test(org.junit.Test)

Example 4 with KafkaAvroDeserializer

use of io.confluent.kafka.serializers.KafkaAvroDeserializer in project registry by hortonworks.

From the class ConfluentRegistryCompatibleResourceTest, method testConfluentSerDes.

@Test
public void testConfluentSerDes() throws Exception {
    // Round-trip a generic record through the Confluent serializer and
    // deserializer, both configured against this test's registry endpoint.
    final org.apache.avro.Schema avroSchema = new org.apache.avro.Schema.Parser().parse(GENERIC_TEST_RECORD_SCHEMA);
    final GenericRecord input = new GenericRecordBuilder(avroSchema)
            .set("field1", "some value")
            .set("field2", "some other value")
            .build();
    // Shared serde configuration: only the registry URL is required.
    final Map<String, Object> serdeConfig = new HashMap<>();
    serdeConfig.put(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, rootTarget.getUri().toString());
    final KafkaAvroSerializer serializer = new KafkaAvroSerializer();
    serializer.configure(serdeConfig, false);
    final byte[] payload = serializer.serialize("topic", input);
    final KafkaAvroDeserializer deserializer = new KafkaAvroDeserializer();
    deserializer.configure(serdeConfig, false);
    final GenericRecord roundTripped = (GenericRecord) deserializer.deserialize("topic", payload);
    LOG.info(roundTripped.toString());
}
Also used : HashMap(java.util.HashMap) Schema(com.hortonworks.registries.schemaregistry.webservice.ConfluentSchemaRegistryCompatibleResource.Schema) KafkaAvroSerializer(io.confluent.kafka.serializers.KafkaAvroSerializer) KafkaAvroDeserializer(io.confluent.kafka.serializers.KafkaAvroDeserializer) SchemaString(com.hortonworks.registries.schemaregistry.webservice.ConfluentSchemaRegistryCompatibleResource.SchemaString) GenericRecordBuilder(org.apache.avro.generic.GenericRecordBuilder) GenericRecord(org.apache.avro.generic.GenericRecord) Test(org.junit.Test)

Example 5 with KafkaAvroDeserializer

use of io.confluent.kafka.serializers.KafkaAvroDeserializer in project ksql by confluentinc.

From the class KsqlGenericRowAvroSerializerTest, method shouldSerializeRowCorrectly.

@Test
public void shouldSerializeRowCorrectly() {
    // Serialize a KSQL row to Avro and decode it with the plain Confluent
    // deserializer to verify each column survives the round trip.
    SchemaRegistryClient schemaRegistryClient = new MockSchemaRegistryClient();
    KsqlGenericRowAvroSerializer ksqlGenericRowAvroSerializer = new KsqlGenericRowAvroSerializer(schema, schemaRegistryClient, new KsqlConfig(new HashMap<>()));
    // Parameterized List<Object> instead of the original raw List: the row
    // mixes longs, strings, doubles, an array and a map.
    List<Object> columns = Arrays.asList(1511897796092L, 1L, "item_1", 10.0, new Double[] { 100.0 }, Collections.singletonMap("key1", 100.0));
    GenericRow genericRow = new GenericRow(columns);
    byte[] serializedRow = ksqlGenericRowAvroSerializer.serialize("t1", genericRow);
    KafkaAvroDeserializer kafkaAvroDeserializer = new KafkaAvroDeserializer(schemaRegistryClient);
    GenericRecord genericRecord = (GenericRecord) kafkaAvroDeserializer.deserialize("t1", serializedRow);
    Assert.assertNotNull(genericRecord);
    // Field names are upper-cased in the Avro schema generated by KSQL.
    assertThat("Incorrect serialization.", genericRecord.get("ordertime".toUpperCase()), equalTo(1511897796092L));
    assertThat("Incorrect serialization.", genericRecord.get("orderid".toUpperCase()), equalTo(1L));
    assertThat("Incorrect serialization.", genericRecord.get("itemid".toUpperCase()).toString(), equalTo("item_1"));
    assertThat("Incorrect serialization.", genericRecord.get("orderunits".toUpperCase()), equalTo(10.0));
    // Wildcard types instead of raw GenericData.Array / Map: element access
    // still yields Object, which the equalTo matchers accept.
    GenericData.Array<?> array = (GenericData.Array<?>) genericRecord.get("arraycol".toUpperCase());
    Map<?, ?> map = (Map<?, ?>) genericRecord.get("mapcol".toUpperCase());
    assertThat("Incorrect serialization.", array.size(), equalTo(1));
    assertThat("Incorrect serialization.", array.get(0), equalTo(100.0));
    assertThat("Incorrect serialization.", map.size(), equalTo(1));
    // Avro decodes string map keys as Utf8, not String.
    assertThat("Incorrect serialization.", map.get(new Utf8("key1")), equalTo(100.0));
}
Also used : HashMap(java.util.HashMap) MockSchemaRegistryClient(io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient) KsqlConfig(io.confluent.ksql.util.KsqlConfig) KafkaAvroDeserializer(io.confluent.kafka.serializers.KafkaAvroDeserializer) GenericData(org.apache.avro.generic.GenericData) GenericRow(io.confluent.ksql.GenericRow) Utf8(org.apache.avro.util.Utf8) List(java.util.List) GenericRecord(org.apache.avro.generic.GenericRecord) HashMap(java.util.HashMap) Map(java.util.Map) MockSchemaRegistryClient(io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient) SchemaRegistryClient(io.confluent.kafka.schemaregistry.client.SchemaRegistryClient) Test(org.junit.Test)

Aggregations

KafkaAvroDeserializer (io.confluent.kafka.serializers.KafkaAvroDeserializer)8 GenericRecord (org.apache.avro.generic.GenericRecord)6 Test (org.junit.Test)5 SchemaRegistryClient (io.confluent.kafka.schemaregistry.client.SchemaRegistryClient)4 Schema (org.apache.avro.Schema)4 GenericRecordBuilder (org.apache.avro.generic.GenericRecordBuilder)4 KafkaAvroSerializer (io.confluent.kafka.serializers.KafkaAvroSerializer)3 GenericRow (io.confluent.ksql.GenericRow)3 HashMap (java.util.HashMap)3 Map (java.util.Map)3 MockSchemaRegistryClient (io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient)2 KsqlConfig (io.confluent.ksql.util.KsqlConfig)2 ByteBuffer (java.nio.ByteBuffer)2 List (java.util.List)2 GenericData (org.apache.avro.generic.GenericData)2 Utf8 (org.apache.avro.util.Utf8)2 WorkUnitState (org.apache.gobblin.configuration.WorkUnitState)2 ByteArrayBasedKafkaRecord (org.apache.gobblin.kafka.client.ByteArrayBasedKafkaRecord)2 Schema (com.hortonworks.registries.schemaregistry.webservice.ConfluentSchemaRegistryCompatibleResource.Schema)1 SchemaString (com.hortonworks.registries.schemaregistry.webservice.ConfluentSchemaRegistryCompatibleResource.SchemaString)1