
Example 11 with SchemaRegistryClient

Use of io.confluent.kafka.schemaregistry.client.SchemaRegistryClient in project ksql by confluentinc.

From the class KsqlGenericRowAvroSerializerTest, method shouldSerializeRowWithNullValues.

@Test
@SuppressWarnings("unchecked")
public void shouldSerializeRowWithNullValues() {
    SchemaRegistryClient schemaRegistryClient = new MockSchemaRegistryClient();
    KsqlGenericRowAvroSerializer ksqlGenericRowAvroSerializer = new KsqlGenericRowAvroSerializer(schema, schemaRegistryClient, new KsqlConfig(new HashMap<>()));
    List columns = Arrays.asList(1511897796092L, 1L, "item_1", 10.0, null, null);
    GenericRow genericRow = new GenericRow(columns);
    ksqlGenericRowAvroSerializer.serialize("t1", genericRow);
}
Also used: GenericRow(io.confluent.ksql.GenericRow) HashMap(java.util.HashMap) MockSchemaRegistryClient(io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient) KsqlConfig(io.confluent.ksql.util.KsqlConfig) List(java.util.List) SchemaRegistryClient(io.confluent.kafka.schemaregistry.client.SchemaRegistryClient) Test(org.junit.Test)
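
Both this example and the next construct the serializer from a schema field defined elsewhere in KsqlGenericRowAvroSerializerTest. The following is a hedged sketch of what that field plausibly looks like, assuming a Connect struct schema whose field order matches the row values (long, long, string, double, double array, string-to-double map); the field names are illustrative assumptions, not copied from the original class.

// Plausible sketch of the Connect schema the tests above assume; field names
// are illustrative only.
import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.SchemaBuilder;

Schema schema = SchemaBuilder.struct()
    .field("ordertime", Schema.OPTIONAL_INT64_SCHEMA)
    .field("orderid", Schema.OPTIONAL_INT64_SCHEMA)
    .field("itemid", Schema.OPTIONAL_STRING_SCHEMA)
    .field("orderunits", Schema.OPTIONAL_FLOAT64_SCHEMA)
    .field("arraycol", SchemaBuilder.array(Schema.OPTIONAL_FLOAT64_SCHEMA).optional().build())
    .field("mapcol", SchemaBuilder.map(Schema.OPTIONAL_STRING_SCHEMA, Schema.OPTIONAL_FLOAT64_SCHEMA).optional().build())
    .build();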

Example 12 with SchemaRegistryClient

Use of io.confluent.kafka.schemaregistry.client.SchemaRegistryClient in project ksql by confluentinc.

From the class KsqlGenericRowAvroSerializerTest, method shouldFailForIncompatibleType.

@Test
public void shouldFailForIncompatibleType() {
    SchemaRegistryClient schemaRegistryClient = new MockSchemaRegistryClient();
    KsqlGenericRowAvroSerializer ksqlGenericRowAvroSerializer = new KsqlGenericRowAvroSerializer(schema, schemaRegistryClient, new KsqlConfig(new HashMap<>()));
    List columns = Arrays.asList(1511897796092L, 1L, "item_1", "10.0", new Double[] { 100.0 }, Collections.singletonMap("key1", 100.0));
    GenericRow genericRow = new GenericRow(columns);
    try {
        byte[] serializedRow = ksqlGenericRowAvroSerializer.serialize("t1", genericRow);
        Assert.fail("Did not fail for incompatible types.");
    } catch (Exception e) {
        assertThat(e.getMessage(), equalTo("org.apache.kafka.common.errors.SerializationException: Error serializing Avro message"));
    }
}
Also used: GenericRow(io.confluent.ksql.GenericRow) HashMap(java.util.HashMap) MockSchemaRegistryClient(io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient) KsqlConfig(io.confluent.ksql.util.KsqlConfig) List(java.util.List) SchemaRegistryClient(io.confluent.kafka.schemaregistry.client.SchemaRegistryClient) Test(org.junit.Test)
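
Both tests above swap in MockSchemaRegistryClient, an in-memory implementation that never talks to a real Schema Registry. Outside of tests, the same serializer can be wired against a running registry; the following is a minimal sketch, with the URL and cache size as placeholder values.

// Minimal sketch: the same SchemaRegistryClient interface backed by a real
// registry via CachedSchemaRegistryClient; URL and cache size are placeholders.
import io.confluent.kafka.schemaregistry.client.CachedSchemaRegistryClient;
import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient;

SchemaRegistryClient schemaRegistryClient =
        new CachedSchemaRegistryClient("http://localhost:8081", 100);
KsqlGenericRowAvroSerializer serializer =
        new KsqlGenericRowAvroSerializer(schema, schemaRegistryClient, new KsqlConfig(new HashMap<>()));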

Example 13 with SchemaRegistryClient

Use of io.confluent.kafka.schemaregistry.client.SchemaRegistryClient in project incubator-gobblin by apache.

From the class ConfluentKafkaSchemaRegistryTest, method doTestRegisterAndGetLatest.

private void doTestRegisterAndGetLatest(Properties properties) throws SchemaRegistryException {
    SchemaRegistryClient schemaRegistryClient = new MockSchemaRegistryClient();
    KafkaSchemaRegistry<Integer, Schema> kafkaSchemaRegistry = new ConfluentKafkaSchemaRegistry(properties, schemaRegistryClient);
    Schema schema1 = SchemaBuilder.record(TEST_RECORD_NAME + "1").namespace(TEST_NAMESPACE).fields().name(TEST_FIELD_NAME).type().stringType().noDefault().endRecord();
    Schema schema2 = SchemaBuilder.record(TEST_RECORD_NAME + "2").namespace(TEST_NAMESPACE).fields().name(TEST_FIELD_NAME).type().stringType().noDefault().endRecord();
    kafkaSchemaRegistry.register(schema1, TEST_TOPIC_NAME);
    kafkaSchemaRegistry.register(schema2, TEST_TOPIC_NAME);
    Assert.assertNotEquals(schema1, kafkaSchemaRegistry.getLatestSchemaByTopic(TEST_TOPIC_NAME));
    Assert.assertEquals(schema2, kafkaSchemaRegistry.getLatestSchemaByTopic(TEST_TOPIC_NAME));
}
Also used: MockSchemaRegistryClient(io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient) Schema(org.apache.avro.Schema) SchemaRegistryClient(io.confluent.kafka.schemaregistry.client.SchemaRegistryClient)
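
The helper above receives its registry configuration through Properties. A hedged sketch of a call site follows; the property key kafka.schema.registry.url and the URL are assumptions for illustration and should be checked against Gobblin's KafkaSchemaRegistry constants.

// Hedged sketch of invoking the helper; property key and URL are assumed.
Properties properties = new Properties();
properties.setProperty("kafka.schema.registry.url", "http://localhost:8081");
doTestRegisterAndGetLatest(properties);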

Example 14 with SchemaRegistryClient

Use of io.confluent.kafka.schemaregistry.client.SchemaRegistryClient in project incubator-gobblin by apache.

From the class KafkaDeserializerExtractorTest, method testConfluentAvroDeserializerForSchemaEvolution.

@Test
public void testConfluentAvroDeserializerForSchemaEvolution() throws IOException, RestClientException, SchemaRegistryException {
    WorkUnitState mockWorkUnitState = getMockWorkUnitState();
    mockWorkUnitState.setProp("schema.registry.url", TEST_URL);
    Schema schemaV1 = SchemaBuilder.record(TEST_RECORD_NAME).namespace(TEST_NAMESPACE).fields().name(TEST_FIELD_NAME).type().stringType().noDefault().endRecord();
    Schema schemaV2 = SchemaBuilder.record(TEST_RECORD_NAME).namespace(TEST_NAMESPACE).fields().name(TEST_FIELD_NAME).type().stringType().noDefault().optionalString(TEST_FIELD_NAME2).endRecord();
    GenericRecord testGenericRecord = new GenericRecordBuilder(schemaV1).set(TEST_FIELD_NAME, "testValue").build();
    SchemaRegistryClient mockSchemaRegistryClient = mock(SchemaRegistryClient.class);
    when(mockSchemaRegistryClient.getByID(any(Integer.class))).thenReturn(schemaV1);
    Serializer<Object> kafkaEncoder = new KafkaAvroSerializer(mockSchemaRegistryClient);
    Deserializer<Object> kafkaDecoder = new KafkaAvroDeserializer(mockSchemaRegistryClient);
    ByteBuffer testGenericRecordByteBuffer = ByteBuffer.wrap(kafkaEncoder.serialize(TEST_TOPIC_NAME, testGenericRecord));
    KafkaSchemaRegistry<Integer, Schema> mockKafkaSchemaRegistry = mock(KafkaSchemaRegistry.class);
    when(mockKafkaSchemaRegistry.getLatestSchemaByTopic(TEST_TOPIC_NAME)).thenReturn(schemaV2);
    KafkaDeserializerExtractor kafkaDecoderExtractor = new KafkaDeserializerExtractor(mockWorkUnitState, Optional.fromNullable(Deserializers.CONFLUENT_AVRO), kafkaDecoder, mockKafkaSchemaRegistry);
    when(kafkaDecoderExtractor.getSchema()).thenReturn(schemaV2);
    ByteArrayBasedKafkaRecord mockMessageAndOffset = getMockMessageAndOffset(testGenericRecordByteBuffer);
    GenericRecord received = (GenericRecord) kafkaDecoderExtractor.decodeRecord(mockMessageAndOffset);
    Assert.assertEquals(received.toString(), "{\"testField\": \"testValue\", \"testField2\": null}");
}
Also used: WorkUnitState(org.apache.gobblin.configuration.WorkUnitState) Schema(org.apache.avro.Schema) KafkaAvroSerializer(io.confluent.kafka.serializers.KafkaAvroSerializer) KafkaAvroDeserializer(io.confluent.kafka.serializers.KafkaAvroDeserializer) ByteBuffer(java.nio.ByteBuffer) ByteArrayBasedKafkaRecord(org.apache.gobblin.kafka.client.ByteArrayBasedKafkaRecord) GenericRecordBuilder(org.apache.avro.generic.GenericRecordBuilder) GenericRecord(org.apache.avro.generic.GenericRecord) SchemaRegistryClient(io.confluent.kafka.schemaregistry.client.SchemaRegistryClient) Test(org.testng.annotations.Test)
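
The reason both the serializer and the deserializer in this test need a SchemaRegistryClient is the Confluent wire format: each serialized value starts with a magic byte and a four-byte schema id, which the deserializer resolves back to the writer schema before applying the reader schema (schemaV2 here). A short illustrative sketch of inspecting that header on the bytes produced above:

// Illustrative only: peek at the Confluent wire-format header of the payload
// produced by kafkaEncoder in the test above.
ByteBuffer header = ByteBuffer.wrap(kafkaEncoder.serialize(TEST_TOPIC_NAME, testGenericRecord));
byte magicByte = header.get();   // 0 in the current wire format
int schemaId = header.getInt();  // id under which the (mock) registry stored the writer schema
// The remaining bytes are the Avro-encoded record, written with schemaV1 and
// then resolved against schemaV2 so the new optional field defaults to null.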

Example 15 with SchemaRegistryClient

Use of io.confluent.kafka.schemaregistry.client.SchemaRegistryClient in project beam by apache.

From the class ConfluentSchemaRegistryDeserializerProviderTest, method testDeserialize.

@Test
public void testDeserialize() {
    // Test deserializing evolved schema.
    // Verify that records from older schemas are deserialized to the latest schema
    String schemaRegistryUrl = "mock://my-scope-name";
    String subject = "mytopic";
    SchemaRegistryClient mockRegistryClient = mockSchemaRegistryClient(schemaRegistryUrl, subject);
    Map<String, Object> map = new HashMap<>();
    map.put(AbstractKafkaAvroSerDeConfig.AUTO_REGISTER_SCHEMAS, true);
    map.put(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, schemaRegistryUrl);
    Serializer<GenericRecord> serializer = (Serializer) new KafkaAvroSerializer(mockRegistryClient);
    serializer.configure(map, true);
    byte[] bytes = serializer.serialize(subject, new GenericRecordBuilder(AVRO_SCHEMA_V1).set("name", "KeyName").set("age", 1).set("favorite_number", 2).set("favorite_color", "color3").build());
    Object deserialized = mockDeserializerProvider(schemaRegistryUrl, subject, null).getDeserializer(new HashMap<>(), true).deserialize(subject, bytes);
    GenericRecord expected = new GenericRecordBuilder(AVRO_SCHEMA).set("name", "KeyName").set("favorite_number", 2).set("favorite_color", "color3").build();
    assertEquals(expected, deserialized);
}
Also used: HashMap(java.util.HashMap) KafkaAvroSerializer(io.confluent.kafka.serializers.KafkaAvroSerializer) GenericRecordBuilder(org.apache.avro.generic.GenericRecordBuilder) GenericRecord(org.apache.avro.generic.GenericRecord) SchemaRegistryClient(io.confluent.kafka.schemaregistry.client.SchemaRegistryClient) Serializer(org.apache.kafka.common.serialization.Serializer) Test(org.junit.Test)
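
In a pipeline, ConfluentSchemaRegistryDeserializerProvider is normally handed to KafkaIO rather than invoked directly as in the test above. The following is a minimal sketch of that wiring; the broker address, topic, registry URL, and subject name are placeholders, and the subject follows the default <topic>-value naming convention.

// Minimal sketch, assuming placeholder connection details; the provider looks up
// the latest schema for the subject and deserializes values to GenericRecord.
import org.apache.avro.generic.GenericRecord;
import org.apache.beam.sdk.io.kafka.ConfluentSchemaRegistryDeserializerProvider;
import org.apache.beam.sdk.io.kafka.KafkaIO;
import org.apache.kafka.common.serialization.LongDeserializer;

KafkaIO.Read<Long, GenericRecord> read =
    KafkaIO.<Long, GenericRecord>read()
        .withBootstrapServers("localhost:9092")
        .withTopic("mytopic")
        .withKeyDeserializer(LongDeserializer.class)
        .withValueDeserializer(
            ConfluentSchemaRegistryDeserializerProvider.of("http://localhost:8081", "mytopic-value"));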

Aggregations

SchemaRegistryClient (io.confluent.kafka.schemaregistry.client.SchemaRegistryClient): 28 uses
Test (org.junit.Test): 21 uses
MockSchemaRegistryClient (io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient): 11 uses
KsqlConfig (io.confluent.ksql.util.KsqlConfig): 10 uses
GenericRow (io.confluent.ksql.GenericRow): 8 uses
List (java.util.List): 8 uses
HashMap (java.util.HashMap): 7 uses
Schema (org.apache.avro.Schema): 6 uses
GenericRecord (org.apache.avro.generic.GenericRecord): 5 uses
RestClientException (io.confluent.kafka.schemaregistry.client.rest.exceptions.RestClientException): 4 uses
KafkaAvroDeserializer (io.confluent.kafka.serializers.KafkaAvroDeserializer): 4 uses
KafkaAvroSerializer (io.confluent.kafka.serializers.KafkaAvroSerializer): 4 uses
IOException (java.io.IOException): 4 uses
ConsumerRecord (org.apache.kafka.clients.consumer.ConsumerRecord): 4 uses
Bytes (org.apache.kafka.common.utils.Bytes): 4 uses
Schema (org.apache.kafka.connect.data.Schema): 4 uses
Map (java.util.Map): 3 uses
GenericData (org.apache.avro.generic.GenericData): 3 uses
Test (org.testng.annotations.Test): 3 uses
SchemaMetadata (io.confluent.kafka.schemaregistry.client.SchemaMetadata): 2 uses