Use of io.confluent.kafka.schemaregistry.client.SchemaRegistryClient in project ksql by confluentinc:
class KsqlGenericRowAvroSerializerTest, method shouldSerializeRowWithNullValues.
@Test
public void shouldSerializeRowWithNullValues() {
    // Serialization must tolerate null column values without throwing;
    // the test passes if serialize() completes normally.
    SchemaRegistryClient schemaRegistryClient = new MockSchemaRegistryClient();
    KsqlGenericRowAvroSerializer ksqlGenericRowAvroSerializer =
        new KsqlGenericRowAvroSerializer(schema, schemaRegistryClient, new KsqlConfig(new HashMap<>()));
    // Parameterized List<Object> instead of a raw List; this also removes the
    // need for the previous method-level @SuppressWarnings("unchecked").
    List<Object> columns = Arrays.asList(1511897796092L, 1L, "item_1", 10.0, null, null);
    GenericRow genericRow = new GenericRow(columns);
    ksqlGenericRowAvroSerializer.serialize("t1", genericRow);
}
Use of io.confluent.kafka.schemaregistry.client.SchemaRegistryClient in project ksql by confluentinc:
class KsqlGenericRowAvroSerializerTest, method shouldFailForIncompatibleType.
@Test
public void shouldFailForIncompatibleType() {
    // Row deliberately violates the schema ("10.0" as a String where a double
    // is expected); serialization must fail with the Avro serialization error.
    SchemaRegistryClient schemaRegistryClient = new MockSchemaRegistryClient();
    KsqlGenericRowAvroSerializer ksqlGenericRowAvroSerializer =
        new KsqlGenericRowAvroSerializer(schema, schemaRegistryClient, new KsqlConfig(new HashMap<>()));
    // Parameterized List<Object> instead of a raw List.
    List<Object> columns = Arrays.asList(
        1511897796092L, 1L, "item_1", "10.0", new Double[] { 100.0 }, Collections.singletonMap("key1", 100.0));
    GenericRow genericRow = new GenericRow(columns);
    try {
        // Return value intentionally discarded: the previously assigned (and
        // misspelled) local "serilizedRow" was never read.
        ksqlGenericRowAvroSerializer.serialize("t1", genericRow);
        Assert.fail("Did not fail for incompatible types.");
    } catch (Exception e) {
        assertThat(e.getMessage(),
            equalTo("org.apache.kafka.common.errors.SerializationException: Error serializing Avro message"));
    }
}
Use of io.confluent.kafka.schemaregistry.client.SchemaRegistryClient in project incubator-gobblin by apache:
class ConfluentKafkaSchemaRegistryTest, method doTestRegisterAndGetLatest.
private void doTestRegisterAndGetLatest(Properties properties) throws SchemaRegistryException {
    // Register two distinct schemas under the same topic and check that the
    // registry reports the most recently registered one as the latest.
    SchemaRegistryClient client = new MockSchemaRegistryClient();
    KafkaSchemaRegistry<Integer, Schema> registry = new ConfluentKafkaSchemaRegistry(properties, client);

    Schema firstSchema = SchemaBuilder.record(TEST_RECORD_NAME + "1")
        .namespace(TEST_NAMESPACE)
        .fields()
        .name(TEST_FIELD_NAME).type().stringType().noDefault()
        .endRecord();
    Schema secondSchema = SchemaBuilder.record(TEST_RECORD_NAME + "2")
        .namespace(TEST_NAMESPACE)
        .fields()
        .name(TEST_FIELD_NAME).type().stringType().noDefault()
        .endRecord();

    registry.register(firstSchema, TEST_TOPIC_NAME);
    registry.register(secondSchema, TEST_TOPIC_NAME);

    Schema latest = registry.getLatestSchemaByTopic(TEST_TOPIC_NAME);
    Assert.assertNotEquals(firstSchema, latest);
    Assert.assertEquals(secondSchema, latest);
}
Use of io.confluent.kafka.schemaregistry.client.SchemaRegistryClient in project incubator-gobblin by apache:
class KafkaDeserializerExtractorTest, method testConfluentAvroDeserializerForSchemaEvolution.
@Test
// Verifies schema evolution: a record serialized with schema V1 is decoded
// against evolved schema V2 (V2 adds an optional field, which resolves to null).
public void testConfluentAvroDeserializerForSchemaEvolution() throws IOException, RestClientException, SchemaRegistryException {
WorkUnitState mockWorkUnitState = getMockWorkUnitState();
mockWorkUnitState.setProp("schema.registry.url", TEST_URL);
// V1: single required string field. V2: same plus an optional second string field.
Schema schemaV1 = SchemaBuilder.record(TEST_RECORD_NAME).namespace(TEST_NAMESPACE).fields().name(TEST_FIELD_NAME).type().stringType().noDefault().endRecord();
Schema schemaV2 = SchemaBuilder.record(TEST_RECORD_NAME).namespace(TEST_NAMESPACE).fields().name(TEST_FIELD_NAME).type().stringType().noDefault().optionalString(TEST_FIELD_NAME2).endRecord();
GenericRecord testGenericRecord = new GenericRecordBuilder(schemaV1).set(TEST_FIELD_NAME, "testValue").build();
// Stub the registry so any schema-id lookup returns the writer schema (V1).
SchemaRegistryClient mockSchemaRegistryClient = mock(SchemaRegistryClient.class);
when(mockSchemaRegistryClient.getByID(any(Integer.class))).thenReturn(schemaV1);
Serializer<Object> kafkaEncoder = new KafkaAvroSerializer(mockSchemaRegistryClient);
Deserializer<Object> kafkaDecoder = new KafkaAvroDeserializer(mockSchemaRegistryClient);
// Encode the V1 record into the Confluent wire format (magic byte + schema id + payload).
ByteBuffer testGenericRecordByteBuffer = ByteBuffer.wrap(kafkaEncoder.serialize(TEST_TOPIC_NAME, testGenericRecord));
// The extractor's target (reader) schema is the evolved V2.
KafkaSchemaRegistry<Integer, Schema> mockKafkaSchemaRegistry = mock(KafkaSchemaRegistry.class);
when(mockKafkaSchemaRegistry.getLatestSchemaByTopic(TEST_TOPIC_NAME)).thenReturn(schemaV2);
KafkaDeserializerExtractor kafkaDecoderExtractor = new KafkaDeserializerExtractor(mockWorkUnitState, Optional.fromNullable(Deserializers.CONFLUENT_AVRO), kafkaDecoder, mockKafkaSchemaRegistry);
// NOTE(review): kafkaDecoderExtractor is a real object, not a Mockito mock.
// This when(...) presumably works because getSchema() delegates to the mocked
// mockKafkaSchemaRegistry, making its call the last recorded mock interaction —
// confirm this is intentional rather than replacing it with a spy.
when(kafkaDecoderExtractor.getSchema()).thenReturn(schemaV2);
ByteArrayBasedKafkaRecord mockMessageAndOffset = getMockMessageAndOffset(testGenericRecordByteBuffer);
GenericRecord received = (GenericRecord) kafkaDecoderExtractor.decodeRecord(mockMessageAndOffset);
// The optional field added in V2 is absent from the V1 payload and reads as null.
Assert.assertEquals(received.toString(), "{\"testField\": \"testValue\", \"testField2\": null}");
}
Use of io.confluent.kafka.schemaregistry.client.SchemaRegistryClient in project beam by apache:
class ConfluentSchemaRegistryDeserializerProviderTest, method testDeserialize.
@Test
public void testDeserialize() {
    // Serialize a record under the older AVRO_SCHEMA_V1 and verify the provider's
    // deserializer resolves it to the latest schema (AVRO_SCHEMA).
    String schemaRegistryUrl = "mock://my-scope-name";
    String subject = "mytopic";
    SchemaRegistryClient registryClient = mockSchemaRegistryClient(schemaRegistryUrl, subject);

    // Serializer config: auto-register schemas against the mock registry URL.
    Map<String, Object> serializerConfig = new HashMap<>();
    serializerConfig.put(AbstractKafkaAvroSerDeConfig.AUTO_REGISTER_SCHEMAS, true);
    serializerConfig.put(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, schemaRegistryUrl);

    // Raw-typed intermediate cast narrows Serializer<Object> to Serializer<GenericRecord>.
    Serializer<GenericRecord> keySerializer = (Serializer) new KafkaAvroSerializer(registryClient);
    keySerializer.configure(serializerConfig, true);

    GenericRecord v1Record = new GenericRecordBuilder(AVRO_SCHEMA_V1)
        .set("name", "KeyName")
        .set("age", 1)
        .set("favorite_number", 2)
        .set("favorite_color", "color3")
        .build();
    byte[] bytes = keySerializer.serialize(subject, v1Record);

    Object deserialized = mockDeserializerProvider(schemaRegistryUrl, subject, null)
        .getDeserializer(new HashMap<>(), true)
        .deserialize(subject, bytes);

    // The evolved schema drops "age"; remaining fields carry over unchanged.
    GenericRecord expected = new GenericRecordBuilder(AVRO_SCHEMA)
        .set("name", "KeyName")
        .set("favorite_number", 2)
        .set("favorite_color", "color3")
        .build();
    assertEquals(expected, deserialized);
}
Aggregations