Use of io.confluent.kafka.schemaregistry.client.SchemaRegistryClient in the ksql project by confluentinc.
From the class TopicStreamWriterFormatTest, method shouldNotMatchAvroFormatter.
@Test
public void shouldNotMatchAvroFormatter() throws Exception {
  // Set up expectations: the mocked registry client records no expected calls.
  final SchemaRegistryClient schemaRegistryClient = mock(SchemaRegistryClient.class);
  replay(schemaRegistryClient);

  // Test data: a consumer record whose payload is plainly not Avro-encoded.
  final ConsumerRecord<String, Bytes> record =
      new ConsumerRecord<>("topic", 1, 1, "key", new Bytes("test-data".getBytes()));

  // Assert: the AVRO format detector must reject the non-Avro payload.
  assertFalse(TopicStreamWriter.Format.AVRO.isFormat("topic", record, schemaRegistryClient));
}
Use of io.confluent.kafka.schemaregistry.client.SchemaRegistryClient in the ksql project by confluentinc.
From the class TopicStreamWriterFormatTest, method shouldNotMatchJsonFormatter.
@Test
public void shouldNotMatchJsonFormatter() throws Exception {
  // Mocked registry client with no expected interactions.
  final SchemaRegistryClient schemaRegistryClient = mock(SchemaRegistryClient.class);
  replay(schemaRegistryClient);

  // Test data: malformed JSON that a JSON format detector must not accept.
  final String json = "{ BAD DATA \"name\": \"myrecord\"," + " \"type\": \"record\"" + "}";
  final ConsumerRecord<String, Bytes> record =
      new ConsumerRecord<>("topic", 1, 1, "key", new Bytes(json.getBytes()));

  assertFalse(TopicStreamWriter.Format.JSON.isFormat("topic", record, schemaRegistryClient));
}
Use of io.confluent.kafka.schemaregistry.client.SchemaRegistryClient in the ksql project by confluentinc.
From the class KsqlGenericRowAvroDeserializerTest, method shouldDeserializeWithMissingFields.
@Test
public void shouldDeserializeWithMissingFields() {
  // Writer schema containing only the first four fields of the full row schema;
  // the deserializer is expected to pad the missing trailing columns with nulls.
  final String schemaStr1 = "{" + "\"namespace\": \"kql\"," + " \"name\": \"orders\"," + " \"type\": \"record\"," + " \"fields\": [" + " {\"name\": \"orderTime\", \"type\": \"long\"}," + " {\"name\": \"orderId\", \"type\": \"long\"}," + " {\"name\": \"itemId\", \"type\": \"string\"}," + " {\"name\": \"orderUnits\", \"type\": \"double\"}" + " ]" + "}";
  final Schema.Parser parser = new Schema.Parser();
  final Schema avroSchema1 = parser.parse(schemaStr1);
  final SchemaRegistryClient schemaRegistryClient = new MockSchemaRegistryClient();
  // Typed List<Object> instead of the raw List type.
  final List<Object> columns = Arrays.asList(1511897796092L, 1L, "item_1", 10.0);
  final GenericRow genericRow = new GenericRow(columns);
  final byte[] serializedRow = getSerializedRow("t1", schemaRegistryClient, avroSchema1, genericRow);
  // NOTE(review): 'schema' appears to be the full reader schema declared elsewhere in this
  // test class (not visible here), which is why 6 columns are expected below — confirm.
  final KsqlGenericRowAvroDeserializer ksqlGenericRowAvroDeserializer =
      new KsqlGenericRowAvroDeserializer(schema, schemaRegistryClient, false);
  final GenericRow row = ksqlGenericRowAvroDeserializer.deserialize("t1", serializedRow);
  // Message typo fixed: "deserializarion" -> "deserialization".
  assertThat("Incorrect deserialization", row.getColumns().size(), equalTo(6));
  assertThat("Incorrect deserialization", (Long) row.getColumns().get(0), equalTo(1511897796092L));
  assertThat("Incorrect deserialization", (Long) row.getColumns().get(1), equalTo(1L));
  assertThat("Incorrect deserialization", (String) row.getColumns().get(2), equalTo("item_1"));
  // Columns absent from the writer schema come back as null.
  Assert.assertNull(row.getColumns().get(4));
  Assert.assertNull(row.getColumns().get(5));
}
Use of io.confluent.kafka.schemaregistry.client.SchemaRegistryClient in the ksql project by confluentinc.
From the class KsqlGenericRowAvroSerializerTest, method shouldSerializeRowCorrectly.
@Test
public void shouldSerializeRowCorrectly() {
  // Serialize a fully-populated row, then read it back with a plain Avro
  // deserializer and verify every column round-trips.
  final SchemaRegistryClient schemaRegistryClient = new MockSchemaRegistryClient();
  final KsqlGenericRowAvroSerializer ksqlGenericRowAvroSerializer =
      new KsqlGenericRowAvroSerializer(schema, schemaRegistryClient, new KsqlConfig(new HashMap<>()));
  // Typed List<Object> instead of the raw List type.
  final List<Object> columns = Arrays.asList(
      1511897796092L, 1L, "item_1", 10.0, new Double[] { 100.0 },
      Collections.singletonMap("key1", 100.0));
  final GenericRow genericRow = new GenericRow(columns);
  final byte[] serializedRow = ksqlGenericRowAvroSerializer.serialize("t1", genericRow);
  final KafkaAvroDeserializer kafkaAvroDeserializer = new KafkaAvroDeserializer(schemaRegistryClient);
  final GenericRecord genericRecord = (GenericRecord) kafkaAvroDeserializer.deserialize("t1", serializedRow);
  Assert.assertNotNull(genericRecord);
  // Field names are upper-cased because KSQL normalizes column names to upper case.
  assertThat("Incorrect serialization.", genericRecord.get("ordertime".toUpperCase()), equalTo(1511897796092L));
  assertThat("Incorrect serialization.", genericRecord.get("orderid".toUpperCase()), equalTo(1L));
  // Avro strings come back as Utf8, hence the toString() before comparing.
  assertThat("Incorrect serialization.", genericRecord.get("itemid".toUpperCase()).toString(), equalTo("item_1"));
  assertThat("Incorrect serialization.", genericRecord.get("orderunits".toUpperCase()), equalTo(10.0));
  // Wildcard-typed casts instead of raw GenericData.Array / Map types.
  final GenericData.Array<?> array = (GenericData.Array<?>) genericRecord.get("arraycol".toUpperCase());
  final Map<?, ?> map = (Map<?, ?>) genericRecord.get("mapcol".toUpperCase());
  assertThat("Incorrect serialization.", array.size(), equalTo(1));
  assertThat("Incorrect serialization.", array.get(0), equalTo(100.0));
  assertThat("Incorrect serialization.", map.size(), equalTo(1));
  // Avro map keys deserialize as Utf8, not String.
  assertThat("Incorrect serialization.", map.get(new Utf8("key1")), equalTo(100.0));
}
Use of io.confluent.kafka.schemaregistry.client.SchemaRegistryClient in the ksql project by confluentinc.
From the class KsqlGenericRowAvroSerializerTest, method shouldSerializeRowWithNullCorrectly.
@Test
public void shouldSerializeRowWithNullCorrectly() {
  // Same round-trip as shouldSerializeRowCorrectly, but with a null column
  // (itemid) to verify nullable fields serialize and deserialize as null.
  final SchemaRegistryClient schemaRegistryClient = new MockSchemaRegistryClient();
  final KsqlGenericRowAvroSerializer ksqlGenericRowAvroSerializer =
      new KsqlGenericRowAvroSerializer(schema, schemaRegistryClient, new KsqlConfig(new HashMap<>()));
  // Typed List<Object> instead of the raw List type.
  final List<Object> columns = Arrays.asList(
      1511897796092L, 1L, null, 10.0, new Double[] { 100.0 },
      Collections.singletonMap("key1", 100.0));
  final GenericRow genericRow = new GenericRow(columns);
  final byte[] serializedRow = ksqlGenericRowAvroSerializer.serialize("t1", genericRow);
  final KafkaAvroDeserializer kafkaAvroDeserializer = new KafkaAvroDeserializer(schemaRegistryClient);
  final GenericRecord genericRecord = (GenericRecord) kafkaAvroDeserializer.deserialize("t1", serializedRow);
  Assert.assertNotNull(genericRecord);
  assertThat("Incorrect serialization.", genericRecord.get("ordertime".toUpperCase()), equalTo(1511897796092L));
  assertThat("Incorrect serialization.", genericRecord.get("orderid".toUpperCase()), equalTo(1L));
  // The null column must survive the round trip as null.
  assertThat("Incorrect serialization.", genericRecord.get("itemid".toUpperCase()), equalTo(null));
  assertThat("Incorrect serialization.", genericRecord.get("orderunits".toUpperCase()), equalTo(10.0));
  // Wildcard-typed casts instead of raw GenericData.Array / Map types.
  final GenericData.Array<?> array = (GenericData.Array<?>) genericRecord.get("arraycol".toUpperCase());
  final Map<?, ?> map = (Map<?, ?>) genericRecord.get("mapcol".toUpperCase());
  assertThat("Incorrect serialization.", array.size(), equalTo(1));
  assertThat("Incorrect serialization.", array.get(0), equalTo(100.0));
  // Avro map keys deserialize as Utf8, so compare against a Utf8-keyed map.
  assertThat("Incorrect serialization.", map, equalTo(Collections.singletonMap(new Utf8("key1"), 100.0)));
}
Aggregations