Example use of io.confluent.kafka.schemaregistry.client.SchemaRegistryClient in the project ksql by confluentinc.
From the class AvroUtilTest, the method shouldPassAvroCheck:
@Test
public void shouldPassAvroCheck() throws Exception {
  // Stub a schema registry whose latest schema, for any subject, is the orders Avro schema.
  final SchemaRegistryClient schemaRegistryClient = mock(SchemaRegistryClient.class);
  final SchemaMetadata schemaMetadata = new SchemaMetadata(1, 1, ordersAveroSchemaStr);
  expect(schemaRegistryClient.getLatestSchemaMetadata(anyString())).andReturn(schemaMetadata);
  replay(schemaRegistryClient);

  // A CREATE STREAM statement declaring no columns; the Avro check is expected
  // to populate the elements from the registered schema.
  final AbstractStreamCreateStatement statement = getAbstractStreamCreateStatement(
      "CREATE STREAM S1 WITH (kafka_topic='s1_topic', value_format='avro' );");

  final Pair<AbstractStreamCreateStatement, String> checkResult =
      avroUtil.checkAndSetAvroSchema(statement, new HashMap<>(), schemaRegistryClient);
  final AbstractStreamCreateStatement updatedStatement = checkResult.getLeft();

  // The inferred columns should mirror the fields of the orders schema.
  assertThat(updatedStatement.getElements(), equalTo(Arrays.asList(
      new TableElement("ORDERTIME", "BIGINT"),
      new TableElement("ORDERID", "BIGINT"),
      new TableElement("ITEMID", "VARCHAR"),
      new TableElement("ORDERUNITS", "DOUBLE"),
      new TableElement("ARRAYCOL", "ARRAY<DOUBLE>"),
      new TableElement("MAPCOL", "MAP<VARCHAR,DOUBLE>"))));
}
Example use of io.confluent.kafka.schemaregistry.client.SchemaRegistryClient in the project ksql by confluentinc.
From the class TopicStreamWriterFormatTest, the method shouldMatchJsonFormatter:
@Test
public void shouldMatchJsonFormatter() throws Exception {
  // No registry interaction is expected when detecting plain JSON.
  final SchemaRegistryClient schemaRegistryClient = mock(SchemaRegistryClient.class);
  replay(schemaRegistryClient);

  // Test data: a JSON payload wrapped in a ConsumerRecord.
  final String json = "{ \"name\": \"myrecord\", \"type\": \"record\"}";
  final ConsumerRecord<String, Bytes> consumerRecord =
      new ConsumerRecord<>("topic", 1, 1, "key", new Bytes(json.getBytes()));

  assertTrue(TopicStreamWriter.Format.JSON.isFormat("topic", consumerRecord, schemaRegistryClient));
}
Example use of io.confluent.kafka.schemaregistry.client.SchemaRegistryClient in the project ksql by confluentinc.
From the class TopicStreamWriterFormatTest, the method shouldMatchAvroFormatter:
@Test
public void shouldMatchAvroFormatter() throws Exception {
  // Build an Avro record with a single string field.
  final String userSchemaJson = "{\n"
      + " \"fields\": [\n"
      + " { \"name\": \"str1\", \"type\": \"string\" }\n"
      + " ],\n"
      + " \"name\": \"myrecord\",\n"
      + " \"type\": \"record\"\n"
      + "}";
  final Schema recordSchema = new Schema.Parser().parse(userSchemaJson);
  final GenericData.Record avroRecord = new GenericData.Record(recordSchema);
  avroRecord.put("str1", "My first string");

  // Expect the serializer to register the schema, and the formatter to fetch it back by id.
  final SchemaRegistryClient schemaRegistryClient = mock(SchemaRegistryClient.class);
  expect(schemaRegistryClient.register(anyString(), anyObject())).andReturn(1);
  expect(schemaRegistryClient.getById(anyInt())).andReturn(recordSchema);
  replay(schemaRegistryClient);

  final Map<String, String> serializerConfig = new HashMap<>();
  serializerConfig.put("schema.registry.url", "localhost:9092");
  final KafkaAvroSerializer avroSerializer =
      new KafkaAvroSerializer(schemaRegistryClient, serializerConfig);

  // Serialize the record and wrap the bytes as a ConsumerRecord, the shape the formatter reads.
  final byte[] serializedAvro = avroSerializer.serialize("topic", avroRecord);
  final ConsumerRecord<String, Bytes> consumerRecord =
      new ConsumerRecord<>("topic", 1, 1, "key", new Bytes(serializedAvro));

  // The AVRO format detector should recognize the serialized payload.
  assertTrue(TopicStreamWriter.Format.AVRO.isFormat("topic", consumerRecord, schemaRegistryClient));
}
Example use of io.confluent.kafka.schemaregistry.client.SchemaRegistryClient in the project ksql by confluentinc.
From the class KsqlGenericRowAvroDeserializerTest, the method shouldDeserializeCorrectly:
@Test
@SuppressWarnings("unchecked")
public void shouldDeserializeCorrectly() {
  final SchemaRegistryClient schemaRegistryClient = new MockSchemaRegistryClient();
  // Row matching the test schema: ordertime, orderid, itemid, orderunits, array column, map column.
  // Typed as List<Object> rather than a raw List to avoid an unchecked raw-type usage.
  final List<Object> columns = Arrays.asList(
      1511897796092L, 1L, "item_1", 10.0,
      new Double[] { 100.0 },
      Collections.singletonMap("key1", 100.0));
  final GenericRow genericRow = new GenericRow(columns);
  final KsqlGenericRowAvroDeserializer ksqlGenericRowAvroDeserializer =
      new KsqlGenericRowAvroDeserializer(schema, schemaRegistryClient, false);

  // Round-trip: serialize the row with the Avro schema, then deserialize it back.
  final byte[] serializedRow = getSerializedRow("t1", schemaRegistryClient, avroSchema, genericRow);
  final GenericRow row = ksqlGenericRowAvroDeserializer.deserialize("t1", serializedRow);

  Assert.assertNotNull(row);
  // Failure messages fixed: original misspelled "deserialization" as "deserializarion".
  assertThat("Incorrect deserialization", row.getColumns().size(), equalTo(6));
  assertThat("Incorrect deserialization", row.getColumns().get(0), equalTo(1511897796092L));
  assertThat("Incorrect deserialization", row.getColumns().get(1), equalTo(1L));
  assertThat("Incorrect deserialization", row.getColumns().get(2), equalTo("item_1"));
  assertThat("Incorrect deserialization", row.getColumns().get(3), equalTo(10.0));
  assertThat("Incorrect deserialization", ((Double[]) row.getColumns().get(4)).length, equalTo(1));
  assertThat("Incorrect deserialization", ((Map) row.getColumns().get(5)).size(), equalTo(1));
}
Example use of io.confluent.kafka.schemaregistry.client.SchemaRegistryClient in the project ksql by confluentinc.
From the class KsqlGenericRowAvroDeserializerTest, the method shouldDeserializeIfThereAreRedundantFields:
@Test
public void shouldDeserializeIfThereAreRedundantFields() {
  // Target schema exposes only the first four columns; the serialized row carries
  // two extra (array and map) fields, which the deserializer should ignore.
  final org.apache.kafka.connect.data.Schema newSchema = SchemaBuilder.struct()
      .field("ordertime".toUpperCase(), org.apache.kafka.connect.data.Schema.INT64_SCHEMA)
      .field("orderid".toUpperCase(), org.apache.kafka.connect.data.Schema.INT64_SCHEMA)
      .field("itemid".toUpperCase(), org.apache.kafka.connect.data.Schema.STRING_SCHEMA)
      .field("orderunits".toUpperCase(), org.apache.kafka.connect.data.Schema.FLOAT64_SCHEMA)
      .build();
  final SchemaRegistryClient schemaRegistryClient = new MockSchemaRegistryClient();
  // Typed as List<Object> rather than a raw List to avoid an unchecked raw-type usage.
  final List<Object> columns = Arrays.asList(
      1511897796092L, 1L, "item_1", 10.0,
      new Double[] { 100.0 },
      Collections.singletonMap("key1", 100.0));
  final GenericRow genericRow = new GenericRow(columns);
  final KsqlGenericRowAvroDeserializer ksqlGenericRowAvroDeserializer =
      new KsqlGenericRowAvroDeserializer(newSchema, schemaRegistryClient, false);

  // Serialize with the full six-column Avro schema, deserialize with the narrower one.
  final byte[] serializedRow = getSerializedRow("t1", schemaRegistryClient, avroSchema, genericRow);
  final GenericRow row = ksqlGenericRowAvroDeserializer.deserialize("t1", serializedRow);

  Assert.assertNotNull(row);
  // Failure messages fixed: original misspelled "deserialization" as "deserializarion".
  assertThat("Incorrect deserialization", row.getColumns().size(), equalTo(4));
  assertThat("Incorrect deserialization", (Long) row.getColumns().get(0), equalTo(1511897796092L));
  assertThat("Incorrect deserialization", (Long) row.getColumns().get(1), equalTo(1L));
  assertThat("Incorrect deserialization", (String) row.getColumns().get(2), equalTo("item_1"));
}
Aggregations