Use of io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient in project ksql by confluentinc.
From class SchemaKStreamTest, method init().
@Before
public void init() {
  functionRegistry = new FunctionRegistry();
  ksqlStream = (KsqlStream) metaStore.getSource("TEST1");
  final StreamsBuilder builder = new StreamsBuilder();
  // Build the value serde against the mock registry so no live Schema Registry is needed.
  kStream = builder.stream(
      ksqlStream.getKsqlTopic().getKafkaTopicName(),
      Consumed.with(
          Serdes.String(),
          ksqlStream.getKsqlTopic().getKsqlTopicSerDe().getGenericRowSerde(
              null, new KsqlConfig(Collections.emptyMap()), false,
              new MockSchemaRegistryClient())));
}
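MockSchemaRegistryClient is an in-memory stand-in for a real Schema Registry, so serdes built against it never make network calls. A minimal sketch of that drop-in behavior, assuming a client version whose register method accepts an org.apache.avro.Schema directly:

import io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient;
import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient;
import org.apache.avro.Schema;
import org.apache.avro.SchemaBuilder;

public class MockRegistrySketch {
  public static void main(String[] args) throws Exception {
    // Schemas are stored in memory; no registry process is required.
    final SchemaRegistryClient client = new MockSchemaRegistryClient();

    final Schema avroSchema = SchemaBuilder.record("Order").fields()
        .requiredLong("ORDERTIME")
        .requiredString("ITEMID")
        .endRecord();

    // register assigns and returns a schema id, just like the real client.
    final int id = client.register("t1-value", avroSchema);
    System.out.println("registered schema id: " + id);
  }
}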
Use of io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient in project ksql by confluentinc.
From class SchemaKTableTest, method testSelectSchemaKStream().
@Test
public void testSelectSchemaKStream() throws Exception {
  final String selectQuery = "SELECT col0, col2, col3 FROM test1 WHERE col0 > 100;";
  final PlanNode logicalPlan = planBuilder.buildLogicalPlan(selectQuery);
  final ProjectNode projectNode = (ProjectNode) logicalPlan.getSources().get(0);
  initialSchemaKTable = new SchemaKTable(
      logicalPlan.getTheSourceNode().getSchema(), kTable, ksqlTable.getKeyField(),
      new ArrayList<>(), false, SchemaKStream.Type.SOURCE, functionRegistry,
      new MockSchemaRegistryClient());
  final SchemaKTable projectedSchemaKStream =
      initialSchemaKTable.select(projectNode.getProjectNameExpressionPairList());

  // The projection should keep exactly the three selected columns, in order.
  Assert.assertEquals(3, projectedSchemaKStream.getSchema().fields().size());
  Assert.assertSame(projectedSchemaKStream.getSchema().fields().get(0), projectedSchemaKStream.getSchema().field("COL0"));
  Assert.assertSame(projectedSchemaKStream.getSchema().fields().get(1), projectedSchemaKStream.getSchema().field("COL2"));
  Assert.assertSame(projectedSchemaKStream.getSchema().fields().get(2), projectedSchemaKStream.getSchema().field("COL3"));
  Assert.assertEquals(Schema.Type.INT64, projectedSchemaKStream.getSchema().field("COL0").schema().type());
  Assert.assertEquals(Schema.Type.STRING, projectedSchemaKStream.getSchema().field("COL2").schema().type());
  Assert.assertEquals(Schema.Type.FLOAT64, projectedSchemaKStream.getSchema().field("COL3").schema().type());
  Assert.assertSame(initialSchemaKTable, projectedSchemaKStream.getSourceSchemaKStreams().get(0));
}
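The assertions above pin down the projected schema exactly. For reference, this is the shape they expect, sketched with Connect's SchemaBuilder (the upper-cased field names come from the ksql parser; the snippet is illustrative only):

// Sketch of the projected schema the test expects; not part of the test itself.
org.apache.kafka.connect.data.Schema expected = SchemaBuilder.struct()
    .field("COL0", org.apache.kafka.connect.data.Schema.INT64_SCHEMA)
    .field("COL2", org.apache.kafka.connect.data.Schema.STRING_SCHEMA)
    .field("COL3", org.apache.kafka.connect.data.Schema.FLOAT64_SCHEMA)
    .build();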
Use of io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient in project ksql by confluentinc.
From class SchemaKTableTest, method init().
@Before
public void init() {
  functionRegistry = new FunctionRegistry();
  ksqlTable = (KsqlTable) metaStore.getSource("TEST2");
  final StreamsBuilder builder = new StreamsBuilder();
  kTable = builder.table(
      ksqlTable.getKsqlTopic().getKafkaTopicName(),
      Consumed.with(
          Serdes.String(),
          ksqlTable.getKsqlTopic().getKsqlTopicSerDe().getGenericRowSerde(
              null, new KsqlConfig(Collections.emptyMap()), false,
              new MockSchemaRegistryClient())));
}
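builder.table reads the topic as a changelog rather than an event stream; otherwise the wiring mirrors the KStream setup above. A self-contained sketch of the same Consumed pattern with plain string serdes (topic name is arbitrary; in older Streams releases Consumed lives in org.apache.kafka.streams instead of the kstream package):

import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.kstream.Consumed;
import org.apache.kafka.streams.kstream.KTable;

// Each key's latest value becomes the current row of the table.
StreamsBuilder builder = new StreamsBuilder();
KTable<String, String> table =
    builder.table("test-topic", Consumed.with(Serdes.String(), Serdes.String()));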
Use of io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient in project ksql by confluentinc.
From class KsqlGenericRowAvroDeserializerTest, method shouldDeserializeCorrectly().
@Test
@SuppressWarnings("unchecked")
public void shouldDeserializeCorrectly() {
  final SchemaRegistryClient schemaRegistryClient = new MockSchemaRegistryClient();
  final List<Object> columns = Arrays.asList(
      1511897796092L, 1L, "item_1", 10.0,
      new Double[] { 100.0 },
      Collections.singletonMap("key1", 100.0));
  final GenericRow genericRow = new GenericRow(columns);
  final KsqlGenericRowAvroDeserializer deserializer =
      new KsqlGenericRowAvroDeserializer(schema, schemaRegistryClient, false);
  final byte[] serializedRow = getSerializedRow("t1", schemaRegistryClient, avroSchema, genericRow);
  final GenericRow row = deserializer.deserialize("t1", serializedRow);

  Assert.assertNotNull(row);
  assertThat("Incorrect deserialization", row.getColumns().size(), equalTo(6));
  assertThat("Incorrect deserialization", row.getColumns().get(0), equalTo(1511897796092L));
  assertThat("Incorrect deserialization", row.getColumns().get(1), equalTo(1L));
  assertThat("Incorrect deserialization", row.getColumns().get(2), equalTo("item_1"));
  assertThat("Incorrect deserialization", row.getColumns().get(3), equalTo(10.0));
  assertThat("Incorrect deserialization", ((Double[]) row.getColumns().get(4)).length, equalTo(1));
  assertThat("Incorrect deserialization", ((Map) row.getColumns().get(5)).size(), equalTo(1));
}
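The getSerializedRow helper isn't shown on this page. A plausible reconstruction is sketched below, under the assumption that it builds an Avro GenericRecord from the row and serializes it with a KafkaAvroSerializer wired to the same mock client, so the deserializer can resolve the schema id. The field ordering and type handling here are assumptions, not the project's code:

// Hypothetical reconstruction of the helper used above.
private static byte[] getSerializedRow(final String topic,
                                       final SchemaRegistryClient schemaRegistryClient,
                                       final org.apache.avro.Schema avroSchema,
                                       final GenericRow genericRow) {
  final GenericRecord record = new GenericData.Record(avroSchema);
  final List<org.apache.avro.Schema.Field> fields = avroSchema.getFields();
  for (int i = 0; i < fields.size(); i++) {
    // Assumes columns line up with the Avro fields; array/map columns may
    // need converting to Avro collection types before this will serialize.
    record.put(fields.get(i).name(), genericRow.getColumns().get(i));
  }
  final KafkaAvroSerializer serializer = new KafkaAvroSerializer(schemaRegistryClient);
  return serializer.serialize(topic, record);
}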
Use of io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient in project ksql by confluentinc.
From class KsqlGenericRowAvroDeserializerTest, method shouldDeserializeIfThereAreRedundantFields().
@Test
public void shouldDeserializeIfThereAreRedundantFields() {
  final org.apache.kafka.connect.data.Schema newSchema = SchemaBuilder.struct()
      .field("ordertime".toUpperCase(), org.apache.kafka.connect.data.Schema.INT64_SCHEMA)
      .field("orderid".toUpperCase(), org.apache.kafka.connect.data.Schema.INT64_SCHEMA)
      .field("itemid".toUpperCase(), org.apache.kafka.connect.data.Schema.STRING_SCHEMA)
      .field("orderunits".toUpperCase(), org.apache.kafka.connect.data.Schema.FLOAT64_SCHEMA)
      .build();
  final SchemaRegistryClient schemaRegistryClient = new MockSchemaRegistryClient();
  final List<Object> columns = Arrays.asList(
      1511897796092L, 1L, "item_1", 10.0,
      new Double[] { 100.0 },
      Collections.singletonMap("key1", 100.0));
  final GenericRow genericRow = new GenericRow(columns);
  final KsqlGenericRowAvroDeserializer deserializer =
      new KsqlGenericRowAvroDeserializer(newSchema, schemaRegistryClient, false);
  final byte[] serializedRow = getSerializedRow("t1", schemaRegistryClient, avroSchema, genericRow);
  final GenericRow row = deserializer.deserialize("t1", serializedRow);

  Assert.assertNotNull(row);
  // The target schema declares only four fields, so the extra columns are dropped.
  assertThat("Incorrect deserialization", row.getColumns().size(), equalTo(4));
  assertThat("Incorrect deserialization", (Long) row.getColumns().get(0), equalTo(1511897796092L));
  assertThat("Incorrect deserialization", (Long) row.getColumns().get(1), equalTo(1L));
  assertThat("Incorrect deserialization", (String) row.getColumns().get(2), equalTo("item_1"));
}
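Because the target Connect schema declares only ORDERTIME, ORDERID, ITEMID, and ORDERUNITS, the array and map columns of the six-column Avro record do not survive deserialization. A throwaway way to inspect that mapping, purely illustrative:

// Print each target field against the deserialized value at its index.
for (org.apache.kafka.connect.data.Field field : newSchema.fields()) {
  System.out.printf("%s (%s) -> %s%n",
      field.name(), field.schema().type(), row.getColumns().get(field.index()));
}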