Use of io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient in project ksql by confluentinc.
From the class ProjectNodeTest, the method shouldCreateProjectionWithFieldNameExpressionPairs:
@Test
public void shouldCreateProjectionWithFieldNameExpressionPairs() {
  mockSourceNode();
  final BooleanLiteral trueExpression = new BooleanLiteral("true");
  final BooleanLiteral falseExpression = new BooleanLiteral("false");
  EasyMock.expect(stream.select(Arrays.asList(
      new Pair<>("field1", trueExpression),
      new Pair<>("field2", falseExpression))))
      .andReturn(stream);
  EasyMock.replay(source, stream);
  final ProjectNode node = new ProjectNode(
      new PlanNodeId("1"),
      source,
      SchemaBuilder.struct()
          .field("field1", Schema.STRING_SCHEMA)
          .field("field2", Schema.STRING_SCHEMA)
          .build(),
      Arrays.asList(trueExpression, falseExpression));
  node.buildStream(builder, ksqlConfig, kafkaTopicClient, functionRegistry, props,
      new MockSchemaRegistryClient());
  EasyMock.verify(stream);
}
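The test above follows EasyMock's record/replay/verify lifecycle: expectations are recorded with expect(...).andReturn(...), replay(...) switches the mocks into replay mode, and verify(...) fails the test if a recorded expectation was never met. A minimal, self-contained illustration of that lifecycle (the List mock below is illustrative only, not part of the ksql test):

import static org.easymock.EasyMock.createMock;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.replay;
import static org.easymock.EasyMock.verify;

import java.util.List;
import org.junit.Assert;
import org.junit.Test;

public class EasyMockLifecycleExample {

  @Test
  @SuppressWarnings("unchecked")
  public void recordReplayVerify() {
    List<String> mockList = createMock(List.class);
    expect(mockList.size()).andReturn(2);    // record: size() must be called and returns 2
    replay(mockList);                        // stop recording; the mock now enforces expectations
    Assert.assertEquals(2, mockList.size()); // exercise the mock
    verify(mockList);                        // fails the test if size() was never called
  }
}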
Use of io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient in project ksql by confluentinc.
From the class PhysicalPlanBuilderTest, the method buildPhysicalPlanBuilder:
private PhysicalPlanBuilder buildPhysicalPlanBuilder(Map<String, Object> overrideProperties) {
  final StreamsBuilder streamsBuilder = new StreamsBuilder();
  final FunctionRegistry functionRegistry = new FunctionRegistry();
  Map<String, Object> configMap = new HashMap<>();
  configMap.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
  configMap.put("application.id", "KSQL");
  configMap.put("commit.interval.ms", 0);
  configMap.put("cache.max.bytes.buffering", 0);
  configMap.put("auto.offset.reset", "earliest");
  return new PhysicalPlanBuilder(
      streamsBuilder,
      new KsqlConfig(configMap),
      new FakeKafkaTopicClient(),
      functionRegistry,
      overrideProperties,
      false,
      metaStore,
      new MockSchemaRegistryClient(),
      testKafkaStreamsBuilder);
}
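Every raw string key in the map above has a named constant in the Kafka client libraries ("application.id" is StreamsConfig.APPLICATION_ID_CONFIG, and so on); using the constants avoids silent typos. An equivalent configuration map, as a sketch:

import java.util.HashMap;
import java.util.Map;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.streams.StreamsConfig;

// The same settings as above, expressed with the named config constants.
Map<String, Object> configMap = new HashMap<>();
configMap.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
configMap.put(StreamsConfig.APPLICATION_ID_CONFIG, "KSQL");
configMap.put(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG, 0);
configMap.put(StreamsConfig.CACHE_MAX_BYTES_BUFFERING_CONFIG, 0);
configMap.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");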
Use of io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient in project ksql by confluentinc.
From the class KsqlResourceTest, the method setUp:
@Before
public void setUp() throws IOException, RestClientException {
  SchemaRegistryClient schemaRegistryClient = new MockSchemaRegistryClient();
  registerSchema(schemaRegistryClient);
  ksqlRestConfig = new KsqlRestConfig(TestKsqlResourceUtil.getDefaultKsqlConfig());
  KsqlConfig ksqlConfig = new KsqlConfig(ksqlRestConfig.getKsqlConfigProperties());
  ksqlEngine = new KsqlEngine(
      ksqlConfig, new MockKafkaTopicClient(), schemaRegistryClient, new MetaStoreImpl());
}
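The registerSchema helper is not shown in this excerpt. Because MockSchemaRegistryClient holds schemas in an in-memory map rather than calling a real Schema Registry, a helper like it only needs to parse an Avro schema and register it under a subject. A hypothetical sketch (the subject name and schema below are assumptions, not the real helper):

import java.io.IOException;
import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient;
import io.confluent.kafka.schemaregistry.client.rest.exceptions.RestClientException;
import org.apache.avro.Schema;

// Hypothetical: registers a trivial Avro schema under a "<topic>-value" subject,
// which is where the Avro serializers/deserializers look it up by convention.
private static void registerSchema(SchemaRegistryClient client)
    throws IOException, RestClientException {
  String schemaStr = "{\"type\": \"record\", \"name\": \"test\","
      + " \"fields\": [{\"name\": \"f1\", \"type\": \"string\"}]}";
  Schema schema = new Schema.Parser().parse(schemaStr);
  client.register("ksql_test-value", schema);
}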
Use of io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient in project ksql by confluentinc.
From the class KsqlGenericRowAvroDeserializerTest, the method shouldDeserializeWithMissingFields:
@Test
public void shouldDeserializeWithMissingFields() {
  String schemaStr1 = "{"
      + "\"namespace\": \"kql\","
      + " \"name\": \"orders\","
      + " \"type\": \"record\","
      + " \"fields\": ["
      + " {\"name\": \"orderTime\", \"type\": \"long\"},"
      + " {\"name\": \"orderId\", \"type\": \"long\"},"
      + " {\"name\": \"itemId\", \"type\": \"string\"},"
      + " {\"name\": \"orderUnits\", \"type\": \"double\"}"
      + " ]"
      + "}";
  Schema.Parser parser = new Schema.Parser();
  Schema avroSchema1 = parser.parse(schemaStr1);
  SchemaRegistryClient schemaRegistryClient = new MockSchemaRegistryClient();
  List<Object> columns = Arrays.asList(1511897796092L, 1L, "item_1", 10.0);
  GenericRow genericRow = new GenericRow(columns);
  byte[] serializedRow = getSerializedRow("t1", schemaRegistryClient, avroSchema1, genericRow);
  KsqlGenericRowAvroDeserializer ksqlGenericRowAvroDeserializer =
      new KsqlGenericRowAvroDeserializer(schema, schemaRegistryClient, false);
  GenericRow row = ksqlGenericRowAvroDeserializer.deserialize("t1", serializedRow);
  assertThat("Incorrect deserialization", row.getColumns().size(), equalTo(6));
  assertThat("Incorrect deserialization", (Long) row.getColumns().get(0), equalTo(1511897796092L));
  assertThat("Incorrect deserialization", (Long) row.getColumns().get(1), equalTo(1L));
  assertThat("Incorrect deserialization", (String) row.getColumns().get(2), equalTo("item_1"));
  // Columns 4 and 5 are absent from the writer's four-field schema, so they come back as null.
  Assert.assertNull(row.getColumns().get(4));
  Assert.assertNull(row.getColumns().get(5));
}
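The getSerializedRow helper is not shown either. One plausible shape for it, assuming it copies the row's columns into an Avro GenericRecord in field order and serializes the record with the registry-aware KafkaAvroSerializer (the real helper may differ):

import java.util.List;
import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient;
import io.confluent.kafka.serializers.KafkaAvroSerializer;
import io.confluent.ksql.GenericRow;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericRecord;

// Hypothetical sketch: builds a GenericRecord from the row and serializes it.
// KafkaAvroSerializer auto-registers the schema with the (mock) registry by default.
private static byte[] getSerializedRow(String topic,
                                       SchemaRegistryClient schemaRegistryClient,
                                       Schema avroSchema,
                                       GenericRow genericRow) {
  GenericRecord record = new GenericData.Record(avroSchema);
  List<Schema.Field> fields = avroSchema.getFields();
  for (int i = 0; i < fields.size(); i++) {
    record.put(fields.get(i).name(), genericRow.getColumns().get(i));
  }
  KafkaAvroSerializer serializer = new KafkaAvroSerializer(schemaRegistryClient);
  return serializer.serialize(topic, record);
}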
Use of io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient in project ksql by confluentinc.
From the class KsqlGenericRowAvroSerializerTest, the method shouldSerializeRowCorrectly:
@Test
public void shouldSerializeRowCorrectly() {
  SchemaRegistryClient schemaRegistryClient = new MockSchemaRegistryClient();
  KsqlGenericRowAvroSerializer ksqlGenericRowAvroSerializer =
      new KsqlGenericRowAvroSerializer(schema, schemaRegistryClient, new KsqlConfig(new HashMap<>()));
  List<Object> columns = Arrays.asList(
      1511897796092L, 1L, "item_1", 10.0,
      new Double[] { 100.0 },
      Collections.singletonMap("key1", 100.0));
  GenericRow genericRow = new GenericRow(columns);
  byte[] serializedRow = ksqlGenericRowAvroSerializer.serialize("t1", genericRow);
  KafkaAvroDeserializer kafkaAvroDeserializer = new KafkaAvroDeserializer(schemaRegistryClient);
  GenericRecord genericRecord = (GenericRecord) kafkaAvroDeserializer.deserialize("t1", serializedRow);
  Assert.assertNotNull(genericRecord);
  // Field names in the generated Avro schema are upper-case, hence the toUpperCase() lookups.
  assertThat("Incorrect serialization.", genericRecord.get("ordertime".toUpperCase()), equalTo(1511897796092L));
  assertThat("Incorrect serialization.", genericRecord.get("orderid".toUpperCase()), equalTo(1L));
  assertThat("Incorrect serialization.", genericRecord.get("itemid".toUpperCase()).toString(), equalTo("item_1"));
  assertThat("Incorrect serialization.", genericRecord.get("orderunits".toUpperCase()), equalTo(10.0));
  GenericData.Array array = (GenericData.Array) genericRecord.get("arraycol".toUpperCase());
  Map map = (Map) genericRecord.get("mapcol".toUpperCase());
  assertThat("Incorrect serialization.", array.size(), equalTo(1));
  assertThat("Incorrect serialization.", array.get(0), equalTo(100.0));
  assertThat("Incorrect serialization.", map.size(), equalTo(1));
  // Avro decodes strings as org.apache.avro.util.Utf8, so the map key must be looked up as Utf8.
  assertThat("Incorrect serialization.", map.get(new Utf8("key1")), equalTo(100.0));
}
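Because the serializer auto-registers schemas by default, the schema written for topic "t1" above should afterwards be retrievable from the same in-memory mock under the conventional "t1-value" subject. A minimal sketch, assuming MockSchemaRegistryClient supports the metadata lookup in this version:

import io.confluent.kafka.schemaregistry.client.SchemaMetadata;
import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient;

// Reads back the schema that the serializer auto-registered for topic "t1".
private static void printRegisteredSchema(SchemaRegistryClient schemaRegistryClient)
    throws Exception {
  SchemaMetadata latest = schemaRegistryClient.getLatestSchemaMetadata("t1-value");
  System.out.println("id=" + latest.getId()
      + " version=" + latest.getVersion()
      + " schema=" + latest.getSchema());
}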