Example usage of org.apache.kafka.common.serialization.StringDeserializer in the confluentinc/ksql project: class JoinIntTest, method shouldLeftJoinOrderAndItems.
/**
 * Creates a persistent query that LEFT JOINs the order stream with the item table
 * (filtered to key ITEM_1) and polls until the joined row appears in the result stream.
 */
private void shouldLeftJoinOrderAndItems(String testStreamName, String orderStreamTopic, String orderStreamName, String itemTableName, DataSource.DataSourceSerDe dataSourceSerDe) throws Exception {
  final String createStreamSql = String.format(
      "CREATE STREAM %s AS SELECT ORDERID, ITEMID, ORDERUNITS, DESCRIPTION FROM %s"
          + " LEFT JOIN %s on %s.ITEMID = %s.ID WHERE %s.ITEMID = 'ITEM_1' ;",
      testStreamName, orderStreamName, itemTableName, orderStreamName, itemTableName, orderStreamName);
  ksqlContext.sql(createStreamSql);

  final Schema schema = ksqlContext.getMetaStore().getSource(testStreamName).getSchema();

  // Single expected joined row for key ITEM_1; the two leading nulls are the
  // rowKey/rowTime placeholder columns.
  final Map<String, GenericRow> expected = Collections.singletonMap(
      "ITEM_1",
      new GenericRow(Arrays.asList(null, null, "ORDER_1", "ITEM_1", 10.0, "home cinema")));

  final Map<String, GenericRow> consumed = new HashMap<>();
  TestUtils.waitForCondition(() -> {
    consumed.putAll(testHarness.consumeData(
        testStreamName, schema, 1, new StringDeserializer(),
        IntegrationTestHarness.RESULTS_POLL_MAX_TIME_MS, dataSourceSerDe));
    if (consumed.equals(expected)) {
      return true;
    }
    try {
      // The join may not be triggered first time around due to the order in which
      // the consumer pulls the records back, so publish the order data again to
      // make the stream trigger the join.
      testHarness.publishTestData(orderStreamTopic, orderDataProvider, now, dataSourceSerDe);
    } catch (Exception e) {
      throw new RuntimeException(e);
    }
    return false;
  }, IntegrationTestHarness.RESULTS_POLL_MAX_TIME_MS * 2 + 30000, "failed to complete join correctly");
}
Example usage of org.apache.kafka.common.serialization.StringDeserializer in the confluentinc/ksql project: class StreamsSelectAndProjectIntTest, method testSelectProjectKeyTimestamp.
/**
 * Projects ROWKEY and ROWTIME into regular columns (RKEY/RTIME), filtered to
 * ITEM_8, and verifies the projected timestamp matches the published record's
 * broker-assigned timestamp.
 */
private void testSelectProjectKeyTimestamp(String resultStream, String inputStreamName, DataSource.DataSourceSerDe dataSourceSerDe, Map<String, RecordMetadata> recordMetadataMap) throws Exception {
  ksqlContext.sql(String.format(
      "CREATE STREAM %s AS SELECT ROWKEY AS RKEY, ROWTIME AS RTIME, ITEMID FROM %s"
          + " WHERE ORDERUNITS > 20 AND ITEMID = 'ITEM_8';",
      resultStream, inputStreamName));

  final Schema schema = ksqlContext.getMetaStore().getSource(resultStream).getSchema();

  final Map<String, GenericRow> actual = testHarness.consumeData(
      resultStream, schema, dataProvider.data().size(), new StringDeserializer(),
      IntegrationTestHarness.RESULTS_POLL_MAX_TIME_MS, dataSourceSerDe);

  // Exactly one row should survive the filter, keyed by "8"; RTIME must equal
  // the timestamp recorded when the source row was published.
  final Map<String, GenericRow> expected = Collections.singletonMap(
      "8",
      new GenericRow(Arrays.asList(null, null, "8", recordMetadataMap.get("8").timestamp(), "ITEM_8")));

  assertThat(actual, equalTo(expected));
}
Example usage of org.apache.kafka.common.serialization.StringDeserializer in the confluentinc/ksql project: class StreamsSelectAndProjectIntTest, method testSelectProjectAvroJson.
/**
 * Verifies that a projection read from an Avro-backed stream can be written
 * out in JSON format and consumed back correctly.
 */
@Test
public void testSelectProjectAvroJson() throws Exception {
  final String resultStream = "PROJECT_STREAM_AVRO";

  ksqlContext.sql(String.format(
      "CREATE STREAM %s WITH ( value_format = 'JSON') AS SELECT ITEMID, ORDERUNITS, PRICEARRAY FROM %s;",
      resultStream, avroStreamName));

  final Schema schema = ksqlContext.getMetaStore().getSource(resultStream).getSchema();
  final Map<String, GenericRow> rows = testHarness.consumeData(
      resultStream, schema, dataProvider.data().size(), new StringDeserializer(),
      IntegrationTestHarness.RESULTS_POLL_MAX_TIME_MS, DataSource.DataSourceSerDe.JSON);

  final GenericRow firstRow = rows.values().iterator().next();
  // Columns 0 and 1 are rowKey and rowTime, so ITEMID is at index 2.
  Assert.assertEquals("ITEM_1", firstRow.getColumns().get(2).toString());
}
Example usage of org.apache.kafka.common.serialization.StringDeserializer in the confluentinc/ksql project: class StreamsSelectAndProjectIntTest, method testSelectStar.
/**
 * Verifies that a SELECT * stream reproduces the source data unchanged for the
 * given serde format.
 */
private void testSelectStar(String resultStream, String inputStreamName, DataSource.DataSourceSerDe dataSourceSerDe) throws Exception {
  ksqlContext.sql(String.format("CREATE STREAM %s AS SELECT * FROM %s;", resultStream, inputStreamName));

  final Map<String, GenericRow> actual = testHarness.consumeData(
      resultStream, dataProvider.schema(), dataProvider.data().size(), new StringDeserializer(),
      IntegrationTestHarness.RESULTS_POLL_MAX_TIME_MS, dataSourceSerDe);

  assertThat(actual, equalTo(dataProvider.data()));
}
Example usage of org.apache.kafka.common.serialization.StringDeserializer in the confluentinc/ksql project: class UdfIntTest, method testApplyUdfsToColumns.
/**
 * Exercises arithmetic, array indexing, map lookups and comparison expressions
 * in the projection, filtered to the single row with key "8", and checks the
 * computed column values.
 */
private void testApplyUdfsToColumns(String resultStreamName, String inputStreamName, DataSource.DataSourceSerDe dataSourceSerde) throws Exception {
  final String selectColumns =
      "ITEMID, ORDERUNITS*10, PRICEARRAY[0]+10, KEYVALUEMAP['key1']*KEYVALUEMAP['key2']+10, PRICEARRAY[1]>1000";
  final String whereClause = "ORDERUNITS > 20 AND ITEMID LIKE '%_8'";
  ksqlContext.sql(String.format(
      "CREATE STREAM %s AS SELECT %s FROM %s WHERE %s;",
      resultStreamName, selectColumns, inputStreamName, whereClause));

  final Schema schema = ksqlContext.getMetaStore().getSource(resultStreamName).getSchema();

  // One surviving row keyed by "8"; the two leading nulls are the
  // rowKey/rowTime placeholder columns.
  final Map<String, GenericRow> expected = Collections.singletonMap(
      "8",
      new GenericRow(Arrays.asList(null, null, "ITEM_8", 800.0, 1110.0, 12.0, true)));
  final Map<String, GenericRow> actual = testHarness.consumeData(
      resultStreamName, schema, 4, new StringDeserializer(),
      IntegrationTestHarness.RESULTS_POLL_MAX_TIME_MS, dataSourceSerde);

  assertThat(actual, equalTo(expected));
}
Aggregations