Search in sources :

Example 81 with StringDeserializer

use of org.apache.kafka.common.serialization.StringDeserializer in project ksql by confluentinc.

The class JoinIntTest, method shouldLeftJoinOrderAndItems.

/**
 * Creates a stream that LEFT JOINs the order stream to the item table (filtered to a
 * single key) and polls the result topic until the expected joined row shows up.
 * Republishes the source data on each failed poll, since the join may not fire on the
 * first pass depending on consumer record ordering.
 */
private void shouldLeftJoinOrderAndItems(String testStreamName, String orderStreamTopic, String orderStreamName, String itemTableName, DataSource.DataSourceSerDe dataSourceSerDe) throws Exception {
    final String createStreamSql = String.format(
        "CREATE STREAM %s AS SELECT ORDERID, ITEMID, ORDERUNITS, DESCRIPTION FROM %s LEFT JOIN %s on %s.ITEMID = %s.ID WHERE %s.ITEMID = 'ITEM_1' ;",
        testStreamName, orderStreamName, itemTableName, orderStreamName, itemTableName, orderStreamName);
    ksqlContext.sql(createStreamSql);

    final Schema outputSchema = ksqlContext.getMetaStore().getSource(testStreamName).getSchema();
    // The single joined row we expect for key ITEM_1 (leading nulls cover rowKey/rowTime).
    final Map<String, GenericRow> expected = Collections.singletonMap(
        "ITEM_1", new GenericRow(Arrays.asList(null, null, "ORDER_1", "ITEM_1", 10.0, "home cinema")));

    final Map<String, GenericRow> collected = new HashMap<>();
    TestUtils.waitForCondition(() -> {
        collected.putAll(testHarness.consumeData(testStreamName, outputSchema, 1, new StringDeserializer(), IntegrationTestHarness.RESULTS_POLL_MAX_TIME_MS, dataSourceSerDe));
        if (collected.equals(expected)) {
            return true;
        }
        // The join may not be triggered first time around due to the order in which the
        // consumer pulls the records back, so we publish again to make the stream
        // trigger the join.
        try {
            testHarness.publishTestData(orderStreamTopic, orderDataProvider, now, dataSourceSerDe);
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
        return false;
    }, IntegrationTestHarness.RESULTS_POLL_MAX_TIME_MS * 2 + 30000, "failed to complete join correctly");
}
Also used : GenericRow(io.confluent.ksql.GenericRow) HashMap(java.util.HashMap) StringDeserializer(org.apache.kafka.common.serialization.StringDeserializer) Schema(org.apache.kafka.connect.data.Schema)

Example 82 with StringDeserializer

use of org.apache.kafka.common.serialization.StringDeserializer in project ksql by confluentinc.

The class StreamsSelectAndProjectIntTest, method testSelectProjectKeyTimestamp.

/**
 * Creates a stream projecting ROWKEY/ROWTIME/ITEMID for a single filtered key and
 * asserts the consumed output matches the one expected row, using the recorded
 * publish timestamp for the RTIME column.
 */
private void testSelectProjectKeyTimestamp(String resultStream, String inputStreamName, DataSource.DataSourceSerDe dataSourceSerDe, Map<String, RecordMetadata> recordMetadataMap) throws Exception {
    final String createStreamSql = String.format(
        "CREATE STREAM %s AS SELECT ROWKEY AS RKEY, ROWTIME AS RTIME, ITEMID FROM %s WHERE ORDERUNITS > 20 AND ITEMID = 'ITEM_8';",
        resultStream, inputStreamName);
    ksqlContext.sql(createStreamSql);

    final Schema outputSchema = ksqlContext.getMetaStore().getSource(resultStream).getSchema();
    // Only key "8" survives the filter; leading nulls cover rowKey/rowTime columns.
    final Map<String, GenericRow> expected = Collections.singletonMap(
        "8", new GenericRow(Arrays.asList(null, null, "8", recordMetadataMap.get("8").timestamp(), "ITEM_8")));

    final Map<String, GenericRow> actual = testHarness.consumeData(resultStream, outputSchema, dataProvider.data().size(), new StringDeserializer(), IntegrationTestHarness.RESULTS_POLL_MAX_TIME_MS, dataSourceSerDe);
    assertThat(actual, equalTo(expected));
}
Also used : GenericRow(io.confluent.ksql.GenericRow) StringDeserializer(org.apache.kafka.common.serialization.StringDeserializer) Schema(org.apache.kafka.connect.data.Schema)

Example 83 with StringDeserializer

use of org.apache.kafka.common.serialization.StringDeserializer in project ksql by confluentinc.

The class StreamsSelectAndProjectIntTest, method testSelectProjectAvroJson.

/**
 * Creates a JSON-formatted projection over the Avro source stream and checks the
 * ITEMID column of the first consumed row.
 */
@Test
public void testSelectProjectAvroJson() throws Exception {
    final String resultStream = "PROJECT_STREAM_AVRO";
    final String createStreamSql = String.format(
        "CREATE STREAM %s WITH ( value_format = 'JSON') AS SELECT ITEMID, ORDERUNITS, PRICEARRAY FROM %s;",
        resultStream, avroStreamName);
    ksqlContext.sql(createStreamSql);

    final Schema outputSchema = ksqlContext.getMetaStore().getSource(resultStream).getSchema();
    final Map<String, GenericRow> consumed = testHarness.consumeData(resultStream, outputSchema, dataProvider.data().size(), new StringDeserializer(), IntegrationTestHarness.RESULTS_POLL_MAX_TIME_MS, DataSource.DataSourceSerDe.JSON);

    final GenericRow firstRow = consumed.values().iterator().next();
    // Skip over the first two values (rowKey, rowTime); column 2 is ITEMID.
    Assert.assertEquals("ITEM_1", firstRow.getColumns().get(2).toString());
}
Also used : GenericRow(io.confluent.ksql.GenericRow) StringDeserializer(org.apache.kafka.common.serialization.StringDeserializer) Schema(org.apache.kafka.connect.data.Schema) IntegrationTest(io.confluent.common.utils.IntegrationTest) Test(org.junit.Test)

Example 84 with StringDeserializer

use of org.apache.kafka.common.serialization.StringDeserializer in project ksql by confluentinc.

The class StreamsSelectAndProjectIntTest, method testSelectStar.

/**
 * Creates a SELECT * pass-through stream and asserts that the consumed output is
 * identical to the provider's input data.
 */
private void testSelectStar(String resultStream, String inputStreamName, DataSource.DataSourceSerDe dataSourceSerDe) throws Exception {
    ksqlContext.sql(String.format("CREATE STREAM %s AS SELECT * FROM %s;", resultStream, inputStreamName));
    final Map<String, GenericRow> actual = testHarness.consumeData(
        resultStream,
        dataProvider.schema(),
        dataProvider.data().size(),
        new StringDeserializer(),
        IntegrationTestHarness.RESULTS_POLL_MAX_TIME_MS,
        dataSourceSerDe);
    assertThat(actual, equalTo(dataProvider.data()));
}
Also used : GenericRow(io.confluent.ksql.GenericRow) StringDeserializer(org.apache.kafka.common.serialization.StringDeserializer)

Example 85 with StringDeserializer

use of org.apache.kafka.common.serialization.StringDeserializer in project ksql by confluentinc.

The class UdfIntTest, method testApplyUdfsToColumns.

/**
 * Creates a stream applying arithmetic, array-index, and map-lookup expressions to the
 * input columns, then asserts the single row surviving the WHERE filter has the
 * expected computed values.
 */
private void testApplyUdfsToColumns(String resultStreamName, String inputStreamName, DataSource.DataSourceSerDe dataSourceSerde) throws Exception {
    final String selectColumns =
        "ITEMID, ORDERUNITS*10, PRICEARRAY[0]+10, KEYVALUEMAP['key1']*KEYVALUEMAP['key2']+10, PRICEARRAY[1]>1000";
    final String whereClause = "ORDERUNITS > 20 AND ITEMID LIKE '%_8'";
    final String createStreamSql = String.format(
        "CREATE STREAM %s AS SELECT %s FROM %s WHERE %s;",
        resultStreamName, selectColumns, inputStreamName, whereClause);
    ksqlContext.sql(createStreamSql);

    final Schema outputSchema = ksqlContext.getMetaStore().getSource(resultStreamName).getSchema();
    // Only key "8" passes the filter; leading nulls cover rowKey/rowTime columns.
    final Map<String, GenericRow> expected = Collections.singletonMap(
        "8", new GenericRow(Arrays.asList(null, null, "ITEM_8", 800.0, 1110.0, 12.0, true)));

    final Map<String, GenericRow> actual = testHarness.consumeData(resultStreamName, outputSchema, 4, new StringDeserializer(), IntegrationTestHarness.RESULTS_POLL_MAX_TIME_MS, dataSourceSerde);
    assertThat(actual, equalTo(expected));
}
Also used : GenericRow(io.confluent.ksql.GenericRow) StringDeserializer(org.apache.kafka.common.serialization.StringDeserializer) Schema(org.apache.kafka.connect.data.Schema)

Aggregations

StringDeserializer (org.apache.kafka.common.serialization.StringDeserializer)152 Test (org.junit.Test)91 StringSerializer (org.apache.kafka.common.serialization.StringSerializer)59 TopologyTestDriver (org.apache.kafka.streams.TopologyTestDriver)46 StreamsBuilder (org.apache.kafka.streams.StreamsBuilder)35 HashMap (java.util.HashMap)33 Properties (java.util.Properties)32 IntegerDeserializer (org.apache.kafka.common.serialization.IntegerDeserializer)31 Windowed (org.apache.kafka.streams.kstream.Windowed)31 List (java.util.List)29 KeyValue (org.apache.kafka.streams.KeyValue)29 IntegrationTest (org.apache.kafka.test.IntegrationTest)27 ArrayList (java.util.ArrayList)26 LongDeserializer (org.apache.kafka.common.serialization.LongDeserializer)25 Map (java.util.Map)20 KafkaConsumer (org.apache.kafka.clients.consumer.KafkaConsumer)20 IntegerSerializer (org.apache.kafka.common.serialization.IntegerSerializer)17 Serdes (org.apache.kafka.common.serialization.Serdes)17 KeyValueTimestamp (org.apache.kafka.streams.KeyValueTimestamp)17 KStream (org.apache.kafka.streams.kstream.KStream)17