Search in sources:

Example 1 with StringDeserializer

Use of org.apache.kafka.common.serialization.StringDeserializer in the project kafka by apache.

From the class KStreamAggregationIntegrationTest, method shouldCount.

@Test
public void shouldCount() throws Exception {
    // Publish one batch of records before the counting topology is running.
    produceMessages(mockTime.milliseconds());
    groupedStream.count("count-by-key").to(Serdes.String(), Serdes.Long(), outputTopic);
    startStreams();
    // Publish a second batch so every key (A..E) is observed twice and the
    // running count per key goes 1 -> 2.
    produceMessages(mockTime.milliseconds());
    final List<KeyValue<String, Long>> results = receiveMessages(new StringDeserializer(), new LongDeserializer(), 10);
    // Order is not guaranteed on the output topic; sort deterministically
    // with the class's shared comparator before asserting.
    results.sort(KStreamAggregationIntegrationTest::compare);
    assertThat(results, is(Arrays.asList(
        KeyValue.pair("A", 1L), KeyValue.pair("A", 2L),
        KeyValue.pair("B", 1L), KeyValue.pair("B", 2L),
        KeyValue.pair("C", 1L), KeyValue.pair("C", 2L),
        KeyValue.pair("D", 1L), KeyValue.pair("D", 2L),
        KeyValue.pair("E", 1L), KeyValue.pair("E", 2L))));
}
Also used : KeyValue(org.apache.kafka.streams.KeyValue) LongDeserializer(org.apache.kafka.common.serialization.LongDeserializer) StringDeserializer(org.apache.kafka.common.serialization.StringDeserializer) Test(org.junit.Test)

Example 2 with StringDeserializer

Use of org.apache.kafka.common.serialization.StringDeserializer in the project ksql by confluentinc.

From the class StreamsSelectAndProjectIntTest, method testTimestampColumnSelection.

private void testTimestampColumnSelection(String stream1Name, String stream2Name, String inputStreamName, DataSource.DataSourceSerDe dataSourceSerDe, Map<String, RecordMetadata> recordMetadataMap) throws Exception {
    // Two chained CSAS statements: the first derives RTIME/RT100 from ROWTIME and
    // re-stamps records via WITH (timestamp='RTIME'); the second re-projects the
    // derived columns so the override can be observed downstream.
    final String statements = String.format(
        "CREATE STREAM %s WITH (timestamp='RTIME') AS SELECT ROWKEY AS RKEY, ROWTIME+10000 AS RTIME, ROWTIME+100 AS RT100, ORDERID, ITEMID FROM %s WHERE ORDERUNITS > 20 AND ITEMID = 'ITEM_8'; "
            + "CREATE STREAM %s AS SELECT ROWKEY AS NEWRKEY, ROWTIME AS NEWRTIME, RKEY, RTIME, RT100, ORDERID, ITEMID FROM %s ;",
        stream1Name, inputStreamName, stream2Name, stream1Name);
    ksqlContext.sql(statements);
    // The produce-time timestamp of key "8" anchors all expected time columns.
    final long sourceTimestamp = recordMetadataMap.get("8").timestamp();
    final Map<String, GenericRow> expectedResults = new HashMap<>();
    expectedResults.put("8", new GenericRow(Arrays.asList(null, null, "8", sourceTimestamp + 10000, "8", sourceTimestamp + 10000, sourceTimestamp + 100, "ORDER_6", "ITEM_8")));
    final Schema resultSchema = ksqlContext.getMetaStore().getSource(stream2Name).getSchema();
    final Map<String, GenericRow> actualResults = testHarness.consumeData(stream2Name, resultSchema, expectedResults.size(), new StringDeserializer(), IntegrationTestHarness.RESULTS_POLL_MAX_TIME_MS, dataSourceSerDe);
    assertThat(actualResults, equalTo(expectedResults));
}
Also used : GenericRow(io.confluent.ksql.GenericRow) HashMap(java.util.HashMap) StringDeserializer(org.apache.kafka.common.serialization.StringDeserializer) Schema(org.apache.kafka.connect.data.Schema)

Example 3 with StringDeserializer

Use of org.apache.kafka.common.serialization.StringDeserializer in the project ksql by confluentinc.

From the class StreamsSelectAndProjectIntTest, method testSelectProject.

private void testSelectProject(String resultStream, String inputStreamName, DataSource.DataSourceSerDe dataSourceSerDe) throws Exception {
    // Materialize a three-column projection of the input stream.
    final String createStatement = String.format("CREATE STREAM %s AS SELECT ITEMID, ORDERUNITS, PRICEARRAY FROM %s;", resultStream, inputStreamName);
    ksqlContext.sql(createStatement);
    final Schema resultSchema = ksqlContext.getMetaStore().getSource(resultStream).getSchema();
    final Map<String, GenericRow> consumed = testHarness.consumeData(resultStream, resultSchema, dataProvider.data().size(), new StringDeserializer(), IntegrationTestHarness.RESULTS_POLL_MAX_TIME_MS, dataSourceSerDe);
    final GenericRow firstRow = consumed.values().iterator().next();
    // Skip over the first two values (rowKey, rowTime); ITEMID is at index 2.
    Assert.assertEquals("ITEM_1", firstRow.getColumns().get(2));
}
Also used : GenericRow(io.confluent.ksql.GenericRow) StringDeserializer(org.apache.kafka.common.serialization.StringDeserializer) Schema(org.apache.kafka.connect.data.Schema)

Example 4 with StringDeserializer

Use of org.apache.kafka.common.serialization.StringDeserializer in the project ksql by confluentinc.

From the class StreamsSelectAndProjectIntTest, method testSelectWithFilter.

private void testSelectWithFilter(String resultStream, String inputStreamName, DataSource.DataSourceSerDe dataSourceSerDe) throws Exception {
    // Create a filtered copy of the input stream. Exactly four of the source
    // records satisfy ORDERUNITS > 40, so four rows are expected on the output.
    final String statement = String.format("CREATE STREAM %s AS SELECT * FROM %s WHERE ORDERUNITS > 40;", resultStream, inputStreamName);
    ksqlContext.sql(statement);
    final int expectedCount = 4;
    final Map<String, GenericRow> consumed = testHarness.consumeData(resultStream, dataProvider.schema(), expectedCount, new StringDeserializer(), IntegrationTestHarness.RESULTS_POLL_MAX_TIME_MS, dataSourceSerDe);
    Assert.assertEquals(expectedCount, consumed.size());
}
Also used : GenericRow(io.confluent.ksql.GenericRow) StringDeserializer(org.apache.kafka.common.serialization.StringDeserializer)

Example 5 with StringDeserializer

Use of org.apache.kafka.common.serialization.StringDeserializer in the project ksql by confluentinc.

From the class UdfIntTest, method testTimestampColumnSelection.

private void testTimestampColumnSelection(String stream1Name, String stream2Name, String inputStreamName, DataSource.DataSourceSerDe dataSourceSerDe, Map<String, RecordMetadata> recordMetadataMap) throws Exception {
    // Two chained CSAS statements: the first re-stamps records with RTIME via
    // WITH (timestamp='RTIME'); the second re-projects the derived columns.
    final String statements = String.format(
        "CREATE STREAM %s WITH (timestamp='RTIME') AS SELECT ROWKEY AS RKEY, ROWTIME+10000 AS RTIME, ROWTIME+100 AS RT100, ORDERID, ITEMID FROM %s WHERE ORDERUNITS > 20 AND ITEMID = 'ITEM_8'; "
            + "CREATE STREAM %s AS SELECT ROWKEY AS NEWRKEY, ROWTIME AS NEWRTIME, RKEY, RTIME, RT100, ORDERID, ITEMID FROM %s ;",
        stream1Name, inputStreamName, stream2Name, stream1Name);
    ksqlContext.sql(statements);
    // Unlike the StreamsSelectAndProjectIntTest variant, the implicit
    // ROWTIME/ROWKEY columns are stripped from the schema before comparison.
    final Schema resultSchema = SchemaUtil.removeImplicitRowTimeRowKeyFromSchema(ksqlContext.getMetaStore().getSource(stream2Name).getSchema());
    // The produce-time timestamp of key "8" anchors all expected time columns.
    final long sourceTimestamp = recordMetadataMap.get("8").timestamp();
    final Map<String, GenericRow> expectedResults = new HashMap<>();
    expectedResults.put("8", new GenericRow(Arrays.asList("8", sourceTimestamp + 10000, "8", sourceTimestamp + 10000, sourceTimestamp + 100, "ORDER_6", "ITEM_8")));
    final Map<String, GenericRow> actualResults = testHarness.consumeData(stream2Name, resultSchema, expectedResults.size(), new StringDeserializer(), IntegrationTestHarness.RESULTS_POLL_MAX_TIME_MS, dataSourceSerDe);
    assertThat(actualResults, equalTo(expectedResults));
}
Also used : GenericRow(io.confluent.ksql.GenericRow) HashMap(java.util.HashMap) StringDeserializer(org.apache.kafka.common.serialization.StringDeserializer) Schema(org.apache.kafka.connect.data.Schema)

Aggregations

StringDeserializer (org.apache.kafka.common.serialization.StringDeserializer)152 Test (org.junit.Test)91 StringSerializer (org.apache.kafka.common.serialization.StringSerializer)59 TopologyTestDriver (org.apache.kafka.streams.TopologyTestDriver)46 StreamsBuilder (org.apache.kafka.streams.StreamsBuilder)35 HashMap (java.util.HashMap)33 Properties (java.util.Properties)32 IntegerDeserializer (org.apache.kafka.common.serialization.IntegerDeserializer)31 Windowed (org.apache.kafka.streams.kstream.Windowed)31 List (java.util.List)29 KeyValue (org.apache.kafka.streams.KeyValue)29 IntegrationTest (org.apache.kafka.test.IntegrationTest)27 ArrayList (java.util.ArrayList)26 LongDeserializer (org.apache.kafka.common.serialization.LongDeserializer)25 Map (java.util.Map)20 KafkaConsumer (org.apache.kafka.clients.consumer.KafkaConsumer)20 IntegerSerializer (org.apache.kafka.common.serialization.IntegerSerializer)17 Serdes (org.apache.kafka.common.serialization.Serdes)17 KeyValueTimestamp (org.apache.kafka.streams.KeyValueTimestamp)17 KStream (org.apache.kafka.streams.kstream.KStream)17