Search in sources:

Example 11 with GenericRow

use of io.confluent.ksql.GenericRow in project ksql by confluentinc.

Source: the class WindowingIntTest, method shouldAggregateSessionWindow.

@Test
public void shouldAggregateSessionWindow() throws Exception {
    testHarness.publishTestData(topicName, dataProvider, now);
    final String streamName = "SESSION_AGGTEST";
    // Session-windowed aggregation keyed by ORDERID: count of rows and sum of units.
    final String queryString = String.format(
        "CREATE TABLE %s AS SELECT %s FROM ORDERS WINDOW %s GROUP BY ORDERID;",
        streamName, "ORDERID, COUNT(*), SUM(ORDERUNITS)", "SESSION (10 SECONDS)");
    ksqlContext.sql(queryString);
    final Schema resultSchema = ksqlContext.getMetaStore().getSource(streamName).getSchema();
    // Expected aggregate for key ORDER_6. The doubled totals follow the original
    // "2 x items" note — presumably the dataset is published twice; TODO confirm
    // against the test harness.
    final GenericRow expectedResults =
        new GenericRow(Arrays.asList(null, null, "ORDER_6", 6, /* 2 x items */ 420.0));
    final Map<String, GenericRow> results = new HashMap<>();
    // Poll until the session-windowed output for ORDER_6 matches and all 6 keys arrived.
    TestUtils.waitForCondition(() -> {
        final Map<Windowed<String>, GenericRow> windowedResults = testHarness.consumeData(
            streamName, resultSchema, datasetOneMetaData.size(),
            new TimeWindowedDeserializer<>(new StringDeserializer()), 1000);
        updateResults(results, windowedResults);
        final GenericRow actual = results.get("ORDER_6");
        return expectedResults.equals(actual) && results.size() == 6;
    }, 60000, "didn't receive correct results within timeout");
    // AdminClient is AutoCloseable; close it so each test does not leak sockets/threads.
    try (AdminClient adminClient =
             AdminClient.create(testHarness.ksqlConfig.getKsqlStreamConfigProps())) {
        final KafkaTopicClient topicClient = new KafkaTopicClientImpl(adminClient);
        final Set<String> topicBeforeCleanup = topicClient.listTopicNames();
        assertThat("Expected to have 5 topics instead have : " + topicBeforeCleanup.size(),
            topicBeforeCleanup.size(), equalTo(5));
        // Closing the query should trigger cleanup of its internal topics.
        final QueryMetadata queryMetadata = ksqlContext.getRunningQueries().iterator().next();
        queryMetadata.close();
        final Set<String> topicsAfterCleanUp = topicClient.listTopicNames();
        assertThat("Expected to see 3 topics after clean up but seeing " + topicsAfterCleanUp.size(),
            topicsAfterCleanUp.size(), equalTo(3));
        assertThat(topicClient.getTopicCleanupPolicy(streamName),
            equalTo(KafkaTopicClient.TopicCleanupPolicy.DELETE));
    }
}
Also used : QueryMetadata(io.confluent.ksql.util.QueryMetadata) HashMap(java.util.HashMap) StringDeserializer(org.apache.kafka.common.serialization.StringDeserializer) Schema(org.apache.kafka.connect.data.Schema) GenericRow(io.confluent.ksql.GenericRow) Windowed(org.apache.kafka.streams.kstream.Windowed) KafkaTopicClient(io.confluent.ksql.util.KafkaTopicClient) KafkaTopicClientImpl(io.confluent.ksql.util.KafkaTopicClientImpl) AdminClient(org.apache.kafka.clients.admin.AdminClient) IntegrationTest(io.confluent.common.utils.IntegrationTest) Test(org.junit.Test)

Example 12 with GenericRow

use of io.confluent.ksql.GenericRow in project ksql by confluentinc.

Source: the class WindowingIntTest, method shouldAggregateHoppingWindow.

@Test
public void shouldAggregateHoppingWindow() throws Exception {
    testHarness.publishTestData(topicName, dataProvider, now);
    final String streamName = "HOPPING_AGGTEST";
    // Hopping-windowed aggregation (10s window, 5s advance) filtered to ITEM_1.
    final String queryString = String.format(
        "CREATE TABLE %s AS SELECT %s FROM ORDERS WINDOW %s WHERE ITEMID = 'ITEM_1' GROUP BY ITEMID;",
        streamName, "ITEMID, COUNT(ITEMID), SUM(ORDERUNITS)",
        "HOPPING ( SIZE 10 SECONDS, ADVANCE BY 5 SECONDS)");
    ksqlContext.sql(queryString);
    final Schema resultSchema = ksqlContext.getMetaStore().getSource(streamName).getSchema();
    // Expected aggregate for key ITEM_1. The doubled totals follow the original
    // "2 x items" note — presumably the dataset is published twice; TODO confirm
    // against the test harness.
    final GenericRow expected =
        new GenericRow(Arrays.asList(null, null, "ITEM_1", 2, /* 2 x items */ 20.0));
    final Map<String, GenericRow> results = new HashMap<>();
    // Poll until the windowed output row for ITEM_1 matches the expected aggregate.
    TestUtils.waitForCondition(() -> {
        final Map<Windowed<String>, GenericRow> windowedResults = testHarness.consumeData(
            streamName, resultSchema, 1,
            new TimeWindowedDeserializer<>(new StringDeserializer()), 1000);
        updateResults(results, windowedResults);
        final GenericRow actual = results.get("ITEM_1");
        return expected.equals(actual);
    }, 60000, "didn't receive correct results within timeout");
    // AdminClient is AutoCloseable; close it so each test does not leak sockets/threads.
    try (AdminClient adminClient =
             AdminClient.create(testHarness.ksqlConfig.getKsqlStreamConfigProps())) {
        final KafkaTopicClient topicClient = new KafkaTopicClientImpl(adminClient);
        final Set<String> topicBeforeCleanup = topicClient.listTopicNames();
        assertThat("Expected to have 5 topics instead have : " + topicBeforeCleanup.size(),
            topicBeforeCleanup.size(), equalTo(5));
        // Closing the query should trigger cleanup of its internal topics.
        final QueryMetadata queryMetadata = ksqlContext.getRunningQueries().iterator().next();
        queryMetadata.close();
        final Set<String> topicsAfterCleanUp = topicClient.listTopicNames();
        assertThat("Expected to see 3 topics after clean up but seeing " + topicsAfterCleanUp.size(),
            topicsAfterCleanUp.size(), equalTo(3));
        assertThat(topicClient.getTopicCleanupPolicy(streamName),
            equalTo(KafkaTopicClient.TopicCleanupPolicy.DELETE));
    }
}
Also used : QueryMetadata(io.confluent.ksql.util.QueryMetadata) HashMap(java.util.HashMap) StringDeserializer(org.apache.kafka.common.serialization.StringDeserializer) Schema(org.apache.kafka.connect.data.Schema) GenericRow(io.confluent.ksql.GenericRow) Windowed(org.apache.kafka.streams.kstream.Windowed) KafkaTopicClient(io.confluent.ksql.util.KafkaTopicClient) KafkaTopicClientImpl(io.confluent.ksql.util.KafkaTopicClientImpl) AdminClient(org.apache.kafka.clients.admin.AdminClient) IntegrationTest(io.confluent.common.utils.IntegrationTest) Test(org.junit.Test)

Example 13 with GenericRow

use of io.confluent.ksql.GenericRow in project ksql by confluentinc.

Source: the class SelectValueMapperTest, method shouldSelectChosenColumns.

@Test
public void shouldSelectChosenColumns() throws Exception {
    // The mapper applies the projection (col0, col2, col3) to each incoming row.
    final SelectValueMapper mapper =
        createMapper("SELECT col0, col2, col3 FROM test1 WHERE col0 > 100;");
    final GenericRow input = new GenericRow(Arrays.asList(1L, "hi", "bye", 2.0F, "blah"));
    final GenericRow expected = new GenericRow(Arrays.asList(1L, "bye", 2.0F));
    assertThat(mapper.apply(input), equalTo(expected));
}
Also used : GenericRow(io.confluent.ksql.GenericRow) Test(org.junit.Test)

Example 14 with GenericRow

use of io.confluent.ksql.GenericRow in project ksql by confluentinc.

Source: the class ItemDataProvider, method buildData.

/**
 * Builds the fixed item dataset: eight rows keyed by item id, each row holding
 * the id and a human-readable description.
 */
private Map<String, GenericRow> buildData() {
    // Table of (itemId, description) pairs; the key of each map entry is the item id.
    final String[][] items = {
        { "ITEM_1", "home cinema" },
        { "ITEM_2", "clock radio" },
        { "ITEM_3", "road bike" },
        { "ITEM_4", "mountain bike" },
        { "ITEM_5", "snowboard" },
        { "ITEM_6", "iphone 10" },
        { "ITEM_7", "gopro" },
        { "ITEM_8", "cat" }
    };
    final Map<String, GenericRow> dataMap = new HashMap<>();
    for (final String[] item : items) {
        dataMap.put(item[0], new GenericRow(Arrays.asList(item[0], item[1])));
    }
    return dataMap;
}
Also used : GenericRow(io.confluent.ksql.GenericRow) HashMap(java.util.HashMap)

Example 15 with GenericRow

use of io.confluent.ksql.GenericRow in project ksql by confluentinc.

Source: the class OrderDataProvider, method buildData.

/**
 * Builds the fixed order dataset: eight rows keyed by order number, each row holding
 * (orderTime, orderId, itemId, units, priceArray, keyValueMap).
 *
 * <p>Note: orders 6, 7 and 8 deliberately share the order id "ORDER_6" so that
 * group-by-ORDERID tests aggregate across them.
 */
private Map<String, GenericRow> buildData() {
    // Shared map-valued column; intentionally the same instance in every row.
    final Map<String, Double> mapField = new HashMap<>();
    mapField.put("key1", 1.0);
    mapField.put("key2", 2.0);
    mapField.put("key3", 3.0);
    final Map<String, GenericRow> dataMap = new HashMap<>();
    // Long literals use uppercase 'L': lowercase 'l' is easily misread as the digit 1.
    dataMap.put("1", new GenericRow(Arrays.asList(1L, "ORDER_1", "ITEM_1", 10.0, new Double[] { 100.0, 110.99, 90.0 }, mapField)));
    dataMap.put("2", new GenericRow(Arrays.asList(2L, "ORDER_2", "ITEM_2", 20.0, new Double[] { 10.0, 10.99, 9.0 }, mapField)));
    dataMap.put("3", new GenericRow(Arrays.asList(3L, "ORDER_3", "ITEM_3", 30.0, new Double[] { 10.0, 10.99, 91.0 }, mapField)));
    dataMap.put("4", new GenericRow(Arrays.asList(4L, "ORDER_4", "ITEM_4", 40.0, new Double[] { 10.0, 140.99, 94.0 }, mapField)));
    dataMap.put("5", new GenericRow(Arrays.asList(5L, "ORDER_5", "ITEM_5", 50.0, new Double[] { 160.0, 160.99, 98.0 }, mapField)));
    dataMap.put("6", new GenericRow(Arrays.asList(6L, "ORDER_6", "ITEM_6", 60.0, new Double[] { 1000.0, 1100.99, 900.0 }, mapField)));
    dataMap.put("7", new GenericRow(Arrays.asList(7L, "ORDER_6", "ITEM_7", 70.0, new Double[] { 1100.0, 1110.99, 190.0 }, mapField)));
    dataMap.put("8", new GenericRow(Arrays.asList(8L, "ORDER_6", "ITEM_8", 80.0, new Double[] { 1100.0, 1110.99, 970.0 }, mapField)));
    return dataMap;
}
Also used : GenericRow(io.confluent.ksql.GenericRow) HashMap(java.util.HashMap)

Aggregations

GenericRow (io.confluent.ksql.GenericRow)65 Test (org.junit.Test)38 HashMap (java.util.HashMap)27 Schema (org.apache.kafka.connect.data.Schema)19 List (java.util.List)15 StringDeserializer (org.apache.kafka.common.serialization.StringDeserializer)15 ArrayList (java.util.ArrayList)11 MockSchemaRegistryClient (io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient)9 IntegrationTest (io.confluent.common.utils.IntegrationTest)8 SchemaRegistryClient (io.confluent.kafka.schemaregistry.client.SchemaRegistryClient)8 ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper)5 KsqlTopicSerDe (io.confluent.ksql.serde.KsqlTopicSerDe)5 KafkaTopicClient (io.confluent.ksql.util.KafkaTopicClient)5 KafkaTopicClientImpl (io.confluent.ksql.util.KafkaTopicClientImpl)5 KsqlConfig (io.confluent.ksql.util.KsqlConfig)5 Map (java.util.Map)5 GenericRecord (org.apache.avro.generic.GenericRecord)4 Windowed (org.apache.kafka.streams.kstream.Windowed)4 KafkaAvroDeserializer (io.confluent.kafka.serializers.KafkaAvroDeserializer)3 DereferenceExpression (io.confluent.ksql.parser.tree.DereferenceExpression)3