
Example 6 with GenericRow

Use of io.confluent.ksql.GenericRow in project ksql by confluentinc.

From class StreamsSelectAndProjectIntTest, method testTimestampColumnSelection.

private void testTimestampColumnSelection(String stream1Name, String stream2Name, String inputStreamName,
                                          DataSource.DataSourceSerDe dataSourceSerDe,
                                          Map<String, RecordMetadata> recordMetadataMap) throws Exception {
    final String query1String = String.format(
        "CREATE STREAM %s WITH (timestamp='RTIME') AS SELECT ROWKEY AS RKEY, "
            + "ROWTIME+10000 AS RTIME, ROWTIME+100 AS RT100, ORDERID, ITEMID "
            + "FROM %s WHERE ORDERUNITS > 20 AND ITEMID = 'ITEM_8'; "
            + "CREATE STREAM %s AS SELECT ROWKEY AS NEWRKEY, "
            + "ROWTIME AS NEWRTIME, RKEY, RTIME, RT100, ORDERID, ITEMID "
            + "FROM %s;",
        stream1Name, inputStreamName, stream2Name, stream1Name);
    ksqlContext.sql(query1String);

    Map<String, GenericRow> expectedResults = new HashMap<>();
    expectedResults.put("8", new GenericRow(Arrays.asList(
        null,
        null,
        "8",
        recordMetadataMap.get("8").timestamp() + 10000,
        "8",
        recordMetadataMap.get("8").timestamp() + 10000,
        recordMetadataMap.get("8").timestamp() + 100,
        "ORDER_6",
        "ITEM_8")));

    Schema resultSchema = ksqlContext.getMetaStore().getSource(stream2Name).getSchema();
    Map<String, GenericRow> results2 = testHarness.consumeData(
        stream2Name, resultSchema, expectedResults.size(), new StringDeserializer(),
        IntegrationTestHarness.RESULTS_POLL_MAX_TIME_MS, dataSourceSerDe);

    assertThat(results2, equalTo(expectedResults));
}
Also used: GenericRow (io.confluent.ksql.GenericRow), HashMap (java.util.HashMap), StringDeserializer (org.apache.kafka.common.serialization.StringDeserializer), Schema (org.apache.kafka.connect.data.Schema)
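
The comparison above relies on GenericRow being a thin positional wrapper around a list of column values, with leading null entries standing in for columns the test ignores. Here is a minimal sketch of that contract, using only the two APIs visible in these tests (the List-based constructor and getColumns()); the class name and values are illustrative:

import io.confluent.ksql.GenericRow;
import java.util.Arrays;

public class GenericRowSketch {
    public static void main(String[] args) {
        // GenericRow wraps a positional list of column values, one entry per
        // field of the row's schema; the values here are illustrative.
        GenericRow row = new GenericRow(Arrays.asList("8", "ORDER_6", "ITEM_8"));

        // Columns come back in schema order via getColumns().
        System.out.println(row.getColumns().get(2)); // prints ITEM_8
    }
}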

Example 7 with GenericRow

Use of io.confluent.ksql.GenericRow in project ksql by confluentinc.

From class StreamsSelectAndProjectIntTest, method testSelectProject.

private void testSelectProject(String resultStream, String inputStreamName,
                               DataSource.DataSourceSerDe dataSourceSerDe) throws Exception {
    ksqlContext.sql(String.format(
        "CREATE STREAM %s AS SELECT ITEMID, ORDERUNITS, PRICEARRAY FROM %s;",
        resultStream, inputStreamName));

    Schema resultSchema = ksqlContext.getMetaStore().getSource(resultStream).getSchema();
    Map<String, GenericRow> easyOrdersData = testHarness.consumeData(
        resultStream, resultSchema, dataProvider.data().size(), new StringDeserializer(),
        IntegrationTestHarness.RESULTS_POLL_MAX_TIME_MS, dataSourceSerDe);

    GenericRow value = easyOrdersData.values().iterator().next();
    // Skip over the first two values (rowKey, rowTime).
    Assert.assertEquals("ITEM_1", value.getColumns().get(2));
}
Also used: GenericRow (io.confluent.ksql.GenericRow), StringDeserializer (org.apache.kafka.common.serialization.StringDeserializer), Schema (org.apache.kafka.connect.data.Schema)
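
The index 2 in the assertion follows from ksql's implicit leading columns: every result row carries ROWTIME and ROWKEY ahead of the projected fields, so ITEMID, the first projected column, lands at position 2. A hedged continuation of the test body above; the total column count and the ORDERUNITS/PRICEARRAY positions are inferred from the projection order rather than confirmed in the source:

    // Inferred positional layout (not asserted in the original test):
    // index 0 -> implicit ROWTIME, index 1 -> implicit ROWKEY,
    // index 2 -> ITEMID, index 3 -> ORDERUNITS, index 4 -> PRICEARRAY.
    List<Object> columns = value.getColumns();
    Assert.assertEquals(5, columns.size()); // 2 implicit + 3 projected columns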

Example 8 with GenericRow

Use of io.confluent.ksql.GenericRow in project ksql by confluentinc.

From class StreamsSelectAndProjectIntTest, method testSelectWithFilter.

private void testSelectWithFilter(String resultStream, String inputStreamName,
                                  DataSource.DataSourceSerDe dataSourceSerDe) throws Exception {
    ksqlContext.sql(String.format(
        "CREATE STREAM %s AS SELECT * FROM %s WHERE ORDERUNITS > 40;",
        resultStream, inputStreamName));

    Map<String, GenericRow> results = testHarness.consumeData(
        resultStream, dataProvider.schema(), 4, new StringDeserializer(),
        IntegrationTestHarness.RESULTS_POLL_MAX_TIME_MS, dataSourceSerDe);

    Assert.assertEquals(4, results.size());
}
Also used: GenericRow (io.confluent.ksql.GenericRow), StringDeserializer (org.apache.kafka.common.serialization.StringDeserializer)
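
The original test only checks the row count. A hedged extension that verifies the predicate itself might look like the following; ORDER_UNITS_INDEX is a hypothetical column position, not taken from the data provider's actual schema:

    // Hypothetical follow-up check, not in the original test. The column
    // index of ORDERUNITS is an assumption and would need adjusting to the
    // provider's schema (implicit ROWTIME/ROWKEY columns come first).
    final int ORDER_UNITS_INDEX = 3;
    for (GenericRow row : results.values()) {
        final double orderUnits = (Double) row.getColumns().get(ORDER_UNITS_INDEX);
        Assert.assertTrue("expected ORDERUNITS > 40 but was " + orderUnits, orderUnits > 40);
    }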

Example 9 with GenericRow

Use of io.confluent.ksql.GenericRow in project ksql by confluentinc.

From class UdfIntTest, method testTimestampColumnSelection.

private void testTimestampColumnSelection(String stream1Name, String stream2Name, String inputStreamName,
                                          DataSource.DataSourceSerDe dataSourceSerDe,
                                          Map<String, RecordMetadata> recordMetadataMap) throws Exception {
    final String query1String = String.format(
        "CREATE STREAM %s WITH (timestamp='RTIME') AS SELECT ROWKEY AS RKEY, "
            + "ROWTIME+10000 AS RTIME, ROWTIME+100 AS RT100, ORDERID, ITEMID "
            + "FROM %s WHERE ORDERUNITS > 20 AND ITEMID = 'ITEM_8'; "
            + "CREATE STREAM %s AS SELECT ROWKEY AS NEWRKEY, "
            + "ROWTIME AS NEWRTIME, RKEY, RTIME, RT100, ORDERID, ITEMID "
            + "FROM %s;",
        stream1Name, inputStreamName, stream2Name, stream1Name);
    ksqlContext.sql(query1String);

    Schema resultSchema = SchemaUtil.removeImplicitRowTimeRowKeyFromSchema(
        ksqlContext.getMetaStore().getSource(stream2Name).getSchema());

    Map<String, GenericRow> expectedResults = new HashMap<>();
    expectedResults.put("8", new GenericRow(Arrays.asList(
        "8",
        recordMetadataMap.get("8").timestamp() + 10000,
        "8",
        recordMetadataMap.get("8").timestamp() + 10000,
        recordMetadataMap.get("8").timestamp() + 100,
        "ORDER_6",
        "ITEM_8")));

    Map<String, GenericRow> results = testHarness.consumeData(
        stream2Name, resultSchema, expectedResults.size(), new StringDeserializer(),
        IntegrationTestHarness.RESULTS_POLL_MAX_TIME_MS, dataSourceSerDe);

    assertThat(results, equalTo(expectedResults));
}
Also used: GenericRow (io.confluent.ksql.GenericRow), HashMap (java.util.HashMap), StringDeserializer (org.apache.kafka.common.serialization.StringDeserializer), Schema (org.apache.kafka.connect.data.Schema)
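
The only material difference from Example 6 is SchemaUtil.removeImplicitRowTimeRowKeyFromSchema, which strips the implicit columns so the expected row no longer needs the two leading null placeholders. Conceptually the helper rebuilds the struct schema without those fields; the sketch below illustrates the idea with Kafka Connect's SchemaBuilder and is not ksql's actual implementation:

import org.apache.kafka.connect.data.Field;
import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.SchemaBuilder;

// Illustrative sketch only: drop the implicit ROWTIME/ROWKEY fields from a
// struct schema, keeping the remaining fields in their original order.
public static Schema withoutImplicitColumns(Schema schema) {
    final SchemaBuilder builder = SchemaBuilder.struct();
    for (Field field : schema.fields()) {
        if (!"ROWTIME".equals(field.name()) && !"ROWKEY".equals(field.name())) {
            builder.field(field.name(), field.schema());
        }
    }
    return builder.build();
}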

Example 10 with GenericRow

Use of io.confluent.ksql.GenericRow in project ksql by confluentinc.

From class WindowingIntTest, method shouldAggregateWithNoWindow.

@Test
public void shouldAggregateWithNoWindow() throws Exception {
    testHarness.publishTestData(topicName, dataProvider, now);

    final String streamName = "NOWINDOW_AGGTEST";
    final String queryString = String.format(
        "CREATE TABLE %s AS SELECT %s FROM ORDERS WHERE ITEMID = 'ITEM_1' GROUP BY ITEMID;",
        streamName, "ITEMID, COUNT(ITEMID), SUM(ORDERUNITS)");
    ksqlContext.sql(queryString);

    Schema resultSchema = ksqlContext.getMetaStore().getSource(streamName).getSchema();
    final GenericRow expected = new GenericRow(Arrays.asList(null, null, "ITEM_1", 2, /* 2 x items */ 20.0));

    final Map<String, GenericRow> results = new HashMap<>();
    TestUtils.waitForCondition(() -> {
        final Map<String, GenericRow> aggregateResults = testHarness.consumeData(
            streamName, resultSchema, 1, new StringDeserializer(), MAX_POLL_PER_ITERATION);
        final GenericRow actual = aggregateResults.get("ITEM_1");
        return expected.equals(actual);
    }, 60000, "didn't receive correct results within timeout");

    AdminClient adminClient = AdminClient.create(testHarness.ksqlConfig.getKsqlStreamConfigProps());
    KafkaTopicClient topicClient = new KafkaTopicClientImpl(adminClient);

    Set<String> topicBeforeCleanup = topicClient.listTopicNames();
    assertThat("Expected to have 5 topics instead have : " + topicBeforeCleanup.size(),
        topicBeforeCleanup.size(), equalTo(5));

    QueryMetadata queryMetadata = ksqlContext.getRunningQueries().iterator().next();
    queryMetadata.close();

    Set<String> topicsAfterCleanUp = topicClient.listTopicNames();
    assertThat("Expected to see 3 topics after clean up but seeing " + topicsAfterCleanUp.size(),
        topicsAfterCleanUp.size(), equalTo(3));
    assertThat(topicClient.getTopicCleanupPolicy(streamName),
        equalTo(KafkaTopicClient.TopicCleanupPolicy.COMPACT));
}
Also used: GenericRow (io.confluent.ksql.GenericRow), QueryMetadata (io.confluent.ksql.util.QueryMetadata), KafkaTopicClient (io.confluent.ksql.util.KafkaTopicClient), HashMap (java.util.HashMap), StringDeserializer (org.apache.kafka.common.serialization.StringDeserializer), Schema (org.apache.kafka.connect.data.Schema), KafkaTopicClientImpl (io.confluent.ksql.util.KafkaTopicClientImpl), AdminClient (org.apache.kafka.clients.admin.AdminClient), IntegrationTest (io.confluent.common.utils.IntegrationTest), Test (org.junit.Test)
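
Because the CTAS aggregate converges asynchronously, the test polls with TestUtils.waitForCondition instead of asserting once; the final assertion then confirms the table's backing topic uses log compaction, which a ksql TABLE needs so the latest value per key is retained. The generic shape of that polling pattern, sketched with illustrative names (this is not the actual org.apache.kafka.test.TestUtils code):

import java.util.function.BooleanSupplier;

// Illustrative sketch of a poll-until-true helper: retry the check until it
// passes or the timeout elapses, failing with the supplied message.
static void waitForCondition(BooleanSupplier condition, long timeoutMs, String failureMessage)
        throws InterruptedException {
    final long deadline = System.currentTimeMillis() + timeoutMs;
    while (!condition.getAsBoolean()) {
        if (System.currentTimeMillis() > deadline) {
            throw new AssertionError(failureMessage);
        }
        Thread.sleep(100); // brief pause between polls
    }
}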

Aggregations

GenericRow (io.confluent.ksql.GenericRow): 65
Test (org.junit.Test): 38
HashMap (java.util.HashMap): 27
Schema (org.apache.kafka.connect.data.Schema): 19
List (java.util.List): 15
StringDeserializer (org.apache.kafka.common.serialization.StringDeserializer): 15
ArrayList (java.util.ArrayList): 11
MockSchemaRegistryClient (io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient): 9
IntegrationTest (io.confluent.common.utils.IntegrationTest): 8
SchemaRegistryClient (io.confluent.kafka.schemaregistry.client.SchemaRegistryClient): 8
ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper): 5
KsqlTopicSerDe (io.confluent.ksql.serde.KsqlTopicSerDe): 5
KafkaTopicClient (io.confluent.ksql.util.KafkaTopicClient): 5
KafkaTopicClientImpl (io.confluent.ksql.util.KafkaTopicClientImpl): 5
KsqlConfig (io.confluent.ksql.util.KsqlConfig): 5
Map (java.util.Map): 5
GenericRecord (org.apache.avro.generic.GenericRecord): 4
Windowed (org.apache.kafka.streams.kstream.Windowed): 4
KafkaAvroDeserializer (io.confluent.kafka.serializers.KafkaAvroDeserializer): 3
DereferenceExpression (io.confluent.ksql.parser.tree.DereferenceExpression): 3