
Example 51 with GenericKey

Uses io.confluent.ksql.GenericKey in project ksql by confluentinc.

From the class KsMaterializationFunctionalTest, method shouldQueryMaterializedTableForTumblingWindowed:

@Test
public void shouldQueryMaterializedTableForTumblingWindowed() {
    // Given:
    final PersistentQueryMetadata query = executeQuery(
        "CREATE TABLE " + output + " AS"
            + " SELECT USERID, COUNT(*) AS COUNT FROM " + USER_STREAM
            + " WINDOW TUMBLING (SIZE " + WINDOW_SIZE.getSeconds() + " SECONDS)"
            + " GROUP BY USERID;");
    final LogicalSchema schema = schema("COUNT", SqlTypes.BIGINT);
    final Map<Windowed<String>, GenericRow> rows = waitForUniqueUserRows(TIME_WINDOWED_DESERIALIZER, schema);
    // When:
    final Materialization materialization = query.getMaterialization(queryId, contextStacker).get();
    // Then:
    assertThat(materialization.windowType(), is(Optional.of(WindowType.TUMBLING)));
    final MaterializedWindowedTable table = materialization.windowed();
    rows.forEach((k, v) -> {
        final Window w = Window.of(k.window().startTime(), k.window().endTime());
        final GenericKey key = genericKey(k.key());
        final List<WindowedRow> resultAtWindowStart = withRetry(() -> Lists.newArrayList(
            table.get(key, PARTITION, Range.singleton(w.start()), Range.all())));
        assertThat("at exact window start", resultAtWindowStart, hasSize(1));
        assertThat(resultAtWindowStart.get(0).schema(), is(schema));
        assertThat(resultAtWindowStart.get(0).window(), is(Optional.of(w)));
        assertThat(resultAtWindowStart.get(0).key(), is(key));
        assertThat(resultAtWindowStart.get(0).value(), is(v));
        final List<WindowedRow> resultAtWindowEnd = withRetry(() -> Lists.newArrayList(
            table.get(key, PARTITION, Range.all(), Range.singleton(w.end()))));
        assertThat("at exact window end", resultAtWindowEnd, hasSize(1));
        final List<WindowedRow> resultFromRange = withRetry(() -> Lists.newArrayList(
            table.get(key, PARTITION, Range.closed(w.start().minusMillis(1), w.start().plusMillis(1)), Range.all())));
        assertThat("range including window start", resultFromRange, is(resultAtWindowStart));
        final List<WindowedRow> resultPast = withRetry(() -> Lists.newArrayList(
            table.get(key, PARTITION, Range.closed(w.start().plusMillis(1), w.start().plusMillis(1)), Range.all())));
        assertThat("past start", resultPast, is(empty()));
    });
}
Also used: Windowed (org.apache.kafka.streams.kstream.Windowed), GenericRow (io.confluent.ksql.GenericRow), Window (io.confluent.ksql.Window), Materialization (io.confluent.ksql.execution.streams.materialization.Materialization), MaterializedWindowedTable (io.confluent.ksql.execution.streams.materialization.MaterializedWindowedTable), LogicalSchema (io.confluent.ksql.schema.ksql.LogicalSchema), GenericKey (io.confluent.ksql.GenericKey), WindowedRow (io.confluent.ksql.execution.streams.materialization.WindowedRow), PersistentQueryMetadata (io.confluent.ksql.util.PersistentQueryMetadata), IntegrationTest (org.apache.kafka.test.IntegrationTest), Test (org.junit.Test)
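
For orientation, a minimal, self-contained sketch (not taken from the ksql test above) of how GenericKey behaves; it assumes only the public GenericKey API, and genericKey(...) in the tests is a static import of GenericKey.genericKey:

import io.confluent.ksql.GenericKey;

// Sketch: GenericKey holds the key columns in order and compares by value,
// which is why table.get(key, ...) can match the key rebuilt from the
// Windowed<String> record in the test above.
public final class GenericKeyExample {
    public static void main(final String[] args) {
        final GenericKey a = GenericKey.genericKey("user-1");
        final GenericKey b = GenericKey.genericKey("user-1");
        System.out.println(a.equals(b)); // true: value-based equality
        System.out.println(a.get(0));    // user-1: columns are positional
        System.out.println(a.size());    // 1
    }
}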

Example 52 with GenericKey

Uses io.confluent.ksql.GenericKey in project ksql by confluentinc.

From the class JsonFormatTest, method produceInitData:

private static void produceInitData() {
    TEST_HARNESS.produceRows(inputTopic, ORDER_DATA_PROVIDER, KAFKA, JSON);
    final LogicalSchema messageSchema = LogicalSchema.builder()
        .keyColumn(SystemColumns.ROWKEY_NAME, SqlTypes.STRING)
        .valueColumn(ColumnName.of("MESSAGE"), SqlTypes.STRING)
        .build();
    final GenericKey messageKey = genericKey("1");
    final GenericRow messageRow = genericRow(
        "{\"log\":{\"@timestamp\":\"2017-05-30T16:44:22.175Z\",\"@version\":\"1\","
            + "\"caasVersion\":\"0.0.2\",\"cloud\":\"aws\",\"logs\":[{\"entry\":\"first\"}],"
            + "\"clusterId\":\"cp99\",\"clusterName\":\"kafka\",\"cpComponentId\":\"kafka\","
            + "\"host\":\"kafka-1-wwl0p\",\"k8sId\":\"k8s13\",\"k8sName\":\"perf\",\"level\":\"ERROR\","
            + "\"logger\":\"kafka.server.ReplicaFetcherThread\",\"message\":\"Found invalid messages during fetch for partition [foo512,172] offset 0 error Record is corrupt (stored crc = 1321230880, computed crc = 1139143803)\","
            + "\"networkId\":\"vpc-d8c7a9bf\",\"region\":\"us-west-2\",\"serverId\":\"1\",\"skuId\":\"sku5\","
            + "\"source\":\"kafka\",\"tenantId\":\"t47\",\"tenantName\":\"perf-test\","
            + "\"thread\":\"ReplicaFetcherThread-0-2\",\"zone\":\"us-west-2a\"},\"stream\":\"stdout\",\"time\":2017}");
    final Map<GenericKey, GenericRow> records = new HashMap<>();
    records.put(messageKey, messageRow);
    final PhysicalSchema schema = PhysicalSchema.from(messageSchema, SerdeFeatures.of(), SerdeFeatures.of());
    TEST_HARNESS.produceRows(messageLogTopic, records.entrySet(), schema, KAFKA, JSON);
}
Also used: GenericRow (io.confluent.ksql.GenericRow), PhysicalSchema (io.confluent.ksql.schema.ksql.PhysicalSchema), HashMap (java.util.HashMap), LogicalSchema (io.confluent.ksql.schema.ksql.LogicalSchema), GenericKey (io.confluent.ksql.GenericKey)
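
As a hedged, generic illustration of the pattern above (build a LogicalSchema, attach serde features to get a PhysicalSchema, then pair a GenericKey with a GenericRow), here is a trimmed-down sketch; the ID column name and the wrapper class are hypothetical:

import io.confluent.ksql.GenericKey;
import io.confluent.ksql.GenericRow;
import io.confluent.ksql.name.ColumnName;
import io.confluent.ksql.schema.ksql.LogicalSchema;
import io.confluent.ksql.schema.ksql.PhysicalSchema;
import io.confluent.ksql.schema.ksql.types.SqlTypes;
import io.confluent.ksql.serde.SerdeFeatures;

public final class SchemaRecordSketch {
    public static void main(final String[] args) {
        // One STRING key column and one STRING value column.
        final LogicalSchema logical = LogicalSchema.builder()
            .keyColumn(ColumnName.of("ID"), SqlTypes.STRING) // hypothetical column name
            .valueColumn(ColumnName.of("MESSAGE"), SqlTypes.STRING)
            .build();
        // PhysicalSchema layers key/value serde features on top (none here).
        final PhysicalSchema physical = PhysicalSchema.from(
            logical, SerdeFeatures.of(), SerdeFeatures.of());
        // A record whose key and value columns line up with the schema.
        final GenericKey key = GenericKey.genericKey("1");
        final GenericRow row = GenericRow.genericRow("hello");
        System.out.println(physical + " -> " + key + "=" + row);
    }
}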

Example 53 with GenericKey

Uses io.confluent.ksql.GenericKey in project ksql by confluentinc.

From the class ReplaceIntTest, method shouldReplaceSimpleProject:

@Test
public void shouldReplaceSimpleProject() {
    // Given:
    final String outputTopic = TopicTestUtil.uniqueTopicName("");
    ksqlContext.sql(String.format("CREATE STREAM project WITH(kafka_topic='%s') AS SELECT k, col1 FROM source;", outputTopic));
    TEST_HARNESS.produceRows(inputTopic, new Provider("1", "A", 1), FormatFactory.KAFKA, FormatFactory.JSON);
    assertForSource("PROJECT", outputTopic, ImmutableMap.of(genericKey("1"), GenericRow.genericRow("A")));
    // When:
    ksqlContext.sql(String.format("CREATE OR REPLACE STREAM project WITH(kafka_topic='%s') AS SELECT k, col1, col2 FROM source;", outputTopic));
    TEST_HARNESS.produceRows(inputTopic, new Provider("2", "B", 2), FormatFactory.KAFKA, FormatFactory.JSON);
    // Then:
    final Map<GenericKey, GenericRow> expected = ImmutableMap.of(
        genericKey("1"), GenericRow.genericRow("A", null), // this row is leftover from the original query
        genericKey("2"), GenericRow.genericRow("B", 2));
    assertForSource("PROJECT", outputTopic, expected);
}
Also used: GenericRow (io.confluent.ksql.GenericRow), GenericKey (io.confluent.ksql.GenericKey), TestDataProvider (io.confluent.ksql.util.TestDataProvider), IntegrationTest (io.confluent.common.utils.IntegrationTest), Test (org.junit.Test)
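
The behavior being asserted is that CREATE OR REPLACE does not reprocess rows already emitted by the original query: row "1" keeps its old shape and surfaces with null for the newly selected col2. A small standalone sketch of that expectation (ImmutableMap is Guava; the wrapper class is hypothetical):

import com.google.common.collect.ImmutableMap;
import io.confluent.ksql.GenericKey;
import io.confluent.ksql.GenericRow;
import java.util.Map;

public final class ReplaceExpectationSketch {
    public static void main(final String[] args) {
        // Rows written before the replacement lack the new column, so the
        // value list carries null in that position; rows written after
        // carry the real value.
        final Map<GenericKey, GenericRow> expected = ImmutableMap.of(
            GenericKey.genericKey("1"), GenericRow.genericRow("A", null), // pre-replacement row
            GenericKey.genericKey("2"), GenericRow.genericRow("B", 2));   // post-replacement row
        expected.forEach((k, v) -> System.out.println(k + " -> " + v));
    }
}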

Example 54 with GenericKey

Uses io.confluent.ksql.GenericKey in project ksql by confluentinc.

From the class JoinIntTest, method assertExpectedResults:

private void assertExpectedResults(
    final Map<GenericKey, GenericRow> expectedResults, final DataSource outputSource,
    final String inputTopic, final Format inputKeyFormat, final Format inputValueFormat,
    final long timeoutMs) {
    final PhysicalSchema schema = PhysicalSchema.from(
        outputSource.getSchema(),
        outputSource.getKsqlTopic().getKeyFormat().getFeatures(),
        outputSource.getKsqlTopic().getValueFormat().getFeatures());
    final Map<GenericKey, GenericRow> results = new HashMap<>();
    assertThatEventually("failed to complete join correctly", () -> {
        results.putAll(TEST_HARNESS.verifyAvailableUniqueRows(
            outputSource.getKafkaTopicName(), 1,
            FormatFactory.of(outputSource.getKsqlTopic().getKeyFormat().getFormatInfo()),
            FormatFactory.of(outputSource.getKsqlTopic().getValueFormat().getFormatInfo()),
            schema));
        final boolean success = results.equals(expectedResults);
        if (!success) {
            try {
                // The join may not be triggered the first time around, due to the order in
                // which the consumer pulls the records back, so we publish again to make the
                // stream trigger the join.
                TEST_HARNESS.produceRows(inputTopic, ORDER_DATA_PROVIDER, inputKeyFormat, inputValueFormat, () -> now);
            } catch (final Exception e) {
                throw new RuntimeException(e);
            }
        }
        return success;
    }, is(true), timeoutMs, TimeUnit.MILLISECONDS);
    assertThat(results, is(expectedResults));
}
Also used: GenericRow (io.confluent.ksql.GenericRow), PhysicalSchema (io.confluent.ksql.schema.ksql.PhysicalSchema), HashMap (java.util.HashMap), GenericKey (io.confluent.ksql.GenericKey), ZooKeeperClientException (kafka.zookeeper.ZooKeeperClientException)
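
The test leans on ksql's assertThatEventually helper for its retry loop; as a dependency-free sketch of the same poll-until-match idea (all names here are hypothetical, not ksql APIs):

import java.util.Map;
import java.util.function.Supplier;

public final class AwaitResults {
    // Poll a result map until it equals the expectation or the deadline passes.
    static <K, V> void awaitEquals(
            final Supplier<Map<K, V>> poll,
            final Map<K, V> expected,
            final long timeoutMs) throws InterruptedException {
        final long deadline = System.currentTimeMillis() + timeoutMs;
        Map<K, V> latest = poll.get();
        while (!latest.equals(expected) && System.currentTimeMillis() < deadline) {
            Thread.sleep(100); // brief back-off between polls
            latest = poll.get();
        }
        if (!latest.equals(expected)) {
            throw new AssertionError("expected " + expected + " but last saw " + latest);
        }
    }
}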

Example 55 with GenericKey

Uses io.confluent.ksql.GenericKey in project ksql by confluentinc.

From the class ReplaceWithSharedRuntimesIntTest, method shouldReplaceSimpleProject:

@Test
public void shouldReplaceSimpleProject() {
    // Given:
    final String outputTopic = TopicTestUtil.uniqueTopicName("");
    ksqlContext.sql(String.format("CREATE STREAM project WITH(kafka_topic='%s') AS SELECT k, col1 FROM source;", outputTopic));
    TEST_HARNESS.produceRows(inputTopic, new Provider("1", "A", 1), FormatFactory.KAFKA, FormatFactory.JSON);
    assertForSource("PROJECT", outputTopic, ImmutableMap.of(genericKey("1"), GenericRow.genericRow("A")));
    // When:
    ksqlContext.sql(String.format("CREATE OR REPLACE STREAM project WITH(kafka_topic='%s') AS SELECT k, col1, col2 FROM source;", outputTopic));
    TEST_HARNESS.produceRows(inputTopic, new Provider("2", "B", 2), FormatFactory.KAFKA, FormatFactory.JSON);
    // Then:
    final Map<GenericKey, GenericRow> expected = ImmutableMap.of(
        genericKey("1"), GenericRow.genericRow("A", null), // this row is leftover from the original query
        genericKey("2"), GenericRow.genericRow("B", 2));
    assertForSource("PROJECT", outputTopic, expected);
}
Also used: GenericRow (io.confluent.ksql.GenericRow), GenericKey (io.confluent.ksql.GenericKey), TestDataProvider (io.confluent.ksql.util.TestDataProvider), IntegrationTest (io.confluent.common.utils.IntegrationTest), Test (org.junit.Test)

Aggregations

GenericKey (io.confluent.ksql.GenericKey): 147
GenericRow (io.confluent.ksql.GenericRow): 100
Test (org.junit.Test): 93
LogicalSchema (io.confluent.ksql.schema.ksql.LogicalSchema): 24
Windowed (org.apache.kafka.streams.kstream.Windowed): 20
WindowedRow (io.confluent.ksql.execution.streams.materialization.WindowedRow): 14
PhysicalSchema (io.confluent.ksql.schema.ksql.PhysicalSchema): 14
Materialized (org.apache.kafka.streams.kstream.Materialized): 13
ValueAndTimestamp (org.apache.kafka.streams.state.ValueAndTimestamp): 13
UnqualifiedColumnReferenceExp (io.confluent.ksql.execution.expression.tree.UnqualifiedColumnReferenceExp): 12
MaterializationException (io.confluent.ksql.execution.streams.materialization.MaterializationException): 9
IntegrationTest (io.confluent.common.utils.IntegrationTest): 8
Materialization (io.confluent.ksql.execution.streams.materialization.Materialization): 8
Row (io.confluent.ksql.execution.streams.materialization.Row): 8
PersistentQueryMetadata (io.confluent.ksql.util.PersistentQueryMetadata): 8
TimeWindow (org.apache.kafka.streams.kstream.internals.TimeWindow): 8
IntegrationTest (org.apache.kafka.test.IntegrationTest): 8
InOrder (org.mockito.InOrder): 8
MaterializedTable (io.confluent.ksql.execution.streams.materialization.MaterializedTable): 7
Objects (java.util.Objects): 7