Search in sources:

Example 6 with Materialization

Use of io.confluent.ksql.execution.streams.materialization.Materialization in the project ksql by confluentinc.

From the class KsMaterializationFunctionalTest, method shouldQueryMaterializedTableForTumblingWindowed.

/**
 * Verifies that a TUMBLING-windowed aggregate table is materialized and can be
 * queried by key, both with point lookups at the exact window bounds and with
 * small ranges around the window start.
 */
@Test
public void shouldQueryMaterializedTableForTumblingWindowed() {
    // Given: a tumbling-windowed COUNT aggregate over the user stream.
    final PersistentQueryMetadata query = executeQuery("CREATE TABLE " + output + " AS" + " SELECT USERID, COUNT(*) AS COUNT FROM " + USER_STREAM + " WINDOW TUMBLING (SIZE " + WINDOW_SIZE.getSeconds() + " SECONDS)" + " GROUP BY USERID;");
    final LogicalSchema schema = schema("COUNT", SqlTypes.BIGINT);
    final Map<Windowed<String>, GenericRow> rows = waitForUniqueUserRows(TIME_WINDOWED_DESERIALIZER, schema);
    // When:
    final Materialization materialization = query.getMaterialization(queryId, contextStacker).get();
    // Then: the materialization reports tumbling windowing and serves per-window rows.
    assertThat(materialization.windowType(), is(Optional.of(WindowType.TUMBLING)));
    final MaterializedWindowedTable table = materialization.windowed();
    rows.forEach((k, v) -> {
        final Window w = Window.of(k.window().startTime(), k.window().endTime());
        final GenericKey key = genericKey(k.key());
        // Point lookup at the exact window start must return the single aggregated row.
        final List<WindowedRow> resultAtWindowStart = withRetry(() -> Lists.newArrayList(table.get(key, PARTITION, Range.singleton(w.start()), Range.all())));
        assertThat("at exact window start", resultAtWindowStart, hasSize(1));
        assertThat(resultAtWindowStart.get(0).schema(), is(schema));
        assertThat(resultAtWindowStart.get(0).window(), is(Optional.of(w)));
        assertThat(resultAtWindowStart.get(0).key(), is(key));
        assertThat(resultAtWindowStart.get(0).value(), is(v));
        // Point lookup at the exact window end also finds the row.
        final List<WindowedRow> resultAtWindowEnd = withRetry(() -> Lists.newArrayList(table.get(key, PARTITION, Range.all(), Range.singleton(w.end()))));
        assertThat("at exact window end", resultAtWindowEnd, hasSize(1));
        // Fix: single withRetry — the original redundantly nested withRetry(() -> withRetry(...)),
        // retrying twice; the session-windowed sibling test uses a single wrap.
        final List<WindowedRow> resultFromRange = withRetry(() -> Lists.newArrayList(table.get(key, PARTITION, Range.closed(w.start().minusMillis(1), w.start().plusMillis(1)), Range.all())));
        assertThat("range including window start", resultFromRange, is(resultAtWindowStart));
        // A range strictly past the window start matches no window.
        final List<WindowedRow> resultPast = withRetry(() -> Lists.newArrayList(table.get(key, PARTITION, Range.closed(w.start().plusMillis(1), w.start().plusMillis(1)), Range.all())));
        assertThat("past start", resultPast, is(empty()));
    });
}
Also used : Windowed(org.apache.kafka.streams.kstream.Windowed) GenericRow(io.confluent.ksql.GenericRow) Window(io.confluent.ksql.Window) Materialization(io.confluent.ksql.execution.streams.materialization.Materialization) MaterializedWindowedTable(io.confluent.ksql.execution.streams.materialization.MaterializedWindowedTable) LogicalSchema(io.confluent.ksql.schema.ksql.LogicalSchema) GenericKey(io.confluent.ksql.GenericKey) WindowedRow(io.confluent.ksql.execution.streams.materialization.WindowedRow) PersistentQueryMetadata(io.confluent.ksql.util.PersistentQueryMetadata) IntegrationTest(org.apache.kafka.test.IntegrationTest) Test(org.junit.Test)

Example 7 with Materialization

Use of io.confluent.ksql.execution.streams.materialization.Materialization in the project ksql by confluentinc.

From the class KsMaterializationFunctionalTest, method shouldReturnEmptyIfNotMaterializedStream.

/**
 * A plain (non-aggregating) stream query produces no materialization, so
 * requesting one yields an empty Optional.
 */
@Test
public void shouldReturnEmptyIfNotMaterializedStream() {
    // Given: a pass-through stream, which is not materialized.
    final PersistentQueryMetadata streamQuery = executeQuery("CREATE STREAM " + output + " AS" + " SELECT * FROM " + USER_STREAM + ";");
    // When: asking the query for its materialization.
    final Optional<Materialization> materialization = streamQuery.getMaterialization(queryId, contextStacker);
    // Then: none is available.
    assertThat(materialization, is(Optional.empty()));
}
Also used : Materialization(io.confluent.ksql.execution.streams.materialization.Materialization) PersistentQueryMetadata(io.confluent.ksql.util.PersistentQueryMetadata) IntegrationTest(org.apache.kafka.test.IntegrationTest) Test(org.junit.Test)

Example 8 with Materialization

Use of io.confluent.ksql.execution.streams.materialization.Materialization in the project ksql by confluentinc.

From the class KsMaterializationFunctionalTest, method shouldQueryTumblingWindowMaterializedTableWithRetention.

/**
 * Verifies that a tumbling-windowed table declared with an explicit RETENTION
 * only retains the expected set of recent windows.
 */
@Test
public void shouldQueryTumblingWindowMaterializedTableWithRetention() {
    // Given: a tumbling-window table with retention of two window segments and no grace period.
    final PersistentQueryMetadata query = executeQuery(
        "CREATE TABLE " + output + " AS"
            + " SELECT PAGEID, COUNT(*) AS COUNT FROM " + PAGE_VIEWS_STREAM
            + " WINDOW TUMBLING (SIZE " + WINDOW_SEGMENT_DURATION.getSeconds() + " SECONDS,"
            + " RETENTION " + (WINDOW_SEGMENT_DURATION.getSeconds() * 2) + " SECONDS,"
            + " GRACE PERIOD 0 SECONDS)"
            + " GROUP BY PAGEID;");
    final List<ConsumerRecord<Windowed<String>, GenericRow>> rows =
        waitForPageViewRows(TIME_WINDOWED_DESERIALIZER, query.getPhysicalSchema());
    // When:
    final Materialization materialization = query.getMaterialization(queryId, contextStacker).get();
    // Then: the table is tumbling-windowed and retains the windows starting at instants 1, 2 and 3.
    assertThat(materialization.windowType(), is(Optional.of(WindowType.TUMBLING)));
    final MaterializedWindowedTable table = materialization.windowed();
    final Set<Optional<Window>> expectedWindows = Stream.of(1, 2, 3)
        .map(i -> Window.of(
            WINDOW_START_INSTANTS.get(i),
            WINDOW_START_INSTANTS.get(i).plusSeconds(WINDOW_SEGMENT_DURATION.getSeconds())))
        .map(Optional::of)
        .collect(Collectors.toSet());
    verifyRetainedWindows(rows, table, expectedWindows);
}
Also used : Materialization(io.confluent.ksql.execution.streams.materialization.Materialization) Optional(java.util.Optional) MaterializedWindowedTable(io.confluent.ksql.execution.streams.materialization.MaterializedWindowedTable) PersistentQueryMetadata(io.confluent.ksql.util.PersistentQueryMetadata) ConsumerRecord(org.apache.kafka.clients.consumer.ConsumerRecord) IntegrationTest(org.apache.kafka.test.IntegrationTest) Test(org.junit.Test)

Example 9 with Materialization

Use of io.confluent.ksql.execution.streams.materialization.Materialization in the project ksql by confluentinc.

From the class KsMaterializationFunctionalTest, method shouldReturnEmptyIfAppServerNotConfigured.

/**
 * Without an app-server endpoint configured, even an aggregate query exposes
 * no materialization.
 */
@Test
public void shouldReturnEmptyIfAppServerNotConfigured() {
    // Given: a ksql context built without an app server configured.
    try (TestKsqlContext ksqlNoAppServer = TEST_HARNESS.ksqlContextBuilder().build()) {
        initializeKsql(ksqlNoAppServer);
        final PersistentQueryMetadata aggregateQuery = executeQuery(ksqlNoAppServer, "CREATE TABLE " + output + " AS" + " SELECT USERID, COUNT(*) AS COUNT FROM " + USER_TABLE + " GROUP BY USERID;");
        // When: requesting the materialization.
        final Optional<Materialization> materialization = aggregateQuery.getMaterialization(queryId, contextStacker);
        // Then: none is returned.
        assertThat(materialization, is(Optional.empty()));
    }
}
Also used : Materialization(io.confluent.ksql.execution.streams.materialization.Materialization) TestKsqlContext(io.confluent.ksql.integration.TestKsqlContext) PersistentQueryMetadata(io.confluent.ksql.util.PersistentQueryMetadata) IntegrationTest(org.apache.kafka.test.IntegrationTest) Test(org.junit.Test)

Example 10 with Materialization

Use of io.confluent.ksql.execution.streams.materialization.Materialization in the project ksql by confluentinc.

From the class KsMaterializationFunctionalTest, method shouldQueryMaterializedTableForSessionWindowed.

/**
 * Verifies that a SESSION-windowed aggregate table is materialized and can be
 * queried by key, both with point lookups at the exact window bounds and with
 * small ranges around the window start.
 */
@Test
public void shouldQueryMaterializedTableForSessionWindowed() {
    // Given: a session-windowed COUNT aggregate over the user stream.
    final PersistentQueryMetadata query = executeQuery(
        "CREATE TABLE " + output + " AS"
            + " SELECT USERID, COUNT(*) AS COUNT FROM " + USER_STREAM
            + " WINDOW SESSION (" + WINDOW_SIZE.getSeconds() + " SECONDS)"
            + " GROUP BY USERID;");
    final LogicalSchema schema = schema("COUNT", SqlTypes.BIGINT);
    final Map<Windowed<String>, GenericRow> rows = waitForUniqueUserRows(SESSION_WINDOWED_DESERIALIZER, schema);
    // When:
    final Materialization materialization = query.getMaterialization(queryId, contextStacker).get();
    // Then: the materialization reports session windowing and serves per-window rows.
    assertThat(materialization.windowType(), is(Optional.of(WindowType.SESSION)));
    final MaterializedWindowedTable table = materialization.windowed();
    rows.forEach((windowedKey, value) -> {
        final GenericKey key = genericKey(windowedKey.key());
        final Window window = Window.of(windowedKey.window().startTime(), windowedKey.window().endTime());
        // Point lookup at the exact window start returns the single aggregated row.
        final List<WindowedRow> atStart = withRetry(() -> Lists.newArrayList(
            table.get(key, PARTITION, Range.singleton(window.start()), Range.all())));
        assertThat("at exact window start", atStart, hasSize(1));
        final WindowedRow row = atStart.get(0);
        assertThat(row.schema(), is(schema));
        assertThat(row.window(), is(Optional.of(window)));
        assertThat(row.key(), is(key));
        assertThat(row.value(), is(value));
        // Point lookup at the exact window end also finds the row.
        final List<WindowedRow> atEnd = withRetry(() -> Lists.newArrayList(
            table.get(key, PARTITION, Range.all(), Range.singleton(window.end()))));
        assertThat("at exact window end", atEnd, hasSize(1));
        // A small range straddling the window start returns the same row.
        final List<WindowedRow> fromRange = withRetry(() -> Lists.newArrayList(
            table.get(key, PARTITION, Range.closed(window.start().minusMillis(1), window.start().plusMillis(1)), Range.all())));
        assertThat("range including window start", fromRange, is(atStart));
        // A range strictly past the window start matches no window.
        final List<WindowedRow> pastStart = withRetry(() -> Lists.newArrayList(
            table.get(key, PARTITION, Range.closed(window.start().plusMillis(1), window.start().plusMillis(1)), Range.all())));
        assertThat("past start", pastStart, is(empty()));
    });
}
Also used : Windowed(org.apache.kafka.streams.kstream.Windowed) GenericRow(io.confluent.ksql.GenericRow) Window(io.confluent.ksql.Window) Materialization(io.confluent.ksql.execution.streams.materialization.Materialization) MaterializedWindowedTable(io.confluent.ksql.execution.streams.materialization.MaterializedWindowedTable) LogicalSchema(io.confluent.ksql.schema.ksql.LogicalSchema) GenericKey(io.confluent.ksql.GenericKey) WindowedRow(io.confluent.ksql.execution.streams.materialization.WindowedRow) PersistentQueryMetadata(io.confluent.ksql.util.PersistentQueryMetadata) IntegrationTest(org.apache.kafka.test.IntegrationTest) Test(org.junit.Test)

Aggregations

Materialization (io.confluent.ksql.execution.streams.materialization.Materialization)16 PersistentQueryMetadata (io.confluent.ksql.util.PersistentQueryMetadata)16 Test (org.junit.Test)16 IntegrationTest (org.apache.kafka.test.IntegrationTest)13 GenericKey (io.confluent.ksql.GenericKey)8 GenericRow (io.confluent.ksql.GenericRow)8 WindowedRow (io.confluent.ksql.execution.streams.materialization.WindowedRow)8 LogicalSchema (io.confluent.ksql.schema.ksql.LogicalSchema)8 MaterializedWindowedTable (io.confluent.ksql.execution.streams.materialization.MaterializedWindowedTable)7 MaterializedTable (io.confluent.ksql.execution.streams.materialization.MaterializedTable)5 Row (io.confluent.ksql.execution.streams.materialization.Row)5 Window (io.confluent.ksql.Window)4 Windowed (org.apache.kafka.streams.kstream.Windowed)4 KsMaterialization (io.confluent.ksql.execution.streams.materialization.ks.KsMaterialization)3 Optional (java.util.Optional)3 ConsumerRecord (org.apache.kafka.clients.consumer.ConsumerRecord)3 TestKsqlContext (io.confluent.ksql.integration.TestKsqlContext)2 Range (com.google.common.collect.Range)1 GenericKey.genericKey (io.confluent.ksql.GenericKey.genericKey)1 QueryContext (io.confluent.ksql.execution.context.QueryContext)1