
Example 11 with TestInputTopic

Usage of org.apache.kafka.streams.TestInputTopic in the apache/kafka project.

From the class KStreamWindowAggregateTest, method shouldLogAndMeterWhenSkippingExpiredWindow:

@Test
public void shouldLogAndMeterWhenSkippingExpiredWindow() {
    final StreamsBuilder builder = new StreamsBuilder();
    final String topic = "topic";
    final KStream<String, String> stream1 = builder.stream(topic, Consumed.with(Serdes.String(), Serdes.String()));
    stream1.groupByKey(Grouped.with(Serdes.String(), Serdes.String()))
        .windowedBy(TimeWindows.ofSizeAndGrace(ofMillis(10), ofMillis(90)).advanceBy(ofMillis(5)))
        .aggregate(
            () -> "",
            MockAggregator.toStringInstance("+"),
            Materialized.<String, String, WindowStore<Bytes, byte[]>>as("topic1-Canonicalized")
                .withValueSerde(Serdes.String())
                .withCachingDisabled()
                .withLoggingDisabled()
                .withRetention(Duration.ofMillis(100)))
        .toStream()
        .map((key, value) -> new KeyValue<>(key.toString(), value))
        .to("output");
    try (final LogCaptureAppender appender = LogCaptureAppender.createAndRegister(KStreamWindowAggregate.class);
        final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        final TestInputTopic<String, String> inputTopic = driver.createInputTopic(topic, new StringSerializer(), new StringSerializer());
        inputTopic.pipeInput("k", "100", 100L);
        inputTopic.pipeInput("k", "0", 0L);
        inputTopic.pipeInput("k", "1", 1L);
        inputTopic.pipeInput("k", "2", 2L);
        inputTopic.pipeInput("k", "3", 3L);
        inputTopic.pipeInput("k", "4", 4L);
        inputTopic.pipeInput("k", "5", 5L);
        inputTopic.pipeInput("k", "6", 6L);
        assertLatenessMetrics(
            driver,
            // how many events get dropped
            is(7.0),
            // k:0 is 100ms late, since its time is 0, but it arrives at stream time 100.
            is(100.0),
            // (0 + 100 + 99 + 98 + 97 + 96 + 95 + 94) / 8
            is(84.875));
        assertThat(appender.getMessages(), hasItems(
            "Skipping record for expired window. topic=[topic] partition=[0] offset=[1] timestamp=[0] window=[0,10) expiration=[10] streamTime=[100]",
            "Skipping record for expired window. topic=[topic] partition=[0] offset=[2] timestamp=[1] window=[0,10) expiration=[10] streamTime=[100]",
            "Skipping record for expired window. topic=[topic] partition=[0] offset=[3] timestamp=[2] window=[0,10) expiration=[10] streamTime=[100]",
            "Skipping record for expired window. topic=[topic] partition=[0] offset=[4] timestamp=[3] window=[0,10) expiration=[10] streamTime=[100]",
            "Skipping record for expired window. topic=[topic] partition=[0] offset=[5] timestamp=[4] window=[0,10) expiration=[10] streamTime=[100]",
            "Skipping record for expired window. topic=[topic] partition=[0] offset=[6] timestamp=[5] window=[0,10) expiration=[10] streamTime=[100]",
            "Skipping record for expired window. topic=[topic] partition=[0] offset=[7] timestamp=[6] window=[0,10) expiration=[10] streamTime=[100]"));
        final TestOutputTopic<String, String> outputTopic = driver.createOutputTopic("output", new StringDeserializer(), new StringDeserializer());
        assertThat(outputTopic.readRecord(), equalTo(new TestRecord<>("[k@95/105]", "+100", null, 100L)));
        assertThat(outputTopic.readRecord(), equalTo(new TestRecord<>("[k@100/110]", "+100", null, 100L)));
        assertThat(outputTopic.readRecord(), equalTo(new TestRecord<>("[k@5/15]", "+5", null, 5L)));
        assertThat(outputTopic.readRecord(), equalTo(new TestRecord<>("[k@5/15]", "+5+6", null, 6L)));
        assertTrue(outputTopic.isEmpty());
    }
}
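A minimal sketch of the same grace-period mechanics, with the metrics and log assertions stripped out and tumbling rather than hopping windows to keep it short (the topic names and Streams config here are hypothetical, not from the test above). With TimeWindows.ofSizeAndGrace(ofMillis(10), ofMillis(90)), a window stops accepting input once stream time minus the grace period reaches the window end, so after the record at timestamp 100 arrives, every window ending at or before 10 is closed:

import java.time.Duration;
import java.util.Properties;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.serialization.StringSerializer;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.TestInputTopic;
import org.apache.kafka.streams.TopologyTestDriver;
import org.apache.kafka.streams.kstream.Consumed;
import org.apache.kafka.streams.kstream.Grouped;
import org.apache.kafka.streams.kstream.Produced;
import org.apache.kafka.streams.kstream.TimeWindows;

public class GracePeriodSketch {
    public static void main(final String[] args) {
        final StreamsBuilder builder = new StreamsBuilder();
        builder.stream("in", Consumed.with(Serdes.String(), Serdes.String()))
            .groupByKey(Grouped.with(Serdes.String(), Serdes.String()))
            .windowedBy(TimeWindows.ofSizeAndGrace(Duration.ofMillis(10), Duration.ofMillis(90)))
            .count()
            .toStream((windowedKey, count) -> windowedKey.toString())
            .to("out", Produced.with(Serdes.String(), Serdes.Long()));

        final Properties props = new Properties();
        props.put(StreamsConfig.APPLICATION_ID_CONFIG, "grace-sketch");
        props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "dummy:1234");

        try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
            final TestInputTopic<String, String> input =
                driver.createInputTopic("in", new StringSerializer(), new StringSerializer());
            input.pipeInput("k", "v", 100L); // advances stream time to 100; close time becomes 100 - 90 = 10
            input.pipeInput("k", "v", 5L);   // window [0,10) ends at 10 <= close time, so the record is skipped
            input.pipeInput("k", "v", 15L);  // window [10,20) ends after the close time, so the record is kept
        }
    }
}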

Example 12 with TestInputTopic

Usage of org.apache.kafka.streams.TestInputTopic in the apache/kafka project.

From the class KTableAggregateTest, method testAggRepartition:

@Test
public void testAggRepartition() {
    final StreamsBuilder builder = new StreamsBuilder();
    final String topic1 = "topic1";
    final KTable<String, String> table1 = builder.table(topic1, consumed);
    final KTable<String, String> table2 = table1.groupBy((key, value) -> {
        switch(key) {
            case "null":
                return KeyValue.pair(null, value);
            case "NULL":
                return null;
            default:
                return KeyValue.pair(value, value);
        }
    }, stringSerialized).aggregate(
        MockInitializer.STRING_INIT,
        MockAggregator.TOSTRING_ADDER,
        MockAggregator.TOSTRING_REMOVER,
        Materialized.<String, String, KeyValueStore<Bytes, byte[]>>as("topic1-Canonized")
            .withValueSerde(stringSerde));
    table2.toStream().process(supplier);
    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), CONFIG, Instant.ofEpochMilli(0L))) {
        final TestInputTopic<String, String> inputTopic = driver.createInputTopic(topic1, new StringSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
        inputTopic.pipeInput("A", "1", 10L);
        inputTopic.pipeInput("A", (String) null, 15L);
        inputTopic.pipeInput("A", "1", 12L);
        inputTopic.pipeInput("B", "2", 20L);
        inputTopic.pipeInput("null", "3", 25L);
        inputTopic.pipeInput("B", "4", 23L);
        inputTopic.pipeInput("NULL", "5", 24L);
        inputTopic.pipeInput("B", "7", 22L);
        assertEquals(
            asList(
                new KeyValueTimestamp<>("1", "0+1", 10),
                new KeyValueTimestamp<>("1", "0+1-1", 15),
                new KeyValueTimestamp<>("1", "0+1-1+1", 15),
                new KeyValueTimestamp<>("2", "0+2", 20),
                new KeyValueTimestamp<>("2", "0+2-2", 23),
                new KeyValueTimestamp<>("4", "0+4", 23),
                new KeyValueTimestamp<>("4", "0+4-4", 23),
                new KeyValueTimestamp<>("7", "0+7", 22)),
            supplier.theCapturedProcessor().processed());
    }
}
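For clarity, here is the selector from the groupBy above pulled out into a hypothetical standalone constant. Re-keying by value forces a repartition; a null new key (the "null" input) and a null mapping (the "NULL" input) both cause the record to be dropped before it reaches the aggregate, which is why values "3" and "5" never appear in the captured output:

import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.kstream.KeyValueMapper;

public class SelectorSketch {
    static final KeyValueMapper<String, String, KeyValue<String, String>> SELECTOR =
        (key, value) -> {
            switch (key) {
                case "null":
                    return KeyValue.pair(null, value); // null new key: record dropped after repartitioning
                case "NULL":
                    return null;                       // null mapping: record dropped outright
                default:
                    return KeyValue.pair(value, value); // re-key by value (forces a repartition)
            }
        };
}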

Example 13 with TestInputTopic

Usage of org.apache.kafka.streams.TestInputTopic in the apache/kafka project.

From the class KTableSourceTest, method kTableShouldLogOnOutOfOrder:

@Test
public void kTableShouldLogOnOutOfOrder() {
    final StreamsBuilder builder = new StreamsBuilder();
    final String topic = "topic";
    builder.table(topic, stringConsumed, Materialized.as("store"));
    try (final LogCaptureAppender appender = LogCaptureAppender.createAndRegister(KTableSource.class);
        final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        final TestInputTopic<String, String> inputTopic = driver.createInputTopic(topic, new StringSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
        inputTopic.pipeInput("key", "value", 10L);
        inputTopic.pipeInput("key", "value", 5L);
        assertThat(
            appender.getEvents().stream()
                .filter(e -> e.getLevel().equals("WARN"))
                .map(Event::getMessage)
                .collect(Collectors.toList()),
            hasItem("Detected out-of-order KTable update for store, old timestamp=[10] new timestamp=[5]. topic=[topic] partition=[1] offset=[0]."));
    }
}
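A self-contained sketch of the same out-of-order scenario (the application id and bootstrap config are hypothetical, and the log assertion is omitted since LogCaptureAppender is an internal Kafka test utility rather than public API). The KTable source tracks the timestamp of the last update per key and emits the WARN line asserted above when a later offset carries an older timestamp:

import java.time.Duration;
import java.time.Instant;
import java.util.Properties;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.serialization.StringSerializer;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.TestInputTopic;
import org.apache.kafka.streams.TopologyTestDriver;
import org.apache.kafka.streams.kstream.Consumed;
import org.apache.kafka.streams.kstream.Materialized;

public class OutOfOrderSketch {
    public static void main(final String[] args) {
        final StreamsBuilder builder = new StreamsBuilder();
        builder.table("topic", Consumed.with(Serdes.String(), Serdes.String()), Materialized.as("store"));

        final Properties props = new Properties();
        props.put(StreamsConfig.APPLICATION_ID_CONFIG, "out-of-order-sketch");
        props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "dummy:1234");

        try (final TopologyTestDriver driver =
                 new TopologyTestDriver(builder.build(), props, Instant.ofEpochMilli(0L))) {
            final TestInputTopic<String, String> input = driver.createInputTopic(
                "topic", new StringSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
            input.pipeInput("key", "value", 10L); // stored with timestamp 10
            input.pipeInput("key", "value", 5L);  // older timestamp for the same key: triggers the WARN log
        }
    }
}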

Example 14 with TestInputTopic

Usage of org.apache.kafka.streams.TestInputTopic in the apache/kafka project.

From the class SuppressScenarioTest, method shouldSuppressIntermediateEventsWithRecordLimit:

@Test
public void shouldSuppressIntermediateEventsWithRecordLimit() {
    final StreamsBuilder builder = new StreamsBuilder();
    final KTable<String, Long> valueCounts = builder
        .table(
            "input",
            Consumed.with(STRING_SERDE, STRING_SERDE),
            Materialized.<String, String, KeyValueStore<Bytes, byte[]>>with(STRING_SERDE, STRING_SERDE)
                .withCachingDisabled()
                .withLoggingDisabled())
        .groupBy((k, v) -> new KeyValue<>(v, k), Grouped.with(STRING_SERDE, STRING_SERDE))
        .count(Materialized.with(STRING_SERDE, Serdes.Long()));
    valueCounts
        .suppress(untilTimeLimit(ofMillis(Long.MAX_VALUE), maxRecords(1L).emitEarlyWhenFull()))
        .toStream()
        .to("output-suppressed", Produced.with(STRING_SERDE, Serdes.Long()));
    valueCounts.toStream().to("output-raw", Produced.with(STRING_SERDE, Serdes.Long()));
    final Topology topology = builder.build();
    System.out.println(topology.describe());
    try (final TopologyTestDriver driver = new TopologyTestDriver(topology, config)) {
        final TestInputTopic<String, String> inputTopic = driver.createInputTopic("input", STRING_SERIALIZER, STRING_SERIALIZER);
        inputTopic.pipeInput("k1", "v1", 0L);
        inputTopic.pipeInput("k1", "v2", 1L);
        inputTopic.pipeInput("k2", "v1", 2L);
        verify(
            drainProducerRecords(driver, "output-raw", STRING_DESERIALIZER, LONG_DESERIALIZER),
            asList(
                new KeyValueTimestamp<>("v1", 1L, 0L),
                new KeyValueTimestamp<>("v1", 0L, 1L),
                new KeyValueTimestamp<>("v2", 1L, 1L),
                new KeyValueTimestamp<>("v1", 1L, 2L)));
        verify(
            drainProducerRecords(driver, "output-suppressed", STRING_DESERIALIZER, LONG_DESERIALIZER),
            asList(
                // consecutive updates to v1 get suppressed into only the latter.
                new KeyValueTimestamp<>("v1", 0L, 1L),
                new KeyValueTimestamp<>("v2", 1L, 1L)));
        inputTopic.pipeInput("x", "x", 3L);
        verify(
            drainProducerRecords(driver, "output-raw", STRING_DESERIALIZER, LONG_DESERIALIZER),
            singletonList(new KeyValueTimestamp<>("x", 1L, 3L)));
        verify(
            drainProducerRecords(driver, "output-suppressed", STRING_DESERIALIZER, LONG_DESERIALIZER),
            singletonList(
                // now we see that last update to v1, but we won't see the update to x until it gets evicted
                new KeyValueTimestamp<>("v1", 1L, 2L)));
    }
}
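A minimal sketch isolating the suppression from the rest of the test (the topic names are hypothetical, and the table is read directly with a Long value serde instead of being counted, to keep it short). Because untilTimeLimit is given ofMillis(Long.MAX_VALUE), the time limit never fires on its own; a record leaves the buffer only when maxRecords(1L) overflows, and emitEarlyWhenFull() makes the overflow evict the oldest entry rather than fail:

import static java.time.Duration.ofMillis;
import static org.apache.kafka.streams.kstream.Suppressed.BufferConfig.maxRecords;
import static org.apache.kafka.streams.kstream.Suppressed.untilTimeLimit;

import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.kstream.Consumed;
import org.apache.kafka.streams.kstream.Produced;

public class SuppressSketch {
    public static void main(final String[] args) {
        final StreamsBuilder builder = new StreamsBuilder();
        builder.table("input", Consumed.with(Serdes.String(), Serdes.Long()))
            .suppress(untilTimeLimit(ofMillis(Long.MAX_VALUE), maxRecords(1L).emitEarlyWhenFull()))
            .toStream()
            .to("output-suppressed", Produced.with(Serdes.String(), Serdes.Long()));
        System.out.println(builder.build().describe());
    }
}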

Example 15 with TestInputTopic

Usage of org.apache.kafka.streams.TestInputTopic in the apache/kafka project.

From the class SuppressScenarioTest, method shouldSuppressIntermediateEventsWithBytesLimit:

@Test
public void shouldSuppressIntermediateEventsWithBytesLimit() {
    final StreamsBuilder builder = new StreamsBuilder();
    final KTable<String, Long> valueCounts = builder
        .table(
            "input",
            Consumed.with(STRING_SERDE, STRING_SERDE),
            Materialized.<String, String, KeyValueStore<Bytes, byte[]>>with(STRING_SERDE, STRING_SERDE)
                .withCachingDisabled()
                .withLoggingDisabled())
        .groupBy((k, v) -> new KeyValue<>(v, k), Grouped.with(STRING_SERDE, STRING_SERDE))
        .count();
    valueCounts
        .suppress(untilTimeLimit(ofMillis(Long.MAX_VALUE), maxBytes(200L).emitEarlyWhenFull()))
        .toStream()
        .to("output-suppressed", Produced.with(STRING_SERDE, Serdes.Long()));
    valueCounts.toStream().to("output-raw", Produced.with(STRING_SERDE, Serdes.Long()));
    final Topology topology = builder.build();
    System.out.println(topology.describe());
    try (final TopologyTestDriver driver = new TopologyTestDriver(topology, config)) {
        final TestInputTopic<String, String> inputTopic = driver.createInputTopic("input", STRING_SERIALIZER, STRING_SERIALIZER);
        inputTopic.pipeInput("k1", "v1", 0L);
        inputTopic.pipeInput("k1", "v2", 1L);
        inputTopic.pipeInput("k2", "v1", 2L);
        verify(
            drainProducerRecords(driver, "output-raw", STRING_DESERIALIZER, LONG_DESERIALIZER),
            asList(
                new KeyValueTimestamp<>("v1", 1L, 0L),
                new KeyValueTimestamp<>("v1", 0L, 1L),
                new KeyValueTimestamp<>("v2", 1L, 1L),
                new KeyValueTimestamp<>("v1", 1L, 2L)));
        verify(
            drainProducerRecords(driver, "output-suppressed", STRING_DESERIALIZER, LONG_DESERIALIZER),
            asList(
                // consecutive updates to v1 get suppressed into only the latter.
                new KeyValueTimestamp<>("v1", 0L, 1L),
                new KeyValueTimestamp<>("v2", 1L, 1L)));
        inputTopic.pipeInput("x", "x", 3L);
        verify(
            drainProducerRecords(driver, "output-raw", STRING_DESERIALIZER, LONG_DESERIALIZER),
            singletonList(new KeyValueTimestamp<>("x", 1L, 3L)));
        verify(
            drainProducerRecords(driver, "output-suppressed", STRING_DESERIALIZER, LONG_DESERIALIZER),
            singletonList(
                // now we see that last update to v1, but we won't see the update to x until it gets evicted
                new KeyValueTimestamp<>("v1", 1L, 2L)));
    }
}
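The byte-bounded variant above differs from the record-bounded one only in its buffer config. A short sketch of the three Suppressed.BufferConfig flavors this test class exercises (the threshold values are arbitrary, chosen for illustration):

import static org.apache.kafka.streams.kstream.Suppressed.BufferConfig.maxBytes;
import static org.apache.kafka.streams.kstream.Suppressed.BufferConfig.maxRecords;
import static org.apache.kafka.streams.kstream.Suppressed.BufferConfig.unbounded;

import org.apache.kafka.streams.kstream.Suppressed;

public class BufferConfigSketch {
    // Evict the oldest buffered entry once buffered data grows past 200 bytes
    // (the configuration under test above).
    static final Suppressed.EagerBufferConfig BY_BYTES = maxBytes(200L).emitEarlyWhenFull();

    // Evict the oldest buffered entry as soon as a second record is buffered.
    static final Suppressed.EagerBufferConfig BY_RECORDS = maxRecords(1L).emitEarlyWhenFull();

    // No limit at all: the form required by untilWindowCloses, which must never emit early.
    static final Suppressed.StrictBufferConfig UNBOUNDED = unbounded();
}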
