Search in sources :

Example 61 with KeyValueTimestamp

Usage of org.apache.kafka.streams.KeyValueTimestamp in the Apache Kafka project.

Taken from the class SuppressScenarioTest, method shouldWorkBeforeJoinRight.

@Test
public void shouldWorkBeforeJoinRight() {
    // Suppress the right-hand table of an outer join; the left-hand table is
    // passed through unchanged. The join must see only right-side values that
    // have already been flushed out of the suppression buffer.
    final StreamsBuilder topology = new StreamsBuilder();
    final KTable<String, String> leftTable =
        topology.table("left", Consumed.with(Serdes.String(), Serdes.String()));
    final KTable<String, String> rightTable =
        topology
            .table("right", Consumed.with(Serdes.String(), Serdes.String()))
            .suppress(untilTimeLimit(ofMillis(10), unbounded()));
    leftTable
        .outerJoin(rightTable, (l, r) -> String.format("(%s,%s)", l, r))
        .toStream()
        .to("output", Produced.with(Serdes.String(), Serdes.String()));
    try (final TopologyTestDriver driver = new TopologyTestDriver(topology.build(), config)) {
        final TestInputTopic<String, String> rightInput =
            driver.createInputTopic("right", STRING_SERIALIZER, STRING_SERIALIZER);
        final TestInputTopic<String, String> leftInput =
            driver.createInputTopic("left", STRING_SERIALIZER, STRING_SERIALIZER);

        rightInput.pipeInput("B", "1", 0L);
        rightInput.pipeInput("A", "1", 0L);
        // Both records are held in the suppression buffer, so nothing is emitted yet.
        verify(drainProducerRecords(driver, "output", STRING_DESERIALIZER, STRING_DESERIALIZER), emptyList());

        // Advancing stream time to the limit flushes the buffer.
        rightInput.pipeInput("tick", "tick", 10L);
        verify(
            drainProducerRecords(driver, "output", STRING_DESERIALIZER, STRING_DESERIALIZER),
            asList(
                new KeyValueTimestamp<>("A", "(null,1)", 0L),
                new KeyValueTimestamp<>("B", "(null,1)", 0L)
            )
        );

        // A fresh right-side update for "A" is buffered again.
        rightInput.pipeInput("A", "2", 11L);
        verify(drainProducerRecords(driver, "output", STRING_DESERIALIZER, STRING_DESERIALIZER), emptyList());

        // The left side joins against the previously emitted right-side value ("1"),
        // not against the still-buffered update ("2").
        leftInput.pipeInput("A", "a", 12L);
        verify(
            drainProducerRecords(driver, "output", STRING_DESERIALIZER, STRING_DESERIALIZER),
            singletonList(new KeyValueTimestamp<>("A", "(a,1)", 12L))
        );

        // "B" is no longer buffered, so the join views through to the parent KTable.
        leftInput.pipeInput("B", "b", 12L);
        verify(
            drainProducerRecords(driver, "output", STRING_DESERIALIZER, STRING_DESERIALIZER),
            singletonList(new KeyValueTimestamp<>("B", "(b,1)", 12L))
        );

        // Still joins against the previously emitted right-side value for "A".
        leftInput.pipeInput("A", "b", 13L);
        verify(
            drainProducerRecords(driver, "output", STRING_DESERIALIZER, STRING_DESERIALIZER),
            singletonList(new KeyValueTimestamp<>("A", "(b,1)", 13L))
        );

        // Another tick flushes the buffered "A" -> "2" update; the emitted "tick"
        // join result itself is just a testing artifact.
        rightInput.pipeInput("tick", "tick1", 21L);
        verify(
            drainProducerRecords(driver, "output", STRING_DESERIALIZER, STRING_DESERIALIZER),
            asList(
                new KeyValueTimestamp<>("tick", "(null,tick1)", 21),
                new KeyValueTimestamp<>("A", "(b,2)", 13L)
            )
        );
    }
}
Also used : StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) TopologyTestDriver(org.apache.kafka.streams.TopologyTestDriver) KeyValueTimestamp(org.apache.kafka.streams.KeyValueTimestamp) Test(org.junit.Test)

Example 62 with KeyValueTimestamp

Usage of org.apache.kafka.streams.KeyValueTimestamp in the Apache Kafka project.

Taken from the class SuppressScenarioTest, method shouldImmediatelyEmitEventsWithZeroEmitAfter.

@Test
public void shouldImmediatelyEmitEventsWithZeroEmitAfter() {
    // With a time limit of ZERO, suppression must be a pass-through: the
    // suppressed stream has to match the raw stream record-for-record.
    final StreamsBuilder builder = new StreamsBuilder();
    final KTable<String, Long> valueCounts = builder
        .table(
            "input",
            Consumed.with(STRING_SERDE, STRING_SERDE),
            Materialized.<String, String, KeyValueStore<Bytes, byte[]>>with(STRING_SERDE, STRING_SERDE)
                .withCachingDisabled()
                .withLoggingDisabled()
        )
        .groupBy((k, v) -> new KeyValue<>(v, k), Grouped.with(STRING_SERDE, STRING_SERDE))
        .count();
    valueCounts
        .suppress(untilTimeLimit(ZERO, unbounded()))
        .toStream()
        .to("output-suppressed", Produced.with(STRING_SERDE, Serdes.Long()));
    valueCounts
        .toStream()
        .to("output-raw", Produced.with(STRING_SERDE, Serdes.Long()));
    final Topology topology = builder.build();
    try (final TopologyTestDriver driver = new TopologyTestDriver(topology, config)) {
        final TestInputTopic<String, String> inputTopic =
            driver.createInputTopic("input", STRING_SERIALIZER, STRING_SERIALIZER);

        inputTopic.pipeInput("k1", "v1", 0L);
        inputTopic.pipeInput("k1", "v2", 1L);
        inputTopic.pipeInput("k2", "v1", 2L);
        // Re-keying by value means "k1" moving from "v1" to "v2" first decrements
        // the "v1" count, then increments the "v2" count.
        final List<KeyValueTimestamp<String, Long>> firstBatch = asList(
            new KeyValueTimestamp<>("v1", 1L, 0L),
            new KeyValueTimestamp<>("v1", 0L, 1L),
            new KeyValueTimestamp<>("v2", 1L, 1L),
            new KeyValueTimestamp<>("v1", 1L, 2L)
        );
        verify(drainProducerRecords(driver, "output-raw", STRING_DESERIALIZER, LONG_DESERIALIZER), firstBatch);
        verify(drainProducerRecords(driver, "output-suppressed", STRING_DESERIALIZER, LONG_DESERIALIZER), firstBatch);

        inputTopic.pipeInput("x", "x", 3L);
        final List<KeyValueTimestamp<String, Long>> secondBatch =
            singletonList(new KeyValueTimestamp<>("x", 1L, 3L));
        verify(drainProducerRecords(driver, "output-raw", STRING_DESERIALIZER, LONG_DESERIALIZER), secondBatch);
        verify(drainProducerRecords(driver, "output-suppressed", STRING_DESERIALIZER, LONG_DESERIALIZER), secondBatch);

        inputTopic.pipeInput("x", "y", 4L);
        final List<KeyValueTimestamp<String, Long>> thirdBatch = asList(
            new KeyValueTimestamp<>("x", 0L, 4L),
            new KeyValueTimestamp<>("y", 1L, 4L)
        );
        verify(drainProducerRecords(driver, "output-raw", STRING_DESERIALIZER, LONG_DESERIALIZER), thirdBatch);
        verify(drainProducerRecords(driver, "output-suppressed", STRING_DESERIALIZER, LONG_DESERIALIZER), thirdBatch);
    }
}
Also used : StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) Produced(org.apache.kafka.streams.kstream.Produced) Collections.singletonList(java.util.Collections.singletonList) StringDeserializer(org.apache.kafka.common.serialization.StringDeserializer) Serde(org.apache.kafka.common.serialization.Serde) Arrays.asList(java.util.Arrays.asList) KeyValueStore(org.apache.kafka.streams.state.KeyValueStore) Duration(java.time.Duration) BufferConfig.maxRecords(org.apache.kafka.streams.kstream.Suppressed.BufferConfig.maxRecords) Serdes(org.apache.kafka.common.serialization.Serdes) StringSerializer(org.apache.kafka.common.serialization.StringSerializer) TestRecord(org.apache.kafka.streams.test.TestRecord) TestUtils(org.apache.kafka.test.TestUtils) Collections.emptyList(java.util.Collections.emptyList) KeyValue(org.apache.kafka.streams.KeyValue) LongDeserializer(org.apache.kafka.common.serialization.LongDeserializer) Bytes(org.apache.kafka.common.utils.Bytes) List(java.util.List) Materialized(org.apache.kafka.streams.kstream.Materialized) ZERO(java.time.Duration.ZERO) Duration.ofMillis(java.time.Duration.ofMillis) Topology(org.apache.kafka.streams.Topology) StreamsConfig(org.apache.kafka.streams.StreamsConfig) KGroupedStream(org.apache.kafka.streams.kstream.KGroupedStream) SessionWindows(org.apache.kafka.streams.kstream.SessionWindows) BufferConfig.unbounded(org.apache.kafka.streams.kstream.Suppressed.BufferConfig.unbounded) CoreMatchers.equalTo(org.hamcrest.CoreMatchers.equalTo) KStream(org.apache.kafka.streams.kstream.KStream) BufferConfig.maxBytes(org.apache.kafka.streams.kstream.Suppressed.BufferConfig.maxBytes) WindowStore(org.apache.kafka.streams.state.WindowStore) Suppressed.untilWindowCloses(org.apache.kafka.streams.kstream.Suppressed.untilWindowCloses) Windowed(org.apache.kafka.streams.kstream.Windowed) Named(org.apache.kafka.streams.kstream.Named) Deserializer(org.apache.kafka.common.serialization.Deserializer) 
SessionStore(org.apache.kafka.streams.state.SessionStore) MatcherAssert.assertThat(org.hamcrest.MatcherAssert.assertThat) Utils(org.apache.kafka.common.utils.Utils) TopologyTestDriver(org.apache.kafka.streams.TopologyTestDriver) StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) KTable(org.apache.kafka.streams.kstream.KTable) Properties(java.util.Properties) Iterator(java.util.Iterator) Consumed(org.apache.kafka.streams.kstream.Consumed) Suppressed(org.apache.kafka.streams.kstream.Suppressed) Test(org.junit.Test) KeyValueTimestamp(org.apache.kafka.streams.KeyValueTimestamp) Grouped(org.apache.kafka.streams.kstream.Grouped) SlidingWindows(org.apache.kafka.streams.kstream.SlidingWindows) TimeWindows(org.apache.kafka.streams.kstream.TimeWindows) TestInputTopic(org.apache.kafka.streams.TestInputTopic) Comparator(java.util.Comparator) Suppressed.untilTimeLimit(org.apache.kafka.streams.kstream.Suppressed.untilTimeLimit) Bytes(org.apache.kafka.common.utils.Bytes) BufferConfig.maxBytes(org.apache.kafka.streams.kstream.Suppressed.BufferConfig.maxBytes) KeyValue(org.apache.kafka.streams.KeyValue) TopologyTestDriver(org.apache.kafka.streams.TopologyTestDriver) Topology(org.apache.kafka.streams.Topology) KeyValueTimestamp(org.apache.kafka.streams.KeyValueTimestamp) Test(org.junit.Test)

Example 63 with KeyValueTimestamp

Usage of org.apache.kafka.streams.KeyValueTimestamp in the Apache Kafka project.

Taken from the class SuppressScenarioTest, method shouldWorkBeforeJoinLeft.

@Test
public void shouldWorkBeforeJoinLeft() {
    // Mirror of the "join right" scenario: suppress the left-hand table of an
    // outer join while the right-hand table passes through unchanged.
    final StreamsBuilder builder = new StreamsBuilder();
    final KTable<String, String> suppressedLeft =
        builder
            .table("left", Consumed.with(Serdes.String(), Serdes.String()))
            .suppress(untilTimeLimit(ofMillis(10), unbounded()));
    final KTable<String, String> rightTable =
        builder.table("right", Consumed.with(Serdes.String(), Serdes.String()));
    suppressedLeft
        .outerJoin(rightTable, (l, r) -> String.format("(%s,%s)", l, r))
        .toStream()
        .to("output", Produced.with(Serdes.String(), Serdes.String()));
    final Topology topology = builder.build();
    try (final TopologyTestDriver driver = new TopologyTestDriver(topology, config)) {
        final TestInputTopic<String, String> rightInput =
            driver.createInputTopic("right", STRING_SERIALIZER, STRING_SERIALIZER);
        final TestInputTopic<String, String> leftInput =
            driver.createInputTopic("left", STRING_SERIALIZER, STRING_SERIALIZER);

        leftInput.pipeInput("B", "1", 0L);
        leftInput.pipeInput("A", "1", 0L);
        // Both records are held in the suppression buffer, so nothing is emitted yet.
        verify(drainProducerRecords(driver, "output", STRING_DESERIALIZER, STRING_DESERIALIZER), emptyList());

        // Advancing stream time to the limit flushes the buffer.
        leftInput.pipeInput("tick", "tick", 10L);
        verify(
            drainProducerRecords(driver, "output", STRING_DESERIALIZER, STRING_DESERIALIZER),
            asList(
                new KeyValueTimestamp<>("A", "(1,null)", 0L),
                new KeyValueTimestamp<>("B", "(1,null)", 0L)
            )
        );

        // A fresh left-side update for "A" is buffered again.
        leftInput.pipeInput("A", "2", 11L);
        verify(drainProducerRecords(driver, "output", STRING_DESERIALIZER, STRING_DESERIALIZER), emptyList());

        // The right side joins against the previously emitted left-side value ("1"),
        // not against the still-buffered update ("2").
        rightInput.pipeInput("A", "a", 12L);
        verify(
            drainProducerRecords(driver, "output", STRING_DESERIALIZER, STRING_DESERIALIZER),
            singletonList(new KeyValueTimestamp<>("A", "(1,a)", 12L))
        );

        // "B" is no longer buffered, so the join views through to the parent KTable.
        rightInput.pipeInput("B", "b", 12L);
        verify(
            drainProducerRecords(driver, "output", STRING_DESERIALIZER, STRING_DESERIALIZER),
            singletonList(new KeyValueTimestamp<>("B", "(1,b)", 12L))
        );

        // Still joins against the previously emitted left-side value for "A".
        rightInput.pipeInput("A", "b", 13L);
        verify(
            drainProducerRecords(driver, "output", STRING_DESERIALIZER, STRING_DESERIALIZER),
            singletonList(new KeyValueTimestamp<>("A", "(1,b)", 13L))
        );

        // Another tick flushes the buffered "A" -> "2" update; the emitted "tick"
        // join result itself is just a testing artifact.
        leftInput.pipeInput("tick", "tick1", 21L);
        verify(
            drainProducerRecords(driver, "output", STRING_DESERIALIZER, STRING_DESERIALIZER),
            asList(
                new KeyValueTimestamp<>("tick", "(tick1,null)", 21),
                new KeyValueTimestamp<>("A", "(2,b)", 13L)
            )
        );
    }
}
Also used : StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) TopologyTestDriver(org.apache.kafka.streams.TopologyTestDriver) Topology(org.apache.kafka.streams.Topology) KeyValueTimestamp(org.apache.kafka.streams.KeyValueTimestamp) Test(org.junit.Test)

Example 64 with KeyValueTimestamp

Usage of org.apache.kafka.streams.KeyValueTimestamp in the Apache Kafka project.

Taken from the class SuppressScenarioTest, method shouldSupportFinalResultsForSlidingWindows.

@Test
public void shouldSupportFinalResultsForSlidingWindows() {
    // Counts per key over 5ms sliding windows with a 15ms grace period.
    // The suppressed stream must emit exactly one final result per window,
    // and only once that window has closed.
    final StreamsBuilder builder = new StreamsBuilder();
    final KTable<Windowed<String>, Long> valueCounts = builder
        .stream("input", Consumed.with(STRING_SERDE, STRING_SERDE))
        .groupBy((String k, String v) -> k, Grouped.with(STRING_SERDE, STRING_SERDE))
        .windowedBy(SlidingWindows.withTimeDifferenceAndGrace(ofMillis(5L), ofMillis(15L)))
        .count(Materialized.<String, Long, WindowStore<Bytes, byte[]>>as("counts").withCachingDisabled().withKeySerde(STRING_SERDE));
    valueCounts
        .suppress(untilWindowCloses(unbounded()))
        .toStream()
        .map((final Windowed<String> k, final Long v) -> new KeyValue<>(k.toString(), v))
        .to("output-suppressed", Produced.with(STRING_SERDE, Serdes.Long()));
    valueCounts
        .toStream()
        .map((final Windowed<String> k, final Long v) -> new KeyValue<>(k.toString(), v))
        .to("output-raw", Produced.with(STRING_SERDE, Serdes.Long()));
    // NOTE: removed a leftover debug System.out.println(topology.describe())
    // that polluted the test output.
    final Topology topology = builder.build();
    try (final TopologyTestDriver driver = new TopologyTestDriver(topology, config)) {
        final TestInputTopic<String, String> inputTopic = driver.createInputTopic("input", STRING_SERIALIZER, STRING_SERIALIZER);
        inputTopic.pipeInput("k1", "v1", 10L);
        inputTopic.pipeInput("k1", "v1", 11L);
        inputTopic.pipeInput("k1", "v1", 10L);
        inputTopic.pipeInput("k1", "v1", 13L);
        inputTopic.pipeInput("k1", "v1", 10L);
        inputTopic.pipeInput("k1", "v1", 24L);
        // this update should get dropped, since the previous event advanced the stream time and closed the window.
        inputTopic.pipeInput("k1", "v1", 5L);
        inputTopic.pipeInput("k1", "v1", 7L);
        // final record to advance stream time and flush windows
        inputTopic.pipeInput("k1", "v1", 90L);

        // The raw output order depends on internal processing order, so sort it
        // by key then timestamp before comparing.
        final Comparator<TestRecord<String, Long>> comparator =
            Comparator.comparing((TestRecord<String, Long> o) -> o.getKey())
                      .thenComparing((TestRecord<String, Long> o) -> o.timestamp());
        final List<TestRecord<String, Long>> actual =
            drainProducerRecords(driver, "output-raw", STRING_DESERIALIZER, LONG_DESERIALIZER);
        actual.sort(comparator);
        verify(actual, asList(
            // right window for k1@10 created when k1@11 is processed
            new KeyValueTimestamp<>("[k1@11/16]", 1L, 11L),
            // right window for k1@10 updated when k1@13 is processed
            new KeyValueTimestamp<>("[k1@11/16]", 2L, 13L),
            // right window for k1@11 created when k1@13 is processed
            new KeyValueTimestamp<>("[k1@12/17]", 1L, 13L),
            // left window for k1@24 created when k1@24 is processed
            new KeyValueTimestamp<>("[k1@19/24]", 1L, 24L),
            // left window for k1@10 created when k1@10 is processed
            new KeyValueTimestamp<>("[k1@5/10]", 1L, 10L),
            // left window for k1@10 updated when k1@10 is processed
            new KeyValueTimestamp<>("[k1@5/10]", 2L, 10L),
            // left window for k1@10 updated when k1@10 is processed
            new KeyValueTimestamp<>("[k1@5/10]", 3L, 10L),
            // left window for k1@10 updated when k1@5 is processed
            new KeyValueTimestamp<>("[k1@5/10]", 4L, 10L),
            // left window for k1@10 updated when k1@7 is processed
            new KeyValueTimestamp<>("[k1@5/10]", 5L, 10L),
            // left window for k1@11 created when k1@11 is processed
            new KeyValueTimestamp<>("[k1@6/11]", 2L, 11L),
            // left window for k1@11 updated when k1@10 is processed
            new KeyValueTimestamp<>("[k1@6/11]", 3L, 11L),
            // left window for k1@11 updated when k1@10 is processed
            new KeyValueTimestamp<>("[k1@6/11]", 4L, 11L),
            // left window for k1@11 updated when k1@7 is processed
            new KeyValueTimestamp<>("[k1@6/11]", 5L, 11L),
            // left window for k1@13 created when k1@13 is processed
            new KeyValueTimestamp<>("[k1@8/13]", 4L, 13L),
            // left window for k1@13 updated when k1@10 is processed
            new KeyValueTimestamp<>("[k1@8/13]", 5L, 13L),
            // right window for k1@90 created when k1@90 is processed
            new KeyValueTimestamp<>("[k1@85/90]", 1L, 90L)
        ));
        // The suppressed stream emits one final result per closed window.
        verify(
            drainProducerRecords(driver, "output-suppressed", STRING_DESERIALIZER, LONG_DESERIALIZER),
            asList(
                new KeyValueTimestamp<>("[k1@5/10]", 5L, 10L),
                new KeyValueTimestamp<>("[k1@6/11]", 5L, 11L),
                new KeyValueTimestamp<>("[k1@8/13]", 5L, 13L),
                new KeyValueTimestamp<>("[k1@11/16]", 2L, 13L),
                new KeyValueTimestamp<>("[k1@12/17]", 1L, 13L),
                new KeyValueTimestamp<>("[k1@19/24]", 1L, 24L)
            )
        );
    }
}
Also used : KeyValue(org.apache.kafka.streams.KeyValue) TopologyTestDriver(org.apache.kafka.streams.TopologyTestDriver) Topology(org.apache.kafka.streams.Topology) StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) Windowed(org.apache.kafka.streams.kstream.Windowed) Bytes(org.apache.kafka.common.utils.Bytes) BufferConfig.maxBytes(org.apache.kafka.streams.kstream.Suppressed.BufferConfig.maxBytes) TestRecord(org.apache.kafka.streams.test.TestRecord) KeyValueTimestamp(org.apache.kafka.streams.KeyValueTimestamp) Test(org.junit.Test)

Example 65 with KeyValueTimestamp

Usage of org.apache.kafka.streams.KeyValueTimestamp in the Apache Kafka project.

Taken from the class SuppressScenarioTest, method shouldSupportFinalResultsForTimeWindowsWithLargeJump.

@Test
public void shouldSupportFinalResultsForTimeWindowsWithLargeJump() {
    // Counts per key over 2ms tumbling windows with a 2ms grace period.
    // A large jump in stream time (to 30) must close and flush all open
    // windows at once in the suppressed stream.
    final StreamsBuilder builder = new StreamsBuilder();
    final KTable<Windowed<String>, Long> valueCounts = builder
        .stream("input", Consumed.with(STRING_SERDE, STRING_SERDE))
        .groupBy((String k, String v) -> k, Grouped.with(STRING_SERDE, STRING_SERDE))
        .windowedBy(TimeWindows.of(ofMillis(2L)).grace(ofMillis(2L)))
        .count(Materialized.<String, Long, WindowStore<Bytes, byte[]>>as("counts").withCachingDisabled().withKeySerde(STRING_SERDE));
    valueCounts
        .suppress(untilWindowCloses(unbounded()))
        .toStream()
        .map((final Windowed<String> k, final Long v) -> new KeyValue<>(k.toString(), v))
        .to("output-suppressed", Produced.with(STRING_SERDE, Serdes.Long()));
    valueCounts
        .toStream()
        .map((final Windowed<String> k, final Long v) -> new KeyValue<>(k.toString(), v))
        .to("output-raw", Produced.with(STRING_SERDE, Serdes.Long()));
    // NOTE: removed a leftover debug System.out.println(topology.describe())
    // that polluted the test output.
    final Topology topology = builder.build();
    try (final TopologyTestDriver driver = new TopologyTestDriver(topology, config)) {
        final TestInputTopic<String, String> inputTopic = driver.createInputTopic("input", STRING_SERIALIZER, STRING_SERIALIZER);
        inputTopic.pipeInput("k1", "v1", 0L);
        inputTopic.pipeInput("k1", "v1", 1L);
        inputTopic.pipeInput("k1", "v1", 2L);
        inputTopic.pipeInput("k1", "v1", 0L);
        inputTopic.pipeInput("k1", "v1", 3L);
        inputTopic.pipeInput("k1", "v1", 0L);
        inputTopic.pipeInput("k1", "v1", 4L);
        // this update should get dropped, since the previous event advanced the stream time and closed the window.
        inputTopic.pipeInput("k1", "v1", 0L);
        inputTopic.pipeInput("k1", "v1", 30L);
        // The raw stream sees every update, including late-but-in-grace records.
        verify(
            drainProducerRecords(driver, "output-raw", STRING_DESERIALIZER, LONG_DESERIALIZER),
            asList(
                new KeyValueTimestamp<>("[k1@0/2]", 1L, 0L),
                new KeyValueTimestamp<>("[k1@0/2]", 2L, 1L),
                new KeyValueTimestamp<>("[k1@2/4]", 1L, 2L),
                new KeyValueTimestamp<>("[k1@0/2]", 3L, 1L),
                new KeyValueTimestamp<>("[k1@2/4]", 2L, 3L),
                new KeyValueTimestamp<>("[k1@0/2]", 4L, 1L),
                new KeyValueTimestamp<>("[k1@4/6]", 1L, 4L),
                new KeyValueTimestamp<>("[k1@30/32]", 1L, 30L)
            )
        );
        // The suppressed stream emits only the final count for each closed window;
        // [k1@30/32] is still open and therefore not emitted.
        verify(
            drainProducerRecords(driver, "output-suppressed", STRING_DESERIALIZER, LONG_DESERIALIZER),
            asList(
                new KeyValueTimestamp<>("[k1@0/2]", 4L, 1L),
                new KeyValueTimestamp<>("[k1@2/4]", 2L, 3L),
                new KeyValueTimestamp<>("[k1@4/6]", 1L, 4L)
            )
        );
    }
}
Also used : StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) Windowed(org.apache.kafka.streams.kstream.Windowed) Bytes(org.apache.kafka.common.utils.Bytes) BufferConfig.maxBytes(org.apache.kafka.streams.kstream.Suppressed.BufferConfig.maxBytes) KeyValue(org.apache.kafka.streams.KeyValue) TopologyTestDriver(org.apache.kafka.streams.TopologyTestDriver) Topology(org.apache.kafka.streams.Topology) KeyValueTimestamp(org.apache.kafka.streams.KeyValueTimestamp) Test(org.junit.Test)

Aggregations

KeyValueTimestamp (org.apache.kafka.streams.KeyValueTimestamp)71 Test (org.junit.Test)65 StreamsBuilder (org.apache.kafka.streams.StreamsBuilder)46 TopologyTestDriver (org.apache.kafka.streams.TopologyTestDriver)44 StringSerializer (org.apache.kafka.common.serialization.StringSerializer)36 Properties (java.util.Properties)26 Windowed (org.apache.kafka.streams.kstream.Windowed)22 Serdes (org.apache.kafka.common.serialization.Serdes)21 IntegrationTest (org.apache.kafka.test.IntegrationTest)20 StringDeserializer (org.apache.kafka.common.serialization.StringDeserializer)19 KeyValue (org.apache.kafka.streams.KeyValue)19 Consumed (org.apache.kafka.streams.kstream.Consumed)17 IntegerSerializer (org.apache.kafka.common.serialization.IntegerSerializer)16 MockApiProcessorSupplier (org.apache.kafka.test.MockApiProcessorSupplier)16 Bytes (org.apache.kafka.common.utils.Bytes)15 KStream (org.apache.kafka.streams.kstream.KStream)14 Duration (java.time.Duration)13 KafkaStreams (org.apache.kafka.streams.KafkaStreams)13 TestInputTopic (org.apache.kafka.streams.TestInputTopic)13 Utils.mkProperties (org.apache.kafka.common.utils.Utils.mkProperties)12