
Example 81 with TopologyTestDriver

Use of org.apache.kafka.streams.TopologyTestDriver in project kafka by apache.

From class KStreamSlidingWindowAggregateTest, method testEarlyRecordsLargeInput.

@Test
public void testEarlyRecordsLargeInput() {
    final StreamsBuilder builder = new StreamsBuilder();
    final String topic = "topic";
    final WindowBytesStoreSupplier storeSupplier = inOrderIterator
        ? new InOrderMemoryWindowStoreSupplier("InOrder", 50000L, 10L, false)
        : Stores.inMemoryWindowStore("Reverse", Duration.ofMillis(50000), Duration.ofMillis(10), false);
    final KTable<Windowed<String>, String> table2 = builder
        .stream(topic, Consumed.with(Serdes.String(), Serdes.String()))
        .groupByKey(Grouped.with(Serdes.String(), Serdes.String()))
        .windowedBy(SlidingWindows.ofTimeDifferenceAndGrace(ofMillis(10), ofMillis(50)))
        .aggregate(MockInitializer.STRING_INIT, MockAggregator.TOSTRING_ADDER, Materialized.as(storeSupplier));
    final MockApiProcessorSupplier<Windowed<String>, String, Void, Void> supplier = new MockApiProcessorSupplier<>();
    table2.toStream().process(supplier);
    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        final TestInputTopic<String, String> inputTopic1 = driver.createInputTopic(topic, new StringSerializer(), new StringSerializer());
        inputTopic1.pipeInput("E", "1", 0L);
        inputTopic1.pipeInput("E", "3", 5L);
        inputTopic1.pipeInput("E", "4", 6L);
        inputTopic1.pipeInput("E", "2", 3L);
        inputTopic1.pipeInput("E", "6", 13L);
        inputTopic1.pipeInput("E", "5", 10L);
        inputTopic1.pipeInput("E", "7", 4L);
        inputTopic1.pipeInput("E", "8", 2L);
        inputTopic1.pipeInput("E", "9", 15L);
    }
    final Comparator<KeyValueTimestamp<Windowed<String>, String>> comparator =
        Comparator.comparing((KeyValueTimestamp<Windowed<String>, String> o) -> o.key().key())
            .thenComparing((KeyValueTimestamp<Windowed<String>, String> o) -> o.key().window().start());
    final ArrayList<KeyValueTimestamp<Windowed<String>, String>> actual = supplier.theCapturedProcessor().processed();
    actual.sort(comparator);
    // Each "E@t" comment marks the input record (key "E" at timestamp t) that triggered that update.
    assertEquals(asList(
        new KeyValueTimestamp<>(new Windowed<>("E", new TimeWindow(0, 10)), "0+1", 0), // E@0
        new KeyValueTimestamp<>(new Windowed<>("E", new TimeWindow(0, 10)), "0+1+3", 5), // E@5
        new KeyValueTimestamp<>(new Windowed<>("E", new TimeWindow(0, 10)), "0+1+3+4", 6), // E@6
        new KeyValueTimestamp<>(new Windowed<>("E", new TimeWindow(0, 10)), "0+1+3+4+2", 6), // E@3
        new KeyValueTimestamp<>(new Windowed<>("E", new TimeWindow(0, 10)), "0+1+3+4+2+5", 10), // E@10
        new KeyValueTimestamp<>(new Windowed<>("E", new TimeWindow(0, 10)), "0+1+3+4+2+5+7", 10), // E@4
        new KeyValueTimestamp<>(new Windowed<>("E", new TimeWindow(0, 10)), "0+1+3+4+2+5+7+8", 10), // E@2
        new KeyValueTimestamp<>(new Windowed<>("E", new TimeWindow(1, 11)), "0+3", 5), // E@5
        new KeyValueTimestamp<>(new Windowed<>("E", new TimeWindow(1, 11)), "0+3+4", 6), // E@6
        new KeyValueTimestamp<>(new Windowed<>("E", new TimeWindow(1, 11)), "0+3+4+2", 6), // E@3
        new KeyValueTimestamp<>(new Windowed<>("E", new TimeWindow(1, 11)), "0+3+4+2+5", 10), // E@10
        new KeyValueTimestamp<>(new Windowed<>("E", new TimeWindow(1, 11)), "0+3+4+2+5+7", 10), // E@4
        new KeyValueTimestamp<>(new Windowed<>("E", new TimeWindow(1, 11)), "0+3+4+2+5+7+8", 10), // E@2
        new KeyValueTimestamp<>(new Windowed<>("E", new TimeWindow(3, 13)), "0+3+4+2+6", 13), // E@13
        new KeyValueTimestamp<>(new Windowed<>("E", new TimeWindow(3, 13)), "0+3+4+2+6+5", 13), // E@10
        new KeyValueTimestamp<>(new Windowed<>("E", new TimeWindow(3, 13)), "0+3+4+2+6+5+7", 13), // E@4
        new KeyValueTimestamp<>(new Windowed<>("E", new TimeWindow(4, 14)), "0+3+4", 6), // E@3
        new KeyValueTimestamp<>(new Windowed<>("E", new TimeWindow(4, 14)), "0+3+4+6", 13), // E@13
        new KeyValueTimestamp<>(new Windowed<>("E", new TimeWindow(4, 14)), "0+3+4+6+5", 13), // E@10
        new KeyValueTimestamp<>(new Windowed<>("E", new TimeWindow(4, 14)), "0+3+4+6+5+7", 13), // E@4
        new KeyValueTimestamp<>(new Windowed<>("E", new TimeWindow(5, 15)), "0+3+4+6+5", 13), // E@4
        new KeyValueTimestamp<>(new Windowed<>("E", new TimeWindow(5, 15)), "0+3+4+6+5+9", 15), // E@15
        new KeyValueTimestamp<>(new Windowed<>("E", new TimeWindow(6, 16)), "0+4", 6), // E@6
        new KeyValueTimestamp<>(new Windowed<>("E", new TimeWindow(6, 16)), "0+4+6", 13), // E@13
        new KeyValueTimestamp<>(new Windowed<>("E", new TimeWindow(6, 16)), "0+4+6+5", 13), // E@10
        new KeyValueTimestamp<>(new Windowed<>("E", new TimeWindow(6, 16)), "0+4+6+5+9", 15), // E@15
        new KeyValueTimestamp<>(new Windowed<>("E", new TimeWindow(7, 17)), "0+6", 13), // E@13
        new KeyValueTimestamp<>(new Windowed<>("E", new TimeWindow(7, 17)), "0+6+5", 13), // E@10
        new KeyValueTimestamp<>(new Windowed<>("E", new TimeWindow(7, 17)), "0+6+5+9", 15), // E@15
        new KeyValueTimestamp<>(new Windowed<>("E", new TimeWindow(11, 21)), "0+6", 13), // E@10
        new KeyValueTimestamp<>(new Windowed<>("E", new TimeWindow(11, 21)), "0+6+9", 15), // E@15
        new KeyValueTimestamp<>(new Windowed<>("E", new TimeWindow(14, 24)), "0+9", 15)), // E@15
        actual);
}
Also used: MockApiProcessorSupplier (org.apache.kafka.test.MockApiProcessorSupplier), InMemoryWindowBytesStoreSupplier (org.apache.kafka.streams.state.internals.InMemoryWindowBytesStoreSupplier), WindowBytesStoreSupplier (org.apache.kafka.streams.state.WindowBytesStoreSupplier), TopologyTestDriver (org.apache.kafka.streams.TopologyTestDriver), StreamsBuilder (org.apache.kafka.streams.StreamsBuilder), Windowed (org.apache.kafka.streams.kstream.Windowed), StringSerializer (org.apache.kafka.common.serialization.StringSerializer), KeyValueTimestamp (org.apache.kafka.streams.KeyValueTimestamp), Test (org.junit.Test)
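
These snippets reference a props field that is defined elsewhere in the test class. A minimal sketch of the kind of configuration such a test typically hands to TopologyTestDriver, assuming default String serdes; the application id and bootstrap value below are illustrative placeholders, not the ones used by the Kafka test class:

import java.util.Properties;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.StreamsConfig;

// Hypothetical stand-in for the test class's props field.
final Properties props = new Properties();
props.put(StreamsConfig.APPLICATION_ID_CONFIG, "sliding-window-aggregate-test");
// TopologyTestDriver never opens a network connection, so any value works here.
props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "dummy:1234");
props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass());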

Example 82 with TopologyTestDriver

Use of org.apache.kafka.streams.TopologyTestDriver in project kafka by apache.

From class KStreamSlidingWindowAggregateTest, method shouldLogAndMeterWhenSkippingNullKey.

@Test
public void shouldLogAndMeterWhenSkippingNullKey() {
    final String builtInMetricsVersion = StreamsConfig.METRICS_LATEST;
    final StreamsBuilder builder = new StreamsBuilder();
    final String topic = "topic";
    builder
        .stream(topic, Consumed.with(Serdes.String(), Serdes.String()))
        .groupByKey(Grouped.with(Serdes.String(), Serdes.String()))
        .windowedBy(SlidingWindows.ofTimeDifferenceAndGrace(ofMillis(10), ofMillis(100)))
        .aggregate(
            MockInitializer.STRING_INIT,
            MockAggregator.toStringInstance("+"),
            Materialized.<String, String, WindowStore<Bytes, byte[]>>as("topic1-Canonicalized")
                .withValueSerde(Serdes.String()));
    props.setProperty(StreamsConfig.BUILT_IN_METRICS_VERSION_CONFIG, builtInMetricsVersion);
    try (final LogCaptureAppender appender = LogCaptureAppender.createAndRegister(KStreamSlidingWindowAggregate.class);
        final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        final TestInputTopic<String, String> inputTopic = driver.createInputTopic(topic, new StringSerializer(), new StringSerializer());
        inputTopic.pipeInput(null, "1");
        assertThat(
            appender.getEvents().stream()
                .filter(e -> e.getLevel().equals("WARN"))
                .map(Event::getMessage)
                .collect(Collectors.toList()),
            hasItem("Skipping record due to null key or value. topic=[topic] partition=[0] offset=[0]"));
    }
}
Also used: StreamsBuilder (org.apache.kafka.streams.StreamsBuilder), WindowStore (org.apache.kafka.streams.state.WindowStore), LogCaptureAppender (org.apache.kafka.streams.processor.internals.testutil.LogCaptureAppender), Event (org.apache.kafka.streams.processor.internals.testutil.LogCaptureAppender.Event), TopologyTestDriver (org.apache.kafka.streams.TopologyTestDriver), StringSerializer (org.apache.kafka.common.serialization.StringSerializer), Test (org.junit.Test)
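
The test name promises metering as well as logging, but the snippet only asserts on the log line. A hedged sketch of how the metered side could be checked through TopologyTestDriver.metrics(); the metric name dropped-records-total is an assumption based on recent Kafka releases (older built-in metrics versions used different names), so treat this as a sketch rather than the test's actual assertion:

import java.util.Map;
import org.apache.kafka.common.Metric;
import org.apache.kafka.common.MetricName;

// Sum the dropped-records-total metric across all tasks.
double droppedTotal = 0.0;
for (final Map.Entry<MetricName, ? extends Metric> entry : driver.metrics().entrySet()) {
    if (entry.getKey().name().equals("dropped-records-total")) {
        droppedTotal += (Double) entry.getValue().metricValue();
    }
}
// The single null-key record should have been counted as dropped.
assertThat(droppedTotal, is(1.0));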

Example 83 with TopologyTestDriver

Use of org.apache.kafka.streams.TopologyTestDriver in project kafka by apache.

From class KStreamSlidingWindowAggregateTest, method testAggregateRandomInput.

@Test
public void testAggregateRandomInput() {
    final StreamsBuilder builder = new StreamsBuilder();
    final String topic1 = "topic1";
    final WindowBytesStoreSupplier storeSupplier = inOrderIterator
        ? new InOrderMemoryWindowStoreSupplier("InOrder", 50000L, 10L, false)
        : Stores.inMemoryWindowStore("Reverse", Duration.ofMillis(50000), Duration.ofMillis(10), false);
    final KTable<Windowed<String>, String> table = builder
        .stream(topic1, Consumed.with(Serdes.String(), Serdes.String()))
        .groupByKey(Grouped.with(Serdes.String(), Serdes.String()))
        .windowedBy(SlidingWindows.ofTimeDifferenceAndGrace(ofMillis(10), ofMillis(10000)))
        .aggregate(() -> "", (key, value, aggregate) -> {
        aggregate += value;
        final char[] ch = aggregate.toCharArray();
        Arrays.sort(ch);
        aggregate = String.valueOf(ch);
        return aggregate;
    }, Materialized.as(storeSupplier));
    final MockApiProcessorSupplier<Windowed<String>, String, Void, Void> supplier = new MockApiProcessorSupplier<>();
    table.toStream().process(supplier);
    final long seed = new Random().nextLong();
    final Random shuffle = new Random(seed);
    try {
        final List<ValueAndTimestamp<String>> input = Arrays.asList(
            ValueAndTimestamp.make("A", 10L), ValueAndTimestamp.make("B", 15L), ValueAndTimestamp.make("C", 16L),
            ValueAndTimestamp.make("D", 18L), ValueAndTimestamp.make("E", 30L), ValueAndTimestamp.make("F", 40L),
            ValueAndTimestamp.make("G", 55L), ValueAndTimestamp.make("H", 56L), ValueAndTimestamp.make("I", 58L),
            ValueAndTimestamp.make("J", 58L), ValueAndTimestamp.make("K", 62L), ValueAndTimestamp.make("L", 63L),
            ValueAndTimestamp.make("M", 63L), ValueAndTimestamp.make("N", 63L), ValueAndTimestamp.make("O", 76L),
            ValueAndTimestamp.make("P", 77L), ValueAndTimestamp.make("Q", 80L), ValueAndTimestamp.make("R", 2L),
            ValueAndTimestamp.make("S", 3L), ValueAndTimestamp.make("T", 5L), ValueAndTimestamp.make("U", 8L));
        Collections.shuffle(input, shuffle);
        try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
            final TestInputTopic<String, String> inputTopic1 = driver.createInputTopic(topic1, new StringSerializer(), new StringSerializer());
            for (final ValueAndTimestamp<String> i : input) {
                inputTopic1.pipeInput("A", i.value(), i.timestamp());
            }
        }
        final Map<Long, ValueAndTimestamp<String>> results = new HashMap<>();
        for (final KeyValueTimestamp<Windowed<String>, String> entry : supplier.theCapturedProcessor().processed()) {
            final Windowed<String> window = entry.key();
            final Long start = window.window().start();
            final ValueAndTimestamp<String> valueAndTimestamp = ValueAndTimestamp.make(entry.value(), entry.timestamp());
            // Keep only the latest emitted result per window start
            // (putIfAbsent followed by replace is equivalent to a plain put).
            if (results.putIfAbsent(start, valueAndTimestamp) != null) {
                results.replace(start, valueAndTimestamp);
            }
        }
        verifyRandomTestResults(results);
    } catch (final AssertionError t) {
        throw new AssertionError("Assertion failed in randomized test. Reproduce with seed: " + seed + ".", t);
    } catch (final Throwable t) {
        final String msg = "Exception in randomized scenario. Reproduce with seed: " + seed + ".";
        throw new AssertionError(msg, t);
    }
}
Also used: MockApiProcessorSupplier (org.apache.kafka.test.MockApiProcessorSupplier), HashMap (java.util.HashMap), TopologyTestDriver (org.apache.kafka.streams.TopologyTestDriver), Random (java.util.Random), StringSerializer (org.apache.kafka.common.serialization.StringSerializer), InMemoryWindowBytesStoreSupplier (org.apache.kafka.streams.state.internals.InMemoryWindowBytesStoreSupplier), WindowBytesStoreSupplier (org.apache.kafka.streams.state.WindowBytesStoreSupplier), StreamsBuilder (org.apache.kafka.streams.StreamsBuilder), Windowed (org.apache.kafka.streams.kstream.Windowed), ValueAndTimestamp (org.apache.kafka.streams.state.ValueAndTimestamp), Test (org.junit.Test)
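
The aggregator above is what makes the shuffled input legal: each step appends the new value and re-sorts the accumulated characters, so the aggregate for a window is independent of arrival order. A standalone sketch of that property (the helper name is illustrative, not from the Kafka test):

import java.util.Arrays;

// Order-insensitive aggregator: append the value, then sort the characters.
static String addSorted(final String aggregate, final String value) {
    final char[] ch = (aggregate + value).toCharArray();
    Arrays.sort(ch);
    return String.valueOf(ch);
}

// Both arrival orders yield the same aggregate:
//   addSorted(addSorted("", "B"), "A") -> "AB"
//   addSorted(addSorted("", "A"), "B") -> "AB"

This is why remembering the seed is enough to reproduce a failure: any permutation of the input must produce the same per-window results.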

Example 84 with TopologyTestDriver

Use of org.apache.kafka.streams.TopologyTestDriver in project kafka by apache.

From class KStreamTransformTest, method testTransform.

// Old PAPI. Needs to be migrated.
@SuppressWarnings("deprecation")
@Test
public void testTransform() {
    final StreamsBuilder builder = new StreamsBuilder();
    final TransformerSupplier<Number, Number, KeyValue<Integer, Integer>> transformerSupplier =
        () -> new Transformer<Number, Number, KeyValue<Integer, Integer>>() {

        private int total = 0;

        @Override
        public void init(final ProcessorContext context) {
            context.schedule(
                Duration.ofMillis(1),
                PunctuationType.WALL_CLOCK_TIME,
                timestamp -> context.forward(-1, (int) timestamp, To.all().withTimestamp(timestamp)));
        }

        @Override
        public KeyValue<Integer, Integer> transform(final Number key, final Number value) {
            total += value.intValue();
            return KeyValue.pair(key.intValue() * 2, total);
        }

        @Override
        public void close() {
        }
    };
    final int[] expectedKeys = { 1, 10, 100, 1000 };
    final MockProcessorSupplier<Integer, Integer> processor = new MockProcessorSupplier<>();
    final KStream<Integer, Integer> stream = builder.stream(TOPIC_NAME, Consumed.with(Serdes.Integer(), Serdes.Integer()));
    stream.transform(transformerSupplier).process(processor);
    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), Instant.ofEpochMilli(0L))) {
        final TestInputTopic<Integer, Integer> inputTopic = driver.createInputTopic(TOPIC_NAME, new IntegerSerializer(), new IntegerSerializer());
        for (final int expectedKey : expectedKeys) {
            inputTopic.pipeInput(expectedKey, expectedKey * 10, expectedKey / 2L);
        }
        driver.advanceWallClockTime(Duration.ofMillis(2));
        driver.advanceWallClockTime(Duration.ofMillis(1));
        final KeyValueTimestamp[] expected = {
            new KeyValueTimestamp<>(2, 10, 0),
            new KeyValueTimestamp<>(20, 110, 5),
            new KeyValueTimestamp<>(200, 1110, 50),
            new KeyValueTimestamp<>(2000, 11110, 500),
            new KeyValueTimestamp<>(-1, 2, 2),
            new KeyValueTimestamp<>(-1, 3, 3)
        };
        assertEquals(expected.length, processor.theCapturedProcessor().processed().size());
        for (int i = 0; i < expected.length; i++) {
            assertEquals(expected[i], processor.theCapturedProcessor().processed().get(i));
        }
    }
}
Also used: KeyValue (org.apache.kafka.streams.KeyValue), Transformer (org.apache.kafka.streams.kstream.Transformer), TopologyTestDriver (org.apache.kafka.streams.TopologyTestDriver), IntegerSerializer (org.apache.kafka.common.serialization.IntegerSerializer), ProcessorContext (org.apache.kafka.streams.processor.ProcessorContext), StreamsBuilder (org.apache.kafka.streams.StreamsBuilder), MockProcessorSupplier (org.apache.kafka.test.MockProcessorSupplier), KeyValueTimestamp (org.apache.kafka.streams.KeyValueTimestamp), Test (org.junit.Test)
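
The snippet is flagged as old Processor API code. A hedged sketch of the same logic migrated to the typed Processor API (org.apache.kafka.streams.processor.api), which KStream#process accepts in Kafka 3.3 and later; this is a sketch of the migration, not code from the Kafka repository:

import java.time.Duration;
import org.apache.kafka.streams.processor.PunctuationType;
import org.apache.kafka.streams.processor.api.Processor;
import org.apache.kafka.streams.processor.api.ProcessorContext;
import org.apache.kafka.streams.processor.api.ProcessorSupplier;
import org.apache.kafka.streams.processor.api.Record;

final ProcessorSupplier<Integer, Integer, Integer, Integer> newApiSupplier = () ->
    new Processor<Integer, Integer, Integer, Integer>() {

        private int total = 0;
        private ProcessorContext<Integer, Integer> context;

        @Override
        public void init(final ProcessorContext<Integer, Integer> context) {
            this.context = context;
            // Same wall-clock punctuation as the old transformer: forward (-1, time).
            context.schedule(Duration.ofMillis(1), PunctuationType.WALL_CLOCK_TIME,
                timestamp -> context.forward(new Record<>(-1, (int) timestamp, timestamp)));
        }

        @Override
        public void process(final Record<Integer, Integer> record) {
            total += record.value();
            // Doubled key, running total as value; the record's timestamp is carried over.
            context.forward(record.withKey(record.key() * 2).withValue(total));
        }
    };

// stream.process(newApiSupplier) would then replace stream.transform(transformerSupplier).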

Example 85 with TopologyTestDriver

Use of org.apache.kafka.streams.TopologyTestDriver in project kafka by apache.

From class KStreamWindowAggregateTest, method shouldLogAndMeterWhenSkippingExpiredWindow.

@Test
public void shouldLogAndMeterWhenSkippingExpiredWindow() {
    final StreamsBuilder builder = new StreamsBuilder();
    final String topic = "topic";
    final KStream<String, String> stream1 = builder.stream(topic, Consumed.with(Serdes.String(), Serdes.String()));
    stream1
        .groupByKey(Grouped.with(Serdes.String(), Serdes.String()))
        .windowedBy(TimeWindows.ofSizeAndGrace(ofMillis(10), ofMillis(90)).advanceBy(ofMillis(5)))
        .aggregate(
            () -> "",
            MockAggregator.toStringInstance("+"),
            Materialized.<String, String, WindowStore<Bytes, byte[]>>as("topic1-Canonicalized")
                .withValueSerde(Serdes.String())
                .withCachingDisabled()
                .withLoggingDisabled()
                .withRetention(Duration.ofMillis(100)))
        .toStream()
        .map((key, value) -> new KeyValue<>(key.toString(), value))
        .to("output");
    try (final LogCaptureAppender appender = LogCaptureAppender.createAndRegister(KStreamWindowAggregate.class);
        final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        final TestInputTopic<String, String> inputTopic = driver.createInputTopic(topic, new StringSerializer(), new StringSerializer());
        inputTopic.pipeInput("k", "100", 100L);
        inputTopic.pipeInput("k", "0", 0L);
        inputTopic.pipeInput("k", "1", 1L);
        inputTopic.pipeInput("k", "2", 2L);
        inputTopic.pipeInput("k", "3", 3L);
        inputTopic.pipeInput("k", "4", 4L);
        inputTopic.pipeInput("k", "5", 5L);
        inputTopic.pipeInput("k", "6", 6L);
        assertLatenessMetrics(
            driver,
            // how many events get dropped
            is(7.0),
            // k:0 is 100 ms late: its timestamp is 0, but it arrives at stream time 100
            is(100.0),
            // (0 + 100 + 99 + 98 + 97 + 96 + 95 + 94) / 8
            is(84.875));
        assertThat(appender.getMessages(), hasItems(
            "Skipping record for expired window. topic=[topic] partition=[0] offset=[1] timestamp=[0] window=[0,10) expiration=[10] streamTime=[100]",
            "Skipping record for expired window. topic=[topic] partition=[0] offset=[2] timestamp=[1] window=[0,10) expiration=[10] streamTime=[100]",
            "Skipping record for expired window. topic=[topic] partition=[0] offset=[3] timestamp=[2] window=[0,10) expiration=[10] streamTime=[100]",
            "Skipping record for expired window. topic=[topic] partition=[0] offset=[4] timestamp=[3] window=[0,10) expiration=[10] streamTime=[100]",
            "Skipping record for expired window. topic=[topic] partition=[0] offset=[5] timestamp=[4] window=[0,10) expiration=[10] streamTime=[100]",
            "Skipping record for expired window. topic=[topic] partition=[0] offset=[6] timestamp=[5] window=[0,10) expiration=[10] streamTime=[100]",
            "Skipping record for expired window. topic=[topic] partition=[0] offset=[7] timestamp=[6] window=[0,10) expiration=[10] streamTime=[100]"));
        final TestOutputTopic<String, String> outputTopic = driver.createOutputTopic("output", new StringDeserializer(), new StringDeserializer());
        assertThat(outputTopic.readRecord(), equalTo(new TestRecord<>("[k@95/105]", "+100", null, 100L)));
        assertThat(outputTopic.readRecord(), equalTo(new TestRecord<>("[k@100/110]", "+100", null, 100L)));
        assertThat(outputTopic.readRecord(), equalTo(new TestRecord<>("[k@5/15]", "+5", null, 5L)));
        assertThat(outputTopic.readRecord(), equalTo(new TestRecord<>("[k@5/15]", "+5+6", null, 6L)));
        assertTrue(outputTopic.isEmpty());
    }
}
Also used: KeyValue (org.apache.kafka.streams.KeyValue), StringDeserializer (org.apache.kafka.common.serialization.StringDeserializer), TopologyTestDriver (org.apache.kafka.streams.TopologyTestDriver), StreamsBuilder (org.apache.kafka.streams.StreamsBuilder), WindowStore (org.apache.kafka.streams.state.WindowStore), LogCaptureAppender (org.apache.kafka.streams.processor.internals.testutil.LogCaptureAppender), StringSerializer (org.apache.kafka.common.serialization.StringSerializer), TestRecord (org.apache.kafka.streams.test.TestRecord), Test (org.junit.Test)
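
assertLatenessMetrics is a private helper of the test class and is not shown on this page. A hypothetical reconstruction of what it plausibly checks via TopologyTestDriver.metrics(); the metric names (dropped-records-total, record-lateness-max, record-lateness-avg) are assumptions based on recent Kafka metric naming, not copied from the Kafka source:

import java.util.Objects;
import org.hamcrest.Matcher;
import static org.hamcrest.MatcherAssert.assertThat;

// Hypothetical reconstruction of the helper used above.
private static void assertLatenessMetrics(final TopologyTestDriver driver,
                                          final Matcher<Double> dropTotal,
                                          final Matcher<Double> maxLateness,
                                          final Matcher<Double> avgLateness) {
    assertThat((Double) metricValue(driver, "dropped-records-total"), dropTotal);
    assertThat((Double) metricValue(driver, "record-lateness-max"), maxLateness);
    assertThat((Double) metricValue(driver, "record-lateness-avg"), avgLateness);
}

// Finds the first non-null value of a metric with the given name, ignoring group and tags.
private static Object metricValue(final TopologyTestDriver driver, final String name) {
    return driver.metrics().entrySet().stream()
        .filter(e -> e.getKey().name().equals(name))
        .map(e -> e.getValue().metricValue())
        .filter(Objects::nonNull)
        .findFirst()
        .orElseThrow(() -> new AssertionError("metric not found: " + name));
}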

Aggregations

TopologyTestDriver (org.apache.kafka.streams.TopologyTestDriver): 240 usages
Test (org.junit.Test): 209 usages
StringSerializer (org.apache.kafka.common.serialization.StringSerializer): 152 usages
StreamsBuilder (org.apache.kafka.streams.StreamsBuilder): 132 usages
MockApiProcessorSupplier (org.apache.kafka.test.MockApiProcessorSupplier): 91 usages
IntegerSerializer (org.apache.kafka.common.serialization.IntegerSerializer): 63 usages
KeyValue (org.apache.kafka.streams.KeyValue): 60 usages
CoreMatchers.containsString (org.hamcrest.CoreMatchers.containsString): 57 usages
KeyValueTimestamp (org.apache.kafka.streams.KeyValueTimestamp): 54 usages
Windowed (org.apache.kafka.streams.kstream.Windowed): 53 usages
StringDeserializer (org.apache.kafka.common.serialization.StringDeserializer): 51 usages
KeyValueStore (org.apache.kafka.streams.state.KeyValueStore): 48 usages
Topology (org.apache.kafka.streams.Topology): 47 usages
Properties (java.util.Properties): 41 usages
TestInputTopic (org.apache.kafka.streams.TestInputTopic): 32 usages
Serdes (org.apache.kafka.common.serialization.Serdes): 31 usages
Consumed (org.apache.kafka.streams.kstream.Consumed): 30 usages
StreamsTestUtils (org.apache.kafka.test.StreamsTestUtils): 24 usages
Bytes (org.apache.kafka.common.utils.Bytes): 23 usages
TestRecord (org.apache.kafka.streams.test.TestRecord): 23 usages