Example 11 with TestRecord

Use of org.apache.kafka.streams.test.TestRecord in the apache/kafka project.

From class KStreamSlidingWindowAggregateTest, method shouldLogAndMeterWhenSkippingExpiredWindowByGrace.

@Test
public void shouldLogAndMeterWhenSkippingExpiredWindowByGrace() {
    final String builtInMetricsVersion = StreamsConfig.METRICS_LATEST;
    final StreamsBuilder builder = new StreamsBuilder();
    final String topic = "topic";
    final WindowBytesStoreSupplier storeSupplier = inOrderIterator
        ? new InOrderMemoryWindowStoreSupplier("InOrder", 50000L, 10L, false)
        : Stores.inMemoryWindowStore("Reverse", Duration.ofMillis(50000), Duration.ofMillis(10), false);
    final KStream<String, String> stream1 = builder.stream(topic, Consumed.with(Serdes.String(), Serdes.String()));
    stream1.groupByKey(Grouped.with(Serdes.String(), Serdes.String()))
        .windowedBy(SlidingWindows.ofTimeDifferenceAndGrace(ofMillis(10), ofMillis(90)))
        .aggregate(MockInitializer.STRING_INIT, MockAggregator.TOSTRING_ADDER, Materialized.as(storeSupplier))
        .toStream()
        .to("output");
    props.setProperty(StreamsConfig.BUILT_IN_METRICS_VERSION_CONFIG, builtInMetricsVersion);
    try (final LogCaptureAppender appender = LogCaptureAppender.createAndRegister(KStreamSlidingWindowAggregate.class);
        final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        final TestInputTopic<String, String> inputTopic = driver.createInputTopic(topic, new StringSerializer(), new StringSerializer());
        inputTopic.pipeInput("k", "100", 200L);
        inputTopic.pipeInput("k", "0", 100L);
        inputTopic.pipeInput("k", "1", 101L);
        inputTopic.pipeInput("k", "2", 102L);
        inputTopic.pipeInput("k", "3", 103L);
        inputTopic.pipeInput("k", "4", 104L);
        inputTopic.pipeInput("k", "5", 105L);
        inputTopic.pipeInput("k", "6", 15L);
        assertLatenessMetrics(driver, is(7.0), is(185.0), is(96.25));
        assertThat(appender.getMessages(), hasItems(
            // left window for k@100
            "Skipping record for expired window. topic=[topic] partition=[0] offset=[1] timestamp=[100] window=[90,100] expiration=[110] streamTime=[200]",
            // left window for k@101
            "Skipping record for expired window. topic=[topic] partition=[0] offset=[2] timestamp=[101] window=[91,101] expiration=[110] streamTime=[200]",
            // left window for k@102
            "Skipping record for expired window. topic=[topic] partition=[0] offset=[3] timestamp=[102] window=[92,102] expiration=[110] streamTime=[200]",
            // left window for k@103
            "Skipping record for expired window. topic=[topic] partition=[0] offset=[4] timestamp=[103] window=[93,103] expiration=[110] streamTime=[200]",
            // left window for k@104
            "Skipping record for expired window. topic=[topic] partition=[0] offset=[5] timestamp=[104] window=[94,104] expiration=[110] streamTime=[200]",
            // left window for k@105
            "Skipping record for expired window. topic=[topic] partition=[0] offset=[6] timestamp=[105] window=[95,105] expiration=[110] streamTime=[200]",
            // left window for k@15
            "Skipping record for expired window. topic=[topic] partition=[0] offset=[7] timestamp=[15] window=[5,15] expiration=[110] streamTime=[200]"));
        final TestOutputTopic<Windowed<String>, String> outputTopic = driver.createOutputTopic("output", new TimeWindowedDeserializer<>(new StringDeserializer(), 10L), new StringDeserializer());
        assertThat(outputTopic.readRecord(), equalTo(new TestRecord<>(new Windowed<>("k", new TimeWindow(190, 200)), "0+100", null, 200L)));
        assertTrue(outputTopic.isEmpty());
    }
}
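The seven log messages map one-to-one to the seven dropped records. The first record (timestamp 200) advances stream time to 200; with a grace period of 90 ms, the expiration boundary printed in each message is 200 - 90 = 110, and any window ending before that boundary is skipped. The metric assertions follow from the same numbers: 7 records dropped, a maximum observed lateness of 200 - 15 = 185, and an average lateness of (0 + 100 + 99 + 98 + 97 + 96 + 95 + 185) / 8 = 96.25 over all eight records (the on-time first record contributes 0). A minimal sketch of the rule, with an assumed helper name that is not part of the Kafka API:

// Hypothetical helper illustrating the expiration check the log lines reflect:
// a window is skipped once its end falls behind streamTime - grace (the
// "expiration" value in the messages above).
static boolean isExpiredByGrace(final long windowEnd, final long streamTime, final long graceMs) {
    // e.g. window [95,105]: 105 < 200 - 90 = 110, so the record is skipped
    return windowEnd < streamTime - graceMs;
}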

Example 12 with TestRecord

Use of org.apache.kafka.streams.test.TestRecord in the apache/kafka project.

From class KStreamWindowAggregateTest, method shouldLogAndMeterWhenSkippingExpiredWindowByGrace.

@Test
public void shouldLogAndMeterWhenSkippingExpiredWindowByGrace() {
    final StreamsBuilder builder = new StreamsBuilder();
    final String topic = "topic";
    final KStream<String, String> stream1 = builder.stream(topic, Consumed.with(Serdes.String(), Serdes.String()));
    stream1.groupByKey(Grouped.with(Serdes.String(), Serdes.String()))
        .windowedBy(TimeWindows.ofSizeAndGrace(ofMillis(10), ofMillis(90L)).advanceBy(ofMillis(10)))
        .aggregate(
            () -> "",
            MockAggregator.toStringInstance("+"),
            Materialized.<String, String, WindowStore<Bytes, byte[]>>as("topic1-Canonicalized")
                .withValueSerde(Serdes.String())
                .withCachingDisabled()
                .withLoggingDisabled())
        .toStream()
        .map((key, value) -> new KeyValue<>(key.toString(), value))
        .to("output");
    try (final LogCaptureAppender appender = LogCaptureAppender.createAndRegister(KStreamWindowAggregate.class);
        final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        final TestInputTopic<String, String> inputTopic = driver.createInputTopic(topic, new StringSerializer(), new StringSerializer());
        inputTopic.pipeInput("k", "100", 200L);
        inputTopic.pipeInput("k", "0", 100L);
        inputTopic.pipeInput("k", "1", 101L);
        inputTopic.pipeInput("k", "2", 102L);
        inputTopic.pipeInput("k", "3", 103L);
        inputTopic.pipeInput("k", "4", 104L);
        inputTopic.pipeInput("k", "5", 105L);
        inputTopic.pipeInput("k", "6", 6L);
        assertLatenessMetrics(driver, is(7.0), is(194.0), is(97.375));
        assertThat(appender.getMessages(), hasItems(
            "Skipping record for expired window. topic=[topic] partition=[0] offset=[1] timestamp=[100] window=[100,110) expiration=[110] streamTime=[200]",
            "Skipping record for expired window. topic=[topic] partition=[0] offset=[2] timestamp=[101] window=[100,110) expiration=[110] streamTime=[200]",
            "Skipping record for expired window. topic=[topic] partition=[0] offset=[3] timestamp=[102] window=[100,110) expiration=[110] streamTime=[200]",
            "Skipping record for expired window. topic=[topic] partition=[0] offset=[4] timestamp=[103] window=[100,110) expiration=[110] streamTime=[200]",
            "Skipping record for expired window. topic=[topic] partition=[0] offset=[5] timestamp=[104] window=[100,110) expiration=[110] streamTime=[200]",
            "Skipping record for expired window. topic=[topic] partition=[0] offset=[6] timestamp=[105] window=[100,110) expiration=[110] streamTime=[200]",
            "Skipping record for expired window. topic=[topic] partition=[0] offset=[7] timestamp=[6] window=[0,10) expiration=[110] streamTime=[200]"));
        final TestOutputTopic<String, String> outputTopic = driver.createOutputTopic("output", new StringDeserializer(), new StringDeserializer());
        assertThat(outputTopic.readRecord(), equalTo(new TestRecord<>("[k@200/210]", "+100", null, 200L)));
        assertTrue(outputTopic.isEmpty());
    }
}
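The arithmetic mirrors Example 11: stream time is 200 and the grace is 90 ms, so windows ending before 110 are skipped; the maximum lateness is 200 - 6 = 194 and the average is (0 + 100 + 99 + 98 + 97 + 96 + 95 + 194) / 8 = 97.375. Note also that advanceBy equals the window size here, so the hopping windows degenerate to tumbling windows and each timestamp lands in exactly one window, e.g. timestamps 100 through 105 all in [100,110). A hypothetical helper (not Kafka API) for that mapping:

// For tumbling windows (advance == size), the containing window starts at the
// timestamp rounded down to a multiple of the window size.
static long tumblingWindowStart(final long timestampMs, final long sizeMs) {
    return timestampMs - (timestampMs % sizeMs); // 105 -> 100, i.e. window [100,110)
}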

Example 13 with TestRecord

Use of org.apache.kafka.streams.test.TestRecord in the apache/kafka project.

From class ProcessorTopologyTest, method testDrivingInternalRepartitioningForwardingTimestampTopology.

@Test
public void testDrivingInternalRepartitioningForwardingTimestampTopology() {
    driver = new TopologyTestDriver(createInternalRepartitioningWithValueTimestampTopology(), props);
    final TestInputTopic<String, String> inputTopic = driver.createInputTopic(INPUT_TOPIC_1, STRING_SERIALIZER, STRING_SERIALIZER);
    inputTopic.pipeInput("key1", "value1@1000");
    inputTopic.pipeInput("key2", "value2@2000");
    inputTopic.pipeInput("key3", "value3@3000");
    final TestOutputTopic<String, String> outputTopic = driver.createOutputTopic(OUTPUT_TOPIC_1, STRING_DESERIALIZER, STRING_DESERIALIZER);
    assertThat(outputTopic.readRecord(), equalTo(new TestRecord<>("key1", "value1", null, 1000L)));
    assertThat(outputTopic.readRecord(), equalTo(new TestRecord<>("key2", "value2", null, 2000L)));
    assertThat(outputTopic.readRecord(), equalTo(new TestRecord<>("key3", "value3", null, 3000L)));
}
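The topology under test forwards a timestamp parsed out of the value, so an input value of "value1@1000" is emitted as value "value1" with timestamp 1000. The assertions then rely on TestRecord's value-based equality: key, value, headers, and timestamp must all match, and passing null headers behaves like empty headers, which is why the expected records compare equal to records read back from the driver. A minimal sketch of the two timestamp-carrying constructors:

// Equivalent ways to build an expected record: epoch millis plus null headers,
// or an Instant (headers default to empty in both cases).
final TestRecord<String, String> byMillis = new TestRecord<>("key1", "value1", null, 1000L);
final TestRecord<String, String> byInstant = new TestRecord<>("key1", "value1", Instant.ofEpochMilli(1000L));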

Example 14 with TestRecord

Use of org.apache.kafka.streams.test.TestRecord in the apache/kafka project.

From class KStreamImplTest, method shouldSupportTriggerMaterializedWithKTableFromKStream.

@Test
public void shouldSupportTriggerMaterializedWithKTableFromKStream() {
    final Consumed<String, String> consumed = Consumed.with(Serdes.String(), Serdes.String());
    final StreamsBuilder builder = new StreamsBuilder();
    final String input = "input";
    final String output = "output";
    final String storeName = "store";
    builder.stream(input, consumed)
        .toTable()
        .mapValues(
            value -> value.charAt(0) - (int) 'a',
            Materialized.<String, Integer, KeyValueStore<Bytes, byte[]>>as(storeName)
                .withKeySerde(Serdes.String())
                .withValueSerde(Serdes.Integer()))
        .toStream()
        .to(output);
    final Topology topology = builder.build(props);
    final String topologyDescription = topology.describe().toString();
    assertThat(topologyDescription, equalTo(
        "Topologies:\n"
            + "   Sub-topology: 0\n"
            + "    Source: KSTREAM-SOURCE-0000000000 (topics: [input])\n"
            + "      --> KSTREAM-TOTABLE-0000000001\n"
            + "    Processor: KSTREAM-TOTABLE-0000000001 (stores: [])\n"
            + "      --> KTABLE-MAPVALUES-0000000003\n"
            + "      <-- KSTREAM-SOURCE-0000000000\n"
            + "    Processor: KTABLE-MAPVALUES-0000000003 (stores: [store])\n"
            + "      --> KTABLE-TOSTREAM-0000000004\n"
            + "      <-- KSTREAM-TOTABLE-0000000001\n"
            + "    Processor: KTABLE-TOSTREAM-0000000004 (stores: [])\n"
            + "      --> KSTREAM-SINK-0000000005\n"
            + "      <-- KTABLE-MAPVALUES-0000000003\n"
            + "    Sink: KSTREAM-SINK-0000000005 (topic: output)\n"
            + "      <-- KTABLE-TOSTREAM-0000000004\n\n"));
    try (final TopologyTestDriver driver = new TopologyTestDriver(topology, props)) {
        final TestInputTopic<String, String> inputTopic = driver.createInputTopic(input, new StringSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
        final TestOutputTopic<String, Integer> outputTopic = driver.createOutputTopic(output, Serdes.String().deserializer(), Serdes.Integer().deserializer());
        final KeyValueStore<String, Integer> store = driver.getKeyValueStore(storeName);
        inputTopic.pipeInput("A", "green", 10L);
        inputTopic.pipeInput("B", "green", 9L);
        inputTopic.pipeInput("A", "blue", 12L);
        inputTopic.pipeInput("C", "yellow", 15L);
        inputTopic.pipeInput("D", "green", 11L);
        final Map<String, Integer> expectedStore = new HashMap<>();
        expectedStore.putIfAbsent("A", 1);
        expectedStore.putIfAbsent("B", 6);
        expectedStore.putIfAbsent("C", 24);
        expectedStore.putIfAbsent("D", 6);
        assertEquals(expectedStore, asMap(store));
        assertEquals(
            asList(
                new TestRecord<>("A", 6, Instant.ofEpochMilli(10)),
                new TestRecord<>("B", 6, Instant.ofEpochMilli(9)),
                new TestRecord<>("A", 1, Instant.ofEpochMilli(12)),
                new TestRecord<>("C", 24, Instant.ofEpochMilli(15)),
                new TestRecord<>("D", 6, Instant.ofEpochMilli(11))),
            outputTopic.readRecordsToList());
    }
}
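The expected values follow from the mapValues arithmetic: each emitted integer is the value's first character minus 'a', so "green" maps to 6, "blue" to 1, and "yellow" to 24, and the materialized store keeps the latest mapped value per key (key "A" ends on "blue", hence 1). A standalone sanity check of that mapping:

// Quick sanity check of the charAt(0) - 'a' arithmetic used in the test above.
public class CharOffsetCheck {
    public static void main(final String[] args) {
        for (final String value : new String[] {"green", "blue", "yellow"}) {
            System.out.println(value + " -> " + (value.charAt(0) - (int) 'a')); // 6, 1, 24
        }
    }
}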

Example 15 with TestRecord

Use of org.apache.kafka.streams.test.TestRecord in the apache/kafka project.

From class SuppressScenarioTest, method shouldSupportFinalResultsForSlidingWindows.

@Test
public void shouldSupportFinalResultsForSlidingWindows() {
    final StreamsBuilder builder = new StreamsBuilder();
    final KTable<Windowed<String>, Long> valueCounts = builder
        .stream("input", Consumed.with(STRING_SERDE, STRING_SERDE))
        .groupBy((String k, String v) -> k, Grouped.with(STRING_SERDE, STRING_SERDE))
        .windowedBy(SlidingWindows.withTimeDifferenceAndGrace(ofMillis(5L), ofMillis(15L)))
        .count(Materialized.<String, Long, WindowStore<Bytes, byte[]>>as("counts")
            .withCachingDisabled()
            .withKeySerde(STRING_SERDE));
    valueCounts
        .suppress(untilWindowCloses(unbounded()))
        .toStream()
        .map((final Windowed<String> k, final Long v) -> new KeyValue<>(k.toString(), v))
        .to("output-suppressed", Produced.with(STRING_SERDE, Serdes.Long()));
    valueCounts
        .toStream()
        .map((final Windowed<String> k, final Long v) -> new KeyValue<>(k.toString(), v))
        .to("output-raw", Produced.with(STRING_SERDE, Serdes.Long()));
    final Topology topology = builder.build();
    System.out.println(topology.describe());
    try (final TopologyTestDriver driver = new TopologyTestDriver(topology, config)) {
        final TestInputTopic<String, String> inputTopic = driver.createInputTopic("input", STRING_SERIALIZER, STRING_SERIALIZER);
        inputTopic.pipeInput("k1", "v1", 10L);
        inputTopic.pipeInput("k1", "v1", 11L);
        inputTopic.pipeInput("k1", "v1", 10L);
        inputTopic.pipeInput("k1", "v1", 13L);
        inputTopic.pipeInput("k1", "v1", 10L);
        inputTopic.pipeInput("k1", "v1", 24L);
        // stream time is now 24 with a grace of 15 ms, so windows ending before 9 are
        // closed: these two late records cannot open their own left windows, but they
        // still update the windows that remain open (the counts of 4 and 5 below)
        inputTopic.pipeInput("k1", "v1", 5L);
        inputTopic.pipeInput("k1", "v1", 7L);
        // final record to advance stream time and flush windows
        inputTopic.pipeInput("k1", "v1", 90L);
        final Comparator<TestRecord<String, Long>> comparator =
            Comparator.comparing((TestRecord<String, Long> o) -> o.getKey())
                .thenComparing((TestRecord<String, Long> o) -> o.timestamp());
        final List<TestRecord<String, Long>> actual = drainProducerRecords(driver, "output-raw", STRING_DESERIALIZER, LONG_DESERIALIZER);
        actual.sort(comparator);
        verify(actual, asList(// right window for k1@10 created when k1@11 is processed
        new KeyValueTimestamp<>("[k1@11/16]", 1L, 11L), // right window for k1@10 updated when k1@13 is processed
        new KeyValueTimestamp<>("[k1@11/16]", 2L, 13L), // right window for k1@11 created when k1@13 is processed
        new KeyValueTimestamp<>("[k1@12/17]", 1L, 13L), // left window for k1@24 created when k1@24 is processed
        new KeyValueTimestamp<>("[k1@19/24]", 1L, 24L), // left window for k1@10 created when k1@10 is processed
        new KeyValueTimestamp<>("[k1@5/10]", 1L, 10L), // left window for k1@10 updated when k1@10 is processed
        new KeyValueTimestamp<>("[k1@5/10]", 2L, 10L), // left window for k1@10 updated when k1@10 is processed
        new KeyValueTimestamp<>("[k1@5/10]", 3L, 10L), // left window for k1@10 updated when k1@5 is processed
        new KeyValueTimestamp<>("[k1@5/10]", 4L, 10L), // left window for k1@10 updated when k1@7 is processed
        new KeyValueTimestamp<>("[k1@5/10]", 5L, 10L), // left window for k1@11 created when k1@11 is processed
        new KeyValueTimestamp<>("[k1@6/11]", 2L, 11L), // left window for k1@11 updated when k1@10 is processed
        new KeyValueTimestamp<>("[k1@6/11]", 3L, 11L), // left window for k1@11 updated when k1@10 is processed
        new KeyValueTimestamp<>("[k1@6/11]", 4L, 11L), // left window for k1@11 updated when k1@7 is processed
        new KeyValueTimestamp<>("[k1@6/11]", 5L, 11L), // left window for k1@13 created when k1@13 is processed
        new KeyValueTimestamp<>("[k1@8/13]", 4L, 13L), // left window for k1@13 updated when k1@10 is processed
        new KeyValueTimestamp<>("[k1@8/13]", 5L, 13L), // right window for k1@90 created when k1@90 is processed
        new KeyValueTimestamp<>("[k1@85/90]", 1L, 90L)));
        verify(drainProducerRecords(driver, "output-suppressed", STRING_DESERIALIZER, LONG_DESERIALIZER), asList(
            new KeyValueTimestamp<>("[k1@5/10]", 5L, 10L),
            new KeyValueTimestamp<>("[k1@6/11]", 5L, 11L),
            new KeyValueTimestamp<>("[k1@8/13]", 5L, 13L),
            new KeyValueTimestamp<>("[k1@11/16]", 2L, 13L),
            new KeyValueTimestamp<>("[k1@12/17]", 1L, 13L),
            new KeyValueTimestamp<>("[k1@19/24]", 1L, 24L)));
    }
}
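The suppressed stream emits exactly one final result per window, and only once stream time passes the window end plus the 15 ms grace period; the record at timestamp 90 exists purely to advance stream time and flush every earlier window, which is why "output-suppressed" carries six records while "output-raw" shows every intermediate update. The suppression in isolation, as a minimal self-contained sketch:

import org.apache.kafka.streams.kstream.Suppressed;
import org.apache.kafka.streams.kstream.Windowed;

// Buffer final-result candidates without bound and emit a single record per
// window once the window closes (stream time > window end + grace).
public class FinalResultsConfig {
    static final Suppressed<Windowed> FINAL_RESULTS =
        Suppressed.untilWindowCloses(Suppressed.BufferConfig.unbounded());
}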
