
Example 1 with TestOutputTopic

Use of org.apache.kafka.streams.TestOutputTopic in the apache/kafka project.

From class KStreamImplTest, method shouldSupportKeyChangeKTableFromKStream:

@Test
public void shouldSupportKeyChangeKTableFromKStream() {
    final Consumed<String, String> consumed = Consumed.with(Serdes.String(), Serdes.String());
    final StreamsBuilder builder = new StreamsBuilder();
    final String input = "input";
    final String output = "output";
    builder.stream(input, consumed)
        .map((key, value) -> new KeyValue<>(key.charAt(0) - 'A', value))
        .toTable(Materialized.with(Serdes.Integer(), null))
        .toStream()
        .to(output);
    final Topology topology = builder.build();
    final String topologyDescription = topology.describe().toString();
    assertThat(topologyDescription, equalTo(
        "Topologies:\n"
            + "   Sub-topology: 0\n"
            + "    Source: KSTREAM-SOURCE-0000000000 (topics: [input])\n"
            + "      --> KSTREAM-MAP-0000000001\n"
            + "    Processor: KSTREAM-MAP-0000000001 (stores: [])\n"
            + "      --> KSTREAM-FILTER-0000000005\n"
            + "      <-- KSTREAM-SOURCE-0000000000\n"
            + "    Processor: KSTREAM-FILTER-0000000005 (stores: [])\n"
            + "      --> KSTREAM-SINK-0000000004\n"
            + "      <-- KSTREAM-MAP-0000000001\n"
            + "    Sink: KSTREAM-SINK-0000000004 (topic: KSTREAM-TOTABLE-0000000002-repartition)\n"
            + "      <-- KSTREAM-FILTER-0000000005\n"
            + "\n"
            + "  Sub-topology: 1\n"
            + "    Source: KSTREAM-SOURCE-0000000006 (topics: [KSTREAM-TOTABLE-0000000002-repartition])\n"
            + "      --> KSTREAM-TOTABLE-0000000002\n"
            + "    Processor: KSTREAM-TOTABLE-0000000002 (stores: [])\n"
            + "      --> KTABLE-TOSTREAM-0000000007\n"
            + "      <-- KSTREAM-SOURCE-0000000006\n"
            + "    Processor: KTABLE-TOSTREAM-0000000007 (stores: [])\n"
            + "      --> KSTREAM-SINK-0000000008\n"
            + "      <-- KSTREAM-TOTABLE-0000000002\n"
            + "    Sink: KSTREAM-SINK-0000000008 (topic: output)\n"
            + "      <-- KTABLE-TOSTREAM-0000000007\n\n"));
    try (final TopologyTestDriver driver = new TopologyTestDriver(topology, props)) {
        final TestInputTopic<String, String> inputTopic = driver.createInputTopic(input, Serdes.String().serializer(), Serdes.String().serializer());
        final TestOutputTopic<Integer, String> outputTopic = driver.createOutputTopic(output, Serdes.Integer().deserializer(), Serdes.String().deserializer());
        inputTopic.pipeInput("A", "01", 5L);
        inputTopic.pipeInput("B", "02", 100L);
        inputTopic.pipeInput("C", "03", 0L);
        inputTopic.pipeInput("D", "04", 0L);
        inputTopic.pipeInput("A", "05", 10L);
        inputTopic.pipeInput("A", "06", 8L);
        final List<TestRecord<Integer, String>> outputExpectRecords = new ArrayList<>();
        outputExpectRecords.add(new TestRecord<>(0, "01", Instant.ofEpochMilli(5L)));
        outputExpectRecords.add(new TestRecord<>(1, "02", Instant.ofEpochMilli(100L)));
        outputExpectRecords.add(new TestRecord<>(2, "03", Instant.ofEpochMilli(0L)));
        outputExpectRecords.add(new TestRecord<>(3, "04", Instant.ofEpochMilli(0L)));
        outputExpectRecords.add(new TestRecord<>(0, "05", Instant.ofEpochMilli(10L)));
        outputExpectRecords.add(new TestRecord<>(0, "06", Instant.ofEpochMilli(8L)));
        // JUnit's assertEquals takes the expected value first, then the actual one.
        assertEquals(outputExpectRecords, outputTopic.readRecordsToList());
    }
}
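Because map() changes the key, the toTable() call forces the repartition topic (KSTREAM-TOTABLE-0000000002-repartition) that splits the topology into two sub-topologies in the assertion above. The driver itself is constructed from a props field that KStreamImplTest initializes elsewhere; a minimal sketch of a workable configuration, where the application id and dummy bootstrap address are illustrative assumptions rather than the test class's exact values:

import java.util.Properties;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.StreamsConfig;

// Hypothetical setup for the `props` field used by the TopologyTestDriver above.
final Properties props = new Properties();
// TopologyTestDriver needs an application id but never contacts a broker,
// so any id and a dummy bootstrap address are fine.
props.put(StreamsConfig.APPLICATION_ID_CONFIG, "kstream-impl-test");
props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "dummy:1234");
// Default serdes matching the String-keyed, String-valued input topic.
props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());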

Example 2 with TestOutputTopic

Use of org.apache.kafka.streams.TestOutputTopic in the apache/kafka project.

From class RepartitionOptimizingTest, method runTest:

private void runTest(final String optimizationConfig, final int expectedNumberRepartitionTopics) {
    final StreamsBuilder builder = new StreamsBuilder();
    final KStream<String, String> sourceStream = builder.stream(INPUT_TOPIC, Consumed.with(Serdes.String(), Serdes.String()).withName("sourceStream"));
    final KStream<String, String> mappedStream = sourceStream.map((k, v) -> KeyValue.pair(k.toUpperCase(Locale.getDefault()), v), Named.as("source-map"));
    mappedStream.filter((k, v) -> k.equals("B"), Named.as("process-filter"))
        .mapValues(v -> v.toUpperCase(Locale.getDefault()), Named.as("process-mapValues"))
        .process(() -> new SimpleProcessor(processorValueCollector), Named.as("process"));
    final KStream<String, Long> countStream = mappedStream
        .groupByKey(Grouped.as("count-groupByKey"))
        .count(Named.as("count"),
            Materialized.<String, Long>as(Stores.inMemoryKeyValueStore("count-store"))
                .withKeySerde(Serdes.String())
                .withValueSerde(Serdes.Long()))
        .toStream(Named.as("count-toStream"));
    countStream.to(COUNT_TOPIC, Produced.with(Serdes.String(), Serdes.Long()).withName("count-to"));
    mappedStream.groupByKey(Grouped.as("aggregate-groupByKey"))
        .aggregate(initializer, aggregator, Named.as("aggregate"),
            Materialized.<String, Integer>as(Stores.inMemoryKeyValueStore("aggregate-store"))
                .withKeySerde(Serdes.String())
                .withValueSerde(Serdes.Integer()))
        .toStream(Named.as("aggregate-toStream"))
        .to(AGGREGATION_TOPIC, Produced.with(Serdes.String(), Serdes.Integer()).withName("reduce-to"));
    // adding operators for case where the repartition node is further downstream
    mappedStream.filter((k, v) -> true, Named.as("reduce-filter"))
        .peek((k, v) -> System.out.println(k + ":" + v), Named.as("reduce-peek"))
        .groupByKey(Grouped.as("reduce-groupByKey"))
        .reduce(reducer, Named.as("reducer"), Materialized.as(Stores.inMemoryKeyValueStore("reduce-store")))
        .toStream(Named.as("reduce-toStream"))
        .to(REDUCE_TOPIC, Produced.with(Serdes.String(), Serdes.String()));
    mappedStream.filter((k, v) -> k.equals("A"), Named.as("join-filter"))
        .join(countStream,
            (v1, v2) -> v1 + ":" + v2.toString(),
            JoinWindows.of(ofMillis(5000)),
            StreamJoined.<String, String, Long>with(
                    Stores.inMemoryWindowStore("join-store", ofDays(1), ofMillis(10000), true),
                    Stores.inMemoryWindowStore("other-join-store", ofDays(1), ofMillis(10000), true))
                .withName("join")
                .withKeySerde(Serdes.String())
                .withValueSerde(Serdes.String())
                .withOtherValueSerde(Serdes.Long()))
        .to(JOINED_TOPIC, Produced.as("join-to"));
    streamsConfiguration.setProperty(StreamsConfig.TOPOLOGY_OPTIMIZATION_CONFIG, optimizationConfig);
    final Topology topology = builder.build(streamsConfiguration);
    topologyTestDriver = new TopologyTestDriver(topology, streamsConfiguration);
    final TestInputTopic<String, String> inputTopicA = topologyTestDriver.createInputTopic(INPUT_TOPIC, stringSerializer, stringSerializer);
    final TestOutputTopic<String, Long> countOutputTopic = topologyTestDriver.createOutputTopic(COUNT_TOPIC, stringDeserializer, new LongDeserializer());
    final TestOutputTopic<String, Integer> aggregationOutputTopic = topologyTestDriver.createOutputTopic(AGGREGATION_TOPIC, stringDeserializer, new IntegerDeserializer());
    final TestOutputTopic<String, String> reduceOutputTopic = topologyTestDriver.createOutputTopic(REDUCE_TOPIC, stringDeserializer, stringDeserializer);
    final TestOutputTopic<String, String> joinedOutputTopic = topologyTestDriver.createOutputTopic(JOINED_TOPIC, stringDeserializer, stringDeserializer);
    inputTopicA.pipeKeyValueList(getKeyValues());
    // Verify the topology
    final String topologyString = topology.describe().toString();
    if (optimizationConfig.equals(StreamsConfig.OPTIMIZE)) {
        assertEquals(EXPECTED_OPTIMIZED_TOPOLOGY, topologyString);
    } else {
        assertEquals(EXPECTED_UNOPTIMIZED_TOPOLOGY, topologyString);
    }
    // Verify the number of repartition topics
    assertEquals(expectedNumberRepartitionTopics, getCountOfRepartitionTopicsFound(topologyString));
    // Verify the values collected by the processor
    assertThat(processorValueCollector.size(), equalTo(3));
    assertThat(processorValueCollector, equalTo(expectedCollectedProcessorValues));
    // Verify the expected output
    assertThat(countOutputTopic.readKeyValuesToMap(), equalTo(keyValueListToMap(expectedCountKeyValues)));
    assertThat(aggregationOutputTopic.readKeyValuesToMap(), equalTo(keyValueListToMap(expectedAggKeyValues)));
    assertThat(reduceOutputTopic.readKeyValuesToMap(), equalTo(keyValueListToMap(expectedReduceKeyValues)));
    assertThat(joinedOutputTopic.readKeyValuesToMap(), equalTo(keyValueListToMap(expectedJoinKeyValues)));
}
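keyValueListToMap is a private helper of the test class, not shown on this page. From how its result is compared against readKeyValuesToMap(), it evidently folds a list of KeyValue pairs into a map; a minimal sketch under that assumption:

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.kafka.streams.KeyValue;

// Assumed implementation of the keyValueListToMap helper referenced above.
private static <K, V> Map<K, V> keyValueListToMap(final List<KeyValue<K, V>> keyValuePairs) {
    final Map<K, V> map = new HashMap<>();
    for (final KeyValue<K, V> pair : keyValuePairs) {
        // A later pair with the same key overwrites an earlier one, matching
        // the "last value wins" semantics of TestOutputTopic.readKeyValuesToMap().
        map.put(pair.key, pair.value);
    }
    return map;
}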

Example 3 with TestOutputTopic

Use of org.apache.kafka.streams.TestOutputTopic in the apache/kafka project.

From class RepartitionWithMergeOptimizingTest, method runTest:

private void runTest(final String optimizationConfig, final int expectedNumberRepartitionTopics) {
    streamsConfiguration.setProperty(StreamsConfig.TOPOLOGY_OPTIMIZATION_CONFIG, optimizationConfig);
    final StreamsBuilder builder = new StreamsBuilder();
    final KStream<String, String> sourceAStream = builder.stream(INPUT_A_TOPIC, Consumed.with(Serdes.String(), Serdes.String()).withName("sourceAStream"));
    final KStream<String, String> sourceBStream = builder.stream(INPUT_B_TOPIC, Consumed.with(Serdes.String(), Serdes.String()).withName("sourceBStream"));
    final KStream<String, String> mappedAStream = sourceAStream.map((k, v) -> KeyValue.pair(v.split(":")[0], v), Named.as("mappedAStream"));
    final KStream<String, String> mappedBStream = sourceBStream.map((k, v) -> KeyValue.pair(v.split(":")[0], v), Named.as("mappedBStream"));
    final KStream<String, String> mergedStream = mappedAStream.merge(mappedBStream, Named.as("mergedStream"));
    mergedStream.groupByKey(Grouped.as("long-groupByKey"))
        .count(Named.as("long-count"), Materialized.as(Stores.inMemoryKeyValueStore("long-store")))
        .toStream(Named.as("long-toStream"))
        .to(COUNT_TOPIC, Produced.with(Serdes.String(), Serdes.Long()).withName("long-to"));
    mergedStream.groupByKey(Grouped.as("string-groupByKey"))
        .count(Named.as("string-count"), Materialized.as(Stores.inMemoryKeyValueStore("string-store")))
        .toStream(Named.as("string-toStream"))
        .mapValues(v -> v.toString(), Named.as("string-mapValues"))
        .to(STRING_COUNT_TOPIC, Produced.with(Serdes.String(), Serdes.String()).withName("string-to"));
    final Topology topology = builder.build(streamsConfiguration);
    topologyTestDriver = new TopologyTestDriver(topology, streamsConfiguration);
    final TestInputTopic<String, String> inputTopicA = topologyTestDriver.createInputTopic(INPUT_A_TOPIC, stringSerializer, stringSerializer);
    final TestInputTopic<String, String> inputTopicB = topologyTestDriver.createInputTopic(INPUT_B_TOPIC, stringSerializer, stringSerializer);
    final TestOutputTopic<String, Long> countOutputTopic = topologyTestDriver.createOutputTopic(COUNT_TOPIC, stringDeserializer, new LongDeserializer());
    final TestOutputTopic<String, String> stringCountOutputTopic = topologyTestDriver.createOutputTopic(STRING_COUNT_TOPIC, stringDeserializer, stringDeserializer);
    inputTopicA.pipeKeyValueList(getKeyValues());
    inputTopicB.pipeKeyValueList(getKeyValues());
    final String topologyString = topology.describe().toString();
    // Verify the topology
    if (optimizationConfig.equals(StreamsConfig.OPTIMIZE)) {
        assertEquals(EXPECTED_OPTIMIZED_TOPOLOGY, topologyString);
    } else {
        assertEquals(EXPECTED_UNOPTIMIZED_TOPOLOGY, topologyString);
    }
    // Verify the number of repartition topics
    assertEquals(expectedNumberRepartitionTopics, getCountOfRepartitionTopicsFound(topologyString));
    // Verify the expected output
    assertThat(countOutputTopic.readKeyValuesToMap(), equalTo(keyValueListToMap(expectedCountKeyValues)));
    assertThat(stringCountOutputTopic.readKeyValuesToMap(), equalTo(keyValueListToMap(expectedStringCountKeyValues)));
}
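Unlike Example 1, both repartition-optimization tests keep the driver in a topologyTestDriver field instead of using try-with-resources, which implies it is closed in a JUnit @After method. A plausible sketch of that teardown (the null guard is a defensive assumption):

import org.junit.After;

// Hypothetical teardown for the topologyTestDriver field assigned in runTest.
@After
public void tearDown() {
    if (topologyTestDriver != null) {
        // Closing the driver flushes state stores and releases the local state directory.
        topologyTestDriver.close();
    }
}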

Example 4 with TestOutputTopic

Use of org.apache.kafka.streams.TestOutputTopic in the apache/kafka project.

From class KStreamWindowAggregateTest, method shouldLogAndMeterWhenSkippingExpiredWindow:

@Test
public void shouldLogAndMeterWhenSkippingExpiredWindow() {
    final StreamsBuilder builder = new StreamsBuilder();
    final String topic = "topic";
    final KStream<String, String> stream1 = builder.stream(topic, Consumed.with(Serdes.String(), Serdes.String()));
    stream1.groupByKey(Grouped.with(Serdes.String(), Serdes.String()))
        .windowedBy(TimeWindows.ofSizeAndGrace(ofMillis(10), ofMillis(90)).advanceBy(ofMillis(5)))
        .aggregate(
            () -> "",
            MockAggregator.toStringInstance("+"),
            Materialized.<String, String, WindowStore<Bytes, byte[]>>as("topic1-Canonicalized")
                .withValueSerde(Serdes.String())
                .withCachingDisabled()
                .withLoggingDisabled()
                .withRetention(Duration.ofMillis(100)))
        .toStream()
        .map((key, value) -> new KeyValue<>(key.toString(), value))
        .to("output");
    try (final LogCaptureAppender appender = LogCaptureAppender.createAndRegister(KStreamWindowAggregate.class);
        final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        final TestInputTopic<String, String> inputTopic = driver.createInputTopic(topic, new StringSerializer(), new StringSerializer());
        inputTopic.pipeInput("k", "100", 100L);
        inputTopic.pipeInput("k", "0", 0L);
        inputTopic.pipeInput("k", "1", 1L);
        inputTopic.pipeInput("k", "2", 2L);
        inputTopic.pipeInput("k", "3", 3L);
        inputTopic.pipeInput("k", "4", 4L);
        inputTopic.pipeInput("k", "5", 5L);
        inputTopic.pipeInput("k", "6", 6L);
        assertLatenessMetrics(
            driver,
            // dropped records: how many events get dropped
            is(7.0),
            // max lateness: k:0 is 100 ms late, since its timestamp is 0 but it arrives at stream time 100
            is(100.0),
            // average lateness: (0 + 100 + 99 + 98 + 97 + 96 + 95 + 94) / 8
            is(84.875));
        assertThat(appender.getMessages(), hasItems(
            "Skipping record for expired window. topic=[topic] partition=[0] offset=[1] timestamp=[0] window=[0,10) expiration=[10] streamTime=[100]",
            "Skipping record for expired window. topic=[topic] partition=[0] offset=[2] timestamp=[1] window=[0,10) expiration=[10] streamTime=[100]",
            "Skipping record for expired window. topic=[topic] partition=[0] offset=[3] timestamp=[2] window=[0,10) expiration=[10] streamTime=[100]",
            "Skipping record for expired window. topic=[topic] partition=[0] offset=[4] timestamp=[3] window=[0,10) expiration=[10] streamTime=[100]",
            "Skipping record for expired window. topic=[topic] partition=[0] offset=[5] timestamp=[4] window=[0,10) expiration=[10] streamTime=[100]",
            "Skipping record for expired window. topic=[topic] partition=[0] offset=[6] timestamp=[5] window=[0,10) expiration=[10] streamTime=[100]",
            "Skipping record for expired window. topic=[topic] partition=[0] offset=[7] timestamp=[6] window=[0,10) expiration=[10] streamTime=[100]"));
        final TestOutputTopic<String, String> outputTopic = driver.createOutputTopic("output", new StringDeserializer(), new StringDeserializer());
        assertThat(outputTopic.readRecord(), equalTo(new TestRecord<>("[k@95/105]", "+100", null, 100L)));
        assertThat(outputTopic.readRecord(), equalTo(new TestRecord<>("[k@100/110]", "+100", null, 100L)));
        assertThat(outputTopic.readRecord(), equalTo(new TestRecord<>("[k@5/15]", "+5", null, 5L)));
        assertThat(outputTopic.readRecord(), equalTo(new TestRecord<>("[k@5/15]", "+5+6", null, 6L)));
        assertTrue(outputTopic.isEmpty());
    }
}
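assertLatenessMetrics is a helper defined elsewhere in KStreamWindowAggregateTest. If you want to inspect the same numbers by hand, TopologyTestDriver exposes its metrics registry through metrics(); below is an illustrative sketch that sums a task-level metric by name. Treat the metric names "dropped-records-total", "record-lateness-max", and "record-lateness-avg" as an assumption about what the helper checks, not a confirmed reading of it:

import java.util.Map;
import org.apache.kafka.common.Metric;
import org.apache.kafka.common.MetricName;
import org.apache.kafka.streams.TopologyTestDriver;

// Illustrative: sum the values of a metric with the given name across all tasks.
private static double sumMetric(final TopologyTestDriver driver, final String name) {
    double total = 0.0;
    for (final Map.Entry<MetricName, ? extends Metric> entry : driver.metrics().entrySet()) {
        if (entry.getKey().name().equals(name)) {
            total += ((Number) entry.getValue().metricValue()).doubleValue();
        }
    }
    return total;
}

// e.g. assertThat(sumMetric(driver, "dropped-records-total"), is(7.0));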

Example 5 with TestOutputTopic

Use of org.apache.kafka.streams.TestOutputTopic in the apache/kafka project.

From class KStreamSplitTest, method testKStreamSplit:

@Test
public void testKStreamSplit() {
    final Map<String, KStream<Integer, String>> branches = source.split()
        .branch(isEven, Branched.withConsumer(ks -> ks.to("x2")))
        .branch(isMultipleOfThree, Branched.withConsumer(ks -> ks.to("x3")))
        .branch(isMultipleOfFive, Branched.withConsumer(ks -> ks.to("x5")))
        .noDefaultBranch();
    assertEquals(0, branches.size());
    builder.build();
    withDriver(driver -> {
        final TestOutputTopic<Integer, String> x2 = driver.createOutputTopic("x2", new IntegerDeserializer(), new StringDeserializer());
        final TestOutputTopic<Integer, String> x3 = driver.createOutputTopic("x3", new IntegerDeserializer(), new StringDeserializer());
        final TestOutputTopic<Integer, String> x5 = driver.createOutputTopic("x5", new IntegerDeserializer(), new StringDeserializer());
        assertEquals(Arrays.asList("V0", "V2", "V4", "V6"), x2.readValuesToList());
        assertEquals(Arrays.asList("V3"), x3.readValuesToList());
        assertEquals(Arrays.asList("V5"), x5.readValuesToList());
    });
}
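source, builder, props, the three predicates, and withDriver are fixtures declared elsewhere in KStreamSplitTest. Working backwards from the asserted branch contents (even keys in x2, key 3 in x3, key 5 in x5, key 1 dropped by noDefaultBranch()), here is a plausible sketch of those fixtures; the topic name "source" and the 0..6 key scheme are assumptions:

import java.util.function.Consumer;
import org.apache.kafka.common.serialization.IntegerSerializer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.apache.kafka.streams.TestInputTopic;
import org.apache.kafka.streams.TopologyTestDriver;
import org.apache.kafka.streams.kstream.Predicate;

// Branch predicates: split() routes each record to the first predicate that matches,
// which is why key 0 (even, and also a multiple of 3 and 5) only reaches topic "x2".
private final Predicate<Integer, String> isEven = (key, value) -> key % 2 == 0;
private final Predicate<Integer, String> isMultipleOfThree = (key, value) -> key % 3 == 0;
private final Predicate<Integer, String> isMultipleOfFive = (key, value) -> key % 5 == 0;

// Sketch of withDriver: build the topology, pipe keys 0..6 with values "V0".."V6",
// then hand the driver to the assertion block.
private void withDriver(final Consumer<TopologyTestDriver> test) {
    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        final TestInputTopic<Integer, String> input =
            driver.createInputTopic("source", new IntegerSerializer(), new StringSerializer());
        for (int i = 0; i < 7; i++) {
            input.pipeInput(i, "V" + i);
        }
        test.accept(driver);
    }
}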
