
Example 6 with TestInputTopic

Use of org.apache.kafka.streams.TestInputTopic in project kafka by apache.

The class RepartitionOptimizingTest, method runTest. The helper builds a topology in which a key-changing map feeds a count, an aggregate, a reduce, and a stream-stream join, then verifies the topology description and the number of repartition topics with and without optimization.

private void runTest(final String optimizationConfig, final int expectedNumberRepartitionTopics) {
    final StreamsBuilder builder = new StreamsBuilder();
    final KStream<String, String> sourceStream = builder.stream(INPUT_TOPIC, Consumed.with(Serdes.String(), Serdes.String()).withName("sourceStream"));
    final KStream<String, String> mappedStream = sourceStream.map((k, v) -> KeyValue.pair(k.toUpperCase(Locale.getDefault()), v), Named.as("source-map"));
    mappedStream.filter((k, v) -> k.equals("B"), Named.as("process-filter")).mapValues(v -> v.toUpperCase(Locale.getDefault()), Named.as("process-mapValues")).process(() -> new SimpleProcessor(processorValueCollector), Named.as("process"));
    final KStream<String, Long> countStream = mappedStream.groupByKey(Grouped.as("count-groupByKey")).count(Named.as("count"), Materialized.<String, Long>as(Stores.inMemoryKeyValueStore("count-store")).withKeySerde(Serdes.String()).withValueSerde(Serdes.Long())).toStream(Named.as("count-toStream"));
    countStream.to(COUNT_TOPIC, Produced.with(Serdes.String(), Serdes.Long()).withName("count-to"));
    mappedStream.groupByKey(Grouped.as("aggregate-groupByKey")).aggregate(initializer, aggregator, Named.as("aggregate"), Materialized.<String, Integer>as(Stores.inMemoryKeyValueStore("aggregate-store")).withKeySerde(Serdes.String()).withValueSerde(Serdes.Integer())).toStream(Named.as("aggregate-toStream")).to(AGGREGATION_TOPIC, Produced.with(Serdes.String(), Serdes.Integer()).withName("reduce-to"));
    // adding operators for case where the repartition node is further downstream
    mappedStream.filter((k, v) -> true, Named.as("reduce-filter")).peek((k, v) -> System.out.println(k + ":" + v), Named.as("reduce-peek")).groupByKey(Grouped.as("reduce-groupByKey")).reduce(reducer, Named.as("reducer"), Materialized.as(Stores.inMemoryKeyValueStore("reduce-store"))).toStream(Named.as("reduce-toStream")).to(REDUCE_TOPIC, Produced.with(Serdes.String(), Serdes.String()));
    mappedStream.filter((k, v) -> k.equals("A"), Named.as("join-filter")).join(countStream, (v1, v2) -> v1 + ":" + v2.toString(), JoinWindows.of(ofMillis(5000)), StreamJoined.<String, String, Long>with(Stores.inMemoryWindowStore("join-store", ofDays(1), ofMillis(10000), true), Stores.inMemoryWindowStore("other-join-store", ofDays(1), ofMillis(10000), true)).withName("join").withKeySerde(Serdes.String()).withValueSerde(Serdes.String()).withOtherValueSerde(Serdes.Long())).to(JOINED_TOPIC, Produced.as("join-to"));
    streamsConfiguration.setProperty(StreamsConfig.TOPOLOGY_OPTIMIZATION_CONFIG, optimizationConfig);
    final Topology topology = builder.build(streamsConfiguration);
    topologyTestDriver = new TopologyTestDriver(topology, streamsConfiguration);
    final TestInputTopic<String, String> inputTopicA = topologyTestDriver.createInputTopic(INPUT_TOPIC, stringSerializer, stringSerializer);
    final TestOutputTopic<String, Long> countOutputTopic = topologyTestDriver.createOutputTopic(COUNT_TOPIC, stringDeserializer, new LongDeserializer());
    final TestOutputTopic<String, Integer> aggregationOutputTopic = topologyTestDriver.createOutputTopic(AGGREGATION_TOPIC, stringDeserializer, new IntegerDeserializer());
    final TestOutputTopic<String, String> reduceOutputTopic = topologyTestDriver.createOutputTopic(REDUCE_TOPIC, stringDeserializer, stringDeserializer);
    final TestOutputTopic<String, String> joinedOutputTopic = topologyTestDriver.createOutputTopic(JOINED_TOPIC, stringDeserializer, stringDeserializer);
    inputTopicA.pipeKeyValueList(getKeyValues());
    // Verify the topology
    final String topologyString = topology.describe().toString();
    if (optimizationConfig.equals(StreamsConfig.OPTIMIZE)) {
        assertEquals(EXPECTED_OPTIMIZED_TOPOLOGY, topologyString);
    } else {
        assertEquals(EXPECTED_UNOPTIMIZED_TOPOLOGY, topologyString);
    }
    // Verify the number of repartition topics
    assertEquals(expectedNumberRepartitionTopics, getCountOfRepartitionTopicsFound(topologyString));
    // Verify the values collected by the processor
    assertThat(processorValueCollector.size(), equalTo(3));
    assertThat(processorValueCollector, equalTo(expectedCollectedProcessorValues));
    // Verify the expected output
    assertThat(countOutputTopic.readKeyValuesToMap(), equalTo(keyValueListToMap(expectedCountKeyValues)));
    assertThat(aggregationOutputTopic.readKeyValuesToMap(), equalTo(keyValueListToMap(expectedAggKeyValues)));
    assertThat(reduceOutputTopic.readKeyValuesToMap(), equalTo(keyValueListToMap(expectedReduceKeyValues)));
    assertThat(joinedOutputTopic.readKeyValuesToMap(), equalTo(keyValueListToMap(expectedJoinKeyValues)));
}
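
The assertions above rely on two helpers defined in the test class but not shown on this page. A minimal sketch of what they plausibly look like, assuming repartition topics can be counted by matching the -repartition suffix that Kafka Streams gives repartition sink nodes in the topology description (the real versions live in RepartitionOptimizingTest and may differ):

// Sketch only; requires java.util.regex.Pattern/Matcher, java.util.HashMap/Map/List,
// and org.apache.kafka.streams.KeyValue.
private static final Pattern REPARTITION_TOPIC_PATTERN = Pattern.compile("Sink: .*-repartition");

// Count repartition sinks in topology.describe().toString().
private static int getCountOfRepartitionTopicsFound(final String topologyString) {
    final Matcher matcher = REPARTITION_TOPIC_PATTERN.matcher(topologyString);
    int count = 0;
    while (matcher.find()) {
        count++;
    }
    return count;
}

// Collapse an expected List<KeyValue<K, V>> into a Map so it can be compared with
// TestOutputTopic.readKeyValuesToMap(), which keeps only the latest value per key.
private static <K, V> Map<K, V> keyValueListToMap(final List<KeyValue<K, V>> keyValuePairs) {
    final Map<K, V> map = new HashMap<>();
    for (final KeyValue<K, V> pair : keyValuePairs) {
        map.put(pair.key, pair.value);
    }
    return map;
}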

Example 7 with TestInputTopic

Use of org.apache.kafka.streams.TestInputTopic in project kafka by apache.

The class RepartitionWithMergeOptimizingTest, method runTest. This variant merges two re-keyed streams before two independent counts, so optimization can collapse the downstream repartition topics into one.

private void runTest(final String optimizationConfig, final int expectedNumberRepartitionTopics) {
    streamsConfiguration.setProperty(StreamsConfig.TOPOLOGY_OPTIMIZATION_CONFIG, optimizationConfig);
    final StreamsBuilder builder = new StreamsBuilder();
    final KStream<String, String> sourceAStream = builder.stream(INPUT_A_TOPIC, Consumed.with(Serdes.String(), Serdes.String()).withName("sourceAStream"));
    final KStream<String, String> sourceBStream = builder.stream(INPUT_B_TOPIC, Consumed.with(Serdes.String(), Serdes.String()).withName("sourceBStream"));
    final KStream<String, String> mappedAStream = sourceAStream.map((k, v) -> KeyValue.pair(v.split(":")[0], v), Named.as("mappedAStream"));
    final KStream<String, String> mappedBStream = sourceBStream.map((k, v) -> KeyValue.pair(v.split(":")[0], v), Named.as("mappedBStream"));
    final KStream<String, String> mergedStream = mappedAStream.merge(mappedBStream, Named.as("mergedStream"));
    mergedStream.groupByKey(Grouped.as("long-groupByKey"))
        .count(Named.as("long-count"), Materialized.as(Stores.inMemoryKeyValueStore("long-store")))
        .toStream(Named.as("long-toStream"))
        .to(COUNT_TOPIC, Produced.with(Serdes.String(), Serdes.Long()).withName("long-to"));
    mergedStream.groupByKey(Grouped.as("string-groupByKey"))
        .count(Named.as("string-count"), Materialized.as(Stores.inMemoryKeyValueStore("string-store")))
        .toStream(Named.as("string-toStream"))
        .mapValues(v -> v.toString(), Named.as("string-mapValues"))
        .to(STRING_COUNT_TOPIC, Produced.with(Serdes.String(), Serdes.String()).withName("string-to"));
    final Topology topology = builder.build(streamsConfiguration);
    topologyTestDriver = new TopologyTestDriver(topology, streamsConfiguration);
    final TestInputTopic<String, String> inputTopicA = topologyTestDriver.createInputTopic(INPUT_A_TOPIC, stringSerializer, stringSerializer);
    final TestInputTopic<String, String> inputTopicB = topologyTestDriver.createInputTopic(INPUT_B_TOPIC, stringSerializer, stringSerializer);
    final TestOutputTopic<String, Long> countOutputTopic = topologyTestDriver.createOutputTopic(COUNT_TOPIC, stringDeserializer, new LongDeserializer());
    final TestOutputTopic<String, String> stringCountOutputTopic = topologyTestDriver.createOutputTopic(STRING_COUNT_TOPIC, stringDeserializer, stringDeserializer);
    inputTopicA.pipeKeyValueList(getKeyValues());
    inputTopicB.pipeKeyValueList(getKeyValues());
    final String topologyString = topology.describe().toString();
    // Verify the topology
    if (optimizationConfig.equals(StreamsConfig.OPTIMIZE)) {
        assertEquals(EXPECTED_OPTIMIZED_TOPOLOGY, topologyString);
    } else {
        assertEquals(EXPECTED_UNOPTIMIZED_TOPOLOGY, topologyString);
    }
    // Verify the number of repartition topics
    assertEquals(expectedNumberRepartitionTopics, getCountOfRepartitionTopicsFound(topologyString));
    // Verify the expected output
    assertThat(countOutputTopic.readKeyValuesToMap(), equalTo(keyValueListToMap(expectedCountKeyValues)));
    assertThat(stringCountOutputTopic.readKeyValuesToMap(), equalTo(keyValueListToMap(expectedStringCountKeyValues)));
}
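
runTest is parameterized by the optimization flag and the expected number of repartition topics. A hedged sketch of how the two driving test cases could invoke it (method names are illustrative, not copied from the source):

@Test
public void shouldUseOneRepartitionTopicWhenOptimized() {
    // Optimized: the merged, re-keyed stream is repartitioned once and shared by both counts.
    runTest(StreamsConfig.OPTIMIZE, 1);
}

@Test
public void shouldUseTwoRepartitionTopicsWhenNotOptimized() {
    // Unoptimized: each groupByKey triggers its own repartition topic.
    runTest(StreamsConfig.NO_OPTIMIZATION, 2);
}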

Example 8 with TestInputTopic

Use of org.apache.kafka.streams.TestInputTopic in project kafka by apache.

The class KTableKTableLeftJoinTest, method shouldNotThrowIllegalStateExceptionWhenMultiCacheEvictions.

/**
 * This test was written to reproduce https://issues.apache.org/jira/browse/KAFKA-4492
 * It is based on a fairly complicated join used by the developer who reported the bug.
 * Before the fix this would trigger an IllegalStateException.
 */
@Test
public void shouldNotThrowIllegalStateExceptionWhenMultiCacheEvictions() {
    final String agg = "agg";
    final String tableOne = "tableOne";
    final String tableTwo = "tableTwo";
    final String tableThree = "tableThree";
    final String tableFour = "tableFour";
    final String tableFive = "tableFive";
    final String tableSix = "tableSix";
    final String[] inputs = { agg, tableOne, tableTwo, tableThree, tableFour, tableFive, tableSix };
    final StreamsBuilder builder = new StreamsBuilder();
    final Consumed<Long, String> consumed = Consumed.with(Serdes.Long(), Serdes.String());
    final KTable<Long, String> aggTable = builder
        .table(agg, consumed, Materialized.as(Stores.inMemoryKeyValueStore("agg-base-store")))
        .groupBy(KeyValue::new, Grouped.with(Serdes.Long(), Serdes.String()))
        .reduce(MockReducer.STRING_ADDER, MockReducer.STRING_ADDER, Materialized.as(Stores.inMemoryKeyValueStore("agg-store")));
    final KTable<Long, String> one = builder.table(tableOne, consumed, Materialized.as(Stores.inMemoryKeyValueStore("tableOne-base-store")));
    final KTable<Long, String> two = builder.table(tableTwo, consumed, Materialized.as(Stores.inMemoryKeyValueStore("tableTwo-base-store")));
    final KTable<Long, String> three = builder.table(tableThree, consumed, Materialized.as(Stores.inMemoryKeyValueStore("tableThree-base-store")));
    final KTable<Long, String> four = builder.table(tableFour, consumed, Materialized.as(Stores.inMemoryKeyValueStore("tableFour-base-store")));
    final KTable<Long, String> five = builder.table(tableFive, consumed, Materialized.as(Stores.inMemoryKeyValueStore("tableFive-base-store")));
    final KTable<Long, String> six = builder.table(tableSix, consumed, Materialized.as(Stores.inMemoryKeyValueStore("tableSix-base-store")));
    final ValueMapper<String, String> mapper = value -> value.toUpperCase(Locale.ROOT);
    final KTable<Long, String> seven = one.mapValues(mapper);
    final KTable<Long, String> eight = six.leftJoin(seven, MockValueJoiner.TOSTRING_JOINER);
    aggTable.leftJoin(one, MockValueJoiner.TOSTRING_JOINER)
        .leftJoin(two, MockValueJoiner.TOSTRING_JOINER)
        .leftJoin(three, MockValueJoiner.TOSTRING_JOINER)
        .leftJoin(four, MockValueJoiner.TOSTRING_JOINER)
        .leftJoin(five, MockValueJoiner.TOSTRING_JOINER)
        .leftJoin(eight, MockValueJoiner.TOSTRING_JOINER)
        .mapValues(mapper);
    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        final String[] values = { "a", "AA", "BBB", "CCCC", "DD", "EEEEEEEE", "F", "GGGGGGGGGGGGGGG", "HHH", "IIIIIIIIII", "J", "KK", "LLLL", "MMMMMMMMMMMMMMMMMMMMMM", "NNNNN", "O", "P", "QQQQQ", "R", "SSSS", "T", "UU", "VVVVVVVVVVVVVVVVVVV" };
        TestInputTopic<Long, String> inputTopic;
        final Random random = new Random();
        for (int i = 0; i < 1000; i++) {
            for (final String input : inputs) {
                final Long key = (long) random.nextInt(1000);
                final String value = values[random.nextInt(values.length)];
                inputTopic = driver.createInputTopic(input, Serdes.Long().serializer(), Serdes.String().serializer());
                inputTopic.pipeInput(key, value);
            }
        }
    }
}
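
One TestInputTopic per topic name is enough; the loop above re-creates the topic object for every record, which works but is redundant. An equivalent sketch that builds the topics once up front, reusing the driver, inputs, values, and random variables from the test (requires java.util.Map and java.util.HashMap):

// Create each TestInputTopic once, then reuse it for all 1000 rounds.
final Map<String, TestInputTopic<Long, String>> inputTopics = new HashMap<>();
for (final String input : inputs) {
    inputTopics.put(input, driver.createInputTopic(input, Serdes.Long().serializer(), Serdes.String().serializer()));
}
for (int i = 0; i < 1000; i++) {
    for (final String input : inputs) {
        inputTopics.get(input).pipeInput((long) random.nextInt(1000), values[random.nextInt(values.length)]);
    }
}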

Example 9 with TestInputTopic

Use of org.apache.kafka.streams.TestInputTopic in project kafka by apache.

The class KTableMapValuesTest, method testNotSendingOldValue. The test verifies that, with old-value forwarding disabled, each downstream Change carries only the new value and a null old value.

@Test
public void testNotSendingOldValue() {
    final StreamsBuilder builder = new StreamsBuilder();
    final String topic1 = "topic1";
    final KTableImpl<String, String, String> table1 = (KTableImpl<String, String, String>) builder.table(topic1, consumed);
    final KTableImpl<String, String, Integer> table2 = (KTableImpl<String, String, Integer>) table1.mapValues(s -> Integer.valueOf(s));
    final MockApiProcessorSupplier<String, Integer, Void, Void> supplier = new MockApiProcessorSupplier<>();
    final Topology topology = builder.build().addProcessor("proc", supplier, table2.name);
    try (final TopologyTestDriver driver = new TopologyTestDriver(topology, props)) {
        final TestInputTopic<String, String> inputTopic1 = driver.createInputTopic(topic1, new StringSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
        final MockApiProcessor<String, Integer, Void, Void> proc = supplier.theCapturedProcessor();
        assertFalse(table1.sendingOldValueEnabled());
        assertFalse(table2.sendingOldValueEnabled());
        inputTopic1.pipeInput("A", "01", 5L);
        inputTopic1.pipeInput("B", "01", 10L);
        inputTopic1.pipeInput("C", "01", 15L);
        proc.checkAndClearProcessResult(new KeyValueTimestamp<>("A", new Change<>(1, null), 5), new KeyValueTimestamp<>("B", new Change<>(1, null), 10), new KeyValueTimestamp<>("C", new Change<>(1, null), 15));
        inputTopic1.pipeInput("A", "02", 10L);
        inputTopic1.pipeInput("B", "02", 8L);
        proc.checkAndClearProcessResult(new KeyValueTimestamp<>("A", new Change<>(2, null), 10), new KeyValueTimestamp<>("B", new Change<>(2, null), 8));
        inputTopic1.pipeInput("A", "03", 20L);
        proc.checkAndClearProcessResult(new KeyValueTimestamp<>("A", new Change<>(3, null), 20));
        inputTopic1.pipeInput("A", (String) null, 30L);
        proc.checkAndClearProcessResult(new KeyValueTimestamp<>("A", new Change<>(null, null), 30));
    }
}
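
For contrast, when old-value forwarding is enabled on the table, each Change carries the previous value instead of null. A hedged sketch of the flipped setup, reusing the names above (the enableSendingOldValues signature varies across Kafka versions; recent ones take a force-materialization flag):

// Enable old-value forwarding before building the topology.
table2.enableSendingOldValues(true);
// After key "A" moves from "01" to "02", the processor would then see a Change
// carrying both values, e.g. new KeyValueTimestamp<>("A", new Change<>(2, 1), 10).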

Example 10 with TestInputTopic

Use of org.apache.kafka.streams.TestInputTopic in project kafka by apache.

The class KStreamSlidingWindowAggregateTest, method shouldLogAndMeterWhenSkippingNullKey. A record with a null key cannot be grouped, so the sliding-window aggregate drops it and logs a warning.

@Test
public void shouldLogAndMeterWhenSkippingNullKey() {
    final String builtInMetricsVersion = StreamsConfig.METRICS_LATEST;
    final StreamsBuilder builder = new StreamsBuilder();
    final String topic = "topic";
    builder.stream(topic, Consumed.with(Serdes.String(), Serdes.String()))
        .groupByKey(Grouped.with(Serdes.String(), Serdes.String()))
        .windowedBy(SlidingWindows.ofTimeDifferenceAndGrace(ofMillis(10), ofMillis(100)))
        .aggregate(
            MockInitializer.STRING_INIT,
            MockAggregator.toStringInstance("+"),
            Materialized.<String, String, WindowStore<Bytes, byte[]>>as("topic1-Canonicalized").withValueSerde(Serdes.String()));
    props.setProperty(StreamsConfig.BUILT_IN_METRICS_VERSION_CONFIG, builtInMetricsVersion);
    try (final LogCaptureAppender appender = LogCaptureAppender.createAndRegister(KStreamSlidingWindowAggregate.class);
        final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        final TestInputTopic<String, String> inputTopic = driver.createInputTopic(topic, new StringSerializer(), new StringSerializer());
        inputTopic.pipeInput(null, "1");
        assertThat(
            appender.getEvents().stream()
                .filter(e -> e.getLevel().equals("WARN"))
                .map(Event::getMessage)
                .collect(Collectors.toList()),
            hasItem("Skipping record due to null key or value. topic=[topic] partition=[0] offset=[0]"));
    }
}
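
The method name also promises metering, but the snippet only asserts on the WARN log. A hedged sketch of the companion metric assertion, assuming the task-level dropped-records sensor of the latest built-in metrics version (group, name, and tag values are assumptions based on Kafka Streams' built-in metrics, not copied from the source):

// Hypothetical companion check: the skipped record should bump dropped-records-total.
final MetricName dropTotal = new MetricName(
    "dropped-records-total",
    "stream-task-metrics",
    "The total number of dropped records",
    mkMap(mkEntry("thread-id", Thread.currentThread().getName()), mkEntry("task-id", "0_0")));
assertThat((Double) driver.metrics().get(dropTotal).metricValue(), is(1.0));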

Aggregations

Classes most often used together with TestInputTopic across these examples, with usage counts:

TestInputTopic (org.apache.kafka.streams.TestInputTopic): 27
TopologyTestDriver (org.apache.kafka.streams.TopologyTestDriver): 27
Properties (java.util.Properties): 25
Serdes (org.apache.kafka.common.serialization.Serdes): 25
StreamsBuilder (org.apache.kafka.streams.StreamsBuilder): 25
Consumed (org.apache.kafka.streams.kstream.Consumed): 25
Test (org.junit.Test): 25
StringSerializer (org.apache.kafka.common.serialization.StringSerializer): 24
KeyValueTimestamp (org.apache.kafka.streams.KeyValueTimestamp): 21
Duration (java.time.Duration): 20
KStream (org.apache.kafka.streams.kstream.KStream): 19
StreamsTestUtils (org.apache.kafka.test.StreamsTestUtils): 19
Assert.assertEquals (org.junit.Assert.assertEquals): 18
Materialized (org.apache.kafka.streams.kstream.Materialized): 17
MockApiProcessorSupplier (org.apache.kafka.test.MockApiProcessorSupplier): 17
MatcherAssert.assertThat (org.hamcrest.MatcherAssert.assertThat): 17
KeyValue (org.apache.kafka.streams.KeyValue): 16
StringDeserializer (org.apache.kafka.common.serialization.StringDeserializer): 15
KTable (org.apache.kafka.streams.kstream.KTable): 15
Arrays.asList (java.util.Arrays.asList): 14
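
Taken together, the examples share one skeleton: build a Topology, wrap it in a TopologyTestDriver, create typed test topics, pipe records, and assert on the output. A minimal self-contained sketch of that skeleton (topic names and the pass-through topology are illustrative):

import java.util.Properties;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.TestInputTopic;
import org.apache.kafka.streams.TestOutputTopic;
import org.apache.kafka.streams.TopologyTestDriver;
import org.apache.kafka.streams.kstream.Consumed;

public class PassThroughDriverExample {

    public static void main(final String[] args) {
        final StreamsBuilder builder = new StreamsBuilder();
        // Illustrative topology: forward records from "in" to "out" unchanged.
        builder.stream("in", Consumed.with(Serdes.String(), Serdes.String())).to("out");

        final Properties props = new Properties();
        props.put(StreamsConfig.APPLICATION_ID_CONFIG, "pass-through-example");
        // The test driver never opens a network connection; the value is a placeholder.
        props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "dummy:1234");

        try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
            final TestInputTopic<String, String> in =
                driver.createInputTopic("in", new StringSerializer(), new StringSerializer());
            final TestOutputTopic<String, String> out =
                driver.createOutputTopic("out", new StringDeserializer(), new StringDeserializer());
            in.pipeInput("A", "1");
            System.out.println(out.readKeyValue());  // KeyValue(A, 1)
        }
    }
}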