
Example 6 with TestOutputTopic

Use of org.apache.kafka.streams.TestOutputTopic in project kafka by apache.

From the class KStreamSplitTest, method testResultingMap.

@Test
public void testResultingMap() {
    final Map<String, KStream<Integer, String>> branches = source.split(Named.as("foo-"))
        .branch(isEven, Branched.as("bar"))
        .branch(isMultipleOfThree, Branched.withConsumer(ks -> { }))
        .branch(isMultipleOfFive, Branched.withFunction(ks -> null))
        .branch(isNegative, Branched.withFunction(ks -> ks))
        .branch(isMultipleOfSeven)
        .defaultBranch();
    assertEquals(4, branches.size());
    // direct each branched stream to a topic named after its branch
    for (final Map.Entry<String, KStream<Integer, String>> branch : branches.entrySet()) {
        branch.getValue().to(branch.getKey());
    }
    builder.build();
    withDriver(driver -> {
        final TestOutputTopic<Integer, String> even = driver.createOutputTopic("foo-bar", new IntegerDeserializer(), new StringDeserializer());
        final TestOutputTopic<Integer, String> negative = driver.createOutputTopic("foo-4", new IntegerDeserializer(), new StringDeserializer());
        final TestOutputTopic<Integer, String> x7 = driver.createOutputTopic("foo-5", new IntegerDeserializer(), new StringDeserializer());
        final TestOutputTopic<Integer, String> defaultBranch = driver.createOutputTopic("foo-0", new IntegerDeserializer(), new StringDeserializer());
        assertEquals(Arrays.asList("V0", "V2", "V4", "V6"), even.readValuesToList());
        assertEquals(Arrays.asList("V-1"), negative.readValuesToList());
        assertEquals(Arrays.asList("V7"), x7.readValuesToList());
        assertEquals(Arrays.asList("V1"), defaultBranch.readValuesToList());
    });
}
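
The snippet relies on fixtures defined elsewhere in KStreamSplitTest: the builder, the source stream, the key predicates, and the withDriver helper that builds the topology, pipes test records, and hands the driver to the assertion block. A minimal sketch of what that scaffolding could look like; the topic name, predicate bodies, and piped key range are assumptions chosen to be consistent with the asserted outputs, not the project's verbatim code.

// Hypothetical scaffolding for the snippet above; the names mirror the test, but the exact
// definitions (topic name, predicate bodies, piped key range) are assumptions.
private final StreamsBuilder builder = new StreamsBuilder();
private final KStream<Integer, String> source =
    builder.stream("source-topic", Consumed.with(Serdes.Integer(), Serdes.String()));

private final Predicate<Integer, String> isEven = (key, value) -> key % 2 == 0;
private final Predicate<Integer, String> isMultipleOfThree = (key, value) -> key % 3 == 0;
private final Predicate<Integer, String> isMultipleOfFive = (key, value) -> key % 5 == 0;
private final Predicate<Integer, String> isNegative = (key, value) -> key < 0;
private final Predicate<Integer, String> isMultipleOfSeven = (key, value) -> key % 7 == 0;

// Pipes keys -1..7 as values "V-1".."V7", then hands the driver to the test body.
private void withDriver(final Consumer<TopologyTestDriver> test) {
    final Properties props = StreamsTestUtils.getStreamsConfig(Serdes.Integer(), Serdes.String());
    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        final TestInputTopic<Integer, String> input =
            driver.createInputTopic("source-topic", new IntegerSerializer(), new StringSerializer());
        for (int key = -1; key <= 7; key++) {
            input.pipeInput(key, "V" + key);
        }
        test.accept(driver);
    }
}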

Example 7 with TestOutputTopic

Use of org.apache.kafka.streams.TestOutputTopic in project kafka by apache.

From the class KStreamWindowAggregateTest, method shouldLogAndMeterWhenSkippingExpiredWindowByGrace.

@Test
public void shouldLogAndMeterWhenSkippingExpiredWindowByGrace() {
    final StreamsBuilder builder = new StreamsBuilder();
    final String topic = "topic";
    final KStream<String, String> stream1 = builder.stream(topic, Consumed.with(Serdes.String(), Serdes.String()));
    stream1.groupByKey(Grouped.with(Serdes.String(), Serdes.String()))
        .windowedBy(TimeWindows.ofSizeAndGrace(ofMillis(10), ofMillis(90L)).advanceBy(ofMillis(10)))
        .aggregate(
            () -> "",
            MockAggregator.toStringInstance("+"),
            Materialized.<String, String, WindowStore<Bytes, byte[]>>as("topic1-Canonicalized")
                .withValueSerde(Serdes.String())
                .withCachingDisabled()
                .withLoggingDisabled())
        .toStream()
        .map((key, value) -> new KeyValue<>(key.toString(), value))
        .to("output");
    try (final LogCaptureAppender appender = LogCaptureAppender.createAndRegister(KStreamWindowAggregate.class);
        final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        final TestInputTopic<String, String> inputTopic = driver.createInputTopic(topic, new StringSerializer(), new StringSerializer());
        inputTopic.pipeInput("k", "100", 200L);
        inputTopic.pipeInput("k", "0", 100L);
        inputTopic.pipeInput("k", "1", 101L);
        inputTopic.pipeInput("k", "2", 102L);
        inputTopic.pipeInput("k", "3", 103L);
        inputTopic.pipeInput("k", "4", 104L);
        inputTopic.pipeInput("k", "5", 105L);
        inputTopic.pipeInput("k", "6", 6L);
        assertLatenessMetrics(driver, is(7.0), is(194.0), is(97.375));
        assertThat(appender.getMessages(), hasItems(
            "Skipping record for expired window. topic=[topic] partition=[0] offset=[1] timestamp=[100] window=[100,110) expiration=[110] streamTime=[200]",
            "Skipping record for expired window. topic=[topic] partition=[0] offset=[2] timestamp=[101] window=[100,110) expiration=[110] streamTime=[200]",
            "Skipping record for expired window. topic=[topic] partition=[0] offset=[3] timestamp=[102] window=[100,110) expiration=[110] streamTime=[200]",
            "Skipping record for expired window. topic=[topic] partition=[0] offset=[4] timestamp=[103] window=[100,110) expiration=[110] streamTime=[200]",
            "Skipping record for expired window. topic=[topic] partition=[0] offset=[5] timestamp=[104] window=[100,110) expiration=[110] streamTime=[200]",
            "Skipping record for expired window. topic=[topic] partition=[0] offset=[6] timestamp=[105] window=[100,110) expiration=[110] streamTime=[200]",
            "Skipping record for expired window. topic=[topic] partition=[0] offset=[7] timestamp=[6] window=[0,10) expiration=[110] streamTime=[200]"));
        final TestOutputTopic<String, String> outputTopic = driver.createOutputTopic("output", new StringDeserializer(), new StringDeserializer());
        assertThat(outputTopic.readRecord(), equalTo(new TestRecord<>("[k@200/210]", "+100", null, 200L)));
        assertTrue(outputTopic.isEmpty());
    }
}
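
The skipped records follow from the grace-period arithmetic: with a 10 ms window and a 90 ms grace period, the first record at timestamp 200 advances stream time to 200, so any window ending at or before 200 - 90 = 110 is already closed when the later records arrive. A minimal sketch of that rule as implied by the log lines, using made-up names rather than the processor's internal fields:

// Illustrative only: the expiry rule the "Skipping record for expired window" messages describe.
static boolean windowIsExpired(final long windowEndMs, final long graceMs, final long observedStreamTimeMs) {
    final long expirationMs = observedStreamTimeMs - graceMs; // 200 - 90 = 110 in this test
    return windowEndMs <= expirationMs;                       // [100,110) ends at 110 -> expired; [0,10) likewise
}

Only the first record lands in a still-open window ([200,210)), which is why a single record reaches the output topic.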

Example 8 with TestOutputTopic

Use of org.apache.kafka.streams.TestOutputTopic in project kafka by apache.

From the class KStreamSplitTest, method testBranchingWithNoTerminalOperation.

@Test
public void testBranchingWithNoTerminalOperation() {
    final String outputTopicName = "output";
    source.split()
        .branch(isEven, Branched.withConsumer(ks -> ks.to(outputTopicName)))
        .branch(isMultipleOfFive, Branched.withConsumer(ks -> ks.to(outputTopicName)));
    builder.build();
    withDriver(driver -> {
        final TestOutputTopic<Integer, String> outputTopic = driver.createOutputTopic(outputTopicName, new IntegerDeserializer(), new StringDeserializer());
        assertEquals(Arrays.asList("V0", "V2", "V4", "V5", "V6"), outputTopic.readValuesToList());
    });
}
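
Both branches sink to the same topic, so readValuesToList() returns the matched records in the order they were piped. When keys matter as well, TestOutputTopic offers key-aware readers; a short sketch, assuming the same driver and topic wiring as above:

final TestOutputTopic<Integer, String> outputTopic =
    driver.createOutputTopic("output", new IntegerDeserializer(), new StringDeserializer());

// Each read drains whatever is queued, so in practice use one of these per batch of piped input.
final List<String> values = outputTopic.readValuesToList();                      // values in arrival order
final List<KeyValue<Integer, String>> pairs = outputTopic.readKeyValuesToList(); // key/value pairs in arrival order
final Map<Integer, String> latestByKey = outputTopic.readKeyValuesToMap();       // last value seen per key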

Example 9 with TestOutputTopic

Use of org.apache.kafka.streams.TestOutputTopic in project kafka by apache.

From the class KStreamImplTest, method shouldSupportForeignKeyTableTableJoinWithKTableFromKStream.

@Test
public void shouldSupportForeignKeyTableTableJoinWithKTableFromKStream() {
    final Consumed<String, String> consumed = Consumed.with(Serdes.String(), Serdes.String());
    final StreamsBuilder builder = new StreamsBuilder();
    final String input1 = "input1";
    final String input2 = "input2";
    final String output = "output";
    final KTable<String, String> leftTable = builder.stream(input1, consumed).toTable();
    final KTable<String, String> rightTable = builder.stream(input2, consumed).toTable();
    final Function<String, String> extractor = value -> value.split("\\|")[1];
    final ValueJoiner<String, String, String> joiner = (value1, value2) -> "(" + value1 + "," + value2 + ")";
    leftTable.join(rightTable, extractor, joiner).toStream().to(output);
    final Topology topology = builder.build(props);
    final String topologyDescription = topology.describe().toString();
    assertThat(topologyDescription, equalTo("Topologies:\n" + "   Sub-topology: 0\n" + "    Source: KTABLE-SOURCE-0000000016 (topics: [KTABLE-FK-JOIN-SUBSCRIPTION-RESPONSE-0000000014-topic])\n" + "      --> KTABLE-FK-JOIN-SUBSCRIPTION-RESPONSE-RESOLVER-PROCESSOR-0000000017\n" + "    Source: KSTREAM-SOURCE-0000000000 (topics: [input1])\n" + "      --> KSTREAM-TOTABLE-0000000001\n" + "    Processor: KTABLE-FK-JOIN-SUBSCRIPTION-RESPONSE-RESOLVER-PROCESSOR-0000000017 (stores: [KSTREAM-TOTABLE-STATE-STORE-0000000002])\n" + "      --> KTABLE-FK-JOIN-OUTPUT-0000000018\n" + "      <-- KTABLE-SOURCE-0000000016\n" + "    Processor: KSTREAM-TOTABLE-0000000001 (stores: [KSTREAM-TOTABLE-STATE-STORE-0000000002])\n" + "      --> KTABLE-FK-JOIN-SUBSCRIPTION-REGISTRATION-0000000007\n" + "      <-- KSTREAM-SOURCE-0000000000\n" + "    Processor: KTABLE-FK-JOIN-OUTPUT-0000000018 (stores: [])\n" + "      --> KTABLE-TOSTREAM-0000000020\n" + "      <-- KTABLE-FK-JOIN-SUBSCRIPTION-RESPONSE-RESOLVER-PROCESSOR-0000000017\n" + "    Processor: KTABLE-FK-JOIN-SUBSCRIPTION-REGISTRATION-0000000007 (stores: [])\n" + "      --> KTABLE-SINK-0000000008\n" + "      <-- KSTREAM-TOTABLE-0000000001\n" + "    Processor: KTABLE-TOSTREAM-0000000020 (stores: [])\n" + "      --> KSTREAM-SINK-0000000021\n" + "      <-- KTABLE-FK-JOIN-OUTPUT-0000000018\n" + "    Sink: KSTREAM-SINK-0000000021 (topic: output)\n" + "      <-- KTABLE-TOSTREAM-0000000020\n" + "    Sink: KTABLE-SINK-0000000008 (topic: KTABLE-FK-JOIN-SUBSCRIPTION-REGISTRATION-0000000006-topic)\n" + "      <-- KTABLE-FK-JOIN-SUBSCRIPTION-REGISTRATION-0000000007\n" + "\n" + "  Sub-topology: 1\n" + "    Source: KSTREAM-SOURCE-0000000003 (topics: [input2])\n" + "      --> KSTREAM-TOTABLE-0000000004\n" + "    Source: KTABLE-SOURCE-0000000009 (topics: [KTABLE-FK-JOIN-SUBSCRIPTION-REGISTRATION-0000000006-topic])\n" + "      --> KTABLE-FK-JOIN-SUBSCRIPTION-PROCESSOR-0000000011\n" + "    Processor: KSTREAM-TOTABLE-0000000004 (stores: [KSTREAM-TOTABLE-STATE-STORE-0000000005])\n" + "      --> KTABLE-FK-JOIN-SUBSCRIPTION-PROCESSOR-0000000013\n" + "      <-- KSTREAM-SOURCE-0000000003\n" + "    Processor: KTABLE-FK-JOIN-SUBSCRIPTION-PROCESSOR-0000000011 (stores: [KTABLE-FK-JOIN-SUBSCRIPTION-STATE-STORE-0000000010])\n" + "      --> KTABLE-FK-JOIN-SUBSCRIPTION-PROCESSOR-0000000012\n" + "      <-- KTABLE-SOURCE-0000000009\n" + "    Processor: KTABLE-FK-JOIN-SUBSCRIPTION-PROCESSOR-0000000012 (stores: [KSTREAM-TOTABLE-STATE-STORE-0000000005])\n" + "      --> KTABLE-SINK-0000000015\n" + "      <-- KTABLE-FK-JOIN-SUBSCRIPTION-PROCESSOR-0000000011\n" + "    Processor: KTABLE-FK-JOIN-SUBSCRIPTION-PROCESSOR-0000000013 (stores: [KTABLE-FK-JOIN-SUBSCRIPTION-STATE-STORE-0000000010])\n" + "      --> KTABLE-SINK-0000000015\n" + "      <-- KSTREAM-TOTABLE-0000000004\n" + "    Sink: KTABLE-SINK-0000000015 (topic: KTABLE-FK-JOIN-SUBSCRIPTION-RESPONSE-0000000014-topic)\n" + "      <-- KTABLE-FK-JOIN-SUBSCRIPTION-PROCESSOR-0000000012, KTABLE-FK-JOIN-SUBSCRIPTION-PROCESSOR-0000000013\n\n"));
    try (final TopologyTestDriver driver = new TopologyTestDriver(topology, props)) {
        final TestInputTopic<String, String> left = driver.createInputTopic(input1, new StringSerializer(), new StringSerializer());
        final TestInputTopic<String, String> right = driver.createInputTopic(input2, new StringSerializer(), new StringSerializer());
        final TestOutputTopic<String, String> outputTopic = driver.createOutputTopic(output, new StringDeserializer(), new StringDeserializer());
        // Pre-populate the RHS records. This test is all about what happens when we add/remove LHS records
        right.pipeInput("rhs1", "rhsValue1");
        right.pipeInput("rhs2", "rhsValue2");
        // this unreferenced FK won't show up in any results
        right.pipeInput("rhs3", "rhsValue3");
        assertThat(outputTopic.readKeyValuesToMap(), is(emptyMap()));
        left.pipeInput("lhs1", "lhsValue1|rhs1");
        left.pipeInput("lhs2", "lhsValue2|rhs2");
        final Map<String, String> expected = mkMap(mkEntry("lhs1", "(lhsValue1|rhs1,rhsValue1)"), mkEntry("lhs2", "(lhsValue2|rhs2,rhsValue2)"));
        assertThat(outputTopic.readKeyValuesToMap(), is(expected));
        // Add another reference to an existing FK
        left.pipeInput("lhs3", "lhsValue3|rhs1");
        assertThat(outputTopic.readKeyValuesToMap(), is(mkMap(mkEntry("lhs3", "(lhsValue3|rhs1,rhsValue1)"))));
        left.pipeInput("lhs1", (String) null);
        assertThat(outputTopic.readKeyValuesToMap(), is(mkMap(mkEntry("lhs1", null))));
    }
}
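
The props object is defined elsewhere in KStreamImplTest. For running the snippet standalone, a plausible construction uses the StreamsTestUtils helper already listed among the imports; the serde choice here is an assumption.

// Hypothetical stand-in for the test class's shared props field; the real field may differ.
private final Properties props = StreamsTestUtils.getStreamsConfig(Serdes.String(), Serdes.String());

Note also that the final pipeInput("lhs1", (String) null) acts as a tombstone: it deletes lhs1 from the left table, and the join emits lhs1 with a null value, which is exactly what the last assertion checks.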

Aggregations

Properties (java.util.Properties): 9
Serdes (org.apache.kafka.common.serialization.Serdes): 9
StringDeserializer (org.apache.kafka.common.serialization.StringDeserializer): 9
StringSerializer (org.apache.kafka.common.serialization.StringSerializer): 9
StreamsBuilder (org.apache.kafka.streams.StreamsBuilder): 9
TestInputTopic (org.apache.kafka.streams.TestInputTopic): 9
TestOutputTopic (org.apache.kafka.streams.TestOutputTopic): 9
TopologyTestDriver (org.apache.kafka.streams.TopologyTestDriver): 9
Consumed (org.apache.kafka.streams.kstream.Consumed): 9
KStream (org.apache.kafka.streams.kstream.KStream): 9
StreamsTestUtils (org.apache.kafka.test.StreamsTestUtils): 9
Assert.assertEquals (org.junit.Assert.assertEquals): 9
Test (org.junit.Test): 9
Arrays (java.util.Arrays): 7
Map (java.util.Map): 7
Topology (org.apache.kafka.streams.Topology): 7
Named (org.apache.kafka.streams.kstream.Named): 7
List (java.util.List): 6
KeyValue (org.apache.kafka.streams.KeyValue): 6
Grouped (org.apache.kafka.streams.kstream.Grouped): 6