Example 11 with Consumed

Use of org.apache.kafka.streams.kstream.Consumed in project kafka by apache.

From the class KTableAggregateTest, method testAggRepartition.

@Test
public void testAggRepartition() {
    final StreamsBuilder builder = new StreamsBuilder();
    final String topic1 = "topic1";
    final KTable<String, String> table1 = builder.table(topic1, consumed);
    final KTable<String, String> table2 = table1.groupBy((key, value) -> {
        switch(key) {
            case "null":
                return KeyValue.pair(null, value);
            case "NULL":
                return null;
            default:
                return KeyValue.pair(value, value);
        }
    }, stringSerialized).aggregate(
        MockInitializer.STRING_INIT,
        MockAggregator.TOSTRING_ADDER,
        MockAggregator.TOSTRING_REMOVER,
        Materialized.<String, String, KeyValueStore<Bytes, byte[]>>as("topic1-Canonized").withValueSerde(stringSerde));
    table2.toStream().process(supplier);
    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), CONFIG, Instant.ofEpochMilli(0L))) {
        final TestInputTopic<String, String> inputTopic = driver.createInputTopic(topic1, new StringSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
        inputTopic.pipeInput("A", "1", 10L);
        inputTopic.pipeInput("A", (String) null, 15L);
        inputTopic.pipeInput("A", "1", 12L);
        inputTopic.pipeInput("B", "2", 20L);
        inputTopic.pipeInput("null", "3", 25L);
        inputTopic.pipeInput("B", "4", 23L);
        inputTopic.pipeInput("NULL", "5", 24L);
        inputTopic.pipeInput("B", "7", 22L);
        assertEquals(asList(
            new KeyValueTimestamp<>("1", "0+1", 10),
            new KeyValueTimestamp<>("1", "0+1-1", 15),
            new KeyValueTimestamp<>("1", "0+1-1+1", 15),
            new KeyValueTimestamp<>("2", "0+2", 20),
            new KeyValueTimestamp<>("2", "0+2-2", 23),
            new KeyValueTimestamp<>("4", "0+4", 23),
            new KeyValueTimestamp<>("4", "0+4-4", 23),
            new KeyValueTimestamp<>("7", "0+7", 22)),
            supplier.theCapturedProcessor().processed());
    }
}
Also used : StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) StreamsConfig(org.apache.kafka.streams.StreamsConfig) MockInitializer(org.apache.kafka.test.MockInitializer) Utils.mkProperties(org.apache.kafka.common.utils.Utils.mkProperties) MockApiProcessor(org.apache.kafka.test.MockApiProcessor) Utils.mkMap(org.apache.kafka.common.utils.Utils.mkMap) MockApiProcessorSupplier(org.apache.kafka.test.MockApiProcessorSupplier) Serde(org.apache.kafka.common.serialization.Serde) Arrays.asList(java.util.Arrays.asList) KeyValueStore(org.apache.kafka.streams.state.KeyValueStore) Duration(java.time.Duration) Serdes(org.apache.kafka.common.serialization.Serdes) StringSerializer(org.apache.kafka.common.serialization.StringSerializer) TopologyTestDriver(org.apache.kafka.streams.TopologyTestDriver) StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) MockMapper(org.apache.kafka.test.MockMapper) KTable(org.apache.kafka.streams.kstream.KTable) Properties(java.util.Properties) TestUtils(org.apache.kafka.test.TestUtils) Consumed(org.apache.kafka.streams.kstream.Consumed) KeyValue(org.apache.kafka.streams.KeyValue) Test(org.junit.Test) Instant(java.time.Instant) KeyValueTimestamp(org.apache.kafka.streams.KeyValueTimestamp) Grouped(org.apache.kafka.streams.kstream.Grouped) MockAggregator(org.apache.kafka.test.MockAggregator) Bytes(org.apache.kafka.common.utils.Bytes) Utils.mkEntry(org.apache.kafka.common.utils.Utils.mkEntry) Materialized(org.apache.kafka.streams.kstream.Materialized) TestInputTopic(org.apache.kafka.streams.TestInputTopic) Assert.assertEquals(org.junit.Assert.assertEquals) TopologyTestDriver(org.apache.kafka.streams.TopologyTestDriver) KeyValueStore(org.apache.kafka.streams.state.KeyValueStore) StringSerializer(org.apache.kafka.common.serialization.StringSerializer) KeyValueTimestamp(org.apache.kafka.streams.KeyValueTimestamp) Test(org.junit.Test)
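
The fields consumed, stringSerde, stringSerialized, supplier and CONFIG are not shown in this snippet; they come from the test class setup that the listing omits. A minimal sketch of plausible definitions, assuming string serdes throughout (the actual fields in KTableAggregateTest may differ):

private final Serde<String> stringSerde = Serdes.String();
// Consumed tells builder.table() which serdes to use when reading the source topic
private final Consumed<String, String> consumed = Consumed.with(stringSerde, stringSerde);
// Grouped configures the repartition step introduced by groupBy()
private final Grouped<String, String> stringSerialized = Grouped.with(stringSerde, stringSerde);
// Captures the records forwarded by table2.toStream().process(...) for the assertion
private final MockApiProcessorSupplier<String, String, Void, Void> supplier = new MockApiProcessorSupplier<>();
// Hypothetical driver configuration; the real constant may use different values
private static final Properties CONFIG = mkProperties(mkMap(
    mkEntry(StreamsConfig.APPLICATION_ID_CONFIG, "ktable-aggregate-test"),
    mkEntry(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9091"),
    mkEntry(StreamsConfig.STATE_DIR_CONFIG, TestUtils.tempDirectory().getPath()),
    mkEntry(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.StringSerde.class.getName()),
    mkEntry(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.StringSerde.class.getName())));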

Example 12 with Consumed

Use of org.apache.kafka.streams.kstream.Consumed in project kafka by apache.

From the class KTableImplTest, method testStateStoreLazyEval.

@Test
public void testStateStoreLazyEval() {
    final StreamsBuilder builder = new StreamsBuilder();
    final String topic1 = "topic1";
    final String topic2 = "topic2";
    final KTableImpl<String, String, String> table1 = (KTableImpl<String, String, String>) builder.table(topic1, consumed);
    builder.table(topic2, consumed);
    final KTableImpl<String, String, Integer> table1Mapped = (KTableImpl<String, String, Integer>) table1.mapValues(s -> Integer.valueOf(s));
    table1Mapped.filter((key, value) -> (value % 2) == 0);
    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        assertEquals(0, driver.getAllStateStores().size());
    }
}
Also used : StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) Produced(org.apache.kafka.streams.kstream.Produced) MockReducer(org.apache.kafka.test.MockReducer) MockApiProcessor(org.apache.kafka.test.MockApiProcessor) MockApiProcessorSupplier(org.apache.kafka.test.MockApiProcessorSupplier) Serde(org.apache.kafka.common.serialization.Serde) Arrays.asList(java.util.Arrays.asList) KeyValueStore(org.apache.kafka.streams.state.KeyValueStore) SourceNode(org.apache.kafka.streams.processor.internals.SourceNode) Serdes(org.apache.kafka.common.serialization.Serdes) StringSerializer(org.apache.kafka.common.serialization.StringSerializer) ValueMapperWithKey(org.apache.kafka.streams.kstream.ValueMapperWithKey) MockValueJoiner(org.apache.kafka.test.MockValueJoiner) TopologyTestDriverWrapper(org.apache.kafka.streams.TopologyTestDriverWrapper) MockMapper(org.apache.kafka.test.MockMapper) KeyValue(org.apache.kafka.streams.KeyValue) Bytes(org.apache.kafka.common.utils.Bytes) ProcessorContext(org.apache.kafka.streams.processor.ProcessorContext) List(java.util.List) ValueJoiner(org.apache.kafka.streams.kstream.ValueJoiner) Materialized(org.apache.kafka.streams.kstream.Materialized) Matchers.is(org.hamcrest.Matchers.is) Topology(org.apache.kafka.streams.Topology) MockInitializer(org.apache.kafka.test.MockInitializer) Assert.assertThrows(org.junit.Assert.assertThrows) EasyMock.mock(org.easymock.EasyMock.mock) TopologyDescription(org.apache.kafka.streams.TopologyDescription) MatcherAssert.assertThat(org.hamcrest.MatcherAssert.assertThat) Before(org.junit.Before) TopologyTestDriver(org.apache.kafka.streams.TopologyTestDriver) ValueMapper(org.apache.kafka.streams.kstream.ValueMapper) StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) KTable(org.apache.kafka.streams.kstream.KTable) KeyValueMapper(org.apache.kafka.streams.kstream.KeyValueMapper) Properties(java.util.Properties) Consumed(org.apache.kafka.streams.kstream.Consumed) Assert.assertNotNull(org.junit.Assert.assertNotNull) Test(org.junit.Test) Field(java.lang.reflect.Field) ValueTransformerWithKeySupplier(org.apache.kafka.streams.kstream.ValueTransformerWithKeySupplier) KeyValueTimestamp(org.apache.kafka.streams.KeyValueTimestamp) Grouped(org.apache.kafka.streams.kstream.Grouped) MockAggregator(org.apache.kafka.test.MockAggregator) Assert.assertNull(org.junit.Assert.assertNull) Subtopology(org.apache.kafka.streams.TopologyDescription.Subtopology) ValueTransformerWithKey(org.apache.kafka.streams.kstream.ValueTransformerWithKey) TestInputTopic(org.apache.kafka.streams.TestInputTopic) SinkNode(org.apache.kafka.streams.processor.internals.SinkNode) StreamsTestUtils(org.apache.kafka.test.StreamsTestUtils) Assert.assertEquals(org.junit.Assert.assertEquals) TopologyTestDriver(org.apache.kafka.streams.TopologyTestDriver) Test(org.junit.Test)
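
Here consumed and props likewise come from the test class setup. One plausible sketch, assuming string serdes and the StreamsTestUtils helper imported above (the real KTableImplTest fields may be defined differently):

private final Consumed<String, String> consumed = Consumed.with(Serdes.String(), Serdes.String());
// Builds driver Properties with an application id, bootstrap servers and default string serdes
private final Properties props = StreamsTestUtils.getStreamsConfig(Serdes.String(), Serdes.String());

The assertion itself illustrates lazy materialization: no operator in this topology needs a state store, so the driver reports zero stores.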

Example 13 with Consumed

Use of org.apache.kafka.streams.kstream.Consumed in project kafka by apache.

From the class KTableImplTest, method testStateStore.

@Test
public void testStateStore() {
    final StreamsBuilder builder = new StreamsBuilder();
    final String topic1 = "topic1";
    final String topic2 = "topic2";
    final KTableImpl<String, String, String> table1 = (KTableImpl<String, String, String>) builder.table(topic1, consumed);
    final KTableImpl<String, String, String> table2 = (KTableImpl<String, String, String>) builder.table(topic2, consumed);
    final KTableImpl<String, String, Integer> table1Mapped = (KTableImpl<String, String, Integer>) table1.mapValues(s -> Integer.valueOf(s));
    final KTableImpl<String, Integer, Integer> table1MappedFiltered = (KTableImpl<String, Integer, Integer>) table1Mapped.filter((key, value) -> (value % 2) == 0);
    table2.join(table1MappedFiltered, (v1, v2) -> v1 + v2);
    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        assertEquals(2, driver.getAllStateStores().size());
    }
}
Also used : StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) Produced(org.apache.kafka.streams.kstream.Produced) MockReducer(org.apache.kafka.test.MockReducer) MockApiProcessor(org.apache.kafka.test.MockApiProcessor) MockApiProcessorSupplier(org.apache.kafka.test.MockApiProcessorSupplier) Serde(org.apache.kafka.common.serialization.Serde) Arrays.asList(java.util.Arrays.asList) KeyValueStore(org.apache.kafka.streams.state.KeyValueStore) SourceNode(org.apache.kafka.streams.processor.internals.SourceNode) Serdes(org.apache.kafka.common.serialization.Serdes) StringSerializer(org.apache.kafka.common.serialization.StringSerializer) ValueMapperWithKey(org.apache.kafka.streams.kstream.ValueMapperWithKey) MockValueJoiner(org.apache.kafka.test.MockValueJoiner) TopologyTestDriverWrapper(org.apache.kafka.streams.TopologyTestDriverWrapper) MockMapper(org.apache.kafka.test.MockMapper) KeyValue(org.apache.kafka.streams.KeyValue) Bytes(org.apache.kafka.common.utils.Bytes) ProcessorContext(org.apache.kafka.streams.processor.ProcessorContext) List(java.util.List) ValueJoiner(org.apache.kafka.streams.kstream.ValueJoiner) Materialized(org.apache.kafka.streams.kstream.Materialized) Matchers.is(org.hamcrest.Matchers.is) Topology(org.apache.kafka.streams.Topology) MockInitializer(org.apache.kafka.test.MockInitializer) Assert.assertThrows(org.junit.Assert.assertThrows) EasyMock.mock(org.easymock.EasyMock.mock) TopologyDescription(org.apache.kafka.streams.TopologyDescription) MatcherAssert.assertThat(org.hamcrest.MatcherAssert.assertThat) Before(org.junit.Before) TopologyTestDriver(org.apache.kafka.streams.TopologyTestDriver) ValueMapper(org.apache.kafka.streams.kstream.ValueMapper) StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) KTable(org.apache.kafka.streams.kstream.KTable) KeyValueMapper(org.apache.kafka.streams.kstream.KeyValueMapper) Properties(java.util.Properties) Consumed(org.apache.kafka.streams.kstream.Consumed) Assert.assertNotNull(org.junit.Assert.assertNotNull) Test(org.junit.Test) Field(java.lang.reflect.Field) ValueTransformerWithKeySupplier(org.apache.kafka.streams.kstream.ValueTransformerWithKeySupplier) KeyValueTimestamp(org.apache.kafka.streams.KeyValueTimestamp) Grouped(org.apache.kafka.streams.kstream.Grouped) MockAggregator(org.apache.kafka.test.MockAggregator) Assert.assertNull(org.junit.Assert.assertNull) Subtopology(org.apache.kafka.streams.TopologyDescription.Subtopology) ValueTransformerWithKey(org.apache.kafka.streams.kstream.ValueTransformerWithKey) TestInputTopic(org.apache.kafka.streams.TestInputTopic) SinkNode(org.apache.kafka.streams.processor.internals.SinkNode) StreamsTestUtils(org.apache.kafka.test.StreamsTestUtils) Assert.assertEquals(org.junit.Assert.assertEquals) TopologyTestDriver(org.apache.kafka.streams.TopologyTestDriver) Test(org.junit.Test)
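
In contrast to the lazy-evaluation test above, the primary-key join needs to look up both sides, so both source tables end up materialized and the driver reports two stores. Materialization can also be requested explicitly; a hedged sketch with an illustrative store name (not taken from the original test):

final KTable<String, String> table = builder.table(
    topic1,
    consumed,
    // Naming the store materializes it eagerly and makes it queryable by name
    Materialized.<String, String, KeyValueStore<Bytes, byte[]>>as("table1-store").withValueSerde(Serdes.String()));
// With such a table in the topology, driver.getAllStateStores() would include "table1-store"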

Example 14 with Consumed

Use of org.apache.kafka.streams.kstream.Consumed in project kafka by apache.

From the class KTableAggregateTest, method testRemoveOldBeforeAddNew.

@Test
public void testRemoveOldBeforeAddNew() {
    final StreamsBuilder builder = new StreamsBuilder();
    final String input = "count-test-input";
    final MockApiProcessorSupplier<String, String, Void, Void> supplier = new MockApiProcessorSupplier<>();
    builder.table(input, consumed)
        .groupBy((key, value) -> KeyValue.pair(String.valueOf(key.charAt(0)), String.valueOf(key.charAt(1))), stringSerialized)
        .aggregate(
            () -> "",
            (aggKey, value, aggregate) -> aggregate + value,
            (key, value, aggregate) -> aggregate.replaceAll(value, ""),
            Materialized.<String, String, KeyValueStore<Bytes, byte[]>>as("someStore").withValueSerde(Serdes.String()))
        .toStream()
        .process(supplier);
    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), CONFIG, Instant.ofEpochMilli(0L))) {
        final TestInputTopic<String, String> inputTopic = driver.createInputTopic(input, new StringSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
        final MockApiProcessor<String, String, Void, Void> proc = supplier.theCapturedProcessor();
        inputTopic.pipeInput("11", "A", 10L);
        inputTopic.pipeInput("12", "B", 8L);
        inputTopic.pipeInput("11", (String) null, 12L);
        inputTopic.pipeInput("12", "C", 6L);
        assertEquals(asList(
            new KeyValueTimestamp<>("1", "1", 10),
            new KeyValueTimestamp<>("1", "12", 10),
            new KeyValueTimestamp<>("1", "2", 12),
            new KeyValueTimestamp<>("1", "", 12),
            new KeyValueTimestamp<>("1", "2", 12L)),
            proc.processed());
    }
}
Also used : StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) StreamsConfig(org.apache.kafka.streams.StreamsConfig) MockInitializer(org.apache.kafka.test.MockInitializer) Utils.mkProperties(org.apache.kafka.common.utils.Utils.mkProperties) MockApiProcessor(org.apache.kafka.test.MockApiProcessor) Utils.mkMap(org.apache.kafka.common.utils.Utils.mkMap) MockApiProcessorSupplier(org.apache.kafka.test.MockApiProcessorSupplier) Serde(org.apache.kafka.common.serialization.Serde) Arrays.asList(java.util.Arrays.asList) KeyValueStore(org.apache.kafka.streams.state.KeyValueStore) Duration(java.time.Duration) Serdes(org.apache.kafka.common.serialization.Serdes) StringSerializer(org.apache.kafka.common.serialization.StringSerializer) TopologyTestDriver(org.apache.kafka.streams.TopologyTestDriver) StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) MockMapper(org.apache.kafka.test.MockMapper) KTable(org.apache.kafka.streams.kstream.KTable) Properties(java.util.Properties) TestUtils(org.apache.kafka.test.TestUtils) Consumed(org.apache.kafka.streams.kstream.Consumed) KeyValue(org.apache.kafka.streams.KeyValue) Test(org.junit.Test) Instant(java.time.Instant) KeyValueTimestamp(org.apache.kafka.streams.KeyValueTimestamp) Grouped(org.apache.kafka.streams.kstream.Grouped) MockAggregator(org.apache.kafka.test.MockAggregator) Bytes(org.apache.kafka.common.utils.Bytes) Utils.mkEntry(org.apache.kafka.common.utils.Utils.mkEntry) Materialized(org.apache.kafka.streams.kstream.Materialized) TestInputTopic(org.apache.kafka.streams.TestInputTopic) Assert.assertEquals(org.junit.Assert.assertEquals) MockApiProcessorSupplier(org.apache.kafka.test.MockApiProcessorSupplier) TopologyTestDriver(org.apache.kafka.streams.TopologyTestDriver) KeyValueStore(org.apache.kafka.streams.state.KeyValueStore) StringSerializer(org.apache.kafka.common.serialization.StringSerializer) KeyValueTimestamp(org.apache.kafka.streams.KeyValueTimestamp) Test(org.junit.Test)
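
Because the aggregate is materialized as "someStore", the final per-group state can also be verified directly against the store. A sketch of an extra check that could sit at the end of the try-with-resources block above (not part of the original test):

// After the four inputs, group "1" holds only the value contributed by key "12"
final KeyValueStore<String, String> store = driver.getKeyValueStore("someStore");
assertEquals("2", store.get("1"));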

Example 15 with Consumed

Use of org.apache.kafka.streams.kstream.Consumed in project kafka by apache.

From the class KStreamImplTest, method shouldSupportForeignKeyTableTableJoinWithKTableFromKStream.

@Test
public void shouldSupportForeignKeyTableTableJoinWithKTableFromKStream() {
    final Consumed<String, String> consumed = Consumed.with(Serdes.String(), Serdes.String());
    final StreamsBuilder builder = new StreamsBuilder();
    final String input1 = "input1";
    final String input2 = "input2";
    final String output = "output";
    final KTable<String, String> leftTable = builder.stream(input1, consumed).toTable();
    final KTable<String, String> rightTable = builder.stream(input2, consumed).toTable();
    final Function<String, String> extractor = value -> value.split("\\|")[1];
    final ValueJoiner<String, String, String> joiner = (value1, value2) -> "(" + value1 + "," + value2 + ")";
    leftTable.join(rightTable, extractor, joiner).toStream().to(output);
    final Topology topology = builder.build(props);
    final String topologyDescription = topology.describe().toString();
    assertThat(topologyDescription, equalTo("Topologies:\n" + "   Sub-topology: 0\n" + "    Source: KTABLE-SOURCE-0000000016 (topics: [KTABLE-FK-JOIN-SUBSCRIPTION-RESPONSE-0000000014-topic])\n" + "      --> KTABLE-FK-JOIN-SUBSCRIPTION-RESPONSE-RESOLVER-PROCESSOR-0000000017\n" + "    Source: KSTREAM-SOURCE-0000000000 (topics: [input1])\n" + "      --> KSTREAM-TOTABLE-0000000001\n" + "    Processor: KTABLE-FK-JOIN-SUBSCRIPTION-RESPONSE-RESOLVER-PROCESSOR-0000000017 (stores: [KSTREAM-TOTABLE-STATE-STORE-0000000002])\n" + "      --> KTABLE-FK-JOIN-OUTPUT-0000000018\n" + "      <-- KTABLE-SOURCE-0000000016\n" + "    Processor: KSTREAM-TOTABLE-0000000001 (stores: [KSTREAM-TOTABLE-STATE-STORE-0000000002])\n" + "      --> KTABLE-FK-JOIN-SUBSCRIPTION-REGISTRATION-0000000007\n" + "      <-- KSTREAM-SOURCE-0000000000\n" + "    Processor: KTABLE-FK-JOIN-OUTPUT-0000000018 (stores: [])\n" + "      --> KTABLE-TOSTREAM-0000000020\n" + "      <-- KTABLE-FK-JOIN-SUBSCRIPTION-RESPONSE-RESOLVER-PROCESSOR-0000000017\n" + "    Processor: KTABLE-FK-JOIN-SUBSCRIPTION-REGISTRATION-0000000007 (stores: [])\n" + "      --> KTABLE-SINK-0000000008\n" + "      <-- KSTREAM-TOTABLE-0000000001\n" + "    Processor: KTABLE-TOSTREAM-0000000020 (stores: [])\n" + "      --> KSTREAM-SINK-0000000021\n" + "      <-- KTABLE-FK-JOIN-OUTPUT-0000000018\n" + "    Sink: KSTREAM-SINK-0000000021 (topic: output)\n" + "      <-- KTABLE-TOSTREAM-0000000020\n" + "    Sink: KTABLE-SINK-0000000008 (topic: KTABLE-FK-JOIN-SUBSCRIPTION-REGISTRATION-0000000006-topic)\n" + "      <-- KTABLE-FK-JOIN-SUBSCRIPTION-REGISTRATION-0000000007\n" + "\n" + "  Sub-topology: 1\n" + "    Source: KSTREAM-SOURCE-0000000003 (topics: [input2])\n" + "      --> KSTREAM-TOTABLE-0000000004\n" + "    Source: KTABLE-SOURCE-0000000009 (topics: [KTABLE-FK-JOIN-SUBSCRIPTION-REGISTRATION-0000000006-topic])\n" + "      --> KTABLE-FK-JOIN-SUBSCRIPTION-PROCESSOR-0000000011\n" + "    Processor: KSTREAM-TOTABLE-0000000004 (stores: [KSTREAM-TOTABLE-STATE-STORE-0000000005])\n" + "      --> KTABLE-FK-JOIN-SUBSCRIPTION-PROCESSOR-0000000013\n" + "      <-- KSTREAM-SOURCE-0000000003\n" + "    Processor: KTABLE-FK-JOIN-SUBSCRIPTION-PROCESSOR-0000000011 (stores: [KTABLE-FK-JOIN-SUBSCRIPTION-STATE-STORE-0000000010])\n" + "      --> KTABLE-FK-JOIN-SUBSCRIPTION-PROCESSOR-0000000012\n" + "      <-- KTABLE-SOURCE-0000000009\n" + "    Processor: KTABLE-FK-JOIN-SUBSCRIPTION-PROCESSOR-0000000012 (stores: [KSTREAM-TOTABLE-STATE-STORE-0000000005])\n" + "      --> KTABLE-SINK-0000000015\n" + "      <-- KTABLE-FK-JOIN-SUBSCRIPTION-PROCESSOR-0000000011\n" + "    Processor: KTABLE-FK-JOIN-SUBSCRIPTION-PROCESSOR-0000000013 (stores: [KTABLE-FK-JOIN-SUBSCRIPTION-STATE-STORE-0000000010])\n" + "      --> KTABLE-SINK-0000000015\n" + "      <-- KSTREAM-TOTABLE-0000000004\n" + "    Sink: KTABLE-SINK-0000000015 (topic: KTABLE-FK-JOIN-SUBSCRIPTION-RESPONSE-0000000014-topic)\n" + "      <-- KTABLE-FK-JOIN-SUBSCRIPTION-PROCESSOR-0000000012, KTABLE-FK-JOIN-SUBSCRIPTION-PROCESSOR-0000000013\n\n"));
    try (final TopologyTestDriver driver = new TopologyTestDriver(topology, props)) {
        final TestInputTopic<String, String> left = driver.createInputTopic(input1, new StringSerializer(), new StringSerializer());
        final TestInputTopic<String, String> right = driver.createInputTopic(input2, new StringSerializer(), new StringSerializer());
        final TestOutputTopic<String, String> outputTopic = driver.createOutputTopic(output, new StringDeserializer(), new StringDeserializer());
        // Pre-populate the RHS records. This test is all about what happens when we add/remove LHS records
        right.pipeInput("rhs1", "rhsValue1");
        right.pipeInput("rhs2", "rhsValue2");
        // this unreferenced FK won't show up in any results
        right.pipeInput("rhs3", "rhsValue3");
        assertThat(outputTopic.readKeyValuesToMap(), is(emptyMap()));
        left.pipeInput("lhs1", "lhsValue1|rhs1");
        left.pipeInput("lhs2", "lhsValue2|rhs2");
        final Map<String, String> expected = mkMap(mkEntry("lhs1", "(lhsValue1|rhs1,rhsValue1)"), mkEntry("lhs2", "(lhsValue2|rhs2,rhsValue2)"));
        assertThat(outputTopic.readKeyValuesToMap(), is(expected));
        // Add another reference to an existing FK
        left.pipeInput("lhs3", "lhsValue3|rhs1");
        assertThat(outputTopic.readKeyValuesToMap(), is(mkMap(mkEntry("lhs3", "(lhsValue3|rhs1,rhsValue1)"))));
        left.pipeInput("lhs1", (String) null);
        assertThat(outputTopic.readKeyValuesToMap(), is(mkMap(mkEntry("lhs1", null))));
    }
}
Also used : CoreMatchers.is(org.hamcrest.CoreMatchers.is) Arrays(java.util.Arrays) ValueTransformerSupplier(org.apache.kafka.streams.kstream.ValueTransformerSupplier) Produced(org.apache.kafka.streams.kstream.Produced) IsInstanceOf.instanceOf(org.hamcrest.core.IsInstanceOf.instanceOf) Stores(org.apache.kafka.streams.state.Stores) Repartitioned(org.apache.kafka.streams.kstream.Repartitioned) MockProcessorSupplier(org.apache.kafka.test.MockProcessorSupplier) Joined(org.apache.kafka.streams.kstream.Joined) MockApiProcessor(org.apache.kafka.test.MockApiProcessor) Matcher(java.util.regex.Matcher) Utils.mkMap(org.apache.kafka.common.utils.Utils.mkMap) StringDeserializer(org.apache.kafka.common.serialization.StringDeserializer) ProcessorSupplier(org.apache.kafka.streams.processor.api.ProcessorSupplier) MockApiProcessorSupplier(org.apache.kafka.test.MockApiProcessorSupplier) TransformerSupplier(org.apache.kafka.streams.kstream.TransformerSupplier) Serde(org.apache.kafka.common.serialization.Serde) Arrays.asList(java.util.Arrays.asList) TopologyWrapper(org.apache.kafka.streams.TopologyWrapper) KeyValueStore(org.apache.kafka.streams.state.KeyValueStore) Duration(java.time.Duration) Map(java.util.Map) SourceNode(org.apache.kafka.streams.processor.internals.SourceNode) Serdes(org.apache.kafka.common.serialization.Serdes) StringSerializer(org.apache.kafka.common.serialization.StringSerializer) TestRecord(org.apache.kafka.streams.test.TestRecord) ValueMapperWithKey(org.apache.kafka.streams.kstream.ValueMapperWithKey) MockValueJoiner(org.apache.kafka.test.MockValueJoiner) MockMapper(org.apache.kafka.test.MockMapper) CoreMatchers.containsString(org.hamcrest.CoreMatchers.containsString) TopicNameExtractor(org.apache.kafka.streams.processor.TopicNameExtractor) KeyValue(org.apache.kafka.streams.KeyValue) Instant(java.time.Instant) Bytes(org.apache.kafka.common.utils.Bytes) ProcessorContext(org.apache.kafka.streams.processor.ProcessorContext) List(java.util.List) Predicate(org.apache.kafka.streams.kstream.Predicate) Utils.mkEntry(org.apache.kafka.common.utils.Utils.mkEntry) ValueJoiner(org.apache.kafka.streams.kstream.ValueJoiner) Materialized(org.apache.kafka.streams.kstream.Materialized) Pattern(java.util.regex.Pattern) ProcessorTopology(org.apache.kafka.streams.processor.internals.ProcessorTopology) Duration.ofMillis(java.time.Duration.ofMillis) Topology(org.apache.kafka.streams.Topology) CoreMatchers.equalTo(org.hamcrest.CoreMatchers.equalTo) Assert.assertThrows(org.junit.Assert.assertThrows) HashMap(java.util.HashMap) KStream(org.apache.kafka.streams.kstream.KStream) Function(java.util.function.Function) StreamJoined(org.apache.kafka.streams.kstream.StreamJoined) ArrayList(java.util.ArrayList) ValueJoinerWithKey(org.apache.kafka.streams.kstream.ValueJoinerWithKey) JoinWindows(org.apache.kafka.streams.kstream.JoinWindows) Named(org.apache.kafka.streams.kstream.Named) ValueTransformer(org.apache.kafka.streams.kstream.ValueTransformer) MatcherAssert.assertThat(org.hamcrest.MatcherAssert.assertThat) Before(org.junit.Before) TopologyTestDriver(org.apache.kafka.streams.TopologyTestDriver) ValueMapper(org.apache.kafka.streams.kstream.ValueMapper) StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) Collections.emptyMap(java.util.Collections.emptyMap) KTable(org.apache.kafka.streams.kstream.KTable) KeyValueMapper(org.apache.kafka.streams.kstream.KeyValueMapper) TestOutputTopic(org.apache.kafka.streams.TestOutputTopic) Properties(java.util.Properties) Consumed(org.apache.kafka.streams.kstream.Consumed) Transformer(org.apache.kafka.streams.kstream.Transformer) Assert.assertTrue(org.junit.Assert.assertTrue) Test(org.junit.Test) ValueTransformerWithKeySupplier(org.apache.kafka.streams.kstream.ValueTransformerWithKeySupplier) KeyValueTimestamp(org.apache.kafka.streams.KeyValueTimestamp) Grouped(org.apache.kafka.streams.kstream.Grouped) TimeUnit(java.util.concurrent.TimeUnit) FailOnInvalidTimestamp(org.apache.kafka.streams.processor.FailOnInvalidTimestamp) Assert.assertNull(org.junit.Assert.assertNull) GlobalKTable(org.apache.kafka.streams.kstream.GlobalKTable) IsNull.notNullValue(org.hamcrest.core.IsNull.notNullValue) ValueTransformerWithKey(org.apache.kafka.streams.kstream.ValueTransformerWithKey) TestInputTopic(org.apache.kafka.streams.TestInputTopic) StreamsTestUtils(org.apache.kafka.test.StreamsTestUtils) Collections(java.util.Collections) Assert.assertEquals(org.junit.Assert.assertEquals) StringDeserializer(org.apache.kafka.common.serialization.StringDeserializer) TopologyTestDriver(org.apache.kafka.streams.TopologyTestDriver) CoreMatchers.containsString(org.hamcrest.CoreMatchers.containsString) ProcessorTopology(org.apache.kafka.streams.processor.internals.ProcessorTopology) Topology(org.apache.kafka.streams.Topology) StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) StringSerializer(org.apache.kafka.common.serialization.StringSerializer) Test(org.junit.Test)
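
The props used to build and drive the topology are again defined in the test class setup, not shown here. A plausible sketch, assuming the StreamsTestUtils helper imported above:

private final Properties props = StreamsTestUtils.getStreamsConfig(Serdes.String(), Serdes.String());

Passing props to builder.build(props) lets the builder apply configuration (for example topology optimization settings) before the topology description is generated; the same props then configure the TopologyTestDriver.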

Aggregations

Consumed (org.apache.kafka.streams.kstream.Consumed): 16
KTable (org.apache.kafka.streams.kstream.KTable): 16
Properties (java.util.Properties): 15
Serdes (org.apache.kafka.common.serialization.Serdes): 15
StreamsBuilder (org.apache.kafka.streams.StreamsBuilder): 15
Materialized (org.apache.kafka.streams.kstream.Materialized): 15
Assert.assertEquals (org.junit.Assert.assertEquals): 14
Test (org.junit.Test): 14
Duration (java.time.Duration): 13
KeyValueTimestamp (org.apache.kafka.streams.KeyValueTimestamp): 13
TestInputTopic (org.apache.kafka.streams.TestInputTopic): 13
TopologyTestDriver (org.apache.kafka.streams.TopologyTestDriver): 13
MockApiProcessor (org.apache.kafka.test.MockApiProcessor): 13
MockApiProcessorSupplier (org.apache.kafka.test.MockApiProcessorSupplier): 13
Instant (java.time.Instant): 12
StringSerializer (org.apache.kafka.common.serialization.StringSerializer): 12
Bytes (org.apache.kafka.common.utils.Bytes): 12
StreamsTestUtils (org.apache.kafka.test.StreamsTestUtils): 12
MatcherAssert.assertThat (org.hamcrest.MatcherAssert.assertThat): 12
Arrays.asList (java.util.Arrays.asList): 11