Example 76 with KeyValue

Use of org.apache.kafka.streams.KeyValue in the Apache Kafka project.

From class CogroupedKStreamImplTest, method shouldInsertRepartitionsTopicForUpstreamKeyModificationWithGroupedReusedInDifferentCogroups.

@Test
public void shouldInsertRepartitionsTopicForUpstreamKeyModificationWithGroupedReusedInDifferentCogroups() {
    final StreamsBuilder builder = new StreamsBuilder();
    final KStream<String, String> stream1 = builder.stream("one", stringConsumed);
    final KStream<String, String> stream2 = builder.stream("two", stringConsumed);
    final KStream<String, String> stream3 = builder.stream("three", stringConsumed);
    final KGroupedStream<String, String> groupedOne = stream1.map((k, v) -> new KeyValue<>(v, k)).groupByKey();
    final KGroupedStream<String, String> groupedTwo = stream2.groupByKey();
    final KGroupedStream<String, String> groupedThree = stream3.groupByKey();
    groupedOne.cogroup(STRING_AGGREGATOR).cogroup(groupedThree, STRING_AGGREGATOR).aggregate(STRING_INITIALIZER);
    groupedOne.cogroup(STRING_AGGREGATOR).cogroup(groupedTwo, STRING_AGGREGATOR).aggregate(STRING_INITIALIZER);
    final String topologyDescription = builder.build().describe().toString();
    assertThat(topologyDescription, equalTo("Topologies:\n" + "   Sub-topology: 0\n" + "    Source: KSTREAM-SOURCE-0000000000 (topics: [one])\n" + "      --> KSTREAM-MAP-0000000003\n" + "    Processor: KSTREAM-MAP-0000000003 (stores: [])\n" + "      --> COGROUPKSTREAM-AGGREGATE-STATE-STORE-0000000004-repartition-filter, COGROUPKSTREAM-AGGREGATE-STATE-STORE-0000000011-repartition-filter\n" + "      <-- KSTREAM-SOURCE-0000000000\n" + "    Processor: COGROUPKSTREAM-AGGREGATE-STATE-STORE-0000000004-repartition-filter (stores: [])\n" + "      --> COGROUPKSTREAM-AGGREGATE-STATE-STORE-0000000004-repartition-sink\n" + "      <-- KSTREAM-MAP-0000000003\n" + "    Processor: COGROUPKSTREAM-AGGREGATE-STATE-STORE-0000000011-repartition-filter (stores: [])\n" + "      --> COGROUPKSTREAM-AGGREGATE-STATE-STORE-0000000011-repartition-sink\n" + "      <-- KSTREAM-MAP-0000000003\n" + "    Sink: COGROUPKSTREAM-AGGREGATE-STATE-STORE-0000000004-repartition-sink (topic: COGROUPKSTREAM-AGGREGATE-STATE-STORE-0000000004-repartition)\n" + "      <-- COGROUPKSTREAM-AGGREGATE-STATE-STORE-0000000004-repartition-filter\n" + "    Sink: COGROUPKSTREAM-AGGREGATE-STATE-STORE-0000000011-repartition-sink (topic: COGROUPKSTREAM-AGGREGATE-STATE-STORE-0000000011-repartition)\n" + "      <-- COGROUPKSTREAM-AGGREGATE-STATE-STORE-0000000011-repartition-filter\n\n" + "  Sub-topology: 1\n" + "    Source: COGROUPKSTREAM-AGGREGATE-STATE-STORE-0000000011-repartition-source (topics: [COGROUPKSTREAM-AGGREGATE-STATE-STORE-0000000011-repartition])\n" + "      --> COGROUPKSTREAM-AGGREGATE-0000000015\n" + "    Source: KSTREAM-SOURCE-0000000001 (topics: [two])\n" + "      --> COGROUPKSTREAM-AGGREGATE-0000000016\n" + "    Processor: COGROUPKSTREAM-AGGREGATE-0000000015 (stores: [COGROUPKSTREAM-AGGREGATE-STATE-STORE-0000000011])\n" + "      --> COGROUPKSTREAM-MERGE-0000000017\n" + "      <-- COGROUPKSTREAM-AGGREGATE-STATE-STORE-0000000011-repartition-source\n" + "    Processor: COGROUPKSTREAM-AGGREGATE-0000000016 (stores: [COGROUPKSTREAM-AGGREGATE-STATE-STORE-0000000011])\n" + "      --> COGROUPKSTREAM-MERGE-0000000017\n" + "      <-- KSTREAM-SOURCE-0000000001\n" + "    Processor: COGROUPKSTREAM-MERGE-0000000017 (stores: [])\n" + "      --> none\n" + "      <-- COGROUPKSTREAM-AGGREGATE-0000000015, COGROUPKSTREAM-AGGREGATE-0000000016\n\n" + "  Sub-topology: 2\n" + "    Source: COGROUPKSTREAM-AGGREGATE-STATE-STORE-0000000004-repartition-source (topics: [COGROUPKSTREAM-AGGREGATE-STATE-STORE-0000000004-repartition])\n" + "      --> COGROUPKSTREAM-AGGREGATE-0000000008\n" + "    Source: KSTREAM-SOURCE-0000000002 (topics: [three])\n" + "      --> COGROUPKSTREAM-AGGREGATE-0000000009\n" + "    Processor: COGROUPKSTREAM-AGGREGATE-0000000008 (stores: [COGROUPKSTREAM-AGGREGATE-STATE-STORE-0000000004])\n" + "      --> COGROUPKSTREAM-MERGE-0000000010\n" + "      <-- COGROUPKSTREAM-AGGREGATE-STATE-STORE-0000000004-repartition-source\n" + "    Processor: COGROUPKSTREAM-AGGREGATE-0000000009 (stores: [COGROUPKSTREAM-AGGREGATE-STATE-STORE-0000000004])\n" + "      --> COGROUPKSTREAM-MERGE-0000000010\n" + "      <-- KSTREAM-SOURCE-0000000002\n" + "    Processor: COGROUPKSTREAM-MERGE-0000000010 (stores: [])\n" + "      --> none\n" + "      <-- COGROUPKSTREAM-AGGREGATE-0000000008, COGROUPKSTREAM-AGGREGATE-0000000009\n\n"));
}
Also used : StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) StreamsConfig(org.apache.kafka.streams.StreamsConfig) KGroupedStream(org.apache.kafka.streams.kstream.KGroupedStream) MockInitializer(org.apache.kafka.test.MockInitializer) SessionWindows(org.apache.kafka.streams.kstream.SessionWindows) CoreMatchers.equalTo(org.hamcrest.CoreMatchers.equalTo) Assert.assertThrows(org.junit.Assert.assertThrows) Window(org.apache.kafka.streams.kstream.Window) KStream(org.apache.kafka.streams.kstream.KStream) Initializer(org.apache.kafka.streams.kstream.Initializer) StringDeserializer(org.apache.kafka.common.serialization.StringDeserializer) Named(org.apache.kafka.streams.kstream.Named) KeyValueStore(org.apache.kafka.streams.state.KeyValueStore) IntegerSerializer(org.apache.kafka.common.serialization.IntegerSerializer) Serdes(org.apache.kafka.common.serialization.Serdes) StringSerializer(org.apache.kafka.common.serialization.StringSerializer) MatcherAssert.assertThat(org.hamcrest.MatcherAssert.assertThat) Aggregator(org.apache.kafka.streams.kstream.Aggregator) TestRecord(org.apache.kafka.streams.test.TestRecord) Before(org.junit.Before) TopologyTestDriver(org.apache.kafka.streams.TopologyTestDriver) MockValueJoiner(org.apache.kafka.test.MockValueJoiner) StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) KTable(org.apache.kafka.streams.kstream.KTable) Properties(java.util.Properties) TestOutputTopic(org.apache.kafka.streams.TestOutputTopic) Consumed(org.apache.kafka.streams.kstream.Consumed) KeyValue(org.apache.kafka.streams.KeyValue) Assert.assertTrue(org.junit.Assert.assertTrue) Test(org.junit.Test) Grouped(org.apache.kafka.streams.kstream.Grouped) MockAggregator(org.apache.kafka.test.MockAggregator) Bytes(org.apache.kafka.common.utils.Bytes) SlidingWindows(org.apache.kafka.streams.kstream.SlidingWindows) Materialized(org.apache.kafka.streams.kstream.Materialized) IntegerDeserializer(org.apache.kafka.common.serialization.IntegerDeserializer) TestInputTopic(org.apache.kafka.streams.TestInputTopic) CogroupedKStream(org.apache.kafka.streams.kstream.CogroupedKStream) Windows(org.apache.kafka.streams.kstream.Windows) StreamsTestUtils(org.apache.kafka.test.StreamsTestUtils) KeyValue(org.apache.kafka.streams.KeyValue) Test(org.junit.Test)
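The repartition topics in the expected topology appear because the map() call changes the record key, so Kafka Streams must redistribute records by the new key before each co-grouped aggregation. Below is a minimal sketch of the two equivalent ways to construct a KeyValue (the constructor used in the test and the static factory); the class and helper names are illustrative, not taken from the test.

import org.apache.kafka.streams.KeyValue;

public class KeyValueSketch {
    // Hypothetical helper mirroring the test's map() lambda: swaps key and value.
    static KeyValue<String, String> swap(final String key, final String value) {
        // Constructor form, as used in the test above ...
        final KeyValue<String, String> viaConstructor = new KeyValue<>(value, key);
        // ... and the equivalent static factory.
        final KeyValue<String, String> viaFactory = KeyValue.pair(value, key);
        assert viaConstructor.equals(viaFactory);
        return viaFactory;
    }

    public static void main(final String[] args) {
        // Prints something like: KeyValue(1, a)
        System.out.println(swap("a", "1"));
    }
}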

Example 77 with KeyValue

Use of org.apache.kafka.streams.KeyValue in the Apache Kafka project.

From class CogroupedKStreamImplTest, method shouldInsertRepartitionsTopicForUpstreamKeyModificationWithGroupedReusedInDifferentCogroupsWithOptimization.

@Test
public void shouldInsertRepartitionsTopicForUpstreamKeyModificationWithGroupedReusedInDifferentCogroupsWithOptimization() {
    final StreamsBuilder builder = new StreamsBuilder();
    final Properties properties = new Properties();
    properties.setProperty(StreamsConfig.TOPOLOGY_OPTIMIZATION_CONFIG, StreamsConfig.OPTIMIZE);
    final KStream<String, String> stream1 = builder.stream("one", stringConsumed);
    final KStream<String, String> stream2 = builder.stream("two", stringConsumed);
    final KStream<String, String> stream3 = builder.stream("three", stringConsumed);
    final KGroupedStream<String, String> groupedOne = stream1.map((k, v) -> new KeyValue<>(v, k)).groupByKey();
    final KGroupedStream<String, String> groupedTwo = stream2.groupByKey();
    final KGroupedStream<String, String> groupedThree = stream3.groupByKey();
    groupedOne.cogroup(STRING_AGGREGATOR).cogroup(groupedThree, STRING_AGGREGATOR).aggregate(STRING_INITIALIZER);
    groupedOne.cogroup(STRING_AGGREGATOR).cogroup(groupedTwo, STRING_AGGREGATOR).aggregate(STRING_INITIALIZER);
    final String topologyDescription = builder.build(properties).describe().toString();
    assertThat(topologyDescription, equalTo("Topologies:\n" + "   Sub-topology: 0\n" + "    Source: KSTREAM-SOURCE-0000000000 (topics: [one])\n" + "      --> KSTREAM-MAP-0000000003\n" + "    Processor: KSTREAM-MAP-0000000003 (stores: [])\n" + "      --> COGROUPKSTREAM-AGGREGATE-STATE-STORE-0000000004-repartition-filter\n" + "      <-- KSTREAM-SOURCE-0000000000\n" + "    Processor: COGROUPKSTREAM-AGGREGATE-STATE-STORE-0000000004-repartition-filter (stores: [])\n" + "      --> COGROUPKSTREAM-AGGREGATE-STATE-STORE-0000000004-repartition-sink\n" + "      <-- KSTREAM-MAP-0000000003\n" + "    Sink: COGROUPKSTREAM-AGGREGATE-STATE-STORE-0000000004-repartition-sink (topic: COGROUPKSTREAM-AGGREGATE-STATE-STORE-0000000004-repartition)\n" + "      <-- COGROUPKSTREAM-AGGREGATE-STATE-STORE-0000000004-repartition-filter\n\n" + "  Sub-topology: 1\n" + "    Source: COGROUPKSTREAM-AGGREGATE-STATE-STORE-0000000004-repartition-source (topics: [COGROUPKSTREAM-AGGREGATE-STATE-STORE-0000000004-repartition])\n" + "      --> COGROUPKSTREAM-AGGREGATE-0000000008, COGROUPKSTREAM-AGGREGATE-0000000015\n" + "    Source: KSTREAM-SOURCE-0000000001 (topics: [two])\n" + "      --> COGROUPKSTREAM-AGGREGATE-0000000016\n" + "    Source: KSTREAM-SOURCE-0000000002 (topics: [three])\n" + "      --> COGROUPKSTREAM-AGGREGATE-0000000009\n" + "    Processor: COGROUPKSTREAM-AGGREGATE-0000000008 (stores: [COGROUPKSTREAM-AGGREGATE-STATE-STORE-0000000004])\n" + "      --> COGROUPKSTREAM-MERGE-0000000010\n" + "      <-- COGROUPKSTREAM-AGGREGATE-STATE-STORE-0000000004-repartition-source\n" + "    Processor: COGROUPKSTREAM-AGGREGATE-0000000009 (stores: [COGROUPKSTREAM-AGGREGATE-STATE-STORE-0000000004])\n" + "      --> COGROUPKSTREAM-MERGE-0000000010\n" + "      <-- KSTREAM-SOURCE-0000000002\n" + "    Processor: COGROUPKSTREAM-AGGREGATE-0000000015 (stores: [COGROUPKSTREAM-AGGREGATE-STATE-STORE-0000000011])\n" + "      --> COGROUPKSTREAM-MERGE-0000000017\n" + "      <-- COGROUPKSTREAM-AGGREGATE-STATE-STORE-0000000004-repartition-source\n" + "    Processor: COGROUPKSTREAM-AGGREGATE-0000000016 (stores: [COGROUPKSTREAM-AGGREGATE-STATE-STORE-0000000011])\n" + "      --> COGROUPKSTREAM-MERGE-0000000017\n" + "      <-- KSTREAM-SOURCE-0000000001\n" + "    Processor: COGROUPKSTREAM-MERGE-0000000010 (stores: [])\n" + "      --> none\n" + "      <-- COGROUPKSTREAM-AGGREGATE-0000000008, COGROUPKSTREAM-AGGREGATE-0000000009\n" + "    Processor: COGROUPKSTREAM-MERGE-0000000017 (stores: [])\n" + "      --> none\n" + "      <-- COGROUPKSTREAM-AGGREGATE-0000000015, COGROUPKSTREAM-AGGREGATE-0000000016\n\n"));
}
Also used : StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) StreamsConfig(org.apache.kafka.streams.StreamsConfig) KGroupedStream(org.apache.kafka.streams.kstream.KGroupedStream) MockInitializer(org.apache.kafka.test.MockInitializer) SessionWindows(org.apache.kafka.streams.kstream.SessionWindows) CoreMatchers.equalTo(org.hamcrest.CoreMatchers.equalTo) Assert.assertThrows(org.junit.Assert.assertThrows) Window(org.apache.kafka.streams.kstream.Window) KStream(org.apache.kafka.streams.kstream.KStream) Initializer(org.apache.kafka.streams.kstream.Initializer) StringDeserializer(org.apache.kafka.common.serialization.StringDeserializer) Named(org.apache.kafka.streams.kstream.Named) KeyValueStore(org.apache.kafka.streams.state.KeyValueStore) IntegerSerializer(org.apache.kafka.common.serialization.IntegerSerializer) Serdes(org.apache.kafka.common.serialization.Serdes) StringSerializer(org.apache.kafka.common.serialization.StringSerializer) MatcherAssert.assertThat(org.hamcrest.MatcherAssert.assertThat) Aggregator(org.apache.kafka.streams.kstream.Aggregator) TestRecord(org.apache.kafka.streams.test.TestRecord) Before(org.junit.Before) TopologyTestDriver(org.apache.kafka.streams.TopologyTestDriver) MockValueJoiner(org.apache.kafka.test.MockValueJoiner) StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) KTable(org.apache.kafka.streams.kstream.KTable) Properties(java.util.Properties) TestOutputTopic(org.apache.kafka.streams.TestOutputTopic) Consumed(org.apache.kafka.streams.kstream.Consumed) KeyValue(org.apache.kafka.streams.KeyValue) Assert.assertTrue(org.junit.Assert.assertTrue) Test(org.junit.Test) Grouped(org.apache.kafka.streams.kstream.Grouped) MockAggregator(org.apache.kafka.test.MockAggregator) Bytes(org.apache.kafka.common.utils.Bytes) SlidingWindows(org.apache.kafka.streams.kstream.SlidingWindows) Materialized(org.apache.kafka.streams.kstream.Materialized) IntegerDeserializer(org.apache.kafka.common.serialization.IntegerDeserializer) TestInputTopic(org.apache.kafka.streams.TestInputTopic) CogroupedKStream(org.apache.kafka.streams.kstream.CogroupedKStream) Windows(org.apache.kafka.streams.kstream.Windows) StreamsTestUtils(org.apache.kafka.test.StreamsTestUtils) KeyValue(org.apache.kafka.streams.KeyValue) Properties(java.util.Properties) Test(org.junit.Test)
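With StreamsConfig.OPTIMIZE, the optimizer reuses a single repartition topic for both cogroups because they share the same re-keyed grouped stream, which is why the optimized topology above contains one repartition sub-graph instead of two. Below is a minimal, self-contained sketch of enabling the optimization when building a topology; the topic name and the simple count() aggregation are placeholders, not the test's pipeline.

import java.util.Properties;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.kstream.Consumed;

public class OptimizedBuildSketch {
    public static void main(final String[] args) {
        final Properties props = new Properties();
        // The optimization flag must be passed to build(props); setting it only on the
        // runtime StreamsConfig does not rewrite an already-built topology.
        props.setProperty(StreamsConfig.TOPOLOGY_OPTIMIZATION_CONFIG, StreamsConfig.OPTIMIZE);

        final StreamsBuilder builder = new StreamsBuilder();
        builder.stream("input", Consumed.with(Serdes.String(), Serdes.String()))
               // Changing the key forces a repartition before the downstream aggregation.
               .map((k, v) -> KeyValue.pair(v, k))
               .groupByKey()
               .count();

        // describe() prints the final topology, including any repartition topics.
        System.out.println(builder.build(props).describe());
    }
}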

Example 78 with KeyValue

Use of org.apache.kafka.streams.KeyValue in the Apache Kafka project.

From class KGroupedTableImplTest, method shouldReduce.

@Test
public void shouldReduce() {
    final KeyValueMapper<String, Number, KeyValue<String, Integer>> intProjection = (key, value) -> KeyValue.pair(key, value.intValue());
    final KTable<String, Integer> reduced = builder.table(topic, Consumed.with(Serdes.String(), Serdes.Double()), Materialized.<String, Double, KeyValueStore<Bytes, byte[]>>as("store").withKeySerde(Serdes.String()).withValueSerde(Serdes.Double())).groupBy(intProjection).reduce(MockReducer.INTEGER_ADDER, MockReducer.INTEGER_SUBTRACTOR, Materialized.as("reduced"));
    final MockApiProcessorSupplier<String, Integer, Void, Void> supplier = getReducedResults(reduced);
    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        assertReduced(supplier.theCapturedProcessor().lastValueAndTimestampPerKey(), topic, driver);
        assertEquals(reduced.queryableStoreName(), "reduced");
    }
}
Also used : MockInitializer(org.apache.kafka.test.MockInitializer) CoreMatchers.equalTo(org.hamcrest.CoreMatchers.equalTo) Assert.assertThrows(org.junit.Assert.assertThrows) MockReducer(org.apache.kafka.test.MockReducer) TopologyException(org.apache.kafka.streams.errors.TopologyException) ValueAndTimestamp(org.apache.kafka.streams.state.ValueAndTimestamp) KGroupedTable(org.apache.kafka.streams.kstream.KGroupedTable) MockApiProcessorSupplier(org.apache.kafka.test.MockApiProcessorSupplier) KeyValueStore(org.apache.kafka.streams.state.KeyValueStore) Map(java.util.Map) Serdes(org.apache.kafka.common.serialization.Serdes) StringSerializer(org.apache.kafka.common.serialization.StringSerializer) MatcherAssert.assertThat(org.hamcrest.MatcherAssert.assertThat) Before(org.junit.Before) TopologyTestDriver(org.apache.kafka.streams.TopologyTestDriver) StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) MockMapper(org.apache.kafka.test.MockMapper) KTable(org.apache.kafka.streams.kstream.KTable) KeyValueMapper(org.apache.kafka.streams.kstream.KeyValueMapper) Properties(java.util.Properties) DoubleSerializer(org.apache.kafka.common.serialization.DoubleSerializer) Consumed(org.apache.kafka.streams.kstream.Consumed) KeyValue(org.apache.kafka.streams.KeyValue) Test(org.junit.Test) Grouped(org.apache.kafka.streams.kstream.Grouped) MockAggregator(org.apache.kafka.test.MockAggregator) Bytes(org.apache.kafka.common.utils.Bytes) Assert.assertNull(org.junit.Assert.assertNull) Materialized(org.apache.kafka.streams.kstream.Materialized) TestInputTopic(org.apache.kafka.streams.TestInputTopic) StreamsTestUtils(org.apache.kafka.test.StreamsTestUtils) Assert.assertEquals(org.junit.Assert.assertEquals) Bytes(org.apache.kafka.common.utils.Bytes) KeyValue(org.apache.kafka.streams.KeyValue) TopologyTestDriver(org.apache.kafka.streams.TopologyTestDriver) Test(org.junit.Test)
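Below is a rough sketch of piping records into a TopologyTestDriver with TestInputTopic, which is what the test's helpers do behind assertReduced; the topic name, serdes, and properties here are placeholders rather than the test's own setup.

import java.util.Properties;
import org.apache.kafka.common.serialization.DoubleSerializer;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.serialization.StringSerializer;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.TestInputTopic;
import org.apache.kafka.streams.TopologyTestDriver;
import org.apache.kafka.streams.kstream.Consumed;

public class ReduceDriverSketch {
    public static void main(final String[] args) {
        final StreamsBuilder builder = new StreamsBuilder();
        // Hypothetical source table; the test builds its own table plus groupBy/reduce.
        builder.table("numbers", Consumed.with(Serdes.String(), Serdes.Double()));

        final Properties props = new Properties();
        props.setProperty(StreamsConfig.APPLICATION_ID_CONFIG, "reduce-sketch");
        props.setProperty(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "dummy:1234");

        try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
            final TestInputTopic<String, Double> input =
                driver.createInputTopic("numbers", new StringSerializer(), new DoubleSerializer());
            // Each record updates the table; in the test it then flows through
            // groupBy(intProjection) and the add/subtract reducers.
            input.pipeInput("A", 1.1);
            input.pipeInput("A", 2.2);
            input.pipeInput("B", 3.3);
        }
    }
}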

Example 79 with KeyValue

Use of org.apache.kafka.streams.KeyValue in the Apache Kafka project.

From class ReadOnlyWindowStoreStub, method all.

@Override
public KeyValueIterator<Windowed<K>, V> all() {
    if (!open) {
        throw new InvalidStateStoreException("Store is not open");
    }
    final List<KeyValue<Windowed<K>, V>> results = new ArrayList<>();
    for (final long now : data.keySet()) {
        final NavigableMap<K, V> kvMap = data.get(now);
        if (kvMap != null) {
            for (final Entry<K, V> entry : kvMap.entrySet()) {
                results.add(new KeyValue<>(new Windowed<>(entry.getKey(), new TimeWindow(now, now + windowSize)), entry.getValue()));
            }
        }
    }
    final Iterator<KeyValue<Windowed<K>, V>> iterator = results.iterator();
    return new KeyValueIterator<Windowed<K>, V>() {

        @Override
        public void close() {
        }

        @Override
        public Windowed<K> peekNextKey() {
            throw new UnsupportedOperationException("peekNextKey() not supported in " + getClass().getName());
        }

        @Override
        public boolean hasNext() {
            return iterator.hasNext();
        }

        @Override
        public KeyValue<Windowed<K>, V> next() {
            return iterator.next();
        }
    };
}
Also used : KeyValue(org.apache.kafka.streams.KeyValue) ArrayList(java.util.ArrayList) TimeWindow(org.apache.kafka.streams.kstream.internals.TimeWindow) Windowed(org.apache.kafka.streams.kstream.Windowed) InvalidStateStoreException(org.apache.kafka.streams.errors.InvalidStateStoreException) KeyValueIterator(org.apache.kafka.streams.state.KeyValueIterator)
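The iterator returned by all() yields KeyValue entries keyed by Windowed<K>. Below is a rough sketch of how a caller might drain such an iterator from a real ReadOnlyWindowStore; the store handle and value type are assumptions for illustration.

import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.kstream.Windowed;
import org.apache.kafka.streams.state.KeyValueIterator;
import org.apache.kafka.streams.state.ReadOnlyWindowStore;

public class WindowStoreScanSketch {
    // Hypothetical helper: drains a window store's all() iterator and prints each entry.
    static void printAll(final ReadOnlyWindowStore<String, Long> store) {
        // KeyValueIterator is Closeable, so try-with-resources releases underlying resources.
        try (final KeyValueIterator<Windowed<String>, Long> iterator = store.all()) {
            while (iterator.hasNext()) {
                final KeyValue<Windowed<String>, Long> entry = iterator.next();
                System.out.printf("key=%s window=[%d,%d) value=%d%n",
                    entry.key.key(), entry.key.window().start(), entry.key.window().end(), entry.value);
            }
        }
    }
}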

Example 80 with KeyValue

Use of org.apache.kafka.streams.KeyValue in the Apache Kafka project.

From class ReadOnlyWindowStoreStub, method backwardFetch.

@Override
public KeyValueIterator<Windowed<K>, V> backwardFetch(final K keyFrom, final K keyTo, final Instant timeFrom, final Instant timeTo) throws IllegalArgumentException {
    final long timeFromTs = ApiUtils.validateMillisecondInstant(timeFrom, prepareMillisCheckFailMsgPrefix(timeFrom, "timeFrom"));
    final long timeToTs = ApiUtils.validateMillisecondInstant(timeTo, prepareMillisCheckFailMsgPrefix(timeTo, "timeTo"));
    if (!open) {
        throw new InvalidStateStoreException("Store is not open");
    }
    final List<KeyValue<Windowed<K>, V>> results = new ArrayList<>();
    for (long now = timeToTs; now >= timeFromTs; now--) {
        final NavigableMap<K, V> kvMap = data.get(now);
        if (kvMap != null) {
            final NavigableMap<K, V> kvSubMap;
            if (keyFrom == null && keyTo == null) {
                kvSubMap = kvMap;
            } else if (keyFrom == null) {
                kvSubMap = kvMap.headMap(keyTo, true);
            } else if (keyTo == null) {
                kvSubMap = kvMap.tailMap(keyFrom, true);
            } else {
                // keyFrom != null and keyTo != null
                kvSubMap = kvMap.subMap(keyFrom, true, keyTo, true);
            }
            for (final Entry<K, V> entry : kvSubMap.descendingMap().entrySet()) {
                results.add(new KeyValue<>(new Windowed<>(entry.getKey(), new TimeWindow(now, now + windowSize)), entry.getValue()));
            }
        }
    }
    final Iterator<KeyValue<Windowed<K>, V>> iterator = results.iterator();
    return new KeyValueIterator<Windowed<K>, V>() {

        @Override
        public void close() {
        }

        @Override
        public Windowed<K> peekNextKey() {
            throw new UnsupportedOperationException("peekNextKey() not supported in " + getClass().getName());
        }

        @Override
        public boolean hasNext() {
            return iterator.hasNext();
        }

        @Override
        public KeyValue<Windowed<K>, V> next() {
            return iterator.next();
        }
    };
}
Also used : KeyValue(org.apache.kafka.streams.KeyValue) ArrayList(java.util.ArrayList) TimeWindow(org.apache.kafka.streams.kstream.internals.TimeWindow) Windowed(org.apache.kafka.streams.kstream.Windowed) InvalidStateStoreException(org.apache.kafka.streams.errors.InvalidStateStoreException) KeyValueIterator(org.apache.kafka.streams.state.KeyValueIterator)
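Below is a rough sketch of calling backwardFetch on a window store to walk a key range in reverse time order; the key range, time range, and store handle are assumptions for illustration.

import java.time.Duration;
import java.time.Instant;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.kstream.Windowed;
import org.apache.kafka.streams.state.KeyValueIterator;
import org.apache.kafka.streams.state.ReadOnlyWindowStore;

public class BackwardFetchSketch {
    // Hypothetical helper: fetches the last hour for keys in ["a", "m"], newest windows first.
    static void printLastHour(final ReadOnlyWindowStore<String, Long> store) {
        final Instant to = Instant.now();
        final Instant from = to.minus(Duration.ofHours(1));
        try (final KeyValueIterator<Windowed<String>, Long> iterator =
                 store.backwardFetch("a", "m", from, to)) {
            while (iterator.hasNext()) {
                final KeyValue<Windowed<String>, Long> entry = iterator.next();
                System.out.println(entry.key + " -> " + entry.value);
            }
        }
    }
}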

Aggregations

KeyValue (org.apache.kafka.streams.KeyValue) 343
Test (org.junit.Test) 268
Properties (java.util.Properties) 127
StreamsBuilder (org.apache.kafka.streams.StreamsBuilder) 123
Windowed (org.apache.kafka.streams.kstream.Windowed) 105
ArrayList (java.util.ArrayList) 90
KafkaStreams (org.apache.kafka.streams.KafkaStreams) 82
StringSerializer (org.apache.kafka.common.serialization.StringSerializer) 74
Bytes (org.apache.kafka.common.utils.Bytes) 74
TopologyTestDriver (org.apache.kafka.streams.TopologyTestDriver) 68
IntegrationTest (org.apache.kafka.test.IntegrationTest) 66
Serdes (org.apache.kafka.common.serialization.Serdes) 65
KeyValueStore (org.apache.kafka.streams.state.KeyValueStore) 62
StreamsConfig (org.apache.kafka.streams.StreamsConfig) 55
StringDeserializer (org.apache.kafka.common.serialization.StringDeserializer) 53
KStream (org.apache.kafka.streams.kstream.KStream) 52
SessionWindow (org.apache.kafka.streams.kstream.internals.SessionWindow) 46
KTable (org.apache.kafka.streams.kstream.KTable) 43
MatcherAssert.assertThat (org.hamcrest.MatcherAssert.assertThat) 42
Consumed (org.apache.kafka.streams.kstream.Consumed) 41