
Example 26 with KTable

Use of org.apache.kafka.streams.kstream.KTable in project kafka by apache.

The class KTableKTableLeftJoinTest, method shouldNotThrowIllegalStateExceptionWhenMultiCacheEvictions.

/**
 * This test was written to reproduce https://issues.apache.org/jira/browse/KAFKA-4492.
 * It is based on a fairly complicated join used by the developer who reported the bug.
 * Before the fix, this would trigger an IllegalStateException.
 */
@Test
public void shouldNotThrowIllegalStateExceptionWhenMultiCacheEvictions() {
    final String agg = "agg";
    final String tableOne = "tableOne";
    final String tableTwo = "tableTwo";
    final String tableThree = "tableThree";
    final String tableFour = "tableFour";
    final String tableFive = "tableFive";
    final String tableSix = "tableSix";
    final String[] inputs = { agg, tableOne, tableTwo, tableThree, tableFour, tableFive, tableSix };
    final StreamsBuilder builder = new StreamsBuilder();
    final Consumed<Long, String> consumed = Consumed.with(Serdes.Long(), Serdes.String());
    final KTable<Long, String> aggTable = builder
        .table(agg, consumed, Materialized.as(Stores.inMemoryKeyValueStore("agg-base-store")))
        .groupBy(KeyValue::new, Grouped.with(Serdes.Long(), Serdes.String()))
        .reduce(MockReducer.STRING_ADDER,
            MockReducer.STRING_ADDER,
            Materialized.as(Stores.inMemoryKeyValueStore("agg-store")));
    final KTable<Long, String> one = builder.table(tableOne, consumed, Materialized.as(Stores.inMemoryKeyValueStore("tableOne-base-store")));
    final KTable<Long, String> two = builder.table(tableTwo, consumed, Materialized.as(Stores.inMemoryKeyValueStore("tableTwo-base-store")));
    final KTable<Long, String> three = builder.table(tableThree, consumed, Materialized.as(Stores.inMemoryKeyValueStore("tableThree-base-store")));
    final KTable<Long, String> four = builder.table(tableFour, consumed, Materialized.as(Stores.inMemoryKeyValueStore("tableFour-base-store")));
    final KTable<Long, String> five = builder.table(tableFive, consumed, Materialized.as(Stores.inMemoryKeyValueStore("tableFive-base-store")));
    final KTable<Long, String> six = builder.table(tableSix, consumed, Materialized.as(Stores.inMemoryKeyValueStore("tableSix-base-store")));
    final ValueMapper<String, String> mapper = value -> value.toUpperCase(Locale.ROOT);
    final KTable<Long, String> seven = one.mapValues(mapper);
    final KTable<Long, String> eight = six.leftJoin(seven, MockValueJoiner.TOSTRING_JOINER);
    aggTable.leftJoin(one, MockValueJoiner.TOSTRING_JOINER)
        .leftJoin(two, MockValueJoiner.TOSTRING_JOINER)
        .leftJoin(three, MockValueJoiner.TOSTRING_JOINER)
        .leftJoin(four, MockValueJoiner.TOSTRING_JOINER)
        .leftJoin(five, MockValueJoiner.TOSTRING_JOINER)
        .leftJoin(eight, MockValueJoiner.TOSTRING_JOINER)
        .mapValues(mapper);
    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        final String[] values = {
            "a", "AA", "BBB", "CCCC", "DD", "EEEEEEEE", "F", "GGGGGGGGGGGGGGG",
            "HHH", "IIIIIIIIII", "J", "KK", "LLLL", "MMMMMMMMMMMMMMMMMMMMMM",
            "NNNNN", "O", "P", "QQQQQ", "R", "SSSS", "T", "UU", "VVVVVVVVVVVVVVVVVVV"
        };
        TestInputTopic<Long, String> inputTopic;
        final Random random = new Random();
        for (int i = 0; i < 1000; i++) {
            for (final String input : inputs) {
                final Long key = (long) random.nextInt(1000);
                final String value = values[random.nextInt(values.length)];
                inputTopic = driver.createInputTopic(input, Serdes.Long().serializer(), Serdes.String().serializer());
                inputTopic.pipeInput(key, value);
            }
        }
    }
}
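
The TopologyTestDriver above is constructed with a class-level props field that this excerpt does not show. A minimal sketch of what that configuration might look like, assuming default Long/String serdes and a deliberately small record cache so that cache evictions actually occur (the StreamsConfig keys are real; the values are assumptions):

private final Properties props = new Properties();
{
    props.put(StreamsConfig.APPLICATION_ID_CONFIG, "ktable-ktable-left-join-test");
    props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "dummy:1234");
    props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.LongSerde.class);
    props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.StringSerde.class);
    // A small cache forces frequent evictions, the condition KAFKA-4492 needed.
    props.put(StreamsConfig.CACHE_MAX_BYTES_BUFFERING_CONFIG, 10 * 1024);
}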

Example 27 with KTable

Use of org.apache.kafka.streams.kstream.KTable in project kafka by apache.

The class KTableAggregateTest, method testAggRepartition.

@Test
public void testAggRepartition() {
    final StreamsBuilder builder = new StreamsBuilder();
    final String topic1 = "topic1";
    final KTable<String, String> table1 = builder.table(topic1, consumed);
    final KTable<String, String> table2 = table1.groupBy((key, value) -> {
        switch(key) {
            case "null":
                return KeyValue.pair(null, value);
            case "NULL":
                return null;
            default:
                return KeyValue.pair(value, value);
        }
    }, stringSerialized)
        .aggregate(MockInitializer.STRING_INIT,
            MockAggregator.TOSTRING_ADDER,
            MockAggregator.TOSTRING_REMOVER,
            Materialized.<String, String, KeyValueStore<Bytes, byte[]>>as("topic1-Canonized")
                .withValueSerde(stringSerde));
    table2.toStream().process(supplier);
    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), CONFIG, Instant.ofEpochMilli(0L))) {
        final TestInputTopic<String, String> inputTopic = driver.createInputTopic(topic1, new StringSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
        inputTopic.pipeInput("A", "1", 10L);
        inputTopic.pipeInput("A", (String) null, 15L);
        inputTopic.pipeInput("A", "1", 12L);
        inputTopic.pipeInput("B", "2", 20L);
        inputTopic.pipeInput("null", "3", 25L);
        inputTopic.pipeInput("B", "4", 23L);
        inputTopic.pipeInput("NULL", "5", 24L);
        inputTopic.pipeInput("B", "7", 22L);
        assertEquals(
            asList(
                new KeyValueTimestamp<>("1", "0+1", 10),
                new KeyValueTimestamp<>("1", "0+1-1", 15),
                new KeyValueTimestamp<>("1", "0+1-1+1", 15),
                new KeyValueTimestamp<>("2", "0+2", 20),
                new KeyValueTimestamp<>("2", "0+2-2", 23),
                new KeyValueTimestamp<>("4", "0+4", 23),
                new KeyValueTimestamp<>("4", "0+4-4", 23),
                new KeyValueTimestamp<>("7", "0+7", 22)),
            supplier.theCapturedProcessor().processed());
    }
}
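
Note that records for which the groupBy mapper returns null, or a KeyValue with a null key, are dropped before the repartition, which is why the "null" and "NULL" inputs contribute nothing to the expected output. The test also leans on fixtures defined elsewhere in KTableAggregateTest; a hedged sketch of plausible definitions (the names match the test, the exact values are assumptions):

private final Serde<String> stringSerde = Serdes.String();
private final Consumed<String, String> consumed = Consumed.with(stringSerde, stringSerde);
private final Grouped<String, String> stringSerialized = Grouped.with(stringSerde, stringSerde);
// Captures everything the topology emits so the assertion can inspect it.
private final MockApiProcessorSupplier<String, String, Void, Void> supplier =
    new MockApiProcessorSupplier<>();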

Example 28 with KTable

Use of org.apache.kafka.streams.kstream.KTable in project kafka by apache.

The class KTableSourceTest, method kTableShouldLogOnOutOfOrder.

@Test
public void kTableShouldLogOnOutOfOrder() {
    final StreamsBuilder builder = new StreamsBuilder();
    final String topic = "topic";
    builder.table(topic, stringConsumed, Materialized.as("store"));
    try (final LogCaptureAppender appender = LogCaptureAppender.createAndRegister(KTableSource.class);
        final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        final TestInputTopic<String, String> inputTopic = driver.createInputTopic(topic, new StringSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
        inputTopic.pipeInput("key", "value", 10L);
        inputTopic.pipeInput("key", "value", 5L);
        assertThat(
            appender.getEvents().stream()
                .filter(e -> e.getLevel().equals("WARN"))
                .map(Event::getMessage)
                .collect(Collectors.toList()),
            hasItem("Detected out-of-order KTable update for store, old timestamp=[10] new timestamp=[5]. topic=[topic] partition=[1] offset=[0]."));
    }
}
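
Both stringConsumed and props are fields of KTableSourceTest that this excerpt omits. A minimal sketch of plausible definitions (StreamsTestUtils.getStreamsConfig is a real test helper; the String/String serde choice is an assumption):

private final Consumed<String, String> stringConsumed =
    Consumed.with(Serdes.String(), Serdes.String());
private final Properties props =
    StreamsTestUtils.getStreamsConfig(Serdes.String(), Serdes.String());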

Example 29 with KTable

Use of org.apache.kafka.streams.kstream.KTable in project kafka by apache.

The class SuppressScenarioTest, method shouldSuppressIntermediateEventsWithRecordLimit.

@Test
public void shouldSuppressIntermediateEventsWithRecordLimit() {
    final StreamsBuilder builder = new StreamsBuilder();
    final KTable<String, Long> valueCounts = builder
        .table("input",
            Consumed.with(STRING_SERDE, STRING_SERDE),
            Materialized.<String, String, KeyValueStore<Bytes, byte[]>>with(STRING_SERDE, STRING_SERDE)
                .withCachingDisabled()
                .withLoggingDisabled())
        .groupBy((k, v) -> new KeyValue<>(v, k), Grouped.with(STRING_SERDE, STRING_SERDE))
        .count(Materialized.with(STRING_SERDE, Serdes.Long()));
    valueCounts
        .suppress(untilTimeLimit(ofMillis(Long.MAX_VALUE), maxRecords(1L).emitEarlyWhenFull()))
        .toStream()
        .to("output-suppressed", Produced.with(STRING_SERDE, Serdes.Long()));
    valueCounts.toStream().to("output-raw", Produced.with(STRING_SERDE, Serdes.Long()));
    final Topology topology = builder.build();
    System.out.println(topology.describe());
    try (final TopologyTestDriver driver = new TopologyTestDriver(topology, config)) {
        final TestInputTopic<String, String> inputTopic = driver.createInputTopic("input", STRING_SERIALIZER, STRING_SERIALIZER);
        inputTopic.pipeInput("k1", "v1", 0L);
        inputTopic.pipeInput("k1", "v2", 1L);
        inputTopic.pipeInput("k2", "v1", 2L);
        verify(drainProducerRecords(driver, "output-raw", STRING_DESERIALIZER, LONG_DESERIALIZER),
            asList(
                new KeyValueTimestamp<>("v1", 1L, 0L),
                new KeyValueTimestamp<>("v1", 0L, 1L),
                new KeyValueTimestamp<>("v2", 1L, 1L),
                new KeyValueTimestamp<>("v1", 1L, 2L)));
        // Consecutive updates to v1 get suppressed into only the latter.
        verify(drainProducerRecords(driver, "output-suppressed", STRING_DESERIALIZER, LONG_DESERIALIZER),
            asList(
                new KeyValueTimestamp<>("v1", 0L, 1L),
                new KeyValueTimestamp<>("v2", 1L, 1L)));
        inputTopic.pipeInput("x", "x", 3L);
        verify(drainProducerRecords(driver, "output-raw", STRING_DESERIALIZER, LONG_DESERIALIZER),
            singletonList(new KeyValueTimestamp<>("x", 1L, 3L)));
        // Now we see that last update to v1, but we won't see the update to x until it gets evicted.
        verify(drainProducerRecords(driver, "output-suppressed", STRING_DESERIALIZER, LONG_DESERIALIZER),
            singletonList(new KeyValueTimestamp<>("v1", 1L, 2L)));
    }
}
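
The serde constants and the verify/drainProducerRecords helpers are class-level members of SuppressScenarioTest. A hedged sketch consistent with the calls above (TestOutputTopic.readRecordsToList is the real driver API; the helper signatures are assumptions):

private static final Serde<String> STRING_SERDE = Serdes.String();
private static final StringSerializer STRING_SERIALIZER = new StringSerializer();
private static final StringDeserializer STRING_DESERIALIZER = new StringDeserializer();
private static final LongDeserializer LONG_DESERIALIZER = new LongDeserializer();

// Drain everything the topology has produced to the given topic so far.
private static <K, V> List<TestRecord<K, V>> drainProducerRecords(final TopologyTestDriver driver,
                                                                  final String topic,
                                                                  final Deserializer<K> keyDeserializer,
                                                                  final Deserializer<V> valueDeserializer) {
    return driver.createOutputTopic(topic, keyDeserializer, valueDeserializer).readRecordsToList();
}

// Compare drained records to the expected key/value/timestamp triples, in order.
private static <K, V> void verify(final List<TestRecord<K, V>> results,
                                  final List<KeyValueTimestamp<K, V>> expected) {
    assertThat(results.size(), is(expected.size()));
    for (int i = 0; i < results.size(); i++) {
        assertThat(results.get(i).key(), is(expected.get(i).key()));
        assertThat(results.get(i).value(), is(expected.get(i).value()));
        assertThat(results.get(i).timestamp(), is(expected.get(i).timestamp()));
    }
}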

Example 30 with KTable

Use of org.apache.kafka.streams.kstream.KTable in project kafka by apache.

The class SuppressScenarioTest, method shouldSuppressIntermediateEventsWithBytesLimit.

@Test
public void shouldSuppressIntermediateEventsWithBytesLimit() {
    final StreamsBuilder builder = new StreamsBuilder();
    final KTable<String, Long> valueCounts = builder
        .table("input",
            Consumed.with(STRING_SERDE, STRING_SERDE),
            Materialized.<String, String, KeyValueStore<Bytes, byte[]>>with(STRING_SERDE, STRING_SERDE)
                .withCachingDisabled()
                .withLoggingDisabled())
        .groupBy((k, v) -> new KeyValue<>(v, k), Grouped.with(STRING_SERDE, STRING_SERDE))
        .count();
    valueCounts
        .suppress(untilTimeLimit(ofMillis(Long.MAX_VALUE), maxBytes(200L).emitEarlyWhenFull()))
        .toStream()
        .to("output-suppressed", Produced.with(STRING_SERDE, Serdes.Long()));
    valueCounts.toStream().to("output-raw", Produced.with(STRING_SERDE, Serdes.Long()));
    final Topology topology = builder.build();
    System.out.println(topology.describe());
    try (final TopologyTestDriver driver = new TopologyTestDriver(topology, config)) {
        final TestInputTopic<String, String> inputTopic = driver.createInputTopic("input", STRING_SERIALIZER, STRING_SERIALIZER);
        inputTopic.pipeInput("k1", "v1", 0L);
        inputTopic.pipeInput("k1", "v2", 1L);
        inputTopic.pipeInput("k2", "v1", 2L);
        verify(drainProducerRecords(driver, "output-raw", STRING_DESERIALIZER, LONG_DESERIALIZER),
            asList(
                new KeyValueTimestamp<>("v1", 1L, 0L),
                new KeyValueTimestamp<>("v1", 0L, 1L),
                new KeyValueTimestamp<>("v2", 1L, 1L),
                new KeyValueTimestamp<>("v1", 1L, 2L)));
        // Consecutive updates to v1 get suppressed into only the latter.
        verify(drainProducerRecords(driver, "output-suppressed", STRING_DESERIALIZER, LONG_DESERIALIZER),
            asList(
                new KeyValueTimestamp<>("v1", 0L, 1L),
                new KeyValueTimestamp<>("v2", 1L, 1L)));
        inputTopic.pipeInput("x", "x", 3L);
        verify(drainProducerRecords(driver, "output-raw", STRING_DESERIALIZER, LONG_DESERIALIZER),
            singletonList(new KeyValueTimestamp<>("x", 1L, 3L)));
        // Now we see that last update to v1, but we won't see the update to x until it gets evicted.
        verify(drainProducerRecords(driver, "output-suppressed", STRING_DESERIALIZER, LONG_DESERIALIZER),
            singletonList(new KeyValueTimestamp<>("v1", 1L, 2L)));
    }
}
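
This scenario differs from Example 29 only in the buffer bound: maxBytes(200L) rather than maxRecords(1L). For orientation, the three Suppressed shapes these tests exercise, as a sketch (static imports from org.apache.kafka.streams.kstream.Suppressed and Suppressed.BufferConfig assumed):

// Emit early once the buffer holds more than one record (Example 29).
final Suppressed<Object> byCount =
    untilTimeLimit(ofMillis(Long.MAX_VALUE), maxRecords(1L).emitEarlyWhenFull());
// Emit early once the buffer exceeds 200 bytes (this test).
final Suppressed<Object> byBytes =
    untilTimeLimit(ofMillis(Long.MAX_VALUE), maxBytes(200L).emitEarlyWhenFull());
// Emit only final results per window; valid only on windowed KTables.
final Suppressed<Windowed> finalResults = untilWindowCloses(unbounded());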

Aggregations

KTable (org.apache.kafka.streams.kstream.KTable): 56
Serdes (org.apache.kafka.common.serialization.Serdes): 51
Properties (java.util.Properties): 50
StreamsBuilder (org.apache.kafka.streams.StreamsBuilder): 49
KeyValue (org.apache.kafka.streams.KeyValue): 41
StreamsConfig (org.apache.kafka.streams.StreamsConfig): 41
Test (org.junit.Test): 40
KStream (org.apache.kafka.streams.kstream.KStream): 39
Materialized (org.apache.kafka.streams.kstream.Materialized): 35
StringSerializer (org.apache.kafka.common.serialization.StringSerializer): 32
Consumed (org.apache.kafka.streams.kstream.Consumed): 32
Produced (org.apache.kafka.streams.kstream.Produced): 28
Bytes (org.apache.kafka.common.utils.Bytes): 27
KeyValueStore (org.apache.kafka.streams.state.KeyValueStore): 27
MatcherAssert.assertThat (org.hamcrest.MatcherAssert.assertThat): 27
Grouped (org.apache.kafka.streams.kstream.Grouped): 25
List (java.util.List): 24
Serde (org.apache.kafka.common.serialization.Serde): 24
StringDeserializer (org.apache.kafka.common.serialization.StringDeserializer): 24
KafkaStreams (org.apache.kafka.streams.KafkaStreams): 24