
Example 1 with TestRecord

Use of org.apache.kafka.streams.test.TestRecord in project kafka by apache.

From the class KStreamRepartitionTest, the method shouldInvokePartitionerWhenSet verifies that a custom StreamPartitioner passed through Repartitioned is invoked once per repartitioned record.

@Test
public void shouldInvokePartitionerWhenSet() {
    final int[] expectedKeys = new int[] { 0, 1 };
    final StreamPartitioner<Integer, String> streamPartitionerMock = EasyMock.mock(StreamPartitioner.class);
    expect(streamPartitionerMock.partition(anyString(), eq(0), eq("X0"), anyInt())).andReturn(1).times(1);
    expect(streamPartitionerMock.partition(anyString(), eq(1), eq("X1"), anyInt())).andReturn(1).times(1);
    replay(streamPartitionerMock);
    final String repartitionOperationName = "test";
    final Repartitioned<Integer, String> repartitioned = Repartitioned.streamPartitioner(streamPartitionerMock).withName(repartitionOperationName);
    builder.<Integer, String>stream(inputTopic).repartition(repartitioned);
    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        final TestInputTopic<Integer, String> testInputTopic = driver.createInputTopic(inputTopic, new IntegerSerializer(), new StringSerializer());
        final String topicName = repartitionOutputTopic(props, repartitionOperationName);
        final TestOutputTopic<Integer, String> testOutputTopic = driver.createOutputTopic(topicName, new IntegerDeserializer(), new StringDeserializer());
        for (int i = 0; i < 2; i++) {
            testInputTopic.pipeInput(expectedKeys[i], "X" + expectedKeys[i], i + 10);
        }
        assertThat(testOutputTopic.readRecord(), equalTo(new TestRecord<>(0, "X0", Instant.ofEpochMilli(10))));
        assertThat(testOutputTopic.readRecord(), equalTo(new TestRecord<>(1, "X1", Instant.ofEpochMilli(11))));
        assertTrue(testOutputTopic.readRecordsToList().isEmpty());
    }
    verify(streamPartitionerMock);
}
Also used : IntegerDeserializer(org.apache.kafka.common.serialization.IntegerDeserializer) StringDeserializer(org.apache.kafka.common.serialization.StringDeserializer) TopologyTestDriver(org.apache.kafka.streams.TopologyTestDriver) EasyMock.anyString(org.easymock.EasyMock.anyString) IntegerSerializer(org.apache.kafka.common.serialization.IntegerSerializer) StringSerializer(org.apache.kafka.common.serialization.StringSerializer) TestRecord(org.apache.kafka.streams.test.TestRecord) Test(org.junit.Test)
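
The pattern these examples share is a round trip: pipe records into a TestInputTopic, read TestRecord instances back from a TestOutputTopic, and compare them by value. A minimal, self-contained sketch of that round trip, assuming a trivial pass-through topology (the topic names, application id, and bootstrap address here are illustrative only):

import java.time.Instant;
import java.util.Properties;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.TestInputTopic;
import org.apache.kafka.streams.TestOutputTopic;
import org.apache.kafka.streams.TopologyTestDriver;
import org.apache.kafka.streams.test.TestRecord;

public class TestRecordRoundTripSketch {
    public static void main(final String[] args) {
        // Pass-through topology: whatever goes into "in" comes out of "out".
        final StreamsBuilder builder = new StreamsBuilder();
        builder.<String, String>stream("in").to("out");

        final Properties props = new Properties();
        props.put(StreamsConfig.APPLICATION_ID_CONFIG, "test-record-sketch");
        props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "dummy:1234");
        props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
        props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass());

        try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
            final TestInputTopic<String, String> in =
                driver.createInputTopic("in", Serdes.String().serializer(), Serdes.String().serializer());
            final TestOutputTopic<String, String> out =
                driver.createOutputTopic("out", Serdes.String().deserializer(), Serdes.String().deserializer());

            in.pipeInput("k", "v", 42L); // key, value, timestamp in epoch millis

            // TestRecord equality covers key, value, headers and timestamp,
            // so a freshly constructed record works as an expected value.
            final TestRecord<String, String> expected =
                new TestRecord<>("k", "v", Instant.ofEpochMilli(42L));
            if (!expected.equals(out.readRecord())) {
                throw new AssertionError("round trip failed");
            }
        }
    }
}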

Example 2 with TestRecord

Use of org.apache.kafka.streams.test.TestRecord in project kafka by apache.

From the class KStreamImplTest, the method shouldSupportKeyChangeKTableFromKStream verifies that a KStream whose keys are changed by map() can still be converted to a KTable; the key change forces an internal repartition topic, visible in the topology description, and the output records keep their input timestamps.

@Test
public void shouldSupportKeyChangeKTableFromKStream() {
    final Consumed<String, String> consumed = Consumed.with(Serdes.String(), Serdes.String());
    final StreamsBuilder builder = new StreamsBuilder();
    final String input = "input";
    final String output = "output";
    builder.stream(input, consumed)
        .map((key, value) -> new KeyValue<>(key.charAt(0) - 'A', value))
        .toTable(Materialized.with(Serdes.Integer(), null))
        .toStream()
        .to(output);
    final Topology topology = builder.build();
    final String topologyDescription = topology.describe().toString();
    assertThat(topologyDescription, equalTo(
        "Topologies:\n" +
            "   Sub-topology: 0\n" +
            "    Source: KSTREAM-SOURCE-0000000000 (topics: [input])\n" +
            "      --> KSTREAM-MAP-0000000001\n" +
            "    Processor: KSTREAM-MAP-0000000001 (stores: [])\n" +
            "      --> KSTREAM-FILTER-0000000005\n" +
            "      <-- KSTREAM-SOURCE-0000000000\n" +
            "    Processor: KSTREAM-FILTER-0000000005 (stores: [])\n" +
            "      --> KSTREAM-SINK-0000000004\n" +
            "      <-- KSTREAM-MAP-0000000001\n" +
            "    Sink: KSTREAM-SINK-0000000004 (topic: KSTREAM-TOTABLE-0000000002-repartition)\n" +
            "      <-- KSTREAM-FILTER-0000000005\n" +
            "\n" +
            "  Sub-topology: 1\n" +
            "    Source: KSTREAM-SOURCE-0000000006 (topics: [KSTREAM-TOTABLE-0000000002-repartition])\n" +
            "      --> KSTREAM-TOTABLE-0000000002\n" +
            "    Processor: KSTREAM-TOTABLE-0000000002 (stores: [])\n" +
            "      --> KTABLE-TOSTREAM-0000000007\n" +
            "      <-- KSTREAM-SOURCE-0000000006\n" +
            "    Processor: KTABLE-TOSTREAM-0000000007 (stores: [])\n" +
            "      --> KSTREAM-SINK-0000000008\n" +
            "      <-- KSTREAM-TOTABLE-0000000002\n" +
            "    Sink: KSTREAM-SINK-0000000008 (topic: output)\n" +
            "      <-- KTABLE-TOSTREAM-0000000007\n\n"));
    try (final TopologyTestDriver driver = new TopologyTestDriver(topology, props)) {
        final TestInputTopic<String, String> inputTopic = driver.createInputTopic(input, Serdes.String().serializer(), Serdes.String().serializer());
        final TestOutputTopic<Integer, String> outputTopic = driver.createOutputTopic(output, Serdes.Integer().deserializer(), Serdes.String().deserializer());
        inputTopic.pipeInput("A", "01", 5L);
        inputTopic.pipeInput("B", "02", 100L);
        inputTopic.pipeInput("C", "03", 0L);
        inputTopic.pipeInput("D", "04", 0L);
        inputTopic.pipeInput("A", "05", 10L);
        inputTopic.pipeInput("A", "06", 8L);
        final List<TestRecord<Integer, String>> outputExpectRecords = new ArrayList<>();
        outputExpectRecords.add(new TestRecord<>(0, "01", Instant.ofEpochMilli(5L)));
        outputExpectRecords.add(new TestRecord<>(1, "02", Instant.ofEpochMilli(100L)));
        outputExpectRecords.add(new TestRecord<>(2, "03", Instant.ofEpochMilli(0L)));
        outputExpectRecords.add(new TestRecord<>(3, "04", Instant.ofEpochMilli(0L)));
        outputExpectRecords.add(new TestRecord<>(0, "05", Instant.ofEpochMilli(10L)));
        outputExpectRecords.add(new TestRecord<>(0, "06", Instant.ofEpochMilli(8L)));
        assertEquals(outputExpectRecords, outputTopic.readRecordsToList());
    }
}
Also used : CoreMatchers.is(org.hamcrest.CoreMatchers.is) Arrays(java.util.Arrays) ValueTransformerSupplier(org.apache.kafka.streams.kstream.ValueTransformerSupplier) Produced(org.apache.kafka.streams.kstream.Produced) IsInstanceOf.instanceOf(org.hamcrest.core.IsInstanceOf.instanceOf) Stores(org.apache.kafka.streams.state.Stores) Repartitioned(org.apache.kafka.streams.kstream.Repartitioned) MockProcessorSupplier(org.apache.kafka.test.MockProcessorSupplier) Joined(org.apache.kafka.streams.kstream.Joined) MockApiProcessor(org.apache.kafka.test.MockApiProcessor) Matcher(java.util.regex.Matcher) Utils.mkMap(org.apache.kafka.common.utils.Utils.mkMap) StringDeserializer(org.apache.kafka.common.serialization.StringDeserializer) ProcessorSupplier(org.apache.kafka.streams.processor.api.ProcessorSupplier) MockApiProcessorSupplier(org.apache.kafka.test.MockApiProcessorSupplier) TransformerSupplier(org.apache.kafka.streams.kstream.TransformerSupplier) Serde(org.apache.kafka.common.serialization.Serde) Arrays.asList(java.util.Arrays.asList) TopologyWrapper(org.apache.kafka.streams.TopologyWrapper) KeyValueStore(org.apache.kafka.streams.state.KeyValueStore) Duration(java.time.Duration) Map(java.util.Map) SourceNode(org.apache.kafka.streams.processor.internals.SourceNode) Serdes(org.apache.kafka.common.serialization.Serdes) StringSerializer(org.apache.kafka.common.serialization.StringSerializer) TestRecord(org.apache.kafka.streams.test.TestRecord) ValueMapperWithKey(org.apache.kafka.streams.kstream.ValueMapperWithKey) MockValueJoiner(org.apache.kafka.test.MockValueJoiner) MockMapper(org.apache.kafka.test.MockMapper) CoreMatchers.containsString(org.hamcrest.CoreMatchers.containsString) TopicNameExtractor(org.apache.kafka.streams.processor.TopicNameExtractor) KeyValue(org.apache.kafka.streams.KeyValue) Instant(java.time.Instant) Bytes(org.apache.kafka.common.utils.Bytes) ProcessorContext(org.apache.kafka.streams.processor.ProcessorContext) List(java.util.List) Predicate(org.apache.kafka.streams.kstream.Predicate) Utils.mkEntry(org.apache.kafka.common.utils.Utils.mkEntry) ValueJoiner(org.apache.kafka.streams.kstream.ValueJoiner) Materialized(org.apache.kafka.streams.kstream.Materialized) Pattern(java.util.regex.Pattern) ProcessorTopology(org.apache.kafka.streams.processor.internals.ProcessorTopology) Duration.ofMillis(java.time.Duration.ofMillis) Topology(org.apache.kafka.streams.Topology) CoreMatchers.equalTo(org.hamcrest.CoreMatchers.equalTo) Assert.assertThrows(org.junit.Assert.assertThrows) HashMap(java.util.HashMap) KStream(org.apache.kafka.streams.kstream.KStream) Function(java.util.function.Function) StreamJoined(org.apache.kafka.streams.kstream.StreamJoined) ArrayList(java.util.ArrayList) ValueJoinerWithKey(org.apache.kafka.streams.kstream.ValueJoinerWithKey) JoinWindows(org.apache.kafka.streams.kstream.JoinWindows) Named(org.apache.kafka.streams.kstream.Named) ValueTransformer(org.apache.kafka.streams.kstream.ValueTransformer) MatcherAssert.assertThat(org.hamcrest.MatcherAssert.assertThat) Before(org.junit.Before) TopologyTestDriver(org.apache.kafka.streams.TopologyTestDriver) ValueMapper(org.apache.kafka.streams.kstream.ValueMapper) StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) Collections.emptyMap(java.util.Collections.emptyMap) KTable(org.apache.kafka.streams.kstream.KTable) KeyValueMapper(org.apache.kafka.streams.kstream.KeyValueMapper) TestOutputTopic(org.apache.kafka.streams.TestOutputTopic) Properties(java.util.Properties) Consumed(org.apache.kafka.streams.kstream.Consumed) 
Transformer(org.apache.kafka.streams.kstream.Transformer) Assert.assertTrue(org.junit.Assert.assertTrue) Test(org.junit.Test) ValueTransformerWithKeySupplier(org.apache.kafka.streams.kstream.ValueTransformerWithKeySupplier) KeyValueTimestamp(org.apache.kafka.streams.KeyValueTimestamp) Grouped(org.apache.kafka.streams.kstream.Grouped) TimeUnit(java.util.concurrent.TimeUnit) FailOnInvalidTimestamp(org.apache.kafka.streams.processor.FailOnInvalidTimestamp) Assert.assertNull(org.junit.Assert.assertNull) GlobalKTable(org.apache.kafka.streams.kstream.GlobalKTable) IsNull.notNullValue(org.hamcrest.core.IsNull.notNullValue) ValueTransformerWithKey(org.apache.kafka.streams.kstream.ValueTransformerWithKey) TestInputTopic(org.apache.kafka.streams.TestInputTopic) StreamsTestUtils(org.apache.kafka.test.StreamsTestUtils) Collections(java.util.Collections) Assert.assertEquals(org.junit.Assert.assertEquals) KeyValue(org.apache.kafka.streams.KeyValue) ArrayList(java.util.ArrayList) TopologyTestDriver(org.apache.kafka.streams.TopologyTestDriver) CoreMatchers.containsString(org.hamcrest.CoreMatchers.containsString) ProcessorTopology(org.apache.kafka.streams.processor.internals.ProcessorTopology) Topology(org.apache.kafka.streams.Topology) StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) TestRecord(org.apache.kafka.streams.test.TestRecord) Test(org.junit.Test)
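
When timestamps do not matter for an assertion, the same output can be read as plain KeyValue pairs instead: KeyValue equality covers only key and value. A sketch of that variant, reusing the driver and outputTopic of the test above (reading consumes the queued records, so this replaces the TestRecord comparison rather than following it):

        // Timestamp-insensitive variant: compare keys and values only.
        final List<KeyValue<Integer, String>> keyValues = outputTopic.readKeyValuesToList();
        assertEquals(
            asList(
                new KeyValue<>(0, "01"),
                new KeyValue<>(1, "02"),
                new KeyValue<>(2, "03"),
                new KeyValue<>(3, "04"),
                new KeyValue<>(0, "05"),
                new KeyValue<>(0, "06")),
            keyValues);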

Example 3 with TestRecord

Use of org.apache.kafka.streams.test.TestRecord in project kafka by apache.

From the class KStreamImplTest, the method shouldSupportGroupByCountWithKStreamToKTable verifies groupBy().count() on a KTable derived from a KStream, asserting both the topology description and the full change stream of counts.

@Test
public void shouldSupportGroupByCountWithKStreamToKTable() {
    final Consumed<String, String> consumed = Consumed.with(Serdes.String(), Serdes.String());
    final StreamsBuilder builder = new StreamsBuilder();
    final String input = "input";
    final String output = "output";
    builder.stream(input, consumed)
        .toTable()
        .groupBy(MockMapper.selectValueKeyValueMapper(), Grouped.with(Serdes.String(), Serdes.String()))
        .count()
        .toStream()
        .to(output);
    final Topology topology = builder.build(props);
    final String topologyDescription = topology.describe().toString();
    assertThat(topologyDescription, equalTo(
        "Topologies:\n" +
            "   Sub-topology: 0\n" +
            "    Source: KSTREAM-SOURCE-0000000000 (topics: [input])\n" +
            "      --> KSTREAM-TOTABLE-0000000001\n" +
            "    Processor: KSTREAM-TOTABLE-0000000001 (stores: [KSTREAM-TOTABLE-STATE-STORE-0000000002])\n" +
            "      --> KTABLE-SELECT-0000000003\n" +
            "      <-- KSTREAM-SOURCE-0000000000\n" +
            "    Processor: KTABLE-SELECT-0000000003 (stores: [])\n" +
            "      --> KSTREAM-SINK-0000000005\n" +
            "      <-- KSTREAM-TOTABLE-0000000001\n" +
            "    Sink: KSTREAM-SINK-0000000005 (topic: KTABLE-AGGREGATE-STATE-STORE-0000000004-repartition)\n" +
            "      <-- KTABLE-SELECT-0000000003\n" +
            "\n" +
            "  Sub-topology: 1\n" +
            "    Source: KSTREAM-SOURCE-0000000006 (topics: [KTABLE-AGGREGATE-STATE-STORE-0000000004-repartition])\n" +
            "      --> KTABLE-AGGREGATE-0000000007\n" +
            "    Processor: KTABLE-AGGREGATE-0000000007 (stores: [KTABLE-AGGREGATE-STATE-STORE-0000000004])\n" +
            "      --> KTABLE-TOSTREAM-0000000008\n" +
            "      <-- KSTREAM-SOURCE-0000000006\n" +
            "    Processor: KTABLE-TOSTREAM-0000000008 (stores: [])\n" +
            "      --> KSTREAM-SINK-0000000009\n" +
            "      <-- KTABLE-AGGREGATE-0000000007\n" +
            "    Sink: KSTREAM-SINK-0000000009 (topic: output)\n" +
            "      <-- KTABLE-TOSTREAM-0000000008\n\n"));
    try (final TopologyTestDriver driver = new TopologyTestDriver(topology, props)) {
        final TestInputTopic<String, String> inputTopic = driver.createInputTopic(input, new StringSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
        final TestOutputTopic<String, Long> outputTopic = driver.createOutputTopic(output, Serdes.String().deserializer(), Serdes.Long().deserializer());
        inputTopic.pipeInput("A", "green", 10L);
        inputTopic.pipeInput("B", "green", 9L);
        inputTopic.pipeInput("A", "blue", 12L);
        inputTopic.pipeInput("C", "yellow", 15L);
        inputTopic.pipeInput("D", "green", 11L);
        assertEquals(
            asList(
                // ("A", "green") -> green count 1
                new TestRecord<>("green", 1L, Instant.ofEpochMilli(10)),
                // ("B", "green") -> green count 2; the aggregate keeps the max timestamp, 10
                new TestRecord<>("green", 2L, Instant.ofEpochMilli(10)),
                // ("A", "blue") first subtracts A from green...
                new TestRecord<>("green", 1L, Instant.ofEpochMilli(12)),
                // ...then adds it to blue
                new TestRecord<>("blue", 1L, Instant.ofEpochMilli(12)),
                // ("C", "yellow") -> yellow count 1
                new TestRecord<>("yellow", 1L, Instant.ofEpochMilli(15)),
                // ("D", "green") -> green count 2; the timestamp stays at the group's max, 12
                new TestRecord<>("green", 2L, Instant.ofEpochMilli(12))),
            outputTopic.readRecordsToList());
    }
}
Also used : StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) TopologyTestDriver(org.apache.kafka.streams.TopologyTestDriver) CoreMatchers.containsString(org.hamcrest.CoreMatchers.containsString) ProcessorTopology(org.apache.kafka.streams.processor.internals.ProcessorTopology) Topology(org.apache.kafka.streams.Topology) StringSerializer(org.apache.kafka.common.serialization.StringSerializer) TestRecord(org.apache.kafka.streams.test.TestRecord) Test(org.junit.Test)
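
If only the final count per key matters, TestOutputTopic.readKeyValuesToMap() collapses the change stream to the last value seen for each key. A sketch of that alternative assertion under the same setup (mkMap and mkEntry are the org.apache.kafka.common.utils.Utils helpers that appear in the import lists on this page; like any read, it consumes the records, so it replaces the list assertion above):

        // Keep only the latest count per key instead of the full change stream.
        final Map<String, Long> finalCounts = outputTopic.readKeyValuesToMap();
        assertEquals(
            mkMap(
                mkEntry("green", 2L),
                mkEntry("blue", 1L),
                mkEntry("yellow", 1L)),
            finalCounts);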

Example 4 with TestRecord

Use of org.apache.kafka.streams.test.TestRecord in project kafka by apache.

From the class KStreamImplTest, the method shouldNotMaterializedKTableFromKStream verifies that toTable() without a Materialized argument creates no state store: the topology lists no stores, and records flow straight through to the output topic.

@Test
public void shouldNotMaterializedKTableFromKStream() {
    final Consumed<String, String> consumed = Consumed.with(Serdes.String(), Serdes.String());
    final StreamsBuilder builder = new StreamsBuilder();
    final String input = "input";
    final String output = "output";
    builder.stream(input, consumed).toTable().toStream().to(output);
    final String topologyDescription = builder.build().describe().toString();
    assertThat(topologyDescription, equalTo(
        "Topologies:\n" +
            "   Sub-topology: 0\n" +
            "    Source: KSTREAM-SOURCE-0000000000 (topics: [input])\n" +
            "      --> KSTREAM-TOTABLE-0000000001\n" +
            "    Processor: KSTREAM-TOTABLE-0000000001 (stores: [])\n" +
            "      --> KTABLE-TOSTREAM-0000000003\n" +
            "      <-- KSTREAM-SOURCE-0000000000\n" +
            "    Processor: KTABLE-TOSTREAM-0000000003 (stores: [])\n" +
            "      --> KSTREAM-SINK-0000000004\n" +
            "      <-- KSTREAM-TOTABLE-0000000001\n" +
            "    Sink: KSTREAM-SINK-0000000004 (topic: output)\n" +
            "      <-- KTABLE-TOSTREAM-0000000003\n\n"));
    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        final TestInputTopic<String, String> inputTopic = driver.createInputTopic(input, Serdes.String().serializer(), Serdes.String().serializer());
        final TestOutputTopic<String, String> outputTopic = driver.createOutputTopic(output, Serdes.String().deserializer(), Serdes.String().deserializer());
        inputTopic.pipeInput("A", "01", 5L);
        inputTopic.pipeInput("B", "02", 100L);
        inputTopic.pipeInput("C", "03", 0L);
        inputTopic.pipeInput("D", "04", 0L);
        inputTopic.pipeInput("A", "05", 10L);
        inputTopic.pipeInput("A", "06", 8L);
        final List<TestRecord<String, String>> outputExpectRecords = new ArrayList<>();
        outputExpectRecords.add(new TestRecord<>("A", "01", Instant.ofEpochMilli(5L)));
        outputExpectRecords.add(new TestRecord<>("B", "02", Instant.ofEpochMilli(100L)));
        outputExpectRecords.add(new TestRecord<>("C", "03", Instant.ofEpochMilli(0L)));
        outputExpectRecords.add(new TestRecord<>("D", "04", Instant.ofEpochMilli(0L)));
        outputExpectRecords.add(new TestRecord<>("A", "05", Instant.ofEpochMilli(10L)));
        outputExpectRecords.add(new TestRecord<>("A", "06", Instant.ofEpochMilli(8L)));
        assertEquals(outputExpectRecords, outputTopic.readRecordsToList());
    }
}
Also used : StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) ArrayList(java.util.ArrayList) TopologyTestDriver(org.apache.kafka.streams.TopologyTestDriver) CoreMatchers.containsString(org.hamcrest.CoreMatchers.containsString) TestRecord(org.apache.kafka.streams.test.TestRecord) Test(org.junit.Test)
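
For contrast with shouldNotMaterializedKTableFromKStream: handing toTable() a named Materialized does create a state store, which TopologyTestDriver can query directly. A minimal sketch under the same props, with a fresh StreamsBuilder and an illustrative store name:

    final StreamsBuilder builder = new StreamsBuilder();
    final Consumed<String, String> consumed = Consumed.with(Serdes.String(), Serdes.String());
    // Materialize the table into a named store this time.
    builder.stream("input", consumed)
        .toTable(Materialized.as("input-table"))
        .toStream()
        .to("output");
    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        final TestInputTopic<String, String> inputTopic =
            driver.createInputTopic("input", Serdes.String().serializer(), Serdes.String().serializer());
        inputTopic.pipeInput("A", "01", 5L);
        inputTopic.pipeInput("A", "05", 10L);
        // The store holds the latest value per key.
        final KeyValueStore<String, String> store = driver.getKeyValueStore("input-table");
        assertEquals("05", store.get("A"));
    }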

Example 5 with TestRecord

Use of org.apache.kafka.streams.test.TestRecord in project kafka by apache.

From the class KStreamWindowAggregateTest, the method shouldLogAndMeterWhenSkippingExpiredWindow verifies that records arriving after a window's grace period has elapsed are skipped, logged, and reflected in the lateness metrics.

@Test
public void shouldLogAndMeterWhenSkippingExpiredWindow() {
    final StreamsBuilder builder = new StreamsBuilder();
    final String topic = "topic";
    final KStream<String, String> stream1 = builder.stream(topic, Consumed.with(Serdes.String(), Serdes.String()));
    stream1.groupByKey(Grouped.with(Serdes.String(), Serdes.String()))
        .windowedBy(TimeWindows.ofSizeAndGrace(ofMillis(10), ofMillis(90)).advanceBy(ofMillis(5)))
        .aggregate(
            () -> "",
            MockAggregator.toStringInstance("+"),
            Materialized.<String, String, WindowStore<Bytes, byte[]>>as("topic1-Canonicalized")
                .withValueSerde(Serdes.String())
                .withCachingDisabled()
                .withLoggingDisabled()
                .withRetention(Duration.ofMillis(100)))
        .toStream()
        .map((key, value) -> new KeyValue<>(key.toString(), value))
        .to("output");
    try (final LogCaptureAppender appender = LogCaptureAppender.createAndRegister(KStreamWindowAggregate.class);
        final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        final TestInputTopic<String, String> inputTopic = driver.createInputTopic(topic, new StringSerializer(), new StringSerializer());
        inputTopic.pipeInput("k", "100", 100L);
        inputTopic.pipeInput("k", "0", 0L);
        inputTopic.pipeInput("k", "1", 1L);
        inputTopic.pipeInput("k", "2", 2L);
        inputTopic.pipeInput("k", "3", 3L);
        inputTopic.pipeInput("k", "4", 4L);
        inputTopic.pipeInput("k", "5", 5L);
        inputTopic.pipeInput("k", "6", 6L);
        assertLatenessMetrics(
            driver,
            // dropped events: how many window updates get dropped
            is(7.0),
            // max lateness: k:0 is 100ms late, since its time is 0 but it arrives at stream time 100
            is(100.0),
            // avg lateness: (0 + 100 + 99 + 98 + 97 + 96 + 95 + 94) / 8
            is(84.875));
        assertThat(appender.getMessages(), hasItems(
            "Skipping record for expired window. topic=[topic] partition=[0] offset=[1] timestamp=[0] window=[0,10) expiration=[10] streamTime=[100]",
            "Skipping record for expired window. topic=[topic] partition=[0] offset=[2] timestamp=[1] window=[0,10) expiration=[10] streamTime=[100]",
            "Skipping record for expired window. topic=[topic] partition=[0] offset=[3] timestamp=[2] window=[0,10) expiration=[10] streamTime=[100]",
            "Skipping record for expired window. topic=[topic] partition=[0] offset=[4] timestamp=[3] window=[0,10) expiration=[10] streamTime=[100]",
            "Skipping record for expired window. topic=[topic] partition=[0] offset=[5] timestamp=[4] window=[0,10) expiration=[10] streamTime=[100]",
            "Skipping record for expired window. topic=[topic] partition=[0] offset=[6] timestamp=[5] window=[0,10) expiration=[10] streamTime=[100]",
            "Skipping record for expired window. topic=[topic] partition=[0] offset=[7] timestamp=[6] window=[0,10) expiration=[10] streamTime=[100]"));
        final TestOutputTopic<String, String> outputTopic = driver.createOutputTopic("output", new StringDeserializer(), new StringDeserializer());
        assertThat(outputTopic.readRecord(), equalTo(new TestRecord<>("[k@95/105]", "+100", null, 100L)));
        assertThat(outputTopic.readRecord(), equalTo(new TestRecord<>("[k@100/110]", "+100", null, 100L)));
        assertThat(outputTopic.readRecord(), equalTo(new TestRecord<>("[k@5/15]", "+5", null, 5L)));
        assertThat(outputTopic.readRecord(), equalTo(new TestRecord<>("[k@5/15]", "+5+6", null, 6L)));
        assertTrue(outputTopic.isEmpty());
    }
}
Also used : CoreMatchers.is(org.hamcrest.CoreMatchers.is) CoreMatchers.hasItem(org.hamcrest.CoreMatchers.hasItem) MockInitializer(org.apache.kafka.test.MockInitializer) CoreMatchers.equalTo(org.hamcrest.CoreMatchers.equalTo) CoreMatchers.not(org.hamcrest.CoreMatchers.not) KStream(org.apache.kafka.streams.kstream.KStream) WindowStore(org.apache.kafka.streams.state.WindowStore) MockApiProcessor(org.apache.kafka.test.MockApiProcessor) Utils.mkMap(org.apache.kafka.common.utils.Utils.mkMap) StringDeserializer(org.apache.kafka.common.serialization.StringDeserializer) MockApiProcessorSupplier(org.apache.kafka.test.MockApiProcessorSupplier) Windowed(org.apache.kafka.streams.kstream.Windowed) Arrays.asList(java.util.Arrays.asList) Duration(java.time.Duration) MetricName(org.apache.kafka.common.MetricName) Serdes(org.apache.kafka.common.serialization.Serdes) StringSerializer(org.apache.kafka.common.serialization.StringSerializer) MatcherAssert.assertThat(org.hamcrest.MatcherAssert.assertThat) TestRecord(org.apache.kafka.streams.test.TestRecord) TopologyTestDriver(org.apache.kafka.streams.TopologyTestDriver) StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) KTable(org.apache.kafka.streams.kstream.KTable) TestOutputTopic(org.apache.kafka.streams.TestOutputTopic) Properties(java.util.Properties) Consumed(org.apache.kafka.streams.kstream.Consumed) CoreMatchers.hasItems(org.hamcrest.CoreMatchers.hasItems) KeyValue(org.apache.kafka.streams.KeyValue) Assert.assertTrue(org.junit.Assert.assertTrue) Test(org.junit.Test) KeyValueTimestamp(org.apache.kafka.streams.KeyValueTimestamp) Grouped(org.apache.kafka.streams.kstream.Grouped) MockAggregator(org.apache.kafka.test.MockAggregator) Bytes(org.apache.kafka.common.utils.Bytes) List(java.util.List) Utils.mkEntry(org.apache.kafka.common.utils.Utils.mkEntry) TimeWindows(org.apache.kafka.streams.kstream.TimeWindows) Matcher(org.hamcrest.Matcher) Materialized(org.apache.kafka.streams.kstream.Materialized) LogCaptureAppender(org.apache.kafka.streams.processor.internals.testutil.LogCaptureAppender) TestInputTopic(org.apache.kafka.streams.TestInputTopic) StreamsTestUtils(org.apache.kafka.test.StreamsTestUtils) Duration.ofMillis(java.time.Duration.ofMillis) Assert.assertEquals(org.junit.Assert.assertEquals) KeyValue(org.apache.kafka.streams.KeyValue) StringDeserializer(org.apache.kafka.common.serialization.StringDeserializer) TopologyTestDriver(org.apache.kafka.streams.TopologyTestDriver) StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) WindowStore(org.apache.kafka.streams.state.WindowStore) LogCaptureAppender(org.apache.kafka.streams.processor.internals.testutil.LogCaptureAppender) StringSerializer(org.apache.kafka.common.serialization.StringSerializer) TestRecord(org.apache.kafka.streams.test.TestRecord) Test(org.junit.Test)
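
The expiration value in those log lines follows from the grace period: stream time minus grace gives the point at or before which windows are closed. A small sketch of the arithmetic, inferred from the log format above (the closeTime name is illustrative, not the library's API):

    // The first record ("k", "100", 100L) advances stream time to 100.
    // With grace 90 ms, windows ending at or before 100 - 90 = 10 are closed,
    // matching expiration=[10] in the log messages.
    final long streamTimeMs = 100L;
    final long graceMs = 90L;
    final long closeTimeMs = streamTimeMs - graceMs; // 10
    // Window [0,10) ends at 10 <= 10: expired, so timestamps 0..6 are skipped
    // for it. Window [5,15) ends at 15 > 10: still open, which is why records
    // at timestamps 5 and 6 surface as "[k@5/15]" in the output.
    assert 10L <= closeTimeMs; // [0,10) -> expired
    assert 15L > closeTimeMs;  // [5,15) -> still open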

Aggregations

TestRecord (org.apache.kafka.streams.test.TestRecord): 18
TopologyTestDriver (org.apache.kafka.streams.TopologyTestDriver): 14
Test (org.junit.Test): 12
StringSerializer (org.apache.kafka.common.serialization.StringSerializer): 11
StringDeserializer (org.apache.kafka.common.serialization.StringDeserializer): 9
StreamsBuilder (org.apache.kafka.streams.StreamsBuilder): 9
CoreMatchers.containsString (org.hamcrest.CoreMatchers.containsString): 8
Duration (java.time.Duration): 6
LinkedList (java.util.LinkedList): 5
Bytes (org.apache.kafka.common.utils.Bytes): 5
TestInputTopic (org.apache.kafka.streams.TestInputTopic): 5
Windowed (org.apache.kafka.streams.kstream.Windowed): 5
Instant (java.time.Instant): 4
List (java.util.List): 4
KeyValue (org.apache.kafka.streams.KeyValue): 4
KeyValueTimestamp (org.apache.kafka.streams.KeyValueTimestamp): 4
Topology (org.apache.kafka.streams.Topology): 4
Duration.ofMillis (java.time.Duration.ofMillis): 3
ArrayList (java.util.ArrayList): 3
Arrays.asList (java.util.Arrays.asList): 3