
Example 81 with IntegerSerializer

use of org.apache.kafka.common.serialization.IntegerSerializer in project kafka by apache.

the class KStreamKStreamOuterJoinTest method testUpperWindowBound.

private void testUpperWindowBound(final int[] expectedKeys, final TopologyTestDriver driver, final MockApiProcessor<Integer, String, Void, Void> processor) {
    long time;
    final TestInputTopic<Integer, String> inputTopic1 = driver.createInputTopic(topic1, new IntegerSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
    final TestInputTopic<Integer, String> inputTopic2 = driver.createInputTopic(topic2, new IntegerSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
    // push four items with larger and increasing timestamps (out of window) to the other stream; this should produce two expired non-joined records
    // w1 = { 0:A0 (ts: 0), 1:A1 (ts: 0) }
    // w2 = { 0:a0 (ts: 0), 1:a1 (ts: 0), 2:a2 (ts: 0), 3:a3 (ts: 0) }
    // --> w1 = { 0:A0 (ts: 0), 1:A1 (ts: 0) }
    // --> w2 = { 0:a0 (ts: 0), 1:a1 (ts: 0), 2:a2 (ts: 0), 3:a3 (ts: 0),
    // 0:b0 (ts: 1000), 1:b1 (ts: 1001), 2:b2 (ts: 1002), 3:b3 (ts: 1003) }
    time = 1000L;
    for (int i = 0; i < expectedKeys.length; i++) {
        inputTopic2.pipeInput(expectedKeys[i], "b" + expectedKeys[i], time + i);
    }
    processor.checkAndClearProcessResult(new KeyValueTimestamp<>(2, "null+a2", 0L), new KeyValueTimestamp<>(3, "null+a3", 0L));
    // push four items with larger timestamp to the primary stream; this should produce four full-join items
    // w1 = { 0:A0 (ts: 0), 1:A1 (ts: 0) }
    // w2 = { 0:a0 (ts: 0), 1:a1 (ts: 0), 2:a2 (ts: 0), 3:a3 (ts: 0),
    // 0:b0 (ts: 1000), 1:b1 (ts: 1001), 2:b2 (ts: 1002), 3:b3 (ts: 1003) }
    // --> w1 = { 0:A0 (ts: 0), 1:A1 (ts: 0),
    // 0:B0 (ts: 1100), 1:B1 (ts: 1100), 2:B2 (ts: 1100), 3:B3 (ts: 1100) }
    // --> w2 = { 0:a0 (ts: 0), 1:a1 (ts: 0), 2:a2 (ts: 0), 3:a3 (ts: 0),
    // 0:b0 (ts: 1000), 1:b1 (ts: 1001), 2:b2 (ts: 1002), 3:b3 (ts: 1003) }
    time = 1000L + 100L;
    for (final int expectedKey : expectedKeys) {
        inputTopic1.pipeInput(expectedKey, "B" + expectedKey, time);
    }
    processor.checkAndClearProcessResult(new KeyValueTimestamp<>(0, "B0+b0", 1100L), new KeyValueTimestamp<>(1, "B1+b1", 1100L), new KeyValueTimestamp<>(2, "B2+b2", 1100L), new KeyValueTimestamp<>(3, "B3+b3", 1100L));
    // push four items with increased timestamp to the primary stream; this should produce three full-join items (the non-joined item is not produced yet)
    // w1 = { 0:A0 (ts: 0), 1:A1 (ts: 0),
    // 0:B0 (ts: 1100), 1:B1 (ts: 1100), 2:B2 (ts: 1100), 3:B3 (ts: 1100) }
    // w2 = { 0:a0 (ts: 0), 1:a1 (ts: 0), 2:a2 (ts: 0), 3:a3 (ts: 0),
    // 0:b0 (ts: 1000), 1:b1 (ts: 1001), 2:b2 (ts: 1002), 3:b3 (ts: 1003) }
    // --> w1 = { 0:A0 (ts: 0), 1:A1 (ts: 0),
    // 0:B0 (ts: 1100), 1:B1 (ts: 1100), 2:B2 (ts: 1100), 3:B3 (ts: 1100),
    // 0:C0 (ts: 1101), 1:C1 (ts: 1101), 2:C2 (ts: 1101), 3:C3 (ts: 1101) }
    // --> w2 = { 0:a0 (ts: 0), 1:a1 (ts: 0), 2:a2 (ts: 0), 3:a3 (ts: 0),
    // 0:b0 (ts: 1000), 1:b1 (ts: 1001), 2:b2 (ts: 1002), 3:b3 (ts: 1003) }
    time += 1L;
    for (final int expectedKey : expectedKeys) {
        inputTopic1.pipeInput(expectedKey, "C" + expectedKey, time);
    }
    processor.checkAndClearProcessResult(new KeyValueTimestamp<>(1, "C1+b1", 1101L), new KeyValueTimestamp<>(2, "C2+b2", 1101L), new KeyValueTimestamp<>(3, "C3+b3", 1101L));
    // push four items with increased timestamp to the primary stream; this should produce two full-join items (non-joined items are not produced yet)
    // w1 = { 0:A0 (ts: 0), 1:A1 (ts: 0),
    // 0:B0 (ts: 1100), 1:B1 (ts: 1100), 2:B2 (ts: 1100), 3:B3 (ts: 1100),
    // 0:C0 (ts: 1101), 1:C1 (ts: 1101), 2:C2 (ts: 1101), 3:C3 (ts: 1101) }
    // w2 = { 0:a0 (ts: 0), 1:a1 (ts: 0), 2:a2 (ts: 0), 3:a3 (ts: 0),
    // 0:b0 (ts: 1000), 1:b1 (ts: 1001), 2:b2 (ts: 1002), 3:b3 (ts: 1003) }
    // --> w1 = { 0:A0 (ts: 0), 1:A1 (ts: 0),
    // 0:B0 (ts: 1100), 1:B1 (ts: 1100), 2:B2 (ts: 1100), 3:B3 (ts: 1100),
    // 0:C0 (ts: 1101), 1:C1 (ts: 1101), 2:C2 (ts: 1101), 3:C3 (ts: 1101),
    // 0:D0 (ts: 1102), 1:D1 (ts: 1102), 2:D2 (ts: 1102), 3:D3 (ts: 1102) }
    // --> w2 = { 0:a0 (ts: 0), 1:a1 (ts: 0), 2:a2 (ts: 0), 3:a3 (ts: 0),
    // 0:b0 (ts: 1000), 1:b1 (ts: 1001), 2:b2 (ts: 1002), 3:b3 (ts: 1003) }
    time += 1L;
    for (final int expectedKey : expectedKeys) {
        inputTopic1.pipeInput(expectedKey, "D" + expectedKey, time);
    }
    processor.checkAndClearProcessResult(new KeyValueTimestamp<>(2, "D2+b2", 1102L), new KeyValueTimestamp<>(3, "D3+b3", 1102L));
    // push four items with increased timestamp to the primary stream; this should produce one full-join item (three non-joined left-join records are not produced yet)
    // w1 = { 0:A0 (ts: 0), 1:A1 (ts: 0),
    // 0:B0 (ts: 1100), 1:B1 (ts: 1100), 2:B2 (ts: 1100), 3:B3 (ts: 1100),
    // 0:C0 (ts: 1101), 1:C1 (ts: 1101), 2:C2 (ts: 1101), 3:C3 (ts: 1101),
    // 0:D0 (ts: 1102), 1:D1 (ts: 1102), 2:D2 (ts: 1102), 3:D3 (ts: 1102) }
    // w2 = { 0:a0 (ts: 0), 1:a1 (ts: 0), 2:a2 (ts: 0), 3:a3 (ts: 0),
    // 0:b0 (ts: 1000), 1:b1 (ts: 1001), 2:b2 (ts: 1002), 3:b3 (ts: 1003) }
    // --> w1 = { 0:A0 (ts: 0), 1:A1 (ts: 0),
    // 0:B0 (ts: 1100), 1:B1 (ts: 1100), 2:B2 (ts: 1100), 3:B3 (ts: 1100),
    // 0:C0 (ts: 1101), 1:C1 (ts: 1101), 2:C2 (ts: 1101), 3:C3 (ts: 1101),
    // 0:D0 (ts: 1102), 1:D1 (ts: 1102), 2:D2 (ts: 1102), 3:D3 (ts: 1102),
    // 0:E0 (ts: 1103), 1:E1 (ts: 1103), 2:E2 (ts: 1103), 3:E3 (ts: 1103) }
    // --> w2 = { 0:a0 (ts: 0), 1:a1 (ts: 0), 2:a2 (ts: 0), 3:a3 (ts: 0),
    // 0:b0 (ts: 1000), 1:b1 (ts: 1001), 2:b2 (ts: 1002), 3:b3 (ts: 1003) }
    time += 1L;
    for (final int expectedKey : expectedKeys) {
        inputTopic1.pipeInput(expectedKey, "E" + expectedKey, time);
    }
    processor.checkAndClearProcessResult(new KeyValueTimestamp<>(3, "E3+b3", 1103L));
    // push four items with increased timestamp to the primary stream; this should produce no full-join items (four non-joined left-join records are not produced yet)
    // w1 = { 0:A0 (ts: 0), 1:A1 (ts: 0),
    // 0:B0 (ts: 1100), 1:B1 (ts: 1100), 2:B2 (ts: 1100), 3:B3 (ts: 1100),
    // 0:C0 (ts: 1101), 1:C1 (ts: 1101), 2:C2 (ts: 1101), 3:C3 (ts: 1101),
    // 0:D0 (ts: 1102), 1:D1 (ts: 1102), 2:D2 (ts: 1102), 3:D3 (ts: 1102),
    // 0:E0 (ts: 1103), 1:E1 (ts: 1103), 2:E2 (ts: 1103), 3:E3 (ts: 1103) }
    // w2 = { 0:a0 (ts: 0), 1:a1 (ts: 0), 2:a2 (ts: 0), 3:a3 (ts: 0),
    // 0:b0 (ts: 1000), 1:b1 (ts: 1001), 2:b2 (ts: 1002), 3:b3 (ts: 1003) }
    // --> w1 = { 0:A0 (ts: 0), 1:A1 (ts: 0),
    // 0:B0 (ts: 1100), 1:B1 (ts: 1100), 2:B2 (ts: 1100), 3:B3 (ts: 1100),
    // 0:C0 (ts: 1101), 1:C1 (ts: 1101), 2:C2 (ts: 1101), 3:C3 (ts: 1101),
    // 0:D0 (ts: 1102), 1:D1 (ts: 1102), 2:D2 (ts: 1102), 3:D3 (ts: 1102),
    // 0:E0 (ts: 1103), 1:E1 (ts: 1103), 2:E2 (ts: 1103), 3:E3 (ts: 1103),
    // 0:F0 (ts: 1104), 1:F1 (ts: 1104), 2:F2 (ts: 1104), 3:F3 (ts: 1104) }
    // --> w2 = { 0:a0 (ts: 0), 1:a1 (ts: 0), 2:a2 (ts: 0), 3:a3 (ts: 0),
    // 0:b0 (ts: 1000), 1:b1 (ts: 1001), 2:b2 (ts: 1002), 3:b3 (ts: 1003) }
    time += 1L;
    for (final int expectedKey : expectedKeys) {
        inputTopic1.pipeInput(expectedKey, "F" + expectedKey, time);
    }
    processor.checkAndClearProcessResult();
    // push a dummy record to advance stream time and emit all pending non-joined (left-join) records
    time += 301L;
    inputTopic1.pipeInput(0, "dummy", time);
    processor.checkAndClearProcessResult(new KeyValueTimestamp<>(0, "C0+null", 1101L), new KeyValueTimestamp<>(0, "D0+null", 1102L), new KeyValueTimestamp<>(1, "D1+null", 1102L), new KeyValueTimestamp<>(0, "E0+null", 1103L), new KeyValueTimestamp<>(1, "E1+null", 1103L), new KeyValueTimestamp<>(2, "E2+null", 1103L), new KeyValueTimestamp<>(0, "F0+null", 1104L), new KeyValueTimestamp<>(1, "F1+null", 1104L), new KeyValueTimestamp<>(2, "F2+null", 1104L), new KeyValueTimestamp<>(3, "F3+null", 1104L));
}
Also used : IntegerSerializer(org.apache.kafka.common.serialization.IntegerSerializer) StringSerializer(org.apache.kafka.common.serialization.StringSerializer)
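
Note that testUpperWindowBound only receives a ready-built TopologyTestDriver and captured processor; the calling test constructs them the same way as the later examples on this page. Below is a minimal sketch of that setup, assuming the class fields (topic1, topic2, consumed, PROPS) shown in the other examples; the 100 ms window with 100 ms grace and the {0, 1, 2, 3} key set are illustrative assumptions, not copied from the actual caller.

final StreamsBuilder builder = new StreamsBuilder();
final MockApiProcessorSupplier<Integer, String, Void, Void> supplier = new MockApiProcessorSupplier<>();
final KStream<Integer, String> left = builder.stream(topic1, consumed);
final KStream<Integer, String> right = builder.stream(topic2, consumed);
left.outerJoin(
        right,
        MockValueJoiner.TOSTRING_JOINER,
        // assumed window/grace; the real caller may configure these differently
        JoinWindows.ofTimeDifferenceAndGrace(ofMillis(100L), ofMillis(100L)),
        StreamJoined.with(Serdes.Integer(), Serdes.String(), Serdes.String()))
    .process(supplier);
try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), PROPS)) {
    // The calling test would first pipe in the records listed in the w1/w2 comments above
    // (A0/A1 and a0..a3 at ts 0) before invoking the helper.
    testUpperWindowBound(new int[] {0, 1, 2, 3}, driver, supplier.theCapturedProcessor());
}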

Example 82 with IntegerSerializer

use of org.apache.kafka.common.serialization.IntegerSerializer in project kafka by apache.

the class KStreamKStreamOuterJoinTest method testLeftExpiredNonJoinedRecordsAreEmittedByTheLeftProcessor.

@Test
public void testLeftExpiredNonJoinedRecordsAreEmittedByTheLeftProcessor() {
    final StreamsBuilder builder = new StreamsBuilder();
    final KStream<Integer, String> stream1;
    final KStream<Integer, String> stream2;
    final KStream<Integer, String> joined;
    final MockApiProcessorSupplier<Integer, String, Void, Void> supplier = new MockApiProcessorSupplier<>();
    stream1 = builder.stream(topic1, consumed);
    stream2 = builder.stream(topic2, consumed);
    joined = stream1.outerJoin(stream2, MockValueJoiner.TOSTRING_JOINER, JoinWindows.ofTimeDifferenceWithNoGrace(ofMillis(100L)), StreamJoined.with(Serdes.Integer(), Serdes.String(), Serdes.String()));
    joined.process(supplier);
    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), PROPS)) {
        final TestInputTopic<Integer, String> inputTopic1 = driver.createInputTopic(topic1, new IntegerSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
        final TestInputTopic<Integer, String> inputTopic2 = driver.createInputTopic(topic2, new IntegerSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
        final MockApiProcessor<Integer, String, Void, Void> processor = supplier.theCapturedProcessor();
        final long windowStart = 0L;
        // No joins detected; No null-joins emitted
        inputTopic1.pipeInput(0, "A0", windowStart + 1L);
        inputTopic1.pipeInput(1, "A1", windowStart + 2L);
        inputTopic1.pipeInput(0, "A0-0", windowStart + 3L);
        processor.checkAndClearProcessResult();
        // Join detected; No null-joins emitted
        inputTopic2.pipeInput(1, "a1", windowStart + 3L);
        processor.checkAndClearProcessResult(new KeyValueTimestamp<>(1, "A1+a1", windowStart + 3L));
        // Dummy record in left topic will emit expired non-joined records from the left topic
        inputTopic1.pipeInput(2, "dummy", windowStart + 401L);
        processor.checkAndClearProcessResult(new KeyValueTimestamp<>(0, "A0+null", windowStart + 1L), new KeyValueTimestamp<>(0, "A0-0+null", windowStart + 3L));
        // Flush internal non-joined state store by joining the dummy record
        inputTopic2.pipeInput(2, "dummy", windowStart + 401L);
        processor.checkAndClearProcessResult(new KeyValueTimestamp<>(2, "dummy+dummy", windowStart + 401L));
    }
}
Also used : MockApiProcessorSupplier(org.apache.kafka.test.MockApiProcessorSupplier) TopologyTestDriver(org.apache.kafka.streams.TopologyTestDriver) IntegerSerializer(org.apache.kafka.common.serialization.IntegerSerializer) StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) StringSerializer(org.apache.kafka.common.serialization.StringSerializer) Test(org.junit.Test)
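
All of these examples pass IntegerSerializer straight into TestInputTopic, so its output never shows up directly. For reference, it encodes an Integer as a 4-byte big-endian array and maps null to null; a small standalone check (the topic name passed to serialize() is arbitrary and ignored by this serializer):

import java.util.Arrays;
import org.apache.kafka.common.serialization.IntegerSerializer;

public class IntegerSerializerDemo {
    public static void main(final String[] args) {
        try (final IntegerSerializer serializer = new IntegerSerializer()) {
            // 42 is encoded big-endian as the four bytes 0x00 0x00 0x00 0x2A,
            // the same key bytes TestInputTopic writes for pipeInput(42, ...).
            System.out.println(Arrays.toString(serializer.serialize("any-topic", 42))); // [0, 0, 0, 42]
            System.out.println(serializer.serialize("any-topic", null)); // null
        }
    }
}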

Example 83 with IntegerSerializer

use of org.apache.kafka.common.serialization.IntegerSerializer in project kafka by apache.

the class KStreamKStreamOuterJoinTest method testOrdering.

@Test
public void testOrdering() {
    final StreamsBuilder builder = new StreamsBuilder();
    final KStream<Integer, String> stream1;
    final KStream<Integer, String> stream2;
    final KStream<Integer, String> joined;
    final MockApiProcessorSupplier<Integer, String, Void, Void> supplier = new MockApiProcessorSupplier<>();
    stream1 = builder.stream(topic1, consumed);
    stream2 = builder.stream(topic2, consumed);
    joined = stream1.outerJoin(stream2, MockValueJoiner.TOSTRING_JOINER, JoinWindows.ofTimeDifferenceWithNoGrace(ofMillis(100)), StreamJoined.with(Serdes.Integer(), Serdes.String(), Serdes.String()));
    joined.process(supplier);
    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), PROPS)) {
        final TestInputTopic<Integer, String> inputTopic1 = driver.createInputTopic(topic1, new IntegerSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
        final TestInputTopic<Integer, String> inputTopic2 = driver.createInputTopic(topic2, new IntegerSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
        final MockApiProcessor<Integer, String, Void, Void> processor = supplier.theCapturedProcessor();
        // push two items to the primary stream; the other window is empty; this should not produce any item yet
        // w1 = {}
        // w2 = {}
        // --> w1 = { 0:A0 (ts: 0), 1:A1 (ts: 100) }
        // --> w2 = {}
        inputTopic1.pipeInput(0, "A0", 0L);
        inputTopic1.pipeInput(1, "A1", 100L);
        processor.checkAndClearProcessResult();
        // push one item to the other stream that has a join; this should emit the expired non-joined record
        // (with its original timestamp) first, followed by the joined record
        // w1 = { 0:A0 (ts: 0), 1:A1 (ts: 100) }
        // w2 = { }
        // --> w1 = { 0:A0 (ts: 0), 1:A1 (ts: 100) }
        // --> w2 = { 1:a1 (ts: 110) }
        inputTopic2.pipeInput(1, "a1", 110L);
        processor.checkAndClearProcessResult(new KeyValueTimestamp<>(0, "A0+null", 0L), new KeyValueTimestamp<>(1, "A1+a1", 110L));
    }
}
Also used : MockApiProcessorSupplier(org.apache.kafka.test.MockApiProcessorSupplier) TopologyTestDriver(org.apache.kafka.streams.TopologyTestDriver) IntegerSerializer(org.apache.kafka.common.serialization.IntegerSerializer) StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) StringSerializer(org.apache.kafka.common.serialization.StringSerializer) Test(org.junit.Test)
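
The ordering assertion above goes through MockApiProcessor. An equivalent check could read the join result back through a TestOutputTopic with the matching deserializers; in this sketch the extra sink topic "joined-output", the Produced/TestOutputTopic/IntegerDeserializer/Arrays additions, and the reuse of the builder and joined variables from the snippet above are assumptions, not part of the original test.

joined.to("joined-output", Produced.with(Serdes.Integer(), Serdes.String()));
try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), PROPS)) {
    final TestInputTopic<Integer, String> left =
        driver.createInputTopic(topic1, new IntegerSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
    final TestInputTopic<Integer, String> right =
        driver.createInputTopic(topic2, new IntegerSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
    final TestOutputTopic<Integer, String> output =
        driver.createOutputTopic("joined-output", new IntegerDeserializer(), new StringDeserializer());

    left.pipeInput(0, "A0", 0L);
    left.pipeInput(1, "A1", 100L);
    right.pipeInput(1, "a1", 110L);

    // The expired non-joined record for key 0 is emitted before the joined record for key 1,
    // matching the processor-based assertion above.
    assertEquals(
        Arrays.asList(KeyValue.pair(0, "A0+null"), KeyValue.pair(1, "A1+a1")),
        output.readKeyValuesToList());
}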

Example 84 with IntegerSerializer

use of org.apache.kafka.common.serialization.IntegerSerializer in project kafka by apache.

the class KStreamKStreamOuterJoinTest method testOuterJoinDuplicatesWithFixDisabledOldApi.

// old join semantics; can be removed when `JoinWindows.of()` is removed
@SuppressWarnings("deprecation")
@Test
public void testOuterJoinDuplicatesWithFixDisabledOldApi() {
    final StreamsBuilder builder = new StreamsBuilder();
    final KStream<Integer, String> stream1;
    final KStream<Integer, String> stream2;
    final KStream<Integer, String> joined;
    final MockApiProcessorSupplier<Integer, String, Void, Void> supplier = new MockApiProcessorSupplier<>();
    stream1 = builder.stream(topic1, consumed);
    stream2 = builder.stream(topic2, consumed);
    joined = stream1.outerJoin(stream2, MockValueJoiner.TOSTRING_JOINER, JoinWindows.of(ofMillis(100L)).grace(ofMillis(10L)), StreamJoined.with(Serdes.Integer(), Serdes.String(), Serdes.String()));
    joined.process(supplier);
    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(PROPS), PROPS)) {
        final TestInputTopic<Integer, String> inputTopic1 = driver.createInputTopic(topic1, new IntegerSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
        final TestInputTopic<Integer, String> inputTopic2 = driver.createInputTopic(topic2, new IntegerSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
        final MockApiProcessor<Integer, String, Void, Void> processor = supplier.theCapturedProcessor();
        // Only 2 window stores should be available
        assertEquals(2, driver.getAllStateStores().size());
        inputTopic1.pipeInput(0, "A0", 0L);
        inputTopic1.pipeInput(0, "A0-0", 0L);
        inputTopic2.pipeInput(0, "a0", 0L);
        inputTopic2.pipeInput(1, "b1", 0L);
        processor.checkAndClearProcessResult(new KeyValueTimestamp<>(0, "A0+null", 0L), new KeyValueTimestamp<>(0, "A0-0+null", 0L), new KeyValueTimestamp<>(0, "A0+a0", 0L), new KeyValueTimestamp<>(0, "A0-0+a0", 0L), new KeyValueTimestamp<>(1, "null+b1", 0L));
    }
}
Also used : MockApiProcessorSupplier(org.apache.kafka.test.MockApiProcessorSupplier) TopologyTestDriver(org.apache.kafka.streams.TopologyTestDriver) IntegerSerializer(org.apache.kafka.common.serialization.IntegerSerializer) StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) StringSerializer(org.apache.kafka.common.serialization.StringSerializer) Test(org.junit.Test)
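
The deprecated JoinWindows.of(...).grace(...) call above keeps the old outer-join semantics, which is why "A0+null" and "A0-0+null" are emitted eagerly even though a matching a0 arrives within the window. For comparison, here are the window definitions used across the examples on this page (a sketch only; the variable names are illustrative):

// Old API (this test): spurious left/outer results are emitted immediately.
final JoinWindows oldApi = JoinWindows.of(ofMillis(100L)).grace(ofMillis(10L));

// Current API with the same 100 ms window and 10 ms grace: non-joined results are held back
// until the window plus grace has passed (compare Example 85 below).
final JoinWindows withGrace = JoinWindows.ofTimeDifferenceAndGrace(ofMillis(100L), ofMillis(10L));

// Current API without a grace period, as used in Examples 82 and 83.
final JoinWindows noGrace = JoinWindows.ofTimeDifferenceWithNoGrace(ofMillis(100L));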

Example 85 with IntegerSerializer

use of org.apache.kafka.common.serialization.IntegerSerializer in project kafka by apache.

the class KStreamKStreamOuterJoinTest method testOuterJoinDuplicates.

@Test
public void testOuterJoinDuplicates() {
    final StreamsBuilder builder = new StreamsBuilder();
    final KStream<Integer, String> stream1;
    final KStream<Integer, String> stream2;
    final KStream<Integer, String> joined;
    final MockApiProcessorSupplier<Integer, String, Void, Void> supplier = new MockApiProcessorSupplier<>();
    stream1 = builder.stream(topic1, consumed);
    stream2 = builder.stream(topic2, consumed);
    joined = stream1.outerJoin(stream2, MockValueJoiner.TOSTRING_JOINER, JoinWindows.ofTimeDifferenceAndGrace(ofMillis(100L), ofMillis(10L)), StreamJoined.with(Serdes.Integer(), Serdes.String(), Serdes.String()));
    joined.process(supplier);
    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), PROPS)) {
        final TestInputTopic<Integer, String> inputTopic1 = driver.createInputTopic(topic1, new IntegerSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
        final TestInputTopic<Integer, String> inputTopic2 = driver.createInputTopic(topic2, new IntegerSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
        final MockApiProcessor<Integer, String, Void, Void> processor = supplier.theCapturedProcessor();
        // verifies non-joined duplicates are emitted when window has closed
        inputTopic1.pipeInput(0, "A0", 0L);
        inputTopic1.pipeInput(0, "A0-0", 0L);
        inputTopic2.pipeInput(1, "a1", 0L);
        inputTopic2.pipeInput(1, "a1-0", 0L);
        inputTopic2.pipeInput(1, "a0", 111L);
        // bump stream-time to trigger outer-join results
        inputTopic2.pipeInput(3, "dummy", 211);
        processor.checkAndClearProcessResult(new KeyValueTimestamp<>(1, "null+a1", 0L), new KeyValueTimestamp<>(1, "null+a1-0", 0L), new KeyValueTimestamp<>(0, "A0+null", 0L), new KeyValueTimestamp<>(0, "A0-0+null", 0L));
        // verifies joined duplicates are emitted
        inputTopic1.pipeInput(2, "A2", 200L);
        inputTopic1.pipeInput(2, "A2-0", 200L);
        inputTopic2.pipeInput(2, "a2", 201L);
        inputTopic2.pipeInput(2, "a2-0", 201L);
        processor.checkAndClearProcessResult(new KeyValueTimestamp<>(2, "A2+a2", 201L), new KeyValueTimestamp<>(2, "A2-0+a2", 201L), new KeyValueTimestamp<>(2, "A2+a2-0", 201L), new KeyValueTimestamp<>(2, "A2-0+a2-0", 201L));
        // this record should expire non-joined records; only null+a0 will be emitted because
        // it did not have a join
        inputTopic2.pipeInput(3, "dummy", 1500L);
        processor.checkAndClearProcessResult(new KeyValueTimestamp<>(1, "null+a0", 111L), new KeyValueTimestamp<>(3, "null+dummy", 211));
    }
}
Also used : MockApiProcessorSupplier(org.apache.kafka.test.MockApiProcessorSupplier) TopologyTestDriver(org.apache.kafka.streams.TopologyTestDriver) IntegerSerializer(org.apache.kafka.common.serialization.IntegerSerializer) StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) StringSerializer(org.apache.kafka.common.serialization.StringSerializer) Test(org.junit.Test)
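
All five examples join with MockValueJoiner.TOSTRING_JOINER, which is why every expected value has the form "A2+a2" or "A0+null". Outside the Kafka test utilities the same behaviour is a one-line ValueJoiner; a minimal, self-contained sketch:

import org.apache.kafka.streams.kstream.ValueJoiner;

public class ToStringJoinerSketch {
    // Concatenates both values with '+'. On outer-join results the missing side is null,
    // which Java string concatenation renders as "null" (e.g. "A0+null", "null+a1").
    static final ValueJoiner<String, String, String> TOSTRING_JOINER =
        (leftValue, rightValue) -> leftValue + "+" + rightValue;

    public static void main(final String[] args) {
        System.out.println(TOSTRING_JOINER.apply("A2", "a2")); // A2+a2
        System.out.println(TOSTRING_JOINER.apply("A0", null)); // A0+null
    }
}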

Aggregations

IntegerSerializer (org.apache.kafka.common.serialization.IntegerSerializer): 106
StringSerializer (org.apache.kafka.common.serialization.StringSerializer): 75
Test (org.junit.Test): 74
TopologyTestDriver (org.apache.kafka.streams.TopologyTestDriver): 72
StreamsBuilder (org.apache.kafka.streams.StreamsBuilder): 58
MockApiProcessorSupplier (org.apache.kafka.test.MockApiProcessorSupplier): 46
Properties (java.util.Properties): 22
IntegerDeserializer (org.apache.kafka.common.serialization.IntegerDeserializer): 16
KeyValueTimestamp (org.apache.kafka.streams.KeyValueTimestamp): 13
HashSet (java.util.HashSet): 11
Set (java.util.Set): 11
KeyValue (org.apache.kafka.streams.KeyValue): 10
LongSerializer (org.apache.kafka.common.serialization.LongSerializer): 9
StringDeserializer (org.apache.kafka.common.serialization.StringDeserializer): 9
Serdes (org.apache.kafka.common.serialization.Serdes): 8
TestInputTopic (org.apache.kafka.streams.TestInputTopic): 8
Consumed (org.apache.kafka.streams.kstream.Consumed): 8
KStream (org.apache.kafka.streams.kstream.KStream): 8
StreamsTestUtils (org.apache.kafka.test.StreamsTestUtils): 8
Duration (java.time.Duration): 6