use of org.apache.kafka.test.MockApiProcessorSupplier in project kafka by apache.
the class KStreamKStreamOuterJoinTest method testOrdering.
@Test
public void testOrdering() {
    final StreamsBuilder builder = new StreamsBuilder();
    final KStream<Integer, String> stream1;
    final KStream<Integer, String> stream2;
    final KStream<Integer, String> joined;
    final MockApiProcessorSupplier<Integer, String, Void, Void> supplier = new MockApiProcessorSupplier<>();
    stream1 = builder.stream(topic1, consumed);
    stream2 = builder.stream(topic2, consumed);
    joined = stream1.outerJoin(stream2, MockValueJoiner.TOSTRING_JOINER, JoinWindows.ofTimeDifferenceWithNoGrace(ofMillis(100L)), StreamJoined.with(Serdes.Integer(), Serdes.String(), Serdes.String()));
    joined.process(supplier);
    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        final TestInputTopic<Integer, String> inputTopic1 = driver.createInputTopic(topic1, new IntegerSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
        final TestInputTopic<Integer, String> inputTopic2 = driver.createInputTopic(topic2, new IntegerSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
        final MockApiProcessor<Integer, String, Void, Void> processor = supplier.theCapturedProcessor();
        // push two items to the primary stream; the other window is empty; this should not produce any item yet
        // w1 = {}
        // w2 = {}
        // --> w1 = { 0:A0 (ts: 0), 1:A1 (ts: 100) }
        // --> w2 = {}
        inputTopic1.pipeInput(0, "A0", 0L);
        inputTopic1.pipeInput(1, "A1", 100L);
        processor.checkAndClearProcessResult();
        // push one item to the other stream that joins; this should first emit the non-joined record
        // whose window has already closed, and then the joined record
        // w1 = { 0:A0 (ts: 0), 1:A1 (ts: 100) }
        // w2 = {}
        // --> w1 = { 0:A0 (ts: 0), 1:A1 (ts: 100) }
        // --> w2 = { 1:a1 (ts: 110) }
        inputTopic2.pipeInput(1, "a1", 110L);
        processor.checkAndClearProcessResult(new KeyValueTimestamp<>(0, "A0+null", 0L), new KeyValueTimestamp<>(1, "A1+a1", 110L));
    }
}
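These snippets rely on fields (topic1, topic2, consumed, props) that are defined elsewhere in KStreamKStreamOuterJoinTest and are not shown on this page. A minimal sketch of plausible fixture values follows; the topic names and the exact Streams configuration are assumptions for illustration, not the project's actual values.

import java.util.Properties;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.kstream.Consumed;

// Hedged sketch of the shared test fixtures the snippets above assume.
private final String topic1 = "topic1";
private final String topic2 = "topic2";
private final Consumed<Integer, String> consumed = Consumed.with(Serdes.Integer(), Serdes.String());
private final Properties props = new Properties();
{
    // Illustrative configuration only; TopologyTestDriver never connects to a broker.
    props.put(StreamsConfig.APPLICATION_ID_CONFIG, "kstream-kstream-outer-join-test");
    props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9091");
    props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.IntegerSerde.class);
    props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.StringSerde.class);
}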
use of org.apache.kafka.test.MockApiProcessorSupplier in project kafka by apache.
the class KStreamKStreamOuterJoinTest method testRightExpiredNonJoinedRecordsAreEmittedByTheLeftProcessor.
@Test
public void testRightExpiredNonJoinedRecordsAreEmittedByTheLeftProcessor() {
    final StreamsBuilder builder = new StreamsBuilder();
    final KStream<Integer, String> stream1;
    final KStream<Integer, String> stream2;
    final KStream<Integer, String> joined;
    final MockApiProcessorSupplier<Integer, String, Void, Void> supplier = new MockApiProcessorSupplier<>();
    stream1 = builder.stream(topic1, consumed);
    stream2 = builder.stream(topic2, consumed);
    joined = stream1.outerJoin(stream2, MockValueJoiner.TOSTRING_JOINER, JoinWindows.ofTimeDifferenceWithNoGrace(ofMillis(100L)), StreamJoined.with(Serdes.Integer(), Serdes.String(), Serdes.String()));
    joined.process(supplier);
    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        final TestInputTopic<Integer, String> inputTopic1 = driver.createInputTopic(topic1, new IntegerSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
        final TestInputTopic<Integer, String> inputTopic2 = driver.createInputTopic(topic2, new IntegerSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
        final MockApiProcessor<Integer, String, Void, Void> processor = supplier.theCapturedProcessor();
        final long windowStart = 0L;
        // No joins detected; no null-joins emitted
        inputTopic2.pipeInput(0, "A0", windowStart + 1L);
        inputTopic2.pipeInput(1, "A1", windowStart + 2L);
        inputTopic2.pipeInput(0, "A0-0", windowStart + 3L);
        processor.checkAndClearProcessResult();
        // Join detected; no null-joins emitted
        inputTopic1.pipeInput(1, "a1", windowStart + 3L);
        processor.checkAndClearProcessResult(new KeyValueTimestamp<>(1, "a1+A1", windowStart + 3L));
        // Dummy record in the left topic will emit expired non-joined records from the right topic
        inputTopic1.pipeInput(2, "dummy", windowStart + 401L);
        processor.checkAndClearProcessResult(new KeyValueTimestamp<>(0, "null+A0", windowStart + 1L), new KeyValueTimestamp<>(0, "null+A0-0", windowStart + 3L));
        // Process the dummy joined record
        inputTopic2.pipeInput(2, "dummy", windowStart + 402L);
        processor.checkAndClearProcessResult(new KeyValueTimestamp<>(2, "dummy+dummy", windowStart + 402L));
    }
}
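The expected values in these tests ("a1+A1", "null+A0", "A0+null") come from MockValueJoiner.TOSTRING_JOINER, a helper in org.apache.kafka.test. A rough equivalent is sketched below only to show the output format; it is not the helper's actual source.

// Illustrative equivalent of MockValueJoiner.TOSTRING_JOINER: concatenate the left and
// right values with "+". In an outer join the missing side is null, and Java string
// concatenation renders it as the literal "null" (hence "null+A0" and "A0+null").
ValueJoiner<String, String, String> toStringJoiner = (leftValue, rightValue) -> leftValue + "+" + rightValue;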
use of org.apache.kafka.test.MockApiProcessorSupplier in project kafka by apache.
the class KStreamKStreamOuterJoinTest method runOuterJoin.
public void runOuterJoin(final StreamJoined<Integer, String, String> streamJoined, final JoinWindows joinWindows) {
    final StreamsBuilder builder = new StreamsBuilder();
    final int[] expectedKeys = new int[] { 0, 1, 2, 3 };
    final KStream<Integer, String> stream1;
    final KStream<Integer, String> stream2;
    final KStream<Integer, String> joined;
    final MockApiProcessorSupplier<Integer, String, Void, Void> supplier = new MockApiProcessorSupplier<>();
    stream1 = builder.stream(topic1, consumed);
    stream2 = builder.stream(topic2, consumed);
    joined = stream1.outerJoin(stream2, MockValueJoiner.TOSTRING_JOINER, joinWindows, streamJoined);
    joined.process(supplier);
    final Collection<Set<String>> copartitionGroups = TopologyWrapper.getInternalTopologyBuilder(builder.build()).copartitionGroups();
    assertEquals(1, copartitionGroups.size());
    assertEquals(new HashSet<>(Arrays.asList(topic1, topic2)), copartitionGroups.iterator().next());
    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        final TestInputTopic<Integer, String> inputTopic1 = driver.createInputTopic(topic1, new IntegerSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
        final TestInputTopic<Integer, String> inputTopic2 = driver.createInputTopic(topic2, new IntegerSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
        final MockApiProcessor<Integer, String, Void, Void> processor = supplier.theCapturedProcessor();
        // 2 window stores + 1 shared window store should be available
        assertEquals(3, driver.getAllStateStores().size());
        // push two items to the primary stream; the other window is still empty, so no output yet
        // --> w2 = {}
        for (int i = 0; i < 2; i++) {
            inputTopic1.pipeInput(expectedKeys[i], "A" + expectedKeys[i]);
        }
        processor.checkAndClearProcessResult();
        // push two items to the other stream; this should produce two joined items
        // --> w2 = { 0:a0, 1:a1 }
        for (int i = 0; i < 2; i++) {
            inputTopic2.pipeInput(expectedKeys[i], "a" + expectedKeys[i]);
        }
        processor.checkAndClearProcessResult(new KeyValueTimestamp<>(0, "A0+a0", 0L), new KeyValueTimestamp<>(1, "A1+a1", 0L));
        // push three items to the primary stream; only keys 0 and 1 find a match, so two joined items
        // --> w2 = { 0:a0, 1:a1 }
        for (int i = 0; i < 3; i++) {
            inputTopic1.pipeInput(expectedKeys[i], "B" + expectedKeys[i]);
        }
        processor.checkAndClearProcessResult(new KeyValueTimestamp<>(0, "B0+a0", 0L), new KeyValueTimestamp<>(1, "B1+a1", 0L));
        // push all four items to the other stream; this should produce five joined items
        // --> w2 = { 0:a0, 1:a1, 0:b0, 1:b1, 2:b2, 3:b3 }
        for (final int expectedKey : expectedKeys) {
            inputTopic2.pipeInput(expectedKey, "b" + expectedKey);
        }
        processor.checkAndClearProcessResult(new KeyValueTimestamp<>(0, "A0+b0", 0L), new KeyValueTimestamp<>(0, "B0+b0", 0L), new KeyValueTimestamp<>(1, "A1+b1", 0L), new KeyValueTimestamp<>(1, "B1+b1", 0L), new KeyValueTimestamp<>(2, "B2+b2", 0L));
        // push all four items to the primary stream; this should produce six joined items
        // --> w2 = { 0:a0, 1:a1, 0:b0, 1:b1, 2:b2, 3:b3 }
        for (final int expectedKey : expectedKeys) {
            inputTopic1.pipeInput(expectedKey, "C" + expectedKey);
        }
        processor.checkAndClearProcessResult(new KeyValueTimestamp<>(0, "C0+a0", 0L), new KeyValueTimestamp<>(0, "C0+b0", 0L), new KeyValueTimestamp<>(1, "C1+a1", 0L), new KeyValueTimestamp<>(1, "C1+b1", 0L), new KeyValueTimestamp<>(2, "C2+b2", 0L), new KeyValueTimestamp<>(3, "C3+b3", 0L));
        // push a dummy record that would expire non-joined items; it should not produce any items because
        // all of them have already been joined
        inputTopic1.pipeInput(0, "dummy", 400L);
        processor.checkAndClearProcessResult();
    }
}
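runOuterJoin is a shared driver rather than a test on its own; the class's @Test methods are expected to call it with a concrete window definition and store configuration. A hypothetical caller is sketched below using only the API already shown above; the method name and the exact window/grace choice are assumptions for illustration.

// Hypothetical caller sketch (name and window parameters are illustrative).
@Test
public void testOuterJoinWithDefaultStores() {
    final JoinWindows joinWindows = JoinWindows.ofTimeDifferenceAndGrace(ofMillis(100L), ofMillis(10L));
    final StreamJoined<Integer, String, String> streamJoined = StreamJoined.with(Serdes.Integer(), Serdes.String(), Serdes.String());
    runOuterJoin(streamJoined, joinWindows);
}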
use of org.apache.kafka.test.MockApiProcessorSupplier in project kafka by apache.
the class KStreamKStreamOuterJoinTest method testOuterJoinDuplicates.
@Test
public void testOuterJoinDuplicates() {
    final StreamsBuilder builder = new StreamsBuilder();
    final KStream<Integer, String> stream1;
    final KStream<Integer, String> stream2;
    final KStream<Integer, String> joined;
    final MockApiProcessorSupplier<Integer, String, Void, Void> supplier = new MockApiProcessorSupplier<>();
    stream1 = builder.stream(topic1, consumed);
    stream2 = builder.stream(topic2, consumed);
    joined = stream1.outerJoin(stream2, MockValueJoiner.TOSTRING_JOINER, JoinWindows.ofTimeDifferenceAndGrace(ofMillis(100L), ofMillis(10L)), StreamJoined.with(Serdes.Integer(), Serdes.String(), Serdes.String()));
    joined.process(supplier);
    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        final TestInputTopic<Integer, String> inputTopic1 = driver.createInputTopic(topic1, new IntegerSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
        final TestInputTopic<Integer, String> inputTopic2 = driver.createInputTopic(topic2, new IntegerSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
        final MockApiProcessor<Integer, String, Void, Void> processor = supplier.theCapturedProcessor();
        // verifies that non-joined duplicates are emitted once the window has closed
        inputTopic1.pipeInput(0, "A0", 0L);
        inputTopic1.pipeInput(0, "A0-0", 0L);
        inputTopic2.pipeInput(1, "a1", 0L);
        inputTopic2.pipeInput(1, "a1-0", 0L);
        inputTopic2.pipeInput(1, "a0", 111L);
        // bump stream-time to trigger outer-join results
        inputTopic2.pipeInput(3, "dummy", 211L);
        processor.checkAndClearProcessResult(new KeyValueTimestamp<>(1, "null+a1", 0L), new KeyValueTimestamp<>(1, "null+a1-0", 0L), new KeyValueTimestamp<>(0, "A0+null", 0L), new KeyValueTimestamp<>(0, "A0-0+null", 0L));
        // verifies that joined duplicates are emitted
        inputTopic1.pipeInput(2, "A2", 200L);
        inputTopic1.pipeInput(2, "A2-0", 200L);
        inputTopic2.pipeInput(2, "a2", 201L);
        inputTopic2.pipeInput(2, "a2-0", 201L);
        processor.checkAndClearProcessResult(new KeyValueTimestamp<>(2, "A2+a2", 201L), new KeyValueTimestamp<>(2, "A2-0+a2", 201L), new KeyValueTimestamp<>(2, "A2+a2-0", 201L), new KeyValueTimestamp<>(2, "A2-0+a2-0", 201L));
        // this record expires the remaining non-joined records; null+a0 (ts 111) and the earlier
        // dummy record for key 3 are emitted because they never found a join
        driver.advanceWallClockTime(Duration.ofMillis(1000L));
        inputTopic2.pipeInput(3, "dummy", 1500L);
        processor.checkAndClearProcessResult(new KeyValueTimestamp<>(1, "null+a0", 111L), new KeyValueTimestamp<>(3, "null+dummy", 211L));
    }
}
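The timing in testOuterJoinDuplicates follows from the window-close rule: a record can only be emitted as a null-joined result once stream time has passed its timestamp plus the window's time difference plus the grace period. A rough sketch of that bookkeeping for the 100 ms / 10 ms configuration above (not Kafka Streams internals, just the arithmetic):

// Close time for a record at timestamp ts with a 100 ms time difference and 10 ms grace.
long timeDifference = 100L;
long grace = 10L;
long ts = 0L;
long closeTime = ts + timeDifference + grace;   // 110
// The dummy record at ts 211 pushes stream time past 110, so the non-joined records at
// ts 0 are emitted with a null partner, while the record at ts 111 (close time 221) is
// only expired later, by the record at ts 1500.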
use of org.apache.kafka.test.MockApiProcessorSupplier in project kafka by apache.
the class KStreamKStreamOuterJoinTest method testLeftExpiredNonJoinedRecordsAreEmittedByTheLeftProcessor.
@Test
public void testLeftExpiredNonJoinedRecordsAreEmittedByTheLeftProcessor() {
    final StreamsBuilder builder = new StreamsBuilder();
    final KStream<Integer, String> stream1;
    final KStream<Integer, String> stream2;
    final KStream<Integer, String> joined;
    final MockApiProcessorSupplier<Integer, String, Void, Void> supplier = new MockApiProcessorSupplier<>();
    stream1 = builder.stream(topic1, consumed);
    stream2 = builder.stream(topic2, consumed);
    joined = stream1.outerJoin(stream2, MockValueJoiner.TOSTRING_JOINER, JoinWindows.ofTimeDifferenceWithNoGrace(ofMillis(100L)), StreamJoined.with(Serdes.Integer(), Serdes.String(), Serdes.String()));
    joined.process(supplier);
    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        final TestInputTopic<Integer, String> inputTopic1 = driver.createInputTopic(topic1, new IntegerSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
        final TestInputTopic<Integer, String> inputTopic2 = driver.createInputTopic(topic2, new IntegerSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
        final MockApiProcessor<Integer, String, Void, Void> processor = supplier.theCapturedProcessor();
        final long windowStart = 0L;
        // No joins detected; no null-joins emitted
        inputTopic1.pipeInput(0, "A0", windowStart + 1L);
        inputTopic1.pipeInput(1, "A1", windowStart + 2L);
        inputTopic1.pipeInput(0, "A0-0", windowStart + 3L);
        processor.checkAndClearProcessResult();
        // Join detected; no null-joins emitted
        inputTopic2.pipeInput(1, "a1", windowStart + 3L);
        processor.checkAndClearProcessResult(new KeyValueTimestamp<>(1, "A1+a1", windowStart + 3L));
        // Dummy record in the left topic will emit expired non-joined records from the left topic
        inputTopic1.pipeInput(2, "dummy", windowStart + 401L);
        processor.checkAndClearProcessResult(new KeyValueTimestamp<>(0, "A0+null", windowStart + 1L), new KeyValueTimestamp<>(0, "A0-0+null", windowStart + 3L));
        // Flush internal non-joined state store by joining the dummy record
        inputTopic2.pipeInput(2, "dummy", windowStart + 401L);
        processor.checkAndClearProcessResult(new KeyValueTimestamp<>(2, "dummy+dummy", windowStart + 401L));
    }
}
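MockApiProcessorSupplier and MockApiProcessor live in Kafka's own test sources (org.apache.kafka.test) rather than in the published kafka-streams-test-utils artifact. Outside the Kafka code base, a rough equivalent is to route the join result to an output topic and read it back through TestOutputTopic; the sketch below assumes a hypothetical topic name "output" and reuses the fixtures from the snippets above.

// Sketch: observe the outer-join output via TestOutputTopic instead of the mock processor.
joined.to("output", Produced.with(Serdes.Integer(), Serdes.String()));
try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
    final TestInputTopic<Integer, String> left = driver.createInputTopic(topic1, new IntegerSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
    final TestOutputTopic<Integer, String> output = driver.createOutputTopic("output", new IntegerDeserializer(), new StringDeserializer());
    left.pipeInput(0, "A0", 0L);
    // ... pipe further records as in the tests above ...
    final List<KeyValue<Integer, String>> results = output.readKeyValuesToList();
}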