Use of org.apache.kafka.test.MockApiProcessorSupplier in project kafka by apache.
From the class KTableFilterTest, method doTestSkipNullOnMaterialization.
private void doTestSkipNullOnMaterialization(final StreamsBuilder builder, final KTableImpl<String, String, String> table1, final KTableImpl<String, String, String> table2, final String topic1) {
final MockApiProcessorSupplier<String, String, Void, Void> supplier = new MockApiProcessorSupplier<>();
final Topology topology = builder.build();
topology.addProcessor("proc1", supplier, table1.name);
topology.addProcessor("proc2", supplier, table2.name);
try (final TopologyTestDriver driver = new TopologyTestDriver(topology, props)) {
final TestInputTopic<String, String> stringInputTopic =
    driver.createInputTopic(topic1, new StringSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
stringInputTopic.pipeInput("A", "reject", 5L);
stringInputTopic.pipeInput("B", "reject", 10L);
stringInputTopic.pipeInput("C", "reject", 20L);
}
final List<MockApiProcessor<String, String, Void, Void>> processors = supplier.capturedProcessors(2);
processors.get(0).checkAndClearProcessResult(
    new KeyValueTimestamp<>("A", new Change<>("reject", null), 5),
    new KeyValueTimestamp<>("B", new Change<>("reject", null), 10),
    new KeyValueTimestamp<>("C", new Change<>("reject", null), 20));
processors.get(1).checkEmptyAndClearProcessResult();
}
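All of these snippets reference a props field that is defined elsewhere in the test classes. A minimal TopologyTestDriver configuration might look like the following sketch; the application id, bootstrap address, and default serdes shown here are illustrative assumptions rather than the exact values used by the Kafka tests.
import java.util.Properties;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.StreamsConfig;
// Illustrative TopologyTestDriver configuration; the concrete values are assumptions.
final Properties props = new Properties();
props.put(StreamsConfig.APPLICATION_ID_CONFIG, "ktable-test");
// The test driver never opens a network connection, so any address works here.
props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "dummy:1234");
props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass());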
Use of org.apache.kafka.test.MockApiProcessorSupplier in project kafka by apache.
From the class KTableImplTest, method testMaterializedKTable.
@Test
public void testMaterializedKTable() {
final StreamsBuilder builder = new StreamsBuilder();
final String topic1 = "topic1";
final String topic2 = "topic2";
final KTable<String, String> table1 = builder.table(topic1, consumed, Materialized.as("fred"));
final MockApiProcessorSupplier<String, Object, Void, Void> supplier = new MockApiProcessorSupplier<>();
table1.toStream().process(supplier);
final KTable<String, Integer> table2 = table1.mapValues(s -> Integer.valueOf(s));
table2.toStream().process(supplier);
final KTable<String, Integer> table3 = table2.filter((key, value) -> (value % 2) == 0);
table3.toStream().process(supplier);
table1.toStream().to(topic2, produced);
final KTable<String, String> table4 = builder.table(topic2, consumed);
table4.toStream().process(supplier);
try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
final TestInputTopic<String, String> inputTopic = driver.createInputTopic(topic1, new StringSerializer(), new StringSerializer());
inputTopic.pipeInput("A", "01", 5L);
inputTopic.pipeInput("B", "02", 100L);
inputTopic.pipeInput("C", "03", 0L);
inputTopic.pipeInput("D", "04", 0L);
inputTopic.pipeInput("A", "05", 10L);
inputTopic.pipeInput("A", "06", 8L);
}
final List<MockApiProcessor<String, Object, Void, Void>> processors = supplier.capturedProcessors(4);
assertEquals(asList(new KeyValueTimestamp<>("A", "01", 5), new KeyValueTimestamp<>("B", "02", 100), new KeyValueTimestamp<>("C", "03", 0), new KeyValueTimestamp<>("D", "04", 0), new KeyValueTimestamp<>("A", "05", 10), new KeyValueTimestamp<>("A", "06", 8)), processors.get(0).processed());
assertEquals(asList(new KeyValueTimestamp<>("A", 1, 5), new KeyValueTimestamp<>("B", 2, 100), new KeyValueTimestamp<>("C", 3, 0), new KeyValueTimestamp<>("D", 4, 0), new KeyValueTimestamp<>("A", 5, 10), new KeyValueTimestamp<>("A", 6, 8)), processors.get(1).processed());
assertEquals(asList(new KeyValueTimestamp<>("B", 2, 100), new KeyValueTimestamp<>("D", 4, 0), new KeyValueTimestamp<>("A", 6, 8)), processors.get(2).processed());
assertEquals(asList(new KeyValueTimestamp<>("A", "01", 5), new KeyValueTimestamp<>("B", "02", 100), new KeyValueTimestamp<>("C", "03", 0), new KeyValueTimestamp<>("D", "04", 0), new KeyValueTimestamp<>("A", "05", 10), new KeyValueTimestamp<>("A", "06", 8)), processors.get(3).processed());
}
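Because table1 is materialized under the store name "fred", the same driver can also be asked for that state store directly. The following is a sketch of such a check, not part of the original test; it assumes it runs inside the try-with-resources block while the driver is still open, and it needs org.apache.kafka.streams.state.KeyValueStore.
// Sketch: inspect the store that backs table1 after the records above have been piped in.
final KeyValueStore<String, String> store = driver.getKeyValueStore("fred");
assertEquals("02", store.get("B"));
assertEquals("04", store.get("D"));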
Use of org.apache.kafka.test.MockApiProcessorSupplier in project kafka by apache.
From the class KTableKTableInnerJoinTest, method testSendingOldValues.
@Test
public void testSendingOldValues() {
final StreamsBuilder builder = new StreamsBuilder();
final int[] expectedKeys = new int[] { 0, 1, 2, 3 };
final KTable<Integer, String> table1;
final KTable<Integer, String> table2;
final KTable<Integer, String> joined;
final MockApiProcessorSupplier<Integer, String, Void, Void> supplier = new MockApiProcessorSupplier<>();
table1 = builder.table(topic1, consumed);
table2 = builder.table(topic2, consumed);
joined = table1.join(table2, MockValueJoiner.TOSTRING_JOINER);
((KTableImpl<?, ?, ?>) joined).enableSendingOldValues(true);
builder.build().addProcessor("proc", supplier, ((KTableImpl<?, ?, ?>) joined).name);
try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
final TestInputTopic<Integer, String> inputTopic1 =
    driver.createInputTopic(topic1, Serdes.Integer().serializer(), Serdes.String().serializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
final TestInputTopic<Integer, String> inputTopic2 =
    driver.createInputTopic(topic2, Serdes.Integer().serializer(), Serdes.String().serializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
final MockApiProcessor<Integer, String, Void, Void> proc = supplier.theCapturedProcessor();
assertTrue(((KTableImpl<?, ?, ?>) table1).sendingOldValueEnabled());
assertTrue(((KTableImpl<?, ?, ?>) table2).sendingOldValueEnabled());
assertTrue(((KTableImpl<?, ?, ?>) joined).sendingOldValueEnabled());
// push two items to the primary stream. the other table is empty
for (int i = 0; i < 2; i++) {
inputTopic1.pipeInput(expectedKeys[i], "X" + expectedKeys[i], 5L + i);
}
// pass a tuple with a null key; it will be discarded in the join process
inputTopic1.pipeInput(null, "SomeVal", 42L);
// left: X0:0 (ts: 5), X1:1 (ts: 6)
// right:
proc.checkAndClearProcessResult(EMPTY);
// push two items to the other stream. this should produce two items.
for (int i = 0; i < 2; i++) {
inputTopic2.pipeInput(expectedKeys[i], "Y" + expectedKeys[i], 10L * i);
}
// pass a tuple with a null key; it will be discarded in the join process
inputTopic2.pipeInput(null, "AnotherVal", 73L);
// left: X0:0 (ts: 5), X1:1 (ts: 6)
// right: Y0:0 (ts: 0), Y1:1 (ts: 10)
proc.checkAndClearProcessResult(
    new KeyValueTimestamp<>(0, new Change<>("X0+Y0", null), 5),
    new KeyValueTimestamp<>(1, new Change<>("X1+Y1", null), 10));
// push all four items to the primary stream. this should produce two items.
for (final int expectedKey : expectedKeys) {
inputTopic1.pipeInput(expectedKey, "XX" + expectedKey, 7L);
}
// left: XX0:0 (ts: 7), XX1:1 (ts: 7), XX2:2 (ts: 7), XX3:3 (ts: 7)
// right: Y0:0 (ts: 0), Y1:1 (ts: 10)
proc.checkAndClearProcessResult(
    new KeyValueTimestamp<>(0, new Change<>("XX0+Y0", "X0+Y0"), 7),
    new KeyValueTimestamp<>(1, new Change<>("XX1+Y1", "X1+Y1"), 10));
// push all items to the other stream. this should produce four items.
for (final int expectedKey : expectedKeys) {
inputTopic2.pipeInput(expectedKey, "YY" + expectedKey, expectedKey * 5L);
}
// left: XX0:0 (ts: 7), XX1:1 (ts: 7), XX2:2 (ts: 7), XX3:3 (ts: 7)
// right: YY0:0 (ts: 0), YY1:1 (ts: 5), YY2:2 (ts: 10), YY3:3 (ts: 15)
proc.checkAndClearProcessResult(
    new KeyValueTimestamp<>(0, new Change<>("XX0+YY0", "XX0+Y0"), 7),
    new KeyValueTimestamp<>(1, new Change<>("XX1+YY1", "XX1+Y1"), 7),
    new KeyValueTimestamp<>(2, new Change<>("XX2+YY2", null), 10),
    new KeyValueTimestamp<>(3, new Change<>("XX3+YY3", null), 15));
// push all four items to the primary stream. this should produce four items.
for (final int expectedKey : expectedKeys) {
inputTopic1.pipeInput(expectedKey, "XXX" + expectedKey, 6L);
}
// left: XXX0:0 (ts: 6), XXX1:1 (ts: 6), XXX2:2 (ts: 6), XXX3:3 (ts: 6)
// right: YY0:0 (ts: 0), YY1:1 (ts: 5), YY2:2 (ts: 10), YY3:3 (ts: 15)
proc.checkAndClearProcessResult(
    new KeyValueTimestamp<>(0, new Change<>("XXX0+YY0", "XX0+YY0"), 6),
    new KeyValueTimestamp<>(1, new Change<>("XXX1+YY1", "XX1+YY1"), 6),
    new KeyValueTimestamp<>(2, new Change<>("XXX2+YY2", "XX2+YY2"), 10),
    new KeyValueTimestamp<>(3, new Change<>("XXX3+YY3", "XX3+YY3"), 15));
// push two items with null to the other stream as deletes. this should produce two items.
inputTopic2.pipeInput(expectedKeys[0], null, 5L);
inputTopic2.pipeInput(expectedKeys[1], null, 7L);
// left: XXX0:0 (ts: 6), XXX1:1 (ts: 6), XXX2:2 (ts: 6), XXX3:3 (ts: 6)
// right: YY2:2 (ts: 10), YY3:3 (ts: 15)
proc.checkAndClearProcessResult(
    new KeyValueTimestamp<>(0, new Change<>(null, "XXX0+YY0"), 6),
    new KeyValueTimestamp<>(1, new Change<>(null, "XXX1+YY1"), 7));
// push all four items to the primary stream. this should produce two items.
for (final int expectedKey : expectedKeys) {
inputTopic1.pipeInput(expectedKey, "XXXX" + expectedKey, 13L);
}
// left: XXXX0:0 (ts: 13), XXXX1:1 (ts: 13), XXXX2:2 (ts: 13), XXXX3:3 (ts: 13)
// right: YY2:2 (ts: 10), YY3:3 (ts: 15)
proc.checkAndClearProcessResult(
    new KeyValueTimestamp<>(2, new Change<>("XXXX2+YY2", "XXX2+YY2"), 13),
    new KeyValueTimestamp<>(3, new Change<>("XXXX3+YY3", "XXX3+YY3"), 15));
// push four items to the primary stream with null. this should produce two items.
inputTopic1.pipeInput(expectedKeys[0], null, 0L);
inputTopic1.pipeInput(expectedKeys[1], null, 42L);
inputTopic1.pipeInput(expectedKeys[2], null, 5L);
inputTopic1.pipeInput(expectedKeys[3], null, 20L);
// left:
// right: YY2:2 (ts: 10), YY3:3 (ts: 15)
proc.checkAndClearProcessResult(
    new KeyValueTimestamp<>(2, new Change<>(null, "XXXX2+YY2"), 10),
    new KeyValueTimestamp<>(3, new Change<>(null, "XXXX3+YY3"), 20));
}
}
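The Change values asserted above carry both the new and the old record value once enableSendingOldValues has been called on the joined table. A hypothetical downstream processor (this is a sketch, not MockApiProcessor itself) could read them like this, using the internal class org.apache.kafka.streams.kstream.internals.Change and the org.apache.kafka.streams.processor.api types:
// Hypothetical processor illustrating how old/new values arrive downstream of the join.
final Processor<Integer, Change<String>, Void, Void> changeLogger =
    new Processor<Integer, Change<String>, Void, Void>() {
        @Override
        public void process(final Record<Integer, Change<String>> record) {
            // Change exposes its fields directly; oldValue stays null until old values are enabled.
            System.out.println(record.key() + ": " + record.value().oldValue + " -> " + record.value().newValue);
        }
    };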
Use of org.apache.kafka.test.MockApiProcessorSupplier in project kafka by apache.
From the class KTableKTableInnerJoinTest, method testNotSendingOldValues.
@Test
public void testNotSendingOldValues() {
final StreamsBuilder builder = new StreamsBuilder();
final int[] expectedKeys = new int[] { 0, 1, 2, 3 };
final KTable<Integer, String> table1;
final KTable<Integer, String> table2;
final KTable<Integer, String> joined;
final MockApiProcessorSupplier<Integer, String, Void, Void> supplier = new MockApiProcessorSupplier<>();
table1 = builder.table(topic1, consumed);
table2 = builder.table(topic2, consumed);
joined = table1.join(table2, MockValueJoiner.TOSTRING_JOINER);
builder.build().addProcessor("proc", supplier, ((KTableImpl<?, ?, ?>) joined).name);
doTestNotSendingOldValues(builder, expectedKeys, table1, table2, supplier, joined);
}
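The helper doTestNotSendingOldValues is not reproduced on this page. The important contrast with testSendingOldValues is that enableSendingOldValues is never invoked here, so a helper of this kind would typically start by asserting the negative flags, for example (hypothetical assertions, not the helper's actual body):
// Hypothetical pre-checks before piping any data; they mirror the assertTrue calls in the previous example.
assertFalse(((KTableImpl<?, ?, ?>) table1).sendingOldValueEnabled());
assertFalse(((KTableImpl<?, ?, ?>) table2).sendingOldValueEnabled());
assertFalse(((KTableImpl<?, ?, ?>) joined).sendingOldValueEnabled());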
Use of org.apache.kafka.test.MockApiProcessorSupplier in project kafka by apache.
From the class KTableSourceTest, method testKTable.
@Test
public void testKTable() {
final StreamsBuilder builder = new StreamsBuilder();
final String topic1 = "topic1";
final KTable<String, Integer> table1 = builder.table(topic1, Consumed.with(Serdes.String(), Serdes.Integer()));
final MockApiProcessorSupplier<String, Integer, Void, Void> supplier = new MockApiProcessorSupplier<>();
table1.toStream().process(supplier);
try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
final TestInputTopic<String, Integer> inputTopic = driver.createInputTopic(topic1, new StringSerializer(), new IntegerSerializer());
inputTopic.pipeInput("A", 1, 10L);
inputTopic.pipeInput("B", 2, 11L);
inputTopic.pipeInput("C", 3, 12L);
inputTopic.pipeInput("D", 4, 13L);
inputTopic.pipeInput("A", null, 14L);
inputTopic.pipeInput("B", null, 15L);
}
assertEquals(asList(new KeyValueTimestamp<>("A", 1, 10L), new KeyValueTimestamp<>("B", 2, 11L), new KeyValueTimestamp<>("C", 3, 12L), new KeyValueTimestamp<>("D", 4, 13L), new KeyValueTimestamp<>("A", null, 14L), new KeyValueTimestamp<>("B", null, 15L)), supplier.theCapturedProcessor().processed());
}
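For readers without the Kafka test utilities on the classpath, the core idea behind MockApiProcessorSupplier can be approximated in a few lines: a ProcessorSupplier whose processors record every incoming record so a test can assert on them afterwards. The following is a simplified stand-in written for illustration, not the actual class from org.apache.kafka.test.
import java.util.ArrayList;
import java.util.List;
import org.apache.kafka.streams.KeyValueTimestamp;
import org.apache.kafka.streams.processor.api.Processor;
import org.apache.kafka.streams.processor.api.ProcessorSupplier;
import org.apache.kafka.streams.processor.api.Record;
// Simplified stand-in for MockApiProcessorSupplier: every record that reaches a processor
// created by this supplier is captured as a KeyValueTimestamp for later assertions.
final class CapturingProcessorSupplier<K, V> implements ProcessorSupplier<K, V, Void, Void> {
    final List<KeyValueTimestamp<K, V>> processed = new ArrayList<>();

    @Override
    public Processor<K, V, Void, Void> get() {
        return new Processor<K, V, Void, Void>() {
            @Override
            public void process(final Record<K, V> record) {
                processed.add(new KeyValueTimestamp<>(record.key(), record.value(), record.timestamp()));
            }
        };
    }
}
Unlike this sketch, the real MockApiProcessorSupplier also keeps one MockApiProcessor per get() call, which is what capturedProcessors(2), capturedProcessors(4), and theCapturedProcessor() return in the examples above.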