Use of org.apache.kafka.test.MockApiProcessorSupplier in project kafka by apache.
From the class KStreamKStreamOuterJoinTest, method testOuterJoinDuplicatesWithFixDisabledOldApi.
@SuppressWarnings("deprecation")
@Test
public void testOuterJoinDuplicatesWithFixDisabledOldApi() {
    final StreamsBuilder builder = new StreamsBuilder();
    final KStream<Integer, String> stream1;
    final KStream<Integer, String> stream2;
    final KStream<Integer, String> joined;
    final MockApiProcessorSupplier<Integer, String, Void, Void> supplier = new MockApiProcessorSupplier<>();
    stream1 = builder.stream(topic1, consumed);
    stream2 = builder.stream(topic2, consumed);
    joined = stream1.outerJoin(
        stream2,
        MockValueJoiner.TOSTRING_JOINER,
        JoinWindows.of(ofMillis(100L)).grace(ofMillis(10L)),
        StreamJoined.with(Serdes.Integer(), Serdes.String(), Serdes.String()));
    joined.process(supplier);

    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(props), props)) {
        final TestInputTopic<Integer, String> inputTopic1 =
            driver.createInputTopic(topic1, new IntegerSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
        final TestInputTopic<Integer, String> inputTopic2 =
            driver.createInputTopic(topic2, new IntegerSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
        final MockApiProcessor<Integer, String, Void, Void> processor = supplier.theCapturedProcessor();

        // Only 2 window stores should be available
        assertEquals(2, driver.getAllStateStores().size());

        inputTopic1.pipeInput(0, "A0", 0L);
        inputTopic1.pipeInput(0, "A0-0", 0L);
        inputTopic2.pipeInput(0, "a0", 0L);
        inputTopic2.pipeInput(1, "b1", 0L);

        processor.checkAndClearProcessResult(
            new KeyValueTimestamp<>(0, "A0+null", 0L),
            new KeyValueTimestamp<>(0, "A0-0+null", 0L),
            new KeyValueTimestamp<>(0, "A0+a0", 0L),
            new KeyValueTimestamp<>(0, "A0-0+a0", 0L),
            new KeyValueTimestamp<>(1, "null+b1", 0L));
    }
}
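A note on the @SuppressWarnings("deprecation"): the test deliberately builds its windows with the old JoinWindows.of(...).grace(...) construction (the "old API" in the method name), which is why the eagerly emitted A0+null / A0-0+null records are still expected alongside the later joined results. For comparison only, a hedged sketch of the same join expressed with the newer windows API, assuming JoinWindows.ofTimeDifferenceAndGrace from more recent Kafka Streams releases; switching to it would also change this particular test's expected output:

    // Sketch of the non-deprecated equivalent of the join call above only;
    // everything else in the test stays the same.
    joined = stream1.outerJoin(
        stream2,
        MockValueJoiner.TOSTRING_JOINER,
        // ofTimeDifferenceAndGrace replaces the deprecated of(...).grace(...) pair
        JoinWindows.ofTimeDifferenceAndGrace(ofMillis(100L), ofMillis(10L)),
        StreamJoined.with(Serdes.Integer(), Serdes.String(), Serdes.String()));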
Use of org.apache.kafka.test.MockApiProcessorSupplier in project kafka by apache.
From the class KTableFilterTest, method doTestKTable.
private void doTestKTable(final StreamsBuilder builder,
                          final KTable<String, Integer> table2,
                          final KTable<String, Integer> table3,
                          final String topic) {
    final MockApiProcessorSupplier<String, Integer, Void, Void> supplier = new MockApiProcessorSupplier<>();
    table2.toStream().process(supplier);
    table3.toStream().process(supplier);

    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        final TestInputTopic<String, Integer> inputTopic =
            driver.createInputTopic(topic, new StringSerializer(), new IntegerSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
        inputTopic.pipeInput("A", 1, 10L);
        inputTopic.pipeInput("B", 2, 5L);
        inputTopic.pipeInput("C", 3, 8L);
        inputTopic.pipeInput("D", 4, 14L);
        inputTopic.pipeInput("A", null, 18L);
        inputTopic.pipeInput("B", null, 15L);
    }

    final List<MockApiProcessor<String, Integer, Void, Void>> processors = supplier.capturedProcessors(2);
    processors.get(0).checkAndClearProcessResult(
        new KeyValueTimestamp<>("A", null, 10),
        new KeyValueTimestamp<>("B", 2, 5),
        new KeyValueTimestamp<>("C", null, 8),
        new KeyValueTimestamp<>("D", 4, 14),
        new KeyValueTimestamp<>("A", null, 18),
        new KeyValueTimestamp<>("B", null, 15));
    processors.get(1).checkAndClearProcessResult(
        new KeyValueTimestamp<>("A", 1, 10),
        new KeyValueTimestamp<>("B", null, 5),
        new KeyValueTimestamp<>("C", 3, 8),
        new KeyValueTimestamp<>("D", null, 14),
        new KeyValueTimestamp<>("A", null, 18),
        new KeyValueTimestamp<>("B", null, 15));
}
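Because the same supplier is passed to two process() calls, it captures one mock processor per call, and capturedProcessors(2) returns them in the order they were wired into the topology: index 0 holds table2's output, index 1 holds table3's. The helper receives those tables already built by its callers; purely as a hedged reconstruction inferred from the assertions above (not copied from the actual callers), they behave as if table2 keeps even values and table3 keeps odd values, with records dropped by a filter surfacing downstream as null tombstones:

    // Hypothetical caller-side setup, inferred from the expected results above.
    final KTable<String, Integer> table1 =
        builder.table(topic, Consumed.with(Serdes.String(), Serdes.Integer()));
    final KTable<String, Integer> table2 = table1.filter((key, value) -> (value % 2) == 0);
    final KTable<String, Integer> table3 = table1.filterNot((key, value) -> (value % 2) == 0);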
Use of org.apache.kafka.test.MockApiProcessorSupplier in project kafka by apache.
From the class KTableMapKeysTest, method testMapKeysConvertingToStream.
@Test
public void testMapKeysConvertingToStream() {
    final StreamsBuilder builder = new StreamsBuilder();
    final String topic1 = "topic_map_keys";
    final KTable<Integer, String> table1 = builder.table(topic1, Consumed.with(Serdes.Integer(), Serdes.String()));

    final Map<Integer, String> keyMap = new HashMap<>();
    keyMap.put(1, "ONE");
    keyMap.put(2, "TWO");
    keyMap.put(3, "THREE");

    final KStream<String, String> convertedStream = table1.toStream((key, value) -> keyMap.get(key));

    final KeyValueTimestamp[] expected = new KeyValueTimestamp[] {
        new KeyValueTimestamp<>("ONE", "V_ONE", 5),
        new KeyValueTimestamp<>("TWO", "V_TWO", 10),
        new KeyValueTimestamp<>("THREE", "V_THREE", 15)
    };
    final int[] originalKeys = new int[] { 1, 2, 3 };
    final String[] values = new String[] { "V_ONE", "V_TWO", "V_THREE" };

    final MockApiProcessorSupplier<String, String, Void, Void> supplier = new MockApiProcessorSupplier<>();
    convertedStream.process(supplier);

    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        for (int i = 0; i < originalKeys.length; i++) {
            final TestInputTopic<Integer, String> inputTopic =
                driver.createInputTopic(topic1, new IntegerSerializer(), new StringSerializer());
            inputTopic.pipeInput(originalKeys[i], values[i], 5 + i * 5);
        }
    }

    assertEquals(3, supplier.theCapturedProcessor().processed().size());
    for (int i = 0; i < expected.length; i++) {
        assertEquals(expected[i], supplier.theCapturedProcessor().processed().get(i));
    }
}
Use of org.apache.kafka.test.MockApiProcessorSupplier in project kafka by apache.
From the class KStreamKTableLeftJoinTest, method setUp.
@Before
public void setUp() {
    builder = new StreamsBuilder();
    final KStream<Integer, String> stream;
    final KTable<Integer, String> table;
    final MockApiProcessorSupplier<Integer, String, Void, Void> supplier = new MockApiProcessorSupplier<>();
    final Consumed<Integer, String> consumed = Consumed.with(Serdes.Integer(), Serdes.String());

    stream = builder.stream(streamTopic, consumed);
    table = builder.table(tableTopic, consumed);
    stream.leftJoin(table, MockValueJoiner.TOSTRING_JOINER).process(supplier);

    final Properties props = StreamsTestUtils.getStreamsConfig(Serdes.Integer(), Serdes.String());
    driver = new TopologyTestDriver(builder.build(), props);
    inputStreamTopic = driver.createInputTopic(streamTopic, new IntegerSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
    inputTableTopic = driver.createInputTopic(tableTopic, new IntegerSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
    processor = supplier.theCapturedProcessor();
}
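Unlike the other snippets, setUp() leaves the TopologyTestDriver open as a field rather than closing it in a try-with-resources block, so the enclosing test class presumably releases it after each test. A hedged sketch of what that teardown typically looks like (the method name cleanup is illustrative, not taken from the Kafka sources):

    @After
    public void cleanup() {
        // Close the driver so state stores and temp directories are released.
        driver.close();
    }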
Use of org.apache.kafka.test.MockApiProcessorSupplier in project kafka by apache.
From the class KStreamSelectKeyTest, method testSelectKey.
@Test
public void testSelectKey() {
    final StreamsBuilder builder = new StreamsBuilder();

    final Map<Number, String> keyMap = new HashMap<>();
    keyMap.put(1, "ONE");
    keyMap.put(2, "TWO");
    keyMap.put(3, "THREE");

    final KeyValueTimestamp[] expected = new KeyValueTimestamp[] {
        new KeyValueTimestamp<>("ONE", 1, 0),
        new KeyValueTimestamp<>("TWO", 2, 0),
        new KeyValueTimestamp<>("THREE", 3, 0)
    };
    final int[] expectedValues = new int[] { 1, 2, 3 };

    final KStream<String, Integer> stream = builder.stream(topicName, Consumed.with(Serdes.String(), Serdes.Integer()));
    final MockApiProcessorSupplier<String, Integer, Void, Void> supplier = new MockApiProcessorSupplier<>();
    stream.selectKey((key, value) -> keyMap.get(value)).process(supplier);

    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        final TestInputTopic<String, Integer> inputTopic =
            driver.createInputTopic(topicName, new StringSerializer(), new IntegerSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
        for (final int expectedValue : expectedValues) {
            inputTopic.pipeInput(expectedValue);
        }
    }

    assertEquals(3, supplier.theCapturedProcessor().processed().size());
    for (int i = 0; i < expected.length; i++) {
        assertEquals(expected[i], supplier.theCapturedProcessor().processed().get(i));
    }
}
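Taken together, the snippets above follow one pattern: wire a MockApiProcessorSupplier into the topology as a terminal process() node, drive the topology with TopologyTestDriver and TestInputTopic, then assert on the records the captured MockApiProcessor saw. A minimal, self-contained sketch of that pattern follows; it is not taken from the Kafka sources, and the class name, topic name, and record values are illustrative only:

    import java.time.Duration;
    import java.time.Instant;
    import java.util.Properties;

    import org.apache.kafka.common.serialization.IntegerSerializer;
    import org.apache.kafka.common.serialization.Serdes;
    import org.apache.kafka.common.serialization.StringSerializer;
    import org.apache.kafka.streams.KeyValueTimestamp;
    import org.apache.kafka.streams.StreamsBuilder;
    import org.apache.kafka.streams.TestInputTopic;
    import org.apache.kafka.streams.TopologyTestDriver;
    import org.apache.kafka.streams.kstream.Consumed;
    import org.apache.kafka.streams.kstream.KStream;
    import org.apache.kafka.test.MockApiProcessor;
    import org.apache.kafka.test.MockApiProcessorSupplier;
    import org.apache.kafka.test.StreamsTestUtils;
    import org.junit.Test;

    public class MockApiProcessorSupplierUsageSketch {

        @Test
        public void shouldCaptureForwardedRecords() {
            final StreamsBuilder builder = new StreamsBuilder();
            final String topic = "input-topic"; // illustrative topic name

            final MockApiProcessorSupplier<Integer, String, Void, Void> supplier =
                new MockApiProcessorSupplier<>();

            final KStream<Integer, String> stream =
                builder.stream(topic, Consumed.with(Serdes.Integer(), Serdes.String()));
            // Terminate the topology with the mock supplier so every record that
            // reaches this node is recorded by the captured processor.
            stream.mapValues(value -> value.toUpperCase()).process(supplier);

            final Properties props = StreamsTestUtils.getStreamsConfig(Serdes.Integer(), Serdes.String());
            try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
                final TestInputTopic<Integer, String> inputTopic = driver.createInputTopic(
                    topic, new IntegerSerializer(), new StringSerializer(),
                    Instant.ofEpochMilli(0L), Duration.ZERO);

                inputTopic.pipeInput(1, "a", 10L);
                inputTopic.pipeInput(2, "b", 20L);

                // checkAndClearProcessResult verifies key, value, and timestamp
                // of each forwarded record, then clears the captured list.
                final MockApiProcessor<Integer, String, Void, Void> processor = supplier.theCapturedProcessor();
                processor.checkAndClearProcessResult(
                    new KeyValueTimestamp<>(1, "A", 10L),
                    new KeyValueTimestamp<>(2, "B", 20L));
            }
        }
    }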