Use of org.apache.kafka.streams.TopologyTestDriver in project kafka by apache.
From the class KStreamWindowAggregateTest, the method shouldLogAndMeterWhenSkippingNullKey:
@Test
public void shouldLogAndMeterWhenSkippingNullKey() {
    final StreamsBuilder builder = new StreamsBuilder();
    final String topic = "topic";
    builder.stream(topic, Consumed.with(Serdes.String(), Serdes.String()))
        .groupByKey(Grouped.with(Serdes.String(), Serdes.String()))
        .windowedBy(TimeWindows.ofSizeWithNoGrace(ofMillis(10)).advanceBy(ofMillis(5)))
        .aggregate(
            MockInitializer.STRING_INIT,
            MockAggregator.toStringInstance("+"),
            Materialized.<String, String, WindowStore<Bytes, byte[]>>as("topic1-Canonicalized").withValueSerde(Serdes.String()));
    try (final LogCaptureAppender appender = LogCaptureAppender.createAndRegister(KStreamWindowAggregate.class);
         final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        final TestInputTopic<String, String> inputTopic =
            driver.createInputTopic(topic, new StringSerializer(), new StringSerializer());
        inputTopic.pipeInput(null, "1");
        assertThat(appender.getMessages(), hasItem("Skipping record due to null key. topic=[topic] partition=[0] offset=[0]"));
    }
}
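The props field used to construct the TopologyTestDriver is defined elsewhere in the test class and is not part of this snippet. A minimal sketch of the kind of configuration such tests assume is shown below; the application id, bootstrap servers value, and default serdes are placeholders for illustration, not the exact values used in Kafka's test code.

// Minimal sketch only: placeholder ids and serdes, not the test class's actual setup.
final Properties props = new Properties();
props.put(StreamsConfig.APPLICATION_ID_CONFIG, "topology-test-driver-app");
props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "dummy:1234");
props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.StringSerde.class);
props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.StringSerde.class);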
Use of org.apache.kafka.streams.TopologyTestDriver in project kafka by apache.
From the class KStreamWindowAggregateTest, the method testAggBasic:
@Test
public void testAggBasic() {
    final StreamsBuilder builder = new StreamsBuilder();
    final String topic1 = "topic1";
    final KTable<Windowed<String>, String> table2 = builder
        .stream(topic1, Consumed.with(Serdes.String(), Serdes.String()))
        .groupByKey(Grouped.with(Serdes.String(), Serdes.String()))
        .windowedBy(TimeWindows.ofSizeAndGrace(ofMillis(10), ofMillis(100)).advanceBy(ofMillis(5)))
        .aggregate(
            MockInitializer.STRING_INIT,
            MockAggregator.TOSTRING_ADDER,
            Materialized.<String, String, WindowStore<Bytes, byte[]>>as("topic1-Canonized").withValueSerde(Serdes.String()));
    final MockApiProcessorSupplier<Windowed<String>, String, Void, Void> supplier = new MockApiProcessorSupplier<>();
    table2.toStream().process(supplier);
    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        final TestInputTopic<String, String> inputTopic1 =
            driver.createInputTopic(topic1, new StringSerializer(), new StringSerializer());
        inputTopic1.pipeInput("A", "1", 0L);
        inputTopic1.pipeInput("B", "2", 1L);
        inputTopic1.pipeInput("C", "3", 2L);
        inputTopic1.pipeInput("D", "4", 3L);
        inputTopic1.pipeInput("A", "1", 4L);
        inputTopic1.pipeInput("A", "1", 5L);
        inputTopic1.pipeInput("B", "2", 6L);
        inputTopic1.pipeInput("D", "4", 7L);
        inputTopic1.pipeInput("B", "2", 8L);
        inputTopic1.pipeInput("C", "3", 9L);
        inputTopic1.pipeInput("A", "1", 10L);
        inputTopic1.pipeInput("B", "2", 11L);
        inputTopic1.pipeInput("D", "4", 12L);
        inputTopic1.pipeInput("B", "2", 13L);
        inputTopic1.pipeInput("C", "3", 14L);
        // out-of-order records, still accepted because they fall within the 100 ms grace period
        inputTopic1.pipeInput("B", "1", 3L);
        inputTopic1.pipeInput("B", "2", 2L);
        inputTopic1.pipeInput("B", "3", 9L);
    }
    assertEquals(
        asList(
            new KeyValueTimestamp<>(new Windowed<>("A", new TimeWindow(0, 10)), "0+1", 0),
            new KeyValueTimestamp<>(new Windowed<>("B", new TimeWindow(0, 10)), "0+2", 1),
            new KeyValueTimestamp<>(new Windowed<>("C", new TimeWindow(0, 10)), "0+3", 2),
            new KeyValueTimestamp<>(new Windowed<>("D", new TimeWindow(0, 10)), "0+4", 3),
            new KeyValueTimestamp<>(new Windowed<>("A", new TimeWindow(0, 10)), "0+1+1", 4),
            new KeyValueTimestamp<>(new Windowed<>("A", new TimeWindow(0, 10)), "0+1+1+1", 5),
            new KeyValueTimestamp<>(new Windowed<>("A", new TimeWindow(5, 15)), "0+1", 5),
            new KeyValueTimestamp<>(new Windowed<>("B", new TimeWindow(0, 10)), "0+2+2", 6),
            new KeyValueTimestamp<>(new Windowed<>("B", new TimeWindow(5, 15)), "0+2", 6),
            new KeyValueTimestamp<>(new Windowed<>("D", new TimeWindow(0, 10)), "0+4+4", 7),
            new KeyValueTimestamp<>(new Windowed<>("D", new TimeWindow(5, 15)), "0+4", 7),
            new KeyValueTimestamp<>(new Windowed<>("B", new TimeWindow(0, 10)), "0+2+2+2", 8),
            new KeyValueTimestamp<>(new Windowed<>("B", new TimeWindow(5, 15)), "0+2+2", 8),
            new KeyValueTimestamp<>(new Windowed<>("C", new TimeWindow(0, 10)), "0+3+3", 9),
            new KeyValueTimestamp<>(new Windowed<>("C", new TimeWindow(5, 15)), "0+3", 9),
            new KeyValueTimestamp<>(new Windowed<>("A", new TimeWindow(5, 15)), "0+1+1", 10),
            new KeyValueTimestamp<>(new Windowed<>("A", new TimeWindow(10, 20)), "0+1", 10),
            new KeyValueTimestamp<>(new Windowed<>("B", new TimeWindow(5, 15)), "0+2+2+2", 11),
            new KeyValueTimestamp<>(new Windowed<>("B", new TimeWindow(10, 20)), "0+2", 11),
            new KeyValueTimestamp<>(new Windowed<>("D", new TimeWindow(5, 15)), "0+4+4", 12),
            new KeyValueTimestamp<>(new Windowed<>("D", new TimeWindow(10, 20)), "0+4", 12),
            new KeyValueTimestamp<>(new Windowed<>("B", new TimeWindow(5, 15)), "0+2+2+2+2", 13),
            new KeyValueTimestamp<>(new Windowed<>("B", new TimeWindow(10, 20)), "0+2+2", 13),
            new KeyValueTimestamp<>(new Windowed<>("C", new TimeWindow(5, 15)), "0+3+3", 14),
            new KeyValueTimestamp<>(new Windowed<>("C", new TimeWindow(10, 20)), "0+3", 14),
            new KeyValueTimestamp<>(new Windowed<>("B", new TimeWindow(0, 10)), "0+2+2+2+1", 8),
            new KeyValueTimestamp<>(new Windowed<>("B", new TimeWindow(0, 10)), "0+2+2+2+1+2", 8),
            new KeyValueTimestamp<>(new Windowed<>("B", new TimeWindow(0, 10)), "0+2+2+2+1+2+3", 9),
            new KeyValueTimestamp<>(new Windowed<>("B", new TimeWindow(5, 15)), "0+2+2+2+2+3", 13)),
        supplier.theCapturedProcessor().processed());
}
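The expected output reflects the hopping windows of size 10 ms advancing by 5 ms: every record with timestamp 5 or greater falls into two overlapping windows and therefore produces two updates. A small standalone sketch of that window assignment (illustration only, not Kafka API code) is:

// Standalone illustration: a record timestamp belongs to every window whose start is a
// multiple of the advance and satisfies start <= ts < start + size.
final long size = 10L;
final long advance = 5L;
final long ts = 7L;
for (long start = (ts / advance) * advance; start >= 0 && start + size > ts; start -= advance) {
    System.out.println("window [" + start + "," + (start + size) + ")");
}
// prints [5,15) and [0,10): the record at t=7 updates both windows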
Use of org.apache.kafka.streams.TopologyTestDriver in project kafka by apache.
From the class KTableFilterTest, the method doTestSendingOldValue:
private void doTestSendingOldValue(final StreamsBuilder builder,
                                   final KTableImpl<String, Integer, Integer> table1,
                                   final KTableImpl<String, Integer, Integer> table2,
                                   final String topic1) {
    final MockApiProcessorSupplier<String, Integer, Void, Void> supplier = new MockApiProcessorSupplier<>();
    final Topology topology = builder.build();
    topology.addProcessor("proc1", supplier, table1.name);
    topology.addProcessor("proc2", supplier, table2.name);
    final boolean parentSendOldVals = table1.sendingOldValueEnabled();
    try (final TopologyTestDriver driver = new TopologyTestDriver(topology, props)) {
        final TestInputTopic<String, Integer> inputTopic =
            driver.createInputTopic(topic1, new StringSerializer(), new IntegerSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
        inputTopic.pipeInput("A", 1, 5L);
        inputTopic.pipeInput("B", 1, 10L);
        inputTopic.pipeInput("C", 1, 15L);
        final List<MockApiProcessor<String, Integer, Void, Void>> processors = supplier.capturedProcessors(2);
        final MockApiProcessor<String, Integer, Void, Void> table1Output = processors.get(0);
        final MockApiProcessor<String, Integer, Void, Void> table2Output = processors.get(1);
        table1Output.checkAndClearProcessResult(
            new KeyValueTimestamp<>("A", new Change<>(1, null), 5),
            new KeyValueTimestamp<>("B", new Change<>(1, null), 10),
            new KeyValueTimestamp<>("C", new Change<>(1, null), 15));
        table2Output.checkEmptyAndClearProcessResult();
        inputTopic.pipeInput("A", 2, 15L);
        inputTopic.pipeInput("B", 2, 8L);
        table1Output.checkAndClearProcessResult(
            new KeyValueTimestamp<>("A", new Change<>(2, parentSendOldVals ? 1 : null), 15),
            new KeyValueTimestamp<>("B", new Change<>(2, parentSendOldVals ? 1 : null), 8));
        table2Output.checkAndClearProcessResult(
            new KeyValueTimestamp<>("A", new Change<>(2, null), 15),
            new KeyValueTimestamp<>("B", new Change<>(2, null), 8));
        inputTopic.pipeInput("A", 3, 20L);
        table1Output.checkAndClearProcessResult(
            new KeyValueTimestamp<>("A", new Change<>(3, parentSendOldVals ? 2 : null), 20));
        table2Output.checkAndClearProcessResult(
            new KeyValueTimestamp<>("A", new Change<>(null, 2), 20));
        inputTopic.pipeInput("A", null, 10L);
        inputTopic.pipeInput("B", null, 20L);
        table1Output.checkAndClearProcessResult(
            new KeyValueTimestamp<>("A", new Change<>(null, parentSendOldVals ? 3 : null), 10),
            new KeyValueTimestamp<>("B", new Change<>(null, parentSendOldVals ? 2 : null), 20));
        table2Output.checkAndClearProcessResult(
            new KeyValueTimestamp<>("B", new Change<>(null, 2), 20));
    }
}
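The Change objects in these expectations carry the new value first and the old value second, and the old value is only populated when the upstream table forwards old values. A hypothetical downstream check, assuming Change exposes its two values as public newValue/oldValue fields as implied by the expectations above, might look like:

// Hypothetical consumer of a Change, for illustration only.
final Change<Integer> change = new Change<>(2, 1);
if (change.oldValue != null) {
    final int delta = change.newValue - change.oldValue;  // incremental update is possible
    System.out.println("delta = " + delta);
}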
Use of org.apache.kafka.streams.TopologyTestDriver in project kafka by apache.
From the class KTableFilterTest, the method doTestNotSendingOldValue:
private void doTestNotSendingOldValue(final StreamsBuilder builder,
                                      final KTableImpl<String, Integer, Integer> table1,
                                      final KTableImpl<String, Integer, Integer> table2,
                                      final String topic1) {
    final MockApiProcessorSupplier<String, Integer, Void, Void> supplier = new MockApiProcessorSupplier<>();
    // StreamsBuilder#build() returns the same underlying Topology on every call,
    // so both processors and the driver below operate on one topology.
    builder.build().addProcessor("proc1", supplier, table1.name);
    builder.build().addProcessor("proc2", supplier, table2.name);
    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        final TestInputTopic<String, Integer> inputTopic =
            driver.createInputTopic(topic1, new StringSerializer(), new IntegerSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
        inputTopic.pipeInput("A", 1, 5L);
        inputTopic.pipeInput("B", 1, 10L);
        inputTopic.pipeInput("C", 1, 15L);
        final List<MockApiProcessor<String, Integer, Void, Void>> processors = supplier.capturedProcessors(2);
        processors.get(0).checkAndClearProcessResult(
            new KeyValueTimestamp<>("A", new Change<>(1, null), 5),
            new KeyValueTimestamp<>("B", new Change<>(1, null), 10),
            new KeyValueTimestamp<>("C", new Change<>(1, null), 15));
        processors.get(1).checkAndClearProcessResult(
            new KeyValueTimestamp<>("A", new Change<>(null, null), 5),
            new KeyValueTimestamp<>("B", new Change<>(null, null), 10),
            new KeyValueTimestamp<>("C", new Change<>(null, null), 15));
        inputTopic.pipeInput("A", 2, 15L);
        inputTopic.pipeInput("B", 2, 8L);
        processors.get(0).checkAndClearProcessResult(
            new KeyValueTimestamp<>("A", new Change<>(2, null), 15),
            new KeyValueTimestamp<>("B", new Change<>(2, null), 8));
        processors.get(1).checkAndClearProcessResult(
            new KeyValueTimestamp<>("A", new Change<>(2, null), 15),
            new KeyValueTimestamp<>("B", new Change<>(2, null), 8));
        inputTopic.pipeInput("A", 3, 20L);
        processors.get(0).checkAndClearProcessResult(
            new KeyValueTimestamp<>("A", new Change<>(3, null), 20));
        processors.get(1).checkAndClearProcessResult(
            new KeyValueTimestamp<>("A", new Change<>(null, null), 20));
        inputTopic.pipeInput("A", null, 10L);
        inputTopic.pipeInput("B", null, 20L);
        processors.get(0).checkAndClearProcessResult(
            new KeyValueTimestamp<>("A", new Change<>(null, null), 10),
            new KeyValueTimestamp<>("B", new Change<>(null, null), 20));
        processors.get(1).checkAndClearProcessResult(
            new KeyValueTimestamp<>("A", new Change<>(null, null), 10),
            new KeyValueTimestamp<>("B", new Change<>(null, null), 20));
    }
}
Use of org.apache.kafka.streams.TopologyTestDriver in project kafka by apache.
From the class KTableFilterTest, the method doTestSkipNullOnMaterialization:
private void doTestSkipNullOnMaterialization(final StreamsBuilder builder,
                                             final KTableImpl<String, String, String> table1,
                                             final KTableImpl<String, String, String> table2,
                                             final String topic1) {
    final MockApiProcessorSupplier<String, String, Void, Void> supplier = new MockApiProcessorSupplier<>();
    final Topology topology = builder.build();
    topology.addProcessor("proc1", supplier, table1.name);
    topology.addProcessor("proc2", supplier, table2.name);
    try (final TopologyTestDriver driver = new TopologyTestDriver(topology, props)) {
        final TestInputTopic<String, String> stringInputTopic =
            driver.createInputTopic(topic1, new StringSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
        stringInputTopic.pipeInput("A", "reject", 5L);
        stringInputTopic.pipeInput("B", "reject", 10L);
        stringInputTopic.pipeInput("C", "reject", 20L);
    }
    final List<MockApiProcessor<String, String, Void, Void>> processors = supplier.capturedProcessors(2);
    processors.get(0).checkAndClearProcessResult(
        new KeyValueTimestamp<>("A", new Change<>("reject", null), 5),
        new KeyValueTimestamp<>("B", new Change<>("reject", null), 10),
        new KeyValueTimestamp<>("C", new Change<>("reject", null), 20));
    processors.get(1).checkEmptyAndClearProcessResult();
}
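The helper above expects a parent table and a filtered, materialized child table whose filter rejects every record the helper pipes in, which is why the second processor sees no output. A hypothetical caller sketch is shown below; the filter predicate, the store name "store2", and the serdes are assumptions for illustration, not the exact setup in KTableFilterTest.

// Hypothetical caller, for illustration only: a source table plus a materialized
// filter that drops the "reject" values piped in by the helper above.
final StreamsBuilder builder = new StreamsBuilder();
final String topic1 = "topic1";
final KTableImpl<String, String, String> table1 = (KTableImpl<String, String, String>)
    builder.table(topic1, Consumed.with(Serdes.String(), Serdes.String()));
final KTableImpl<String, String, String> table2 = (KTableImpl<String, String, String>)
    table1.filter((key, value) -> value.equalsIgnoreCase("accept"), Materialized.as("store2"));
doTestSkipNullOnMaterialization(builder, table1, table2, topic1);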