use of org.apache.kafka.test.KStreamTestDriver in project kafka by apache.
the class KStreamWindowAggregateTest method testAggBasic.
@Test
public void testAggBasic() throws Exception {
final File baseDir = Files.createTempDirectory("test").toFile();
try {
final KStreamBuilder builder = new KStreamBuilder();
String topic1 = "topic1";
KStream<String, String> stream1 = builder.stream(strSerde, strSerde, topic1);
KTable<Windowed<String>, String> table2 = stream1.groupByKey(strSerde, strSerde).aggregate(MockInitializer.STRING_INIT, MockAggregator.TOSTRING_ADDER, TimeWindows.of(10).advanceBy(5), strSerde, "topic1-Canonized");
MockProcessorSupplier<Windowed<String>, String> proc2 = new MockProcessorSupplier<>();
table2.toStream().process(proc2);
driver = new KStreamTestDriver(builder, baseDir);
setRecordContext(0, topic1);
driver.process(topic1, "A", "1");
driver.flushState();
setRecordContext(1, topic1);
driver.process(topic1, "B", "2");
driver.flushState();
setRecordContext(2, topic1);
driver.process(topic1, "C", "3");
driver.flushState();
setRecordContext(3, topic1);
driver.process(topic1, "D", "4");
driver.flushState();
setRecordContext(4, topic1);
driver.process(topic1, "A", "1");
driver.flushState();
setRecordContext(5, topic1);
driver.process(topic1, "A", "1");
driver.flushState();
setRecordContext(6, topic1);
driver.process(topic1, "B", "2");
driver.flushState();
setRecordContext(7, topic1);
driver.process(topic1, "D", "4");
driver.flushState();
setRecordContext(8, topic1);
driver.process(topic1, "B", "2");
driver.flushState();
setRecordContext(9, topic1);
driver.process(topic1, "C", "3");
driver.flushState();
setRecordContext(10, topic1);
driver.process(topic1, "A", "1");
driver.flushState();
setRecordContext(11, topic1);
driver.process(topic1, "B", "2");
driver.flushState();
setRecordContext(12, topic1);
driver.flushState();
driver.process(topic1, "D", "4");
driver.flushState();
setRecordContext(13, topic1);
driver.process(topic1, "B", "2");
driver.flushState();
setRecordContext(14, topic1);
driver.process(topic1, "C", "3");
driver.flushState();
assertEquals(Utils.mkList("[A@0]:0+1", "[B@0]:0+2", "[C@0]:0+3", "[D@0]:0+4", "[A@0]:0+1+1", "[A@0]:0+1+1+1", "[A@5]:0+1", "[B@0]:0+2+2", "[B@5]:0+2", "[D@0]:0+4+4", "[D@5]:0+4", "[B@0]:0+2+2+2", "[B@5]:0+2+2", "[C@0]:0+3+3", "[C@5]:0+3", "[A@5]:0+1+1", "[A@10]:0+1", "[B@5]:0+2+2+2", "[B@10]:0+2", "[D@5]:0+4+4", "[D@10]:0+4", "[B@5]:0+2+2+2+2", "[B@10]:0+2+2", "[C@5]:0+3+3", "[C@10]:0+3"), proc2.processed);
} finally {
Utils.delete(baseDir);
}
}
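A note on the expected output: TimeWindows.of(10).advanceBy(5) creates hopping windows, so a record with timestamp t lands in every window whose start is a non-negative multiple of 5 satisfying start <= t < start + 10; that is why keys appear under one window at times 0-4 and under two windows from time 5 onward (for example "[D@5]" and "[D@10]" for the record at time 12). A minimal, self-contained sketch of that arithmetic; hoppingWindowStartsFor is a hypothetical helper, not part of the Kafka Streams API:
import java.util.ArrayList;
import java.util.List;
public class HoppingWindowMath {
    // All window start times (size 10, advance 5) that contain the given timestamp,
    // mirroring TimeWindows.of(10).advanceBy(5) in the test above.
    static List<Long> hoppingWindowStartsFor(final long timestamp, final long size, final long advance) {
        final List<Long> starts = new ArrayList<>();
        // earliest window start that can still contain the timestamp, aligned to the advance
        long first = Math.max(0, timestamp - size + advance);
        first -= first % advance;
        for (long start = first; start <= timestamp; start += advance) {
            starts.add(start);
        }
        return starts;
    }
    public static void main(final String[] args) {
        System.out.println(hoppingWindowStartsFor(3, 10, 5));   // [0]     -> "[D@0]"
        System.out.println(hoppingWindowStartsFor(7, 10, 5));   // [0, 5]  -> "[D@0]" and "[D@5]"
        System.out.println(hoppingWindowStartsFor(12, 10, 5));  // [5, 10] -> "[D@5]" and "[D@10]"
    }
}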
use of org.apache.kafka.test.KStreamTestDriver in project kafka by apache.
the class KTableAggregateTest method shouldForwardToCorrectProcessorNodeWhenMultiCacheEvictions.
@Test
public void shouldForwardToCorrectProcessorNodeWhenMultiCacheEvictions() throws Exception {
final String tableOne = "tableOne";
final String tableTwo = "tableTwo";
final KStreamBuilder builder = new KStreamBuilder();
final String reduceTopic = "TestDriver-reducer-store-repartition";
final Map<String, Long> reduceResults = new HashMap<>();
final KTable<String, String> one = builder.table(Serdes.String(), Serdes.String(), tableOne, tableOne);
final KTable<Long, String> two = builder.table(Serdes.Long(), Serdes.String(), tableTwo, tableTwo);
final KTable<String, Long> reduce = two.groupBy(new KeyValueMapper<Long, String, KeyValue<String, Long>>() {
@Override
public KeyValue<String, Long> apply(final Long key, final String value) {
return new KeyValue<>(value, key);
}
}, Serdes.String(), Serdes.Long()).reduce(new Reducer<Long>() {
@Override
public Long apply(final Long value1, final Long value2) {
return value1 + value2;
}
}, new Reducer<Long>() {
@Override
public Long apply(final Long value1, final Long value2) {
return value1 - value2;
}
}, "reducer-store");
reduce.foreach(new ForeachAction<String, Long>() {
@Override
public void apply(final String key, final Long value) {
reduceResults.put(key, value);
}
});
one.leftJoin(reduce, new ValueJoiner<String, Long, String>() {
@Override
public String apply(final String value1, final Long value2) {
return value1 + ":" + value2;
}
}).mapValues(new ValueMapper<String, String>() {
@Override
public String apply(final String value) {
return value;
}
});
driver = new KStreamTestDriver(builder, stateDir, 111);
driver.process(reduceTopic, "1", new Change<>(1L, null));
driver.process("tableOne", "2", "2");
// this should trigger eviction on the reducer-store topic
driver.process(reduceTopic, "2", new Change<>(2L, null));
// this won't trigger an eviction as it is the same value
driver.process(reduceTopic, "2", new Change<>(2L, null));
assertEquals(Long.valueOf(2L), reduceResults.get("2"));
// this will trigger an eviction on the tableOne topic,
// which in turn will cause an eviction on the reducer-store topic. It will flush
// key 2 as it is the only dirty entry in the cache
driver.process("tableOne", "1", "5");
assertEquals(Long.valueOf(4L), reduceResults.get("2"));
}
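The evictions this test exercises come from the Streams record cache; the third KStreamTestDriver argument (111) is presumably a deliberately tiny cache size so that nearly every update is evicted and forwarded to the downstream processor nodes. In a regular application the same knob is cache.max.bytes.buffering, roughly as sketched below (application id and broker address are placeholders; assumed imports: java.util.Properties, org.apache.kafka.streams.KafkaStreams, org.apache.kafka.streams.StreamsConfig):
final Properties props = new Properties();
props.put(StreamsConfig.APPLICATION_ID_CONFIG, "cache-eviction-example");  // placeholder
props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");       // placeholder
// A tiny cache, like the 111 bytes above, forces an eviction on almost every update;
// the default of 10 MB coalesces many intermediate updates before forwarding them.
props.put(StreamsConfig.CACHE_MAX_BYTES_BUFFERING_CONFIG, 111L);
final KafkaStreams streams = new KafkaStreams(builder, new StreamsConfig(props));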
use of org.apache.kafka.test.KStreamTestDriver in project kafka by apache.
the class KTableAggregateTest method testAggRepartition.
@Test
public void testAggRepartition() throws Exception {
final KStreamBuilder builder = new KStreamBuilder();
final String topic1 = "topic1";
final MockProcessorSupplier<String, String> proc = new MockProcessorSupplier<>();
KTable<String, String> table1 = builder.table(stringSerde, stringSerde, topic1, "anyStoreName");
KTable<String, String> table2 = table1.groupBy(new KeyValueMapper<String, String, KeyValue<String, String>>() {
@Override
public KeyValue<String, String> apply(String key, String value) {
switch(key) {
case "null":
return KeyValue.pair(null, value);
case "NULL":
return null;
default:
return KeyValue.pair(value, value);
}
}
}, stringSerde, stringSerde).aggregate(MockInitializer.STRING_INIT, MockAggregator.TOSTRING_ADDER, MockAggregator.TOSTRING_REMOVER, stringSerde, "topic1-Canonized");
table2.toStream().process(proc);
driver = new KStreamTestDriver(builder, stateDir);
driver.process(topic1, "A", "1");
driver.flushState();
driver.process(topic1, "A", null);
driver.flushState();
driver.process(topic1, "A", "1");
driver.flushState();
driver.process(topic1, "B", "2");
driver.flushState();
driver.process(topic1, "null", "3");
driver.flushState();
driver.process(topic1, "B", "4");
driver.flushState();
driver.process(topic1, "NULL", "5");
driver.flushState();
driver.process(topic1, "B", "7");
driver.flushState();
assertEquals(Utils.mkList("1:0+1", "1:0+1-1", "1:0+1-1+1", "2:0+2", //noop
"2:0+2-2", "4:0+4", //noop
"4:0+4-4", "7:0+7"), proc.processed);
}
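The two //noop markers in the expected list correspond to the inputs ("null", "3") and ("NULL", "5"): the selector maps the first to a KeyValue with a null key and returns null outright for the second, and neither record contributes to any aggregate. The same selector written as a Java 8 lambda, a sketch equivalent to the anonymous class above:
final KeyValueMapper<String, String, KeyValue<String, String>> selector = (key, value) -> {
    switch (key) {
        case "null":
            return KeyValue.pair(null, value);  // null key: produces no aggregation update
        case "NULL":
            return null;                        // null mapping: produces no aggregation update
        default:
            return KeyValue.pair(value, value); // re-key the record by its value
    }
};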
use of org.apache.kafka.test.KStreamTestDriver in project kafka by apache.
the class KTableAggregateTest method testAggBasic.
@Test
public void testAggBasic() throws Exception {
final KStreamBuilder builder = new KStreamBuilder();
final String topic1 = "topic1";
final MockProcessorSupplier<String, String> proc = new MockProcessorSupplier<>();
KTable<String, String> table1 = builder.table(stringSerde, stringSerde, topic1, "anyStoreName");
KTable<String, String> table2 = table1.groupBy(MockKeyValueMapper.<String, String>NoOpKeyValueMapper(), stringSerde, stringSerde).aggregate(MockInitializer.STRING_INIT, MockAggregator.TOSTRING_ADDER, MockAggregator.TOSTRING_REMOVER, stringSerde, "topic1-Canonized");
table2.toStream().process(proc);
driver = new KStreamTestDriver(builder, stateDir);
driver.process(topic1, "A", "1");
driver.flushState();
driver.process(topic1, "B", "2");
driver.flushState();
driver.process(topic1, "A", "3");
driver.flushState();
driver.process(topic1, "B", "4");
driver.flushState();
driver.process(topic1, "C", "5");
driver.flushState();
driver.process(topic1, "D", "6");
driver.flushState();
driver.process(topic1, "B", "7");
driver.flushState();
driver.process(topic1, "C", "8");
driver.flushState();
assertEquals(Utils.mkList("A:0+1", "B:0+2", "A:0+1-1+3", "B:0+2-2+4", "C:0+5", "D:0+6", "B:0+2-2+4-4+7", "C:0+5-5+8"), proc.processed);
}
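Every update to an existing key is applied as a removal of the old value followed by an addition of the new one against the same aggregate, which is why A changing from "1" to "3" appears as "A:0+1-1+3". Judging by the expected strings, MockInitializer.STRING_INIT starts each aggregate at "0" and the TOSTRING adder/remover append "+value" and "-value"; a lambda sketch of equivalent aggregators (the mocks' actual implementations may differ):
final Initializer<String> initializer = () -> "0";
final Aggregator<String, String, String> adder = (key, value, aggregate) -> aggregate + "+" + value;
final Aggregator<String, String, String> remover = (key, value, aggregate) -> aggregate + "-" + value;
table1.groupBy(MockKeyValueMapper.<String, String>NoOpKeyValueMapper(), stringSerde, stringSerde)
    .aggregate(initializer, adder, remover, stringSerde, "topic1-Canonized");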
use of org.apache.kafka.test.KStreamTestDriver in project kafka by apache.
the class KTableAggregateTest method testCountCoalesced.
@Test
public void testCountCoalesced() throws IOException {
final KStreamBuilder builder = new KStreamBuilder();
final String input = "count-test-input";
final MockProcessorSupplier<String, Long> proc = new MockProcessorSupplier<>();
builder.table(Serdes.String(), Serdes.String(), input, "anyStoreName").groupBy(MockKeyValueMapper.<String, String>SelectValueKeyValueMapper(), stringSerde, stringSerde).count("count").toStream().process(proc);
driver = new KStreamTestDriver(builder, stateDir);
driver.process(input, "A", "green");
driver.process(input, "B", "green");
driver.process(input, "A", "blue");
driver.process(input, "C", "yellow");
driver.process(input, "D", "green");
driver.flushState();
assertEquals(Utils.mkList("blue:1", "yellow:1", "green:2"), proc.processed);
}
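Because all five records are processed before the single flushState() call, the record cache coalesces the intermediate updates per grouping key: the count for "green" moves up and down as A, B and D arrive and A later switches to "blue", but only the final count per colour is forwarded. A sketch of the same pipeline with the value-selecting mapper spelled out as a lambda, assuming MockKeyValueMapper.SelectValueKeyValueMapper simply re-keys each record by its value:
builder.table(Serdes.String(), Serdes.String(), input, "anyStoreName")
    .groupBy((key, value) -> KeyValue.pair(value, value), stringSerde, stringSerde)
    .count("count")
    .toStream()
    .process(proc);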