Use of org.apache.kafka.test.KStreamTestDriver in the Apache Kafka project.
From class KStreamBuilderTest, method testMerge:
@Test
public void testMerge() {
    String topic1 = "topic-1";
    String topic2 = "topic-2";
    KStream<String, String> source1 = builder.stream(topic1);
    KStream<String, String> source2 = builder.stream(topic2);
    // merge the two source streams into a single downstream KStream
    KStream<String, String> merged = builder.merge(source1, source2);
    MockProcessorSupplier<String, String> processorSupplier = new MockProcessorSupplier<>();
    merged.process(processorSupplier);
    driver = new KStreamTestDriver(builder);
    driver.setTime(0L);
    // records from both topics should reach the downstream processor in arrival order
    driver.process(topic1, "A", "aa");
    driver.process(topic2, "B", "bb");
    driver.process(topic2, "C", "cc");
    driver.process(topic1, "D", "dd");
    assertEquals(Utils.mkList("A:aa", "B:bb", "C:cc", "D:dd"), processorSupplier.processed);
}
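The snippet above refers to builder and driver fields that are initialized outside the test method. Below is a minimal sketch of that scaffolding, assuming a JUnit 4 setUp/tearDown pair and a close() call on KStreamTestDriver; the structure is inferred from how the fields are used here, not copied from the Kafka source.

import org.apache.kafka.streams.kstream.KStreamBuilder;
import org.apache.kafka.test.KStreamTestDriver;
import org.junit.After;
import org.junit.Before;

public class KStreamBuilderTest {

    private KStreamBuilder builder;   // topology under construction
    private KStreamTestDriver driver; // created inside each test method

    @Before
    public void setUp() {
        builder = new KStreamBuilder();
    }

    @After
    public void tearDown() {
        // assumption: the driver exposes close() to shut down the topology
        // and release any state stores it created
        if (driver != null) {
            driver.close();
        }
    }
}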
Use of org.apache.kafka.test.KStreamTestDriver in the Apache Kafka project.
From class GlobalKTableJoinsTest, method verifyJoin:
private void verifyJoin(final Map<String, String> expected, final String joinInput) {
    driver = new KStreamTestDriver(builder, stateDir);
    driver.setTime(0L);
    // write some data to the global table
    driver.process(globalTopic, "a", "A");
    driver.process(globalTopic, "b", "B");
    // write some data to the stream
    driver.process(joinInput, "1", "a");
    driver.process(joinInput, "2", "b");
    driver.process(joinInput, "3", "c");
    driver.flushState();
    assertEquals(expected, results);
}
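verifyJoin() relies on fields (builder, stateDir, globalTopic, results) and on a join topology wired up before the helper runs. The following is a rough sketch of that wiring, using GlobalKTable, KeyValueMapper, ValueJoiner and ForeachAction from org.apache.kafka.streams.kstream; the field names, topic names and the inner-join variant are assumptions inferred from how the helper is used, not verbatim GlobalKTableJoinsTest code.

final KStreamBuilder builder = new KStreamBuilder();
final String globalTopic = "global-topic";
final String streamTopic = "stream-topic";
final Map<String, String> results = new HashMap<>();

final GlobalKTable<String, String> global =
    builder.globalTable(Serdes.String(), Serdes.String(), globalTopic, "global-store");
final KStream<String, String> stream =
    builder.stream(Serdes.String(), Serdes.String(), streamTopic);

// The stream's value holds the global table's key, so the KeyValueMapper just
// returns the value; joined results are captured for the assertion in verifyJoin().
stream.join(global, new KeyValueMapper<String, String, String>() {

    @Override
    public String apply(final String key, final String value) {
        return value;
    }
}, new ValueJoiner<String, String, String>() {

    @Override
    public String apply(final String streamValue, final String globalValue) {
        return streamValue + "+" + globalValue;
    }
}).foreach(new ForeachAction<String, String>() {

    @Override
    public void apply(final String key, final String value) {
        results.put(key, value);
    }
});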
Use of org.apache.kafka.test.KStreamTestDriver in the Apache Kafka project.
From class KGroupedStreamImplTest, method shouldAggregateSessionWindows:
@Test
public void shouldAggregateSessionWindows() throws Exception {
    final Map<Windowed<String>, Integer> results = new HashMap<>();
    groupedStream.aggregate(new Initializer<Integer>() {

        @Override
        public Integer apply() {
            return 0;
        }
    }, new Aggregator<String, String, Integer>() {

        @Override
        public Integer apply(final String aggKey, final String value, final Integer aggregate) {
            return aggregate + 1;
        }
    }, new Merger<String, Integer>() {

        @Override
        public Integer apply(final String aggKey, final Integer aggOne, final Integer aggTwo) {
            return aggOne + aggTwo;
        }
    }, SessionWindows.with(30), Serdes.Integer(), "session-store").foreach(new ForeachAction<Windowed<String>, Integer>() {

        @Override
        public void apply(final Windowed<String> key, final Integer value) {
            results.put(key, value);
        }
    });
    driver = new KStreamTestDriver(builder, TestUtils.tempDirectory());
    driver.setTime(10);
    driver.process(TOPIC, "1", "1");
    driver.setTime(15);
    driver.process(TOPIC, "2", "2");
    driver.setTime(30);
    driver.process(TOPIC, "1", "1");
    driver.setTime(70);
    driver.process(TOPIC, "1", "1");
    driver.setTime(90);
    driver.process(TOPIC, "1", "1");
    driver.setTime(100);
    driver.process(TOPIC, "1", "1");
    driver.flushState();
    // key "1" at t=10 and t=30 fall within the 30 ms gap and merge into one session;
    // t=70, 90 and 100 merge into a second session
    assertEquals(Integer.valueOf(2), results.get(new Windowed<>("1", new SessionWindow(10, 30))));
    assertEquals(Integer.valueOf(1), results.get(new Windowed<>("2", new SessionWindow(15, 15))));
    assertEquals(Integer.valueOf(3), results.get(new Windowed<>("1", new SessionWindow(70, 100))));
}
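shouldAggregateSessionWindows() (and shouldReduceSessionWindows() below) assume a groupedStream field built from the TOPIC input and grouped by key. A plausible fixture is sketched here, assuming String serdes and the pre-1.0 groupByKey(Serde, Serde) overload; the exact field names and serdes are assumptions, not copied from KGroupedStreamImplTest.

private static final String TOPIC = "topic";

private final KStreamBuilder builder = new KStreamBuilder();
private KGroupedStream<String, String> groupedStream;
private KStreamTestDriver driver;

@Before
public void before() {
    // group the input stream by its existing key so that session-windowed
    // aggregations and reductions can be applied per key
    groupedStream = builder
        .stream(Serdes.String(), Serdes.String(), TOPIC)
        .groupByKey(Serdes.String(), Serdes.String());
}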
Use of org.apache.kafka.test.KStreamTestDriver in the Apache Kafka project.
From class KGroupedStreamImplTest, method shouldReduceSessionWindows:
@Test
public void shouldReduceSessionWindows() throws Exception {
    final Map<Windowed<String>, String> results = new HashMap<>();
    groupedStream.reduce(new Reducer<String>() {

        @Override
        public String apply(final String value1, final String value2) {
            return value1 + ":" + value2;
        }
    }, SessionWindows.with(30), "session-store").foreach(new ForeachAction<Windowed<String>, String>() {

        @Override
        public void apply(final Windowed<String> key, final String value) {
            results.put(key, value);
        }
    });
    driver = new KStreamTestDriver(builder, TestUtils.tempDirectory());
    driver.setTime(10);
    driver.process(TOPIC, "1", "A");
    driver.setTime(15);
    driver.process(TOPIC, "2", "Z");
    driver.setTime(30);
    driver.process(TOPIC, "1", "B");
    driver.setTime(70);
    driver.process(TOPIC, "1", "A");
    driver.setTime(90);
    driver.process(TOPIC, "1", "B");
    driver.setTime(100);
    driver.process(TOPIC, "1", "C");
    driver.flushState();
    assertEquals("A:B", results.get(new Windowed<>("1", new SessionWindow(10, 30))));
    assertEquals("Z", results.get(new Windowed<>("2", new SessionWindow(15, 15))));
    assertEquals("A:B:C", results.get(new Windowed<>("1", new SessionWindow(70, 100))));
}
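With a Java 8 toolchain the same reduce over session windows can be written much more compactly, since Reducer and ForeachAction are single-method interfaces. This is only a sketch of the equivalent call, not part of the Kafka test, which uses anonymous inner classes:

final Map<Windowed<String>, String> results = new HashMap<>();
groupedStream
    .reduce((value1, value2) -> value1 + ":" + value2,  // concatenate values within one session
            SessionWindows.with(30),                     // close a session after 30 ms of inactivity
            "session-store")
    .foreach(results::put);                              // capture each emitted session result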
Use of org.apache.kafka.test.KStreamTestDriver in the Apache Kafka project.
From class KStreamKStreamJoinTest, method testAsymetricWindowingBefore:
@Test
public void testAsymetricWindowingBefore() throws Exception {
    long time = 1000L;
    KStreamBuilder builder = new KStreamBuilder();
    final int[] expectedKeys = new int[] { 0, 1, 2, 3 };
    KStream<Integer, String> stream1;
    KStream<Integer, String> stream2;
    KStream<Integer, String> joined;
    MockProcessorSupplier<Integer, String> processor;
    processor = new MockProcessorSupplier<>();
    stream1 = builder.stream(intSerde, stringSerde, topic1);
    stream2 = builder.stream(intSerde, stringSerde, topic2);
    // asymmetric window: a stream1 record at ts1 joins stream2 records whose
    // timestamps fall in [ts1 - 100, ts1]
    joined = stream1.join(stream2, MockValueJoiner.TOSTRING_JOINER, JoinWindows.of(0).before(100), intSerde, stringSerde, stringSerde);
    joined.process(processor);
    Collection<Set<String>> copartitionGroups = builder.copartitionGroups();
    assertEquals(1, copartitionGroups.size());
    assertEquals(new HashSet<>(Arrays.asList(topic1, topic2)), copartitionGroups.iterator().next());
    driver = new KStreamTestDriver(builder, stateDir);
    // push four items to the primary stream at times 1000..1003; no output yet
    for (int i = 0; i < expectedKeys.length; i++) {
        setRecordContext(time + i, topic1);
        driver.process(topic1, expectedKeys[i], "X" + expectedKeys[i]);
    }
    processor.checkAndClearProcessResult();
    // push four items to the other stream just before the earliest window opens
    time = 1000L - 100L - 1L;
    setRecordContext(time, topic2);
    for (int expectedKey : expectedKeys) {
        driver.process(topic2, expectedKey, "YY" + expectedKey);
    }
    processor.checkAndClearProcessResult();
    // each one-tick advance brings one more key's window into range
    setRecordContext(++time, topic2);
    for (int expectedKey : expectedKeys) {
        driver.process(topic2, expectedKey, "YY" + expectedKey);
    }
    processor.checkAndClearProcessResult("0:X0+YY0");
    setRecordContext(++time, topic2);
    for (int expectedKey : expectedKeys) {
        driver.process(topic2, expectedKey, "YY" + expectedKey);
    }
    processor.checkAndClearProcessResult("0:X0+YY0", "1:X1+YY1");
    setRecordContext(++time, topic2);
    for (int expectedKey : expectedKeys) {
        driver.process(topic2, expectedKey, "YY" + expectedKey);
    }
    processor.checkAndClearProcessResult("0:X0+YY0", "1:X1+YY1", "2:X2+YY2");
    setRecordContext(++time, topic2);
    for (int expectedKey : expectedKeys) {
        driver.process(topic2, expectedKey, "YY" + expectedKey);
    }
    processor.checkAndClearProcessResult("0:X0+YY0", "1:X1+YY1", "2:X2+YY2", "3:X3+YY3");
    // at time 1000 every stream1 record is still within its window
    time = 1000L;
    setRecordContext(time, topic2);
    for (int expectedKey : expectedKeys) {
        driver.process(topic2, expectedKey, "YY" + expectedKey);
    }
    processor.checkAndClearProcessResult("0:X0+YY0", "1:X1+YY1", "2:X2+YY2", "3:X3+YY3");
    // past time 1000 the windows close one by one, dropping one key per step
    setRecordContext(++time, topic2);
    for (int expectedKey : expectedKeys) {
        driver.process(topic2, expectedKey, "YY" + expectedKey);
    }
    processor.checkAndClearProcessResult("1:X1+YY1", "2:X2+YY2", "3:X3+YY3");
    setRecordContext(++time, topic2);
    for (int expectedKey : expectedKeys) {
        driver.process(topic2, expectedKey, "YY" + expectedKey);
    }
    processor.checkAndClearProcessResult("2:X2+YY2", "3:X3+YY3");
    setRecordContext(++time, topic2);
    for (int expectedKey : expectedKeys) {
        driver.process(topic2, expectedKey, "YY" + expectedKey);
    }
    processor.checkAndClearProcessResult("3:X3+YY3");
    setRecordContext(++time, topic2);
    for (int expectedKey : expectedKeys) {
        driver.process(topic2, expectedKey, "YY" + expectedKey);
    }
    processor.checkAndClearProcessResult();
}
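For reference, the expected outputs above follow directly from the window bounds that JoinWindows.of(0).before(100) implies: a stream2 record at ts2 can join a stream1 record at ts1 only if ts2 lies in [ts1 - 100, ts1]. A small illustration of that membership check follows; it is not Kafka code, just the arithmetic the assertions rely on.

// Returns true when a stream2 record at ts2 falls inside the join window of a
// stream1 record at ts1, for JoinWindows.of(0).before(100): before = 100, after = 0.
static boolean withinWindow(final long ts1, final long ts2) {
    final long before = 100L;
    final long after = 0L;
    return ts2 >= ts1 - before && ts2 <= ts1 + after;
}

// e.g. withinWindow(1000L, 899L) == false  -> no match at time 1000 - 100 - 1
//      withinWindow(1000L, 900L) == true   -> "0:X0+YY0" appears at time 900
//      withinWindow(1000L, 1001L) == false -> key 0 drops out after time 1000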