Use of org.apache.kafka.test.KStreamTestDriver in project kafka by apache.
The class KStreamForeachTest, method testForeach:
@Test
public void testForeach() {
    // Given
    List<KeyValue<Integer, String>> inputRecords = Arrays.asList(
        new KeyValue<>(0, "zero"),
        new KeyValue<>(1, "one"),
        new KeyValue<>(2, "two"),
        new KeyValue<>(3, "three"));
    List<KeyValue<Integer, String>> expectedRecords = Arrays.asList(
        new KeyValue<>(0, "ZERO"),
        new KeyValue<>(2, "ONE"),
        new KeyValue<>(4, "TWO"),
        new KeyValue<>(6, "THREE"));
    final List<KeyValue<Integer, String>> actualRecords = new ArrayList<>();
    ForeachAction<Integer, String> action = new ForeachAction<Integer, String>() {

        @Override
        public void apply(Integer key, String value) {
            actualRecords.add(new KeyValue<>(key * 2, value.toUpperCase(Locale.ROOT)));
        }
    };
    // When
    KStreamBuilder builder = new KStreamBuilder();
    KStream<Integer, String> stream = builder.stream(intSerde, stringSerde, topicName);
    stream.foreach(action);
    // Then
    driver = new KStreamTestDriver(builder);
    for (KeyValue<Integer, String> record : inputRecords) {
        driver.process(topicName, record.key, record.value);
    }
    assertEquals(expectedRecords.size(), actualRecords.size());
    for (int i = 0; i < expectedRecords.size(); i++) {
        KeyValue<Integer, String> expectedRecord = expectedRecords.get(i);
        KeyValue<Integer, String> actualRecord = actualRecords.get(i);
        assertEquals(expectedRecord, actualRecord);
    }
}
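For comparison, here is a minimal, self-contained sketch of the same foreach check written against the public TopologyTestDriver API that newer Kafka Streams releases provide instead of the internal KStreamTestDriver. The topic name, application id, bootstrap setting, and class name are illustrative assumptions, not taken from the original test.

import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import java.util.Properties;

import org.apache.kafka.common.serialization.IntegerSerializer;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.serialization.StringSerializer;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.TestInputTopic;
import org.apache.kafka.streams.TopologyTestDriver;
import org.apache.kafka.streams.kstream.Consumed;

public class ForeachSketch {

    public static void main(final String[] args) {
        // Side-effect sink observed afterwards, mirroring actualRecords in the test above.
        final List<KeyValue<Integer, String>> seen = new ArrayList<>();

        final StreamsBuilder builder = new StreamsBuilder();
        builder.stream("input-topic", Consumed.with(Serdes.Integer(), Serdes.String()))
            .foreach((key, value) -> seen.add(new KeyValue<>(key * 2, value.toUpperCase(Locale.ROOT))));

        final Properties props = new Properties();
        props.put(StreamsConfig.APPLICATION_ID_CONFIG, "foreach-sketch");    // assumed app id
        props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); // never contacted by the test driver

        try (TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
            final TestInputTopic<Integer, String> input =
                driver.createInputTopic("input-topic", new IntegerSerializer(), new StringSerializer());
            input.pipeInput(0, "zero");
            input.pipeInput(1, "one");
        }

        // Expect [KeyValue(0, ZERO), KeyValue(2, ONE)]
        System.out.println(seen);
    }
}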
The class KStreamKStreamJoinTest, method testOuterJoin:
@Test
public void testOuterJoin() throws Exception {
    KStreamBuilder builder = new KStreamBuilder();
    final int[] expectedKeys = new int[] { 0, 1, 2, 3 };
    KStream<Integer, String> stream1;
    KStream<Integer, String> stream2;
    KStream<Integer, String> joined;
    MockProcessorSupplier<Integer, String> processor;
    processor = new MockProcessorSupplier<>();
    stream1 = builder.stream(intSerde, stringSerde, topic1);
    stream2 = builder.stream(intSerde, stringSerde, topic2);
    joined = stream1.outerJoin(stream2, MockValueJoiner.TOSTRING_JOINER, JoinWindows.of(100), intSerde, stringSerde, stringSerde);
    joined.process(processor);
    Collection<Set<String>> copartitionGroups = builder.copartitionGroups();
    assertEquals(1, copartitionGroups.size());
    assertEquals(new HashSet<>(Arrays.asList(topic1, topic2)), copartitionGroups.iterator().next());
    driver = new KStreamTestDriver(builder, stateDir);
    driver.setTime(0L);
    // push two items to the primary stream; the other window is empty, so each record joins with null
    for (int i = 0; i < 2; i++) {
        driver.process(topic1, expectedKeys[i], "X" + expectedKeys[i]);
    }
    processor.checkAndClearProcessResult("0:X0+null", "1:X1+null");
    // push two items to the other stream; both match the buffered primary records
    for (int i = 0; i < 2; i++) {
        driver.process(topic2, expectedKeys[i], "Y" + expectedKeys[i]);
    }
    processor.checkAndClearProcessResult("0:X0+Y0", "1:X1+Y1");
    // push all four items to the primary stream; keys 2 and 3 have no match in the other window yet
    for (int expectedKey : expectedKeys) {
        driver.process(topic1, expectedKey, "X" + expectedKey);
    }
    processor.checkAndClearProcessResult("0:X0+Y0", "1:X1+Y1", "2:X2+null", "3:X3+null");
    // push all four items to the other stream; each joins with every buffered primary record of the same key
    for (int expectedKey : expectedKeys) {
        driver.process(topic2, expectedKey, "YY" + expectedKey);
    }
    processor.checkAndClearProcessResult("0:X0+YY0", "0:X0+YY0", "1:X1+YY1", "1:X1+YY1", "2:X2+YY2", "3:X3+YY3");
    // push all four items to the primary stream again
    for (int expectedKey : expectedKeys) {
        driver.process(topic1, expectedKey, "XX" + expectedKey);
    }
    processor.checkAndClearProcessResult("0:XX0+Y0", "0:XX0+YY0", "1:XX1+Y1", "1:XX1+YY1", "2:XX2+YY2", "3:XX3+YY3");
    // push two more items to the other stream
    for (int i = 0; i < 2; i++) {
        driver.process(topic2, expectedKeys[i], "YYY" + expectedKeys[i]);
    }
    processor.checkAndClearProcessResult("0:X0+YYY0", "0:X0+YYY0", "0:XX0+YYY0", "1:X1+YYY1", "1:X1+YYY1", "1:XX1+YYY1");
}
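A note on the expected strings: the leading "key:" prefix is added by the MockProcessorSupplier that records what the downstream processor saw, and the "X0+Y0"-style value comes from MockValueJoiner.TOSTRING_JOINER, which (as the assertions suggest) simply concatenates the two joined values with a "+". A minimal stand-in, shown only to make the expected output readable; the class name ToStringJoinerSketch is illustrative:

import org.apache.kafka.streams.kstream.ValueJoiner;

public class ToStringJoinerSketch {

    // Joins the two values into "left+right"; an unmatched side shows up as the string "null".
    public static final ValueJoiner<Object, Object, String> TOSTRING_JOINER =
        (value1, value2) -> value1 + "+" + value2;

    public static void main(final String[] args) {
        System.out.println(TOSTRING_JOINER.apply("X0", "Y0"));  // X0+Y0
        System.out.println(TOSTRING_JOINER.apply("X2", null));  // X2+null
    }
}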
The class KStreamKStreamJoinTest, method testJoin:
@Test
public void testJoin() throws Exception {
    KStreamBuilder builder = new KStreamBuilder();
    final int[] expectedKeys = new int[] { 0, 1, 2, 3 };
    KStream<Integer, String> stream1;
    KStream<Integer, String> stream2;
    KStream<Integer, String> joined;
    MockProcessorSupplier<Integer, String> processor;
    processor = new MockProcessorSupplier<>();
    stream1 = builder.stream(intSerde, stringSerde, topic1);
    stream2 = builder.stream(intSerde, stringSerde, topic2);
    joined = stream1.join(stream2, MockValueJoiner.TOSTRING_JOINER, JoinWindows.of(100), intSerde, stringSerde, stringSerde);
    joined.process(processor);
    Collection<Set<String>> copartitionGroups = builder.copartitionGroups();
    assertEquals(1, copartitionGroups.size());
    assertEquals(new HashSet<>(Arrays.asList(topic1, topic2)), copartitionGroups.iterator().next());
    driver = new KStreamTestDriver(builder, stateDir);
    driver.setTime(0L);
    // push two items to the primary stream; the other window is empty, so the inner join produces nothing
    for (int i = 0; i < 2; i++) {
        driver.process(topic1, expectedKeys[i], "X" + expectedKeys[i]);
    }
    processor.checkAndClearProcessResult();
    // push two items to the other stream; both match the buffered primary records
    for (int i = 0; i < 2; i++) {
        driver.process(topic2, expectedKeys[i], "Y" + expectedKeys[i]);
    }
    processor.checkAndClearProcessResult("0:X0+Y0", "1:X1+Y1");
    // push all four items to the primary stream; only keys 0 and 1 find a match in the other window
    for (int expectedKey : expectedKeys) {
        driver.process(topic1, expectedKey, "X" + expectedKey);
    }
    processor.checkAndClearProcessResult("0:X0+Y0", "1:X1+Y1");
    // push all four items to the other stream; each joins with every buffered primary record of the same key
    for (int expectedKey : expectedKeys) {
        driver.process(topic2, expectedKey, "YY" + expectedKey);
    }
    processor.checkAndClearProcessResult("0:X0+YY0", "0:X0+YY0", "1:X1+YY1", "1:X1+YY1", "2:X2+YY2", "3:X3+YY3");
    // push all four items to the primary stream again
    for (int expectedKey : expectedKeys) {
        driver.process(topic1, expectedKey, "XX" + expectedKey);
    }
    processor.checkAndClearProcessResult("0:XX0+Y0", "0:XX0+YY0", "1:XX1+Y1", "1:XX1+YY1", "2:XX2+YY2", "3:XX3+YY3");
    // push two more items to the other stream
    for (int i = 0; i < 2; i++) {
        driver.process(topic2, expectedKeys[i], "YYY" + expectedKeys[i]);
    }
    processor.checkAndClearProcessResult("0:X0+YYY0", "0:X0+YYY0", "0:XX0+YYY0", "1:X1+YYY1", "1:X1+YYY1", "1:XX1+YYY1");
}
The class KStreamKStreamJoinTest, method testAsymetricWindowingAfter:
@Test
public void testAsymetricWindowingAfter() throws Exception {
    long time = 1000L;
    KStreamBuilder builder = new KStreamBuilder();
    final int[] expectedKeys = new int[] { 0, 1, 2, 3 };
    KStream<Integer, String> stream1;
    KStream<Integer, String> stream2;
    KStream<Integer, String> joined;
    MockProcessorSupplier<Integer, String> processor;
    processor = new MockProcessorSupplier<>();
    stream1 = builder.stream(intSerde, stringSerde, topic1);
    stream2 = builder.stream(intSerde, stringSerde, topic2);
    // window allows matches only at the same timestamp or up to 100 ms after the primary record
    joined = stream1.join(stream2, MockValueJoiner.TOSTRING_JOINER, JoinWindows.of(0).after(100), intSerde, stringSerde, stringSerde);
    joined.process(processor);
    Collection<Set<String>> copartitionGroups = builder.copartitionGroups();
    assertEquals(1, copartitionGroups.size());
    assertEquals(new HashSet<>(Arrays.asList(topic1, topic2)), copartitionGroups.iterator().next());
    driver = new KStreamTestDriver(builder, stateDir);
    // push four items to the primary stream with timestamps 1000..1003; the other window is empty
    for (int i = 0; i < expectedKeys.length; i++) {
        setRecordContext(time + i, topic1);
        driver.process(topic1, expectedKeys[i], "X" + expectedKeys[i]);
    }
    processor.checkAndClearProcessResult();
    // at timestamp 999 no primary record has opened its [ts, ts + 100] window yet
    time = 1000L - 1L;
    setRecordContext(time, topic2);
    for (int expectedKey : expectedKeys) {
        driver.process(topic2, expectedKey, "YY" + expectedKey);
    }
    processor.checkAndClearProcessResult();
    // as the timestamp advances, one more primary record falls into the window per step
    setRecordContext(++time, topic2);
    for (int expectedKey : expectedKeys) {
        driver.process(topic2, expectedKey, "YY" + expectedKey);
    }
    processor.checkAndClearProcessResult("0:X0+YY0");
    setRecordContext(++time, topic2);
    for (int expectedKey : expectedKeys) {
        driver.process(topic2, expectedKey, "YY" + expectedKey);
    }
    processor.checkAndClearProcessResult("0:X0+YY0", "1:X1+YY1");
    setRecordContext(++time, topic2);
    for (int expectedKey : expectedKeys) {
        driver.process(topic2, expectedKey, "YY" + expectedKey);
    }
    processor.checkAndClearProcessResult("0:X0+YY0", "1:X1+YY1", "2:X2+YY2");
    setRecordContext(++time, topic2);
    for (int expectedKey : expectedKeys) {
        driver.process(topic2, expectedKey, "YY" + expectedKey);
    }
    processor.checkAndClearProcessResult("0:X0+YY0", "1:X1+YY1", "2:X2+YY2", "3:X3+YY3");
    // at timestamp 1100 all four primary records are still within their windows
    time = 1000 + 100L;
    setRecordContext(time, topic2);
    for (int expectedKey : expectedKeys) {
        driver.process(topic2, expectedKey, "YY" + expectedKey);
    }
    processor.checkAndClearProcessResult("0:X0+YY0", "1:X1+YY1", "2:X2+YY2", "3:X3+YY3");
    // beyond that, one primary record per step falls out of its window
    setRecordContext(++time, topic2);
    for (int expectedKey : expectedKeys) {
        driver.process(topic2, expectedKey, "YY" + expectedKey);
    }
    processor.checkAndClearProcessResult("1:X1+YY1", "2:X2+YY2", "3:X3+YY3");
    setRecordContext(++time, topic2);
    for (int expectedKey : expectedKeys) {
        driver.process(topic2, expectedKey, "YY" + expectedKey);
    }
    processor.checkAndClearProcessResult("2:X2+YY2", "3:X3+YY3");
    setRecordContext(++time, topic2);
    for (int expectedKey : expectedKeys) {
        driver.process(topic2, expectedKey, "YY" + expectedKey);
    }
    processor.checkAndClearProcessResult("3:X3+YY3");
    setRecordContext(++time, topic2);
    for (int expectedKey : expectedKeys) {
        driver.process(topic2, expectedKey, "YY" + expectedKey);
    }
    processor.checkAndClearProcessResult();
}
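For reference, here is a small standalone sketch of the two window shapes exercised by these join tests. It uses the Duration-based JoinWindows methods of newer Kafka Streams releases (the tests above pass raw milliseconds); the class name is illustrative, and this assumes a version that still offers JoinWindows.of(Duration) and after(Duration).

import java.time.Duration;

import org.apache.kafka.streams.kstream.JoinWindows;

public class JoinWindowShapes {

    public static void main(final String[] args) {
        // Symmetric window, as in testJoin/testOuterJoin: records of the other stream
        // within 100 ms before or after a record's timestamp can join with it.
        final JoinWindows symmetric = JoinWindows.of(Duration.ofMillis(100));

        // Asymmetric window, as in testAsymetricWindowingAfter: only records with the
        // same timestamp or up to 100 ms later can join.
        final JoinWindows asymmetric = JoinWindows.of(Duration.ZERO).after(Duration.ofMillis(100));

        System.out.println(symmetric.size());   // 200 (100 ms before + 100 ms after)
        System.out.println(asymmetric.size());  // 100 (0 ms before + 100 ms after)
    }
}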
The class KStreamKStreamLeftJoinTest, method testWindowing:
@Test
public void testWindowing() throws Exception {
    final KStreamBuilder builder = new KStreamBuilder();
    final int[] expectedKeys = new int[] { 0, 1, 2, 3 };
    long time = 0L;
    final KStream<Integer, String> stream1;
    final KStream<Integer, String> stream2;
    final KStream<Integer, String> joined;
    final MockProcessorSupplier<Integer, String> processor;
    processor = new MockProcessorSupplier<>();
    stream1 = builder.stream(intSerde, stringSerde, topic1);
    stream2 = builder.stream(intSerde, stringSerde, topic2);
    joined = stream1.leftJoin(stream2, MockValueJoiner.TOSTRING_JOINER, JoinWindows.of(100), intSerde, stringSerde, stringSerde);
    joined.process(processor);
    final Collection<Set<String>> copartitionGroups = builder.copartitionGroups();
    assertEquals(1, copartitionGroups.size());
    assertEquals(new HashSet<>(Arrays.asList(topic1, topic2)), copartitionGroups.iterator().next());
    driver = new KStreamTestDriver(builder, stateDir);
    // push two items to the primary stream. the other window is empty. this should produce two items.
    // w1 = {}
    // w2 = {}
    // --> w1 = { 0:X0, 1:X1 }
    // --> w2 = {}
    setRecordContext(time, topic1);
    for (int i = 0; i < 2; i++) {
        driver.process(topic1, expectedKeys[i], "X" + expectedKeys[i]);
    }
    driver.flushState();
    processor.checkAndClearProcessResult("0:X0+null", "1:X1+null");
    // push two items to the other stream. both join with the buffered primary records.
    // w1 = { 0:X0, 1:X1 }
    // w2 = {}
    // --> w1 = { 0:X0, 1:X1 }
    // --> w2 = { 0:Y0, 1:Y1 }
    setRecordContext(time, topic2);
    for (int i = 0; i < 2; i++) {
        driver.process(topic2, expectedKeys[i], "Y" + expectedKeys[i]);
    }
    driver.flushState();
    processor.checkAndClearProcessResult("0:X0+Y0", "1:X1+Y1");
    // advance time well past the join window so earlier records no longer match ("clear logically"),
    // then push four items to the other stream. this should produce no items.
    // --> w2 = { 0:Y0, 1:Y1, 2:Y2, 3:Y3 }
    time = 1000L;
    setRecordContext(time, topic2);
    for (int i = 0; i < expectedKeys.length; i++) {
        setRecordContext(time + i, topic2);
        driver.process(topic2, expectedKeys[i], "Y" + expectedKeys[i]);
    }
    driver.flushState();
    processor.checkAndClearProcessResult();
    // gradually expire items in window 2 by pushing the primary stream at increasing timestamps.
    // w1 = {}
    // w2 = { 0:Y0, 1:Y1, 2:Y2, 3:Y3 }
    // --> w1 = { 0:XX0, 1:XX1, 2:XX2, 3:XX3 }
    // --> w2 = { 0:Y0, 1:Y1, 2:Y2, 3:Y3 }
    time = 1000L + 100L;
    setRecordContext(time, topic1);
    for (int expectedKey : expectedKeys) {
        driver.process(topic1, expectedKey, "XX" + expectedKey);
    }
    driver.flushState();
    processor.checkAndClearProcessResult("0:XX0+Y0", "1:XX1+Y1", "2:XX2+Y2", "3:XX3+Y3");
    setRecordContext(++time, topic1);
    for (int expectedKey : expectedKeys) {
        driver.process(topic1, expectedKey, "XX" + expectedKey);
    }
    driver.flushState();
    processor.checkAndClearProcessResult("0:XX0+null", "1:XX1+Y1", "2:XX2+Y2", "3:XX3+Y3");
    setRecordContext(++time, topic1);
    for (int expectedKey : expectedKeys) {
        driver.process(topic1, expectedKey, "XX" + expectedKey);
    }
    driver.flushState();
    processor.checkAndClearProcessResult("0:XX0+null", "1:XX1+null", "2:XX2+Y2", "3:XX3+Y3");
    setRecordContext(++time, topic1);
    for (int expectedKey : expectedKeys) {
        driver.process(topic1, expectedKey, "XX" + expectedKey);
    }
    driver.flushState();
    processor.checkAndClearProcessResult("0:XX0+null", "1:XX1+null", "2:XX2+null", "3:XX3+Y3");
    setRecordContext(++time, topic1);
    for (int expectedKey : expectedKeys) {
        driver.process(topic1, expectedKey, "XX" + expectedKey);
    }
    driver.flushState();
    processor.checkAndClearProcessResult("0:XX0+null", "1:XX1+null", "2:XX2+null", "3:XX3+null");
    // go back to a time before the other records enter the window, then advance step by step
    time = 1000L - 100L - 1L;
    setRecordContext(time, topic1);
    for (int expectedKey : expectedKeys) {
        driver.process(topic1, expectedKey, "XX" + expectedKey);
    }
    driver.flushState();
    processor.checkAndClearProcessResult("0:XX0+null", "1:XX1+null", "2:XX2+null", "3:XX3+null");
    setRecordContext(++time, topic1);
    for (int expectedKey : expectedKeys) {
        driver.process(topic1, expectedKey, "XX" + expectedKey);
    }
    driver.flushState();
    processor.checkAndClearProcessResult("0:XX0+Y0", "1:XX1+null", "2:XX2+null", "3:XX3+null");
    setRecordContext(++time, topic1);
    for (int expectedKey : expectedKeys) {
        driver.process(topic1, expectedKey, "XX" + expectedKey);
    }
    driver.flushState();
    processor.checkAndClearProcessResult("0:XX0+Y0", "1:XX1+Y1", "2:XX2+null", "3:XX3+null");
    setRecordContext(++time, topic1);
    for (int expectedKey : expectedKeys) {
        driver.process(topic1, expectedKey, "XX" + expectedKey);
    }
    driver.flushState();
    processor.checkAndClearProcessResult("0:XX0+Y0", "1:XX1+Y1", "2:XX2+Y2", "3:XX3+null");
    setRecordContext(++time, topic1);
    for (int expectedKey : expectedKeys) {
        driver.process(topic1, expectedKey, "XX" + expectedKey);
    }
    driver.flushState();
    processor.checkAndClearProcessResult("0:XX0+Y0", "1:XX1+Y1", "2:XX2+Y2", "3:XX3+Y3");
}