Use of org.apache.kafka.test.KStreamTestDriver in project kafka by apache.
From the class KTableForeachTest, method testForeach: verifies that KTable#foreach applies the given action to every record.
@Test
public void testForeach() {
    // Given
    List<KeyValue<Integer, String>> inputRecords = Arrays.asList(
        new KeyValue<>(0, "zero"),
        new KeyValue<>(1, "one"),
        new KeyValue<>(2, "two"),
        new KeyValue<>(3, "three"));
    List<KeyValue<Integer, String>> expectedRecords = Arrays.asList(
        new KeyValue<>(0, "ZERO"),
        new KeyValue<>(2, "ONE"),
        new KeyValue<>(4, "TWO"),
        new KeyValue<>(6, "THREE"));
    final List<KeyValue<Integer, String>> actualRecords = new ArrayList<>();
    ForeachAction<Integer, String> action = new ForeachAction<Integer, String>() {
        @Override
        public void apply(Integer key, String value) {
            actualRecords.add(new KeyValue<>(key * 2, value.toUpperCase(Locale.ROOT)));
        }
    };
    // When
    KStreamBuilder builder = new KStreamBuilder();
    KTable<Integer, String> table = builder.table(intSerde, stringSerde, topicName, "anyStoreName");
    table.foreach(action);
    // Then
    driver = new KStreamTestDriver(builder, stateDir);
    for (KeyValue<Integer, String> record : inputRecords) {
        driver.process(topicName, record.key, record.value);
    }
    driver.flushState();
    assertEquals(expectedRecords.size(), actualRecords.size());
    for (int i = 0; i < expectedRecords.size(); i++) {
        KeyValue<Integer, String> expectedRecord = expectedRecords.get(i);
        KeyValue<Integer, String> actualRecord = actualRecords.get(i);
        assertEquals(expectedRecord, actualRecord);
    }
}
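These snippets reference fixture fields that are declared elsewhere in their test classes (intSerde, stringSerde, topicName, topic1, storeName1, stateDir, driver, and so on). A minimal sketch of what that shared fixture typically looks like, assuming JUnit 4 and the old KStreamTestDriver API; the exact declarations in the real test classes may differ:

import java.io.File;
import org.apache.kafka.common.serialization.Serde;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.test.KStreamTestDriver;
import org.apache.kafka.test.TestUtils;
import org.junit.After;
import org.junit.Before;

// Shared test fixture (sketch): serdes, a temporary state directory, and the driver.
private final Serde<Integer> intSerde = Serdes.Integer();
private final Serde<String> stringSerde = Serdes.String();
private final String topicName = "testTopic";
private File stateDir = null;
private KStreamTestDriver driver = null;

@Before
public void setUp() {
    // create a fresh temporary directory for the driver's state stores
    stateDir = TestUtils.tempDirectory();
}

@After
public void tearDown() {
    // close the driver after each test so state stores are released
    if (driver != null) {
        driver.close();
    }
    driver = null;
}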
Use of org.apache.kafka.test.KStreamTestDriver in project kafka by apache.
From the class KTableImplTest, method testRepartition: verifies that groupBy followed by aggregate and reduce creates the expected state stores and repartition sink/source nodes.
@Test
public void testRepartition() throws Exception {
    String topic1 = "topic1";
    String storeName1 = "storeName1";
    final KStreamBuilder builder = new KStreamBuilder();
    KTableImpl<String, String, String> table1 = (KTableImpl<String, String, String>) builder.table(stringSerde, stringSerde, topic1, storeName1);
    KTableImpl<String, String, String> table1Aggregated = (KTableImpl<String, String, String>) table1
        .groupBy(MockKeyValueMapper.<String, String>NoOpKeyValueMapper())
        .aggregate(MockInitializer.STRING_INIT, MockAggregator.TOSTRING_ADDER, MockAggregator.TOSTRING_REMOVER, "mock-result1");
    KTableImpl<String, String, String> table1Reduced = (KTableImpl<String, String, String>) table1
        .groupBy(MockKeyValueMapper.<String, String>NoOpKeyValueMapper())
        .reduce(MockReducer.STRING_ADDER, MockReducer.STRING_REMOVER, "mock-result2");
    driver = new KStreamTestDriver(builder, stateDir, stringSerde, stringSerde);
    driver.setTime(0L);
    // three state stores should be created: one for the source, one for the aggregate, and one for the reduce
    assertEquals(3, driver.allStateStores().size());
    // the topology contains the corresponding repartition source / sink nodes
    assertTrue(driver.allProcessorNames().contains("KSTREAM-SINK-0000000003"));
    assertTrue(driver.allProcessorNames().contains("KSTREAM-SOURCE-0000000004"));
    assertTrue(driver.allProcessorNames().contains("KSTREAM-SINK-0000000007"));
    assertTrue(driver.allProcessorNames().contains("KSTREAM-SOURCE-0000000008"));
    Field valSerializerField = ((SinkNode) driver.processor("KSTREAM-SINK-0000000003")).getClass().getDeclaredField("valSerializer");
    Field valDeserializerField = ((SourceNode) driver.processor("KSTREAM-SOURCE-0000000004")).getClass().getDeclaredField("valDeserializer");
    valSerializerField.setAccessible(true);
    valDeserializerField.setAccessible(true);
    assertNotNull(((ChangedSerializer) valSerializerField.get(driver.processor("KSTREAM-SINK-0000000003"))).inner());
    assertNotNull(((ChangedDeserializer) valDeserializerField.get(driver.processor("KSTREAM-SOURCE-0000000004"))).inner());
    assertNotNull(((ChangedSerializer) valSerializerField.get(driver.processor("KSTREAM-SINK-0000000007"))).inner());
    assertNotNull(((ChangedDeserializer) valDeserializerField.get(driver.processor("KSTREAM-SOURCE-0000000008"))).inner());
}
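The reflection boilerplate at the end of testRepartition can be factored into a small helper. A hypothetical refactoring sketch (readField is not part of the original test; it just wraps java.lang.reflect.Field the same way the test does):

// Hypothetical helper: read the value of a private field from a processor node.
private static Object readField(final Object target, final String fieldName) throws Exception {
    final java.lang.reflect.Field field = target.getClass().getDeclaredField(fieldName);
    field.setAccessible(true);
    return field.get(target);
}

// Equivalent to the explicit Field handling above, e.g.:
// assertNotNull(((ChangedSerializer) readField(driver.processor("KSTREAM-SINK-0000000003"), "valSerializer")).inner());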
Use of org.apache.kafka.test.KStreamTestDriver in project kafka by apache.
From the class KTableImplTest, method testKTable: chains mapValues, filter, and through off a source KTable and checks what each downstream processor observes.
@Test
public void testKTable() {
    final KStreamBuilder builder = new KStreamBuilder();
    String topic1 = "topic1";
    String topic2 = "topic2";
    String storeName1 = "storeName1";
    String storeName2 = "storeName2";
    KTable<String, String> table1 = builder.table(stringSerde, stringSerde, topic1, storeName1);
    MockProcessorSupplier<String, String> proc1 = new MockProcessorSupplier<>();
    table1.toStream().process(proc1);
    // map the string values to integers
    KTable<String, Integer> table2 = table1.mapValues(new ValueMapper<String, Integer>() {
        @Override
        public Integer apply(String value) {
            return new Integer(value);
        }
    });
    MockProcessorSupplier<String, Integer> proc2 = new MockProcessorSupplier<>();
    table2.toStream().process(proc2);
    // keep only even values; filtered-out keys appear as nulls downstream
    KTable<String, Integer> table3 = table2.filter(new Predicate<String, Integer>() {
        @Override
        public boolean test(String key, Integer value) {
            return (value % 2) == 0;
        }
    });
    MockProcessorSupplier<String, Integer> proc3 = new MockProcessorSupplier<>();
    table3.toStream().process(proc3);
    // materialize table1 through an intermediate topic
    KTable<String, String> table4 = table1.through(stringSerde, stringSerde, topic2, storeName2);
    MockProcessorSupplier<String, String> proc4 = new MockProcessorSupplier<>();
    table4.toStream().process(proc4);
    driver = new KStreamTestDriver(builder, stateDir);
    driver.process(topic1, "A", "01");
    driver.flushState();
    driver.process(topic1, "B", "02");
    driver.flushState();
    driver.process(topic1, "C", "03");
    driver.flushState();
    driver.process(topic1, "D", "04");
    driver.flushState();
    driver.flushState();
    assertEquals(Utils.mkList("A:01", "B:02", "C:03", "D:04"), proc1.processed);
    assertEquals(Utils.mkList("A:1", "B:2", "C:3", "D:4"), proc2.processed);
    assertEquals(Utils.mkList("A:null", "B:2", "C:null", "D:4"), proc3.processed);
    assertEquals(Utils.mkList("A:01", "B:02", "C:03", "D:04"), proc4.processed);
}
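On Java 8+ the anonymous ValueMapper and Predicate above can be written as lambdas; a sketch of the equivalent derivation of table2 and table3 (same table1 as in the test, behavior unchanged):

// map the string values to integers, then keep only even values
KTable<String, Integer> table2 = table1.mapValues(value -> Integer.valueOf(value));
KTable<String, Integer> table3 = table2.filter((key, value) -> value % 2 == 0);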
Use of org.apache.kafka.test.KStreamTestDriver in project kafka by apache.
From the class KTableKTableJoinTest, method testNotSendingOldValues: the expected results use the format key:(newValue<-oldValue); with old-value forwarding disabled, the old value is always null.
@Test
public void testNotSendingOldValues() throws Exception {
    final KStreamBuilder builder = new KStreamBuilder();
    final int[] expectedKeys = new int[] { 0, 1, 2, 3 };
    final KTable<Integer, String> table1;
    final KTable<Integer, String> table2;
    final KTable<Integer, String> joined;
    final MockProcessorSupplier<Integer, String> proc;
    table1 = builder.table(intSerde, stringSerde, topic1, storeName1);
    table2 = builder.table(intSerde, stringSerde, topic2, storeName2);
    joined = table1.join(table2, MockValueJoiner.TOSTRING_JOINER);
    proc = new MockProcessorSupplier<>();
    builder.addProcessor("proc", proc, ((KTableImpl<?, ?, ?>) joined).name);
    driver = new KStreamTestDriver(builder, stateDir);
    driver.setTime(0L);
    // old-value forwarding is disabled by default, so the "<-oldValue" part of each result stays null
    assertFalse(((KTableImpl<?, ?, ?>) table1).sendingOldValueEnabled());
    assertFalse(((KTableImpl<?, ?, ?>) table2).sendingOldValueEnabled());
    assertFalse(((KTableImpl<?, ?, ?>) joined).sendingOldValueEnabled());
    // push two items to the primary stream. the other stream is empty, so the inner join produces nothing.
    for (int i = 0; i < 2; i++) {
        driver.process(topic1, expectedKeys[i], "X" + expectedKeys[i]);
    }
    driver.flushState();
    proc.checkAndClearProcessResult();
    // push two items to the other stream. this should produce two items.
    for (int i = 0; i < 2; i++) {
        driver.process(topic2, expectedKeys[i], "Y" + expectedKeys[i]);
    }
    driver.flushState();
    proc.checkAndClearProcessResult("0:(X0+Y0<-null)", "1:(X1+Y1<-null)");
    // push all four items to the primary stream. only the two keys present in the other stream join.
    for (int expectedKey : expectedKeys) {
        driver.process(topic1, expectedKey, "XX" + expectedKey);
    }
    driver.flushState();
    proc.checkAndClearProcessResult("0:(XX0+Y0<-null)", "1:(XX1+Y1<-null)");
    // push all items to the other stream. this should produce four items.
    for (int expectedKey : expectedKeys) {
        driver.process(topic2, expectedKey, "YY" + expectedKey);
    }
    driver.flushState();
    proc.checkAndClearProcessResult("0:(XX0+YY0<-null)", "1:(XX1+YY1<-null)", "2:(XX2+YY2<-null)", "3:(XX3+YY3<-null)");
    // push all four items to the primary stream again. all four keys join.
    for (int expectedKey : expectedKeys) {
        driver.process(topic1, expectedKey, "X" + expectedKey);
    }
    driver.flushState();
    proc.checkAndClearProcessResult("0:(X0+YY0<-null)", "1:(X1+YY1<-null)", "2:(X2+YY2<-null)", "3:(X3+YY3<-null)");
    // push two deletes (null values) to the other stream. this should produce two null join results.
    for (int i = 0; i < 2; i++) {
        driver.process(topic2, expectedKeys[i], null);
    }
    driver.flushState();
    proc.checkAndClearProcessResult("0:(null<-null)", "1:(null<-null)");
    // push all four items to the primary stream. only keys 2 and 3 still have a match.
    for (int expectedKey : expectedKeys) {
        driver.process(topic1, expectedKey, "XX" + expectedKey);
    }
    driver.flushState();
    proc.checkAndClearProcessResult("2:(XX2+YY2<-null)", "3:(XX3+YY3<-null)");
}
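For contrast, the companion scenario with old values forwarded would enable the feature on the joined table before building the driver, after which old values appear after the "<-" in the results. A hedged sketch, assuming this API generation exposes the internal KTableImpl#enableSendingOldValues() hook:

// Sketch only: enable old-value forwarding on the joined table (internal API; an assumption here).
((KTableImpl<?, ?, ?>) joined).enableSendingOldValues();
driver = new KStreamTestDriver(builder, stateDir);
driver.setTime(0L);
// the call is expected to propagate upstream, so all three flags flip to true
assertTrue(((KTableImpl<?, ?, ?>) table1).sendingOldValueEnabled());
assertTrue(((KTableImpl<?, ?, ?>) table2).sendingOldValueEnabled());
assertTrue(((KTableImpl<?, ?, ?>) joined).sendingOldValueEnabled());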
Use of org.apache.kafka.test.KStreamTestDriver in project kafka by apache.
From the class KTableKTableLeftJoinTest, method testJoin: exercises a KTable-KTable left join, checking both the forwarded results and the values returned by the joined table's value getter.
@Test
public void testJoin() throws Exception {
    final KStreamBuilder builder = new KStreamBuilder();
    final int[] expectedKeys = new int[] { 0, 1, 2, 3 };
    KTable<Integer, String> table1 = builder.table(intSerde, stringSerde, topic1, storeName1);
    KTable<Integer, String> table2 = builder.table(intSerde, stringSerde, topic2, storeName2);
    KTable<Integer, String> joined = table1.leftJoin(table2, MockValueJoiner.TOSTRING_JOINER);
    MockProcessorSupplier<Integer, String> processor;
    processor = new MockProcessorSupplier<>();
    joined.toStream().process(processor);
    // both source topics must belong to the same copartition group
    Collection<Set<String>> copartitionGroups = builder.copartitionGroups();
    assertEquals(1, copartitionGroups.size());
    assertEquals(new HashSet<>(Arrays.asList(topic1, topic2)), copartitionGroups.iterator().next());
    // the value getter lets us query the joined table's current value per key
    KTableValueGetterSupplier<Integer, String> getterSupplier = ((KTableImpl<Integer, String, String>) joined).valueGetterSupplier();
    driver = new KStreamTestDriver(builder, stateDir);
    driver.setTime(0L);
    KTableValueGetter<Integer, String> getter = getterSupplier.get();
    getter.init(driver.context());
    // push two items to the primary stream. the other stream is empty, so the left join yields X+null.
    for (int i = 0; i < 2; i++) {
        driver.process(topic1, expectedKeys[i], "X" + expectedKeys[i]);
    }
    // pass a tuple with a null key; it will be discarded by the join
    driver.process(topic1, null, "SomeVal");
    driver.flushState();
    processor.checkAndClearProcessResult("0:X0+null", "1:X1+null");
    checkJoinedValues(getter, kv(0, "X0+null"), kv(1, "X1+null"), kv(2, null), kv(3, null));
    // push two items to the other stream. this should produce two full join results.
    for (int i = 0; i < 2; i++) {
        driver.process(topic2, expectedKeys[i], "Y" + expectedKeys[i]);
    }
    // pass a tuple with a null key; it will be discarded by the join
    driver.process(topic2, null, "AnotherVal");
    driver.flushState();
    processor.checkAndClearProcessResult("0:X0+Y0", "1:X1+Y1");
    checkJoinedValues(getter, kv(0, "X0+Y0"), kv(1, "X1+Y1"), kv(2, null), kv(3, null));
    // push all four items to the primary stream. keys 2 and 3 still have no match.
    for (int expectedKey : expectedKeys) {
        driver.process(topic1, expectedKey, "X" + expectedKey);
    }
    driver.flushState();
    processor.checkAndClearProcessResult("0:X0+Y0", "1:X1+Y1", "2:X2+null", "3:X3+null");
    checkJoinedValues(getter, kv(0, "X0+Y0"), kv(1, "X1+Y1"), kv(2, "X2+null"), kv(3, "X3+null"));
    // push all items to the other stream. this should produce four items.
    for (int expectedKey : expectedKeys) {
        driver.process(topic2, expectedKey, "YY" + expectedKey);
    }
    driver.flushState();
    processor.checkAndClearProcessResult("0:X0+YY0", "1:X1+YY1", "2:X2+YY2", "3:X3+YY3");
    checkJoinedValues(getter, kv(0, "X0+YY0"), kv(1, "X1+YY1"), kv(2, "X2+YY2"), kv(3, "X3+YY3"));
    // push all four items to the primary stream again. all four keys now join.
    for (int expectedKey : expectedKeys) {
        driver.process(topic1, expectedKey, "X" + expectedKey);
    }
    driver.flushState();
    processor.checkAndClearProcessResult("0:X0+YY0", "1:X1+YY1", "2:X2+YY2", "3:X3+YY3");
    checkJoinedValues(getter, kv(0, "X0+YY0"), kv(1, "X1+YY1"), kv(2, "X2+YY2"), kv(3, "X3+YY3"));
    // push two deletes (null values) to the other stream. keys 0 and 1 fall back to X+null.
    for (int i = 0; i < 2; i++) {
        driver.process(topic2, expectedKeys[i], null);
    }
    driver.flushState();
    processor.checkAndClearProcessResult("0:X0+null", "1:X1+null");
    checkJoinedValues(getter, kv(0, "X0+null"), kv(1, "X1+null"), kv(2, "X2+YY2"), kv(3, "X3+YY3"));
    // push all four items to the primary stream with new values.
    for (int expectedKey : expectedKeys) {
        driver.process(topic1, expectedKey, "XX" + expectedKey);
    }
    driver.flushState();
    processor.checkAndClearProcessResult("0:XX0+null", "1:XX1+null", "2:XX2+YY2", "3:XX3+YY3");
    checkJoinedValues(getter, kv(0, "XX0+null"), kv(1, "XX1+null"), kv(2, "XX2+YY2"), kv(3, "XX3+YY3"));
}
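The kv and checkJoinedValues helpers are defined elsewhere in KTableKTableLeftJoinTest. Plausible definitions inferred from the call sites (the real class may implement them differently): kv builds a KeyValue, and checkJoinedValues asserts that the value getter returns the expected value for each key.

// Plausible helper sketches, inferred from how they are called above.
private KeyValue<Integer, String> kv(final Integer key, final String value) {
    return new KeyValue<>(key, value);
}

private void checkJoinedValues(final KTableValueGetter<Integer, String> getter,
                               final KeyValue<Integer, String>... expectedResults) {
    for (final KeyValue<Integer, String> expected : expectedResults) {
        assertEquals(expected.value, getter.get(expected.key));
    }
}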