Use of org.apache.kafka.streams.Topology in project kafka by apache.
From the class WindowStoreFetchTest, method testStoreConfig:
@Test
public void testStoreConfig() {
    final Materialized<String, Long, WindowStore<Bytes, byte[]>> stateStoreConfig =
        getStoreConfig(storeType, STORE_NAME, enableLogging, enableCaching);
    // Create topology: a windowed count over the input topic, materialized with the given store config
    final StreamsBuilder builder = new StreamsBuilder();
    final KStream<String, String> stream =
        builder.stream("input", Consumed.with(Serdes.String(), Serdes.String()));
    stream.groupByKey(Grouped.with(Serdes.String(), Serdes.String()))
        .windowedBy(TimeWindows.ofSizeWithNoGrace(ofMillis(WINDOW_SIZE)))
        .count(stateStoreConfig)
        .toStream()
        .to("output");
    final Topology topology = builder.build();
    try (final TopologyTestDriver driver = new TopologyTestDriver(topology)) {
        // get the input topic and the state store
        final TestInputTopic<String, String> input =
            driver.createInputTopic("input", new StringSerializer(), new StringSerializer());
        final WindowStore<String, Long> stateStore = driver.getWindowStore(STORE_NAME);
        // write some data: the first `medium` records land in the first window, the rest in the next one
        final int medium = DATA_SIZE / 2 * 2;
        for (int i = 0; i < records.size(); i++) {
            final KeyValue<String, String> kv = records.get(i);
            final long windowStartTime = i < medium ? 0 : WINDOW_SIZE;
            input.pipeInput(kv.key, kv.value, windowStartTime + i);
        }
        // query the state store: a full scan, forward or backward
        try (final KeyValueIterator<Windowed<String>, Long> scanIterator = forward
                ? stateStore.fetchAll(0, Long.MAX_VALUE)
                : stateStore.backwardFetchAll(0, Long.MAX_VALUE)) {
            final Iterator<KeyValue<Windowed<String>, Long>> dataIterator = forward
                ? expectedRecords.iterator()
                : expectedRecords.descendingIterator();
            TestUtils.checkEquals(scanIterator, dataIterator);
        }
        // an unbounded key range (null bounds) must return the same data as the full scan
        try (final KeyValueIterator<Windowed<String>, Long> scanIterator = forward
                ? stateStore.fetch(null, null, 0, Long.MAX_VALUE)
                : stateStore.backwardFetch(null, null, 0, Long.MAX_VALUE)) {
            final Iterator<KeyValue<Windowed<String>, Long>> dataIterator = forward
                ? expectedRecords.iterator()
                : expectedRecords.descendingIterator();
            TestUtils.checkEquals(scanIterator, dataIterator);
        }
        // bounded, half-open, and in-between key ranges
        testRange("range", stateStore, innerLow, innerHigh, forward);
        testRange("until", stateStore, null, middle, forward);
        testRange("from", stateStore, middle, null, forward);
        testRange("untilBetween", stateStore, null, innerHighBetween, forward);
        testRange("fromBetween", stateStore, innerLowBetween, null, forward);
    }
}
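The helper getStoreConfig and fields such as storeType, STORE_NAME, DATA_SIZE, records, expectedRecords, and the range bounds are defined elsewhere in the test class and are not shown here. A minimal sketch of what such a helper could look like, assuming a StoreType enum with IN_MEMORY and ROCKS_DB variants and a RETENTION constant (these names are assumptions, not necessarily the test's actual code):

// Hypothetical helper: builds a Materialized config for a window store,
// toggling changelogging and caching per the test parameters.
private Materialized<String, Long, WindowStore<Bytes, byte[]>> getStoreConfig(
        final StoreType type,
        final String name,
        final boolean enableLogging,
        final boolean enableCaching) {
    final WindowBytesStoreSupplier supplier = type == StoreType.IN_MEMORY
        ? Stores.inMemoryWindowStore(name, Duration.ofMillis(RETENTION), Duration.ofMillis(WINDOW_SIZE), false)
        : Stores.persistentWindowStore(name, Duration.ofMillis(RETENTION), Duration.ofMillis(WINDOW_SIZE), false);
    final Materialized<String, Long, WindowStore<Bytes, byte[]>> materialized =
        Materialized.<String, Long>as(supplier)
            .withKeySerde(Serdes.String())
            .withValueSerde(Serdes.Long());
    if (enableCaching) {
        materialized.withCachingEnabled();
    } else {
        materialized.withCachingDisabled();
    }
    if (enableLogging) {
        materialized.withLoggingEnabled(Collections.emptyMap());
    } else {
        materialized.withLoggingDisabled();
    }
    return materialized;
}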
Use of org.apache.kafka.streams.Topology in project kafka by apache.
From the class KTableKTableLeftJoinTest, method testSendingOldValue:
@Test
public void testSendingOldValue() {
    final StreamsBuilder builder = new StreamsBuilder();
    final int[] expectedKeys = new int[] { 0, 1, 2, 3 };
    final KTable<Integer, String> table1;
    final KTable<Integer, String> table2;
    final KTable<Integer, String> joined;
    final MockApiProcessorSupplier<Integer, String, Void, Void> supplier = new MockApiProcessorSupplier<>();
    table1 = builder.table(topic1, consumed);
    table2 = builder.table(topic2, consumed);
    joined = table1.leftJoin(table2, MockValueJoiner.TOSTRING_JOINER);
    ((KTableImpl<?, ?, ?>) joined).enableSendingOldValues(true);
    // enabling old values on the join result propagates the flag upstream to both source tables
    assertThat(((KTableImpl<?, ?, ?>) table1).sendingOldValueEnabled(), is(true));
    assertThat(((KTableImpl<?, ?, ?>) table2).sendingOldValueEnabled(), is(true));
    assertThat(((KTableImpl<?, ?, ?>) joined).sendingOldValueEnabled(), is(true));
    final Topology topology = builder.build().addProcessor("proc", supplier, ((KTableImpl<?, ?, ?>) joined).name);
    try (final TopologyTestDriver driver = new TopologyTestDriverWrapper(topology, props)) {
        final TestInputTopic<Integer, String> inputTopic1 = driver.createInputTopic(
            topic1, Serdes.Integer().serializer(), Serdes.String().serializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
        final TestInputTopic<Integer, String> inputTopic2 = driver.createInputTopic(
            topic2, Serdes.Integer().serializer(), Serdes.String().serializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
        final MockApiProcessor<Integer, String, Void, Void> proc = supplier.theCapturedProcessor();
        assertTrue(((KTableImpl<?, ?, ?>) table1).sendingOldValueEnabled());
        assertTrue(((KTableImpl<?, ?, ?>) table2).sendingOldValueEnabled());
        assertTrue(((KTableImpl<?, ?, ?>) joined).sendingOldValueEnabled());
        // push two items to the primary stream; the other table is empty
        for (int i = 0; i < 2; i++) {
            inputTopic1.pipeInput(expectedKeys[i], "X" + expectedKeys[i], 5L + i);
        }
        // pass a tuple with a null key; it is discarded by the join processor
        inputTopic1.pipeInput(null, "SomeVal", 42L);
        // left: X0:0 (ts: 5), X1:1 (ts: 6)
        // right:
        proc.checkAndClearProcessResult(
            new KeyValueTimestamp<>(0, new Change<>("X0+null", null), 5),
            new KeyValueTimestamp<>(1, new Change<>("X1+null", null), 6));
        // push two items to the other stream; this should produce two items
        for (int i = 0; i < 2; i++) {
            inputTopic2.pipeInput(expectedKeys[i], "Y" + expectedKeys[i], 10L * i);
        }
        // pass a tuple with a null key; it is discarded by the join processor
        inputTopic2.pipeInput(null, "AnotherVal", 73L);
        // left: X0:0 (ts: 5), X1:1 (ts: 6)
        // right: Y0:0 (ts: 0), Y1:1 (ts: 10)
        proc.checkAndClearProcessResult(
            new KeyValueTimestamp<>(0, new Change<>("X0+Y0", "X0+null"), 5),
            new KeyValueTimestamp<>(1, new Change<>("X1+Y1", "X1+null"), 10));
        // push all four items to the primary stream; this should produce four items
        for (final int expectedKey : expectedKeys) {
            inputTopic1.pipeInput(expectedKey, "XX" + expectedKey, 7L);
        }
        // left: XX0:0 (ts: 7), XX1:1 (ts: 7), XX2:2 (ts: 7), XX3:3 (ts: 7)
        // right: Y0:0 (ts: 0), Y1:1 (ts: 10)
        proc.checkAndClearProcessResult(
            new KeyValueTimestamp<>(0, new Change<>("XX0+Y0", "X0+Y0"), 7),
            new KeyValueTimestamp<>(1, new Change<>("XX1+Y1", "X1+Y1"), 10),
            new KeyValueTimestamp<>(2, new Change<>("XX2+null", null), 7),
            new KeyValueTimestamp<>(3, new Change<>("XX3+null", null), 7));
        // push all four items to the other stream; this should produce four items
        for (final int expectedKey : expectedKeys) {
            inputTopic2.pipeInput(expectedKey, "YY" + expectedKey, expectedKey * 5L);
        }
        // left: XX0:0 (ts: 7), XX1:1 (ts: 7), XX2:2 (ts: 7), XX3:3 (ts: 7)
        // right: YY0:0 (ts: 0), YY1:1 (ts: 5), YY2:2 (ts: 10), YY3:3 (ts: 15)
        proc.checkAndClearProcessResult(
            new KeyValueTimestamp<>(0, new Change<>("XX0+YY0", "XX0+Y0"), 7),
            new KeyValueTimestamp<>(1, new Change<>("XX1+YY1", "XX1+Y1"), 7),
            new KeyValueTimestamp<>(2, new Change<>("XX2+YY2", "XX2+null"), 10),
            new KeyValueTimestamp<>(3, new Change<>("XX3+YY3", "XX3+null"), 15));
        // push all four items to the primary stream; this should produce four items
        for (final int expectedKey : expectedKeys) {
            inputTopic1.pipeInput(expectedKey, "XXX" + expectedKey, 6L);
        }
        // left: XXX0:0 (ts: 6), XXX1:1 (ts: 6), XXX2:2 (ts: 6), XXX3:3 (ts: 6)
        // right: YY0:0 (ts: 0), YY1:1 (ts: 5), YY2:2 (ts: 10), YY3:3 (ts: 15)
        proc.checkAndClearProcessResult(
            new KeyValueTimestamp<>(0, new Change<>("XXX0+YY0", "XX0+YY0"), 6),
            new KeyValueTimestamp<>(1, new Change<>("XXX1+YY1", "XX1+YY1"), 6),
            new KeyValueTimestamp<>(2, new Change<>("XXX2+YY2", "XX2+YY2"), 10),
            new KeyValueTimestamp<>(3, new Change<>("XXX3+YY3", "XX3+YY3"), 15));
        // push two items with null to the other stream as deletes; this should produce two items
        inputTopic2.pipeInput(expectedKeys[0], null, 5L);
        inputTopic2.pipeInput(expectedKeys[1], null, 7L);
        // left: XXX0:0 (ts: 6), XXX1:1 (ts: 6), XXX2:2 (ts: 6), XXX3:3 (ts: 6)
        // right: YY2:2 (ts: 10), YY3:3 (ts: 15)
        proc.checkAndClearProcessResult(
            new KeyValueTimestamp<>(0, new Change<>("XXX0+null", "XXX0+YY0"), 6),
            new KeyValueTimestamp<>(1, new Change<>("XXX1+null", "XXX1+YY1"), 7));
        // push all four items to the primary stream; this should produce four items
        for (final int expectedKey : expectedKeys) {
            inputTopic1.pipeInput(expectedKey, "XXXX" + expectedKey, 13L);
        }
        // left: XXXX0:0 (ts: 13), XXXX1:1 (ts: 13), XXXX2:2 (ts: 13), XXXX3:3 (ts: 13)
        // right: YY2:2 (ts: 10), YY3:3 (ts: 15)
        proc.checkAndClearProcessResult(
            new KeyValueTimestamp<>(0, new Change<>("XXXX0+null", "XXX0+null"), 13),
            new KeyValueTimestamp<>(1, new Change<>("XXXX1+null", "XXX1+null"), 13),
            new KeyValueTimestamp<>(2, new Change<>("XXXX2+YY2", "XXX2+YY2"), 13),
            new KeyValueTimestamp<>(3, new Change<>("XXXX3+YY3", "XXX3+YY3"), 15));
        // push four items with null to the primary stream as deletes; this should produce four items
        inputTopic1.pipeInput(expectedKeys[0], null, 0L);
        inputTopic1.pipeInput(expectedKeys[1], null, 42L);
        inputTopic1.pipeInput(expectedKeys[2], null, 5L);
        inputTopic1.pipeInput(expectedKeys[3], null, 20L);
        // left:
        // right: YY2:2 (ts: 10), YY3:3 (ts: 15)
        proc.checkAndClearProcessResult(
            new KeyValueTimestamp<>(0, new Change<>(null, "XXXX0+null"), 0),
            new KeyValueTimestamp<>(1, new Change<>(null, "XXXX1+null"), 42),
            new KeyValueTimestamp<>(2, new Change<>(null, "XXXX2+YY2"), 10),
            new KeyValueTimestamp<>(3, new Change<>(null, "XXXX3+YY3"), 20));
    }
}
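MockApiProcessorSupplier and MockApiProcessor are internal Kafka test utilities: the supplier hands out processors that capture every forwarded record so the test can assert on keys, values (here Change pairs of new and old value), and timestamps. A minimal sketch of a similar capturing processor built only on the public Processor API (illustrative, not Kafka's actual mock):

import java.util.ArrayList;
import java.util.List;
import org.apache.kafka.streams.processor.api.Processor;
import org.apache.kafka.streams.processor.api.Record;

// Captures everything it receives; a test can inspect `captured` afterwards.
public class CapturingProcessor<K, V> implements Processor<K, V, Void, Void> {
    public final List<Record<K, V>> captured = new ArrayList<>();

    @Override
    public void process(final Record<K, V> record) {
        captured.add(record);
    }
}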
Use of org.apache.kafka.streams.Topology in project kafka by apache.
From the class KTableMapValuesTest, method doTestValueGetter:
private void doTestValueGetter(final StreamsBuilder builder,
                               final String topic1,
                               final KTableImpl<String, String, Integer> table2,
                               final KTableImpl<String, String, Integer> table3) {
    final Topology topology = builder.build();
    final KTableValueGetterSupplier<String, Integer> getterSupplier2 = table2.valueGetterSupplier();
    final KTableValueGetterSupplier<String, Integer> getterSupplier3 = table3.valueGetterSupplier();
    final InternalTopologyBuilder topologyBuilder = TopologyWrapper.getInternalTopologyBuilder(topology);
    // connect the value getters to the tables' state stores so they can be queried directly
    topologyBuilder.connectProcessorAndStateStores(table2.name, getterSupplier2.storeNames());
    topologyBuilder.connectProcessorAndStateStores(table3.name, getterSupplier3.storeNames());
    try (final TopologyTestDriverWrapper driver = new TopologyTestDriverWrapper(topology, props)) {
        final TestInputTopic<String, String> inputTopic1 = driver.createInputTopic(
            topic1, new StringSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
        final KTableValueGetter<String, Integer> getter2 = getterSupplier2.get();
        final KTableValueGetter<String, Integer> getter3 = getterSupplier3.get();
        getter2.init(driver.setCurrentNodeForProcessorContext(table2.name));
        getter3.init(driver.setCurrentNodeForProcessorContext(table3.name));
        inputTopic1.pipeInput("A", "01", 50L);
        inputTopic1.pipeInput("B", "01", 10L);
        inputTopic1.pipeInput("C", "01", 30L);
        assertEquals(ValueAndTimestamp.make(1, 50L), getter2.get("A"));
        assertEquals(ValueAndTimestamp.make(1, 10L), getter2.get("B"));
        assertEquals(ValueAndTimestamp.make(1, 30L), getter2.get("C"));
        assertEquals(ValueAndTimestamp.make(-1, 50L), getter3.get("A"));
        assertEquals(ValueAndTimestamp.make(-1, 10L), getter3.get("B"));
        assertEquals(ValueAndTimestamp.make(-1, 30L), getter3.get("C"));
        inputTopic1.pipeInput("A", "02", 25L);
        inputTopic1.pipeInput("B", "02", 20L);
        assertEquals(ValueAndTimestamp.make(2, 25L), getter2.get("A"));
        assertEquals(ValueAndTimestamp.make(2, 20L), getter2.get("B"));
        assertEquals(ValueAndTimestamp.make(1, 30L), getter2.get("C"));
        assertEquals(ValueAndTimestamp.make(-2, 25L), getter3.get("A"));
        assertEquals(ValueAndTimestamp.make(-2, 20L), getter3.get("B"));
        assertEquals(ValueAndTimestamp.make(-1, 30L), getter3.get("C"));
        inputTopic1.pipeInput("A", "03", 35L);
        assertEquals(ValueAndTimestamp.make(3, 35L), getter2.get("A"));
        assertEquals(ValueAndTimestamp.make(2, 20L), getter2.get("B"));
        assertEquals(ValueAndTimestamp.make(1, 30L), getter2.get("C"));
        assertEquals(ValueAndTimestamp.make(-3, 35L), getter3.get("A"));
        assertEquals(ValueAndTimestamp.make(-2, 20L), getter3.get("B"));
        assertEquals(ValueAndTimestamp.make(-1, 30L), getter3.get("C"));
        // a null value is a delete: the getters no longer find the key
        inputTopic1.pipeInput("A", (String) null, 1L);
        assertNull(getter2.get("A"));
        assertEquals(ValueAndTimestamp.make(2, 20L), getter2.get("B"));
        assertEquals(ValueAndTimestamp.make(1, 30L), getter2.get("C"));
        assertNull(getter3.get("A"));
        assertEquals(ValueAndTimestamp.make(-2, 20L), getter3.get("B"));
        assertEquals(ValueAndTimestamp.make(-1, 30L), getter3.get("C"));
    }
}
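The snippet does not show how table2 and table3 are constructed. Judging from the assertions (getter2 returns the parsed integer, getter3 its negation), a plausible caller would look roughly like this (a sketch under those assumptions, not the test's actual code):

@Test
public void testValueGetter() {
    final StreamsBuilder builder = new StreamsBuilder();
    final String topic1 = "topic1";
    final KTableImpl<String, String, String> table1 =
        (KTableImpl<String, String, String>) builder.table(topic1, consumed);
    // table2 parses each string value into an Integer
    final KTableImpl<String, String, Integer> table2 =
        (KTableImpl<String, String, Integer>) table1.mapValues(s -> Integer.valueOf(s));
    // table3 negates table2's values, matching the getter3 assertions above
    final KTableImpl<String, String, Integer> table3 =
        (KTableImpl<String, String, Integer>) table2.mapValues(v -> v * -1);
    doTestValueGetter(builder, topic1, table2, table3);
}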
Use of org.apache.kafka.streams.Topology in project kafka by apache.
From the class KTableMapValuesTest, method testNotSendingOldValue:
@Test
public void testNotSendingOldValue() {
    final StreamsBuilder builder = new StreamsBuilder();
    final String topic1 = "topic1";
    final KTableImpl<String, String, String> table1 =
        (KTableImpl<String, String, String>) builder.table(topic1, consumed);
    final KTableImpl<String, String, Integer> table2 =
        (KTableImpl<String, String, Integer>) table1.mapValues(s -> Integer.valueOf(s));
    final MockApiProcessorSupplier<String, Integer, Void, Void> supplier = new MockApiProcessorSupplier<>();
    final Topology topology = builder.build().addProcessor("proc", supplier, table2.name);
    try (final TopologyTestDriver driver = new TopologyTestDriver(topology, props)) {
        final TestInputTopic<String, String> inputTopic1 = driver.createInputTopic(
            topic1, new StringSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
        final MockApiProcessor<String, Integer, Void, Void> proc = supplier.theCapturedProcessor();
        assertFalse(table1.sendingOldValueEnabled());
        assertFalse(table2.sendingOldValueEnabled());
        inputTopic1.pipeInput("A", "01", 5L);
        inputTopic1.pipeInput("B", "01", 10L);
        inputTopic1.pipeInput("C", "01", 15L);
        proc.checkAndClearProcessResult(
            new KeyValueTimestamp<>("A", new Change<>(1, null), 5),
            new KeyValueTimestamp<>("B", new Change<>(1, null), 10),
            new KeyValueTimestamp<>("C", new Change<>(1, null), 15));
        inputTopic1.pipeInput("A", "02", 10L);
        inputTopic1.pipeInput("B", "02", 8L);
        proc.checkAndClearProcessResult(
            new KeyValueTimestamp<>("A", new Change<>(2, null), 10),
            new KeyValueTimestamp<>("B", new Change<>(2, null), 8));
        inputTopic1.pipeInput("A", "03", 20L);
        proc.checkAndClearProcessResult(new KeyValueTimestamp<>("A", new Change<>(3, null), 20));
        inputTopic1.pipeInput("A", (String) null, 30L);
        proc.checkAndClearProcessResult(new KeyValueTimestamp<>("A", new Change<>(null, null), 30));
    }
}
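The old value in each Change above is null because old-value forwarding is disabled by default. For contrast, a minimal sketch of how a sending-old-values variant would differ, with the flag flipped before the topology is built (hedged: the actual companion test in this class may be structured differently):

// Before builder.build(): enable old-value forwarding on the mapped table;
// the flag propagates upstream to table1 as well.
table2.enableSendingOldValues(true);
assertTrue(table1.sendingOldValueEnabled());
assertTrue(table2.sendingOldValueEnabled());

// The first insert still has no old value, but an update now carries it:
inputTopic1.pipeInput("A", "01", 5L);
proc.checkAndClearProcessResult(new KeyValueTimestamp<>("A", new Change<>(1, null), 5));
inputTopic1.pipeInput("A", "02", 10L);
proc.checkAndClearProcessResult(new KeyValueTimestamp<>("A", new Change<>(2, 1), 10));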
Use of org.apache.kafka.streams.Topology in project kafka by apache.
From the class KTableSourceTest, method testNotSendingOldValue:
@Test
public void testNotSendingOldValue() {
    final StreamsBuilder builder = new StreamsBuilder();
    final String topic1 = "topic1";
    @SuppressWarnings("unchecked")
    final KTableImpl<String, String, String> table1 =
        (KTableImpl<String, String, String>) builder.table(topic1, stringConsumed);
    final MockApiProcessorSupplier<String, Integer, Void, Void> supplier = new MockApiProcessorSupplier<>();
    final Topology topology = builder.build().addProcessor("proc1", supplier, table1.name);
    try (final TopologyTestDriver driver = new TopologyTestDriver(topology, props)) {
        final TestInputTopic<String, String> inputTopic1 = driver.createInputTopic(
            topic1, new StringSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
        final MockApiProcessor<String, Integer, Void, Void> proc1 = supplier.theCapturedProcessor();
        inputTopic1.pipeInput("A", "01", 10L);
        inputTopic1.pipeInput("B", "01", 20L);
        inputTopic1.pipeInput("C", "01", 15L);
        proc1.checkAndClearProcessResult(
            new KeyValueTimestamp<>("A", new Change<>("01", null), 10),
            new KeyValueTimestamp<>("B", new Change<>("01", null), 20),
            new KeyValueTimestamp<>("C", new Change<>("01", null), 15));
        inputTopic1.pipeInput("A", "02", 8L);
        inputTopic1.pipeInput("B", "02", 22L);
        proc1.checkAndClearProcessResult(
            new KeyValueTimestamp<>("A", new Change<>("02", null), 8),
            new KeyValueTimestamp<>("B", new Change<>("02", null), 22));
        inputTopic1.pipeInput("A", "03", 12L);
        proc1.checkAndClearProcessResult(new KeyValueTimestamp<>("A", new Change<>("03", null), 12));
        inputTopic1.pipeInput("A", null, 15L);
        inputTopic1.pipeInput("B", null, 20L);
        proc1.checkAndClearProcessResult(
            new KeyValueTimestamp<>("A", new Change<>(null, null), 15),
            new KeyValueTimestamp<>("B", new Change<>(null, null), 20));
    }
}
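All of these tests lean on shared fixtures (props, consumed, stringConsumed, the topic names) defined elsewhere in their test classes. A plausible minimal setup, with placeholder application id and bootstrap values (assumptions, not the tests' exact configuration):

// Plausible shared fixtures; the actual test classes define their own.
private final Consumed<String, String> stringConsumed =
    Consumed.with(Serdes.String(), Serdes.String());

private static Properties testProps() {
    final Properties props = new Properties();
    props.put(StreamsConfig.APPLICATION_ID_CONFIG, "ktable-test"); // placeholder id
    props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "dummy:1234"); // TopologyTestDriver never connects
    props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
    props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass());
    return props;
}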