Use of org.apache.kafka.streams.state.WindowBytesStoreSupplier in project kafka by apache, in the class KStreamSlidingWindowAggregateTest, method testJoin.
@Test
public void testJoin() {
final StreamsBuilder builder = new StreamsBuilder();
final String topic1 = "topic1";
final String topic2 = "topic2";
final WindowBytesStoreSupplier storeSupplier1 = inOrderIterator
    ? new InOrderMemoryWindowStoreSupplier("InOrder1", 50000L, 10L, false)
    : Stores.inMemoryWindowStore("Reverse1", Duration.ofMillis(50000), Duration.ofMillis(10), false);
final WindowBytesStoreSupplier storeSupplier2 = inOrderIterator
    ? new InOrderMemoryWindowStoreSupplier("InOrder2", 50000L, 10L, false)
    : Stores.inMemoryWindowStore("Reverse2", Duration.ofMillis(50000), Duration.ofMillis(10), false);
final KTable<Windowed<String>, String> table1 = builder
    .stream(topic1, Consumed.with(Serdes.String(), Serdes.String()))
    .groupByKey(Grouped.with(Serdes.String(), Serdes.String()))
    .windowedBy(SlidingWindows.ofTimeDifferenceAndGrace(ofMillis(10), ofMillis(100)))
    .aggregate(MockInitializer.STRING_INIT, MockAggregator.TOSTRING_ADDER, Materialized.as(storeSupplier1));
final KTable<Windowed<String>, String> table2 = builder
    .stream(topic2, Consumed.with(Serdes.String(), Serdes.String()))
    .groupByKey(Grouped.with(Serdes.String(), Serdes.String()))
    .windowedBy(SlidingWindows.ofTimeDifferenceAndGrace(ofMillis(10), ofMillis(100)))
    .aggregate(MockInitializer.STRING_INIT, MockAggregator.TOSTRING_ADDER, Materialized.as(storeSupplier2));
final MockApiProcessorSupplier<Windowed<String>, String, Void, Void> supplier = new MockApiProcessorSupplier<>();
table1.toStream().process(supplier);
table2.toStream().process(supplier);
table1.join(table2, (p1, p2) -> p1 + "%" + p2).toStream().process(supplier);
try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
final TestInputTopic<String, String> inputTopic1 = driver.createInputTopic(topic1, new StringSerializer(), new StringSerializer());
final TestInputTopic<String, String> inputTopic2 = driver.createInputTopic(topic2, new StringSerializer(), new StringSerializer());
inputTopic1.pipeInput("A", "1", 10L);
inputTopic1.pipeInput("B", "2", 11L);
inputTopic1.pipeInput("C", "3", 12L);
final List<MockApiProcessor<Windowed<String>, String, Void, Void>> processors = supplier.capturedProcessors(3);
processors.get(0).checkAndClearProcessResult(
    // left windows created by the first set of records to table 1
    new KeyValueTimestamp<>(new Windowed<>("A", new TimeWindow(0, 10)), "0+1", 10),
    new KeyValueTimestamp<>(new Windowed<>("B", new TimeWindow(1, 11)), "0+2", 11),
    new KeyValueTimestamp<>(new Windowed<>("C", new TimeWindow(2, 12)), "0+3", 12));
processors.get(1).checkAndClearProcessResult();
processors.get(2).checkAndClearProcessResult();
inputTopic1.pipeInput("A", "1", 15L);
inputTopic1.pipeInput("B", "2", 16L);
inputTopic1.pipeInput("C", "3", 19L);
processors.get(0).checkAndClearProcessResult(
    // right windows from previous records are created, and left windows from new records to table 1
    new KeyValueTimestamp<>(new Windowed<>("A", new TimeWindow(11, 21)), "0+1", 15),
    new KeyValueTimestamp<>(new Windowed<>("A", new TimeWindow(5, 15)), "0+1+1", 15),
    new KeyValueTimestamp<>(new Windowed<>("B", new TimeWindow(12, 22)), "0+2", 16),
    new KeyValueTimestamp<>(new Windowed<>("B", new TimeWindow(6, 16)), "0+2+2", 16),
    new KeyValueTimestamp<>(new Windowed<>("C", new TimeWindow(13, 23)), "0+3", 19),
    new KeyValueTimestamp<>(new Windowed<>("C", new TimeWindow(9, 19)), "0+3+3", 19));
processors.get(1).checkAndClearProcessResult();
processors.get(2).checkAndClearProcessResult();
inputTopic2.pipeInput("A", "a", 10L);
inputTopic2.pipeInput("B", "b", 30L);
inputTopic2.pipeInput("C", "c", 12L);
inputTopic2.pipeInput("C", "c", 35L);
processors.get(0).checkAndClearProcessResult();
processors.get(1).checkAndClearProcessResult(
    // left windows from first set of records sent to table 2
    new KeyValueTimestamp<>(new Windowed<>("A", new TimeWindow(0, 10)), "0+a", 10),
    new KeyValueTimestamp<>(new Windowed<>("B", new TimeWindow(20, 30)), "0+b", 30),
    new KeyValueTimestamp<>(new Windowed<>("C", new TimeWindow(2, 12)), "0+c", 12),
    new KeyValueTimestamp<>(new Windowed<>("C", new TimeWindow(25, 35)), "0+c", 35));
processors.get(2).checkAndClearProcessResult(
    // set of join windows from windows created by table 1 and table 2
    new KeyValueTimestamp<>(new Windowed<>("A", new TimeWindow(0, 10)), "0+1%0+a", 10),
    new KeyValueTimestamp<>(new Windowed<>("C", new TimeWindow(2, 12)), "0+3%0+c", 12));
inputTopic2.pipeInput("A", "a", 15L);
inputTopic2.pipeInput("B", "b", 16L);
inputTopic2.pipeInput("C", "c", 17L);
processors.get(0).checkAndClearProcessResult();
processors.get(1).checkAndClearProcessResult(
    // right windows from previous records are created (where applicable), and left windows from new records to table 2
    new KeyValueTimestamp<>(new Windowed<>("A", new TimeWindow(11, 21)), "0+a", 15),
    new KeyValueTimestamp<>(new Windowed<>("A", new TimeWindow(5, 15)), "0+a+a", 15),
    new KeyValueTimestamp<>(new Windowed<>("B", new TimeWindow(6, 16)), "0+b", 16),
    new KeyValueTimestamp<>(new Windowed<>("C", new TimeWindow(13, 23)), "0+c", 17),
    new KeyValueTimestamp<>(new Windowed<>("C", new TimeWindow(7, 17)), "0+c+c", 17));
processors.get(2).checkAndClearProcessResult(
    // set of join windows from windows created by table 1 and table 2
    new KeyValueTimestamp<>(new Windowed<>("A", new TimeWindow(11, 21)), "0+1%0+a", 15),
    new KeyValueTimestamp<>(new Windowed<>("A", new TimeWindow(5, 15)), "0+1+1%0+a+a", 15),
    new KeyValueTimestamp<>(new Windowed<>("B", new TimeWindow(6, 16)), "0+2+2%0+b", 16),
    new KeyValueTimestamp<>(new Windowed<>("C", new TimeWindow(13, 23)), "0+3%0+c", 19));
}
}
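The props handed to TopologyTestDriver and the "0+…" aggregate strings come from test scaffolding that is not shown in this excerpt: MockInitializer.STRING_INIT supplies the initial "0" and MockAggregator.TOSTRING_ADDER appends "+" plus each incoming value, which is why the expected aggregates read "0+1", "0+1+1", and so on. A minimal driver configuration along the following lines would be enough to run the topology; the application id and serde defaults here are assumptions rather than the test's actual values.

import java.util.Properties;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.StreamsConfig;

// Hypothetical stand-in for the test class's `props` field.
final Properties props = new Properties();
props.put(StreamsConfig.APPLICATION_ID_CONFIG, "sliding-window-aggregate-test"); // assumed id
props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "dummy:1234"); // TopologyTestDriver never contacts a broker
props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());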
Use of org.apache.kafka.streams.state.WindowBytesStoreSupplier in project kafka by apache, in the class KStreamSlidingWindowAggregateTest, method testAggregateLargeInput.
@Test
public void testAggregateLargeInput() {
final StreamsBuilder builder = new StreamsBuilder();
final String topic1 = "topic1";
final WindowBytesStoreSupplier storeSupplier = inOrderIterator
    ? new InOrderMemoryWindowStoreSupplier("InOrder", 50000L, 10L, false)
    : Stores.inMemoryWindowStore("Reverse", Duration.ofMillis(50000), Duration.ofMillis(10), false);
final KTable<Windowed<String>, String> table2 = builder
    .stream(topic1, Consumed.with(Serdes.String(), Serdes.String()))
    .groupByKey(Grouped.with(Serdes.String(), Serdes.String()))
    .windowedBy(SlidingWindows.ofTimeDifferenceAndGrace(ofMillis(10), ofMillis(50)))
    .aggregate(MockInitializer.STRING_INIT, MockAggregator.TOSTRING_ADDER, Materialized.as(storeSupplier));
final MockApiProcessorSupplier<Windowed<String>, String, Void, Void> supplier = new MockApiProcessorSupplier<>();
table2.toStream().process(supplier);
try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
final TestInputTopic<String, String> inputTopic1 = driver.createInputTopic(topic1, new StringSerializer(), new StringSerializer());
inputTopic1.pipeInput("A", "1", 10L);
inputTopic1.pipeInput("A", "2", 20L);
inputTopic1.pipeInput("A", "3", 22L);
inputTopic1.pipeInput("A", "4", 15L);
inputTopic1.pipeInput("B", "1", 12L);
inputTopic1.pipeInput("B", "2", 13L);
inputTopic1.pipeInput("B", "3", 18L);
inputTopic1.pipeInput("B", "4", 19L);
inputTopic1.pipeInput("B", "5", 25L);
inputTopic1.pipeInput("B", "6", 14L);
inputTopic1.pipeInput("C", "1", 11L);
inputTopic1.pipeInput("C", "2", 15L);
inputTopic1.pipeInput("C", "3", 16L);
inputTopic1.pipeInput("C", "4", 21);
inputTopic1.pipeInput("C", "5", 23L);
inputTopic1.pipeInput("D", "4", 11L);
inputTopic1.pipeInput("D", "2", 12L);
inputTopic1.pipeInput("D", "3", 29L);
inputTopic1.pipeInput("D", "5", 16L);
}
final Comparator<KeyValueTimestamp<Windowed<String>, String>> comparator =
    Comparator.comparing((KeyValueTimestamp<Windowed<String>, String> o) -> o.key().key())
        .thenComparing((KeyValueTimestamp<Windowed<String>, String> o) -> o.key().window().start());
final ArrayList<KeyValueTimestamp<Windowed<String>, String>> actual = supplier.theCapturedProcessor().processed();
actual.sort(comparator);
assertEquals(asList(
    // FINAL WINDOW: A@10 left window created when A@10 processed
    new KeyValueTimestamp<>(new Windowed<>("A", new TimeWindow(0, 10)), "0+1", 10),
    // FINAL WINDOW: A@15 left window created when A@15 processed
    new KeyValueTimestamp<>(new Windowed<>("A", new TimeWindow(5, 15)), "0+1+4", 15),
    // A@20 left window created when A@20 processed
    new KeyValueTimestamp<>(new Windowed<>("A", new TimeWindow(10, 20)), "0+1+2", 20),
    // FINAL WINDOW: A@20 left window updated when A@15 processed
    new KeyValueTimestamp<>(new Windowed<>("A", new TimeWindow(10, 20)), "0+1+2+4", 20),
    // A@10 right window created when A@20 processed
    new KeyValueTimestamp<>(new Windowed<>("A", new TimeWindow(11, 21)), "0+2", 20),
    // FINAL WINDOW: A@10 right window updated when A@15 processed
    new KeyValueTimestamp<>(new Windowed<>("A", new TimeWindow(11, 21)), "0+2+4", 20),
    // A@22 left window created when A@22 processed
    new KeyValueTimestamp<>(new Windowed<>("A", new TimeWindow(12, 22)), "0+2+3", 22),
    // FINAL WINDOW: A@22 left window updated when A@15 processed
    new KeyValueTimestamp<>(new Windowed<>("A", new TimeWindow(12, 22)), "0+2+3+4", 22),
    // FINAL WINDOW: A@15 right window created when A@15 processed
    new KeyValueTimestamp<>(new Windowed<>("A", new TimeWindow(16, 26)), "0+2+3", 22),
    // FINAL WINDOW: A@20 right window created when A@22 processed
    new KeyValueTimestamp<>(new Windowed<>("A", new TimeWindow(21, 31)), "0+3", 22),
    // FINAL WINDOW: B@12 left window created when B@12 processed
    new KeyValueTimestamp<>(new Windowed<>("B", new TimeWindow(2, 12)), "0+1", 12),
    // FINAL WINDOW: B@13 left window created when B@13 processed
    new KeyValueTimestamp<>(new Windowed<>("B", new TimeWindow(3, 13)), "0+1+2", 13),
    // FINAL WINDOW: B@14 left window created when B@14 processed
    new KeyValueTimestamp<>(new Windowed<>("B", new TimeWindow(4, 14)), "0+1+2+6", 14),
    // B@18 left window created when B@18 processed
    new KeyValueTimestamp<>(new Windowed<>("B", new TimeWindow(8, 18)), "0+1+2+3", 18),
    // FINAL WINDOW: B@18 left window updated when B@14 processed
    new KeyValueTimestamp<>(new Windowed<>("B", new TimeWindow(8, 18)), "0+1+2+3+6", 18),
    // B@19 left window created when B@19 processed
    new KeyValueTimestamp<>(new Windowed<>("B", new TimeWindow(9, 19)), "0+1+2+3+4", 19),
    // FINAL WINDOW: B@19 left window updated when B@14 processed
    new KeyValueTimestamp<>(new Windowed<>("B", new TimeWindow(9, 19)), "0+1+2+3+4+6", 19),
    // B@12 right window created when B@13 processed
    new KeyValueTimestamp<>(new Windowed<>("B", new TimeWindow(13, 23)), "0+2", 13),
    // B@12 right window updated when B@18 processed
    new KeyValueTimestamp<>(new Windowed<>("B", new TimeWindow(13, 23)), "0+2+3", 18),
    // B@12 right window updated when B@19 processed
    new KeyValueTimestamp<>(new Windowed<>("B", new TimeWindow(13, 23)), "0+2+3+4", 19),
    // FINAL WINDOW: B@12 right window updated when B@14 processed
    new KeyValueTimestamp<>(new Windowed<>("B", new TimeWindow(13, 23)), "0+2+3+4+6", 19),
    // B@13 right window created when B@18 processed
    new KeyValueTimestamp<>(new Windowed<>("B", new TimeWindow(14, 24)), "0+3", 18),
    // B@13 right window updated when B@19 processed
    new KeyValueTimestamp<>(new Windowed<>("B", new TimeWindow(14, 24)), "0+3+4", 19),
    // FINAL WINDOW: B@13 right window updated when B@14 processed
    new KeyValueTimestamp<>(new Windowed<>("B", new TimeWindow(14, 24)), "0+3+4+6", 19),
    // FINAL WINDOW: B@25 left window created when B@25 processed
    new KeyValueTimestamp<>(new Windowed<>("B", new TimeWindow(15, 25)), "0+3+4+5", 25),
    // B@18 right window created when B@19 processed
    new KeyValueTimestamp<>(new Windowed<>("B", new TimeWindow(19, 29)), "0+4", 19),
    // FINAL WINDOW: B@18 right window updated when B@25 processed
    new KeyValueTimestamp<>(new Windowed<>("B", new TimeWindow(19, 29)), "0+4+5", 25),
    // FINAL WINDOW: B@19 right window updated when B@25 processed
    new KeyValueTimestamp<>(new Windowed<>("B", new TimeWindow(20, 30)), "0+5", 25),
    // FINAL WINDOW: C@11 left window created when C@11 processed
    new KeyValueTimestamp<>(new Windowed<>("C", new TimeWindow(1, 11)), "0+1", 11),
    // FINAL WINDOW: C@15 left window created when C@15 processed
    new KeyValueTimestamp<>(new Windowed<>("C", new TimeWindow(5, 15)), "0+1+2", 15),
    // FINAL WINDOW: C@16 left window created when C@16 processed
    new KeyValueTimestamp<>(new Windowed<>("C", new TimeWindow(6, 16)), "0+1+2+3", 16),
    // FINAL WINDOW: C@21 left window created when C@21 processed
    new KeyValueTimestamp<>(new Windowed<>("C", new TimeWindow(11, 21)), "0+1+2+3+4", 21),
    // C@11 right window created when C@15 processed
    new KeyValueTimestamp<>(new Windowed<>("C", new TimeWindow(12, 22)), "0+2", 15),
    // C@11 right window updated when C@16 processed
    new KeyValueTimestamp<>(new Windowed<>("C", new TimeWindow(12, 22)), "0+2+3", 16),
    // FINAL WINDOW: C@11 right window updated when C@21 processed
    new KeyValueTimestamp<>(new Windowed<>("C", new TimeWindow(12, 22)), "0+2+3+4", 21),
    // FINAL WINDOW: C@23 left window created when C@23 processed
    new KeyValueTimestamp<>(new Windowed<>("C", new TimeWindow(13, 23)), "0+2+3+4+5", 23),
    // C@15 right window created when C@16 processed
    new KeyValueTimestamp<>(new Windowed<>("C", new TimeWindow(16, 26)), "0+3", 16),
    // C@15 right window updated when C@21 processed
    new KeyValueTimestamp<>(new Windowed<>("C", new TimeWindow(16, 26)), "0+3+4", 21),
    // FINAL WINDOW: C@15 right window updated when C@23 processed
    new KeyValueTimestamp<>(new Windowed<>("C", new TimeWindow(16, 26)), "0+3+4+5", 23),
    // C@16 right window created when C@21 processed
    new KeyValueTimestamp<>(new Windowed<>("C", new TimeWindow(17, 27)), "0+4", 21),
    // FINAL WINDOW: C@16 right window updated when C@23 processed
    new KeyValueTimestamp<>(new Windowed<>("C", new TimeWindow(17, 27)), "0+4+5", 23),
    // FINAL WINDOW: C@21 right window created when C@23 processed
    new KeyValueTimestamp<>(new Windowed<>("C", new TimeWindow(22, 32)), "0+5", 23),
    // FINAL WINDOW: D@11 left window created when D@11 processed
    new KeyValueTimestamp<>(new Windowed<>("D", new TimeWindow(1, 11)), "0+4", 11),
    // FINAL WINDOW: D@12 left window created when D@12 processed
    new KeyValueTimestamp<>(new Windowed<>("D", new TimeWindow(2, 12)), "0+4+2", 12),
    // FINAL WINDOW: D@16 left window created when D@16 processed
    new KeyValueTimestamp<>(new Windowed<>("D", new TimeWindow(6, 16)), "0+4+2+5", 16),
    // D@11 right window created when D@12 processed
    new KeyValueTimestamp<>(new Windowed<>("D", new TimeWindow(12, 22)), "0+2", 12),
    // FINAL WINDOW: D@11 right window updated when D@16 processed
    new KeyValueTimestamp<>(new Windowed<>("D", new TimeWindow(12, 22)), "0+2+5", 16),
    // FINAL WINDOW: D@12 right window created when D@16 processed
    new KeyValueTimestamp<>(new Windowed<>("D", new TimeWindow(13, 23)), "0+5", 16),
    // FINAL WINDOW: D@29 left window created when D@29 processed
    new KeyValueTimestamp<>(new Windowed<>("D", new TimeWindow(19, 29)), "0+3", 29)),
    actual);
}
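The left window and right window comments in the expected output follow the sliding-window convention: a record at time t with time difference d belongs to a left window [t - d, t], and a right window [t + 1, t + d + 1] is materialized only once a later record arrives within d milliseconds of it. The sketch below reproduces the TimeWindow bounds asserted above for records with t >= d; the helper names are illustrative and not part of the Kafka API.

import org.apache.kafka.streams.kstream.internals.TimeWindow;

// Illustrative only: A@10 with d = 10 yields left window (0, 10),
// and its right window (11, 21) only appears once a later record for A arrives.
static TimeWindow leftWindow(final long recordTime, final long timeDifferenceMs) {
    return new TimeWindow(recordTime - timeDifferenceMs, recordTime);
}

static TimeWindow rightWindow(final long recordTime, final long timeDifferenceMs) {
    return new TimeWindow(recordTime + 1, recordTime + timeDifferenceMs + 1);
}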
Use of org.apache.kafka.streams.state.WindowBytesStoreSupplier in project kafka by apache, in the class KStreamSlidingWindowAggregateTest, method testReduceSmallInput.
@Test
public void testReduceSmallInput() {
final StreamsBuilder builder = new StreamsBuilder();
final String topic = "topic";
final WindowBytesStoreSupplier storeSupplier = inOrderIterator
    ? new InOrderMemoryWindowStoreSupplier("InOrder", 50000L, 10L, false)
    : Stores.inMemoryWindowStore("Reverse", Duration.ofMillis(50000), Duration.ofMillis(10), false);
final KTable<Windowed<String>, String> table = builder
    .stream(topic, Consumed.with(Serdes.String(), Serdes.String()))
    .groupByKey(Grouped.with(Serdes.String(), Serdes.String()))
    .windowedBy(SlidingWindows.ofTimeDifferenceAndGrace(ofMillis(10), ofMillis(50)))
    .reduce(MockReducer.STRING_ADDER, Materialized.as(storeSupplier));
final MockApiProcessorSupplier<Windowed<String>, String, Void, Void> supplier = new MockApiProcessorSupplier<>();
table.toStream().process(supplier);
try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
final TestInputTopic<String, String> inputTopic = driver.createInputTopic(topic, new StringSerializer(), new StringSerializer());
inputTopic.pipeInput("A", "1", 10L);
inputTopic.pipeInput("A", "2", 14L);
inputTopic.pipeInput("A", "3", 15L);
inputTopic.pipeInput("A", "4", 22L);
inputTopic.pipeInput("A", "5", 26L);
inputTopic.pipeInput("A", "6", 30L);
}
final Map<Long, ValueAndTimestamp<String>> actual = new HashMap<>();
for (final KeyValueTimestamp<Windowed<String>, String> entry : supplier.theCapturedProcessor().processed()) {
final Windowed<String> window = entry.key();
final Long start = window.window().start();
final ValueAndTimestamp<String> valueAndTimestamp = ValueAndTimestamp.make(entry.value(), entry.timestamp());
if (actual.putIfAbsent(start, valueAndTimestamp) != null) {
actual.replace(start, valueAndTimestamp);
}
}
final Map<Long, ValueAndTimestamp<String>> expected = new HashMap<>();
expected.put(0L, ValueAndTimestamp.make("1", 10L));
expected.put(4L, ValueAndTimestamp.make("1+2", 14L));
expected.put(5L, ValueAndTimestamp.make("1+2+3", 15L));
expected.put(11L, ValueAndTimestamp.make("2+3", 15L));
expected.put(12L, ValueAndTimestamp.make("2+3+4", 22L));
expected.put(15L, ValueAndTimestamp.make("3+4", 22L));
expected.put(16L, ValueAndTimestamp.make("4+5", 26L));
expected.put(20L, ValueAndTimestamp.make("4+5+6", 30L));
expected.put(23L, ValueAndTimestamp.make("5+6", 30L));
expected.put(27L, ValueAndTimestamp.make("6", 30L));
assertEquals(expected, actual);
}
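MockReducer.STRING_ADDER is a Kafka Streams test helper; as the expected values above show ("1+2", "2+3+4", and so on), it joins the two values with a "+". A plain Reducer with the same observable behaviour would look like the sketch below and could be passed to reduce(...) in place of the mock; it is inferred from the output, not copied from the helper's source.

import org.apache.kafka.streams.kstream.Reducer;

// Behaviour inferred from the expected output: concatenate the two values with "+".
final Reducer<String> stringAdder = (value1, value2) -> value1 + "+" + value2;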
Use of org.apache.kafka.streams.state.WindowBytesStoreSupplier in project kafka by apache, in the class SlidingWindowedKStreamImplTest, method shouldDropWindowsOutsideOfRetention.
@Test
public void shouldDropWindowsOutsideOfRetention() {
final WindowBytesStoreSupplier storeSupplier = Stores.inMemoryWindowStore("aggregated", ofMillis(1200L), ofMillis(100L), false);
windowedStream.aggregate(
    MockInitializer.STRING_INIT,
    MockAggregator.TOSTRING_ADDER,
    Materialized.<String, String>as(storeSupplier)
        .withKeySerde(Serdes.String())
        .withValueSerde(Serdes.String())
        .withCachingDisabled());
try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
final TestInputTopic<String, String> inputTopic = driver.createInputTopic(TOPIC, new StringSerializer(), new StringSerializer());
inputTopic.pipeInput("1", "2", 100L);
inputTopic.pipeInput("1", "3", 500L);
inputTopic.pipeInput("1", "4", 799L);
inputTopic.pipeInput("1", "4", 1000L);
inputTopic.pipeInput("1", "5", 2000L);
{
final WindowStore<String, String> windowStore = driver.getWindowStore("aggregated");
final List<KeyValue<Windowed<String>, String>> data =
    StreamsTestUtils.toList(windowStore.fetch("1", "1", ofEpochMilli(0), ofEpochMilli(10000L)));
assertThat(data, equalTo(Arrays.asList(
    KeyValue.pair(new Windowed<>("1", new TimeWindow(900, 1000)), "0+4"),
    KeyValue.pair(new Windowed<>("1", new TimeWindow(1900, 2000)), "0+5"))));
}
{
final WindowStore<String, ValueAndTimestamp<String>> windowStore = driver.getTimestampedWindowStore("aggregated");
final List<KeyValue<Windowed<String>, ValueAndTimestamp<String>>> data =
    StreamsTestUtils.toList(windowStore.fetch("1", "1", ofEpochMilli(0), ofEpochMilli(2000L)));
assertThat(data, equalTo(Arrays.asList(
    KeyValue.pair(new Windowed<>("1", new TimeWindow(900, 1000)), ValueAndTimestamp.make("0+4", 1000L)),
    KeyValue.pair(new Windowed<>("1", new TimeWindow(1900, 2000)), ValueAndTimestamp.make("0+5", 2000L)))));
}
}
}
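The backing store is created with a 1200 ms retention period, so once the record at timestamp 2000 advances stream time, every window that ended before 2000 - 1200 = 800 has been expired; only the left windows of the records at 1000 and 2000 remain, which is exactly what both fetches assert. When a custom WindowBytesStoreSupplier is not required, the same retention can be pinned down on Materialized directly; a sketch under that assumption:

import org.apache.kafka.common.utils.Bytes;
import org.apache.kafka.streams.kstream.Materialized;
import org.apache.kafka.streams.state.WindowStore;

// Let Kafka Streams create the store; only the name, retention, serdes, and caching are fixed here.
final Materialized<String, String, WindowStore<Bytes, byte[]>> materialized =
    Materialized.<String, String, WindowStore<Bytes, byte[]>>as("aggregated")
        .withRetention(ofMillis(1200L))
        .withKeySerde(Serdes.String())
        .withValueSerde(Serdes.String())
        .withCachingDisabled();
windowedStream.aggregate(MockInitializer.STRING_INIT, MockAggregator.TOSTRING_ADDER, materialized);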
Use of org.apache.kafka.streams.state.WindowBytesStoreSupplier in project kafka by apache, in the class PositionRestartIntegrationTest, method beforeTest.
public void beforeTest(final boolean cleanup) {
final StoreSupplier<?> supplier = storeToTest.supplier();
final StreamsBuilder builder = new StreamsBuilder();
if (Objects.equals(kind, "DSL") && supplier instanceof KeyValueBytesStoreSupplier) {
setUpKeyValueDSLTopology((KeyValueBytesStoreSupplier) supplier, builder);
} else if (Objects.equals(kind, "PAPI") && supplier instanceof KeyValueBytesStoreSupplier) {
setUpKeyValuePAPITopology((KeyValueBytesStoreSupplier) supplier, builder);
} else if (Objects.equals(kind, "DSL") && supplier instanceof WindowBytesStoreSupplier) {
setUpWindowDSLTopology((WindowBytesStoreSupplier) supplier, builder);
} else if (Objects.equals(kind, "PAPI") && supplier instanceof WindowBytesStoreSupplier) {
setUpWindowPAPITopology((WindowBytesStoreSupplier) supplier, builder);
} else if (Objects.equals(kind, "DSL") && supplier instanceof SessionBytesStoreSupplier) {
setUpSessionDSLTopology((SessionBytesStoreSupplier) supplier, builder);
} else if (Objects.equals(kind, "PAPI") && supplier instanceof SessionBytesStoreSupplier) {
setUpSessionPAPITopology((SessionBytesStoreSupplier) supplier, builder);
} else {
throw new AssertionError("Store supplier is an unrecognized type.");
}
kafkaStreams = IntegrationTestUtils.getStartedStreams(streamsConfig, builder, cleanup);
}
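storeToTest.supplier() returns whichever StoreSupplier variant the current test parameter selects, and the instanceof chain above routes it to the matching DSL or PAPI topology setup. For the two WindowBytesStoreSupplier branches, such a supplier would typically come from the Stores factory, for example (illustrative names and durations, not the test's actual parameters):

import java.time.Duration;
import org.apache.kafka.streams.state.Stores;
import org.apache.kafka.streams.state.WindowBytesStoreSupplier;

// Either of these would be dispatched to setUpWindowDSLTopology or setUpWindowPAPITopology above.
final WindowBytesStoreSupplier persistent =
    Stores.persistentWindowStore("window-store", Duration.ofMillis(60000), Duration.ofMillis(100), false);
final WindowBytesStoreSupplier inMemory =
    Stores.inMemoryWindowStore("window-store", Duration.ofMillis(60000), Duration.ofMillis(100), false);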