Use of org.apache.kafka.streams.state.ValueAndTimestamp in project kafka by apache.
From class KStreamSlidingWindowAggregateTest, method testAggregateSmallInput:
@Test
public void testAggregateSmallInput() {
    final StreamsBuilder builder = new StreamsBuilder();
    final String topic = "topic";
    final WindowBytesStoreSupplier storeSupplier = inOrderIterator
        ? new InOrderMemoryWindowStoreSupplier("InOrder", 50000L, 10L, false)
        : Stores.inMemoryWindowStore("Reverse", Duration.ofMillis(50000), Duration.ofMillis(10), false);
    final KTable<Windowed<String>, String> table = builder
        .stream(topic, Consumed.with(Serdes.String(), Serdes.String()))
        .groupByKey(Grouped.with(Serdes.String(), Serdes.String()))
        .windowedBy(SlidingWindows.ofTimeDifferenceAndGrace(ofMillis(10), ofMillis(50)))
        .aggregate(MockInitializer.STRING_INIT, MockAggregator.TOSTRING_ADDER, Materialized.as(storeSupplier));
    final MockApiProcessorSupplier<Windowed<String>, String, Void, Void> supplier = new MockApiProcessorSupplier<>();
    table.toStream().process(supplier);
    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        final TestInputTopic<String, String> inputTopic =
            driver.createInputTopic(topic, new StringSerializer(), new StringSerializer());
        inputTopic.pipeInput("A", "1", 10L);
        inputTopic.pipeInput("A", "2", 15L);
        inputTopic.pipeInput("A", "3", 20L);
        inputTopic.pipeInput("A", "4", 22L);
        inputTopic.pipeInput("A", "5", 30L);
    }
    final Map<Long, ValueAndTimestamp<String>> actual = new HashMap<>();
    for (final KeyValueTimestamp<Windowed<String>, String> entry : supplier.theCapturedProcessor().processed()) {
        final Windowed<String> window = entry.key();
        final Long start = window.window().start();
        final ValueAndTimestamp<String> valueAndTimestamp = ValueAndTimestamp.make(entry.value(), entry.timestamp());
        if (actual.putIfAbsent(start, valueAndTimestamp) != null) {
            actual.replace(start, valueAndTimestamp);
        }
    }
    final Map<Long, ValueAndTimestamp<String>> expected = new HashMap<>();
    expected.put(0L, ValueAndTimestamp.make("0+1", 10L));
    expected.put(5L, ValueAndTimestamp.make("0+1+2", 15L));
    expected.put(10L, ValueAndTimestamp.make("0+1+2+3", 20L));
    expected.put(11L, ValueAndTimestamp.make("0+2+3", 20L));
    expected.put(12L, ValueAndTimestamp.make("0+2+3+4", 22L));
    expected.put(16L, ValueAndTimestamp.make("0+3+4", 22L));
    expected.put(20L, ValueAndTimestamp.make("0+3+4+5", 30L));
    expected.put(21L, ValueAndTimestamp.make("0+4+5", 30L));
    expected.put(23L, ValueAndTimestamp.make("0+5", 30L));
    assertEquals(expected, actual);
}
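The loop above keeps only the newest emission per window start, pairing each aggregate with its record timestamp via ValueAndTimestamp.make. A minimal, self-contained sketch of that API (class, variable names, and values here are illustrative, not taken from the test):

import java.util.HashMap;
import java.util.Map;
import org.apache.kafka.streams.state.ValueAndTimestamp;

public class ValueAndTimestampSketch {
    public static void main(final String[] args) {
        // make(value, timestamp) pairs an aggregate with the event time that produced it.
        final ValueAndTimestamp<String> first = ValueAndTimestamp.make("0+1+2", 15L);
        final ValueAndTimestamp<String> updated = ValueAndTimestamp.make("0+1+2+3", 20L);
        // Keep only the latest emission per window start; putIfAbsent followed by
        // replace, as in the test above, amounts to a plain put for this purpose.
        final Map<Long, ValueAndTimestamp<String>> latest = new HashMap<>();
        latest.put(5L, first);
        latest.put(5L, updated);
        System.out.println(latest.get(5L).value());     // 0+1+2+3
        System.out.println(latest.get(5L).timestamp()); // 20
    }
}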
Use of org.apache.kafka.streams.state.ValueAndTimestamp in project kafka by apache.
From class KStreamSlidingWindowAggregateTest, method testReduceSmallInput:
@Test
public void testReduceSmallInput() {
    final StreamsBuilder builder = new StreamsBuilder();
    final String topic = "topic";
    final WindowBytesStoreSupplier storeSupplier = inOrderIterator
        ? new InOrderMemoryWindowStoreSupplier("InOrder", 50000L, 10L, false)
        : Stores.inMemoryWindowStore("Reverse", Duration.ofMillis(50000), Duration.ofMillis(10), false);
    final KTable<Windowed<String>, String> table = builder
        .stream(topic, Consumed.with(Serdes.String(), Serdes.String()))
        .groupByKey(Grouped.with(Serdes.String(), Serdes.String()))
        .windowedBy(SlidingWindows.ofTimeDifferenceAndGrace(ofMillis(10), ofMillis(50)))
        .reduce(MockReducer.STRING_ADDER, Materialized.as(storeSupplier));
    final MockApiProcessorSupplier<Windowed<String>, String, Void, Void> supplier = new MockApiProcessorSupplier<>();
    table.toStream().process(supplier);
    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        final TestInputTopic<String, String> inputTopic =
            driver.createInputTopic(topic, new StringSerializer(), new StringSerializer());
        inputTopic.pipeInput("A", "1", 10L);
        inputTopic.pipeInput("A", "2", 14L);
        inputTopic.pipeInput("A", "3", 15L);
        inputTopic.pipeInput("A", "4", 22L);
        inputTopic.pipeInput("A", "5", 26L);
        inputTopic.pipeInput("A", "6", 30L);
    }
    final Map<Long, ValueAndTimestamp<String>> actual = new HashMap<>();
    for (final KeyValueTimestamp<Windowed<String>, String> entry : supplier.theCapturedProcessor().processed()) {
        final Windowed<String> window = entry.key();
        final Long start = window.window().start();
        final ValueAndTimestamp<String> valueAndTimestamp = ValueAndTimestamp.make(entry.value(), entry.timestamp());
        if (actual.putIfAbsent(start, valueAndTimestamp) != null) {
            actual.replace(start, valueAndTimestamp);
        }
    }
    final Map<Long, ValueAndTimestamp<String>> expected = new HashMap<>();
    expected.put(0L, ValueAndTimestamp.make("1", 10L));
    expected.put(4L, ValueAndTimestamp.make("1+2", 14L));
    expected.put(5L, ValueAndTimestamp.make("1+2+3", 15L));
    expected.put(11L, ValueAndTimestamp.make("2+3", 15L));
    expected.put(12L, ValueAndTimestamp.make("2+3+4", 22L));
    expected.put(15L, ValueAndTimestamp.make("3+4", 22L));
    expected.put(16L, ValueAndTimestamp.make("4+5", 26L));
    expected.put(20L, ValueAndTimestamp.make("4+5+6", 30L));
    expected.put(23L, ValueAndTimestamp.make("5+6", 30L));
    expected.put(27L, ValueAndTimestamp.make("6", 30L));
    assertEquals(expected, actual);
}
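The expected window starts follow from sliding-window semantics: each record at time t defines a window [t - 10, t], and each earlier record at time e opens a "right" window [e + 1, e + 11] that is only materialized once a later record falls into it. A small sketch deriving the starts above under that reading (my summary of SlidingWindows behavior, not code from the test):

import java.util.List;
import java.util.TreeSet;

public class SlidingWindowStarts {
    public static void main(final String[] args) {
        final long timeDifference = 10L;
        final List<Long> timestamps = List.of(10L, 14L, 15L, 22L, 26L, 30L);
        final TreeSet<Long> starts = new TreeSet<>();
        for (final long t : timestamps) {
            // Window ending at each record's timestamp (clamped at 0 for early records).
            starts.add(Math.max(0L, t - timeDifference));
        }
        for (int i = 0; i < timestamps.size(); i++) {
            final long rightStart = timestamps.get(i) + 1; // window just after record i
            for (int j = i + 1; j < timestamps.size(); j++) {
                if (timestamps.get(j) <= rightStart + timeDifference) {
                    starts.add(rightStart); // created once a later record lands in it
                }
            }
        }
        System.out.println(starts); // [0, 4, 5, 11, 12, 15, 16, 20, 23, 27]
    }
}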
Use of org.apache.kafka.streams.state.ValueAndTimestamp in project kafka by apache.
From class SessionWindowedKStreamImplTest, method shouldAggregateSessionWindowed:
@Test
public void shouldAggregateSessionWindowed() {
    final MockApiProcessorSupplier<Windowed<String>, String, Void, Void> supplier = new MockApiProcessorSupplier<>();
    stream.aggregate(MockInitializer.STRING_INIT, MockAggregator.TOSTRING_ADDER, sessionMerger, Materialized.with(Serdes.String(), Serdes.String()))
        .toStream()
        .process(supplier);
    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        processData(driver);
    }
    final Map<Windowed<String>, ValueAndTimestamp<String>> result = supplier.theCapturedProcessor().lastValueAndTimestampPerKey();
    assertThat(result.size(), equalTo(3));
    assertThat(result.get(new Windowed<>("1", new SessionWindow(10, 15))), equalTo(ValueAndTimestamp.make("0+0+1+2", 15L)));
    assertThat(result.get(new Windowed<>("2", new SessionWindow(599, 600))), equalTo(ValueAndTimestamp.make("0+0+1+2", 600L)));
    assertThat(result.get(new Windowed<>("1", new SessionWindow(600, 600))), equalTo(ValueAndTimestamp.make("0+3", 600L)));
}
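The sessionMerger field is defined elsewhere in SessionWindowedKStreamImplTest and is not part of this snippet. Given merged results such as "0+0+1+2" above, a plausible reconstruction (an assumption, not the verbatim field) is a Merger that concatenates the two session aggregates:

import org.apache.kafka.streams.kstream.Merger;

// Hypothetical reconstruction: when two sessions merge, join their aggregates with "+".
final Merger<String, String> sessionMerger = (aggKey, aggOne, aggTwo) -> aggOne + "+" + aggTwo;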
Use of org.apache.kafka.streams.state.ValueAndTimestamp in project kafka by apache.
From class SessionWindowedKStreamImplTest, method shouldReduceWindowed:
@Test
public void shouldReduceWindowed() {
    final MockApiProcessorSupplier<Windowed<String>, String, Void, Void> supplier = new MockApiProcessorSupplier<>();
    stream.reduce(MockReducer.STRING_ADDER)
        .toStream()
        .process(supplier);
    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        processData(driver);
    }
    final Map<Windowed<String>, ValueAndTimestamp<String>> result = supplier.theCapturedProcessor().lastValueAndTimestampPerKey();
    assertThat(result.size(), equalTo(3));
    assertThat(result.get(new Windowed<>("1", new SessionWindow(10, 15))), equalTo(ValueAndTimestamp.make("1+2", 15L)));
    assertThat(result.get(new Windowed<>("2", new SessionWindow(599, 600))), equalTo(ValueAndTimestamp.make("1+2", 600L)));
    assertThat(result.get(new Windowed<>("1", new SessionWindow(600, 600))), equalTo(ValueAndTimestamp.make("3", 600L)));
}
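lastValueAndTimestampPerKey() is a convenience on the mock processor; judging from its use here, it plausibly collapses the captured forwards into the last ValueAndTimestamp seen per key, much like the manual loops in the sliding-window tests above. A sketch of that assumption (not the mock's verbatim implementation):

final Map<Windowed<String>, ValueAndTimestamp<String>> last = new HashMap<>();
for (final KeyValueTimestamp<Windowed<String>, String> e : supplier.theCapturedProcessor().processed()) {
    // Later forwards for the same windowed key overwrite earlier ones.
    last.put(e.key(), ValueAndTimestamp.make(e.value(), e.timestamp()));
}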
Use of org.apache.kafka.streams.state.ValueAndTimestamp in project kafka by apache.
From class SlidingWindowedKStreamImplTest, method shouldDropWindowsOutsideOfRetention:
@Test
public void shouldDropWindowsOutsideOfRetention() {
    final WindowBytesStoreSupplier storeSupplier = Stores.inMemoryWindowStore("aggregated", ofMillis(1200L), ofMillis(100L), false);
    windowedStream.aggregate(
        MockInitializer.STRING_INIT,
        MockAggregator.TOSTRING_ADDER,
        Materialized.<String, String>as(storeSupplier).withKeySerde(Serdes.String()).withValueSerde(Serdes.String()).withCachingDisabled());
    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        final TestInputTopic<String, String> inputTopic =
            driver.createInputTopic(TOPIC, new StringSerializer(), new StringSerializer());
        inputTopic.pipeInput("1", "2", 100L);
        inputTopic.pipeInput("1", "3", 500L);
        inputTopic.pipeInput("1", "4", 799L);
        inputTopic.pipeInput("1", "4", 1000L);
        inputTopic.pipeInput("1", "5", 2000L);
        {
            final WindowStore<String, String> windowStore = driver.getWindowStore("aggregated");
            final List<KeyValue<Windowed<String>, String>> data =
                StreamsTestUtils.toList(windowStore.fetch("1", "1", ofEpochMilli(0), ofEpochMilli(10000L)));
            assertThat(data, equalTo(Arrays.asList(
                KeyValue.pair(new Windowed<>("1", new TimeWindow(900, 1000)), "0+4"),
                KeyValue.pair(new Windowed<>("1", new TimeWindow(1900, 2000)), "0+5"))));
        }
        {
            final WindowStore<String, ValueAndTimestamp<String>> windowStore = driver.getTimestampedWindowStore("aggregated");
            final List<KeyValue<Windowed<String>, ValueAndTimestamp<String>>> data =
                StreamsTestUtils.toList(windowStore.fetch("1", "1", ofEpochMilli(0), ofEpochMilli(2000L)));
            assertThat(data, equalTo(Arrays.asList(
                KeyValue.pair(new Windowed<>("1", new TimeWindow(900, 1000)), ValueAndTimestamp.make("0+4", 1000L)),
                KeyValue.pair(new Windowed<>("1", new TimeWindow(1900, 2000)), ValueAndTimestamp.make("0+5", 2000L)))));
        }
    }
}
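Both reads above target the same physical store: getWindowStore returns the plain String aggregates, while getTimestampedWindowStore wraps each value in a ValueAndTimestamp. A short sketch of iterating the timestamped view and unwrapping it (store name "aggregated" as above; the surrounding driver setup and imports are assumed from the test):

final WindowStore<String, ValueAndTimestamp<String>> store = driver.getTimestampedWindowStore("aggregated");
try (final WindowStoreIterator<ValueAndTimestamp<String>> iter =
         store.fetch("1", ofEpochMilli(0), ofEpochMilli(2000L))) {
    while (iter.hasNext()) {
        final KeyValue<Long, ValueAndTimestamp<String>> entry = iter.next(); // key is the window start
        final String value = ValueAndTimestamp.getValueOrNull(entry.value);  // null-safe unwrap
        final long recordTimestamp = entry.value.timestamp();                // event time of the aggregate
    }
}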