Use of org.apache.kafka.streams.KeyValueTimestamp in project kafka by apache.
Class SuppressScenarioTest, method shouldWorkBeforeJoinRight.
@Test
public void shouldWorkBeforeJoinRight() {
final StreamsBuilder builder = new StreamsBuilder();
final KTable<String, String> left = builder.table("left", Consumed.with(Serdes.String(), Serdes.String()));
final KTable<String, String> right = builder
    .table("right", Consumed.with(Serdes.String(), Serdes.String()))
    .suppress(untilTimeLimit(ofMillis(10), unbounded()));
left
    .outerJoin(right, (l, r) -> String.format("(%s,%s)", l, r))
    .toStream()
    .to("output", Produced.with(Serdes.String(), Serdes.String()));
try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), config)) {
final TestInputTopic<String, String> inputTopicRight = driver.createInputTopic("right", STRING_SERIALIZER, STRING_SERIALIZER);
final TestInputTopic<String, String> inputTopicLeft = driver.createInputTopic("left", STRING_SERIALIZER, STRING_SERIALIZER);
inputTopicRight.pipeInput("B", "1", 0L);
inputTopicRight.pipeInput("A", "1", 0L);
// buffered, no output
verify(drainProducerRecords(driver, "output", STRING_DESERIALIZER, STRING_DESERIALIZER), emptyList());
inputTopicRight.pipeInput("tick", "tick", 10L);
// flush buffer
verify(drainProducerRecords(driver, "output", STRING_DESERIALIZER, STRING_DESERIALIZER), asList(
    new KeyValueTimestamp<>("A", "(null,1)", 0L),
    new KeyValueTimestamp<>("B", "(null,1)", 0L)));
inputTopicRight.pipeInput("A", "2", 11L);
// buffered, no output
verify(drainProducerRecords(driver, "output", STRING_DESERIALIZER, STRING_DESERIALIZER), emptyList());
inputTopicLeft.pipeInput("A", "a", 12L);
// should join with previously emitted right side
verify(drainProducerRecords(driver, "output", STRING_DESERIALIZER, STRING_DESERIALIZER), singletonList(new KeyValueTimestamp<>("A", "(a,1)", 12L)));
inputTopicLeft.pipeInput("B", "b", 12L);
// should view through to the parent KTable, since B is no longer buffered
verify(drainProducerRecords(driver, "output", STRING_DESERIALIZER, STRING_DESERIALIZER), singletonList(new KeyValueTimestamp<>("B", "(b,1)", 12L)));
inputTopicLeft.pipeInput("A", "b", 13L);
// should join with previously emitted right side
verify(drainProducerRecords(driver, "output", STRING_DESERIALIZER, STRING_DESERIALIZER), singletonList(new KeyValueTimestamp<>("A", "(b,1)", 13L)));
inputTopicRight.pipeInput("tick", "tick1", 21L);
verify(drainProducerRecords(driver, "output", STRING_DESERIALIZER, STRING_DESERIALIZER), asList(
    // just a testing artifact
    new KeyValueTimestamp<>("tick", "(null,tick1)", 21L),
    new KeyValueTimestamp<>("A", "(b,2)", 13L)));
}
}
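Both join scenarios drive their assertions through two private helpers of SuppressScenarioTest that this excerpt omits. A plausible sketch of what they do, assuming the standard TestOutputTopic API and the Hamcrest assertions used elsewhere in Kafka's tests (not the verbatim source):

// Sketch of the omitted test helpers.
private static <K, V> List<TestRecord<K, V>> drainProducerRecords(final TopologyTestDriver driver, final String topic, final Deserializer<K> keyDeserializer, final Deserializer<V> valueDeserializer) {
    // Read everything currently sitting in the output topic.
    return driver.createOutputTopic(topic, keyDeserializer, valueDeserializer).readRecordsToList();
}

private static <K, V> void verify(final List<TestRecord<K, V>> results, final List<KeyValueTimestamp<K, V>> expectedResults) {
    // Compare the drained records against the expected key/value/timestamp triples, in order.
    assertThat(results.size(), is(expectedResults.size()));
    for (int i = 0; i < results.size(); i++) {
        final TestRecord<K, V> actual = results.get(i);
        final KeyValueTimestamp<K, V> expected = expectedResults.get(i);
        assertThat(actual.getKey(), is(expected.key()));
        assertThat(actual.getValue(), is(expected.value()));
        assertThat(actual.timestamp(), is(expected.timestamp()));
    }
}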
Use of org.apache.kafka.streams.KeyValueTimestamp in project kafka by apache.
Class SuppressScenarioTest, method shouldImmediatelyEmitEventsWithZeroEmitAfter.
@Test
public void shouldImmediatelyEmitEventsWithZeroEmitAfter() {
final StreamsBuilder builder = new StreamsBuilder();
final KTable<String, Long> valueCounts = builder
    .table(
        "input",
        Consumed.with(STRING_SERDE, STRING_SERDE),
        Materialized.<String, String, KeyValueStore<Bytes, byte[]>>with(STRING_SERDE, STRING_SERDE)
            .withCachingDisabled()
            .withLoggingDisabled())
    .groupBy((k, v) -> new KeyValue<>(v, k), Grouped.with(STRING_SERDE, STRING_SERDE))
    .count();
valueCounts
    .suppress(untilTimeLimit(ZERO, unbounded()))
    .toStream()
    .to("output-suppressed", Produced.with(STRING_SERDE, Serdes.Long()));
valueCounts
    .toStream()
    .to("output-raw", Produced.with(STRING_SERDE, Serdes.Long()));
final Topology topology = builder.build();
try (final TopologyTestDriver driver = new TopologyTestDriver(topology, config)) {
final TestInputTopic<String, String> inputTopic = driver.createInputTopic("input", STRING_SERIALIZER, STRING_SERIALIZER);
inputTopic.pipeInput("k1", "v1", 0L);
inputTopic.pipeInput("k1", "v2", 1L);
inputTopic.pipeInput("k2", "v1", 2L);
verify(drainProducerRecords(driver, "output-raw", STRING_DESERIALIZER, LONG_DESERIALIZER), asList(
    new KeyValueTimestamp<>("v1", 1L, 0L),
    new KeyValueTimestamp<>("v1", 0L, 1L),
    new KeyValueTimestamp<>("v2", 1L, 1L),
    new KeyValueTimestamp<>("v1", 1L, 2L)));
verify(drainProducerRecords(driver, "output-suppressed", STRING_DESERIALIZER, LONG_DESERIALIZER), asList(
    new KeyValueTimestamp<>("v1", 1L, 0L),
    new KeyValueTimestamp<>("v1", 0L, 1L),
    new KeyValueTimestamp<>("v2", 1L, 1L),
    new KeyValueTimestamp<>("v1", 1L, 2L)));
inputTopic.pipeInput("x", "x", 3L);
verify(drainProducerRecords(driver, "output-raw", STRING_DESERIALIZER, LONG_DESERIALIZER), singletonList(new KeyValueTimestamp<>("x", 1L, 3L)));
verify(drainProducerRecords(driver, "output-suppressed", STRING_DESERIALIZER, LONG_DESERIALIZER), singletonList(new KeyValueTimestamp<>("x", 1L, 3L)));
inputTopic.pipeInput("x", "y", 4L);
verify(drainProducerRecords(driver, "output-raw", STRING_DESERIALIZER, LONG_DESERIALIZER), asList(
    new KeyValueTimestamp<>("x", 0L, 4L),
    new KeyValueTimestamp<>("y", 1L, 4L)));
verify(drainProducerRecords(driver, "output-suppressed", STRING_DESERIALIZER, LONG_DESERIALIZER), asList(
    new KeyValueTimestamp<>("x", 0L, 4L),
    new KeyValueTimestamp<>("y", 1L, 4L)));
}
}
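The ZERO time limit makes the suppression buffer a pass-through, which is why the raw and suppressed topics above carry identical records. For contrast, a minimal sketch with a non-zero limit (hypothetical topic names), which would instead hold each key's updates and emit only the latest value once 100 ms of stream time elapse:

// Hypothetical variant: debounce updates per key for 100 ms of stream time.
final StreamsBuilder debounceBuilder = new StreamsBuilder();
debounceBuilder
    .table("words", Consumed.with(Serdes.String(), Serdes.String()))
    .groupBy((k, v) -> new KeyValue<>(v, k), Grouped.with(Serdes.String(), Serdes.String()))
    .count()
    .suppress(untilTimeLimit(ofMillis(100), unbounded()))
    .toStream()
    .to("debounced-counts", Produced.with(Serdes.String(), Serdes.Long()));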
Use of org.apache.kafka.streams.KeyValueTimestamp in project kafka by apache.
Class SuppressScenarioTest, method shouldWorkBeforeJoinLeft.
@Test
public void shouldWorkBeforeJoinLeft() {
final StreamsBuilder builder = new StreamsBuilder();
final KTable<String, String> left = builder
    .table("left", Consumed.with(Serdes.String(), Serdes.String()))
    .suppress(untilTimeLimit(ofMillis(10), unbounded()));
final KTable<String, String> right = builder.table("right", Consumed.with(Serdes.String(), Serdes.String()));
left
    .outerJoin(right, (l, r) -> String.format("(%s,%s)", l, r))
    .toStream()
    .to("output", Produced.with(Serdes.String(), Serdes.String()));
final Topology topology = builder.build();
try (final TopologyTestDriver driver = new TopologyTestDriver(topology, config)) {
final TestInputTopic<String, String> inputTopicRight = driver.createInputTopic("right", STRING_SERIALIZER, STRING_SERIALIZER);
final TestInputTopic<String, String> inputTopicLeft = driver.createInputTopic("left", STRING_SERIALIZER, STRING_SERIALIZER);
inputTopicLeft.pipeInput("B", "1", 0L);
inputTopicLeft.pipeInput("A", "1", 0L);
// buffered, no output
verify(drainProducerRecords(driver, "output", STRING_DESERIALIZER, STRING_DESERIALIZER), emptyList());
inputTopicLeft.pipeInput("tick", "tick", 10L);
// flush buffer
verify(drainProducerRecords(driver, "output", STRING_DESERIALIZER, STRING_DESERIALIZER), asList(
    new KeyValueTimestamp<>("A", "(1,null)", 0L),
    new KeyValueTimestamp<>("B", "(1,null)", 0L)));
inputTopicLeft.pipeInput("A", "2", 11L);
// buffered, no output
verify(drainProducerRecords(driver, "output", STRING_DESERIALIZER, STRING_DESERIALIZER), emptyList());
inputTopicRight.pipeInput("A", "a", 12L);
// should join with previously emitted left side
verify(drainProducerRecords(driver, "output", STRING_DESERIALIZER, STRING_DESERIALIZER), singletonList(new KeyValueTimestamp<>("A", "(1,a)", 12L)));
inputTopicRight.pipeInput("B", "b", 12L);
// should view through to the parent KTable, since B is no longer buffered
verify(drainProducerRecords(driver, "output", STRING_DESERIALIZER, STRING_DESERIALIZER), singletonList(new KeyValueTimestamp<>("B", "(1,b)", 12L)));
inputTopicRight.pipeInput("A", "b", 13L);
// should join with previously emitted left side
verify(drainProducerRecords(driver, "output", STRING_DESERIALIZER, STRING_DESERIALIZER), singletonList(new KeyValueTimestamp<>("A", "(1,b)", 13L)));
inputTopicLeft.pipeInput("tick", "tick1", 21L);
verify(drainProducerRecords(driver, "output", STRING_DESERIALIZER, STRING_DESERIALIZER), asList(
    // just a testing artifact
    new KeyValueTimestamp<>("tick", "(tick1,null)", 21L),
    new KeyValueTimestamp<>("A", "(2,b)", 13L)));
}
}
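This left-side test mirrors shouldWorkBeforeJoinRight exactly, with the suppression moved to the left input and the join-result format reversed. KeyValueTimestamp itself, the type these expectations are built from, is just an immutable key/value/timestamp triple; a minimal usage sketch:

// KeyValueTimestamp bundles a key and value with the record's timestamp.
final KeyValueTimestamp<String, String> expected = new KeyValueTimestamp<>("A", "(1,a)", 12L);
assertThat(expected.key(), is("A"));
assertThat(expected.value(), is("(1,a)"));
assertThat(expected.timestamp(), is(12L));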
Use of org.apache.kafka.streams.KeyValueTimestamp in project kafka by apache.
Class SuppressScenarioTest, method shouldSupportFinalResultsForSlidingWindows.
@Test
public void shouldSupportFinalResultsForSlidingWindows() {
final StreamsBuilder builder = new StreamsBuilder();
final KTable<Windowed<String>, Long> valueCounts = builder
    .stream("input", Consumed.with(STRING_SERDE, STRING_SERDE))
    .groupBy((String k, String v) -> k, Grouped.with(STRING_SERDE, STRING_SERDE))
    .windowedBy(SlidingWindows.withTimeDifferenceAndGrace(ofMillis(5L), ofMillis(15L)))
    .count(Materialized.<String, Long, WindowStore<Bytes, byte[]>>as("counts")
        .withCachingDisabled()
        .withKeySerde(STRING_SERDE));
valueCounts
    .suppress(untilWindowCloses(unbounded()))
    .toStream()
    .map((final Windowed<String> k, final Long v) -> new KeyValue<>(k.toString(), v))
    .to("output-suppressed", Produced.with(STRING_SERDE, Serdes.Long()));
valueCounts
    .toStream()
    .map((final Windowed<String> k, final Long v) -> new KeyValue<>(k.toString(), v))
    .to("output-raw", Produced.with(STRING_SERDE, Serdes.Long()));
final Topology topology = builder.build();
System.out.println(topology.describe());
try (final TopologyTestDriver driver = new TopologyTestDriver(topology, config)) {
final TestInputTopic<String, String> inputTopic = driver.createInputTopic("input", STRING_SERIALIZER, STRING_SERIALIZER);
inputTopic.pipeInput("k1", "v1", 10L);
inputTopic.pipeInput("k1", "v1", 11L);
inputTopic.pipeInput("k1", "v1", 10L);
inputTopic.pipeInput("k1", "v1", 13L);
inputTopic.pipeInput("k1", "v1", 10L);
inputTopic.pipeInput("k1", "v1", 24L);
// this update should get dropped, since the previous event advanced the stream time and closed the window.
inputTopic.pipeInput("k1", "v1", 5L);
inputTopic.pipeInput("k1", "v1", 7L);
// final record to advance stream time and flush windows
inputTopic.pipeInput("k1", "v1", 90L);
final Comparator<TestRecord<String, Long>> comparator =
    Comparator.comparing((TestRecord<String, Long> o) -> o.getKey())
              .thenComparing((TestRecord<String, Long> o) -> o.timestamp());
final List<TestRecord<String, Long>> actual = drainProducerRecords(driver, "output-raw", STRING_DESERIALIZER, LONG_DESERIALIZER);
actual.sort(comparator);
verify(actual, asList(
    // right window for k1@10 created when k1@11 is processed
    new KeyValueTimestamp<>("[k1@11/16]", 1L, 11L),
    // right window for k1@10 updated when k1@13 is processed
    new KeyValueTimestamp<>("[k1@11/16]", 2L, 13L),
    // right window for k1@11 created when k1@13 is processed
    new KeyValueTimestamp<>("[k1@12/17]", 1L, 13L),
    // left window for k1@24 created when k1@24 is processed
    new KeyValueTimestamp<>("[k1@19/24]", 1L, 24L),
    // left window for k1@10 created when k1@10 is processed
    new KeyValueTimestamp<>("[k1@5/10]", 1L, 10L),
    // left window for k1@10 updated when k1@10 is processed
    new KeyValueTimestamp<>("[k1@5/10]", 2L, 10L),
    // left window for k1@10 updated when k1@10 is processed
    new KeyValueTimestamp<>("[k1@5/10]", 3L, 10L),
    // left window for k1@10 updated when k1@5 is processed
    new KeyValueTimestamp<>("[k1@5/10]", 4L, 10L),
    // left window for k1@10 updated when k1@7 is processed
    new KeyValueTimestamp<>("[k1@5/10]", 5L, 10L),
    // left window for k1@11 created when k1@11 is processed
    new KeyValueTimestamp<>("[k1@6/11]", 2L, 11L),
    // left window for k1@11 updated when k1@10 is processed
    new KeyValueTimestamp<>("[k1@6/11]", 3L, 11L),
    // left window for k1@11 updated when k1@10 is processed
    new KeyValueTimestamp<>("[k1@6/11]", 4L, 11L),
    // left window for k1@11 updated when k1@7 is processed
    new KeyValueTimestamp<>("[k1@6/11]", 5L, 11L),
    // left window for k1@13 created when k1@13 is processed
    new KeyValueTimestamp<>("[k1@8/13]", 4L, 13L),
    // left window for k1@13 updated when k1@10 is processed
    new KeyValueTimestamp<>("[k1@8/13]", 5L, 13L),
    // right window for k1@90 created when k1@90 is processed
    new KeyValueTimestamp<>("[k1@85/90]", 1L, 90L)));
verify(drainProducerRecords(driver, "output-suppressed", STRING_DESERIALIZER, LONG_DESERIALIZER), asList(
    new KeyValueTimestamp<>("[k1@5/10]", 5L, 10L),
    new KeyValueTimestamp<>("[k1@6/11]", 5L, 11L),
    new KeyValueTimestamp<>("[k1@8/13]", 5L, 13L),
    new KeyValueTimestamp<>("[k1@11/16]", 2L, 13L),
    new KeyValueTimestamp<>("[k1@12/17]", 1L, 13L),
    new KeyValueTimestamp<>("[k1@19/24]", 1L, 24L)));
}
}
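The [key@start/end] labels come from Windowed.toString(). For a record at time t under a sliding window with a 5 ms time difference, the record's own window spans [t - 5, t], and each later record within range creates a right window spanning [t + 1, t + 6]. A small illustrative helper (not part of the Kafka API) that reproduces the labels asserted above:

// Hypothetical helpers that mimic how the sliding-window labels are derived.
static String leftWindowLabel(final String key, final long t, final long timeDifference) {
    // The record's own window ends at its timestamp.
    return String.format("[%s@%d/%d]", key, t - timeDifference, t);
}
static String rightWindowLabel(final String key, final long t, final long timeDifference) {
    // A right window opens just after the record and extends one time difference further.
    return String.format("[%s@%d/%d]", key, t + 1, t + 1 + timeDifference);
}
// leftWindowLabel("k1", 10L, 5L)  -> "[k1@5/10]"
// rightWindowLabel("k1", 10L, 5L) -> "[k1@11/16]"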
Use of org.apache.kafka.streams.KeyValueTimestamp in project kafka by apache.
Class SuppressScenarioTest, method shouldSupportFinalResultsForTimeWindowsWithLargeJump.
@Test
public void shouldSupportFinalResultsForTimeWindowsWithLargeJump() {
final StreamsBuilder builder = new StreamsBuilder();
final KTable<Windowed<String>, Long> valueCounts = builder
    .stream("input", Consumed.with(STRING_SERDE, STRING_SERDE))
    .groupBy((String k, String v) -> k, Grouped.with(STRING_SERDE, STRING_SERDE))
    .windowedBy(TimeWindows.of(ofMillis(2L)).grace(ofMillis(2L)))
    .count(Materialized.<String, Long, WindowStore<Bytes, byte[]>>as("counts")
        .withCachingDisabled()
        .withKeySerde(STRING_SERDE));
valueCounts
    .suppress(untilWindowCloses(unbounded()))
    .toStream()
    .map((final Windowed<String> k, final Long v) -> new KeyValue<>(k.toString(), v))
    .to("output-suppressed", Produced.with(STRING_SERDE, Serdes.Long()));
valueCounts
    .toStream()
    .map((final Windowed<String> k, final Long v) -> new KeyValue<>(k.toString(), v))
    .to("output-raw", Produced.with(STRING_SERDE, Serdes.Long()));
final Topology topology = builder.build();
System.out.println(topology.describe());
try (final TopologyTestDriver driver = new TopologyTestDriver(topology, config)) {
final TestInputTopic<String, String> inputTopic = driver.createInputTopic("input", STRING_SERIALIZER, STRING_SERIALIZER);
inputTopic.pipeInput("k1", "v1", 0L);
inputTopic.pipeInput("k1", "v1", 1L);
inputTopic.pipeInput("k1", "v1", 2L);
inputTopic.pipeInput("k1", "v1", 0L);
inputTopic.pipeInput("k1", "v1", 3L);
inputTopic.pipeInput("k1", "v1", 0L);
inputTopic.pipeInput("k1", "v1", 4L);
// this update should get dropped, since the previous event advanced the stream time and closed the window.
inputTopic.pipeInput("k1", "v1", 0L);
inputTopic.pipeInput("k1", "v1", 30L);
verify(drainProducerRecords(driver, "output-raw", STRING_DESERIALIZER, LONG_DESERIALIZER), asList(
    new KeyValueTimestamp<>("[k1@0/2]", 1L, 0L),
    new KeyValueTimestamp<>("[k1@0/2]", 2L, 1L),
    new KeyValueTimestamp<>("[k1@2/4]", 1L, 2L),
    new KeyValueTimestamp<>("[k1@0/2]", 3L, 1L),
    new KeyValueTimestamp<>("[k1@2/4]", 2L, 3L),
    new KeyValueTimestamp<>("[k1@0/2]", 4L, 1L),
    new KeyValueTimestamp<>("[k1@4/6]", 1L, 4L),
    new KeyValueTimestamp<>("[k1@30/32]", 1L, 30L)));
verify(drainProducerRecords(driver, "output-suppressed", STRING_DESERIALIZER, LONG_DESERIALIZER), asList(
    new KeyValueTimestamp<>("[k1@0/2]", 4L, 1L),
    new KeyValueTimestamp<>("[k1@2/4]", 2L, 3L),
    new KeyValueTimestamp<>("[k1@4/6]", 1L, 4L)));
}
}
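untilWindowCloses emits a window's final count only once stream time passes the window's close time, i.e. its end plus the grace period. A small illustrative helper (not a Kafka API) showing the arithmetic behind this test's expectations:

// Hypothetical helper: when does a time window stop accepting updates?
static long windowCloseTime(final long windowStart, final long windowSize, final long gracePeriod) {
    return windowStart + windowSize + gracePeriod;
}
// windowCloseTime(0L, 2L, 2L) == 4L: once the k1@4 record advances stream time
// to 4, the [k1@0/2] window is treated as closed, so the late k1@0 record is
// dropped, and the jump to t=30 flushes every remaining window's final result.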