Use of org.apache.kafka.streams.state.WindowBytesStoreSupplier in project kafka by apache.
The class MockProcessorContextStateStoreTest, method parameters.
public static Stream<Arguments> parameters() {
    final List<Boolean> booleans = asList(true, false);
    final List<Arguments> values = new ArrayList<>();
    for (final Boolean timestamped : booleans) {
        for (final Boolean caching : booleans) {
            for (final Boolean logging : booleans) {
                final List<KeyValueBytesStoreSupplier> keyValueBytesStoreSuppliers = asList(
                    Stores.inMemoryKeyValueStore("kv" + timestamped + caching + logging),
                    Stores.persistentKeyValueStore("kv" + timestamped + caching + logging),
                    Stores.persistentTimestampedKeyValueStore("kv" + timestamped + caching + logging)
                );
                for (final KeyValueBytesStoreSupplier supplier : keyValueBytesStoreSuppliers) {
                    final StoreBuilder<? extends KeyValueStore<String, ?>> builder;
                    if (timestamped) {
                        builder = Stores.timestampedKeyValueStoreBuilder(supplier, Serdes.String(), Serdes.Long());
                    } else {
                        builder = Stores.keyValueStoreBuilder(supplier, Serdes.String(), Serdes.Long());
                    }
                    if (caching) {
                        builder.withCachingEnabled();
                    } else {
                        builder.withCachingDisabled();
                    }
                    if (logging) {
                        builder.withLoggingEnabled(Collections.emptyMap());
                    } else {
                        builder.withLoggingDisabled();
                    }
                    values.add(Arguments.of(builder, timestamped, caching, logging));
                }
            }
        }
    }
    for (final Boolean timestamped : booleans) {
        for (final Boolean caching : booleans) {
            for (final Boolean logging : booleans) {
                final List<WindowBytesStoreSupplier> windowBytesStoreSuppliers = asList(
                    Stores.inMemoryWindowStore("w" + timestamped + caching + logging, Duration.ofSeconds(1), Duration.ofSeconds(1), false),
                    Stores.persistentWindowStore("w" + timestamped + caching + logging, Duration.ofSeconds(1), Duration.ofSeconds(1), false),
                    Stores.persistentTimestampedWindowStore("w" + timestamped + caching + logging, Duration.ofSeconds(1), Duration.ofSeconds(1), false)
                );
                for (final WindowBytesStoreSupplier supplier : windowBytesStoreSuppliers) {
                    final StoreBuilder<? extends WindowStore<String, ?>> builder;
                    if (timestamped) {
                        builder = Stores.timestampedWindowStoreBuilder(supplier, Serdes.String(), Serdes.Long());
                    } else {
                        builder = Stores.windowStoreBuilder(supplier, Serdes.String(), Serdes.Long());
                    }
                    if (caching) {
                        builder.withCachingEnabled();
                    } else {
                        builder.withCachingDisabled();
                    }
                    if (logging) {
                        builder.withLoggingEnabled(Collections.emptyMap());
                    } else {
                        builder.withLoggingDisabled();
                    }
                    values.add(Arguments.of(builder, timestamped, caching, logging));
                }
            }
        }
    }
    for (final Boolean caching : booleans) {
        for (final Boolean logging : booleans) {
            final List<SessionBytesStoreSupplier> sessionBytesStoreSuppliers = asList(
                Stores.inMemorySessionStore("s" + caching + logging, Duration.ofSeconds(1)),
                Stores.persistentSessionStore("s" + caching + logging, Duration.ofSeconds(1))
            );
            for (final SessionBytesStoreSupplier supplier : sessionBytesStoreSuppliers) {
                final StoreBuilder<? extends SessionStore<String, ?>> builder =
                    Stores.sessionStoreBuilder(supplier, Serdes.String(), Serdes.Long());
                if (caching) {
                    builder.withCachingEnabled();
                } else {
                    builder.withCachingDisabled();
                }
                if (logging) {
                    builder.withLoggingEnabled(Collections.emptyMap());
                } else {
                    builder.withLoggingDisabled();
                }
                values.add(Arguments.of(builder, false, caching, logging));
            }
        }
    }
    return values.stream();
}
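The parameters() factory above builds the full cross product of key-value, window, and session store suppliers with every combination of the timestamped, caching, and logging flags, for use with JUnit 5's @MethodSource. As a minimal sketch, a parameterized test consuming this factory could look like the following; the test method name and body are hypothetical, not the actual methods in MockProcessorContextStateStoreTest.

import static org.junit.jupiter.api.Assertions.assertNotNull;

import org.apache.kafka.streams.processor.StateStore;
import org.apache.kafka.streams.state.StoreBuilder;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;

@ParameterizedTest(name = "builder = {0}, timestamped = {1}, caching = {2}, logging = {3}")
@MethodSource("parameters")
public void shouldBuildWorkingStore(final StoreBuilder<?> builder,
                                    final boolean timestamped,
                                    final boolean caching,
                                    final boolean logging) {
    // Every argument tuple carries a fully configured StoreBuilder plus the
    // flags used to configure it, so the test body only needs to build the
    // store and exercise it (for example against a MockProcessorContext).
    final StateStore store = builder.build();
    assertNotNull(store);
}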
Use of org.apache.kafka.streams.state.WindowBytesStoreSupplier in project apache-kafka-on-k8s by banzaicloud.
The class TimeWindowedKStreamImpl, method materialize.
private <VR> StoreBuilder<WindowStore<K, VR>> materialize(final MaterializedInternal<K, VR, WindowStore<Bytes, byte[]>> materialized) {
    WindowBytesStoreSupplier supplier = (WindowBytesStoreSupplier) materialized.storeSupplier();
    if (supplier == null) {
        supplier = Stores.persistentWindowStore(materialized.storeName(), windows.maintainMs(), windows.segments, windows.size(), false);
    }
    final StoreBuilder<WindowStore<K, VR>> builder = Stores.windowStoreBuilder(supplier, materialized.keySerde(), materialized.valueSerde());
    if (materialized.loggingEnabled()) {
        builder.withLoggingEnabled(materialized.logConfig());
    } else {
        builder.withLoggingDisabled();
    }
    if (materialized.cachingEnabled()) {
        builder.withCachingEnabled();
    }
    return builder;
}
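This materialize() helper comes from an older code line (the banzaicloud fork), where Stores.persistentWindowStore still took retention, segment count, and window size as raw numbers. As a rough sketch of the same wiring against the current public Stores API, which takes Duration arguments, one might write something like the following; the store name, retention, and window size are placeholder values, not anything mandated by the snippet above.

import java.time.Duration;
import java.util.Collections;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.state.StoreBuilder;
import org.apache.kafka.streams.state.Stores;
import org.apache.kafka.streams.state.WindowBytesStoreSupplier;
import org.apache.kafka.streams.state.WindowStore;

public class WindowStoreWiringSketch {

    // Builds a logged, cached window store the same way materialize() does:
    // pick a supplier, wrap it in a builder, then apply the logging and
    // caching switches.
    public static StoreBuilder<WindowStore<String, Long>> exampleBuilder() {
        final WindowBytesStoreSupplier supplier =
            Stores.persistentWindowStore("counts-store", Duration.ofHours(1), Duration.ofMinutes(5), false);
        return Stores.windowStoreBuilder(supplier, Serdes.String(), Serdes.Long())
                     .withLoggingEnabled(Collections.emptyMap())
                     .withCachingEnabled();
    }
}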
Use of org.apache.kafka.streams.state.WindowBytesStoreSupplier in project kafka by apache.
The class KStreamImplJoin, method join.
public <K, V1, V2, VOut> KStream<K, VOut> join(final KStream<K, V1> lhs,
                                               final KStream<K, V2> other,
                                               final ValueJoinerWithKey<? super K, ? super V1, ? super V2, ? extends VOut> joiner,
                                               final JoinWindows windows,
                                               final StreamJoined<K, V1, V2> streamJoined) {
    final StreamJoinedInternal<K, V1, V2> streamJoinedInternal = new StreamJoinedInternal<>(streamJoined);
    final NamedInternal renamed = new NamedInternal(streamJoinedInternal.name());
    final String joinThisSuffix = rightOuter ? "-outer-this-join" : "-this-join";
    final String joinOtherSuffix = leftOuter ? "-outer-other-join" : "-other-join";
    final String thisWindowStreamProcessorName = renamed.suffixWithOrElseGet("-this-windowed", builder, KStreamImpl.WINDOWED_NAME);
    final String otherWindowStreamProcessorName = renamed.suffixWithOrElseGet("-other-windowed", builder, KStreamImpl.WINDOWED_NAME);
    final String joinThisGeneratedName = rightOuter ? builder.newProcessorName(KStreamImpl.OUTERTHIS_NAME) : builder.newProcessorName(KStreamImpl.JOINTHIS_NAME);
    final String joinOtherGeneratedName = leftOuter ? builder.newProcessorName(KStreamImpl.OUTEROTHER_NAME) : builder.newProcessorName(KStreamImpl.JOINOTHER_NAME);
    final String joinThisName = renamed.suffixWithOrElseGet(joinThisSuffix, joinThisGeneratedName);
    final String joinOtherName = renamed.suffixWithOrElseGet(joinOtherSuffix, joinOtherGeneratedName);
    final String joinMergeName = renamed.suffixWithOrElseGet("-merge", builder, KStreamImpl.MERGE_NAME);
    final GraphNode thisGraphNode = ((AbstractStream<?, ?>) lhs).graphNode;
    final GraphNode otherGraphNode = ((AbstractStream<?, ?>) other).graphNode;
    final StoreBuilder<WindowStore<K, V1>> thisWindowStore;
    final StoreBuilder<WindowStore<K, V2>> otherWindowStore;
    final String userProvidedBaseStoreName = streamJoinedInternal.storeName();
    final WindowBytesStoreSupplier thisStoreSupplier = streamJoinedInternal.thisStoreSupplier();
    final WindowBytesStoreSupplier otherStoreSupplier = streamJoinedInternal.otherStoreSupplier();
    assertUniqueStoreNames(thisStoreSupplier, otherStoreSupplier);
    if (thisStoreSupplier == null) {
        final String thisJoinStoreName = userProvidedBaseStoreName == null ? joinThisGeneratedName : userProvidedBaseStoreName + joinThisSuffix;
        thisWindowStore = joinWindowStoreBuilder(thisJoinStoreName, windows, streamJoinedInternal.keySerde(), streamJoinedInternal.valueSerde(), streamJoinedInternal.loggingEnabled(), streamJoinedInternal.logConfig());
    } else {
        assertWindowSettings(thisStoreSupplier, windows);
        thisWindowStore = joinWindowStoreBuilderFromSupplier(thisStoreSupplier, streamJoinedInternal.keySerde(), streamJoinedInternal.valueSerde());
    }
    if (otherStoreSupplier == null) {
        final String otherJoinStoreName = userProvidedBaseStoreName == null ? joinOtherGeneratedName : userProvidedBaseStoreName + joinOtherSuffix;
        otherWindowStore = joinWindowStoreBuilder(otherJoinStoreName, windows, streamJoinedInternal.keySerde(), streamJoinedInternal.otherValueSerde(), streamJoinedInternal.loggingEnabled(), streamJoinedInternal.logConfig());
    } else {
        assertWindowSettings(otherStoreSupplier, windows);
        otherWindowStore = joinWindowStoreBuilderFromSupplier(otherStoreSupplier, streamJoinedInternal.keySerde(), streamJoinedInternal.otherValueSerde());
    }
    final KStreamJoinWindow<K, V1> thisWindowedStream = new KStreamJoinWindow<>(thisWindowStore.name());
    final ProcessorParameters<K, V1, ?, ?> thisWindowStreamProcessorParams = new ProcessorParameters<>(thisWindowedStream, thisWindowStreamProcessorName);
    final ProcessorGraphNode<K, V1> thisWindowedStreamsNode = new ProcessorGraphNode<>(thisWindowStreamProcessorName, thisWindowStreamProcessorParams);
    builder.addGraphNode(thisGraphNode, thisWindowedStreamsNode);
    final KStreamJoinWindow<K, V2> otherWindowedStream = new KStreamJoinWindow<>(otherWindowStore.name());
    final ProcessorParameters<K, V2, ?, ?> otherWindowStreamProcessorParams = new ProcessorParameters<>(otherWindowedStream, otherWindowStreamProcessorName);
    final ProcessorGraphNode<K, V2> otherWindowedStreamsNode = new ProcessorGraphNode<>(otherWindowStreamProcessorName, otherWindowStreamProcessorParams);
    builder.addGraphNode(otherGraphNode, otherWindowedStreamsNode);
    Optional<StoreBuilder<KeyValueStore<TimestampedKeyAndJoinSide<K>, LeftOrRightValue<V1, V2>>>> outerJoinWindowStore = Optional.empty();
    if (leftOuter) {
        outerJoinWindowStore = Optional.of(sharedOuterJoinWindowStoreBuilder(windows, streamJoinedInternal, joinThisGeneratedName));
    }
    // Time-shared between joins to keep track of the maximum stream time
    final TimeTracker sharedTimeTracker = new TimeTracker();
    final JoinWindowsInternal internalWindows = new JoinWindowsInternal(windows);
    final KStreamKStreamJoin<K, V1, V2, VOut> joinThis = new KStreamKStreamJoin<>(true, otherWindowStore.name(), internalWindows, joiner, leftOuter, outerJoinWindowStore.map(StoreBuilder::name), sharedTimeTracker);
    final KStreamKStreamJoin<K, V2, V1, VOut> joinOther = new KStreamKStreamJoin<>(false, thisWindowStore.name(), internalWindows, AbstractStream.reverseJoinerWithKey(joiner), rightOuter, outerJoinWindowStore.map(StoreBuilder::name), sharedTimeTracker);
    final PassThrough<K, VOut> joinMerge = new PassThrough<>();
    final StreamStreamJoinNode.StreamStreamJoinNodeBuilder<K, V1, V2, VOut> joinBuilder = StreamStreamJoinNode.streamStreamJoinNodeBuilder();
    final ProcessorParameters<K, V1, ?, ?> joinThisProcessorParams = new ProcessorParameters<>(joinThis, joinThisName);
    final ProcessorParameters<K, V2, ?, ?> joinOtherProcessorParams = new ProcessorParameters<>(joinOther, joinOtherName);
    final ProcessorParameters<K, VOut, ?, ?> joinMergeProcessorParams = new ProcessorParameters<>(joinMerge, joinMergeName);
    joinBuilder.withJoinMergeProcessorParameters(joinMergeProcessorParams)
               .withJoinThisProcessorParameters(joinThisProcessorParams)
               .withJoinOtherProcessorParameters(joinOtherProcessorParams)
               .withThisWindowStoreBuilder(thisWindowStore)
               .withOtherWindowStoreBuilder(otherWindowStore)
               .withThisWindowedStreamProcessorParameters(thisWindowStreamProcessorParams)
               .withOtherWindowedStreamProcessorParameters(otherWindowStreamProcessorParams)
               .withOuterJoinWindowStoreBuilder(outerJoinWindowStore)
               .withValueJoiner(joiner)
               .withNodeName(joinMergeName);
    if (internalWindows.spuriousResultFixEnabled()) {
        joinBuilder.withSpuriousResultFixEnabled();
    }
    final GraphNode joinGraphNode = joinBuilder.build();
    builder.addGraphNode(Arrays.asList(thisGraphNode, otherGraphNode), joinGraphNode);
    final Set<String> allSourceNodes = new HashSet<>(((KStreamImpl<K, V1>) lhs).subTopologySourceNodes);
    allSourceNodes.addAll(((KStreamImpl<K, V2>) other).subTopologySourceNodes);
    // also for key serde we do not inherit from either since we cannot tell if these two serdes are different
    return new KStreamImpl<>(joinMergeName, streamJoinedInternal.keySerde(), null, allSourceNodes, false, joinGraphNode, builder);
}
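KStreamImplJoin.join is internal plumbing; the WindowBytesStoreSuppliers it consumes arrive through the public StreamJoined config on KStream#join. Below is a minimal sketch of a caller supplying its own join stores; topic names, serdes, and durations are illustrative, and note that assertWindowSettings above requires the suppliers' window size and retention to be compatible with the JoinWindows and retainDuplicates to be enabled.

import java.time.Duration;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.kstream.Consumed;
import org.apache.kafka.streams.kstream.JoinWindows;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.Produced;
import org.apache.kafka.streams.kstream.StreamJoined;
import org.apache.kafka.streams.state.Stores;
import org.apache.kafka.streams.state.WindowBytesStoreSupplier;

public class CustomJoinStoresSketch {

    public static void main(final String[] args) {
        final StreamsBuilder builder = new StreamsBuilder();
        final KStream<String, String> left = builder.stream("left-topic", Consumed.with(Serdes.String(), Serdes.String()));
        final KStream<String, Long> right = builder.stream("right-topic", Consumed.with(Serdes.String(), Serdes.Long()));

        // A 5-minute time difference on each side gives JoinWindows#size() == 10 minutes,
        // so the suppliers use a 10-minute window size, a matching retention (size + grace),
        // and retainDuplicates = true as join stores require.
        final JoinWindows windows = JoinWindows.ofTimeDifferenceWithNoGrace(Duration.ofMinutes(5));
        final WindowBytesStoreSupplier thisStore =
            Stores.inMemoryWindowStore("join-this-store", Duration.ofMinutes(10), Duration.ofMinutes(10), true);
        final WindowBytesStoreSupplier otherStore =
            Stores.inMemoryWindowStore("join-other-store", Duration.ofMinutes(10), Duration.ofMinutes(10), true);

        final KStream<String, String> joined = left.join(
            right,
            (leftValue, rightValue) -> leftValue + "/" + rightValue,
            windows,
            StreamJoined.with(Serdes.String(), Serdes.String(), Serdes.Long())
                        .withThisStoreSupplier(thisStore)
                        .withOtherStoreSupplier(otherStore));
        joined.to("joined-topic", Produced.with(Serdes.String(), Serdes.String()));
    }
}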
Use of org.apache.kafka.streams.state.WindowBytesStoreSupplier in project kafka by apache.
The class KStreamSlidingWindowAggregateTest, method shouldLogAndMeterWhenSkippingExpiredWindowByGrace.
@Test
public void shouldLogAndMeterWhenSkippingExpiredWindowByGrace() {
    final String builtInMetricsVersion = StreamsConfig.METRICS_LATEST;
    final StreamsBuilder builder = new StreamsBuilder();
    final String topic = "topic";
    final WindowBytesStoreSupplier storeSupplier = inOrderIterator
        ? new InOrderMemoryWindowStoreSupplier("InOrder", 50000L, 10L, false)
        : Stores.inMemoryWindowStore("Reverse", Duration.ofMillis(50000), Duration.ofMillis(10), false);
    final KStream<String, String> stream1 = builder.stream(topic, Consumed.with(Serdes.String(), Serdes.String()));
    stream1.groupByKey(Grouped.with(Serdes.String(), Serdes.String()))
           .windowedBy(SlidingWindows.ofTimeDifferenceAndGrace(ofMillis(10), ofMillis(90)))
           .aggregate(MockInitializer.STRING_INIT, MockAggregator.TOSTRING_ADDER, Materialized.as(storeSupplier))
           .toStream()
           .to("output");
    props.setProperty(StreamsConfig.BUILT_IN_METRICS_VERSION_CONFIG, builtInMetricsVersion);
    try (final LogCaptureAppender appender = LogCaptureAppender.createAndRegister(KStreamSlidingWindowAggregate.class);
         final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        final TestInputTopic<String, String> inputTopic = driver.createInputTopic(topic, new StringSerializer(), new StringSerializer());
        inputTopic.pipeInput("k", "100", 200L);
        inputTopic.pipeInput("k", "0", 100L);
        inputTopic.pipeInput("k", "1", 101L);
        inputTopic.pipeInput("k", "2", 102L);
        inputTopic.pipeInput("k", "3", 103L);
        inputTopic.pipeInput("k", "4", 104L);
        inputTopic.pipeInput("k", "5", 105L);
        inputTopic.pipeInput("k", "6", 15L);
        assertLatenessMetrics(driver, is(7.0), is(185.0), is(96.25));
        assertThat(appender.getMessages(), hasItems(
            // left window for k@100
            "Skipping record for expired window. topic=[topic] partition=[0] offset=[1] timestamp=[100] window=[90,100] expiration=[110] streamTime=[200]",
            // left window for k@101
            "Skipping record for expired window. topic=[topic] partition=[0] offset=[2] timestamp=[101] window=[91,101] expiration=[110] streamTime=[200]",
            // left window for k@102
            "Skipping record for expired window. topic=[topic] partition=[0] offset=[3] timestamp=[102] window=[92,102] expiration=[110] streamTime=[200]",
            // left window for k@103
            "Skipping record for expired window. topic=[topic] partition=[0] offset=[4] timestamp=[103] window=[93,103] expiration=[110] streamTime=[200]",
            // left window for k@104
            "Skipping record for expired window. topic=[topic] partition=[0] offset=[5] timestamp=[104] window=[94,104] expiration=[110] streamTime=[200]",
            // left window for k@105
            "Skipping record for expired window. topic=[topic] partition=[0] offset=[6] timestamp=[105] window=[95,105] expiration=[110] streamTime=[200]",
            // left window for k@15
            "Skipping record for expired window. topic=[topic] partition=[0] offset=[7] timestamp=[15] window=[5,15] expiration=[110] streamTime=[200]"));
        final TestOutputTopic<Windowed<String>, String> outputTopic =
            driver.createOutputTopic("output", new TimeWindowedDeserializer<>(new StringDeserializer(), 10L), new StringDeserializer());
        assertThat(outputTopic.readRecord(), equalTo(new TestRecord<>(new Windowed<>("k", new TimeWindow(190, 200)), "0+100", null, 200L)));
        assertTrue(outputTopic.isEmpty());
    }
}
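The dropped records in this test follow directly from the grace-period arithmetic: the first record advances stream time to 200, and with a 90 ms grace period any window whose end falls before 200 - 90 = 110 is already closed. A small illustrative fragment of that arithmetic (not part of the test itself, values mirror the log messages asserted above):

// Illustrative arithmetic only.
final long observedStreamTime = 200L;                       // set by the first record, "k"@200
final long gracePeriodMs = 90L;                             // SlidingWindows.ofTimeDifferenceAndGrace(ofMillis(10), ofMillis(90))
final long expiration = observedStreamTime - gracePeriodMs; // 110, i.e. expiration=[110] in the logs

// Each late record defines a left window [t - 10, t]; the record is skipped
// because that window's end is older than the expiration boundary:
// "k"@100 -> window [90, 100],  100 < 110 -> skipped
// "k"@105 -> window [95, 105],  105 < 110 -> skipped
// "k"@15  -> window [5, 15],     15 < 110 -> skipped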
Use of org.apache.kafka.streams.state.WindowBytesStoreSupplier in project kafka by apache.
The class KStreamSlidingWindowAggregateTest, method testAggregateSmallInput.
@Test
public void testAggregateSmallInput() {
    final StreamsBuilder builder = new StreamsBuilder();
    final String topic = "topic";
    final WindowBytesStoreSupplier storeSupplier = inOrderIterator
        ? new InOrderMemoryWindowStoreSupplier("InOrder", 50000L, 10L, false)
        : Stores.inMemoryWindowStore("Reverse", Duration.ofMillis(50000), Duration.ofMillis(10), false);
    final KTable<Windowed<String>, String> table = builder
        .stream(topic, Consumed.with(Serdes.String(), Serdes.String()))
        .groupByKey(Grouped.with(Serdes.String(), Serdes.String()))
        .windowedBy(SlidingWindows.ofTimeDifferenceAndGrace(ofMillis(10), ofMillis(50)))
        .aggregate(MockInitializer.STRING_INIT, MockAggregator.TOSTRING_ADDER, Materialized.as(storeSupplier));
    final MockApiProcessorSupplier<Windowed<String>, String, Void, Void> supplier = new MockApiProcessorSupplier<>();
    table.toStream().process(supplier);
    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        final TestInputTopic<String, String> inputTopic = driver.createInputTopic(topic, new StringSerializer(), new StringSerializer());
        inputTopic.pipeInput("A", "1", 10L);
        inputTopic.pipeInput("A", "2", 15L);
        inputTopic.pipeInput("A", "3", 20L);
        inputTopic.pipeInput("A", "4", 22L);
        inputTopic.pipeInput("A", "5", 30L);
    }
    final Map<Long, ValueAndTimestamp<String>> actual = new HashMap<>();
    for (final KeyValueTimestamp<Windowed<String>, String> entry : supplier.theCapturedProcessor().processed()) {
        final Windowed<String> window = entry.key();
        final Long start = window.window().start();
        final ValueAndTimestamp<String> valueAndTimestamp = ValueAndTimestamp.make(entry.value(), entry.timestamp());
        if (actual.putIfAbsent(start, valueAndTimestamp) != null) {
            actual.replace(start, valueAndTimestamp);
        }
    }
    final Map<Long, ValueAndTimestamp<String>> expected = new HashMap<>();
    expected.put(0L, ValueAndTimestamp.make("0+1", 10L));
    expected.put(5L, ValueAndTimestamp.make("0+1+2", 15L));
    expected.put(10L, ValueAndTimestamp.make("0+1+2+3", 20L));
    expected.put(11L, ValueAndTimestamp.make("0+2+3", 20L));
    expected.put(12L, ValueAndTimestamp.make("0+2+3+4", 22L));
    expected.put(16L, ValueAndTimestamp.make("0+3+4", 22L));
    expected.put(20L, ValueAndTimestamp.make("0+3+4+5", 30L));
    expected.put(21L, ValueAndTimestamp.make("0+4+5", 30L));
    expected.put(23L, ValueAndTimestamp.make("0+5", 30L));
    assertEquals(expected, actual);
}
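The expected map enumerates every sliding window created for key A. As a rough sketch of where those window starts come from, assuming the usual sliding-window semantics (each record at time t owns a left window [t - 10, t], and an earlier record r gets a right window [r + 1, r + 11] only once a later record falls inside it), the fragment below derives the candidate windows for the five input timestamps; it is illustrative and not part of the test.

// Illustrative fragment, not part of the test.
final long timeDifferenceMs = 10L;
final long[] timestamps = {10L, 15L, 20L, 22L, 30L};   // A@10 .. A@30 piped above
for (final long t : timestamps) {
    // left windows: starts 0, 5, 10, 12, 20
    System.out.println("left  window [" + (t - timeDifferenceMs) + ", " + t + "]");
}
for (final long r : timestamps) {
    // right windows: starts 11, 16, 21, 23, 31; the one starting at 31 (for A@30)
    // is never materialized because no later record falls into it, which is why
    // the expected starts above are exactly 0, 5, 10, 11, 12, 16, 20, 21, 23.
    System.out.println("right window [" + (r + 1) + ", " + (r + 1 + timeDifferenceMs) + "]");
}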