Use of org.apache.kafka.streams.state.KeyValueStore in project kafka by apache.
In the class ProcessorTopologyTest, method testPrefixScanPersistentTimestampedStoreNoCachingNoLogging.
@Test
public void testPrefixScanPersistentTimestampedStoreNoCachingNoLogging() {
final StoreBuilder<KeyValueStore<String, String>> storeBuilder =
    Stores.keyValueStoreBuilder(Stores.persistentTimestampedKeyValueStore(DEFAULT_STORE_NAME), Serdes.String(), Serdes.String())
        .withCachingDisabled()
        .withLoggingDisabled();
topology.addSource("source1", STRING_DESERIALIZER, STRING_DESERIALIZER, INPUT_TOPIC_1)
    .addProcessor("processor1", defineWithStores(() -> new StatefulProcessor(DEFAULT_STORE_NAME), Collections.singleton(storeBuilder)), "source1")
    .addSink("counts", OUTPUT_TOPIC_1, "processor1");
driver = new TopologyTestDriver(topology, props);
final TestInputTopic<String, String> inputTopic = driver.createInputTopic(INPUT_TOPIC_1, STRING_SERIALIZER, STRING_SERIALIZER);
final TestOutputTopic<Integer, String> outputTopic1 = driver.createOutputTopic(OUTPUT_TOPIC_1, Serdes.Integer().deserializer(), Serdes.String().deserializer());
inputTopic.pipeInput("key1", "value1");
inputTopic.pipeInput("key2", "value2");
inputTopic.pipeInput("key3", "value3");
inputTopic.pipeInput("key1", "value4");
assertTrue(outputTopic1.isEmpty());
final KeyValueStore<String, String> store = driver.getKeyValueStore(DEFAULT_STORE_NAME);
final List<KeyValue<String, String>> results = prefixScanResults(store, DEFAULT_PREFIX);
assertEquals("key1", results.get(0).key);
assertEquals("value4", results.get(0).value);
assertEquals("key2", results.get(1).key);
assertEquals("value2", results.get(1).value);
assertEquals("key3", results.get(2).key);
assertEquals("value3", results.get(2).value);
}
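The tests above call a prefixScanResults helper that does not appear in this snippet. A minimal sketch of what such a helper might look like, assuming the store's prefixScan method and imports of StringSerializer, KeyValue, KeyValueIterator, ArrayList, and List:
private List<KeyValue<String, String>> prefixScanResults(final KeyValueStore<String, String> store, final String prefix) {
    // collect all entries whose key starts with the given prefix, in key order
    final List<KeyValue<String, String>> results = new ArrayList<>();
    try (final KeyValueIterator<String, String> iterator = store.prefixScan(prefix, new StringSerializer())) {
        while (iterator.hasNext()) {
            results.add(iterator.next());
        }
    }
    return results;
}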
Use of org.apache.kafka.streams.state.KeyValueStore in project kafka by apache.
In the class ProcessorTopologyTest, method testPrefixScanInMemoryStoreWithCachingWithLogging.
@Test
public void testPrefixScanInMemoryStoreWithCachingWithLogging() {
final StoreBuilder<KeyValueStore<String, String>> storeBuilder =
    Stores.keyValueStoreBuilder(Stores.inMemoryKeyValueStore(DEFAULT_STORE_NAME), Serdes.String(), Serdes.String())
        .withCachingEnabled()
        .withLoggingEnabled(Collections.emptyMap());
topology.addSource("source1", STRING_DESERIALIZER, STRING_DESERIALIZER, INPUT_TOPIC_1)
    .addProcessor("processor1", defineWithStores(() -> new StatefulProcessor(DEFAULT_STORE_NAME), Collections.singleton(storeBuilder)), "source1")
    .addSink("counts", OUTPUT_TOPIC_1, "processor1");
driver = new TopologyTestDriver(topology, props);
final TestInputTopic<String, String> inputTopic = driver.createInputTopic(INPUT_TOPIC_1, STRING_SERIALIZER, STRING_SERIALIZER);
final TestOutputTopic<Integer, String> outputTopic1 = driver.createOutputTopic(OUTPUT_TOPIC_1, Serdes.Integer().deserializer(), Serdes.String().deserializer());
inputTopic.pipeInput("key1", "value1");
inputTopic.pipeInput("key2", "value2");
inputTopic.pipeInput("key3", "value3");
inputTopic.pipeInput("key1", "value4");
assertTrue(outputTopic1.isEmpty());
final KeyValueStore<String, String> store = driver.getKeyValueStore(DEFAULT_STORE_NAME);
final List<KeyValue<String, String>> results = prefixScanResults(store, DEFAULT_PREFIX);
assertEquals("key1", results.get(0).key);
assertEquals("value4", results.get(0).value);
assertEquals("key2", results.get(1).key);
assertEquals("value2", results.get(1).value);
assertEquals("key3", results.get(2).key);
assertEquals("value3", results.get(2).value);
}
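StatefulProcessor and defineWithStores are helpers of ProcessorTopologyTest and are not shown in this snippet. A hypothetical sketch of the processor side, written against the current processor API (org.apache.kafka.streams.processor.api.Processor, ProcessorContext, Record), is given below; it only writes each record into the store and forwards nothing, which is why the output topic is asserted to be empty:
// hypothetical sketch only; the real StatefulProcessor is not part of this snippet
private static class StatefulProcessor implements Processor<String, String, Integer, String> {
    private final String storeName;
    private KeyValueStore<String, String> store;

    StatefulProcessor(final String storeName) {
        this.storeName = storeName;
    }

    @Override
    public void init(final ProcessorContext<Integer, String> context) {
        store = context.getStateStore(storeName);
    }

    @Override
    public void process(final Record<String, String> record) {
        // keep the latest value per key; nothing is forwarded downstream
        store.put(record.key(), record.value());
    }
}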
Use of org.apache.kafka.streams.state.KeyValueStore in project kafka by apache.
In the class ProcessorTopologyTest, method testPrefixScanLruMapWithCachingNoLoggingOldProcessor.
// testing old PAPI
@Deprecated
@Test
public void testPrefixScanLruMapWithCachingNoLoggingOldProcessor() {
final StoreBuilder<KeyValueStore<String, String>> storeBuilder =
    Stores.keyValueStoreBuilder(Stores.lruMap(DEFAULT_STORE_NAME, 100), Serdes.String(), Serdes.String())
        .withCachingEnabled()
        .withLoggingDisabled();
topology.addSource("source1", STRING_DESERIALIZER, STRING_DESERIALIZER, INPUT_TOPIC_1)
    .addProcessor("processor1", defineWithStoresOldAPI(() -> new OldAPIStatefulProcessor(DEFAULT_STORE_NAME), Collections.singleton(storeBuilder)), "source1")
    .addSink("counts", OUTPUT_TOPIC_1, "processor1");
driver = new TopologyTestDriver(topology, props);
final TestInputTopic<String, String> inputTopic = driver.createInputTopic(INPUT_TOPIC_1, STRING_SERIALIZER, STRING_SERIALIZER);
final TestOutputTopic<Integer, String> outputTopic1 = driver.createOutputTopic(OUTPUT_TOPIC_1, Serdes.Integer().deserializer(), Serdes.String().deserializer());
inputTopic.pipeInput("key1", "value1");
inputTopic.pipeInput("key2", "value2");
inputTopic.pipeInput("key3", "value3");
inputTopic.pipeInput("key1", "value4");
assertTrue(outputTopic1.isEmpty());
final KeyValueStore<String, String> store = driver.getKeyValueStore(DEFAULT_STORE_NAME);
final List<KeyValue<String, String>> results = prefixScanResults(store, DEFAULT_PREFIX);
assertEquals("key1", results.get(0).key);
assertEquals("value4", results.get(0).value);
assertEquals("key2", results.get(1).key);
assertEquals("value2", results.get(1).value);
assertEquals("key3", results.get(2).key);
assertEquals("value3", results.get(2).value);
}
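OldAPIStatefulProcessor and defineWithStoresOldAPI are likewise not shown. A hypothetical equivalent written against the deprecated org.apache.kafka.streams.processor.Processor interface could look like this:
// hypothetical sketch only; same behaviour as the new-API processor above, expressed with the old PAPI
@Deprecated
private static class OldAPIStatefulProcessor implements org.apache.kafka.streams.processor.Processor<String, String> {
    private final String storeName;
    private KeyValueStore<String, String> store;

    OldAPIStatefulProcessor(final String storeName) {
        this.storeName = storeName;
    }

    @SuppressWarnings("unchecked")
    @Override
    public void init(final org.apache.kafka.streams.processor.ProcessorContext context) {
        // cast kept for compatibility with older getStateStore signatures
        store = (KeyValueStore<String, String>) context.getStateStore(storeName);
    }

    @Override
    public void process(final String key, final String value) {
        store.put(key, value);
    }

    @Override
    public void close() { }
}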
Use of org.apache.kafka.streams.state.KeyValueStore in project kafka by apache.
In the class KStreamImplJoin, method sharedOuterJoinWindowStoreBuilder.
private <K, V1, V2> StoreBuilder<KeyValueStore<TimestampedKeyAndJoinSide<K>, LeftOrRightValue<V1, V2>>> sharedOuterJoinWindowStoreBuilder(
        final JoinWindows windows,
        final StreamJoinedInternal<K, V1, V2> streamJoinedInternal,
        final String joinThisGeneratedName) {
final boolean persistent = streamJoinedInternal.thisStoreSupplier() == null || streamJoinedInternal.thisStoreSupplier().get().persistent();
final String storeName = buildOuterJoinWindowStoreName(streamJoinedInternal, joinThisGeneratedName) + "-store";
// we use a key-value store with list-values for the shared store; window retention and the grace period
// are handled entirely at the processor node level, so these values are only validated here, not used
final Duration retentionPeriod = Duration.ofMillis(windows.size() + windows.gracePeriodMs());
final Duration windowSize = Duration.ofMillis(windows.size());
final String rpMsgPrefix = prepareMillisCheckFailMsgPrefix(retentionPeriod, "retentionPeriod");
final long retentionMs = validateMillisecondDuration(retentionPeriod, rpMsgPrefix);
final String wsMsgPrefix = prepareMillisCheckFailMsgPrefix(windowSize, "windowSize");
final long windowSizeMs = validateMillisecondDuration(windowSize, wsMsgPrefix);
if (retentionMs < 0L) {
throw new IllegalArgumentException("retentionPeriod cannot be negative");
}
if (windowSizeMs < 0L) {
throw new IllegalArgumentException("windowSize cannot be negative");
}
if (windowSizeMs > retentionMs) {
throw new IllegalArgumentException("The retention period of the window store " + storeName + " must be no smaller than its window size. Got size=[" + windowSizeMs + "], retention=[" + retentionMs + "]");
}
final TimestampedKeyAndJoinSideSerde<K> timestampedKeyAndJoinSideSerde = new TimestampedKeyAndJoinSideSerde<>(streamJoinedInternal.keySerde());
final LeftOrRightValueSerde<V1, V2> leftOrRightValueSerde = new LeftOrRightValueSerde<>(streamJoinedInternal.valueSerde(), streamJoinedInternal.otherValueSerde());
final StoreBuilder<KeyValueStore<TimestampedKeyAndJoinSide<K>, LeftOrRightValue<V1, V2>>> builder = new ListValueStoreBuilder<>(
    persistent ? Stores.persistentKeyValueStore(storeName) : Stores.inMemoryKeyValueStore(storeName),
    timestampedKeyAndJoinSideSerde,
    leftOrRightValueSerde,
    Time.SYSTEM);
if (streamJoinedInternal.loggingEnabled()) {
builder.withLoggingEnabled(streamJoinedInternal.logConfig());
} else {
builder.withLoggingDisabled();
}
return builder;
}
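As the comment above notes, the retention and window-size values are derived from the JoinWindows only for validation. A small illustration of where these numbers come from (the concrete durations are made up for the example):
// illustration only: for ofTimeDifferenceAndGrace(5 min, 1 min), size() is before + after = 10 minutes,
// and the notional retention is size() + gracePeriodMs() = 11 minutes, so the size/retention check passes
final JoinWindows windows = JoinWindows.ofTimeDifferenceAndGrace(Duration.ofMinutes(5), Duration.ofMinutes(1));
final long windowSizeMs = windows.size();                           // 600000
final long retentionMs = windows.size() + windows.gracePeriodMs(); // 660000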
Use of org.apache.kafka.streams.state.KeyValueStore in project kafka by apache.
In the class KTableMapValuesTest, method testQueryableValueGetter.
@Test
public void testQueryableValueGetter() {
final StreamsBuilder builder = new StreamsBuilder();
final String topic1 = "topic1";
final String storeName2 = "store2";
final String storeName3 = "store3";
final KTableImpl<String, String, String> table1 = (KTableImpl<String, String, String>) builder.table(topic1, consumed);
final KTableImpl<String, String, Integer> table2 = (KTableImpl<String, String, Integer>) table1.mapValues(
    s -> Integer.valueOf(s),
    Materialized.<String, Integer, KeyValueStore<Bytes, byte[]>>as(storeName2).withValueSerde(Serdes.Integer()));
final KTableImpl<String, String, Integer> table3 = (KTableImpl<String, String, Integer>) table1.mapValues(
    value -> Integer.valueOf(value) * (-1),
    Materialized.<String, Integer, KeyValueStore<Bytes, byte[]>>as(storeName3).withValueSerde(Serdes.Integer()));
final KTableImpl<String, String, Integer> table4 = (KTableImpl<String, String, Integer>) table1.mapValues(s -> Integer.valueOf(s));
assertEquals(storeName2, table2.queryableStoreName());
assertEquals(storeName3, table3.queryableStoreName());
assertNull(table4.queryableStoreName());
doTestValueGetter(builder, topic1, table2, table3);
}
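doTestValueGetter is a helper of KTableMapValuesTest and is not shown here. A hypothetical way to verify the two materialized stores with a TopologyTestDriver (props and the String serdes behind consumed are assumptions taken from the surrounding test class) could be:
// hypothetical verification sketch; the real doTestValueGetter helper is not part of this snippet
try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
    final TestInputTopic<String, String> input = driver.createInputTopic(topic1, new StringSerializer(), new StringSerializer());
    input.pipeInput("A", "1");
    final KeyValueStore<String, Integer> store2 = driver.getKeyValueStore(storeName2);
    final KeyValueStore<String, Integer> store3 = driver.getKeyValueStore(storeName3);
    assertEquals(Integer.valueOf(1), store2.get("A"));   // mapValues: String -> Integer
    assertEquals(Integer.valueOf(-1), store3.get("A"));  // mapValues: negated Integer
}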