
Example 16 with ReadOnlyKeyValueStore

Use of org.apache.kafka.streams.state.ReadOnlyKeyValueStore in project apache-kafka-on-k8s by banzaicloud.

From class GlobalKTableIntegrationTest, method shouldRestoreTransactionalMessages.

@Test
public void shouldRestoreTransactionalMessages() throws Exception {
    produceInitialGlobalTableValues(true);
    startStreams();
    final Map<Long, String> expected = new HashMap<>();
    expected.put(1L, "A");
    expected.put(2L, "B");
    expected.put(3L, "C");
    expected.put(4L, "D");
    TestUtils.waitForCondition(new TestCondition() {

        @Override
        public boolean conditionMet() {
            // The global store is not queryable until restoration completes, so treat
            // InvalidStateStoreException as "not ready yet" and let waitForCondition retry.
            ReadOnlyKeyValueStore<Long, String> store = null;
            try {
                store = kafkaStreams.store(globalStore, QueryableStoreTypes.<Long, String>keyValueStore());
            } catch (InvalidStateStoreException ex) {
                return false;
            }
            // Drain the store and compare its full contents against the expected values.
            Map<Long, String> result = new HashMap<>();
            Iterator<KeyValue<Long, String>> it = store.all();
            while (it.hasNext()) {
                KeyValue<Long, String> kv = it.next();
                result.put(kv.key, kv.value);
            }
            return result.equals(expected);
        }
    }, 30000L, "waiting for initial values");
    System.out.println("no failed test");
}
Also used : InvalidStateStoreException(org.apache.kafka.streams.errors.InvalidStateStoreException) KeyValue(org.apache.kafka.streams.KeyValue) HashMap(java.util.HashMap) Iterator(java.util.Iterator) TestCondition(org.apache.kafka.test.TestCondition) ReadOnlyKeyValueStore(org.apache.kafka.streams.state.ReadOnlyKeyValueStore) HashMap(java.util.HashMap) Map(java.util.Map) IntegrationTest(org.apache.kafka.test.IntegrationTest) Test(org.junit.Test)
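
The two-argument KafkaStreams#store(name, type) call in this example was deprecated in Kafka Streams 2.5 and removed in 3.0; newer clients wrap the same lookup in StoreQueryParameters. A minimal sketch of the equivalent lookup, assuming a running instance and store name like the kafkaStreams and globalStore fields above (the StoreLookup class name is only illustrative):

import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.StoreQueryParameters;
import org.apache.kafka.streams.errors.InvalidStateStoreException;
import org.apache.kafka.streams.state.QueryableStoreTypes;
import org.apache.kafka.streams.state.ReadOnlyKeyValueStore;

public final class StoreLookup {

    // Equivalent lookup on Kafka Streams 2.5+: wrap the store name and type in StoreQueryParameters.
    public static ReadOnlyKeyValueStore<Long, String> globalTableStore(final KafkaStreams streams,
                                                                       final String storeName) {
        try {
            return streams.store(
                StoreQueryParameters.fromNameAndType(storeName, QueryableStoreTypes.<Long, String>keyValueStore()));
        } catch (final InvalidStateStoreException e) {
            // Not queryable yet (e.g. the global store is still restoring); callers retry, as the test above does.
            return null;
        }
    }
}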

Example 17 with ReadOnlyKeyValueStore

Use of org.apache.kafka.streams.state.ReadOnlyKeyValueStore in project apache-kafka-on-k8s by banzaicloud.

From class QueryableStateIntegrationTest, method shouldBeAbleToQueryMapValuesAfterFilterState.

@Test
public void shouldBeAbleToQueryMapValuesAfterFilterState() throws Exception {
    streamsConfiguration.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
    streamsConfiguration.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass());
    final StreamsBuilder builder = new StreamsBuilder();
    final String[] keys = { "hello", "goodbye", "welcome", "go", "kafka" };
    // Produce one batch of records; after the filter below, only the "kafka" key should remain queryable.
    final Set<KeyValue<String, String>> batch1 = new HashSet<>(Arrays.asList(new KeyValue<>(keys[0], "1"), new KeyValue<>(keys[1], "1"), new KeyValue<>(keys[2], "3"), new KeyValue<>(keys[3], "5"), new KeyValue<>(keys[4], "2")));
    final Set<KeyValue<String, Long>> expectedBatch1 = new HashSet<>(Collections.singleton(new KeyValue<>(keys[4], 2L)));
    IntegrationTestUtils.produceKeyValuesSynchronously(streamOne, batch1, TestUtils.producerConfig(CLUSTER.bootstrapServers(), StringSerializer.class, StringSerializer.class, new Properties()), mockTime);
    final Predicate<String, String> filterPredicate = new Predicate<String, String>() {

        @Override
        public boolean test(final String key, final String value) {
            return key.contains("kafka");
        }
    };
    // Build the topology: table -> filter (materialized as "queryFilter") -> mapValues (materialized as "queryMapValues").
    final KTable<String, String> t1 = builder.table(streamOne);
    final KTable<String, String> t2 = t1.filter(filterPredicate, Materialized.<String, String, KeyValueStore<Bytes, byte[]>>as("queryFilter"));
    final KTable<String, Long> t3 = t2.mapValues(new ValueMapper<String, Long>() {

        @Override
        public Long apply(final String value) {
            return Long.valueOf(value);
        }
    }, Materialized.<String, Long, KeyValueStore<Bytes, byte[]>>as("queryMapValues").withValueSerde(Serdes.Long()));
    t3.toStream().to(outputTopic, Produced.with(Serdes.String(), Serdes.Long()));
    kafkaStreams = new KafkaStreams(builder.build(), streamsConfiguration);
    kafkaStreams.start();
    waitUntilAtLeastNumRecordProcessed(outputTopic, 1);
    // Query the store backing mapValues() and verify that only the filtered key survived.
    final ReadOnlyKeyValueStore<String, Long> myMapStore = kafkaStreams.store("queryMapValues", QueryableStoreTypes.<String, Long>keyValueStore());
    for (final KeyValue<String, Long> expectedEntry : expectedBatch1) {
        assertEquals(myMapStore.get(expectedEntry.key), expectedEntry.value);
    }
    for (final KeyValue<String, String> batchEntry : batch1) {
        final KeyValue<String, Long> batchEntryMapValue = new KeyValue<>(batchEntry.key, Long.valueOf(batchEntry.value));
        if (!expectedBatch1.contains(batchEntryMapValue)) {
            assertNull(myMapStore.get(batchEntry.key));
        }
    }
}
Also used : KafkaStreams(org.apache.kafka.streams.KafkaStreams) KeyValue(org.apache.kafka.streams.KeyValue) KeyValueStore(org.apache.kafka.streams.state.KeyValueStore) ReadOnlyKeyValueStore(org.apache.kafka.streams.state.ReadOnlyKeyValueStore) Properties(java.util.Properties) Predicate(org.apache.kafka.streams.kstream.Predicate) StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) Bytes(org.apache.kafka.common.utils.Bytes) StringSerializer(org.apache.kafka.common.serialization.StringSerializer) HashSet(java.util.HashSet) KafkaStreamsTest(org.apache.kafka.streams.KafkaStreamsTest) IntegrationTest(org.apache.kafka.test.IntegrationTest) Test(org.junit.Test)
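
On Java 8+, the anonymous Predicate and ValueMapper above collapse into lambdas. A minimal sketch of the same table -> filter -> mapValues topology under that assumption, reusing the store names from the example (the FilterThenMapTopology class and its parameters are only illustrative):

import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.utils.Bytes;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.kstream.KTable;
import org.apache.kafka.streams.kstream.Materialized;
import org.apache.kafka.streams.kstream.Produced;
import org.apache.kafka.streams.state.KeyValueStore;

public final class FilterThenMapTopology {

    public static StreamsBuilder build(final String inputTopic, final String outputTopic) {
        final StreamsBuilder builder = new StreamsBuilder();
        final KTable<String, String> table = builder.table(inputTopic);
        // Keep only keys containing "kafka", materialized as the queryable store "queryFilter".
        final KTable<String, String> filtered =
            table.filter((key, value) -> key.contains("kafka"),
                Materialized.<String, String, KeyValueStore<Bytes, byte[]>>as("queryFilter"));
        // Parse values to Long, materialized as the queryable store "queryMapValues".
        filtered
            .mapValues(value -> Long.valueOf(value),
                Materialized.<String, Long, KeyValueStore<Bytes, byte[]>>as("queryMapValues")
                    .withValueSerde(Serdes.Long()))
            .toStream()
            .to(outputTopic, Produced.with(Serdes.String(), Serdes.Long()));
        return builder;
    }
}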

Example 18 with ReadOnlyKeyValueStore

Use of org.apache.kafka.streams.state.ReadOnlyKeyValueStore in project kafka by apache.

From class OptimizedKTableIntegrationTest, method shouldApplyUpdatesToStandbyStore.

@Test
public void shouldApplyUpdatesToStandbyStore() throws Exception {
    final int batch1NumMessages = 100;
    final int batch2NumMessages = 100;
    final int key = 1;
    final Semaphore semaphore = new Semaphore(0);
    final StreamsBuilder builder = new StreamsBuilder();
    builder.table(INPUT_TOPIC_NAME, Consumed.with(Serdes.Integer(), Serdes.Integer()), Materialized.<Integer, Integer, KeyValueStore<Bytes, byte[]>>as(TABLE_NAME).withCachingDisabled()).toStream().peek((k, v) -> semaphore.release());
    final KafkaStreams kafkaStreams1 = createKafkaStreams(builder, streamsConfiguration());
    final KafkaStreams kafkaStreams2 = createKafkaStreams(builder, streamsConfiguration());
    final List<KafkaStreams> kafkaStreamsList = Arrays.asList(kafkaStreams1, kafkaStreams2);
    startApplicationAndWaitUntilRunning(kafkaStreamsList, Duration.ofSeconds(60));
    produceValueRange(key, 0, batch1NumMessages);
    // Assert that all messages in the first batch were processed in a timely manner
    assertThat(semaphore.tryAcquire(batch1NumMessages, 60, TimeUnit.SECONDS), is(equalTo(true)));
    final ReadOnlyKeyValueStore<Integer, Integer> store1 = IntegrationTestUtils.getStore(TABLE_NAME, kafkaStreams1, QueryableStoreTypes.keyValueStore());
    final ReadOnlyKeyValueStore<Integer, Integer> store2 = IntegrationTestUtils.getStore(TABLE_NAME, kafkaStreams2, QueryableStoreTypes.keyValueStore());
    final boolean kafkaStreams1WasFirstActive;
    final KeyQueryMetadata keyQueryMetadata = kafkaStreams1.queryMetadataForKey(TABLE_NAME, key, (topic, somekey, value, numPartitions) -> 0);
    // Assert that the current value in store reflects all messages being processed
    if ((keyQueryMetadata.activeHost().port() % 2) == 1) {
        assertThat(store1.get(key), is(equalTo(batch1NumMessages - 1)));
        kafkaStreams1WasFirstActive = true;
    } else {
        assertThat(store2.get(key), is(equalTo(batch1NumMessages - 1)));
        kafkaStreams1WasFirstActive = false;
    }
    if (kafkaStreams1WasFirstActive) {
        kafkaStreams1.close();
    } else {
        kafkaStreams2.close();
    }
    final ReadOnlyKeyValueStore<Integer, Integer> newActiveStore = kafkaStreams1WasFirstActive ? store2 : store1;
    TestUtils.retryOnExceptionWithTimeout(60 * 1000, 100, () -> {
        // Assert that after failover we have recovered to the last store write
        assertThat(newActiveStore.get(key), is(equalTo(batch1NumMessages - 1)));
    });
    final int totalNumMessages = batch1NumMessages + batch2NumMessages;
    produceValueRange(key, batch1NumMessages, totalNumMessages);
    // Assert that all messages in the second batch were processed in a timely manner
    assertThat(semaphore.tryAcquire(batch2NumMessages, 60, TimeUnit.SECONDS), is(equalTo(true)));
    TestUtils.retryOnExceptionWithTimeout(60 * 1000, 100, () -> {
        // Assert that the current value in store reflects all messages being processed
        assertThat(newActiveStore.get(key), is(equalTo(totalNumMessages - 1)));
    });
}
Also used : StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) KafkaStreams(org.apache.kafka.streams.KafkaStreams) KeyValueStore(org.apache.kafka.streams.state.KeyValueStore) ReadOnlyKeyValueStore(org.apache.kafka.streams.state.ReadOnlyKeyValueStore) Semaphore(java.util.concurrent.Semaphore) KeyQueryMetadata(org.apache.kafka.streams.KeyQueryMetadata) IntegrationTest(org.apache.kafka.test.IntegrationTest) Test(org.junit.Test)
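
The failover exercised above only works if each instance keeps a standby copy of the other instance's active store, which is governed by num.standby.replicas. The test's streamsConfiguration() helper is not shown here, so the following is only a sketch of the properties such a setup plausibly needs; the application id, bootstrap servers, and state directory are placeholders:

import java.util.Properties;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.StreamsConfig;

public final class StandbyStreamsConfig {

    public static Properties build() {
        final Properties props = new Properties();
        props.put(StreamsConfig.APPLICATION_ID_CONFIG, "optimized-ktable-test");  // placeholder id
        props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");      // placeholder broker
        props.put(StreamsConfig.STATE_DIR_CONFIG, "/tmp/kafka-streams-standby");  // placeholder state dir
        // One standby replica per store, so the surviving instance can serve reads after the active one closes.
        props.put(StreamsConfig.NUM_STANDBY_REPLICAS_CONFIG, 1);
        props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.Integer().getClass());
        props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.Integer().getClass());
        return props;
    }
}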

Example 19 with ReadOnlyKeyValueStore

Use of org.apache.kafka.streams.state.ReadOnlyKeyValueStore in project kafka by apache.

From class RangeQueryIntegrationTest, method testStoreConfig.

@Test
public void testStoreConfig() throws Exception {
    final StreamsBuilder builder = new StreamsBuilder();
    final Materialized<String, String, KeyValueStore<Bytes, byte[]>> stateStoreConfig = getStoreConfig(storeType, enableLogging, enableCaching);
    builder.table(inputStream, stateStoreConfig);
    try (final KafkaStreams kafkaStreams = new KafkaStreams(builder.build(), STREAMS_CONFIG)) {
        IntegrationTestUtils.startApplicationAndWaitUntilRunning(Collections.singletonList(kafkaStreams), Duration.ofSeconds(60));
        writeInputData();
        final ReadOnlyKeyValueStore<String, String> stateStore = IntegrationTestUtils.getStore(1000_000L, TABLE_NAME, kafkaStreams, QueryableStoreTypes.keyValueStore());
        // wait for the store to populate
        TestUtils.waitForCondition(() -> stateStore.get(high) != null, "The store never finished populating");
        // query the state store
        try (final KeyValueIterator<String, String> scanIterator = forward ? stateStore.range(null, null) : stateStore.reverseRange(null, null)) {
            final Iterator<KeyValue<String, String>> dataIterator = forward ? records.iterator() : records.descendingIterator();
            TestUtils.checkEquals(scanIterator, dataIterator);
        }
        try (final KeyValueIterator<String, String> allIterator = forward ? stateStore.all() : stateStore.reverseAll()) {
            final Iterator<KeyValue<String, String>> dataIterator = forward ? records.iterator() : records.descendingIterator();
            TestUtils.checkEquals(allIterator, dataIterator);
        }
        testRange("range", stateStore, innerLow, innerHigh, forward);
        testRange("until", stateStore, null, middle, forward);
        testRange("from", stateStore, middle, null, forward);
        testRange("untilBetween", stateStore, null, innerHighBetween, forward);
        testRange("fromBetween", stateStore, innerLowBetween, null, forward);
    }
}
Also used : StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) KafkaStreams(org.apache.kafka.streams.KafkaStreams) KeyValue(org.apache.kafka.streams.KeyValue) KeyValueStore(org.apache.kafka.streams.state.KeyValueStore) ReadOnlyKeyValueStore(org.apache.kafka.streams.state.ReadOnlyKeyValueStore) IntegrationTest(org.apache.kafka.test.IntegrationTest) Test(org.junit.Test)
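
The getStoreConfig helper is not shown in the snippet above. A hypothetical sketch of what such a helper could look like, toggling changelogging and caching on a persistent or in-memory key-value store; this is an assumption about the helper, not the test's actual implementation:

import java.util.Collections;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.utils.Bytes;
import org.apache.kafka.streams.kstream.Materialized;
import org.apache.kafka.streams.state.KeyValueBytesStoreSupplier;
import org.apache.kafka.streams.state.KeyValueStore;
import org.apache.kafka.streams.state.Stores;

public final class StoreConfigs {

    // Hypothetical helper: builds a Materialized config with the requested logging/caching behaviour.
    public static Materialized<String, String, KeyValueStore<Bytes, byte[]>> getStoreConfig(
            final boolean persistent, final boolean enableLogging, final boolean enableCaching) {
        final KeyValueBytesStoreSupplier supplier = persistent
            ? Stores.persistentKeyValueStore("range-query-store")   // placeholder store name
            : Stores.inMemoryKeyValueStore("range-query-store");
        Materialized<String, String, KeyValueStore<Bytes, byte[]>> materialized =
            Materialized.<String, String>as(supplier)
                .withKeySerde(Serdes.String())
                .withValueSerde(Serdes.String());
        materialized = enableLogging
            ? materialized.withLoggingEnabled(Collections.emptyMap())
            : materialized.withLoggingDisabled();
        return enableCaching ? materialized.withCachingEnabled() : materialized.withCachingDisabled();
    }
}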

Example 20 with ReadOnlyKeyValueStore

Use of org.apache.kafka.streams.state.ReadOnlyKeyValueStore in project kafka by apache.

From class QueryableStateIntegrationTest, method shouldBeAbleToQueryKeysWithGivenPrefix.

@Test
public void shouldBeAbleToQueryKeysWithGivenPrefix() throws Exception {
    streamsConfiguration.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
    streamsConfiguration.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass());
    final StreamsBuilder builder = new StreamsBuilder();
    final String[] keys = { "hello", "goodbye", "welcome", "go", "kafka" };
    final Set<KeyValue<String, String>> batch1 = new HashSet<>(Arrays.asList(new KeyValue<>(keys[0], "1"), new KeyValue<>(keys[1], "1"), new KeyValue<>(keys[2], "3"), new KeyValue<>(keys[3], "5"), new KeyValue<>(keys[4], "2")));
    final List<KeyValue<String, Long>> expectedPrefixScanResult = Arrays.asList(new KeyValue<>(keys[3], 5L), new KeyValue<>(keys[1], 1L));
    IntegrationTestUtils.produceKeyValuesSynchronously(streamOne, batch1, TestUtils.producerConfig(CLUSTER.bootstrapServers(), StringSerializer.class, StringSerializer.class, new Properties()), mockTime);
    final KTable<String, String> t1 = builder.table(streamOne);
    t1.mapValues((ValueMapper<String, Long>) Long::valueOf, Materialized.<String, Long, KeyValueStore<Bytes, byte[]>>as("queryMapValues").withValueSerde(Serdes.Long())).toStream().to(outputTopic, Produced.with(Serdes.String(), Serdes.Long()));
    kafkaStreams = new KafkaStreams(builder.build(), streamsConfiguration);
    startKafkaStreamsAndWaitForRunningState(kafkaStreams);
    waitUntilAtLeastNumRecordProcessed(outputTopic, 5);
    final ReadOnlyKeyValueStore<String, Long> myMapStore = IntegrationTestUtils.getStore("queryMapValues", kafkaStreams, keyValueStore());
    int index = 0;
    try (final KeyValueIterator<String, Long> range = myMapStore.prefixScan("go", Serdes.String().serializer())) {
        while (range.hasNext()) {
            assertEquals(expectedPrefixScanResult.get(index++), range.next());
        }
    }
}
Also used : KafkaStreams(org.apache.kafka.streams.KafkaStreams) KeyValue(org.apache.kafka.streams.KeyValue) KeyValueStore(org.apache.kafka.streams.state.KeyValueStore) ReadOnlyKeyValueStore(org.apache.kafka.streams.state.ReadOnlyKeyValueStore) Utils.mkProperties(org.apache.kafka.common.utils.Utils.mkProperties) Properties(java.util.Properties) StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) StringSerializer(org.apache.kafka.common.serialization.StringSerializer) HashSet(java.util.HashSet) KafkaStreamsTest(org.apache.kafka.streams.KafkaStreamsTest) Test(org.junit.Test) IntegrationTest(org.apache.kafka.test.IntegrationTest)
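
prefixScan matches every key that starts with the given prefix, which is why "go" returns both "go" and "goodbye" in the expected result above. A minimal sketch of draining such an iterator into a map, assuming a store obtained as in the example (the PrefixScans class name is only illustrative):

import java.util.LinkedHashMap;
import java.util.Map;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.state.KeyValueIterator;
import org.apache.kafka.streams.state.ReadOnlyKeyValueStore;

public final class PrefixScans {

    // Drain a prefix scan into a map; the iterator must be closed, hence try-with-resources.
    public static Map<String, Long> byPrefix(final ReadOnlyKeyValueStore<String, Long> store, final String prefix) {
        final Map<String, Long> result = new LinkedHashMap<>();
        try (final KeyValueIterator<String, Long> it = store.prefixScan(prefix, Serdes.String().serializer())) {
            while (it.hasNext()) {
                final KeyValue<String, Long> kv = it.next();
                result.put(kv.key, kv.value);
            }
        }
        return result;
    }
}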

Aggregations

ReadOnlyKeyValueStore (org.apache.kafka.streams.state.ReadOnlyKeyValueStore): 29 usages
Test (org.junit.Test): 20 usages
KafkaStreams (org.apache.kafka.streams.KafkaStreams): 17 usages
IntegrationTest (org.apache.kafka.test.IntegrationTest): 17 usages
StreamsBuilder (org.apache.kafka.streams.StreamsBuilder): 16 usages
InvalidStateStoreException (org.apache.kafka.streams.errors.InvalidStateStoreException): 15 usages
Properties (java.util.Properties): 11 usages
KeyValue (org.apache.kafka.streams.KeyValue): 10 usages
KeyValueStore (org.apache.kafka.streams.state.KeyValueStore): 10 usages
Semaphore (java.util.concurrent.Semaphore): 9 usages
KeyQueryMetadata (org.apache.kafka.streams.KeyQueryMetadata): 9 usages
StringSerializer (org.apache.kafka.common.serialization.StringSerializer): 8 usages
HashSet (java.util.HashSet): 6 usages
KafkaStreamsTest (org.apache.kafka.streams.KafkaStreamsTest): 6 usages
Map (java.util.Map): 5 usages
List (java.util.List): 4 usages
Utils.mkProperties (org.apache.kafka.common.utils.Utils.mkProperties): 4 usages
Duration (java.time.Duration): 3 usages
Arrays (java.util.Arrays): 3 usages
Collections (java.util.Collections): 3 usages