
Example 51 with KeyValueStore

Use of org.apache.kafka.streams.state.KeyValueStore in project kafka by apache.

The class StandbyTaskEOSIntegrationTest, method buildWithDeduplicationTopology.

private KafkaStreams buildWithDeduplicationTopology(final String stateDirPath) {
    final StreamsBuilder builder = new StreamsBuilder();
    builder.addStateStore(Stores.keyValueStoreBuilder(Stores.persistentKeyValueStore(storeName), Serdes.Integer(), Serdes.Integer()));
    builder.<Integer, Integer>stream(inputTopic).transform(() -> new Transformer<Integer, Integer, KeyValue<Integer, Integer>>() {

        private KeyValueStore<Integer, Integer> store;

        @SuppressWarnings("unchecked")
        @Override
        public void init(final ProcessorContext context) {
            store = (KeyValueStore<Integer, Integer>) context.getStateStore(storeName);
        }

        @Override
        public KeyValue<Integer, Integer> transform(final Integer key, final Integer value) {
            if (skipRecord.get()) {
                // the goal of skipping is to avoid modifying the state store
                return KeyValue.pair(key, value);
            }
            if (store.get(key) != null) {
                return null;
            }
            store.put(key, value);
            store.flush();
            if (key == KEY_1) {
                // after error injection, we need to avoid a consecutive error after rebalancing
                skipRecord.set(true);
                throw new RuntimeException("Injected test error");
            }
            return KeyValue.pair(key, value);
        }

        @Override
        public void close() {
        }
    }, storeName).to(outputTopic);
    return new KafkaStreams(builder.build(), props(stateDirPath));
}
Also used : StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) KafkaStreams(org.apache.kafka.streams.KafkaStreams) Transformer(org.apache.kafka.streams.kstream.Transformer) KeyValueStore(org.apache.kafka.streams.state.KeyValueStore) ProcessorContext(org.apache.kafka.streams.processor.ProcessorContext)
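
The example above relies on fields defined elsewhere in the test class (inputTopic, outputTopic, skipRecord, KEY_1, props). The following is a minimal, self-contained sketch of the same deduplication pattern, simplified (no error injection) and driven with TopologyTestDriver instead of a running cluster; the store and topic names here are hypothetical:

import java.util.Properties;

import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.TestInputTopic;
import org.apache.kafka.streams.TestOutputTopic;
import org.apache.kafka.streams.TopologyTestDriver;
import org.apache.kafka.streams.kstream.Consumed;
import org.apache.kafka.streams.kstream.Produced;
import org.apache.kafka.streams.kstream.Transformer;
import org.apache.kafka.streams.processor.ProcessorContext;
import org.apache.kafka.streams.state.KeyValueStore;
import org.apache.kafka.streams.state.Stores;

public class DeduplicationSketch {

    public static void main(final String[] args) {
        final String storeName = "dedup-store";   // hypothetical store and topic names
        final String inputTopic = "dedup-input";
        final String outputTopic = "dedup-output";

        final StreamsBuilder builder = new StreamsBuilder();
        // Register the persistent store exactly as in the test above.
        builder.addStateStore(Stores.keyValueStoreBuilder(
            Stores.persistentKeyValueStore(storeName), Serdes.Integer(), Serdes.Integer()));

        builder.stream(inputTopic, Consumed.with(Serdes.Integer(), Serdes.Integer()))
            .transform(() -> new Transformer<Integer, Integer, KeyValue<Integer, Integer>>() {
                private KeyValueStore<Integer, Integer> store;

                @SuppressWarnings("unchecked")
                @Override
                public void init(final ProcessorContext context) {
                    store = (KeyValueStore<Integer, Integer>) context.getStateStore(storeName);
                }

                @Override
                public KeyValue<Integer, Integer> transform(final Integer key, final Integer value) {
                    if (store.get(key) != null) {
                        return null;                  // drop duplicate keys
                    }
                    store.put(key, value);
                    return KeyValue.pair(key, value); // forward the first occurrence only
                }

                @Override
                public void close() {
                }
            }, storeName)
            .to(outputTopic, Produced.with(Serdes.Integer(), Serdes.Integer()));

        final Properties config = new Properties();
        config.put(StreamsConfig.APPLICATION_ID_CONFIG, "dedup-sketch");
        config.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "dummy:1234"); // never contacted by the test driver

        try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), config)) {
            final TestInputTopic<Integer, Integer> in = driver.createInputTopic(
                inputTopic, Serdes.Integer().serializer(), Serdes.Integer().serializer());
            final TestOutputTopic<Integer, Integer> out = driver.createOutputTopic(
                outputTopic, Serdes.Integer().deserializer(), Serdes.Integer().deserializer());

            in.pipeInput(1, 100);
            in.pipeInput(1, 100); // duplicate, filtered by the store lookup
            in.pipeInput(2, 200);

            System.out.println(out.readKeyValuesToList()); // expect [KeyValue(1, 100), KeyValue(2, 200)]
        }
    }
}

Because the store is registered with addStateStore and named in the transform() call, the transformer can fetch it from the ProcessorContext in init(), just as the integration test does.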

Example 52 with KeyValueStore

Use of org.apache.kafka.streams.state.KeyValueStore in project kafka by apache.

The class StateDirectoryIntegrationTest, method testCleanUpStateDirIfEmpty.

@Test
public void testCleanUpStateDirIfEmpty() throws InterruptedException {
    final String uniqueTestName = safeUniqueTestName(getClass(), testName);
    // Create Topic
    final String input = uniqueTestName + "-input";
    CLUSTER.createTopic(input);
    final Properties producerConfig = mkProperties(mkMap(mkEntry(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers()), mkEntry(ProducerConfig.ACKS_CONFIG, "all"), mkEntry(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getCanonicalName()), mkEntry(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getCanonicalName())));
    try (final KafkaProducer<String, String> producer = new KafkaProducer<>(producerConfig, Serdes.String().serializer(), Serdes.String().serializer())) {
        // Create Test Records
        producer.send(new ProducerRecord<>(input, "a"));
        producer.send(new ProducerRecord<>(input, "b"));
        producer.send(new ProducerRecord<>(input, "c"));
        // Create Topology
        final String storeName = uniqueTestName + "-input-table";
        final StreamsBuilder builder = new StreamsBuilder();
        builder.table(input, Materialized.<String, String, KeyValueStore<Bytes, byte[]>>as(storeName).withKeySerde(Serdes.String()).withValueSerde(Serdes.String()));
        final Topology topology = builder.build();
        // State Store Directory
        final String stateDir = TestUtils.tempDirectory(uniqueTestName).getPath();
        // Create KafkaStreams instance
        final String applicationId = uniqueTestName + "-app";
        final Properties streamsConfig = mkProperties(mkMap(mkEntry(StreamsConfig.APPLICATION_ID_CONFIG, applicationId), mkEntry(StreamsConfig.STATE_DIR_CONFIG, stateDir), mkEntry(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers())));
        final KafkaStreams streams = new KafkaStreams(topology, streamsConfig);
        // Create StateListener
        final CountDownLatch runningLatch = new CountDownLatch(1);
        final CountDownLatch notRunningLatch = new CountDownLatch(1);
        final KafkaStreams.StateListener stateListener = (newState, oldState) -> {
            if (newState == KafkaStreams.State.RUNNING) {
                runningLatch.countDown();
            }
            if (newState == KafkaStreams.State.NOT_RUNNING) {
                notRunningLatch.countDown();
            }
        };
        streams.setStateListener(stateListener);
        // Application state directory
        final File appDir = new File(stateDir, applicationId);
        // Validate application state directory is created.
        streams.start();
        try {
            runningLatch.await(IntegrationTestUtils.DEFAULT_TIMEOUT, TimeUnit.MILLISECONDS);
        } catch (final InterruptedException e) {
            throw new RuntimeException("Streams didn't start in time.", e);
        }
        // State directory exists
        assertTrue((new File(stateDir)).exists());
        // Application state directory Exists
        assertTrue(appDir.exists());
        // Validate StateStore directory is deleted.
        streams.close();
        try {
            notRunningLatch.await(IntegrationTestUtils.DEFAULT_TIMEOUT, TimeUnit.MILLISECONDS);
        } catch (final InterruptedException e) {
            throw new RuntimeException("Streams didn't cleaned up in time.", e);
        }
        streams.cleanUp();
        // Root state store exists
        assertTrue((new File(stateDir)).exists());
        // case 1: the state directory is cleaned up without any problems.
        // case 2: The state directory is not cleaned up, for it does not include any checkpoint file.
        // case 3: The state directory is not cleaned up, for it includes a checkpoint file but it is empty.
        assertTrue(appDir.exists() || Arrays.stream(appDir.listFiles()).filter((File f) -> f.isDirectory() && f.listFiles().length > 0 && !(new File(f, ".checkpoint")).exists()).findFirst().isPresent() || Arrays.stream(appDir.listFiles()).filter((File f) -> f.isDirectory() && (new File(f, ".checkpoint")).length() == 0L).findFirst().isPresent());
    } finally {
        CLUSTER.deleteAllTopicsAndWait(0L);
    }
}
Also used : KafkaProducer(org.apache.kafka.clients.producer.KafkaProducer) StreamsConfig(org.apache.kafka.streams.StreamsConfig) Arrays(java.util.Arrays) ProducerRecord(org.apache.kafka.clients.producer.ProducerRecord) BeforeClass(org.junit.BeforeClass) IntegrationTest(org.apache.kafka.test.IntegrationTest) Utils.mkProperties(org.apache.kafka.common.utils.Utils.mkProperties) Utils.mkMap(org.apache.kafka.common.utils.Utils.mkMap) IntegrationTestUtils.safeUniqueTestName(org.apache.kafka.streams.integration.utils.IntegrationTestUtils.safeUniqueTestName) EmbeddedKafkaCluster(org.apache.kafka.streams.integration.utils.EmbeddedKafkaCluster) TestName(org.junit.rules.TestName) KeyValueStore(org.apache.kafka.streams.state.KeyValueStore) Serdes(org.apache.kafka.common.serialization.Serdes) StringSerializer(org.apache.kafka.common.serialization.StringSerializer) ProducerConfig(org.apache.kafka.clients.producer.ProducerConfig) StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) AfterClass(org.junit.AfterClass) Properties(java.util.Properties) TestUtils(org.apache.kafka.test.TestUtils) Assert.assertTrue(org.junit.Assert.assertTrue) IOException(java.io.IOException) Test(org.junit.Test) Category(org.junit.experimental.categories.Category) File(java.io.File) Bytes(org.apache.kafka.common.utils.Bytes) TimeUnit(java.util.concurrent.TimeUnit) CountDownLatch(java.util.concurrent.CountDownLatch) IntegrationTestUtils(org.apache.kafka.streams.integration.utils.IntegrationTestUtils) Rule(org.junit.Rule) Utils.mkEntry(org.apache.kafka.common.utils.Utils.mkEntry) Materialized(org.apache.kafka.streams.kstream.Materialized) KafkaStreams(org.apache.kafka.streams.KafkaStreams) Topology(org.apache.kafka.streams.Topology)
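
Because the KTable above is materialized under an explicit store name, the same store can be read back through interactive queries while the instance is RUNNING. A short sketch, assuming the streams instance and storeName variables from the test (and that the instance has not yet been closed); this is not part of the test itself:

import org.apache.kafka.streams.StoreQueryParameters;
import org.apache.kafka.streams.state.QueryableStoreTypes;
import org.apache.kafka.streams.state.ReadOnlyKeyValueStore;

// Obtain a read-only view of the materialized table by its store name
// (only valid while the instance is in the RUNNING state).
final ReadOnlyKeyValueStore<String, String> view = streams.store(
    StoreQueryParameters.fromNameAndType(storeName, QueryableStoreTypes.keyValueStore()));

// A point lookup; the key here is hypothetical, since the test produces value-only records.
System.out.println(view.get("some-key"));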

Example 53 with KeyValueStore

Use of org.apache.kafka.streams.state.KeyValueStore in project kafka by apache.

The class TimestampedKeyValueStoreMaterializerTest, method shouldCreateBuilderThatBuildsStoreWithCachingAndLoggingDisabled.

@Test
public void shouldCreateBuilderThatBuildsStoreWithCachingAndLoggingDisabled() {
    final MaterializedInternal<String, String, KeyValueStore<Bytes, byte[]>> materialized = new MaterializedInternal<>(Materialized.<String, String, KeyValueStore<Bytes, byte[]>>as("store").withCachingDisabled().withLoggingDisabled(), nameProvider, storePrefix);
    final TimestampedKeyValueStoreMaterializer<String, String> materializer = new TimestampedKeyValueStoreMaterializer<>(materialized);
    final StoreBuilder<TimestampedKeyValueStore<String, String>> builder = materializer.materialize();
    final TimestampedKeyValueStore<String, String> store = builder.build();
    final StateStore wrapped = ((WrappedStateStore) store).wrapped();
    assertThat(wrapped, not(instanceOf(CachingKeyValueStore.class)));
    assertThat(wrapped, not(instanceOf(ChangeLoggingKeyValueBytesStore.class)));
}
Also used : Bytes(org.apache.kafka.common.utils.Bytes) TimestampedKeyValueStoreMaterializer(org.apache.kafka.streams.kstream.internals.TimestampedKeyValueStoreMaterializer) WrappedStateStore(org.apache.kafka.streams.state.internals.WrappedStateStore) TimestampedKeyValueStore(org.apache.kafka.streams.state.TimestampedKeyValueStore) MeteredTimestampedKeyValueStore(org.apache.kafka.streams.state.internals.MeteredTimestampedKeyValueStore) StateStore(org.apache.kafka.streams.processor.StateStore) KeyValueStore(org.apache.kafka.streams.state.KeyValueStore) InMemoryKeyValueStore(org.apache.kafka.streams.state.internals.InMemoryKeyValueStore) CachingKeyValueStore(org.apache.kafka.streams.state.internals.CachingKeyValueStore) MaterializedInternal(org.apache.kafka.streams.kstream.internals.MaterializedInternal) Test(org.junit.Test)
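
MaterializedInternal, nameProvider and storePrefix are internal test scaffolding; in application code the equivalent store configuration (caching and change-logging disabled) is expressed through the public Materialized API. A minimal sketch, with a hypothetical topic name:

import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.utils.Bytes;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.kstream.KTable;
import org.apache.kafka.streams.kstream.Materialized;
import org.apache.kafka.streams.state.KeyValueStore;

final StreamsBuilder builder = new StreamsBuilder();

// Materialize the table without the caching and change-logging wrappers.
final KTable<String, String> table = builder.table(
    "input-topic",                                                       // hypothetical topic
    Materialized.<String, String, KeyValueStore<Bytes, byte[]>>as("store")
        .withKeySerde(Serdes.String())
        .withValueSerde(Serdes.String())
        .withCachingDisabled()
        .withLoggingDisabled());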

Example 54 with KeyValueStore

Use of org.apache.kafka.streams.state.KeyValueStore in project kafka by apache.

The class TimestampedKeyValueStoreMaterializerTest, method shouldCreateBuilderThatBuildsMeteredStoreWithCachingAndLoggingEnabled.

@Test
public void shouldCreateBuilderThatBuildsMeteredStoreWithCachingAndLoggingEnabled() {
    final MaterializedInternal<String, String, KeyValueStore<Bytes, byte[]>> materialized = new MaterializedInternal<>(Materialized.as("store"), nameProvider, storePrefix);
    final TimestampedKeyValueStoreMaterializer<String, String> materializer = new TimestampedKeyValueStoreMaterializer<>(materialized);
    final StoreBuilder<TimestampedKeyValueStore<String, String>> builder = materializer.materialize();
    final TimestampedKeyValueStore<String, String> store = builder.build();
    final WrappedStateStore caching = (WrappedStateStore) ((WrappedStateStore) store).wrapped();
    final StateStore logging = caching.wrapped();
    assertThat(store, instanceOf(MeteredTimestampedKeyValueStore.class));
    assertThat(caching, instanceOf(CachingKeyValueStore.class));
    assertThat(logging, instanceOf(ChangeLoggingTimestampedKeyValueBytesStore.class));
}
Also used : TimestampedKeyValueStoreMaterializer(org.apache.kafka.streams.kstream.internals.TimestampedKeyValueStoreMaterializer) CachingKeyValueStore(org.apache.kafka.streams.state.internals.CachingKeyValueStore) WrappedStateStore(org.apache.kafka.streams.state.internals.WrappedStateStore) StateStore(org.apache.kafka.streams.processor.StateStore) KeyValueStore(org.apache.kafka.streams.state.KeyValueStore) InMemoryKeyValueStore(org.apache.kafka.streams.state.internals.InMemoryKeyValueStore) TimestampedKeyValueStore(org.apache.kafka.streams.state.TimestampedKeyValueStore) MeteredTimestampedKeyValueStore(org.apache.kafka.streams.state.internals.MeteredTimestampedKeyValueStore) ChangeLoggingTimestampedKeyValueBytesStore(org.apache.kafka.streams.state.internals.ChangeLoggingTimestampedKeyValueBytesStore) MaterializedInternal(org.apache.kafka.streams.kstream.internals.MaterializedInternal) Test(org.junit.Test)
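
The assertions reflect the wrapping order of the built store: the metered store on the outside, then the caching layer, then the change-logging layer around the inner bytes store. A small sketch that peels the layers of the store built above; note that WrappedStateStore is an internal class, so this is only illustrative:

import org.apache.kafka.streams.processor.StateStore;
import org.apache.kafka.streams.state.internals.WrappedStateStore;

// Walk from the outermost wrapper down to the innermost store, printing each layer.
// For the store built above this prints MeteredTimestampedKeyValueStore, CachingKeyValueStore,
// ChangeLoggingTimestampedKeyValueBytesStore, and finally the inner bytes store.
StateStore layer = store;
while (layer instanceof WrappedStateStore) {
    System.out.println(layer.getClass().getSimpleName());
    layer = ((WrappedStateStore<?, ?, ?>) layer).wrapped();
}
System.out.println(layer.getClass().getSimpleName());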

Example 55 with KeyValueStore

Use of org.apache.kafka.streams.state.KeyValueStore in project kafka by apache.

The class TimestampedKeyValueStoreMaterializerTest, method shouldCreateBuilderThatBuildsStoreWithCachingDisabled.

@Test
public void shouldCreateBuilderThatBuildsStoreWithCachingDisabled() {
    final MaterializedInternal<String, String, KeyValueStore<Bytes, byte[]>> materialized = new MaterializedInternal<>(Materialized.<String, String, KeyValueStore<Bytes, byte[]>>as("store").withCachingDisabled(), nameProvider, storePrefix);
    final TimestampedKeyValueStoreMaterializer<String, String> materializer = new TimestampedKeyValueStoreMaterializer<>(materialized);
    final StoreBuilder<TimestampedKeyValueStore<String, String>> builder = materializer.materialize();
    final TimestampedKeyValueStore<String, String> store = builder.build();
    final WrappedStateStore logging = (WrappedStateStore) ((WrappedStateStore) store).wrapped();
    assertThat(logging, instanceOf(ChangeLoggingKeyValueBytesStore.class));
}
Also used : TimestampedKeyValueStoreMaterializer(org.apache.kafka.streams.kstream.internals.TimestampedKeyValueStoreMaterializer) WrappedStateStore(org.apache.kafka.streams.state.internals.WrappedStateStore) TimestampedKeyValueStore(org.apache.kafka.streams.state.TimestampedKeyValueStore) MeteredTimestampedKeyValueStore(org.apache.kafka.streams.state.internals.MeteredTimestampedKeyValueStore) ChangeLoggingKeyValueBytesStore(org.apache.kafka.streams.state.internals.ChangeLoggingKeyValueBytesStore) KeyValueStore(org.apache.kafka.streams.state.KeyValueStore) InMemoryKeyValueStore(org.apache.kafka.streams.state.internals.InMemoryKeyValueStore) CachingKeyValueStore(org.apache.kafka.streams.state.internals.CachingKeyValueStore) MaterializedInternal(org.apache.kafka.streams.kstream.internals.MaterializedInternal) Test(org.junit.Test)

Aggregations

KeyValueStore (org.apache.kafka.streams.state.KeyValueStore): 133
Test (org.junit.Test): 101
StreamsBuilder (org.apache.kafka.streams.StreamsBuilder): 54
KeyValue (org.apache.kafka.streams.KeyValue): 49
TopologyTestDriver (org.apache.kafka.streams.TopologyTestDriver): 47
Properties (java.util.Properties): 37
Bytes (org.apache.kafka.common.utils.Bytes): 36
StringSerializer (org.apache.kafka.common.serialization.StringSerializer): 32
CoreMatchers.containsString (org.hamcrest.CoreMatchers.containsString): 29
KafkaStreams (org.apache.kafka.streams.KafkaStreams): 28
Serdes (org.apache.kafka.common.serialization.Serdes): 26
Materialized (org.apache.kafka.streams.kstream.Materialized): 25
StreamsConfig (org.apache.kafka.streams.StreamsConfig): 24
IntegrationTest (org.apache.kafka.test.IntegrationTest): 21
KTable (org.apache.kafka.streams.kstream.KTable): 20
Consumed (org.apache.kafka.streams.kstream.Consumed): 19
StateStore (org.apache.kafka.streams.processor.StateStore): 17
ReadOnlyKeyValueStore (org.apache.kafka.streams.state.ReadOnlyKeyValueStore): 17
TestUtils (org.apache.kafka.test.TestUtils): 16
MatcherAssert.assertThat (org.hamcrest.MatcherAssert.assertThat): 16