Example 11 with ValueMapper

Use of org.apache.kafka.streams.kstream.ValueMapper in project apache-kafka-on-k8s by banzaicloud.

Class StreamsStandByReplicaTest, method main.

public static void main(final String[] args) throws IOException {
    System.out.println("StreamsTest instance started");
    final String kafka = args.length > 0 ? args[0] : "localhost:9092";
    final String propFileName = args.length > 1 ? args[1] : null;
    final String additionalConfigs = args.length > 2 ? args[2] : null;
    final Serde<String> stringSerde = Serdes.String();
    final Properties streamsProperties = Utils.loadProps(propFileName);
    streamsProperties.put(StreamsConfig.APPLICATION_ID_CONFIG, "kafka-streams-standby-tasks");
    streamsProperties.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, kafka);
    streamsProperties.put(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG, 100);
    streamsProperties.put(StreamsConfig.NUM_STANDBY_REPLICAS_CONFIG, 1);
    streamsProperties.put(StreamsConfig.CACHE_MAX_BYTES_BUFFERING_CONFIG, 0);
    streamsProperties.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
    streamsProperties.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass());
    streamsProperties.put(StreamsConfig.producerPrefix(ProducerConfig.ENABLE_IDEMPOTENCE_CONFIG), true);
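    // Topics and any extra client configs must be supplied via the third command-line argument.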
    if (additionalConfigs == null) {
        System.err.println("additional configs are not provided");
        System.err.flush();
        System.exit(1);
    }
    final Map<String, String> updated = SystemTestUtil.parseConfigs(additionalConfigs);
    System.out.println("Updating configs with " + updated);
    final String sourceTopic = updated.remove("sourceTopic");
    final String sinkTopic1 = updated.remove("sinkTopic1");
    final String sinkTopic2 = updated.remove("sinkTopic2");
    if (sourceTopic == null || sinkTopic1 == null || sinkTopic2 == null) {
        System.err.println(String.format("one or more required topics null sourceTopic[%s], sinkTopic1[%s], sinkTopic2[%s]", sourceTopic, sinkTopic1, sinkTopic2));
        System.err.flush();
        System.exit(1);
    }
    streamsProperties.putAll(updated);
    if (!confirmCorrectConfigs(streamsProperties)) {
        System.err.println(String.format("ERROR: Did not have all required configs expected  to contain %s, %s,  %s,  %s", StreamsConfig.consumerPrefix(ConsumerConfig.MAX_POLL_INTERVAL_MS_CONFIG), StreamsConfig.producerPrefix(ProducerConfig.RETRIES_CONFIG), StreamsConfig.producerPrefix(ProducerConfig.REQUEST_TIMEOUT_MS_CONFIG), StreamsConfig.producerPrefix(ProducerConfig.MAX_BLOCK_MS_CONFIG)));
        System.exit(1);
    }
    final StreamsBuilder builder = new StreamsBuilder();
    final String inMemoryStoreName = "in-memory-store";
    final String persistentMemoryStoreName = "persistent-memory-store";
    final KeyValueBytesStoreSupplier inMemoryStoreSupplier = Stores.inMemoryKeyValueStore(inMemoryStoreName);
    final KeyValueBytesStoreSupplier persistentStoreSupplier = Stores.persistentKeyValueStore(persistentMemoryStoreName);
    KStream<String, String> inputStream = builder.stream(sourceTopic, Consumed.with(stringSerde, stringSerde));
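    // count() emits Long values; this mapper converts them back to String for the String-serde sink topics.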
    ValueMapper<Long, String> countMapper = new ValueMapper<Long, String>() {

        @Override
        public String apply(final Long value) {
            return value.toString();
        }
    };
    inputStream.groupByKey()
        .count(Materialized.<String, Long>as(inMemoryStoreSupplier))
        .toStream()
        .mapValues(countMapper)
        .to(sinkTopic1, Produced.with(stringSerde, stringSerde));
    inputStream.groupByKey()
        .count(Materialized.<String, Long>as(persistentStoreSupplier))
        .toStream()
        .mapValues(countMapper)
        .to(sinkTopic2, Produced.with(stringSerde, stringSerde));
    final KafkaStreams streams = new KafkaStreams(builder.build(), streamsProperties);
    streams.setUncaughtExceptionHandler(new Thread.UncaughtExceptionHandler() {

        @Override
        public void uncaughtException(final Thread t, final Throwable e) {
            System.err.println("FATAL: An unexpected exception " + e);
            e.printStackTrace(System.err);
            System.err.flush();
            shutdown(streams);
        }
    });
    streams.setStateListener(new KafkaStreams.StateListener() {

        @Override
        public void onChange(final KafkaStreams.State newState, final KafkaStreams.State oldState) {
            if (newState == KafkaStreams.State.RUNNING && oldState == KafkaStreams.State.REBALANCING) {
                final Set<ThreadMetadata> threadMetadata = streams.localThreadsMetadata();
                for (final ThreadMetadata threadMetadatum : threadMetadata) {
                    System.out.println("ACTIVE_TASKS:" + threadMetadatum.activeTasks().size() + " STANDBY_TASKS:" + threadMetadatum.standbyTasks().size());
                }
            }
        }
    });
    System.out.println("Start Kafka Streams");
    streams.start();
    Runtime.getRuntime().addShutdownHook(new Thread(new Runnable() {

        @Override
        public void run() {
            shutdown(streams);
            System.out.println("Shut down streams now");
        }
    }));
}
Also used : KafkaStreams(org.apache.kafka.streams.KafkaStreams) Set(java.util.Set) ValueMapper(org.apache.kafka.streams.kstream.ValueMapper) Properties(java.util.Properties) StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) KeyValueBytesStoreSupplier(org.apache.kafka.streams.state.KeyValueBytesStoreSupplier) ThreadMetadata(org.apache.kafka.streams.processor.ThreadMetadata)
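
Since ValueMapper has a single abstract method, the anonymous class above collapses to a lambda or a method reference. A minimal sketch of the two equivalent forms, either of which is a drop-in replacement for countMapper (variable names here are illustrative):

final ValueMapper<Long, String> countMapper = value -> value.toString();   // lambda form
final ValueMapper<Long, String> countMapperRef = Object::toString;         // method-reference form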

Example 12 with ValueMapper

Use of org.apache.kafka.streams.kstream.ValueMapper in project kafka by apache.

Class QueryableStateIntegrationTest, method shouldBeAbleToQueryMapValuesAfterFilterState.

@Test
public void shouldBeAbleToQueryMapValuesAfterFilterState() throws Exception {
    streamsConfiguration.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
    streamsConfiguration.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass());
    final StreamsBuilder builder = new StreamsBuilder();
    final String[] keys = { "hello", "goodbye", "welcome", "go", "kafka" };
    final Set<KeyValue<String, String>> batch1 = new HashSet<>(Arrays.asList(
        new KeyValue<>(keys[0], "1"),
        new KeyValue<>(keys[1], "1"),
        new KeyValue<>(keys[2], "3"),
        new KeyValue<>(keys[3], "5"),
        new KeyValue<>(keys[4], "2")));
    final Set<KeyValue<String, Long>> expectedBatch1 = new HashSet<>(Collections.singleton(new KeyValue<>(keys[4], 2L)));
    IntegrationTestUtils.produceKeyValuesSynchronously(streamOne, batch1, TestUtils.producerConfig(CLUSTER.bootstrapServers(), StringSerializer.class, StringSerializer.class, new Properties()), mockTime);
    final Predicate<String, String> filterPredicate = (key, value) -> key.contains("kafka");
    final KTable<String, String> t1 = builder.table(streamOne);
    final KTable<String, String> t2 = t1.filter(filterPredicate, Materialized.as("queryFilter"));
    final KTable<String, Long> t3 = t2.mapValues(
        (ValueMapper<String, Long>) Long::valueOf,
        Materialized.<String, Long, KeyValueStore<Bytes, byte[]>>as("queryMapValues").withValueSerde(Serdes.Long()));
    t3.toStream().to(outputTopic, Produced.with(Serdes.String(), Serdes.Long()));
    kafkaStreams = new KafkaStreams(builder.build(), streamsConfiguration);
    startKafkaStreamsAndWaitForRunningState(kafkaStreams);
    waitUntilAtLeastNumRecordProcessed(outputTopic, 1);
    final ReadOnlyKeyValueStore<String, Long> myMapStore = IntegrationTestUtils.getStore("queryMapValues", kafkaStreams, keyValueStore());
    for (final KeyValue<String, Long> expectedEntry : expectedBatch1) {
        assertEquals(expectedEntry.value, myMapStore.get(expectedEntry.key));
    }
    for (final KeyValue<String, String> batchEntry : batch1) {
        final KeyValue<String, Long> batchEntryMapValue = new KeyValue<>(batchEntry.key, Long.valueOf(batchEntry.value));
        if (!expectedBatch1.contains(batchEntryMapValue)) {
            assertNull(myMapStore.get(batchEntry.key));
        }
    }
}
Also used : KafkaStreams(org.apache.kafka.streams.KafkaStreams) StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) StreamsConfig(org.apache.kafka.streams.StreamsConfig) KeyValue(org.apache.kafka.streams.KeyValue) KTable(org.apache.kafka.streams.kstream.KTable) Predicate(org.apache.kafka.streams.kstream.Predicate) ValueMapper(org.apache.kafka.streams.kstream.ValueMapper) Materialized(org.apache.kafka.streams.kstream.Materialized) Produced(org.apache.kafka.streams.kstream.Produced) KeyValueStore(org.apache.kafka.streams.state.KeyValueStore) ReadOnlyKeyValueStore(org.apache.kafka.streams.state.ReadOnlyKeyValueStore) QueryableStoreTypes.keyValueStore(org.apache.kafka.streams.state.QueryableStoreTypes.keyValueStore) StoreQueryParameters.fromNameAndType(org.apache.kafka.streams.StoreQueryParameters.fromNameAndType) IntegrationTestUtils(org.apache.kafka.streams.integration.utils.IntegrationTestUtils) StreamsTestUtils.startKafkaStreamsAndWaitForRunningState(org.apache.kafka.test.StreamsTestUtils.startKafkaStreamsAndWaitForRunningState) TestUtils(org.apache.kafka.test.TestUtils) Serdes(org.apache.kafka.common.serialization.Serdes) StringSerializer(org.apache.kafka.common.serialization.StringSerializer) Bytes(org.apache.kafka.common.utils.Bytes) Properties(java.util.Properties) Set(java.util.Set) HashSet(java.util.HashSet) Arrays(java.util.Arrays) Collections(java.util.Collections) Assert.assertEquals(org.junit.Assert.assertEquals) Assert.assertNull(org.junit.Assert.assertNull) Test(org.junit.Test)
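
IntegrationTestUtils.getStore is a test helper that retries until the store becomes queryable; outside the test utilities, the same lookup goes through the interactive-query API. A minimal sketch, assuming kafkaStreams has reached the RUNNING state (store() throws InvalidStateStoreException otherwise) and reusing the store name and types from the test above:

final ReadOnlyKeyValueStore<String, Long> myMapStore = kafkaStreams.store(
    StoreQueryParameters.fromNameAndType("queryMapValues", QueryableStoreTypes.<String, Long>keyValueStore()));
final Long count = myMapStore.get("kafka");   // null when the key is absent or was filtered out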

Example 13 with ValueMapper

Use of org.apache.kafka.streams.kstream.ValueMapper in project kafka by apache.

Class QueryableStateIntegrationTest, method createCountStream.

/**
 * Creates a typical word count topology
 */
private KafkaStreams createCountStream(final String inputTopic, final String outputTopic, final String windowOutputTopic, final String storeName, final String windowStoreName, final Properties streamsConfiguration) {
    final StreamsBuilder builder = new StreamsBuilder();
    final Serde<String> stringSerde = Serdes.String();
    final KStream<String, String> textLines = builder.stream(inputTopic, Consumed.with(stringSerde, stringSerde));
    final KGroupedStream<String, String> groupedByWord = textLines.flatMapValues((ValueMapper<String, Iterable<String>>) value -> Arrays.asList(value.split("\\W+"))).groupBy(MockMapper.selectValueMapper());
    // Create a state store for the all-time word count
    groupedByWord.count(Materialized.as(storeName + "-" + inputTopic))
        .toStream()
        .to(outputTopic, Produced.with(Serdes.String(), Serdes.Long()));
    // Create a windowed state store with the word count for each one-minute window
    groupedByWord.windowedBy(TimeWindows.of(ofMillis(WINDOW_SIZE)))
        .count(Materialized.as(windowStoreName + "-" + inputTopic))
        .toStream((key, value) -> key.key())
        .to(windowOutputTopic, Produced.with(Serdes.String(), Serdes.Long()));
    return new KafkaStreams(builder.build(), streamsConfiguration);
}
Also used : KafkaStreams(org.apache.kafka.streams.KafkaStreams) StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) KStream(org.apache.kafka.streams.kstream.KStream) KGroupedStream(org.apache.kafka.streams.kstream.KGroupedStream) Consumed(org.apache.kafka.streams.kstream.Consumed) ValueMapper(org.apache.kafka.streams.kstream.ValueMapper) Materialized(org.apache.kafka.streams.kstream.Materialized) Produced(org.apache.kafka.streams.kstream.Produced) TimeWindows(org.apache.kafka.streams.kstream.TimeWindows) Serde(org.apache.kafka.common.serialization.Serde) Serdes(org.apache.kafka.common.serialization.Serdes) MockMapper(org.apache.kafka.test.MockMapper) Duration.ofMillis(java.time.Duration.ofMillis) Arrays(java.util.Arrays) Properties(java.util.Properties)
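
The explicit ValueMapper cast in flatMapValues mainly guides the compiler's type inference. With an explicit key selector in place of MockMapper.selectValueMapper(), the split-and-group step can be written without it; a minimal sketch, assuming the textLines stream from the method above and Kafka Streams 2.1+ for Grouped.with:

final KGroupedStream<String, String> groupedByWord = textLines
    .flatMapValues(value -> Arrays.asList(value.split("\\W+")))   // one output record per word
    .groupBy((key, word) -> word, Grouped.with(Serdes.String(), Serdes.String()));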

Example 14 with ValueMapper

Use of org.apache.kafka.streams.kstream.ValueMapper in project kafka by apache.

Class KTableKTableLeftJoinTest, method shouldNotThrowIllegalStateExceptionWhenMultiCacheEvictions.

/**
 * This test was written to reproduce https://issues.apache.org/jira/browse/KAFKA-4492
 * It is based on a fairly complicated join used by the developer that reported the bug.
 * Before the fix this would trigger an IllegalStateException.
 */
@Test
public void shouldNotThrowIllegalStateExceptionWhenMultiCacheEvictions() {
    final String agg = "agg";
    final String tableOne = "tableOne";
    final String tableTwo = "tableTwo";
    final String tableThree = "tableThree";
    final String tableFour = "tableFour";
    final String tableFive = "tableFive";
    final String tableSix = "tableSix";
    final String[] inputs = { agg, tableOne, tableTwo, tableThree, tableFour, tableFive, tableSix };
    final StreamsBuilder builder = new StreamsBuilder();
    final Consumed<Long, String> consumed = Consumed.with(Serdes.Long(), Serdes.String());
    final KTable<Long, String> aggTable = builder
        .table(agg, consumed, Materialized.as(Stores.inMemoryKeyValueStore("agg-base-store")))
        .groupBy(KeyValue::new, Grouped.with(Serdes.Long(), Serdes.String()))
        .reduce(MockReducer.STRING_ADDER, MockReducer.STRING_ADDER, Materialized.as(Stores.inMemoryKeyValueStore("agg-store")));
    final KTable<Long, String> one = builder.table(tableOne, consumed, Materialized.as(Stores.inMemoryKeyValueStore("tableOne-base-store")));
    final KTable<Long, String> two = builder.table(tableTwo, consumed, Materialized.as(Stores.inMemoryKeyValueStore("tableTwo-base-store")));
    final KTable<Long, String> three = builder.table(tableThree, consumed, Materialized.as(Stores.inMemoryKeyValueStore("tableThree-base-store")));
    final KTable<Long, String> four = builder.table(tableFour, consumed, Materialized.as(Stores.inMemoryKeyValueStore("tableFour-base-store")));
    final KTable<Long, String> five = builder.table(tableFive, consumed, Materialized.as(Stores.inMemoryKeyValueStore("tableFive-base-store")));
    final KTable<Long, String> six = builder.table(tableSix, consumed, Materialized.as(Stores.inMemoryKeyValueStore("tableSix-base-store")));
    final ValueMapper<String, String> mapper = value -> value.toUpperCase(Locale.ROOT);
    final KTable<Long, String> seven = one.mapValues(mapper);
    final KTable<Long, String> eight = six.leftJoin(seven, MockValueJoiner.TOSTRING_JOINER);
    aggTable.leftJoin(one, MockValueJoiner.TOSTRING_JOINER)
        .leftJoin(two, MockValueJoiner.TOSTRING_JOINER)
        .leftJoin(three, MockValueJoiner.TOSTRING_JOINER)
        .leftJoin(four, MockValueJoiner.TOSTRING_JOINER)
        .leftJoin(five, MockValueJoiner.TOSTRING_JOINER)
        .leftJoin(eight, MockValueJoiner.TOSTRING_JOINER)
        .mapValues(mapper);
    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        final String[] values = { "a", "AA", "BBB", "CCCC", "DD", "EEEEEEEE", "F", "GGGGGGGGGGGGGGG", "HHH", "IIIIIIIIII", "J", "KK", "LLLL", "MMMMMMMMMMMMMMMMMMMMMM", "NNNNN", "O", "P", "QQQQQ", "R", "SSSS", "T", "UU", "VVVVVVVVVVVVVVVVVVV" };
        TestInputTopic<Long, String> inputTopic;
        final Random random = new Random();
        for (int i = 0; i < 1000; i++) {
            for (final String input : inputs) {
                final Long key = (long) random.nextInt(1000);
                final String value = values[random.nextInt(values.length)];
                inputTopic = driver.createInputTopic(input, Serdes.Long().serializer(), Serdes.String().serializer());
                inputTopic.pipeInput(key, value);
            }
        }
    }
}
Also used : StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) KTable(org.apache.kafka.streams.kstream.KTable) Consumed(org.apache.kafka.streams.kstream.Consumed) Grouped(org.apache.kafka.streams.kstream.Grouped) Materialized(org.apache.kafka.streams.kstream.Materialized) ValueMapper(org.apache.kafka.streams.kstream.ValueMapper) KeyValue(org.apache.kafka.streams.KeyValue) Stores(org.apache.kafka.streams.state.Stores) TopologyTestDriver(org.apache.kafka.streams.TopologyTestDriver) TestInputTopic(org.apache.kafka.streams.TestInputTopic) Serdes(org.apache.kafka.common.serialization.Serdes) MockReducer(org.apache.kafka.test.MockReducer) MockValueJoiner(org.apache.kafka.test.MockValueJoiner) Locale(java.util.Locale) Random(java.util.Random) Properties(java.util.Properties) Test(org.junit.Test)
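
The loop above only verifies that the topology does not throw; TopologyTestDriver can also assert concrete outputs. A minimal, self-contained sketch of that pattern with a single mapValues step (the topic names are made up; props is the same test configuration used above, and the test utilities assume Kafka Streams 2.4+):

final StreamsBuilder builder = new StreamsBuilder();
builder.stream("words-in", Consumed.with(Serdes.Long(), Serdes.String()))
    .mapValues((ValueMapper<String, String>) value -> value.toUpperCase(Locale.ROOT))
    .to("words-out", Produced.with(Serdes.Long(), Serdes.String()));
try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
    final TestInputTopic<Long, String> in =
        driver.createInputTopic("words-in", Serdes.Long().serializer(), Serdes.String().serializer());
    final TestOutputTopic<Long, String> out =
        driver.createOutputTopic("words-out", Serdes.Long().deserializer(), Serdes.String().deserializer());
    in.pipeInput(1L, "hello");
    assertEquals("HELLO", out.readValue());
}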

Example 15 with ValueMapper

Use of org.apache.kafka.streams.kstream.ValueMapper in project kafka by apache.

Class KTableMapValuesTest, method testSendingOldValue.

@Test
public void testSendingOldValue() throws IOException {
    final KStreamBuilder builder = new KStreamBuilder();
    final String topic1 = "topic1";
    KTableImpl<String, String, String> table1 = (KTableImpl<String, String, String>) builder.table(stringSerde, stringSerde, topic1, "anyStoreName");
    KTableImpl<String, String, Integer> table2 = (KTableImpl<String, String, Integer>) table1.mapValues(new ValueMapper<String, Integer>() {

        @Override
        public Integer apply(final String value) {
            return Integer.valueOf(value);   // new Integer(String) is deprecated
        }
    });
    table2.enableSendingOldValues();
    MockProcessorSupplier<String, Integer> proc = new MockProcessorSupplier<>();
    builder.addProcessor("proc", proc, table2.name);
    driver = new KStreamTestDriver(builder, stateDir, null, null);
    assertTrue(table1.sendingOldValueEnabled());
    assertTrue(table2.sendingOldValueEnabled());
    driver.process(topic1, "A", "01");
    driver.process(topic1, "B", "01");
    driver.process(topic1, "C", "01");
    driver.flushState();
    proc.checkAndClearProcessResult("A:(1<-null)", "B:(1<-null)", "C:(1<-null)");
    driver.process(topic1, "A", "02");
    driver.process(topic1, "B", "02");
    driver.flushState();
    proc.checkAndClearProcessResult("A:(2<-1)", "B:(2<-1)");
    driver.process(topic1, "A", "03");
    driver.flushState();
    proc.checkAndClearProcessResult("A:(3<-2)");
    driver.process(topic1, "A", null);
    driver.flushState();
    proc.checkAndClearProcessResult("A:(null<-3)");
}
Also used : KStreamBuilder(org.apache.kafka.streams.kstream.KStreamBuilder) KStreamTestDriver(org.apache.kafka.test.KStreamTestDriver) ValueMapper(org.apache.kafka.streams.kstream.ValueMapper) MockProcessorSupplier(org.apache.kafka.test.MockProcessorSupplier) Test(org.junit.Test)
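
KStreamBuilder, KStreamTestDriver, and MockProcessorSupplier come from a much older Kafka Streams release and are gone from current ones; the old/new-value assertions above also relied on internal test hooks. A rough modern equivalent of the same mapValues materialization, verified through TopologyTestDriver and its store accessor, might look like this (a sketch only: topic and store names are hypothetical, a props config is assumed to exist, and only the latest materialized value is checked):

final StreamsBuilder builder = new StreamsBuilder();
final KTable<String, String> table1 =
    builder.table("topic1", Consumed.with(Serdes.String(), Serdes.String()));
final KTable<String, Integer> table2 = table1.mapValues(
    (ValueMapper<String, Integer>) Integer::valueOf,
    Materialized.<String, Integer, KeyValueStore<Bytes, byte[]>>as("mapped-store")
        .withValueSerde(Serdes.Integer()));
try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
    final TestInputTopic<String, String> input =
        driver.createInputTopic("topic1", Serdes.String().serializer(), Serdes.String().serializer());
    input.pipeInput("A", "01");
    input.pipeInput("A", "02");   // the second write updates the materialized value from 1 to 2
    final KeyValueStore<String, Integer> store = driver.getKeyValueStore("mapped-store");
    assertEquals(Integer.valueOf(2), store.get("A"));
}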

Aggregations

ValueMapper (org.apache.kafka.streams.kstream.ValueMapper)27 Test (org.junit.Test)23 StreamsBuilder (org.apache.kafka.streams.StreamsBuilder)21 Properties (java.util.Properties)12 Predicate (org.apache.kafka.streams.kstream.Predicate)10 ArrayList (java.util.ArrayList)7 Serdes (org.apache.kafka.common.serialization.Serdes)7 Bytes (org.apache.kafka.common.utils.Bytes)7 KeyValue (org.apache.kafka.streams.KeyValue)7 Consumed (org.apache.kafka.streams.kstream.Consumed)7 Duration (java.time.Duration)6 List (java.util.List)6 KafkaStreams (org.apache.kafka.streams.KafkaStreams)6 KTable (org.apache.kafka.streams.kstream.KTable)6 Materialized (org.apache.kafka.streams.kstream.Materialized)6 MatcherAssert.assertThat (org.hamcrest.MatcherAssert.assertThat)6 MockProcessorSupplier (org.apache.kafka.test.MockProcessorSupplier)5 Set (java.util.Set)4 KeyValueTimestamp (org.apache.kafka.streams.KeyValueTimestamp)4 TestInputTopic (org.apache.kafka.streams.TestInputTopic)4