
Example 6 with KStreamBuilder

Use of org.apache.kafka.streams.kstream.KStreamBuilder in project kafka by apache.

The class InternalTopicIntegrationTest, method shouldCompactTopicsForStateChangelogs.

@Test
public void shouldCompactTopicsForStateChangelogs() throws Exception {
    //
    // Step 1: Configure and start a simple word count topology
    //
    final Serde<String> stringSerde = Serdes.String();
    final Serde<Long> longSerde = Serdes.Long();
    final Properties streamsConfiguration = new Properties();
    streamsConfiguration.put(StreamsConfig.APPLICATION_ID_CONFIG, "compact-topics-integration-test");
    streamsConfiguration.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers());
    streamsConfiguration.put(StreamsConfig.KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
    streamsConfiguration.put(StreamsConfig.VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
    streamsConfiguration.put(StreamsConfig.STATE_DIR_CONFIG, TestUtils.tempDirectory().getPath());
    streamsConfiguration.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    final KStreamBuilder builder = new KStreamBuilder();
    final KStream<String, String> textLines = builder.stream(DEFAULT_INPUT_TOPIC);
    final KStream<String, Long> wordCounts = textLines.flatMapValues(new ValueMapper<String, Iterable<String>>() {

        @Override
        public Iterable<String> apply(final String value) {
            return Arrays.asList(value.toLowerCase(Locale.getDefault()).split("\\W+"));
        }
    }).groupBy(MockKeyValueMapper.<String, String>SelectValueMapper()).count("Counts").toStream();
    wordCounts.to(stringSerde, longSerde, DEFAULT_OUTPUT_TOPIC);
    // Remove any state from previous test runs
    IntegrationTestUtils.purgeLocalStreamsState(streamsConfiguration);
    final KafkaStreams streams = new KafkaStreams(builder, streamsConfiguration);
    streams.start();
    //
    // Step 2: Produce some input data to the input topic.
    //
    produceData(Arrays.asList("hello", "world", "world", "hello world"));
    //
    // Step 3: Verify that the state changelog topics are configured with the compact cleanup policy
    //
    streams.close();
    // applicationId matches the application id configured in Step 1 ("compact-topics-integration-test")
    final Properties properties = getTopicConfigProperties(ProcessorStateManager.storeChangelogTopic(applicationId, "Counts"));
    assertEquals(LogConfig.Compact(), properties.getProperty(LogConfig.CleanupPolicyProp()));
}
Also used : KStreamBuilder(org.apache.kafka.streams.kstream.KStreamBuilder) KafkaStreams(org.apache.kafka.streams.KafkaStreams) MockKeyValueMapper(org.apache.kafka.test.MockKeyValueMapper) ValueMapper(org.apache.kafka.streams.kstream.ValueMapper) Properties(java.util.Properties) Test(org.junit.Test)
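
The produceData helper called in Step 2 is not shown in this excerpt. A minimal sketch of what such a helper might look like, assuming it simply writes the given String values as unkeyed records to DEFAULT_INPUT_TOPIC with a plain KafkaProducer (the real test delegates to the project's IntegrationTestUtils; org.apache.kafka.clients.producer and StringSerializer imports are assumed):

private void produceData(final List<String> values) {
    // Illustrative only: send each value as an unkeyed String record to the input topic.
    final Properties producerConfig = new Properties();
    producerConfig.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers());
    producerConfig.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
    producerConfig.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
    try (final KafkaProducer<String, String> producer = new KafkaProducer<>(producerConfig)) {
        for (final String value : values) {
            producer.send(new ProducerRecord<String, String>(DEFAULT_INPUT_TOPIC, value));
        }
        producer.flush();
    }
}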

Example 7 with KStreamBuilder

Use of org.apache.kafka.streams.kstream.KStreamBuilder in project kafka by apache.

The class QueryableStateIntegrationTest, method createCountStream.

/**
 * Creates a typical word count topology.
 *
 * @param inputTopic           the topic to read text lines from
 * @param outputTopic          the topic the all-time word counts are written to
 * @param streamsConfiguration the streams configuration to use
 * @return an unstarted KafkaStreams instance for the word count topology
 */
private KafkaStreams createCountStream(final String inputTopic, final String outputTopic, final Properties streamsConfiguration) {
    final KStreamBuilder builder = new KStreamBuilder();
    final Serde<String> stringSerde = Serdes.String();
    final KStream<String, String> textLines = builder.stream(stringSerde, stringSerde, inputTopic);
    final KGroupedStream<String, String> groupedByWord = textLines.flatMapValues(new ValueMapper<String, Iterable<String>>() {

        @Override
        public Iterable<String> apply(final String value) {
            return Arrays.asList(value.toLowerCase(Locale.getDefault()).split("\\W+"));
        }
    }).groupBy(MockKeyValueMapper.<String, String>SelectValueMapper());
    // Create a State Store for the all time word count
    groupedByWord.count("word-count-store-" + inputTopic).to(Serdes.String(), Serdes.Long(), outputTopic);
    // Create a Windowed State Store that contains the word count for every 1 minute
    groupedByWord.count(TimeWindows.of(WINDOW_SIZE), "windowed-word-count-store-" + inputTopic);
    return new KafkaStreams(builder, streamsConfiguration);
}
Also used : KStreamBuilder(org.apache.kafka.streams.kstream.KStreamBuilder) KafkaStreams(org.apache.kafka.streams.KafkaStreams) MockKeyValueMapper(org.apache.kafka.test.MockKeyValueMapper) ValueMapper(org.apache.kafka.streams.kstream.ValueMapper)
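
Since this topology backs QueryableStateIntegrationTest, the two stores it materializes can be read back through interactive queries once the returned KafkaStreams instance is running. A minimal sketch, assuming streams is the running instance returned by createCountStream and that retry handling for InvalidStateStoreException is omitted:

// Query the all-time counts store built by count("word-count-store-" + inputTopic).
final ReadOnlyKeyValueStore<String, Long> allTimeCounts =
    streams.store("word-count-store-" + inputTopic, QueryableStoreTypes.<String, Long>keyValueStore());
final Long helloCount = allTimeCounts.get("hello");

// Query the windowed store built by count(TimeWindows.of(WINDOW_SIZE), ...).
final ReadOnlyWindowStore<String, Long> windowedCounts =
    streams.store("windowed-word-count-store-" + inputTopic, QueryableStoreTypes.<String, Long>windowStore());
final WindowStoreIterator<Long> helloWindows =
    windowedCounts.fetch("hello", 0L, System.currentTimeMillis());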

Example 8 with KStreamBuilder

Use of org.apache.kafka.streams.kstream.KStreamBuilder in project kafka by apache.

The class RegexSourceIntegrationTest, method testRegexMatchesTopicsAWhenDeleted.

@Test
public void testRegexMatchesTopicsAWhenDeleted() throws Exception {
    final Serde<String> stringSerde = Serdes.String();
    final List<String> expectedFirstAssignment = Arrays.asList("TEST-TOPIC-A", "TEST-TOPIC-B");
    final List<String> expectedSecondAssignment = Arrays.asList("TEST-TOPIC-B");
    final StreamsConfig streamsConfig = new StreamsConfig(streamsConfiguration);
    CLUSTER.createTopic("TEST-TOPIC-A");
    CLUSTER.createTopic("TEST-TOPIC-B");
    final KStreamBuilder builder = new KStreamBuilder();
    final KStream<String, String> pattern1Stream = builder.stream(Pattern.compile("TEST-TOPIC-[A-Z]"));
    pattern1Stream.to(stringSerde, stringSerde, DEFAULT_OUTPUT_TOPIC);
    final KafkaStreams streams = new KafkaStreams(builder, streamsConfiguration);
    final Field streamThreadsField = streams.getClass().getDeclaredField("threads");
    streamThreadsField.setAccessible(true);
    final StreamThread[] streamThreads = (StreamThread[]) streamThreadsField.get(streams);
    final StreamThread originalThread = streamThreads[0];
    final TestStreamThread testStreamThread = new TestStreamThread(builder, streamsConfig, new DefaultKafkaClientSupplier(), originalThread.applicationId, originalThread.clientId, originalThread.processId, new Metrics(), Time.SYSTEM);
    streamThreads[0] = testStreamThread;
    final TestCondition bothTopicsAdded = new TestCondition() {

        @Override
        public boolean conditionMet() {
            return testStreamThread.assignedTopicPartitions.equals(expectedFirstAssignment);
        }
    };
    streams.start();
    TestUtils.waitForCondition(bothTopicsAdded, STREAM_TASKS_NOT_UPDATED);
    CLUSTER.deleteTopic("TEST-TOPIC-A");
    final TestCondition oneTopicRemoved = new TestCondition() {

        @Override
        public boolean conditionMet() {
            return testStreamThread.assignedTopicPartitions.equals(expectedSecondAssignment);
        }
    };
    TestUtils.waitForCondition(oneTopicRemoved, STREAM_TASKS_NOT_UPDATED);
    streams.close();
}
Also used : KStreamBuilder(org.apache.kafka.streams.kstream.KStreamBuilder) KafkaStreams(org.apache.kafka.streams.KafkaStreams) DefaultKafkaClientSupplier(org.apache.kafka.streams.processor.internals.DefaultKafkaClientSupplier) StreamThread(org.apache.kafka.streams.processor.internals.StreamThread) Field(java.lang.reflect.Field) Metrics(org.apache.kafka.common.metrics.Metrics) TestCondition(org.apache.kafka.test.TestCondition) StreamsConfig(org.apache.kafka.streams.StreamsConfig) Test(org.junit.Test)
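
Outside of the test harness, the same pattern-based subscription needs nothing more than a builder and a compiled regex. A minimal standalone sketch, with illustrative application id, bootstrap servers, and topic names; the serde-aware stream(Serde, Serde, Pattern) overload is used here so the sketch does not depend on the default serde configuration:

// Sketch: consume from every topic whose name matches the pattern and forward to one output topic.
final Properties config = new Properties();
config.put(StreamsConfig.APPLICATION_ID_CONFIG, "regex-source-sketch");
config.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
final KStreamBuilder builder = new KStreamBuilder();
final KStream<String, String> matched =
    builder.stream(Serdes.String(), Serdes.String(), Pattern.compile("TEST-TOPIC-[A-Z]"));
matched.to(Serdes.String(), Serdes.String(), "regex-source-output");
final KafkaStreams streams = new KafkaStreams(builder, config);
streams.start();
// ... topics created or deleted later that match the pattern are picked up on rebalance ...
streams.close();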

Example 9 with KStreamBuilder

Use of org.apache.kafka.streams.kstream.KStreamBuilder in project kafka by apache.

The class RegexSourceIntegrationTest, method testRegexMatchesTopicsAWhenCreated.

@Test
public void testRegexMatchesTopicsAWhenCreated() throws Exception {
    final Serde<String> stringSerde = Serdes.String();
    final List<String> expectedFirstAssignment = Arrays.asList("TEST-TOPIC-1");
    final List<String> expectedSecondAssignment = Arrays.asList("TEST-TOPIC-1", "TEST-TOPIC-2");
    final StreamsConfig streamsConfig = new StreamsConfig(streamsConfiguration);
    CLUSTER.createTopic("TEST-TOPIC-1");
    final KStreamBuilder builder = new KStreamBuilder();
    final KStream<String, String> pattern1Stream = builder.stream(Pattern.compile("TEST-TOPIC-\\d"));
    pattern1Stream.to(stringSerde, stringSerde, DEFAULT_OUTPUT_TOPIC);
    final KafkaStreams streams = new KafkaStreams(builder, streamsConfiguration);
    final Field streamThreadsField = streams.getClass().getDeclaredField("threads");
    streamThreadsField.setAccessible(true);
    final StreamThread[] streamThreads = (StreamThread[]) streamThreadsField.get(streams);
    final StreamThread originalThread = streamThreads[0];
    final TestStreamThread testStreamThread = new TestStreamThread(builder, streamsConfig, new DefaultKafkaClientSupplier(), originalThread.applicationId, originalThread.clientId, originalThread.processId, new Metrics(), Time.SYSTEM);
    final TestCondition oneTopicAdded = new TestCondition() {

        @Override
        public boolean conditionMet() {
            return testStreamThread.assignedTopicPartitions.equals(expectedFirstAssignment);
        }
    };
    streamThreads[0] = testStreamThread;
    streams.start();
    TestUtils.waitForCondition(oneTopicAdded, STREAM_TASKS_NOT_UPDATED);
    CLUSTER.createTopic("TEST-TOPIC-2");
    final TestCondition secondTopicAdded = new TestCondition() {

        @Override
        public boolean conditionMet() {
            return testStreamThread.assignedTopicPartitions.equals(expectedSecondAssignment);
        }
    };
    TestUtils.waitForCondition(secondTopicAdded, STREAM_TASKS_NOT_UPDATED);
    streams.close();
}
Also used : KStreamBuilder(org.apache.kafka.streams.kstream.KStreamBuilder) KafkaStreams(org.apache.kafka.streams.KafkaStreams) DefaultKafkaClientSupplier(org.apache.kafka.streams.processor.internals.DefaultKafkaClientSupplier) StreamThread(org.apache.kafka.streams.processor.internals.StreamThread) Field(java.lang.reflect.Field) Metrics(org.apache.kafka.common.metrics.Metrics) TestCondition(org.apache.kafka.test.TestCondition) StreamsConfig(org.apache.kafka.streams.StreamsConfig) Test(org.junit.Test)
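
TestCondition has a single conditionMet() method, so on a Java 8 toolchain the anonymous classes in these two regex tests collapse to lambdas. A sketch of the same wait, assuming nothing else in the test changes:

TestUtils.waitForCondition(
    () -> testStreamThread.assignedTopicPartitions.equals(expectedSecondAssignment),
    STREAM_TASKS_NOT_UPDATED);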

Example 10 with KStreamBuilder

Use of org.apache.kafka.streams.kstream.KStreamBuilder in project kafka by apache.

The class ResetIntegrationTest, method setupTopologyWithIntermediateUserTopic.

private KStreamBuilder setupTopologyWithIntermediateUserTopic(final String outputTopic2) {
    final KStreamBuilder builder = new KStreamBuilder();
    final KStream<Long, String> input = builder.stream(INPUT_TOPIC);
    // use map to trigger internal re-partitioning before groupByKey
    input.map(new KeyValueMapper<Long, String, KeyValue<Long, String>>() {

        @Override
        public KeyValue<Long, String> apply(final Long key, final String value) {
            return new KeyValue<>(key, value);
        }
    }).groupByKey().count("global-count").to(Serdes.Long(), Serdes.Long(), OUTPUT_TOPIC);
    input.through(INTERMEDIATE_USER_TOPIC).groupByKey().count(TimeWindows.of(35).advanceBy(10), "count").toStream().map(new KeyValueMapper<Windowed<Long>, Long, KeyValue<Long, Long>>() {

        @Override
        public KeyValue<Long, Long> apply(final Windowed<Long> key, final Long value) {
            return new KeyValue<>(key.window().start() + key.window().end(), value);
        }
    }).to(Serdes.Long(), Serdes.Long(), outputTopic2);
    return builder;
}
Also used : KStreamBuilder(org.apache.kafka.streams.kstream.KStreamBuilder) Windowed(org.apache.kafka.streams.kstream.Windowed) KeyValue(org.apache.kafka.streams.KeyValue) KeyValueMapper(org.apache.kafka.streams.kstream.KeyValueMapper)
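
ResetIntegrationTest drives this topology together with the application reset tool. The local half of a reset, wiping the application's state directory before a re-run, can be sketched as follows within the test class; the application id and the output topic name passed to setupTopologyWithIntermediateUserTopic are illustrative:

// Sketch: rebuild the topology, wipe local state, then restart from scratch.
final Properties config = new Properties();
config.put(StreamsConfig.APPLICATION_ID_CONFIG, "reset-integration-sketch");
config.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers());
config.put(StreamsConfig.KEY_SERDE_CLASS_CONFIG, Serdes.Long().getClass().getName());
config.put(StreamsConfig.VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
final KafkaStreams streams = new KafkaStreams(setupTopologyWithIntermediateUserTopic("outputTopic2"), config);
streams.cleanUp(); // deletes this application id's local state directory; only valid while the instance is not running
streams.start();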

Aggregations

KStreamBuilder (org.apache.kafka.streams.kstream.KStreamBuilder) 122
Test (org.junit.Test) 95
KStreamTestDriver (org.apache.kafka.test.KStreamTestDriver) 60
Properties (java.util.Properties) 31
MockProcessorSupplier (org.apache.kafka.test.MockProcessorSupplier) 25
KafkaStreams (org.apache.kafka.streams.KafkaStreams) 23
HashSet (java.util.HashSet) 21
Set (java.util.Set) 19
KeyValue (org.apache.kafka.streams.KeyValue) 19
HashMap (java.util.HashMap) 14
Metrics (org.apache.kafka.common.metrics.Metrics) 13
StreamsConfig (org.apache.kafka.streams.StreamsConfig) 13
KeyValueMapper (org.apache.kafka.streams.kstream.KeyValueMapper) 13
ValueMapper (org.apache.kafka.streams.kstream.ValueMapper) 13
TopicPartition (org.apache.kafka.common.TopicPartition) 11
Predicate (org.apache.kafka.streams.kstream.Predicate) 10
TaskId (org.apache.kafka.streams.processor.TaskId) 9
MockKeyValueMapper (org.apache.kafka.test.MockKeyValueMapper) 9
ArrayList (java.util.ArrayList) 8
Before (org.junit.Before) 8