Example 31 with KafkaStreams

Use of org.apache.kafka.streams.KafkaStreams in project kafka by Apache.

The class RegexSourceIntegrationTest, method testNoMessagesSentExceptionFromOverlappingPatterns.

// TODO should be updated to expected = TopologyBuilderException after KAFKA-3708
@Test(expected = AssertionError.class)
public void testNoMessagesSentExceptionFromOverlappingPatterns() throws Exception {
    final String fooMessage = "fooMessage";
    final String fMessage = "fMessage";
    final Serde<String> stringSerde = Serdes.String();
    final KStreamBuilder builder = new KStreamBuilder();
    // overlapping patterns here, no messages should be sent as TopologyBuilderException
    // will be thrown when the processor topology is built.
    final KStream<String, String> pattern1Stream = builder.stream(Pattern.compile("foo.*"));
    final KStream<String, String> pattern2Stream = builder.stream(Pattern.compile("f.*"));
    pattern1Stream.to(stringSerde, stringSerde, DEFAULT_OUTPUT_TOPIC);
    pattern2Stream.to(stringSerde, stringSerde, DEFAULT_OUTPUT_TOPIC);
    final KafkaStreams streams = new KafkaStreams(builder, streamsConfiguration);
    streams.start();
    final Properties producerConfig = TestUtils.producerConfig(CLUSTER.bootstrapServers(), StringSerializer.class, StringSerializer.class);
    IntegrationTestUtils.produceValuesSynchronously(FA_TOPIC, Arrays.asList(fMessage), producerConfig, mockTime);
    IntegrationTestUtils.produceValuesSynchronously(FOO_TOPIC, Arrays.asList(fooMessage), producerConfig, mockTime);
    final Properties consumerConfig = TestUtils.consumerConfig(CLUSTER.bootstrapServers(), StringDeserializer.class, StringDeserializer.class);
    try {
        IntegrationTestUtils.waitUntilMinKeyValueRecordsReceived(consumerConfig, DEFAULT_OUTPUT_TOPIC, 2, 5000);
        fail("Should not get here");
    } finally {
        streams.close();
    }
}
Also used: KStreamBuilder (org.apache.kafka.streams.kstream.KStreamBuilder), KafkaStreams (org.apache.kafka.streams.KafkaStreams), Properties (java.util.Properties), Test (org.junit.Test)
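The streamsConfiguration object passed to KafkaStreams in these examples is built in the test class's setup code and is not part of this listing. Below is a minimal sketch of what such a configuration might look like; the application id, bootstrap address, and state directory are placeholder values (the real tests typically take the broker address from the embedded cluster via CLUSTER.bootstrapServers() and use a temporary state directory), so treat this as an assumption rather than the test's actual setup.

// Hypothetical helper; the values are placeholders, not the ones used by the real tests.
private static Properties exampleStreamsConfiguration() {
    final Properties props = new Properties();
    props.put(StreamsConfig.APPLICATION_ID_CONFIG, "regex-source-integration-test");
    props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
    props.put(StreamsConfig.STATE_DIR_CONFIG, "/tmp/kafka-streams-test");
    // Default to String serdes, matching the Serdes.String() usage in the examples above.
    props.put(StreamsConfig.KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
    props.put(StreamsConfig.VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
    return props;
}

Only APPLICATION_ID_CONFIG and BOOTSTRAP_SERVERS_CONFIG are mandatory; the remaining entries here are conveniences for the examples above.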

Example 32 with KafkaStreams

Use of org.apache.kafka.streams.KafkaStreams in project kafka by Apache.

The class RegexSourceIntegrationTest, method testMultipleConsumersCanReadFromPartitionedTopic.

@Test
public void testMultipleConsumersCanReadFromPartitionedTopic() throws Exception {
    final Serde<String> stringSerde = Serdes.String();
    final KStreamBuilder builderLeader = new KStreamBuilder();
    final KStreamBuilder builderFollower = new KStreamBuilder();
    final List<String> expectedAssignment = Arrays.asList(PARTITIONED_TOPIC_1, PARTITIONED_TOPIC_2);
    final KStream<String, String> partitionedStreamLeader = builderLeader.stream(Pattern.compile("partitioned-\\d"));
    final KStream<String, String> partitionedStreamFollower = builderFollower.stream(Pattern.compile("partitioned-\\d"));
    partitionedStreamLeader.to(stringSerde, stringSerde, DEFAULT_OUTPUT_TOPIC);
    partitionedStreamFollower.to(stringSerde, stringSerde, DEFAULT_OUTPUT_TOPIC);
    final KafkaStreams partitionedStreamsLeader = new KafkaStreams(builderLeader, streamsConfiguration);
    final KafkaStreams partitionedStreamsFollower = new KafkaStreams(builderFollower, streamsConfiguration);
    final StreamsConfig streamsConfig = new StreamsConfig(streamsConfiguration);
    // Use reflection to reach the private "threads" array inside KafkaStreams and swap the
    // leader's first StreamThread for a TestStreamThread that records its assigned topics.
    final Field leaderStreamThreadsField = partitionedStreamsLeader.getClass().getDeclaredField("threads");
    leaderStreamThreadsField.setAccessible(true);
    final StreamThread[] leaderStreamThreads = (StreamThread[]) leaderStreamThreadsField.get(partitionedStreamsLeader);
    final StreamThread originalLeaderThread = leaderStreamThreads[0];
    final TestStreamThread leaderTestStreamThread = new TestStreamThread(builderLeader, streamsConfig, new DefaultKafkaClientSupplier(), originalLeaderThread.applicationId, originalLeaderThread.clientId, originalLeaderThread.processId, new Metrics(), Time.SYSTEM);
    leaderStreamThreads[0] = leaderTestStreamThread;
    final TestCondition bothTopicsAddedToLeader = new TestCondition() {

        @Override
        public boolean conditionMet() {
            return leaderTestStreamThread.assignedTopicPartitions.equals(expectedAssignment);
        }
    };
    // Apply the same reflection-based swap to the follower instance.
    final Field followerStreamThreadsField = partitionedStreamsFollower.getClass().getDeclaredField("threads");
    followerStreamThreadsField.setAccessible(true);
    final StreamThread[] followerStreamThreads = (StreamThread[]) followerStreamThreadsField.get(partitionedStreamsFollower);
    final StreamThread originalFollowerThread = followerStreamThreads[0];
    final TestStreamThread followerTestStreamThread = new TestStreamThread(builderFollower, streamsConfig, new DefaultKafkaClientSupplier(), originalFollowerThread.applicationId, originalFollowerThread.clientId, originalFollowerThread.processId, new Metrics(), Time.SYSTEM);
    followerStreamThreads[0] = followerTestStreamThread;
    final TestCondition bothTopicsAddedToFollower = new TestCondition() {

        @Override
        public boolean conditionMet() {
            return followerTestStreamThread.assignedTopicPartitions.equals(expectedAssignment);
        }
    };
    // Start the leader first and wait until both partitioned topics are assigned to it,
    // then start the follower and verify it also receives partitions from both topics.
    partitionedStreamsLeader.start();
    TestUtils.waitForCondition(bothTopicsAddedToLeader, "Topics never assigned to leader stream");
    partitionedStreamsFollower.start();
    TestUtils.waitForCondition(bothTopicsAddedToFollower, "Topics never assigned to follower stream");
    partitionedStreamsLeader.close();
    partitionedStreamsFollower.close();
}
Also used: KStreamBuilder (org.apache.kafka.streams.kstream.KStreamBuilder), KafkaStreams (org.apache.kafka.streams.KafkaStreams), DefaultKafkaClientSupplier (org.apache.kafka.streams.processor.internals.DefaultKafkaClientSupplier), StreamThread (org.apache.kafka.streams.processor.internals.StreamThread), Field (java.lang.reflect.Field), Metrics (org.apache.kafka.common.metrics.Metrics), TestCondition (org.apache.kafka.test.TestCondition), StreamsConfig (org.apache.kafka.streams.StreamsConfig), Test (org.junit.Test)

Example 33 with KafkaStreams

Use of org.apache.kafka.streams.KafkaStreams in project kafka by Apache.

The class RegexSourceIntegrationTest, method testShouldReadFromRegexAndNamedTopics.

@Test
public void testShouldReadFromRegexAndNamedTopics() throws Exception {
    final String topic1TestMessage = "topic-1 test";
    final String topic2TestMessage = "topic-2 test";
    final String topicATestMessage = "topic-A test";
    final String topicCTestMessage = "topic-C test";
    final String topicYTestMessage = "topic-Y test";
    final String topicZTestMessage = "topic-Z test";
    final Serde<String> stringSerde = Serdes.String();
    final KStreamBuilder builder = new KStreamBuilder();
    // Combine two regex-based sources with a source on explicitly named topics.
    final KStream<String, String> pattern1Stream = builder.stream(Pattern.compile("topic-\\d"));
    final KStream<String, String> pattern2Stream = builder.stream(Pattern.compile("topic-[A-D]"));
    final KStream<String, String> namedTopicsStream = builder.stream(TOPIC_Y, TOPIC_Z);
    pattern1Stream.to(stringSerde, stringSerde, DEFAULT_OUTPUT_TOPIC);
    pattern2Stream.to(stringSerde, stringSerde, DEFAULT_OUTPUT_TOPIC);
    namedTopicsStream.to(stringSerde, stringSerde, DEFAULT_OUTPUT_TOPIC);
    final KafkaStreams streams = new KafkaStreams(builder, streamsConfiguration);
    streams.start();
    final Properties producerConfig = TestUtils.producerConfig(CLUSTER.bootstrapServers(), StringSerializer.class, StringSerializer.class);
    IntegrationTestUtils.produceValuesSynchronously(TOPIC_1, Arrays.asList(topic1TestMessage), producerConfig, mockTime);
    IntegrationTestUtils.produceValuesSynchronously(TOPIC_2, Arrays.asList(topic2TestMessage), producerConfig, mockTime);
    IntegrationTestUtils.produceValuesSynchronously(TOPIC_A, Arrays.asList(topicATestMessage), producerConfig, mockTime);
    IntegrationTestUtils.produceValuesSynchronously(TOPIC_C, Arrays.asList(topicCTestMessage), producerConfig, mockTime);
    IntegrationTestUtils.produceValuesSynchronously(TOPIC_Y, Arrays.asList(topicYTestMessage), producerConfig, mockTime);
    IntegrationTestUtils.produceValuesSynchronously(TOPIC_Z, Arrays.asList(topicZTestMessage), producerConfig, mockTime);
    final Properties consumerConfig = TestUtils.consumerConfig(CLUSTER.bootstrapServers(), StringDeserializer.class, StringDeserializer.class);
    final List<String> expectedReceivedValues = Arrays.asList(topicATestMessage, topic1TestMessage, topic2TestMessage, topicCTestMessage, topicYTestMessage, topicZTestMessage);
    final List<KeyValue<String, String>> receivedKeyValues = IntegrationTestUtils.waitUntilMinKeyValueRecordsReceived(consumerConfig, DEFAULT_OUTPUT_TOPIC, 6);
    final List<String> actualValues = new ArrayList<>(6);
    for (final KeyValue<String, String> receivedKeyValue : receivedKeyValues) {
        actualValues.add(receivedKeyValue.value);
    }
    streams.close();
    Collections.sort(actualValues);
    Collections.sort(expectedReceivedValues);
    assertThat(actualValues, equalTo(expectedReceivedValues));
}
Also used: KStreamBuilder (org.apache.kafka.streams.kstream.KStreamBuilder), KafkaStreams (org.apache.kafka.streams.KafkaStreams), KeyValue (org.apache.kafka.streams.KeyValue), ArrayList (java.util.ArrayList), Properties (java.util.Properties), Test (org.junit.Test)

Example 34 with KafkaStreams

Use of org.apache.kafka.streams.KafkaStreams in project kafka by Apache.

The class ResetIntegrationTest, method testReprocessingFromScratchAfterResetWithoutIntermediateUserTopic.

@Test
public void testReprocessingFromScratchAfterResetWithoutIntermediateUserTopic() throws Exception {
    final Properties streamsConfiguration = prepareTest();
    final Properties resultTopicConsumerConfig = TestUtils.consumerConfig(CLUSTER.bootstrapServers(), APP_ID + "-standard-consumer-" + OUTPUT_TOPIC, LongDeserializer.class, LongDeserializer.class);
    // RUN
    KafkaStreams streams = new KafkaStreams(setupTopologyWithoutIntermediateUserTopic(), streamsConfiguration);
    streams.start();
    final List<KeyValue<Long, Long>> result = IntegrationTestUtils.waitUntilMinKeyValueRecordsReceived(resultTopicConsumerConfig, OUTPUT_TOPIC, 10, 60000);
    streams.close();
    TestUtils.waitForCondition(consumerGroupInactive, TIMEOUT_MULTIPLIER * STREAMS_CONSUMER_TIMEOUT, "Streams Application consumer group did not time out after " + (TIMEOUT_MULTIPLIER * STREAMS_CONSUMER_TIMEOUT) + " ms.");
    // RESET
    streams = new KafkaStreams(setupTopologyWithoutIntermediateUserTopic(), streamsConfiguration);
    streams.cleanUp();
    cleanGlobal(null);
    TestUtils.waitForCondition(consumerGroupInactive, TIMEOUT_MULTIPLIER * CLEANUP_CONSUMER_TIMEOUT, "Reset Tool consumer group did not time out after " + (TIMEOUT_MULTIPLIER * CLEANUP_CONSUMER_TIMEOUT) + " ms.");
    assertInternalTopicsGotDeleted(null);
    // RE-RUN
    streams.start();
    final List<KeyValue<Long, Long>> resultRerun = IntegrationTestUtils.waitUntilMinKeyValueRecordsReceived(resultTopicConsumerConfig, OUTPUT_TOPIC, 10, 60000);
    streams.close();
    assertThat(resultRerun, equalTo(result));
    TestUtils.waitForCondition(consumerGroupInactive, TIMEOUT_MULTIPLIER * CLEANUP_CONSUMER_TIMEOUT, "Reset Tool consumer group did not time out after " + (TIMEOUT_MULTIPLIER * CLEANUP_CONSUMER_TIMEOUT) + " ms.");
    cleanGlobal(null);
}
Also used: KafkaStreams (org.apache.kafka.streams.KafkaStreams), KeyValue (org.apache.kafka.streams.KeyValue), Properties (java.util.Properties), Test (org.junit.Test)
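The cleanGlobal and assertInternalTopicsGotDeleted helpers used above are defined elsewhere in ResetIntegrationTest and are not shown here. As a hedged sketch of the idea behind cleanGlobal only: resetting an application is normally done through the Streams application reset tool. The snippet below assumes the helper delegates to kafka.tools.StreamsResetter; INPUT_TOPIC is a placeholder name, and the exact options accepted by the tool vary slightly across Kafka versions.

// Sketch only: assumes delegation to the application reset tool; INPUT_TOPIC is a placeholder.
final String[] parameters = new String[] {
    "--application-id", APP_ID,
    "--bootstrap-servers", CLUSTER.bootstrapServers(),
    "--input-topics", INPUT_TOPIC
};
// The reset tool seeks the application's committed offsets on the input topics back to the
// beginning and deletes the application's internal (changelog/repartition) topics.
final int exitCode = new kafka.tools.StreamsResetter().run(parameters);
assertEquals(0, exitCode);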

Example 35 with KafkaStreams

Use of org.apache.kafka.streams.KafkaStreams in project kafka by Apache.

The class GlobalKTableIntegrationTest, method startStreams.

private void startStreams() {
    kafkaStreams = new KafkaStreams(builder, streamsConfiguration);
    kafkaStreams.start();
}
Also used: KafkaStreams (org.apache.kafka.streams.KafkaStreams)
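The builder and streamsConfiguration fields used by startStreams, and the kafkaStreams instance it assigns, live in the surrounding GlobalKTableIntegrationTest class, which is not included in this listing. The sketch below shows one assumed shape of that surrounding state, with placeholder topic and store names and an illustrative join; the real test's topology (a KStream joined against a GlobalKTable) differs in its details.

// Hypothetical surrounding state for the startStreams() helper above; topic and store
// names are placeholders, and the join logic is illustrative only.
private final KStreamBuilder builder = new KStreamBuilder();
private Properties streamsConfiguration;   // populated in the test's setup
private KafkaStreams kafkaStreams;

private void buildExampleTopology() {
    // Materialize a topic as a GlobalKTable so every instance holds a full local copy.
    final GlobalKTable<Long, String> globalTable =
        builder.globalTable(Serdes.Long(), Serdes.String(), "global-table-topic", "global-store");
    final KStream<String, Long> stream = builder.stream(Serdes.String(), Serdes.Long(), "stream-topic");
    // Look up each stream record in the global table, using the record value as the table key.
    stream.leftJoin(globalTable,
            new KeyValueMapper<String, Long, Long>() {
                @Override
                public Long apply(final String key, final Long value) {
                    return value;
                }
            },
            new ValueJoiner<Long, String, String>() {
                @Override
                public String apply(final Long streamValue, final String tableValue) {
                    return streamValue + "+" + tableValue;
                }
            })
            .to(Serdes.String(), Serdes.String(), "output-topic");
}

With fields like these in place, startStreams() simply constructs the KafkaStreams instance from the assembled topology and configuration and starts it.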

Aggregations

KafkaStreams (org.apache.kafka.streams.KafkaStreams): 40
Properties (java.util.Properties): 24
KStreamBuilder (org.apache.kafka.streams.kstream.KStreamBuilder): 23
Test (org.junit.Test): 15
KeyValue (org.apache.kafka.streams.KeyValue): 9
CountDownLatch (java.util.concurrent.CountDownLatch): 8
TestCondition (org.apache.kafka.test.TestCondition): 5
StreamsConfig (org.apache.kafka.streams.StreamsConfig): 4
ValueJoiner (org.apache.kafka.streams.kstream.ValueJoiner): 4
ValueMapper (org.apache.kafka.streams.kstream.ValueMapper): 4
Field (java.lang.reflect.Field): 3
ArrayList (java.util.ArrayList): 3
Metrics (org.apache.kafka.common.metrics.Metrics): 3
StringSerializer (org.apache.kafka.common.serialization.StringSerializer): 3
DefaultKafkaClientSupplier (org.apache.kafka.streams.processor.internals.DefaultKafkaClientSupplier): 3
StreamThread (org.apache.kafka.streams.processor.internals.StreamThread): 3
MockKeyValueMapper (org.apache.kafka.test.MockKeyValueMapper): 3
KafkaProducer (org.apache.kafka.clients.producer.KafkaProducer): 2
KafkaStreamsTest (org.apache.kafka.streams.KafkaStreamsTest): 2
Windowed (org.apache.kafka.streams.kstream.Windowed): 2