Use of org.apache.kafka.streams.KafkaStreams in project kafka by apache.
From the class RegexSourceIntegrationTest, method testNoMessagesSentExceptionFromOverlappingPatterns.
// TODO should be updated to expected = TopologyBuilderException after KAFKA-3708
@Test(expected = AssertionError.class)
public void testNoMessagesSentExceptionFromOverlappingPatterns() throws Exception {
    final String fooMessage = "fooMessage";
    final String fMessage = "fMessage";
    final Serde<String> stringSerde = Serdes.String();
    final KStreamBuilder builder = new KStreamBuilder();

    // overlapping patterns here, no messages should be sent as TopologyBuilderException
    // will be thrown when the processor topology is built.
    final KStream<String, String> pattern1Stream = builder.stream(Pattern.compile("foo.*"));
    final KStream<String, String> pattern2Stream = builder.stream(Pattern.compile("f.*"));

    pattern1Stream.to(stringSerde, stringSerde, DEFAULT_OUTPUT_TOPIC);
    pattern2Stream.to(stringSerde, stringSerde, DEFAULT_OUTPUT_TOPIC);

    final KafkaStreams streams = new KafkaStreams(builder, streamsConfiguration);
    streams.start();

    final Properties producerConfig = TestUtils.producerConfig(CLUSTER.bootstrapServers(), StringSerializer.class, StringSerializer.class);
    IntegrationTestUtils.produceValuesSynchronously(FA_TOPIC, Arrays.asList(fMessage), producerConfig, mockTime);
    IntegrationTestUtils.produceValuesSynchronously(FOO_TOPIC, Arrays.asList(fooMessage), producerConfig, mockTime);

    final Properties consumerConfig = TestUtils.consumerConfig(CLUSTER.bootstrapServers(), StringDeserializer.class, StringDeserializer.class);
    try {
        IntegrationTestUtils.waitUntilMinKeyValueRecordsReceived(consumerConfig, DEFAULT_OUTPUT_TOPIC, 2, 5000);
        fail("Should not get here");
    } finally {
        streams.close();
    }
}
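A note on the expected exception: because the overlapping patterns prevent any records from reaching DEFAULT_OUTPUT_TOPIC, waitUntilMinKeyValueRecordsReceived times out, and in Kafka's test utilities a timed-out wait surfaces as an AssertionError, which is what the annotation catches until KAFKA-3708 moves the failure to topology construction. A minimal sketch of such a poll-until-count helper (names and signature are illustrative, not the actual IntegrationTestUtils code) could look like:

// Illustrative sketch only: polls a topic until `expected` records arrive or the
// timeout elapses, then fails with AssertionError, the same failure mode the test
// above relies on. Not the real IntegrationTestUtils implementation.
private static List<KeyValue<String, String>> waitForRecords(final Properties consumerConfig,
                                                             final String topic,
                                                             final int expected,
                                                             final long timeoutMs) {
    final List<KeyValue<String, String>> received = new ArrayList<>();
    final KafkaConsumer<String, String> consumer = new KafkaConsumer<>(consumerConfig);
    try {
        consumer.subscribe(Collections.singletonList(topic));
        final long deadline = System.currentTimeMillis() + timeoutMs;
        while (received.size() < expected && System.currentTimeMillis() < deadline) {
            for (final ConsumerRecord<String, String> record : consumer.poll(100)) {
                received.add(new KeyValue<>(record.key(), record.value()));
            }
        }
    } finally {
        consumer.close();
    }
    if (received.size() < expected) {
        throw new AssertionError("Expected " + expected + " records but received " + received.size());
    }
    return received;
}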
Use of org.apache.kafka.streams.KafkaStreams in project kafka by apache.
From the class RegexSourceIntegrationTest, method testMultipleConsumersCanReadFromPartitionedTopic.
@Test
public void testMultipleConsumersCanReadFromPartitionedTopic() throws Exception {
    final Serde<String> stringSerde = Serdes.String();
    final KStreamBuilder builderLeader = new KStreamBuilder();
    final KStreamBuilder builderFollower = new KStreamBuilder();
    final List<String> expectedAssignment = Arrays.asList(PARTITIONED_TOPIC_1, PARTITIONED_TOPIC_2);

    final KStream<String, String> partitionedStreamLeader = builderLeader.stream(Pattern.compile("partitioned-\\d"));
    final KStream<String, String> partitionedStreamFollower = builderFollower.stream(Pattern.compile("partitioned-\\d"));

    partitionedStreamLeader.to(stringSerde, stringSerde, DEFAULT_OUTPUT_TOPIC);
    partitionedStreamFollower.to(stringSerde, stringSerde, DEFAULT_OUTPUT_TOPIC);

    final KafkaStreams partitionedStreamsLeader = new KafkaStreams(builderLeader, streamsConfiguration);
    final KafkaStreams partitionedStreamsFollower = new KafkaStreams(builderFollower, streamsConfiguration);
    final StreamsConfig streamsConfig = new StreamsConfig(streamsConfiguration);

    // Swap the leader's first StreamThread for a TestStreamThread that records its topic assignment.
    final Field leaderStreamThreadsField = partitionedStreamsLeader.getClass().getDeclaredField("threads");
    leaderStreamThreadsField.setAccessible(true);
    final StreamThread[] leaderStreamThreads = (StreamThread[]) leaderStreamThreadsField.get(partitionedStreamsLeader);
    final StreamThread originalLeaderThread = leaderStreamThreads[0];
    final TestStreamThread leaderTestStreamThread = new TestStreamThread(builderLeader, streamsConfig,
        new DefaultKafkaClientSupplier(), originalLeaderThread.applicationId, originalLeaderThread.clientId,
        originalLeaderThread.processId, new Metrics(), Time.SYSTEM);
    leaderStreamThreads[0] = leaderTestStreamThread;

    final TestCondition bothTopicsAddedToLeader = new TestCondition() {
        @Override
        public boolean conditionMet() {
            return leaderTestStreamThread.assignedTopicPartitions.equals(expectedAssignment);
        }
    };

    // Same swap for the follower instance.
    final Field followerStreamThreadsField = partitionedStreamsFollower.getClass().getDeclaredField("threads");
    followerStreamThreadsField.setAccessible(true);
    final StreamThread[] followerStreamThreads = (StreamThread[]) followerStreamThreadsField.get(partitionedStreamsFollower);
    final StreamThread originalFollowerThread = followerStreamThreads[0];
    final TestStreamThread followerTestStreamThread = new TestStreamThread(builderFollower, streamsConfig,
        new DefaultKafkaClientSupplier(), originalFollowerThread.applicationId, originalFollowerThread.clientId,
        originalFollowerThread.processId, new Metrics(), Time.SYSTEM);
    followerStreamThreads[0] = followerTestStreamThread;

    final TestCondition bothTopicsAddedToFollower = new TestCondition() {
        @Override
        public boolean conditionMet() {
            return followerTestStreamThread.assignedTopicPartitions.equals(expectedAssignment);
        }
    };

    partitionedStreamsLeader.start();
    TestUtils.waitForCondition(bothTopicsAddedToLeader, "Topics never assigned to leader stream");
    partitionedStreamsFollower.start();
    TestUtils.waitForCondition(bothTopicsAddedToFollower, "Topics never assigned to follower stream");

    partitionedStreamsLeader.close();
    partitionedStreamsFollower.close();
}
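The reflection block above is duplicated for the leader and the follower. A small helper (hypothetical; not part of the original test) captures the shared intent of reaching into KafkaStreams' private threads field:

// Hypothetical helper mirroring the inline reflection above: extracts the private
// "threads" array from a KafkaStreams instance so a thread can be replaced.
private static StreamThread[] streamThreadsOf(final KafkaStreams streams) throws Exception {
    final Field threadsField = streams.getClass().getDeclaredField("threads");
    threadsField.setAccessible(true);
    return (StreamThread[]) threadsField.get(streams);
}

With such a helper, each leader/follower block reduces to a one-line lookup followed by the thread swap, which must happen before start() so the replacement thread is the one that subscribes.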
Use of org.apache.kafka.streams.KafkaStreams in project kafka by apache.
From the class RegexSourceIntegrationTest, method testShouldReadFromRegexAndNamedTopics.
@Test
public void testShouldReadFromRegexAndNamedTopics() throws Exception {
    final String topic1TestMessage = "topic-1 test";
    final String topic2TestMessage = "topic-2 test";
    final String topicATestMessage = "topic-A test";
    final String topicCTestMessage = "topic-C test";
    final String topicYTestMessage = "topic-Y test";
    final String topicZTestMessage = "topic-Z test";
    final Serde<String> stringSerde = Serdes.String();
    final KStreamBuilder builder = new KStreamBuilder();

    final KStream<String, String> pattern1Stream = builder.stream(Pattern.compile("topic-\\d"));
    final KStream<String, String> pattern2Stream = builder.stream(Pattern.compile("topic-[A-D]"));
    final KStream<String, String> namedTopicsStream = builder.stream(TOPIC_Y, TOPIC_Z);

    pattern1Stream.to(stringSerde, stringSerde, DEFAULT_OUTPUT_TOPIC);
    pattern2Stream.to(stringSerde, stringSerde, DEFAULT_OUTPUT_TOPIC);
    namedTopicsStream.to(stringSerde, stringSerde, DEFAULT_OUTPUT_TOPIC);

    final KafkaStreams streams = new KafkaStreams(builder, streamsConfiguration);
    streams.start();

    final Properties producerConfig = TestUtils.producerConfig(CLUSTER.bootstrapServers(), StringSerializer.class, StringSerializer.class);
    IntegrationTestUtils.produceValuesSynchronously(TOPIC_1, Arrays.asList(topic1TestMessage), producerConfig, mockTime);
    IntegrationTestUtils.produceValuesSynchronously(TOPIC_2, Arrays.asList(topic2TestMessage), producerConfig, mockTime);
    IntegrationTestUtils.produceValuesSynchronously(TOPIC_A, Arrays.asList(topicATestMessage), producerConfig, mockTime);
    IntegrationTestUtils.produceValuesSynchronously(TOPIC_C, Arrays.asList(topicCTestMessage), producerConfig, mockTime);
    IntegrationTestUtils.produceValuesSynchronously(TOPIC_Y, Arrays.asList(topicYTestMessage), producerConfig, mockTime);
    IntegrationTestUtils.produceValuesSynchronously(TOPIC_Z, Arrays.asList(topicZTestMessage), producerConfig, mockTime);

    final Properties consumerConfig = TestUtils.consumerConfig(CLUSTER.bootstrapServers(), StringDeserializer.class, StringDeserializer.class);
    final List<String> expectedReceivedValues = Arrays.asList(topicATestMessage, topic1TestMessage, topic2TestMessage, topicCTestMessage, topicYTestMessage, topicZTestMessage);
    final List<KeyValue<String, String>> receivedKeyValues = IntegrationTestUtils.waitUntilMinKeyValueRecordsReceived(consumerConfig, DEFAULT_OUTPUT_TOPIC, 6);
    final List<String> actualValues = new ArrayList<>(6);
    for (final KeyValue<String, String> receivedKeyValue : receivedKeyValues) {
        actualValues.add(receivedKeyValue.value);
    }
    streams.close();

    Collections.sort(actualValues);
    Collections.sort(expectedReceivedValues);
    assertThat(actualValues, equalTo(expectedReceivedValues));
}
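The expected six values come from the union of the two pattern subscriptions and the named subscription: "topic-\d" matches topic-1 and topic-2, "topic-[A-D]" matches topic-A and topic-C, and topic-Y/topic-Z are subscribed by name. A quick standalone check of the pattern matching (illustrative only; assumes java.util.Arrays and java.util.regex.Pattern are imported):

// Prints which of the six topic names each pattern above matches,
// e.g. "topic-1 matches topic-\d: true, topic-[A-D]: false".
public static void main(final String[] args) {
    final Pattern digits = Pattern.compile("topic-\\d");
    final Pattern letters = Pattern.compile("topic-[A-D]");
    for (final String topic : Arrays.asList("topic-1", "topic-2", "topic-A", "topic-C", "topic-Y", "topic-Z")) {
        System.out.println(topic + " matches topic-\\d: " + digits.matcher(topic).matches()
            + ", topic-[A-D]: " + letters.matcher(topic).matches());
    }
}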
Use of org.apache.kafka.streams.KafkaStreams in project kafka by apache.
From the class ResetIntegrationTest, method testReprocessingFromScratchAfterResetWithoutIntermediateUserTopic.
@Test
public void testReprocessingFromScratchAfterResetWithoutIntermediateUserTopic() throws Exception {
    final Properties streamsConfiguration = prepareTest();
    final Properties resultTopicConsumerConfig = TestUtils.consumerConfig(CLUSTER.bootstrapServers(),
        APP_ID + "-standard-consumer-" + OUTPUT_TOPIC, LongDeserializer.class, LongDeserializer.class);

    // RUN
    KafkaStreams streams = new KafkaStreams(setupTopologyWithoutIntermediateUserTopic(), streamsConfiguration);
    streams.start();
    final List<KeyValue<Long, Long>> result = IntegrationTestUtils.waitUntilMinKeyValueRecordsReceived(resultTopicConsumerConfig, OUTPUT_TOPIC, 10, 60000);
    streams.close();
    TestUtils.waitForCondition(consumerGroupInactive, TIMEOUT_MULTIPLIER * STREAMS_CONSUMER_TIMEOUT,
        "Streams Application consumer group did not time out after " + (TIMEOUT_MULTIPLIER * STREAMS_CONSUMER_TIMEOUT) + " ms.");

    // RESET
    streams = new KafkaStreams(setupTopologyWithoutIntermediateUserTopic(), streamsConfiguration);
    streams.cleanUp();
    cleanGlobal(null);
    TestUtils.waitForCondition(consumerGroupInactive, TIMEOUT_MULTIPLIER * CLEANUP_CONSUMER_TIMEOUT,
        "Reset Tool consumer group did not time out after " + (TIMEOUT_MULTIPLIER * CLEANUP_CONSUMER_TIMEOUT) + " ms.");
    assertInternalTopicsGotDeleted(null);

    // RE-RUN
    streams.start();
    final List<KeyValue<Long, Long>> resultRerun = IntegrationTestUtils.waitUntilMinKeyValueRecordsReceived(resultTopicConsumerConfig, OUTPUT_TOPIC, 10, 60000);
    streams.close();

    assertThat(resultRerun, equalTo(result));

    TestUtils.waitForCondition(consumerGroupInactive, TIMEOUT_MULTIPLIER * CLEANUP_CONSUMER_TIMEOUT,
        "Reset Tool consumer group did not time out after " + (TIMEOUT_MULTIPLIER * CLEANUP_CONSUMER_TIMEOUT) + " ms.");
    cleanGlobal(null);
}
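The reset here has two halves: streams.cleanUp() wipes the application's local state directory and may only be called while the instance is not running, while the test's cleanGlobal helper drives the broker-side reset of committed offsets and internal topics. A minimal sketch of the local half of the run/reset/re-run cycle, assuming a builder and configuration are in scope:

// Minimal sketch of the lifecycle exercised above. cleanUp() deletes local state
// stores for this application.id and must be called before start() or after close().
KafkaStreams streams = new KafkaStreams(builder, streamsConfiguration);
streams.start();                 // first run
streams.close();

streams = new KafkaStreams(builder, streamsConfiguration);
streams.cleanUp();               // local reset: wipe the state directory
// (broker-side offsets and internal topics are reset separately, e.g. by the reset tool)
streams.start();                 // re-run reprocesses the input from scratch
streams.close();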
Use of org.apache.kafka.streams.KafkaStreams in project kafka by apache.
From the class GlobalKTableIntegrationTest, method startStreams.
private void startStreams() {
    kafkaStreams = new KafkaStreams(builder, streamsConfiguration);
    kafkaStreams.start();
}
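startStreams assumes builder, streamsConfiguration, and kafkaStreams are fields of the test class. A matching teardown helper (an assumption; the original class closes the instance in its test lifecycle methods) keeps the pairing explicit:

// Hypothetical companion to startStreams(): close the instance if it was started.
private void stopStreams() {
    if (kafkaStreams != null) {
        kafkaStreams.close();
    }
}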