use of org.apache.kafka.streams.processor.internals.StreamThread in project kafka by apache.
the class KafkaStreams method toString.
/**
 * Produce a string representation containing useful information about this {@code KafkaStreams} instance such as
 * thread IDs, task IDs, and a representation of the topology DAG including {@link StateStore}s (cf.
 * {@link TopologyBuilder} and {@link KStreamBuilder}).
 *
 * @param indent the top-level indent for each line
 * @return a string representation of the Kafka Streams instance
 */
public String toString(final String indent) {
    final StringBuilder sb = new StringBuilder()
        .append(indent)
        .append("KafkaStreams processID: ")
        .append(processId)
        .append("\n");
    for (final StreamThread thread : threads) {
        sb.append(thread.toString(indent + "\t"));
    }
    sb.append("\n");
    return sb.toString();
}
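A minimal usage sketch, not taken from the Kafka sources: it assumes a local broker at localhost:9092 and hypothetical topic and application-id names, and simply shows that passing an empty indent yields a top-level dump of the process ID, thread IDs, task IDs, and state stores.

import java.util.Properties;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.kstream.KStreamBuilder;

public class TopologyDumpExample {
    public static void main(final String[] args) {
        final Properties props = new Properties();
        props.put(StreamsConfig.APPLICATION_ID_CONFIG, "topology-dump-example");  // assumed application id
        props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");      // assumed broker address
        final KStreamBuilder builder = new KStreamBuilder();
        builder.stream("input-topic").to("output-topic");                         // hypothetical topics
        final KafkaStreams streams = new KafkaStreams(builder, props);
        // Top-level call with an empty indent; nested threads are indented with "\t" internally.
        System.out.println(streams.toString(""));
        streams.close();
    }
}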
use of org.apache.kafka.streams.processor.internals.StreamThread in project kafka by apache.
the class KafkaStreams method start.
/**
 * Start the {@code KafkaStreams} instance by starting all its threads.
 * <p>
 * Note, for brokers with version {@code 0.9.x} or lower, the broker version cannot be checked.
 * There will be no error and the client will hang and retry to verify the broker version until it
 * {@link StreamsConfig#REQUEST_TIMEOUT_MS_CONFIG times out}.
 *
 * @throws IllegalStateException if process was already started
 * @throws StreamsException if the Kafka brokers have version 0.10.0.x
 */
public synchronized void start() throws IllegalStateException, StreamsException {
    log.debug("{} Starting Kafka Stream process.", logPrefix);
    if (state == State.CREATED) {
        checkBrokerVersionCompatibility();
        setState(State.RUNNING);
        if (globalStreamThread != null) {
            globalStreamThread.start();
        }
        for (final StreamThread thread : threads) {
            thread.start();
        }
        log.info("{} Started Kafka Stream process", logPrefix);
    } else {
        throw new IllegalStateException("Cannot start again.");
    }
}
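For context, a minimal lifecycle sketch under stated assumptions (local broker at localhost:9092, hypothetical topic names): start() may only be called once on a CREATED instance, so the usual pattern is build, start, and eventually close.

import java.util.Properties;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.kstream.KStreamBuilder;

public class StartOnceExample {
    public static void main(final String[] args) throws InterruptedException {
        final Properties props = new Properties();
        props.put(StreamsConfig.APPLICATION_ID_CONFIG, "start-once-example");  // assumed application id
        props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");   // assumed broker address
        final KStreamBuilder builder = new KStreamBuilder();
        builder.stream("source-topic").to("sink-topic");                       // hypothetical topics
        final KafkaStreams streams = new KafkaStreams(builder, props);
        streams.start();            // CREATED -> RUNNING; also starts the global thread if one exists
        try {
            Thread.sleep(10000L);   // let the stream threads run for a while
            // streams.start();     // a second call here would throw IllegalStateException
        } finally {
            streams.close();
        }
    }
}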
use of org.apache.kafka.streams.processor.internals.StreamThread in project kafka by apache.
the class RegexSourceIntegrationTest method testRegexMatchesTopicsAWhenDeleted.
@Test
public void testRegexMatchesTopicsAWhenDeleted() throws Exception {
    final Serde<String> stringSerde = Serdes.String();
    final List<String> expectedFirstAssignment = Arrays.asList("TEST-TOPIC-A", "TEST-TOPIC-B");
    final List<String> expectedSecondAssignment = Arrays.asList("TEST-TOPIC-B");
    final StreamsConfig streamsConfig = new StreamsConfig(streamsConfiguration);

    CLUSTER.createTopic("TEST-TOPIC-A");
    CLUSTER.createTopic("TEST-TOPIC-B");

    final KStreamBuilder builder = new KStreamBuilder();
    final KStream<String, String> pattern1Stream = builder.stream(Pattern.compile("TEST-TOPIC-[A-Z]"));
    pattern1Stream.to(stringSerde, stringSerde, DEFAULT_OUTPUT_TOPIC);

    final KafkaStreams streams = new KafkaStreams(builder, streamsConfiguration);
    final Field streamThreadsField = streams.getClass().getDeclaredField("threads");
    streamThreadsField.setAccessible(true);
    final StreamThread[] streamThreads = (StreamThread[]) streamThreadsField.get(streams);
    final StreamThread originalThread = streamThreads[0];
    final TestStreamThread testStreamThread = new TestStreamThread(builder, streamsConfig,
        new DefaultKafkaClientSupplier(), originalThread.applicationId, originalThread.clientId,
        originalThread.processId, new Metrics(), Time.SYSTEM);
    streamThreads[0] = testStreamThread;

    final TestCondition bothTopicsAdded = new TestCondition() {
        @Override
        public boolean conditionMet() {
            return testStreamThread.assignedTopicPartitions.equals(expectedFirstAssignment);
        }
    };

    streams.start();
    TestUtils.waitForCondition(bothTopicsAdded, STREAM_TASKS_NOT_UPDATED);

    CLUSTER.deleteTopic("TEST-TOPIC-A");

    final TestCondition oneTopicRemoved = new TestCondition() {
        @Override
        public boolean conditionMet() {
            return testStreamThread.assignedTopicPartitions.equals(expectedSecondAssignment);
        }
    };

    TestUtils.waitForCondition(oneTopicRemoved, STREAM_TASKS_NOT_UPDATED);
    streams.close();
}
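The tests above swap the first entry of the private KafkaStreams field named threads with a TestStreamThread (a test helper not shown here, which appears to be a StreamThread subclass recording the topics of its assigned partitions) before start() is called. A generic sketch of that reflection swap, with the field name threads taken from this Kafka version and the helper class and method names hypothetical, might look like this:

import java.lang.reflect.Field;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.processor.internals.StreamThread;

final class StreamThreadTestUtil {

    private StreamThreadTestUtil() { }

    /**
     * Replaces the first StreamThread of a not-yet-started KafkaStreams instance with the
     * given substitute and returns the original thread, so a test can observe assignments.
     */
    static StreamThread swapFirstThread(final KafkaStreams streams, final StreamThread substitute)
            throws NoSuchFieldException, IllegalAccessException {
        final Field threadsField = streams.getClass().getDeclaredField("threads");  // private field in this version
        threadsField.setAccessible(true);
        final StreamThread[] threads = (StreamThread[]) threadsField.get(streams);
        final StreamThread original = threads[0];
        threads[0] = substitute;   // must happen before streams.start()
        return original;
    }
}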
use of org.apache.kafka.streams.processor.internals.StreamThread in project kafka by apache.
the class RegexSourceIntegrationTest method testRegexMatchesTopicsAWhenCreated.
@Test
public void testRegexMatchesTopicsAWhenCreated() throws Exception {
    final Serde<String> stringSerde = Serdes.String();
    final List<String> expectedFirstAssignment = Arrays.asList("TEST-TOPIC-1");
    final List<String> expectedSecondAssignment = Arrays.asList("TEST-TOPIC-1", "TEST-TOPIC-2");
    final StreamsConfig streamsConfig = new StreamsConfig(streamsConfiguration);

    CLUSTER.createTopic("TEST-TOPIC-1");

    final KStreamBuilder builder = new KStreamBuilder();
    final KStream<String, String> pattern1Stream = builder.stream(Pattern.compile("TEST-TOPIC-\\d"));
    pattern1Stream.to(stringSerde, stringSerde, DEFAULT_OUTPUT_TOPIC);

    final KafkaStreams streams = new KafkaStreams(builder, streamsConfiguration);
    final Field streamThreadsField = streams.getClass().getDeclaredField("threads");
    streamThreadsField.setAccessible(true);
    final StreamThread[] streamThreads = (StreamThread[]) streamThreadsField.get(streams);
    final StreamThread originalThread = streamThreads[0];
    final TestStreamThread testStreamThread = new TestStreamThread(builder, streamsConfig,
        new DefaultKafkaClientSupplier(), originalThread.applicationId, originalThread.clientId,
        originalThread.processId, new Metrics(), Time.SYSTEM);

    final TestCondition oneTopicAdded = new TestCondition() {
        @Override
        public boolean conditionMet() {
            return testStreamThread.assignedTopicPartitions.equals(expectedFirstAssignment);
        }
    };

    streamThreads[0] = testStreamThread;
    streams.start();
    TestUtils.waitForCondition(oneTopicAdded, STREAM_TASKS_NOT_UPDATED);

    CLUSTER.createTopic("TEST-TOPIC-2");

    final TestCondition secondTopicAdded = new TestCondition() {
        @Override
        public boolean conditionMet() {
            return testStreamThread.assignedTopicPartitions.equals(expectedSecondAssignment);
        }
    };

    TestUtils.waitForCondition(secondTopicAdded, STREAM_TASKS_NOT_UPDATED);
    streams.close();
}
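Since TestCondition is a single-method interface, the anonymous classes in these tests can be written as Java 8 lambdas. A small self-contained sketch of the same TestUtils.waitForCondition overload used above (class name and condition message are hypothetical; an AtomicBoolean stands in for the assignment bookkeeping):

import java.util.concurrent.atomic.AtomicBoolean;
import org.apache.kafka.test.TestUtils;

public class WaitForConditionLambdaExample {
    public static void main(final String[] args) throws InterruptedException {
        final AtomicBoolean assignmentSeen = new AtomicBoolean(false);
        // A helper thread stands in for the rebalance that would normally flip the flag.
        new Thread(() -> assignmentSeen.set(true)).start();
        // Equivalent of the anonymous TestCondition classes above, written as a lambda.
        TestUtils.waitForCondition(() -> assignmentSeen.get(), "Condition was never met");
    }
}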
use of org.apache.kafka.streams.processor.internals.StreamThread in project kafka by apache.
the class RegexSourceIntegrationTest method testMultipleConsumersCanReadFromPartitionedTopic.
@Test
public void testMultipleConsumersCanReadFromPartitionedTopic() throws Exception {
    final Serde<String> stringSerde = Serdes.String();
    final KStreamBuilder builderLeader = new KStreamBuilder();
    final KStreamBuilder builderFollower = new KStreamBuilder();
    final List<String> expectedAssignment = Arrays.asList(PARTITIONED_TOPIC_1, PARTITIONED_TOPIC_2);

    final KStream<String, String> partitionedStreamLeader = builderLeader.stream(Pattern.compile("partitioned-\\d"));
    final KStream<String, String> partitionedStreamFollower = builderFollower.stream(Pattern.compile("partitioned-\\d"));
    partitionedStreamLeader.to(stringSerde, stringSerde, DEFAULT_OUTPUT_TOPIC);
    partitionedStreamFollower.to(stringSerde, stringSerde, DEFAULT_OUTPUT_TOPIC);

    final KafkaStreams partitionedStreamsLeader = new KafkaStreams(builderLeader, streamsConfiguration);
    final KafkaStreams partitionedStreamsFollower = new KafkaStreams(builderFollower, streamsConfiguration);
    final StreamsConfig streamsConfig = new StreamsConfig(streamsConfiguration);

    final Field leaderStreamThreadsField = partitionedStreamsLeader.getClass().getDeclaredField("threads");
    leaderStreamThreadsField.setAccessible(true);
    final StreamThread[] leaderStreamThreads = (StreamThread[]) leaderStreamThreadsField.get(partitionedStreamsLeader);
    final StreamThread originalLeaderThread = leaderStreamThreads[0];
    final TestStreamThread leaderTestStreamThread = new TestStreamThread(builderLeader, streamsConfig,
        new DefaultKafkaClientSupplier(), originalLeaderThread.applicationId, originalLeaderThread.clientId,
        originalLeaderThread.processId, new Metrics(), Time.SYSTEM);
    leaderStreamThreads[0] = leaderTestStreamThread;

    final TestCondition bothTopicsAddedToLeader = new TestCondition() {
        @Override
        public boolean conditionMet() {
            return leaderTestStreamThread.assignedTopicPartitions.equals(expectedAssignment);
        }
    };

    final Field followerStreamThreadsField = partitionedStreamsFollower.getClass().getDeclaredField("threads");
    followerStreamThreadsField.setAccessible(true);
    final StreamThread[] followerStreamThreads = (StreamThread[]) followerStreamThreadsField.get(partitionedStreamsFollower);
    final StreamThread originalFollowerThread = followerStreamThreads[0];
    final TestStreamThread followerTestStreamThread = new TestStreamThread(builderFollower, streamsConfig,
        new DefaultKafkaClientSupplier(), originalFollowerThread.applicationId, originalFollowerThread.clientId,
        originalFollowerThread.processId, new Metrics(), Time.SYSTEM);
    followerStreamThreads[0] = followerTestStreamThread;

    final TestCondition bothTopicsAddedToFollower = new TestCondition() {
        @Override
        public boolean conditionMet() {
            return followerTestStreamThread.assignedTopicPartitions.equals(expectedAssignment);
        }
    };

    partitionedStreamsLeader.start();
    TestUtils.waitForCondition(bothTopicsAddedToLeader, "Topics never assigned to leader stream");
    partitionedStreamsFollower.start();
    TestUtils.waitForCondition(bothTopicsAddedToFollower, "Topics never assigned to follower stream");

    partitionedStreamsLeader.close();
    partitionedStreamsFollower.close();
}
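This last test starts two KafkaStreams instances against the same topic pattern and closes them at the end; if a waitForCondition call throws, the instances would be left running. A defensive variant, sketched here with hypothetical names (TwoInstanceLifecycle, startCheckAndClose) and not part of the original test, wraps the starts in try/finally:

import org.apache.kafka.streams.KafkaStreams;

final class TwoInstanceLifecycle {

    private TwoInstanceLifecycle() { }

    /**
     * Starts a leader and a follower instance and guarantees both are closed,
     * even if the checks performed in between (elided here) throw.
     */
    static void startCheckAndClose(final KafkaStreams leader, final KafkaStreams follower) throws Exception {
        try {
            leader.start();
            follower.start();
            // ... waitForCondition checks, as in the test above, would go here ...
        } finally {
            leader.close();
            follower.close();
        }
    }
}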