Search in sources :

Example 6 with ThreadMetadata

Use of org.apache.kafka.streams.ThreadMetadata in the Apache Kafka project.

The method shouldNotBeEqualIfDifferInProducerClientIds of the class ThreadMetadataImplTest:

@Test
public void shouldNotBeEqualIfDifferInProducerClientIds() {
    // Build metadata identical to the reference object except for the set of
    // producer client ids, then verify both equals() and hashCode() diverge.
    final ThreadMetadata withOtherProducerClientIds = new ThreadMetadataImpl(
        THREAD_NAME,
        THREAD_STATE,
        MAIN_CONSUMER_CLIENT_ID,
        RESTORE_CONSUMER_CLIENT_ID,
        mkSet(CLIENT_ID_1),
        ADMIN_CLIENT_ID,
        ACTIVE_TASKS,
        STANDBY_TASKS);
    assertThat(threadMetadata, not(equalTo(withOtherProducerClientIds)));
    // NOTE(review): unequal objects are not strictly required to have unequal
    // hash codes; this assertion relies on the chosen test fixture values.
    assertThat(threadMetadata.hashCode(), not(equalTo(withOtherProducerClientIds.hashCode())));
}
Also used : ThreadMetadata(org.apache.kafka.streams.ThreadMetadata) Test(org.junit.Test)

Example 7 with ThreadMetadata

use of org.apache.kafka.streams.ThreadMetadata in project kafka by apache.

The method shouldNotBeEqualIfDifferInStandByTasks of the class ThreadMetadataImplTest:

@Test
public void shouldNotBeEqualIfDifferInStandByTasks() {
    // Same fixture as the reference metadata except the standby-task set,
    // so equality and hash code must both differ.
    final ThreadMetadata withOtherStandbyTasks = new ThreadMetadataImpl(
        THREAD_NAME,
        THREAD_STATE,
        MAIN_CONSUMER_CLIENT_ID,
        RESTORE_CONSUMER_CLIENT_ID,
        PRODUCER_CLIENT_IDS,
        ADMIN_CLIENT_ID,
        ACTIVE_TASKS,
        mkSet(TM_0));
    assertThat(threadMetadata, not(equalTo(withOtherStandbyTasks)));
    // NOTE(review): hash inequality is fixture-dependent, not guaranteed by
    // the hashCode() contract for unequal objects.
    assertThat(threadMetadata.hashCode(), not(equalTo(withOtherStandbyTasks.hashCode())));
}
Also used : ThreadMetadata(org.apache.kafka.streams.ThreadMetadata) Test(org.junit.Test)

Example 8 with ThreadMetadata

use of org.apache.kafka.streams.ThreadMetadata in project kafka by apache.

The method shouldNotBeEqualIfDifferInConsumerClientId of the class ThreadMetadataImplTest:

@Test
public void shouldNotBeEqualIfDifferInConsumerClientId() {
    // NOTE(review): despite the method name mentioning "ConsumerClientId",
    // the field being varied here is the RESTORE consumer client id — the
    // main consumer id stays fixed. Looks intentional; confirm against the
    // sibling test cases.
    final ThreadMetadata withOtherRestoreConsumerId = new ThreadMetadataImpl(
        THREAD_NAME,
        THREAD_STATE,
        MAIN_CONSUMER_CLIENT_ID,
        "different",
        PRODUCER_CLIENT_IDS,
        ADMIN_CLIENT_ID,
        ACTIVE_TASKS,
        STANDBY_TASKS);
    assertThat(threadMetadata, not(equalTo(withOtherRestoreConsumerId)));
    assertThat(threadMetadata.hashCode(), not(equalTo(withOtherRestoreConsumerId.hashCode())));
}
Also used : ThreadMetadata(org.apache.kafka.streams.ThreadMetadata) Test(org.junit.Test)

Example 9 with ThreadMetadata

use of org.apache.kafka.streams.ThreadMetadata in project kafka by apache.

The method main of the class StreamsStandByReplicaTest:

// System-test driver: builds two counting topologies (one in-memory store,
// one persistent store) with a standby replica and reports active/standby
// task counts whenever the instance transitions REBALANCING -> RUNNING.
//
// Expected arguments:
//   args[0] — path to a properties file holding the Streams configuration
//   args[1] — additional configs, parsed by SystemTestUtil.parseConfigs;
//             must include sourceTopic, sinkTopic1 and sinkTopic2 entries.
public static void main(final String[] args) throws IOException {
    // Validate the argument count up front. NOTE(review): Exit.exit(1)
    // normally terminates the JVM, but Kafka's Exit wrapper allows the exit
    // procedure to be overridden (e.g. in tests), in which case execution
    // would fall through to the code below — TODO confirm that is acceptable.
    if (args.length < 2) {
        System.err.println("StreamsStandByReplicaTest are expecting two parameters: " + "propFile, additionalConfigs; but only see " + args.length + " parameter");
        Exit.exit(1);
    }
    System.out.println("StreamsTest instance started");
    final String propFileName = args[0];
    final String additionalConfigs = args[1];
    // Base configuration comes from the properties file; bootstrap servers
    // are mandatory.
    final Properties streamsProperties = Utils.loadProps(propFileName);
    final String kafka = streamsProperties.getProperty(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG);
    if (kafka == null) {
        System.err.println("No bootstrap kafka servers specified in " + StreamsConfig.BOOTSTRAP_SERVERS_CONFIG);
        Exit.exit(1);
    }
    // Fixed test configuration: one standby replica (the point of this test),
    // fast commits, caching disabled so records flow through immediately,
    // String serdes throughout, and an idempotent producer.
    streamsProperties.put(StreamsConfig.APPLICATION_ID_CONFIG, "kafka-streams-standby-tasks");
    streamsProperties.put(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG, 100L);
    streamsProperties.put(StreamsConfig.NUM_STANDBY_REPLICAS_CONFIG, 1);
    streamsProperties.put(StreamsConfig.CACHE_MAX_BYTES_BUFFERING_CONFIG, 0);
    streamsProperties.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
    streamsProperties.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass());
    streamsProperties.put(StreamsConfig.producerPrefix(ProducerConfig.ENABLE_IDEMPOTENCE_CONFIG), true);
    if (additionalConfigs == null) {
        System.err.println("additional configs are not provided");
        System.err.flush();
        Exit.exit(1);
    }
    // The additional-config string carries the three topic names; they are
    // removed from the map so only genuine Streams configs are merged below.
    final Map<String, String> updated = SystemTestUtil.parseConfigs(additionalConfigs);
    System.out.println("Updating configs with " + updated);
    final String sourceTopic = updated.remove("sourceTopic");
    final String sinkTopic1 = updated.remove("sinkTopic1");
    final String sinkTopic2 = updated.remove("sinkTopic2");
    if (sourceTopic == null || sinkTopic1 == null || sinkTopic2 == null) {
        System.err.println(String.format("one or more required topics null sourceTopic[%s], sinkTopic1[%s], sinkTopic2[%s]", sourceTopic, sinkTopic1, sinkTopic2));
        System.err.flush();
        Exit.exit(1);
    }
    streamsProperties.putAll(updated);
    // Sanity-check that the timeout/retry settings the harness depends on
    // were actually supplied.
    if (!confirmCorrectConfigs(streamsProperties)) {
        System.err.println(String.format("ERROR: Did not have all required configs expected  to contain %s, %s,  %s,  %s", StreamsConfig.consumerPrefix(ConsumerConfig.MAX_POLL_INTERVAL_MS_CONFIG), StreamsConfig.producerPrefix(ProducerConfig.RETRIES_CONFIG), StreamsConfig.producerPrefix(ProducerConfig.REQUEST_TIMEOUT_MS_CONFIG), StreamsConfig.producerPrefix(ProducerConfig.MAX_BLOCK_MS_CONFIG)));
        Exit.exit(1);
    }
    // Topology: the same input stream is counted twice, once into an
    // in-memory store and once into a persistent (RocksDB-backed) store,
    // each materialized count written to its own sink topic as a String.
    final StreamsBuilder builder = new StreamsBuilder();
    final String inMemoryStoreName = "in-memory-store";
    final String persistentMemoryStoreName = "persistent-memory-store";
    final KeyValueBytesStoreSupplier inMemoryStoreSupplier = Stores.inMemoryKeyValueStore(inMemoryStoreName);
    final KeyValueBytesStoreSupplier persistentStoreSupplier = Stores.persistentKeyValueStore(persistentMemoryStoreName);
    final Serde<String> stringSerde = Serdes.String();
    final ValueMapper<Long, String> countMapper = Object::toString;
    final KStream<String, String> inputStream = builder.stream(sourceTopic, Consumed.with(stringSerde, stringSerde));
    inputStream.groupByKey().count(Materialized.as(inMemoryStoreSupplier)).toStream().mapValues(countMapper).to(sinkTopic1, Produced.with(stringSerde, stringSerde));
    inputStream.groupByKey().count(Materialized.as(persistentStoreSupplier)).toStream().mapValues(countMapper).to(sinkTopic2, Produced.with(stringSerde, stringSerde));
    final KafkaStreams streams = new KafkaStreams(builder.build(), streamsProperties);
    // Any stream-thread exception is fatal for this test: log it and shut
    // the whole client down so the harness notices the failure.
    streams.setUncaughtExceptionHandler(e -> {
        System.err.println("FATAL: An unexpected exception " + e);
        e.printStackTrace(System.err);
        System.err.flush();
        return StreamsUncaughtExceptionHandler.StreamThreadExceptionResponse.SHUTDOWN_CLIENT;
    });
    // After each rebalance settles, print the per-thread active/standby task
    // counts on stdout — the external test harness parses these lines.
    streams.setStateListener((newState, oldState) -> {
        if (newState == KafkaStreams.State.RUNNING && oldState == KafkaStreams.State.REBALANCING) {
            final Set<ThreadMetadata> threadMetadata = streams.metadataForLocalThreads();
            for (final ThreadMetadata threadMetadatum : threadMetadata) {
                System.out.println("ACTIVE_TASKS:" + threadMetadatum.activeTasks().size() + " STANDBY_TASKS:" + threadMetadatum.standbyTasks().size());
            }
        }
    });
    System.out.println("Start Kafka Streams");
    streams.start();
    // Clean shutdown on JVM exit; shutdown(streams) is defined elsewhere in
    // this class.
    Exit.addShutdownHook("streams-shutdown-hook", () -> {
        shutdown(streams);
        System.out.println("Shut down streams now");
    });
}
Also used : StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) KafkaStreams(org.apache.kafka.streams.KafkaStreams) KeyValueBytesStoreSupplier(org.apache.kafka.streams.state.KeyValueBytesStoreSupplier) ThreadMetadata(org.apache.kafka.streams.ThreadMetadata) Properties(java.util.Properties)

Example 10 with ThreadMetadata

use of org.apache.kafka.streams.ThreadMetadata in project kafka by apache.

The method shouldAddStreamThread of the class AdjustStreamThreadCountTest:

@Test
public void shouldAddStreamThread() throws Exception {
    // Start with two stream threads, add a third at runtime, and verify the
    // new thread shows up in the local thread metadata with the expected name.
    try (final KafkaStreams kafkaStreams = new KafkaStreams(builder.build(), properties)) {
        addStreamStateChangeListener(kafkaStreams);
        startStreamsAndWaitForRunning(kafkaStreams);

        final int threadCountBefore = kafkaStreams.metadataForLocalThreads().size();
        assertThat(sortedThreadNumbers(kafkaStreams), equalTo(new String[] { "1", "2" }));

        stateTransitionHistory.clear();
        final Optional<String> addedThreadName = kafkaStreams.addStreamThread();
        assertThat(addedThreadName, not(Optional.empty()));

        // Wait until the freshly added thread is visible in the metadata.
        TestUtils.waitForCondition(
            () -> kafkaStreams.metadataForLocalThreads().stream()
                .sequential()
                .map(ThreadMetadata::threadName)
                .anyMatch(name -> name.equals(addedThreadName.orElse(""))),
            "Wait for the thread to be added");

        assertThat(kafkaStreams.metadataForLocalThreads().size(), equalTo(threadCountBefore + 1));
        assertThat(sortedThreadNumbers(kafkaStreams), equalTo(new String[] { "1", "2", "3" }));
        waitForTransitionFromRebalancingToRunning();
    }
}

// Projects the local thread names onto their numeric "-StreamThread-N"
// suffixes, sorted, for order-independent comparison.
private static Object[] sortedThreadNumbers(final KafkaStreams kafkaStreams) {
    return kafkaStreams.metadataForLocalThreads().stream()
        .map(metadata -> metadata.threadName().split("-StreamThread-")[1])
        .sorted()
        .toArray();
}
Also used : CoreMatchers.is(org.hamcrest.CoreMatchers.is) Utils.mkMap(org.apache.kafka.common.utils.Utils.mkMap) IntegrationTestUtils.safeUniqueTestName(org.apache.kafka.streams.integration.utils.IntegrationTestUtils.safeUniqueTestName) After(org.junit.After) Duration(java.time.Duration) Serdes(org.apache.kafka.common.serialization.Serdes) Assert.fail(org.junit.Assert.fail) AfterClass(org.junit.AfterClass) ThreadMetadata(org.apache.kafka.streams.ThreadMetadata) TestUtils(org.apache.kafka.test.TestUtils) Utils.mkObjectProperties(org.apache.kafka.common.utils.Utils.mkObjectProperties) KeyValue(org.apache.kafka.streams.KeyValue) ConsumerConfig(org.apache.kafka.clients.consumer.ConsumerConfig) Category(org.junit.experimental.categories.Category) Executors(java.util.concurrent.Executors) IntegrationTestUtils(org.apache.kafka.streams.integration.utils.IntegrationTestUtils) ProcessorContext(org.apache.kafka.streams.processor.ProcessorContext) CountDownLatch(java.util.concurrent.CountDownLatch) IntegrationTestUtils.purgeLocalStreamsState(org.apache.kafka.streams.integration.utils.IntegrationTestUtils.purgeLocalStreamsState) List(java.util.List) Utils.mkEntry(org.apache.kafka.common.utils.Utils.mkEntry) Optional(java.util.Optional) StreamsConfig(org.apache.kafka.streams.StreamsConfig) BeforeClass(org.junit.BeforeClass) CoreMatchers.equalTo(org.hamcrest.CoreMatchers.equalTo) Assert.assertThrows(org.junit.Assert.assertThrows) CoreMatchers.not(org.hamcrest.CoreMatchers.not) IntegrationTest(org.apache.kafka.test.IntegrationTest) AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) KStream(org.apache.kafka.streams.kstream.KStream) AtomicReference(java.util.concurrent.atomic.AtomicReference) ArrayList(java.util.ArrayList) EmbeddedKafkaCluster(org.apache.kafka.streams.integration.utils.EmbeddedKafkaCluster) TestName(org.junit.rules.TestName) PunctuationType(org.apache.kafka.streams.processor.PunctuationType) MatcherAssert.assertThat(org.hamcrest.MatcherAssert.assertThat) 
ExecutorService(java.util.concurrent.ExecutorService) Before(org.junit.Before) StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) TimeoutException(org.apache.kafka.common.errors.TimeoutException) Properties(java.util.Properties) Transformer(org.apache.kafka.streams.kstream.Transformer) TestUtils.waitForCondition(org.apache.kafka.test.TestUtils.waitForCondition) Assert.assertTrue(org.junit.Assert.assertTrue) Test(org.junit.Test) IOException(java.io.IOException) TimeUnit(java.util.concurrent.TimeUnit) Rule(org.junit.Rule) Assert.assertNull(org.junit.Assert.assertNull) StreamThreadExceptionResponse(org.apache.kafka.streams.errors.StreamsUncaughtExceptionHandler.StreamThreadExceptionResponse) LogCaptureAppender(org.apache.kafka.streams.processor.internals.testutil.LogCaptureAppender) KafkaStreams(org.apache.kafka.streams.KafkaStreams) Assert.assertEquals(org.junit.Assert.assertEquals) KafkaStreams(org.apache.kafka.streams.KafkaStreams) ThreadMetadata(org.apache.kafka.streams.ThreadMetadata) IntegrationTest(org.apache.kafka.test.IntegrationTest) Test(org.junit.Test)

Aggregations

ThreadMetadata (org.apache.kafka.streams.ThreadMetadata)15 Test (org.junit.Test)14 ArrayList (java.util.ArrayList)3 Properties (java.util.Properties)3 KafkaStreams (org.apache.kafka.streams.KafkaStreams)3 StreamsBuilder (org.apache.kafka.streams.StreamsBuilder)3 IOException (java.io.IOException)2 Duration (java.time.Duration)2 Collections.emptySet (java.util.Collections.emptySet)2 HashMap (java.util.HashMap)2 HashSet (java.util.HashSet)2 List (java.util.List)2 Optional (java.util.Optional)2 Set (java.util.Set)2 CountDownLatch (java.util.concurrent.CountDownLatch)2 ExecutorService (java.util.concurrent.ExecutorService)2 Executors (java.util.concurrent.Executors)2 TimeUnit (java.util.concurrent.TimeUnit)2 AtomicBoolean (java.util.concurrent.atomic.AtomicBoolean)2 AtomicReference (java.util.concurrent.atomic.AtomicReference)2