
Example 1 with KafkaStreamsNamedTopologyWrapper

Use of org.apache.kafka.streams.processor.internals.namedtopology.KafkaStreamsNamedTopologyWrapper in the Apache Kafka project.

From the class ErrorHandlingIntegrationTest, method shouldBackOffTaskAndEmitDataWithinSameTopology.

@Test
public void shouldBackOffTaskAndEmitDataWithinSameTopology() throws Exception {
    final AtomicInteger noOutputExpected = new AtomicInteger(0);
    final AtomicInteger outputExpected = new AtomicInteger(0);
    try (final KafkaStreamsNamedTopologyWrapper kafkaStreams = new KafkaStreamsNamedTopologyWrapper(properties)) {
        kafkaStreams.setUncaughtExceptionHandler(exception -> StreamThreadExceptionResponse.REPLACE_THREAD);
        final NamedTopologyBuilder builder = kafkaStreams.newNamedTopologyBuilder("topology_A");
        builder.stream(inputTopic).peek((k, v) -> outputExpected.incrementAndGet()).to(outputTopic);
        builder.stream(errorInputTopic).peek((k, v) -> {
            throw new RuntimeException("Kaboom");
        }).peek((k, v) -> noOutputExpected.incrementAndGet()).to(errorOutputTopic);
        kafkaStreams.addNamedTopology(builder.build());
        StreamsTestUtils.startKafkaStreamsAndWaitForRunningState(kafkaStreams);
        IntegrationTestUtils.produceKeyValuesSynchronouslyWithTimestamp(
            errorInputTopic,
            Arrays.asList(new KeyValue<>(1, "A")),
            TestUtils.producerConfig(CLUSTER.bootstrapServers(), IntegerSerializer.class, StringSerializer.class, new Properties()),
            0L);
        IntegrationTestUtils.produceKeyValuesSynchronouslyWithTimestamp(
            inputTopic,
            Arrays.asList(new KeyValue<>(1, "A"), new KeyValue<>(1, "B")),
            TestUtils.producerConfig(CLUSTER.bootstrapServers(), IntegerSerializer.class, StringSerializer.class, new Properties()),
            0L);
        IntegrationTestUtils.waitUntilFinalKeyValueRecordsReceived(
            TestUtils.consumerConfig(CLUSTER.bootstrapServers(), IntegerDeserializer.class, StringDeserializer.class),
            outputTopic,
            Arrays.asList(new KeyValue<>(1, "A"), new KeyValue<>(1, "B")));
        assertThat(noOutputExpected.get(), equalTo(0));
        assertThat(outputExpected.get(), equalTo(2));
    }
}
Also used : StreamsConfig(org.apache.kafka.streams.StreamsConfig) Arrays(java.util.Arrays) BeforeClass(org.junit.BeforeClass) CoreMatchers.equalTo(org.hamcrest.CoreMatchers.equalTo) KafkaStreamsNamedTopologyWrapper(org.apache.kafka.streams.processor.internals.namedtopology.KafkaStreamsNamedTopologyWrapper) IntegrationTest(org.apache.kafka.test.IntegrationTest) NamedTopologyBuilder(org.apache.kafka.streams.processor.internals.namedtopology.NamedTopologyBuilder) Utils.mkMap(org.apache.kafka.common.utils.Utils.mkMap) StringDeserializer(org.apache.kafka.common.serialization.StringDeserializer) IntegrationTestUtils.safeUniqueTestName(org.apache.kafka.streams.integration.utils.IntegrationTestUtils.safeUniqueTestName) EmbeddedKafkaCluster(org.apache.kafka.streams.integration.utils.EmbeddedKafkaCluster) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) TestName(org.junit.rules.TestName) IntegerSerializer(org.apache.kafka.common.serialization.IntegerSerializer) Serdes(org.apache.kafka.common.serialization.Serdes) StringSerializer(org.apache.kafka.common.serialization.StringSerializer) MatcherAssert.assertThat(org.hamcrest.MatcherAssert.assertThat) Before(org.junit.Before) AfterClass(org.junit.AfterClass) Properties(java.util.Properties) TestUtils(org.apache.kafka.test.TestUtils) Utils.mkObjectProperties(org.apache.kafka.common.utils.Utils.mkObjectProperties) KeyValue(org.apache.kafka.streams.KeyValue) IOException(java.io.IOException) Test(org.junit.Test) Category(org.junit.experimental.categories.Category) IntegrationTestUtils(org.apache.kafka.streams.integration.utils.IntegrationTestUtils) Rule(org.junit.Rule) Utils.mkEntry(org.apache.kafka.common.utils.Utils.mkEntry) StreamThreadExceptionResponse(org.apache.kafka.streams.errors.StreamsUncaughtExceptionHandler.StreamThreadExceptionResponse) IntegerDeserializer(org.apache.kafka.common.serialization.IntegerDeserializer) StreamsTestUtils(org.apache.kafka.test.StreamsTestUtils)
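
The fields referenced above (properties, the topic names, and CLUSTER) are defined elsewhere in ErrorHandlingIntegrationTest. The following is a minimal sketch of what such a fixture could look like, assuming a single-broker EmbeddedKafkaCluster and hypothetical topic names; the real test derives its values differently.

// Sketch only: the field names match the snippet above, but the topic names and
// config values are assumptions, not copied from the real test.
private static final EmbeddedKafkaCluster CLUSTER = new EmbeddedKafkaCluster(1);

private final String inputTopic = "input";
private final String errorInputTopic = "error-input";
private final String outputTopic = "output";
private final String errorOutputTopic = "error-output";

// CLUSTER.start() would be called in a @BeforeClass method before any test instance is created
private final Properties properties = mkObjectProperties(mkMap(
    mkEntry(StreamsConfig.APPLICATION_ID_CONFIG, "error-handling-test"),
    mkEntry(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers()),
    mkEntry(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.IntegerSerde.class),
    mkEntry(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.StringSerde.class)));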

Example 2 with KafkaStreamsNamedTopologyWrapper

Use of org.apache.kafka.streams.processor.internals.namedtopology.KafkaStreamsNamedTopologyWrapper in the Apache Kafka project.

From the class NamedTopologyIntegrationTest, method setup.

@Before
public void setup() throws Exception {
    appId = safeUniqueTestName(NamedTopologyIntegrationTest.class, testName);
    changelog1 = TOPIC_PREFIX + "-" + TOPOLOGY_1 + "-store-changelog";
    changelog2 = TOPIC_PREFIX + "-" + TOPOLOGY_2 + "-store-changelog";
    changelog3 = TOPIC_PREFIX + "-" + TOPOLOGY_3 + "-store-changelog";
    props = configProps(appId, "host1");
    streams = new KafkaStreamsNamedTopologyWrapper(props, clientSupplier);
    topology1Builder = streams.newNamedTopologyBuilder(TOPOLOGY_1);
    topology1BuilderDup = streams.newNamedTopologyBuilder(TOPOLOGY_1);
    topology2Builder = streams.newNamedTopologyBuilder(TOPOLOGY_2);
    topology3Builder = streams.newNamedTopologyBuilder(TOPOLOGY_3);
    // TODO KAFKA-12648: refactor to avoid deleting & (re)creating output topics for each test
    CLUSTER.createTopic(OUTPUT_STREAM_1, 2, 1);
    CLUSTER.createTopic(OUTPUT_STREAM_2, 2, 1);
    CLUSTER.createTopic(OUTPUT_STREAM_3, 2, 1);
}
Also used : KafkaStreamsNamedTopologyWrapper(org.apache.kafka.streams.processor.internals.namedtopology.KafkaStreamsNamedTopologyWrapper) Before(org.junit.Before)
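
configProps(appId, host) is a helper defined elsewhere in NamedTopologyIntegrationTest. A plausible sketch is shown below; the exact entries (port, serdes) are assumptions rather than the real implementation, and CLUSTER again stands for the test's embedded Kafka cluster.

// Hypothetical reconstruction of the configProps helper used in this setup method.
private static Properties configProps(final String appId, final String host) {
    final Properties props = new Properties();
    props.put(StreamsConfig.APPLICATION_ID_CONFIG, appId);
    props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers());
    // Each instance advertises a distinct host so interactive queries can be routed between them
    props.put(StreamsConfig.APPLICATION_SERVER_CONFIG, host + ":2020");
    props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.StringSerde.class);
    props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.StringSerde.class);
    return props;
}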

Example 3 with KafkaStreamsNamedTopologyWrapper

Use of org.apache.kafka.streams.processor.internals.namedtopology.KafkaStreamsNamedTopologyWrapper in the Apache Kafka project.

From the class StoreQueryIntegrationTest, method createNamedTopologyKafkaStreams.

private KafkaStreamsNamedTopologyWrapper createNamedTopologyKafkaStreams(final Properties config) {
    final KafkaStreamsNamedTopologyWrapper streams = new KafkaStreamsNamedTopologyWrapper(config);
    streamsToCleanup.add(streams);
    return streams;
}
Also used : KafkaStreamsNamedTopologyWrapper(org.apache.kafka.streams.processor.internals.namedtopology.KafkaStreamsNamedTopologyWrapper)
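
The streamsToCleanup collection suggests that every wrapper created through this factory is closed in a teardown method, so a failed test cannot leak running instances. A sketch of such a teardown follows; the field type and the close timeout are assumptions.

// Hypothetical cleanup; the real test may also purge local state between runs.
private final List<KafkaStreams> streamsToCleanup = new ArrayList<>();

@After
public void teardown() {
    for (final KafkaStreams streams : streamsToCleanup) {
        streams.close(Duration.ofSeconds(30));
    }
    streamsToCleanup.clear();
}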

Example 4 with KafkaStreamsNamedTopologyWrapper

Use of org.apache.kafka.streams.processor.internals.namedtopology.KafkaStreamsNamedTopologyWrapper in the Apache Kafka project.

From the class NamedTopologyIntegrationTest, method setupSecondKafkaStreams.

private void setupSecondKafkaStreams() {
    props2 = configProps(appId, "host2");
    streams2 = new KafkaStreamsNamedTopologyWrapper(props2, clientSupplier);
    topology1Builder2 = streams2.newNamedTopologyBuilder(TOPOLOGY_1);
    topology2Builder2 = streams2.newNamedTopologyBuilder(TOPOLOGY_2);
}
Also used : KafkaStreamsNamedTopologyWrapper(org.apache.kafka.streams.processor.internals.namedtopology.KafkaStreamsNamedTopologyWrapper)
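
Following the same pattern as Example 1, the second instance's topologies would then be registered and the instance started. A minimal sketch; where exactly this happens in the test is an assumption.

// Sketch only: mirror the addNamedTopology/start pattern from Example 1 on the second instance.
streams2.addNamedTopology(topology1Builder2.build());
streams2.addNamedTopology(topology2Builder2.build());
streams2.start();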

Example 5 with KafkaStreamsNamedTopologyWrapper

Use of org.apache.kafka.streams.processor.internals.namedtopology.KafkaStreamsNamedTopologyWrapper in the Apache Kafka project.

From the class StoreQueryIntegrationTest, method shouldQuerySpecificStalePartitionStoresMultiStreamThreadsNamedTopology.

@Test
public void shouldQuerySpecificStalePartitionStoresMultiStreamThreadsNamedTopology() throws Exception {
    final int batch1NumMessages = 100;
    final int key = 1;
    final Semaphore semaphore = new Semaphore(0);
    final int numStreamThreads = 2;
    final Properties streamsConfiguration1 = streamsConfiguration();
    streamsConfiguration1.put(StreamsConfig.NUM_STREAM_THREADS_CONFIG, numStreamThreads);
    final Properties streamsConfiguration2 = streamsConfiguration();
    streamsConfiguration2.put(StreamsConfig.NUM_STREAM_THREADS_CONFIG, numStreamThreads);
    final String topologyA = "topology-A";
    final KafkaStreamsNamedTopologyWrapper kafkaStreams1 = createNamedTopologyKafkaStreams(streamsConfiguration1);
    final KafkaStreamsNamedTopologyWrapper kafkaStreams2 = createNamedTopologyKafkaStreams(streamsConfiguration2);
    final List<KafkaStreams> kafkaStreamsList = Arrays.asList(kafkaStreams1, kafkaStreams2);
    final NamedTopologyBuilder builder1A = kafkaStreams1.newNamedTopologyBuilder(topologyA, streamsConfiguration1);
    getStreamsBuilderWithTopology(builder1A, semaphore);
    final NamedTopologyBuilder builder2A = kafkaStreams2.newNamedTopologyBuilder(topologyA, streamsConfiguration2);
    getStreamsBuilderWithTopology(builder2A, semaphore);
    kafkaStreams1.start(builder1A.build());
    kafkaStreams2.start(builder2A.build());
    waitForApplicationState(kafkaStreamsList, State.RUNNING, Duration.ofSeconds(60));
    assertTrue(kafkaStreams1.metadataForLocalThreads().size() > 1);
    assertTrue(kafkaStreams2.metadataForLocalThreads().size() > 1);
    produceValueRange(key, 0, batch1NumMessages);
    // Assert that all messages in the first batch were processed in a timely manner
    assertThat(semaphore.tryAcquire(batch1NumMessages, 60, TimeUnit.SECONDS), is(equalTo(true)));
    final KeyQueryMetadata keyQueryMetadata = kafkaStreams1.queryMetadataForKey(TABLE_NAME, key, new IntegerSerializer(), topologyA);
    // key belongs to this partition
    final int keyPartition = keyQueryMetadata.partition();
    // key does not belong to this partition
    final int keyDontBelongPartition = (keyPartition == 0) ? 1 : 0;
    final QueryableStoreType<ReadOnlyKeyValueStore<Integer, Integer>> queryableStoreType = keyValueStore();
    // Assert that both active and standby are able to query for a key
    final NamedTopologyStoreQueryParameters<ReadOnlyKeyValueStore<Integer, Integer>> param =
        NamedTopologyStoreQueryParameters.fromNamedTopologyAndStoreNameAndType(topologyA, TABLE_NAME, queryableStoreType)
            .enableStaleStores()
            .withPartition(keyPartition);
    TestUtils.waitForCondition(() -> {
        final ReadOnlyKeyValueStore<Integer, Integer> store1 = getStore(kafkaStreams1, param);
        return store1.get(key) != null;
    }, "store1 cannot find results for key");
    TestUtils.waitForCondition(() -> {
        final ReadOnlyKeyValueStore<Integer, Integer> store2 = getStore(kafkaStreams2, param);
        return store2.get(key) != null;
    }, "store2 cannot find results for key");
    final NamedTopologyStoreQueryParameters<ReadOnlyKeyValueStore<Integer, Integer>> otherParam =
        NamedTopologyStoreQueryParameters.fromNamedTopologyAndStoreNameAndType(topologyA, TABLE_NAME, queryableStoreType)
            .enableStaleStores()
            .withPartition(keyDontBelongPartition);
    final ReadOnlyKeyValueStore<Integer, Integer> store3 = getStore(kafkaStreams1, otherParam);
    final ReadOnlyKeyValueStore<Integer, Integer> store4 = getStore(kafkaStreams2, otherParam);
    // Assert that the stores for the partition the key does not belong to hold no value for it
    assertThat(store3.get(key), is(nullValue()));
    assertThat(store4.get(key), is(nullValue()));
}
Also used : KafkaStreams(org.apache.kafka.streams.KafkaStreams) Semaphore(java.util.concurrent.Semaphore) Matchers.containsString(org.hamcrest.Matchers.containsString) ReadOnlyKeyValueStore(org.apache.kafka.streams.state.ReadOnlyKeyValueStore) Properties(java.util.Properties) IntegerSerializer(org.apache.kafka.common.serialization.IntegerSerializer) KeyQueryMetadata(org.apache.kafka.streams.KeyQueryMetadata) KafkaStreamsNamedTopologyWrapper(org.apache.kafka.streams.processor.internals.namedtopology.KafkaStreamsNamedTopologyWrapper) NamedTopologyBuilder(org.apache.kafka.streams.processor.internals.namedtopology.NamedTopologyBuilder) IntegrationTest(org.apache.kafka.test.IntegrationTest) Test(org.junit.Test)
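
getStreamsBuilderWithTopology(builder, semaphore) and produceValueRange(key, from, to) are helpers defined elsewhere in StoreQueryIntegrationTest. Below is a plausible sketch of the first one: it materializes the input topic as a key-value store named TABLE_NAME and releases one semaphore permit per processed record, which is what the tryAcquire assertion above waits on. The input topic constant and the serdes are assumptions.

// Hypothetical reconstruction of the topology helper used in Example 5.
// Assumes: org.apache.kafka.streams.kstream.Consumed and Materialized,
// org.apache.kafka.streams.state.KeyValueStore, org.apache.kafka.common.utils.Bytes.
private void getStreamsBuilderWithTopology(final NamedTopologyBuilder builder, final Semaphore semaphore) {
    builder.table(
            INPUT_TOPIC_NAME, // assumed constant naming the input topic
            Consumed.with(Serdes.Integer(), Serdes.Integer()),
            Materialized.<Integer, Integer, KeyValueStore<Bytes, byte[]>>as(TABLE_NAME))
        .toStream()
        // Release one permit per record so the test can wait for the whole batch to be processed
        .peek((key, value) -> semaphore.release());
}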

Aggregations

KafkaStreamsNamedTopologyWrapper (org.apache.kafka.streams.processor.internals.namedtopology.KafkaStreamsNamedTopologyWrapper): 6
Properties (java.util.Properties): 2
IntegerSerializer (org.apache.kafka.common.serialization.IntegerSerializer): 2
NamedTopologyBuilder (org.apache.kafka.streams.processor.internals.namedtopology.NamedTopologyBuilder): 2
IntegrationTest (org.apache.kafka.test.IntegrationTest): 2
Before (org.junit.Before): 2
Test (org.junit.Test): 2
IOException (java.io.IOException): 1
Field (java.lang.reflect.Field): 1
Arrays (java.util.Arrays): 1
Semaphore (java.util.concurrent.Semaphore): 1
AtomicInteger (java.util.concurrent.atomic.AtomicInteger): 1
IntegerDeserializer (org.apache.kafka.common.serialization.IntegerDeserializer): 1
Serdes (org.apache.kafka.common.serialization.Serdes): 1
StringDeserializer (org.apache.kafka.common.serialization.StringDeserializer): 1
StringSerializer (org.apache.kafka.common.serialization.StringSerializer): 1
Utils.mkEntry (org.apache.kafka.common.utils.Utils.mkEntry): 1
Utils.mkMap (org.apache.kafka.common.utils.Utils.mkMap): 1
Utils.mkObjectProperties (org.apache.kafka.common.utils.Utils.mkObjectProperties): 1
KafkaStreams (org.apache.kafka.streams.KafkaStreams): 1