
Example 46 with KStreamBuilder

use of org.apache.kafka.streams.kstream.KStreamBuilder in project kafka by apache.

the class KStreamsFineGrainedAutoResetIntegrationTest method shouldThrowStreamsExceptionNoResetSpecified.

@Test
public void shouldThrowStreamsExceptionNoResetSpecified() throws Exception {
    Properties props = new Properties();
    props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "none");
    Properties localConfig = StreamsTestUtils.getStreamsConfig("testAutoOffsetWithNone", CLUSTER.bootstrapServers(), STRING_SERDE_CLASSNAME, STRING_SERDE_CLASSNAME, props);
    final KStreamBuilder builder = new KStreamBuilder();
    final KStream<String, String> exceptionStream = builder.stream(NOOP);
    exceptionStream.to(stringSerde, stringSerde, DEFAULT_OUTPUT_TOPIC);
    KafkaStreams streams = new KafkaStreams(builder, localConfig);
    final TestingUncaughtExceptionHandler uncaughtExceptionHandler = new TestingUncaughtExceptionHandler();
    final TestCondition correctExceptionThrownCondition = new TestCondition() {

        @Override
        public boolean conditionMet() {
            return uncaughtExceptionHandler.correctExceptionThrown;
        }
    };
    streams.setUncaughtExceptionHandler(uncaughtExceptionHandler);
    streams.start();
    TestUtils.waitForCondition(correctExceptionThrownCondition, "The expected NoOffsetForPartitionException was never thrown");
    streams.close();
}
Also used : KStreamBuilder(org.apache.kafka.streams.kstream.KStreamBuilder), KafkaStreams(org.apache.kafka.streams.KafkaStreams), TestCondition(org.apache.kafka.test.TestCondition), Properties(java.util.Properties), Test(org.junit.Test)
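
The TestingUncaughtExceptionHandler referenced above is a helper defined elsewhere in the test class. A minimal sketch of what it might look like, assuming it only needs to flag that the stream thread died with a StreamsException caused by a NoOffsetForPartitionException (the class name and the correctExceptionThrown field come from the snippet; the body is an assumption, not the upstream implementation):

// Hypothetical sketch: records whether the uncaught exception chain matches what
// the "none" auto.offset.reset policy should produce for a consumer group that has
// no committed offsets.
private static class TestingUncaughtExceptionHandler implements Thread.UncaughtExceptionHandler {

    volatile boolean correctExceptionThrown = false;

    @Override
    public void uncaughtException(final Thread t, final Throwable e) {
        correctExceptionThrown = e instanceof StreamsException
            && e.getCause() instanceof NoOffsetForPartitionException;
    }
}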

Example 47 with KStreamBuilder

use of org.apache.kafka.streams.kstream.KStreamBuilder in project kafka by apache.

the class KStreamsFineGrainedAutoResetIntegrationTest method shouldThrowExceptionOverlappingTopic.

@Test(expected = TopologyBuilderException.class)
public void shouldThrowExceptionOverlappingTopic() throws Exception {
    final KStreamBuilder builder = new KStreamBuilder();
    // NOTE: this would realistically be caught when building the topology; the test is included for completeness
    final KStream<String, String> pattern1Stream = builder.stream(KStreamBuilder.AutoOffsetReset.EARLIEST, Pattern.compile("topic-[A-D]"));
    final KStream<String, String> pattern2Stream = builder.stream(KStreamBuilder.AutoOffsetReset.LATEST, Pattern.compile("topic-\\d]"));
    final KStream<String, String> namedTopicsStream = builder.stream(KStreamBuilder.AutoOffsetReset.LATEST, TOPIC_A, TOPIC_Z);
    builder.latestResetTopicsPattern();
}
Also used : KStreamBuilder(org.apache.kafka.streams.kstream.KStreamBuilder), Test(org.junit.Test)
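
For contrast, a subscription whose pattern and named topics do not overlap lets the builder derive the offset-reset patterns without throwing. A minimal sketch against the same pre-1.0 KStreamBuilder API (the topic names here are assumptions, not constants from the test class):

final KStreamBuilder builder = new KStreamBuilder();
// The pattern and the named topics cover disjoint sets of topic names, so deriving
// the reset patterns succeeds instead of raising a TopologyBuilderException.
final KStream<String, String> patternStream = builder.stream(KStreamBuilder.AutoOffsetReset.EARLIEST, Pattern.compile("topic-[A-D]"));
final KStream<String, String> namedTopicsStream = builder.stream(KStreamBuilder.AutoOffsetReset.LATEST, "topic-Y", "topic-Z");
builder.latestResetTopicsPattern();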

Example 48 with KStreamBuilder

use of org.apache.kafka.streams.kstream.KStreamBuilder in project kafka by apache.

the class QueryableStateIntegrationTest method shouldBeAbleToQueryState.

@Test
public void shouldBeAbleToQueryState() throws Exception {
    final KStreamBuilder builder = new KStreamBuilder();
    final String[] keys = { "hello", "goodbye", "welcome", "go", "kafka" };
    final Set<KeyValue<String, String>> batch1 = new TreeSet<>(stringComparator);
    batch1.addAll(Arrays.asList(new KeyValue<>(keys[0], "hello"), new KeyValue<>(keys[1], "goodbye"), new KeyValue<>(keys[2], "welcome"), new KeyValue<>(keys[3], "go"), new KeyValue<>(keys[4], "kafka")));
    final Set<KeyValue<String, Long>> expectedCount = new TreeSet<>(stringLongComparator);
    for (final String key : keys) {
        expectedCount.add(new KeyValue<>(key, 1L));
    }
    IntegrationTestUtils.produceKeyValuesSynchronously(streamOne, batch1, TestUtils.producerConfig(CLUSTER.bootstrapServers(), StringSerializer.class, StringSerializer.class, new Properties()), mockTime);
    final KStream<String, String> s1 = builder.stream(streamOne);
    // Non-windowed count, materialized as the queryable state store "my-count"
    s1.groupByKey().count("my-count").to(Serdes.String(), Serdes.Long(), outputTopic);
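    // Windowed count over WINDOW_SIZE, materialized as the queryable state store "windowed-count"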
    s1.groupByKey().count(TimeWindows.of(WINDOW_SIZE), "windowed-count");
    kafkaStreams = new KafkaStreams(builder, streamsConfiguration);
    kafkaStreams.start();
    waitUntilAtLeastNumRecordProcessed(outputTopic, 1);
    final ReadOnlyKeyValueStore<String, Long> myCount = kafkaStreams.store("my-count", QueryableStoreTypes.<String, Long>keyValueStore());
    final ReadOnlyWindowStore<String, Long> windowStore = kafkaStreams.store("windowed-count", QueryableStoreTypes.<String, Long>windowStore());
    verifyCanGetByKey(keys, expectedCount, expectedCount, windowStore, myCount);
    verifyRangeAndAll(expectedCount, myCount);
}
Also used : KStreamBuilder(org.apache.kafka.streams.kstream.KStreamBuilder), KafkaStreams(org.apache.kafka.streams.KafkaStreams), KeyValue(org.apache.kafka.streams.KeyValue), Properties(java.util.Properties), TreeSet(java.util.TreeSet), StringSerializer(org.apache.kafka.common.serialization.StringSerializer), KafkaStreamsTest(org.apache.kafka.streams.KafkaStreamsTest), Test(org.junit.Test)
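
The verification helpers (verifyCanGetByKey, verifyRangeAndAll, waitUntilAtLeastNumRecordProcessed) live elsewhere in QueryableStateIntegrationTest. A minimal sketch of the kind of check verifyRangeAndAll might run against the key-value store, using only the ReadOnlyKeyValueStore API (the body is an assumption, not the upstream helper):

// Hypothetical sketch: iterate the whole "my-count" store and compare it with the
// expected per-key counts built up in the test above.
final Set<KeyValue<String, Long>> allCounts = new TreeSet<>(stringLongComparator);
try (final KeyValueIterator<String, Long> iterator = myCount.all()) {
    while (iterator.hasNext()) {
        allCounts.add(iterator.next());
    }
}
assertEquals(expectedCount, allCounts);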

Example 49 with KStreamBuilder

use of org.apache.kafka.streams.kstream.KStreamBuilder in project kafka by apache.

the class ResetIntegrationTest method setupTopologyWithoutIntermediateUserTopic.

private KStreamBuilder setupTopologyWithoutIntermediateUserTopic() {
    final KStreamBuilder builder = new KStreamBuilder();
    final KStream<Long, String> input = builder.stream(INPUT_TOPIC);
    // map the value to the key (the key itself is unchanged); unlike the with-intermediate-topic
    // variant of this test, no groupByKey follows here, so no repartition topic is created
    input.map(new KeyValueMapper<Long, String, KeyValue<Long, Long>>() {

        @Override
        public KeyValue<Long, Long> apply(final Long key, final String value) {
            return new KeyValue<>(key, key);
        }
    }).to(Serdes.Long(), Serdes.Long(), OUTPUT_TOPIC);
    return builder;
}
Also used : KStreamBuilder(org.apache.kafka.streams.kstream.KStreamBuilder), KeyValue(org.apache.kafka.streams.KeyValue), KeyValueMapper(org.apache.kafka.streams.kstream.KeyValueMapper)
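
The builder returned here is driven by the surrounding reset test. A minimal sketch of how it might be used, assuming a streamsConfiguration Properties object like the ones in the other examples; wiping local state with cleanUp() before re-processing is the behaviour ResetIntegrationTest exercises:

// Hypothetical driver for the topology above (the configuration name is assumed):
// cleanUp() removes the application's local state directory so the job can be
// re-run from scratch after the input offsets have been reset.
final KafkaStreams streams = new KafkaStreams(setupTopologyWithoutIntermediateUserTopic(), streamsConfiguration);
streams.cleanUp();
streams.start();
// ... produce records to INPUT_TOPIC and verify the records arriving in OUTPUT_TOPIC ...
streams.close();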

Example 50 with KStreamBuilder

use of org.apache.kafka.streams.kstream.KStreamBuilder in project kafka by apache.

the class JoinIntegrationTest method prepareTopology.

@Before
public void prepareTopology() throws Exception {
    CLUSTER.createTopic(INPUT_TOPIC_1);
    CLUSTER.createTopic(INPUT_TOPIC_2);
    CLUSTER.createTopic(OUTPUT_TOPIC);
    builder = new KStreamBuilder();
    leftTable = builder.table(INPUT_TOPIC_1, "leftTable");
    rightTable = builder.table(INPUT_TOPIC_2, "rightTable");
    leftStream = leftTable.toStream();
    rightStream = rightTable.toStream();
}
Also used : KStreamBuilder(org.apache.kafka.streams.kstream.KStreamBuilder), Before(org.junit.Before)
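
With the tables and streams prepared in this @Before method, the individual test cases can exercise the different join variants. A minimal sketch of a stream-stream join over the prepared inputs (the join window and the value joiner are assumptions, not taken from JoinIntegrationTest):

// Hypothetical stream-stream join: concatenate left and right values that arrive
// within ten seconds of each other and write the result to OUTPUT_TOPIC.
leftStream.join(rightStream, new ValueJoiner<String, String, String>() {

    @Override
    public String apply(final String leftValue, final String rightValue) {
        return leftValue + "-" + rightValue;
    }
}, JoinWindows.of(10 * 1000L)).to(OUTPUT_TOPIC);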

Aggregations

KStreamBuilder (org.apache.kafka.streams.kstream.KStreamBuilder): 69
Test (org.junit.Test): 51
KStreamTestDriver (org.apache.kafka.test.KStreamTestDriver): 21
Properties (java.util.Properties): 20
HashSet (java.util.HashSet): 15
Set (java.util.Set): 13
KafkaStreams (org.apache.kafka.streams.KafkaStreams): 13
HashMap (java.util.HashMap): 12
StreamsConfig (org.apache.kafka.streams.StreamsConfig): 12
TopicPartition (org.apache.kafka.common.TopicPartition): 10
Metrics (org.apache.kafka.common.metrics.Metrics): 10
KeyValue (org.apache.kafka.streams.KeyValue): 10
TaskId (org.apache.kafka.streams.processor.TaskId): 9
MockTime (org.apache.kafka.common.utils.MockTime): 7
StreamsMetrics (org.apache.kafka.streams.StreamsMetrics): 7
Predicate (org.apache.kafka.streams.kstream.Predicate): 7
MockClientSupplier (org.apache.kafka.test.MockClientSupplier): 7
MockProcessorSupplier (org.apache.kafka.test.MockProcessorSupplier): 7
Collection (java.util.Collection): 5
KeyValueMapper (org.apache.kafka.streams.kstream.KeyValueMapper): 5