Example 56 with KeyValueTimestamp

Use of org.apache.kafka.streams.KeyValueTimestamp in project kafka by apache.

From class SuppressionIntegrationTest, method shouldAllowDisablingChangelog.

@Test
public void shouldAllowDisablingChangelog() {
    final String testId = "-shouldAllowDisablingChangelog";
    final String appId = getClass().getSimpleName().toLowerCase(Locale.getDefault()) + testId;
    final String input = "input" + testId;
    final String outputSuppressed = "output-suppressed" + testId;
    final String outputRaw = "output-raw" + testId;
    cleanStateBeforeTest(CLUSTER, input, outputRaw, outputSuppressed);
    final StreamsBuilder builder = new StreamsBuilder();
    final KStream<String, String> inputStream = builder.stream(input);
    final KTable<String, String> valueCounts = inputStream.groupByKey().aggregate(() -> "()", (key, value, aggregate) -> aggregate + ",(" + key + ": " + value + ")");
    // Suppress with an in-memory buffer of one record, emitting early when the buffer fills,
    // and with changelog logging disabled so no suppression changelog topic is created.
    valueCounts.suppress(untilTimeLimit(ofMillis(MAX_VALUE), maxRecords(1L).emitEarlyWhenFull().withLoggingDisabled())).toStream().to(outputSuppressed);
    valueCounts.toStream().to(outputRaw);
    final Properties streamsConfig = getStreamsConfig(appId);
    streamsConfig.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.StringSerde.class);
    streamsConfig.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.StringSerde.class);
    final KafkaStreams driver = IntegrationTestUtils.getStartedStreams(streamsConfig, builder, true);
    try {
        produceSynchronously(input, asList(new KeyValueTimestamp<>("k1", "v1", scaledTime(0L)), new KeyValueTimestamp<>("k1", "v2", scaledTime(1L)), new KeyValueTimestamp<>("k2", "v1", scaledTime(2L)), new KeyValueTimestamp<>("x", "x", scaledTime(3L))));
        final boolean rawRecords = waitForAnyRecord(outputRaw);
        final boolean suppressedRecords = waitForAnyRecord(outputSuppressed);
        // Verify that no changelog topic was created for the suppression buffer.
        final Set<String> suppressChangeLog = CLUSTER.getAllTopicsInCluster().stream().filter(s -> s.contains("-changelog")).filter(s -> s.contains("KTABLE-SUPPRESS")).collect(Collectors.toSet());
        assertThat(suppressChangeLog, is(empty()));
        assertThat(rawRecords, Matchers.is(true));
        assertThat(suppressedRecords, is(true));
    } finally {
        driver.close();
        quietlyCleanStateAfterTest(CLUSTER, driver);
    }
}
Also used : StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) CoreMatchers.is(org.hamcrest.CoreMatchers.is) CoreMatchers.hasItem(org.hamcrest.CoreMatchers.hasItem) Produced(org.apache.kafka.streams.kstream.Produced) MAX_VALUE(java.lang.Long.MAX_VALUE) Utils.mkProperties(org.apache.kafka.common.utils.Utils.mkProperties) ConsumerRecords(org.apache.kafka.clients.consumer.ConsumerRecords) Utils.mkMap(org.apache.kafka.common.utils.Utils.mkMap) StringDeserializer(org.apache.kafka.common.serialization.StringDeserializer) Serde(org.apache.kafka.common.serialization.Serde) Locale(java.util.Locale) Arrays.asList(java.util.Arrays.asList) KeyValueStore(org.apache.kafka.streams.state.KeyValueStore) Map(java.util.Map) BufferConfig.maxRecords(org.apache.kafka.streams.kstream.Suppressed.BufferConfig.maxRecords) Serdes(org.apache.kafka.common.serialization.Serdes) StringSerializer(org.apache.kafka.common.serialization.StringSerializer) Consumer(org.apache.kafka.clients.consumer.Consumer) TopicPartition(org.apache.kafka.common.TopicPartition) AfterClass(org.junit.AfterClass) TestUtils(org.apache.kafka.test.TestUtils) KeyValue(org.apache.kafka.streams.KeyValue) Set(java.util.Set) ConsumerConfig(org.apache.kafka.clients.consumer.ConsumerConfig) Category(org.junit.experimental.categories.Category) AT_LEAST_ONCE(org.apache.kafka.streams.StreamsConfig.AT_LEAST_ONCE) Collectors(java.util.stream.Collectors) Bytes(org.apache.kafka.common.utils.Bytes) IntegrationTestUtils(org.apache.kafka.streams.integration.utils.IntegrationTestUtils) List(java.util.List) Utils.mkEntry(org.apache.kafka.common.utils.Utils.mkEntry) Materialized(org.apache.kafka.streams.kstream.Materialized) Optional(java.util.Optional) Duration.ofMillis(java.time.Duration.ofMillis) IntegrationTestUtils.quietlyCleanStateAfterTest(org.apache.kafka.streams.integration.utils.IntegrationTestUtils.quietlyCleanStateAfterTest) KafkaConsumer(org.apache.kafka.clients.consumer.KafkaConsumer) StreamsConfig(org.apache.kafka.streams.StreamsConfig) BeforeClass(org.junit.BeforeClass) IntegrationTest(org.apache.kafka.test.IntegrationTest) KStream(org.apache.kafka.streams.kstream.KStream) BufferConfig.maxBytes(org.apache.kafka.streams.kstream.Suppressed.BufferConfig.maxBytes) DEFAULT_TIMEOUT(org.apache.kafka.streams.integration.utils.IntegrationTestUtils.DEFAULT_TIMEOUT) EmbeddedKafkaCluster(org.apache.kafka.streams.integration.utils.EmbeddedKafkaCluster) MatcherAssert.assertThat(org.hamcrest.MatcherAssert.assertThat) ProducerConfig(org.apache.kafka.clients.producer.ProducerConfig) StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) KTable(org.apache.kafka.streams.kstream.KTable) Matchers.empty(org.hamcrest.Matchers.empty) Properties(java.util.Properties) Consumed(org.apache.kafka.streams.kstream.Consumed) TestUtils.waitForCondition(org.apache.kafka.test.TestUtils.waitForCondition) Matchers(org.hamcrest.Matchers) Test(org.junit.Test) IOException(java.io.IOException) KeyValueTimestamp(org.apache.kafka.streams.KeyValueTimestamp) Grouped(org.apache.kafka.streams.kstream.Grouped) Serializer(org.apache.kafka.common.serialization.Serializer) KafkaStreams(org.apache.kafka.streams.KafkaStreams) IntegrationTestUtils.cleanStateBeforeTest(org.apache.kafka.streams.integration.utils.IntegrationTestUtils.cleanStateBeforeTest) Suppressed.untilTimeLimit(org.apache.kafka.streams.kstream.Suppressed.untilTimeLimit) Collections(java.util.Collections) KafkaStreams(org.apache.kafka.streams.KafkaStreams) Serdes(org.apache.kafka.common.serialization.Serdes) 
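
For readers who want the topology without the test harness, here is a minimal standalone sketch of the same idea: an aggregation whose suppression buffer has changelog logging disabled, so no KTABLE-SUPPRESS changelog topic is created for it. The topic names, application id, and bootstrap server below are invented for illustration and are not part of the original test.

import static java.lang.Long.MAX_VALUE;
import static java.time.Duration.ofMillis;
import static org.apache.kafka.streams.kstream.Suppressed.BufferConfig.maxRecords;
import static org.apache.kafka.streams.kstream.Suppressed.untilTimeLimit;

import java.util.Properties;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.kstream.KTable;

public class SuppressWithoutChangelogSketch {
    public static void main(final String[] args) {
        final StreamsBuilder builder = new StreamsBuilder();

        // Aggregate the input into a per-key string, exactly as the test does.
        final KTable<String, String> valueCounts = builder.<String, String>stream("input-topic")
            .groupByKey()
            .aggregate(() -> "()", (key, value, aggregate) -> aggregate + ",(" + key + ": " + value + ")");

        // Suppress with an in-memory buffer of one record and changelog logging disabled,
        // so the suppression buffer gets no backing -changelog topic.
        valueCounts
            .suppress(untilTimeLimit(ofMillis(MAX_VALUE), maxRecords(1L).emitEarlyWhenFull().withLoggingDisabled()))
            .toStream()
            .to("output-suppressed");

        final Properties config = new Properties();
        config.put(StreamsConfig.APPLICATION_ID_CONFIG, "suppress-sketch");        // hypothetical id
        config.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");       // hypothetical broker
        config.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.StringSerde.class);
        config.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.StringSerde.class);

        new KafkaStreams(builder.build(), config).start();
    }
}

Because the buffer is limited to a single record and configured to emitEarlyWhenFull(), suppressed results are forwarded as soon as a second key arrives, which is why the test above only needs a handful of input records before it observes output on the suppressed topic.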

Example 57 with KeyValueTimestamp

Use of org.apache.kafka.streams.KeyValueTimestamp in project kafka by apache.

From class IntegrationTestUtils, method waitUntilFinalKeyValueRecordsReceived.

@SuppressWarnings("unchecked")
private static <K, V, T> List<T> waitUntilFinalKeyValueRecordsReceived(final Properties consumerConfig, final String topic, final List<T> expectedRecords, final long waitTime, final boolean withTimestamp) throws Exception {
    final List<T> accumData = new ArrayList<>();
    try (final Consumer<K, V> consumer = createConsumer(consumerConfig)) {
        final TestCondition valuesRead = () -> {
            final List<T> readData;
            if (withTimestamp) {
                readData = (List<T>) readKeyValuesWithTimestamp(topic, consumer, waitTime, expectedRecords.size());
            } else {
                readData = (List<T>) readKeyValues(topic, consumer, waitTime, expectedRecords.size());
            }
            accumData.addAll(readData);
            // filter out all intermediate records we don't want
            final List<T> accumulatedActual = accumData.stream().filter(expectedRecords::contains).collect(Collectors.toList());
            // still need to check that for each key, the ordering is expected
            final Map<K, List<T>> finalAccumData = new HashMap<>();
            for (final T kv : accumulatedActual) {
                finalAccumData.computeIfAbsent(withTimestamp ? ((KeyValueTimestamp<K, V>) kv).key() : ((KeyValue<K, V>) kv).key, key -> new ArrayList<>()).add(kv);
            }
            final Map<K, List<T>> finalExpected = new HashMap<>();
            for (final T kv : expectedRecords) {
                finalExpected.computeIfAbsent(withTimestamp ? ((KeyValueTimestamp<K, V>) kv).key() : ((KeyValue<K, V>) kv).key, key -> new ArrayList<>()).add(kv);
            }
            // and that, for each key, the full sequence of received records matches the expected sequence
            return finalAccumData.equals(finalExpected);
        };
        final String conditionDetails = "Did not receive all " + expectedRecords + " records from topic " + topic + " (got " + accumData + ")";
        TestUtils.waitForCondition(valuesRead, waitTime, conditionDetails);
    }
    return accumData;
}
Also used : KeyValue(org.apache.kafka.streams.KeyValue) ArrayList(java.util.ArrayList) TestCondition(org.apache.kafka.test.TestCondition) List(java.util.List) ArrayList(java.util.ArrayList) LinkedList(java.util.LinkedList) Map(java.util.Map) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) HashMap(java.util.HashMap) KeyValueTimestamp(org.apache.kafka.streams.KeyValueTimestamp)
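
The essential behaviour of this helper is that it does not require a global arrival order: it only requires that, for each key, the filtered records arrive in the expected order. The following is a small, dependency-free sketch of that per-key comparison; the class and method names are invented here and are not part of IntegrationTestUtils.

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Function;

public class PerKeyOrderCheckSketch {

    // Group records by key, preserving encounter order within each key,
    // then compare the resulting per-key sequences for equality.
    static <K, T> boolean sameRecordsPerKeyInOrder(final List<T> actual,
                                                   final List<T> expected,
                                                   final Function<T, K> keyExtractor) {
        return groupByKey(actual, keyExtractor).equals(groupByKey(expected, keyExtractor));
    }

    private static <K, T> Map<K, List<T>> groupByKey(final List<T> records,
                                                     final Function<T, K> keyExtractor) {
        final Map<K, List<T>> grouped = new HashMap<>();
        for (final T record : records) {
            grouped.computeIfAbsent(keyExtractor.apply(record), key -> new ArrayList<>()).add(record);
        }
        return grouped;
    }

    public static void main(final String[] args) {
        // Records are "key:value" strings here purely for illustration.
        final List<String> expected = List.of("k1:v1", "k1:v2", "k2:v1");
        // The interleaving across keys differs, but per-key order is preserved, so the check passes.
        final List<String> actual = List.of("k2:v1", "k1:v1", "k1:v2");
        final Function<String, String> keyOf = record -> record.split(":")[0];
        System.out.println(sameRecordsPerKeyInOrder(actual, expected, keyOf)); // prints true
    }
}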

Example 58 with KeyValueTimestamp

Use of org.apache.kafka.streams.KeyValueTimestamp in project kafka by apache.

From class KStreamImplTest, method shouldProcessFromSourceThatMatchPattern.

@Test
public void shouldProcessFromSourceThatMatchPattern() {
    final KStream<String, String> pattern2Source = builder.stream(Pattern.compile("topic-\\d"));
    pattern2Source.process(processorSupplier);
    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        final TestInputTopic<String, String> inputTopic3 = driver.createInputTopic("topic-3", new StringSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
        final TestInputTopic<String, String> inputTopic4 = driver.createInputTopic("topic-4", new StringSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
        final TestInputTopic<String, String> inputTopic5 = driver.createInputTopic("topic-5", new StringSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
        final TestInputTopic<String, String> inputTopic6 = driver.createInputTopic("topic-6", new StringSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
        final TestInputTopic<String, String> inputTopic7 = driver.createInputTopic("topic-7", new StringSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
        inputTopic3.pipeInput("A", "aa", 1L);
        inputTopic4.pipeInput("B", "bb", 5L);
        inputTopic5.pipeInput("C", "cc", 10L);
        inputTopic6.pipeInput("D", "dd", 8L);
        inputTopic7.pipeInput("E", "ee", 3L);
    }
    assertEquals(asList(new KeyValueTimestamp<>("A", "aa", 1), new KeyValueTimestamp<>("B", "bb", 5), new KeyValueTimestamp<>("C", "cc", 10), new KeyValueTimestamp<>("D", "dd", 8), new KeyValueTimestamp<>("E", "ee", 3)), processorSupplier.theCapturedProcessor().processed());
}
Also used : TopologyTestDriver(org.apache.kafka.streams.TopologyTestDriver) CoreMatchers.containsString(org.hamcrest.CoreMatchers.containsString) StringSerializer(org.apache.kafka.common.serialization.StringSerializer) KeyValueTimestamp(org.apache.kafka.streams.KeyValueTimestamp) Test(org.junit.Test)
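
The test drives the pattern-subscribed source through TopologyTestDriver. Outside of a test, the same subscription can be expressed directly on StreamsBuilder; the sketch below is a hypothetical standalone version, with the application id, bootstrap server, and output topic invented for illustration.

import java.util.Properties;
import java.util.regex.Pattern;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.kstream.Consumed;

public class PatternSourceSketch {
    public static void main(final String[] args) {
        final StreamsBuilder builder = new StreamsBuilder();

        // Subscribe to every topic matching "topic-<digit>".
        builder.stream(Pattern.compile("topic-\\d"), Consumed.with(Serdes.String(), Serdes.String()))
               .peek((key, value) -> System.out.println(key + " -> " + value))
               .to("pattern-output");

        final Properties config = new Properties();
        config.put(StreamsConfig.APPLICATION_ID_CONFIG, "pattern-source-sketch"); // hypothetical id
        config.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");     // hypothetical broker

        new KafkaStreams(builder.build(), config).start();
    }
}

Pattern subscription also picks up matching topics created after the application starts, once the consumer refreshes its metadata.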

Example 59 with KeyValueTimestamp

Use of org.apache.kafka.streams.KeyValueTimestamp in project kafka by apache.

From class KStreamFlatMapValuesTest, method testFlatMapValues.

@Test
public void testFlatMapValues() {
    final StreamsBuilder builder = new StreamsBuilder();
    final ValueMapper<Number, Iterable<String>> mapper = value -> {
        final ArrayList<String> result = new ArrayList<>();
        result.add("v" + value);
        result.add("V" + value);
        return result;
    };
    final int[] expectedKeys = { 0, 1, 2, 3 };
    final KStream<Integer, Integer> stream = builder.stream(topicName, Consumed.with(Serdes.Integer(), Serdes.Integer()));
    final MockApiProcessorSupplier<Integer, String, Void, Void> supplier = new MockApiProcessorSupplier<>();
    stream.flatMapValues(mapper).process(supplier);
    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        final TestInputTopic<Integer, Integer> inputTopic = driver.createInputTopic(topicName, new IntegerSerializer(), new IntegerSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
        for (final int expectedKey : expectedKeys) {
            // pass the timestamp explicitly so the pipeInput overload is unambiguous
            inputTopic.pipeInput(expectedKey, expectedKey, 0L);
        }
    }
    final KeyValueTimestamp[] expected = { new KeyValueTimestamp<>(0, "v0", 0), new KeyValueTimestamp<>(0, "V0", 0), new KeyValueTimestamp<>(1, "v1", 0), new KeyValueTimestamp<>(1, "V1", 0), new KeyValueTimestamp<>(2, "v2", 0), new KeyValueTimestamp<>(2, "V2", 0), new KeyValueTimestamp<>(3, "v3", 0), new KeyValueTimestamp<>(3, "V3", 0) };
    assertArrayEquals(expected, supplier.theCapturedProcessor().processed().toArray());
}
Also used : TopologyTestDriver(org.apache.kafka.streams.TopologyTestDriver) ValueMapper(org.apache.kafka.streams.kstream.ValueMapper) ValueMapperWithKey(org.apache.kafka.streams.kstream.ValueMapperWithKey) StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) Properties(java.util.Properties) Consumed(org.apache.kafka.streams.kstream.Consumed) Test(org.junit.Test) KStream(org.apache.kafka.streams.kstream.KStream) Instant(java.time.Instant) KeyValueTimestamp(org.apache.kafka.streams.KeyValueTimestamp) ArrayList(java.util.ArrayList) MockApiProcessorSupplier(org.apache.kafka.test.MockApiProcessorSupplier) Duration(java.time.Duration) Assert.assertArrayEquals(org.junit.Assert.assertArrayEquals) IntegerSerializer(org.apache.kafka.common.serialization.IntegerSerializer) Serdes(org.apache.kafka.common.serialization.Serdes) TestInputTopic(org.apache.kafka.streams.TestInputTopic) StreamsTestUtils(org.apache.kafka.test.StreamsTestUtils) MockApiProcessorSupplier(org.apache.kafka.test.MockApiProcessorSupplier) ArrayList(java.util.ArrayList) TopologyTestDriver(org.apache.kafka.streams.TopologyTestDriver) IntegerSerializer(org.apache.kafka.common.serialization.IntegerSerializer) StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) KeyValueTimestamp(org.apache.kafka.streams.KeyValueTimestamp) Test(org.junit.Test)
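
As the expected array shows, flatMapValues fans each input record out into zero or more output records that keep the input key and timestamp. Below is a minimal standalone sketch of the same operator in a production-style topology; the topic names and configuration values are invented for illustration.

import java.util.Arrays;
import java.util.Properties;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.kstream.Consumed;
import org.apache.kafka.streams.kstream.Produced;

public class FlatMapValuesSketch {
    public static void main(final String[] args) {
        final StreamsBuilder builder = new StreamsBuilder();

        // Each input value fans out into zero or more output values; the key and
        // timestamp of every output record are inherited from the input record.
        builder.stream("sentences", Consumed.with(Serdes.String(), Serdes.String()))
               .flatMapValues(value -> Arrays.asList(value.toLowerCase().split("\\W+")))
               .to("words", Produced.with(Serdes.String(), Serdes.String()));

        final Properties config = new Properties();
        config.put(StreamsConfig.APPLICATION_ID_CONFIG, "flat-map-values-sketch"); // hypothetical id
        config.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");      // hypothetical broker

        new KafkaStreams(builder.build(), config).start();
    }
}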

Example 60 with KeyValueTimestamp

Use of org.apache.kafka.streams.KeyValueTimestamp in project kafka by apache.

From class KStreamFlatMapValuesTest, method testFlatMapValuesWithKeys.

@Test
public void testFlatMapValuesWithKeys() {
    final StreamsBuilder builder = new StreamsBuilder();
    final ValueMapperWithKey<Integer, Number, Iterable<String>> mapper = (readOnlyKey, value) -> {
        final ArrayList<String> result = new ArrayList<>();
        result.add("v" + value);
        result.add("k" + readOnlyKey);
        return result;
    };
    final int[] expectedKeys = { 0, 1, 2, 3 };
    final KStream<Integer, Integer> stream = builder.stream(topicName, Consumed.with(Serdes.Integer(), Serdes.Integer()));
    final MockApiProcessorSupplier<Integer, String, Void, Void> supplier = new MockApiProcessorSupplier<>();
    stream.flatMapValues(mapper).process(supplier);
    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        final TestInputTopic<Integer, Integer> inputTopic = driver.createInputTopic(topicName, new IntegerSerializer(), new IntegerSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
        for (final int expectedKey : expectedKeys) {
            // pass the timestamp explicitly so the pipeInput overload is unambiguous
            inputTopic.pipeInput(expectedKey, expectedKey, 0L);
        }
    }
    final KeyValueTimestamp[] expected = { new KeyValueTimestamp<>(0, "v0", 0), new KeyValueTimestamp<>(0, "k0", 0), new KeyValueTimestamp<>(1, "v1", 0), new KeyValueTimestamp<>(1, "k1", 0), new KeyValueTimestamp<>(2, "v2", 0), new KeyValueTimestamp<>(2, "k2", 0), new KeyValueTimestamp<>(3, "v3", 0), new KeyValueTimestamp<>(3, "k3", 0) };
    assertArrayEquals(expected, supplier.theCapturedProcessor().processed().toArray());
}
Also used : TopologyTestDriver(org.apache.kafka.streams.TopologyTestDriver) ValueMapper(org.apache.kafka.streams.kstream.ValueMapper) ValueMapperWithKey(org.apache.kafka.streams.kstream.ValueMapperWithKey) StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) Properties(java.util.Properties) Consumed(org.apache.kafka.streams.kstream.Consumed) Test(org.junit.Test) KStream(org.apache.kafka.streams.kstream.KStream) Instant(java.time.Instant) KeyValueTimestamp(org.apache.kafka.streams.KeyValueTimestamp) ArrayList(java.util.ArrayList) MockApiProcessorSupplier(org.apache.kafka.test.MockApiProcessorSupplier) Duration(java.time.Duration) Assert.assertArrayEquals(org.junit.Assert.assertArrayEquals) IntegerSerializer(org.apache.kafka.common.serialization.IntegerSerializer) Serdes(org.apache.kafka.common.serialization.Serdes) TestInputTopic(org.apache.kafka.streams.TestInputTopic) StreamsTestUtils(org.apache.kafka.test.StreamsTestUtils) MockApiProcessorSupplier(org.apache.kafka.test.MockApiProcessorSupplier) ArrayList(java.util.ArrayList) TopologyTestDriver(org.apache.kafka.streams.TopologyTestDriver) IntegerSerializer(org.apache.kafka.common.serialization.IntegerSerializer) StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) KeyValueTimestamp(org.apache.kafka.streams.KeyValueTimestamp) Test(org.junit.Test)
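
The ValueMapperWithKey variant additionally exposes the key to the mapper in read-only form; since the key itself is not changed, no repartitioning is required downstream. A brief hypothetical sketch follows, with topic names and configuration invented for illustration.

import java.util.Arrays;
import java.util.Properties;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.kstream.Consumed;
import org.apache.kafka.streams.kstream.Produced;

public class FlatMapValuesWithKeySketch {
    public static void main(final String[] args) {
        final StreamsBuilder builder = new StreamsBuilder();

        // The read-only key is available to the mapper; because the key is not modified,
        // flatMapValues does not force a repartition before downstream stateful operations.
        builder.stream("orders", Consumed.with(Serdes.String(), Serdes.String()))
               .flatMapValues((orderId, csvLine) -> Arrays.asList((orderId + ":" + csvLine).split(",")))
               .to("order-lines", Produced.with(Serdes.String(), Serdes.String()));

        final Properties config = new Properties();
        config.put(StreamsConfig.APPLICATION_ID_CONFIG, "flat-map-values-with-key-sketch"); // hypothetical id
        config.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");               // hypothetical broker

        new KafkaStreams(builder.build(), config).start();
    }
}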

Aggregations

KeyValueTimestamp (org.apache.kafka.streams.KeyValueTimestamp): 71 uses
Test (org.junit.Test): 65 uses
StreamsBuilder (org.apache.kafka.streams.StreamsBuilder): 46 uses
TopologyTestDriver (org.apache.kafka.streams.TopologyTestDriver): 44 uses
StringSerializer (org.apache.kafka.common.serialization.StringSerializer): 36 uses
Properties (java.util.Properties): 26 uses
Windowed (org.apache.kafka.streams.kstream.Windowed): 22 uses
Serdes (org.apache.kafka.common.serialization.Serdes): 21 uses
IntegrationTest (org.apache.kafka.test.IntegrationTest): 20 uses
StringDeserializer (org.apache.kafka.common.serialization.StringDeserializer): 19 uses
KeyValue (org.apache.kafka.streams.KeyValue): 19 uses
Consumed (org.apache.kafka.streams.kstream.Consumed): 17 uses
IntegerSerializer (org.apache.kafka.common.serialization.IntegerSerializer): 16 uses
MockApiProcessorSupplier (org.apache.kafka.test.MockApiProcessorSupplier): 16 uses
Bytes (org.apache.kafka.common.utils.Bytes): 15 uses
KStream (org.apache.kafka.streams.kstream.KStream): 14 uses
Duration (java.time.Duration): 13 uses
KafkaStreams (org.apache.kafka.streams.KafkaStreams): 13 uses
TestInputTopic (org.apache.kafka.streams.TestInputTopic): 13 uses
Utils.mkProperties (org.apache.kafka.common.utils.Utils.mkProperties): 12 uses