
Example 1 with Consumed

Use of org.apache.kafka.streams.kstream.Consumed in project kafka by apache.

From the class KStreamImplTest, method shouldSupportKeyChangeKTableFromKStream.

@Test
public void shouldSupportKeyChangeKTableFromKStream() {
    final Consumed<String, String> consumed = Consumed.with(Serdes.String(), Serdes.String());
    final StreamsBuilder builder = new StreamsBuilder();
    final String input = "input";
    final String output = "output";
    builder.stream(input, consumed)
        .map((key, value) -> new KeyValue<>(key.charAt(0) - 'A', value))
        .toTable(Materialized.with(Serdes.Integer(), null))
        .toStream()
        .to(output);
    final Topology topology = builder.build();
    final String topologyDescription = topology.describe().toString();
    assertThat(topologyDescription, equalTo(
        "Topologies:\n" +
        "   Sub-topology: 0\n" +
        "    Source: KSTREAM-SOURCE-0000000000 (topics: [input])\n" +
        "      --> KSTREAM-MAP-0000000001\n" +
        "    Processor: KSTREAM-MAP-0000000001 (stores: [])\n" +
        "      --> KSTREAM-FILTER-0000000005\n" +
        "      <-- KSTREAM-SOURCE-0000000000\n" +
        "    Processor: KSTREAM-FILTER-0000000005 (stores: [])\n" +
        "      --> KSTREAM-SINK-0000000004\n" +
        "      <-- KSTREAM-MAP-0000000001\n" +
        "    Sink: KSTREAM-SINK-0000000004 (topic: KSTREAM-TOTABLE-0000000002-repartition)\n" +
        "      <-- KSTREAM-FILTER-0000000005\n" +
        "\n" +
        "  Sub-topology: 1\n" +
        "    Source: KSTREAM-SOURCE-0000000006 (topics: [KSTREAM-TOTABLE-0000000002-repartition])\n" +
        "      --> KSTREAM-TOTABLE-0000000002\n" +
        "    Processor: KSTREAM-TOTABLE-0000000002 (stores: [])\n" +
        "      --> KTABLE-TOSTREAM-0000000007\n" +
        "      <-- KSTREAM-SOURCE-0000000006\n" +
        "    Processor: KTABLE-TOSTREAM-0000000007 (stores: [])\n" +
        "      --> KSTREAM-SINK-0000000008\n" +
        "      <-- KSTREAM-TOTABLE-0000000002\n" +
        "    Sink: KSTREAM-SINK-0000000008 (topic: output)\n" +
        "      <-- KTABLE-TOSTREAM-0000000007\n\n"));
    try (final TopologyTestDriver driver = new TopologyTestDriver(topology, props)) {
        final TestInputTopic<String, String> inputTopic = driver.createInputTopic(input, Serdes.String().serializer(), Serdes.String().serializer());
        final TestOutputTopic<Integer, String> outputTopic = driver.createOutputTopic(output, Serdes.Integer().deserializer(), Serdes.String().deserializer());
        inputTopic.pipeInput("A", "01", 5L);
        inputTopic.pipeInput("B", "02", 100L);
        inputTopic.pipeInput("C", "03", 0L);
        inputTopic.pipeInput("D", "04", 0L);
        inputTopic.pipeInput("A", "05", 10L);
        inputTopic.pipeInput("A", "06", 8L);
        final List<TestRecord<Integer, String>> outputExpectRecords = new ArrayList<>();
        outputExpectRecords.add(new TestRecord<>(0, "01", Instant.ofEpochMilli(5L)));
        outputExpectRecords.add(new TestRecord<>(1, "02", Instant.ofEpochMilli(100L)));
        outputExpectRecords.add(new TestRecord<>(2, "03", Instant.ofEpochMilli(0L)));
        outputExpectRecords.add(new TestRecord<>(3, "04", Instant.ofEpochMilli(0L)));
        outputExpectRecords.add(new TestRecord<>(0, "05", Instant.ofEpochMilli(10L)));
        outputExpectRecords.add(new TestRecord<>(0, "06", Instant.ofEpochMilli(8L)));
        // JUnit's assertEquals takes the expected value first, then the actual.
        assertEquals(outputExpectRecords, outputTopic.readRecordsToList());
    }
}
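
Beyond pairing key and value serdes, Consumed can also carry a timestamp extractor, an offset reset policy, and a processor name. A minimal sketch of a fully configured instance, assuming an illustrative topic name (the builder methods shown are the standard Consumed API; FailOnInvalidTimestamp ships with Kafka Streams):

final Consumed<String, String> detailed = Consumed.with(Serdes.String(), Serdes.String())
    // Fail fast if a record carries an invalid (e.g. negative) timestamp.
    .withTimestampExtractor(new FailOnInvalidTimestamp())
    // Start from the earliest offset when no committed offset exists.
    .withOffsetResetPolicy(Topology.AutoOffsetReset.EARLIEST)
    // Give the source node a stable name in the topology description.
    .withName("input-source");
final KStream<String, String> stream = builder.stream("some-input-topic", detailed);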

Example 2 with Consumed

Use of org.apache.kafka.streams.kstream.Consumed in project kafka by apache.

From the class SmokeTestClient, method getTopology.

public Topology getTopology() {
    final StreamsBuilder builder = new StreamsBuilder();
    final Consumed<String, Integer> stringIntConsumed = Consumed.with(stringSerde, intSerde);
    final KStream<String, Integer> source = builder.stream("data", stringIntConsumed);
    source.filterNot((k, v) -> k.equals("flush")).to("echo", Produced.with(stringSerde, intSerde));
    final KStream<String, Integer> data = source.filter((key, value) -> value == null || value != END);
    data.process(SmokeTestUtil.printProcessorSupplier("data", name));
    // min
    final KGroupedStream<String, Integer> groupedData = data.groupByKey(Grouped.with(stringSerde, intSerde));
    final KTable<Windowed<String>, Integer> minAggregation = groupedData
        .windowedBy(TimeWindows.ofSizeAndGrace(Duration.ofDays(1), Duration.ofMinutes(1)))
        .aggregate(
            () -> Integer.MAX_VALUE,
            (aggKey, value, aggregate) -> (value < aggregate) ? value : aggregate,
            Materialized.<String, Integer, WindowStore<Bytes, byte[]>>as("uwin-min")
                .withValueSerde(intSerde)
                .withRetention(Duration.ofHours(25)));
    streamify(minAggregation, "min-raw");
    streamify(minAggregation.suppress(untilWindowCloses(BufferConfig.unbounded())), "min-suppressed");
    minAggregation.toStream(new Unwindow<>()).filterNot((k, v) -> k.equals("flush")).to("min", Produced.with(stringSerde, intSerde));
    final KTable<Windowed<String>, Integer> smallWindowSum = groupedData
        .windowedBy(TimeWindows.ofSizeAndGrace(Duration.ofSeconds(2), Duration.ofSeconds(30)).advanceBy(Duration.ofSeconds(1)))
        .reduce(Integer::sum);
    streamify(smallWindowSum, "sws-raw");
    streamify(smallWindowSum.suppress(untilWindowCloses(BufferConfig.unbounded())), "sws-suppressed");
    final KTable<String, Integer> minTable = builder.table("min", Consumed.with(stringSerde, intSerde), Materialized.as("minStoreName"));
    minTable.toStream().process(SmokeTestUtil.printProcessorSupplier("min", name));
    // max
    groupedData
        .windowedBy(TimeWindows.ofSizeWithNoGrace(Duration.ofDays(2)))
        .aggregate(
            () -> Integer.MIN_VALUE,
            (aggKey, value, aggregate) -> (value > aggregate) ? value : aggregate,
            Materialized.<String, Integer, WindowStore<Bytes, byte[]>>as("uwin-max").withValueSerde(intSerde))
        .toStream(new Unwindow<>())
        .filterNot((k, v) -> k.equals("flush"))
        .to("max", Produced.with(stringSerde, intSerde));
    final KTable<String, Integer> maxTable = builder.table("max", Consumed.with(stringSerde, intSerde), Materialized.as("maxStoreName"));
    maxTable.toStream().process(SmokeTestUtil.printProcessorSupplier("max", name));
    // sum
    groupedData
        .windowedBy(TimeWindows.ofSizeWithNoGrace(Duration.ofDays(2)))
        .aggregate(
            () -> 0L,
            (aggKey, value, aggregate) -> (long) value + aggregate,
            Materialized.<String, Long, WindowStore<Bytes, byte[]>>as("win-sum").withValueSerde(longSerde))
        .toStream(new Unwindow<>())
        .filterNot((k, v) -> k.equals("flush"))
        .to("sum", Produced.with(stringSerde, longSerde));
    final Consumed<String, Long> stringLongConsumed = Consumed.with(stringSerde, longSerde);
    final KTable<String, Long> sumTable = builder.table("sum", stringLongConsumed);
    sumTable.toStream().process(SmokeTestUtil.printProcessorSupplier("sum", name));
    // cnt
    groupedData
        .windowedBy(TimeWindows.ofSizeWithNoGrace(Duration.ofDays(2)))
        .count(Materialized.as("uwin-cnt"))
        .toStream(new Unwindow<>())
        .filterNot((k, v) -> k.equals("flush"))
        .to("cnt", Produced.with(stringSerde, longSerde));
    final KTable<String, Long> cntTable = builder.table("cnt", Consumed.with(stringSerde, longSerde), Materialized.as("cntStoreName"));
    cntTable.toStream().process(SmokeTestUtil.printProcessorSupplier("cnt", name));
    // dif
    maxTable.join(minTable, (value1, value2) -> value1 - value2).toStream().filterNot((k, v) -> k.equals("flush")).to("dif", Produced.with(stringSerde, intSerde));
    // avg
    sumTable.join(cntTable, (value1, value2) -> (double) value1 / (double) value2).toStream().filterNot((k, v) -> k.equals("flush")).to("avg", Produced.with(stringSerde, doubleSerde));
    // test repartition
    final Agg agg = new Agg();
    cntTable
        .groupBy(agg.selector(), Grouped.with(stringSerde, longSerde))
        .aggregate(
            agg.init(), agg.adder(), agg.remover(),
            Materialized.<String, Long>as(Stores.inMemoryKeyValueStore("cntByCnt"))
                .withKeySerde(Serdes.String())
                .withValueSerde(Serdes.Long()))
        .toStream()
        .to("tagg", Produced.with(stringSerde, longSerde));
    return builder.build();
}
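
The streamify helper called above is defined elsewhere in SmokeTestClient. A hedged sketch of what such a helper plausibly looks like, reusing the flush-marker filtering and serdes seen throughout this class (the exact key flattening in the real helper may differ):

private static void streamify(final KTable<Windowed<String>, Integer> windowedTable, final String topic) {
    windowedTable
        .toStream()
        // Drop the synthetic flush-marker records.
        .filterNot((key, value) -> key.key().equals("flush"))
        // Flatten the windowed key into a plain string key for the output topic.
        .map((key, value) -> new KeyValue<>(key.toString(), value))
        .to(topic, Produced.with(stringSerde, intSerde));
}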

Example 3 with Consumed

Use of org.apache.kafka.streams.kstream.Consumed in project kafka by apache.

From the class PageViewUntypedDemo, method main.

public static void main(final String[] args) throws Exception {
    final Properties props = new Properties();
    props.put(StreamsConfig.APPLICATION_ID_CONFIG, "streams-pageview-untyped");
    props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
    props.put(StreamsConfig.DEFAULT_TIMESTAMP_EXTRACTOR_CLASS_CONFIG, JsonTimestampExtractor.class);
    props.put(StreamsConfig.CACHE_MAX_BYTES_BUFFERING_CONFIG, 0);
    // setting offset reset to earliest so that we can re-run the demo code with the same pre-loaded data
    props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    final StreamsBuilder builder = new StreamsBuilder();
    final Serializer<JsonNode> jsonSerializer = new JsonSerializer();
    final Deserializer<JsonNode> jsonDeserializer = new JsonDeserializer();
    final Serde<JsonNode> jsonSerde = Serdes.serdeFrom(jsonSerializer, jsonDeserializer);
    final Consumed<String, JsonNode> consumed = Consumed.with(Serdes.String(), jsonSerde);
    final KStream<String, JsonNode> views = builder.stream("streams-pageview-input", consumed);
    final KTable<String, JsonNode> users = builder.table("streams-userprofile-input", consumed);
    final KTable<String, String> userRegions = users.mapValues(record -> record.get("region").textValue());
    final Duration duration24Hours = Duration.ofHours(24);
    final KStream<JsonNode, JsonNode> regionCount = views.leftJoin(userRegions, (view, region) -> {
        final ObjectNode jNode = JsonNodeFactory.instance.objectNode();
        return (JsonNode) jNode.put("user", view.get("user").textValue()).put("page", view.get("page").textValue()).put("region", region == null ? "UNKNOWN" : region);
    }).map((user, viewRegion) -> new KeyValue<>(viewRegion.get("region").textValue(), viewRegion))
        .groupByKey(Grouped.with(Serdes.String(), jsonSerde))
        .windowedBy(TimeWindows.ofSizeAndGrace(Duration.ofDays(7), duration24Hours).advanceBy(Duration.ofSeconds(1)))
        .count()
        .toStream()
        .map((key, value) -> {
        final ObjectNode keyNode = JsonNodeFactory.instance.objectNode();
        keyNode.put("window-start", key.window().start()).put("region", key.key());
        final ObjectNode valueNode = JsonNodeFactory.instance.objectNode();
        valueNode.put("count", value);
        return new KeyValue<>((JsonNode) keyNode, (JsonNode) valueNode);
    });
    // write to the result topic
    regionCount.to("streams-pageviewstats-untyped-output", Produced.with(jsonSerde, jsonSerde));
    final KafkaStreams streams = new KafkaStreams(builder.build(), props);
    streams.start();
    // Usually a streams application would run forever; in this example we
    // let it run for a short time and then stop, since the input data is finite.
    Thread.sleep(5000L);
    streams.close();
}
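
The JsonTimestampExtractor registered in the properties lives alongside this demo. A hedged sketch of the idea behind it (the "timestamp" field name and the fallback behavior are assumptions, not the class's confirmed contents):

public class JsonTimestampExtractor implements TimestampExtractor {
    @Override
    public long extract(final ConsumerRecord<Object, Object> record, final long partitionTime) {
        // Assumes the record value is a JsonNode carrying a "timestamp" field.
        if (record.value() instanceof JsonNode) {
            return ((JsonNode) record.value()).get("timestamp").longValue();
        }
        throw new IllegalArgumentException("Cannot extract a timestamp from " + record.value());
    }
}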

Example 4 with Consumed

Use of org.apache.kafka.streams.kstream.Consumed in project kafka by apache.

From the class KTableKTableLeftJoinTest, method shouldNotThrowIllegalStateExceptionWhenMultiCacheEvictions.

/**
 * This test was written to reproduce https://issues.apache.org/jira/browse/KAFKA-4492.
 * It is based on a fairly complicated join used by the developer who reported the bug.
 * Before the fix, this would trigger an IllegalStateException.
 */
@Test
public void shouldNotThrowIllegalStateExceptionWhenMultiCacheEvictions() {
    final String agg = "agg";
    final String tableOne = "tableOne";
    final String tableTwo = "tableTwo";
    final String tableThree = "tableThree";
    final String tableFour = "tableFour";
    final String tableFive = "tableFive";
    final String tableSix = "tableSix";
    final String[] inputs = { agg, tableOne, tableTwo, tableThree, tableFour, tableFive, tableSix };
    final StreamsBuilder builder = new StreamsBuilder();
    final Consumed<Long, String> consumed = Consumed.with(Serdes.Long(), Serdes.String());
    final KTable<Long, String> aggTable = builder
        .table(agg, consumed, Materialized.as(Stores.inMemoryKeyValueStore("agg-base-store")))
        .groupBy(KeyValue::new, Grouped.with(Serdes.Long(), Serdes.String()))
        .reduce(MockReducer.STRING_ADDER, MockReducer.STRING_ADDER, Materialized.as(Stores.inMemoryKeyValueStore("agg-store")));
    final KTable<Long, String> one = builder.table(tableOne, consumed, Materialized.as(Stores.inMemoryKeyValueStore("tableOne-base-store")));
    final KTable<Long, String> two = builder.table(tableTwo, consumed, Materialized.as(Stores.inMemoryKeyValueStore("tableTwo-base-store")));
    final KTable<Long, String> three = builder.table(tableThree, consumed, Materialized.as(Stores.inMemoryKeyValueStore("tableThree-base-store")));
    final KTable<Long, String> four = builder.table(tableFour, consumed, Materialized.as(Stores.inMemoryKeyValueStore("tableFour-base-store")));
    final KTable<Long, String> five = builder.table(tableFive, consumed, Materialized.as(Stores.inMemoryKeyValueStore("tableFive-base-store")));
    final KTable<Long, String> six = builder.table(tableSix, consumed, Materialized.as(Stores.inMemoryKeyValueStore("tableSix-base-store")));
    final ValueMapper<String, String> mapper = value -> value.toUpperCase(Locale.ROOT);
    final KTable<Long, String> seven = one.mapValues(mapper);
    final KTable<Long, String> eight = six.leftJoin(seven, MockValueJoiner.TOSTRING_JOINER);
    aggTable
        .leftJoin(one, MockValueJoiner.TOSTRING_JOINER)
        .leftJoin(two, MockValueJoiner.TOSTRING_JOINER)
        .leftJoin(three, MockValueJoiner.TOSTRING_JOINER)
        .leftJoin(four, MockValueJoiner.TOSTRING_JOINER)
        .leftJoin(five, MockValueJoiner.TOSTRING_JOINER)
        .leftJoin(eight, MockValueJoiner.TOSTRING_JOINER)
        .mapValues(mapper);
    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        final String[] values = { "a", "AA", "BBB", "CCCC", "DD", "EEEEEEEE", "F", "GGGGGGGGGGGGGGG", "HHH", "IIIIIIIIII", "J", "KK", "LLLL", "MMMMMMMMMMMMMMMMMMMMMM", "NNNNN", "O", "P", "QQQQQ", "R", "SSSS", "T", "UU", "VVVVVVVVVVVVVVVVVVV" };
        TestInputTopic<Long, String> inputTopic;
        final Random random = new Random();
        for (int i = 0; i < 1000; i++) {
            for (final String input : inputs) {
                final Long key = (long) random.nextInt(1000);
                final String value = values[random.nextInt(values.length)];
                inputTopic = driver.createInputTopic(input, Serdes.Long().serializer(), Serdes.String().serializer());
                inputTopic.pipeInput(key, value);
            }
        }
    }
}
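
The cache evictions this test exercises only occur when record caching is enabled in props. A hedged sketch of a configuration that would enable them (the application id and cache size are illustrative, not the values from the original test fixture):

final Properties props = new Properties();
props.put(StreamsConfig.APPLICATION_ID_CONFIG, "ktable-ktable-leftjoin-test"); // hypothetical id
props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
// A small but non-zero cache forces frequent evictions, the condition behind KAFKA-4492.
props.put(StreamsConfig.CACHE_MAX_BYTES_BUFFERING_CONFIG, 10 * 1024);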

Example 5 with Consumed

Use of org.apache.kafka.streams.kstream.Consumed in project kafka by apache.

From the class KTableMapValuesTest, method testQueryableValueGetter.

@Test
public void testQueryableValueGetter() {
    final StreamsBuilder builder = new StreamsBuilder();
    final String topic1 = "topic1";
    final String storeName2 = "store2";
    final String storeName3 = "store3";
    final KTableImpl<String, String, String> table1 = (KTableImpl<String, String, String>) builder.table(topic1, consumed);
    final KTableImpl<String, String, Integer> table2 = (KTableImpl<String, String, Integer>) table1.mapValues(
        s -> Integer.valueOf(s),
        Materialized.<String, Integer, KeyValueStore<Bytes, byte[]>>as(storeName2).withValueSerde(Serdes.Integer()));
    final KTableImpl<String, String, Integer> table3 = (KTableImpl<String, String, Integer>) table1.mapValues(
        value -> Integer.valueOf(value) * (-1),
        Materialized.<String, Integer, KeyValueStore<Bytes, byte[]>>as(storeName3).withValueSerde(Serdes.Integer()));
    final KTableImpl<String, String, Integer> table4 = (KTableImpl<String, String, Integer>) table1.mapValues(s -> Integer.valueOf(s));
    assertEquals(storeName2, table2.queryableStoreName());
    assertEquals(storeName3, table3.queryableStoreName());
    assertNull(table4.queryableStoreName());
    doTestValueGetter(builder, topic1, table2, table3);
}
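
doTestValueGetter is a private helper in KTableMapValuesTest that drives the topology and checks each table's value getter. A hedged sketch of an equivalent check using TopologyTestDriver's public store access (an approximation of the idea, not the helper's actual implementation):

private void doTestValueGetter(final StreamsBuilder builder, final String topic,
                               final KTableImpl<String, String, Integer> table2,
                               final KTableImpl<String, String, Integer> table3) {
    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        final TestInputTopic<String, String> inputTopic =
            driver.createInputTopic(topic, new StringSerializer(), new StringSerializer());
        inputTopic.pipeInput("A", "01");
        // Each materialized mapValues result is queryable under its store name.
        final KeyValueStore<String, Integer> store2 = driver.getKeyValueStore(table2.queryableStoreName());
        final KeyValueStore<String, Integer> store3 = driver.getKeyValueStore(table3.queryableStoreName());
        assertEquals(Integer.valueOf(1), store2.get("A"));
        assertEquals(Integer.valueOf(-1), store3.get("A"));
    }
}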

Aggregations

Consumed (org.apache.kafka.streams.kstream.Consumed): 16
KTable (org.apache.kafka.streams.kstream.KTable): 16
Properties (java.util.Properties): 15
Serdes (org.apache.kafka.common.serialization.Serdes): 15
StreamsBuilder (org.apache.kafka.streams.StreamsBuilder): 15
Materialized (org.apache.kafka.streams.kstream.Materialized): 15
Assert.assertEquals (org.junit.Assert.assertEquals): 14
Test (org.junit.Test): 14
Duration (java.time.Duration): 13
KeyValueTimestamp (org.apache.kafka.streams.KeyValueTimestamp): 13
TestInputTopic (org.apache.kafka.streams.TestInputTopic): 13
TopologyTestDriver (org.apache.kafka.streams.TopologyTestDriver): 13
MockApiProcessor (org.apache.kafka.test.MockApiProcessor): 13
MockApiProcessorSupplier (org.apache.kafka.test.MockApiProcessorSupplier): 13
Instant (java.time.Instant): 12
StringSerializer (org.apache.kafka.common.serialization.StringSerializer): 12
Bytes (org.apache.kafka.common.utils.Bytes): 12
StreamsTestUtils (org.apache.kafka.test.StreamsTestUtils): 12
MatcherAssert.assertThat (org.hamcrest.MatcherAssert.assertThat): 12
Arrays.asList (java.util.Arrays.asList): 11