Example 56 with KStream

Use of org.apache.kafka.streams.kstream.KStream in project kafkastreams-cep by fhussonnois.

From class CEPStockKStreamsIntegrationTest, method test.

@Test
public void test() throws ExecutionException, InterruptedException {
    final Collection<KeyValue<String, String>> batch1 = Arrays.asList(
        new KeyValue<>(null, "{\"name\":\"e1\",\"price\":100,\"volume\":1010}"),
        new KeyValue<>(null, "{\"name\":\"e2\",\"price\":120,\"volume\":990}"),
        new KeyValue<>(null, "{\"name\":\"e3\",\"price\":120,\"volume\":1005}"),
        new KeyValue<>(null, "{\"name\":\"e4\",\"price\":121,\"volume\":999}"),
        new KeyValue<>(null, "{\"name\":\"e5\",\"price\":120,\"volume\":999}"),
        new KeyValue<>(null, "{\"name\":\"e6\",\"price\":125,\"volume\":750}"),
        new KeyValue<>(null, "{\"name\":\"e7\",\"price\":120,\"volume\":950}"),
        new KeyValue<>(null, "{\"name\":\"e8\",\"price\":120,\"volume\":700}"));
    IntegrationTestUtils.produceKeyValuesSynchronously(
        INPUT_STREAM,
        batch1,
        TestUtils.producerConfig(CLUSTER.bootstrapServers(), StringSerializer.class, StringSerializer.class, new Properties()),
        mockTime);
    // build query
    ComplexStreamsBuilder builder = new ComplexStreamsBuilder();
    CEPStream<String, StockEvent> stream = builder.stream(INPUT_STREAM);
    KStream<String, Sequence<String, StockEvent>> stocks = stream.query("Stocks", Patterns.STOCKS);
    stocks.mapValues(seq -> {
        JSONObject json = new JSONObject();
        seq.asMap().forEach((k, v) -> {
            JSONArray events = new JSONArray();
            json.put(k, events);
            List<String> collect = v.stream().map(e -> e.value.name).collect(Collectors.toList());
            Collections.reverse(collect);
            collect.forEach(events::add);
        });
        return json.toJSONString();
    }).through(OUTPUT_STREAM, Produced.with(null, Serdes.String()))
      .print(Printed.toSysOut());
    Topology topology = builder.build();
    kafkaStreams = new KafkaStreams(topology, streamsConfiguration);
    kafkaStreams.start();
    final Properties consumerConfig = TestUtils.consumerConfig(CLUSTER.bootstrapServers(), StringDeserializer.class, StringDeserializer.class);
    List<KeyValue<String, String>> result = IntegrationTestUtils.readKeyValues(OUTPUT_STREAM, consumerConfig, TimeUnit.SECONDS.toMillis(10), 4);
    Assert.assertEquals(4, result.size());
    Assert.assertEquals("{\"0\":[\"e1\"],\"1\":[\"e2\",\"e3\",\"e4\",\"e5\"],\"2\":[\"e6\"]}", result.get(0).value);
    Assert.assertEquals("{\"0\":[\"e3\"],\"1\":[\"e4\"],\"2\":[\"e6\"]}", result.get(1).value);
    Assert.assertEquals("{\"0\":[\"e1\"],\"1\":[\"e2\",\"e3\",\"e4\",\"e5\",\"e6\",\"e7\"],\"2\":[\"e8\"]}", result.get(2).value);
    Assert.assertEquals("{\"0\":[\"e3\"],\"1\":[\"e4\",\"e6\"],\"2\":[\"e8\"]}", result.get(3).value);
}
Also used : Sequence(com.github.fhuss.kafka.streams.cep.Sequence) StreamsConfig(org.apache.kafka.streams.StreamsConfig) Arrays(java.util.Arrays) Produced(org.apache.kafka.streams.kstream.Produced) CEPStream(com.github.fhuss.kafka.streams.cep.CEPStream) KStream(org.apache.kafka.streams.kstream.KStream) JSONArray(org.json.simple.JSONArray) MockTime(kafka.utils.MockTime) StringDeserializer(org.apache.kafka.common.serialization.StringDeserializer) EmbeddedKafkaCluster(org.apache.kafka.streams.integration.utils.EmbeddedKafkaCluster) After(org.junit.After) Serdes(org.apache.kafka.common.serialization.Serdes) StringSerializer(org.apache.kafka.common.serialization.StringSerializer) ClassRule(org.junit.ClassRule) Pattern(com.github.fhuss.kafka.streams.cep.pattern.Pattern) Printed(org.apache.kafka.streams.kstream.Printed) Before(org.junit.Before) Properties(java.util.Properties) TestUtils(org.apache.kafka.test.TestUtils) Collection(java.util.Collection) KeyValue(org.apache.kafka.streams.KeyValue) ConsumerConfig(org.apache.kafka.clients.consumer.ConsumerConfig) Test(org.junit.Test) IOException(java.io.IOException) ComplexStreamsBuilder(com.github.fhuss.kafka.streams.cep.ComplexStreamsBuilder) Collectors(java.util.stream.Collectors) ExecutionException(java.util.concurrent.ExecutionException) TimeUnit(java.util.concurrent.TimeUnit) IntegrationTestUtils(org.apache.kafka.streams.integration.utils.IntegrationTestUtils) List(java.util.List) JSONObject(org.json.simple.JSONObject) QueryBuilder(com.github.fhuss.kafka.streams.cep.pattern.QueryBuilder) KafkaStreams(org.apache.kafka.streams.KafkaStreams) Assert(org.junit.Assert) Collections(java.util.Collections) Topology(org.apache.kafka.streams.Topology)
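A note on the sink: through() has been deprecated since Apache Kafka 2.6, where the same write-then-read-back round trip is written as an explicit to() followed by a fresh stream() on the output topic. A minimal sketch of that replacement against a plain StreamsBuilder, with illustrative topic names (the CEP-specific ComplexStreamsBuilder is omitted):

import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.kstream.Consumed;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.Printed;
import org.apache.kafka.streams.kstream.Produced;

public class ThroughReplacementSketch {

    public static void main(String[] args) {
        StreamsBuilder builder = new StreamsBuilder();
        // Stand-in for the mapValues(...) result above; topic names are illustrative.
        KStream<String, String> mapped =
                builder.stream("matched-sequences-json", Consumed.with(Serdes.String(), Serdes.String()));
        // through(topic, produced) == to(topic, produced) followed by stream(topic).
        mapped.to("output-stream", Produced.with(Serdes.String(), Serdes.String()));
        KStream<String, String> roundTrip =
                builder.stream("output-stream", Consumed.with(Serdes.String(), Serdes.String()));
        roundTrip.print(Printed.toSysOut());
    }
}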

Example 57 with KStream

Use of org.apache.kafka.streams.kstream.KStream in project tutorials by eugenp.

From class KafkaStreamsLiveTest, method shouldTestKafkaStreams.

@Test
@Ignore("it needs to have kafka broker running on local")
public void shouldTestKafkaStreams() throws InterruptedException {
    // given
    String inputTopic = "inputTopic";
    Properties streamsConfiguration = new Properties();
    streamsConfiguration.put(StreamsConfig.APPLICATION_ID_CONFIG, "wordcount-live-test");
    streamsConfiguration.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
    streamsConfiguration.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
    streamsConfiguration.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
    streamsConfiguration.put(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG, 1000);
    streamsConfiguration.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    // Use a temporary directory for storing state, which will be automatically removed after the test.
    streamsConfiguration.put(StreamsConfig.STATE_DIR_CONFIG, TestUtils.tempDirectory().getAbsolutePath());
    // when
    KStreamBuilder builder = new KStreamBuilder();
    KStream<String, String> textLines = builder.stream(inputTopic);
    Pattern pattern = Pattern.compile("\\W+", Pattern.UNICODE_CHARACTER_CLASS);
    KTable<String, Long> wordCounts = textLines
        .flatMapValues(value -> Arrays.asList(pattern.split(value.toLowerCase())))
        .groupBy((key, word) -> word)
        .count();
    wordCounts.foreach((word, count) -> System.out.println("word: " + word + " -> " + count));
    String outputTopic = "outputTopic";
    final Serde<String> stringSerde = Serdes.String();
    final Serde<Long> longSerde = Serdes.Long();
    wordCounts.to(stringSerde, longSerde, outputTopic);
    KafkaStreams streams = new KafkaStreams(builder, streamsConfiguration);
    streams.start();
    // then
    Thread.sleep(30000);
    streams.close();
}
Also used : KStreamBuilder(org.apache.kafka.streams.kstream.KStreamBuilder) StreamsConfig(org.apache.kafka.streams.StreamsConfig) KTable(org.apache.kafka.streams.kstream.KTable) Arrays(java.util.Arrays) Properties(java.util.Properties) TestUtils(org.apache.kafka.test.TestUtils) ConsumerConfig(org.apache.kafka.clients.consumer.ConsumerConfig) Test(org.junit.Test) KStream(org.apache.kafka.streams.kstream.KStream) Ignore(org.junit.Ignore) Serde(org.apache.kafka.common.serialization.Serde) Serdes(org.apache.kafka.common.serialization.Serdes) KafkaStreams(org.apache.kafka.streams.KafkaStreams) Pattern(java.util.regex.Pattern)
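This test targets the old KStreamBuilder API, deprecated in Kafka 1.0 and since removed, along with KTable.to(Serde, Serde, topic). A minimal sketch of the same word count on the current StreamsBuilder API; the topology mirrors the test above, while the Grouped/Produced serde wiring is the assumed modern equivalent:

import java.util.Arrays;
import java.util.Properties;
import java.util.regex.Pattern;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.kstream.Consumed;
import org.apache.kafka.streams.kstream.Grouped;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.KTable;
import org.apache.kafka.streams.kstream.Produced;

public class ModernWordCountSketch {

    public static void main(String[] args) {
        Properties config = new Properties();
        config.put(StreamsConfig.APPLICATION_ID_CONFIG, "wordcount-live-test");
        config.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");

        StreamsBuilder builder = new StreamsBuilder();
        KStream<String, String> textLines =
                builder.stream("inputTopic", Consumed.with(Serdes.String(), Serdes.String()));
        Pattern pattern = Pattern.compile("\\W+", Pattern.UNICODE_CHARACTER_CLASS);
        KTable<String, Long> wordCounts = textLines
                .flatMapValues(value -> Arrays.asList(pattern.split(value.toLowerCase())))
                .groupBy((key, word) -> word, Grouped.with(Serdes.String(), Serdes.String()))
                .count();
        // KTable.to(...) is gone: convert to a stream and write with explicit serdes.
        wordCounts.toStream().to("outputTopic", Produced.with(Serdes.String(), Serdes.Long()));

        KafkaStreams streams = new KafkaStreams(builder.build(), config);
        streams.start();
        Runtime.getRuntime().addShutdownHook(new Thread(streams::close));
    }
}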

Example 58 with KStream

Use of org.apache.kafka.streams.kstream.KStream in project apache-kafka-on-k8s by banzaicloud.

From class KStreamBranchTest, method testKStreamBranch.

@SuppressWarnings("unchecked")
@Test
public void testKStreamBranch() {
    final StreamsBuilder builder = new StreamsBuilder();
    Predicate<Integer, String> isEven = new Predicate<Integer, String>() {

        @Override
        public boolean test(Integer key, String value) {
            return (key % 2) == 0;
        }
    };
    Predicate<Integer, String> isMultipleOfThree = new Predicate<Integer, String>() {

        @Override
        public boolean test(Integer key, String value) {
            return (key % 3) == 0;
        }
    };
    Predicate<Integer, String> isOdd = new Predicate<Integer, String>() {

        @Override
        public boolean test(Integer key, String value) {
            return (key % 2) != 0;
        }
    };
    final int[] expectedKeys = new int[] { 1, 2, 3, 4, 5, 6 };
    KStream<Integer, String> stream;
    KStream<Integer, String>[] branches;
    MockProcessorSupplier<Integer, String>[] processors;
    stream = builder.stream(topicName, Consumed.with(Serdes.Integer(), Serdes.String()));
    branches = stream.branch(isEven, isMultipleOfThree, isOdd);
    assertEquals(3, branches.length);
    processors = (MockProcessorSupplier<Integer, String>[]) Array.newInstance(MockProcessorSupplier.class, branches.length);
    for (int i = 0; i < branches.length; i++) {
        processors[i] = new MockProcessorSupplier<>();
        branches[i].process(processors[i]);
    }
    driver.setUp(builder);
    for (int expectedKey : expectedKeys) {
        driver.process(topicName, expectedKey, "V" + expectedKey);
    }
    assertEquals(3, processors[0].processed.size());
    assertEquals(1, processors[1].processed.size());
    assertEquals(2, processors[2].processed.size());
}
Also used : KStream(org.apache.kafka.streams.kstream.KStream) Predicate(org.apache.kafka.streams.kstream.Predicate) StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) MockProcessorSupplier(org.apache.kafka.test.MockProcessorSupplier) Test(org.junit.Test)
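Two parts of this test have since been superseded: driver.setUp(...) is the old internal KStreamTestDriver, and branch(...) was deprecated in Kafka 2.8 in favor of split(). A minimal sketch of the same three-way split on the current API, driven by TopologyTestDriver; branch and topic names are illustrative:

import java.util.Map;
import java.util.Properties;
import org.apache.kafka.common.serialization.IntegerSerializer;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.serialization.StringSerializer;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.TestInputTopic;
import org.apache.kafka.streams.TopologyTestDriver;
import org.apache.kafka.streams.kstream.Branched;
import org.apache.kafka.streams.kstream.Consumed;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.Named;
import org.apache.kafka.streams.kstream.Produced;

public class SplitBranchSketch {

    public static void main(String[] args) {
        StreamsBuilder builder = new StreamsBuilder();
        KStream<Integer, String> source =
                builder.stream("topic", Consumed.with(Serdes.Integer(), Serdes.String()));
        // split() returns a map keyed by prefix + branch name. As with branch(),
        // a record lands in the first branch whose predicate matches, so key 6
        // goes to "even", not "multiple-of-three".
        Map<String, KStream<Integer, String>> branches = source
                .split(Named.as("num-"))
                .branch((key, value) -> key % 2 == 0, Branched.as("even"))
                .branch((key, value) -> key % 3 == 0, Branched.as("multiple-of-three"))
                .defaultBranch(Branched.as("odd"));
        branches.get("num-even").to("even-topic", Produced.with(Serdes.Integer(), Serdes.String()));

        Properties props = new Properties();
        props.put(StreamsConfig.APPLICATION_ID_CONFIG, "split-branch-sketch");
        props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "dummy:1234");
        try (TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
            TestInputTopic<Integer, String> input =
                    driver.createInputTopic("topic", new IntegerSerializer(), new StringSerializer());
            for (int key = 1; key <= 6; key++) {
                input.pipeInput(key, "V" + key);
            }
        }
    }
}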

Example 59 with KStream

Use of org.apache.kafka.streams.kstream.KStream in project kafka by apache.

From class KStreamImpl, method doBranch.

@SuppressWarnings({ "unchecked", "rawtypes" })
private KStream<K, V>[] doBranch(final NamedInternal named, final Predicate<? super K, ? super V>... predicates) {
    Objects.requireNonNull(predicates, "predicates can't be a null array");
    if (predicates.length == 0) {
        throw new IllegalArgumentException("branch() requires at least one predicate");
    }
    for (final Predicate<? super K, ? super V> predicate : predicates) {
        Objects.requireNonNull(predicate, "predicates can't be null");
    }
    final String branchName = named.orElseGenerateWithPrefix(builder, BRANCH_NAME);
    final String[] childNames = new String[predicates.length];
    for (int i = 0; i < predicates.length; i++) {
        childNames[i] = named.suffixWithOrElseGet("-predicate-" + i, builder, BRANCHCHILD_NAME);
    }
    final ProcessorParameters processorParameters = new ProcessorParameters<>(new KStreamBranch(Arrays.asList(predicates.clone()), Arrays.asList(childNames)), branchName);
    final ProcessorGraphNode<K, V> branchNode = new ProcessorGraphNode<>(branchName, processorParameters);
    builder.addGraphNode(graphNode, branchNode);
    final KStream<K, V>[] branchChildren = (KStream<K, V>[]) Array.newInstance(KStream.class, predicates.length);
    for (int i = 0; i < predicates.length; i++) {
        final ProcessorParameters innerProcessorParameters = new ProcessorParameters<>(new PassThrough<K, V>(), childNames[i]);
        final ProcessorGraphNode<K, V> branchChildNode = new ProcessorGraphNode<>(childNames[i], innerProcessorParameters);
        builder.addGraphNode(branchNode, branchChildNode);
        branchChildren[i] = new KStreamImpl<>(childNames[i], keySerde, valueSerde, subTopologySourceNodes, repartitionRequired, branchChildNode, builder);
    }
    return branchChildren;
}
Also used : BranchedKStream(org.apache.kafka.streams.kstream.BranchedKStream) KStream(org.apache.kafka.streams.kstream.KStream) ProcessorParameters(org.apache.kafka.streams.kstream.internals.graph.ProcessorParameters) ProcessorGraphNode(org.apache.kafka.streams.kstream.internals.graph.ProcessorGraphNode)
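For context, doBranch backs the public branch() call (itself deprecated in 2.8 in favor of split()): each predicate gets its own named PassThrough child node, and a record is forwarded only to the child of the first predicate that matches. A minimal sketch of that caller-facing contract:

import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.kstream.Consumed;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.Produced;

public class BranchContractSketch {

    @SuppressWarnings("unchecked")
    public static void main(String[] args) {
        StreamsBuilder builder = new StreamsBuilder();
        KStream<Integer, String> stream =
                builder.stream("input", Consumed.with(Serdes.Integer(), Serdes.String()));
        // One child stream per predicate, in predicate order; evaluation stops at
        // the first match, so the two branches never share a record.
        KStream<Integer, String>[] branches = stream.branch(
                (key, value) -> key % 2 == 0,  // index 0: even keys
                (key, value) -> true);         // index 1: everything else
        branches[0].to("even", Produced.with(Serdes.Integer(), Serdes.String()));
        branches[1].to("rest", Produced.with(Serdes.Integer(), Serdes.String()));
    }
}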

Example 60 with KStream

Use of org.apache.kafka.streams.kstream.KStream in project kafka by apache.

From class KStreamRepartitionIntegrationTest, method shouldDoProperJoiningWhenNumberOfPartitionsAreValidWhenUsingRepartitionOperation.

@Test
public void shouldDoProperJoiningWhenNumberOfPartitionsAreValidWhenUsingRepartitionOperation() throws Exception {
    final String topicBRepartitionedName = "topic-b-scale-up";
    final String inputTopicRepartitionedName = "input-topic-scale-up";
    final long timestamp = System.currentTimeMillis();
    CLUSTER.createTopic(topicB, 1, 1);
    final List<KeyValue<Integer, String>> expectedRecords = Arrays.asList(new KeyValue<>(1, "A"), new KeyValue<>(2, "B"));
    sendEvents(timestamp, expectedRecords);
    sendEvents(topicB, timestamp, expectedRecords);
    final StreamsBuilder builder = new StreamsBuilder();
    final Repartitioned<Integer, String> inputTopicRepartitioned = Repartitioned.<Integer, String>as(inputTopicRepartitionedName).withNumberOfPartitions(4);
    final Repartitioned<Integer, String> topicBRepartitioned = Repartitioned.<Integer, String>as(topicBRepartitionedName).withNumberOfPartitions(4);
    final KStream<Integer, String> topicBStream = builder
        .stream(topicB, Consumed.with(Serdes.Integer(), Serdes.String()))
        .repartition(topicBRepartitioned);
    builder.stream(inputTopic, Consumed.with(Serdes.Integer(), Serdes.String()))
        .repartition(inputTopicRepartitioned)
        .join(topicBStream, (value1, value2) -> value2, JoinWindows.of(Duration.ofSeconds(10)))
        .to(outputTopic);
    startStreams(builder);
    assertEquals(4, getNumberOfPartitionsForTopic(toRepartitionTopicName(topicBRepartitionedName)));
    assertEquals(4, getNumberOfPartitionsForTopic(toRepartitionTopicName(inputTopicRepartitionedName)));
    validateReceivedMessages(new IntegerDeserializer(), new StringDeserializer(), expectedRecords);
}
Also used : StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) Arrays(java.util.Arrays) Repartitioned(org.apache.kafka.streams.kstream.Repartitioned) AdminClient(org.apache.kafka.clients.admin.AdminClient) Matcher(java.util.regex.Matcher) StringDeserializer(org.apache.kafka.common.serialization.StringDeserializer) IntegrationTestUtils.safeUniqueTestName(org.apache.kafka.streams.integration.utils.IntegrationTestUtils.safeUniqueTestName) After(org.junit.After) Duration(java.time.Duration) Serdes(org.apache.kafka.common.serialization.Serdes) StringSerializer(org.apache.kafka.common.serialization.StringSerializer) Parameterized(org.junit.runners.Parameterized) AfterClass(org.junit.AfterClass) TestUtils(org.apache.kafka.test.TestUtils) Collection(java.util.Collection) KeyValue(org.apache.kafka.streams.KeyValue) LongDeserializer(org.apache.kafka.common.serialization.LongDeserializer) Set(java.util.Set) ConsumerConfig(org.apache.kafka.clients.consumer.ConsumerConfig) State(org.apache.kafka.streams.KafkaStreams.State) Category(org.junit.experimental.categories.Category) Objects(java.util.Objects) IntegrationTestUtils(org.apache.kafka.streams.integration.utils.IntegrationTestUtils) CountDownLatch(java.util.concurrent.CountDownLatch) List(java.util.List) Pattern(java.util.regex.Pattern) ERROR(org.apache.kafka.streams.KafkaStreams.State.ERROR) StreamsConfig(org.apache.kafka.streams.StreamsConfig) BeforeClass(org.junit.BeforeClass) RunWith(org.junit.runner.RunWith) Parameters(org.junit.runners.Parameterized.Parameters) IntegrationTest(org.apache.kafka.test.IntegrationTest) KStream(org.apache.kafka.streams.kstream.KStream) AtomicReference(java.util.concurrent.atomic.AtomicReference) ArrayList(java.util.ArrayList) JoinWindows(org.apache.kafka.streams.kstream.JoinWindows) EmbeddedKafkaCluster(org.apache.kafka.streams.integration.utils.EmbeddedKafkaCluster) TestName(org.junit.rules.TestName) Named(org.apache.kafka.streams.kstream.Named) IntegerSerializer(org.apache.kafka.common.serialization.IntegerSerializer) TopicDescription(org.apache.kafka.clients.admin.TopicDescription) Deserializer(org.apache.kafka.common.serialization.Deserializer) Before(org.junit.Before) Properties(java.util.Properties) Consumed(org.apache.kafka.streams.kstream.Consumed) Parameter(org.junit.runners.Parameterized.Parameter) Assert.assertNotNull(org.junit.Assert.assertNotNull) AdminClientConfig(org.apache.kafka.clients.admin.AdminClientConfig) Assert.assertTrue(org.junit.Assert.assertTrue) Test(org.junit.Test) IOException(java.io.IOException) TimeUnit(java.util.concurrent.TimeUnit) RUNNING(org.apache.kafka.streams.KafkaStreams.State.RUNNING) Rule(org.junit.Rule) IntegerDeserializer(org.apache.kafka.common.serialization.IntegerDeserializer) KafkaStreams(org.apache.kafka.streams.KafkaStreams) REBALANCING(org.apache.kafka.streams.KafkaStreams.State.REBALANCING) Collections(java.util.Collections) Assert.assertEquals(org.junit.Assert.assertEquals)
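The substance of this test is co-partitioning: both join inputs are repartitioned to the same partition count (4), which a stream-stream join requires. A minimal standalone sketch of repartition() with explicit serdes; the topic names and "scaled" repartition names are illustrative, and the internal topic is typically named <application.id>-<name>-repartition:

import java.time.Duration;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.kstream.Consumed;
import org.apache.kafka.streams.kstream.JoinWindows;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.Repartitioned;

public class RepartitionJoinSketch {

    public static void main(String[] args) {
        StreamsBuilder builder = new StreamsBuilder();
        KStream<Integer, String> left = builder
                .stream("left", Consumed.with(Serdes.Integer(), Serdes.String()))
                .repartition(Repartitioned.<Integer, String>as("left-scaled")
                        .withKeySerde(Serdes.Integer())
                        .withValueSerde(Serdes.String())
                        .withNumberOfPartitions(4));
        KStream<Integer, String> right = builder
                .stream("right", Consumed.with(Serdes.Integer(), Serdes.String()))
                .repartition(Repartitioned.<Integer, String>as("right-scaled")
                        .withNumberOfPartitions(4));
        // Both sides now have four partitions, so the windowed join is valid.
        left.join(right, (l, r) -> l + r, JoinWindows.of(Duration.ofSeconds(10)))
                .to("joined");
        System.out.println(builder.build().describe());
    }
}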

Aggregations

KStream (org.apache.kafka.streams.kstream.KStream): 89 uses
Serdes (org.apache.kafka.common.serialization.Serdes): 83 uses
Properties (java.util.Properties): 82 uses
StreamsBuilder (org.apache.kafka.streams.StreamsBuilder): 80 uses
Test (org.junit.Test): 69 uses
StreamsConfig (org.apache.kafka.streams.StreamsConfig): 65 uses
KeyValue (org.apache.kafka.streams.KeyValue): 60 uses
Consumed (org.apache.kafka.streams.kstream.Consumed): 55 uses
KTable (org.apache.kafka.streams.kstream.KTable): 54 uses
StringSerializer (org.apache.kafka.common.serialization.StringSerializer): 51 uses
Materialized (org.apache.kafka.streams.kstream.Materialized): 45 uses
Duration (java.time.Duration): 44 uses
List (java.util.List): 42 uses
MatcherAssert.assertThat (org.hamcrest.MatcherAssert.assertThat): 40 uses
KafkaStreams (org.apache.kafka.streams.KafkaStreams): 38 uses
Arrays (java.util.Arrays): 37 uses
StringDeserializer (org.apache.kafka.common.serialization.StringDeserializer): 37 uses
Assert.assertEquals (org.junit.Assert.assertEquals): 37 uses
Grouped (org.apache.kafka.streams.kstream.Grouped): 35 uses
Produced (org.apache.kafka.streams.kstream.Produced): 35 uses