
Example 61 with KeyValue

Use of org.apache.kafka.streams.KeyValue in project kafka-streams-examples by confluentinc.

From class EmailServiceTest, method shouldSendEmailWithValidContents:

@Test
public void shouldSendEmailWithValidContents() throws Exception {
    // Given one order, customer and payment
    String orderId = id(0L);
    Order order = new Order(orderId, 15L, CREATED, UNDERPANTS, 3, 5.00d);
    Customer customer = new Customer(15L, "Franz", "Kafka", "frans@thedarkside.net", "oppression street, prague, cze");
    Payment payment = new Payment("Payment:1234", orderId, "CZK", 1000.00d);
    emailService = new EmailService(details -> {
        assertThat(details.customer).isEqualTo(customer);
        assertThat(details.payment).isEqualTo(payment);
        assertThat(details.order).isEqualTo(order);
        complete = true;
    });
    send(Topics.CUSTOMERS, Collections.singleton(new KeyValue<>(customer.getId(), customer)));
    send(Topics.ORDERS, Collections.singleton(new KeyValue<>(order.getId(), order)));
    send(Topics.PAYMENTS, Collections.singleton(new KeyValue<>(payment.getId(), payment)));
    // When
    emailService.start(CLUSTER.bootstrapServers());
    // Then
    TestUtils.waitForCondition(() -> complete, "Email was never sent.");
}
Also used: Order (io.confluent.examples.streams.avro.microservices.Order), Payment (io.confluent.examples.streams.avro.microservices.Payment), MicroserviceTestUtils (io.confluent.examples.streams.microservices.util.MicroserviceTestUtils), BeforeClass (org.junit.BeforeClass), TestUtils (org.apache.kafka.test.TestUtils), Assertions.assertThat (org.assertj.core.api.Assertions.assertThat), KeyValue (org.apache.kafka.streams.KeyValue), OrderId.id (io.confluent.examples.streams.microservices.domain.beans.OrderId.id), Test (org.junit.Test), Customer (io.confluent.examples.streams.avro.microservices.Customer), UNDERPANTS (io.confluent.examples.streams.avro.microservices.Product.UNDERPANTS), Topics (io.confluent.examples.streams.microservices.domain.Schemas.Topics), Schemas (io.confluent.examples.streams.microservices.domain.Schemas), After (org.junit.After), Collections (java.util.Collections), CREATED (io.confluent.examples.streams.avro.microservices.OrderState.CREATED)
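
The send helper from MicroserviceTestUtils is not reproduced on this page. A minimal sketch of what it could look like, assuming the project's Schemas.Topic exposes name(), keySerde(), and valueSerde() accessors; the producer configuration is illustrative, not the project's exact code:

private static <K, V> void send(final Schemas.Topic<K, V> topic,
                                final Collection<KeyValue<K, V>> records) {
    final Properties producerConfig = new Properties();
    producerConfig.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers());
    // Build serializers from the topic's serdes (assumed accessors, see note above).
    try (KafkaProducer<K, V> producer = new KafkaProducer<>(producerConfig,
            topic.keySerde().serializer(), topic.valueSerde().serializer())) {
        for (final KeyValue<K, V> record : records) {
            producer.send(new ProducerRecord<>(topic.name(), record.key, record.value));
        }
        // Ensure all records reach the broker before the service under test starts reading.
        producer.flush();
    }
}

The flush before closing matters in the test above, because EmailService is only started after all three records have been sent.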

Example 62 with KeyValue

Use of org.apache.kafka.streams.KeyValue in project kafka-streams-examples by confluentinc.

From class MicroserviceTestUtils, method readKeysAndValues:

private static <K, V> List<KeyValue<K, V>> readKeysAndValues(int numberToRead, String bootstrapServers, Deserializer<K> keyDes, Deserializer<V> valDes, String topicName) throws InterruptedException {
    Properties consumerConfig = new Properties();
    consumerConfig.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
    consumerConfig.put(ConsumerConfig.GROUP_ID_CONFIG, "Test-Reader-" + consumerCounter++);
    consumerConfig.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    KafkaConsumer<K, V> consumer = new KafkaConsumer<>(consumerConfig, keyDes, valDes);
    consumer.subscribe(singletonList(topicName));
    List<KeyValue<K, V>> actualValues = new ArrayList<>();
    TestUtils.waitForCondition(() -> {
        ConsumerRecords<K, V> records = consumer.poll(100);
        for (ConsumerRecord<K, V> record : records) {
            actualValues.add(KeyValue.pair(record.key(), record.value()));
        }
        return actualValues.size() == numberToRead;
    }, 20000, "Timed out reading orders.");
    consumer.close();
    return actualValues;
}
Also used: KeyValue (org.apache.kafka.streams.KeyValue), ArrayList (java.util.ArrayList), KafkaConsumer (org.apache.kafka.clients.consumer.KafkaConsumer), Properties (java.util.Properties)
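
A hypothetical call site (the topic name, count, and deserializers here are illustrative only):

List<KeyValue<String, String>> records =
        readKeysAndValues(3, CLUSTER.bootstrapServers(),
                new StringDeserializer(), new StringDeserializer(), "some-topic");

Note that consumer.poll(100) uses the long-millisecond overload, which Kafka 2.0 deprecated in favor of poll(Duration.ofMillis(100)); the polling logic is otherwise unchanged.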

Example 63 with KeyValue

Use of org.apache.kafka.streams.KeyValue in project kafka-streams-examples by confluentinc.

From class StateStoresInTheDSLIntegrationTest, method shouldAllowStateStoreAccessFromDSL:

@Test
public void shouldAllowStateStoreAccessFromDSL() throws Exception {
    List<String> inputValues = Arrays.asList("foo", "bar", "foo", "quux", "bar", "foo");
    List<KeyValue<String, Long>> expectedRecords = Arrays.asList(new KeyValue<>("foo", 1L), new KeyValue<>("bar", 1L), new KeyValue<>("foo", 2L), new KeyValue<>("quux", 1L), new KeyValue<>("bar", 2L), new KeyValue<>("foo", 3L));
    // 
    // Step 1: Configure and start the processor topology.
    // 
    StreamsBuilder builder = new StreamsBuilder();
    Properties streamsConfiguration = new Properties();
    streamsConfiguration.put(StreamsConfig.APPLICATION_ID_CONFIG, "state-store-dsl-lambda-integration-test");
    streamsConfiguration.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers());
    streamsConfiguration.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.ByteArray().getClass().getName());
    streamsConfiguration.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
    streamsConfiguration.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    // Use a temporary directory for storing state, which will be automatically removed after the test.
    streamsConfiguration.put(StreamsConfig.STATE_DIR_CONFIG, TestUtils.tempDirectory().getAbsolutePath());
    // Create a state store manually.
    StoreBuilder<KeyValueStore<String, Long>> wordCountsStore = Stores.keyValueStoreBuilder(Stores.persistentKeyValueStore("WordCountsStore"), Serdes.String(), Serdes.Long()).withCachingEnabled();
    // Important (1 of 2): You must add the state store to the topology, otherwise your application
    // will fail at run-time (because the state store is referred to in `transform()` below).
    builder.addStateStore(wordCountsStore);
    // Read the input data.  (In this example we ignore whatever is stored in the record keys.)
    KStream<byte[], String> words = builder.stream(inputTopic);
    // Important (2 of 2):  When we call `transform()` we must provide the name of the state store
    // that is going to be used by the `Transformer` returned by `WordCountTransformerSupplier` as
    // the second parameter of `transform()` (note: we are also passing the state store name to the
    // constructor of `WordCountTransformerSupplier`, which we do primarily for cleaner code).
    // Otherwise our application will fail at run-time when attempting to operate on the state store
    // (within the transformer) because `ProcessorContext#getStateStore("WordCountsStore")` will
    // return `null`.
    KStream<String, Long> wordCounts = words.transform(new WordCountTransformerSupplier(wordCountsStore.name()), wordCountsStore.name());
    wordCounts.to(outputTopic, Produced.with(Serdes.String(), Serdes.Long()));
    KafkaStreams streams = new KafkaStreams(builder.build(), streamsConfiguration);
    streams.start();
    // 
    // Step 2: Produce some input data to the input topic.
    // 
    Properties producerConfig = new Properties();
    producerConfig.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers());
    producerConfig.put(ProducerConfig.ACKS_CONFIG, "all");
    producerConfig.put(ProducerConfig.RETRIES_CONFIG, 0);
    producerConfig.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class);
    producerConfig.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
    IntegrationTestUtils.produceValuesSynchronously(inputTopic, inputValues, producerConfig);
    // 
    // Step 3: Verify the application's output data.
    // 
    Properties consumerConfig = new Properties();
    consumerConfig.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers());
    consumerConfig.put(ConsumerConfig.GROUP_ID_CONFIG, "state-store-dsl-lambda-integration-test-standard-consumer");
    consumerConfig.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    consumerConfig.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
    consumerConfig.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, LongDeserializer.class);
    List<KeyValue<String, Long>> actualValues = IntegrationTestUtils.waitUntilMinKeyValueRecordsReceived(consumerConfig, outputTopic, expectedRecords.size());
    streams.close();
    assertThat(actualValues).isEqualTo(expectedRecords);
}
Also used: KafkaStreams (org.apache.kafka.streams.KafkaStreams), KeyValue (org.apache.kafka.streams.KeyValue), KeyValueStore (org.apache.kafka.streams.state.KeyValueStore), Properties (java.util.Properties), StreamsBuilder (org.apache.kafka.streams.StreamsBuilder), Test (org.junit.Test)
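
The WordCountTransformerSupplier referenced above is not shown on this page. A minimal sketch of such a supplier, assuming the running per-word count implied by expectedRecords (each input value is treated as a single word); the class and constructor follow the test's usage, but the body is an illustration rather than the project's exact code:

private static final class WordCountTransformerSupplier
        implements TransformerSupplier<byte[], String, KeyValue<String, Long>> {

    private final String stateStoreName;

    WordCountTransformerSupplier(final String stateStoreName) {
        this.stateStoreName = stateStoreName;
    }

    @Override
    public Transformer<byte[], String, KeyValue<String, Long>> get() {
        return new Transformer<byte[], String, KeyValue<String, Long>>() {

            private KeyValueStore<String, Long> stateStore;

            @SuppressWarnings("unchecked")
            @Override
            public void init(final ProcessorContext context) {
                // Fails here unless the store was added to the topology and its name
                // was passed to `transform()`, exactly as the comments above warn.
                stateStore = (KeyValueStore<String, Long>) context.getStateStore(stateStoreName);
            }

            @Override
            public KeyValue<String, Long> transform(final byte[] key, final String word) {
                // Increment the running count for this word and emit the new total.
                final Long previousCount = stateStore.get(word);
                final long newCount = previousCount == null ? 1L : previousCount + 1L;
                stateStore.put(word, newCount);
                return KeyValue.pair(word, newCount);
            }

            @Override
            public void close() {
                // Nothing to clean up; the store's lifecycle is managed by Kafka Streams.
            }
        };
    }
}

(Imports assumed: org.apache.kafka.streams.kstream.Transformer, org.apache.kafka.streams.kstream.TransformerSupplier, and org.apache.kafka.streams.processor.ProcessorContext.)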

Example 64 with KeyValue

Use of org.apache.kafka.streams.KeyValue in project kafka by apache.

From class KStreamPeekTest, method shouldObserveStreamElements:

@Test
public void shouldObserveStreamElements() {
    final StreamsBuilder builder = new StreamsBuilder();
    final KStream<Integer, String> stream = builder.stream(topicName, Consumed.with(Serdes.Integer(), Serdes.String()));
    final List<KeyValue<Integer, String>> peekObserved = new ArrayList<>(), streamObserved = new ArrayList<>();
    stream.peek(collect(peekObserved)).foreach(collect(streamObserved));
    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        final TestInputTopic<Integer, String> inputTopic = driver.createInputTopic(topicName, new IntegerSerializer(), new StringSerializer());
        final List<KeyValue<Integer, String>> expected = new ArrayList<>();
        for (int key = 0; key < 32; key++) {
            final String value = "V" + key;
            inputTopic.pipeInput(key, value);
            expected.add(new KeyValue<>(key, value));
        }
        assertEquals(expected, peekObserved);
        assertEquals(expected, streamObserved);
    }
}
Also used: KeyValue (org.apache.kafka.streams.KeyValue), ArrayList (java.util.ArrayList), TopologyTestDriver (org.apache.kafka.streams.TopologyTestDriver), IntegerSerializer (org.apache.kafka.common.serialization.IntegerSerializer), StreamsBuilder (org.apache.kafka.streams.StreamsBuilder), StringSerializer (org.apache.kafka.common.serialization.StringSerializer), Test (org.junit.Test)
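
The collect helper is defined elsewhere in KStreamPeekTest; a plausible sketch, assuming it simply appends each observed record to the given list (which is all the assertions require):

private static <K, V> ForeachAction<K, V> collect(final List<KeyValue<K, V>> into) {
    // Record every observed element so the test can compare it against expectations.
    return (key, value) -> into.add(new KeyValue<>(key, value));
}

Because both peek() and foreach() accept a ForeachAction, the same helper serves both observation points in the test.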

Example 65 with KeyValue

Use of org.apache.kafka.streams.KeyValue in project kafka by apache.

From class CogroupedKStreamImplTest, method shouldInsertRepartitionsTopicForCogroupsUsedTwice:

@Test
public void shouldInsertRepartitionsTopicForCogroupsUsedTwice() {
    final StreamsBuilder builder = new StreamsBuilder();
    final Properties properties = new Properties();
    final KStream<String, String> stream1 = builder.stream("one", stringConsumed);
    final KGroupedStream<String, String> groupedOne = stream1.map((k, v) -> new KeyValue<>(v, k)).groupByKey(Grouped.as("foo"));
    final CogroupedKStream<String, String> one = groupedOne.cogroup(STRING_AGGREGATOR);
    one.aggregate(STRING_INITIALIZER);
    one.aggregate(STRING_INITIALIZER);
    final String topologyDescription = builder.build(properties).describe().toString();
    assertThat(topologyDescription, equalTo(
        "Topologies:\n" +
        "   Sub-topology: 0\n" +
        "    Source: KSTREAM-SOURCE-0000000000 (topics: [one])\n" +
        "      --> KSTREAM-MAP-0000000001\n" +
        "    Processor: KSTREAM-MAP-0000000001 (stores: [])\n" +
        "      --> foo-repartition-filter\n" +
        "      <-- KSTREAM-SOURCE-0000000000\n" +
        "    Processor: foo-repartition-filter (stores: [])\n" +
        "      --> foo-repartition-sink\n" +
        "      <-- KSTREAM-MAP-0000000001\n" +
        "    Sink: foo-repartition-sink (topic: foo-repartition)\n" +
        "      <-- foo-repartition-filter\n\n" +
        "  Sub-topology: 1\n" +
        "    Source: foo-repartition-source (topics: [foo-repartition])\n" +
        "      --> COGROUPKSTREAM-AGGREGATE-0000000006, COGROUPKSTREAM-AGGREGATE-0000000012\n" +
        "    Processor: COGROUPKSTREAM-AGGREGATE-0000000006 (stores: [COGROUPKSTREAM-AGGREGATE-STATE-STORE-0000000002])\n" +
        "      --> COGROUPKSTREAM-MERGE-0000000007\n" +
        "      <-- foo-repartition-source\n" +
        "    Processor: COGROUPKSTREAM-AGGREGATE-0000000012 (stores: [COGROUPKSTREAM-AGGREGATE-STATE-STORE-0000000008])\n" +
        "      --> COGROUPKSTREAM-MERGE-0000000013\n" +
        "      <-- foo-repartition-source\n" +
        "    Processor: COGROUPKSTREAM-MERGE-0000000007 (stores: [])\n" +
        "      --> none\n" +
        "      <-- COGROUPKSTREAM-AGGREGATE-0000000006\n" +
        "    Processor: COGROUPKSTREAM-MERGE-0000000013 (stores: [])\n" +
        "      --> none\n" +
        "      <-- COGROUPKSTREAM-AGGREGATE-0000000012\n\n"));
}
Also used: StreamsBuilder (org.apache.kafka.streams.StreamsBuilder), StreamsConfig (org.apache.kafka.streams.StreamsConfig), KGroupedStream (org.apache.kafka.streams.kstream.KGroupedStream), MockInitializer (org.apache.kafka.test.MockInitializer), SessionWindows (org.apache.kafka.streams.kstream.SessionWindows), CoreMatchers.equalTo (org.hamcrest.CoreMatchers.equalTo), Assert.assertThrows (org.junit.Assert.assertThrows), Window (org.apache.kafka.streams.kstream.Window), KStream (org.apache.kafka.streams.kstream.KStream), Initializer (org.apache.kafka.streams.kstream.Initializer), StringDeserializer (org.apache.kafka.common.serialization.StringDeserializer), Named (org.apache.kafka.streams.kstream.Named), KeyValueStore (org.apache.kafka.streams.state.KeyValueStore), IntegerSerializer (org.apache.kafka.common.serialization.IntegerSerializer), Serdes (org.apache.kafka.common.serialization.Serdes), StringSerializer (org.apache.kafka.common.serialization.StringSerializer), MatcherAssert.assertThat (org.hamcrest.MatcherAssert.assertThat), Aggregator (org.apache.kafka.streams.kstream.Aggregator), TestRecord (org.apache.kafka.streams.test.TestRecord), Before (org.junit.Before), TopologyTestDriver (org.apache.kafka.streams.TopologyTestDriver), MockValueJoiner (org.apache.kafka.test.MockValueJoiner), KTable (org.apache.kafka.streams.kstream.KTable), Properties (java.util.Properties), TestOutputTopic (org.apache.kafka.streams.TestOutputTopic), Consumed (org.apache.kafka.streams.kstream.Consumed), KeyValue (org.apache.kafka.streams.KeyValue), Assert.assertTrue (org.junit.Assert.assertTrue), Test (org.junit.Test), Grouped (org.apache.kafka.streams.kstream.Grouped), MockAggregator (org.apache.kafka.test.MockAggregator), Bytes (org.apache.kafka.common.utils.Bytes), SlidingWindows (org.apache.kafka.streams.kstream.SlidingWindows), Materialized (org.apache.kafka.streams.kstream.Materialized), IntegerDeserializer (org.apache.kafka.common.serialization.IntegerDeserializer), TestInputTopic (org.apache.kafka.streams.TestInputTopic), CogroupedKStream (org.apache.kafka.streams.kstream.CogroupedKStream), Windows (org.apache.kafka.streams.kstream.Windows), StreamsTestUtils (org.apache.kafka.test.StreamsTestUtils)
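
STRING_INITIALIZER and STRING_AGGREGATOR are fixtures defined elsewhere in the test class. Plausible definitions, assuming an empty-string identity and simple concatenation (any Initializer/Aggregator pair would exercise the same repartition-topic logic being asserted here):

private static final Initializer<String> STRING_INITIALIZER = () -> "";

private static final Aggregator<String, String, String> STRING_AGGREGATOR =
        // Append each new value to the running aggregate for the key.
        (key, value, aggregate) -> aggregate + value;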

Aggregations

KeyValue (org.apache.kafka.streams.KeyValue): 343
Test (org.junit.Test): 268
Properties (java.util.Properties): 127
StreamsBuilder (org.apache.kafka.streams.StreamsBuilder): 123
Windowed (org.apache.kafka.streams.kstream.Windowed): 105
ArrayList (java.util.ArrayList): 90
KafkaStreams (org.apache.kafka.streams.KafkaStreams): 82
StringSerializer (org.apache.kafka.common.serialization.StringSerializer): 74
Bytes (org.apache.kafka.common.utils.Bytes): 74
TopologyTestDriver (org.apache.kafka.streams.TopologyTestDriver): 68
IntegrationTest (org.apache.kafka.test.IntegrationTest): 66
Serdes (org.apache.kafka.common.serialization.Serdes): 65
KeyValueStore (org.apache.kafka.streams.state.KeyValueStore): 62
StreamsConfig (org.apache.kafka.streams.StreamsConfig): 55
StringDeserializer (org.apache.kafka.common.serialization.StringDeserializer): 53
KStream (org.apache.kafka.streams.kstream.KStream): 52
SessionWindow (org.apache.kafka.streams.kstream.internals.SessionWindow): 46
KTable (org.apache.kafka.streams.kstream.KTable): 43
MatcherAssert.assertThat (org.hamcrest.MatcherAssert.assertThat): 42
Consumed (org.apache.kafka.streams.kstream.Consumed): 41