
Example 76 with KStreamBuilder

Use of org.apache.kafka.streams.kstream.KStreamBuilder in the Apache Kafka project.

From the class KTableKTableOuterJoinTest, method testJoin.

@Test
public void testJoin() throws Exception {
    KStreamBuilder builder = new KStreamBuilder();
    final int[] expectedKeys = new int[] { 0, 1, 2, 3 };
    KTable<Integer, String> table1;
    KTable<Integer, String> table2;
    KTable<Integer, String> joined;
    MockProcessorSupplier<Integer, String> processor;
    processor = new MockProcessorSupplier<>();
    table1 = builder.table(intSerde, stringSerde, topic1, storeName1);
    table2 = builder.table(intSerde, stringSerde, topic2, storeName2);
    joined = table1.outerJoin(table2, MockValueJoiner.TOSTRING_JOINER);
    joined.toStream().process(processor);
    Collection<Set<String>> copartitionGroups = builder.copartitionGroups();
    assertEquals(1, copartitionGroups.size());
    assertEquals(new HashSet<>(Arrays.asList(topic1, topic2)), copartitionGroups.iterator().next());
    KTableValueGetterSupplier<Integer, String> getterSupplier = ((KTableImpl<Integer, String, String>) joined).valueGetterSupplier();
    driver = new KStreamTestDriver(builder, stateDir);
    KTableValueGetter<Integer, String> getter = getterSupplier.get();
    getter.init(driver.context());
    for (int i = 0; i < 2; i++) {
        driver.process(topic1, expectedKeys[i], "X" + expectedKeys[i]);
    }
    // pass a tuple with a null key; it will be discarded by the join processor
    driver.process(topic1, null, "SomeVal");
    driver.flushState();
    processor.checkAndClearProcessResult("0:X0+null", "1:X1+null");
    checkJoinedValues(getter, kv(0, "X0+null"), kv(1, "X1+null"), kv(2, null), kv(3, null));
    for (int i = 0; i < 2; i++) {
        driver.process(topic2, expectedKeys[i], "Y" + expectedKeys[i]);
    }
    // pass a tuple with a null key; it will be discarded by the join processor
    driver.process(topic2, null, "AnotherVal");
    driver.flushState();
    processor.checkAndClearProcessResult("0:X0+Y0", "1:X1+Y1");
    checkJoinedValues(getter, kv(0, "X0+Y0"), kv(1, "X1+Y1"), kv(2, null), kv(3, null));
    for (int expectedKey : expectedKeys) {
        driver.process(topic1, expectedKey, "X" + expectedKey);
    }
    driver.flushState();
    processor.checkAndClearProcessResult("0:X0+Y0", "1:X1+Y1", "2:X2+null", "3:X3+null");
    checkJoinedValues(getter, kv(0, "X0+Y0"), kv(1, "X1+Y1"), kv(2, "X2+null"), kv(3, "X3+null"));
    // push all items to the other table; this should produce four join results
    for (int expectedKey : expectedKeys) {
        driver.process(topic2, expectedKey, "YY" + expectedKey);
    }
    driver.flushState();
    processor.checkAndClearProcessResult("0:X0+YY0", "1:X1+YY1", "2:X2+YY2", "3:X3+YY3");
    checkJoinedValues(getter, kv(0, "X0+YY0"), kv(1, "X1+YY1"), kv(2, "X2+YY2"), kv(3, "X3+YY3"));
    for (int expectedKey : expectedKeys) {
        driver.process(topic1, expectedKey, "X" + expectedKey);
    }
    driver.flushState();
    processor.checkAndClearProcessResult("0:X0+YY0", "1:X1+YY1", "2:X2+YY2", "3:X3+YY3");
    checkJoinedValues(getter, kv(0, "X0+YY0"), kv(1, "X1+YY1"), kv(2, "X2+YY2"), kv(3, "X3+YY3"));
    for (int i = 0; i < 2; i++) {
        driver.process(topic2, expectedKeys[i], null);
    }
    driver.flushState();
    processor.checkAndClearProcessResult("0:X0+null", "1:X1+null");
    checkJoinedValues(getter, kv(0, "X0+null"), kv(1, "X1+null"), kv(2, "X2+YY2"), kv(3, "X3+YY3"));
    for (int expectedKey : expectedKeys) {
        driver.process(topic1, expectedKey, "XX" + expectedKey);
    }
    driver.flushState();
    processor.checkAndClearProcessResult("0:XX0+null", "1:XX1+null", "2:XX2+YY2", "3:XX3+YY3");
    checkJoinedValues(getter, kv(0, "XX0+null"), kv(1, "XX1+null"), kv(2, "XX2+YY2"), kv(3, "XX3+YY3"));
    for (int i = 1; i < 3; i++) {
        driver.process(topic1, expectedKeys[i], null);
    }
    driver.flushState();
    processor.checkAndClearProcessResult("1:null", "2:null+YY2");
    checkJoinedValues(getter, kv(0, "XX0+null"), kv(1, null), kv(2, "null+YY2"), kv(3, "XX3+YY3"));
}
Also used: KStreamBuilder (org.apache.kafka.streams.kstream.KStreamBuilder), Set (java.util.Set), HashSet (java.util.HashSet), KStreamTestDriver (org.apache.kafka.test.KStreamTestDriver), Test (org.junit.Test)
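
For context, here is a minimal sketch (not part of the Kafka test above) of how the same KTable-KTable outer join could be wired into a standalone application with the KStreamBuilder API. The application id, broker address, topic names, store names, and the surrounding class are placeholders.

import java.util.Properties;

import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.kstream.KStreamBuilder;
import org.apache.kafka.streams.kstream.KTable;

public class KTableOuterJoinSketch {

    public static void main(String[] args) {
        // placeholder configuration
        Properties props = new Properties();
        props.put(StreamsConfig.APPLICATION_ID_CONFIG, "ktable-outer-join-sketch");
        props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");

        KStreamBuilder builder = new KStreamBuilder();
        KTable<Integer, String> left =
            builder.table(Serdes.Integer(), Serdes.String(), "left-topic", "left-store");
        KTable<Integer, String> right =
            builder.table(Serdes.Integer(), Serdes.String(), "right-topic", "right-store");

        // same join shape as the test: concatenate both sides, null for a missing side
        left.outerJoin(right, (l, r) -> l + "+" + r)
            .toStream()
            .to(Serdes.Integer(), Serdes.String(), "joined-topic");

        KafkaStreams streams = new KafkaStreams(builder, props);
        streams.start();
        Runtime.getRuntime().addShutdownHook(new Thread(streams::close));
    }
}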

Example 77 with KStreamBuilder

Use of org.apache.kafka.streams.kstream.KStreamBuilder in the Apache Kafka project.

From the class KTableMapKeysTest, method testMapKeysConvertingToStream.

@Test
public void testMapKeysConvertingToStream() {
    final KStreamBuilder builder = new KStreamBuilder();
    String topic1 = "topic_map_keys";
    KTable<Integer, String> table1 = builder.table(integerSerde, stringSerde, topic1, "anyStoreName");
    final Map<Integer, String> keyMap = new HashMap<>();
    keyMap.put(1, "ONE");
    keyMap.put(2, "TWO");
    keyMap.put(3, "THREE");
    KeyValueMapper<Integer, String, String> keyMapper = new KeyValueMapper<Integer, String, String>() {

        @Override
        public String apply(Integer key, String value) {
            return keyMap.get(key);
        }
    };
    KStream<String, String> convertedStream = table1.toStream(keyMapper);
    final String[] expected = new String[] { "ONE:V_ONE", "TWO:V_TWO", "THREE:V_THREE" };
    final int[] originalKeys = new int[] { 1, 2, 3 };
    final String[] values = new String[] { "V_ONE", "V_TWO", "V_THREE" };
    MockProcessorSupplier<String, String> processor = new MockProcessorSupplier<>();
    convertedStream.process(processor);
    driver = new KStreamTestDriver(builder, stateDir);
    for (int i = 0; i < originalKeys.length; i++) {
        driver.process(topic1, originalKeys[i], values[i]);
    }
    driver.flushState();
    assertEquals(3, processor.processed.size());
    for (int i = 0; i < expected.length; i++) {
        assertEquals(expected[i], processor.processed.get(i));
    }
}
Also used: KStreamBuilder (org.apache.kafka.streams.kstream.KStreamBuilder), HashMap (java.util.HashMap), KeyValueMapper (org.apache.kafka.streams.kstream.KeyValueMapper), KStreamTestDriver (org.apache.kafka.test.KStreamTestDriver), MockProcessorSupplier (org.apache.kafka.test.MockProcessorSupplier), Test (org.junit.Test)
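
Outside the test harness, the same toStream(KeyValueMapper) call re-keys a table while converting its changelog into a KStream. A minimal sketch, assuming the imports from the sketch above plus org.apache.kafka.streams.kstream.KStream; the topic names, store name, and the upper-casing mapper are placeholders.

KStreamBuilder builder = new KStreamBuilder();
KTable<Integer, String> table =
    builder.table(Serdes.Integer(), Serdes.String(), "input-topic", "input-store");

// derive a new key from each record while converting the table to a stream
KStream<String, String> rekeyed = table.toStream((key, value) -> value.toUpperCase());

rekeyed.to(Serdes.String(), Serdes.String(), "rekeyed-topic");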

Example 78 with KStreamBuilder

Use of org.apache.kafka.streams.kstream.KStreamBuilder in the Apache Kafka project.

From the class KTableMapValuesTest, method testSendingOldValue.

@Test
public void testSendingOldValue() throws IOException {
    KStreamBuilder builder = new KStreamBuilder();
    String topic1 = "topic1";
    KTableImpl<String, String, String> table1 = (KTableImpl<String, String, String>) builder.table(stringSerde, stringSerde, topic1, "anyStoreName");
    KTableImpl<String, String, Integer> table2 = (KTableImpl<String, String, Integer>) table1.mapValues(new ValueMapper<String, Integer>() {

        @Override
        public Integer apply(String value) {
            return Integer.valueOf(value);
        }
    });
    table2.enableSendingOldValues();
    MockProcessorSupplier<String, Integer> proc = new MockProcessorSupplier<>();
    builder.addProcessor("proc", proc, table2.name);
    driver = new KStreamTestDriver(builder, stateDir, null, null);
    assertTrue(table1.sendingOldValueEnabled());
    assertTrue(table2.sendingOldValueEnabled());
    driver.process(topic1, "A", "01");
    driver.process(topic1, "B", "01");
    driver.process(topic1, "C", "01");
    driver.flushState();
    proc.checkAndClearProcessResult("A:(1<-null)", "B:(1<-null)", "C:(1<-null)");
    driver.process(topic1, "A", "02");
    driver.process(topic1, "B", "02");
    driver.flushState();
    proc.checkAndClearProcessResult("A:(2<-1)", "B:(2<-1)");
    driver.process(topic1, "A", "03");
    driver.flushState();
    proc.checkAndClearProcessResult("A:(3<-2)");
    driver.process(topic1, "A", null);
    driver.flushState();
    proc.checkAndClearProcessResult("A:(null<-3)");
}
Also used: KStreamBuilder (org.apache.kafka.streams.kstream.KStreamBuilder), KStreamTestDriver (org.apache.kafka.test.KStreamTestDriver), ValueMapper (org.apache.kafka.streams.kstream.ValueMapper), MockProcessorSupplier (org.apache.kafka.test.MockProcessorSupplier), Test (org.junit.Test)
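
The mapping exercised by this test corresponds to the following public-API use of KTable.mapValues (enableSendingOldValues is internal plumbing that only the test calls). A minimal sketch with placeholder topic and store names, assuming the same imports as the sketches above.

KStreamBuilder builder = new KStreamBuilder();
KTable<String, String> source =
    builder.table(Serdes.String(), Serdes.String(), "numbers-topic", "numbers-store");

// parse each value into an Integer, like the test's ValueMapper
KTable<String, Integer> parsed = source.mapValues(Integer::valueOf);

parsed.toStream().to(Serdes.String(), Serdes.Integer(), "parsed-topic");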

Example 79 with KStreamBuilder

Use of org.apache.kafka.streams.kstream.KStreamBuilder in the Apache Kafka project.

From the class SimpleBenchmark, method createKafkaStreamsKStreamKTableJoin.

private KafkaStreams createKafkaStreamsKStreamKTableJoin(Properties streamConfig, String kStreamTopic, String kTableTopic, final CountDownLatch latch) {
    final KStreamBuilder builder = new KStreamBuilder();
    final KStream<Long, byte[]> input1 = builder.stream(kStreamTopic);
    final KTable<Long, byte[]> input2 = builder.table(kTableTopic, kTableTopic + "-store");
    input1.leftJoin(input2, VALUE_JOINER).foreach(new CountDownAction(latch));
    return createKafkaStreamsWithExceptionHandler(builder, streamConfig);
}
Also used: KStreamBuilder (org.apache.kafka.streams.kstream.KStreamBuilder)
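
VALUE_JOINER, CountDownAction, and createKafkaStreamsWithExceptionHandler are benchmark helpers defined elsewhere in SimpleBenchmark and not shown here. Below is a condensed sketch of the same stream-table left join with inline stand-ins for those helpers; the topic names and configuration values are placeholders, and it assumes java.util.concurrent.CountDownLatch plus the imports used in the sketches above.

// placeholder configuration; the benchmark builds its own via setStreamProperties(...)
Properties props = new Properties();
props.put(StreamsConfig.APPLICATION_ID_CONFIG, "kstream-ktable-join-sketch");
props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");

final CountDownLatch latch = new CountDownLatch(1);
final KStreamBuilder builder = new KStreamBuilder();
final KStream<Long, byte[]> stream = builder.stream("join-stream-topic");
final KTable<Long, byte[]> table = builder.table("join-table-topic", "join-table-topic-store");

// keep the stream-side value, as a pass-through ValueJoiner would, and count down on each result
stream.leftJoin(table, (streamValue, tableValue) -> streamValue)
      .foreach((key, value) -> latch.countDown());

KafkaStreams streams = new KafkaStreams(builder, props);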

Example 80 with KStreamBuilder

Use of org.apache.kafka.streams.kstream.KStreamBuilder in the Apache Kafka project.

From the class SimpleBenchmark, method createKafkaStreamsWithSink.

private KafkaStreams createKafkaStreamsWithSink(String topic, final CountDownLatch latch) {
    final Properties props = setStreamProperties("simple-benchmark-streams-with-sink");
    KStreamBuilder builder = new KStreamBuilder();
    KStream<Integer, byte[]> source = builder.stream(INTEGER_SERDE, BYTE_SERDE, topic);
    source.to(INTEGER_SERDE, BYTE_SERDE, SINK_TOPIC);
    source.process(new ProcessorSupplier<Integer, byte[]>() {

        @Override
        public Processor<Integer, byte[]> get() {
            return new AbstractProcessor<Integer, byte[]>() {

                @Override
                public void init(ProcessorContext context) {
                }

                @Override
                public void process(Integer key, byte[] value) {
                    processedRecords++;
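                    // note: Integer.SIZE is in bits (32), not bytes; it presumably stands in for the Integer key's size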
                    processedBytes += value.length + Integer.SIZE;
                    if (processedRecords == numRecords) {
                        latch.countDown();
                    }
                }

                @Override
                public void punctuate(long timestamp) {
                }

                @Override
                public void close() {
                }
            };
        }
    });
    return createKafkaStreamsWithExceptionHandler(builder, props);
}
Also used: KStreamBuilder (org.apache.kafka.streams.kstream.KStreamBuilder), Processor (org.apache.kafka.streams.processor.Processor), AbstractProcessor (org.apache.kafka.streams.processor.AbstractProcessor), Properties (java.util.Properties), ProcessorContext (org.apache.kafka.streams.processor.ProcessorContext)
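
The low-level Processor above only counts records and bytes; the counting part can also be expressed with the DSL's foreach, as in this minimal sketch. The record count, topic names, and the AtomicLong standing in for the benchmark's processedRecords field are placeholders, and it assumes java.util.concurrent.atomic.AtomicLong plus the imports used in the sketches above.

final long numRecords = 1_000_000L;            // placeholder target count
final CountDownLatch latch = new CountDownLatch(1);
final AtomicLong processed = new AtomicLong();

KStreamBuilder builder = new KStreamBuilder();
KStream<Integer, byte[]> source =
    builder.stream(Serdes.Integer(), Serdes.ByteArray(), "source-topic");

// forward every record to the sink topic and count it as a side effect
source.to(Serdes.Integer(), Serdes.ByteArray(), "sink-topic");
source.foreach((key, value) -> {
    if (processed.incrementAndGet() == numRecords) {
        latch.countDown();
    }
});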

Aggregations

Types most frequently used together with KStreamBuilder, with usage counts:

KStreamBuilder (org.apache.kafka.streams.kstream.KStreamBuilder): 122 uses
Test (org.junit.Test): 95 uses
KStreamTestDriver (org.apache.kafka.test.KStreamTestDriver): 60 uses
Properties (java.util.Properties): 31 uses
MockProcessorSupplier (org.apache.kafka.test.MockProcessorSupplier): 25 uses
KafkaStreams (org.apache.kafka.streams.KafkaStreams): 23 uses
HashSet (java.util.HashSet): 21 uses
Set (java.util.Set): 19 uses
KeyValue (org.apache.kafka.streams.KeyValue): 19 uses
HashMap (java.util.HashMap): 14 uses
Metrics (org.apache.kafka.common.metrics.Metrics): 13 uses
StreamsConfig (org.apache.kafka.streams.StreamsConfig): 13 uses
KeyValueMapper (org.apache.kafka.streams.kstream.KeyValueMapper): 13 uses
ValueMapper (org.apache.kafka.streams.kstream.ValueMapper): 13 uses
TopicPartition (org.apache.kafka.common.TopicPartition): 11 uses
Predicate (org.apache.kafka.streams.kstream.Predicate): 10 uses
TaskId (org.apache.kafka.streams.processor.TaskId): 9 uses
MockKeyValueMapper (org.apache.kafka.test.MockKeyValueMapper): 9 uses
ArrayList (java.util.ArrayList): 8 uses
Before (org.junit.Before): 8 uses