
Example 71 with IntegerSerializer

Use of org.apache.kafka.common.serialization.IntegerSerializer in the Apache Kafka project.

From the class KTableKTableForeignKeyJoinScenarioTest, method shouldUseExpectedTopicsWithSerde.

@Test
public void shouldUseExpectedTopicsWithSerde() {
    final String applicationId = "ktable-ktable-joinOnForeignKey";
    final Properties streamsConfig = mkProperties(mkMap(mkEntry(StreamsConfig.APPLICATION_ID_CONFIG, applicationId), mkEntry(StreamsConfig.STATE_DIR_CONFIG, TestUtils.tempDirectory().getPath())));
    final UniqueTopicSerdeScope serdeScope = new UniqueTopicSerdeScope();
    final StreamsBuilder builder = new StreamsBuilder();
    final KTable<Integer, String> left = builder.table(LEFT_TABLE, Consumed.with(serdeScope.decorateSerde(Serdes.Integer(), streamsConfig, true), serdeScope.decorateSerde(Serdes.String(), streamsConfig, false)));
    final KTable<Integer, String> right = builder.table(RIGHT_TABLE, Consumed.with(serdeScope.decorateSerde(Serdes.Integer(), streamsConfig, true), serdeScope.decorateSerde(Serdes.String(), streamsConfig, false)));
    left.join(right, value -> Integer.parseInt(value.split("\\|")[1]), (value1, value2) -> "(" + value1 + "," + value2 + ")", Materialized.with(null, serdeScope.decorateSerde(Serdes.String(), streamsConfig, false))).toStream().to(OUTPUT);
    final Topology topology = builder.build(streamsConfig);
    try (final TopologyTestDriver driver = new TopologyTestDriver(topology, streamsConfig)) {
        final TestInputTopic<Integer, String> leftInput = driver.createInputTopic(LEFT_TABLE, new IntegerSerializer(), new StringSerializer());
        final TestInputTopic<Integer, String> rightInput = driver.createInputTopic(RIGHT_TABLE, new IntegerSerializer(), new StringSerializer());
        leftInput.pipeInput(2, "lhsValue1|1");
        rightInput.pipeInput(1, "rhsValue1");
    }
    // verifying primarily that no extra pseudo-topics were used, but it's nice to also verify the rest of the
    // topics our serdes serialize data for
    assertThat(serdeScope.registeredTopics(), is(mkSet(
        // expected pseudo-topics
        applicationId + "-KTABLE-FK-JOIN-SUBSCRIPTION-REGISTRATION-0000000006-topic-fk--key",
        applicationId + "-KTABLE-FK-JOIN-SUBSCRIPTION-REGISTRATION-0000000006-topic-pk--key",
        applicationId + "-KTABLE-FK-JOIN-SUBSCRIPTION-REGISTRATION-0000000006-topic-vh--value",
        // internal topics
        applicationId + "-KTABLE-FK-JOIN-SUBSCRIPTION-REGISTRATION-0000000006-topic--key",
        applicationId + "-KTABLE-FK-JOIN-SUBSCRIPTION-RESPONSE-0000000014-topic--key",
        applicationId + "-KTABLE-FK-JOIN-SUBSCRIPTION-RESPONSE-0000000014-topic--value",
        applicationId + "-left_table-STATE-STORE-0000000000-changelog--key",
        applicationId + "-left_table-STATE-STORE-0000000000-changelog--value",
        applicationId + "-right_table-STATE-STORE-0000000003-changelog--key",
        applicationId + "-right_table-STATE-STORE-0000000003-changelog--value",
        // output topics
        "output-topic--key",
        "output-topic--value")));
}
Also used : UniqueTopicSerdeScope(org.apache.kafka.streams.utils.UniqueTopicSerdeScope) StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) TopologyTestDriver(org.apache.kafka.streams.TopologyTestDriver) Topology(org.apache.kafka.streams.Topology) Utils.mkProperties(org.apache.kafka.common.utils.Utils.mkProperties) Properties(java.util.Properties) IntegerSerializer(org.apache.kafka.common.serialization.IntegerSerializer) StringSerializer(org.apache.kafka.common.serialization.StringSerializer) Test(org.junit.Test)
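
All of the examples in this listing construct IntegerSerializer directly. For reference, the sketch below (not taken from the Kafka test suite; class and topic names are illustrative) shows the round trip that IntegerSerializer and its IntegerDeserializer counterpart perform on their own:

import org.apache.kafka.common.serialization.IntegerDeserializer;
import org.apache.kafka.common.serialization.IntegerSerializer;

public class IntegerSerdeRoundTripSketch {
    public static void main(final String[] args) {
        final IntegerSerializer serializer = new IntegerSerializer();
        final IntegerDeserializer deserializer = new IntegerDeserializer();

        // IntegerSerializer encodes the value as four big-endian bytes; the topic argument is ignored
        final byte[] bytes = serializer.serialize("any-topic", 42);
        final Integer roundTripped = deserializer.deserialize("any-topic", bytes);

        // prints: 4 bytes, value = 42
        System.out.println(bytes.length + " bytes, value = " + roundTripped);
    }
}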

Example 72 with IntegerSerializer

Use of org.apache.kafka.common.serialization.IntegerSerializer in the Apache Kafka project.

From the class KStreamKStreamJoinTest, method testJoin.

@Test
public void testJoin() {
    final StreamsBuilder builder = new StreamsBuilder();
    final int[] expectedKeys = new int[] { 0, 1, 2, 3 };
    final KStream<Integer, String> stream1;
    final KStream<Integer, String> stream2;
    final KStream<Integer, String> joined;
    final MockApiProcessorSupplier<Integer, String, Void, Void> supplier = new MockApiProcessorSupplier<>();
    stream1 = builder.stream(topic1, consumed);
    stream2 = builder.stream(topic2, consumed);
    joined = stream1.join(stream2, MockValueJoiner.TOSTRING_JOINER, JoinWindows.ofTimeDifferenceWithNoGrace(ofMillis(100L)), StreamJoined.with(Serdes.Integer(), Serdes.String(), Serdes.String()));
    joined.process(supplier);
    final Collection<Set<String>> copartitionGroups = TopologyWrapper.getInternalTopologyBuilder(builder.build()).copartitionGroups();
    assertEquals(1, copartitionGroups.size());
    assertEquals(new HashSet<>(Arrays.asList(topic1, topic2)), copartitionGroups.iterator().next());
    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        final TestInputTopic<Integer, String> inputTopic1 = driver.createInputTopic(topic1, new IntegerSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
        final TestInputTopic<Integer, String> inputTopic2 = driver.createInputTopic(topic2, new IntegerSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
        final MockApiProcessor<Integer, String, Void, Void> processor = supplier.theCapturedProcessor();
        // push two items into stream1; the window of stream2 is empty, so this should produce no items
        // --> w1 = { 0:A0, 1:A1 }, w2 = {}
        for (int i = 0; i < 2; i++) {
            inputTopic1.pipeInput(expectedKeys[i], "A" + expectedKeys[i]);
        }
        processor.checkAndClearProcessResult();
        // push two items into stream2; both join with the earlier A records
        // --> w2 = { 0:a0, 1:a1 }
        for (int i = 0; i < 2; i++) {
            inputTopic2.pipeInput(expectedKeys[i], "a" + expectedKeys[i]);
        }
        processor.checkAndClearProcessResult(new KeyValueTimestamp<>(0, "A0+a0", 0L), new KeyValueTimestamp<>(1, "A1+a1", 0L));
        // push four items into stream1; the first two join with a0/a1
        // --> w1 = { 0:A0, 1:A1, 0:B0, 1:B1, 2:B2, 3:B3 }, w2 = { 0:a0, 1:a1 }
        for (final int expectedKey : expectedKeys) {
            inputTopic1.pipeInput(expectedKey, "B" + expectedKey);
        }
        processor.checkAndClearProcessResult(new KeyValueTimestamp<>(0, "B0+a0", 0L), new KeyValueTimestamp<>(1, "B1+a1", 0L));
        // push four items into stream2; all of them join with the earlier A and/or B records
        // --> w2 = { 0:a0, 1:a1, 0:b0, 1:b1, 2:b2, 3:b3 }
        for (final int expectedKey : expectedKeys) {
            inputTopic2.pipeInput(expectedKey, "b" + expectedKey);
        }
        processor.checkAndClearProcessResult(new KeyValueTimestamp<>(0, "A0+b0", 0L), new KeyValueTimestamp<>(0, "B0+b0", 0L), new KeyValueTimestamp<>(1, "A1+b1", 0L), new KeyValueTimestamp<>(1, "B1+b1", 0L), new KeyValueTimestamp<>(2, "B2+b2", 0L), new KeyValueTimestamp<>(3, "B3+b3", 0L));
        // push four items into stream1; all of them join with the a and/or b records
        // w2 = { 0:a0, 1:a1, 0:b0, 1:b1, 2:b2, 3:b3 }
        for (final int expectedKey : expectedKeys) {
            inputTopic1.pipeInput(expectedKey, "C" + expectedKey);
        }
        processor.checkAndClearProcessResult(new KeyValueTimestamp<>(0, "C0+a0", 0L), new KeyValueTimestamp<>(0, "C0+b0", 0L), new KeyValueTimestamp<>(1, "C1+a1", 0L), new KeyValueTimestamp<>(1, "C1+b1", 0L), new KeyValueTimestamp<>(2, "C2+b2", 0L), new KeyValueTimestamp<>(3, "C3+b3", 0L));
        // push two items into stream2; they join with the earlier A/B/C records
        // --> w2 = { 0:a0, 1:a1, 0:b0, 1:b1, 2:b2, 3:b3, 0:c0, 1:c1 }
        for (int i = 0; i < 2; i++) {
            inputTopic2.pipeInput(expectedKeys[i], "c" + expectedKeys[i]);
        }
        processor.checkAndClearProcessResult(new KeyValueTimestamp<>(0, "A0+c0", 0L), new KeyValueTimestamp<>(0, "B0+c0", 0L), new KeyValueTimestamp<>(0, "C0+c0", 0L), new KeyValueTimestamp<>(1, "A1+c1", 0L), new KeyValueTimestamp<>(1, "B1+c1", 0L), new KeyValueTimestamp<>(1, "C1+c1", 0L));
    }
}
Also used : MockApiProcessorSupplier(org.apache.kafka.test.MockApiProcessorSupplier) HashSet(java.util.HashSet) Set(java.util.Set) TopologyTestDriver(org.apache.kafka.streams.TopologyTestDriver) IntegerSerializer(org.apache.kafka.common.serialization.IntegerSerializer) StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) StringSerializer(org.apache.kafka.common.serialization.StringSerializer) Test(org.junit.Test)
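
The test above verifies the join through MockApiProcessorSupplier, an internal utility that is only available in the Kafka test sources. Below is a hedged, self-contained sketch of the same pattern that instead sends the join result to an output topic and reads it back with TestOutputTopic; the class name, topic names, and the inline "left+right" joiner (mirroring what MockValueJoiner.TOSTRING_JOINER produces in the expected results above) are assumptions, not part of the original test.

import java.time.Duration;
import java.time.Instant;
import java.util.Properties;

import org.apache.kafka.common.serialization.IntegerDeserializer;
import org.apache.kafka.common.serialization.IntegerSerializer;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.TestInputTopic;
import org.apache.kafka.streams.TestOutputTopic;
import org.apache.kafka.streams.TopologyTestDriver;
import org.apache.kafka.streams.kstream.Consumed;
import org.apache.kafka.streams.kstream.JoinWindows;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.Produced;
import org.apache.kafka.streams.kstream.StreamJoined;

public class WindowedJoinOutputTopicSketch {
    public static void main(final String[] args) {
        final Properties props = new Properties();
        props.put(StreamsConfig.APPLICATION_ID_CONFIG, "windowed-join-sketch");
        props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "dummy:1234");

        final StreamsBuilder builder = new StreamsBuilder();
        final KStream<Integer, String> left = builder.stream("left-topic", Consumed.with(Serdes.Integer(), Serdes.String()));
        final KStream<Integer, String> right = builder.stream("right-topic", Consumed.with(Serdes.Integer(), Serdes.String()));
        left.join(right,
                (leftValue, rightValue) -> leftValue + "+" + rightValue,
                JoinWindows.ofTimeDifferenceWithNoGrace(Duration.ofMillis(100L)),
                StreamJoined.with(Serdes.Integer(), Serdes.String(), Serdes.String()))
            .to("join-output", Produced.with(Serdes.Integer(), Serdes.String()));

        try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
            final TestInputTopic<Integer, String> leftInput =
                driver.createInputTopic("left-topic", new IntegerSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
            final TestInputTopic<Integer, String> rightInput =
                driver.createInputTopic("right-topic", new IntegerSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
            final TestOutputTopic<Integer, String> output =
                driver.createOutputTopic("join-output", new IntegerDeserializer(), new StringDeserializer());

            leftInput.pipeInput(0, "A0");
            rightInput.pipeInput(0, "a0");

            // one joined record is expected: key 0, value "A0+a0"
            System.out.println(output.readKeyValue());
        }
    }
}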

Example 73 with IntegerSerializer

Use of org.apache.kafka.common.serialization.IntegerSerializer in the Apache Kafka project.

From the class KStreamKStreamJoinTest, method testAsymmetricWindowingBefore.

@Test
public void testAsymmetricWindowingBefore() {
    final StreamsBuilder builder = new StreamsBuilder();
    final int[] expectedKeys = new int[] { 0, 1, 2, 3 };
    final KStream<Integer, String> stream1;
    final KStream<Integer, String> stream2;
    final KStream<Integer, String> joined;
    final MockApiProcessorSupplier<Integer, String, Void, Void> supplier = new MockApiProcessorSupplier<>();
    stream1 = builder.stream(topic1, consumed);
    stream2 = builder.stream(topic2, consumed);
    joined = stream1.join(stream2, MockValueJoiner.TOSTRING_JOINER, JoinWindows.ofTimeDifferenceWithNoGrace(ofMillis(0)).before(ofMillis(100)), StreamJoined.with(Serdes.Integer(), Serdes.String(), Serdes.String()));
    joined.process(supplier);
    final Collection<Set<String>> copartitionGroups = TopologyWrapper.getInternalTopologyBuilder(builder.build()).copartitionGroups();
    assertEquals(1, copartitionGroups.size());
    assertEquals(new HashSet<>(Arrays.asList(topic1, topic2)), copartitionGroups.iterator().next());
    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        final TestInputTopic<Integer, String> inputTopic1 = driver.createInputTopic(topic1, new IntegerSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
        final TestInputTopic<Integer, String> inputTopic2 = driver.createInputTopic(topic2, new IntegerSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
        final MockApiProcessor<Integer, String, Void, Void> processor = supplier.theCapturedProcessor();
        long time = 1000L;
        // push four items into stream1 with increasing timestamps; the window of stream2 is empty, so this should produce no items
        // --> w1 = { 0:A0 (ts: 1000), 1:A1 (ts: 1001), 2:A2 (ts: 1002), 3:A3 (ts: 1003) }, w2 = {}
        for (int i = 0; i < expectedKeys.length; i++) {
            inputTopic1.pipeInput(expectedKeys[i], "A" + expectedKeys[i], time + i);
        }
        processor.checkAndClearProcessResult();
        // push four items with smaller timestamps (before the window) to the other stream; this should produce no items
        // w1 = { 0:A0 (ts: 1000), 1:A1 (ts: 1001), 2:A2 (ts: 1002), 3:A3 (ts: 1003) }
        // w2 = {}
        // --> w1 = { 0:A0 (ts: 1000), 1:A1 (ts: 1001), 2:A2 (ts: 1002), 3:A3 (ts: 1003) }
        // w2 = { 0:a0 (ts: 899), 1:a1 (ts: 899), 2:a2 (ts: 899), 3:a3 (ts: 899) }
        time = 1000L - 100L - 1L;
        for (final int expectedKey : expectedKeys) {
            inputTopic2.pipeInput(expectedKey, "a" + expectedKey, time);
        }
        processor.checkAndClearProcessResult();
        // push four items with increased timestamp to the other stream; this should produce one item
        // w1 = { 0:A0 (ts: 1000), 1:A1 (ts: 1001), 2:A2 (ts: 1002), 3:A3 (ts: 1003) }
        // w2 = { 0:a0 (ts: 899), 1:a1 (ts: 899), 2:a2 (ts: 899), 3:a3 (ts: 899) }
        // --> w1 = { 0:A0 (ts: 1000), 1:A1 (ts: 1001), 2:A2 (ts: 1002), 3:A3 (ts: 1003) }
        // w2 = { 0:a0 (ts: 899), 1:a1 (ts: 899), 2:a2 (ts: 899), 3:a3 (ts: 899),
        // 0:b0 (ts: 900), 1:b1 (ts: 900), 2:b2 (ts: 900), 3:b3 (ts: 900) }
        time += 1L;
        for (final int expectedKey : expectedKeys) {
            inputTopic2.pipeInput(expectedKey, "b" + expectedKey, time);
        }
        processor.checkAndClearProcessResult(new KeyValueTimestamp<>(0, "A0+b0", 1000L));
        // push four items with increased timestamp to the other stream; this should produce two items
        // w1 = { 0:A0 (ts: 1000), 1:A1 (ts: 1001), 2:A2 (ts: 1002), 3:A3 (ts: 1003) }
        // w2 = { 0:a0 (ts: 899), 1:a1 (ts: 899), 2:a2 (ts: 899), 3:a3 (ts: 899),
        // 0:b0 (ts: 900), 1:b1 (ts: 900), 2:b2 (ts: 900), 3:b3 (ts: 900) }
        // --> w1 = { 0:A0 (ts: 1000), 1:A1 (ts: 1001), 2:A2 (ts: 1002), 3:A3 (ts: 1003) }
        // w2 = { 0:a0 (ts: 899), 1:a1 (ts: 899), 2:a2 (ts: 899), 3:a3 (ts: 899),
        // 0:b0 (ts: 900), 1:b1 (ts: 900), 2:b2 (ts: 900), 3:b3 (ts: 900),
        // 0:c0 (ts: 901), 1:c1 (ts: 901), 2:c2 (ts: 901), 3:c3 (ts: 901) }
        time += 1L;
        for (final int expectedKey : expectedKeys) {
            inputTopic2.pipeInput(expectedKey, "c" + expectedKey, time);
        }
        processor.checkAndClearProcessResult(new KeyValueTimestamp<>(0, "A0+c0", 1000L), new KeyValueTimestamp<>(1, "A1+c1", 1001L));
        // push four items with increased timestamp to the other stream; this should produce three items
        // w1 = { 0:A0 (ts: 1000), 1:A1 (ts: 1001), 2:A2 (ts: 1002), 3:A3 (ts: 1003) }
        // w2 = { 0:a0 (ts: 899), 1:a1 (ts: 899), 2:a2 (ts: 899), 3:a3 (ts: 899),
        // 0:b0 (ts: 900), 1:b1 (ts: 900), 2:b2 (ts: 900), 3:b3 (ts: 900),
        // 0:c0 (ts: 901), 1:c1 (ts: 901), 2:c2 (ts: 901), 3:c3 (ts: 901) }
        // --> w1 = { 0:A0 (ts: 1000), 1:A1 (ts: 1001), 2:A2 (ts: 1002), 3:A3 (ts: 1003) }
        // w2 = { 0:a0 (ts: 899), 1:a1 (ts: 899), 2:a2 (ts: 899), 3:a3 (ts: 899),
        // 0:b0 (ts: 900), 1:b1 (ts: 900), 2:b2 (ts: 900), 3:b3 (ts: 900),
        // 0:c0 (ts: 901), 1:c1 (ts: 901), 2:c2 (ts: 901), 3:c3 (ts: 901),
        // 0:d0 (ts: 902), 1:d1 (ts: 902), 2:d2 (ts: 902), 3:d3 (ts: 902) }
        time += 1L;
        for (final int expectedKey : expectedKeys) {
            inputTopic2.pipeInput(expectedKey, "d" + expectedKey, time);
        }
        processor.checkAndClearProcessResult(new KeyValueTimestamp<>(0, "A0+d0", 1000L), new KeyValueTimestamp<>(1, "A1+d1", 1001L), new KeyValueTimestamp<>(2, "A2+d2", 1002L));
        // push four items with increased timestamp to the other stream; this should produce four items
        // w1 = { 0:A0 (ts: 1000), 1:A1 (ts: 1001), 2:A2 (ts: 1002), 3:A3 (ts: 1003) }
        // w2 = { 0:a0 (ts: 899), 1:a1 (ts: 899), 2:a2 (ts: 899), 3:a3 (ts: 899),
        // 0:b0 (ts: 900), 1:b1 (ts: 900), 2:b2 (ts: 900), 3:b3 (ts: 900),
        // 0:c0 (ts: 901), 1:c1 (ts: 901), 2:c2 (ts: 901), 3:c3 (ts: 901),
        // 0:d0 (ts: 902), 1:d1 (ts: 902), 2:d2 (ts: 902), 3:d3 (ts: 902) }
        // --> w1 = { 0:A0 (ts: 1000), 1:A1 (ts: 1001), 2:A2 (ts: 1002), 3:A3 (ts: 1003) }
        // w2 = { 0:a0 (ts: 899), 1:a1 (ts: 899), 2:a2 (ts: 899), 3:a3 (ts: 899),
        // 0:b0 (ts: 900), 1:b1 (ts: 900), 2:b2 (ts: 900), 3:b3 (ts: 900),
        // 0:c0 (ts: 901), 1:c1 (ts: 901), 2:c2 (ts: 901), 3:c3 (ts: 901),
        // 0:d0 (ts: 902), 1:d1 (ts: 902), 2:d2 (ts: 902), 3:d3 (ts: 902),
        // 0:e0 (ts: 903), 1:e1 (ts: 903), 2:e2 (ts: 903), 3:e3 (ts: 903) }
        time += 1L;
        for (final int expectedKey : expectedKeys) {
            inputTopic2.pipeInput(expectedKey, "e" + expectedKey, time);
        }
        processor.checkAndClearProcessResult(new KeyValueTimestamp<>(0, "A0+e0", 1000L), new KeyValueTimestamp<>(1, "A1+e1", 1001L), new KeyValueTimestamp<>(2, "A2+e2", 1002L), new KeyValueTimestamp<>(3, "A3+e3", 1003L));
        // push four items with larger timestamp to the other stream; this should produce four items
        // w1 = { 0:A0 (ts: 1000), 1:A1 (ts: 1001), 2:A2 (ts: 1002), 3:A3 (ts: 1003) }
        // w2 = { 0:a0 (ts: 899), 1:a1 (ts: 899), 2:a2 (ts: 899), 3:a3 (ts: 899),
        // 0:b0 (ts: 900), 1:b1 (ts: 900), 2:b2 (ts: 900), 3:b3 (ts: 900),
        // 0:c0 (ts: 901), 1:c1 (ts: 901), 2:c2 (ts: 901), 3:c3 (ts: 901),
        // 0:d0 (ts: 902), 1:d1 (ts: 902), 2:d2 (ts: 902), 3:d3 (ts: 902),
        // 0:e0 (ts: 903), 1:e1 (ts: 903), 2:e2 (ts: 903), 3:e3 (ts: 903) }
        // --> w1 = { 0:A0 (ts: 1000), 1:A1 (ts: 1001), 2:A2 (ts: 1002), 3:A3 (ts: 1003) }
        // w2 = { 0:a0 (ts: 899), 1:a1 (ts: 899), 2:a2 (ts: 899), 3:a3 (ts: 899),
        // 0:b0 (ts: 900), 1:b1 (ts: 900), 2:b2 (ts: 900), 3:b3 (ts: 900),
        // 0:c0 (ts: 901), 1:c1 (ts: 901), 2:c2 (ts: 901), 3:c3 (ts: 901),
        // 0:d0 (ts: 902), 1:d1 (ts: 902), 2:d2 (ts: 902), 3:d3 (ts: 902),
        // 0:e0 (ts: 903), 1:e1 (ts: 903), 2:e2 (ts: 903), 3:e3 (ts: 903),
        // 0:f0 (ts: 1000), 1:f1 (ts: 1000), 2:f2 (ts: 1000), 3:f3 (ts: 1000) }
        time = 1000L;
        for (final int expectedKey : expectedKeys) {
            inputTopic2.pipeInput(expectedKey, "f" + expectedKey, time);
        }
        processor.checkAndClearProcessResult(new KeyValueTimestamp<>(0, "A0+f0", 1000L), new KeyValueTimestamp<>(1, "A1+f1", 1001L), new KeyValueTimestamp<>(2, "A2+f2", 1002L), new KeyValueTimestamp<>(3, "A3+f3", 1003L));
        // push four items with increased timestamps to the other stream; this should produce three items
        // w1 = { 0:A0 (ts: 1000), 1:A1 (ts: 1001), 2:A2 (ts: 1002), 3:A3 (ts: 1003) }
        // w2 = { 0:a0 (ts: 899), 1:a1 (ts: 899), 2:a2 (ts: 899), 3:a3 (ts: 899),
        // 0:b0 (ts: 900), 1:b1 (ts: 900), 2:b2 (ts: 900), 3:b3 (ts: 900),
        // 0:c0 (ts: 901), 1:c1 (ts: 901), 2:c2 (ts: 901), 3:c3 (ts: 901),
        // 0:d0 (ts: 902), 1:d1 (ts: 902), 2:d2 (ts: 902), 3:d3 (ts: 902),
        // 0:e0 (ts: 903), 1:e1 (ts: 903), 2:e2 (ts: 903), 3:e3 (ts: 903),
        // 0:f0 (ts: 1000), 1:f1 (ts: 1000), 2:f2 (ts: 1000), 3:f3 (ts: 1000) }
        // --> w1 = { 0:A0 (ts: 1000), 1:A1 (ts: 1001), 2:A2 (ts: 1002), 3:A3 (ts: 1003) }
        // w2 = { 0:a0 (ts: 899), 1:a1 (ts: 899), 2:a2 (ts: 899), 3:a3 (ts: 899),
        // 0:b0 (ts: 900), 1:b1 (ts: 900), 2:b2 (ts: 900), 3:b3 (ts: 900),
        // 0:c0 (ts: 901), 1:c1 (ts: 901), 2:c2 (ts: 901), 3:c3 (ts: 901),
        // 0:d0 (ts: 902), 1:d1 (ts: 902), 2:d2 (ts: 902), 3:d3 (ts: 902),
        // 0:e0 (ts: 903), 1:e1 (ts: 903), 2:e2 (ts: 903), 3:e3 (ts: 903),
        // 0:f0 (ts: 1000), 1:f1 (ts: 1000), 2:f2 (ts: 1000), 3:f3 (ts: 1000),
        // 0:g0 (ts: 1001), 1:g1 (ts: 1001), 2:g2 (ts: 1001), 3:g3 (ts: 1001) }
        time += 1L;
        for (final int expectedKey : expectedKeys) {
            inputTopic2.pipeInput(expectedKey, "g" + expectedKey, time);
        }
        processor.checkAndClearProcessResult(new KeyValueTimestamp<>(1, "A1+g1", 1001L), new KeyValueTimestamp<>(2, "A2+g2", 1002L), new KeyValueTimestamp<>(3, "A3+g3", 1003L));
        // push four items with increased timestamps to the other stream; this should produce two items
        // w1 = { 0:A0 (ts: 1000), 1:A1 (ts: 1001), 2:A2 (ts: 1002), 3:A3 (ts: 1003) }
        // w2 = { 0:a0 (ts: 899), 1:a1 (ts: 899), 2:a2 (ts: 899), 3:a3 (ts: 899),
        // 0:b0 (ts: 900), 1:b1 (ts: 900), 2:b2 (ts: 900), 3:b3 (ts: 900),
        // 0:c0 (ts: 901), 1:c1 (ts: 901), 2:c2 (ts: 901), 3:c3 (ts: 901),
        // 0:d0 (ts: 902), 1:d1 (ts: 902), 2:d2 (ts: 902), 3:d3 (ts: 902),
        // 0:e0 (ts: 903), 1:e1 (ts: 903), 2:e2 (ts: 903), 3:e3 (ts: 903),
        // 0:f0 (ts: 1000), 1:f1 (ts: 1000), 2:f2 (ts: 1000), 3:f3 (ts: 1000),
        // 0:g0 (ts: 1001), 1:g1 (ts: 1001), 2:g2 (ts: 1001), 3:g3 (ts: 1001) }
        // --> w1 = { 0:A0 (ts: 1000), 1:A1 (ts: 1001), 2:A2 (ts: 1002), 3:A3 (ts: 1003) }
        // w2 = { 0:a0 (ts: 899), 1:a1 (ts: 899), 2:a2 (ts: 899), 3:a3 (ts: 899),
        // 0:b0 (ts: 900), 1:b1 (ts: 900), 2:b2 (ts: 900), 3:b3 (ts: 900),
        // 0:c0 (ts: 901), 1:c1 (ts: 901), 2:c2 (ts: 901), 3:c3 (ts: 901),
        // 0:d0 (ts: 902), 1:d1 (ts: 902), 2:d2 (ts: 902), 3:d3 (ts: 902),
        // 0:e0 (ts: 903), 1:e1 (ts: 903), 2:e2 (ts: 903), 3:e3 (ts: 903),
        // 0:f0 (ts: 1000), 1:f1 (ts: 1000), 2:f2 (ts: 1000), 3:f3 (ts: 1000),
        // 0:g0 (ts: 1001), 1:g1 (ts: 1001), 2:g2 (ts: 1001), 3:g3 (ts: 1001),
        // 0:h0 (ts: 1002), 1:h1 (ts: 1002), 2:h2 (ts: 1002), 3:h3 (ts: 1002) }
        time += 1L;
        for (final int expectedKey : expectedKeys) {
            inputTopic2.pipeInput(expectedKey, "h" + expectedKey, time);
        }
        processor.checkAndClearProcessResult(new KeyValueTimestamp<>(2, "A2+h2", 1002L), new KeyValueTimestamp<>(3, "A3+h3", 1003L));
        // push four items with increased timestamps to the other stream; this should produce one item
        // w1 = { 0:A0 (ts: 1000), 1:A1 (ts: 1001), 2:A2 (ts: 1002), 3:A3 (ts: 1003) }
        // w2 = { 0:a0 (ts: 899), 1:a1 (ts: 899), 2:a2 (ts: 899), 3:a3 (ts: 899),
        // 0:b0 (ts: 900), 1:b1 (ts: 900), 2:b2 (ts: 900), 3:b3 (ts: 900),
        // 0:c0 (ts: 901), 1:c1 (ts: 901), 2:c2 (ts: 901), 3:c3 (ts: 901),
        // 0:d0 (ts: 902), 1:d1 (ts: 902), 2:d2 (ts: 902), 3:d3 (ts: 902),
        // 0:e0 (ts: 903), 1:e1 (ts: 903), 2:e2 (ts: 903), 3:e3 (ts: 903),
        // 0:f0 (ts: 1000), 1:f1 (ts: 1000), 2:f2 (ts: 1000), 3:f3 (ts: 1000),
        // 0:g0 (ts: 1001), 1:g1 (ts: 1001), 2:g2 (ts: 1001), 3:g3 (ts: 1001),
        // 0:h0 (ts: 1002), 1:h1 (ts: 1002), 2:h2 (ts: 1002), 3:h3 (ts: 1002) }
        // --> w1 = { 0:A0 (ts: 1000), 1:A1 (ts: 1001), 2:A2 (ts: 1002), 3:A3 (ts: 1003) }
        // w2 = { 0:a0 (ts: 899), 1:a1 (ts: 899), 2:a2 (ts: 899), 3:a3 (ts: 899),
        // 0:b0 (ts: 900), 1:b1 (ts: 900), 2:b2 (ts: 900), 3:b3 (ts: 900),
        // 0:c0 (ts: 901), 1:c1 (ts: 901), 2:c2 (ts: 901), 3:c3 (ts: 901),
        // 0:d0 (ts: 902), 1:d1 (ts: 902), 2:d2 (ts: 902), 3:d3 (ts: 902),
        // 0:e0 (ts: 903), 1:e1 (ts: 903), 2:e2 (ts: 903), 3:e3 (ts: 903),
        // 0:f0 (ts: 1000), 1:f1 (ts: 1000), 2:f2 (ts: 1000), 3:f3 (ts: 1000),
        // 0:g0 (ts: 1001), 1:g1 (ts: 1001), 2:g2 (ts: 1001), 3:g3 (ts: 1001),
        // 0:h0 (ts: 1002), 1:h1 (ts: 1002), 2:h2 (ts: 1002), 3:h3 (ts: 1002),
        // 0:i0 (ts: 1003), 1:i1 (ts: 1003), 2:i2 (ts: 1003), 3:i3 (ts: 1003) }
        time += 1L;
        for (final int expectedKey : expectedKeys) {
            inputTopic2.pipeInput(expectedKey, "i" + expectedKey, time);
        }
        processor.checkAndClearProcessResult(new KeyValueTimestamp<>(3, "A3+i3", 1003L));
        // push four items with increased timestamps to the other stream; they now fall outside every remaining window, so this should produce no items
        // w1 = { 0:A0 (ts: 1000), 1:A1 (ts: 1001), 2:A2 (ts: 1002), 3:A3 (ts: 1003) }
        // w2 = { 0:a0 (ts: 899), 1:a1 (ts: 899), 2:a2 (ts: 899), 3:a3 (ts: 899),
        // 0:b0 (ts: 900), 1:b1 (ts: 900), 2:b2 (ts: 900), 3:b3 (ts: 900),
        // 0:c0 (ts: 901), 1:c1 (ts: 901), 2:c2 (ts: 901), 3:c3 (ts: 901),
        // 0:d0 (ts: 902), 1:d1 (ts: 902), 2:d2 (ts: 902), 3:d3 (ts: 902),
        // 0:e0 (ts: 903), 1:e1 (ts: 903), 2:e2 (ts: 903), 3:e3 (ts: 903),
        // 0:f0 (ts: 1000), 1:f1 (ts: 1000), 2:f2 (ts: 1000), 3:f3 (ts: 1000),
        // 0:g0 (ts: 1001), 1:g1 (ts: 1001), 2:g2 (ts: 1001), 3:g3 (ts: 1001),
        // 0:h0 (ts: 1002), 1:h1 (ts: 1002), 2:h2 (ts: 1002), 3:h3 (ts: 1002),
        // 0:i0 (ts: 1003), 1:i1 (ts: 1003), 2:i2 (ts: 1003), 3:i3 (ts: 1003) }
        // --> w1 = { 0:A0 (ts: 1000), 1:A1 (ts: 1001), 2:A2 (ts: 1002), 3:A3 (ts: 1003) }
        // w2 = { 0:a0 (ts: 899), 1:a1 (ts: 899), 2:a2 (ts: 899), 3:a3 (ts: 899),
        // 0:b0 (ts: 900), 1:b1 (ts: 900), 2:b2 (ts: 900), 3:b3 (ts: 900),
        // 0:c0 (ts: 901), 1:c1 (ts: 901), 2:c2 (ts: 901), 3:c3 (ts: 901),
        // 0:d0 (ts: 902), 1:d1 (ts: 902), 2:d2 (ts: 902), 3:d3 (ts: 902),
        // 0:e0 (ts: 903), 1:e1 (ts: 903), 2:e2 (ts: 903), 3:e3 (ts: 903),
        // 0:f0 (ts: 1000), 1:f1 (ts: 1000), 2:f2 (ts: 1000), 3:f3 (ts: 1000),
        // 0:g0 (ts: 1001), 1:g1 (ts: 1001), 2:g2 (ts: 1001), 3:g3 (ts: 1001),
        // 0:h0 (ts: 1002), 1:h1 (ts: 1002), 2:h2 (ts: 1002), 3:h3 (ts: 1002),
        // 0:i0 (ts: 1003), 1:i1 (ts: 1003), 2:i2 (ts: 1003), 3:i3 (ts: 1003),
        // 0:j0 (ts: 1004), 1:j1 (ts: 1004), 2:j2 (ts: 1004), 3:j3 (ts: 1004) }
        time += 1L;
        for (final int expectedKey : expectedKeys) {
            inputTopic2.pipeInput(expectedKey, "j" + expectedKey, time);
        }
        processor.checkAndClearProcessResult();
    }
}
Also used : MockApiProcessorSupplier(org.apache.kafka.test.MockApiProcessorSupplier) HashSet(java.util.HashSet) Set(java.util.Set) TopologyTestDriver(org.apache.kafka.streams.TopologyTestDriver) IntegerSerializer(org.apache.kafka.common.serialization.IntegerSerializer) StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) StringSerializer(org.apache.kafka.common.serialization.StringSerializer) Test(org.junit.Test)
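
For reference, the asymmetric window in this test, JoinWindows.ofTimeDifferenceWithNoGrace(ofMillis(0)).before(ofMillis(100)), joins a left-side record at timestamp ts with right-side records whose timestamps fall in [ts - 100, ts]. The small helper below (not part of the test; names are illustrative) just computes those bounds, matching the expected results above:

import java.time.Duration;

public class AsymmetricWindowBoundsSketch {
    // mirrors ofTimeDifferenceWithNoGrace(ofMillis(0)).before(ofMillis(100)):
    // a right-side record may be up to 100 ms older than the left-side record, but not newer
    private static final long BEFORE_MS = Duration.ofMillis(100).toMillis();
    private static final long AFTER_MS = Duration.ofMillis(0).toMillis();

    static long lowerBound(final long leftTimestamp) {
        return leftTimestamp - BEFORE_MS;
    }

    static long upperBound(final long leftTimestamp) {
        return leftTimestamp + AFTER_MS;
    }

    public static void main(final String[] args) {
        // for the left record A0 at ts=1000 the accepted right-side range is [900, 1000],
        // which is why "a0" at ts=899 misses the window while "b0" at ts=900 joins
        System.out.println("[" + lowerBound(1000L) + ", " + upperBound(1000L) + "]");
    }
}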

Example 74 with IntegerSerializer

Use of org.apache.kafka.common.serialization.IntegerSerializer in the Apache Kafka project.

From the class KStreamKStreamJoinTest, method testOuterJoin.

@Test
public void testOuterJoin() {
    final StreamsBuilder builder = new StreamsBuilder();
    final int[] expectedKeys = new int[] { 0, 1, 2, 3 };
    final KStream<Integer, String> stream1;
    final KStream<Integer, String> stream2;
    final KStream<Integer, String> joined;
    final MockApiProcessorSupplier<Integer, String, Void, Void> supplier = new MockApiProcessorSupplier<>();
    stream1 = builder.stream(topic1, consumed);
    stream2 = builder.stream(topic2, consumed);
    joined = stream1.outerJoin(stream2, MockValueJoiner.TOSTRING_JOINER, JoinWindows.ofTimeDifferenceAndGrace(ofMillis(100L), ofHours(24L)), StreamJoined.with(Serdes.Integer(), Serdes.String(), Serdes.String()));
    joined.process(supplier);
    final Collection<Set<String>> copartitionGroups = TopologyWrapper.getInternalTopologyBuilder(builder.build()).copartitionGroups();
    assertEquals(1, copartitionGroups.size());
    assertEquals(new HashSet<>(Arrays.asList(topic1, topic2)), copartitionGroups.iterator().next());
    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        final TestInputTopic<Integer, String> inputTopic1 = driver.createInputTopic(topic1, new IntegerSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
        final TestInputTopic<Integer, String> inputTopic2 = driver.createInputTopic(topic2, new IntegerSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
        final MockApiProcessor<Integer, String, Void, Void> processor = supplier.theCapturedProcessor();
        // push two items into stream1; the window of stream2 is empty, so this should produce no items yet
        // --> w1 = { 0:A0, 1:A1 }, w2 = {}
        for (int i = 0; i < 2; i++) {
            inputTopic1.pipeInput(expectedKeys[i], "A" + expectedKeys[i]);
        }
        processor.checkAndClearProcessResult();
        // push two items into stream2; both join with the earlier A records
        // --> w2 = { 0:a0, 1:a1 }
        for (int i = 0; i < 2; i++) {
            inputTopic2.pipeInput(expectedKeys[i], "a" + expectedKeys[i]);
        }
        processor.checkAndClearProcessResult(new KeyValueTimestamp<>(0, "A0+a0", 0L), new KeyValueTimestamp<>(1, "A1+a1", 0L));
        // push four items into stream1; the first two join with a0/a1
        // --> w1 = { 0:A0, 1:A1, 0:B0, 1:B1, 2:B2, 3:B3 }, w2 = { 0:a0, 1:a1 }
        for (final int expectedKey : expectedKeys) {
            inputTopic1.pipeInput(expectedKey, "B" + expectedKey);
        }
        processor.checkAndClearProcessResult(new KeyValueTimestamp<>(0, "B0+a0", 0L), new KeyValueTimestamp<>(1, "B1+a1", 0L));
        // push four items into stream2; all of them join with the earlier A and/or B records
        // --> w2 = { 0:a0, 1:a1, 0:b0, 1:b1, 2:b2, 3:b3 }
        for (final int expectedKey : expectedKeys) {
            inputTopic2.pipeInput(expectedKey, "b" + expectedKey);
        }
        processor.checkAndClearProcessResult(new KeyValueTimestamp<>(0, "A0+b0", 0L), new KeyValueTimestamp<>(0, "B0+b0", 0L), new KeyValueTimestamp<>(1, "A1+b1", 0L), new KeyValueTimestamp<>(1, "B1+b1", 0L), new KeyValueTimestamp<>(2, "B2+b2", 0L), new KeyValueTimestamp<>(3, "B3+b3", 0L));
        // push four items into stream1; all of them join with the a and/or b records
        // w2 = { 0:a0, 1:a1, 0:b0, 1:b1, 2:b2, 3:b3 }
        for (final int expectedKey : expectedKeys) {
            inputTopic1.pipeInput(expectedKey, "C" + expectedKey);
        }
        processor.checkAndClearProcessResult(new KeyValueTimestamp<>(0, "C0+a0", 0L), new KeyValueTimestamp<>(0, "C0+b0", 0L), new KeyValueTimestamp<>(1, "C1+a1", 0L), new KeyValueTimestamp<>(1, "C1+b1", 0L), new KeyValueTimestamp<>(2, "C2+b2", 0L), new KeyValueTimestamp<>(3, "C3+b3", 0L));
        // push two items into stream2; they join with the earlier A/B/C records
        // --> w2 = { 0:a0, 1:a1, 0:b0, 1:b1, 2:b2, 3:b3, 0:c0, 1:c1 }
        for (int i = 0; i < 2; i++) {
            inputTopic2.pipeInput(expectedKeys[i], "c" + expectedKeys[i]);
        }
        processor.checkAndClearProcessResult(new KeyValueTimestamp<>(0, "A0+c0", 0L), new KeyValueTimestamp<>(0, "B0+c0", 0L), new KeyValueTimestamp<>(0, "C0+c0", 0L), new KeyValueTimestamp<>(1, "A1+c1", 0L), new KeyValueTimestamp<>(1, "B1+c1", 0L), new KeyValueTimestamp<>(1, "C1+c1", 0L));
    }
}
Also used : MockApiProcessorSupplier(org.apache.kafka.test.MockApiProcessorSupplier) HashSet(java.util.HashSet) Set(java.util.Set) TopologyTestDriver(org.apache.kafka.streams.TopologyTestDriver) IntegerSerializer(org.apache.kafka.common.serialization.IntegerSerializer) StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) StringSerializer(org.apache.kafka.common.serialization.StringSerializer) Test(org.junit.Test)

Example 75 with IntegerSerializer

Use of org.apache.kafka.common.serialization.IntegerSerializer in the Apache Kafka project.

From the class KStreamKStreamJoinTest, method shouldLogAndMeterOnSkippedRecordsWithNullValueWithBuiltInMetricsVersionLatest.

@Test
public void shouldLogAndMeterOnSkippedRecordsWithNullValueWithBuiltInMetricsVersionLatest() {
    final StreamsBuilder builder = new StreamsBuilder();
    final KStream<String, Integer> left = builder.stream("left", Consumed.with(Serdes.String(), Serdes.Integer()));
    final KStream<String, Integer> right = builder.stream("right", Consumed.with(Serdes.String(), Serdes.Integer()));
    left.join(right, Integer::sum, JoinWindows.ofTimeDifferenceWithNoGrace(ofMillis(100)), StreamJoined.with(Serdes.String(), Serdes.Integer(), Serdes.Integer()));
    props.setProperty(StreamsConfig.BUILT_IN_METRICS_VERSION_CONFIG, StreamsConfig.METRICS_LATEST);
    try (final LogCaptureAppender appender = LogCaptureAppender.createAndRegister(KStreamKStreamJoin.class);
        final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        final TestInputTopic<String, Integer> inputTopic = driver.createInputTopic("left", new StringSerializer(), new IntegerSerializer());
        inputTopic.pipeInput("A", null);
        assertThat(appender.getMessages(), hasItem("Skipping record due to null key or value. topic=[left] partition=[0] offset=[0]"));
    }
}
Also used : StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) LogCaptureAppender(org.apache.kafka.streams.processor.internals.testutil.LogCaptureAppender) TopologyTestDriver(org.apache.kafka.streams.TopologyTestDriver) StringSerializer(org.apache.kafka.common.serialization.StringSerializer) IntegerSerializer(org.apache.kafka.common.serialization.IntegerSerializer) Test(org.junit.Test)
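
Outside of test code, IntegerSerializer is more often configured by class name on a producer than instantiated directly. A minimal sketch, assuming a broker at localhost:9092 and a topic named "numbers" (both are placeholders, not taken from the examples above):

import java.util.Properties;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.IntegerSerializer;
import org.apache.kafka.common.serialization.StringSerializer;

public class IntegerKeyProducerSketch {
    public static void main(final String[] args) {
        final Properties props = new Properties();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, IntegerSerializer.class.getName());
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());

        try (final KafkaProducer<Integer, String> producer = new KafkaProducer<>(props)) {
            // the Integer key is serialized to four big-endian bytes by IntegerSerializer
            producer.send(new ProducerRecord<>("numbers", 1, "one"));
        }
    }
}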

Aggregations

IntegerSerializer (org.apache.kafka.common.serialization.IntegerSerializer): 106
StringSerializer (org.apache.kafka.common.serialization.StringSerializer): 75
Test (org.junit.Test): 74
TopologyTestDriver (org.apache.kafka.streams.TopologyTestDriver): 72
StreamsBuilder (org.apache.kafka.streams.StreamsBuilder): 58
MockApiProcessorSupplier (org.apache.kafka.test.MockApiProcessorSupplier): 46
Properties (java.util.Properties): 22
IntegerDeserializer (org.apache.kafka.common.serialization.IntegerDeserializer): 16
KeyValueTimestamp (org.apache.kafka.streams.KeyValueTimestamp): 13
HashSet (java.util.HashSet): 11
Set (java.util.Set): 11
KeyValue (org.apache.kafka.streams.KeyValue): 10
LongSerializer (org.apache.kafka.common.serialization.LongSerializer): 9
StringDeserializer (org.apache.kafka.common.serialization.StringDeserializer): 9
Serdes (org.apache.kafka.common.serialization.Serdes): 8
TestInputTopic (org.apache.kafka.streams.TestInputTopic): 8
Consumed (org.apache.kafka.streams.kstream.Consumed): 8
KStream (org.apache.kafka.streams.kstream.KStream): 8
StreamsTestUtils (org.apache.kafka.test.StreamsTestUtils): 8
Duration (java.time.Duration): 6