Example 21 with IntegerSerializer

Use of org.apache.kafka.common.serialization.IntegerSerializer in project kafka by apache.

The class AbstractStreamTest, method testShouldBeExtensible.

@Test
public void testShouldBeExtensible() {
    final StreamsBuilder builder = new StreamsBuilder();
    final int[] expectedKeys = new int[] { 1, 2, 3, 4, 5, 6, 7 };
    final MockApiProcessorSupplier<Integer, String, Void, Void> supplier = new MockApiProcessorSupplier<>();
    final String topicName = "topic";
    final ExtendedKStream<Integer, String> stream = new ExtendedKStream<>(builder.stream(topicName, Consumed.with(Serdes.Integer(), Serdes.String())));
    stream.randomFilter().process(supplier);
    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build())) {
        final TestInputTopic<Integer, String> inputTopic = driver.createInputTopic(topicName, new IntegerSerializer(), new StringSerializer());
        for (final int expectedKey : expectedKeys) {
            inputTopic.pipeInput(expectedKey, "V" + expectedKey);
        }
        assertTrue(supplier.theCapturedProcessor().processed().size() <= expectedKeys.length);
    }
}
Also used: MockApiProcessorSupplier (org.apache.kafka.test.MockApiProcessorSupplier), TopologyTestDriver (org.apache.kafka.streams.TopologyTestDriver), IntegerSerializer (org.apache.kafka.common.serialization.IntegerSerializer), StreamsBuilder (org.apache.kafka.streams.StreamsBuilder), StringSerializer (org.apache.kafka.common.serialization.StringSerializer), Test (org.junit.Test)
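
Example 21 relies on an ExtendedKStream wrapper and its randomFilter() method, which are defined elsewhere in AbstractStreamTest and are not shown in the listing. A minimal sketch of what such a wrapper could look like (the class shape and the 50/50 filter are assumptions made for illustration, not the actual Kafka test code):

import java.util.Random;

import org.apache.kafka.streams.kstream.KStream;

// Hypothetical stand-in for the ExtendedKStream helper referenced in Example 21.
class ExtendedKStream<K, V> {
    private final KStream<K, V> stream;
    private final Random random = new Random();

    ExtendedKStream(final KStream<K, V> stream) {
        this.stream = stream;
    }

    // Drops each record with 50% probability; this is why the test can only assert that
    // processed().size() is <= expectedKeys.length rather than an exact count.
    KStream<K, V> randomFilter() {
        return stream.filter((key, value) -> random.nextBoolean());
    }
}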

Example 22 with IntegerSerializer

Use of org.apache.kafka.common.serialization.IntegerSerializer in project kafka by apache.

The class KStreamKStreamOuterJoinTest, method testWindowing.

@Test
public void testWindowing() {
    final StreamsBuilder builder = new StreamsBuilder();
    final int[] expectedKeys = new int[] { 0, 1, 2, 3 };
    final KStream<Integer, String> stream1;
    final KStream<Integer, String> stream2;
    final KStream<Integer, String> joined;
    final MockApiProcessorSupplier<Integer, String, Void, Void> supplier = new MockApiProcessorSupplier<>();
    stream1 = builder.stream(topic1, consumed);
    stream2 = builder.stream(topic2, consumed);
    joined = stream1.outerJoin(stream2, MockValueJoiner.TOSTRING_JOINER, JoinWindows.ofTimeDifferenceWithNoGrace(ofMillis(100)), StreamJoined.with(Serdes.Integer(), Serdes.String(), Serdes.String()));
    joined.process(supplier);
    final Collection<Set<String>> copartitionGroups = TopologyWrapper.getInternalTopologyBuilder(builder.build()).copartitionGroups();
    assertEquals(1, copartitionGroups.size());
    assertEquals(new HashSet<>(Arrays.asList(topic1, topic2)), copartitionGroups.iterator().next());
    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        final TestInputTopic<Integer, String> inputTopic1 = driver.createInputTopic(topic1, new IntegerSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
        final TestInputTopic<Integer, String> inputTopic2 = driver.createInputTopic(topic2, new IntegerSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
        final MockApiProcessor<Integer, String, Void, Void> processor = supplier.theCapturedProcessor();
        final long time = 0L;
        // push the first two items to the primary stream; the other window is still empty, so no join output is expected
        // --> w1 = { 0:A0 (ts: 0), 1:A1 (ts: 0) }
        // --> w2 = {}
        for (int i = 0; i < 2; i++) {
            inputTopic1.pipeInput(expectedKeys[i], "A" + expectedKeys[i], time);
        }
        processor.checkAndClearProcessResult();
        // push all four items to the other stream; only keys 0 and 1 have matches in the primary window, so two full-join items are produced
        // --> w2 = { 0:a0 (ts: 0), 1:a1 (ts: 0), 2:a2 (ts: 0), 3:a3 (ts: 0) }
        for (final int expectedKey : expectedKeys) {
            inputTopic2.pipeInput(expectedKey, "a" + expectedKey, time);
        }
        processor.checkAndClearProcessResult(new KeyValueTimestamp<>(0, "A0+a0", 0L), new KeyValueTimestamp<>(1, "A1+a1", 0L));
        testUpperWindowBound(expectedKeys, driver, processor);
        testLowerWindowBound(expectedKeys, driver, processor);
    }
}
Also used: MockApiProcessorSupplier (org.apache.kafka.test.MockApiProcessorSupplier), HashSet (java.util.HashSet), Set (java.util.Set), TopologyTestDriver (org.apache.kafka.streams.TopologyTestDriver), IntegerSerializer (org.apache.kafka.common.serialization.IntegerSerializer), StreamsBuilder (org.apache.kafka.streams.StreamsBuilder), StringSerializer (org.apache.kafka.common.serialization.StringSerializer), Test (org.junit.Test)
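
Examples 22 through 25 refer to topic1, topic2, consumed, and props fields that are declared elsewhere in KStreamKStreamOuterJoinTest (the real test uses helpers such as StreamsTestUtils for this). A plausible setup for those fields, with topic names and config values assumed only for this sketch:

import java.util.Properties;

import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.kstream.Consumed;

// Hypothetical fixture matching how Examples 22-25 use these fields.
class OuterJoinTestFixture {
    final String topic1 = "topic1";
    final String topic2 = "topic2";
    final Consumed<Integer, String> consumed = Consumed.with(Serdes.Integer(), Serdes.String());
    final Properties props = streamsConfig();

    private static Properties streamsConfig() {
        final Properties p = new Properties();
        // A minimal Streams config for TopologyTestDriver: an application id, a dummy
        // bootstrap server, and default serdes matching the Integer keys and String values.
        p.put(StreamsConfig.APPLICATION_ID_CONFIG, "outer-join-test");
        p.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "dummy:1234");
        p.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.Integer().getClass());
        p.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass());
        return p;
    }
}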

Example 23 with IntegerSerializer

Use of org.apache.kafka.common.serialization.IntegerSerializer in project kafka by apache.

The class KStreamKStreamOuterJoinTest, method testLowerWindowBound.

private void testLowerWindowBound(final int[] expectedKeys, final TopologyTestDriver driver, final MockApiProcessor<Integer, String, Void, Void> processor) {
    long time;
    final TestInputTopic<Integer, String> inputTopic1 = driver.createInputTopic(topic1, new IntegerSerializer(), new StringSerializer());
    // push four items with a smaller timestamp (before the window) to the primary stream; this should produce four left-join items and no full-join items
    // w1 = { 0:A0 (ts: 0), 1:A1 (ts: 0),
    // 0:B0 (ts: 1100), 1:B1 (ts: 1100), 2:B2 (ts: 1100), 3:B3 (ts: 1100),
    // 0:C0 (ts: 1101), 1:C1 (ts: 1101), 2:C2 (ts: 1101), 3:C3 (ts: 1101),
    // 0:D0 (ts: 1102), 1:D1 (ts: 1102), 2:D2 (ts: 1102), 3:D3 (ts: 1102),
    // 0:E0 (ts: 1103), 1:E1 (ts: 1103), 2:E2 (ts: 1103), 3:E3 (ts: 1103),
    // 0:F0 (ts: 1104), 1:F1 (ts: 1104), 2:F2 (ts: 1104), 3:F3 (ts: 1104) }
    // w2 = { 0:a0 (ts: 0), 1:a1 (ts: 0), 2:a2 (ts: 0), 3:a3 (ts: 0),
    // 0:b0 (ts: 1000), 1:b1 (ts: 1001), 2:b2 (ts: 1002), 3:b3 (ts: 1003) }
    // --> w1 = { 0:A0 (ts: 0), 1:A1 (ts: 0),
    // 0:B0 (ts: 1100), 1:B1 (ts: 1100), 2:B2 (ts: 1100), 3:B3 (ts: 1100),
    // 0:C0 (ts: 1101), 1:C1 (ts: 1101), 2:C2 (ts: 1101), 3:C3 (ts: 1101),
    // 0:D0 (ts: 1102), 1:D1 (ts: 1102), 2:D2 (ts: 1102), 3:D3 (ts: 1102),
    // 0:E0 (ts: 1103), 1:E1 (ts: 1103), 2:E2 (ts: 1103), 3:E3 (ts: 1103),
    // 0:F0 (ts: 1104), 1:F1 (ts: 1104), 2:F2 (ts: 1104), 3:F3 (ts: 1104),
    // 0:G0 (ts: 899), 1:G1 (ts: 899), 2:G2 (ts: 899), 3:G3 (ts: 899) }
    // --> w2 = { 0:a0 (ts: 0), 1:a1 (ts: 0), 2:a2 (ts: 0), 3:a3 (ts: 0),
    // 0:b0 (ts: 1000), 1:b1 (ts: 1001), 2:b2 (ts: 1002), 3:b3 (ts: 1003) }
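    // 1000 - 100 - 1 = 899 ms, one millisecond outside the 100 ms join window of b0 (ts 1000), so none of the G records can join yet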
    time = 1000L - 100L - 1L;
    for (final int expectedKey : expectedKeys) {
        inputTopic1.pipeInput(expectedKey, "G" + expectedKey, time);
    }
    processor.checkAndClearProcessResult(new KeyValueTimestamp<>(0, "G0+null", 899L), new KeyValueTimestamp<>(1, "G1+null", 899L), new KeyValueTimestamp<>(2, "G2+null", 899L), new KeyValueTimestamp<>(3, "G3+null", 899L));
    // push four items with increased timestamps to the primary stream; this should produce three left-join items and one full-join item
    // w1 = { 0:A0 (ts: 0), 1:A1 (ts: 0),
    // 0:B0 (ts: 1100), 1:B1 (ts: 1100), 2:B2 (ts: 1100), 3:B3 (ts: 1100),
    // 0:C0 (ts: 1101), 1:C1 (ts: 1101), 2:C2 (ts: 1101), 3:C3 (ts: 1101),
    // 0:D0 (ts: 1102), 1:D1 (ts: 1102), 2:D2 (ts: 1102), 3:D3 (ts: 1102),
    // 0:E0 (ts: 1103), 1:E1 (ts: 1103), 2:E2 (ts: 1103), 3:E3 (ts: 1103),
    // 0:F0 (ts: 1104), 1:F1 (ts: 1104), 2:F2 (ts: 1104), 3:F3 (ts: 1104),
    // 0:G0 (ts: 899), 1:G1 (ts: 899), 2:G2 (ts: 899), 3:G3 (ts: 899) }
    // w2 = { 0:a0 (ts: 0), 1:a1 (ts: 0), 2:a2 (ts: 0), 3:a3 (ts: 0),
    // 0:b0 (ts: 1000), 1:b1 (ts: 1001), 2:b2 (ts: 1002), 3:b3 (ts: 1003) }
    // --> w1 = { 0:A0 (ts: 0), 1:A1 (ts: 0),
    // 0:B0 (ts: 1100), 1:B1 (ts: 1100), 2:B2 (ts: 1100), 3:B3 (ts: 1100),
    // 0:C0 (ts: 1101), 1:C1 (ts: 1101), 2:C2 (ts: 1101), 3:C3 (ts: 1101),
    // 0:D0 (ts: 1102), 1:D1 (ts: 1102), 2:D2 (ts: 1102), 3:D3 (ts: 1102),
    // 0:E0 (ts: 1103), 1:E1 (ts: 1103), 2:E2 (ts: 1103), 3:E3 (ts: 1103),
    // 0:F0 (ts: 1104), 1:F1 (ts: 1104), 2:F2 (ts: 1104), 3:F3 (ts: 1104),
    // 0:G0 (ts: 899), 1:G1 (ts: 899), 2:G2 (ts: 899), 3:G3 (ts: 899),
    // 0:H0 (ts: 900), 1:H1 (ts: 900), 2:H2 (ts: 900), 3:H3 (ts: 900) }
    // --> w2 = { 0:a0 (ts: 0), 1:a1 (ts: 0), 2:a2 (ts: 0), 3:a3 (ts: 0),
    // 0:b0 (ts: 1000), 1:b1 (ts: 1001), 2:b2 (ts: 1002), 3:b3 (ts: 1003) }
    time += 1L;
    for (final int expectedKey : expectedKeys) {
        inputTopic1.pipeInput(expectedKey, "H" + expectedKey, time);
    }
    processor.checkAndClearProcessResult(new KeyValueTimestamp<>(0, "H0+b0", 1000L), new KeyValueTimestamp<>(1, "H1+null", 900L), new KeyValueTimestamp<>(2, "H2+null", 900L), new KeyValueTimestamp<>(3, "H3+null", 900L));
    // push four items with increased timestamps to the primary stream; this should produce two left-join and two full-join items
    // w1 = { 0:A0 (ts: 0), 1:A1 (ts: 0),
    // 0:B0 (ts: 1100), 1:B1 (ts: 1100), 2:B2 (ts: 1100), 3:B3 (ts: 1100),
    // 0:C0 (ts: 1101), 1:C1 (ts: 1101), 2:C2 (ts: 1101), 3:C3 (ts: 1101),
    // 0:D0 (ts: 1102), 1:D1 (ts: 1102), 2:D2 (ts: 1102), 3:D3 (ts: 1102),
    // 0:E0 (ts: 1103), 1:E1 (ts: 1103), 2:E2 (ts: 1103), 3:E3 (ts: 1103),
    // 0:F0 (ts: 1104), 1:F1 (ts: 1104), 2:F2 (ts: 1104), 3:F3 (ts: 1104),
    // 0:G0 (ts: 899), 1:G1 (ts: 899), 2:G2 (ts: 899), 3:G3 (ts: 899),
    // 0:H0 (ts: 900), 1:H1 (ts: 900), 2:H2 (ts: 900), 3:H3 (ts: 900) }
    // w2 = { 0:a0 (ts: 0), 1:a1 (ts: 0), 2:a2 (ts: 0), 3:a3 (ts: 0),
    // 0:b0 (ts: 1000), 1:b1 (ts: 1001), 2:b2 (ts: 1002), 3:b3 (ts: 1003) }
    // --> w1 = { 0:A0 (ts: 0), 1:A1 (ts: 0),
    // 0:B0 (ts: 1100), 1:B1 (ts: 1100), 2:B2 (ts: 1100), 3:B3 (ts: 1100),
    // 0:C0 (ts: 1101), 1:C1 (ts: 1101), 2:C2 (ts: 1101), 3:C3 (ts: 1101),
    // 0:D0 (ts: 1102), 1:D1 (ts: 1102), 2:D2 (ts: 1102), 3:D3 (ts: 1102),
    // 0:E0 (ts: 1103), 1:E1 (ts: 1103), 2:E2 (ts: 1103), 3:E3 (ts: 1103),
    // 0:F0 (ts: 1104), 1:F1 (ts: 1104), 2:F2 (ts: 1104), 3:F3 (ts: 1104),
    // 0:G0 (ts: 899), 1:G1 (ts: 899), 2:G2 (ts: 899), 3:G3 (ts: 899),
    // 0:H0 (ts: 900), 1:H1 (ts: 900), 2:H2 (ts: 900), 3:H3 (ts: 900),
    // 0:I0 (ts: 901), 1:I1 (ts: 901), 2:I2 (ts: 901), 3:I3 (ts: 901) }
    // --> w2 = { 0:a0 (ts: 0), 1:a1 (ts: 0), 2:a2 (ts: 0), 3:a3 (ts: 0),
    // 0:b0 (ts: 1000), 1:b1 (ts: 1001), 2:b2 (ts: 1002), 3:b3 (ts: 1003) }
    time += 1L;
    for (final int expectedKey : expectedKeys) {
        inputTopic1.pipeInput(expectedKey, "I" + expectedKey, time);
    }
    processor.checkAndClearProcessResult(new KeyValueTimestamp<>(0, "I0+b0", 1000L), new KeyValueTimestamp<>(1, "I1+b1", 1001L), new KeyValueTimestamp<>(2, "I2+null", 901L), new KeyValueTimestamp<>(3, "I3+null", 901L));
    // push four items with increased timestamps to the primary stream; this should produce one left-join item and three full-join items
    // w1 = { 0:A0 (ts: 0), 1:A1 (ts: 0),
    // 0:B0 (ts: 1100), 1:B1 (ts: 1100), 2:B2 (ts: 1100), 3:B3 (ts: 1100),
    // 0:C0 (ts: 1101), 1:C1 (ts: 1101), 2:C2 (ts: 1101), 3:C3 (ts: 1101),
    // 0:D0 (ts: 1102), 1:D1 (ts: 1102), 2:D2 (ts: 1102), 3:D3 (ts: 1102),
    // 0:E0 (ts: 1103), 1:E1 (ts: 1103), 2:E2 (ts: 1103), 3:E3 (ts: 1103),
    // 0:F0 (ts: 1104), 1:F1 (ts: 1104), 2:F2 (ts: 1104), 3:F3 (ts: 1104),
    // 0:G0 (ts: 899), 1:G1 (ts: 899), 2:G2 (ts: 899), 3:G3 (ts: 899),
    // 0:H0 (ts: 900), 1:H1 (ts: 900), 2:H2 (ts: 900), 3:H3 (ts: 900),
    // 0:I0 (ts: 901), 1:I1 (ts: 901), 2:I2 (ts: 901), 3:I3 (ts: 901) }
    // w2 = { 0:a0 (ts: 0), 1:a1 (ts: 0), 2:a2 (ts: 0), 3:a3 (ts: 0),
    // 0:b0 (ts: 1000), 1:b1 (ts: 1001), 2:b2 (ts: 1002), 3:b3 (ts: 1003) }
    // --> w1 = { 0:A0 (ts: 0), 1:A1 (ts: 0),
    // 0:B0 (ts: 1100), 1:B1 (ts: 1100), 2:B2 (ts: 1100), 3:B3 (ts: 1100),
    // 0:C0 (ts: 1101), 1:C1 (ts: 1101), 2:C2 (ts: 1101), 3:C3 (ts: 1101),
    // 0:D0 (ts: 1102), 1:D1 (ts: 1102), 2:D2 (ts: 1102), 3:D3 (ts: 1102),
    // 0:E0 (ts: 1103), 1:E1 (ts: 1103), 2:E2 (ts: 1103), 3:E3 (ts: 1103),
    // 0:F0 (ts: 1104), 1:F1 (ts: 1104), 2:F2 (ts: 1104), 3:F3 (ts: 1104),
    // 0:G0 (ts: 899), 1:G1 (ts: 899), 2:G2 (ts: 899), 3:G3 (ts: 899),
    // 0:H0 (ts: 900), 1:H1 (ts: 900), 2:H2 (ts: 900), 3:H3 (ts: 900),
    // 0:I0 (ts: 901), 1:I1 (ts: 901), 2:I2 (ts: 901), 3:I3 (ts: 901),
    // 0:J0 (ts: 902), 1:J1 (ts: 902), 2:J2 (ts: 902), 3:J3 (ts: 902) }
    // --> w2 = { 0:a0 (ts: 0), 1:a1 (ts: 0), 2:a2 (ts: 0), 3:a3 (ts: 0),
    // 0:b0 (ts: 1000), 1:b1 (ts: 1001), 2:b2 (ts: 1002), 3:b3 (ts: 1003) }
    time += 1L;
    for (final int expectedKey : expectedKeys) {
        inputTopic1.pipeInput(expectedKey, "J" + expectedKey, time);
    }
    processor.checkAndClearProcessResult(new KeyValueTimestamp<>(0, "J0+b0", 1000L), new KeyValueTimestamp<>(1, "J1+b1", 1001L), new KeyValueTimestamp<>(2, "J2+b2", 1002L), new KeyValueTimestamp<>(3, "J3+null", 902L));
    // push four items with increased timestamps to the primary stream; this should produce no left-join items and four full-join items
    // w1 = { 0:A0 (ts: 0), 1:A1 (ts: 0),
    // 0:B0 (ts: 1100), 1:B1 (ts: 1100), 2:B2 (ts: 1100), 3:B3 (ts: 1100),
    // 0:C0 (ts: 1101), 1:C1 (ts: 1101), 2:C2 (ts: 1101), 3:C3 (ts: 1101),
    // 0:D0 (ts: 1102), 1:D1 (ts: 1102), 2:D2 (ts: 1102), 3:D3 (ts: 1102),
    // 0:E0 (ts: 1103), 1:E1 (ts: 1103), 2:E2 (ts: 1103), 3:E3 (ts: 1103),
    // 0:F0 (ts: 1104), 1:F1 (ts: 1104), 2:F2 (ts: 1104), 3:F3 (ts: 1104),
    // 0:G0 (ts: 899), 1:G1 (ts: 899), 2:G2 (ts: 899), 3:G3 (ts: 899),
    // 0:H0 (ts: 900), 1:H1 (ts: 900), 2:H2 (ts: 900), 3:H3 (ts: 900),
    // 0:I0 (ts: 901), 1:I1 (ts: 901), 2:I2 (ts: 901), 3:I3 (ts: 901),
    // 0:J0 (ts: 902), 1:J1 (ts: 902), 2:J2 (ts: 902), 3:J3 (ts: 902) }
    // w2 = { 0:a0 (ts: 0), 1:a1 (ts: 0), 2:a2 (ts: 0), 3:a3 (ts: 0),
    // 0:b0 (ts: 1000), 1:b1 (ts: 1001), 2:b2 (ts: 1002), 3:b3 (ts: 1003) }
    // --> w1 = { 0:A0 (ts: 0), 1:A1 (ts: 0),
    // 0:B0 (ts: 1100), 1:B1 (ts: 1100), 2:B2 (ts: 1100), 3:B3 (ts: 1100),
    // 0:C0 (ts: 1101), 1:C1 (ts: 1101), 2:C2 (ts: 1101), 3:C3 (ts: 1101),
    // 0:D0 (ts: 1102), 1:D1 (ts: 1102), 2:D2 (ts: 1102), 3:D3 (ts: 1102),
    // 0:E0 (ts: 1103), 1:E1 (ts: 1103), 2:E2 (ts: 1103), 3:E3 (ts: 1103),
    // 0:F0 (ts: 1104), 1:F1 (ts: 1104), 2:F2 (ts: 1104), 3:F3 (ts: 1104),
    // 0:G0 (ts: 899), 1:G1 (ts: 899), 2:G2 (ts: 899), 3:G3 (ts: 899),
    // 0:H0 (ts: 900), 1:H1 (ts: 900), 2:H2 (ts: 900), 3:H3 (ts: 900),
    // 0:I0 (ts: 901), 1:I1 (ts: 901), 2:I2 (ts: 901), 3:I3 (ts: 901),
    // 0:J0 (ts: 902), 1:J1 (ts: 902), 2:J2 (ts: 902), 3:J3 (ts: 902),
    // 0:K0 (ts: 903), 1:K1 (ts: 903), 2:K2 (ts: 903), 3:K3 (ts: 903) }
    // --> w2 = { 0:a0 (ts: 0), 1:a1 (ts: 0), 2:a2 (ts: 0), 3:a3 (ts: 0),
    // 0:b0 (ts: 1000), 1:b1 (ts: 1001), 2:b2 (ts: 1002), 3:b3 (ts: 1003) }
    time += 1L;
    for (final int expectedKey : expectedKeys) {
        inputTopic1.pipeInput(expectedKey, "K" + expectedKey, time);
    }
    processor.checkAndClearProcessResult(new KeyValueTimestamp<>(0, "K0+b0", 1000L), new KeyValueTimestamp<>(1, "K1+b1", 1001L), new KeyValueTimestamp<>(2, "K2+b2", 1002L), new KeyValueTimestamp<>(3, "K3+b3", 1003L));
    // push a dummy record to verify there are no previously expired records left to emit;
    // the dummy's own window (ts 1103) is already behind the max. stream time seen (1205, reached in testUpperWindowBound), so only the dummy itself is emitted as a non-joined record
    inputTopic1.pipeInput(0, "dummy", time + 200L);
    processor.checkAndClearProcessResult(new KeyValueTimestamp<>(0, "dummy+null", 1103L));
}
Also used: IntegerSerializer (org.apache.kafka.common.serialization.IntegerSerializer), StringSerializer (org.apache.kafka.common.serialization.StringSerializer)

Example 24 with IntegerSerializer

Use of org.apache.kafka.common.serialization.IntegerSerializer in project kafka by apache.

The class KStreamKStreamOuterJoinTest, method testLeftExpiredNonJoinedRecordsAreEmittedByTheRightProcessor.

@Test
public void testLeftExpiredNonJoinedRecordsAreEmittedByTheRightProcessor() {
    final StreamsBuilder builder = new StreamsBuilder();
    final KStream<Integer, String> stream1;
    final KStream<Integer, String> stream2;
    final KStream<Integer, String> joined;
    final MockApiProcessorSupplier<Integer, String, Void, Void> supplier = new MockApiProcessorSupplier<>();
    stream1 = builder.stream(topic1, consumed);
    stream2 = builder.stream(topic2, consumed);
    joined = stream1.outerJoin(stream2, MockValueJoiner.TOSTRING_JOINER, JoinWindows.ofTimeDifferenceAndGrace(ofMillis(100L), ofMillis(0L)), StreamJoined.with(Serdes.Integer(), Serdes.String(), Serdes.String()));
    joined.process(supplier);
    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        final TestInputTopic<Integer, String> inputTopic1 = driver.createInputTopic(topic1, new IntegerSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
        final TestInputTopic<Integer, String> inputTopic2 = driver.createInputTopic(topic2, new IntegerSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
        final MockApiProcessor<Integer, String, Void, Void> processor = supplier.theCapturedProcessor();
        final long windowStart = 0L;
        // No joins detected; No null-joins emitted
        inputTopic1.pipeInput(0, "A0", windowStart + 1L);
        inputTopic1.pipeInput(1, "A1", windowStart + 2L);
        inputTopic1.pipeInput(0, "A0-0", windowStart + 3L);
        processor.checkAndClearProcessResult();
        // Join detected; No null-joins emitted
        inputTopic2.pipeInput(1, "a1", windowStart + 3L);
        processor.checkAndClearProcessResult(new KeyValueTimestamp<>(1, "A1+a1", windowStart + 3L));
        // Dummy record in right topic will emit expired non-joined records from the left topic
        inputTopic2.pipeInput(2, "dummy", windowStart + 401L);
        processor.checkAndClearProcessResult(new KeyValueTimestamp<>(0, "A0+null", windowStart + 1L), new KeyValueTimestamp<>(0, "A0-0+null", windowStart + 3L));
        // Flush internal non-joined state store by joining the dummy record
        inputTopic1.pipeInput(2, "dummy", windowStart + 402L);
        processor.checkAndClearProcessResult(new KeyValueTimestamp<>(2, "dummy+dummy", windowStart + 402L));
    }
}
Also used: MockApiProcessorSupplier (org.apache.kafka.test.MockApiProcessorSupplier), TopologyTestDriver (org.apache.kafka.streams.TopologyTestDriver), IntegerSerializer (org.apache.kafka.common.serialization.IntegerSerializer), StreamsBuilder (org.apache.kafka.streams.StreamsBuilder), StringSerializer (org.apache.kafka.common.serialization.StringSerializer), Test (org.junit.Test)
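
These examples capture the join results with MockApiProcessor, a helper from Kafka's internal test sources. In a standalone test the same outer-join behaviour can be observed through a TestOutputTopic instead; a minimal sketch, with topic names, application id, and record values chosen only for illustration:

import static java.time.Duration.ofMillis;

import java.time.Duration;
import java.time.Instant;
import java.util.Properties;

import org.apache.kafka.common.serialization.IntegerDeserializer;
import org.apache.kafka.common.serialization.IntegerSerializer;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.TestInputTopic;
import org.apache.kafka.streams.TestOutputTopic;
import org.apache.kafka.streams.TopologyTestDriver;
import org.apache.kafka.streams.kstream.Consumed;
import org.apache.kafka.streams.kstream.JoinWindows;
import org.apache.kafka.streams.kstream.Produced;
import org.apache.kafka.streams.kstream.StreamJoined;

public class OuterJoinOutputTopicSketch {
    public static void main(final String[] args) {
        final Consumed<Integer, String> consumed = Consumed.with(Serdes.Integer(), Serdes.String());
        final StreamsBuilder builder = new StreamsBuilder();
        builder.stream("left", consumed)
               .outerJoin(builder.stream("right", consumed),
                          (v1, v2) -> v1 + "+" + v2,
                          JoinWindows.ofTimeDifferenceAndGrace(ofMillis(100L), ofMillis(0L)),
                          StreamJoined.with(Serdes.Integer(), Serdes.String(), Serdes.String()))
               .to("joined", Produced.with(Serdes.Integer(), Serdes.String()));

        final Properties props = new Properties();
        props.put(StreamsConfig.APPLICATION_ID_CONFIG, "outer-join-output-sketch");
        props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "dummy:1234");

        try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
            final TestInputTopic<Integer, String> left = driver.createInputTopic("left",
                new IntegerSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
            final TestInputTopic<Integer, String> right = driver.createInputTopic("right",
                new IntegerSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
            final TestOutputTopic<Integer, String> joined = driver.createOutputTopic("joined",
                new IntegerDeserializer(), new StringDeserializer());

            left.pipeInput(0, "A0", 1L);        // never finds a partner
            left.pipeInput(1, "A1", 2L);
            right.pipeInput(1, "a1", 3L);       // joins with A1 within the 100 ms window -> "A1+a1"
            right.pipeInput(2, "dummy", 401L);  // advances stream time past A0's window, flushing "A0+null"

            // Expect the joined record first, then the expired non-joined record
            // (the dummy itself has not expired yet, so it is not emitted).
            joined.readKeyValuesToList().forEach(System.out::println);
        }
    }
}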

Example 25 with IntegerSerializer

Use of org.apache.kafka.common.serialization.IntegerSerializer in project kafka by apache.

The class KStreamKStreamOuterJoinTest, method testGracePeriod.

@Test
public void testGracePeriod() {
    final StreamsBuilder builder = new StreamsBuilder();
    final KStream<Integer, String> stream1;
    final KStream<Integer, String> stream2;
    final KStream<Integer, String> joined;
    final MockApiProcessorSupplier<Integer, String, Void, Void> supplier = new MockApiProcessorSupplier<>();
    stream1 = builder.stream(topic1, consumed);
    stream2 = builder.stream(topic2, consumed);
    joined = stream1.outerJoin(stream2, MockValueJoiner.TOSTRING_JOINER, JoinWindows.ofTimeDifferenceAndGrace(ofMillis(100), ofMillis(10)), StreamJoined.with(Serdes.Integer(), Serdes.String(), Serdes.String()));
    joined.process(supplier);
    final Collection<Set<String>> copartitionGroups = TopologyWrapper.getInternalTopologyBuilder(builder.build()).copartitionGroups();
    assertEquals(1, copartitionGroups.size());
    assertEquals(new HashSet<>(Arrays.asList(topic1, topic2)), copartitionGroups.iterator().next());
    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        final TestInputTopic<Integer, String> inputTopic1 = driver.createInputTopic(topic1, new IntegerSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
        final TestInputTopic<Integer, String> inputTopic2 = driver.createInputTopic(topic2, new IntegerSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
        final MockApiProcessor<Integer, String, Void, Void> processor = supplier.theCapturedProcessor();
        // push one item to the primary stream and one item to the other stream; this should not produce any items because there are no joins
        // and the window has not ended
        // w1 = {}
        // w2 = {}
        // --> w1 = { 0:A0 (ts: 0) }
        // --> w2 = { 1:a1 (ts: 0) }
        inputTopic1.pipeInput(0, "A0", 0L);
        inputTopic2.pipeInput(1, "a1", 0L);
        processor.checkAndClearProcessResult();
        // push one item to each stream with a timestamp after the previous window ended (but not yet closed); this should produce no
        // joined records because the window has ended, and no non-joined records because the window has not yet closed.
        // w1 = { 0:A0 (ts: 0) }
        // w2 = { 1:a1 (ts: 0) }
        // --> w1 = { 0:A0 (ts: 0), 1:A1 (ts: 101) }
        // --> w2 = { 1:a1 (ts: 0), 0:a0 (ts: 101) }
        inputTopic2.pipeInput(0, "a0", 101L);
        inputTopic1.pipeInput(1, "A1", 101L);
        processor.checkAndClearProcessResult();
        // push a dummy item to either stream after the window is closed; this should produce all expired non-joined records because
        // the window has closed
        // w1 = { 0:A0 (ts: 0), 1:A1 (ts: 101) }
        // w2 = { 1:a1 (ts: 0), 0:a0 (ts: 101) }
        // --> w1 = { 0:A0 (ts: 0), 1:A1 (ts: 101) }
        // --> w2 = { 1:a1 (ts: 0), 0:a0 (ts: 101), 0:dummy (ts: 211) }
        inputTopic2.pipeInput(0, "dummy", 211);
        processor.checkAndClearProcessResult(new KeyValueTimestamp<>(1, "null+a1", 0L), new KeyValueTimestamp<>(0, "A0+null", 0L));
    }
}
Also used: MockApiProcessorSupplier (org.apache.kafka.test.MockApiProcessorSupplier), HashSet (java.util.HashSet), Set (java.util.Set), TopologyTestDriver (org.apache.kafka.streams.TopologyTestDriver), IntegerSerializer (org.apache.kafka.common.serialization.IntegerSerializer), StreamsBuilder (org.apache.kafka.streams.StreamsBuilder), StringSerializer (org.apache.kafka.common.serialization.StringSerializer), Test (org.junit.Test)
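
On the timing in Example 25: with JoinWindows.ofTimeDifferenceAndGrace(ofMillis(100), ofMillis(10)), the records at ts 0 stop finding join partners once stream time passes the 100 ms window, but they are only flushed as non-joined results after the grace period has also elapsed. That is why nothing is emitted when stream time reaches 101, and why both A0+null and null+a1 appear once the dummy record arrives at ts 211. A small sketch of reading those window parameters back (class name and printout are illustrative only):

import static java.time.Duration.ofMillis;

import org.apache.kafka.streams.kstream.JoinWindows;

public class JoinWindowParameters {
    public static void main(final String[] args) {
        final JoinWindows windows = JoinWindows.ofTimeDifferenceAndGrace(ofMillis(100), ofMillis(10));
        // size() is the total span of the window (before + after = 200 ms);
        // gracePeriodMs() is how long out-of-order records are still accepted (10 ms).
        System.out.println("window size (ms):  " + windows.size());
        System.out.println("grace period (ms): " + windows.gracePeriodMs());
    }
}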

Aggregations

IntegerSerializer (org.apache.kafka.common.serialization.IntegerSerializer): 86
Test (org.junit.Test): 65
StringSerializer (org.apache.kafka.common.serialization.StringSerializer): 64
TopologyTestDriver (org.apache.kafka.streams.TopologyTestDriver): 61
StreamsBuilder (org.apache.kafka.streams.StreamsBuilder): 53
MockApiProcessorSupplier (org.apache.kafka.test.MockApiProcessorSupplier): 42
Properties (java.util.Properties): 17
KeyValueTimestamp (org.apache.kafka.streams.KeyValueTimestamp): 13
HashSet (java.util.HashSet): 11
Set (java.util.Set): 11
LongSerializer (org.apache.kafka.common.serialization.LongSerializer): 8
Serdes (org.apache.kafka.common.serialization.Serdes): 8
KeyValue (org.apache.kafka.streams.KeyValue): 8
TestInputTopic (org.apache.kafka.streams.TestInputTopic): 8
Consumed (org.apache.kafka.streams.kstream.Consumed): 8
KStream (org.apache.kafka.streams.kstream.KStream): 8
StreamsTestUtils (org.apache.kafka.test.StreamsTestUtils): 8
Duration (java.time.Duration): 5
Instant (java.time.Instant): 5
ArrayList (java.util.ArrayList): 5
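
Every example above hands an IntegerSerializer (and a StringSerializer) to createInputTopic, where it converts the record keys to bytes before they enter the topology under test. Used directly, the serializer round-trips with IntegerDeserializer; the topic name below is arbitrary:

import java.util.Arrays;

import org.apache.kafka.common.serialization.IntegerDeserializer;
import org.apache.kafka.common.serialization.IntegerSerializer;

public class IntegerSerializerRoundTrip {
    public static void main(final String[] args) {
        final IntegerSerializer serializer = new IntegerSerializer();
        final IntegerDeserializer deserializer = new IntegerDeserializer();

        // Serializes the key 42 into its 4-byte big-endian representation: [0, 0, 0, 42].
        final byte[] bytes = serializer.serialize("any-topic", 42);
        System.out.println(Arrays.toString(bytes));

        // Round-trips back to the original Integer value.
        System.out.println(deserializer.deserialize("any-topic", bytes));
    }
}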