Example 16 with KeyValueTimestamp

Use of org.apache.kafka.streams.KeyValueTimestamp in project kafka by apache.

The class SuppressionIntegrationTest, method shouldShutdownWhenBytesConstraintIsViolated.

@Test
public void shouldShutdownWhenBytesConstraintIsViolated() throws InterruptedException {
    final String testId = "-shouldShutdownWhenBytesConstraintIsViolated";
    final String appId = getClass().getSimpleName().toLowerCase(Locale.getDefault()) + testId;
    final String input = "input" + testId;
    final String outputSuppressed = "output-suppressed" + testId;
    final String outputRaw = "output-raw" + testId;
    cleanStateBeforeTest(CLUSTER, input, outputRaw, outputSuppressed);
    final StreamsBuilder builder = new StreamsBuilder();
    final KTable<String, Long> valueCounts = buildCountsTable(input, builder);
    valueCounts
        .suppress(untilTimeLimit(ofMillis(MAX_VALUE), maxBytes(200L).shutDownWhenFull()))
        .toStream()
        .to(outputSuppressed, Produced.with(STRING_SERDE, Serdes.Long()));
    valueCounts.toStream().to(outputRaw, Produced.with(STRING_SERDE, Serdes.Long()));
    final Properties streamsConfig = getStreamsConfig(appId);
    final KafkaStreams driver = IntegrationTestUtils.getStartedStreams(streamsConfig, builder, true);
    try {
        produceSynchronously(
            input,
            asList(
                new KeyValueTimestamp<>("k1", "v1", scaledTime(0L)),
                new KeyValueTimestamp<>("k1", "v2", scaledTime(1L)),
                new KeyValueTimestamp<>("k2", "v1", scaledTime(2L)),
                new KeyValueTimestamp<>("x", "x", scaledTime(3L))));
        verifyErrorShutdown(driver);
    } finally {
        driver.close();
        quietlyCleanStateAfterTest(CLUSTER, driver);
    }
}
Also used : StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) KafkaStreams(org.apache.kafka.streams.KafkaStreams) Utils.mkProperties(org.apache.kafka.common.utils.Utils.mkProperties) Properties(java.util.Properties) KeyValueTimestamp(org.apache.kafka.streams.KeyValueTimestamp) IntegrationTestUtils.quietlyCleanStateAfterTest(org.apache.kafka.streams.integration.utils.IntegrationTestUtils.quietlyCleanStateAfterTest) IntegrationTest(org.apache.kafka.test.IntegrationTest) Test(org.junit.Test) IntegrationTestUtils.cleanStateBeforeTest(org.apache.kafka.streams.integration.utils.IntegrationTestUtils.cleanStateBeforeTest)
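
For orientation, KeyValueTimestamp itself is just an immutable key/value/timestamp triple with value-based equality, which is what lets these tests compare produced and expected records directly. A minimal, self-contained sketch (standalone, using JUnit's assertEquals; not tied to the test above):

// KeyValueTimestamp bundles a key, a value, and a record timestamp in milliseconds.
final KeyValueTimestamp<String, String> record = new KeyValueTimestamp<>("k1", "v1", 0L);
assertEquals("k1", record.key());
assertEquals("v1", record.value());
assertEquals(0L, record.timestamp());
// equals()/hashCode() compare all three fields, so whole records can be asserted at once.
assertEquals(new KeyValueTimestamp<>("k1", "v1", 0L), record);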

Example 17 with KeyValueTimestamp

Use of org.apache.kafka.streams.KeyValueTimestamp in project kafka by apache.

The class KTableMapValuesTest, method doTestKTable.

private void doTestKTable(final StreamsBuilder builder, final String topic1, final MockApiProcessorSupplier<String, Integer, Void, Void> supplier) {
    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        final TestInputTopic<String, String> inputTopic1 = driver.createInputTopic(topic1, new StringSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
        inputTopic1.pipeInput("A", "1", 5L);
        inputTopic1.pipeInput("B", "2", 25L);
        inputTopic1.pipeInput("C", "3", 20L);
        inputTopic1.pipeInput("D", "4", 10L);
        assertEquals(
            asList(
                new KeyValueTimestamp<>("A", 1, 5),
                new KeyValueTimestamp<>("B", 2, 25),
                new KeyValueTimestamp<>("C", 3, 20),
                new KeyValueTimestamp<>("D", 4, 10)),
            supplier.theCapturedProcessor().processed());
    }
}
Also used : TopologyTestDriver(org.apache.kafka.streams.TopologyTestDriver) StringSerializer(org.apache.kafka.common.serialization.StringSerializer) KeyValueTimestamp(org.apache.kafka.streams.KeyValueTimestamp)
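
The mock processor above captures forwarded records in memory; as an alternative, here is a minimal sketch using only the public test-utils API (hypothetical topic names, reusing the test's props) that pipes one record through a comparable mapValues topology and checks the same key/value/timestamp triple via a TestOutputTopic:

// Sketch only; assumes StringDeserializer/IntegerDeserializer and TestOutputTopic/TestRecord
// from kafka-streams-test-utils are also imported.
final StreamsBuilder sketchBuilder = new StreamsBuilder();
sketchBuilder.table("map-input", Consumed.with(Serdes.String(), Serdes.String()))
    .mapValues(Integer::valueOf)
    .toStream()
    .to("map-output", Produced.with(Serdes.String(), Serdes.Integer()));
try (final TopologyTestDriver sketchDriver = new TopologyTestDriver(sketchBuilder.build(), props)) {
    final TestInputTopic<String, String> in =
        sketchDriver.createInputTopic("map-input", new StringSerializer(), new StringSerializer());
    final TestOutputTopic<String, Integer> out =
        sketchDriver.createOutputTopic("map-output", new StringDeserializer(), new IntegerDeserializer());
    in.pipeInput("A", "1", 5L);
    final TestRecord<String, Integer> record = out.readRecord();
    // The output record carries the input record's timestamp through the stateless mapping.
    assertEquals(new KeyValueTimestamp<>("A", 1, 5L),
        new KeyValueTimestamp<>(record.key(), record.value(), record.timestamp()));
}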

Example 18 with KeyValueTimestamp

Use of org.apache.kafka.streams.KeyValueTimestamp in project kafka by apache.

The class KTableMapValuesTest, method testNotSendingOldValue.

@Test
public void testNotSendingOldValue() {
    final StreamsBuilder builder = new StreamsBuilder();
    final String topic1 = "topic1";
    final KTableImpl<String, String, String> table1 = (KTableImpl<String, String, String>) builder.table(topic1, consumed);
    final KTableImpl<String, String, Integer> table2 = (KTableImpl<String, String, Integer>) table1.mapValues(s -> Integer.valueOf(s));
    final MockApiProcessorSupplier<String, Integer, Void, Void> supplier = new MockApiProcessorSupplier<>();
    final Topology topology = builder.build().addProcessor("proc", supplier, table2.name);
    try (final TopologyTestDriver driver = new TopologyTestDriver(topology, props)) {
        final TestInputTopic<String, String> inputTopic1 = driver.createInputTopic(topic1, new StringSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
        final MockApiProcessor<String, Integer, Void, Void> proc = supplier.theCapturedProcessor();
        assertFalse(table1.sendingOldValueEnabled());
        assertFalse(table2.sendingOldValueEnabled());
        inputTopic1.pipeInput("A", "01", 5L);
        inputTopic1.pipeInput("B", "01", 10L);
        inputTopic1.pipeInput("C", "01", 15L);
        proc.checkAndClearProcessResult(new KeyValueTimestamp<>("A", new Change<>(1, null), 5), new KeyValueTimestamp<>("B", new Change<>(1, null), 10), new KeyValueTimestamp<>("C", new Change<>(1, null), 15));
        inputTopic1.pipeInput("A", "02", 10L);
        inputTopic1.pipeInput("B", "02", 8L);
        proc.checkAndClearProcessResult(new KeyValueTimestamp<>("A", new Change<>(2, null), 10), new KeyValueTimestamp<>("B", new Change<>(2, null), 8));
        inputTopic1.pipeInput("A", "03", 20L);
        proc.checkAndClearProcessResult(new KeyValueTimestamp<>("A", new Change<>(3, null), 20));
        inputTopic1.pipeInput("A", (String) null, 30L);
        proc.checkAndClearProcessResult(new KeyValueTimestamp<>("A", new Change<>(null, null), 30));
    }
}
Also used : ValueAndTimestamp(org.apache.kafka.streams.state.ValueAndTimestamp) MockApiProcessor(org.apache.kafka.test.MockApiProcessor) MockApiProcessorSupplier(org.apache.kafka.test.MockApiProcessorSupplier) Arrays.asList(java.util.Arrays.asList) TopologyWrapper(org.apache.kafka.streams.TopologyWrapper) KeyValueStore(org.apache.kafka.streams.state.KeyValueStore) Duration(java.time.Duration) Serdes(org.apache.kafka.common.serialization.Serdes) StringSerializer(org.apache.kafka.common.serialization.StringSerializer) MatcherAssert.assertThat(org.hamcrest.MatcherAssert.assertThat) TopologyTestDriver(org.apache.kafka.streams.TopologyTestDriver) StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) TopologyTestDriverWrapper(org.apache.kafka.streams.TopologyTestDriverWrapper) KTable(org.apache.kafka.streams.kstream.KTable) Properties(java.util.Properties) Consumed(org.apache.kafka.streams.kstream.Consumed) Test(org.junit.Test) Instant(java.time.Instant) KeyValueTimestamp(org.apache.kafka.streams.KeyValueTimestamp) Bytes(org.apache.kafka.common.utils.Bytes) Assert.assertNull(org.junit.Assert.assertNull) Assert.assertFalse(org.junit.Assert.assertFalse) Materialized(org.apache.kafka.streams.kstream.Materialized) TestInputTopic(org.apache.kafka.streams.TestInputTopic) InternalTopologyBuilder(org.apache.kafka.streams.processor.internals.InternalTopologyBuilder) Matchers.is(org.hamcrest.Matchers.is) StreamsTestUtils(org.apache.kafka.test.StreamsTestUtils) Topology(org.apache.kafka.streams.Topology) Assert.assertEquals(org.junit.Assert.assertEquals) MockApiProcessorSupplier(org.apache.kafka.test.MockApiProcessorSupplier) TopologyTestDriver(org.apache.kafka.streams.TopologyTestDriver) Topology(org.apache.kafka.streams.Topology) StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) StringSerializer(org.apache.kafka.common.serialization.StringSerializer) Test(org.junit.Test)
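
The Change<>(newValue, oldValue) pairs asserted above are the KTable's internal changelog records; with old-value sending disabled the old slot is always null. Purely as an illustrative sketch (hypothetical topic names, assuming a Produced import), downstream of toStream() the public API only ever sees the new values:

// Illustrative only: after toStream() there is no notion of an old value at all.
final StreamsBuilder sketchBuilder = new StreamsBuilder();
sketchBuilder.table("numbers-input", Consumed.with(Serdes.String(), Serdes.String()))
    .mapValues(Integer::valueOf)
    .toStream()
    .to("numbers-output", Produced.with(Serdes.String(), Serdes.Integer()));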

Example 19 with KeyValueTimestamp

Use of org.apache.kafka.streams.KeyValueTimestamp in project kafka by apache.

The class KStreamSlidingWindowAggregateTest, method testEarlyRecordsLargeInput.

@Test
public void testEarlyRecordsLargeInput() {
    final StreamsBuilder builder = new StreamsBuilder();
    final String topic = "topic";
    final WindowBytesStoreSupplier storeSupplier = inOrderIterator
        ? new InOrderMemoryWindowStoreSupplier("InOrder", 50000L, 10L, false)
        : Stores.inMemoryWindowStore("Reverse", Duration.ofMillis(50000), Duration.ofMillis(10), false);
    final KTable<Windowed<String>, String> table2 = builder
        .stream(topic, Consumed.with(Serdes.String(), Serdes.String()))
        .groupByKey(Grouped.with(Serdes.String(), Serdes.String()))
        .windowedBy(SlidingWindows.ofTimeDifferenceAndGrace(ofMillis(10), ofMillis(50)))
        .aggregate(MockInitializer.STRING_INIT, MockAggregator.TOSTRING_ADDER, Materialized.as(storeSupplier));
    final MockApiProcessorSupplier<Windowed<String>, String, Void, Void> supplier = new MockApiProcessorSupplier<>();
    table2.toStream().process(supplier);
    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        final TestInputTopic<String, String> inputTopic1 = driver.createInputTopic(topic, new StringSerializer(), new StringSerializer());
        inputTopic1.pipeInput("E", "1", 0L);
        inputTopic1.pipeInput("E", "3", 5L);
        inputTopic1.pipeInput("E", "4", 6L);
        inputTopic1.pipeInput("E", "2", 3L);
        inputTopic1.pipeInput("E", "6", 13L);
        inputTopic1.pipeInput("E", "5", 10L);
        inputTopic1.pipeInput("E", "7", 4L);
        inputTopic1.pipeInput("E", "8", 2L);
        inputTopic1.pipeInput("E", "9", 15L);
    }
    final Comparator<KeyValueTimestamp<Windowed<String>, String>> comparator =
        Comparator.comparing((KeyValueTimestamp<Windowed<String>, String> o) -> o.key().key())
            .thenComparing((KeyValueTimestamp<Windowed<String>, String> o) -> o.key().window().start());
    final ArrayList<KeyValueTimestamp<Windowed<String>, String>> actual = supplier.theCapturedProcessor().processed();
    actual.sort(comparator);
    assertEquals(asList(
        // E@0
        new KeyValueTimestamp<>(new Windowed<>("E", new TimeWindow(0, 10)), "0+1", 0),
        // E@5
        new KeyValueTimestamp<>(new Windowed<>("E", new TimeWindow(0, 10)), "0+1+3", 5),
        // E@6
        new KeyValueTimestamp<>(new Windowed<>("E", new TimeWindow(0, 10)), "0+1+3+4", 6),
        // E@3
        new KeyValueTimestamp<>(new Windowed<>("E", new TimeWindow(0, 10)), "0+1+3+4+2", 6),
        // E@10
        new KeyValueTimestamp<>(new Windowed<>("E", new TimeWindow(0, 10)), "0+1+3+4+2+5", 10),
        // E@4
        new KeyValueTimestamp<>(new Windowed<>("E", new TimeWindow(0, 10)), "0+1+3+4+2+5+7", 10),
        // E@2
        new KeyValueTimestamp<>(new Windowed<>("E", new TimeWindow(0, 10)), "0+1+3+4+2+5+7+8", 10),
        // E@5
        new KeyValueTimestamp<>(new Windowed<>("E", new TimeWindow(1, 11)), "0+3", 5),
        // E@6
        new KeyValueTimestamp<>(new Windowed<>("E", new TimeWindow(1, 11)), "0+3+4", 6),
        // E@3
        new KeyValueTimestamp<>(new Windowed<>("E", new TimeWindow(1, 11)), "0+3+4+2", 6),
        // E@10
        new KeyValueTimestamp<>(new Windowed<>("E", new TimeWindow(1, 11)), "0+3+4+2+5", 10),
        // E@4
        new KeyValueTimestamp<>(new Windowed<>("E", new TimeWindow(1, 11)), "0+3+4+2+5+7", 10),
        // E@2
        new KeyValueTimestamp<>(new Windowed<>("E", new TimeWindow(1, 11)), "0+3+4+2+5+7+8", 10),
        // E@13
        new KeyValueTimestamp<>(new Windowed<>("E", new TimeWindow(3, 13)), "0+3+4+2+6", 13),
        // E@10
        new KeyValueTimestamp<>(new Windowed<>("E", new TimeWindow(3, 13)), "0+3+4+2+6+5", 13),
        // E@4
        new KeyValueTimestamp<>(new Windowed<>("E", new TimeWindow(3, 13)), "0+3+4+2+6+5+7", 13),
        // E@3
        new KeyValueTimestamp<>(new Windowed<>("E", new TimeWindow(4, 14)), "0+3+4", 6),
        // E@13
        new KeyValueTimestamp<>(new Windowed<>("E", new TimeWindow(4, 14)), "0+3+4+6", 13),
        // E@10
        new KeyValueTimestamp<>(new Windowed<>("E", new TimeWindow(4, 14)), "0+3+4+6+5", 13),
        // E@4
        new KeyValueTimestamp<>(new Windowed<>("E", new TimeWindow(4, 14)), "0+3+4+6+5+7", 13),
        // E@4
        new KeyValueTimestamp<>(new Windowed<>("E", new TimeWindow(5, 15)), "0+3+4+6+5", 13),
        // E@15
        new KeyValueTimestamp<>(new Windowed<>("E", new TimeWindow(5, 15)), "0+3+4+6+5+9", 15),
        // E@6
        new KeyValueTimestamp<>(new Windowed<>("E", new TimeWindow(6, 16)), "0+4", 6),
        // E@13
        new KeyValueTimestamp<>(new Windowed<>("E", new TimeWindow(6, 16)), "0+4+6", 13),
        // E@10
        new KeyValueTimestamp<>(new Windowed<>("E", new TimeWindow(6, 16)), "0+4+6+5", 13),
        // E@15
        new KeyValueTimestamp<>(new Windowed<>("E", new TimeWindow(6, 16)), "0+4+6+5+9", 15),
        // E@13
        new KeyValueTimestamp<>(new Windowed<>("E", new TimeWindow(7, 17)), "0+6", 13),
        // E@10
        new KeyValueTimestamp<>(new Windowed<>("E", new TimeWindow(7, 17)), "0+6+5", 13),
        // E@15
        new KeyValueTimestamp<>(new Windowed<>("E", new TimeWindow(7, 17)), "0+6+5+9", 15),
        // E@10
        new KeyValueTimestamp<>(new Windowed<>("E", new TimeWindow(11, 21)), "0+6", 13),
        // E@15
        new KeyValueTimestamp<>(new Windowed<>("E", new TimeWindow(11, 21)), "0+6+9", 15),
        // E@15
        new KeyValueTimestamp<>(new Windowed<>("E", new TimeWindow(14, 24)), "0+9", 15)
    ), actual);
}
Also used : MockApiProcessorSupplier(org.apache.kafka.test.MockApiProcessorSupplier) InMemoryWindowBytesStoreSupplier(org.apache.kafka.streams.state.internals.InMemoryWindowBytesStoreSupplier) WindowBytesStoreSupplier(org.apache.kafka.streams.state.WindowBytesStoreSupplier) TopologyTestDriver(org.apache.kafka.streams.TopologyTestDriver) StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) Windowed(org.apache.kafka.streams.kstream.Windowed) StringSerializer(org.apache.kafka.common.serialization.StringSerializer) KeyValueTimestamp(org.apache.kafka.streams.KeyValueTimestamp) Test(org.junit.Test)
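
Each expected key above pairs the record key with a TimeWindow; a small illustrative sketch of taking such a windowed key apart again, using the same classes the test code already uses:

// Windowed#key() returns the record key, Windowed#window() the window with its millisecond bounds.
final Windowed<String> windowedKey = new Windowed<>("E", new TimeWindow(3, 13));
assertEquals("E", windowedKey.key());
assertEquals(3L, windowedKey.window().start());
assertEquals(13L, windowedKey.window().end());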

Example 20 with KeyValueTimestamp

Use of org.apache.kafka.streams.KeyValueTimestamp in project kafka by apache.

The class KStreamTransformTest, method testTransform.

// Old PAPI. Needs to be migrated.
@SuppressWarnings("deprecation")
@Test
public void testTransform() {
    final StreamsBuilder builder = new StreamsBuilder();
    final TransformerSupplier<Number, Number, KeyValue<Integer, Integer>> transformerSupplier = () -> new Transformer<Number, Number, KeyValue<Integer, Integer>>() {

        private int total = 0;

        @Override
        public void init(final ProcessorContext context) {
            context.schedule(Duration.ofMillis(1), PunctuationType.WALL_CLOCK_TIME, timestamp -> context.forward(-1, (int) timestamp, To.all().withTimestamp(timestamp)));
        }

        @Override
        public KeyValue<Integer, Integer> transform(final Number key, final Number value) {
            total += value.intValue();
            return KeyValue.pair(key.intValue() * 2, total);
        }

        @Override
        public void close() {
        }
    };
    final int[] expectedKeys = { 1, 10, 100, 1000 };
    final MockProcessorSupplier<Integer, Integer> processor = new MockProcessorSupplier<>();
    final KStream<Integer, Integer> stream = builder.stream(TOPIC_NAME, Consumed.with(Serdes.Integer(), Serdes.Integer()));
    stream.transform(transformerSupplier).process(processor);
    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), Instant.ofEpochMilli(0L))) {
        final TestInputTopic<Integer, Integer> inputTopic = driver.createInputTopic(TOPIC_NAME, new IntegerSerializer(), new IntegerSerializer());
        for (final int expectedKey : expectedKeys) {
            inputTopic.pipeInput(expectedKey, expectedKey * 10, expectedKey / 2L);
        }
        driver.advanceWallClockTime(Duration.ofMillis(2));
        driver.advanceWallClockTime(Duration.ofMillis(1));
        final KeyValueTimestamp[] expected = {
            new KeyValueTimestamp<>(2, 10, 0),
            new KeyValueTimestamp<>(20, 110, 5),
            new KeyValueTimestamp<>(200, 1110, 50),
            new KeyValueTimestamp<>(2000, 11110, 500),
            new KeyValueTimestamp<>(-1, 2, 2),
            new KeyValueTimestamp<>(-1, 3, 3)
        };
        assertEquals(expected.length, processor.theCapturedProcessor().processed().size());
        for (int i = 0; i < expected.length; i++) {
            assertEquals(expected[i], processor.theCapturedProcessor().processed().get(i));
        }
    }
}
Also used : KeyValue(org.apache.kafka.streams.KeyValue) Transformer(org.apache.kafka.streams.kstream.Transformer) TopologyTestDriver(org.apache.kafka.streams.TopologyTestDriver) IntegerSerializer(org.apache.kafka.common.serialization.IntegerSerializer) ProcessorContext(org.apache.kafka.streams.processor.ProcessorContext) StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) MockProcessorSupplier(org.apache.kafka.test.MockProcessorSupplier) KeyValueTimestamp(org.apache.kafka.streams.KeyValueTimestamp) Test(org.junit.Test)
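
The example is written against the old PAPI (note the "// Old PAPI. Needs to be migrated." comment). As a hedged sketch, roughly the same logic on the newer org.apache.kafka.streams.processor.api interfaces could look as follows, assuming Kafka Streams 3.3+ where KStream#process accepts the new ProcessorSupplier and forwards typed Records:

// Sketch only; assumes imports of org.apache.kafka.streams.processor.api.Processor,
// ProcessorContext, and Record in addition to those listed above.
stream.process(() -> new Processor<Integer, Integer, Integer, Integer>() {

    private int total = 0;
    private ProcessorContext<Integer, Integer> context;

    @Override
    public void init(final ProcessorContext<Integer, Integer> context) {
        this.context = context;
        // Same wall-clock punctuation as the Transformer's init() above, but forwarding a Record.
        context.schedule(Duration.ofMillis(1), PunctuationType.WALL_CLOCK_TIME,
            timestamp -> context.forward(new Record<>(-1, (int) timestamp, timestamp)));
    }

    @Override
    public void process(final Record<Integer, Integer> record) {
        total += record.value();
        // Doubled key and running total, keeping the input record's timestamp.
        context.forward(new Record<>(record.key() * 2, total, record.timestamp()));
    }
});

The forwarded Record carries its timestamp explicitly, mirroring To.all().withTimestamp(timestamp) in the old API.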

Aggregations

KeyValueTimestamp (org.apache.kafka.streams.KeyValueTimestamp) 71
Test (org.junit.Test) 65
StreamsBuilder (org.apache.kafka.streams.StreamsBuilder) 46
TopologyTestDriver (org.apache.kafka.streams.TopologyTestDriver) 44
StringSerializer (org.apache.kafka.common.serialization.StringSerializer) 36
Properties (java.util.Properties) 26
Windowed (org.apache.kafka.streams.kstream.Windowed) 22
Serdes (org.apache.kafka.common.serialization.Serdes) 21
IntegrationTest (org.apache.kafka.test.IntegrationTest) 20
StringDeserializer (org.apache.kafka.common.serialization.StringDeserializer) 19
KeyValue (org.apache.kafka.streams.KeyValue) 19
Consumed (org.apache.kafka.streams.kstream.Consumed) 17
IntegerSerializer (org.apache.kafka.common.serialization.IntegerSerializer) 16
MockApiProcessorSupplier (org.apache.kafka.test.MockApiProcessorSupplier) 16
Bytes (org.apache.kafka.common.utils.Bytes) 15
KStream (org.apache.kafka.streams.kstream.KStream) 14
Duration (java.time.Duration) 13
KafkaStreams (org.apache.kafka.streams.KafkaStreams) 13
TestInputTopic (org.apache.kafka.streams.TestInputTopic) 13
Utils.mkProperties (org.apache.kafka.common.utils.Utils.mkProperties) 12