
Example 11 with MockApiProcessor

Use of org.apache.kafka.test.MockApiProcessor in the apache/kafka project.

From class KStreamImplTest, method shouldSendDataToDynamicTopics.

@Test
public void shouldSendDataToDynamicTopics() {
    final StreamsBuilder builder = new StreamsBuilder();
    final String input = "topic";
    final KStream<String, String> stream = builder.stream(input, stringConsumed);
    stream.to((key, value, context) -> context.topic() + "-" + key + "-" + value.substring(0, 1), Produced.with(Serdes.String(), Serdes.String()));
    builder.stream(input + "-a-v", stringConsumed).process(processorSupplier);
    builder.stream(input + "-b-v", stringConsumed).process(processorSupplier);
    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        final TestInputTopic<String, String> inputTopic = driver.createInputTopic(input, new StringSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
        inputTopic.pipeInput("a", "v1");
        inputTopic.pipeInput("a", "v2");
        inputTopic.pipeInput("b", "v1");
    }
    final List<MockApiProcessor<String, String, Void, Void>> mockProcessors = processorSupplier.capturedProcessors(2);
    assertThat(mockProcessors.get(0).processed(), equalTo(asList(new KeyValueTimestamp<>("a", "v1", 0), new KeyValueTimestamp<>("a", "v2", 0))));
    assertThat(mockProcessors.get(1).processed(), equalTo(Collections.singletonList(new KeyValueTimestamp<>("b", "v1", 0))));
}
Also used: StreamsBuilder (org.apache.kafka.streams.StreamsBuilder), MockApiProcessor (org.apache.kafka.test.MockApiProcessor), TopologyTestDriver (org.apache.kafka.streams.TopologyTestDriver), CoreMatchers.containsString (org.hamcrest.CoreMatchers.containsString), StringSerializer (org.apache.kafka.common.serialization.StringSerializer), Test (org.junit.Test)
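
The test refers to the processorSupplier, stringConsumed, and props fields of KStreamImplTest, which are not shown in the snippet. A minimal sketch of what such declarations could look like, assuming the generics implied by capturedProcessors(2) and string serdes throughout; the application id and bootstrap servers below are placeholders, and the actual field definitions in the Kafka test may differ.

// Hypothetical supporting fields for shouldSendDataToDynamicTopics (sketch only).
final MockApiProcessorSupplier<String, String, Void, Void> processorSupplier =
    new MockApiProcessorSupplier<>(); // hands out MockApiProcessors and records them for capturedProcessors()
final Consumed<String, String> stringConsumed =
    Consumed.with(Serdes.String(), Serdes.String()); // string keys and values for builder.stream(...)
final Properties props = StreamsTestUtils.getStreamsConfig(
    "dynamic-topics-test",   // placeholder application id
    "localhost:9091",        // placeholder bootstrap servers
    Serdes.StringSerde.class.getName(),
    Serdes.StringSerde.class.getName(),
    new Properties());       // no additional config overrides

Imports assumed by this sketch: MockApiProcessorSupplier (org.apache.kafka.test.MockApiProcessorSupplier), Consumed (org.apache.kafka.streams.kstream.Consumed), Serdes (org.apache.kafka.common.serialization.Serdes), Properties (java.util.Properties), StreamsTestUtils (org.apache.kafka.test.StreamsTestUtils)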

Example 12 with MockApiProcessor

Use of org.apache.kafka.test.MockApiProcessor in the apache/kafka project.

From class StreamThreadTest, method shouldRespectNumIterationsInMainLoop.

@Test
public void shouldRespectNumIterationsInMainLoop() {
    final List<MockApiProcessor<byte[], byte[], Object, Object>> mockProcessors = new LinkedList<>();
    internalTopologyBuilder.addSource(null, "source1", null, null, null, topic1);
    internalTopologyBuilder.addProcessor("processor1", (ProcessorSupplier<byte[], byte[], ?, ?>) () -> {
        final MockApiProcessor<byte[], byte[], Object, Object> processor = new MockApiProcessor<>(PunctuationType.WALL_CLOCK_TIME, 10L);
        mockProcessors.add(processor);
        return processor;
    }, "source1");
    internalTopologyBuilder.addProcessor("processor2", (ProcessorSupplier<byte[], byte[], ?, ?>) () -> new MockApiProcessor<>(PunctuationType.STREAM_TIME, 10L), "source1");
    final Properties properties = new Properties();
    properties.put(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG, 100L);
    final StreamsConfig config = new StreamsConfig(StreamsTestUtils.getStreamsConfig(APPLICATION_ID, "localhost:2171", Serdes.ByteArraySerde.class.getName(), Serdes.ByteArraySerde.class.getName(), properties));
    final StreamThread thread = createStreamThread(CLIENT_ID, config, false);
    thread.setState(StreamThread.State.STARTING);
    thread.setState(StreamThread.State.PARTITIONS_REVOKED);
    final TaskId task1 = new TaskId(0, t1p1.partition());
    final Set<TopicPartition> assignedPartitions = Collections.singleton(t1p1);
    thread.taskManager().handleAssignment(Collections.singletonMap(task1, assignedPartitions), emptyMap());
    final MockConsumer<byte[], byte[]> mockConsumer = (MockConsumer<byte[], byte[]>) thread.mainConsumer();
    mockConsumer.assign(Collections.singleton(t1p1));
    mockConsumer.updateBeginningOffsets(Collections.singletonMap(t1p1, 0L));
    thread.rebalanceListener().onPartitionsAssigned(assignedPartitions);
    thread.runOnce();
    // processed one record, punctuated after the first record, and hence num.iterations is still 1
    long offset = -1;
    addRecord(mockConsumer, ++offset, 0L);
    thread.runOnce();
    assertThat(thread.currentNumIterations(), equalTo(1));
    // processed one more record without punctuation, and bump num.iterations to 2
    addRecord(mockConsumer, ++offset, 1L);
    thread.runOnce();
    assertThat(thread.currentNumIterations(), equalTo(2));
    // processed zero records, early exit and iterations stays as 2
    thread.runOnce();
    assertThat(thread.currentNumIterations(), equalTo(2));
    // system time based punctuation without processing any record, num.iterations stays at 2
    mockTime.sleep(11L);
    thread.runOnce();
    assertThat(thread.currentNumIterations(), equalTo(2));
    // system time based punctuation after processing a record, halving num.iterations back to 1
    mockTime.sleep(11L);
    addRecord(mockConsumer, ++offset, 5L);
    thread.runOnce();
    assertThat(thread.currentNumIterations(), equalTo(1));
    // processed two records, bumping up iterations to 3 (1 + 2)
    addRecord(mockConsumer, ++offset, 5L);
    addRecord(mockConsumer, ++offset, 6L);
    thread.runOnce();
    assertThat(thread.currentNumIterations(), equalTo(3));
    // stream time based punctuation halves num.iterations to 1
    addRecord(mockConsumer, ++offset, 11L);
    thread.runOnce();
    assertThat(thread.currentNumIterations(), equalTo(1));
    // processed three records, bumping up iterations to 3 (1 + 2)
    addRecord(mockConsumer, ++offset, 12L);
    addRecord(mockConsumer, ++offset, 13L);
    addRecord(mockConsumer, ++offset, 14L);
    thread.runOnce();
    assertThat(thread.currentNumIterations(), equalTo(3));
    mockProcessors.forEach(MockApiProcessor::requestCommit);
    addRecord(mockConsumer, ++offset, 15L);
    thread.runOnce();
    // user requested commit should halve num.iterations to 1
    assertThat(thread.currentNumIterations(), equalTo(1));
    // processed three records, bumping up iterations to 3 (1 + 2)
    addRecord(mockConsumer, ++offset, 15L);
    addRecord(mockConsumer, ++offset, 16L);
    addRecord(mockConsumer, ++offset, 17L);
    thread.runOnce();
    assertThat(thread.currentNumIterations(), equalTo(3));
    // time based commit without processing, should keep the iteration as 3
    mockTime.sleep(90L);
    thread.runOnce();
    assertThat(thread.currentNumIterations(), equalTo(3));
    // time based commit after processing a record, should halve num.iterations to 1
    mockTime.sleep(90L);
    addRecord(mockConsumer, ++offset, 18L);
    thread.runOnce();
    assertThat(thread.currentNumIterations(), equalTo(1));
}
Also used: TaskId (org.apache.kafka.streams.processor.TaskId), MockApiProcessor (org.apache.kafka.test.MockApiProcessor), Utils.mkProperties (org.apache.kafka.common.utils.Utils.mkProperties), Properties (java.util.Properties), LinkedList (java.util.LinkedList), Serdes (org.apache.kafka.common.serialization.Serdes), TopicPartition (org.apache.kafka.common.TopicPartition), MockConsumer (org.apache.kafka.clients.consumer.MockConsumer), StreamsConfig (org.apache.kafka.streams.StreamsConfig), Test (org.junit.Test)
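
The addRecord helper that feeds records into the MockConsumer is defined elsewhere in StreamThreadTest. A sketch of an equivalent helper, assuming the topic1 and t1p1 fixtures referenced above and empty byte-array payloads; the actual helper in the Kafka test may differ.

// Hypothetical helper matching the addRecord(mockConsumer, ++offset, timestamp) calls above (sketch only).
private void addRecord(final MockConsumer<byte[], byte[]> mockConsumer,
                       final long offset,
                       final long timestamp) {
    // Append one record to the assigned partition so the next runOnce() call polls it.
    mockConsumer.addRecord(new ConsumerRecord<>(
        topic1,              // topic wired to "source1" above
        t1p1.partition(),    // partition backing task1
        offset,
        timestamp,
        TimestampType.CREATE_TIME,
        -1,                  // serialized key size unknown
        -1,                  // serialized value size unknown
        new byte[0],         // key payload
        new byte[0],         // value payload
        new RecordHeaders(),
        Optional.empty()));  // no leader epoch
}

Imports assumed by this sketch: ConsumerRecord (org.apache.kafka.clients.consumer.ConsumerRecord), TimestampType (org.apache.kafka.common.record.TimestampType), RecordHeaders (org.apache.kafka.common.header.internals.RecordHeaders), Optional (java.util.Optional)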

Aggregations

MockApiProcessor (org.apache.kafka.test.MockApiProcessor) 12
StringSerializer (org.apache.kafka.common.serialization.StringSerializer) 11
TopologyTestDriver (org.apache.kafka.streams.TopologyTestDriver) 11
MockApiProcessorSupplier (org.apache.kafka.test.MockApiProcessorSupplier) 10
Test (org.junit.Test) 8
StreamsBuilder (org.apache.kafka.streams.StreamsBuilder) 7
KeyValueTimestamp (org.apache.kafka.streams.KeyValueTimestamp) 4
Properties (java.util.Properties) 3
IntegerSerializer (org.apache.kafka.common.serialization.IntegerSerializer) 3
Serdes (org.apache.kafka.common.serialization.Serdes) 3
Topology (org.apache.kafka.streams.Topology) 3
Duration (java.time.Duration) 2
Instant (java.time.Instant) 2
Arrays.asList (java.util.Arrays.asList) 2
Bytes (org.apache.kafka.common.utils.Bytes) 2
Utils.mkProperties (org.apache.kafka.common.utils.Utils.mkProperties) 2
StreamsConfig (org.apache.kafka.streams.StreamsConfig) 2
TestInputTopic (org.apache.kafka.streams.TestInputTopic) 2
Consumed (org.apache.kafka.streams.kstream.Consumed) 2
KTable (org.apache.kafka.streams.kstream.KTable) 2