
Example 6 with Processor

Use of org.apache.kafka.streams.processor.api.Processor in project kafka by apache.

From the class KafkaStreamsTest, method statelessTopologyShouldNotCreateStateDirectory.

@Test
public void statelessTopologyShouldNotCreateStateDirectory() throws Exception {
    final String safeTestName = safeUniqueTestName(getClass(), testName);
    final String inputTopic = safeTestName + "-input";
    final String outputTopic = safeTestName + "-output";
    final Topology topology = new Topology();
    topology.addSource("source", Serdes.String().deserializer(), Serdes.String().deserializer(), inputTopic).addProcessor("process", () -> new Processor<String, String, String, String>() {

        private ProcessorContext<String, String> context;

        @Override
        public void init(final ProcessorContext<String, String> context) {
            this.context = context;
        }

        @Override
        public void process(final Record<String, String> record) {
            if (record.value().length() % 2 == 0) {
                context.forward(record.withValue(record.key() + record.value()));
            }
        }
    }, "source").addSink("sink", outputTopic, new StringSerializer(), new StringSerializer(), "process");
    startStreamsAndCheckDirExists(topology, false);
}
Also used : Processor(org.apache.kafka.streams.processor.api.Processor) Record(org.apache.kafka.streams.processor.api.Record) CoreMatchers.containsString(org.hamcrest.CoreMatchers.containsString) EasyMock.anyString(org.easymock.EasyMock.anyString) ProcessorTopology(org.apache.kafka.streams.processor.internals.ProcessorTopology) StringSerializer(org.apache.kafka.common.serialization.StringSerializer) ProcessorContext(org.apache.kafka.streams.processor.api.ProcessorContext) PrepareForTest(org.powermock.core.classloader.annotations.PrepareForTest) Test(org.junit.Test)
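
The helper startStreamsAndCheckDirExists is defined elsewhere in KafkaStreamsTest and is not shown in this listing. As a minimal sketch of an alternative way to drive the same stateless topology, the snippet below uses Kafka Streams' TopologyTestDriver (from kafka-streams-test-utils) together with the topology, inputTopic, and outputTopic variables from the test above; the application id and bootstrap address are placeholders chosen for illustration, and it additionally needs imports for TopologyTestDriver, TestInputTopic, TestOutputTopic, and StringDeserializer.

    final Properties props = mkProperties(mkMap(
        mkEntry(StreamsConfig.APPLICATION_ID_CONFIG, "stateless-topology-sketch"),
        mkEntry(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092")));
    try (final TopologyTestDriver driver = new TopologyTestDriver(topology, props)) {
        final TestInputTopic<String, String> input =
            driver.createInputTopic(inputTopic, new StringSerializer(), new StringSerializer());
        final TestOutputTopic<String, String> output =
            driver.createOutputTopic(outputTopic, new StringDeserializer(), new StringDeserializer());
        // "vv" has an even length, so the processor forwards key + value ("kvv").
        input.pipeInput("k", "vv");
        assertThat(output.readValue(), is("kvv"));
        // "v" has an odd length, so nothing reaches the sink topic.
        input.pipeInput("k", "v");
        assertThat(output.isEmpty(), is(true));
    }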

Example 7 with Processor

Use of org.apache.kafka.streams.processor.api.Processor in project kafka by apache.

From the class MockProcessorContextAPITest, method shouldCaptureApplicationAndRecordMetadata.

@Test
public void shouldCaptureApplicationAndRecordMetadata() {
    final Properties config = mkProperties(mkMap(
        mkEntry(StreamsConfig.APPLICATION_ID_CONFIG, "testMetadata"),
        mkEntry(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "")));
    final Processor<String, Object, String, Object> processor = new Processor<String, Object, String, Object>() {

        private ProcessorContext<String, Object> context;

        @Override
        public void init(final ProcessorContext<String, Object> context) {
            this.context = context;
        }

        @Override
        public void process(final Record<String, Object> record) {
            context.forward(new Record<String, Object>("appId", context.applicationId(), 0L));
            context.forward(new Record<String, Object>("taskId", context.taskId(), 0L));
            if (context.recordMetadata().isPresent()) {
                final RecordMetadata recordMetadata = context.recordMetadata().get();
                context.forward(new Record<String, Object>("topic", recordMetadata.topic(), 0L));
                context.forward(new Record<String, Object>("partition", recordMetadata.partition(), 0L));
                context.forward(new Record<String, Object>("offset", recordMetadata.offset(), 0L));
            }
            context.forward(new Record<String, Object>("record", record, 0L));
        }
    };
    final MockProcessorContext<String, Object> context = new MockProcessorContext<>(config);
    processor.init(context);
    processor.process(new Record<>("foo", 5L, 0L));
    {
        final List<CapturedForward<? extends String, ?>> forwarded = context.forwarded();
        final List<CapturedForward<? extends String, ?>> expected = asList(
            new CapturedForward<>(new Record<>("appId", "testMetadata", 0L)),
            new CapturedForward<>(new Record<>("taskId", new TaskId(0, 0), 0L)),
            new CapturedForward<>(new Record<>("record", new Record<>("foo", 5L, 0L), 0L)));
        assertThat(forwarded, is(expected));
    }
    context.resetForwards();
    context.setRecordMetadata("t1", 0, 0L);
    processor.process(new Record<>("foo", 5L, 0L));
    {
        final List<CapturedForward<? extends String, ?>> forwarded = context.forwarded();
        final List<CapturedForward<? extends String, ?>> expected = asList(
            new CapturedForward<>(new Record<>("appId", "testMetadata", 0L)),
            new CapturedForward<>(new Record<>("taskId", new TaskId(0, 0), 0L)),
            new CapturedForward<>(new Record<>("topic", "t1", 0L)),
            new CapturedForward<>(new Record<>("partition", 0, 0L)),
            new CapturedForward<>(new Record<>("offset", 0L, 0L)),
            new CapturedForward<>(new Record<>("record", new Record<>("foo", 5L, 0L), 0L)));
        assertThat(forwarded, is(expected));
    }
}
Also used : Processor(org.apache.kafka.streams.processor.api.Processor) TaskId(org.apache.kafka.streams.processor.TaskId) Utils.mkProperties(org.apache.kafka.common.utils.Utils.mkProperties) Properties(java.util.Properties) MockProcessorContext(org.apache.kafka.streams.processor.api.MockProcessorContext) ProcessorContext(org.apache.kafka.streams.processor.api.ProcessorContext) RecordMetadata(org.apache.kafka.streams.processor.api.RecordMetadata) CapturedForward(org.apache.kafka.streams.processor.api.MockProcessorContext.CapturedForward) Record(org.apache.kafka.streams.processor.api.Record) Collections.singletonList(java.util.Collections.singletonList) Arrays.asList(java.util.Arrays.asList) List(java.util.List) Test(org.junit.jupiter.api.Test)
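
Beyond the plain forwarded() view used above, MockProcessorContext can also filter captured forwards by child name when the processor forwards to named children. The sketch below is not taken from MockProcessorContextAPITest; the processor and the child names "audit" and "main" are hypothetical and only illustrate forwarded(childName). It relies on the same classes listed under "Also used".

    final Processor<String, Object, String, Object> router = new Processor<String, Object, String, Object>() {

        private ProcessorContext<String, Object> context;

        @Override
        public void init(final ProcessorContext<String, Object> context) {
            this.context = context;
        }

        @Override
        public void process(final Record<String, Object> record) {
            // Fan the record out to two named children (names are illustrative).
            context.forward(record, "audit");
            context.forward(record.withKey("copy-" + record.key()), "main");
        }
    };
    final MockProcessorContext<String, Object> context = new MockProcessorContext<>();
    router.init(context);
    router.process(new Record<>("foo", 5L, 0L));
    // forwarded(childName) returns only the captures sent to that child.
    assertThat(context.forwarded("audit").size(), is(1));
    assertThat(context.forwarded("main").get(0).record().key(), is("copy-foo"));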

Example 8 with Processor

Use of org.apache.kafka.streams.processor.api.Processor in project kafka by apache.

From the class MockProcessorContextAPITest, method shouldCapturePunctuator.

@Test
public void shouldCapturePunctuator() {
    final Processor<String, Long, Void, Void> processor = new Processor<String, Long, Void, Void>() {

        @Override
        public void init(final ProcessorContext<Void, Void> context) {
            context.schedule(Duration.ofSeconds(1L), PunctuationType.WALL_CLOCK_TIME, timestamp -> context.commit());
        }

        @Override
        public void process(final Record<String, Long> record) {
        }
    };
    final MockProcessorContext<Void, Void> context = new MockProcessorContext<>();
    processor.init(context);
    final MockProcessorContext.CapturedPunctuator capturedPunctuator = context.scheduledPunctuators().get(0);
    assertThat(capturedPunctuator.getInterval(), is(Duration.ofMillis(1000L)));
    assertThat(capturedPunctuator.getType(), is(PunctuationType.WALL_CLOCK_TIME));
    assertThat(capturedPunctuator.cancelled(), is(false));
    final Punctuator punctuator = capturedPunctuator.getPunctuator();
    assertThat(context.committed(), is(false));
    punctuator.punctuate(1234L);
    assertThat(context.committed(), is(true));
}
Also used : Processor(org.apache.kafka.streams.processor.api.Processor) Punctuator(org.apache.kafka.streams.processor.Punctuator) Record(org.apache.kafka.streams.processor.api.Record) MockProcessorContext(org.apache.kafka.streams.processor.api.MockProcessorContext) ProcessorContext(org.apache.kafka.streams.processor.api.ProcessorContext) Test(org.junit.jupiter.api.Test)
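
The captured punctuator can also drive assertions about forwards. The following is a minimal sketch, not part of the Kafka test suite: a processor whose wall-clock punctuation forwards a "tick" record (the key and the output types are chosen for illustration). Invoking the captured punctuator by hand makes the forward visible through context.forwarded(), using only classes that already appear in the examples above.

    final Processor<String, Long, String, Long> processor = new Processor<String, Long, String, Long>() {

        @Override
        public void init(final ProcessorContext<String, Long> context) {
            // Forward a "tick" record on every wall-clock punctuation.
            context.schedule(Duration.ofSeconds(1L), PunctuationType.WALL_CLOCK_TIME,
                timestamp -> context.forward(new Record<String, Long>("tick", timestamp, timestamp)));
        }

        @Override
        public void process(final Record<String, Long> record) {
        }
    };
    final MockProcessorContext<String, Long> context = new MockProcessorContext<>();
    processor.init(context);
    // Fire the captured punctuator manually and inspect the captured forward.
    context.scheduledPunctuators().get(0).getPunctuator().punctuate(42L);
    assertThat(context.forwarded().get(0).record().key(), is("tick"));
    assertThat(context.forwarded().get(0).record().timestamp(), is(42L));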

Aggregations

Processor (org.apache.kafka.streams.processor.api.Processor): 8
Record (org.apache.kafka.streams.processor.api.Record): 7
Properties (java.util.Properties): 5
Test (org.junit.Test): 5
StringSerializer (org.apache.kafka.common.serialization.StringSerializer): 4
ProcessorContext (org.apache.kafka.streams.processor.api.ProcessorContext): 4
List (java.util.List): 3
Serdes (org.apache.kafka.common.serialization.Serdes): 3
Consumed (org.apache.kafka.streams.kstream.Consumed): 3
MockProcessorContext (org.apache.kafka.streams.processor.api.MockProcessorContext): 3
Arrays (java.util.Arrays): 2
Arrays.asList (java.util.Arrays.asList): 2
HashMap (java.util.HashMap): 2
Map (java.util.Map): 2
Pattern (java.util.regex.Pattern): 2
LongSerializer (org.apache.kafka.common.serialization.LongSerializer): 2
Utils.mkProperties (org.apache.kafka.common.utils.Utils.mkProperties): 2
StreamsBuilder (org.apache.kafka.streams.StreamsBuilder): 2
StreamsConfig (org.apache.kafka.streams.StreamsConfig): 2
Grouped (org.apache.kafka.streams.kstream.Grouped): 2