
Example 1 with ProcessorContext

Use of org.apache.kafka.streams.processor.api.ProcessorContext in the apache/kafka project.

From the class KStreamPrintTest, method setUp:

@Before
public void setUp() {
    byteOutStream = new ByteArrayOutputStream();
    final KStreamPrint<Integer, String> kStreamPrint = new KStreamPrint<>(new PrintForeachAction<>(
        byteOutStream,
        (key, value) -> String.format("%d, %s", key, value),
        "test-stream"));
    printProcessor = kStreamPrint.get();
    final ProcessorContext<Void, Void> processorContext = EasyMock.createNiceMock(ProcessorContext.class);
    EasyMock.replay(processorContext);
    printProcessor.init(processorContext);
}
Also used: Arrays (java.util.Arrays), List (java.util.List), ProcessorContext (org.apache.kafka.streams.processor.api.ProcessorContext), ByteArrayOutputStream (java.io.ByteArrayOutputStream), KeyValue (org.apache.kafka.streams.KeyValue), Record (org.apache.kafka.streams.processor.api.Record), Test (org.junit.Test), Processor (org.apache.kafka.streams.processor.api.Processor), EasyMock (org.easymock.EasyMock), StandardCharsets (java.nio.charset.StandardCharsets), Assert.assertEquals (org.junit.Assert.assertEquals), Before (org.junit.Before)
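
After init, the fixture can be exercised and the captured bytes asserted. A minimal sketch of such a follow-up test, assuming PrintForeachAction prefixes each line with the "[test-stream]: " label and flushes its writer when the processor is closed (the record and the expected string are illustrative, not taken from the example above):

    printProcessor.process(new Record<>(1, "first", 0L));
    // closing the processor is assumed to flush PrintForeachAction's writer into byteOutStream
    printProcessor.close();
    final String printed = new String(byteOutStream.toByteArray(), StandardCharsets.UTF_8);
    assertEquals("[test-stream]: 1, first" + System.lineSeparator(), printed);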

Example 2 with ProcessorContext

Use of org.apache.kafka.streams.processor.api.ProcessorContext in the apache/kafka project.

From the class GraphGraceSearchUtilTest, method shouldExtractGraceFromSessionAncestorThroughStatefulParent:

@Test
public void shouldExtractGraceFromSessionAncestorThroughStatefulParent() {
    final SessionWindows windows = SessionWindows.ofInactivityGapAndGrace(ofMillis(10L), ofMillis(1234L));
    final StatefulProcessorNode<String, Long> graceGrandparent = new StatefulProcessorNode<>(
        "asdf",
        new ProcessorParameters<>(
            new KStreamSessionWindowAggregate<String, Long, Integer>(windows, "asdf", null, null, null),
            "asdf"),
        (StoreBuilder<?>) null);
    final StatefulProcessorNode<String, Long> statefulParent = new StatefulProcessorNode<>(
        "stateful",
        new ProcessorParameters<>(() -> new Processor<String, Long, String, Long>() {

        @Override
        public void init(final ProcessorContext<String, Long> context) {
        }

        @Override
        public void process(final Record<String, Long> record) {
        }

        @Override
        public void close() {
        }
    }, "dummy"), (StoreBuilder<?>) null);
    graceGrandparent.addChild(statefulParent);
    final ProcessorGraphNode<String, Long> node = new ProcessorGraphNode<>("stateless", null);
    statefulParent.addChild(node);
    final long extracted = GraphGraceSearchUtil.findAndVerifyWindowGrace(node);
    assertThat(extracted, is(windows.gracePeriodMs() + windows.inactivityGap()));
}
Also used: Processor (org.apache.kafka.streams.processor.api.Processor), ProcessorContext (org.apache.kafka.streams.processor.api.ProcessorContext), SessionWindows (org.apache.kafka.streams.kstream.SessionWindows), Record (org.apache.kafka.streams.processor.api.Record), KStreamSessionWindowAggregate (org.apache.kafka.streams.kstream.internals.KStreamSessionWindowAggregate), Test (org.junit.Test)
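
The inverse case clarifies what the utility does: findAndVerifyWindowGrace walks up the graph until it finds a windowed ancestor, and with no such ancestor there is no defined window close time. A hedged sketch of that expectation, assuming the utility throws org.apache.kafka.streams.errors.TopologyException in this case and that JUnit 4.13's assertThrows is available:

    // a node with no windowed ancestor anywhere above it
    final ProcessorGraphNode<String, Long> orphan = new ProcessorGraphNode<>("stateless", null);
    assertThrows(TopologyException.class, () -> GraphGraceSearchUtil.findAndVerifyWindowGrace(orphan));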

Example 3 with ProcessorContext

Use of org.apache.kafka.streams.processor.api.ProcessorContext in the apache/kafka project.

From the class SubscriptionStoreReceiveProcessorSupplier, method get:

@Override
public Processor<KO, SubscriptionWrapper<K>, CombinedKey<KO, K>, Change<ValueAndTimestamp<SubscriptionWrapper<K>>>> get() {
    return new ContextualProcessor<KO, SubscriptionWrapper<K>, CombinedKey<KO, K>, Change<ValueAndTimestamp<SubscriptionWrapper<K>>>>() {

        private TimestampedKeyValueStore<Bytes, SubscriptionWrapper<K>> store;

        private Sensor droppedRecordsSensor;

        @Override
        public void init(final ProcessorContext<CombinedKey<KO, K>, Change<ValueAndTimestamp<SubscriptionWrapper<K>>>> context) {
            super.init(context);
            final InternalProcessorContext<?, ?> internalProcessorContext = (InternalProcessorContext<?, ?>) context;
            droppedRecordsSensor = TaskMetrics.droppedRecordsSensor(
                Thread.currentThread().getName(),
                internalProcessorContext.taskId().toString(),
                internalProcessorContext.metrics());
            store = internalProcessorContext.getStateStore(storeBuilder);
            keySchema.init(context);
        }

        @Override
        public void process(final Record<KO, SubscriptionWrapper<K>> record) {
            if (record.key() == null) {
                if (context().recordMetadata().isPresent()) {
                    final RecordMetadata recordMetadata = context().recordMetadata().get();
                    LOG.warn("Skipping record due to null foreign key. " + "topic=[{}] partition=[{}] offset=[{}]", recordMetadata.topic(), recordMetadata.partition(), recordMetadata.offset());
                } else {
                    LOG.warn("Skipping record due to null foreign key. Topic, partition, and offset not known.");
                }
                droppedRecordsSensor.record();
                return;
            }
            if (record.value().getVersion() != SubscriptionWrapper.CURRENT_VERSION) {
                // Guard against modifications to SubscriptionWrapper: compatibility with
                // previous versions must be maintained to enable rolling upgrades
                // from older SubscriptionWrapper versions to newer versions.
                throw new UnsupportedVersionException("SubscriptionWrapper is of an incompatible version.");
            }
            final Bytes subscriptionKey = keySchema.toBytes(record.key(), record.value().getPrimaryKey());
            final ValueAndTimestamp<SubscriptionWrapper<K>> newValue = ValueAndTimestamp.make(record.value(), record.timestamp());
            final ValueAndTimestamp<SubscriptionWrapper<K>> oldValue = store.get(subscriptionKey);
            // This store is used by the prefix scanner in ForeignJoinSubscriptionProcessorSupplier
            if (record.value().getInstruction().equals(SubscriptionWrapper.Instruction.DELETE_KEY_AND_PROPAGATE)
                || record.value().getInstruction().equals(SubscriptionWrapper.Instruction.DELETE_KEY_NO_PROPAGATE)) {
                store.delete(subscriptionKey);
            } else {
                store.put(subscriptionKey, newValue);
            }
            final Change<ValueAndTimestamp<SubscriptionWrapper<K>>> change = new Change<>(newValue, oldValue);
            // note: key is non-nullable
            // note: newValue is non-nullable
            context().forward(record
                .withKey(new CombinedKey<>(record.key(), record.value().getPrimaryKey()))
                .withValue(change)
                .withTimestamp(newValue.timestamp()));
        }
    };
}
Also used: InternalProcessorContext (org.apache.kafka.streams.processor.internals.InternalProcessorContext), Change (org.apache.kafka.streams.kstream.internals.Change), ProcessorContext (org.apache.kafka.streams.processor.api.ProcessorContext), ValueAndTimestamp (org.apache.kafka.streams.state.ValueAndTimestamp), RecordMetadata (org.apache.kafka.streams.processor.api.RecordMetadata), Bytes (org.apache.kafka.common.utils.Bytes), TimestampedKeyValueStore (org.apache.kafka.streams.state.TimestampedKeyValueStore), Record (org.apache.kafka.streams.processor.api.Record), ContextualProcessor (org.apache.kafka.streams.processor.api.ContextualProcessor), Sensor (org.apache.kafka.common.metrics.Sensor), UnsupportedVersionException (org.apache.kafka.common.errors.UnsupportedVersionException)
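
The ContextualProcessor base class used above stores the context handed to init so that subclasses can reach it through context(), which is exactly what process() relies on here. A minimal sketch of the same pattern in isolation (UppercaseProcessor is an illustrative name, not part of the Kafka code above):

    import java.util.Locale;
    import org.apache.kafka.streams.processor.api.ContextualProcessor;
    import org.apache.kafka.streams.processor.api.Record;

    public class UppercaseProcessor extends ContextualProcessor<String, String, String, String> {
        @Override
        public void process(final Record<String, String> record) {
            // context() returns the ProcessorContext captured by ContextualProcessor.init()
            context().forward(record.withValue(record.value().toUpperCase(Locale.ROOT)));
        }
    }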

Example 4 with ProcessorContext

Use of org.apache.kafka.streams.processor.api.ProcessorContext in the apache/kafka project.

From the class KafkaStreamsTest, method statelessTopologyShouldNotCreateStateDirectory:

@Test
public void statelessTopologyShouldNotCreateStateDirectory() throws Exception {
    final String safeTestName = safeUniqueTestName(getClass(), testName);
    final String inputTopic = safeTestName + "-input";
    final String outputTopic = safeTestName + "-output";
    final Topology topology = new Topology();
    topology.addSource("source", Serdes.String().deserializer(), Serdes.String().deserializer(), inputTopic).addProcessor("process", () -> new Processor<String, String, String, String>() {

        private ProcessorContext<String, String> context;

        @Override
        public void init(final ProcessorContext<String, String> context) {
            this.context = context;
        }

        @Override
        public void process(final Record<String, String> record) {
            if (record.value().length() % 2 == 0) {
                context.forward(record.withValue(record.key() + record.value()));
            }
        }
    }, "source").addSink("sink", outputTopic, new StringSerializer(), new StringSerializer(), "process");
    startStreamsAndCheckDirExists(topology, false);
}
Also used : Processor(org.apache.kafka.streams.processor.api.Processor) Record(org.apache.kafka.streams.processor.api.Record) CoreMatchers.containsString(org.hamcrest.CoreMatchers.containsString) EasyMock.anyString(org.easymock.EasyMock.anyString) ProcessorTopology(org.apache.kafka.streams.processor.internals.ProcessorTopology) StringSerializer(org.apache.kafka.common.serialization.StringSerializer) ProcessorContext(org.apache.kafka.streams.processor.api.ProcessorContext) PrepareForTest(org.powermock.core.classloader.annotations.PrepareForTest) Test(org.junit.Test)
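
Beyond the state-directory check, the same topology can also be verified functionally without a broker. A minimal sketch using TopologyTestDriver from org.apache.kafka.streams (the application id and bootstrap value are placeholders; the driver never connects to them):

    final Properties props = new Properties();
    props.put(StreamsConfig.APPLICATION_ID_CONFIG, "stateless-test");
    props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); // placeholder only
    try (final TopologyTestDriver driver = new TopologyTestDriver(topology, props)) {
        final TestInputTopic<String, String> in =
            driver.createInputTopic(inputTopic, new StringSerializer(), new StringSerializer());
        final TestOutputTopic<String, String> out =
            driver.createOutputTopic(outputTopic, new StringDeserializer(), new StringDeserializer());
        in.pipeInput("k", "ab");   // even-length value: forwarded as key + value
        in.pipeInput("k", "abc");  // odd-length value: dropped by the processor
        assertEquals("kab", out.readValue());
        assertTrue(out.isEmpty());
    }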

Example 5 with ProcessorContext

Use of org.apache.kafka.streams.processor.api.ProcessorContext in the apache/kafka project.

From the class MockProcessorContextAPITest, method shouldCaptureApplicationAndRecordMetadata:

@Test
public void shouldCaptureApplicationAndRecordMetadata() {
    final Properties config = mkProperties(mkMap(
        mkEntry(StreamsConfig.APPLICATION_ID_CONFIG, "testMetadata"),
        mkEntry(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "")));
    final Processor<String, Object, String, Object> processor = new Processor<String, Object, String, Object>() {

        private ProcessorContext<String, Object> context;

        @Override
        public void init(final ProcessorContext<String, Object> context) {
            this.context = context;
        }

        @Override
        public void process(final Record<String, Object> record) {
            context.forward(new Record<String, Object>("appId", context.applicationId(), 0L));
            context.forward(new Record<String, Object>("taskId", context.taskId(), 0L));
            if (context.recordMetadata().isPresent()) {
                final RecordMetadata recordMetadata = context.recordMetadata().get();
                context.forward(new Record<String, Object>("topic", recordMetadata.topic(), 0L));
                context.forward(new Record<String, Object>("partition", recordMetadata.partition(), 0L));
                context.forward(new Record<String, Object>("offset", recordMetadata.offset(), 0L));
            }
            context.forward(new Record<String, Object>("record", record, 0L));
        }
    };
    final MockProcessorContext<String, Object> context = new MockProcessorContext<>(config);
    processor.init(context);
    processor.process(new Record<>("foo", 5L, 0L));
    {
        final List<CapturedForward<? extends String, ?>> forwarded = context.forwarded();
        final List<CapturedForward<? extends String, ?>> expected = asList(
            new CapturedForward<>(new Record<>("appId", "testMetadata", 0L)),
            new CapturedForward<>(new Record<>("taskId", new TaskId(0, 0), 0L)),
            new CapturedForward<>(new Record<>("record", new Record<>("foo", 5L, 0L), 0L)));
        assertThat(forwarded, is(expected));
    }
    context.resetForwards();
    context.setRecordMetadata("t1", 0, 0L);
    processor.process(new Record<>("foo", 5L, 0L));
    {
        final List<CapturedForward<? extends String, ?>> forwarded = context.forwarded();
        final List<CapturedForward<? extends String, ?>> expected = asList(
            new CapturedForward<>(new Record<>("appId", "testMetadata", 0L)),
            new CapturedForward<>(new Record<>("taskId", new TaskId(0, 0), 0L)),
            new CapturedForward<>(new Record<>("topic", "t1", 0L)),
            new CapturedForward<>(new Record<>("partition", 0, 0L)),
            new CapturedForward<>(new Record<>("offset", 0L, 0L)),
            new CapturedForward<>(new Record<>("record", new Record<>("foo", 5L, 0L), 0L)));
        assertThat(forwarded, is(expected));
    }
}
Also used: Processor (org.apache.kafka.streams.processor.api.Processor), TaskId (org.apache.kafka.streams.processor.TaskId), Utils.mkProperties (org.apache.kafka.common.utils.Utils.mkProperties), Properties (java.util.Properties), MockProcessorContext (org.apache.kafka.streams.processor.api.MockProcessorContext), ProcessorContext (org.apache.kafka.streams.processor.api.ProcessorContext), RecordMetadata (org.apache.kafka.streams.processor.api.RecordMetadata), CapturedForward (org.apache.kafka.streams.processor.api.MockProcessorContext.CapturedForward), Record (org.apache.kafka.streams.processor.api.Record), Collections.singletonList (java.util.Collections.singletonList), Arrays.asList (java.util.Arrays.asList), List (java.util.List), Test (org.junit.jupiter.api.Test)
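
MockProcessorContext can also back stateful processors in the same style of test. A minimal sketch following the pattern in the MockProcessorContext javadoc, assuming an in-memory store (the store name "counts" and the serdes are illustrative):

    final KeyValueStore<String, Long> store = Stores
        .keyValueStoreBuilder(Stores.inMemoryKeyValueStore("counts"), Serdes.String(), Serdes.Long())
        .withLoggingDisabled() // the mock provides no changelog to write to
        .build();
    final MockProcessorContext<String, Object> context = new MockProcessorContext<>(config);
    store.init(context.getStateStoreContext(), store);
    context.addStateStore(store);
    // a processor initialized with this context can now call context.getStateStore("counts")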

Aggregations

ProcessorContext (org.apache.kafka.streams.processor.api.ProcessorContext)6 Record (org.apache.kafka.streams.processor.api.Record)6 Processor (org.apache.kafka.streams.processor.api.Processor)5 Test (org.junit.Test)3 List (java.util.List)2 MockProcessorContext (org.apache.kafka.streams.processor.api.MockProcessorContext)2 RecordMetadata (org.apache.kafka.streams.processor.api.RecordMetadata)2 Test (org.junit.jupiter.api.Test)2 ByteArrayOutputStream (java.io.ByteArrayOutputStream)1 StandardCharsets (java.nio.charset.StandardCharsets)1 Arrays (java.util.Arrays)1 Arrays.asList (java.util.Arrays.asList)1 Collections.singletonList (java.util.Collections.singletonList)1 Properties (java.util.Properties)1 UnsupportedVersionException (org.apache.kafka.common.errors.UnsupportedVersionException)1 Sensor (org.apache.kafka.common.metrics.Sensor)1 StringSerializer (org.apache.kafka.common.serialization.StringSerializer)1 Bytes (org.apache.kafka.common.utils.Bytes)1 Utils.mkProperties (org.apache.kafka.common.utils.Utils.mkProperties)1 KeyValue (org.apache.kafka.streams.KeyValue)1