
Example 11 with Processor

Use of org.apache.kafka.streams.processor.Processor in project apache-kafka-on-k8s by banzaicloud.

From the class KStreamTransformValuesTest, method shouldNotAllowValueTransformerToCallInternalProcessorContextMethods.

@Test
public void shouldNotAllowValueTransformerToCallInternalProcessorContextMethods() {
    final BadValueTransformer badValueTransformer = new BadValueTransformer();
    final KStreamTransformValues<Integer, Integer, Integer> transformValue = new KStreamTransformValues<>(new InternalValueTransformerWithKeySupplier<Integer, Integer, Integer>() {

        @Override
        public InternalValueTransformerWithKey<Integer, Integer, Integer> get() {
            return new InternalValueTransformerWithKey<Integer, Integer, Integer>() {

                @Override
                public Integer punctuate(long timestamp) {
                    throw new StreamsException("ValueTransformerWithKey#punctuate should not be called.");
                }

                @Override
                public void init(final ProcessorContext context) {
                    badValueTransformer.init(context);
                }

                @Override
                public Integer transform(final Integer readOnlyKey, final Integer value) {
                    return badValueTransformer.transform(readOnlyKey, value);
                }

                @Override
                public void close() {
                    badValueTransformer.close();
                }
            };
        }
    });
    final Processor transformValueProcessor = transformValue.get();
    transformValueProcessor.init(null);
    try {
        transformValueProcessor.process(null, 0);
        fail("should not allow call to context.forward() within ValueTransformer");
    } catch (final StreamsException e) {
    // expected
    }
    try {
        transformValueProcessor.process(null, 1);
        fail("should not allow call to context.forward() within ValueTransformer");
    } catch (final StreamsException e) {
    // expected
    }
    try {
        transformValueProcessor.process(null, 2);
        fail("should not allow call to context.forward() within ValueTransformer");
    } catch (final StreamsException e) {
    // expected
    }
    try {
        transformValueProcessor.process(null, 3);
        fail("should not allow call to context.forward() within ValueTransformer");
    } catch (final StreamsException e) {
    // expected
    }
    try {
        transformValueProcessor.punctuate(0);
        fail("should not allow ValueTransformer#puntuate() to return not-null value");
    } catch (final StreamsException e) {
    // expected
    }
}
Also used : Processor(org.apache.kafka.streams.processor.Processor) StreamsException(org.apache.kafka.streams.errors.StreamsException) ProcessorContext(org.apache.kafka.streams.processor.ProcessorContext) Test(org.junit.Test)
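
The BadValueTransformer referenced above is not shown on this page. A minimal sketch of what such a transformer might look like, assuming it simply stores the ProcessorContext from init() and calls the disallowed context.forward() from transform() (the class shape and behavior are assumptions, not the upstream implementation):

import org.apache.kafka.streams.kstream.ValueTransformerWithKey;
import org.apache.kafka.streams.processor.ProcessorContext;

// Hypothetical sketch: a value transformer that misuses its ProcessorContext.
public class BadValueTransformer implements ValueTransformerWithKey<Integer, Integer, Integer> {

    private ProcessorContext context;

    @Override
    public void init(final ProcessorContext context) {
        this.context = context;
    }

    @Override
    public Integer transform(final Integer readOnlyKey, final Integer value) {
        // forwarding from a ValueTransformer is not allowed; the wrapping
        // processor is expected to surface this as a StreamsException
        context.forward(readOnlyKey, value);
        return value;
    }

    @Override
    public void close() {
    }
}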

Example 12 with Processor

Use of org.apache.kafka.streams.processor.Processor in project apache-kafka-on-k8s by banzaicloud.

From the class SimpleBenchmark, method createKafkaStreamsWithSink.

private KafkaStreams createKafkaStreamsWithSink(String topic, final CountDownLatch latch) {
    setStreamProperties("simple-benchmark-streams-with-sink");
    StreamsBuilder builder = new StreamsBuilder();
    KStream<Integer, byte[]> source = builder.stream(topic, Consumed.with(INTEGER_SERDE, BYTE_SERDE));
    source.to(INTEGER_SERDE, BYTE_SERDE, SINK_TOPIC);
    source.process(new ProcessorSupplier<Integer, byte[]>() {

        @Override
        public Processor<Integer, byte[]> get() {
            return new AbstractProcessor<Integer, byte[]>() {

                @Override
                public void init(ProcessorContext context) {
                }

                @Override
                public void process(Integer key, byte[] value) {
                    processedRecords.getAndIncrement();
                    processedBytes += value.length + Integer.SIZE;
                    if (processedRecords.get() == numRecords) {
                        latch.countDown();
                    }
                }

                @Override
                public void punctuate(long timestamp) {
                }

                @Override
                public void close() {
                }
            };
        }
    });
    return createKafkaStreamsWithExceptionHandler(builder, props);
}
Also used : StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) Processor(org.apache.kafka.streams.processor.Processor) AbstractProcessor(org.apache.kafka.streams.processor.AbstractProcessor) ProcessorContext(org.apache.kafka.streams.processor.ProcessorContext)
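
setStreamProperties and createKafkaStreamsWithExceptionHandler are SimpleBenchmark helpers that this snippet does not include. A rough sketch of what they plausibly do, assuming standard StreamsConfig properties and an uncaught-exception handler that shuts the instance down (the props and kafka fields and the exact handler behavior are assumptions):

// Hypothetical sketch of the benchmark helpers referenced above.
// Assumes imports of java.util.Properties, java.util.concurrent.TimeUnit,
// org.apache.kafka.streams.KafkaStreams, org.apache.kafka.streams.StreamsBuilder
// and org.apache.kafka.streams.StreamsConfig.
private void setStreamProperties(final String applicationId) {
    props = new Properties();
    props.put(StreamsConfig.APPLICATION_ID_CONFIG, applicationId);
    props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, kafka);  // broker list, assumed field
    props.put(StreamsConfig.NUM_STREAM_THREADS_CONFIG, 1);
}

private KafkaStreams createKafkaStreamsWithExceptionHandler(final StreamsBuilder builder, final Properties props) {
    final KafkaStreams streams = new KafkaStreams(builder.build(), props);
    streams.setUncaughtExceptionHandler(new Thread.UncaughtExceptionHandler() {
        @Override
        public void uncaughtException(final Thread t, final Throwable e) {
            // stop the benchmark instead of hanging when a stream thread dies
            System.out.println("FATAL: unexpected exception: " + e);
            streams.close(30, TimeUnit.SECONDS);
        }
    });
    return streams;
}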

Example 13 with Processor

Use of org.apache.kafka.streams.processor.Processor in project apache-kafka-on-k8s by banzaicloud.

From the class SimpleBenchmark, method createKafkaStreams.

private KafkaStreams createKafkaStreams(String topic, final CountDownLatch latch) {
    setStreamProperties("simple-benchmark-streams");
    StreamsBuilder builder = new StreamsBuilder();
    KStream<Integer, byte[]> source = builder.stream(topic, Consumed.with(INTEGER_SERDE, BYTE_SERDE));
    source.process(new ProcessorSupplier<Integer, byte[]>() {

        @Override
        public Processor<Integer, byte[]> get() {
            return new AbstractProcessor<Integer, byte[]>() {

                @Override
                public void init(ProcessorContext context) {
                }

                @Override
                public void process(Integer key, byte[] value) {
                    processedRecords.getAndIncrement();
                    processedBytes += value.length + Integer.SIZE;
                    if (processedRecords.get() == numRecords) {
                        latch.countDown();
                    }
                }

                @Override
                public void punctuate(long timestamp) {
                }

                @Override
                public void close() {
                }
            };
        }
    });
    return createKafkaStreamsWithExceptionHandler(builder, props);
}
Also used : StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) Processor(org.apache.kafka.streams.processor.Processor) AbstractProcessor(org.apache.kafka.streams.processor.AbstractProcessor) ProcessorContext(org.apache.kafka.streams.processor.ProcessorContext)
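
A hypothetical driver for the topology above, showing how the latch and the processedRecords and processedBytes counters might be used to time a run (the method name and the reporting format are assumptions, not part of the original benchmark):

// Hypothetical sketch of a benchmark driver for createKafkaStreams() above.
private void runConsumeBenchmark(final String topic) throws InterruptedException {
    final CountDownLatch latch = new CountDownLatch(1);
    final KafkaStreams streams = createKafkaStreams(topic, latch);
    final long start = System.currentTimeMillis();
    streams.start();
    // the embedded processor counts down once numRecords have been processed
    latch.await();
    final long durationMs = System.currentTimeMillis() - start;
    streams.close();
    System.out.println(String.format("Processed %d records (%d bytes) in %d ms",
            processedRecords.get(), processedBytes, durationMs));
}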

Aggregations

Processor (org.apache.kafka.streams.processor.Processor): 13
ProcessorContext (org.apache.kafka.streams.processor.ProcessorContext): 10
AbstractProcessor (org.apache.kafka.streams.processor.AbstractProcessor): 7
Test (org.junit.Test): 7
Properties (java.util.Properties): 4
AtomicInteger (java.util.concurrent.atomic.AtomicInteger): 4
StreamsBuilder (org.apache.kafka.streams.StreamsBuilder): 4
KeyValueStore (org.apache.kafka.streams.state.KeyValueStore): 4
KStreamBuilder (org.apache.kafka.streams.kstream.KStreamBuilder): 3
ProcessorSupplier (org.apache.kafka.streams.processor.ProcessorSupplier): 3
HashSet (java.util.HashSet): 2
StreamsException (org.apache.kafka.streams.errors.StreamsException): 2
Punctuator (org.apache.kafka.streams.processor.Punctuator): 2
ArrayList (java.util.ArrayList): 1
HashMap (java.util.HashMap): 1
Set (java.util.Set): 1
CountDownLatch (java.util.concurrent.CountDownLatch): 1
TopicPartition (org.apache.kafka.common.TopicPartition): 1
IntegerSerializer (org.apache.kafka.common.serialization.IntegerSerializer): 1
SystemTime (org.apache.kafka.common.utils.SystemTime): 1