Use of org.apache.kafka.streams.processor.AbstractProcessor in project apache-kafka-on-k8s by banzaicloud.
From the class MockProcessorContextTest, method shouldCaptureOutputRecords:
@Test
public void shouldCaptureOutputRecords() {
    final AbstractProcessor<String, Long> processor = new AbstractProcessor<String, Long>() {
        @Override
        public void process(final String key, final Long value) {
            context().forward(key + value, key.length() + value);
        }
    };
    final MockProcessorContext context = new MockProcessorContext();
    processor.init(context);
    processor.process("foo", 5L);
    processor.process("barbaz", 50L);
    final Iterator<CapturedForward> forwarded = context.forwarded().iterator();
    assertEquals(new KeyValue<>("foo5", 8L), forwarded.next().keyValue());
    assertEquals(new KeyValue<>("barbaz50", 56L), forwarded.next().keyValue());
    assertFalse(forwarded.hasNext());
    context.resetForwards();
    assertEquals(0, context.forwarded().size());
}
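The MockProcessorContext stands in for a real topology here: every context().forward(...) call is captured instead of being sent to downstream nodes. For comparison, a minimal sketch of how the same processor could be wired between a source and a sink in a real Topology is shown below; the topic names, node names, and the buildConcatTopology helper are illustrative assumptions, not part of the test above.

// Sketch only: topic names, node names, and this helper method are assumptions.
private Topology buildConcatTopology() {
    final Topology topology = new Topology();
    topology.addSource("source-node",
            Serdes.String().deserializer(), Serdes.Long().deserializer(), "input-topic");
    topology.addProcessor("concat-processor", new ProcessorSupplier<String, Long>() {
        @Override
        public Processor<String, Long> get() {
            return new AbstractProcessor<String, Long>() {
                @Override
                public void process(final String key, final Long value) {
                    // same logic as the test: forward a derived key/value pair downstream
                    context().forward(key + value, key.length() + value);
                }
            };
        }
    }, "source-node");
    topology.addSink("sink-node", "output-topic",
            Serdes.String().serializer(), Serdes.Long().serializer(), "concat-processor");
    return topology;
}

In a real run the forwarded records would land on the sink topic; with MockProcessorContext they are inspected through context.forwarded() as in the test above.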
Use of org.apache.kafka.streams.processor.AbstractProcessor in project apache-kafka-on-k8s by banzaicloud.
From the class MockProcessorContextTest, method shouldStoreAndReturnStateStores:
@Test
public void shouldStoreAndReturnStateStores() {
    final AbstractProcessor<String, Long> processor = new AbstractProcessor<String, Long>() {
        @Override
        public void process(final String key, final Long value) {
            // noinspection unchecked
            final KeyValueStore<String, Long> stateStore = (KeyValueStore<String, Long>) context().getStateStore("my-state");
            stateStore.put(key, (stateStore.get(key) == null ? 0 : stateStore.get(key)) + value);
            stateStore.put("all", (stateStore.get("all") == null ? 0 : stateStore.get("all")) + value);
        }
    };
    final MockProcessorContext context = new MockProcessorContext();
    final KeyValueStore<String, Long> store = new InMemoryKeyValueStore<>("my-state", Serdes.String(), Serdes.Long());
    context.register(store, false, null);
    store.init(context, store);
    processor.init(context);
    processor.process("foo", 5L);
    processor.process("bar", 50L);
    assertEquals(5L, (long) store.get("foo"));
    assertEquals(50L, (long) store.get("bar"));
    assertEquals(55L, (long) store.get("all"));
}
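In this test, context.register(store, false, null) is what makes context().getStateStore("my-state") resolve, since there is no topology to attach the store to, and store.init(context, store) initializes the store against the mock context. For comparison, a hedged sketch of how an equivalent store might be attached in a real topology follows; the node names, topic name, and the buildSumTopology helper are assumptions.

// Sketch only: attaches an in-memory store equivalent to the one registered above.
// Node names, the topic name, and this helper method are assumptions.
private Topology buildSumTopology(final ProcessorSupplier<String, Long> supplier) {
    final StoreBuilder<KeyValueStore<String, Long>> storeBuilder =
            Stores.keyValueStoreBuilder(
                    Stores.inMemoryKeyValueStore("my-state"),
                    Serdes.String(),
                    Serdes.Long());
    final Topology topology = new Topology();
    topology.addSource("source-node",
            Serdes.String().deserializer(), Serdes.Long().deserializer(), "input-topic");
    topology.addProcessor("sum-processor", supplier, "source-node");
    // connect the store to the processor so getStateStore("my-state") works at runtime
    topology.addStateStore(storeBuilder, "sum-processor");
    return topology;
}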
Use of org.apache.kafka.streams.processor.AbstractProcessor in project apache-kafka-on-k8s by banzaicloud.
From the class MockProcessorContextTest, method shouldCaptureOutputRecordsUsingTo:
@Test
public void shouldCaptureOutputRecordsUsingTo() {
    final AbstractProcessor<String, Long> processor = new AbstractProcessor<String, Long>() {
        @Override
        public void process(final String key, final Long value) {
            context().forward(key + value, key.length() + value, To.all());
        }
    };
    final MockProcessorContext context = new MockProcessorContext();
    processor.init(context);
    processor.process("foo", 5L);
    processor.process("barbaz", 50L);
    final Iterator<CapturedForward> forwarded = context.forwarded().iterator();
    assertEquals(new KeyValue<>("foo5", 8L), forwarded.next().keyValue());
    assertEquals(new KeyValue<>("barbaz50", 56L), forwarded.next().keyValue());
    assertFalse(forwarded.hasNext());
    context.resetForwards();
    assertEquals(0, context.forwarded().size());
}
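To.all() forwards to every downstream child, so the captured output is identical to the plain forward(key, value) variant above. As an aside, To can also target a single named child; a one-line sketch, where the child node name "audit-sink" is an assumption:

// Sketch only: forward to one assumed child node instead of all children.
context().forward(key + value, key.length() + value, To.child("audit-sink"));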
Use of org.apache.kafka.streams.processor.AbstractProcessor in project apache-kafka-on-k8s by banzaicloud.
From the class SimpleBenchmark, method createKafkaStreamsWithSink:
private KafkaStreams createKafkaStreamsWithSink(String topic, final CountDownLatch latch) {
    setStreamProperties("simple-benchmark-streams-with-sink");
    StreamsBuilder builder = new StreamsBuilder();
    KStream<Integer, byte[]> source = builder.stream(topic, Consumed.with(INTEGER_SERDE, BYTE_SERDE));
    source.to(INTEGER_SERDE, BYTE_SERDE, SINK_TOPIC);
    source.process(new ProcessorSupplier<Integer, byte[]>() {
        @Override
        public Processor<Integer, byte[]> get() {
            return new AbstractProcessor<Integer, byte[]>() {
                @Override
                public void init(ProcessorContext context) {
                }

                @Override
                public void process(Integer key, byte[] value) {
                    processedRecords.getAndIncrement();
                    processedBytes += value.length + Integer.SIZE;
                    if (processedRecords.get() == numRecords) {
                        latch.countDown();
                    }
                }

                @Override
                public void punctuate(long timestamp) {
                }

                @Override
                public void close() {
                }
            };
        }
    });
    return createKafkaStreamsWithExceptionHandler(builder, props);
}
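The anonymous processor here only counts records and bytes; the surrounding benchmark is expected to block on the CountDownLatch, which is counted down once numRecords records have been processed. A hedged sketch of the calling side, where the topic variable, the timeout, and the surrounding names are assumptions:

// Sketch only of the calling side; the topic, timeout, and variable names are assumptions.
final CountDownLatch latch = new CountDownLatch(1);
final KafkaStreams streams = createKafkaStreamsWithSink(sourceTopic, latch);
final long startMs = System.currentTimeMillis();
streams.start();
try {
    // block until the processor has seen numRecords records (or the timeout expires)
    latch.await(5, TimeUnit.MINUTES);
} catch (final InterruptedException e) {
    Thread.currentThread().interrupt();
}
final long durationMs = System.currentTimeMillis() - startMs;
streams.close();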
Use of org.apache.kafka.streams.processor.AbstractProcessor in project apache-kafka-on-k8s by banzaicloud.
From the class SimpleBenchmark, method createKafkaStreams (the same counting processor as createKafkaStreamsWithSink above, but without forwarding the stream to SINK_TOPIC):
private KafkaStreams createKafkaStreams(String topic, final CountDownLatch latch) {
    setStreamProperties("simple-benchmark-streams");
    StreamsBuilder builder = new StreamsBuilder();
    KStream<Integer, byte[]> source = builder.stream(topic, Consumed.with(INTEGER_SERDE, BYTE_SERDE));
    source.process(new ProcessorSupplier<Integer, byte[]>() {
        @Override
        public Processor<Integer, byte[]> get() {
            return new AbstractProcessor<Integer, byte[]>() {
                @Override
                public void init(ProcessorContext context) {
                }

                @Override
                public void process(Integer key, byte[] value) {
                    processedRecords.getAndIncrement();
                    processedBytes += value.length + Integer.SIZE;
                    if (processedRecords.get() == numRecords) {
                        latch.countDown();
                    }
                }

                @Override
                public void punctuate(long timestamp) {
                }

                @Override
                public void close() {
                }
            };
        }
    });
    return createKafkaStreamsWithExceptionHandler(builder, props);
}