Use of org.apache.kafka.test.MockProcessorSupplier in project apache-kafka-on-k8s by banzaicloud.
Class KStreamImplTest, method shouldSendDataThroughTopicUsingProduced.
@Test
public void shouldSendDataThroughTopicUsingProduced() {
    final StreamsBuilder builder = new StreamsBuilder();
    final String input = "topic";
    final KStream<String, String> stream = builder.stream(input, consumed);
    final MockProcessorSupplier<String, String> processorSupplier = new MockProcessorSupplier<>();
    stream.through("through-topic", Produced.with(stringSerde, stringSerde)).process(processorSupplier);
    driver.setUp(builder);
    driver.process(input, "a", "b");
    assertThat(processorSupplier.processed, equalTo(Collections.singletonList("a:b")));
}
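The assertion relies on MockProcessorSupplier collecting every forwarded record as a "key:value" string in its processed list. As a rough illustration of that contract, here is a minimal sketch of a recording supplier; it is a hypothetical stand-in, not the actual org.apache.kafka.test.MockProcessorSupplier, and it uses the classic Processor API that these tests are written against.

import java.util.ArrayList;
import java.util.List;
import org.apache.kafka.streams.processor.AbstractProcessor;
import org.apache.kafka.streams.processor.Processor;
import org.apache.kafka.streams.processor.ProcessorSupplier;

// Hypothetical stand-in for MockProcessorSupplier: records every key/value it
// sees as "key:value" so a test can assert on the collected strings.
public class RecordingProcessorSupplier<K, V> implements ProcessorSupplier<K, V> {

    public final List<String> processed = new ArrayList<>();

    @Override
    public Processor<K, V> get() {
        return new AbstractProcessor<K, V>() {
            @Override
            public void process(final K key, final V value) {
                processed.add(key + ":" + value);
            }
        };
    }
}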
Use of org.apache.kafka.test.MockProcessorSupplier in project apache-kafka-on-k8s by banzaicloud.
Class KStreamImplTest, method shouldSendDataToTopicUsingProduced.
@Test
public void shouldSendDataToTopicUsingProduced() {
    final StreamsBuilder builder = new StreamsBuilder();
    final String input = "topic";
    final KStream<String, String> stream = builder.stream(input, consumed);
    final MockProcessorSupplier<String, String> processorSupplier = new MockProcessorSupplier<>();
    stream.to("to-topic", Produced.with(stringSerde, stringSerde));
    builder.stream("to-topic", consumed).process(processorSupplier);
    driver.setUp(builder);
    driver.process(input, "e", "f");
    assertThat(processorSupplier.processed, equalTo(Collections.singletonList("e:f")));
}
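The driver in these snippets is an internal test harness from the Kafka source tree. Outside that tree, the same to()-then-read-back check can be expressed with the public TopologyTestDriver from kafka-streams-test-utils. The following sketch is not taken from the project above and assumes a client version (2.4 or later) that provides createInputTopic and createOutputTopic; the class name and property values are illustrative.

import java.util.Properties;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.TestInputTopic;
import org.apache.kafka.streams.TestOutputTopic;
import org.apache.kafka.streams.TopologyTestDriver;
import org.apache.kafka.streams.kstream.Consumed;
import org.apache.kafka.streams.kstream.Produced;

public class ToTopicSketch {
    public static void main(String[] args) {
        // Same topology shape as the test: read "topic", write it to "to-topic".
        final StreamsBuilder builder = new StreamsBuilder();
        builder.stream("topic", Consumed.with(Serdes.String(), Serdes.String()))
               .to("to-topic", Produced.with(Serdes.String(), Serdes.String()));

        final Properties props = new Properties();
        props.put(StreamsConfig.APPLICATION_ID_CONFIG, "to-topic-sketch");
        props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "dummy:1234");

        try (TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
            final TestInputTopic<String, String> input =
                driver.createInputTopic("topic", Serdes.String().serializer(), Serdes.String().serializer());
            final TestOutputTopic<String, String> output =
                driver.createOutputTopic("to-topic", Serdes.String().deserializer(), Serdes.String().deserializer());
            input.pipeInput("e", "f");
            // The record piped into "topic" comes back out of "to-topic" unchanged.
            System.out.println(output.readKeyValue()); // expected: KeyValue(e, f)
        }
    }
}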
Use of org.apache.kafka.test.MockProcessorSupplier in project apache-kafka-on-k8s by banzaicloud.
Class KStreamTransformValuesTest, method testTransform.
@Test
public void testTransform() {
    StreamsBuilder builder = new StreamsBuilder();
    ValueTransformerSupplier<Number, Integer> valueTransformerSupplier = new ValueTransformerSupplier<Number, Integer>() {
        @Override
        public ValueTransformer<Number, Integer> get() {
            return new ValueTransformer<Number, Integer>() {
                private int total = 0;

                @Override
                public void init(ProcessorContext context) {
                }

                @Override
                public Integer transform(Number value) {
                    total += value.intValue();
                    return total;
                }

                @Override
                public Integer punctuate(long timestamp) {
                    return null;
                }

                @Override
                public void close() {
                }
            };
        }
    };
    final int[] expectedKeys = { 1, 10, 100, 1000 };
    KStream<Integer, Integer> stream;
    MockProcessorSupplier<Integer, Integer> processor = new MockProcessorSupplier<>();
    stream = builder.stream(topicName, Consumed.with(intSerde, intSerde));
    stream.transformValues(valueTransformerSupplier).process(processor);
    driver.setUp(builder);
    for (int expectedKey : expectedKeys) {
        driver.process(topicName, expectedKey, expectedKey * 10);
    }
    String[] expected = { "1:10", "10:110", "100:1110", "1000:11110" };
    assertArrayEquals(expected, processor.processed.toArray());
}
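The expected strings follow from the transformer keeping a single running total across all records: the piped values are 10, 100, 1000 and 10000, so the emitted totals are 10, 110, 1110 and 11110. A dependency-free sketch of that arithmetic (plain Java, not part of the test) reproduces the "key:total" strings asserted above.

public class RunningTotalSketch {
    public static void main(String[] args) {
        final int[] keys = { 1, 10, 100, 1000 };
        int total = 0;
        for (final int key : keys) {
            total += key * 10;                     // the test pipes value = key * 10
            System.out.println(key + ":" + total); // 1:10, 10:110, 100:1110, 1000:11110
        }
    }
}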
Use of org.apache.kafka.test.MockProcessorSupplier in project apache-kafka-on-k8s by banzaicloud.
Class KStreamWindowAggregateTest, method testAggBasic.
@Test
public void testAggBasic() {
    final StreamsBuilder builder = new StreamsBuilder();
    String topic1 = "topic1";
    KStream<String, String> stream1 = builder.stream(topic1, Consumed.with(strSerde, strSerde));
    KTable<Windowed<String>, String> table2 = stream1
        .groupByKey(Serialized.with(strSerde, strSerde))
        .aggregate(MockInitializer.STRING_INIT, MockAggregator.TOSTRING_ADDER, TimeWindows.of(10).advanceBy(5), strSerde, "topic1-Canonized");
    MockProcessorSupplier<Windowed<String>, String> proc2 = new MockProcessorSupplier<>();
    table2.toStream().process(proc2);
    driver.setUp(builder, stateDir);
    setRecordContext(0, topic1);
    driver.process(topic1, "A", "1");
    driver.flushState();
    setRecordContext(1, topic1);
    driver.process(topic1, "B", "2");
    driver.flushState();
    setRecordContext(2, topic1);
    driver.process(topic1, "C", "3");
    driver.flushState();
    setRecordContext(3, topic1);
    driver.process(topic1, "D", "4");
    driver.flushState();
    setRecordContext(4, topic1);
    driver.process(topic1, "A", "1");
    driver.flushState();
    setRecordContext(5, topic1);
    driver.process(topic1, "A", "1");
    driver.flushState();
    setRecordContext(6, topic1);
    driver.process(topic1, "B", "2");
    driver.flushState();
    setRecordContext(7, topic1);
    driver.process(topic1, "D", "4");
    driver.flushState();
    setRecordContext(8, topic1);
    driver.process(topic1, "B", "2");
    driver.flushState();
    setRecordContext(9, topic1);
    driver.process(topic1, "C", "3");
    driver.flushState();
    setRecordContext(10, topic1);
    driver.process(topic1, "A", "1");
    driver.flushState();
    setRecordContext(11, topic1);
    driver.process(topic1, "B", "2");
    driver.flushState();
    setRecordContext(12, topic1);
    driver.flushState();
    driver.process(topic1, "D", "4");
    driver.flushState();
    setRecordContext(13, topic1);
    driver.process(topic1, "B", "2");
    driver.flushState();
    setRecordContext(14, topic1);
    driver.process(topic1, "C", "3");
    driver.flushState();
    assertEquals(
        Utils.mkList(
            "[A@0/10]:0+1", "[B@0/10]:0+2", "[C@0/10]:0+3", "[D@0/10]:0+4",
            "[A@0/10]:0+1+1", "[A@0/10]:0+1+1+1", "[A@5/15]:0+1",
            "[B@0/10]:0+2+2", "[B@5/15]:0+2",
            "[D@0/10]:0+4+4", "[D@5/15]:0+4",
            "[B@0/10]:0+2+2+2", "[B@5/15]:0+2+2",
            "[C@0/10]:0+3+3", "[C@5/15]:0+3",
            "[A@5/15]:0+1+1", "[A@10/20]:0+1",
            "[B@5/15]:0+2+2+2", "[B@10/20]:0+2",
            "[D@5/15]:0+4+4", "[D@10/20]:0+4",
            "[B@5/15]:0+2+2+2+2", "[B@10/20]:0+2+2",
            "[C@5/15]:0+3+3", "[C@10/20]:0+3"),
        proc2.processed);
}
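TimeWindows.of(10).advanceBy(5) creates hopping windows of size 10 ms that start every 5 ms, so most timestamps fall into two overlapping windows and each key appears under several "[key@start/end]" entries in the expected list. The sketch below is plain Java mirroring the hopping-window assignment rule (not the TimeWindows implementation itself) and prints which windows contain each timestamp used by the test; a record for "A" at timestamp 7, for example, updates both the [0,10) and [5,15) windows.

public class HoppingWindowSketch {
    public static void main(String[] args) {
        final long size = 10L;
        final long advance = 5L;
        for (long timestamp = 0; timestamp <= 14; timestamp++) {
            final StringBuilder windows = new StringBuilder();
            // Earliest window start that can still contain this timestamp (clamped at 0).
            long start = Math.max(0L, timestamp - size + advance) / advance * advance;
            while (start <= timestamp) {
                windows.append('[').append(start).append('/').append(start + size).append("] ");
                start += advance;
            }
            System.out.println("timestamp " + timestamp + " -> " + windows);
        }
    }
}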
Use of org.apache.kafka.test.MockProcessorSupplier in project apache-kafka-on-k8s by banzaicloud.
Class InternalTopologyBuilderTest, method shouldSortProcessorNodesCorrectly.
@Test
public void shouldSortProcessorNodesCorrectly() throws Exception {
    builder.addSource(null, "source1", null, null, null, "topic1");
    builder.addSource(null, "source2", null, null, null, "topic2");
    builder.addProcessor("processor1", new MockProcessorSupplier(), "source1");
    builder.addProcessor("processor2", new MockProcessorSupplier(), "source1", "source2");
    builder.addProcessor("processor3", new MockProcessorSupplier(), "processor2");
    builder.addSink("sink1", "topic2", null, null, null, "processor1", "processor3");
    assertEquals(1, builder.describe().subtopologies().size());
    final Iterator<TopologyDescription.Node> iterator =
        ((InternalTopologyBuilder.Subtopology) builder.describe().subtopologies().iterator().next()).nodesInOrder();
    assertTrue(iterator.hasNext());
    InternalTopologyBuilder.AbstractNode node = (InternalTopologyBuilder.AbstractNode) iterator.next();
    assertTrue(node.name.equals("source1"));
    assertEquals(6, node.size);
    assertTrue(iterator.hasNext());
    node = (InternalTopologyBuilder.AbstractNode) iterator.next();
    assertTrue(node.name.equals("source2"));
    assertEquals(4, node.size);
    assertTrue(iterator.hasNext());
    node = (InternalTopologyBuilder.AbstractNode) iterator.next();
    assertTrue(node.name.equals("processor2"));
    assertEquals(3, node.size);
    assertTrue(iterator.hasNext());
    node = (InternalTopologyBuilder.AbstractNode) iterator.next();
    assertTrue(node.name.equals("processor1"));
    assertEquals(2, node.size);
    assertTrue(iterator.hasNext());
    node = (InternalTopologyBuilder.AbstractNode) iterator.next();
    assertTrue(node.name.equals("processor3"));
    assertEquals(2, node.size);
    assertTrue(iterator.hasNext());
    node = (InternalTopologyBuilder.AbstractNode) iterator.next();
    assertTrue(node.name.equals("sink1"));
    assertEquals(1, node.size);
}
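InternalTopologyBuilder is an internal class; the same source -> processor -> sink graph can be declared through the public Topology API and inspected with describe(). The sketch below is an illustration, not taken from the test: it reuses the RecordingProcessorSupplier sketched after the first example in place of MockProcessorSupplier and assumes a Kafka Streams version whose addProcessor accepts the classic ProcessorSupplier, as in the project these snippets come from.

import org.apache.kafka.streams.Topology;

public class TopologyDescribeSketch {
    public static void main(String[] args) {
        final Topology topology = new Topology();
        // Same node names and topics as the test above.
        topology.addSource("source1", "topic1");
        topology.addSource("source2", "topic2");
        topology.addProcessor("processor1", new RecordingProcessorSupplier<Object, Object>(), "source1");
        topology.addProcessor("processor2", new RecordingProcessorSupplier<Object, Object>(), "source1", "source2");
        topology.addProcessor("processor3", new RecordingProcessorSupplier<Object, Object>(), "processor2");
        topology.addSink("sink1", "topic2", "processor1", "processor3");
        // Prints the single sub-topology and how each node connects to its parents and children.
        System.out.println(topology.describe());
    }
}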