Example usage of org.apache.kafka.streams.StreamsBuilder in the project apache-kafka-on-k8s by banzaicloud:
class KStreamImplTest, method testNumProcesses.
// Builds a topology that exercises most KStream operations (filter, filterNot,
// mapValues, flatMapValues, branch, join, to, through, process) and asserts the
// total number of processor nodes the builder generates for it.
@Test
public void testNumProcesses() {
    final StreamsBuilder builder = new StreamsBuilder();
    KStream<String, String> source1 = builder.stream(Arrays.asList("topic-1", "topic-2"), consumed);
    KStream<String, String> source2 = builder.stream(Arrays.asList("topic-3", "topic-4"), consumed);
    // Pass-through filter chain: keeps every record but adds two processor nodes.
    KStream<String, String> stream1 = source1.filter(new Predicate<String, String>() {
        @Override
        public boolean test(String key, String value) {
            return true;
        }
    }).filterNot(new Predicate<String, String>() {
        @Override
        public boolean test(String key, String value) {
            return false;
        }
    });
    // Integer.valueOf avoids the deprecated Integer(String) boxing constructor.
    KStream<String, Integer> stream2 = stream1.mapValues(new ValueMapper<String, Integer>() {
        @Override
        public Integer apply(String value) {
            return Integer.valueOf(value);
        }
    });
    KStream<String, Integer> stream3 = source2.flatMapValues(new ValueMapper<String, Iterable<Integer>>() {
        @Override
        public Iterable<Integer> apply(String value) {
            return Collections.singletonList(Integer.valueOf(value));
        }
    });
    // Branch each mapped stream into even-valued and catch-all sub-streams.
    KStream<String, Integer>[] streams2 = stream2.branch(new Predicate<String, Integer>() {
        @Override
        public boolean test(String key, Integer value) {
            return (value % 2) == 0;
        }
    }, new Predicate<String, Integer>() {
        @Override
        public boolean test(String key, Integer value) {
            return true;
        }
    });
    KStream<String, Integer>[] streams3 = stream3.branch(new Predicate<String, Integer>() {
        @Override
        public boolean test(String key, Integer value) {
            return (value % 2) == 0;
        }
    }, new Predicate<String, Integer>() {
        @Override
        public boolean test(String key, Integer value) {
            return true;
        }
    });
    final int anyWindowSize = 1;
    final Joined<String, Integer, Integer> joined = Joined.with(stringSerde, intSerde, intSerde);
    // Two windowed joins between the corresponding branches.
    KStream<String, Integer> stream4 = streams2[0].join(streams3[0], new ValueJoiner<Integer, Integer, Integer>() {
        @Override
        public Integer apply(Integer value1, Integer value2) {
            return value1 + value2;
        }
    }, JoinWindows.of(anyWindowSize), joined);
    streams2[1].join(streams3[1], new ValueJoiner<Integer, Integer, Integer>() {
        @Override
        public Integer apply(Integer value1, Integer value2) {
            return value1 + value2;
        }
    }, JoinWindows.of(anyWindowSize), joined);
    stream4.to("topic-5");
    streams2[1].through("topic-6").process(new MockProcessorSupplier<String, Integer>());
    // Sum of processor nodes contributed by each operation above.
    assertEquals(// sources
        2 + // stream1
        2 + // stream2
        1 + // stream3
        1 + 1 + // streams2
        2 + 1 + // streams3
        2 + // stream2-stream3 joins
        5 * 2 + // to
        1 + // through
        2 + // process
        1, StreamsBuilderTest.internalTopologyBuilder(builder).setApplicationId("X").build(null).processors().size());
}
Example usage of org.apache.kafka.streams.StreamsBuilder in the project apache-kafka-on-k8s by banzaicloud:
class KStreamImplTest, method testToWithNullValueSerdeDoesntNPE.
// Passing a null value serde to to() must fall back to defaults rather than
// throw a NullPointerException.
@Test
public void testToWithNullValueSerdeDoesntNPE() {
    final StreamsBuilder topologyBuilder = new StreamsBuilder();
    final Consumed<String, String> stringConsumed = Consumed.with(stringSerde, stringSerde);
    topologyBuilder.stream(Collections.singleton("input"), stringConsumed).to(stringSerde, null, "output");
}
Example usage of org.apache.kafka.streams.StreamsBuilder in the project apache-kafka-on-k8s by banzaicloud:
class KStreamImplTest, method shouldSendDataToTopicUsingProduced.
// Writes a record through to("to-topic", Produced...) and reads it back via a
// second stream, asserting the record survives the round trip unchanged.
@Test
public void shouldSendDataToTopicUsingProduced() {
    final StreamsBuilder topology = new StreamsBuilder();
    final String inputTopic = "topic";
    final MockProcessorSupplier<String, String> supplier = new MockProcessorSupplier<>();
    topology.stream(inputTopic, consumed).to("to-topic", Produced.with(stringSerde, stringSerde));
    topology.stream("to-topic", consumed).process(supplier);
    driver.setUp(topology);
    driver.process(inputTopic, "e", "f");
    assertThat(supplier.processed, equalTo(Collections.singletonList("e:f")));
}
Example usage of org.apache.kafka.streams.StreamsBuilder in the project apache-kafka-on-k8s by banzaicloud:
class KStreamImplTest, method before.
// Creates a fresh StreamsBuilder and a stream over topic "source" before each test.
@Before
public void before() {
builder = new StreamsBuilder();
testStream = builder.stream("source");
}
Example usage of org.apache.kafka.streams.StreamsBuilder in the project apache-kafka-on-k8s by banzaicloud:
class KStreamKStreamJoinTest, method testAsymetricWindowingAfter.
// Verifies the asymmetric join window JoinWindows.of(0).after(100): a stream1
// record at time t joins only stream2 records with timestamps in [t, t + 100].
@Test
public void testAsymetricWindowingAfter() {
long time = 1000L;
StreamsBuilder builder = new StreamsBuilder();
final int[] expectedKeys = new int[] { 0, 1, 2, 3 };
KStream<Integer, String> stream1;
KStream<Integer, String> stream2;
KStream<Integer, String> joined;
MockProcessorSupplier<Integer, String> processor;
processor = new MockProcessorSupplier<>();
stream1 = builder.stream(topic1, consumed);
stream2 = builder.stream(topic2, consumed);
joined = stream1.join(stream2, MockValueJoiner.TOSTRING_JOINER, JoinWindows.of(0).after(100), Joined.with(intSerde, stringSerde, stringSerde));
joined.process(processor);
// Both input topics must land in a single copartition group for the join.
Collection<Set<String>> copartitionGroups = StreamsBuilderTest.getCopartitionedGroups(builder);
assertEquals(1, copartitionGroups.size());
assertEquals(new HashSet<>(Arrays.asList(topic1, topic2)), copartitionGroups.iterator().next());
driver.setUp(builder, stateDir);
// Seed stream1 with keys 0..3 at timestamps 1000, 1001, 1002, 1003.
for (int i = 0; i < expectedKeys.length; i++) {
setRecordContext(time + i, topic1);
driver.process(topic1, expectedKeys[i], "X" + expectedKeys[i]);
}
// No stream2 records yet, so no join output.
processor.checkAndClearProcessResult();
// ts 999: before every stream1 record's window start -> still no matches.
time = 1000L - 1L;
setRecordContext(time, topic2);
for (int expectedKey : expectedKeys) {
driver.process(topic2, expectedKey, "YY" + expectedKey);
}
processor.checkAndClearProcessResult();
// ts 1000: only key 0 (stream1 ts 1000, window [1000, 1100]) is in range.
setRecordContext(++time, topic2);
for (int expectedKey : expectedKeys) {
driver.process(topic2, expectedKey, "YY" + expectedKey);
}
processor.checkAndClearProcessResult("0:X0+YY0");
// ts 1001: keys 0 and 1 are in range.
setRecordContext(++time, topic2);
for (int expectedKey : expectedKeys) {
driver.process(topic2, expectedKey, "YY" + expectedKey);
}
processor.checkAndClearProcessResult("0:X0+YY0", "1:X1+YY1");
// ts 1002: keys 0, 1 and 2 are in range.
setRecordContext(++time, topic2);
for (int expectedKey : expectedKeys) {
driver.process(topic2, expectedKey, "YY" + expectedKey);
}
processor.checkAndClearProcessResult("0:X0+YY0", "1:X1+YY1", "2:X2+YY2");
// ts 1003: all four keys are in range.
setRecordContext(++time, topic2);
for (int expectedKey : expectedKeys) {
driver.process(topic2, expectedKey, "YY" + expectedKey);
}
processor.checkAndClearProcessResult("0:X0+YY0", "1:X1+YY1", "2:X2+YY2", "3:X3+YY3");
// ts 1100: still within every window (upper bound 1000 + 100 is inclusive).
time = 1000 + 100L;
setRecordContext(time, topic2);
for (int expectedKey : expectedKeys) {
driver.process(topic2, expectedKey, "YY" + expectedKey);
}
processor.checkAndClearProcessResult("0:X0+YY0", "1:X1+YY1", "2:X2+YY2", "3:X3+YY3");
// ts 1101: key 0's window has closed.
setRecordContext(++time, topic2);
for (int expectedKey : expectedKeys) {
driver.process(topic2, expectedKey, "YY" + expectedKey);
}
processor.checkAndClearProcessResult("1:X1+YY1", "2:X2+YY2", "3:X3+YY3");
// ts 1102: keys 0 and 1 have expired.
setRecordContext(++time, topic2);
for (int expectedKey : expectedKeys) {
driver.process(topic2, expectedKey, "YY" + expectedKey);
}
processor.checkAndClearProcessResult("2:X2+YY2", "3:X3+YY3");
// ts 1103: only key 3 remains in range.
setRecordContext(++time, topic2);
for (int expectedKey : expectedKeys) {
driver.process(topic2, expectedKey, "YY" + expectedKey);
}
processor.checkAndClearProcessResult("3:X3+YY3");
// ts 1104: every window has closed -> no output.
setRecordContext(++time, topic2);
for (int expectedKey : expectedKeys) {
driver.process(topic2, expectedKey, "YY" + expectedKey);
}
processor.checkAndClearProcessResult();
}
Aggregations