Use of org.apache.kafka.streams.KeyValueTimestamp in project kafka by apache.
Class KStreamImplTest, method shouldMergeMultipleStreams.
@Test
public void shouldMergeMultipleStreams() {
    final String topic1 = "topic-1";
    final String topic2 = "topic-2";
    final String topic3 = "topic-3";
    final String topic4 = "topic-4";

    final KStream<String, String> source1 = builder.stream(topic1);
    final KStream<String, String> source2 = builder.stream(topic2);
    final KStream<String, String> source3 = builder.stream(topic3);
    final KStream<String, String> source4 = builder.stream(topic4);
    final KStream<String, String> merged = source1.merge(source2).merge(source3).merge(source4);

    merged.process(processorSupplier);

    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        final TestInputTopic<String, String> inputTopic1 =
            driver.createInputTopic(topic1, new StringSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
        final TestInputTopic<String, String> inputTopic2 =
            driver.createInputTopic(topic2, new StringSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
        final TestInputTopic<String, String> inputTopic3 =
            driver.createInputTopic(topic3, new StringSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
        final TestInputTopic<String, String> inputTopic4 =
            driver.createInputTopic(topic4, new StringSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);

        inputTopic1.pipeInput("A", "aa", 1L);
        inputTopic2.pipeInput("B", "bb", 9L);
        inputTopic3.pipeInput("C", "cc", 2L);
        inputTopic4.pipeInput("D", "dd", 8L);
        inputTopic4.pipeInput("E", "ee", 3L);
        inputTopic3.pipeInput("F", "ff", 7L);
        inputTopic2.pipeInput("G", "gg", 4L);
        inputTopic1.pipeInput("H", "hh", 6L);
    }

    assertEquals(
        asList(
            new KeyValueTimestamp<>("A", "aa", 1),
            new KeyValueTimestamp<>("B", "bb", 9),
            new KeyValueTimestamp<>("C", "cc", 2),
            new KeyValueTimestamp<>("D", "dd", 8),
            new KeyValueTimestamp<>("E", "ee", 3),
            new KeyValueTimestamp<>("F", "ff", 7),
            new KeyValueTimestamp<>("G", "gg", 4),
            new KeyValueTimestamp<>("H", "hh", 6)),
        processorSupplier.theCapturedProcessor().processed());
}
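The assertion works because every record keeps the timestamp it was piped in with, and the merged stream simply interleaves the inputs. The same idea can be checked without the mock processor by reading the merged stream back through a TestOutputTopic. Below is a minimal, self-contained sketch, not taken from the test: the two-topic topology, the "merged-output" sink topic, and the config values are all assumptions made for illustration.

import java.time.Duration;
import java.time.Instant;
import java.util.Properties;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.apache.kafka.streams.KeyValueTimestamp;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.TestInputTopic;
import org.apache.kafka.streams.TestOutputTopic;
import org.apache.kafka.streams.TopologyTestDriver;
import org.apache.kafka.streams.kstream.Consumed;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.Produced;
import org.apache.kafka.streams.test.TestRecord;

public class MergeSketch {
    public static void main(final String[] args) {
        final StreamsBuilder builder = new StreamsBuilder();
        final KStream<String, String> left = builder.stream("left", Consumed.with(Serdes.String(), Serdes.String()));
        final KStream<String, String> right = builder.stream("right", Consumed.with(Serdes.String(), Serdes.String()));
        // merge() interleaves both streams without changing keys, values, or timestamps.
        left.merge(right).to("merged-output", Produced.with(Serdes.String(), Serdes.String()));

        final Properties props = new Properties();
        props.put(StreamsConfig.APPLICATION_ID_CONFIG, "merge-sketch");     // assumed config
        props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "dummy:1234");    // never contacted by the test driver

        try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
            final TestInputTopic<String, String> leftIn =
                driver.createInputTopic("left", new StringSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
            final TestInputTopic<String, String> rightIn =
                driver.createInputTopic("right", new StringSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
            final TestOutputTopic<String, String> out =
                driver.createOutputTopic("merged-output", new StringDeserializer(), new StringDeserializer());

            leftIn.pipeInput("A", "aa", 1L);
            rightIn.pipeInput("B", "bb", 9L);

            // Each output record still carries the piped-in timestamp, which is what the
            // KeyValueTimestamp assertions in the test above rely on.
            for (final TestRecord<String, String> record : out.readRecordsToList()) {
                System.out.println(new KeyValueTimestamp<>(record.key(), record.value(), record.timestamp()));
            }
        }
    }
}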
Use of org.apache.kafka.streams.KeyValueTimestamp in project kafka by apache.
Class KStreamImplTest, method shouldProcessFromSourcesThatMatchMultiplePattern.
@Test
public void shouldProcessFromSourcesThatMatchMultiplePattern() {
    final String topic3 = "topic-without-pattern";

    final KStream<String, String> pattern2Source1 = builder.stream(Pattern.compile("topic-\\d"));
    final KStream<String, String> pattern2Source2 = builder.stream(Pattern.compile("topic-[A-Z]"));
    final KStream<String, String> source3 = builder.stream(topic3);
    final KStream<String, String> merged = pattern2Source1.merge(pattern2Source2).merge(source3);

    merged.process(processorSupplier);

    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        final TestInputTopic<String, String> inputTopic3 =
            driver.createInputTopic("topic-3", new StringSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
        final TestInputTopic<String, String> inputTopic4 =
            driver.createInputTopic("topic-4", new StringSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
        final TestInputTopic<String, String> inputTopicA =
            driver.createInputTopic("topic-A", new StringSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
        final TestInputTopic<String, String> inputTopicZ =
            driver.createInputTopic("topic-Z", new StringSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
        final TestInputTopic<String, String> inputTopic =
            driver.createInputTopic(topic3, new StringSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);

        inputTopic3.pipeInput("A", "aa", 1L);
        inputTopic4.pipeInput("B", "bb", 5L);
        inputTopicA.pipeInput("C", "cc", 10L);
        inputTopicZ.pipeInput("D", "dd", 8L);
        inputTopic.pipeInput("E", "ee", 3L);
    }

    assertEquals(
        asList(
            new KeyValueTimestamp<>("A", "aa", 1),
            new KeyValueTimestamp<>("B", "bb", 5),
            new KeyValueTimestamp<>("C", "cc", 10),
            new KeyValueTimestamp<>("D", "dd", 8),
            new KeyValueTimestamp<>("E", "ee", 3)),
        processorSupplier.theCapturedProcessor().processed());
}
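Both tests above compare entire lists of KeyValueTimestamp objects with assertEquals. That works because KeyValueTimestamp is a small value class whose equality covers key, value, and timestamp. A standalone sketch of those accessors, with made-up example values:

import org.apache.kafka.streams.KeyValueTimestamp;

public class KeyValueTimestampSketch {
    public static void main(final String[] args) {
        final KeyValueTimestamp<String, String> expected = new KeyValueTimestamp<>("A", "aa", 1L);
        final KeyValueTimestamp<String, String> actual = new KeyValueTimestamp<>("A", "aa", 1L);

        // Value-based equality over key, value, and timestamp is what the list assertions depend on.
        System.out.println(expected.equals(actual));   // true
        System.out.println(expected.key());            // A
        System.out.println(expected.value());          // aa
        System.out.println(expected.timestamp());      // 1
    }
}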
Use of org.apache.kafka.streams.KeyValueTimestamp in project kafka by apache.
Class KStreamFlatMapTest, method testFlatMap.
@Test
public void testFlatMap() {
    final StreamsBuilder builder = new StreamsBuilder();
    final String topicName = "topic";

    final KeyValueMapper<Number, Object, Iterable<KeyValue<String, String>>> mapper = (key, value) -> {
        final ArrayList<KeyValue<String, String>> result = new ArrayList<>();
        for (int i = 0; i < key.intValue(); i++) {
            result.add(KeyValue.pair(Integer.toString(key.intValue() * 10 + i), value.toString()));
        }
        return result;
    };

    final int[] expectedKeys = { 0, 1, 2, 3 };

    final KStream<Integer, String> stream;
    final MockApiProcessorSupplier<String, String, Void, Void> supplier = new MockApiProcessorSupplier<>();
    stream = builder.stream(topicName, Consumed.with(Serdes.Integer(), Serdes.String()));
    stream.flatMap(mapper).process(supplier);

    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        final TestInputTopic<Integer, String> inputTopic =
            driver.createInputTopic(topicName, new IntegerSerializer(), new StringSerializer(), Instant.ofEpochMilli(0), Duration.ZERO);
        for (final int expectedKey : expectedKeys) {
            inputTopic.pipeInput(expectedKey, "V" + expectedKey);
        }
    }

    assertEquals(6, supplier.theCapturedProcessor().processed().size());

    final KeyValueTimestamp[] expected = {
        new KeyValueTimestamp<>("10", "V1", 0),
        new KeyValueTimestamp<>("20", "V2", 0),
        new KeyValueTimestamp<>("21", "V2", 0),
        new KeyValueTimestamp<>("30", "V3", 0),
        new KeyValueTimestamp<>("31", "V3", 0),
        new KeyValueTimestamp<>("32", "V3", 0)
    };
    for (int i = 0; i < expected.length; i++) {
        assertEquals(expected[i], supplier.theCapturedProcessor().processed().get(i));
    }
}
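The six expected records follow directly from the mapper: an input key k fans out into k output records, so keys 0 through 3 yield 0 + 1 + 2 + 3 = 6, and each output record keeps its input record's timestamp (0, since the input topic starts at epoch 0 with no auto-advance). The mapper can also be exercised on its own, outside any topology; a small sketch using the same lambda as the test:

import java.util.ArrayList;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.kstream.KeyValueMapper;

public class FlatMapMapperSketch {
    public static void main(final String[] args) {
        // Same mapper shape as in the test: key k fans out into k records keyed "k*10 + i".
        final KeyValueMapper<Number, Object, Iterable<KeyValue<String, String>>> mapper = (key, value) -> {
            final ArrayList<KeyValue<String, String>> result = new ArrayList<>();
            for (int i = 0; i < key.intValue(); i++) {
                result.add(KeyValue.pair(Integer.toString(key.intValue() * 10 + i), value.toString()));
            }
            return result;
        };

        // Emits three pairs for key 3: ("30", "V3"), ("31", "V3"), ("32", "V3").
        for (final KeyValue<String, String> kv : mapper.apply(3, "V3")) {
            System.out.println(kv);
        }
    }
}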
Use of org.apache.kafka.streams.KeyValueTimestamp in project kafka by apache.
Class KStreamAggregationDedupIntegrationTest, method shouldGroupByKey.
@Test
public void shouldGroupByKey() throws Exception {
    final long timestamp = mockTime.milliseconds();
    produceMessages(timestamp);
    produceMessages(timestamp);

    stream.groupByKey(Grouped.with(Serdes.Integer(), Serdes.String()))
        .windowedBy(TimeWindows.of(ofMillis(500L)))
        .count(Materialized.as("count-windows"))
        .toStream((windowedKey, value) -> windowedKey.key() + "@" + windowedKey.window().start())
        .to(outputTopic, Produced.with(Serdes.String(), Serdes.Long()));

    startStreams();

    final long window = timestamp / 500 * 500;
    validateReceivedMessages(
        new StringDeserializer(),
        new LongDeserializer(),
        Arrays.asList(
            new KeyValueTimestamp<>("1@" + window, 2L, timestamp),
            new KeyValueTimestamp<>("2@" + window, 2L, timestamp),
            new KeyValueTimestamp<>("3@" + window, 2L, timestamp),
            new KeyValueTimestamp<>("4@" + window, 2L, timestamp),
            new KeyValueTimestamp<>("5@" + window, 2L, timestamp)));
}
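The expected keys have the form "<key>@<windowStart>" because the toStream mapper concatenates the record key with the start of its 500 ms tumbling window, and timestamp / 500 * 500 performs the same alignment via integer division. A tiny sketch of that arithmetic, with a made-up timestamp value:

public class WindowStartSketch {
    public static void main(final String[] args) {
        final long timestamp = 1_234_567L;   // assumed example value, not taken from the test
        final long windowSizeMs = 500L;

        // Integer division floors the timestamp to the start of its tumbling window.
        final long windowStart = timestamp / windowSizeMs * windowSizeMs;   // 1234500

        // Same key format the toStream() mapper produces: "<record key>@<window start>".
        System.out.println("1@" + windowStart);   // 1@1234500
    }
}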
Use of org.apache.kafka.streams.KeyValueTimestamp in project kafka by apache.
Class KStreamAggregationDedupIntegrationTest, method shouldReduce.
@Test
public void shouldReduce() throws Exception {
    produceMessages(System.currentTimeMillis());

    groupedStream.reduce(reducer, Materialized.as("reduce-by-key"))
        .toStream()
        .to(outputTopic, Produced.with(Serdes.String(), Serdes.String()));

    startStreams();

    final long timestamp = System.currentTimeMillis();
    produceMessages(timestamp);

    validateReceivedMessages(
        new StringDeserializer(),
        new StringDeserializer(),
        Arrays.asList(
            new KeyValueTimestamp<>("A", "A:A", timestamp),
            new KeyValueTimestamp<>("B", "B:B", timestamp),
            new KeyValueTimestamp<>("C", "C:C", timestamp),
            new KeyValueTimestamp<>("D", "D:D", timestamp),
            new KeyValueTimestamp<>("E", "E:E", timestamp)));
}
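The expected values such as "A:A" indicate that the reducer combines the two identical values from the repeated produceMessages calls. The reducer itself is defined elsewhere in the test class; assuming it concatenates the accumulated and incoming values with a colon, a minimal sketch of that shape:

import org.apache.kafka.streams.kstream.Reducer;

public class ReducerSketch {
    public static void main(final String[] args) {
        // Assumed reducer shape: join the old and new values with a colon.
        final Reducer<String> reducer = (value1, value2) -> value1 + ":" + value2;

        // Two rounds of the same message reduce to "A:A", matching the expected output above.
        System.out.println(reducer.apply("A", "A"));
    }
}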