Use of org.apache.kafka.streams.KeyValue in project apache-kafka-on-k8s by banzaicloud.
Class KStreamAggregationDedupIntegrationTest, method shouldGroupByKey.
@Test
public void shouldGroupByKey() throws Exception {
    final long timestamp = mockTime.milliseconds();
    produceMessages(timestamp);
    produceMessages(timestamp);

    stream.groupByKey(Serialized.with(Serdes.Integer(), Serdes.String()))
        .count(TimeWindows.of(500L), "count-windows")
        .toStream(new KeyValueMapper<Windowed<Integer>, Long, String>() {
            @Override
            public String apply(final Windowed<Integer> windowedKey, final Long value) {
                return windowedKey.key() + "@" + windowedKey.window().start();
            }
        })
        .to(Serdes.String(), Serdes.Long(), outputTopic);

    startStreams();

    final List<KeyValue<String, Long>> results =
        receiveMessages(new StringDeserializer(), new LongDeserializer(), 5);
    Collections.sort(results, new Comparator<KeyValue<String, Long>>() {
        @Override
        public int compare(final KeyValue<String, Long> o1, final KeyValue<String, Long> o2) {
            return KStreamAggregationDedupIntegrationTest.compare(o1, o2);
        }
    });

    final long window = timestamp / 500 * 500;
    assertThat(results, is(Arrays.asList(
        KeyValue.pair("1@" + window, 2L),
        KeyValue.pair("2@" + window, 2L),
        KeyValue.pair("3@" + window, 2L),
        KeyValue.pair("4@" + window, 2L),
        KeyValue.pair("5@" + window, 2L))));
}
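The sort above delegates to a static compare helper that is not part of this snippet. A minimal sketch, assuming it simply orders KeyValue pairs by key and breaks ties by value:

// Hypothetical sketch of the static compare helper referenced above (assumption, not the
// project's actual code): order by key first, then by value.
private static <K extends Comparable<K>, V extends Comparable<V>> int compare(
        final KeyValue<K, V> o1, final KeyValue<K, V> o2) {
    final int keyComparison = o1.key.compareTo(o2.key);
    return keyComparison == 0 ? o1.value.compareTo(o2.value) : keyComparison;
}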
Use of org.apache.kafka.streams.KeyValue in project apache-kafka-on-k8s by banzaicloud.
Class KStreamAggregationDedupIntegrationTest, method shouldReduceWindowed.
@Test
public void shouldReduceWindowed() throws Exception {
    long firstBatchTimestamp = System.currentTimeMillis() - 1000;
    produceMessages(firstBatchTimestamp);
    long secondBatchTimestamp = System.currentTimeMillis();
    produceMessages(secondBatchTimestamp);
    produceMessages(secondBatchTimestamp);

    groupedStream.reduce(reducer, TimeWindows.of(500L), "reduce-time-windows")
        .toStream(new KeyValueMapper<Windowed<String>, String, String>() {
            @Override
            public String apply(Windowed<String> windowedKey, String value) {
                return windowedKey.key() + "@" + windowedKey.window().start();
            }
        })
        .to(Serdes.String(), Serdes.String(), outputTopic);

    startStreams();

    List<KeyValue<String, String>> windowedOutput =
        receiveMessages(new StringDeserializer(), new StringDeserializer(), 10);
    Comparator<KeyValue<String, String>> comparator = new Comparator<KeyValue<String, String>>() {
        @Override
        public int compare(final KeyValue<String, String> o1, final KeyValue<String, String> o2) {
            return KStreamAggregationDedupIntegrationTest.compare(o1, o2);
        }
    };
    Collections.sort(windowedOutput, comparator);

    long firstBatchWindow = firstBatchTimestamp / 500 * 500;
    long secondBatchWindow = secondBatchTimestamp / 500 * 500;
    assertThat(windowedOutput, is(Arrays.asList(
        new KeyValue<>("A@" + firstBatchWindow, "A"),
        new KeyValue<>("A@" + secondBatchWindow, "A:A"),
        new KeyValue<>("B@" + firstBatchWindow, "B"),
        new KeyValue<>("B@" + secondBatchWindow, "B:B"),
        new KeyValue<>("C@" + firstBatchWindow, "C"),
        new KeyValue<>("C@" + secondBatchWindow, "C:C"),
        new KeyValue<>("D@" + firstBatchWindow, "D"),
        new KeyValue<>("D@" + secondBatchWindow, "D:D"),
        new KeyValue<>("E@" + firstBatchWindow, "E"),
        new KeyValue<>("E@" + secondBatchWindow, "E:E"))));
}
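The shared reducer field is not shown in these snippets. Judging from the expected "A:A"-style values, it presumably concatenates the accumulated and incoming values with a colon; a minimal sketch under that assumption:

// Assumed implementation of the shared reducer: joins the accumulated and new values with ":".
private final Reducer<String> reducer = new Reducer<String>() {
    @Override
    public String apply(final String value1, final String value2) {
        return value1 + ":" + value2;
    }
};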
Use of org.apache.kafka.streams.KeyValue in project apache-kafka-on-k8s by banzaicloud.
Class KStreamAggregationIntegrationTest, method shouldGroupByKey.
@Test
public void shouldGroupByKey() throws Exception {
    final long timestamp = mockTime.milliseconds();
    produceMessages(timestamp);
    produceMessages(timestamp);

    stream.groupByKey(Serialized.with(Serdes.Integer(), Serdes.String()))
        .windowedBy(TimeWindows.of(500L))
        .count()
        .toStream(new KeyValueMapper<Windowed<Integer>, Long, String>() {
            @Override
            public String apply(final Windowed<Integer> windowedKey, final Long value) {
                return windowedKey.key() + "@" + windowedKey.window().start();
            }
        })
        .to(outputTopic, Produced.with(Serdes.String(), Serdes.Long()));

    startStreams();

    final List<KeyValue<String, Long>> results =
        receiveMessages(new StringDeserializer(), new LongDeserializer(), 10);
    Collections.sort(results, new Comparator<KeyValue<String, Long>>() {
        @Override
        public int compare(final KeyValue<String, Long> o1, final KeyValue<String, Long> o2) {
            return KStreamAggregationIntegrationTest.compare(o1, o2);
        }
    });

    final long window = timestamp / 500 * 500;
    assertThat(results, is(Arrays.asList(
        KeyValue.pair("1@" + window, 1L), KeyValue.pair("1@" + window, 2L),
        KeyValue.pair("2@" + window, 1L), KeyValue.pair("2@" + window, 2L),
        KeyValue.pair("3@" + window, 1L), KeyValue.pair("3@" + window, 2L),
        KeyValue.pair("4@" + window, 1L), KeyValue.pair("4@" + window, 2L),
        KeyValue.pair("5@" + window, 1L), KeyValue.pair("5@" + window, 2L))));
}
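produceMessages(timestamp) is a test helper that is not included in these snippets. Given the expected keys 1 through 5 and values "A" through "E", it presumably writes five Integer/String records to the input topic with the supplied timestamp. A sketch using Kafka's IntegrationTestUtils, where the field names streamOneInput and CLUSTER are assumptions:

// Hypothetical sketch of produceMessages: five records with Integer keys 1-5 and String
// values "A"-"E", all stamped with the given timestamp. streamOneInput and CLUSTER are assumed names.
private void produceMessages(final long timestamp) throws Exception {
    IntegrationTestUtils.produceKeyValuesSynchronouslyWithTimestamp(
        streamOneInput,
        Arrays.asList(
            new KeyValue<>(1, "A"),
            new KeyValue<>(2, "B"),
            new KeyValue<>(3, "C"),
            new KeyValue<>(4, "D"),
            new KeyValue<>(5, "E")),
        TestUtils.producerConfig(
            CLUSTER.bootstrapServers(),
            IntegerSerializer.class,
            StringSerializer.class),
        timestamp);
}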
Use of org.apache.kafka.streams.KeyValue in project apache-kafka-on-k8s by banzaicloud.
Class KStreamAggregationIntegrationTest, method shouldReduce.
@SuppressWarnings("deprecation")
@Test
public void shouldReduce() throws Exception {
    produceMessages(mockTime.milliseconds());
    groupedStream.reduce(reducer, "reduce-by-key")
        .to(Serdes.String(), Serdes.String(), outputTopic);
    startStreams();
    produceMessages(mockTime.milliseconds());

    final List<KeyValue<String, String>> results =
        receiveMessages(new StringDeserializer(), new StringDeserializer(), 10);
    Collections.sort(results, new Comparator<KeyValue<String, String>>() {
        @Override
        public int compare(final KeyValue<String, String> o1, final KeyValue<String, String> o2) {
            return KStreamAggregationIntegrationTest.compare(o1, o2);
        }
    });

    assertThat(results, is(Arrays.asList(
        KeyValue.pair("A", "A"), KeyValue.pair("A", "A:A"),
        KeyValue.pair("B", "B"), KeyValue.pair("B", "B:B"),
        KeyValue.pair("C", "C"), KeyValue.pair("C", "C:C"),
        KeyValue.pair("D", "D"), KeyValue.pair("D", "D:D"),
        KeyValue.pair("E", "E"), KeyValue.pair("E", "E:E"))));
}
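The reduce output is keyed by "A" through "E" even though the produced records use Integer keys, so groupedStream is presumably built by re-grouping the input stream on its String value. A sketch of that wiring, assuming the pre-2.1 Serialized API used elsewhere in these tests:

// Hypothetical construction of groupedStream: re-key the Integer/String input stream by its value.
groupedStream = stream.groupBy(
    new KeyValueMapper<Integer, String, String>() {
        @Override
        public String apply(final Integer key, final String value) {
            return value;
        }
    },
    Serialized.with(Serdes.String(), Serdes.String()));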
Use of org.apache.kafka.streams.KeyValue in project apache-kafka-on-k8s by banzaicloud.
Class KStreamAggregationIntegrationTest, method shouldReduceWindowed.
@Test
public void shouldReduceWindowed() throws Exception {
    final long firstBatchTimestamp = mockTime.milliseconds();
    mockTime.sleep(1000);
    produceMessages(firstBatchTimestamp);
    final long secondBatchTimestamp = mockTime.milliseconds();
    produceMessages(secondBatchTimestamp);
    produceMessages(secondBatchTimestamp);

    groupedStream.windowedBy(TimeWindows.of(500L))
        .reduce(reducer)
        .toStream(new KeyValueMapper<Windowed<String>, String, String>() {
            @Override
            public String apply(final Windowed<String> windowedKey, final String value) {
                return windowedKey.key() + "@" + windowedKey.window().start();
            }
        })
        .to(outputTopic, Produced.with(Serdes.String(), Serdes.String()));

    startStreams();

    final List<KeyValue<String, String>> windowedOutput =
        receiveMessages(new StringDeserializer(), new StringDeserializer(), 15);
    final Comparator<KeyValue<String, String>> comparator = new Comparator<KeyValue<String, String>>() {
        @Override
        public int compare(final KeyValue<String, String> o1, final KeyValue<String, String> o2) {
            return KStreamAggregationIntegrationTest.compare(o1, o2);
        }
    };
    Collections.sort(windowedOutput, comparator);

    final long firstBatchWindow = firstBatchTimestamp / 500 * 500;
    final long secondBatchWindow = secondBatchTimestamp / 500 * 500;
    assertThat(windowedOutput, is(Arrays.asList(
        new KeyValue<>("A@" + firstBatchWindow, "A"),
        new KeyValue<>("A@" + secondBatchWindow, "A"),
        new KeyValue<>("A@" + secondBatchWindow, "A:A"),
        new KeyValue<>("B@" + firstBatchWindow, "B"),
        new KeyValue<>("B@" + secondBatchWindow, "B"),
        new KeyValue<>("B@" + secondBatchWindow, "B:B"),
        new KeyValue<>("C@" + firstBatchWindow, "C"),
        new KeyValue<>("C@" + secondBatchWindow, "C"),
        new KeyValue<>("C@" + secondBatchWindow, "C:C"),
        new KeyValue<>("D@" + firstBatchWindow, "D"),
        new KeyValue<>("D@" + secondBatchWindow, "D"),
        new KeyValue<>("D@" + secondBatchWindow, "D:D"),
        new KeyValue<>("E@" + firstBatchWindow, "E"),
        new KeyValue<>("E@" + secondBatchWindow, "E"),
        new KeyValue<>("E@" + secondBatchWindow, "E:E"))));
}
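receiveMessages is the counterpart helper that drains the output topic. A minimal sketch, assuming it builds a consumer config from the supplied deserializers and waits for the expected record count via IntegrationTestUtils; CLUSTER and the group id are assumptions:

// Hypothetical sketch of receiveMessages: consume from outputTopic until numMessages records arrive.
private <K, V> List<KeyValue<K, V>> receiveMessages(final Deserializer<K> keyDeserializer,
                                                    final Deserializer<V> valueDeserializer,
                                                    final int numMessages) throws InterruptedException {
    final Properties consumerProperties = new Properties();
    consumerProperties.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers());
    consumerProperties.setProperty(ConsumerConfig.GROUP_ID_CONFIG, "kstream-aggregation-test");
    consumerProperties.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    consumerProperties.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG,
        keyDeserializer.getClass().getName());
    consumerProperties.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,
        valueDeserializer.getClass().getName());
    return IntegrationTestUtils.waitUntilMinKeyValueRecordsReceived(
        consumerProperties, outputTopic, numMessages);
}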