Use of org.apache.kafka.common.serialization.LongDeserializer in project kafka by apache.
The class KStreamAggregationIntegrationTest, method shouldCount.
@Test
public void shouldCount() throws Exception {
    produceMessages(mockTime.milliseconds());

    groupedStream.count("count-by-key")
        .to(Serdes.String(), Serdes.Long(), outputTopic);

    startStreams();
    produceMessages(mockTime.milliseconds());

    final List<KeyValue<String, Long>> results =
        receiveMessages(new StringDeserializer(), new LongDeserializer(), 10);
    Collections.sort(results, new Comparator<KeyValue<String, Long>>() {
        @Override
        public int compare(final KeyValue<String, Long> o1, final KeyValue<String, Long> o2) {
            return KStreamAggregationIntegrationTest.compare(o1, o2);
        }
    });

    assertThat(results, is(Arrays.asList(
        KeyValue.pair("A", 1L), KeyValue.pair("A", 2L),
        KeyValue.pair("B", 1L), KeyValue.pair("B", 2L),
        KeyValue.pair("C", 1L), KeyValue.pair("C", 2L),
        KeyValue.pair("D", 1L), KeyValue.pair("D", 2L),
        KeyValue.pair("E", 1L), KeyValue.pair("E", 2L))));
}
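For reference, LongDeserializer decodes the 8-byte big-endian encoding that LongSerializer produces, which is why it pairs with Serdes.Long() above. A minimal standalone round trip (the topic name here is arbitrary):

import org.apache.kafka.common.serialization.LongDeserializer;
import org.apache.kafka.common.serialization.LongSerializer;

// Encode a count the way Serdes.Long() puts it on the wire (8 bytes, big-endian)...
final byte[] bytes = new LongSerializer().serialize("any-topic", 2L);
// ...and decode it back, as receiveMessages() does with the deserializer passed in.
final Long count = new LongDeserializer().deserialize("any-topic", bytes); // 2L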
Use of org.apache.kafka.common.serialization.LongDeserializer in project apache-kafka-on-k8s by banzaicloud.
The class KStreamAggregationDedupIntegrationTest, method shouldGroupByKey.
@Test
public void shouldGroupByKey() throws Exception {
    final long timestamp = mockTime.milliseconds();
    produceMessages(timestamp);
    produceMessages(timestamp);

    stream.groupByKey(Serialized.with(Serdes.Integer(), Serdes.String()))
        .count(TimeWindows.of(500L), "count-windows")
        .toStream(new KeyValueMapper<Windowed<Integer>, Long, String>() {
            @Override
            public String apply(final Windowed<Integer> windowedKey, final Long value) {
                return windowedKey.key() + "@" + windowedKey.window().start();
            }
        })
        .to(Serdes.String(), Serdes.Long(), outputTopic);

    startStreams();

    final List<KeyValue<String, Long>> results =
        receiveMessages(new StringDeserializer(), new LongDeserializer(), 5);
    Collections.sort(results, new Comparator<KeyValue<String, Long>>() {
        @Override
        public int compare(final KeyValue<String, Long> o1, final KeyValue<String, Long> o2) {
            return KStreamAggregationDedupIntegrationTest.compare(o1, o2);
        }
    });

    final long window = timestamp / 500 * 500;
    assertThat(results, is(Arrays.asList(
        KeyValue.pair("1@" + window, 2L),
        KeyValue.pair("2@" + window, 2L),
        KeyValue.pair("3@" + window, 2L),
        KeyValue.pair("4@" + window, 2L),
        KeyValue.pair("5@" + window, 2L))));
}
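The expected keys hinge on the line final long window = timestamp / 500 * 500: TimeWindows.of(500L) produces epoch-aligned tumbling windows, so integer division truncates any timestamp down to the start of its window. A worked sketch with an illustrative timestamp:

// The window containing a timestamp starts at the largest multiple of the
// window size that is <= the timestamp.
final long windowSize = 500L;                           // matches TimeWindows.of(500L)
final long ts = 1_234_567L;                             // an arbitrary event time in millis
final long windowStart = ts / windowSize * windowSize;  // 1_234_500
final long windowEnd = windowStart + windowSize;        // 1_235_000 (exclusive)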
Use of org.apache.kafka.common.serialization.LongDeserializer in project apache-kafka-on-k8s by banzaicloud.
The class KStreamAggregationIntegrationTest, method shouldGroupByKey.
@Test
public void shouldGroupByKey() throws Exception {
    final long timestamp = mockTime.milliseconds();
    produceMessages(timestamp);
    produceMessages(timestamp);

    stream.groupByKey(Serialized.with(Serdes.Integer(), Serdes.String()))
        .windowedBy(TimeWindows.of(500L))
        .count()
        .toStream(new KeyValueMapper<Windowed<Integer>, Long, String>() {
            @Override
            public String apply(final Windowed<Integer> windowedKey, final Long value) {
                return windowedKey.key() + "@" + windowedKey.window().start();
            }
        })
        .to(outputTopic, Produced.with(Serdes.String(), Serdes.Long()));

    startStreams();

    final List<KeyValue<String, Long>> results =
        receiveMessages(new StringDeserializer(), new LongDeserializer(), 10);
    Collections.sort(results, new Comparator<KeyValue<String, Long>>() {
        @Override
        public int compare(final KeyValue<String, Long> o1, final KeyValue<String, Long> o2) {
            return KStreamAggregationIntegrationTest.compare(o1, o2);
        }
    });

    final long window = timestamp / 500 * 500;
    assertThat(results, is(Arrays.asList(
        KeyValue.pair("1@" + window, 1L), KeyValue.pair("1@" + window, 2L),
        KeyValue.pair("2@" + window, 1L), KeyValue.pair("2@" + window, 2L),
        KeyValue.pair("3@" + window, 1L), KeyValue.pair("3@" + window, 2L),
        KeyValue.pair("4@" + window, 1L), KeyValue.pair("4@" + window, 2L),
        KeyValue.pair("5@" + window, 1L), KeyValue.pair("5@" + window, 2L))));
}
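On Java 8 and later the anonymous KeyValueMapper collapses to a lambda, as the kafka-by-apache variant further below does. The same topology could be written as the following sketch, assuming the same stream, outputTopic, and serdes as the test above:

stream.groupByKey(Serialized.with(Serdes.Integer(), Serdes.String()))
    .windowedBy(TimeWindows.of(500L))
    .count()
    .toStream((windowedKey, value) -> windowedKey.key() + "@" + windowedKey.window().start())
    .to(outputTopic, Produced.with(Serdes.String(), Serdes.Long()));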
Use of org.apache.kafka.common.serialization.LongDeserializer in project kafka-streams-examples by confluentinc.
The class WikipediaFeedAvroExampleTest, method shouldRunTheWikipediaFeedExample.
@Test
public void shouldRunTheWikipediaFeedExample() throws Exception {
    final Properties props = new Properties();
    props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers());
    props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
    props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, io.confluent.kafka.serializers.KafkaAvroSerializer.class);
    props.put(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, CLUSTER.schemaRegistryUrl());

    final KafkaProducer<String, WikiFeed> producer = new KafkaProducer<>(props);
    producer.send(new ProducerRecord<>(WikipediaFeedAvroExample.WIKIPEDIA_FEED, new WikiFeed("donna", true, "first post")));
    producer.send(new ProducerRecord<>(WikipediaFeedAvroExample.WIKIPEDIA_FEED, new WikiFeed("donna", true, "second post")));
    producer.send(new ProducerRecord<>(WikipediaFeedAvroExample.WIKIPEDIA_FEED, new WikiFeed("donna", true, "third post")));
    producer.send(new ProducerRecord<>(WikipediaFeedAvroExample.WIKIPEDIA_FEED, new WikiFeed("becca", true, "first post")));
    producer.send(new ProducerRecord<>(WikipediaFeedAvroExample.WIKIPEDIA_FEED, new WikiFeed("becca", true, "second post")));
    producer.send(new ProducerRecord<>(WikipediaFeedAvroExample.WIKIPEDIA_FEED, new WikiFeed("john", true, "first post")));
    producer.flush();

    streams.start();

    final Properties consumerProperties = new Properties();
    consumerProperties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers());
    consumerProperties.put(ConsumerConfig.GROUP_ID_CONFIG, "wikipedia-feed-consumer");
    consumerProperties.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    final KafkaConsumer<String, Long> consumer =
        new KafkaConsumer<>(consumerProperties, new StringDeserializer(), new LongDeserializer());

    final Map<String, Long> expected = new HashMap<>();
    expected.put("donna", 3L);
    expected.put("becca", 2L);
    expected.put("john", 1L);

    // Drain the stats topic until the counts converge or the 30-second deadline passes.
    final Map<String, Long> actual = new HashMap<>();
    consumer.subscribe(Collections.singleton(WikipediaFeedAvroExample.WIKIPEDIA_STATS));
    final long timeout = System.currentTimeMillis() + 30000L;
    while (!actual.equals(expected) && System.currentTimeMillis() < timeout) {
        final ConsumerRecords<String, Long> records = consumer.poll(1000);
        records.forEach(record -> actual.put(record.key(), record.value()));
    }
    consumer.close();

    assertThat(actual, equalTo(expected));
}
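The poll(long) overload used above is deprecated since Kafka 2.0 in favor of poll(Duration), which bounds the total time the call may block, including metadata fetches. A sketch of the same drain loop against the newer API, with the same consumer, expected, actual, and timeout as the test:

import java.time.Duration;

while (!actual.equals(expected) && System.currentTimeMillis() < timeout) {
    // poll(Duration) caps total blocking time, not just the fetch wait
    final ConsumerRecords<String, Long> records = consumer.poll(Duration.ofSeconds(1));
    records.forEach(record -> actual.put(record.key(), record.value()));
}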
Use of org.apache.kafka.common.serialization.LongDeserializer in project kafka by apache.
The class KStreamAggregationDedupIntegrationTest, method shouldGroupByKey.
@Test
public void shouldGroupByKey() throws Exception {
    final long timestamp = mockTime.milliseconds();
    produceMessages(timestamp);
    produceMessages(timestamp);

    stream.groupByKey(Grouped.with(Serdes.Integer(), Serdes.String()))
        .windowedBy(TimeWindows.of(ofMillis(500L)))
        .count(Materialized.as("count-windows"))
        .toStream((windowedKey, value) -> windowedKey.key() + "@" + windowedKey.window().start())
        .to(outputTopic, Produced.with(Serdes.String(), Serdes.Long()));

    startStreams();

    final long window = timestamp / 500 * 500;
    validateReceivedMessages(
        new StringDeserializer(),
        new LongDeserializer(),
        Arrays.asList(
            new KeyValueTimestamp<>("1@" + window, 2L, timestamp),
            new KeyValueTimestamp<>("2@" + window, 2L, timestamp),
            new KeyValueTimestamp<>("3@" + window, 2L, timestamp),
            new KeyValueTimestamp<>("4@" + window, 2L, timestamp),
            new KeyValueTimestamp<>("5@" + window, 2L, timestamp)));
}
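Because the count is materialized under an explicit name, the resulting window store could also be queried interactively once the application reaches RUNNING. A sketch against the Kafka 2.5+ interactive-query API, assuming streams is the test's KafkaStreams instance (the test itself does not do this):

import org.apache.kafka.streams.StoreQueryParameters;
import org.apache.kafka.streams.state.QueryableStoreTypes;
import org.apache.kafka.streams.state.ReadOnlyWindowStore;

// Look up the windowed count store created by Materialized.as("count-windows").
final ReadOnlyWindowStore<Integer, Long> counts = streams.store(
    StoreQueryParameters.fromNameAndType("count-windows",
        QueryableStoreTypes.<Integer, Long>windowStore()));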