Use of org.apache.kafka.common.serialization.LongDeserializer in project kafka-streams-examples by confluentinc.
From the class WikipediaFeedAvroExampleTest, method shouldRunTheWikipediaFeedExample:
@Test
public void shouldRunTheWikipediaFeedExample() {
    final Properties props = new Properties();
    props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers());
    props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
    props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, io.confluent.kafka.serializers.KafkaAvroSerializer.class);
    props.put(AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, CLUSTER.schemaRegistryUrl());

    // Produce six WikiFeed records: three for "donna", two for "becca", one for "john".
    final KafkaProducer<String, WikiFeed> producer = new KafkaProducer<>(props);
    producer.send(new ProducerRecord<>(WikipediaFeedAvroExample.WIKIPEDIA_FEED, new WikiFeed("donna", true, "first post")));
    producer.send(new ProducerRecord<>(WikipediaFeedAvroExample.WIKIPEDIA_FEED, new WikiFeed("donna", true, "second post")));
    producer.send(new ProducerRecord<>(WikipediaFeedAvroExample.WIKIPEDIA_FEED, new WikiFeed("donna", true, "third post")));
    producer.send(new ProducerRecord<>(WikipediaFeedAvroExample.WIKIPEDIA_FEED, new WikiFeed("becca", true, "first post")));
    producer.send(new ProducerRecord<>(WikipediaFeedAvroExample.WIKIPEDIA_FEED, new WikiFeed("becca", true, "second post")));
    producer.send(new ProducerRecord<>(WikipediaFeedAvroExample.WIKIPEDIA_FEED, new WikiFeed("john", true, "first post")));
    producer.flush();

    streams.start();

    final Properties consumerProperties = new Properties();
    consumerProperties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers());
    consumerProperties.put(ConsumerConfig.GROUP_ID_CONFIG, "wikipedia-feed-consumer");
    consumerProperties.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    // The stats topic carries per-user counts written with Serdes.Long(), so read it with a LongDeserializer.
    final KafkaConsumer<String, Long> consumer = new KafkaConsumer<>(consumerProperties, new StringDeserializer(), new LongDeserializer());

    final Map<String, Long> expected = new HashMap<>();
    expected.put("donna", 3L);
    expected.put("becca", 2L);
    expected.put("john", 1L);

    final Map<String, Long> actual = new HashMap<>();
    consumer.subscribe(Collections.singleton(WikipediaFeedAvroExample.WIKIPEDIA_STATS));
    // Poll until the aggregated counts match, or give up after 30 seconds.
    final long timeout = System.currentTimeMillis() + 30000L;
    while (!actual.equals(expected) && System.currentTimeMillis() < timeout) {
        final ConsumerRecords<String, Long> records = consumer.poll(Duration.ofSeconds(1));
        records.forEach(record -> actual.put(record.key(), record.value()));
    }
    assertThat(actual, equalTo(expected));
}
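The consumer reads WIKIPEDIA_STATS with a LongDeserializer because the example's count() aggregation writes its values with Serdes.Long(). A minimal standalone round-trip sketch (not taken from the project) showing why the pair lines up:

    final LongSerializer serializer = new LongSerializer();
    final LongDeserializer deserializer = new LongDeserializer();
    // LongSerializer emits an 8-byte big-endian encoding; LongDeserializer inverts it.
    final byte[] bytes = serializer.serialize("any-topic", 42L);
    assert deserializer.deserialize("any-topic", bytes) == 42L;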
Use of org.apache.kafka.common.serialization.LongDeserializer in project kafka-streams-examples by confluentinc.
From the class WordCountLambdaIntegrationTest, method shouldCountWords:
@Test
public void shouldCountWords() {
    final List<String> inputValues = Arrays.asList(
        "Hello Kafka Streams",
        "All streams lead to Kafka",
        "Join Kafka Summit",
        "И теперь пошли русские слова");
    final Map<String, Long> expectedWordCounts = mkMap(
        mkEntry("hello", 1L), mkEntry("all", 1L), mkEntry("streams", 2L),
        mkEntry("lead", 1L), mkEntry("to", 1L), mkEntry("join", 1L),
        mkEntry("kafka", 3L), mkEntry("summit", 1L), mkEntry("и", 1L),
        mkEntry("теперь", 1L), mkEntry("пошли", 1L), mkEntry("русские", 1L),
        mkEntry("слова", 1L));

    //
    // Step 1: Configure and start the processor topology.
    //
    final Serde<String> stringSerde = Serdes.String();
    final Serde<Long> longSerde = Serdes.Long();

    final Properties streamsConfiguration = new Properties();
    streamsConfiguration.put(StreamsConfig.APPLICATION_ID_CONFIG, "wordcount-lambda-integration-test");
    // TopologyTestDriver never connects to a broker, so a placeholder bootstrap value is sufficient.
    streamsConfiguration.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "dummy config");
    streamsConfiguration.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
    streamsConfiguration.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
    // Use a temporary directory for storing state, which will be automatically removed after the test.
    streamsConfiguration.put(StreamsConfig.STATE_DIR_CONFIG, TestUtils.tempDirectory().getAbsolutePath());

    final StreamsBuilder builder = new StreamsBuilder();
    final KStream<String, String> textLines = builder.stream(inputTopic);
    // Split on runs of non-word characters; UNICODE_CHARACTER_CLASS makes \W aware of non-Latin scripts.
    final Pattern pattern = Pattern.compile("\\W+", Pattern.UNICODE_CHARACTER_CLASS);
    final KTable<String, Long> wordCounts = textLines
        .flatMapValues(value -> Arrays.asList(pattern.split(value.toLowerCase())))
        .groupBy((key, word) -> word)
        .count();
    wordCounts.toStream().to(outputTopic, Produced.with(stringSerde, longSerde));

    try (final TopologyTestDriver topologyTestDriver = new TopologyTestDriver(builder.build(), streamsConfiguration)) {
        //
        // Step 2: Setup input and output topics.
        //
        final TestInputTopic<Void, String> input = topologyTestDriver
            .createInputTopic(inputTopic, new IntegrationTestUtils.NothingSerde<>(), new StringSerializer());
        final TestOutputTopic<String, Long> output = topologyTestDriver
            .createOutputTopic(outputTopic, new StringDeserializer(), new LongDeserializer());
        //
        // Step 3: Produce some input data to the input topic.
        //
        input.pipeValueList(inputValues);
        //
        // Step 4: Verify the application's output data.
        //
        assertThat(output.readKeyValuesToMap()).isEqualTo(expectedWordCounts);
    }
}
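The test writes the output topic with Produced.with(stringSerde, longSerde) and reads it back with a bare LongDeserializer; the two agree because Serdes.Long() is just a packaged serializer/deserializer pair. A short equivalent sketch, using only the standard org.apache.kafka.common.serialization classes:

    final Serde<Long> longSerde = Serdes.serdeFrom(new LongSerializer(), new LongDeserializer());
    // longSerde.serializer() and longSerde.deserializer() are the same classes the test
    // passes to Produced.with(...) and createOutputTopic(...), respectively.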
Use of org.apache.kafka.common.serialization.LongDeserializer in project kafka-tutorials by confluentinc.
From the class RunningAverageTest, method validateAverageRating:
@Test
public void validateAverageRating() {
    final TestInputTopic<Long, Rating> inputTopic = testDriver.createInputTopic(
        RATINGS_TOPIC_NAME, new LongSerializer(), ratingSpecificAvroSerde.serializer());
    inputTopic.pipeKeyValueList(asList(
        new KeyValue<>(LETHAL_WEAPON_RATING_8.getMovieId(), LETHAL_WEAPON_RATING_8),
        new KeyValue<>(LETHAL_WEAPON_RATING_10.getMovieId(), LETHAL_WEAPON_RATING_10)));

    // The average-ratings topic carries <movieId, average> pairs, hence Long/Double deserializers.
    final TestOutputTopic<Long, Double> outputTopic = testDriver.createOutputTopic(
        AVERAGE_RATINGS_TOPIC_NAME, new LongDeserializer(), new DoubleDeserializer());
    final List<KeyValue<Long, Double>> keyValues = outputTopic.readKeyValuesToList();

    // Two records were sent to the input topic, so the second output record
    // should carry the final running average: (8 + 10) / 2 = 9.0.
    final KeyValue<Long, Double> longDoubleKeyValue = keyValues.get(1);
    System.out.println("longDoubleKeyValue = " + longDoubleKeyValue);
    MatcherAssert.assertThat(longDoubleKeyValue, equalTo(new KeyValue<>(362L, 9.0)));

    // The same average must also be visible in the underlying state store.
    final KeyValueStore<Long, Double> keyValueStore = testDriver.getKeyValueStore("average-ratings");
    final Double actualAverage = keyValueStore.get(362L);
    Assert.assertEquals("average rating in state store", 9.0, actualAverage, 0.0);
}
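The fixture's ratingSpecificAvroSerde is configured elsewhere in the test class. A plausible setup sketch, assuming Confluent's SpecificAvroSerde and the mock:// schema-registry URL scheme (the URL and setup shown here are illustrative, not taken from the test):

    final SpecificAvroSerde<Rating> ratingSpecificAvroSerde = new SpecificAvroSerde<>();
    // "mock://test" is a hypothetical URL; Confluent serializers resolve the mock://
    // scheme to an in-memory schema registry, which suffices for TopologyTestDriver tests.
    ratingSpecificAvroSerde.configure(
        Collections.singletonMap(AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, "mock://test"),
        false); // isKey = false: the serde is used for record values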
Use of org.apache.kafka.common.serialization.LongDeserializer in project apache-kafka-on-k8s by banzaicloud.
From the class EosTestDriver, method verifyCnt:
private static void verifyCnt(final Map<TopicPartition, List<ConsumerRecord<byte[], byte[]>>> inputPerTopicPerPartition,
                              final Map<TopicPartition, List<ConsumerRecord<byte[], byte[]>>> cntPerTopicPerPartition) {
    final StringDeserializer stringDeserializer = new StringDeserializer();
    final LongDeserializer longDeserializer = new LongDeserializer();

    // Tracks how many times each key has been seen so far; the i-th count record
    // for a key must carry exactly that running count.
    final HashMap<String, Long> currentSumPerKey = new HashMap<>();
    for (final Map.Entry<TopicPartition, List<ConsumerRecord<byte[], byte[]>>> partitionRecords : cntPerTopicPerPartition.entrySet()) {
        final TopicPartition inputTopicPartition = new TopicPartition("repartition", partitionRecords.getKey().partition());
        final List<ConsumerRecord<byte[], byte[]>> partitionInput = inputPerTopicPerPartition.get(inputTopicPartition);
        final List<ConsumerRecord<byte[], byte[]>> partitionCnt = partitionRecords.getValue();
        if (partitionInput.size() != partitionCnt.size()) {
            throw new RuntimeException("Result verification failed: expected " + partitionInput.size()
                + " records for " + partitionRecords.getKey() + " but received " + partitionCnt.size());
        }

        final Iterator<ConsumerRecord<byte[], byte[]>> inputRecords = partitionInput.iterator();
        for (final ConsumerRecord<byte[], byte[]> receivedRecord : partitionCnt) {
            final ConsumerRecord<byte[], byte[]> input = inputRecords.next();
            // Records are fetched as raw byte[] and decoded here with the typed deserializers.
            final String receivedKey = stringDeserializer.deserialize(receivedRecord.topic(), receivedRecord.key());
            final long receivedValue = longDeserializer.deserialize(receivedRecord.topic(), receivedRecord.value());
            final String key = stringDeserializer.deserialize(input.topic(), input.key());
            Long cnt = currentSumPerKey.get(key);
            if (cnt == null) {
                cnt = 0L;
            }
            currentSumPerKey.put(key, ++cnt);
            if (!receivedKey.equals(key) || receivedValue != cnt.longValue()) {
                throw new RuntimeException("Result verification failed for " + receivedRecord
                    + " expected <" + key + "," + cnt + "> but was <" + receivedKey + "," + receivedValue + ">");
            }
        }
    }
}
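Stripped of the partition bookkeeping, the check is a single invariant: the i-th count record for a key must carry the value i. A minimal sketch of that invariant, with a hypothetical inputKeys sequence standing in for the deserialized input keys:

    final Map<String, Long> countsSoFar = new HashMap<>();
    for (final String key : inputKeys) { // hypothetical sequence of input record keys
        final long expectedCount = countsSoFar.merge(key, 1L, Long::sum);
        // the matching output record must deserialize to exactly <key, expectedCount>
    }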
Use of org.apache.kafka.common.serialization.LongDeserializer in project apache-kafka-on-k8s by banzaicloud.
From the class EosTestDriver, method verifySum:
private static void verifySum(final Map<TopicPartition, List<ConsumerRecord<byte[], byte[]>>> inputPerTopicPerPartition,
                              final Map<TopicPartition, List<ConsumerRecord<byte[], byte[]>>> minPerTopicPerPartition) {
    final StringDeserializer stringDeserializer = new StringDeserializer();
    final IntegerDeserializer integerDeserializer = new IntegerDeserializer();
    final LongDeserializer longDeserializer = new LongDeserializer();

    // Running sum per key: inputs carry int values, outputs carry the long running sum.
    final HashMap<String, Long> currentSumPerKey = new HashMap<>();
    for (final Map.Entry<TopicPartition, List<ConsumerRecord<byte[], byte[]>>> partitionRecords : minPerTopicPerPartition.entrySet()) {
        final TopicPartition inputTopicPartition = new TopicPartition("data", partitionRecords.getKey().partition());
        final List<ConsumerRecord<byte[], byte[]>> partitionInput = inputPerTopicPerPartition.get(inputTopicPartition);
        final List<ConsumerRecord<byte[], byte[]>> partitionSum = partitionRecords.getValue();
        if (partitionInput.size() != partitionSum.size()) {
            throw new RuntimeException("Result verification failed: expected " + partitionInput.size()
                + " records for " + partitionRecords.getKey() + " but received " + partitionSum.size());
        }

        final Iterator<ConsumerRecord<byte[], byte[]>> inputRecords = partitionInput.iterator();
        for (final ConsumerRecord<byte[], byte[]> receivedRecord : partitionSum) {
            final ConsumerRecord<byte[], byte[]> input = inputRecords.next();
            final String receivedKey = stringDeserializer.deserialize(receivedRecord.topic(), receivedRecord.key());
            final long receivedValue = longDeserializer.deserialize(receivedRecord.topic(), receivedRecord.value());
            final String key = stringDeserializer.deserialize(input.topic(), input.key());
            final int value = integerDeserializer.deserialize(input.topic(), input.value());
            Long sum = currentSumPerKey.get(key);
            if (sum == null) {
                sum = (long) value;
            } else {
                sum += value;
            }
            currentSumPerKey.put(key, sum);
            if (!receivedKey.equals(key) || receivedValue != sum) {
                throw new RuntimeException("Result verification failed for " + receivedRecord
                    + " expected <" + key + "," + sum + "> but was <" + receivedKey + "," + receivedValue + ">");
            }
        }
    }
}
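verifySum follows the same record-by-record pattern as verifyCnt, but the expected value is a running sum of the int inputs rather than a count, which is why it pairs an IntegerDeserializer for the input side with a LongDeserializer for the output side. The invariant in isolation, with a hypothetical inputs list of <key, int> pairs:

    final Map<String, Long> sumsSoFar = new HashMap<>();
    for (final KeyValue<String, Integer> in : inputs) { // hypothetical input pairs
        final long expectedSum = sumsSoFar.merge(in.key, (long) in.value, Long::sum);
        // the matching output record must deserialize to exactly <in.key, expectedSum>
    }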