Use of org.apache.kafka.common.serialization.StringDeserializer in project incubator-rya by apache, from the class PeriodicCommandNotificationConsumerIT, method kafkaNotificationProviderTest:
@Test
public void kafkaNotificationProviderTest() throws InterruptedException {
    BasicConfigurator.configure();
    final BlockingQueue<TimestampedNotification> notifications = new LinkedBlockingQueue<>();
    final Properties props = createKafkaConfig();
    final KafkaProducer<String, CommandNotification> producer = new KafkaProducer<>(props);
    final String topic = rule.getKafkaTopicName();
    rule.createTopic(topic);
    registration = new KafkaNotificationRegistrationClient(topic, producer);
    coord = new PeriodicNotificationCoordinatorExecutor(1, notifications);
    provider = new KafkaNotificationProvider(topic, new StringDeserializer(), new CommandNotificationSerializer(), props, coord, 1);
    provider.start();
    registration.addNotification("1", 1, 0, TimeUnit.SECONDS);
    Thread.sleep(4000);
    // check that notifications are being added to the blocking queue
    Assert.assertTrue(notifications.size() > 0);
    registration.deleteNotification("1");
    Thread.sleep(2000);
    final int size = notifications.size();
    // sleep for 2 more seconds to verify that no further messages are produced
    Thread.sleep(2000);
    Assert.assertEquals(size, notifications.size());
    tearDown();
}
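The createKafkaConfig() helper is not shown in this excerpt. Since the producer above is constructed from the properties alone, those properties must at least name the broker and the serializer classes; a minimal hypothetical sketch, in which the broker address, group id, and the choice of CommandNotificationSerializer as the value serializer are all assumptions rather than the actual Rya implementation:

// Hypothetical sketch of the createKafkaConfig() helper referenced above;
// every concrete value here is an assumption for illustration.
private static Properties createKafkaConfig() {
    final Properties props = new Properties();
    props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");   // assumed broker address
    props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
    props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, CommandNotificationSerializer.class);
    props.put(ConsumerConfig.GROUP_ID_CONFIG, "notification-group");        // assumed group id
    props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    return props;
}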
Use of org.apache.kafka.common.serialization.StringDeserializer in project testcontainers-java by testcontainers, from the class KafkaContainerTest, method testKafkaFunctionality:
protected void testKafkaFunctionality(String bootstrapServers) throws Exception {
    try (
        KafkaProducer<String, String> producer = new KafkaProducer<>(
            ImmutableMap.of(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers,
                ProducerConfig.CLIENT_ID_CONFIG, UUID.randomUUID().toString()),
            new StringSerializer(), new StringSerializer());
        KafkaConsumer<String, String> consumer = new KafkaConsumer<>(
            ImmutableMap.of(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers,
                ConsumerConfig.GROUP_ID_CONFIG, "tc-" + UUID.randomUUID(),
                ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"),
            new StringDeserializer(), new StringDeserializer())
    ) {
        String topicName = "messages";
        consumer.subscribe(Arrays.asList(topicName));
        producer.send(new ProducerRecord<>(topicName, "testcontainers", "rulezzz")).get();
        Unreliables.retryUntilTrue(10, TimeUnit.SECONDS, () -> {
            ConsumerRecords<String, String> records = consumer.poll(100);
            if (records.isEmpty()) {
                return false;
            }
            assertThat(records)
                .hasSize(1)
                .extracting(ConsumerRecord::topic, ConsumerRecord::key, ConsumerRecord::value)
                .containsExactly(tuple(topicName, "testcontainers", "rulezzz"));
            return true;
        });
        consumer.unsubscribe();
    }
}
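For context, this method expects the bootstrap servers of a running broker; a minimal sketch of a caller, assuming the KafkaContainer class from the same project supplies them:

// Sketch of a caller; assumes KafkaContainer's no-arg constructor and its
// default image, which may differ across testcontainers-java versions.
@Test
public void testUsage() throws Exception {
    try (KafkaContainer kafka = new KafkaContainer()) {
        kafka.start();
        testKafkaFunctionality(kafka.getBootstrapServers());
    }
}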
Use of org.apache.kafka.common.serialization.StringDeserializer in project kafka-streams-examples by confluentinc, from the class WikipediaFeedAvroLambdaExampleTest, method shouldRunTheWikipediaFeedLambdaExample:
@Test
public void shouldRunTheWikipediaFeedLambdaExample() {
    final Properties props = new Properties();
    props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers());
    props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
    props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, io.confluent.kafka.serializers.KafkaAvroSerializer.class);
    props.put(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, CLUSTER.schemaRegistryUrl());
    final KafkaProducer<String, WikiFeed> producer = new KafkaProducer<>(props);
    producer.send(new ProducerRecord<>(WikipediaFeedAvroExample.WIKIPEDIA_FEED, new WikiFeed("donna", true, "first post")));
    producer.send(new ProducerRecord<>(WikipediaFeedAvroExample.WIKIPEDIA_FEED, new WikiFeed("donna", true, "second post")));
    producer.send(new ProducerRecord<>(WikipediaFeedAvroExample.WIKIPEDIA_FEED, new WikiFeed("donna", true, "third post")));
    producer.send(new ProducerRecord<>(WikipediaFeedAvroExample.WIKIPEDIA_FEED, new WikiFeed("becca", true, "first post")));
    producer.send(new ProducerRecord<>(WikipediaFeedAvroExample.WIKIPEDIA_FEED, new WikiFeed("becca", true, "second post")));
    producer.send(new ProducerRecord<>(WikipediaFeedAvroExample.WIKIPEDIA_FEED, new WikiFeed("john", true, "first post")));
    producer.flush();
    streams.start();
    final Properties consumerProperties = new Properties();
    consumerProperties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers());
    consumerProperties.put(ConsumerConfig.GROUP_ID_CONFIG, "wikipedia-lambda-feed-consumer");
    consumerProperties.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    // key and value deserializers are supplied directly to the constructor
    final KafkaConsumer<String, Long> consumer = new KafkaConsumer<>(consumerProperties, new StringDeserializer(), new LongDeserializer());
    final Map<String, Long> expected = new HashMap<>();
    expected.put("donna", 3L);
    expected.put("becca", 2L);
    expected.put("john", 1L);
    final Map<String, Long> actual = new HashMap<>();
    consumer.subscribe(Collections.singleton(WikipediaFeedAvroExample.WIKIPEDIA_STATS));
    // poll until the aggregated counts match, or the 30-second deadline passes
    final long timeout = System.currentTimeMillis() + 30000L;
    while (!actual.equals(expected) && System.currentTimeMillis() < timeout) {
        final ConsumerRecords<String, Long> records = consumer.poll(1000);
        records.forEach(record -> actual.put(record.key(), record.value()));
    }
    assertThat(actual, equalTo(expected));
}
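The poll-until-match loop at the end of the test is a reusable pattern: later records for a key overwrite earlier ones, so the map converges on the latest count per key. A generic sketch of that loop as a standalone helper (the name and signature are illustrative, not part of the example project):

// Sketch: drain records into a map until it equals the expected map
// or the timeout elapses, then return whatever was accumulated.
private static <K, V> Map<K, V> consumeUntilMatch(final KafkaConsumer<K, V> consumer,
                                                  final Map<K, V> expected,
                                                  final long timeoutMs) {
    final Map<K, V> actual = new HashMap<>();
    final long deadline = System.currentTimeMillis() + timeoutMs;
    while (!actual.equals(expected) && System.currentTimeMillis() < deadline) {
        for (final ConsumerRecord<K, V> record : consumer.poll(1000)) {
            actual.put(record.key(), record.value());
        }
    }
    return actual;
}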
Use of org.apache.kafka.common.serialization.StringDeserializer in project kafka-streams-examples by confluentinc, from the class WikipediaFeedAvroExampleDriver, method consumeOutput:
private static void consumeOutput(String bootstrapServers, String schemaRegistryUrl) {
    final Properties consumerProperties = new Properties();
    consumerProperties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
    consumerProperties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
    consumerProperties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
    consumerProperties.put(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, schemaRegistryUrl);
    consumerProperties.put(ConsumerConfig.GROUP_ID_CONFIG, "wikipedia-feed-example-consumer");
    consumerProperties.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    // the deserializers passed to the constructor take precedence, so the
    // StringDeserializer value class configured above is ignored in favor of LongDeserializer
    final KafkaConsumer<String, Long> consumer = new KafkaConsumer<>(consumerProperties, new StringDeserializer(), new LongDeserializer());
    consumer.subscribe(Collections.singleton(WikipediaFeedAvroExample.WIKIPEDIA_STATS));
    while (true) {
        final ConsumerRecords<String, Long> consumerRecords = consumer.poll(Long.MAX_VALUE);
        for (final ConsumerRecord<String, Long> consumerRecord : consumerRecords) {
            System.out.println(consumerRecord.key() + "=" + consumerRecord.value());
        }
    }
}
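This loop never exits. A common way to make such a consumer stoppable is to call KafkaConsumer.wakeup() from a shutdown hook and catch the resulting WakeupException; a minimal sketch of that variant, not part of the original driver, reusing the consumer built above:

// Sketch: wakeup() is the one KafkaConsumer method that is safe to call
// from another thread; it aborts a blocking poll() with WakeupException.
Runtime.getRuntime().addShutdownHook(new Thread(consumer::wakeup));
try {
    while (true) {
        final ConsumerRecords<String, Long> records = consumer.poll(Long.MAX_VALUE);
        records.forEach(r -> System.out.println(r.key() + "=" + r.value()));
    }
} catch (final WakeupException e) {
    // expected on shutdown; fall through to close the consumer
} finally {
    consumer.close();
}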
Use of org.apache.kafka.common.serialization.StringDeserializer in project ksql by confluentinc, from the class CliTest, method testCreateStreamAsSelect:
private static void testCreateStreamAsSelect(String selectQuery, Schema resultSchema, Map<String, GenericRow> expectedResults) throws Exception {
    if (!selectQuery.endsWith(";")) {
        selectQuery += ";";
    }
    String resultKStreamName = "RESULT_" + result_stream_no++;
    final String queryString = "CREATE STREAM " + resultKStreamName + " AS " + selectQuery;
    /* Start Stream Query */
    test(queryString, build("Stream created and running"));
    /* Assert Results */
    Map<String, GenericRow> results = topicConsumer.readResults(resultKStreamName, resultSchema, expectedResults.size(), new StringDeserializer());
    dropStream(resultKStreamName);
    assertThat(results, equalTo(expectedResults));
}
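readResults is a KSQL test utility not shown in this excerpt; judging from its arguments, it reads expectedResults.size() records from the result topic, keyed with the supplied StringDeserializer, and presumably materializes values against resultSchema. A simplified, hypothetical sketch of such a helper, with String values standing in for the schema-driven GenericRow deserialization:

// Hypothetical readResults-style helper; the name, signature, timeout,
// and String value type are assumptions for illustration only.
private static Map<String, String> readResults(final String topic,
                                               final int expectedCount,
                                               final Properties consumerProps) {
    final Map<String, String> results = new HashMap<>();
    try (KafkaConsumer<String, String> consumer =
             new KafkaConsumer<>(consumerProps, new StringDeserializer(), new StringDeserializer())) {
        consumer.subscribe(Collections.singleton(topic));
        final long deadline = System.currentTimeMillis() + 30000L;
        while (results.size() < expectedCount && System.currentTimeMillis() < deadline) {
            for (final ConsumerRecord<String, String> record : consumer.poll(500)) {
                results.put(record.key(), record.value());
            }
        }
    }
    return results;
}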