Use of com.avro.Customer in project HybridTestFramework by dipjyotimetia.
The class PubSub, method publishAvroRecords.
public static void publishAvroRecords(String projectId, String topicId, Config config)
        throws IOException, ExecutionException, InterruptedException {
    Encoding encoding = null;
    TopicName topicName = TopicName.of(projectId, topicId);
    // Get the topic encoding type.
    try (TopicAdminClient topicAdminClient = TopicAdminClient.create()) {
        encoding = topicAdminClient.getTopic(topicName).getSchemaSettings().getEncoding();
    }
    // Instantiate an avro-tools-generated Customer record.
    Customer customer = Customer.newBuilder()
            .setFirstName("John")
            .setLastName("Doe")
            .setAge(25)
            .setHeight(185.5f)
            .setWeight(85.6f)
            .setAutomatedEmail(false)
            .build();
    Publisher publisher = null;
    try {
        publisher = Publisher.newBuilder(topicName)
                .setChannelProvider(config.ChannelProvider())
                .setCredentialsProvider(config.CredentialProvider())
                .build();
        // Prepare to serialize the object to the output stream.
        ByteArrayOutputStream byteStream = new ByteArrayOutputStream();
        Encoder encoder = null;
        // Prepare an appropriate encoder for publishing to the topic.
        switch (encoding) {
            case BINARY:
                System.out.println("Preparing a BINARY encoder...");
                encoder = EncoderFactory.get().directBinaryEncoder(byteStream, /* reuse= */ null);
                break;
            case JSON:
                System.out.println("Preparing a JSON encoder...");
                encoder = EncoderFactory.get().jsonEncoder(Customer.getClassSchema(), byteStream);
                break;
            default:
                // Unknown or unspecified topic encoding: nothing to publish.
                return;
        }
        // Encode the object and write it to the output stream.
        customer.customEncode(encoder);
        encoder.flush();
        // Publish the encoded object as a Pub/Sub message.
        ByteString data = ByteString.copyFrom(byteStream.toByteArray());
        PubsubMessage message = PubsubMessage.newBuilder().setData(data).build();
        System.out.println("Publishing message: " + message);
        ApiFuture<String> future = publisher.publish(message);
        System.out.println("Published message ID: " + future.get());
    } finally {
        if (publisher != null) {
            publisher.shutdown();
            publisher.awaitTermination(1, TimeUnit.MINUTES);
        }
    }
}
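The Config object above supplies the transport channel and credentials used to build the Publisher, but its implementation is not part of this snippet. Below is a minimal sketch of what such a Config could look like, assuming the tests target a local Pub/Sub emulator; the class shape and the localhost:8085 endpoint are assumptions, not the project's actual code, though the gax/gRPC calls themselves are standard.

import com.google.api.gax.core.CredentialsProvider;
import com.google.api.gax.core.NoCredentialsProvider;
import com.google.api.gax.grpc.GrpcTransportChannel;
import com.google.api.gax.rpc.FixedTransportChannelProvider;
import com.google.api.gax.rpc.TransportChannelProvider;
import io.grpc.ManagedChannel;
import io.grpc.ManagedChannelBuilder;

// Hypothetical Config sketch: wires the Publisher to a local Pub/Sub emulator.
public class Config {
    // Emulator endpoint is an assumption; adjust to the environment under test.
    private final ManagedChannel channel =
            ManagedChannelBuilder.forTarget("localhost:8085").usePlaintext().build();

    public TransportChannelProvider ChannelProvider() {
        return FixedTransportChannelProvider.create(GrpcTransportChannel.create(channel));
    }

    public CredentialsProvider CredentialProvider() {
        // The emulator does not validate credentials.
        return NoCredentialsProvider.create();
    }
}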
Use of com.avro.Customer in project HybridTestFramework by dipjyotimetia.
The class Kafka, method consumer.
/**
 * Consume Kafka messages.
 *
 * @param config consumer configuration
 */
public void consumer(Config config) {
    Properties configProperties = setConsumerConfig(config);
    try (KafkaConsumer<String, Customer> kafkaConsumer = new KafkaConsumer<>(configProperties)) {
        kafkaConsumer.subscribe(Collections.singleton(config.getTopic()));
        log.info("Waiting for data");
        while (true) {
            ConsumerRecords<String, Customer> records = kafkaConsumer.poll(Duration.ofMillis(500));
            for (ConsumerRecord<String, Customer> record : records) {
                Customer customer = record.value();
                System.out.println(customer);
            }
            kafkaConsumer.commitSync();
        }
    } catch (Exception e) {
        log.error(e.getMessage());
    }
    // The consumer is closed automatically by try-with-resources.
}
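setConsumerConfig is called above but not shown in this snippet. A plausible sketch for consuming Avro-specific records through Confluent Schema Registry follows; the broker address, group id, and registry URL are placeholder assumptions, while the ConsumerConfig keys and the KafkaAvroDeserializer settings are the standard Confluent/Kafka client API.

import io.confluent.kafka.serializers.KafkaAvroDeserializer;
import io.confluent.kafka.serializers.KafkaAvroDeserializerConfig;
import java.util.Properties;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;

// Hypothetical consumer configuration; endpoint values are placeholders.
private Properties setConsumerConfig(Config config) {
    Properties props = new Properties();
    props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
    props.put(ConsumerConfig.GROUP_ID_CONFIG, "customer-consumer-group");
    props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false"); // offsets are committed manually via commitSync()
    props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
    props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, KafkaAvroDeserializer.class.getName());
    // Deserialize into the generated com.avro.Customer class rather than GenericRecord.
    props.put(KafkaAvroDeserializerConfig.SPECIFIC_AVRO_READER_CONFIG, "true");
    props.put("schema.registry.url", "http://localhost:8081");
    return props;
}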
Use of com.avro.Customer in project HybridTestFramework by dipjyotimetia.
The class Kafka, method producer.
/**
 * Produce Kafka messages.
 *
 * @param config producer configuration
 * @throws Exception if the producer thread is interrupted while sending
 */
public void producer(Config config) throws Exception {
    Properties configProperty = getKafkaStreamsConfig(config);
    KafkaProducer<String, Customer> kafkaProducer = new KafkaProducer<>(configProperty);
    for (int i = 0; i < 10; i++) {
        Thread.sleep(1000);
        Customer customer = Customer.newBuilder()
                .setFirstName("John")
                .setLastName("Doe")
                .setAge(25)
                .setHeight(185.5f)
                .setWeight(85.6f)
                .setAutomatedEmail(false)
                .build();
        ProducerRecord<String, Customer> producerRecord = new ProducerRecord<>(config.getTopic(), customer);
        kafkaProducer.send(producerRecord, (metadata, exception) -> {
            if (exception == null) {
                System.out.println("Success");
                System.out.println(metadata.toString());
            } else {
                exception.printStackTrace();
            }
        });
    }
    kafkaProducer.flush();
    kafkaProducer.close();
}
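getKafkaStreamsConfig is likewise not included here. A minimal sketch of a matching producer configuration is shown below, assuming Confluent's KafkaAvroSerializer for the Customer value; the broker and Schema Registry addresses are placeholder assumptions, the ProducerConfig keys are the standard Kafka client API.

import io.confluent.kafka.serializers.KafkaAvroSerializer;
import java.util.Properties;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringSerializer;

// Hypothetical producer configuration; endpoint values are placeholders.
private Properties getKafkaStreamsConfig(Config config) {
    Properties props = new Properties();
    props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
    props.put(ProducerConfig.ACKS_CONFIG, "all");
    props.put(ProducerConfig.RETRIES_CONFIG, 3);
    props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
    // KafkaAvroSerializer registers the Customer schema with Schema Registry and Avro-encodes each record.
    props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, KafkaAvroSerializer.class.getName());
    props.put("schema.registry.url", "http://localhost:8081");
    return props;
}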
Use of com.avro.Customer in project HybridTestFramework by dipjyotimetia.
The class Pubsub, method publishAvroRecords.
/**
 * Publish an Avro-encoded message to a Pub/Sub topic.
 *
 * @param projectId GCP project id
 * @param topicId   Pub/Sub topic id
 * @throws IOException          if the topic admin client cannot be created
 * @throws ExecutionException   if publishing fails
 * @throws InterruptedException if waiting for the publish result is interrupted
 */
public void publishAvroRecords(String projectId, String topicId)
        throws IOException, ExecutionException, InterruptedException {
    Encoding encoding = null;
    TopicName topicName = TopicName.of(projectId, topicId);
    // Get the topic encoding type.
    try (TopicAdminClient topicAdminClient = TopicAdminClient.create()) {
        encoding = topicAdminClient.getTopic(topicName).getSchemaSettings().getEncoding();
    }
    // Instantiate an avro-tools-generated Customer record.
    Customer customer = Customer.newBuilder()
            .setFirstName("John")
            .setLastName("Doe")
            .setAge(25)
            .setHeight(185.5f)
            .setWeight(85.6f)
            .setAutomatedEmail(false)
            .build();
    Publisher publisher = createPublisher(projectId, topicId);
    try {
        // Prepare to serialize the object to the output stream.
        ByteArrayOutputStream byteStream = new ByteArrayOutputStream();
        Encoder encoder = null;
        // Prepare an appropriate encoder for publishing to the topic.
        switch (encoding) {
            case BINARY:
                System.out.println("Preparing a BINARY encoder...");
                encoder = EncoderFactory.get().directBinaryEncoder(byteStream, /* reuse= */ null);
                break;
            case JSON:
                System.out.println("Preparing a JSON encoder...");
                encoder = EncoderFactory.get().jsonEncoder(Customer.getClassSchema(), byteStream);
                break;
            default:
                // Unknown or unspecified topic encoding: nothing to publish.
                return;
        }
        // Encode the object and write it to the output stream.
        customer.customEncode(encoder);
        encoder.flush();
        // Publish the encoded object as a Pub/Sub message.
        ByteString data = ByteString.copyFrom(byteStream.toByteArray());
        PubsubMessage message = PubsubMessage.newBuilder().setData(data).build();
        System.out.println("Publishing message: " + message);
        ApiFuture<String> future = publisher.publish(message);
        System.out.println("Published message ID: " + future.get());
    } finally {
        if (publisher != null) {
            publisher.shutdown();
            publisher.awaitTermination(1, TimeUnit.MINUTES);
        }
    }
}
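createPublisher is referenced above but not part of this snippet. A minimal sketch using the default transport and credentials is given below; the project's actual helper may instead wire an emulator channel and custom credentials as in the Config variant shown earlier.

import com.google.cloud.pubsub.v1.Publisher;
import com.google.pubsub.v1.TopicName;
import java.io.IOException;

// Hypothetical helper: builds a Publisher for the given topic with default settings.
private Publisher createPublisher(String projectId, String topicId) throws IOException {
    TopicName topicName = TopicName.of(projectId, topicId);
    return Publisher.newBuilder(topicName).build();
}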