Use of org.apache.kafka.clients.producer.ProducerRecord in project components by Talend.
The class KafkaDatasetOtherDelimTestIT, method init.
@Before
public void init() throws TimeoutException {
    // There may exist topics other than the built-in ones (configured in pom.xml); ignore them.
    // ----------------- Send sample data to TOPIC_IN start --------------------
    String testID = "sampleTest" + new Random().nextInt();
    List<Person> expectedPersons = Person.genRandomList(testID, 10);
    Properties props = new Properties();
    props.put("bootstrap.servers", BOOTSTRAP_HOST);
    props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
    props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
    Producer<Void, String> producer = new KafkaProducer<>(props);
    for (Person person : expectedPersons) {
        ProducerRecord<Void, String> message = new ProducerRecord<>(TOPIC_IN, person.toCSV(fieldDelimiter));
        producer.send(message);
    }
    producer.close();
    // ----------------- Send sample data to TOPIC_IN end --------------------
}
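The snippet depends on a Person.toCSV(String) helper that this page does not show. A minimal sketch of what that helper could look like; the field set here (group, name, age) is an assumption for illustration, not the real Talend fixture:

    // Hypothetical sketch of the Person fixture; only toCSV is needed by the
    // snippet above. Fields are assumed, not taken from the actual test code.
    public class Person {

        public String group;

        public String name;

        public int age;

        public String toCSV(String delimiter) {
            // Join the fields with the dataset's configured delimiter.
            return String.join(delimiter, group, name, String.valueOf(age));
        }
    }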
Use of org.apache.kafka.clients.producer.ProducerRecord in project components by Talend.
The class KafkaDatasetTestIT, method init.
@Before
public void init() throws TimeoutException {
    // There may exist topics other than the built-in ones (configured in pom.xml); ignore them.
    // ----------------- Send sample data to TOPIC_IN start --------------------
    String testID = "sampleTest" + new Random().nextInt();
    List<Person> expectedPersons = Person.genRandomList(testID, 10);
    Properties props = new Properties();
    props.put("bootstrap.servers", BOOTSTRAP_HOST);
    props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
    props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
    Producer<Void, String> producer = new KafkaProducer<>(props);
    for (Person person : expectedPersons) {
        ProducerRecord<Void, String> message = new ProducerRecord<>(TOPIC_IN, person.toCSV(";"));
        producer.send(message);
    }
    producer.close();
    // ----------------- Send sample data to TOPIC_IN end --------------------
}
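The two init methods above seed TOPIC_IN identically, differing only in the CSV delimiter (a configurable fieldDelimiter versus a hard-coded ";"). A quick way to eyeball what landed on the topic is a throwaway consumer; this is a sketch, not part of either test, and the literal broker address and topic name stand in for the BOOTSTRAP_HOST and TOPIC_IN constants used above:

    import java.util.Arrays;
    import java.util.Properties;

    import org.apache.kafka.clients.consumer.ConsumerRecord;
    import org.apache.kafka.clients.consumer.ConsumerRecords;
    import org.apache.kafka.clients.consumer.KafkaConsumer;

    // Throwaway consumer for inspecting the seeded CSV lines.
    public class TopicInPeek {

        public static void main(String[] args) {
            Properties props = new Properties();
            props.put("bootstrap.servers", "localhost:9092"); // stand-in for BOOTSTRAP_HOST
            props.put("group.id", "peek");
            props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
            props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
            props.put("auto.offset.reset", "earliest");
            try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {
                consumer.subscribe(Arrays.asList("test_topic_in")); // stand-in for TOPIC_IN
                // Same older poll(long) client API as the snippets on this page.
                ConsumerRecords<String, String> records = consumer.poll(5000);
                for (ConsumerRecord<String, String> record : records) {
                    System.out.println(record.value()); // one CSV-encoded Person per line
                }
            }
        }
    }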
Use of org.apache.kafka.clients.producer.ProducerRecord in project components by Talend.
The class KafkaAvroBeamRuntimeTestIT, method avroBasicTest.
/**
 * Read Avro (Person) records from Kafka and write them back in Avro format with a schema.
 */
@Test
public void avroBasicTest() throws IOException {
    String testID = "avroBasicTest" + new Random().nextInt();
    expectedPersons = Person.genRandomList(testID, maxRecords);
    // ----------------- Send data to TOPIC_AVRO_IN start --------------------
    Properties props = new Properties();
    props.put("bootstrap.servers", BOOTSTRAP_HOST);
    props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
    props.put("value.serializer", "org.apache.kafka.common.serialization.ByteArraySerializer");
    Producer<Void, byte[]> producer = new KafkaProducer<>(props);
    for (Person person : expectedPersons) {
        ProducerRecord<Void, byte[]> message = new ProducerRecord<>(TOPIC_AVRO_IN, person.serToAvroBytes());
        producer.send(message);
    }
    producer.close();
    // ----------------- Send data to TOPIC_AVRO_IN done --------------------
    KafkaInputProperties inputProperties = new KafkaInputProperties("input");
    inputProperties.init();
    inputProperties.setDatasetProperties(inputDatasetProperties);
    inputProperties.autoOffsetReset.setValue(KafkaInputProperties.OffsetType.EARLIEST);
    inputProperties.useMaxNumRecords.setValue(false);
    // inputProperties.maxNumRecords.setValue(maxRecords.longValue());
    inputProperties.useMaxReadTime.setValue(true);
    inputProperties.maxReadTime.setValue(5000L);
    KafkaOutputProperties outputProperties = new KafkaOutputProperties("output");
    outputProperties.init();
    outputProperties.setDatasetProperties(outputDatasetProperties);
    outputProperties.partitionType.setValue(KafkaOutputProperties.PartitionType.ROUND_ROBIN);
    outputProperties.useCompress.setValue(false);
    KafkaInputPTransformRuntime inputRuntime = new KafkaInputPTransformRuntime();
    inputRuntime.initialize(null, inputProperties);
    KafkaOutputPTransformRuntime outputRuntime = new KafkaOutputPTransformRuntime();
    outputRuntime.initialize(null, outputProperties);
    // ----------------- pipeline start --------------------
    pipeline.apply(inputRuntime)
            .apply(Filter.by(new KafkaCsvBeamRuntimeTestIT.FilterByGroup(testID)))
            .apply(outputRuntime);
    PipelineResult result = pipeline.run();
    // ----------------- pipeline done --------------------
    // ----------------- Read data from TOPIC_AVRO_OUT start --------------------
    props = new Properties();
    props.put("bootstrap.servers", BOOTSTRAP_HOST);
    props.put("group.id", "getResult");
    props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
    props.put("value.deserializer", "org.apache.kafka.common.serialization.ByteArrayDeserializer");
    props.put("auto.offset.reset", "earliest");
    KafkaConsumer<String, byte[]> consumer = new KafkaConsumer<>(props);
    consumer.subscribe(Arrays.asList(TOPIC_AVRO_OUT));
    List<Person> results = new ArrayList<>();
    // Poll until at least maxRecords matching results have been read.
    while (true) {
        ConsumerRecords<String, byte[]> records = consumer.poll(100);
        for (ConsumerRecord<String, byte[]> record : records) {
            Person person = Person.desFromAvroBytes(record.value());
            if (testID.equals(person.group)) {
                results.add(person);
            }
        }
        if (results.size() >= maxRecords) {
            break;
        }
    }
    // ----------------- Read data from TOPIC_AVRO_OUT done --------------------
    assertEquals(expectedPersons, results);
}
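The test leans on two helpers that are not shown here, Person.serToAvroBytes() and Person.desFromAvroBytes(byte[]). A minimal sketch of such a pair using Avro's reflect-based binary (de)serialization, assuming the Person sketch shown earlier; the real fixture may use generated SpecificRecord classes instead:

    import java.io.ByteArrayOutputStream;
    import java.io.IOException;

    import org.apache.avro.Schema;
    import org.apache.avro.io.BinaryDecoder;
    import org.apache.avro.io.BinaryEncoder;
    import org.apache.avro.io.DecoderFactory;
    import org.apache.avro.io.EncoderFactory;
    import org.apache.avro.reflect.ReflectData;
    import org.apache.avro.reflect.ReflectDatumReader;
    import org.apache.avro.reflect.ReflectDatumWriter;

    // Hypothetical stand-ins for the serToAvroBytes/desFromAvroBytes helpers.
    public class AvroBytes {

        // Derive the Avro schema from the Person class via reflection.
        private static final Schema SCHEMA = ReflectData.get().getSchema(Person.class);

        public static byte[] serToAvroBytes(Person person) throws IOException {
            ByteArrayOutputStream out = new ByteArrayOutputStream();
            BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(out, null);
            new ReflectDatumWriter<Person>(SCHEMA).write(person, encoder);
            encoder.flush();
            return out.toByteArray();
        }

        public static Person desFromAvroBytes(byte[] bytes) throws IOException {
            BinaryDecoder decoder = DecoderFactory.get().binaryDecoder(bytes, null);
            return new ReflectDatumReader<Person>(SCHEMA).read(null, decoder);
        }
    }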
Use of org.apache.kafka.clients.producer.ProducerRecord in project components by Talend.
The class KafkaAvroBeamRuntimeTestIT, method avroBasicTest2.
/**
 * Read Avro (Person) records from Kafka and write them back in Avro format with a schema,
 * keyed by the "name" column.
 */
@Test
public void avroBasicTest2() throws IOException {
    String testID = "avroBasicTest2" + new Random().nextInt();
    expectedPersons = Person.genRandomList(testID, maxRecords);
    // ----------------- Send data to TOPIC_AVRO_IN start --------------------
    Properties props = new Properties();
    props.put("bootstrap.servers", BOOTSTRAP_HOST);
    props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
    props.put("value.serializer", "org.apache.kafka.common.serialization.ByteArraySerializer");
    Producer<Void, byte[]> producer = new KafkaProducer<>(props);
    for (Person person : expectedPersons) {
        ProducerRecord<Void, byte[]> message = new ProducerRecord<>(TOPIC_AVRO_IN, person.serToAvroBytes());
        producer.send(message);
    }
    producer.close();
    // ----------------- Send data to TOPIC_AVRO_IN done --------------------
    KafkaInputProperties inputProperties = new KafkaInputProperties("input");
    inputProperties.init();
    inputProperties.setDatasetProperties(inputDatasetProperties);
    inputProperties.autoOffsetReset.setValue(KafkaInputProperties.OffsetType.EARLIEST);
    inputProperties.useMaxNumRecords.setValue(false);
    // inputProperties.maxNumRecords.setValue(maxRecords.longValue());
    inputProperties.useMaxReadTime.setValue(true);
    inputProperties.maxReadTime.setValue(5000L);
    KafkaOutputProperties outputProperties = new KafkaOutputProperties("output");
    outputProperties.init();
    outputProperties.setDatasetProperties(outputDatasetProperties);
    outputProperties.partitionType.setValue(KafkaOutputProperties.PartitionType.COLUMN);
    outputProperties.keyColumn.setValue("name");
    outputProperties.useCompress.setValue(false);
    KafkaInputPTransformRuntime inputRuntime = new KafkaInputPTransformRuntime();
    inputRuntime.initialize(null, inputProperties);
    KafkaOutputPTransformRuntime outputRuntime = new KafkaOutputPTransformRuntime();
    outputRuntime.initialize(null, outputProperties);
    // ----------------- pipeline start --------------------
    pipeline.apply(inputRuntime)
            .apply(Filter.by(new KafkaCsvBeamRuntimeTestIT.FilterByGroup(testID)))
            .apply(outputRuntime);
    PipelineResult result = pipeline.run();
    // ----------------- pipeline done --------------------
    // ----------------- Read data from TOPIC_AVRO_OUT start --------------------
    props = new Properties();
    props.put("bootstrap.servers", BOOTSTRAP_HOST);
    props.put("group.id", "getResult");
    props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
    props.put("value.deserializer", "org.apache.kafka.common.serialization.ByteArrayDeserializer");
    props.put("auto.offset.reset", "earliest");
    KafkaConsumer<String, byte[]> consumer = new KafkaConsumer<>(props);
    consumer.subscribe(Arrays.asList(TOPIC_AVRO_OUT));
    List<Person> results = new ArrayList<>();
    List<String> keys = new ArrayList<>();
    // Poll until at least maxRecords matching results have been read.
    while (true) {
        ConsumerRecords<String, byte[]> records = consumer.poll(100);
        for (ConsumerRecord<String, byte[]> record : records) {
            Person person = Person.desFromAvroBytes(record.value());
            if (testID.equals(person.group)) {
                keys.add(record.key());
                results.add(person);
            }
        }
        if (results.size() >= maxRecords) {
            break;
        }
    }
    // ----------------- Read data from TOPIC_AVRO_OUT done --------------------
    assertEquals(expectedPersons, results);
    List<String> expectedKeys = new ArrayList<>();
    for (Person person : results) {
        expectedKeys.add(person.name);
    }
    assertEquals(expectedKeys, keys);
}
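avroBasicTest2 differs from avroBasicTest only in the partitioning strategy: PartitionType.COLUMN with keyColumn "name" makes the output runtime write each record keyed by the person's name, which is why the test can assert that the consumed keys line up with the names. A sketch of the keyed write this implies (an assumption about the runtime's behavior, not its actual code):

    import org.apache.kafka.clients.producer.Producer;
    import org.apache.kafka.clients.producer.ProducerRecord;

    public class ColumnKeyedWrite {

        // With PartitionType.COLUMN / keyColumn "name", each record is assumed
        // to be produced with the name field as its message key; Kafka's
        // default partitioner then routes equal keys to the same partition.
        static void send(Producer<String, byte[]> producer, String topic, String name, byte[] avroBytes) {
            producer.send(new ProducerRecord<>(topic, name, avroBytes));
        }
    }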
Use of org.apache.kafka.clients.producer.ProducerRecord in project incubator-skywalking by apache.
The class KafkaProducerInterceptor, method beforeMethod.
@Override
public void beforeMethod(EnhancedInstance objInst, Method method, Object[] allArguments, Class<?>[] argumentsTypes,
                         MethodInterceptResult result) throws Throwable {
    ContextCarrier contextCarrier = new ContextCarrier();
    ProducerRecord record = (ProducerRecord) allArguments[0];
    String topicName = (String) ((EnhancedInstance) record).getSkyWalkingDynamicField();
    AbstractSpan activeSpan = ContextManager.createExitSpan(
            OPERATE_NAME_PREFIX + topicName + PRODUCER_OPERATE_NAME_SUFFIX,
            contextCarrier, (String) objInst.getSkyWalkingDynamicField());
    Tags.MQ_BROKER.set(activeSpan, (String) objInst.getSkyWalkingDynamicField());
    Tags.MQ_TOPIC.set(activeSpan, topicName);
    SpanLayer.asMQ(activeSpan);
    activeSpan.setComponent(ComponentsDefine.KAFKA);
    // Inject the tracing context into the outgoing record's headers so the
    // consumer side can continue the trace.
    CarrierItem next = contextCarrier.items();
    while (next.hasNext()) {
        next = next.next();
        record.headers().add(next.getHeadKey(), next.getHeadValue().getBytes());
    }
    // If a send callback was passed, snapshot the context so the callback
    // runs within the same trace.
    EnhancedInstance callbackInstance = (EnhancedInstance) allArguments[1];
    if (callbackInstance != null) {
        callbackInstance.setSkyWalkingDynamicField(ContextManager.capture());
    }
}
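beforeMethod opens an exit span, tags it with the broker and topic, and injects each carrier item into the outgoing record's headers. The consumer side would mirror this by reading those headers back into a ContextCarrier before opening an entry span. A simplified sketch of that counterpart; SkyWalking's real consumer plugin differs in detail, and the operation name here is an assumption:

    import java.util.Iterator;

    import org.apache.kafka.clients.consumer.ConsumerRecord;
    import org.apache.kafka.common.header.Header;
    import org.apache.skywalking.apm.agent.core.context.CarrierItem;
    import org.apache.skywalking.apm.agent.core.context.ContextCarrier;
    import org.apache.skywalking.apm.agent.core.context.ContextManager;
    import org.apache.skywalking.apm.agent.core.context.trace.AbstractSpan;
    import org.apache.skywalking.apm.agent.core.context.trace.SpanLayer;

    public class ConsumerTraceExtractor {

        public static void traceRecord(ConsumerRecord<?, ?> record) {
            ContextCarrier contextCarrier = new ContextCarrier();
            CarrierItem next = contextCarrier.items();
            while (next.hasNext()) {
                next = next.next();
                // Read back the header the producer interceptor wrote for this item.
                Iterator<Header> headers = record.headers().headers(next.getHeadKey()).iterator();
                if (headers.hasNext()) {
                    next.setHeadValue(new String(headers.next().value()));
                }
            }
            AbstractSpan span = ContextManager.createEntrySpan(
                    "Kafka/" + record.topic() + "/Consumer", contextCarrier);
            SpanLayer.asMQ(span);
            try {
                // ... hand the record to application code ...
            } finally {
                ContextManager.stopSpan();
            }
        }
    }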