Example 51 with ProducerRecord

use of org.apache.kafka.clients.producer.ProducerRecord in project components by Talend.

the class KafkaDatasetOtherDelimTestIT method init.

@Before
public void init() throws TimeoutException {
    // there may exist other topics besides the built-in topics (configured in pom.xml), but ignore them
    // ----------------- Send sample data to TOPIC_IN start --------------------
    String testID = "sampleTest" + new Random().nextInt();
    List<Person> expectedPersons = Person.genRandomList(testID, 10);
    Properties props = new Properties();
    props.put("bootstrap.servers", BOOTSTRAP_HOST);
    props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
    props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
    Producer<Void, String> producer = new KafkaProducer<>(props);
    for (Person person : expectedPersons) {
        ProducerRecord<Void, String> message = new ProducerRecord<>(TOPIC_IN, person.toCSV(fieldDelimiter));
        producer.send(message);
    }
    producer.close();
// ----------------- Send sample data to TOPIC_IN end --------------------
}
Also used : KafkaProducer(org.apache.kafka.clients.producer.KafkaProducer) ProducerRecord(org.apache.kafka.clients.producer.ProducerRecord) KafkaDatasetProperties(org.talend.components.kafka.dataset.KafkaDatasetProperties) Before(org.junit.Before)
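
The test renders each record through Person.toCSV(fieldDelimiter), where fieldDelimiter is the non-default delimiter under test. The real Person class lives in the Talend test sources; a minimal sketch of such a helper, with assumed fields (group, name, age), might look like:

public class Person {

    String group;
    String name;
    int age;

    // Hypothetical reconstruction: join the fields with whatever delimiter
    // the caller supplies, e.g. ";" or "|". The real class has more fields
    // plus the genRandomList/serialization helpers used elsewhere in these tests.
    public String toCSV(String fieldDelimiter) {
        return String.join(fieldDelimiter, group, name, String.valueOf(age));
    }
}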

Example 52 with ProducerRecord

use of org.apache.kafka.clients.producer.ProducerRecord in project components by Talend.

the class KafkaDatasetTestIT method init.

@Before
public void init() throws TimeoutException {
    // there may exist other topics besides the built-in topics (configured in pom.xml), but ignore them
    // ----------------- Send sample data to TOPIC_IN start --------------------
    String testID = "sampleTest" + new Random().nextInt();
    List<Person> expectedPersons = Person.genRandomList(testID, 10);
    Properties props = new Properties();
    props.put("bootstrap.servers", BOOTSTRAP_HOST);
    props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
    props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
    Producer<Void, String> producer = new KafkaProducer<>(props);
    for (Person person : expectedPersons) {
        ProducerRecord<Void, String> message = new ProducerRecord<>(TOPIC_IN, person.toCSV(";"));
        producer.send(message);
    }
    producer.close();
// ----------------- Send sample data to TOPIC_IN end --------------------
}
Also used : KafkaProducer(org.apache.kafka.clients.producer.KafkaProducer) Random(java.util.Random) ProducerRecord(org.apache.kafka.clients.producer.ProducerRecord) KafkaDatasetProperties(org.talend.components.kafka.dataset.KafkaDatasetProperties) Properties(java.util.Properties) Before(org.junit.Before)
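
Both init() methods send fire-and-forget and rely on producer.close() to flush the buffer, which is adequate for seeding a test topic. Outside a test, Kafka's Callback overload of send() surfaces failures that would otherwise be dropped silently; a minimal sketch (sendChecked is a hypothetical helper, not part of the Talend tests):

static void sendChecked(Producer<Void, String> producer, String topic, String line) {
    // The callback fires once the broker acknowledges the record or the send fails.
    producer.send(new ProducerRecord<>(topic, line), (metadata, exception) -> {
        if (exception != null) {
            exception.printStackTrace();
        }
    });
}

Calling producer.flush() before close() then blocks until every buffered record has been acknowledged.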

Example 53 with ProducerRecord

use of org.apache.kafka.clients.producer.ProducerRecord in project components by Talend.

the class KafkaAvroBeamRuntimeTestIT method avroBasicTest.

/**
 * Read Avro (Person) records and write Avro (Person) records with the schema.
 */
@Test
public void avroBasicTest() throws IOException {
    String testID = "avroBasicTest" + new Random().nextInt();
    expectedPersons = Person.genRandomList(testID, maxRecords);
    // ----------------- Send data to TOPIC_AVRO_IN start --------------------
    Properties props = new Properties();
    props.put("bootstrap.servers", BOOTSTRAP_HOST);
    props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
    props.put("value.serializer", "org.apache.kafka.common.serialization.ByteArraySerializer");
    Producer<Void, byte[]> producer = new KafkaProducer<>(props);
    for (Person person : expectedPersons) {
        ProducerRecord<Void, byte[]> message = new ProducerRecord<>(TOPIC_AVRO_IN, person.serToAvroBytes());
        producer.send(message);
    }
    producer.close();
    // ----------------- Send data to TOPIC_AVRO_IN done --------------------
    KafkaInputProperties inputProperties = new KafkaInputProperties("input");
    inputProperties.init();
    inputProperties.setDatasetProperties(inputDatasetProperties);
    inputProperties.autoOffsetReset.setValue(KafkaInputProperties.OffsetType.EARLIEST);
    inputProperties.useMaxNumRecords.setValue(false);
    // inputProperties.maxNumRecords.setValue(maxRecords.longValue());
    inputProperties.useMaxReadTime.setValue(true);
    inputProperties.maxReadTime.setValue(5000L);
    KafkaOutputProperties outputProperties = new KafkaOutputProperties("output");
    outputProperties.init();
    outputProperties.setDatasetProperties(outputDatasetProperties);
    outputProperties.partitionType.setValue(KafkaOutputProperties.PartitionType.ROUND_ROBIN);
    outputProperties.useCompress.setValue(false);
    KafkaInputPTransformRuntime inputRuntime = new KafkaInputPTransformRuntime();
    inputRuntime.initialize(null, inputProperties);
    KafkaOutputPTransformRuntime outputRuntime = new KafkaOutputPTransformRuntime();
    outputRuntime.initialize(null, outputProperties);
    // ----------------- pipeline start --------------------
    pipeline.apply(inputRuntime)
            .apply(Filter.by(new KafkaCsvBeamRuntimeTestIT.FilterByGroup(testID)))
            .apply(outputRuntime);
    PipelineResult result = pipeline.run();
    // ----------------- pipeline done --------------------
    // ----------------- Read data from TOPIC_AVRO_OUT start --------------------
    props = new Properties();
    props.put("bootstrap.servers", BOOTSTRAP_HOST);
    props.put("group.id", "getResult");
    props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
    props.put("value.deserializer", "org.apache.kafka.common.serialization.ByteArrayDeserializer");
    props.put("auto.offset.reset", "earliest");
    KafkaConsumer<String, byte[]> consumer = new KafkaConsumer<>(props);
    consumer.subscribe(Arrays.asList(TOPIC_AVRO_OUT));
    List<Person> results = new ArrayList<>();
    while (true) {
        ConsumerRecords<String, byte[]> records = consumer.poll(100);
        for (ConsumerRecord<String, byte[]> record : records) {
            Person person = Person.desFromAvroBytes(record.value());
            if (testID.equals(person.group)) {
                results.add(person);
            }
        }
        if (results.size() >= maxRecords) {
            break;
        }
    }
    // ----------------- Read data from TOPIC_AVRO_OUT done --------------------
    assertEquals(expectedPersons, results);
}
Also used : KafkaProducer(org.apache.kafka.clients.producer.KafkaProducer) KafkaInputProperties(org.talend.components.kafka.input.KafkaInputProperties) ArrayList(java.util.ArrayList) PipelineResult(org.apache.beam.sdk.PipelineResult) KafkaConsumer(org.apache.kafka.clients.consumer.KafkaConsumer) KafkaDatasetProperties(org.talend.components.kafka.dataset.KafkaDatasetProperties) Properties(java.util.Properties) KafkaDatastoreProperties(org.talend.components.kafka.datastore.KafkaDatastoreProperties) KafkaOutputProperties(org.talend.components.kafka.output.KafkaOutputProperties) Random(java.util.Random) ProducerRecord(org.apache.kafka.clients.producer.ProducerRecord) Test(org.junit.Test)
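
One caveat: the read-back loop polls until results.size() reaches maxRecords, so a pipeline that drops records hangs the test instead of failing it. A drop-in replacement for the while (true) loop with a deadline (the 30-second budget is an assumption, not from the original test):

    // Bounded variant of the read-back loop above.
    long deadline = System.currentTimeMillis() + 30_000L;
    List<Person> results = new ArrayList<>();
    while (results.size() < maxRecords && System.currentTimeMillis() < deadline) {
        ConsumerRecords<String, byte[]> records = consumer.poll(100);
        for (ConsumerRecord<String, byte[]> record : records) {
            Person person = Person.desFromAvroBytes(record.value());
            if (testID.equals(person.group)) {
                results.add(person);
            }
        }
    }

The assertEquals that follows then fails with a readable diff instead of blocking forever.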

Example 54 with ProducerRecord

use of org.apache.kafka.clients.producer.ProducerRecord in project components by Talend.

the class KafkaAvroBeamRuntimeTestIT method avroBasicTest2.

/**
 * Read Avro (Person) records and write them back keyed by the "name" column (COLUMN partitioning).
 */
@Test
public void avroBasicTest2() throws IOException {
    String testID = "avroBasicTest2" + new Random().nextInt();
    expectedPersons = Person.genRandomList(testID, maxRecords);
    // ----------------- Send data to TOPIC_AVRO_IN start --------------------
    Properties props = new Properties();
    props.put("bootstrap.servers", BOOTSTRAP_HOST);
    props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
    props.put("value.serializer", "org.apache.kafka.common.serialization.ByteArraySerializer");
    Producer<Void, byte[]> producer = new KafkaProducer<>(props);
    for (Person person : expectedPersons) {
        ProducerRecord<Void, byte[]> message = new ProducerRecord<>(TOPIC_AVRO_IN, person.serToAvroBytes());
        producer.send(message);
    }
    producer.close();
    // ----------------- Send data to TOPIC_AVRO_IN done --------------------
    KafkaInputProperties inputProperties = new KafkaInputProperties("input");
    inputProperties.init();
    inputProperties.setDatasetProperties(inputDatasetProperties);
    inputProperties.autoOffsetReset.setValue(KafkaInputProperties.OffsetType.EARLIEST);
    inputProperties.useMaxNumRecords.setValue(false);
    // inputProperties.maxNumRecords.setValue(maxRecords.longValue());
    inputProperties.useMaxReadTime.setValue(true);
    inputProperties.maxReadTime.setValue(5000L);
    KafkaOutputProperties outputProperties = new KafkaOutputProperties("output");
    outputProperties.init();
    outputProperties.setDatasetProperties(outputDatasetProperties);
    outputProperties.partitionType.setValue(KafkaOutputProperties.PartitionType.COLUMN);
    outputProperties.keyColumn.setValue("name");
    outputProperties.useCompress.setValue(false);
    KafkaInputPTransformRuntime inputRuntime = new KafkaInputPTransformRuntime();
    inputRuntime.initialize(null, inputProperties);
    KafkaOutputPTransformRuntime outputRuntime = new KafkaOutputPTransformRuntime();
    outputRuntime.initialize(null, outputProperties);
    // ----------------- pipeline start --------------------
    pipeline.apply(inputRuntime)
            .apply(Filter.by(new KafkaCsvBeamRuntimeTestIT.FilterByGroup(testID)))
            .apply(outputRuntime);
    PipelineResult result = pipeline.run();
    // ----------------- pipeline done --------------------
    // ----------------- Read data from TOPIC_AVRO_OUT start --------------------
    props = new Properties();
    props.put("bootstrap.servers", BOOTSTRAP_HOST);
    props.put("group.id", "getResult");
    props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
    props.put("value.deserializer", "org.apache.kafka.common.serialization.ByteArrayDeserializer");
    props.put("auto.offset.reset", "earliest");
    KafkaConsumer<String, byte[]> consumer = new KafkaConsumer<>(props);
    consumer.subscribe(Arrays.asList(TOPIC_AVRO_OUT));
    List<Person> results = new ArrayList<>();
    List<String> keys = new ArrayList<>();
    while (true) {
        ConsumerRecords<String, byte[]> records = consumer.poll(100);
        for (ConsumerRecord<String, byte[]> record : records) {
            Person person = Person.desFromAvroBytes(record.value());
            if (testID.equals(person.group)) {
                keys.add(record.key());
                results.add(person);
            }
        }
        if (results.size() >= maxRecords) {
            break;
        }
    }
    // ----------------- Read data from TOPIC_AVRO_OUT done --------------------
    assertEquals(expectedPersons, results);
    List<String> expectedKeys = new ArrayList<>();
    for (Person person : results) {
        expectedKeys.add(person.name);
    }
    assertEquals(expectedKeys, keys);
}
Also used : KafkaProducer(org.apache.kafka.clients.producer.KafkaProducer) KafkaInputProperties(org.talend.components.kafka.input.KafkaInputProperties) ArrayList(java.util.ArrayList) PipelineResult(org.apache.beam.sdk.PipelineResult) KafkaConsumer(org.apache.kafka.clients.consumer.KafkaConsumer) KafkaDatasetProperties(org.talend.components.kafka.dataset.KafkaDatasetProperties) Properties(java.util.Properties) KafkaDatastoreProperties(org.talend.components.kafka.datastore.KafkaDatastoreProperties) KafkaOutputProperties(org.talend.components.kafka.output.KafkaOutputProperties) Random(java.util.Random) ProducerRecord(org.apache.kafka.clients.producer.ProducerRecord) Test(org.junit.Test)
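
Compared with avroBasicTest, this variant sets partitionType to COLUMN and keyColumn to "name", so the output runtime keys each record by the person's name, which is exactly what the final key assertion verifies. Assuming the output side uses Kafka's default hash-based partitioner, equal keys should also land on the same partition; a small check one could run over any batch of polled records (hypothetical addition, needs java.util.Map and java.util.HashMap):

    // Records sharing a key should report the same partition in their metadata.
    Map<String, Integer> partitionByKey = new HashMap<>();
    for (ConsumerRecord<String, byte[]> record : records) {
        Integer previous = partitionByKey.put(record.key(), record.partition());
        if (previous != null && previous != record.partition()) {
            throw new AssertionError("key " + record.key() + " spread across partitions");
        }
    }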

Example 55 with ProducerRecord

use of org.apache.kafka.clients.producer.ProducerRecord in project incubator-skywalking by apache.

the class KafkaProducerInterceptor method beforeMethod.

@Override
public void beforeMethod(EnhancedInstance objInst, Method method, Object[] allArguments, Class<?>[] argumentsTypes, MethodInterceptResult result) throws Throwable {
    ContextCarrier contextCarrier = new ContextCarrier();
    ProducerRecord record = (ProducerRecord) allArguments[0];
    String topicName = (String) ((EnhancedInstance) record).getSkyWalkingDynamicField();
    AbstractSpan activeSpan = ContextManager.createExitSpan(
            OPERATE_NAME_PREFIX + topicName + PRODUCER_OPERATE_NAME_SUFFIX,
            contextCarrier, (String) objInst.getSkyWalkingDynamicField());
    Tags.MQ_BROKER.set(activeSpan, (String) objInst.getSkyWalkingDynamicField());
    Tags.MQ_TOPIC.set(activeSpan, topicName);
    SpanLayer.asMQ(activeSpan);
    activeSpan.setComponent(ComponentsDefine.KAFKA);
    CarrierItem next = contextCarrier.items();
    while (next.hasNext()) {
        next = next.next();
        record.headers().add(next.getHeadKey(), next.getHeadValue().getBytes());
    }
    EnhancedInstance callbackInstance = (EnhancedInstance) allArguments[1];
    if (callbackInstance != null) {
        callbackInstance.setSkyWalkingDynamicField(ContextManager.capture());
    }
}
Also used : ContextCarrier(org.apache.skywalking.apm.agent.core.context.ContextCarrier) CarrierItem(org.apache.skywalking.apm.agent.core.context.CarrierItem) ProducerRecord(org.apache.kafka.clients.producer.ProducerRecord) EnhancedInstance(org.apache.skywalking.apm.agent.core.plugin.interceptor.enhance.EnhancedInstance) AbstractSpan(org.apache.skywalking.apm.agent.core.context.trace.AbstractSpan)
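
The producer-side interceptor above creates an exit span and copies every ContextCarrier item into the record's Kafka headers; that is how the trace context rides along to the consumer. The consumer side reverses this: read each header back into a fresh ContextCarrier and open an entry span. A sketch of that counterpart (the operation-name string is an assumption; SkyWalking's real Kafka plugin wires this through its own consumer interceptor):

    ContextCarrier contextCarrier = new ContextCarrier();
    CarrierItem next = contextCarrier.items();
    while (next.hasNext()) {
        next = next.next();
        // Look up the header the producer interceptor wrote for this carrier item.
        Header header = record.headers().lastHeader(next.getHeadKey());
        if (header != null) {
            next.setHeadValue(new String(header.value()));
        }
    }
    AbstractSpan activeSpan = ContextManager.createEntrySpan("Kafka/" + record.topic() + "/Consumer", contextCarrier);
    Tags.MQ_TOPIC.set(activeSpan, record.topic());
    SpanLayer.asMQ(activeSpan);
    activeSpan.setComponent(ComponentsDefine.KAFKA);

Here record is the polled ConsumerRecord and Header is org.apache.kafka.common.header.Header.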

Aggregations

ProducerRecord (org.apache.kafka.clients.producer.ProducerRecord): 193
Test (org.junit.Test): 90
KafkaProducer (org.apache.kafka.clients.producer.KafkaProducer): 57
Properties (java.util.Properties): 50
RecordMetadata (org.apache.kafka.clients.producer.RecordMetadata): 40
ArrayList (java.util.ArrayList): 39
Callback (org.apache.kafka.clients.producer.Callback): 30
Future (java.util.concurrent.Future): 26
TopicPartition (org.apache.kafka.common.TopicPartition): 24
StringSerializer (org.apache.kafka.common.serialization.StringSerializer): 21
HashMap (java.util.HashMap): 20
Random (java.util.Random): 19
IOException (java.io.IOException): 16
ConsumerRecord (org.apache.kafka.clients.consumer.ConsumerRecord): 16
KafkaConsumer (org.apache.kafka.clients.consumer.KafkaConsumer): 16
KafkaException (org.apache.kafka.common.KafkaException): 16
List (java.util.List): 13
MockProducer (org.apache.kafka.clients.producer.MockProducer): 13
DefaultPartitioner (org.apache.kafka.clients.producer.internals.DefaultPartitioner): 12
StreamsException (org.apache.kafka.streams.errors.StreamsException): 12