
Example 1 with KafkaOutputProperties

Use of org.talend.components.kafka.output.KafkaOutputProperties in the components project by Talend.

From the class KafkaAvroBeamRuntimeTestIT, method avroBasicTest.

/**
 * Reads Avro-encoded Person records from Kafka and writes them back in Avro (Person) format with the schema.
 */
@Test
public void avroBasicTest() throws IOException {
    String testID = "avroBasicTest" + new Random().nextInt();
    expectedPersons = Person.genRandomList(testID, maxRecords);
    // ----------------- Send data to TOPIC_AVRO_IN start --------------------
    Properties props = new Properties();
    props.put("bootstrap.servers", BOOTSTRAP_HOST);
    props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
    props.put("value.serializer", "org.apache.kafka.common.serialization.ByteArraySerializer");
    // Keys are never set here, so messages are produced with null keys and the
    // configured String key serializer is effectively unused.
    Producer<Void, byte[]> producer = new KafkaProducer<>(props);
    for (Person person : expectedPersons) {
        ProducerRecord<Void, byte[]> message = new ProducerRecord<>(TOPIC_AVRO_IN, person.serToAvroBytes());
        producer.send(message);
    }
    producer.close();
    // ----------------- Send data to TOPIC_AVRO_IN done --------------------
    KafkaInputProperties inputProperties = new KafkaInputProperties("input");
    inputProperties.init();
    inputProperties.setDatasetProperties(inputDatasetProperties);
    inputProperties.autoOffsetReset.setValue(KafkaInputProperties.OffsetType.EARLIEST);
    inputProperties.useMaxNumRecords.setValue(false);
    // inputProperties.maxNumRecords.setValue(maxRecords.longValue());
    inputProperties.useMaxReadTime.setValue(true);
    inputProperties.maxReadTime.setValue(5000L);
    KafkaOutputProperties outputProperties = new KafkaOutputProperties("output");
    outputProperties.init();
    outputProperties.setDatasetProperties(outputDatasetProperties);
    outputProperties.partitionType.setValue(KafkaOutputProperties.PartitionType.ROUND_ROBIN);
    outputProperties.useCompress.setValue(false);
    KafkaInputPTransformRuntime inputRuntime = new KafkaInputPTransformRuntime();
    inputRuntime.initialize(null, inputProperties);
    KafkaOutputPTransformRuntime outputRuntime = new KafkaOutputPTransformRuntime();
    outputRuntime.initialize(null, outputProperties);
    // ----------------- pipeline start --------------------
    pipeline.apply(inputRuntime).apply(Filter.by(new KafkaCsvBeamRuntimeTestIT.FilterByGroup(testID))).apply(outputRuntime);
    PipelineResult result = pipeline.run();
    // ----------------- pipeline done --------------------
    // ----------------- Read data from TOPIC_AVRO_OUT start --------------------
    props = new Properties();
    props.put("bootstrap.servers", BOOTSTRAP_HOST);
    props.put("group.id", "getResult");
    props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
    props.put("value.deserializer", "org.apache.kafka.common.serialization.ByteArrayDeserializer");
    props.put("auto.offset.reset", "earliest");
    KafkaConsumer<String, byte[]> consumer = new KafkaConsumer<>(props);
    consumer.subscribe(Arrays.asList(TOPIC_AVRO_OUT));
    List<Person> results = new ArrayList<>();
    while (true) {
        ConsumerRecords<String, byte[]> records = consumer.poll(100);
        for (ConsumerRecord<String, byte[]> record : records) {
            Person person = Person.desFromAvroBytes(record.value());
            if (testID.equals(person.group)) {
                results.add(person);
            }
        }
        if (results.size() >= maxRecords) {
            break;
        }
    }
    // ----------------- Read data from TOPIC_AVRO_OUT done --------------------
    assertEquals(expectedPersons, results);
}
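
The test relies on Person.serToAvroBytes() and Person.desFromAvroBytes(), which are not part of this excerpt. A minimal sketch of what such helpers could look like with Avro's generic API, assuming a hypothetical Person schema (the field names here are illustrative, not the project's actual schema):

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import org.apache.avro.Schema;
import org.apache.avro.SchemaBuilder;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.BinaryDecoder;
import org.apache.avro.io.BinaryEncoder;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.EncoderFactory;

public class AvroBytesSketch {

    // Hypothetical Person schema; the real one is defined by the test's Person class.
    static final Schema SCHEMA = SchemaBuilder.record("Person").fields()
            .requiredString("name")
            .requiredString("group")
            .endRecord();

    // Serialize a record to raw Avro binary, as serToAvroBytes() presumably does.
    static byte[] serToAvroBytes(GenericRecord person) throws IOException {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(out, null);
        new GenericDatumWriter<GenericRecord>(SCHEMA).write(person, encoder);
        encoder.flush();
        return out.toByteArray();
    }

    // Decode raw Avro binary back to a record, as desFromAvroBytes() presumably does.
    static GenericRecord desFromAvroBytes(byte[] bytes) throws IOException {
        BinaryDecoder decoder = DecoderFactory.get().binaryDecoder(bytes, null);
        return new GenericDatumReader<GenericRecord>(SCHEMA).read(null, decoder);
    }
}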
Also used : KafkaProducer (org.apache.kafka.clients.producer.KafkaProducer) KafkaInputProperties (org.talend.components.kafka.input.KafkaInputProperties) ArrayList (java.util.ArrayList) PipelineResult (org.apache.beam.sdk.PipelineResult) KafkaConsumer (org.apache.kafka.clients.consumer.KafkaConsumer) KafkaDatasetProperties (org.talend.components.kafka.dataset.KafkaDatasetProperties) Properties (java.util.Properties) KafkaDatastoreProperties (org.talend.components.kafka.datastore.KafkaDatastoreProperties) KafkaOutputProperties (org.talend.components.kafka.output.KafkaOutputProperties) Random (java.util.Random) ProducerRecord (org.apache.kafka.clients.producer.ProducerRecord) Test (org.junit.Test)

Example 2 with KafkaOutputProperties

Use of org.talend.components.kafka.output.KafkaOutputProperties in the components project by Talend.

From the class KafkaAvroBeamRuntimeTestIT, method avroBasicTest2.

/**
 * Reads Avro-encoded Person records and writes them back in Avro (Person) format, partitioned by the "name" column as key.
 */
@Test
public void avroBasicTest2() throws IOException {
    String testID = "avroBasicTest2" + new Random().nextInt();
    expectedPersons = Person.genRandomList(testID, maxRecords);
    // ----------------- Send data to TOPIC_AVRO_IN start --------------------
    Properties props = new Properties();
    props.put("bootstrap.servers", BOOTSTRAP_HOST);
    props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
    props.put("value.serializer", "org.apache.kafka.common.serialization.ByteArraySerializer");
    Producer<Void, byte[]> producer = new KafkaProducer<>(props);
    for (Person person : expectedPersons) {
        ProducerRecord<Void, byte[]> message = new ProducerRecord<>(TOPIC_AVRO_IN, person.serToAvroBytes());
        producer.send(message);
    }
    producer.close();
    // ----------------- Send data to TOPIC_AVRO_IN done --------------------
    KafkaInputProperties inputProperties = new KafkaInputProperties("input");
    inputProperties.init();
    inputProperties.setDatasetProperties(inputDatasetProperties);
    inputProperties.autoOffsetReset.setValue(KafkaInputProperties.OffsetType.EARLIEST);
    inputProperties.useMaxNumRecords.setValue(false);
    // inputProperties.maxNumRecords.setValue(maxRecords.longValue());
    inputProperties.useMaxReadTime.setValue(true);
    inputProperties.maxReadTime.setValue(5000L);
    KafkaOutputProperties outputProperties = new KafkaOutputProperties("output");
    outputProperties.init();
    outputProperties.setDatasetProperties(outputDatasetProperties);
    outputProperties.partitionType.setValue(KafkaOutputProperties.PartitionType.COLUMN);
    outputProperties.keyColumn.setValue("name");
    outputProperties.useCompress.setValue(false);
    KafkaInputPTransformRuntime inputRuntime = new KafkaInputPTransformRuntime();
    inputRuntime.initialize(null, inputProperties);
    KafkaOutputPTransformRuntime outputRuntime = new KafkaOutputPTransformRuntime();
    outputRuntime.initialize(null, outputProperties);
    // ----------------- pipeline start --------------------
    pipeline.apply(inputRuntime).apply(Filter.by(new KafkaCsvBeamRuntimeTestIT.FilterByGroup(testID))).apply(outputRuntime);
    PipelineResult result = pipeline.run();
    // ----------------- pipeline done --------------------
    // ----------------- Read data from TOPIC_AVRO_OUT start --------------------
    props = new Properties();
    props.put("bootstrap.servers", BOOTSTRAP_HOST);
    props.put("group.id", "getResult");
    props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
    props.put("value.deserializer", "org.apache.kafka.common.serialization.ByteArrayDeserializer");
    props.put("auto.offset.reset", "earliest");
    KafkaConsumer<String, byte[]> consumer = new KafkaConsumer<>(props);
    consumer.subscribe(Arrays.asList(TOPIC_AVRO_OUT));
    List<Person> results = new ArrayList<>();
    List<String> keys = new ArrayList<>();
    while (true) {
        ConsumerRecords<String, byte[]> records = consumer.poll(100);
        for (ConsumerRecord<String, byte[]> record : records) {
            Person person = Person.desFromAvroBytes(record.value());
            if (testID.equals(person.group)) {
                keys.add(record.key());
                results.add(person);
            }
        }
        if (results.size() >= maxRecords) {
            break;
        }
    }
    // ----------------- Read data from TOPIC_AVRO_OUT done --------------------
    assertEquals(expectedPersons, results);
    List<String> expectedKeys = new ArrayList<>();
    for (Person person : results) {
        expectedKeys.add(person.name);
    }
    assertEquals(expectedKeys, keys);
}
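
Both pipelines above filter on the test ID with KafkaCsvBeamRuntimeTestIT.FilterByGroup, which the excerpt does not show. A plausible sketch, assuming it is a Beam SerializableFunction over Avro IndexedRecord rows that carry a "group" field:

import org.apache.avro.generic.IndexedRecord;
import org.apache.beam.sdk.transforms.SerializableFunction;

// Sketch only: keeps records whose "group" field equals the current test ID,
// so test runs sharing a topic do not see each other's data.
public class FilterByGroup implements SerializableFunction<IndexedRecord, Boolean> {

    private final String groupID;

    public FilterByGroup(String groupID) {
        this.groupID = groupID;
    }

    @Override
    public Boolean apply(IndexedRecord input) {
        // Look up the "group" field by position in the record's schema.
        int groupPos = input.getSchema().getField("group").pos();
        return groupID.equals(String.valueOf(input.get(groupPos)));
    }
}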
Also used : KafkaProducer (org.apache.kafka.clients.producer.KafkaProducer) KafkaInputProperties (org.talend.components.kafka.input.KafkaInputProperties) ArrayList (java.util.ArrayList) PipelineResult (org.apache.beam.sdk.PipelineResult) KafkaConsumer (org.apache.kafka.clients.consumer.KafkaConsumer) KafkaDatasetProperties (org.talend.components.kafka.dataset.KafkaDatasetProperties) Properties (java.util.Properties) KafkaDatastoreProperties (org.talend.components.kafka.datastore.KafkaDatastoreProperties) KafkaOutputProperties (org.talend.components.kafka.output.KafkaOutputProperties) Random (java.util.Random) ProducerRecord (org.apache.kafka.clients.producer.ProducerRecord) Test (org.junit.Test)

Example 3 with KafkaOutputProperties

Use of org.talend.components.kafka.output.KafkaOutputProperties in the components project by Talend.

From the class KafkaCsvBeamRuntimeTestIT, method basicTest.

public void basicTest(String title, String topicSuffix, String fieldDelim) {
    String testID = title + new Random().nextInt();
    expectedPersons = Person.genRandomList(testID, maxRecords);
    // ----------------- Send data to TOPIC_IN start --------------------
    Properties props = new Properties();
    props.put("bootstrap.servers", BOOTSTRAP_HOST);
    props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
    props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
    Producer<Void, String> producer = new KafkaProducer<>(props);
    for (Person person : expectedPersons) {
        ProducerRecord<Void, String> message = new ProducerRecord<>(TOPIC_IN + topicSuffix, person.toCSV(fieldDelim));
        producer.send(message);
    }
    producer.close();
    // ----------------- Send data to TOPIC_IN done --------------------
    KafkaInputProperties inputProperties = new KafkaInputProperties("input");
    inputProperties.init();
    inputProperties.setDatasetProperties(inputDatasetProperties);
    inputProperties.autoOffsetReset.setValue(KafkaInputProperties.OffsetType.EARLIEST);
    inputProperties.useMaxNumRecords.setValue(false);
    // inputProperties.maxNumRecords.setValue(maxRecords.longValue());
    inputProperties.useMaxReadTime.setValue(true);
    inputProperties.maxReadTime.setValue(5000L);
    KafkaOutputProperties outputProperties = new KafkaOutputProperties("output");
    outputProperties.init();
    outputProperties.setDatasetProperties(outputDatasetProperties);
    outputProperties.partitionType.setValue(KafkaOutputProperties.PartitionType.ROUND_ROBIN);
    outputProperties.useCompress.setValue(false);
    inputDatasetProperties.topic.setValue(TOPIC_IN + topicSuffix);
    outputDatasetProperties.topic.setValue(TOPIC_OUT + topicSuffix);
    KafkaInputPTransformRuntime inputRuntime = new KafkaInputPTransformRuntime();
    inputRuntime.initialize(null, inputProperties);
    KafkaOutputPTransformRuntime outputRuntime = new KafkaOutputPTransformRuntime();
    outputRuntime.initialize(null, outputProperties);
    // ----------------- pipeline start --------------------
    pipeline.apply(inputRuntime).apply(Filter.by(new FilterByGroup(testID))).apply(outputRuntime);
    PipelineResult result = pipeline.run();
    // ----------------- pipeline done --------------------
    // ----------------- Read data from TOPIC_OUT start --------------------
    props = new Properties();
    props.put("bootstrap.servers", BOOTSTRAP_HOST);
    props.put("group.id", "getResult");
    props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
    props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
    props.put("auto.offset.reset", "earliest");
    KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);
    consumer.subscribe(Arrays.asList(TOPIC_OUT + topicSuffix));
    List<Person> results = new ArrayList<>();
    while (true) {
        ConsumerRecords<String, String> records = consumer.poll(100);
        for (ConsumerRecord<String, String> record : records) {
            Person person = Person.fromCSV(record.value(), fieldDelim);
            if (testID.equals(person.group)) {
                results.add(person);
            }
        }
        if (results.size() >= maxRecords) {
            break;
        }
    }
    // ----------------- Read data from TOPIC_OUT done --------------------
    assertEquals(expectedPersons, results);
}
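
The CSV variants depend on Person.toCSV(fieldDelim) and Person.fromCSV(value, fieldDelim), also not shown in the excerpt. A minimal sketch, assuming Person carries name and group fields with name serialized first (which is what keyColumn "field1" in the next example relies on):

import java.util.regex.Pattern;

public class PersonCsvSketch {

    public String name;
    public String group;

    public PersonCsvSketch(String name, String group) {
        this.name = name;
        this.group = group;
    }

    // Join the fields with the delimiter; name comes first, matching "field1" ordering.
    public String toCSV(String fieldDelim) {
        return name + fieldDelim + group;
    }

    // Split on the literal delimiter; Pattern.quote guards against regex
    // metacharacters such as "|" being interpreted as alternation.
    public static PersonCsvSketch fromCSV(String csv, String fieldDelim) {
        String[] fields = csv.split(Pattern.quote(fieldDelim));
        return new PersonCsvSketch(fields[0], fields[1]);
    }
}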
Also used : KafkaProducer (org.apache.kafka.clients.producer.KafkaProducer) KafkaInputProperties (org.talend.components.kafka.input.KafkaInputProperties) ArrayList (java.util.ArrayList) PipelineResult (org.apache.beam.sdk.PipelineResult) KafkaConsumer (org.apache.kafka.clients.consumer.KafkaConsumer) KafkaDatasetProperties (org.talend.components.kafka.dataset.KafkaDatasetProperties) Properties (java.util.Properties) KafkaDatastoreProperties (org.talend.components.kafka.datastore.KafkaDatastoreProperties) KafkaOutputProperties (org.talend.components.kafka.output.KafkaOutputProperties) Random (java.util.Random) ProducerRecord (org.apache.kafka.clients.producer.ProducerRecord)

Example 4 with KafkaOutputProperties

Use of org.talend.components.kafka.output.KafkaOutputProperties in the components project by Talend.

From the class KafkaCsvBeamRuntimeTestIT, method basicTest2.

public void basicTest2(String title, String topicSuffix, String fieldDelim) {
    String testID = title + new Random().nextInt();
    expectedPersons = Person.genRandomList(testID, maxRecords);
    // ----------------- Send data to TOPIC_IN start --------------------
    Properties props = new Properties();
    props.put("bootstrap.servers", BOOTSTRAP_HOST);
    props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
    props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
    Producer<Void, String> producer = new KafkaProducer<>(props);
    for (Person person : expectedPersons) {
        ProducerRecord<Void, String> message = new ProducerRecord<>(TOPIC_IN + topicSuffix, person.toCSV(fieldDelim));
        producer.send(message);
    }
    producer.close();
    // ----------------- Send data to TOPIC_IN done --------------------
    KafkaInputProperties inputProperties = new KafkaInputProperties("input");
    inputProperties.init();
    inputProperties.setDatasetProperties(inputDatasetProperties);
    inputProperties.autoOffsetReset.setValue(KafkaInputProperties.OffsetType.EARLIEST);
    inputProperties.useMaxNumRecords.setValue(false);
    // inputProperties.maxNumRecords.setValue(maxRecords.longValue());
    inputProperties.useMaxReadTime.setValue(true);
    inputProperties.maxReadTime.setValue(5000L);
    KafkaOutputProperties outputProperties = new KafkaOutputProperties("output");
    outputProperties.init();
    outputProperties.setDatasetProperties(outputDatasetProperties);
    outputProperties.partitionType.setValue(KafkaOutputProperties.PartitionType.COLUMN);
    // "field1" is the key column name generated by KafkaAvroRegistry for the first CSV field
    outputProperties.keyColumn.setValue("field1");
    outputProperties.useCompress.setValue(false);
    inputDatasetProperties.topic.setValue(TOPIC_IN + topicSuffix);
    outputDatasetProperties.topic.setValue(TOPIC_OUT + topicSuffix);
    KafkaInputPTransformRuntime inputRuntime = new KafkaInputPTransformRuntime();
    inputRuntime.initialize(null, inputProperties);
    KafkaOutputPTransformRuntime outputRuntime = new KafkaOutputPTransformRuntime();
    outputRuntime.initialize(null, outputProperties);
    // ----------------- pipeline start --------------------
    pipeline.apply(inputRuntime).apply(Filter.by(new FilterByGroup(testID))).apply(outputRuntime);
    PipelineResult result = pipeline.run();
    // ----------------- pipeline done --------------------
    // ----------------- Read data from TOPIC_OUT start --------------------
    props = new Properties();
    props.put("bootstrap.servers", BOOTSTRAP_HOST);
    props.put("group.id", "getResult");
    props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
    props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
    props.put("auto.offset.reset", "earliest");
    KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);
    consumer.subscribe(Arrays.asList(TOPIC_OUT + topicSuffix));
    List<Person> results = new ArrayList<>();
    List<String> keys = new ArrayList<>();
    while (true) {
        ConsumerRecords<String, String> records = consumer.poll(100);
        for (ConsumerRecord<String, String> record : records) {
            Person person = Person.fromCSV(record.value(), fieldDelim);
            if (testID.equals(person.group)) {
                keys.add(record.key());
                results.add(person);
            }
        }
        if (results.size() >= maxRecords) {
            break;
        }
    }
    // ----------------- Read data from TOPIC_OUT done --------------------
    assertEquals(expectedPersons, results);
    List<String> expectedKeys = new ArrayList<>();
    for (Person person : results) {
        expectedKeys.add(person.name);
    }
    assertEquals(expectedKeys, keys);
}
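
One caveat shared by all four examples: the while (true) poll loop never exits if fewer than maxRecords matching messages arrive, so a failing pipeline hangs the test instead of failing it. A bounded variant of the same loop (an illustrative pattern, not the project's code; the 30-second deadline is an assumption):

    // Poll until enough matching records arrive or a deadline passes.
    long deadline = System.currentTimeMillis() + 30_000L;
    List<Person> results = new ArrayList<>();
    while (results.size() < maxRecords && System.currentTimeMillis() < deadline) {
        ConsumerRecords<String, String> records = consumer.poll(100);
        for (ConsumerRecord<String, String> record : records) {
            Person person = Person.fromCSV(record.value(), fieldDelim);
            if (testID.equals(person.group)) {
                results.add(person);
            }
        }
    }
    consumer.close();
    // Fails with a size mismatch rather than hanging when messages are missing.
    assertEquals(expectedPersons, results);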
Also used : KafkaProducer (org.apache.kafka.clients.producer.KafkaProducer) KafkaInputProperties (org.talend.components.kafka.input.KafkaInputProperties) ArrayList (java.util.ArrayList) PipelineResult (org.apache.beam.sdk.PipelineResult) KafkaConsumer (org.apache.kafka.clients.consumer.KafkaConsumer) KafkaDatasetProperties (org.talend.components.kafka.dataset.KafkaDatasetProperties) Properties (java.util.Properties) KafkaDatastoreProperties (org.talend.components.kafka.datastore.KafkaDatastoreProperties) KafkaOutputProperties (org.talend.components.kafka.output.KafkaOutputProperties) Random (java.util.Random) ProducerRecord (org.apache.kafka.clients.producer.ProducerRecord)

Aggregations

ArrayList (java.util.ArrayList): 4 uses
Properties (java.util.Properties): 4 uses
Random (java.util.Random): 4 uses
PipelineResult (org.apache.beam.sdk.PipelineResult): 4 uses
KafkaConsumer (org.apache.kafka.clients.consumer.KafkaConsumer): 4 uses
KafkaProducer (org.apache.kafka.clients.producer.KafkaProducer): 4 uses
ProducerRecord (org.apache.kafka.clients.producer.ProducerRecord): 4 uses
KafkaDatasetProperties (org.talend.components.kafka.dataset.KafkaDatasetProperties): 4 uses
KafkaDatastoreProperties (org.talend.components.kafka.datastore.KafkaDatastoreProperties): 4 uses
KafkaInputProperties (org.talend.components.kafka.input.KafkaInputProperties): 4 uses
KafkaOutputProperties (org.talend.components.kafka.output.KafkaOutputProperties): 4 uses
Test (org.junit.Test): 2 uses