Search in sources :

Example 1 with KafkaSinglePortOutputOperator

Use of org.apache.apex.malhar.kafka.KafkaSinglePortOutputOperator in the apex-malhar project by Apache.

From the class Application, method populateDAG.

@Override
public void populateDAG(DAG dag, Configuration conf) {
    // Source side of the pipeline: a batch sequence generator feeding a
    // passthrough operator that can inject failures (used to exercise recovery).
    BatchSequenceGenerator generator = dag.addOperator("sequenceGenerator", BatchSequenceGenerator.class);
    PassthroughFailOperator passthrough = dag.addOperator("passthrough", PassthroughFailOperator.class);

    // Two Kafka sinks fed from the same stream: one with exactly-once delivery
    // semantics, one with the default (at-least-once) output operator.
    KafkaSinglePortExactlyOnceOutputOperator<String> exactlyOnceOutput =
        dag.addOperator("kafkaExactlyOnceOutputOperator", KafkaSinglePortExactlyOnceOutputOperator.class);
    KafkaSinglePortOutputOperator atLeastOnceOutput =
        dag.addOperator("kafkaOutputOperator", KafkaSinglePortOutputOperator.class);

    dag.addStream("sequenceToPassthrough", generator.out, passthrough.input);
    dag.addStream("linesToKafka", passthrough.output, atLeastOnceOutput.inputPort, exactlyOnceOutput.inputPort);

    // Read both topics back from the earliest offset so the validator
    // observes every message written by either sink.
    KafkaSinglePortInputOperator exactlyTopicReader = dag.addOperator("kafkaTopicExactly", KafkaSinglePortInputOperator.class);
    exactlyTopicReader.setInitialOffset(KafkaSinglePortInputOperator.InitialOffset.EARLIEST.name());
    KafkaSinglePortInputOperator atLeastTopicReader = dag.addOperator("kafkaTopicAtLeast", KafkaSinglePortInputOperator.class);
    atLeastTopicReader.setInitialOffset(KafkaSinglePortInputOperator.InitialOffset.EARLIEST.name());

    // Validator compares the two read-back streams and writes its result to a file.
    ValidationToFile validator = dag.addOperator("validationToFile", ValidationToFile.class);
    dag.addStream("messagesFromExactly", exactlyTopicReader.outputPort, validator.topicExactlyInput);
    dag.addStream("messagesFromAtLeast", atLeastTopicReader.outputPort, validator.topicAtLeastInput);
}
Also used : KafkaSinglePortInputOperator(org.apache.apex.malhar.kafka.KafkaSinglePortInputOperator) KafkaSinglePortOutputOperator(org.apache.apex.malhar.kafka.KafkaSinglePortOutputOperator)

Example 2 with KafkaSinglePortOutputOperator

Use of org.apache.apex.malhar.kafka.KafkaSinglePortOutputOperator in the apex-malhar project by Apache.

From the class KafkaEndpoint, method populateOutputDAG.

@Override
public RelInfo populateOutputDAG(DAG dag, JavaTypeFactory typeFactory) {
    // Let the configured message format contribute its formatting sub-DAG first;
    // its output port is what we wire into the Kafka producer below.
    RelInfo formatterInfo = messageFormat.populateOutputDAG(dag, typeFactory);

    // Kafka producer writing formatted rows to the topic named in the operands.
    KafkaSinglePortOutputOperator producer =
        dag.addOperator(OperatorUtils.getUniqueOperatorName("KafkaOutput"), KafkaSinglePortOutputOperator.class);
    producer.setTopic((String) operands.get(KAFKA_TOPICS));

    // Minimal producer configuration: serializers plus broker list from operands.
    Properties producerProps = new Properties();
    producerProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, KEY_SERIALIZER);
    producerProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, VALUE_SERIALIZER);
    producerProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, operands.get(KAFKA_SERVERS));
    producer.setProperties(producerProps);

    dag.addStream(OperatorUtils.getUniqueStreamName("Formatter", "Kafka"), formatterInfo.getOutPort(), producer.inputPort);

    // Expose the formatter's input side as this endpoint's relational interface;
    // there is no downstream output port (null) since Kafka is the terminal sink.
    return new RelInfo("Output", formatterInfo.getInputPorts(), formatterInfo.getOperator(), null,
        messageFormat.getRowType(typeFactory));
}
Also used : RelInfo(org.apache.apex.malhar.sql.planner.RelInfo) Properties(java.util.Properties) KafkaSinglePortOutputOperator(org.apache.apex.malhar.kafka.KafkaSinglePortOutputOperator)

Example 3 with KafkaSinglePortOutputOperator

Use of org.apache.apex.malhar.kafka.KafkaSinglePortOutputOperator in the apex-malhar project by Apache.

From the class SerDeTest, method testPortEndpoint.

@Test
public void testPortEndpoint() throws IOException, ClassNotFoundException {
    // Validates that a Kafka -> CSV-parse -> SQL -> CSV-format -> Kafka plan
    // built through StreamEndpoints produces a DAG that passes validation.
    LogicalPlan plan = new LogicalPlan();

    // Input/output CSV schemas (field names, types, and date formats).
    String inputSchema = "{\"separator\":\",\",\"quoteChar\":\"\\\"\",\"fields\":[" + "{\"name\":\"RowTime\",\"type\":\"Date\",\"constraints\":{\"format\":\"dd/MM/yyyy hh:mm:ss Z\"}}," + "{\"name\":\"id\",\"type\":\"Integer\"}," + "{\"name\":\"Product\",\"type\":\"String\"}," + "{\"name\":\"units\",\"type\":\"Integer\"}]}";
    String outputSchema = "{\"separator\":\",\",\"quoteChar\":\"\\\"\",\"fields\":[" + "{\"name\":\"RowTime1\",\"type\":\"Date\",\"constraints\":{\"format\":\"dd/MM/yyyy hh:mm:ss Z\"}}," + "{\"name\":\"RowTime2\",\"type\":\"Date\",\"constraints\":{\"format\":\"dd/MM/yyyy hh:mm:ss Z\"}}," + "{\"name\":\"Product\",\"type\":\"String\"}]}";

    // Kafka consumer reading the source topic from the beginning.
    KafkaSinglePortInputOperator consumer = plan.addOperator("KafkaInput", KafkaSinglePortInputOperator.class);
    consumer.setTopics("testdata0");
    consumer.setInitialOffset("EARLIEST");
    Properties consumerProps = new Properties();
    consumerProps.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
    consumerProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, KafkaEndpoint.KEY_DESERIALIZER);
    consumerProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, KafkaEndpoint.VALUE_DESERIALIZER);
    consumer.setConsumerProps(consumerProps);
    consumer.setClusters("localhost:9092");

    // Parse raw Kafka lines into POJOs according to the input schema.
    CsvParser parser = plan.addOperator("CSVParser", CsvParser.class);
    parser.setSchema(inputSchema);
    plan.addStream("KafkaToCSV", consumer.outputPort, parser.in);

    // Format result POJOs back to CSV per the output schema.
    CsvFormatter resultFormatter = plan.addOperator("CSVFormatter", CsvFormatter.class);
    resultFormatter.setSchema(outputSchema);

    // Kafka producer writing formatted results to the result topic.
    KafkaSinglePortOutputOperator producer = plan.addOperator("KafkaOutput", KafkaSinglePortOutputOperator.class);
    producer.setTopic("testresult");
    Properties producerProps = new Properties();
    producerProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, KafkaEndpoint.VALUE_SERIALIZER);
    producerProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, KafkaEndpoint.KEY_SERIALIZER);
    producerProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
    producer.setProperties(producerProps);
    plan.addStream("CSVToKafka", resultFormatter.out, producer.inputPort);

    // Register the parser output and formatter input as SQL tables, then plan
    // the INSERT ... SELECT statement into the DAG.
    SQLExecEnvironment environment = SQLExecEnvironment.getEnvironment();
    environment = environment.registerTable("ORDERS", new StreamEndpoint(parser.out, InputPOJO.class));
    environment = environment.registerTable("SALES", new StreamEndpoint(resultFormatter.in, OutputPOJO.class));
    environment = environment.registerFunction("APEXCONCAT", FileEndpointTest.class, "apex_concat_str");
    environment.executeSQL(plan, "INSERT INTO SALES " + "SELECT STREAM ROWTIME, " + "FLOOR(ROWTIME TO DAY), " + "APEXCONCAT('OILPAINT', SUBSTRING(PRODUCT, 6, 7)) " + "FROM ORDERS WHERE ID > 3 " + "AND " + "PRODUCT LIKE 'paint%'");

    // The assembled logical plan must be structurally valid.
    plan.validate();
}
Also used : StreamEndpoint(org.apache.apex.malhar.sql.table.StreamEndpoint) CsvFormatter(org.apache.apex.malhar.contrib.formatter.CsvFormatter) KafkaSinglePortInputOperator(org.apache.apex.malhar.kafka.KafkaSinglePortInputOperator) LogicalPlan(com.datatorrent.stram.plan.logical.LogicalPlan) CsvParser(org.apache.apex.malhar.contrib.parser.CsvParser) Properties(java.util.Properties) KafkaSinglePortOutputOperator(org.apache.apex.malhar.kafka.KafkaSinglePortOutputOperator) Test(org.junit.Test)

Aggregations

KafkaSinglePortOutputOperator (org.apache.apex.malhar.kafka.KafkaSinglePortOutputOperator)3 Properties (java.util.Properties)2 KafkaSinglePortInputOperator (org.apache.apex.malhar.kafka.KafkaSinglePortInputOperator)2 LogicalPlan (com.datatorrent.stram.plan.logical.LogicalPlan)1 CsvFormatter (org.apache.apex.malhar.contrib.formatter.CsvFormatter)1 CsvParser (org.apache.apex.malhar.contrib.parser.CsvParser)1 RelInfo (org.apache.apex.malhar.sql.planner.RelInfo)1 StreamEndpoint (org.apache.apex.malhar.sql.table.StreamEndpoint)1 Test (org.junit.Test)1