Search in sources:

Example 1 with KafkaEndpoint

use of org.apache.apex.malhar.sql.table.KafkaEndpoint in project apex-malhar by apache.

From the class PureStyleSQLApplication, the populateDAG method:

@Override
public void populateDAG(DAG dag, Configuration conf) {
    // Source definition
    String schemaInName = conf.get("schemaInName");
    String schemaInDef = conf.get("schemaInDef");
    String broker = conf.get("broker");
    String sourceTopic = conf.get("topic");
    // Destination definition
    String schemaOutName = conf.get("schemaOutName");
    String schemaOutDef = conf.get("schemaOutDef");
    String outputFolder = conf.get("outputFolder");
    String outFilename = conf.get("destFileName");
    // SQL statement
    String sql = conf.get("sql");
    // Register the source and sink tables and the UDF, then apply the SQL statement to the DAG
    SQLExecEnvironment.getEnvironment()
        .registerTable(schemaInName, new KafkaEndpoint(broker, sourceTopic, new CSVMessageFormat(schemaInDef)))
        .registerTable(schemaOutName, new FileEndpoint(outputFolder, outFilename, new CSVMessageFormat(schemaOutDef)))
        .registerFunction("APEXCONCAT", this.getClass(), "apex_concat_str")
        .executeSQL(dag, sql);
}
Also used: CSVMessageFormat (org.apache.apex.malhar.sql.table.CSVMessageFormat), KafkaEndpoint (org.apache.apex.malhar.sql.table.KafkaEndpoint), FileEndpoint (org.apache.apex.malhar.sql.table.FileEndpoint)
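The call to registerFunction("APEXCONCAT", this.getClass(), "apex_concat_str") binds the SQL function APEXCONCAT to a public static method named apex_concat_str on the application class; the method itself is not shown on this page. A minimal sketch of what such a method could look like, assuming a simple two-argument string concatenation (the actual signature in apex-malhar may differ):

// Hypothetical UDF backing APEXCONCAT; resolved by name through registerFunction.
// Assumes two String arguments concatenated in order.
public static String apex_concat_str(String s1, String s2) {
    return s1 + s2;
}

With this definition, APEXCONCAT('OILPAINT', 'brush') in a SQL statement would evaluate to 'OILPAINTbrush'.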

Example 2 with KafkaEndpoint

use of org.apache.apex.malhar.sql.table.KafkaEndpoint in project apex-malhar by apache.

From the class ApexSQLTableFactory, the create method:

@SuppressWarnings("unchecked")
@Override
public Table create(SchemaPlus schemaPlus, String name, Map<String, Object> operands, RelDataType rowType) {
    Endpoint endpoint;
    String endpointSystemType = (String) operands.get(Endpoint.ENDPOINT);
    if (endpointSystemType.equalsIgnoreCase(Endpoint.EndpointType.FILE.name())) {
        endpoint = new FileEndpoint();
    } else if (endpointSystemType.equalsIgnoreCase(Endpoint.EndpointType.KAFKA.name())) {
        endpoint = new KafkaEndpoint();
    } else {
        throw new RuntimeException("Cannot find endpoint");
    }
    endpoint.setEndpointOperands((Map<String, Object>) operands.get(Endpoint.SYSTEM_OPERANDS));
    MessageFormat mf;
    String messageFormat = (String) operands.get(MessageFormat.MESSAGE_FORMAT);
    if (messageFormat.equalsIgnoreCase(MessageFormat.MessageFormatType.CSV.name())) {
        mf = new CSVMessageFormat();
    } else {
        throw new RuntimeException("Cannot find message format");
    }
    mf.setMessageFormatOperands((Map<String, Object>) operands.get(MessageFormat.MESSAGE_FORMAT_OPERANDS));
    endpoint.setMessageFormat(mf);
    return new ApexSQLTable(schemaPlus, name, operands, rowType, endpoint);
}
Also used: FileEndpoint (org.apache.apex.malhar.sql.table.FileEndpoint), KafkaEndpoint (org.apache.apex.malhar.sql.table.KafkaEndpoint), Endpoint (org.apache.apex.malhar.sql.table.Endpoint), MessageFormat (org.apache.apex.malhar.sql.table.MessageFormat), CSVMessageFormat (org.apache.apex.malhar.sql.table.CSVMessageFormat)
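The factory is driven entirely by the operands map, which is normally populated from a table definition in a Calcite schema or model. The outer keys are the constants referenced above (Endpoint.ENDPOINT, Endpoint.SYSTEM_OPERANDS, MessageFormat.MESSAGE_FORMAT, MessageFormat.MESSAGE_FORMAT_OPERANDS); the names inside the nested maps below are hypothetical placeholders, since the exact operand keys expected by KafkaEndpoint and CSVMessageFormat are not shown here. A minimal sketch of assembling such a map by hand:

// Requires java.util.Map and java.util.HashMap.
// Nested keys ("servers", "topics", "schema") are illustrative placeholders only.
Map<String, Object> systemOperands = new HashMap<>();
systemOperands.put("servers", "localhost:9092");  // assumed Kafka broker operand
systemOperands.put("topics", "testdata0");        // assumed Kafka topic operand

Map<String, Object> formatOperands = new HashMap<>();
formatOperands.put("schema", "{\"separator\":\",\",\"fields\":[...]}");  // assumed CSV schema operand

Map<String, Object> operands = new HashMap<>();
operands.put(Endpoint.ENDPOINT, Endpoint.EndpointType.KAFKA.name());
operands.put(Endpoint.SYSTEM_OPERANDS, systemOperands);
operands.put(MessageFormat.MESSAGE_FORMAT, MessageFormat.MessageFormatType.CSV.name());
operands.put(MessageFormat.MESSAGE_FORMAT_OPERANDS, formatOperands);

Passing such a map into create(schemaPlus, name, operands, rowType) would take the KAFKA and CSV branches above and return an ApexSQLTable wrapping a KafkaEndpoint with a CSVMessageFormat.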

Example 3 with KafkaEndpoint

use of org.apache.apex.malhar.sql.table.KafkaEndpoint in project apex-malhar by apache.

From the class SerDeTest, the testJoin method:

@Test
public void testJoin() throws IOException, ClassNotFoundException {
    LogicalPlan dag = new LogicalPlan();
    String schemaIn0 = "{\"separator\":\",\",\"quoteChar\":\"\\\"\",\"fields\":[" + "{\"name\":\"RowTime\",\"type\":\"Date\",\"constraints\":{\"format\":\"dd/MM/yyyy hh:mm:ss Z\"}}," + "{\"name\":\"id\",\"type\":\"Integer\"}," + "{\"name\":\"Product\",\"type\":\"String\"}," + "{\"name\":\"units\",\"type\":\"Integer\"}]}";
    String schemaIn1 = "{\"separator\":\",\",\"quoteChar\":\"\\\"\",\"fields\":[" + "{\"name\":\"id\",\"type\":\"Integer\"}," + "{\"name\":\"Category\",\"type\":\"String\"}]}";
    String schemaOut = "{\"separator\":\",\",\"quoteChar\":\"\\\"\",\"fields\":[" + "{\"name\":\"RowTime1\",\"type\":\"Date\",\"constraints\":{\"format\":\"dd/MM/yyyy hh:mm:ss Z\"}}," + "{\"name\":\"RowTime2\",\"type\":\"Date\",\"constraints\":{\"format\":\"dd/MM/yyyy hh:mm:ss Z\"}}," + "{\"name\":\"Product\",\"type\":\"String\"}," + "{\"name\":\"Category\",\"type\":\"String\"}]}";
    String sql = "INSERT INTO SALES SELECT STREAM A.ROWTIME, FLOOR(A.ROWTIME TO DAY), " + "APEXCONCAT('OILPAINT', SUBSTRING(A.PRODUCT, 6, 7)), B.CATEGORY " + "FROM ORDERS AS A " + "JOIN CATEGORY AS B ON A.id = B.id " + "WHERE A.id > 3 AND A.PRODUCT LIKE 'paint%'";
    SQLExecEnvironment.getEnvironment()
        .registerTable("ORDERS", new KafkaEndpoint("localhost:9092", "testdata0", new CSVMessageFormat(schemaIn0)))
        .registerTable("CATEGORY", new KafkaEndpoint("localhost:9092", "testdata1", new CSVMessageFormat(schemaIn1)))
        .registerTable("SALES", new KafkaEndpoint("localhost:9092", "testresult", new CSVMessageFormat(schemaOut)))
        .registerFunction("APEXCONCAT", FileEndpointTest.class, "apex_concat_str")
        .executeSQL(dag, sql);
    dag.validate();
}
Also used: CSVMessageFormat (org.apache.apex.malhar.sql.table.CSVMessageFormat), KafkaEndpoint (org.apache.apex.malhar.sql.table.KafkaEndpoint), LogicalPlan (com.datatorrent.stram.plan.logical.LogicalPlan), Test (org.junit.Test)
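The three schema strings describe the CSV layout of the records the test expects on the Kafka topics. Purely for illustration, a hypothetical pair of input records matching schemaIn0 (ORDERS) and schemaIn1 (CATEGORY); the values are invented, not taken from the test:

// Hypothetical sample records, shown only to illustrate field order and date format.
// ORDERS (schemaIn0): RowTime, id, Product, units
String orderRow = "15/02/2016 10:15:00 +0000,5,paintbrush,37";
// CATEGORY (schemaIn1): id, Category
String categoryRow = "5,Home Improvement";
// Under the SQL above, this pair joins on id = 5, passes the id > 3 and
// LIKE 'paint%' predicates, and SUBSTRING('paintbrush', 6, 7) yields "brush",
// so APEXCONCAT would produce "OILPAINTbrush" alongside the Category value.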

Example 4 with KafkaEndpoint

use of org.apache.apex.malhar.sql.table.KafkaEndpoint in project apex-malhar by apache.

From the class SerDeTest, the testJoinFilter method:

@Test
public void testJoinFilter() throws IOException, ClassNotFoundException {
    LogicalPlan dag = new LogicalPlan();
    String schemaIn0 = "{\"separator\":\",\",\"quoteChar\":\"\\\"\",\"fields\":[" + "{\"name\":\"RowTime\",\"type\":\"Date\",\"constraints\":{\"format\":\"dd/MM/yyyy hh:mm:ss Z\"}}," + "{\"name\":\"id\",\"type\":\"Integer\"}," + "{\"name\":\"Product\",\"type\":\"String\"}," + "{\"name\":\"units\",\"type\":\"Integer\"}]}";
    String schemaIn1 = "{\"separator\":\",\",\"quoteChar\":\"\\\"\",\"fields\":[" + "{\"name\":\"id\",\"type\":\"Integer\"}," + "{\"name\":\"Category\",\"type\":\"String\"}]}";
    String schemaOut = "{\"separator\":\",\",\"quoteChar\":\"\\\"\",\"fields\":[" + "{\"name\":\"RowTime1\",\"type\":\"Date\",\"constraints\":{\"format\":\"dd/MM/yyyy hh:mm:ss Z\"}}," + "{\"name\":\"RowTime2\",\"type\":\"Date\",\"constraints\":{\"format\":\"dd/MM/yyyy hh:mm:ss Z\"}}," + "{\"name\":\"Product\",\"type\":\"String\"}," + "{\"name\":\"Category\",\"type\":\"String\"}]}";
    String sql = "INSERT INTO SALES SELECT STREAM A.ROWTIME, FLOOR(A.ROWTIME TO DAY), " + "APEXCONCAT('OILPAINT', SUBSTRING(A.PRODUCT, 6, 7)), B.CATEGORY " + "FROM ORDERS AS A JOIN CATEGORY AS B ON A.id = B.id AND A.id > 3" + "WHERE A.PRODUCT LIKE 'paint%'";
    SQLExecEnvironment.getEnvironment()
        .registerTable("ORDERS", new KafkaEndpoint("localhost:9092", "testdata0", new CSVMessageFormat(schemaIn0)))
        .registerTable("CATEGORY", new KafkaEndpoint("localhost:9092", "testdata1", new CSVMessageFormat(schemaIn1)))
        .registerTable("SALES", new KafkaEndpoint("localhost:9092", "testresult", new CSVMessageFormat(schemaOut)))
        .registerFunction("APEXCONCAT", FileEndpointTest.class, "apex_concat_str")
        .executeSQL(dag, sql);
    dag.validate();
}
Also used: CSVMessageFormat (org.apache.apex.malhar.sql.table.CSVMessageFormat), KafkaEndpoint (org.apache.apex.malhar.sql.table.KafkaEndpoint), LogicalPlan (com.datatorrent.stram.plan.logical.LogicalPlan), Test (org.junit.Test)

Aggregations

CSVMessageFormat (org.apache.apex.malhar.sql.table.CSVMessageFormat): 4 usages
KafkaEndpoint (org.apache.apex.malhar.sql.table.KafkaEndpoint): 4 usages
LogicalPlan (com.datatorrent.stram.plan.logical.LogicalPlan): 2 usages
FileEndpoint (org.apache.apex.malhar.sql.table.FileEndpoint): 2 usages
Test (org.junit.Test): 2 usages
Endpoint (org.apache.apex.malhar.sql.table.Endpoint): 1 usage
MessageFormat (org.apache.apex.malhar.sql.table.MessageFormat): 1 usage