
Example 1 with FieldInfo

Use of org.apache.apex.malhar.lib.util.FieldInfo in project apex-malhar by apache.

From the class RedisPOJOOperatorTest, method testOutputOperator.

@Test
public void testOutputOperator() throws IOException {
    this.operatorStore = new RedisStore();
    operatorStore.connect();
    String appId = "test_appid";
    int operatorId = 0;
    // Clear committed-window state left over from previous runs.
    operatorStore.removeCommittedWindowId(appId, operatorId);
    operatorStore.disconnect();
    RedisPOJOOutputOperator outputOperator = new RedisPOJOOutputOperator();
    // Map each Redis hash field to a POJO member: "intValue" is read as a
    // field, "getStringValue()" through its getter.
    ArrayList<FieldInfo> fields = new ArrayList<>();
    fields.add(new FieldInfo("column1", "intValue", SupportType.INTEGER));
    fields.add(new FieldInfo("column2", "getStringValue()", SupportType.STRING));
    outputOperator.setDataColumns(fields);
    try {
        com.datatorrent.api.Attribute.AttributeMap.DefaultAttributeMap attributes = new com.datatorrent.api.Attribute.AttributeMap.DefaultAttributeMap();
        attributes.put(DAG.APPLICATION_ID, appId);
        outputOperator.setStore(operatorStore);
        outputOperator.setup(mockOperatorContext(operatorId, attributes));
        // Write a single tuple within one application window.
        outputOperator.beginWindow(101);
        KeyValPair<String, Object> keyVal = new KeyValPair<>("test_abc1", new TestClass(1, "abc"));
        outputOperator.input.process(keyVal);
        outputOperator.endWindow();
        outputOperator.teardown();
        // Reconnect and verify the tuple was written as a Redis hash.
        operatorStore.connect();
        Map<String, String> out = operatorStore.getMap("test_abc1");
        Assert.assertEquals("1", out.get("column1"));
        Assert.assertEquals("abc", out.get("column2"));
    } finally {
        operatorStore.remove("test_abc1");
        operatorStore.disconnect();
    }
}
Also used: ArrayList (java.util.ArrayList), KeyValPair (org.apache.apex.malhar.lib.util.KeyValPair), FieldInfo (org.apache.apex.malhar.lib.util.FieldInfo), Test (org.junit.Test)
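
The two FieldInfo entries above show the two expression styles accepted: a bare field name ("intValue") read directly, and a no-arg getter expression ("getStringValue()"). As a rough sketch of how such an expression can be resolved against a POJO, the reflection-based class below illustrates the idea; apex-malhar itself resolves these expressions through its PojoUtils helpers rather than plain reflection, and the class and method names here are hypothetical.

import java.lang.reflect.Field;
import java.lang.reflect.Method;

public class FieldExpressionSketch {
    // Resolves a FieldInfo-style POJO expression: "getStringValue()" invokes
    // the no-arg getter, while a bare name like "intValue" reads the field.
    public static Object readExpression(Object pojo, String expression) throws Exception {
        if (expression.endsWith("()")) {
            Method getter = pojo.getClass().getMethod(expression.substring(0, expression.length() - 2));
            return getter.invoke(pojo);
        }
        Field field = pojo.getClass().getDeclaredField(expression);
        field.setAccessible(true);
        return field.get(pojo);
    }
}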

Example 2 with FieldInfo

Use of org.apache.apex.malhar.lib.util.FieldInfo in project apex-malhar by apache.

From the class TestPOJO, method getFieldsInfo.

public static List<FieldInfo> getFieldsInfo() {
    List<FieldInfo> fieldsInfo = new ArrayList<>();
    // Each entry: POJO field name, mapped field/column name, and data type.
    fieldsInfo.add(new FieldInfo("name", "name", SupportType.STRING));
    fieldsInfo.add(new FieldInfo("age", "age", SupportType.INTEGER));
    fieldsInfo.add(new FieldInfo("address", "address", SupportType.STRING));
    return fieldsInfo;
}
Also used: ArrayList (java.util.ArrayList), FieldInfo (org.apache.apex.malhar.lib.util.FieldInfo)
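
For context, a POJO matching the three mappings above might look like the minimal sketch below; the actual TestPOJO class in apex-malhar may differ, this only shows the field-and-getter shape the FieldInfo list assumes.

public class TestPOJO {
    private String name;
    private int age;
    private String address;

    public String getName() { return name; }
    public void setName(String name) { this.name = name; }
    public int getAge() { return age; }
    public void setAge(int age) { this.age = age; }
    public String getAddress() { return address; }
    public void setAddress(String address) { this.address = address; }
}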

Example 3 with FieldInfo

Use of org.apache.apex.malhar.lib.util.FieldInfo in project apex-malhar by apache.

From the class JdbcPojoPollableOpeartorTest, method testDBPoller.

@Test
public void testDBPoller() throws Exception {
    insertEvents(10, true, 0);
    JdbcStore store = new JdbcStore();
    store.setDatabaseDriver(DB_DRIVER);
    store.setDatabaseUrl(URL);
    List<FieldInfo> fieldInfos = getFieldInfos();
    Attribute.AttributeMap.DefaultAttributeMap portAttributes = new Attribute.AttributeMap.DefaultAttributeMap();
    portAttributes.put(Context.PortContext.TUPLE_CLASS, TestPOJOEvent.class);
    TestPortContext tpc = new TestPortContext(portAttributes);
    JdbcPOJOPollInputOperator inputOperator = new JdbcPOJOPollInputOperator();
    inputOperator.setStore(store);
    inputOperator.setTableName(TABLE_POJO_NAME);
    inputOperator.setKey("id");
    inputOperator.setFieldInfos(fieldInfos);
    inputOperator.setFetchSize(100);
    inputOperator.setBatchSize(100);
    inputOperator.setPartitionCount(2);
    Collection<com.datatorrent.api.Partitioner.Partition<AbstractJdbcPollInputOperator<Object>>> newPartitions = inputOperator.definePartitions(new ArrayList<Partitioner.Partition<AbstractJdbcPollInputOperator<Object>>>(), null);
    int operatorId = 0;
    for (com.datatorrent.api.Partitioner.Partition<AbstractJdbcPollInputOperator<Object>> partition : newPartitions) {
        Attribute.AttributeMap.DefaultAttributeMap partitionAttributeMap = new Attribute.AttributeMap.DefaultAttributeMap();
        partitionAttributeMap.put(DAG.APPLICATION_ID, APP_ID);
        partitionAttributeMap.put(Context.DAGContext.APPLICATION_PATH, dir);
        OperatorContext partitioningContext = mockOperatorContext(operatorId++, partitionAttributeMap);
        JdbcPOJOPollInputOperator partitionedOperator = (JdbcPOJOPollInputOperator) partition.getPartitionedInstance();
        partitionedOperator.outputPort.setup(tpc);
        partitionedOperator.setScheduledExecutorService(mockscheduler);
        partitionedOperator.setup(partitioningContext);
        partitionedOperator.activate(partitioningContext);
    }
    Iterator<com.datatorrent.api.Partitioner.Partition<AbstractJdbcPollInputOperator<Object>>> itr = newPartitions.iterator();
    // The first partitions serve fixed key ranges; the last partition polls for newly inserted rows.
    JdbcPOJOPollInputOperator firstInstance = (JdbcPOJOPollInputOperator) itr.next().getPartitionedInstance();
    CollectorTestSink<Object> sink1 = new CollectorTestSink<>();
    firstInstance.outputPort.setSink(sink1);
    firstInstance.beginWindow(0);
    firstInstance.pollRecords();
    try {
        // A second poll on a range (non-poller) partition must fail: its
        // connection is closed once its fixed range has been fetched.
        firstInstance.pollRecords();
        Assert.fail("expected closed connection");
    } catch (Exception e) {
        // expected
    }
    firstInstance.emitTuples();
    firstInstance.endWindow();
    Assert.assertEquals("rows from db", 5, sink1.collectedTuples.size());
    for (Object tuple : sink1.collectedTuples) {
        TestPOJOEvent pojoEvent = (TestPOJOEvent) tuple;
        Assert.assertTrue("date", pojoEvent.getStartDate() instanceof Date);
        Assert.assertTrue("date", pojoEvent.getId() < 5);
    }
    JdbcPOJOPollInputOperator secondInstance = (JdbcPOJOPollInputOperator) itr.next().getPartitionedInstance();
    CollectorTestSink<Object> sink2 = new CollectorTestSink<>();
    secondInstance.outputPort.setSink(sink2);
    secondInstance.beginWindow(0);
    secondInstance.pollRecords();
    secondInstance.emitTuples();
    secondInstance.endWindow();
    Assert.assertEquals("rows from db", 5, sink2.collectedTuples.size());
    for (Object tuple : sink2.collectedTuples) {
        TestPOJOEvent pojoEvent = (TestPOJOEvent) tuple;
        Assert.assertTrue("date", pojoEvent.getId() < 10 && pojoEvent.getId() >= 5);
    }
    // Insert four more rows beyond the original range; only the poller partition should see them.
    insertEvents(4, false, 10);
    JdbcPOJOPollInputOperator thirdInstance = (JdbcPOJOPollInputOperator) itr.next().getPartitionedInstance();
    CollectorTestSink<Object> sink3 = new CollectorTestSink<>();
    thirdInstance.outputPort.setSink(sink3);
    thirdInstance.beginWindow(0);
    thirdInstance.pollRecords();
    thirdInstance.pollRecords();
    thirdInstance.emitTuples();
    thirdInstance.endWindow();
    Assert.assertEquals("rows from db", 4, sink3.collectedTuples.size());
}
Also used: Attribute (com.datatorrent.api.Attribute), TestPortContext (org.apache.apex.malhar.lib.helper.TestPortContext), OperatorContextTestHelper.mockOperatorContext (org.apache.apex.malhar.lib.helper.OperatorContextTestHelper.mockOperatorContext), OperatorContext (com.datatorrent.api.Context.OperatorContext), Partitioner (com.datatorrent.api.Partitioner), IOException (java.io.IOException), Date (java.sql.Date), FieldInfo (org.apache.apex.malhar.lib.util.FieldInfo), CollectorTestSink (org.apache.apex.malhar.lib.testbench.CollectorTestSink), Test (org.junit.Test)
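
The assertions above rely on an even split of the 10 pre-inserted rows across the two static partitions (ids 0-4, then 5-9), with the last partition polling for the 4 rows inserted later. The self-contained sketch below reproduces that range arithmetic under hypothetical names; it is not the operator's actual partitioning code.

public class RangeSplitSketch {
    // Splits [lowerBound, upperBound) into partitionCount contiguous ranges.
    public static int[][] splitKeyRange(int lowerBound, int upperBound, int partitionCount) {
        int span = (upperBound - lowerBound) / partitionCount;
        int[][] ranges = new int[partitionCount][2];
        for (int i = 0; i < partitionCount; i++) {
            ranges[i][0] = lowerBound + i * span;
            ranges[i][1] = (i == partitionCount - 1) ? upperBound : ranges[i][0] + span;
        }
        return ranges;
    }

    public static void main(String[] args) {
        // Prints [0, 5) and [5, 10), matching sink1 and sink2 above; a poller
        // partition would then watch for ids >= 10.
        for (int[] r : splitKeyRange(0, 10, 2)) {
            System.out.println("range partition: [" + r[0] + ", " + r[1] + ")");
        }
    }
}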

Example 4 with FieldInfo

Use of org.apache.apex.malhar.lib.util.FieldInfo in project apex-malhar by apache.

From the class JdbcPojoPollableOpeartorTest, method testDBPollerExtraField.

@Test
public void testDBPollerExtraField() throws Exception {
    insertEvents(10, true, 0);
    JdbcStore store = new JdbcStore();
    store.setDatabaseDriver(DB_DRIVER);
    store.setDatabaseUrl(URL);
    List<FieldInfo> fieldInfos = getFieldInfos();
    Attribute.AttributeMap.DefaultAttributeMap portAttributes = new Attribute.AttributeMap.DefaultAttributeMap();
    portAttributes.put(Context.PortContext.TUPLE_CLASS, TestPOJOEvent.class);
    TestPortContext tpc = new TestPortContext(portAttributes);
    JdbcPOJOPollInputOperator inputOperator = new JdbcPOJOPollInputOperator();
    inputOperator.setStore(store);
    inputOperator.setTableName(TABLE_POJO_NAME);
    // Restrict the generated query to an explicit column list.
    inputOperator.setColumnsExpression("ID,STARTDATE,STARTTIME,STARTTIMESTAMP");
    inputOperator.setKey("id");
    inputOperator.setFieldInfos(fieldInfos);
    inputOperator.setFetchSize(100);
    inputOperator.setBatchSize(100);
    inputOperator.setPartitionCount(2);
    Collection<com.datatorrent.api.Partitioner.Partition<AbstractJdbcPollInputOperator<Object>>> newPartitions = inputOperator.definePartitions(new ArrayList<Partitioner.Partition<AbstractJdbcPollInputOperator<Object>>>(), null);
    int operatorId = 0;
    for (com.datatorrent.api.Partitioner.Partition<AbstractJdbcPollInputOperator<Object>> partition : newPartitions) {
        Attribute.AttributeMap.DefaultAttributeMap partitionAttributeMap = new Attribute.AttributeMap.DefaultAttributeMap();
        partitionAttributeMap.put(DAG.APPLICATION_ID, APP_ID);
        partitionAttributeMap.put(Context.DAGContext.APPLICATION_PATH, dir);
        OperatorContext partitioningContext = mockOperatorContext(operatorId++, partitionAttributeMap);
        JdbcPOJOPollInputOperator partitionedOperator = (JdbcPOJOPollInputOperator) partition.getPartitionedInstance();
        partitionedOperator.outputPort.setup(tpc);
        partitionedOperator.setScheduledExecutorService(mockscheduler);
        partitionedOperator.setup(partitioningContext);
        partitionedOperator.activate(partitioningContext);
    }
    Iterator<com.datatorrent.api.Partitioner.Partition<AbstractJdbcPollInputOperator<Object>>> itr = newPartitions.iterator();
    // The first partitions serve fixed key ranges; the last partition polls for newly inserted rows.
    JdbcPOJOPollInputOperator firstInstance = (JdbcPOJOPollInputOperator) itr.next().getPartitionedInstance();
    CollectorTestSink<Object> sink1 = new CollectorTestSink<>();
    firstInstance.outputPort.setSink(sink1);
    firstInstance.beginWindow(0);
    // The range partition's prepared statement is closed after its single poll completes.
    Assert.assertFalse(firstInstance.ps.isClosed());
    firstInstance.pollRecords();
    Assert.assertTrue(firstInstance.ps.isClosed());
    firstInstance.emitTuples();
    firstInstance.endWindow();
    Assert.assertEquals("rows from db", 5, sink1.collectedTuples.size());
    for (Object tuple : sink1.collectedTuples) {
        TestPOJOEvent pojoEvent = (TestPOJOEvent) tuple;
        Assert.assertTrue("date", pojoEvent.getStartDate() instanceof Date);
        Assert.assertTrue("date", pojoEvent.getId() < 5);
    }
    JdbcPOJOPollInputOperator secondInstance = (JdbcPOJOPollInputOperator) itr.next().getPartitionedInstance();
    CollectorTestSink<Object> sink2 = new CollectorTestSink<>();
    secondInstance.outputPort.setSink(sink2);
    secondInstance.beginWindow(0);
    secondInstance.pollRecords();
    secondInstance.emitTuples();
    secondInstance.endWindow();
    Assert.assertEquals("rows from db", 5, sink2.collectedTuples.size());
    for (Object tuple : sink2.collectedTuples) {
        TestPOJOEvent pojoEvent = (TestPOJOEvent) tuple;
        Assert.assertTrue("date", pojoEvent.getId() < 10 && pojoEvent.getId() >= 5);
    }
    // Insert four more rows beyond the original range; only the poller partition should see them.
    insertEvents(4, false, 10);
    JdbcPOJOPollInputOperator thirdInstance = (JdbcPOJOPollInputOperator) itr.next().getPartitionedInstance();
    CollectorTestSink<Object> sink3 = new CollectorTestSink<>();
    thirdInstance.outputPort.setSink(sink3);
    thirdInstance.beginWindow(0);
    thirdInstance.pollRecords();
    thirdInstance.emitTuples();
    thirdInstance.endWindow();
    Assert.assertEquals("rows from db", 4, sink3.collectedTuples.size());
}
Also used: Attribute (com.datatorrent.api.Attribute), TestPortContext (org.apache.apex.malhar.lib.helper.TestPortContext), OperatorContextTestHelper.mockOperatorContext (org.apache.apex.malhar.lib.helper.OperatorContextTestHelper.mockOperatorContext), OperatorContext (com.datatorrent.api.Context.OperatorContext), Partitioner (com.datatorrent.api.Partitioner), Date (java.sql.Date), FieldInfo (org.apache.apex.malhar.lib.util.FieldInfo), CollectorTestSink (org.apache.apex.malhar.lib.testbench.CollectorTestSink), Test (org.junit.Test)
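
This variant differs from testDBPoller mainly in setColumnsExpression, which restricts the generated query to an explicit column list. The sketch below shows, under stated assumptions, what such an expression boils down to: a projection list for the generated SELECT. The class, method, and table name are hypothetical, and the operator's real query construction is more involved (quoting, key ordering, limits).

import java.util.Arrays;
import java.util.List;

public class ColumnsExpressionSketch {
    // Turns a comma-separated columns expression into a simple SELECT.
    public static String buildSelect(String table, String columnsExpression) {
        List<String> columns = Arrays.asList(columnsExpression.split("\\s*,\\s*"));
        return "SELECT " + String.join(", ", columns) + " FROM " + table;
    }

    public static void main(String[] args) {
        // Prints: SELECT ID, STARTDATE, STARTTIME, STARTTIMESTAMP FROM test_pojo
        // ("test_pojo" is a hypothetical stand-in for TABLE_POJO_NAME).
        System.out.println(buildSelect("test_pojo", "ID,STARTDATE,STARTTIME,STARTTIMESTAMP"));
    }
}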

Example 5 with FieldInfo

Use of org.apache.apex.malhar.lib.util.FieldInfo in project apex-malhar by apache.

From the class StreamingJsonParser, method createFieldInfoMap.

/**
 * Parses a separator-delimited specification string in which each record
 * maps a POJO field name to a JSON field name and its data type.
 *
 * @return list of FieldInfo entries
 */
private List<FieldInfo> createFieldInfoMap(String str) {
    fieldInfos = new ArrayList<FieldInfo>();
    StringTokenizer strtok = new StringTokenizer(str, RECORD_SEPARATOR);
    while (strtok.hasMoreTokens()) {
        // Each record holds: POJO field name, JSON field name, SupportType name.
        String[] token = strtok.nextToken().split(FIELD_SEPARATOR);
        try {
            fieldInfos.add(new FieldInfo(token[0], token[1], SupportType.valueOf(token[2])));
        } catch (Exception e) {
            // Skip malformed records rather than failing the whole parse.
            LOG.error("Invalid support type", e);
        }
    }
    return fieldInfos;
}
Also used: StringTokenizer (java.util.StringTokenizer), FieldInfo (org.apache.apex.malhar.lib.util.FieldInfo), ParseException (org.json.simple.parser.ParseException), UnsupportedEncodingException (java.io.UnsupportedEncodingException)
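
A usage sketch for the parsing above. The separator values here are assumptions ("," for RECORD_SEPARATOR, ":" for FIELD_SEPARATOR); the actual constants are defined in StreamingJsonParser.

import java.util.ArrayList;
import java.util.List;
import java.util.StringTokenizer;

public class FieldSpecParseDemo {
    public static void main(String[] args) {
        String recordSeparator = ",";  // assumed stand-in for RECORD_SEPARATOR
        String fieldSeparator = ":";   // assumed stand-in for FIELD_SEPARATOR
        String spec = "name:fullName:STRING,age:age:INTEGER";
        List<String[]> triples = new ArrayList<>();
        StringTokenizer strtok = new StringTokenizer(spec, recordSeparator);
        while (strtok.hasMoreTokens()) {
            triples.add(strtok.nextToken().split(fieldSeparator));
        }
        for (String[] t : triples) {
            // POJO field <- JSON field : type
            System.out.println(t[0] + " <- " + t[1] + " : " + t[2]);
        }
    }
}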

Aggregations

FieldInfo (org.apache.apex.malhar.lib.util.FieldInfo): 36
Test (org.junit.Test): 14
ArrayList (java.util.ArrayList): 11
Attribute (com.datatorrent.api.Attribute): 6
OperatorContext (com.datatorrent.api.Context.OperatorContext): 6
OperatorContextTestHelper.mockOperatorContext (org.apache.apex.malhar.lib.helper.OperatorContextTestHelper.mockOperatorContext): 6
TestPortContext (org.apache.apex.malhar.lib.helper.TestPortContext): 6
CollectorTestSink (org.apache.apex.malhar.lib.testbench.CollectorTestSink): 6
SQLException (java.sql.SQLException): 5
Date (java.util.Date): 4
CountDownLatch (java.util.concurrent.CountDownLatch): 4
Partitioner (com.datatorrent.api.Partitioner): 3
Date (java.sql.Date): 3
ResultSetMetaData (java.sql.ResultSetMetaData): 3
UUID (java.util.UUID): 3
ActiveFieldInfo (org.apache.apex.malhar.lib.db.jdbc.JdbcPOJOInputOperator.ActiveFieldInfo): 3
ColumnDefinitions (com.datastax.driver.core.ColumnDefinitions): 2
DataType (com.datastax.driver.core.DataType): 2
ResultSet (com.datastax.driver.core.ResultSet): 2
Field (java.lang.reflect.Field): 2