Search in sources:

Example 31 with FieldInfo

use of org.apache.apex.malhar.lib.util.FieldInfo in project apex-malhar by apache.

From the class JDBCLoaderTest, method testMysqlDBQuery.

@Test
public void testMysqlDBQuery() throws Exception {
    CountDownLatch settleLatch = new CountDownLatch(1);
    // Parameterized lookup: the two placeholders are bound to AGE and ADDRESS.
    testMeta.dbloader.setQueryStmt("Select id, name from " + testMeta.dbloader.getTableName() + " where AGE = ? and ADDRESS = ?");
    // The latch is never counted down, so this is a bounded ~1s pause to let
    // the loader register the configured query before the lookup.
    settleLatch.await(1000, TimeUnit.MILLISECONDS);
    // Only ID and NAME are projected by the query above.
    ArrayList<FieldInfo> projectedFields = new ArrayList<>();
    projectedFields.add(new FieldInfo("ID", "ID", FieldInfo.SupportType.INTEGER));
    projectedFields.add(new FieldInfo("NAME", "NAME", FieldInfo.SupportType.STRING));
    testMeta.dbloader.setFieldInfo(null, projectedFields);
    // Bind values for the two positional parameters.
    ArrayList<Object> bindValues = new ArrayList<Object>();
    bindValues.add(25);
    bindValues.add("Texas");
    ArrayList<Object> row = (ArrayList<Object>) testMeta.dbloader.get(bindValues);
    Assert.assertEquals("ID", 2, row.get(0));
    Assert.assertEquals("NAME", "Allen", row.get(1).toString().trim());
}
Also used : ArrayList(java.util.ArrayList) CountDownLatch(java.util.concurrent.CountDownLatch) FieldInfo(org.apache.apex.malhar.lib.util.FieldInfo) Test(org.junit.Test)

Example 32 with FieldInfo

use of org.apache.apex.malhar.lib.util.FieldInfo in project apex-malhar by apache.

From the class JDBCLoaderTest, method testMysqlDBLookupIncludeAllKeysEmptyQuery.

@Test
public void testMysqlDBLookupIncludeAllKeysEmptyQuery() throws Exception {
    CountDownLatch settleLatch = new CountDownLatch(1);
    // Lookup is keyed on the primary key column only.
    ArrayList<FieldInfo> keyFields = new ArrayList<>();
    keyFields.add(new FieldInfo("ID", "ID", FieldInfo.SupportType.INTEGER));
    // Project every column of the test table.
    ArrayList<FieldInfo> valueFields = new ArrayList<>();
    valueFields.add(new FieldInfo("ID", "ID", FieldInfo.SupportType.INTEGER));
    valueFields.add(new FieldInfo("NAME", "NAME", FieldInfo.SupportType.STRING));
    valueFields.add(new FieldInfo("AGE", "AGE", FieldInfo.SupportType.INTEGER));
    valueFields.add(new FieldInfo("ADDRESS", "ADDRESS", FieldInfo.SupportType.STRING));
    valueFields.add(new FieldInfo("SALARY", "SALARY", FieldInfo.SupportType.DOUBLE));
    // An empty statement forces the loader to build the SELECT itself from
    // the field info configured below.
    testMeta.dbloader.setQueryStmt("");
    testMeta.dbloader.setFieldInfo(keyFields, valueFields);
    // Latch is never counted down; bounded ~1s wait for setup to settle.
    settleLatch.await(1000, TimeUnit.MILLISECONDS);
    ArrayList<Object> lookupValues = new ArrayList<Object>();
    lookupValues.add(4);
    ArrayList<Object> row = (ArrayList<Object>) testMeta.dbloader.get(lookupValues);
    Assert.assertEquals("ID", 4, row.get(0));
    Assert.assertEquals("NAME", "Mark", row.get(1).toString().trim());
    Assert.assertEquals("AGE", 25, row.get(2));
    Assert.assertEquals("ADDRESS", "Rich-Mond", row.get(3).toString().trim());
    Assert.assertEquals("SALARY", 65000.0, row.get(4));
}
Also used : ArrayList(java.util.ArrayList) CountDownLatch(java.util.concurrent.CountDownLatch) FieldInfo(org.apache.apex.malhar.lib.util.FieldInfo) Test(org.junit.Test)

Example 33 with FieldInfo

use of org.apache.apex.malhar.lib.util.FieldInfo in project apex-malhar by apache.

From the class JdbcPojoPollableOpeartorTest, method getFieldInfos.

/**
 * Builds the column-to-POJO-field mapping used by the poll operators in
 * these tests. The type argument is left null for every entry so the
 * operator derives it from the POJO field itself.
 */
private List<FieldInfo> getFieldInfos() {
    String[][] columnToPojoField = {
        { "ID", "id" },
        { "STARTDATE", "startDate" },
        { "STARTTIME", "startTime" },
        { "STARTTIMESTAMP", "startTimestamp" },
        { "NAME", "name" },
    };
    List<FieldInfo> fieldInfos = Lists.newArrayList();
    for (String[] mapping : columnToPojoField) {
        fieldInfos.add(new FieldInfo(mapping[0], mapping[1], null));
    }
    return fieldInfos;
}
Also used : FieldInfo(org.apache.apex.malhar.lib.util.FieldInfo)

Example 34 with FieldInfo

use of org.apache.apex.malhar.lib.util.FieldInfo in project apex-malhar by apache.

From the class JdbcPojoPollableOpeartorTest, method testRecovery.

@Test
public void testRecovery() throws IOException {
    int operatorId = 1;
    // Simulate restart-after-failure: the window manager reports window 1 as
    // the largest completed window, and the replay state for that window says
    // rows 0..4 were already emitted before the crash.
    when(windowDataManagerMock.getLargestCompletedWindow()).thenReturn(1L);
    when(windowDataManagerMock.retrieve(1)).thenReturn(WindowData.of(null, 0, 4));
    // Seed the table with 10 rows starting at offset 0 (truncating first).
    insertEvents(10, true, 0);
    JdbcStore store = new JdbcStore();
    store.setDatabaseDriver(DB_DRIVER);
    store.setDatabaseUrl(URL);
    List<FieldInfo> fieldInfos = getFieldInfos();
    // Output port needs the tuple class attribute to build the POJO setters.
    Attribute.AttributeMap.DefaultAttributeMap portAttributes = new Attribute.AttributeMap.DefaultAttributeMap();
    portAttributes.put(Context.PortContext.TUPLE_CLASS, TestPOJOEvent.class);
    TestPortContext tpc = new TestPortContext(portAttributes);
    Attribute.AttributeMap.DefaultAttributeMap partitionAttributeMap = new Attribute.AttributeMap.DefaultAttributeMap();
    partitionAttributeMap.put(DAG.APPLICATION_ID, APP_ID);
    partitionAttributeMap.put(Context.DAGContext.APPLICATION_PATH, dir);
    OperatorContext context = mockOperatorContext(operatorId, partitionAttributeMap);
    JdbcPOJOPollInputOperator inputOperator = new JdbcPOJOPollInputOperator();
    inputOperator.setStore(store);
    inputOperator.setTableName(TABLE_POJO_NAME);
    inputOperator.setKey("id");
    inputOperator.setFieldInfos(fieldInfos);
    inputOperator.setFetchSize(100);
    inputOperator.setBatchSize(100);
    // Set partition state directly because definePartitions() is not invoked
    // in this test.
    inputOperator.lastEmittedRow = 0;
    inputOperator.isPollerPartition = true;
    inputOperator.rangeQueryPair = new KeyValPair<>(0, 8);
    inputOperator.outputPort.setup(tpc);
    // Mocked scheduler lets us verify when polling is (not) scheduled.
    inputOperator.setScheduledExecutorService(mockscheduler);
    inputOperator.setup(context);
    inputOperator.setWindowManager(windowDataManagerMock);
    inputOperator.activate(context);
    CollectorTestSink<Object> sink = new CollectorTestSink<>();
    inputOperator.outputPort.setSink(sink);
    // Window 0 is below the largest completed window (1), so the operator is
    // replaying and must not schedule any polling tasks yet.
    inputOperator.beginWindow(0);
    verify(mockscheduler, times(0)).scheduleAtFixedRate(any(Runnable.class), anyLong(), anyLong(), any(TimeUnit.class));
    verify(mockscheduler, times(0)).schedule(any(Runnable.class), anyLong(), any(TimeUnit.class));
    inputOperator.emitTuples();
    inputOperator.endWindow();
    // On reaching window 1 (replay complete) the poller partition should
    // schedule its fixed-rate poll task exactly once.
    inputOperator.beginWindow(1);
    verify(mockscheduler, times(1)).scheduleAtFixedRate(any(Runnable.class), anyLong(), anyLong(), any(TimeUnit.class));
    verify(mockscheduler, times(0)).schedule(any(Runnable.class), anyLong(), any(TimeUnit.class));
}
Also used : Attribute(com.datatorrent.api.Attribute) TestPortContext(org.apache.apex.malhar.lib.helper.TestPortContext) OperatorContextTestHelper.mockOperatorContext(org.apache.apex.malhar.lib.helper.OperatorContextTestHelper.mockOperatorContext) OperatorContext(com.datatorrent.api.Context.OperatorContext) TimeUnit(java.util.concurrent.TimeUnit) FieldInfo(org.apache.apex.malhar.lib.util.FieldInfo) CollectorTestSink(org.apache.apex.malhar.lib.testbench.CollectorTestSink) Test(org.junit.Test)

Example 35 with FieldInfo

use of org.apache.apex.malhar.lib.util.FieldInfo in project apex-malhar by apache.

From the class JdbcPojoPollableOpeartorTest, method testPollWithOffsetRebase.

@Test
public void testPollWithOffsetRebase() throws Exception {
    // Start from an empty table (insert 0 rows with truncate = true).
    insertEvents(0, true, 0);
    JdbcStore store = new JdbcStore();
    store.setDatabaseDriver(DB_DRIVER);
    store.setDatabaseUrl(URL);
    List<FieldInfo> fieldInfos = getFieldInfos();
    // Output port needs the tuple class attribute to build the POJO setters.
    Attribute.AttributeMap.DefaultAttributeMap portAttributes = new Attribute.AttributeMap.DefaultAttributeMap();
    portAttributes.put(Context.PortContext.TUPLE_CLASS, TestPOJOEvent.class);
    TestPortContext tpc = new TestPortContext(portAttributes);
    JdbcPOJOPollInputOperator inputOperator = new JdbcPOJOPollInputOperator();
    inputOperator.setStore(store);
    inputOperator.setTableName(TABLE_POJO_NAME);
    inputOperator.setColumnsExpression("ID,STARTDATE,STARTTIME,STARTTIMESTAMP");
    inputOperator.setKey("id");
    inputOperator.setFieldInfos(fieldInfos);
    inputOperator.setFetchSize(100);
    inputOperator.setBatchSize(100);
    inputOperator.setPartitionCount(1);
    // Enable offset rebasing so the poller re-anchors its offset when new
    // rows arrive after the initial range scan — the behavior under test.
    inputOperator.setRebaseOffset(true);
    Collection<com.datatorrent.api.Partitioner.Partition<AbstractJdbcPollInputOperator<Object>>> newPartitions = inputOperator.definePartitions(new ArrayList<Partitioner.Partition<AbstractJdbcPollInputOperator<Object>>>(), null);
    int operatorId = 0;
    // Wire up and activate every generated partition.
    for (com.datatorrent.api.Partitioner.Partition<AbstractJdbcPollInputOperator<Object>> partition : newPartitions) {
        Attribute.AttributeMap.DefaultAttributeMap partitionAttributeMap = new Attribute.AttributeMap.DefaultAttributeMap();
        partitionAttributeMap.put(DAG.APPLICATION_ID, APP_ID);
        partitionAttributeMap.put(Context.DAGContext.APPLICATION_PATH, dir);
        OperatorContext partitioningContext = mockOperatorContext(operatorId++, partitionAttributeMap);
        JdbcPOJOPollInputOperator parition = (JdbcPOJOPollInputOperator) partition.getPartitionedInstance();
        parition.outputPort.setup(tpc);
        parition.setScheduledExecutorService(mockscheduler);
        parition.setup(partitioningContext);
        parition.activate(partitioningContext);
    }
    Iterator<com.datatorrent.api.Partitioner.Partition<AbstractJdbcPollInputOperator<Object>>> itr = newPartitions.iterator();
    // First partition is for range queries,last is for polling queries
    JdbcPOJOPollInputOperator firstInstance = (JdbcPOJOPollInputOperator) itr.next().getPartitionedInstance();
    int rows = 0;
    int windowId = 0;
    // Insert 4 rows (no truncate) after activation; the poller should pick
    // them all up in its first poll.
    insertEvents(4, false, rows);
    rows += 4;
    JdbcPOJOPollInputOperator poller = (JdbcPOJOPollInputOperator) itr.next().getPartitionedInstance();
    CollectorTestSink<Object> sink3 = new CollectorTestSink<>();
    poller.outputPort.setSink(sink3);
    poller.beginWindow(windowId++);
    poller.pollRecords();
    poller.emitTuples();
    Assert.assertEquals("emitted", rows, sink3.collectedTuples.size());
    poller.endWindow();
    // One more row arrives; with rebaseOffset the poller must re-anchor and
    // emit exactly that one additional record in the next window.
    insertEvents(1, false, rows);
    rows += 1;
    poller.beginWindow(windowId++);
    // offset rebase, fetch 1 record
    poller.pollRecords();
    poller.emitTuples();
    Assert.assertEquals("emitted", rows, sink3.collectedTuples.size());
    poller.endWindow();
}
Also used : Attribute(com.datatorrent.api.Attribute) TestPortContext(org.apache.apex.malhar.lib.helper.TestPortContext) OperatorContextTestHelper.mockOperatorContext(org.apache.apex.malhar.lib.helper.OperatorContextTestHelper.mockOperatorContext) OperatorContext(com.datatorrent.api.Context.OperatorContext) FieldInfo(org.apache.apex.malhar.lib.util.FieldInfo) Partitioner(com.datatorrent.api.Partitioner) CollectorTestSink(org.apache.apex.malhar.lib.testbench.CollectorTestSink) Test(org.junit.Test)

Aggregations

FieldInfo (org.apache.apex.malhar.lib.util.FieldInfo)36 Test (org.junit.Test)14 ArrayList (java.util.ArrayList)11 Attribute (com.datatorrent.api.Attribute)6 OperatorContext (com.datatorrent.api.Context.OperatorContext)6 OperatorContextTestHelper.mockOperatorContext (org.apache.apex.malhar.lib.helper.OperatorContextTestHelper.mockOperatorContext)6 TestPortContext (org.apache.apex.malhar.lib.helper.TestPortContext)6 CollectorTestSink (org.apache.apex.malhar.lib.testbench.CollectorTestSink)6 SQLException (java.sql.SQLException)5 Date (java.util.Date)4 CountDownLatch (java.util.concurrent.CountDownLatch)4 Partitioner (com.datatorrent.api.Partitioner)3 Date (java.sql.Date)3 ResultSetMetaData (java.sql.ResultSetMetaData)3 UUID (java.util.UUID)3 ActiveFieldInfo (org.apache.apex.malhar.lib.db.jdbc.JdbcPOJOInputOperator.ActiveFieldInfo)3 ColumnDefinitions (com.datastax.driver.core.ColumnDefinitions)2 DataType (com.datastax.driver.core.DataType)2 ResultSet (com.datastax.driver.core.ResultSet)2 Field (java.lang.reflect.Field)2