
Example 36 with OperatorContext

Use of com.datatorrent.api.Context.OperatorContext in the apex-malhar project by Apache.

From the class JdbcPojoPollableOpeartorTest, method testRecovery.

@Test
public void testRecovery() throws IOException {
    int operatorId = 1;
    when(windowDataManagerMock.getLargestCompletedWindow()).thenReturn(1L);
    when(windowDataManagerMock.retrieve(1)).thenReturn(WindowData.of(null, 0, 4));
    insertEvents(10, true, 0);
    JdbcStore store = new JdbcStore();
    store.setDatabaseDriver(DB_DRIVER);
    store.setDatabaseUrl(URL);
    List<FieldInfo> fieldInfos = getFieldInfos();
    Attribute.AttributeMap.DefaultAttributeMap portAttributes = new Attribute.AttributeMap.DefaultAttributeMap();
    portAttributes.put(Context.PortContext.TUPLE_CLASS, TestPOJOEvent.class);
    TestPortContext tpc = new TestPortContext(portAttributes);
    Attribute.AttributeMap.DefaultAttributeMap partitionAttributeMap = new Attribute.AttributeMap.DefaultAttributeMap();
    partitionAttributeMap.put(DAG.APPLICATION_ID, APP_ID);
    partitionAttributeMap.put(Context.DAGContext.APPLICATION_PATH, dir);
    OperatorContext context = mockOperatorContext(operatorId, partitionAttributeMap);
    JdbcPOJOPollInputOperator inputOperator = new JdbcPOJOPollInputOperator();
    inputOperator.setStore(store);
    inputOperator.setTableName(TABLE_POJO_NAME);
    inputOperator.setKey("id");
    inputOperator.setFieldInfos(fieldInfos);
    inputOperator.setFetchSize(100);
    inputOperator.setBatchSize(100);
    // set directly because the partitioning logic is not invoked in this test
    inputOperator.lastEmittedRow = 0;
    inputOperator.isPollerPartition = true;
    inputOperator.rangeQueryPair = new KeyValPair<>(0, 8);
    inputOperator.outputPort.setup(tpc);
    inputOperator.setScheduledExecutorService(mockscheduler);
    inputOperator.setup(context);
    inputOperator.setWindowManager(windowDataManagerMock);
    inputOperator.activate(context);
    CollectorTestSink<Object> sink = new CollectorTestSink<>();
    inputOperator.outputPort.setSink(sink);
    inputOperator.beginWindow(0);
    verify(mockscheduler, times(0)).scheduleAtFixedRate(any(Runnable.class), anyLong(), anyLong(), any(TimeUnit.class));
    verify(mockscheduler, times(0)).schedule(any(Runnable.class), anyLong(), any(TimeUnit.class));
    inputOperator.emitTuples();
    inputOperator.endWindow();
    inputOperator.beginWindow(1);
    verify(mockscheduler, times(1)).scheduleAtFixedRate(any(Runnable.class), anyLong(), anyLong(), any(TimeUnit.class));
    verify(mockscheduler, times(0)).schedule(any(Runnable.class), anyLong(), any(TimeUnit.class));
}
Also used : Attribute(com.datatorrent.api.Attribute) TestPortContext(org.apache.apex.malhar.lib.helper.TestPortContext) OperatorContextTestHelper.mockOperatorContext(org.apache.apex.malhar.lib.helper.OperatorContextTestHelper.mockOperatorContext) OperatorContext(com.datatorrent.api.Context.OperatorContext) TimeUnit(java.util.concurrent.TimeUnit) FieldInfo(org.apache.apex.malhar.lib.util.FieldInfo) CollectorTestSink(org.apache.apex.malhar.lib.testbench.CollectorTestSink) Test(org.junit.Test)
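
The fields windowDataManagerMock and mockscheduler referenced above are created in the test class's setup, which this excerpt omits. A minimal sketch of how they might be declared with Mockito follows; the field names match the excerpt, but the package of WindowDataManager is assumed to follow the org.apache.apex.malhar.lib layout used elsewhere on this page.

import java.util.concurrent.ScheduledExecutorService;

import org.apache.apex.malhar.lib.wal.WindowDataManager;

import static org.mockito.Mockito.mock;

public class MockSetupSketch {
    // Simulates a restart where window 1 had already been completed before the failure.
    final WindowDataManager windowDataManagerMock = mock(WindowDataManager.class);
    // Mocked so the test can verify exactly when polling gets scheduled.
    final ScheduledExecutorService mockscheduler = mock(ScheduledExecutorService.class);
}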

Example 37 with OperatorContext

Use of com.datatorrent.api.Context.OperatorContext in the apex-malhar project by Apache.

From the class JdbcPojoPollableOpeartorTest, method testPollWithOffsetRebase.

@Test
public void testPollWithOffsetRebase() throws Exception {
    // clear table
    insertEvents(0, true, 0);
    JdbcStore store = new JdbcStore();
    store.setDatabaseDriver(DB_DRIVER);
    store.setDatabaseUrl(URL);
    List<FieldInfo> fieldInfos = getFieldInfos();
    Attribute.AttributeMap.DefaultAttributeMap portAttributes = new Attribute.AttributeMap.DefaultAttributeMap();
    portAttributes.put(Context.PortContext.TUPLE_CLASS, TestPOJOEvent.class);
    TestPortContext tpc = new TestPortContext(portAttributes);
    JdbcPOJOPollInputOperator inputOperator = new JdbcPOJOPollInputOperator();
    inputOperator.setStore(store);
    inputOperator.setTableName(TABLE_POJO_NAME);
    inputOperator.setColumnsExpression("ID,STARTDATE,STARTTIME,STARTTIMESTAMP");
    inputOperator.setKey("id");
    inputOperator.setFieldInfos(fieldInfos);
    inputOperator.setFetchSize(100);
    inputOperator.setBatchSize(100);
    inputOperator.setPartitionCount(1);
    inputOperator.setRebaseOffset(true);
    Collection<com.datatorrent.api.Partitioner.Partition<AbstractJdbcPollInputOperator<Object>>> newPartitions = inputOperator.definePartitions(new ArrayList<Partitioner.Partition<AbstractJdbcPollInputOperator<Object>>>(), null);
    int operatorId = 0;
    for (com.datatorrent.api.Partitioner.Partition<AbstractJdbcPollInputOperator<Object>> partition : newPartitions) {
        Attribute.AttributeMap.DefaultAttributeMap partitionAttributeMap = new Attribute.AttributeMap.DefaultAttributeMap();
        partitionAttributeMap.put(DAG.APPLICATION_ID, APP_ID);
        partitionAttributeMap.put(Context.DAGContext.APPLICATION_PATH, dir);
        OperatorContext partitioningContext = mockOperatorContext(operatorId++, partitionAttributeMap);
        JdbcPOJOPollInputOperator parition = (JdbcPOJOPollInputOperator) partition.getPartitionedInstance();
        parition.outputPort.setup(tpc);
        parition.setScheduledExecutorService(mockscheduler);
        parition.setup(partitioningContext);
        parition.activate(partitioningContext);
    }
    Iterator<com.datatorrent.api.Partitioner.Partition<AbstractJdbcPollInputOperator<Object>>> itr = newPartitions.iterator();
    // The first partition handles range queries, the last handles polling queries
    JdbcPOJOPollInputOperator firstInstance = (JdbcPOJOPollInputOperator) itr.next().getPartitionedInstance();
    int rows = 0;
    int windowId = 0;
    insertEvents(4, false, rows);
    rows += 4;
    JdbcPOJOPollInputOperator poller = (JdbcPOJOPollInputOperator) itr.next().getPartitionedInstance();
    CollectorTestSink<Object> sink3 = new CollectorTestSink<>();
    poller.outputPort.setSink(sink3);
    poller.beginWindow(windowId++);
    poller.pollRecords();
    poller.emitTuples();
    Assert.assertEquals("emitted", rows, sink3.collectedTuples.size());
    poller.endWindow();
    insertEvents(1, false, rows);
    rows += 1;
    poller.beginWindow(windowId++);
    // after the offset rebase, fetch the 1 new record
    poller.pollRecords();
    poller.emitTuples();
    Assert.assertEquals("emitted", rows, sink3.collectedTuples.size());
    poller.endWindow();
}
Also used : Attribute(com.datatorrent.api.Attribute) TestPortContext(org.apache.apex.malhar.lib.helper.TestPortContext) OperatorContextTestHelper.mockOperatorContext(org.apache.apex.malhar.lib.helper.OperatorContextTestHelper.mockOperatorContext) OperatorContext(com.datatorrent.api.Context.OperatorContext) FieldInfo(org.apache.apex.malhar.lib.util.FieldInfo) Partitioner(com.datatorrent.api.Partitioner) CollectorTestSink(org.apache.apex.malhar.lib.testbench.CollectorTestSink) Test(org.junit.Test)

Example 38 with OperatorContext

Use of com.datatorrent.api.Context.OperatorContext in the apex-malhar project by Apache.

From the class JdbcPojoOperatorTest, method testJdbcPojoInsertOutputOperatorNullId.

/**
 * This test assumes a direct mapping of POJO fields to DB columns.
 * The column id1 is non-nullable in the DB but has no matching field in the POJO:
 * POJO(id, name) -> DB(id1, name)
 */
@Test
public void testJdbcPojoInsertOutputOperatorNullId() {
    JdbcTransactionalStore transactionalStore = new JdbcTransactionalStore();
    transactionalStore.setDatabaseDriver(DB_DRIVER);
    transactionalStore.setDatabaseUrl(URL);
    com.datatorrent.api.Attribute.AttributeMap.DefaultAttributeMap attributeMap = new com.datatorrent.api.Attribute.AttributeMap.DefaultAttributeMap();
    attributeMap.put(DAG.APPLICATION_ID, APP_ID);
    OperatorContext context = mockOperatorContext(OPERATOR_ID, attributeMap);
    TestPOJOOutputOperator outputOperator = new TestPOJOOutputOperator();
    outputOperator.setBatchSize(3);
    outputOperator.setTablename(TABLE_POJO_NAME_ID_DIFF);
    outputOperator.setStore(transactionalStore);
    outputOperator.setup(context);
    Attribute.AttributeMap.DefaultAttributeMap portAttributes = new Attribute.AttributeMap.DefaultAttributeMap();
    portAttributes.put(Context.PortContext.TUPLE_CLASS, TestPOJOEvent.class);
    TestPortContext tpc = new TestPortContext(portAttributes);
    outputOperator.input.setup(tpc);
    boolean exceptionOccurred = false;
    try {
        outputOperator.activate(context);
    } catch (Exception e) {
        exceptionOccurred = true;
        Assert.assertTrue(e instanceof RuntimeException);
        Assert.assertTrue(e.getMessage().toLowerCase().contains("id1 not found in pojo"));
    }
    Assert.assertTrue(exceptionOccurred);
}
Also used : Attribute(com.datatorrent.api.Attribute) TestPortContext(org.apache.apex.malhar.lib.helper.TestPortContext) SQLException(java.sql.SQLException) OperatorContextTestHelper.mockOperatorContext(org.apache.apex.malhar.lib.helper.OperatorContextTestHelper.mockOperatorContext) OperatorContext(com.datatorrent.api.Context.OperatorContext) Test(org.junit.Test)
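
Roughly how the mismatch looks; the table layout and POJO shape below are assumptions for illustration, not the project's actual TestPOJOEvent or DDL. Because the column id1 is declared NOT NULL and the POJO has no field that maps to it, activate() throws the RuntimeException whose message contains "id1 not found in pojo", as asserted above.

// Assumed table behind TABLE_POJO_NAME_ID_DIFF: (id1 INTEGER NOT NULL, name VARCHAR(255))
// Assumed POJO shape: only id and name are exposed, so nothing maps to the NOT NULL column id1.
public class PojoIdDiffSketch {
    private int id;       // the table has no column named "id", only "id1"
    private String name;  // maps to the nullable "name" column

    public int getId() { return id; }
    public void setId(int id) { this.id = id; }
    public String getName() { return name; }
    public void setName(String name) { this.name = name; }
}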

Example 39 with OperatorContext

Use of com.datatorrent.api.Context.OperatorContext in the apex-malhar project by Apache.

From the class JdbcPojoOperatorTest, method testJdbcPojoOutputOperatorMerge.

@Test
public void testJdbcPojoOutputOperatorMerge() {
    JdbcTransactionalStore transactionalStore = new JdbcTransactionalStore();
    transactionalStore.setDatabaseDriver(DB_DRIVER);
    transactionalStore.setDatabaseUrl(URL);
    com.datatorrent.api.Attribute.AttributeMap.DefaultAttributeMap attributeMap = new com.datatorrent.api.Attribute.AttributeMap.DefaultAttributeMap();
    attributeMap.put(DAG.APPLICATION_ID, APP_ID);
    OperatorContext context = mockOperatorContext(OPERATOR_ID, attributeMap);
    TestPOJOOutputOperator.TestPOJONonInsertOutputOperator updateOperator = new TestPOJOOutputOperator.TestPOJONonInsertOutputOperator();
    updateOperator.setBatchSize(3);
    updateOperator.setStore(transactionalStore);
    updateOperator.setSqlStatement("MERGE INTO " + TABLE_POJO_NAME + " AS T USING (VALUES (?, ?)) AS FOO(id, name) " + "ON T.id = FOO.id " + "WHEN MATCHED THEN UPDATE SET name = FOO.name " + "WHEN NOT MATCHED THEN INSERT( id, name ) VALUES (FOO.id, FOO.name);");
    List<JdbcFieldInfo> fieldInfos = Lists.newArrayList();
    fieldInfos.add(new JdbcFieldInfo("id", "id", null, Types.INTEGER));
    fieldInfos.add(new JdbcFieldInfo("name", "name", null, Types.VARCHAR));
    updateOperator.setFieldInfos(fieldInfos);
    updateOperator.setup(context);
    Attribute.AttributeMap.DefaultAttributeMap portAttributes = new Attribute.AttributeMap.DefaultAttributeMap();
    portAttributes.put(Context.PortContext.TUPLE_CLASS, TestPOJOEvent.class);
    TestPortContext tpc = new TestPortContext(portAttributes);
    updateOperator.input.setup(tpc);
    updateOperator.activate(context);
    List<TestPOJOEvent> events = Lists.newArrayList();
    for (int i = 0; i < 10; i++) {
        events.add(new TestPOJOEvent(i, "test" + i));
    }
    for (int i = 0; i < 5; i++) {
        events.add(new TestPOJOEvent(i, "test" + 100));
    }
    updateOperator.getDistinctNonUnique();
    updateOperator.beginWindow(0);
    for (TestPOJOEvent event : events) {
        updateOperator.input.process(event);
    }
    updateOperator.endWindow();
    // Expect 10 unique ids: 0 - 9
    Assert.assertEquals("rows in db", 10, updateOperator.getNumOfEventsInStore());
    // Expect 6 unique names: test100, test5, test6, test7, test8, test9
    Assert.assertEquals("rows in db", 6, updateOperator.getDistinctNonUnique());
}
Also used : Attribute(com.datatorrent.api.Attribute) TestPortContext(org.apache.apex.malhar.lib.helper.TestPortContext) OperatorContextTestHelper.mockOperatorContext(org.apache.apex.malhar.lib.helper.OperatorContextTestHelper.mockOperatorContext) OperatorContext(com.datatorrent.api.Context.OperatorContext) Test(org.junit.Test)
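
For readability, a sketch of the statement that setSqlStatement(...) assembles above, with TABLE_POJO_NAME left symbolic. Matched ids have their name updated while unmatched ids are inserted, which is why processing 15 tuples leaves 10 rows and 6 distinct names.

MERGE INTO <TABLE_POJO_NAME> AS T USING (VALUES (?, ?)) AS FOO(id, name)
    ON T.id = FOO.id
    WHEN MATCHED THEN UPDATE SET name = FOO.name
    WHEN NOT MATCHED THEN INSERT (id, name) VALUES (FOO.id, FOO.name);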

Example 40 with OperatorContext

Use of com.datatorrent.api.Context.OperatorContext in the apex-malhar project by Apache.

From the class JdbcPojoOperatorTest, method testJdbcPojoInsertOutputOperatorNullName.

/**
 * This test assumes a direct mapping of POJO fields to DB columns.
 * The column name1 is nullable in the DB and has no matching field in the POJO:
 * POJO(id, name) -> DB(id, name1)
 */
@Test
public void testJdbcPojoInsertOutputOperatorNullName() {
    JdbcTransactionalStore transactionalStore = new JdbcTransactionalStore();
    transactionalStore.setDatabaseDriver(DB_DRIVER);
    transactionalStore.setDatabaseUrl(URL);
    com.datatorrent.api.Attribute.AttributeMap.DefaultAttributeMap attributeMap = new com.datatorrent.api.Attribute.AttributeMap.DefaultAttributeMap();
    attributeMap.put(DAG.APPLICATION_ID, APP_ID);
    OperatorContext context = mockOperatorContext(OPERATOR_ID, attributeMap);
    TestPOJOOutputOperator outputOperator = new TestPOJOOutputOperator();
    outputOperator.setBatchSize(3);
    outputOperator.setTablename(TABLE_POJO_NAME_NAME_DIFF);
    outputOperator.setStore(transactionalStore);
    outputOperator.setup(context);
    Attribute.AttributeMap.DefaultAttributeMap portAttributes = new Attribute.AttributeMap.DefaultAttributeMap();
    portAttributes.put(Context.PortContext.TUPLE_CLASS, TestPOJOEvent.class);
    TestPortContext tpc = new TestPortContext(portAttributes);
    outputOperator.input.setup(tpc);
    outputOperator.activate(context);
    List<TestPOJOEvent> events = Lists.newArrayList();
    for (int i = 0; i < 10; i++) {
        events.add(new TestPOJOEvent(i, "test" + i));
    }
    outputOperator.beginWindow(0);
    for (TestPOJOEvent event : events) {
        outputOperator.input.process(event);
    }
    outputOperator.endWindow();
    Assert.assertEquals("rows in db", 10, outputOperator.getNumOfEventsInStore(TABLE_POJO_NAME_NAME_DIFF));
    Assert.assertEquals("null name rows in db", 10, outputOperator.getNumOfNullEventsInStore(TABLE_POJO_NAME_NAME_DIFF));
}
Also used : Attribute(com.datatorrent.api.Attribute) TestPortContext(org.apache.apex.malhar.lib.helper.TestPortContext) OperatorContextTestHelper.mockOperatorContext(org.apache.apex.malhar.lib.helper.OperatorContextTestHelper.mockOperatorContext) OperatorContext(com.datatorrent.api.Context.OperatorContext) Test(org.junit.Test)
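
In contrast to Example 38, the missing column here (name1) is nullable, so setup and activation succeed and each row is inserted with NULL in that column; the two assertions above check exactly that. A rough picture of the table after the window (layout assumed):

TABLE_POJO_NAME_NAME_DIFF
id | name1
 0 | NULL
 1 | NULL
...
 9 | NULL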

Aggregations

OperatorContext (com.datatorrent.api.Context.OperatorContext): 60
OperatorContextTestHelper.mockOperatorContext (org.apache.apex.malhar.lib.helper.OperatorContextTestHelper.mockOperatorContext): 57
Test (org.junit.Test): 51
Attribute (com.datatorrent.api.Attribute): 27
CollectorTestSink (org.apache.apex.malhar.lib.testbench.CollectorTestSink): 19
TestPortContext (org.apache.apex.malhar.lib.helper.TestPortContext): 12
AttributeMap (com.datatorrent.api.Attribute.AttributeMap): 11
ArrayList (java.util.ArrayList): 9
Random (java.util.Random): 8
FieldInfo (org.apache.apex.malhar.lib.util.FieldInfo): 6
InMemSpillableStateStore (org.apache.apex.malhar.lib.state.spillable.inmem.InMemSpillableStateStore): 5
Partitioner (com.datatorrent.api.Partitioner): 4
IOException (java.io.IOException): 4
Statement (java.sql.Statement): 4
FilePartitionMapping (org.apache.apex.malhar.hive.AbstractFSRollingOutputOperator.FilePartitionMapping): 4
TestEvent (org.apache.apex.malhar.lib.db.jdbc.JdbcNonTransactionalOutputOperatorTest.TestEvent): 4
StringSerde (org.apache.apex.malhar.lib.utils.serde.StringSerde): 4
PortContext (com.datatorrent.stram.engine.PortContext): 3
Connection (java.sql.Connection): 3
Date (java.sql.Date): 3