
Example 1 with TestPortContext

use of org.apache.apex.malhar.lib.helper.TestPortContext in project apex-malhar by apache.

the class IncrementalStepScanInputOperatorTest method testInit.

@KuduClusterTestContext(kuduClusterBasedTest = true)
@Test
public void testInit() throws Exception {
    Attribute.AttributeMap.DefaultAttributeMap attributeMapForInputOperator = new Attribute.AttributeMap.DefaultAttributeMap();
    attributeMapForInputOperator.put(DAG.APPLICATION_ID, APP_ID);
    operatorContext = mockOperatorContext(OPERATOR_ID_FOR_ONE_TO_ONE_PARTITIONER, attributeMapForInputOperator);
    Attribute.AttributeMap.DefaultAttributeMap portAttributesForInputOperator = new Attribute.AttributeMap.DefaultAttributeMap();
    portAttributesForInputOperator.put(Context.PortContext.TUPLE_CLASS, UnitTestTablePojo.class);
    testPortContext = new TestPortContext(portAttributesForInputOperator);
    incrementalStepScanInputOperator = new IncrementalStepScanInputOperator(UnitTestTablePojo.class, "kuduincrementalstepscaninputoperator.properties");
    incrementalStepScanInputOperator.setNumberOfPartitions(numberOfKuduInputOperatorPartitions);
    incrementalStepScanInputOperator.setPartitionScanStrategy(partitonScanStrategy);
    incrementalStepScanInputOperator.setScanOrderStrategy(scanOrderStrategy);
    partitioningContext = new Partitioner.PartitioningContext() {

        @Override
        public int getParallelPartitionCount() {
            return numberOfKuduInputOperatorPartitions;
        }

        @Override
        public List<Operator.InputPort<?>> getInputPorts() {
            return null;
        }
    };
    partitions = incrementalStepScanInputOperator.definePartitions(new ArrayList<>(), partitioningContext);
    Iterator<Partitioner.Partition<AbstractKuduInputOperator>> iteratorForMeta = partitions.iterator();
    IncrementalStepScanInputOperator actualOperator = (IncrementalStepScanInputOperator) iteratorForMeta.next().getPartitionedInstance();
    // Adjust the bindings as if Apex had completed the partitioning; at runtime the framework does this itself
    incrementalStepScanInputOperator = actualOperator;
    incrementalStepScanInputOperator.setup(operatorContext);
    incrementalStepScanInputOperator.activate(operatorContext);
    // Rewire the parent operator references so that unit-test method calls reach this partitioned instance
    incrementalStepScanInputOperator.getPartitioner().setPrototypeKuduInputOperator(incrementalStepScanInputOperator);
    incrementalStepScanInputOperator.getScanner().setParentOperator(incrementalStepScanInputOperator);
}
Also used: Operator (com.datatorrent.api.Operator), Attribute (com.datatorrent.api.Attribute), TestPortContext (org.apache.apex.malhar.lib.helper.TestPortContext), ArrayList (java.util.ArrayList), List (java.util.List), Partitioner (com.datatorrent.api.Partitioner), Test (org.junit.Test), KuduClusterTestContext (org.apache.apex.malhar.kudu.test.KuduClusterTestContext)
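
The test ends after setup and activation; to actually pull rows through the rewired partition one would drive the standard Apex window lifecycle, as the Cassandra and JDBC examples below do. The following is a minimal sketch, not part of the source test, assuming the fields from Example 1 are in scope and that the Kudu test table already holds data:

// Illustrative sketch only: drive one streaming window on the rewired partition.
incrementalStepScanInputOperator.beginWindow(0);
incrementalStepScanInputOperator.emitTuples();
incrementalStepScanInputOperator.endWindow();
// Shut the operator down cleanly afterwards (standard Apex lifecycle calls).
incrementalStepScanInputOperator.deactivate();
incrementalStepScanInputOperator.teardown();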

Example 2 with TestPortContext

use of org.apache.apex.malhar.lib.helper.TestPortContext in project apex-malhar by apache.

the class KuduCreateUpdateDeleteOutputOperatorTest method setUpKuduOutputOperatorContext.

@KuduClusterTestContext(kuduClusterBasedTest = true)
@Before
public void setUpKuduOutputOperatorContext() throws Exception {
    Attribute.AttributeMap.DefaultAttributeMap attributeMap = new Attribute.AttributeMap.DefaultAttributeMap();
    attributeMap.put(DAG.APPLICATION_ID, APP_ID);
    contextForKuduOutputOperator = mockOperatorContext(OPERATOR_ID_FOR_KUDU_CRUD, attributeMap);
    simpleKuduOutputOperator = new BaseKuduOutputOperator();
    Attribute.AttributeMap.DefaultAttributeMap portAttributes = new Attribute.AttributeMap.DefaultAttributeMap();
    portAttributes.put(Context.PortContext.TUPLE_CLASS, UnitTestTablePojo.class);
    testPortContextForKuduOutput = new TestPortContext(portAttributes);
    simpleKuduOutputOperator.setup(contextForKuduOutputOperator);
    simpleKuduOutputOperator.activate(contextForKuduOutputOperator);
    simpleKuduOutputOperator.input.setup(testPortContextForKuduOutput);
}
Also used: Attribute (com.datatorrent.api.Attribute), TestPortContext (org.apache.apex.malhar.lib.helper.TestPortContext), Before (org.junit.Before), KuduClusterTestContext (org.apache.apex.malhar.kudu.test.KuduClusterTestContext)
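
Once the port context is applied, a test method can push a tuple through the operator using the same process(...) pattern shown in the JDBC example below. A minimal sketch, assuming UnitTestTablePojo has a no-arg constructor and that its fields are populated to match the Kudu test table (construction details are not shown in the source tests):

// Illustrative sketch only: send one row through the activated output operator.
UnitTestTablePojo row = new UnitTestTablePojo(); // assumed no-arg constructor; populate fields before processing
simpleKuduOutputOperator.beginWindow(0);
simpleKuduOutputOperator.input.process(row);
simpleKuduOutputOperator.endWindow();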

Example 3 with TestPortContext

use of org.apache.apex.malhar.lib.helper.TestPortContext in project apex-malhar by apache.

the class AbstractUpsertOutputOperatorCountersTest method setupApexContexts.

@Before
public void setupApexContexts() throws Exception {
    Attribute.AttributeMap.DefaultAttributeMap attributeMapForCounters = new Attribute.AttributeMap.DefaultAttributeMap();
    attributeMapForCounters.put(DAG.APPLICATION_ID, APP_ID);
    contextForCountersOperator = mockOperatorContext(OPERATOR_ID_FOR_COUNTER_COLUMNS, attributeMapForCounters);
    Attribute.AttributeMap.DefaultAttributeMap portAttributesForCounters = new Attribute.AttributeMap.DefaultAttributeMap();
    portAttributesForCounters.put(Context.PortContext.TUPLE_CLASS, CounterColumnTableEntry.class);
    testPortContextForCounters = new TestPortContext(portAttributesForCounters);
    counterUpdatesOperator = new CounterColumnUpdatesOperator();
    counterUpdatesOperator.setup(contextForCountersOperator);
    counterUpdatesOperator.activate(contextForCountersOperator);
    counterUpdatesOperator.input.setup(testPortContextForCounters);
}
Also used: Attribute (com.datatorrent.api.Attribute), TestPortContext (org.apache.apex.malhar.lib.helper.TestPortContext), Before (org.junit.Before)

Example 4 with TestPortContext

use of org.apache.apex.malhar.lib.helper.TestPortContext in project apex-malhar by apache.

the class CassandraOperatorTest method testCassandraInputOperator.

/*
   * This test can be run against a Cassandra server installed on node17.
   */
@Test
public void testCassandraInputOperator() {
    String query1 = "SELECT * FROM " + KEYSPACE + "." + "%t;";
    CassandraStore store = new CassandraStore();
    store.setNode(NODE);
    store.setKeyspace(KEYSPACE);
    AttributeMap.DefaultAttributeMap attributeMap = new AttributeMap.DefaultAttributeMap();
    attributeMap.put(DAG.APPLICATION_ID, APP_ID);
    OperatorContext context = mockOperatorContext(OPERATOR_ID, attributeMap);
    TestInputOperator inputOperator = new TestInputOperator();
    inputOperator.setStore(store);
    inputOperator.setQuery(query1);
    inputOperator.setTablename(TABLE_NAME_INPUT);
    inputOperator.setPrimaryKeyColumn("id");
    List<FieldInfo> fieldInfos = Lists.newArrayList();
    fieldInfos.add(new FieldInfo("id", "id", null));
    fieldInfos.add(new FieldInfo("age", "age", null));
    fieldInfos.add(new FieldInfo("lastname", "lastname", null));
    inputOperator.setFieldInfos(fieldInfos);
    inputOperator.insertEventsInTable(30);
    CollectorTestSink<Object> sink = new CollectorTestSink<>();
    inputOperator.outputPort.setSink(sink);
    Attribute.AttributeMap.DefaultAttributeMap portAttributes = new Attribute.AttributeMap.DefaultAttributeMap();
    portAttributes.put(Context.PortContext.TUPLE_CLASS, TestInputPojo.class);
    TestPortContext tpc = new TestPortContext(portAttributes);
    inputOperator.setup(context);
    inputOperator.outputPort.setup(tpc);
    inputOperator.activate(context);
    inputOperator.beginWindow(0);
    inputOperator.emitTuples();
    inputOperator.endWindow();
    Assert.assertEquals("rows from db", 30, sink.collectedTuples.size());
    ArrayList<Integer> listOfIDs = inputOperator.getIds();
    // Rows are not stored in the Cassandra table in the same order in which they were inserted.
    for (int i = 0; i < 10; i++) {
        TestInputPojo object = (TestInputPojo) sink.collectedTuples.get(i);
        Assert.assertTrue("id set in testpojo", listOfIDs.contains(object.getId()));
        Assert.assertEquals("name set in testpojo", inputOperator.getNames().get(object.getId()), object.getLastname());
        Assert.assertEquals("age set in testpojo", inputOperator.getAge().get(object.getId()).intValue(), object.getAge());
    }
    sink.clear();
    inputOperator.columnDataTypes.clear();
    String query2 = "SELECT * FROM " + KEYSPACE + "." + "%t where token(%p) > %v;";
    inputOperator.setQuery(query2);
    inputOperator.setStartRow(10);
    inputOperator.setup(context);
    inputOperator.outputPort.setup(tpc);
    inputOperator.activate(context);
    inputOperator.beginWindow(1);
    inputOperator.emitTuples();
    inputOperator.endWindow();
    Assert.assertEquals("rows from db", 26, sink.collectedTuples.size());
    sink.clear();
    inputOperator.columnDataTypes.clear();
    String query3 = "SELECT * FROM " + KEYSPACE + "." + "%t where token(%p) > %v LIMIT %l;";
    inputOperator.setQuery(query3);
    inputOperator.setStartRow(1);
    inputOperator.setLimit(10);
    inputOperator.setup(context);
    inputOperator.outputPort.setup(tpc);
    inputOperator.activate(context);
    inputOperator.beginWindow(2);
    inputOperator.emitTuples();
    inputOperator.endWindow();
    Assert.assertEquals("rows from db", 10, sink.collectedTuples.size());
}
Also used: Attribute (com.datatorrent.api.Attribute), TestPortContext (org.apache.apex.malhar.lib.helper.TestPortContext), AttributeMap (com.datatorrent.api.Attribute.AttributeMap), OperatorContextTestHelper.mockOperatorContext (org.apache.apex.malhar.lib.helper.OperatorContextTestHelper.mockOperatorContext), OperatorContext (com.datatorrent.api.Context.OperatorContext), FieldInfo (org.apache.apex.malhar.lib.util.FieldInfo), CollectorTestSink (org.apache.apex.malhar.lib.testbench.CollectorTestSink), Test (org.junit.Test)

Example 5 with TestPortContext

use of org.apache.apex.malhar.lib.helper.TestPortContext in project apex-malhar by apache.

the class JdbcPojoOperatorTest method testJdbcPojoOutputOperator.

@Test
public void testJdbcPojoOutputOperator() {
    JdbcTransactionalStore transactionalStore = new JdbcTransactionalStore();
    transactionalStore.setDatabaseDriver(DB_DRIVER);
    transactionalStore.setDatabaseUrl(URL);
    com.datatorrent.api.Attribute.AttributeMap.DefaultAttributeMap attributeMap = new com.datatorrent.api.Attribute.AttributeMap.DefaultAttributeMap();
    attributeMap.put(DAG.APPLICATION_ID, APP_ID);
    OperatorContext context = mockOperatorContext(OPERATOR_ID, attributeMap);
    TestPOJOOutputOperator outputOperator = new TestPOJOOutputOperator();
    outputOperator.setBatchSize(3);
    outputOperator.setTablename(TABLE_POJO_NAME);
    List<JdbcFieldInfo> fieldInfos = Lists.newArrayList();
    fieldInfos.add(new JdbcFieldInfo("ID", "id", null, Types.INTEGER));
    fieldInfos.add(new JdbcFieldInfo("NAME", "name", null, Types.VARCHAR));
    outputOperator.setFieldInfos(fieldInfos);
    outputOperator.setStore(transactionalStore);
    outputOperator.setup(context);
    Attribute.AttributeMap.DefaultAttributeMap portAttributes = new Attribute.AttributeMap.DefaultAttributeMap();
    portAttributes.put(Context.PortContext.TUPLE_CLASS, TestPOJOEvent.class);
    TestPortContext tpc = new TestPortContext(portAttributes);
    outputOperator.input.setup(tpc);
    outputOperator.activate(context);
    List<TestPOJOEvent> events = Lists.newArrayList();
    for (int i = 0; i < 10; i++) {
        events.add(new TestPOJOEvent(i, "test" + i));
    }
    outputOperator.beginWindow(0);
    for (TestPOJOEvent event : events) {
        outputOperator.input.process(event);
    }
    outputOperator.endWindow();
    Assert.assertEquals("rows in db", 10, outputOperator.getNumOfEventsInStore(TABLE_POJO_NAME));
}
Also used: Attribute (com.datatorrent.api.Attribute), TestPortContext (org.apache.apex.malhar.lib.helper.TestPortContext), OperatorContextTestHelper.mockOperatorContext (org.apache.apex.malhar.lib.helper.OperatorContextTestHelper.mockOperatorContext), OperatorContext (com.datatorrent.api.Context.OperatorContext), Test (org.junit.Test)
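
All five examples share the same three-step TestPortContext pattern: create a DefaultAttributeMap, set Context.PortContext.TUPLE_CLASS to the POJO class the port carries, and pass the resulting TestPortContext to the port's setup(...) after the operator itself has been set up and activated. The condensed sketch below only illustrates that pattern; MyPojo and operatorUnderTest are placeholders, not names from apex-malhar:

@Before
public void setUpPortContext() {
    // TUPLE_CLASS tells the POJO-aware port which class to map tuple fields against.
    Attribute.AttributeMap.DefaultAttributeMap portAttributes = new Attribute.AttributeMap.DefaultAttributeMap();
    portAttributes.put(Context.PortContext.TUPLE_CLASS, MyPojo.class);
    TestPortContext testPortContext = new TestPortContext(portAttributes);
    // Call setup on the port (not the operator) so the port can read the TUPLE_CLASS attribute.
    operatorUnderTest.input.setup(testPortContext);
}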

Aggregations

Attribute (com.datatorrent.api.Attribute): 19 uses
TestPortContext (org.apache.apex.malhar.lib.helper.TestPortContext): 19 uses
Test (org.junit.Test): 13 uses
OperatorContext (com.datatorrent.api.Context.OperatorContext): 12 uses
OperatorContextTestHelper.mockOperatorContext (org.apache.apex.malhar.lib.helper.OperatorContextTestHelper.mockOperatorContext): 12 uses
CollectorTestSink (org.apache.apex.malhar.lib.testbench.CollectorTestSink): 8 uses
FieldInfo (org.apache.apex.malhar.lib.util.FieldInfo): 6 uses
Before (org.junit.Before): 5 uses
Partitioner (com.datatorrent.api.Partitioner): 4 uses
Date (java.sql.Date): 3 uses
AttributeMap (com.datatorrent.api.Attribute.AttributeMap): 2 uses
ArrayList (java.util.ArrayList): 2 uses
KuduClusterTestContext (org.apache.apex.malhar.kudu.test.KuduClusterTestContext): 2 uses
Operator (com.datatorrent.api.Operator): 1 use
IOException (java.io.IOException): 1 use
SQLException (java.sql.SQLException): 1 use
Time (java.sql.Time): 1 use
Timestamp (java.sql.Timestamp): 1 use
List (java.util.List): 1 use
TimeUnit (java.util.concurrent.TimeUnit): 1 use