
Example 61 with CollectorTestSink

Use of org.apache.apex.malhar.lib.testbench.CollectorTestSink in project apex-malhar by Apache.

The class POJOEnricherTest, method includeSelectedKeys.

@Test
public void includeSelectedKeys() {
    POJOEnricher oper = new POJOEnricher();
    oper.setStore(testMeta.dbloader);
    // Look up rows by ID and copy only NAME, AGE and ADDRESS onto the output POJO.
    oper.setLookupFields(Arrays.asList("ID"));
    oper.setIncludeFields(Arrays.asList("NAME", "AGE", "ADDRESS"));
    oper.outputClass = EmployeeOrder.class;
    oper.inputClass = Order.class;
    oper.setup(null);
    CollectorTestSink sink = new CollectorTestSink();
    TestUtils.setSink(oper.output, sink);
    oper.activate(null);
    oper.beginWindow(1);
    Order tuple = new Order(3, 4, 700);
    oper.input.process(tuple);
    oper.endWindow();
    oper.deactivate();
    Assert.assertEquals("includeSelectedKeys: Number of tuples emitted: ", 1, sink.collectedTuples.size());
    Assert.assertEquals("Ouput Tuple: ", "{OID=3, ID=4, amount=700.0, NAME='Mark', AGE=25, ADDRESS='Rich-Mond', SALARY=0.0}", sink.collectedTuples.get(0).toString());
}
Also used : CollectorTestSink(org.apache.apex.malhar.lib.testbench.CollectorTestSink) Test(org.junit.Test)
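
The test above relies on Order and EmployeeOrder POJOs defined elsewhere in the test sources. A rough sketch of what they might look like, with field names and types inferred from the constructor call new Order(3, 4, 700) and from the asserted toString output (illustrative assumptions, not the actual apex-malhar classes):

// Hypothetical input POJO, inferred from "new Order(3, 4, 700)" and the output "{OID=3, ID=4, amount=700.0, ...}".
public class Order {
    private int OID;
    private int ID;
    private double amount;

    public Order(int oid, int id, double amount) {
        this.OID = oid;
        this.ID = id;
        this.amount = amount;
    }
    // getters and setters omitted for brevity
}

// Hypothetical enriched output POJO: the include fields NAME, AGE, ADDRESS (and optionally SALARY) are added.
public class EmployeeOrder {
    private int OID;
    private int ID;
    private double amount;
    private String NAME;
    private int AGE;
    private String ADDRESS;
    private double SALARY;

    @Override
    public String toString() {
        return "{OID=" + OID + ", ID=" + ID + ", amount=" + amount + ", NAME='" + NAME + "', AGE=" + AGE
            + ", ADDRESS='" + ADDRESS + "', SALARY=" + SALARY + "}";
    }
    // getters and setters omitted for brevity
}

Since POJOEnricher copies values through the POJOs' accessors, the real classes would expose getters and setters for each of these fields.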

Example 62 with CollectorTestSink

Use of org.apache.apex.malhar.lib.testbench.CollectorTestSink in project apex-malhar by Apache.

The class POJOEnricherTest, method includeAllKeys.

@Test
public void includeAllKeys() {
    POJOEnricher oper = new POJOEnricher();
    oper.setStore(testMeta.dbloader);
    // Same lookup as includeSelectedKeys, but SALARY is also copied onto the output POJO.
    oper.setLookupFields(Arrays.asList("ID"));
    oper.setIncludeFields(Arrays.asList("NAME", "AGE", "ADDRESS", "SALARY"));
    oper.outputClass = EmployeeOrder.class;
    oper.inputClass = Order.class;
    oper.setup(null);
    CollectorTestSink sink = new CollectorTestSink();
    TestUtils.setSink(oper.output, sink);
    oper.activate(null);
    oper.beginWindow(1);
    Order tuple = new Order(3, 4, 700);
    oper.input.process(tuple);
    oper.endWindow();
    oper.deactivate();
    Assert.assertEquals("includeSelectedKeys: Number of tuples emitted: ", 1, sink.collectedTuples.size());
    Assert.assertEquals("Ouput Tuple: ", "{OID=3, ID=4, amount=700.0, NAME='Mark', AGE=25, ADDRESS='Rich-Mond', SALARY=65000.0}", sink.collectedTuples.get(0).toString());
}
Also used : CollectorTestSink(org.apache.apex.malhar.lib.testbench.CollectorTestSink) Test(org.junit.Test)

Example 63 with CollectorTestSink

Use of org.apache.apex.malhar.lib.testbench.CollectorTestSink in project apex-malhar by Apache.

The class PagedCouchDBInputOperatorTest, method testCouchDBInputOperator.

@SuppressWarnings({ "unchecked", "rawtypes" })
@Test
public void testCouchDBInputOperator() {
    String testDocumentIdPrefix = "PagedTestDoc";
    // Insert ten test documents so that, with a page size of 5, fetching takes multiple rounds.
    for (int i = 1; i <= 10; i++) {
        Map<String, String> mapTuple = Maps.newHashMap();
        mapTuple.put("_id", testDocumentIdPrefix + i);
        mapTuple.put("name", "PTD" + i);
        mapTuple.put("type", "test");
        CouchDBTestHelper.insertDocument(mapTuple);
    }
    TestPagedDBInputOperator operatorTest = new TestPagedDBInputOperator();
    CouchDbStore store = new CouchDbStore();
    store.setDbName(CouchDBTestHelper.TEST_DB);
    operatorTest.setStore(store);
    CollectorTestSink sink = new CollectorTestSink();
    operatorTest.outputPort.setSink(sink);
    operatorTest.setPageSize(5);
    operatorTest.setup(mockOperatorContext(3));
    int totalDocsInDb = CouchDBTestHelper.getTotalDocuments();
    // Integer ceiling of totalDocsInDb / 5: one emitTuples() round per page.
    int rounds = (totalDocsInDb % 5 == 0 ? 0 : 1) + (totalDocsInDb / 5);
    int remainingDocCount = totalDocsInDb;
    for (int i = 0; i < rounds; i++) {
        operatorTest.beginWindow(i);
        operatorTest.emitTuples();
        operatorTest.endWindow();
        Assert.assertEquals("number emitted tuples", remainingDocCount > 5 ? 5 : remainingDocCount, sink.collectedTuples.size());
        remainingDocCount = remainingDocCount - 5;
        sink.clear();
    }
}
Also used : CollectorTestSink(org.apache.apex.malhar.lib.testbench.CollectorTestSink) Test(org.junit.Test)
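
The rounds expression in the test is an integer ceiling of totalDocsInDb divided by the page size of 5. A small self-contained sketch of that arithmetic (the class and method names here are made up for illustration):

public class PagingMath {
    // Integer ceiling division: pages needed to emit totalDocs documents, pageSize at a time.
    // Equivalent to the (totalDocs % pageSize == 0 ? 0 : 1) + totalDocs / pageSize form used in the test.
    static int rounds(int totalDocs, int pageSize) {
        return (totalDocs + pageSize - 1) / pageSize;
    }

    public static void main(String[] args) {
        System.out.println(rounds(10, 5)); // 2: two full pages
        System.out.println(rounds(11, 5)); // 3: the last page holds a single document
    }
}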

Example 64 with CollectorTestSink

Use of org.apache.apex.malhar.lib.testbench.CollectorTestSink in project apex-malhar by Apache.

The class ApplicationTest, method testGetApplication.

/**
 * Test of getApplication method, of class Application.
 */
@Test
public void testGetApplication() throws Exception {
    Configuration conf = new Configuration(false);
    conf.addResource("dt-site-mobile.xml");
    // Embedded Jetty server (random port) hosting the pub/sub WebSocket servlet used by both operators.
    Server server = new Server(0);
    Servlet servlet = new SamplePubSubWebSocketServlet();
    ServletHolder sh = new ServletHolder(servlet);
    ServletContextHandler contextHandler = new ServletContextHandler(server, "/", ServletContextHandler.SESSIONS);
    contextHandler.addServlet(sh, "/pubsub");
    contextHandler.addServlet(sh, "/*");
    server.start();
    Connector[] connector = server.getConnectors();
    conf.set("dt.attr.GATEWAY_CONNECT_ADDRESS", "localhost:" + connector[0].getLocalPort());
    URI uri = PubSubHelper.getURI("localhost:" + connector[0].getLocalPort());
    PubSubWebSocketOutputOperator<Object> outputOperator = new PubSubWebSocketOutputOperator<Object>();
    outputOperator.setUri(uri);
    outputOperator.setTopic(conf.get("dt.application.MobileExample.operator.QueryLocation.topic"));
    PubSubWebSocketInputOperator<Map<String, String>> inputOperator = new PubSubWebSocketInputOperator<Map<String, String>>();
    inputOperator.setUri(uri);
    inputOperator.setTopic(conf.get("dt.application.MobileExample.operator.LocationResults.topic"));
    CollectorTestSink<Object> sink = new CollectorTestSink<Object>();
    inputOperator.outputPort.setSink(sink);
    Map<String, String> data = new HashMap<String, String>();
    data.put("command", "add");
    data.put("phone", "5559990");
    Application app = new Application();
    LocalMode lma = LocalMode.newInstance();
    lma.prepareDAG(app, conf);
    LocalMode.Controller lc = lma.getController();
    lc.setHeartbeatMonitoringEnabled(false);
    lc.runAsync();
    // Give the locally launched Mobile demo DAG time to come up before injecting data.
    Thread.sleep(5000);
    inputOperator.setup(null);
    outputOperator.setup(null);
    inputOperator.activate(null);
    outputOperator.beginWindow(0);
    outputOperator.input.process(data);
    outputOperator.endWindow();
    inputOperator.beginWindow(0);
    // Poll for results: emit until five tuples arrive or roughly five seconds elapse.
    int timeoutMillis = 5000;
    while (sink.collectedTuples.size() < 5 && timeoutMillis > 0) {
        inputOperator.emitTuples();
        timeoutMillis -= 20;
        Thread.sleep(20);
    }
    inputOperator.endWindow();
    lc.shutdown();
    inputOperator.teardown();
    outputOperator.teardown();
    server.stop();
    Assert.assertTrue("size of output is 5 ", sink.collectedTuples.size() == 5);
    for (Object obj : sink.collectedTuples) {
        Assert.assertEquals("Expected phone number", "5559990", ((Map<String, String>) obj).get("phone"));
    }
}
Also used : PubSubWebSocketOutputOperator(org.apache.apex.malhar.lib.io.PubSubWebSocketOutputOperator) Connector(org.eclipse.jetty.server.Connector) Configuration(org.apache.hadoop.conf.Configuration) Server(org.eclipse.jetty.server.Server) HashMap(java.util.HashMap) ServletHolder(org.eclipse.jetty.servlet.ServletHolder) SamplePubSubWebSocketServlet(org.apache.apex.malhar.lib.helper.SamplePubSubWebSocketServlet) URI(java.net.URI) PubSubWebSocketInputOperator(org.apache.apex.malhar.lib.io.PubSubWebSocketInputOperator) LocalMode(com.datatorrent.api.LocalMode) Servlet(javax.servlet.Servlet) ServletContextHandler(org.eclipse.jetty.servlet.ServletContextHandler) Map(java.util.Map) CollectorTestSink(org.apache.apex.malhar.lib.testbench.CollectorTestSink) Test(org.junit.Test)
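
The busy-wait in the middle of the test (calling emitTuples() every 20 ms until five tuples arrive or roughly five seconds pass) is a recurring pattern when the source is asynchronous. A generic sketch of that wait as a helper; the class, method and parameters below are hypothetical, not part of the Malhar test utilities:

import java.util.function.IntSupplier;

public class TestWaitHelper {
    /**
     * Polls until the supplied count reaches expected or timeoutMillis elapses.
     * poll is a hook for work such as inputOperator.emitTuples().
     */
    public static boolean waitForTuples(IntSupplier count, int expected, long timeoutMillis, Runnable poll)
        throws InterruptedException {
        long remaining = timeoutMillis;
        while (count.getAsInt() < expected && remaining > 0) {
            poll.run();
            Thread.sleep(20);
            remaining -= 20;
        }
        return count.getAsInt() >= expected;
    }
}

In the test above the call would read roughly waitForTuples(() -> sink.collectedTuples.size(), 5, 5000, inputOperator::emitTuples), followed by the existing size assertion.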

Example 65 with CollectorTestSink

Use of org.apache.apex.malhar.lib.testbench.CollectorTestSink in project apex-malhar by Apache.

The class MapOperatorTest, method testNodeProcessingSchema.

public void testNodeProcessingSchema(MapOperator<LongWritable, Text, Text, IntWritable> oper) throws IOException {
    CollectorTestSink sortSink = new CollectorTestSink();
    oper.output.setSink(sortSink);
    oper.setMapClass(WordCount.Map.class);
    oper.setCombineClass(WordCount.Reduce.class);
    oper.setDirName(testMeta.testDir);
    oper.setConfigFile(null);
    oper.setInputFormatClass(TextInputFormat.class);
    Configuration conf = new Configuration();
    JobConf jobConf = new JobConf(conf);
    FileInputFormat.setInputPaths(jobConf, new Path(testMeta.testDir));
    TextInputFormat inputFormat = new TextInputFormat();
    inputFormat.configure(jobConf);
    InputSplit[] splits = inputFormat.getSplits(jobConf, 1);
    // Serialize the first input split into the operator's stream so the operator can
    // reconstruct a split of the configured class during setup().
    SerializationFactory serializationFactory = new SerializationFactory(conf);
    Serializer keySerializer = serializationFactory.getSerializer(splits[0].getClass());
    keySerializer.open(oper.getOutstream());
    keySerializer.serialize(splits[0]);
    oper.setInputSplitClass(splits[0].getClass());
    keySerializer.close();
    oper.setup(null);
    oper.beginWindow(0);
    oper.emitTuples();
    oper.emitTuples();
    oper.endWindow();
    oper.beginWindow(1);
    oper.emitTuples();
    oper.endWindow();
    Assert.assertEquals("number emitted tuples", 3, sortSink.collectedTuples.size());
    for (Object o : sortSink.collectedTuples) {
        LOG.debug(o.toString());
    }
    LOG.debug("Done testing round\n");
    oper.teardown();
}
Also used : Path(org.apache.hadoop.fs.Path) Configuration(org.apache.hadoop.conf.Configuration) TextInputFormat(org.apache.hadoop.mapred.TextInputFormat) SerializationFactory(org.apache.hadoop.io.serializer.SerializationFactory) JobConf(org.apache.hadoop.mapred.JobConf) InputSplit(org.apache.hadoop.mapred.InputSplit) CollectorTestSink(org.apache.apex.malhar.lib.testbench.CollectorTestSink) Serializer(org.apache.hadoop.io.serializer.Serializer)
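
The serialization block in this test writes the first Hadoop InputSplit into the operator's output stream so the operator can rebuild it during setup(). A standalone round trip with Hadoop's SerializationFactory looks roughly like this; the file path and split sizes are made-up illustration values:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.serializer.Deserializer;
import org.apache.hadoop.io.serializer.SerializationFactory;
import org.apache.hadoop.io.serializer.Serializer;
import org.apache.hadoop.mapred.FileSplit;

public class SplitRoundTrip {
    public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        SerializationFactory factory = new SerializationFactory(conf);

        // Serialize a split to bytes, as the test does with oper.getOutstream().
        FileSplit split = new FileSplit(new Path("/tmp/input.txt"), 0, 1024, new String[0]);
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        Serializer<FileSplit> serializer = factory.getSerializer(FileSplit.class);
        serializer.open(bytes);
        serializer.serialize(split);
        serializer.close();

        // Deserialize it back, which is roughly what the operator does in setup().
        Deserializer<FileSplit> deserializer = factory.getDeserializer(FileSplit.class);
        deserializer.open(new ByteArrayInputStream(bytes.toByteArray()));
        FileSplit copy = deserializer.deserialize(null);
        deserializer.close();

        System.out.println(copy.getPath() + " length=" + copy.getLength());
    }
}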

Aggregations

CollectorTestSink (org.apache.apex.malhar.lib.testbench.CollectorTestSink): 162 usages
Test (org.junit.Test): 133 usages
HashMap (java.util.HashMap): 56 usages
Map (java.util.Map): 33 usages
File (java.io.File): 21 usages
ArrayList (java.util.ArrayList): 21 usages
OperatorContext (com.datatorrent.api.Context.OperatorContext): 19 usages
OperatorContextTestHelper.mockOperatorContext (org.apache.apex.malhar.lib.helper.OperatorContextTestHelper.mockOperatorContext): 18 usages
KeyValPair (org.apache.apex.malhar.lib.util.KeyValPair): 15 usages
Path (org.apache.hadoop.fs.Path): 15 usages
Attribute (com.datatorrent.api.Attribute): 14 usages
ColumnIndex (org.apache.apex.malhar.lib.streamquery.index.ColumnIndex): 13 usages
LineByLineFileInputOperator (org.apache.apex.malhar.lib.fs.LineByLineFileInputOperator): 12 usages
Kryo (com.esotericsoftware.kryo.Kryo): 10 usages
Date (java.util.Date): 8 usages
TestPortContext (org.apache.apex.malhar.lib.helper.TestPortContext): 8 usages
PortContext (com.datatorrent.stram.engine.PortContext): 7 usages
List (java.util.List): 6 usages
CountDownLatch (java.util.concurrent.CountDownLatch): 6 usages
SelectOperator (org.apache.apex.malhar.contrib.misc.streamquery.SelectOperator): 6 usages
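
Across these examples the CollectorTestSink usage follows one skeleton: attach the sink to the operator's output port, drive one or more windows, then assert on sink.collectedTuples. A condensed sketch of that pattern, with a hypothetical MyOperator standing in for the operator under test:

import org.apache.apex.malhar.lib.testbench.CollectorTestSink;
import org.junit.Assert;
import org.junit.Test;

public class MyOperatorTest {
    @Test
    @SuppressWarnings({ "unchecked", "rawtypes" })
    public void testEmission() {
        MyOperator oper = new MyOperator();  // hypothetical operator under test
        CollectorTestSink sink = new CollectorTestSink();
        oper.output.setSink(sink);           // attach the collecting sink to the output port

        oper.setup(null);
        oper.beginWindow(0);
        oper.input.process("some tuple");    // push input, or call oper.emitTuples() for input operators
        oper.endWindow();

        // Everything emitted during the window is now available for assertions.
        Assert.assertEquals("number emitted tuples", 1, sink.collectedTuples.size());
        oper.teardown();
    }
}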