Use of org.apache.apex.malhar.lib.testbench.CollectorTestSink in project apex-malhar by apache.
Class POJOTimeBasedJoinOperatorTest, method testInnerJoinOperator.
@Test
public void testInnerJoinOperator() throws IOException, InterruptedException {
  Kryo kryo = new Kryo();
  POJOJoinOperator oper = new POJOJoinOperator();
  JoinStore store = new InMemoryStore(200, 200);
  oper.setLeftStore(kryo.copy(store));
  oper.setRightStore(kryo.copy(store));
  oper.setIncludeFields("ID,Name;OID,Amount");
  oper.setKeyFields("ID,CID");
  oper.outputClass = CustOrder.class;
  oper.setup(MapTimeBasedJoinOperator.context);

  CollectorTestSink<List<CustOrder>> sink = new CollectorTestSink<List<CustOrder>>();
  @SuppressWarnings({ "unchecked", "rawtypes" })
  CollectorTestSink<Object> tmp = (CollectorTestSink) sink;
  oper.outputPort.setSink(tmp);

  oper.beginWindow(0);
  Customer tuple = new Customer(1, "Anil");
  oper.input1.process(tuple);

  // only the first order (CID = 1) joins with the customer (ID = 1)
  CountDownLatch latch = new CountDownLatch(1);
  Order order = new Order(102, 1, 300);
  oper.input2.process(order);
  Order order2 = new Order(103, 3, 300);
  oper.input2.process(order2);
  Order order3 = new Order(104, 7, 300);
  oper.input2.process(order3);
  // the latch is never counted down, so await() simply waits out the 3-second timeout
  latch.await(3000, TimeUnit.MILLISECONDS);
  oper.endWindow();

  /* number of tuples emitted */
  Assert.assertEquals("Number of tuple emitted ", 1, sink.collectedTuples.size());
  List<CustOrder> emittedList = sink.collectedTuples.iterator().next();
  CustOrder emitted = emittedList.get(0);
  Assert.assertEquals("value of ID :", tuple.ID, emitted.ID);
  Assert.assertEquals("value of Name :", tuple.Name, emitted.Name);
  Assert.assertEquals("value of OID: ", order.OID, emitted.OID);
  Assert.assertEquals("value of Amount: ", order.Amount, emitted.Amount);
}
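The Customer, Order, and CustOrder POJOs referenced above are not part of this snippet. A minimal sketch of the shape the test appears to assume, with public fields named after the assertions (the real classes in apex-malhar may differ in detail):

// hypothetical sketch, not the actual apex-malhar definitions
public static class Customer {
  public int ID;
  public String Name;

  public Customer(int ID, String Name) {
    this.ID = ID;
    this.Name = Name;
  }
}

public static class Order {
  public int OID;
  public int CID;
  public int Amount;

  public Order(int OID, int CID, int Amount) {
    this.OID = OID;
    this.CID = CID;
    this.Amount = Amount;
  }
}

public static class CustOrder {
  public int ID;
  public String Name;
  public int OID;
  public int Amount;
}

With setKeyFields("ID,CID"), the left key is Customer.ID and the right key is Order.CID, so only the order with CID = 1 joins with customer 1, which is exactly what the four assertions check.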
Use of org.apache.apex.malhar.lib.testbench.CollectorTestSink in project apex-malhar by apache.
Class ApacheLogParseMapOutputOperatorTest, method testUserDefinedPattern.
/**
 * Test that the operator logic emits correct results.
 */
@SuppressWarnings({ "rawtypes", "unchecked" })
@Test
public void testUserDefinedPattern() {
  ApacheLogParseMapOutputOperator oper = new ApacheLogParseMapOutputOperator();
  CollectorTestSink sink = new CollectorTestSink();
  oper.output.setSink(sink);
  // entry i of regexGroups names capture group i of the log regex; index 0 is the whole
  // match and null entries are skipped, which is why the output map has exactly 6 keys
  oper.setRegexGroups(new String[] { null, "ipAddr", null, "userId", "date", "url", "httpCode", "rest" });
  String token = "127.0.0.1 - - [04/Apr/2013:17:17:21 -0700] \"GET /favicon.ico HTTP/1.1\" 404 498 \"-\" \"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.31 (KHTML, like Gecko) Chrome/26.0.1410.43 Safari/537.31\"";
  oper.setLogRegex("^([\\d\\.]+) (\\S+) (\\S+) \\[([\\w:/]+\\s[+\\-]\\d{4})\\] \"[A-Z]+ (.+?) HTTP/\\S+\" (\\d{3})(.*)");
  oper.setup(null);
  oper.beginWindow(0);
  oper.data.process(token);
  oper.endWindow();
  Assert.assertEquals("number emitted tuples", 1, sink.collectedTuples.size());
  Map<String, Object> map = (Map<String, Object>) sink.collectedTuples.get(0);
  log.debug("map {}", map);
  Assert.assertEquals("Size of map is 6", 6, map.size());
  Assert.assertEquals("checking ip", "127.0.0.1", map.get("ipAddr"));
  Assert.assertEquals("checking userid", "-", map.get("userId"));
  Assert.assertEquals("checking date", "04/Apr/2013:17:17:21 -0700", map.get("date"));
  Assert.assertEquals("checking url", "/favicon.ico", map.get("url"));
  Assert.assertEquals("checking http code", "404", map.get("httpCode"));
  Assert.assertEquals("checking bytes", "498 \"-\" \"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.31 (KHTML, like Gecko) Chrome/26.0.1410.43 Safari/537.31\"", map.get("rest"));
}
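The mapping between the regexGroups entries and the regex capture groups can be verified outside the operator with plain java.util.regex. A standalone sketch under that assumption, using an abbreviated version of the sample line (the class name and the truncation are illustrative):

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class LogRegexCheck {
  public static void main(String[] args) {
    // abbreviated sample line; the trailing user-agent string is omitted here
    String token = "127.0.0.1 - - [04/Apr/2013:17:17:21 -0700] \"GET /favicon.ico HTTP/1.1\" 404 498 \"-\"";
    Pattern p = Pattern.compile(
        "^([\\d\\.]+) (\\S+) (\\S+) \\[([\\w:/]+\\s[+\\-]\\d{4})\\] \"[A-Z]+ (.+?) HTTP/\\S+\" (\\d{3})(.*)");
    Matcher m = p.matcher(token);
    if (m.matches()) {
      // groups 1..7 line up with the non-null names in the test:
      // 1 -> ipAddr, 3 -> userId, 4 -> date, 5 -> url, 6 -> httpCode, 7 -> rest (group 2 has no name)
      for (int g = 1; g <= m.groupCount(); g++) {
        System.out.println(g + ": " + m.group(g));
      }
    }
  }
}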
Use of org.apache.apex.malhar.lib.testbench.CollectorTestSink in project apex-malhar by apache.
Class AbstractFileInputOperatorFailureHandlingTest, method testFailureHandling.
@Test
public void testFailureHandling() throws Exception {
  FileContext.getLocalFSFileContext().delete(new Path(new File(testMeta.getDir()).getAbsolutePath()), true);
  HashSet<String> allLines = Sets.newHashSet();
  // create 10 files with 10 lines each, 100 records in total
  for (int file = 0; file < 10; file++) {
    HashSet<String> lines = Sets.newHashSet();
    for (int line = 0; line < 10; line++) {
      lines.add("f" + file + "l" + line);
    }
    allLines.addAll(lines);
    FileUtils.write(new File(testMeta.getDir(), "file" + file), StringUtils.join(lines, '\n'));
  }
  Thread.sleep(10);

  TestFileInputOperator oper = new TestFileInputOperator();
  CollectorTestSink<String> queryResults = new CollectorTestSink<String>();
  @SuppressWarnings({ "unchecked", "rawtypes" })
  CollectorTestSink<Object> sink = (CollectorTestSink) queryResults;
  oper.output.setSink(sink);
  oper.setDirectory(testMeta.getDir());
  oper.getScanner().setFilePatternRegexp(".*file[\\d]");
  oper.setup(mockOperatorContext(1, new Attribute.AttributeMap.DefaultAttributeMap()));
  for (long wid = 0; wid < 1000; wid++) {
    oper.beginWindow(wid);
    oper.emitTuples();
    oper.endWindow();
  }
  oper.teardown();

  // all 100 lines should have been emitted exactly once
  Assert.assertEquals("number tuples", 100, queryResults.collectedTuples.size());
  Assert.assertEquals("lines", allLines, new HashSet<String>(queryResults.collectedTuples));
  TestUtils.deleteTargetTestClassFolder(testMeta.desc);
}
Use of org.apache.apex.malhar.lib.testbench.CollectorTestSink in project apex-malhar by apache.
Class SelectTopOperatorTest, method testOperator.
@SuppressWarnings({ "rawtypes", "unchecked" })
@Test
public void testOperator() throws Exception {
  SelectTopOperator oper = new SelectTopOperator();
  oper.setTopValue(2);
  CollectorTestSink sink = new CollectorTestSink();
  oper.outport.setSink(sink);

  oper.beginWindow(1);
  HashMap<String, Object> tuple = new HashMap<String, Object>();
  tuple.put("a", 0);
  tuple.put("b", 1);
  tuple.put("c", 2);
  oper.inport.process(tuple);

  tuple = new HashMap<String, Object>();
  tuple.put("a", 1);
  tuple.put("b", 3);
  tuple.put("c", 4);
  oper.inport.process(tuple);

  tuple = new HashMap<String, Object>();
  tuple.put("a", 1);
  tuple.put("b", 5);
  tuple.put("c", 6);
  oper.inport.process(tuple);
  oper.endWindow();

  // this test only logs the collected tuples; it makes no assertions on them
  LOG.debug("{}", sink.collectedTuples);
}
Use of org.apache.apex.malhar.lib.testbench.CollectorTestSink in project apex-malhar by apache.
Class BetweenConditionTest, method testSqlSelect.
@SuppressWarnings({ "rawtypes", "unchecked" })
@Test
public void testSqlSelect() {
  // create the operator: select columns b and c where a BETWEEN 0 AND 2
  SelectOperator oper = new SelectOperator();
  oper.addIndex(new ColumnIndex("b", null));
  oper.addIndex(new ColumnIndex("c", null));
  BetweenCondition cond = new BetweenCondition("a", 0, 2);
  oper.setCondition(cond);

  CollectorTestSink sink = new CollectorTestSink();
  oper.outport.setSink(sink);
  oper.setup(null);
  oper.beginWindow(1);

  HashMap<String, Object> tuple = new HashMap<String, Object>();
  tuple.put("a", 0);
  tuple.put("b", 1);
  tuple.put("c", 2);
  oper.inport.process(tuple);

  tuple = new HashMap<String, Object>();
  tuple.put("a", 1);
  tuple.put("b", 3);
  tuple.put("c", 4);
  oper.inport.process(tuple);

  tuple = new HashMap<String, Object>();
  tuple.put("a", 2);
  tuple.put("b", 5);
  tuple.put("c", 6);
  oper.inport.process(tuple);

  // a = 3 falls outside the BETWEEN 0 AND 2 range, so this tuple should be filtered out
  tuple = new HashMap<String, Object>();
  tuple.put("a", 3);
  tuple.put("b", 7);
  tuple.put("c", 8);
  oper.inport.process(tuple);

  oper.endWindow();
  oper.teardown();

  // like the SelectTop test, this test only logs the result instead of asserting on it
  LOG.debug("{}", sink.collectedTuples);
}
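All five snippets follow the same CollectorTestSink pattern: attach the sink to the operator's output port, drive one or more application windows, then inspect sink.collectedTuples. A generic template of that pattern is sketched below; it is not compilable on its own, since oper, its port names, testTuple and expectedCount stand in for whatever the operator under test provides:

CollectorTestSink<Object> sink = new CollectorTestSink<Object>();
oper.output.setSink(sink);        // attach the test sink to the operator's output port
oper.setup(null);                 // some operators need a mocked OperatorContext instead of null
oper.beginWindow(0);
oper.input.process(testTuple);    // push test tuples into the input port
oper.endWindow();
oper.teardown();
// collectedTuples holds everything emitted on the port during the window(s)
Assert.assertEquals("number emitted tuples", expectedCount, sink.collectedTuples.size());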