Use of org.apache.apex.malhar.lib.testbench.CollectorTestSink in project apex-malhar by apache.
In the class CSVParserTest, the method TestParserWithHeader:
@Test
public void TestParserWithHeader() {
  CsvToMapParser parser = new CsvToMapParser();
  parser.setFieldDelimiter(',');
  parser.setLineDelimiter("\n");
  parser.setHasHeader(true);
  // Declare the schema: Eid (INTEGER), Name (STRING), Salary (LONG).
  ArrayList<CsvToMapParser.Field> fields = new ArrayList<CsvToMapParser.Field>();
  Field field1 = new Field();
  field1.setName("Eid");
  field1.setType("INTEGER");
  fields.add(field1);
  Field field2 = new Field();
  field2.setName("Name");
  field2.setType("STRING");
  fields.add(field2);
  Field field3 = new Field();
  field3.setName("Salary");
  field3.setType("LONG");
  fields.add(field3);
  parser.setFields(fields);
  // Attach a CollectorTestSink to the output port so emitted tuples can be inspected.
  CollectorTestSink<Object> sink = new CollectorTestSink<Object>();
  parser.output.setSink(sink);
  parser.setup(null);
  String input = "Eid,Name,Salary\n123,xyz,567777\n321,abc,7777000\n456,pqr,5454545454";
  parser.input.process(input.getBytes());
  parser.teardown();
  // The header row is emitted field by field, followed by one map per data row.
  Assert.assertEquals("Tuples read should be same ", 6, sink.collectedTuples.size());
  Assert.assertEquals("Eid", sink.collectedTuples.get(0));
  Assert.assertEquals("Name", sink.collectedTuples.get(1));
  Assert.assertEquals("Salary", sink.collectedTuples.get(2));
  Assert.assertEquals("{Name=xyz, Salary=567777, Eid=123}", sink.collectedTuples.get(3).toString());
  Assert.assertEquals("{Name=abc, Salary=7777000, Eid=321}", sink.collectedTuples.get(4).toString());
  Assert.assertEquals("{Name=pqr, Salary=5454545454, Eid=456}", sink.collectedTuples.get(5).toString());
  sink.clear();
}
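All of the snippets in this collection share the same four-step CollectorTestSink pattern: wire the sink to an output port, run the operator lifecycle, push tuples into an input port, then assert on sink.collectedTuples. A minimal sketch of that bare pattern, written against a hypothetical MyOperator with ports in and out (the operator and its ports are illustrative, not part of Malhar; the sketch would live in a JUnit test class with the same imports as the examples here):

@Test
public void testEmitsOneTuplePerInput() {
  MyOperator oper = new MyOperator();                                // hypothetical operator under test
  CollectorTestSink<Object> sink = new CollectorTestSink<Object>();
  oper.out.setSink(sink);                                            // 1. wire the sink to the output port
  oper.setup(null);                                                  // 2. run the lifecycle around a window
  oper.beginWindow(0);
  oper.in.process("some input");                                     // 3. push a tuple into the input port
  oper.endWindow();
  oper.teardown();
  Assert.assertEquals("emitted tuples", 1, sink.collectedTuples.size()); // 4. inspect what was collected
  sink.clear();
}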
Use of org.apache.apex.malhar.lib.testbench.CollectorTestSink in project apex-malhar by apache.
In the class RabbitMQInputOperatorTest, the method testRecoveryAndIdempotency:
@Test
public void testRecoveryAndIdempotency() throws Exception {
  RabbitMQInputOperator operator = new RabbitMQInputOperator();
  // FSWindowDataManager persists per-window data so windows can be replayed after a failure.
  operator.setWindowDataManager(new FSWindowDataManager());
  operator.setHost("localhost");
  operator.setExchange("testEx");
  operator.setExchangeType("fanout");
  Attribute.AttributeMap attributeMap = new Attribute.AttributeMap.DefaultAttributeMap();
  CollectorTestSink<Object> sink = new CollectorTestSink<Object>();
  operator.outputPort.setSink(sink);
  OperatorContext context = mockOperatorContext(1, attributeMap);
  operator.setup(context);
  operator.activate(context);
  final RabbitMQMessageGenerator publisher = new RabbitMQMessageGenerator();
  publisher.setup();
  publisher.generateMessages(5);
  Thread.sleep(10000);
  operator.beginWindow(1);
  operator.emitTuples();
  operator.endWindow();
  operator.deactivate();
  Assert.assertEquals("num of messages in window 1", 15, sink.collectedTuples.size());
  // Simulate failure and re-deployment of the operator.
  sink.collectedTuples.clear();
  operator.setup(context);
  operator.activate(context);
  Assert.assertEquals("largest recovery window", 1, operator.getWindowDataManager().getLargestCompletedWindow());
  // Replaying window 1 must yield the same tuples even though emitTuples() is not called again.
  operator.beginWindow(1);
  operator.endWindow();
  Assert.assertEquals("num of messages in window 1", 15, sink.collectedTuples.size());
  sink.collectedTuples.clear();
  operator.deactivate();
  operator.teardown();
  operator.getWindowDataManager().committed(1);
  publisher.teardown();
}
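The second half of the test passes only because the operator replays window 1 from its WindowDataManager instead of reading from RabbitMQ again. A toy, self-contained sketch of that replay decision, with a plain Map standing in for FSWindowDataManager and a List standing in for CollectorTestSink (none of the names below are Malhar APIs):

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class ReplaySketch {
  private final Map<Long, List<String>> saved = new HashMap<>(); // stand-in for the window data manager
  private final List<String> emitted = new ArrayList<>();        // stand-in for the collector sink
  private long largestCompletedWindow = -1;
  private List<String> currentWindow;
  private long currentWindowId;

  void beginWindow(long windowId) {
    currentWindowId = windowId;
    currentWindow = new ArrayList<>();
    if (windowId <= largestCompletedWindow) {
      emitted.addAll(saved.get(windowId));   // redeploy: replay exactly what the window held before
    }
  }

  void emitFromSource(String message) {      // only called on the first pass over a window
    emitted.add(message);
    currentWindow.add(message);
  }

  void endWindow() {
    if (currentWindowId > largestCompletedWindow) {
      saved.put(currentWindowId, currentWindow);
      largestCompletedWindow = currentWindowId;
    }
  }

  public static void main(String[] args) {
    ReplaySketch op = new ReplaySketch();
    op.beginWindow(1);
    op.emitFromSource("m1");
    op.emitFromSource("m2");
    op.endWindow();
    op.emitted.clear();                      // simulate failure and re-deployment
    op.beginWindow(1);                       // window 1 is replayed, nothing new is read from the source
    op.endWindow();
    System.out.println(op.emitted);          // prints [m1, m2]
  }
}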
Use of org.apache.apex.malhar.lib.testbench.CollectorTestSink in project apex-malhar by apache.
In the class ChangeTest, the method testNodeProcessingSchema:
/**
 * @param oper the Change operator instance under test
 */
@SuppressWarnings({ "rawtypes", "unchecked" })
public <V extends Number> void testNodeProcessingSchema(Change<V> oper) {
  CollectorTestSink changeSink = new CollectorTestSink();
  CollectorTestSink percentSink = new CollectorTestSink();
  oper.change.setSink(changeSink);
  oper.percent.setSink(percentSink);
  oper.beginWindow(0);
  // One base value, then three data values; each data tuple yields a change and a percent change.
  oper.base.process(oper.getValue(10));
  oper.data.process(oper.getValue(5));
  oper.data.process(oper.getValue(15));
  oper.data.process(oper.getValue(20));
  oper.endWindow();
  Assert.assertEquals("number emitted tuples", 3, changeSink.collectedTuples.size());
  Assert.assertEquals("number emitted tuples", 3, percentSink.collectedTuples.size());
  log.debug("\nLogging tuples");
  for (Object o : changeSink.collectedTuples) {
    log.debug(String.format("change %s", o));
  }
  for (Object o : percentSink.collectedTuples) {
    log.debug(String.format("percent change %s", o));
  }
}
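The helper above is generic over the number type; in the surrounding test class it would typically be driven from concrete @Test methods, along these lines (the wrapper methods are an assumption, not copied from ChangeTest):

@Test
public void testNodeProcessingInteger() {
  testNodeProcessingSchema(new Change<Integer>());  // assumed wrapper; any Number type follows the same path
}

@Test
public void testNodeProcessingDouble() {
  testNodeProcessingSchema(new Change<Double>());
}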
Use of org.apache.apex.malhar.lib.testbench.CollectorTestSink in project apex-malhar by apache.
In the class DeleteOperatorTest, the method testSqlSelect:
@SuppressWarnings({ "rawtypes", "unchecked" })
@Test
public void testSqlSelect() {
  // Create the operator with a DELETE condition of a = 1.
  DeleteOperator oper = new DeleteOperator();
  EqualValueCondition condition = new EqualValueCondition();
  condition.addEqualValue("a", 1);
  oper.setCondition(condition);
  CollectorTestSink sink = new CollectorTestSink();
  oper.outport.setSink(sink);
  oper.setup(null);
  oper.beginWindow(1);
  HashMap<String, Object> tuple = new HashMap<String, Object>();
  tuple.put("a", 0);
  tuple.put("b", 1);
  tuple.put("c", 2);
  oper.inport.process(tuple);
  tuple = new HashMap<String, Object>();
  tuple.put("a", 1);
  tuple.put("b", 3);
  tuple.put("c", 4);
  oper.inport.process(tuple);
  tuple = new HashMap<String, Object>();
  tuple.put("a", 1);
  tuple.put("b", 5);
  tuple.put("c", 6);
  oper.inport.process(tuple);
  oper.endWindow();
  oper.teardown();
  LOG.debug("{}", sink.collectedTuples);
}
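This test only logs whatever DeleteOperator lets through. If one also wanted to assert on the result, and assuming the operator drops the rows matching the condition (a = 1) and passes the rest on, a check such as the following could be appended after endWindow() (the expected count is an assumption about the operator's semantics, not something the test states):

// Assumption: DELETE WHERE a = 1 removes the two matching rows, so only the a = 0 row survives.
Assert.assertEquals("rows surviving the delete", 1, sink.collectedTuples.size());
HashMap<String, Object> survivor = (HashMap<String, Object>) sink.collectedTuples.get(0);
Assert.assertEquals(Integer.valueOf(0), survivor.get("a"));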
Use of org.apache.apex.malhar.lib.testbench.CollectorTestSink in project apex-malhar by apache.
In the class GroupByOperatorTest, the method testSqlGroupBy:
@SuppressWarnings({ "rawtypes", "unchecked" })
@Test
public void testSqlGroupBy() {
  // Create the operator: group by column "b", aggregate SUM(c), with a WHERE condition of a = 1.
  GroupByHavingOperator oper = new GroupByHavingOperator();
  oper.addColumnGroupByIndex(new ColumnIndex("b", null));
  try {
    oper.addAggregateIndex(new SumFunction("c", null));
  } catch (Exception e) {
    // Abort the test if the aggregate index cannot be registered.
    e.printStackTrace();
    return;
  }
  EqualValueCondition condition = new EqualValueCondition();
  condition.addEqualValue("a", 1);
  oper.setCondition(condition);
  CollectorTestSink sink = new CollectorTestSink();
  oper.outport.setSink(sink);
  oper.setup(null);
  oper.beginWindow(1);
  HashMap<String, Object> tuple = new HashMap<String, Object>();
  tuple.put("a", 1);
  tuple.put("b", 1);
  tuple.put("c", 2);
  oper.inport.process(tuple);
  tuple = new HashMap<String, Object>();
  tuple.put("a", 1);
  tuple.put("b", 1);
  tuple.put("c", 4);
  oper.inport.process(tuple);
  tuple = new HashMap<String, Object>();
  tuple.put("a", 1);
  tuple.put("b", 2);
  tuple.put("c", 6);
  oper.inport.process(tuple);
  tuple = new HashMap<String, Object>();
  tuple.put("a", 1);
  tuple.put("b", 2);
  tuple.put("c", 7);
  oper.inport.process(tuple);
  oper.endWindow();
  oper.teardown();
  LOG.debug("{}", sink.collectedTuples);
}
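As with the delete test, the result is only logged. Under the assumption that GroupByHavingOperator emits one aggregated map per distinct value of the group-by column "b", the input above would yield two groups, b = 1 with sum(c) = 6 and b = 2 with sum(c) = 13; since the test does not reveal the map key used for the aggregate, only the group count is asserted in this hedged sketch:

// Assumption: one output map per group of "b"; exact map keys for the aggregate are not asserted here.
Assert.assertEquals("number of groups", 2, sink.collectedTuples.size());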