Use of org.apache.apex.malhar.lib.testbench.CollectorTestSink in project apex-malhar by apache: class InnerJoinOperatorTest, method testSqlSelect.
@SuppressWarnings({ "rawtypes", "unchecked" })
@Test
public void testSqlSelect() {
  // create operator
  InnerJoinOperator oper = new InnerJoinOperator();
  CollectorTestSink sink = new CollectorTestSink();
  oper.outport.setSink(sink);
  // set column join condition
  Condition cond = new JoinColumnEqualCondition("a", "a");
  oper.setJoinCondition(cond);
  // add columns
  oper.selectTable1Column(new ColumnIndex("b", null));
  oper.selectTable2Column(new ColumnIndex("c", null));
  oper.setup(null);
  oper.beginWindow(1);
  HashMap<String, Object> tuple = new HashMap<String, Object>();
  tuple.put("a", 0);
  tuple.put("b", 1);
  tuple.put("c", 2);
  oper.inport1.process(tuple);
  tuple = new HashMap<String, Object>();
  tuple.put("a", 1);
  tuple.put("b", 3);
  tuple.put("c", 4);
  oper.inport1.process(tuple);
  tuple = new HashMap<String, Object>();
  tuple.put("a", 0);
  tuple.put("b", 7);
  tuple.put("c", 8);
  oper.inport2.process(tuple);
  tuple = new HashMap<String, Object>();
  tuple.put("a", 1);
  tuple.put("b", 5);
  tuple.put("c", 6);
  oper.inport2.process(tuple);
  oper.endWindow();
  oper.teardown();
  LOG.debug("{}", sink.collectedTuples);
}
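The test above only logs the collected output. A minimal hedged sketch of assertions that could replace the final LOG.debug call, assuming the operator emits one merged java.util.Map per matching value of the join column "a" (keys 0 and 1 each match exactly once) and that each emitted map carries the selected columns "b" and "c":

// Hypothetical assertions (requires org.junit.Assert and java.util.Map imports);
// the expected count of 2 assumes one joined tuple per matching key.
Assert.assertEquals("joined tuples", 2, sink.collectedTuples.size());
for (Object o : sink.collectedTuples) {
  Map<?, ?> joined = (Map<?, ?>) o;
  // the selected columns "b" (from table 1) and "c" (from table 2)
  // are expected in each emitted map
  Assert.assertTrue("column b selected", joined.containsKey("b"));
  Assert.assertTrue("column c selected", joined.containsKey("c"));
}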
Use of org.apache.apex.malhar.lib.testbench.CollectorTestSink in project apex-malhar by apache: class SelectAverageTest, method testSqlSelect.
@SuppressWarnings({ "rawtypes", "unchecked" })
@Test
public void testSqlSelect() {
  // create operator
  SelectFunctionOperator oper = new SelectFunctionOperator();
  oper.addSqlFunction(new AverageFunction("b", null));
  CollectorTestSink sink = new CollectorTestSink();
  oper.outport.setSink(sink);
  oper.setup(null);
  oper.beginWindow(1);
  HashMap<String, Object> tuple = new HashMap<String, Object>();
  tuple.put("a", 0);
  tuple.put("b", 1);
  tuple.put("c", 2);
  oper.inport.process(tuple);
  tuple = new HashMap<String, Object>();
  tuple.put("a", 1);
  tuple.put("b", 3);
  tuple.put("c", 4);
  oper.inport.process(tuple);
  tuple = new HashMap<String, Object>();
  tuple.put("a", 1);
  tuple.put("b", 5);
  tuple.put("c", 6);
  oper.inport.process(tuple);
  oper.endWindow();
  oper.teardown();
  LOG.debug("{}", sink.collectedTuples);
}
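The values fed into column "b" above are 1, 3, and 5, so the expected average is (1 + 3 + 5) / 3 = 3.0. A minimal hedged check that could follow endWindow(), assuming AverageFunction emits exactly one aggregate tuple per window; the exact shape and key of that tuple are not shown in this snippet, so it is only logged here:

// Hypothetical check (requires org.junit.Assert); assumes a single
// aggregate tuple is emitted per window.
Assert.assertEquals("one aggregate per window", 1, sink.collectedTuples.size());
// Expected average of column "b": (1 + 3 + 5) / 3 = 3.0
LOG.debug("aggregate tuple: {}", sink.collectedTuples.get(0));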
Use of org.apache.apex.malhar.lib.testbench.CollectorTestSink in project apex-malhar by apache: class SelectFirstLastTest, method testSqlSelect.
@SuppressWarnings({ "rawtypes", "unchecked" })
@Test
public void testSqlSelect() {
  // create operator
  SelectFunctionOperator oper = new SelectFunctionOperator();
  oper.addSqlFunction(new FirstLastFunction("b", null, false));
  CollectorTestSink sink = new CollectorTestSink();
  oper.outport.setSink(sink);
  oper.setup(null);
  oper.beginWindow(1);
  HashMap<String, Object> tuple = new HashMap<String, Object>();
  tuple.put("a", 0);
  tuple.put("b", null);
  tuple.put("c", 2);
  oper.inport.process(tuple);
  tuple = new HashMap<String, Object>();
  tuple.put("a", 1);
  tuple.put("b", null);
  tuple.put("c", 4);
  oper.inport.process(tuple);
  tuple = new HashMap<String, Object>();
  tuple.put("a", 1);
  tuple.put("b", 5);
  tuple.put("c", 6);
  oper.inport.process(tuple);
  oper.endWindow();
  oper.teardown();
  LOG.debug("{}", sink.collectedTuples);
}
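The two SelectFunctionOperator tests above repeat the same wiring: attach a CollectorTestSink, open one window, process tuples, close the window, then inspect collectedTuples. A hedged sketch of a helper that factors out that shared cycle; the helper name runOneWindow and the fixed window id are illustrative, and only calls already used in the snippets above appear in it:

// Illustrative helper (not part of apex-malhar): drives a configured
// SelectFunctionOperator through a single window and returns what the
// attached CollectorTestSink collected. Requires java.util.List and
// java.util.HashMap imports.
@SuppressWarnings({ "rawtypes", "unchecked" })
private static List<Object> runOneWindow(SelectFunctionOperator oper, List<HashMap<String, Object>> tuples) {
  CollectorTestSink sink = new CollectorTestSink();
  oper.outport.setSink(sink);
  oper.setup(null);
  oper.beginWindow(1);
  for (HashMap<String, Object> t : tuples) {
    oper.inport.process(t);
  }
  oper.endWindow();
  oper.teardown();
  return sink.collectedTuples;
}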
Use of org.apache.apex.malhar.lib.testbench.CollectorTestSink in project apex-malhar by apache: class RedisInputOperatorTest, method testRecoveryAndIdempotency.
@Test
public void testRecoveryAndIdempotency() throws Exception {
  this.operatorStore = new RedisStore();
  this.testStore = new RedisStore();
  testStore.connect();
  ScanParams params = new ScanParams();
  params.count(1);
  testStore.put("test_abc", "789");
  testStore.put("test_def", "456");
  testStore.put("test_ghi", "123");
  RedisKeyValueInputOperator operator = new RedisKeyValueInputOperator();
  operator.setWindowDataManager(new FSWindowDataManager());
  operator.setStore(operatorStore);
  operator.setScanCount(1);
  Attribute.AttributeMap attributeMap = new Attribute.AttributeMap.DefaultAttributeMap();
  CollectorTestSink<Object> sink = new CollectorTestSink<Object>();
  operator.outputPort.setSink(sink);
  OperatorContext context = mockOperatorContext(1, attributeMap);
  try {
    operator.setup(context);
    operator.beginWindow(1);
    operator.emitTuples();
    operator.endWindow();
    int numberOfMessagesInWindow1 = sink.collectedTuples.size();
    sink.collectedTuples.clear();
    operator.beginWindow(2);
    operator.emitTuples();
    operator.endWindow();
    int numberOfMessagesInWindow2 = sink.collectedTuples.size();
    sink.collectedTuples.clear();
    // failure and then re-deployment of operator
    // Re-instantiating to reset values
    operator = new RedisKeyValueInputOperator();
    operator.setWindowDataManager(new FSWindowDataManager());
    operator.setStore(operatorStore);
    operator.setScanCount(1);
    operator.outputPort.setSink(sink);
    operator.setup(context);
    Assert.assertEquals("largest recovery window", 2, operator.getWindowDataManager().getLargestCompletedWindow());
    // replay window 1; emitTuples is called twice here, yet the assertion
    // below expects the same count as the first run
    operator.beginWindow(1);
    operator.emitTuples();
    operator.emitTuples();
    operator.endWindow();
    Assert.assertEquals("num of messages in window 1", numberOfMessagesInWindow1, sink.collectedTuples.size());
    sink.collectedTuples.clear();
    operator.beginWindow(2);
    operator.emitTuples();
    operator.endWindow();
    Assert.assertEquals("num of messages in window 2", numberOfMessagesInWindow2, sink.collectedTuples.size());
  } finally {
    for (Object e : sink.collectedTuples) {
      KeyValPair<String, String> entry = (KeyValPair<String, String>) e;
      testStore.remove(entry.getKey());
    }
    sink.collectedTuples.clear();
    operator.getWindowDataManager().committed(5);
    operator.teardown();
  }
}
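The Redis test compares per-window tuple counts before and after a simulated failure, relying on emitTuples delivering data within the same call. For input operators whose emission is not synchronous, a hedged sketch of a small polling helper over the sink; the helper name, timeout handling, and 10 ms poll interval are illustrative, and only the public collectedTuples field already used above is read:

// Illustrative helper: waits until the sink has collected at least
// expectedCount tuples or the timeout elapses, then asserts the count.
private static void waitForTuples(CollectorTestSink<Object> sink, int expectedCount,
    long timeoutMillis) throws InterruptedException {
  long deadline = System.currentTimeMillis() + timeoutMillis;
  while (sink.collectedTuples.size() < expectedCount
      && System.currentTimeMillis() < deadline) {
    Thread.sleep(10);
  }
  Assert.assertTrue("expected " + expectedCount + " tuples, got "
      + sink.collectedTuples.size(), sink.collectedTuples.size() >= expectedCount);
}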
Use of org.apache.apex.malhar.lib.testbench.CollectorTestSink in project apex-malhar by apache: class CassandraOperatorTest, method testCassandraInputOperator.
/*
 * This test can be run against a Cassandra server installed on node17.
 */
@Test
public void testCassandraInputOperator() {
  String query1 = "SELECT * FROM " + KEYSPACE + "." + "%t;";
  CassandraStore store = new CassandraStore();
  store.setNode(NODE);
  store.setKeyspace(KEYSPACE);
  AttributeMap.DefaultAttributeMap attributeMap = new AttributeMap.DefaultAttributeMap();
  attributeMap.put(DAG.APPLICATION_ID, APP_ID);
  OperatorContext context = mockOperatorContext(OPERATOR_ID, attributeMap);
  TestInputOperator inputOperator = new TestInputOperator();
  inputOperator.setStore(store);
  inputOperator.setQuery(query1);
  inputOperator.setTablename(TABLE_NAME_INPUT);
  inputOperator.setPrimaryKeyColumn("id");
  List<FieldInfo> fieldInfos = Lists.newArrayList();
  fieldInfos.add(new FieldInfo("id", "id", null));
  fieldInfos.add(new FieldInfo("age", "age", null));
  fieldInfos.add(new FieldInfo("lastname", "lastname", null));
  inputOperator.setFieldInfos(fieldInfos);
  inputOperator.insertEventsInTable(30);
  CollectorTestSink<Object> sink = new CollectorTestSink<>();
  inputOperator.outputPort.setSink(sink);
  Attribute.AttributeMap.DefaultAttributeMap portAttributes = new Attribute.AttributeMap.DefaultAttributeMap();
  portAttributes.put(Context.PortContext.TUPLE_CLASS, TestInputPojo.class);
  TestPortContext tpc = new TestPortContext(portAttributes);
  inputOperator.setup(context);
  inputOperator.outputPort.setup(tpc);
  inputOperator.activate(context);
  inputOperator.beginWindow(0);
  inputOperator.emitTuples();
  inputOperator.endWindow();
  Assert.assertEquals("rows from db", 30, sink.collectedTuples.size());
  ArrayList<Integer> listOfIDs = inputOperator.getIds();
  // Rows are not stored in the Cassandra table in the same order in which they were inserted.
  for (int i = 0; i < 10; i++) {
    TestInputPojo object = (TestInputPojo) sink.collectedTuples.get(i);
    Assert.assertTrue("id set in testpojo", listOfIDs.contains(object.getId()));
    Assert.assertEquals("name set in testpojo", inputOperator.getNames().get(object.getId()), object.getLastname());
    Assert.assertEquals("age set in testpojo", inputOperator.getAge().get(object.getId()).intValue(), object.getAge());
  }
  sink.clear();
  inputOperator.columnDataTypes.clear();
  String query2 = "SELECT * FROM " + KEYSPACE + "." + "%t where token(%p) > %v;";
  inputOperator.setQuery(query2);
  inputOperator.setStartRow(10);
  inputOperator.setup(context);
  inputOperator.outputPort.setup(tpc);
  inputOperator.activate(context);
  inputOperator.beginWindow(1);
  inputOperator.emitTuples();
  inputOperator.endWindow();
  Assert.assertEquals("rows from db", 26, sink.collectedTuples.size());
  sink.clear();
  inputOperator.columnDataTypes.clear();
  String query3 = "SELECT * FROM " + KEYSPACE + "." + "%t where token(%p) > %v LIMIT %l;";
  inputOperator.setQuery(query3);
  inputOperator.setStartRow(1);
  inputOperator.setLimit(10);
  inputOperator.setup(context);
  inputOperator.outputPort.setup(tpc);
  inputOperator.activate(context);
  inputOperator.beginWindow(2);
  inputOperator.emitTuples();
  inputOperator.endWindow();
  Assert.assertEquals("rows from db", 10, sink.collectedTuples.size());
}
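The Cassandra test re-runs the same setup/activate/window cycle three times with different query strings, whose %t, %p, %v, and %l placeholders appear to be filled in by the operator from the table name, primary key column, start row, and limit configured through the setters above. A hedged sketch of a helper that factors out that repeated cycle; the helper name and signature are illustrative, and only calls already present in the test are used:

// Illustrative helper: reconfigures the operator with the given query,
// runs a single window, and returns how many tuples the sink collected.
private static int runQueryWindow(TestInputOperator inputOperator, OperatorContext context,
    TestPortContext tpc, CollectorTestSink<Object> sink, String query, long windowId) {
  sink.clear();
  inputOperator.columnDataTypes.clear();
  inputOperator.setQuery(query);
  inputOperator.setup(context);
  inputOperator.outputPort.setup(tpc);
  inputOperator.activate(context);
  inputOperator.beginWindow(windowId);
  inputOperator.emitTuples();
  inputOperator.endWindow();
  return sink.collectedTuples.size();
}

With such a helper, the second and third runs would collapse to single assertions, for example: Assert.assertEquals("rows from db", 26, runQueryWindow(inputOperator, context, tpc, sink, query2, 1));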