Use of org.apache.apex.malhar.lib.util.FieldInfo in project apex-malhar by Apache.
In the class RedisPOJOOperatorTest, method testOutputOperator.
@Test
public void testOutputOperator() throws IOException {
  this.operatorStore = new RedisStore();
  operatorStore.connect();
  String appId = "test_appid";
  int operatorId = 0;
  operatorStore.removeCommittedWindowId(appId, operatorId);
  operatorStore.disconnect();

  RedisPOJOOutputOperator outputOperator = new RedisPOJOOutputOperator();
  // Map Redis hash fields to POJO accessors: "intValue" is a field
  // expression, "getStringValue()" a getter expression.
  ArrayList<FieldInfo> fields = new ArrayList<FieldInfo>();
  fields.add(new FieldInfo("column1", "intValue", SupportType.INTEGER));
  fields.add(new FieldInfo("column2", "getStringValue()", SupportType.STRING));
  outputOperator.setDataColumns(fields);

  try {
    com.datatorrent.api.Attribute.AttributeMap.DefaultAttributeMap attributes = new com.datatorrent.api.Attribute.AttributeMap.DefaultAttributeMap();
    attributes.put(DAG.APPLICATION_ID, appId);
    outputOperator.setStore(operatorStore);
    outputOperator.setup(mockOperatorContext(operatorId, attributes));

    outputOperator.beginWindow(101);
    KeyValPair<String, Object> keyVal = new KeyValPair<String, Object>("test_abc1", new TestClass(1, "abc"));
    outputOperator.input.process(keyVal);
    outputOperator.endWindow();
    outputOperator.teardown();

    // Verify the POJO was written as a Redis hash under the tuple's key.
    operatorStore.connect();
    Map<String, String> out = operatorStore.getMap("test_abc1");
    Assert.assertEquals("1", out.get("column1"));
    Assert.assertEquals("abc", out.get("column2"));
  } finally {
    operatorStore.remove("test_abc1");
    operatorStore.disconnect();
  }
}
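The snippet writes a TestClass POJO that it does not define. Below is a minimal sketch consistent with the field expressions above, given as an assumption; the actual class in RedisPOJOOperatorTest may differ in detail.

// Hypothetical sketch of the TestClass POJO assumed by the FieldInfo
// expressions above: "intValue" is read as a field, "getStringValue()"
// through a getter.
public static class TestClass {
  public int intValue;
  private String stringValue;

  public TestClass(int intValue, String stringValue) {
    this.intValue = intValue;
    this.stringValue = stringValue;
  }

  public String getStringValue() {
    return stringValue;
  }
}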
Use of org.apache.apex.malhar.lib.util.FieldInfo in project apex-malhar by Apache.
In the class TestPOJO, method getFieldsInfo.
public static List<FieldInfo> getFieldsInfo() {
  List<FieldInfo> fieldsInfo = new ArrayList<FieldInfo>();
  fieldsInfo.add(new FieldInfo("name", "name", SupportType.STRING));
  fieldsInfo.add(new FieldInfo("age", "age", SupportType.INTEGER));
  fieldsInfo.add(new FieldInfo("address", "address", SupportType.STRING));
  return fieldsInfo;
}
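Each FieldInfo here pairs an external column name with a POJO field expression of the same name. The mappings assume the POJO exposes fields like the following; this is a trimmed, hypothetical view, and the real TestPOJO in the project defines additional members.

// Trimmed sketch of TestPOJO showing only the mapped fields.
public class TestPOJO {
  private String name;
  private int age;
  private String address;

  public String getName() { return name; }
  public void setName(String name) { this.name = name; }
  public int getAge() { return age; }
  public void setAge(int age) { this.age = age; }
  public String getAddress() { return address; }
  public void setAddress(String address) { this.address = address; }
}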
Use of org.apache.apex.malhar.lib.util.FieldInfo in project apex-malhar by Apache.
In the class JdbcPojoPollableOpeartorTest, method testDBPoller.
@Test
public void testDBPoller() throws Exception {
  insertEvents(10, true, 0);

  JdbcStore store = new JdbcStore();
  store.setDatabaseDriver(DB_DRIVER);
  store.setDatabaseUrl(URL);

  List<FieldInfo> fieldInfos = getFieldInfos();

  Attribute.AttributeMap.DefaultAttributeMap portAttributes = new Attribute.AttributeMap.DefaultAttributeMap();
  portAttributes.put(Context.PortContext.TUPLE_CLASS, TestPOJOEvent.class);
  TestPortContext tpc = new TestPortContext(portAttributes);

  JdbcPOJOPollInputOperator inputOperator = new JdbcPOJOPollInputOperator();
  inputOperator.setStore(store);
  inputOperator.setTableName(TABLE_POJO_NAME);
  inputOperator.setKey("id");
  inputOperator.setFieldInfos(fieldInfos);
  inputOperator.setFetchSize(100);
  inputOperator.setBatchSize(100);
  inputOperator.setPartitionCount(2);

  Collection<com.datatorrent.api.Partitioner.Partition<AbstractJdbcPollInputOperator<Object>>> newPartitions =
      inputOperator.definePartitions(new ArrayList<Partitioner.Partition<AbstractJdbcPollInputOperator<Object>>>(), null);

  // Set up and activate each partitioned instance.
  int operatorId = 0;
  for (com.datatorrent.api.Partitioner.Partition<AbstractJdbcPollInputOperator<Object>> partition : newPartitions) {
    Attribute.AttributeMap.DefaultAttributeMap partitionAttributeMap = new Attribute.AttributeMap.DefaultAttributeMap();
    partitionAttributeMap.put(DAG.APPLICATION_ID, APP_ID);
    partitionAttributeMap.put(Context.DAGContext.APPLICATION_PATH, dir);
    OperatorContext partitioningContext = mockOperatorContext(operatorId++, partitionAttributeMap);
    JdbcPOJOPollInputOperator oper = (JdbcPOJOPollInputOperator) partition.getPartitionedInstance();
    oper.outputPort.setup(tpc);
    oper.setScheduledExecutorService(mockscheduler);
    oper.setup(partitioningContext);
    oper.activate(partitioningContext);
  }

  Iterator<com.datatorrent.api.Partitioner.Partition<AbstractJdbcPollInputOperator<Object>>> itr = newPartitions.iterator();

  // The first partitions serve range queries; the last serves the polling query.
  JdbcPOJOPollInputOperator firstInstance = (JdbcPOJOPollInputOperator) itr.next().getPartitionedInstance();
  CollectorTestSink<Object> sink1 = new CollectorTestSink<>();
  firstInstance.outputPort.setSink(sink1);
  firstInstance.beginWindow(0);
  firstInstance.pollRecords();
  try {
    // A range (non-poller) partition closes its connection after the first
    // poll, so a second pollRecords() is expected to fail.
    firstInstance.pollRecords();
    Assert.fail("expected closed connection");
  } catch (Exception e) {
    // expected
  }
  firstInstance.emitTuples();
  firstInstance.endWindow();
  Assert.assertEquals("rows from db", 5, sink1.collectedTuples.size());
  for (Object tuple : sink1.collectedTuples) {
    TestPOJOEvent pojoEvent = (TestPOJOEvent) tuple;
    Assert.assertTrue("date", pojoEvent.getStartDate() instanceof Date);
    Assert.assertTrue("id", pojoEvent.getId() < 5);
  }

  JdbcPOJOPollInputOperator secondInstance = (JdbcPOJOPollInputOperator) itr.next().getPartitionedInstance();
  CollectorTestSink<Object> sink2 = new CollectorTestSink<>();
  secondInstance.outputPort.setSink(sink2);
  secondInstance.beginWindow(0);
  secondInstance.pollRecords();
  secondInstance.emitTuples();
  secondInstance.endWindow();
  Assert.assertEquals("rows from db", 5, sink2.collectedTuples.size());
  for (Object tuple : sink2.collectedTuples) {
    TestPOJOEvent pojoEvent = (TestPOJOEvent) tuple;
    Assert.assertTrue("id", pojoEvent.getId() >= 5 && pojoEvent.getId() < 10);
  }

  // Rows inserted after activation are picked up by the polling partition.
  insertEvents(4, false, 10);
  JdbcPOJOPollInputOperator thirdInstance = (JdbcPOJOPollInputOperator) itr.next().getPartitionedInstance();
  CollectorTestSink<Object> sink3 = new CollectorTestSink<>();
  thirdInstance.outputPort.setSink(sink3);
  thirdInstance.beginWindow(0);
  thirdInstance.pollRecords();
  thirdInstance.pollRecords();
  thirdInstance.emitTuples();
  thirdInstance.endWindow();
  Assert.assertEquals("rows from db", 4, sink3.collectedTuples.size());
}
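The test relies on a getFieldInfos() helper that the snippet does not include. Here is a plausible sketch, assuming the columns are the four named in setColumnsExpression(...) in the next example and that date-like fields are mapped as generic objects; the real helper in the test class may use different expressions or SupportType values.

// Hypothetical reconstruction of the helper referenced above.
private List<FieldInfo> getFieldInfos() {
  List<FieldInfo> fieldInfos = new ArrayList<FieldInfo>();
  fieldInfos.add(new FieldInfo("ID", "id", SupportType.INTEGER));
  fieldInfos.add(new FieldInfo("STARTDATE", "startDate", SupportType.OBJECT));
  fieldInfos.add(new FieldInfo("STARTTIME", "startTime", SupportType.OBJECT));
  fieldInfos.add(new FieldInfo("STARTTIMESTAMP", "startTimestamp", SupportType.OBJECT));
  return fieldInfos;
}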
Use of org.apache.apex.malhar.lib.util.FieldInfo in project apex-malhar by Apache.
In the class JdbcPojoPollableOpeartorTest, method testDBPollerExtraField.
@Test
public void testDBPollerExtraField() throws Exception {
  insertEvents(10, true, 0);

  JdbcStore store = new JdbcStore();
  store.setDatabaseDriver(DB_DRIVER);
  store.setDatabaseUrl(URL);

  List<FieldInfo> fieldInfos = getFieldInfos();

  Attribute.AttributeMap.DefaultAttributeMap portAttributes = new Attribute.AttributeMap.DefaultAttributeMap();
  portAttributes.put(Context.PortContext.TUPLE_CLASS, TestPOJOEvent.class);
  TestPortContext tpc = new TestPortContext(portAttributes);

  JdbcPOJOPollInputOperator inputOperator = new JdbcPOJOPollInputOperator();
  inputOperator.setStore(store);
  inputOperator.setTableName(TABLE_POJO_NAME);
  // Query only an explicit column list, even though the table or POJO
  // may carry extra fields.
  inputOperator.setColumnsExpression("ID,STARTDATE,STARTTIME,STARTTIMESTAMP");
  inputOperator.setKey("id");
  inputOperator.setFieldInfos(fieldInfos);
  inputOperator.setFetchSize(100);
  inputOperator.setBatchSize(100);
  inputOperator.setPartitionCount(2);

  Collection<com.datatorrent.api.Partitioner.Partition<AbstractJdbcPollInputOperator<Object>>> newPartitions =
      inputOperator.definePartitions(new ArrayList<Partitioner.Partition<AbstractJdbcPollInputOperator<Object>>>(), null);

  // Set up and activate each partitioned instance.
  int operatorId = 0;
  for (com.datatorrent.api.Partitioner.Partition<AbstractJdbcPollInputOperator<Object>> partition : newPartitions) {
    Attribute.AttributeMap.DefaultAttributeMap partitionAttributeMap = new Attribute.AttributeMap.DefaultAttributeMap();
    partitionAttributeMap.put(DAG.APPLICATION_ID, APP_ID);
    partitionAttributeMap.put(Context.DAGContext.APPLICATION_PATH, dir);
    OperatorContext partitioningContext = mockOperatorContext(operatorId++, partitionAttributeMap);
    JdbcPOJOPollInputOperator oper = (JdbcPOJOPollInputOperator) partition.getPartitionedInstance();
    oper.outputPort.setup(tpc);
    oper.setScheduledExecutorService(mockscheduler);
    oper.setup(partitioningContext);
    oper.activate(partitioningContext);
  }

  Iterator<com.datatorrent.api.Partitioner.Partition<AbstractJdbcPollInputOperator<Object>>> itr = newPartitions.iterator();

  // The first partitions serve range queries; the last serves the polling query.
  JdbcPOJOPollInputOperator firstInstance = (JdbcPOJOPollInputOperator) itr.next().getPartitionedInstance();
  CollectorTestSink<Object> sink1 = new CollectorTestSink<>();
  firstInstance.outputPort.setSink(sink1);
  firstInstance.beginWindow(0);
  // The prepared statement is open before the poll and closed once the
  // range partition has fetched its slice.
  Assert.assertFalse(firstInstance.ps.isClosed());
  firstInstance.pollRecords();
  Assert.assertTrue(firstInstance.ps.isClosed());
  firstInstance.emitTuples();
  firstInstance.endWindow();
  Assert.assertEquals("rows from db", 5, sink1.collectedTuples.size());
  for (Object tuple : sink1.collectedTuples) {
    TestPOJOEvent pojoEvent = (TestPOJOEvent) tuple;
    Assert.assertTrue("date", pojoEvent.getStartDate() instanceof Date);
    Assert.assertTrue("id", pojoEvent.getId() < 5);
  }

  JdbcPOJOPollInputOperator secondInstance = (JdbcPOJOPollInputOperator) itr.next().getPartitionedInstance();
  CollectorTestSink<Object> sink2 = new CollectorTestSink<>();
  secondInstance.outputPort.setSink(sink2);
  secondInstance.beginWindow(0);
  secondInstance.pollRecords();
  secondInstance.emitTuples();
  secondInstance.endWindow();
  Assert.assertEquals("rows from db", 5, sink2.collectedTuples.size());
  for (Object tuple : sink2.collectedTuples) {
    TestPOJOEvent pojoEvent = (TestPOJOEvent) tuple;
    Assert.assertTrue("id", pojoEvent.getId() >= 5 && pojoEvent.getId() < 10);
  }

  // Rows inserted after activation are picked up by the polling partition.
  insertEvents(4, false, 10);
  JdbcPOJOPollInputOperator thirdInstance = (JdbcPOJOPollInputOperator) itr.next().getPartitionedInstance();
  CollectorTestSink<Object> sink3 = new CollectorTestSink<>();
  thirdInstance.outputPort.setSink(sink3);
  thirdInstance.beginWindow(0);
  thirdInstance.pollRecords();
  thirdInstance.emitTuples();
  thirdInstance.endWindow();
  Assert.assertEquals("rows from db", 4, sink3.collectedTuples.size());
}
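Both tests seed the table through an insertEvents(numEvents, cleanExisting, startRowId) helper whose signature is inferred from the calls above. The body below is a sketch under the assumption of a plain JDBC insert into the four columns the tests read; it is not the project's actual implementation.

// Hypothetical re-creation of the seeding helper (requires java.sql.* imports).
// Parameter meanings inferred from insertEvents(10, true, 0) and
// insertEvents(4, false, 10).
private void insertEvents(int numEvents, boolean cleanExisting, int startRowId) throws Exception {
  try (Connection con = DriverManager.getConnection(URL);
      Statement stmt = con.createStatement()) {
    if (cleanExisting) {
      stmt.executeUpdate("DELETE FROM " + TABLE_POJO_NAME);
    }
    String sql = "INSERT INTO " + TABLE_POJO_NAME
        + " (ID, STARTDATE, STARTTIME, STARTTIMESTAMP) VALUES (?, ?, ?, ?)";
    try (PreparedStatement ps = con.prepareStatement(sql)) {
      long now = System.currentTimeMillis();
      for (int i = 0; i < numEvents; i++) {
        ps.setInt(1, startRowId + i);
        ps.setDate(2, new java.sql.Date(now));
        ps.setTime(3, new java.sql.Time(now));
        ps.setTimestamp(4, new java.sql.Timestamp(now));
        ps.executeUpdate();
      }
    }
  }
}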
Use of org.apache.apex.malhar.lib.util.FieldInfo in project apex-malhar by Apache.
In the class StreamingJsonParser, method createFieldInfoMap.
/**
 * Parses a field-mapping string of the form
 * "fieldName in POJO:field in JSON:data type", with records separated by
 * RECORD_SEPARATOR and parts by FIELD_SEPARATOR, into FieldInfo entries.
 *
 * @return the list of FieldInfo parsed from the given string
 */
private List<FieldInfo> createFieldInfoMap(String str) {
  fieldInfos = new ArrayList<FieldInfo>();
  StringTokenizer strtok = new StringTokenizer(str, RECORD_SEPARATOR);
  while (strtok.hasMoreTokens()) {
    String[] token = strtok.nextToken().split(FIELD_SEPARATOR);
    try {
      fieldInfos.add(new FieldInfo(token[0], token[1], SupportType.valueOf(token[2])));
    } catch (Exception e) {
      // A malformed record or unknown SupportType is logged and skipped.
      LOG.error("Invalid support type", e);
    }
  }
  return fieldInfos;
}
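For illustration, assume RECORD_SEPARATOR is "," and FIELD_SEPARATOR is ":" (hypothetical values; the actual separator constants are defined elsewhere in StreamingJsonParser). A specification string then parses as follows.

// Hypothetical separators, for illustration only.
String spec = "name:fullName:STRING,age:age:INTEGER";
// createFieldInfoMap(spec) would yield two entries, equivalent to:
//   new FieldInfo("name", "fullName", SupportType.STRING)
//   new FieldInfo("age", "age", SupportType.INTEGER)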