Use of org.apache.apex.malhar.lib.util.FieldInfo in project apex-malhar by apache.
The class JdbcPOJOInputOperator, method setup:
@Override
public void setup(Context.OperatorContext context) {
  Preconditions.checkArgument(query != null || tableName != null, "either query or tableName must be set");
  super.setup(context);
  try {
    // close the query statement prepared by the superclass, as it is not needed here
    queryStatement.close();
    if (query == null && columnsExpression == null) {
      // build a comma-separated column list from the configured field infos
      StringBuilder columns = new StringBuilder();
      for (int i = 0; i < fieldInfos.size(); i++) {
        columns.append(fieldInfos.get(i).getColumnName());
        if (i < fieldInfos.size() - 1) {
          columns.append(",");
        }
      }
      columnsExpression = columns.toString();
      LOG.debug("select expr {}", columnsExpression);
    }
    preparedStatement = store.connection.prepareStatement(queryToRetrieveData());
    if (columnDataTypes == null) {
      populateColumnDataTypes();
    }
  } catch (SQLException e) {
    throw new RuntimeException(e);
  }
  for (FieldInfo fi : fieldInfos) {
    columnFieldSetters.add(new ActiveFieldInfo(fi));
  }
}
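The column list built in setup() is driven entirely by the operator's fieldInfos. A minimal configuration sketch follows; it is not taken from the project and assumes the operator exposes setTableName(...) and setFieldInfos(...) setters, named here by analogy with the Cassandra and JDBC snippets further down this page.

// Sketch only: hypothetical wiring of FieldInfo into the JDBC input operator.
// FieldInfo(columnName, pojoFieldExpression, type) matches the constructor used in the snippets below.
List<FieldInfo> fieldInfos = new ArrayList<>();
fieldInfos.add(new FieldInfo("id", "id", FieldInfo.SupportType.INTEGER));
fieldInfos.add(new FieldInfo("lastname", "lastname", FieldInfo.SupportType.STRING));

JdbcPOJOInputOperator input = new JdbcPOJOInputOperator();
input.setTableName("test_table");   // hypothetical table name; assumed setter
input.setFieldInfos(fieldInfos);    // drives the columnsExpression built in setup(); assumed setter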
Use of org.apache.apex.malhar.lib.util.FieldInfo in project apex-malhar by apache.
The class CassandraPOJOOutputOperator, method prepareStatementFromFieldsAndTableName:
private PreparedStatement prepareStatementFromFieldsAndTableName() {
  if (tablename == null || tablename.length() == 0) {
    throw new RuntimeException("Please specify query or table name.");
  }
  StringBuilder queryfields = new StringBuilder();
  StringBuilder values = new StringBuilder();
  for (FieldInfo fieldInfo : fieldInfos) {
    if (queryfields.length() == 0) {
      queryfields.append(fieldInfo.getColumnName());
      values.append("?");
    } else {
      queryfields.append(",").append(fieldInfo.getColumnName());
      values.append(",").append("?");
    }
  }
  String statement = "INSERT INTO " + store.keyspace + "." + tablename + " (" + queryfields.toString() + ") " + "VALUES (" + values.toString() + ");";
  LOG.debug("statement is {}", statement);
  return store.getSession().prepare(statement);
}
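To make the string assembly concrete, here is a small standalone illustration of the same column/placeholder loop. It is not part of apex-malhar; the keyspace "demo", the table "users" and the column names are made up.

import java.util.Arrays;
import java.util.List;

public class InsertStatementSketch {
  public static void main(String[] args) {
    List<String> columns = Arrays.asList("id", "age", "lastname");
    StringBuilder queryfields = new StringBuilder();
    StringBuilder values = new StringBuilder();
    for (String column : columns) {
      if (queryfields.length() > 0) {
        queryfields.append(",");
        values.append(",");
      }
      queryfields.append(column);
      values.append("?");
    }
    // Prints: INSERT INTO demo.users (id,age,lastname) VALUES (?,?,?);
    System.out.println("INSERT INTO demo.users (" + queryfields + ") VALUES (" + values + ");");
  }
}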
Use of org.apache.apex.malhar.lib.util.FieldInfo in project apex-malhar by apache.
The class CassandraPOJOOutputOperator, method populateFieldInfosFromPojo:
private void populateFieldInfosFromPojo(ColumnDefinitions rsMetaData) {
  fieldInfos = Lists.newArrayList();
  Field[] fields = pojoClass.getDeclaredFields();
  for (int i = 0; i < rsMetaData.size(); i++) {
    String columnName = rsMetaData.getName(i);
    String pojoField = getMatchingField(fields, columnName);
    if (pojoField != null && pojoField.length() != 0) {
      fieldInfos.add(new FieldInfo(columnName, pojoField, null));
    } else {
      LOG.warn("Couldn't find a corresponding POJO field for column: {}", columnName);
    }
  }
}
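getMatchingField is not shown on this page. A plausible sketch follows, assuming it simply performs a case-insensitive match between the POJO's field names and the column name; the actual implementation in CassandraPOJOOutputOperator may differ.

// Assumption: a straightforward case-insensitive name match (illustrative only).
private String getMatchingField(Field[] fields, String columnName) {
  for (Field field : fields) {
    if (field.getName().equalsIgnoreCase(columnName)) {
      return field.getName();
    }
  }
  return null;
}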
Use of org.apache.apex.malhar.lib.util.FieldInfo in project apex-malhar by apache.
The class CassandraOperatorTest, method testCassandraInputOperator:
/*
 * This test can be run against a Cassandra server installed on node17.
 */
@Test
public void testCassandraInputOperator() {
  // %t is substituted by the operator with the configured table name
  String query1 = "SELECT * FROM " + KEYSPACE + "." + "%t;";
  CassandraStore store = new CassandraStore();
  store.setNode(NODE);
  store.setKeyspace(KEYSPACE);
  AttributeMap.DefaultAttributeMap attributeMap = new AttributeMap.DefaultAttributeMap();
  attributeMap.put(DAG.APPLICATION_ID, APP_ID);
  OperatorContext context = mockOperatorContext(OPERATOR_ID, attributeMap);
  TestInputOperator inputOperator = new TestInputOperator();
  inputOperator.setStore(store);
  inputOperator.setQuery(query1);
  inputOperator.setTablename(TABLE_NAME_INPUT);
  inputOperator.setPrimaryKeyColumn("id");
  List<FieldInfo> fieldInfos = Lists.newArrayList();
  fieldInfos.add(new FieldInfo("id", "id", null));
  fieldInfos.add(new FieldInfo("age", "age", null));
  fieldInfos.add(new FieldInfo("lastname", "lastname", null));
  inputOperator.setFieldInfos(fieldInfos);
  inputOperator.insertEventsInTable(30);
  CollectorTestSink<Object> sink = new CollectorTestSink<>();
  inputOperator.outputPort.setSink(sink);
  Attribute.AttributeMap.DefaultAttributeMap portAttributes = new Attribute.AttributeMap.DefaultAttributeMap();
  portAttributes.put(Context.PortContext.TUPLE_CLASS, TestInputPojo.class);
  TestPortContext tpc = new TestPortContext(portAttributes);
  inputOperator.setup(context);
  inputOperator.outputPort.setup(tpc);
  inputOperator.activate(context);
  inputOperator.beginWindow(0);
  inputOperator.emitTuples();
  inputOperator.endWindow();
  Assert.assertEquals("rows from db", 30, sink.collectedTuples.size());
  ArrayList<Integer> listOfIDs = inputOperator.getIds();
  // Rows are not necessarily returned in the order in which they were inserted into the Cassandra table.
  for (int i = 0; i < 10; i++) {
    TestInputPojo object = (TestInputPojo) sink.collectedTuples.get(i);
    Assert.assertTrue("id set in testpojo", listOfIDs.contains(object.getId()));
    Assert.assertEquals("name set in testpojo", inputOperator.getNames().get(object.getId()), object.getLastname());
    Assert.assertEquals("age set in testpojo", inputOperator.getAge().get(object.getId()).intValue(), object.getAge());
  }
  sink.clear();
  inputOperator.columnDataTypes.clear();
  // %p is substituted with the primary key column, %v with a value derived from the configured start row
  String query2 = "SELECT * FROM " + KEYSPACE + "." + "%t where token(%p) > %v;";
  inputOperator.setQuery(query2);
  inputOperator.setStartRow(10);
  inputOperator.setup(context);
  inputOperator.outputPort.setup(tpc);
  inputOperator.activate(context);
  inputOperator.beginWindow(1);
  inputOperator.emitTuples();
  inputOperator.endWindow();
  Assert.assertEquals("rows from db", 26, sink.collectedTuples.size());
  sink.clear();
  inputOperator.columnDataTypes.clear();
  // %l is substituted with the configured limit
  String query3 = "SELECT * FROM " + KEYSPACE + "." + "%t where token(%p) > %v LIMIT %l;";
  inputOperator.setQuery(query3);
  inputOperator.setStartRow(1);
  inputOperator.setLimit(10);
  inputOperator.setup(context);
  inputOperator.outputPort.setup(tpc);
  inputOperator.activate(context);
  inputOperator.beginWindow(2);
  inputOperator.emitTuples();
  inputOperator.endWindow();
  Assert.assertEquals("rows from db", 10, sink.collectedTuples.size());
}
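For readers unfamiliar with the query templates above, a rough illustration of how the placeholders map to the test's setters follows. This is not operator code; the real substitution happens inside the Cassandra input operator and may differ in detail.

// Illustration only: how %t/%p/%v/%l relate to setTablename, setPrimaryKeyColumn, setStartRow and setLimit.
String template = "SELECT * FROM keyspace.%t where token(%p) > %v LIMIT %l;";
String concrete = template
    .replace("%t", "test_input_table")      // setTablename(TABLE_NAME_INPUT)
    .replace("%p", "id")                    // setPrimaryKeyColumn("id")
    .replace("%v", "<token of start row>")  // setStartRow(1); the operator derives the token value
    .replace("%l", "10");                   // setLimit(10)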
Use of org.apache.apex.malhar.lib.util.FieldInfo in project apex-malhar by apache.
The class JDBCLoaderTest, method testMysqlDBLookupIncludeAllKeys:
@Test
public void testMysqlDBLookupIncludeAllKeys() throws Exception {
  CountDownLatch latch = new CountDownLatch(1);
  ArrayList<FieldInfo> lookupKeys = new ArrayList<>();
  lookupKeys.add(new FieldInfo("ID", "ID", FieldInfo.SupportType.INTEGER));
  ArrayList<FieldInfo> includeKeys = new ArrayList<>();
  includeKeys.add(new FieldInfo("ID", "ID", FieldInfo.SupportType.INTEGER));
  includeKeys.add(new FieldInfo("NAME", "NAME", FieldInfo.SupportType.STRING));
  includeKeys.add(new FieldInfo("AGE", "AGE", FieldInfo.SupportType.INTEGER));
  includeKeys.add(new FieldInfo("ADDRESS", "ADDRESS", FieldInfo.SupportType.STRING));
  includeKeys.add(new FieldInfo("SALARY", "SALARY", FieldInfo.SupportType.DOUBLE));
  testMeta.dbloader.setFieldInfo(lookupKeys, includeKeys);
  // The latch is never counted down; this is simply a bounded wait before querying.
  latch.await(1000, TimeUnit.MILLISECONDS);
  ArrayList<Object> keys = new ArrayList<>();
  keys.add(4);
  ArrayList<Object> columnInfo = (ArrayList<Object>) testMeta.dbloader.get(keys);
  Assert.assertEquals("ID", 4, columnInfo.get(0));
  Assert.assertEquals("NAME", "Mark", columnInfo.get(1).toString().trim());
  Assert.assertEquals("AGE", 25, columnInfo.get(2));
  Assert.assertEquals("ADDRESS", "Rich-Mond", columnInfo.get(3).toString().trim());
  Assert.assertEquals("SALARY", 65000.0, columnInfo.get(4));
}