Use of com.sap.hadoop.windowing.query2.definition.QueryDef in project SQLWindowing by hbutani.
The class Executor, method executeSelectList.
/**
 * For each row in the partition:
 * 1. Evaluate the where condition, if present.
 * 2. Evaluate the value of each column in the select list.
 * 3. Forward the row as an object array or as a serialized writable,
 *    depending on the ForwardSink implementation.
 * @param qDef the query definition
 * @param oPart the partition whose rows are processed
 * @param rS the sink that output rows are forwarded to
 * @throws WindowingException
 */
@SuppressWarnings({ "rawtypes", "unchecked" })
public static void executeSelectList(QueryDef qDef, Partition oPart, ForwardSink rS) throws WindowingException {
    ArrayList<ColumnDef> cols = qDef.getSelectList().getColumns();
    ObjectInspector selectOI = qDef.getSelectList().getOI();
    SerDe oSerDe = qDef.getOutput().getSerDe();
    Object[] output = new Object[cols.size()];

    WhereDef whDef = qDef.getWhere();
    boolean applyWhere = whDef != null;
    Converter whConverter = !applyWhere ? null
            : ObjectInspectorConverters.getConverter(whDef.getOI(),
                    PrimitiveObjectInspectorFactory.javaBooleanObjectInspector);
    ExprNodeEvaluator whCondEval = !applyWhere ? null : whDef.getExprEvaluator();

    Writable value = null;
    PartitionIterator<Object> pItr = oPart.iterator();
    RuntimeUtils.connectLeadLagFunctionsToPartition(qDef, pItr);

    while (pItr.hasNext()) {
        int colCnt = 0;
        ArrayList selectList = new ArrayList();
        Object oRow = pItr.next();

        if (applyWhere) {
            Object whCond = null;
            try {
                whCond = whCondEval.evaluate(oRow);
                whCond = whConverter.convert(whCond);
            } catch (HiveException he) {
                throw new WindowingException(he);
            }
            // skip rows that fail the where condition
            if (whCond == null || !((Boolean) whCond).booleanValue()) {
                continue;
            }
        }

        for (ColumnDef cDef : cols) {
            try {
                Object newCol = cDef.getExprEvaluator().evaluate(oRow);
                output[colCnt++] = newCol;
                selectList.add(newCol);
            } catch (HiveException he) {
                throw new WindowingException(he);
            }
        }

        if (rS.acceptObject()) {
            rS.collectOutput(output);
        } else {
            // serialize the row and forward it as a writable key-value pair for the outstream
            try {
                value = oSerDe.serialize(selectList, selectOI);
            } catch (SerDeException se) {
                throw new WindowingException(se);
            }
            rS.collectOutput(NullWritable.get(), value);
        }
    }
}
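The two collectOutput calls above imply a small ForwardSink contract: acceptObject() selects between the object-array path and the serialized-writable path. Below is a minimal sketch of a sink that takes the object path. The method signatures are inferred from the calls in executeSelectList, and the real interface in SQLWindowing may declare additional members; the class name PrintObjectSink is made up, and imports for project classes are omitted as in the snippets above.

import java.util.Arrays;
import org.apache.hadoop.io.Writable;

public class PrintObjectSink implements ForwardSink {
    // returning true routes executeSelectList to collectOutput(Object[]),
    // bypassing the serde entirely
    public boolean acceptObject() {
        return true;
    }

    public void collectOutput(Object[] row) {
        System.out.println(Arrays.toString(row));
    }

    public void collectOutput(Writable key, Writable value) {
        // not reached while acceptObject() returns true
        throw new UnsupportedOperationException("writable path disabled");
    }
}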
Use of com.sap.hadoop.windowing.query2.definition.QueryDef in project SQLWindowing by hbutani.
The class LocalExecutor, method execute.
public void execute(QueryDef qDef, WindowingShell wShell) throws WindowingException {
    QueryTranslationInfo tInfo = qDef.getTranslationInfo();
    HiveTableSpec hvTblSpec = qDef.getInput().getHiveTableSpec();
    WindowingInput wIn = IOUtils.createTableWindowingInput(hvTblSpec.getDbName(),
            hvTblSpec.getTableName(), tInfo.getHiveCfg());
    //Partition p = IOUtils.createPartition(partClassName, partMemSize, wIn);
    PartitionsIterator partsItr = new PartitionsIterator(wIn, qDef);
    while (partsItr.hasNext()) {
        Partition p = partsItr.next();
        Partition oP = executeChain(qDef, p);
        //IOUtils.dumpPartition(oP, System.out);
        executeSelectList(qDef, oP, new SysOutRS(out));
    }
}
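execute reads the input Hive table as a stream of partitions, runs each through the table-function chain, and forwards the result to a SysOutRS sink. The sketch below restates that control flow with plain JDK stand-in types so it compiles on its own; none of these names are SQLWindowing API.

import java.util.Iterator;
import java.util.List;
import java.util.function.Consumer;
import java.util.function.Function;

class PartitionPipelineSketch {
    // partitions plays the role of PartitionsIterator, chain of executeChain,
    // and sink of executeSelectList feeding a ForwardSink
    static <R> void run(Iterator<List<R>> partitions,
                        Function<List<R>, List<R>> chain,
                        Consumer<List<R>> sink) {
        while (partitions.hasNext()) {
            List<R> p = partitions.next();
            sink.accept(chain.apply(p));
        }
    }
}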
Use of com.sap.hadoop.windowing.query2.definition.QueryDef in project SQLWindowing by hbutani.
The class RuntimeUtils, method createFirstPartitionForChain.
/**
 * Create a new partition.
 * The input OI is used to evaluate rows appended to the partition.
 * The serde is chosen based on whether the query has a map phase;
 * the serde's OI is used by PTFs to evaluate the output of the partition.
 * @param qDef the query definition
 * @param oi the ObjectInspector for rows appended to the partition
 * @param hiveConf the Hive configuration
 * @param isMapSide whether the partition is being created on the map side
 * @return the first partition of the execution chain
 * @throws WindowingException
 */
public static Partition createFirstPartitionForChain(QueryDef qDef, ObjectInspector oi,
        HiveConf hiveConf, boolean isMapSide) throws WindowingException {
    TableFuncDef tabDef = getFirstTableFunction(qDef);
    TableFunctionEvaluator tEval = tabDef.getFunction();
    String partClassName = tEval.getPartitionClass();
    int partMemSize = tEval.getPartitionMemSize();
    SerDe serde = tabDef.getInput().getSerde();
    return new Partition(partClassName, partMemSize, serde, (StructObjectInspector) oi);
}
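A caller needs an ObjectInspector describing the chain's input rows. One plausible way to obtain it is from the input serde via Hive's standard SerDe.getObjectInspector(); whether getFirstTableFunction is accessible outside RuntimeUtils is an assumption here, so the helper below is a hedged sketch rather than project code.

static Partition firstPartitionOfChain(QueryDef qDef, HiveConf hiveConf)
        throws WindowingException, SerDeException {
    // mirrors the accessors used in createFirstPartitionForChain above
    TableFuncDef tabDef = RuntimeUtils.getFirstTableFunction(qDef);
    SerDe inSerDe = tabDef.getInput().getSerde();
    // getObjectInspector() is the standard Hive SerDe call
    ObjectInspector oi = inSerDe.getObjectInspector();
    return RuntimeUtils.createFirstPartitionForChain(qDef, oi, hiveConf, false);
}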
Use of com.sap.hadoop.windowing.query2.definition.QueryDef in project SQLWindowing by hbutani.
The class MRExecutor, method createOutputTableDesc.
/**
 * Use the settings on the QueryOutputDef to define the properties
 * of the query's output table in Hive.
 * @param qDef the query definition
 * @return the TableDesc for the output table
 * @throws WindowingException
 */
static TableDesc createOutputTableDesc(QueryDef qDef) throws WindowingException {
    QueryOutputDef oDef = qDef.getOutput();
    Class<? extends SerDe> serDeClass = oDef.getSerDe().getClass();
    Properties p = oDef.getSpec().getSerDeProps();
    String columnNamesList = p.getProperty(Constants.LIST_COLUMNS);
    String columnTypesList = p.getProperty(Constants.LIST_COLUMN_TYPES);
    String fieldSeparator = p.getProperty(Constants.FIELD_DELIM,
            Integer.toString(Utilities.ctrlaCode));
    return PlanUtils.getTableDesc(serDeClass, fieldSeparator, columnNamesList,
            columnTypesList, false);
}
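For context, the properties read above are Hive's standard serde keys: Constants.LIST_COLUMNS is "columns", Constants.LIST_COLUMN_TYPES is "columns.types", and Constants.FIELD_DELIM is "field.delim". The sketch below shows what the output spec's serde properties might contain; the column names and types are made-up example values.

import java.util.Properties;
import org.apache.hadoop.hive.serde.Constants;

Properties p = new Properties();
p.setProperty(Constants.LIST_COLUMNS, "p_mfgr,p_name,rank");
p.setProperty(Constants.LIST_COLUMN_TYPES, "string,string,int");
// when FIELD_DELIM is absent, the code falls back to the string form of
// Utilities.ctrlaCode (1), Hive's code for its default Ctrl-A delimiter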