Use of org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator in project hive by apache.
The class DynamicValueRegistryTez, method init.
@Override
public void init(RegistryConf conf) throws Exception {
  RegistryConfTez rct = (RegistryConfTez) conf;
  for (String inputSourceName : rct.baseWork.getInputSourceToRuntimeValuesInfo().keySet()) {
    LOG.info("Runtime value source: " + inputSourceName);
    LogicalInput runtimeValueInput = rct.inputs.get(inputSourceName);
    RuntimeValuesInfo runtimeValuesInfo = rct.baseWork.getInputSourceToRuntimeValuesInfo().get(inputSourceName);
    // Setup deserializer/obj inspectors for the incoming data source
    Deserializer deserializer = ReflectionUtils.newInstance(runtimeValuesInfo.getTableDesc().getDeserializerClass(), null);
    deserializer.initialize(rct.conf, runtimeValuesInfo.getTableDesc().getProperties());
    ObjectInspector inspector = deserializer.getObjectInspector();
    // Set up col expressions for the dynamic values using this input
    List<ExprNodeEvaluator> colExprEvaluators = new ArrayList<ExprNodeEvaluator>();
    for (ExprNodeDesc expr : runtimeValuesInfo.getColExprs()) {
      ExprNodeEvaluator exprEval = ExprNodeEvaluatorFactory.get(expr, null);
      exprEval.initialize(inspector);
      colExprEvaluators.add(exprEval);
    }
    runtimeValueInput.start();
    List<Input> inputList = new ArrayList<Input>();
    inputList.add(runtimeValueInput);
    rct.processorContext.waitForAllInputsReady(inputList);
    KeyValueReader kvReader = (KeyValueReader) runtimeValueInput.getReader();
    long rowCount = 0;
    while (kvReader.next()) {
      Object row = deserializer.deserialize((Writable) kvReader.getCurrentValue());
      rowCount++;
      for (int colIdx = 0; colIdx < colExprEvaluators.size(); ++colIdx) {
        // Read each expression and save it to the value registry
        ExprNodeEvaluator eval = colExprEvaluators.get(colIdx);
        Object val = eval.evaluate(row);
        setValue(runtimeValuesInfo.getDynamicValueIDs().get(colIdx), val);
      }
    }
    // For now, expecting a single row (min/max, aggregated bloom filter), or no rows
    if (rowCount == 0) {
      LOG.debug("No input rows from " + inputSourceName + ", filling dynamic values with nulls");
      for (int colIdx = 0; colIdx < colExprEvaluators.size(); ++colIdx) {
        ExprNodeEvaluator eval = colExprEvaluators.get(colIdx);
        setValue(runtimeValuesInfo.getDynamicValueIDs().get(colIdx), null);
      }
    } else if (rowCount > 1) {
      throw new IllegalStateException("Expected 0 or 1 rows from " + inputSourceName + ", got " + rowCount);
    }
  }
}
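Every call site on this page follows the same evaluator lifecycle: build an ExprNodeDesc, obtain an ExprNodeEvaluator from ExprNodeEvaluatorFactory, initialize it against the ObjectInspector of the incoming rows, then call evaluate once per row. A minimal, self-contained sketch of that lifecycle (the column name, type, and row value are made up for illustration, not taken from the Hive code above):

import java.util.Arrays;

import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator;
import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluatorFactory;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class ExprNodeEvaluatorSketch {
  public static void main(String[] args) throws HiveException {
    // Shape of the incoming rows: a struct with a single long column "min_col" (hypothetical).
    StructObjectInspector rowOI = ObjectInspectorFactory.getStandardStructObjectInspector(
        Arrays.asList("min_col"),
        Arrays.<ObjectInspector>asList(PrimitiveObjectInspectorFactory.javaLongObjectInspector));
    // Expression that projects the column; in the registry code this comes from
    // RuntimeValuesInfo.getColExprs().
    ExprNodeColumnDesc expr = new ExprNodeColumnDesc(TypeInfoFactory.longTypeInfo, "min_col", null, false);
    // get -> initialize -> evaluate, the same sequence DynamicValueRegistryTez.init() runs.
    ExprNodeEvaluator eval = ExprNodeEvaluatorFactory.get(expr);
    eval.initialize(rowOI);
    Object row = Arrays.<Object>asList(42L); // one deserialized row
    Object val = eval.evaluate(row);         // 42L, the kind of value init() passes to setValue(...)
    System.out.println(val);
  }
}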
Use of org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator in project SQLWindowing by hbutani.
The class NPath, method execute.
@Override
public void execute(PartitionIterator<Object> pItr, Partition outP) throws WindowingException {
  while (pItr.hasNext()) {
    Object iRow = pItr.next();
    SymbolFunctionResult syFnRes = SymbolFunction.match(syFn, iRow, pItr);
    if (syFnRes.matches) {
      int sz = syFnRes.nextRow - (pItr.getIndex() - 1);
      Object selectListInput = NPathUtils.getSelectListInput(iRow, tDef.getInput().getOI(), pItr, sz);
      ArrayList<Object> oRow = new ArrayList<Object>();
      for (ExprNodeEvaluator resExprEval : resultExprEvals) {
        try {
          oRow.add(resExprEval.evaluate(selectListInput));
        } catch (HiveException he) {
          throw new WindowingException(he);
        }
      }
      outP.append(oRow);
    }
  }
}
Use of org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator in project SQLWindowing by hbutani.
The class ResultExpressionParser, method buildSelectListEvaluators.
private void buildSelectListEvaluators() throws WindowingException {
  selectListExprEvaluators = new ArrayList<ExprNodeEvaluator>();
  selectListExprOIs = new ArrayList<ObjectInspector>();
  ArrayList<String> selectListExprNames = new ArrayList<String>();
  int i = 0;
  Iterator<Object> it = selectSpec.getColumnListAndAlias();
  while (it.hasNext()) {
    Object[] selectColDetails = (Object[]) it.next();
    String selectColName = (String) selectColDetails[1];
    ASTNode selectColumnNode = (ASTNode) selectColDetails[2];
    ExprNodeDesc selectColumnExprNode = TranslateUtils.buildExprNode(selectColumnNode, selectListInputTypeCheckCtx);
    ExprNodeEvaluator selectColumnExprEval = ExprNodeEvaluatorFactory.get(selectColumnExprNode);
    ObjectInspector selectColumnOI = null;
    try {
      selectColumnOI = selectColumnExprEval.initialize(selectListInputOI);
    } catch (HiveException he) {
      throw new WindowingException(he);
    }
    selectColName = getColumnName(selectColName, selectColumnExprNode, i);
    selectListExprEvaluators.add(selectColumnExprEval);
    selectListExprOIs.add(selectColumnOI);
    selectListExprNames.add(selectColName);
    i++;
  }
  selectListOutputOI = ObjectInspectorFactory.getStandardStructObjectInspector(selectListExprNames, selectListExprOIs);
}
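The struct ObjectInspector assembled on the last line is what downstream code uses to interpret each output row, which is simply the ArrayList of the evaluators' results in the same order as selectListExprNames. A rough sketch of reading such a row back through it (the helper class and method names are made up for illustration):

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;

public class SelectListRowDumper {
  // Interpret one output row (the ArrayList the evaluators filled) through the
  // struct ObjectInspector built at the end of buildSelectListEvaluators().
  public static void dump(StructObjectInspector selectListOutputOI, Object oRow) {
    for (StructField field : selectListOutputOI.getAllStructFieldRefs()) {
      Object fieldVal = selectListOutputOI.getStructFieldData(oRow, field);
      ObjectInspector fieldOI = field.getFieldObjectInspector();
      System.out.println(field.getFieldName() + " = " + fieldVal + " (" + fieldOI.getTypeName() + ")");
    }
  }
}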
Use of org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator in project SQLWindowing by hbutani.
The class SymbolParser, method parse.
public void parse() throws WindowingException {
  symbols = patternStr.split("\\.");
  symbolFunctions = new ArrayList<SymbolFunction>();
  for (String symbol : symbols) {
    boolean isStar = symbol.endsWith("*");
    boolean isPlus = symbol.endsWith("+");
    symbol = (isStar || isPlus) ? symbol.substring(0, symbol.length() - 1) : symbol;
    Object[] symbolDetails = symbolExprEvalMap.get(symbol.toLowerCase());
    if (symbolDetails == null) {
      throw new WindowingException(sprintf("Unknown Symbol %s", symbol));
    }
    ExprNodeEvaluator symbolExprEval = (ExprNodeEvaluator) symbolDetails[0];
    ObjectInspector symbolExprOI = (ObjectInspector) symbolDetails[1];
    SymbolFunction sFn = new Symbol(symbolExprEval, symbolExprOI);
    if (isStar) {
      sFn = new Star(sFn);
    } else if (isPlus) {
      sFn = new Plus(sFn);
    }
    symbolFunctions.add(sFn);
  }
  symbolFnChain = new Chain(symbolFunctions);
}
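For example, assuming the npath-style pattern syntax this parser accepts, a two-symbol pattern with a repetition suffix is handled like this (the symbol names are hypothetical):

// "LATE.LATE+" means: one row matching symbol LATE, followed by one or more rows matching LATE.
String patternStr = "LATE.LATE+";
String[] symbols = patternStr.split("\\."); // -> ["LATE", "LATE+"]
// The trailing '+' on the second element is stripped, its Symbol is wrapped in Plus,
// and the resulting chain is [Symbol(LATE), Plus(Symbol(LATE))].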
Use of org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator in project SQLWindowing by hbutani.
The class QueryDefDeserializer, method visit.
/*
 * Recreate the ExprEvaluator and OI using the current inputInfo. This is the
 * inputInfo on the first InputDef in the chain if the query does not have a map
 * phase; else it is the mapInputInfo on the table function definition.
 */
@Override
public void visit(ValueBoundaryDef boundary) throws WindowingException {
  ExprNodeEvaluator exprEval = WindowingExprNodeEvaluatorFactory.get(tInfo, boundary.getExprNode());
  ObjectInspector oi = TranslateUtils.initExprNodeEvaluator(qDef, boundary.getExprNode(), exprEval, inputInfo);
  boundary.setExprEvaluator(exprEval);
  boundary.setOI(oi);
}
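TranslateUtils.initExprNodeEvaluator is SQLWindowing-specific; the standard Hive step it builds on is the evaluator's initialize call, which re-binds the evaluator to the current input's ObjectInspector and yields the output ObjectInspector stored on the boundary. A minimal sketch of that core step only (class, method, and parameter names are illustrative, and the windowing-specific wiring is omitted):

import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;

public class BoundaryRebind {
  // Re-bind an already-built evaluator to the ObjectInspector of the input the
  // deserialized plan is now reading, and return the resulting output ObjectInspector.
  public static ObjectInspector rebind(ExprNodeEvaluator exprEval, ObjectInspector currentInputOI)
      throws HiveException {
    return exprEval.initialize(currentInputOI);
  }
}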