Example usage of org.apache.hadoop.hive.ql.plan.ExprNodeDesc in the Apache Hive project: class HiveGBOpConvUtil, method getValueKeysForRS.
/**
 * Builds the value expressions for a ReduceSink that follows a map-side GroupBy.
 *
 * All columns of the input operator's schema at position {@code aggStartPos} and
 * beyond become RS values; for each one this registers an internal output name,
 * a {@code ColumnInfo} (prefixed with {@code VALUE.}), and a column-to-expression
 * mapping in the supplied collections.
 *
 * @param inOp map-side GroupBy operator feeding the ReduceSink
 * @param aggStartPos index in {@code inOp}'s schema where value columns begin
 * @param outputKeyColumnNames receives the generated internal column names
 * @param colInfoLst receives the generated {@code ColumnInfo}s
 * @param colExprMap receives internal-name to expression mappings
 * @param addEmptyTabAlias if true, use "" as the table alias, else null
 * @param setColToNonVirtual passed through to expression generation
 * @return the list of value expressions (empty when no column lies at or past
 *         {@code aggStartPos})
 * @throws SemanticException on expression generation failure
 */
private static ArrayList<ExprNodeDesc> getValueKeysForRS(Operator inOp, int aggStartPos, List<String> outputKeyColumnNames, ArrayList<ColumnInfo> colInfoLst, Map<String, ExprNodeDesc> colExprMap, boolean addEmptyTabAlias, boolean setColToNonVirtual) throws SemanticException {
  List<ColumnInfo> inputSignature = inOp.getSchema().getSignature();
  if (aggStartPos >= inputSignature.size()) {
    // Nothing at or past the aggregation start position: no value columns.
    return new ArrayList<ExprNodeDesc>();
  }
  ArrayList<ExprNodeDesc> valueExprs = ExprNodeDescUtils.genExprNodeDesc(inOp, aggStartPos, inputSignature.size() - 1, true, setColToNonVirtual);
  // Loop-invariant alias choice hoisted out of the loop.
  // TODO: Verify if the alias distinction is needed (why can't it always be null/empty?)
  String tabAlias = addEmptyTabAlias ? "" : null;
  for (int pos = 0; pos < valueExprs.size(); pos++) {
    ExprNodeDesc valueExpr = valueExprs.get(pos);
    String internalName = SemanticAnalyzer.getColumnInternalName(pos);
    outputKeyColumnNames.add(internalName);
    ColumnInfo valueColInfo = new ColumnInfo(Utilities.ReduceField.VALUE.toString() + "." + internalName, valueExpr.getTypeInfo(), tabAlias, false);
    colInfoLst.add(valueColInfo);
    colExprMap.put(valueColInfo.getInternalName(), valueExpr);
  }
  return valueExprs;
}
Example usage of org.apache.hadoop.hive.ql.plan.ExprNodeDesc in the Apache Hive project: class HiveGBOpConvUtil, method genReduceGBRS.
/**
 * Generates the ReduceSink that follows the first reduce-side GroupBy
 * (used in multi-stage group-by plans).
 *
 * @param inputOpAf attributes wrapping the reduce-side GB1 operator
 * @param gbInfo translated group-by metadata
 * @return operator attributes wrapping the new ReduceSinkOperator
 * @throws SemanticException on plan construction failure
 */
private static OpAttr genReduceGBRS(OpAttr inputOpAf, GBInfo gbInfo) throws SemanticException {
  GroupByOperator reduceSideGB1 = (GroupByOperator) inputOpAf.inputs.get(0);
  List<ColumnInfo> gb1ColInfoLst = reduceSideGB1.getSchema().getSignature();

  Map<String, ExprNodeDesc> colExprMap = new HashMap<String, ExprNodeDesc>();
  ArrayList<String> outputColumnNames = new ArrayList<String>();
  ArrayList<ColumnInfo> colInfoLst = new ArrayList<ColumnInfo>();

  // Keys: the group-by key columns of GB1.
  ArrayList<ExprNodeDesc> reduceKeys = getReduceKeysForRS(reduceSideGB1, 0, gbInfo.gbKeys.size() - 1, outputColumnNames, false, colInfoLst, colExprMap, true, true);
  if (inclGrpSetInReduceSide(gbInfo)) {
    // Grouping-set id column sits right after the GB keys in GB1's schema.
    addGrpSetCol(false, gb1ColInfoLst.get(reduceKeys.size()).getInternalName(), true, reduceKeys, outputColumnNames, colInfoLst, colExprMap);
  }
  // Values: everything past GB1's key columns (the aggregation buffers).
  ArrayList<ExprNodeDesc> reduceValues = getValueKeysForRS(reduceSideGB1, reduceSideGB1.getConf().getKeys().size(), outputColumnNames, colInfoLst, colExprMap, true, true);

  ReduceSinkDesc rsDesc = PlanUtils.getReduceSinkDesc(reduceKeys, reduceValues, outputColumnNames, true, -1, getNumPartFieldsForReduceSideRS(gbInfo), getParallelismForReduceSideRS(gbInfo), AcidUtils.Operation.NOT_ACID);
  ReduceSinkOperator rsOp = (ReduceSinkOperator) OperatorFactory.getAndMakeChild(rsDesc, new RowSchema(colInfoLst), reduceSideGB1);
  rsOp.setColumnExprMap(colExprMap);

  return new OpAttr("", new HashSet<Integer>(), rsOp);
}
Example usage of org.apache.hadoop.hive.ql.plan.ExprNodeDesc in the Apache Hive project: class PTFTranslator, method initExprNodeEvaluator.
/**
 * Initializes an expression evaluator against the given input shape and wires
 * up any Lead/Lag functions found in the expression tree.
 *
 * For every Lead/Lag call in the tree: a duplicate evaluator is built for the
 * function's first argument, initialized with the same input ObjectInspector,
 * and installed on the GenericUDFLeadLag instance.
 *
 * @param exprEval evaluator for the full expression
 * @param exprNode the expression being evaluated
 * @param inpShape input shape supplying the ObjectInspector
 * @return the output ObjectInspector produced by initialization
 * @throws HiveException on evaluator initialization failure
 */
private ObjectInspector initExprNodeEvaluator(ExprNodeEvaluator exprEval, ExprNodeDesc exprNode, ShapeDetails inpShape) throws HiveException {
  ObjectInspector resultOI = exprEval.initialize(inpShape.getOI());

  List<ExprNodeGenericFuncDesc> leadLagExprs = llInfo.getLLFuncExprsInTopExpr(exprNode);
  if (leadLagExprs == null) {
    return resultOI;
  }
  for (ExprNodeGenericFuncDesc leadLagExpr : leadLagExprs) {
    // Duplicate evaluator for the Lead/Lag function's first argument,
    // initialized against the same input shape, then handed to the UDF.
    ExprNodeDesc firstArg = leadLagExpr.getChildren().get(0);
    ExprNodeEvaluator argEval = WindowingExprNodeEvaluatorFactory.get(llInfo, firstArg);
    argEval.initialize(inpShape.getOI());
    ((GenericUDFLeadLag) leadLagExpr.getGenericUDF()).setExprEvaluator(argEval);
  }
  return resultOI;
}
Example usage of org.apache.hadoop.hive.ql.plan.ExprNodeDesc in the Apache Hive project: class PTFTranslator, method buildExpressionDef.
/**
 * Translates an AST expression into a PTFExpressionDef: generates the
 * ExprNodeDesc, builds and initializes its evaluator (including Lead/Lag
 * wiring), and records the AST string form.
 *
 * @param inpShape input shape providing the row resolver, type-check context,
 *                 and ObjectInspector
 * @param arg the expression AST node
 * @return a fully populated PTFExpressionDef
 * @throws HiveException on translation or initialization failure
 */
public PTFExpressionDef buildExpressionDef(ShapeDetails inpShape, ASTNode arg) throws HiveException {
  ExprNodeDesc exprNode = semAly.genExprNodeDesc(arg, inpShape.getRr(), inpShape.getTypeCheckCtx());
  ExprNodeEvaluator exprEval = WindowingExprNodeEvaluatorFactory.get(llInfo, exprNode);
  ObjectInspector outputOI = initExprNodeEvaluator(exprEval, exprNode, inpShape);

  PTFExpressionDef exprDef = new PTFExpressionDef();
  exprDef.setExpressionTreeString(arg.toStringTree());
  exprDef.setExprNode(exprNode);
  exprDef.setExprEvaluator(exprEval);
  exprDef.setOI(outputOI);
  return exprDef;
}
Example usage of org.apache.hadoop.hive.ql.plan.ExprNodeDesc in the Apache Hive project: class TableAccessAnalyzer, method genColNameMap.
/*
 * Maps a subset of an operator's output column names back to its input column
 * names, rewriting the supplied list in place.
 *
 * Only SelectOperators can rename columns here; any other operator is assumed
 * to pass column names through unchanged. For a select list:
 *  - an output produced by a constant is dropped from the list,
 *  - an output produced by a column reference is renamed to the input column,
 *  - anything else (a computed expression) makes the mapping impossible.
 *
 * Returns true when the mapping succeeded (list updated in place), false when
 * the mapping cannot be generated.
 */
private static boolean genColNameMap(Operator<? extends OperatorDesc> op, List<String> currColNames) {
  assert (op.columnNamesRowResolvedCanBeObtained());
  if (!(op instanceof SelectOperator)) {
    // Non-select operators don't rename columns; nothing to translate.
    return true;
  }
  SelectDesc selDesc = ((SelectOperator) op).getConf();
  if (selDesc.isSelStarNoCompute()) {
    // SELECT * with no computation: names pass through unchanged.
    return true;
  }

  List<ExprNodeDesc> selectExprs = selDesc.getColList();
  List<String> outputNames = selDesc.getOutputColumnNames();
  for (int pos = 0; pos < selectExprs.size(); pos++) {
    String outputName = outputNames.get(pos);
    // Skip outputs we were not asked to map.
    if (!currColNames.contains(outputName)) {
      continue;
    }
    ExprNodeDesc selectExpr = selectExprs.get(pos);
    if (selectExpr instanceof ExprNodeConstantDesc) {
      // Constants have no input column; drop them from the tracked set.
      currColNames.remove(outputName);
    } else if (selectExpr instanceof ExprNodeColumnDesc) {
      String inputName = ((ExprNodeColumnDesc) selectExpr).getColumn();
      if (!outputName.equals(inputName)) {
        currColNames.set(currColNames.indexOf(outputName), inputName);
      }
    } else {
      // Computed expression: the column map cannot be generated.
      return false;
    }
  }
  return true;
}
End of aggregated examples.