Use of org.apache.hadoop.hive.ql.parse.WindowingSpec.WindowExpressionSpec in project hive by apache.
In class SemanticAnalyzer, method parseSelect:
public static ArrayList<WindowExpressionSpec> parseSelect(String selectExprStr)
    throws SemanticException {
  ASTNode selNode = null;
  try {
    ParseDriver pd = new ParseDriver();
    selNode = pd.parseSelect(selectExprStr, null);
  } catch (ParseException pe) {
    throw new SemanticException(pe);
  }

  ArrayList<WindowExpressionSpec> selSpec = new ArrayList<WindowExpressionSpec>();
  int childCount = selNode.getChildCount();
  for (int i = 0; i < childCount; i++) {
    ASTNode selExpr = (ASTNode) selNode.getChild(i);
    if (selExpr.getType() != HiveParser.TOK_SELEXPR) {
      throw new SemanticException(String.format(
          "Only Select expressions supported in dynamic select list: %s", selectExprStr));
    }
    ASTNode expr = (ASTNode) selExpr.getChild(0);
    if (expr.getType() == HiveParser.TOK_ALLCOLREF) {
      throw new SemanticException(String.format(
          "'%s' column not allowed in dynamic select list", selectExprStr));
    }
    // An explicit alias appears as a second Identifier child of TOK_SELEXPR.
    ASTNode aliasNode = (selExpr.getChildCount() > 1
        && selExpr.getChild(1).getType() == HiveParser.Identifier)
        ? (ASTNode) selExpr.getChild(1) : null;
    String alias = null;
    if (aliasNode != null) {
      alias = aliasNode.getText();
    } else {
      // No explicit alias: derive one from the expression itself.
      String[] tabColAlias = getColAlias(selExpr, null, null, true, -1);
      alias = tabColAlias[1];
    }
    WindowExpressionSpec exprSpec = new WindowExpressionSpec();
    exprSpec.setAlias(alias);
    exprSpec.setExpression(expr);
    selSpec.add(exprSpec);
  }
  return selSpec;
}
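For orientation, a minimal usage sketch of this entry point, assuming a caller that already has Hive's org.apache.hadoop.hive.ql.parse classes on the classpath; the select-expression string is illustrative, not from the Hive sources:

  ArrayList<WindowExpressionSpec> specs = SemanticAnalyzer.parseSelect(
      "rank() over (partition by deptno order by sal) as r");
  for (WindowExpressionSpec spec : specs) {
    // alias and expression are exactly the fields populated in the loop above
    System.out.println(spec.getAlias() + " -> " + spec.getExpression().toStringTree());
  }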
Use of org.apache.hadoop.hive.ql.parse.WindowingSpec.WindowExpressionSpec in project hive by apache.
In class SemanticAnalyzer, method genWindowingPlan:
//--------------------------- Windowing handling: PTFInvocationSpec to PTFDesc --------------------
Operator genWindowingPlan(QB qb, WindowingSpec wSpec, Operator input) throws SemanticException {
  wSpec.validateAndMakeEffective();
  if (!isCBOExecuted() && !qb.getParseInfo().getDestToGroupBy().isEmpty()) {
    // If CBO did not optimize the query, we might need to replace grouping function
    final String selClauseName = qb.getParseInfo().getClauseNames().iterator().next();
    final boolean cubeRollupGrpSetPresent = (!qb.getParseInfo().getDestRollups().isEmpty()
        || !qb.getParseInfo().getDestGroupingSets().isEmpty()
        || !qb.getParseInfo().getDestCubes().isEmpty());
    for (WindowExpressionSpec wExprSpec : wSpec.getWindowExpressions()) {
      // Special handling of grouping function
      wExprSpec.setExpression(rewriteGroupingFunctionAST(
          getGroupByForClause(qb.getParseInfo(), selClauseName),
          wExprSpec.getExpression(), !cubeRollupGrpSetPresent));
    }
  }
  WindowingComponentizer groups = new WindowingComponentizer(wSpec);
  RowResolver rr = opParseCtx.get(input).getRowResolver();
  // One iteration per group of window functions that share a partitioning:
  // each group gets its own ReduceSink (shuffle) followed by a PTF operator.
  while (groups.hasNext()) {
    wSpec = groups.next(conf, this, unparseTranslator, rr);
    input = genReduceSinkPlanForWindowing(wSpec, rr, input);
    rr = opParseCtx.get(input).getRowResolver();
    PTFTranslator translator = new PTFTranslator();
    PTFDesc ptfDesc = translator.translate(wSpec, this, conf, rr, unparseTranslator);
    RowResolver ptfOpRR = ptfDesc.getFuncDef().getOutputShape().getRr();
    input = putOpInsertMap(OperatorFactory.getAndMakeChild(
        ptfDesc, new RowSchema(ptfOpRR.getColumnInfos()), input), ptfOpRR);
    input = genSelectAllDesc(input);
    rr = ptfOpRR;
  }
  return input;
}
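To see why the loop can run more than once, here is a hedged sketch of the componentizer loop in isolation. It assumes a validated WindowingSpec and the analyzer state used above (conf, unparseTranslator, rr; the code runs inside SemanticAnalyzer, so 'this' is the analyzer):

  // For a query such as
  //   SELECT rank() OVER (PARTITION BY deptno ORDER BY sal),
  //          rank() OVER (PARTITION BY job    ORDER BY sal) ...
  // the two OVER clauses partition differently and cannot share one shuffle,
  // so the componentizer yields two components, hence two ReduceSink/PTF pairs.
  WindowingComponentizer groups = new WindowingComponentizer(wSpec);
  while (groups.hasNext()) {
    WindowingSpec component = groups.next(conf, this, unparseTranslator, rr);
    // every window function in 'component' shares one PartitioningSpec
  }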
Use of org.apache.hadoop.hive.ql.parse.WindowingSpec.WindowExpressionSpec in project hive by apache.
In class PTFTranslator, method buildRowResolverForWindowing:
protected RowResolver buildRowResolverForWindowing(WindowTableFunctionDef def)
    throws SemanticException {
  RowResolver rr = new RowResolver();
  HashMap<String, WindowExpressionSpec> aliasToExprMap = windowingSpec.getAliasToWdwExpr();
  /*
   * add Window Functions
   */
  for (WindowFunctionDef wFnDef : def.getWindowFunctions()) {
    ASTNode ast = aliasToExprMap.get(wFnDef.getAlias()).getExpression();
    ObjectInspector wFnOI = null;
    if (wFnDef.isPivotResult()) {
      // pivoted results are lists; expose the element type, not the list type
      wFnOI = ((ListObjectInspector) wFnDef.getOI()).getListElementObjectInspector();
    } else {
      wFnOI = wFnDef.getOI();
    }
    ColumnInfo cInfo = new ColumnInfo(wFnDef.getAlias(),
        TypeInfoUtils.getTypeInfoFromObjectInspector(wFnOI), null, true, true);
    rr.putExpression(ast, cInfo);
  }

  // inputRR is the translator's input RowResolver (passed to init());
  // inpRR is the resolver of the raw-input shape built during translation.
  RowResolver inpRR = def.getRawInputShape().getRr();
  /*
   * add columns from inpRR
   */
  for (ColumnInfo inpCInfo : inputRR.getColumnInfos()) {
    ColumnInfo cInfo = new ColumnInfo(inpCInfo);
    ASTNode inExpr = PTFTranslator.getASTNode(inpCInfo, inpRR);
    if (inExpr != null) {
      rr.putExpression(inExpr, cInfo);
    } else {
      String[] tabColAlias = inputRR.reverseLookup(inpCInfo.getInternalName());
      if (tabColAlias != null) {
        rr.put(tabColAlias[0], tabColAlias[1], cInfo);
      } else {
        rr.put(inpCInfo.getTabAlias(), inpCInfo.getAlias(), cInfo);
      }
    }
    String[] altMapping = inputRR.getAlternateMappings(inpCInfo.getInternalName());
    if (altMapping != null) {
      rr.put(altMapping[0], altMapping[1], cInfo);
    }
  }
  return rr;
}
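A hedged inspection sketch of the result: the resolver built above maps the window-function aliases first, then the carried-forward input columns. It assumes 'def' is a translated WindowTableFunctionDef as in the method:

  RowResolver wdwRR = buildRowResolverForWindowing(def);
  for (ColumnInfo ci : wdwRR.getColumnInfos()) {
    // window-function outputs precede the pass-through input columns,
    // matching the insertion order in the method above
    System.out.println(ci.getInternalName() + " : " + ci.getType());
  }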
Use of org.apache.hadoop.hive.ql.parse.WindowingSpec.WindowExpressionSpec in project hive by apache.
In class PTFTranslator, method translate:
public PTFDesc translate(WindowingSpec wdwSpec, SemanticAnalyzer semAly, HiveConf hCfg,
    RowResolver inputRR, UnparseTranslator unparseT) throws SemanticException {
  init(semAly, hCfg, inputRR, unparseT);
  windowingSpec = wdwSpec;
  ptfDesc = new PTFDesc();
  ptfDesc.setCfg(hCfg);
  ptfDesc.setLlInfo(llInfo);

  WindowTableFunctionDef wdwTFnDef = new WindowTableFunctionDef();
  ptfDesc.setFuncDef(wdwTFnDef);

  PTFQueryInputSpec inpSpec = new PTFQueryInputSpec();
  inpSpec.setType(PTFQueryInputType.WINDOWING);
  wdwTFnDef.setInput(translate(inpSpec, 0));
  ShapeDetails inpShape = wdwTFnDef.getInput().getOutputShape();

  WindowingTableFunctionResolver tFn = (WindowingTableFunctionResolver)
      FunctionRegistry.getTableFunctionResolver(FunctionRegistry.WINDOWING_TABLE_FUNCTION);
  if (tFn == null) {
    throw new SemanticException(String.format("Internal Error: Unknown Table Function %s",
        FunctionRegistry.WINDOWING_TABLE_FUNCTION));
  }
  wdwTFnDef.setName(FunctionRegistry.WINDOWING_TABLE_FUNCTION);
  wdwTFnDef.setResolverClassName(tFn.getClass().getName());
  wdwTFnDef.setAlias("ptf_" + 1);
  wdwTFnDef.setExpressionTreeString(null);
  wdwTFnDef.setTransformsRawInput(false);
  tFn.initialize(hCfg, ptfDesc, wdwTFnDef);
  TableFunctionEvaluator tEval = tFn.getEvaluator();
  wdwTFnDef.setTFunction(tEval);
  wdwTFnDef.setCarryForwardNames(tFn.carryForwardNames());
  wdwTFnDef.setRawInputShape(inpShape);

  PartitioningSpec partiSpec = wdwSpec.getQueryPartitioningSpec();
  if (partiSpec == null) {
    throw new SemanticException(
        "Invalid use of Windowing: there is no Partitioning associated with Windowing");
  }
  PartitionDef partDef = translate(inpShape, wdwSpec.getQueryPartitionSpec());
  OrderDef ordDef = translate(inpShape, wdwSpec.getQueryOrderSpec(), partDef);
  wdwTFnDef.setPartition(partDef);
  wdwTFnDef.setOrder(ordDef);

  /*
   * process Wdw functions
   */
  ArrayList<WindowFunctionDef> windowFunctions = new ArrayList<WindowFunctionDef>();
  if (wdwSpec.getWindowExpressions() != null) {
    for (WindowExpressionSpec expr : wdwSpec.getWindowExpressions()) {
      if (expr instanceof WindowFunctionSpec) {
        WindowFunctionDef wFnDef = translate(wdwTFnDef, (WindowFunctionSpec) expr);
        windowFunctions.add(wFnDef);
      }
    }
    wdwTFnDef.setWindowFunctions(windowFunctions);
  }

  /*
   * set outputFromWdwFnProcessing
   */
  ArrayList<String> aliases = new ArrayList<String>();
  ArrayList<ObjectInspector> fieldOIs = new ArrayList<ObjectInspector>();
  for (WindowFunctionDef wFnDef : windowFunctions) {
    aliases.add(wFnDef.getAlias());
    if (wFnDef.isPivotResult()) {
      fieldOIs.add(((ListObjectInspector) wFnDef.getOI()).getListElementObjectInspector());
    } else {
      fieldOIs.add(wFnDef.getOI());
    }
  }
  PTFTranslator.addInputColumnsToList(inpShape, aliases, fieldOIs);
  StructObjectInspector wdwOutOI =
      ObjectInspectorFactory.getStandardStructObjectInspector(aliases, fieldOIs);
  tFn.setWdwProcessingOutputOI(wdwOutOI);
  RowResolver wdwOutRR = buildRowResolverForWindowing(wdwTFnDef);
  ShapeDetails wdwOutShape = setupShape(wdwOutOI, null, wdwOutRR);
  wdwTFnDef.setOutputShape(wdwOutShape);

  tFn.setupOutputOI();
  PTFDeserializer.alterOutputOIForStreaming(ptfDesc);
  return ptfDesc;
}
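A side note on the output shape: the struct ObjectInspector is assembled from the window-function aliases followed by the pass-through input columns. A minimal standalone sketch of that assembly, with made-up field names and primitive inspectors standing in for real function outputs:

  List<ObjectInspector> ois = new ArrayList<ObjectInspector>();
  ois.add(PrimitiveObjectInspectorFactory.writableIntObjectInspector);
  ois.add(PrimitiveObjectInspectorFactory.writableStringObjectInspector);
  List<String> names = Arrays.asList("r", "_col0");  // hypothetical aliases
  StructObjectInspector outOI =
      ObjectInspectorFactory.getStandardStructObjectInspector(names, ois);
  // outOI now exposes fields "r" (int) and "_col0" (string), mirroring how
  // translate() combines window-function OIs with the input columns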
Use of org.apache.hadoop.hive.ql.parse.WindowingSpec.WindowExpressionSpec in project hive by apache.
In class WindowingComponentizer, method groupFunctions:
private void groupFunctions() throws SemanticException {
  for (WindowExpressionSpec expr : originalSpec.getWindowExpressions()) {
    // the cast assumes every windowing expression here is a window function
    WindowFunctionSpec wFn = (WindowFunctionSpec) expr;
    PartitioningSpec wFnGrp = wFn.getWindowSpec().getPartitioning();
    // 'groups' is the componentizer's map field keyed by PartitioningSpec
    WindowingSpec wSpec = groups.get(wFnGrp);
    if (wSpec == null) {
      wSpec = new WindowingSpec();
      groups.put(wFnGrp, wSpec);
    }
    wSpec.addWindowFunction(wFn);
  }
}
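The same grouping, restated with Java 8 map idioms to make the bucketing explicit; a hedged sketch that assumes, as the original does, that PartitioningSpec provides equals/hashCode so identical PARTITION BY/ORDER BY clauses land on the same key:

  Map<PartitioningSpec, WindowingSpec> groups = new LinkedHashMap<>();
  for (WindowExpressionSpec expr : originalSpec.getWindowExpressions()) {
    WindowFunctionSpec wFn = (WindowFunctionSpec) expr;
    groups.computeIfAbsent(wFn.getWindowSpec().getPartitioning(),
        k -> new WindowingSpec()).addWindowFunction(wFn);
  }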