
Example 1 with HiveParserTypeCheckCtx

use of org.apache.flink.table.planner.delegation.hive.copy.HiveParserTypeCheckCtx in project flink by apache.

In the class HiveParserUtils, the method extractLateralViewInfo:

// extracts useful information for a given lateral view node
public static LateralViewInfo extractLateralViewInfo(HiveParserASTNode lateralView, HiveParserRowResolver inputRR, HiveParserSemanticAnalyzer hiveAnalyzer, FrameworkConfig frameworkConfig, RelOptCluster cluster) throws SemanticException {
    // checks the left sub-tree
    HiveParserASTNode sel = (HiveParserASTNode) lateralView.getChild(0);
    Preconditions.checkArgument(sel.getToken().getType() == HiveASTParser.TOK_SELECT);
    Preconditions.checkArgument(sel.getChildCount() == 1);
    HiveParserASTNode selExpr = (HiveParserASTNode) sel.getChild(0);
    Preconditions.checkArgument(selExpr.getToken().getType() == HiveASTParser.TOK_SELEXPR);
    // decide function name and function
    HiveParserASTNode func = (HiveParserASTNode) selExpr.getChild(0);
    Preconditions.checkArgument(func.getToken().getType() == HiveASTParser.TOK_FUNCTION);
    String funcName = getFunctionText(func, true);
    SqlOperator sqlOperator = getSqlOperator(funcName, frameworkConfig.getOperatorTable(), SqlFunctionCategory.USER_DEFINED_TABLE_FUNCTION);
    Preconditions.checkArgument(isUDTF(sqlOperator), funcName + " is not a valid UDTF");
    // decide operands
    List<ExprNodeDesc> operands = new ArrayList<>(func.getChildCount() - 1);
    List<ColumnInfo> operandColInfos = new ArrayList<>(func.getChildCount() - 1);
    HiveParserTypeCheckCtx typeCheckCtx = new HiveParserTypeCheckCtx(inputRR, frameworkConfig, cluster);
    for (int i = 1; i < func.getChildCount(); i++) {
        ExprNodeDesc exprDesc = hiveAnalyzer.genExprNodeDesc((HiveParserASTNode) func.getChild(i), inputRR, typeCheckCtx);
        operands.add(exprDesc);
        operandColInfos.add(new ColumnInfo(getColumnInternalName(i - 1), exprDesc.getWritableObjectInspector(), null, false));
    }
    // decide table alias -- there must be a table alias
    HiveParserASTNode tabAliasNode = (HiveParserASTNode) selExpr.getChild(selExpr.getChildCount() - 1);
    Preconditions.checkArgument(tabAliasNode.getToken().getType() == HiveASTParser.TOK_TABALIAS);
    String tabAlias = unescapeIdentifier(tabAliasNode.getChild(0).getText().toLowerCase());
    // decide column aliases -- column aliases are optional
    List<String> colAliases = new ArrayList<>();
    for (int i = 1; i < selExpr.getChildCount() - 1; i++) {
        HiveParserASTNode child = (HiveParserASTNode) selExpr.getChild(i);
        Preconditions.checkArgument(child.getToken().getType() == HiveASTParser.Identifier);
        colAliases.add(unescapeIdentifier(child.getText().toLowerCase()));
    }
    return new LateralViewInfo(funcName, sqlOperator, operands, operandColInfos, colAliases, tabAlias);
}
Also used : HiveParserASTNode(org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode) SqlOperator(org.apache.calcite.sql.SqlOperator) ArrayList(java.util.ArrayList) ColumnInfo(org.apache.hadoop.hive.ql.exec.ColumnInfo) NlsString(org.apache.calcite.util.NlsString) ExprNodeDesc(org.apache.hadoop.hive.ql.plan.ExprNodeDesc) HiveParserTypeCheckCtx(org.apache.flink.table.planner.delegation.hive.copy.HiveParserTypeCheckCtx)
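
For orientation, here is a hedged usage sketch. The wrapper method and its parameter names are illustrative stand-ins for state the enclosing planner already holds, and LateralViewInfo is assumed to be the result type nested in HiveParserUtils. The leading comment spells out the AST shape that the preconditions above enforce.

// Illustrative sketch only -- every parameter stands in for real planner state.
// AST shape enforced by the Preconditions in extractLateralViewInfo:
//   TOK_LATERAL_VIEW
//     TOK_SELECT
//       TOK_SELEXPR
//         TOK_FUNCTION   (the UDTF call and its arguments)
//         Identifier*    (optional column aliases)
//         TOK_TABALIAS   (mandatory table alias, always the last child)
static HiveParserUtils.LateralViewInfo describeLateralView(
        HiveParserASTNode lateralView,
        HiveParserRowResolver inputRR,
        HiveParserSemanticAnalyzer hiveAnalyzer,
        FrameworkConfig frameworkConfig,
        RelOptCluster cluster) throws SemanticException {
    return HiveParserUtils.extractLateralViewInfo(
            lateralView, inputRR, hiveAnalyzer, frameworkConfig, cluster);
}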

Example 2 with HiveParserTypeCheckCtx

use of org.apache.flink.table.planner.delegation.hive.copy.HiveParserTypeCheckCtx in project flink by apache.

In the class HiveParserTypeCheckProcFactory, the method processGByExpr:

/**
 * Function to do group-by subexpression elimination. This is called by all the processors
 * initially. As an example, consider the query select a+b, count(1) from T group by a+b; here
 * a+b is already precomputed in the group-by operator's key, so we substitute a+b in the select
 * list with the internal column name of the a+b expression that appears in the input row
 * resolver.
 *
 * @param nd The node that is being inspected.
 * @param procCtx The processor context.
 * @return exprNodeColumnDesc.
 */
public static ExprNodeDesc processGByExpr(Node nd, Object procCtx) throws SemanticException {
    // We recursively create the exprNodeDesc. Base cases: when we encounter a
    // column ref, we convert that into an exprNodeColumnDesc; when we encounter a
    // constant, we convert that into an exprNodeConstantDesc. For others we just
    // build the exprNodeFuncDesc with recursively built children.
    HiveParserASTNode expr = (HiveParserASTNode) nd;
    HiveParserTypeCheckCtx ctx = (HiveParserTypeCheckCtx) procCtx;
    // having key in (select .. where a = min(b.value)
    if (!ctx.isUseCaching() && ctx.getOuterRR() == null) {
        return null;
    }
    HiveParserRowResolver input = ctx.getInputRR();
    ExprNodeDesc desc = null;
    if (input == null || !ctx.getAllowGBExprElimination()) {
        return null;
    }
    // If the current subExpression is pre-calculated, as in Group-By etc.
    ColumnInfo colInfo = input.getExpression(expr);
    // try outer row resolver
    HiveParserRowResolver outerRR = ctx.getOuterRR();
    if (colInfo == null && outerRR != null) {
        colInfo = outerRR.getExpression(expr);
    }
    if (colInfo != null) {
        desc = new ExprNodeColumnDesc(colInfo);
        HiveParserASTNode source = input.getExpressionSource(expr);
        if (source != null) {
            ctx.getUnparseTranslator().addCopyTranslation(expr, source);
        }
        return desc;
    }
    return desc;
}
Also used : HiveParserASTNode(org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode) HiveParserRowResolver(org.apache.flink.table.planner.delegation.hive.copy.HiveParserRowResolver) ExprNodeColumnDesc(org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc) ColumnInfo(org.apache.hadoop.hive.ql.exec.ColumnInfo) ExprNodeDesc(org.apache.hadoop.hive.ql.plan.ExprNodeDesc) HiveParserTypeCheckCtx(org.apache.flink.table.planner.delegation.hive.copy.HiveParserTypeCheckCtx)
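
To make the control flow easier to follow, here is a hedged sketch that isolates just the gating processGByExpr applies before it substitutes a pre-computed expression. The helper name canReusePrecomputedExpr is illustrative; the accessor calls mirror the method above.

// Sketch only: the three gates applied before a group-by subexpression is reused.
static boolean canReusePrecomputedExpr(HiveParserTypeCheckCtx ctx, HiveParserASTNode expr)
        throws SemanticException {
    // Gate 1: caching is disabled and there is no outer resolver to consult.
    if (!ctx.isUseCaching() && ctx.getOuterRR() == null) {
        return false;
    }
    // Gate 2: no input resolver, or group-by expression elimination is disallowed.
    if (ctx.getInputRR() == null || !ctx.getAllowGBExprElimination()) {
        return false;
    }
    // Gate 3: the expression must already be computed in this query block's input
    // row resolver, or in the outer one for correlated subqueries.
    return ctx.getInputRR().getExpression(expr) != null
            || (ctx.getOuterRR() != null && ctx.getOuterRR().getExpression(expr) != null);
}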

Example 3 with HiveParserTypeCheckCtx

use of org.apache.flink.table.planner.delegation.hive.copy.HiveParserTypeCheckCtx in project flink by apache.

In the class HiveParserCalcitePlanner, the method getWindowRexAndType:

private Pair<RexNode, TypeInfo> getWindowRexAndType(HiveParserWindowingSpec.WindowExpressionSpec winExprSpec, RelNode srcRel) throws SemanticException {
    RexNode window;
    if (winExprSpec instanceof HiveParserWindowingSpec.WindowFunctionSpec) {
        HiveParserWindowingSpec.WindowFunctionSpec wFnSpec = (HiveParserWindowingSpec.WindowFunctionSpec) winExprSpec;
        HiveParserASTNode windowProjAst = wFnSpec.getExpression();
        // TODO: do we need to get to child?
        int wndSpecASTIndx = getWindowSpecIndx(windowProjAst);
        // 2. Get Hive Aggregate Info
        AggInfo hiveAggInfo = getHiveAggInfo(windowProjAst, wndSpecASTIndx - 1, relToRowResolver.get(srcRel), (HiveParserWindowingSpec.WindowFunctionSpec) winExprSpec, semanticAnalyzer, frameworkConfig, cluster);
        // 3. Get Calcite Return type for Agg Fn
        RelDataType calciteAggFnRetType = HiveParserUtils.toRelDataType(hiveAggInfo.getReturnType(), cluster.getTypeFactory());
        // 4. Convert Agg Fn args to Calcite
        Map<String, Integer> posMap = relToHiveColNameCalcitePosMap.get(srcRel);
        HiveParserRexNodeConverter converter = new HiveParserRexNodeConverter(cluster, srcRel.getRowType(), posMap, 0, false, funcConverter);
        List<RexNode> calciteAggFnArgs = new ArrayList<>();
        List<RelDataType> calciteAggFnArgTypes = new ArrayList<>();
        for (int i = 0; i < hiveAggInfo.getAggParams().size(); i++) {
            calciteAggFnArgs.add(converter.convert(hiveAggInfo.getAggParams().get(i)));
            calciteAggFnArgTypes.add(HiveParserUtils.toRelDataType(hiveAggInfo.getAggParams().get(i).getTypeInfo(), cluster.getTypeFactory()));
        }
        // 5. Get Calcite Agg Fn
        final SqlAggFunction calciteAggFn = HiveParserSqlFunctionConverter.getCalciteAggFn(hiveAggInfo.getUdfName(), hiveAggInfo.isDistinct(), calciteAggFnArgTypes, calciteAggFnRetType);
        // 6. Translate Window spec
        HiveParserRowResolver inputRR = relToRowResolver.get(srcRel);
        HiveParserWindowingSpec.WindowSpec wndSpec = ((HiveParserWindowingSpec.WindowFunctionSpec) winExprSpec).getWindowSpec();
        List<RexNode> partitionKeys = getPartitionKeys(wndSpec.getPartition(), converter, inputRR, new HiveParserTypeCheckCtx(inputRR, frameworkConfig, cluster), semanticAnalyzer);
        List<RexFieldCollation> orderKeys = getOrderKeys(wndSpec.getOrder(), converter, inputRR, new HiveParserTypeCheckCtx(inputRR, frameworkConfig, cluster), semanticAnalyzer);
        RexWindowBound lowerBound = getBound(wndSpec.getWindowFrame().getStart(), cluster);
        RexWindowBound upperBound = getBound(wndSpec.getWindowFrame().getEnd(), cluster);
        boolean isRows = wndSpec.getWindowFrame().getWindowType() == HiveParserWindowingSpec.WindowType.ROWS;
        window = HiveParserUtils.makeOver(cluster.getRexBuilder(), calciteAggFnRetType, calciteAggFn, calciteAggFnArgs, partitionKeys, orderKeys, lowerBound, upperBound, isRows, true, false, false, false);
        window = window.accept(funcConverter);
    } else {
        throw new SemanticException("Unsupported window Spec");
    }
    return new Pair<>(window, HiveParserTypeConverter.convert(window.getType()));
}
Also used : ArrayList(java.util.ArrayList) RelDataType(org.apache.calcite.rel.type.RelDataType) HiveParserRowResolver(org.apache.flink.table.planner.delegation.hive.copy.HiveParserRowResolver) RexWindowBound(org.apache.calcite.rex.RexWindowBound) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException) Pair(org.apache.calcite.util.Pair) ObjectPair(org.apache.hadoop.hive.common.ObjectPair) HiveParserASTNode(org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode) HiveParserBaseSemanticAnalyzer.getHiveAggInfo(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.getHiveAggInfo) AggInfo(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.AggInfo) HiveParserWindowingSpec(org.apache.flink.table.planner.delegation.hive.copy.HiveParserWindowingSpec) SqlAggFunction(org.apache.calcite.sql.SqlAggFunction) RexFieldCollation(org.apache.calcite.rex.RexFieldCollation) HiveParserTypeCheckCtx(org.apache.flink.table.planner.delegation.hive.copy.HiveParserTypeCheckCtx) RexNode(org.apache.calcite.rex.RexNode)
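
One detail worth calling out above: the partition keys and the order keys are each translated with their own freshly constructed HiveParserTypeCheckCtx over the same input row resolver. A minimal hedged sketch of that pattern, assuming inputRR, wndSpec, converter, semanticAnalyzer, frameworkConfig and cluster come from the surrounding planner exactly as in the method:

// Sketch only: one type-check context per translation step, so any per-call
// state (such as a recorded error) stays isolated between the two conversions.
HiveParserTypeCheckCtx partitionCtx = new HiveParserTypeCheckCtx(inputRR, frameworkConfig, cluster);
HiveParserTypeCheckCtx orderCtx = new HiveParserTypeCheckCtx(inputRR, frameworkConfig, cluster);
List<RexNode> partitionKeys = getPartitionKeys(wndSpec.getPartition(), converter, inputRR, partitionCtx, semanticAnalyzer);
List<RexFieldCollation> orderKeys = getOrderKeys(wndSpec.getOrder(), converter, inputRR, orderCtx, semanticAnalyzer);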

Example 4 with HiveParserTypeCheckCtx

use of org.apache.flink.table.planner.delegation.hive.copy.HiveParserTypeCheckCtx in project flink by apache.

In the class HiveParserCalcitePlanner, the method genSelectLogicalPlan:

// NOTE: there can only be one select clause since we don't handle multi destination insert.
private RelNode genSelectLogicalPlan(HiveParserQB qb, RelNode srcRel, RelNode starSrcRel, Map<String, Integer> outerNameToPos, HiveParserRowResolver outerRR) throws SemanticException {
    // 0. Generate a Select Node for Windowing
    // Exclude the newly-generated select columns from */etc. resolution.
    HashSet<ColumnInfo> excludedColumns = new HashSet<>();
    RelNode selForWindow = genSelectForWindowing(qb, srcRel, excludedColumns);
    srcRel = (selForWindow == null) ? srcRel : selForWindow;
    ArrayList<ExprNodeDesc> exprNodeDescs = new ArrayList<>();
    // 1. Get Select Expression List
    HiveParserQBParseInfo qbp = qb.getParseInfo();
    String selClauseName = qbp.getClauseNames().iterator().next();
    HiveParserASTNode selExprList = qbp.getSelForClause(selClauseName);
    // make sure that if there is a subquery, it is a top-level expression
    HiveParserSubQueryUtils.checkForTopLevelSubqueries(selExprList);
    final boolean cubeRollupGrpSetPresent = !qbp.getDestRollups().isEmpty() || !qbp.getDestGroupingSets().isEmpty() || !qbp.getDestCubes().isEmpty();
    // 3. Query Hints
    int posn = 0;
    boolean hintPresent = selExprList.getChild(0).getType() == HiveASTParser.QUERY_HINT;
    if (hintPresent) {
        posn++;
    }
    // 4. Bailout if select involves Transform
    boolean isInTransform = selExprList.getChild(posn).getChild(0).getType() == HiveASTParser.TOK_TRANSFORM;
    if (isInTransform) {
        String msg = "SELECT TRANSFORM is currently not supported in CBO, turn off cbo to use TRANSFORM.";
        throw new SemanticException(msg);
    }
    // 2. Row resolvers for input, output
    HiveParserRowResolver outRR = new HiveParserRowResolver();
    Integer pos = 0;
    // TODO: will this also fix windowing? try
    HiveParserRowResolver inputRR = relToRowResolver.get(srcRel), starRR = inputRR;
    if (starSrcRel != null) {
        starRR = relToRowResolver.get(starSrcRel);
    }
    // 5. Check if select involves UDTF
    String udtfTableAlias = null;
    SqlOperator udtfOperator = null;
    String genericUDTFName = null;
    ArrayList<String> udtfColAliases = new ArrayList<>();
    HiveParserASTNode expr = (HiveParserASTNode) selExprList.getChild(posn).getChild(0);
    int exprType = expr.getType();
    if (exprType == HiveASTParser.TOK_FUNCTION || exprType == HiveASTParser.TOK_FUNCTIONSTAR) {
        String funcName = HiveParserTypeCheckProcFactory.DefaultExprProcessor.getFunctionText(expr, true);
        // we can't just try to get the table function here because the operator table
        // throws an exception if it's not a table function
        SqlOperator sqlOperator = HiveParserUtils.getAnySqlOperator(funcName, frameworkConfig.getOperatorTable());
        if (HiveParserUtils.isUDTF(sqlOperator)) {
            LOG.debug("Found UDTF " + funcName);
            udtfOperator = sqlOperator;
            genericUDTFName = funcName;
            if (!HiveParserUtils.isNative(sqlOperator)) {
                semanticAnalyzer.unparseTranslator.addIdentifierTranslation((HiveParserASTNode) expr.getChild(0));
            }
            if (exprType == HiveASTParser.TOK_FUNCTIONSTAR) {
                semanticAnalyzer.genColListRegex(".*", null, (HiveParserASTNode) expr.getChild(0), exprNodeDescs, null, inputRR, starRR, pos, outRR, qb.getAliases(), false);
            }
        }
    }
    if (udtfOperator != null) {
        // Only support a single expression when it's a UDTF
        if (selExprList.getChildCount() > 1) {
            throw new SemanticException(generateErrorMessage((HiveParserASTNode) selExprList.getChild(1), ErrorMsg.UDTF_MULTIPLE_EXPR.getMsg()));
        }
        HiveParserASTNode selExpr = (HiveParserASTNode) selExprList.getChild(posn);
        // column names can also be inferred from the result of the UDTF
        for (int i = 1; i < selExpr.getChildCount(); i++) {
            HiveParserASTNode selExprChild = (HiveParserASTNode) selExpr.getChild(i);
            switch(selExprChild.getType()) {
                case HiveASTParser.Identifier:
                    udtfColAliases.add(unescapeIdentifier(selExprChild.getText().toLowerCase()));
                    semanticAnalyzer.unparseTranslator.addIdentifierTranslation(selExprChild);
                    break;
                case HiveASTParser.TOK_TABALIAS:
                    assert (selExprChild.getChildCount() == 1);
                    udtfTableAlias = unescapeIdentifier(selExprChild.getChild(0).getText());
                    qb.addAlias(udtfTableAlias);
                    semanticAnalyzer.unparseTranslator.addIdentifierTranslation((HiveParserASTNode) selExprChild.getChild(0));
                    break;
                default:
                    throw new SemanticException("Find invalid token type " + selExprChild.getType() + " in UDTF.");
            }
        }
        LOG.debug("UDTF table alias is " + udtfTableAlias);
        LOG.debug("UDTF col aliases are " + udtfColAliases);
    }
    // 6. Iterate over all expression (after SELECT)
    HiveParserASTNode exprList;
    if (udtfOperator != null) {
        exprList = expr;
    } else {
        exprList = selExprList;
    }
    // For UDTF's, skip the function name to get the expressions
    int startPos = udtfOperator != null ? posn + 1 : posn;
    // track the col aliases provided by user
    List<String> colAliases = new ArrayList<>();
    for (int i = startPos; i < exprList.getChildCount(); ++i) {
        colAliases.add(null);
        // 6.1 child can be EXPR AS ALIAS, or EXPR.
        HiveParserASTNode child = (HiveParserASTNode) exprList.getChild(i);
        boolean hasAsClause = child.getChildCount() == 2;
        // EXPR AS (ALIAS,...) is only allowed for UDTFs, so reject extra children otherwise.
        if (udtfOperator == null && child.getChildCount() > 2) {
            throw new SemanticException(generateErrorMessage((HiveParserASTNode) child.getChild(2), ErrorMsg.INVALID_AS.getMsg()));
        }
        String tabAlias;
        String colAlias;
        if (udtfOperator != null) {
            tabAlias = null;
            colAlias = semanticAnalyzer.getAutogenColAliasPrfxLbl() + i;
            expr = child;
        } else {
            // 6.3 Get rid of TOK_SELEXPR
            expr = (HiveParserASTNode) child.getChild(0);
            String[] colRef = HiveParserUtils.getColAlias(child, semanticAnalyzer.getAutogenColAliasPrfxLbl(), inputRR, semanticAnalyzer.autogenColAliasPrfxIncludeFuncName(), i);
            tabAlias = colRef[0];
            colAlias = colRef[1];
            if (hasAsClause) {
                colAliases.set(colAliases.size() - 1, colAlias);
                semanticAnalyzer.unparseTranslator.addIdentifierTranslation((HiveParserASTNode) child.getChild(1));
            }
        }
        Map<HiveParserASTNode, RelNode> subQueryToRelNode = new HashMap<>();
        boolean isSubQuery = genSubQueryRelNode(qb, expr, srcRel, false, subQueryToRelNode);
        if (isSubQuery) {
            ExprNodeDesc subQueryDesc = semanticAnalyzer.genExprNodeDesc(expr, relToRowResolver.get(srcRel), outerRR, subQueryToRelNode, false);
            exprNodeDescs.add(subQueryDesc);
            ColumnInfo colInfo = new ColumnInfo(getColumnInternalName(pos), subQueryDesc.getWritableObjectInspector(), tabAlias, false);
            if (!outRR.putWithCheck(tabAlias, colAlias, null, colInfo)) {
                throw new SemanticException("Cannot add column to RR: " + tabAlias + "." + colAlias + " => " + colInfo + " due to duplication, see previous warnings");
            }
        } else {
            // 6.4 Build ExprNode corresponding to columns
            if (expr.getType() == HiveASTParser.TOK_ALLCOLREF) {
                pos = semanticAnalyzer.genColListRegex(".*", expr.getChildCount() == 0 ? null : HiveParserBaseSemanticAnalyzer.getUnescapedName((HiveParserASTNode) expr.getChild(0)).toLowerCase(), expr, exprNodeDescs, excludedColumns, inputRR, starRR, pos, outRR, qb.getAliases(), false);
            } else if (expr.getType() == HiveASTParser.TOK_TABLE_OR_COL && !hasAsClause && !inputRR.getIsExprResolver() && HiveParserUtils.isRegex(unescapeIdentifier(expr.getChild(0).getText()), semanticAnalyzer.getConf())) {
                // In case the expression is a regex COL. This can only happen without AS clause
                // We don't allow this for ExprResolver - the Group By case
                pos = semanticAnalyzer.genColListRegex(unescapeIdentifier(expr.getChild(0).getText()), null, expr, exprNodeDescs, excludedColumns, inputRR, starRR, pos, outRR, qb.getAliases(), true);
            } else if (expr.getType() == HiveASTParser.DOT && expr.getChild(0).getType() == HiveASTParser.TOK_TABLE_OR_COL && inputRR.hasTableAlias(unescapeIdentifier(expr.getChild(0).getChild(0).getText().toLowerCase())) && !hasAsClause && !inputRR.getIsExprResolver() && HiveParserUtils.isRegex(unescapeIdentifier(expr.getChild(1).getText()), semanticAnalyzer.getConf())) {
                // In case the expression is TABLE.COL (col can be regex). This can only happen
                // without AS clause
                // We don't allow this for ExprResolver - the Group By case
                pos = semanticAnalyzer.genColListRegex(unescapeIdentifier(expr.getChild(1).getText()), unescapeIdentifier(expr.getChild(0).getChild(0).getText().toLowerCase()), expr, exprNodeDescs, excludedColumns, inputRR, starRR, pos, outRR, qb.getAliases(), false);
            } else if (HiveASTParseUtils.containsTokenOfType(expr, HiveASTParser.TOK_FUNCTIONDI) && !(srcRel instanceof Aggregate)) {
                // Likely a malformed query, e.g. select hash(distinct c1) from t1;
                throw new SemanticException("Distinct without an aggregation.");
            } else {
                // Case when this is an expression
                HiveParserTypeCheckCtx typeCheckCtx = new HiveParserTypeCheckCtx(inputRR, frameworkConfig, cluster);
                // We allow stateful functions in the SELECT list (but nowhere else)
                typeCheckCtx.setAllowStatefulFunctions(true);
                if (!qbp.getDestToGroupBy().isEmpty()) {
                    // Special handling of grouping function
                    expr = rewriteGroupingFunctionAST(getGroupByForClause(qbp, selClauseName), expr, !cubeRollupGrpSetPresent);
                }
                ExprNodeDesc exprDesc = semanticAnalyzer.genExprNodeDesc(expr, inputRR, typeCheckCtx);
                String recommended = semanticAnalyzer.recommendName(exprDesc, colAlias);
                if (recommended != null && outRR.get(null, recommended) == null) {
                    colAlias = recommended;
                }
                exprNodeDescs.add(exprDesc);
                ColumnInfo colInfo = new ColumnInfo(getColumnInternalName(pos), exprDesc.getWritableObjectInspector(), tabAlias, false);
                colInfo.setSkewedCol(exprDesc instanceof ExprNodeColumnDesc && ((ExprNodeColumnDesc) exprDesc).isSkewedCol());
                // Hive errors out in case of duplication. We allow it and see what happens.
                outRR.put(tabAlias, colAlias, colInfo);
                if (exprDesc instanceof ExprNodeColumnDesc) {
                    ExprNodeColumnDesc colExp = (ExprNodeColumnDesc) exprDesc;
                    String[] altMapping = inputRR.getAlternateMappings(colExp.getColumn());
                    if (altMapping != null) {
                        // TODO: this can overwrite the mapping. Should this be allowed?
                        outRR.put(altMapping[0], altMapping[1], colInfo);
                    }
                }
                pos++;
            }
        }
    }
    // 7. Convert Hive projections to Calcite
    List<RexNode> calciteColLst = new ArrayList<>();
    HiveParserRexNodeConverter rexNodeConverter = new HiveParserRexNodeConverter(cluster, srcRel.getRowType(), outerNameToPos, buildHiveColNameToInputPosMap(exprNodeDescs, inputRR), relToRowResolver.get(srcRel), outerRR, 0, false, subqueryId, funcConverter);
    for (ExprNodeDesc colExpr : exprNodeDescs) {
        RexNode calciteCol = rexNodeConverter.convert(colExpr);
        calciteCol = convertNullLiteral(calciteCol).accept(funcConverter);
        calciteColLst.add(calciteCol);
    }
    // 8. Build Calcite Rel
    RelNode res;
    if (udtfOperator != null) {
        // The basic idea for CBO support of UDTF is to treat UDTF as a special project.
        res = genUDTFPlan(udtfOperator, genericUDTFName, udtfTableAlias, udtfColAliases, qb, calciteColLst, outRR.getColumnInfos(), srcRel, true, false);
    } else {
        // reuse the input rel for an identity projection inside a subquery; keeping the
        // redundant project would otherwise introduce an unnecessary agg node.
        if (HiveParserUtils.isIdentityProject(srcRel, calciteColLst, colAliases) && outerRR != null) {
            res = srcRel;
        } else {
            res = genSelectRelNode(calciteColLst, outRR, srcRel);
        }
    }
    // 9. Handle select distinct as GBY if there exist windowing functions
    if (selForWindow != null && selExprList.getToken().getType() == HiveASTParser.TOK_SELECTDI) {
        ImmutableBitSet groupSet = ImmutableBitSet.range(res.getRowType().getFieldList().size());
        res = LogicalAggregate.create(res, groupSet, Collections.emptyList(), Collections.emptyList());
        HiveParserRowResolver groupByOutputRowResolver = new HiveParserRowResolver();
        for (int i = 0; i < outRR.getColumnInfos().size(); i++) {
            ColumnInfo colInfo = outRR.getColumnInfos().get(i);
            ColumnInfo newColInfo = new ColumnInfo(colInfo.getInternalName(), colInfo.getType(), colInfo.getTabAlias(), colInfo.getIsVirtualCol());
            groupByOutputRowResolver.put(colInfo.getTabAlias(), colInfo.getAlias(), newColInfo);
        }
        relToHiveColNameCalcitePosMap.put(res, buildHiveToCalciteColumnMap(groupByOutputRowResolver));
        relToRowResolver.put(res, groupByOutputRowResolver);
    }
    return res;
}
Also used : ImmutableBitSet(org.apache.calcite.util.ImmutableBitSet) LinkedHashMap(java.util.LinkedHashMap) HashMap(java.util.HashMap) SqlOperator(org.apache.calcite.sql.SqlOperator) ArrayList(java.util.ArrayList) ColumnInfo(org.apache.hadoop.hive.ql.exec.ColumnInfo) HiveParserRowResolver(org.apache.flink.table.planner.delegation.hive.copy.HiveParserRowResolver) ExprNodeColumnDesc(org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc) ExprNodeDesc(org.apache.hadoop.hive.ql.plan.ExprNodeDesc) HashSet(java.util.HashSet) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException) HiveParserASTNode(org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode) HiveParserQBParseInfo(org.apache.flink.table.planner.delegation.hive.copy.HiveParserQBParseInfo) RelNode(org.apache.calcite.rel.RelNode) Aggregate(org.apache.calcite.rel.core.Aggregate) LogicalAggregate(org.apache.calcite.rel.logical.LogicalAggregate) HiveParserTypeCheckCtx(org.apache.flink.table.planner.delegation.hive.copy.HiveParserTypeCheckCtx) RexNode(org.apache.calcite.rex.RexNode)
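
Step 9 above is the one non-obvious rewrite: a windowed SELECT DISTINCT is modeled as an aggregate that groups on every projected column and carries no aggregate calls. A hedged sketch of just that rewrite, where res stands for the projected RelNode exactly as in the method:

// Sketch only: distinct-as-aggregate, e.g. for
//   SELECT DISTINCT a, rank() OVER (ORDER BY b) FROM t
// Grouping on all output fields makes rows unique; no aggregate calls are needed.
ImmutableBitSet groupSet = ImmutableBitSet.range(res.getRowType().getFieldList().size());
RelNode distinctRel =
        LogicalAggregate.create(res, groupSet, Collections.emptyList(), Collections.emptyList());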

Aggregations

HiveParserASTNode (org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode): 4
HiveParserTypeCheckCtx (org.apache.flink.table.planner.delegation.hive.copy.HiveParserTypeCheckCtx): 4
ArrayList (java.util.ArrayList): 3
HiveParserRowResolver (org.apache.flink.table.planner.delegation.hive.copy.HiveParserRowResolver): 3
ColumnInfo (org.apache.hadoop.hive.ql.exec.ColumnInfo): 3
ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc): 3
RexNode (org.apache.calcite.rex.RexNode): 2
SqlOperator (org.apache.calcite.sql.SqlOperator): 2
SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException): 2
ExprNodeColumnDesc (org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc): 2
HashMap (java.util.HashMap): 1
HashSet (java.util.HashSet): 1
LinkedHashMap (java.util.LinkedHashMap): 1
RelNode (org.apache.calcite.rel.RelNode): 1
Aggregate (org.apache.calcite.rel.core.Aggregate): 1
LogicalAggregate (org.apache.calcite.rel.logical.LogicalAggregate): 1
RelDataType (org.apache.calcite.rel.type.RelDataType): 1
RexFieldCollation (org.apache.calcite.rex.RexFieldCollation): 1
RexWindowBound (org.apache.calcite.rex.RexWindowBound): 1
SqlAggFunction (org.apache.calcite.sql.SqlAggFunction): 1