Search in sources :

Example 1 with HiveParserASTNode

use of org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode in project flink by apache.

the class HiveParserCalcitePlanner method genFilterLogicalPlan.

private RelNode genFilterLogicalPlan(HiveParserQB qb, RelNode srcRel, Map<String, Integer> outerNameToPosMap, HiveParserRowResolver outerRR) throws SemanticException {
    RelNode filterRel = null;
    Iterator<HiveParserASTNode> whereClauseIterator = qb.getParseInfo().getDestToWhereExpr().values().iterator();
    if (whereClauseIterator.hasNext()) {
        filterRel = genFilterRelNode(qb, (HiveParserASTNode) whereClauseIterator.next().getChild(0), srcRel, outerNameToPosMap, outerRR, false);
    }
    return filterRel;
}
Also used : HiveParserASTNode(org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode) RelNode(org.apache.calcite.rel.RelNode)
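
The null check around the iterator is just the generic "first value or null" lookup over the destination map. For reference, a minimal standalone sketch of the same pattern (firstValueOrNull is a hypothetical helper, not part of Flink):

import java.util.Iterator;
import java.util.Map;

public class FirstValueSketch {
    // Returns the first value in the map's iteration order, or null if the map is
    // empty, mirroring how genFilterLogicalPlan picks the single WHERE expression.
    static <K, V> V firstValueOrNull(Map<K, V> destToWhereExpr) {
        Iterator<V> it = destToWhereExpr.values().iterator();
        return it.hasNext() ? it.next() : null;
    }
}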

Example 2 with HiveParserASTNode

use of org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode in project flink by apache.

the class HiveParserCalcitePlanner method genLateralViewPlan.

private RelNode genLateralViewPlan(HiveParserQB qb, Map<String, RelNode> aliasToRel) throws SemanticException {
    Map<String, ArrayList<HiveParserASTNode>> aliasToLateralViews = qb.getParseInfo().getAliasToLateralViews();
    Preconditions.checkArgument(aliasToLateralViews.size() == 1, "We only support lateral views for 1 alias");
    Map.Entry<String, ArrayList<HiveParserASTNode>> entry = aliasToLateralViews.entrySet().iterator().next();
    String alias = entry.getKey();
    RelNode res = null;
    List<HiveParserASTNode> lateralViews = entry.getValue();
    for (HiveParserASTNode lateralView : lateralViews) {
        Preconditions.checkArgument(lateralView.getChildCount() == 2);
        final boolean isOuter = lateralView.getType() == HiveASTParser.TOK_LATERAL_VIEW_OUTER;
        // this is the 1st lateral view
        if (res == null) {
            // LHS can be table or sub-query
            res = aliasToRel.get(alias);
        }
        Preconditions.checkState(res != null, "Failed to decide LHS table for current lateral view");
        HiveParserRowResolver inputRR = relToRowResolver.get(res);
        HiveParserUtils.LateralViewInfo info = HiveParserUtils.extractLateralViewInfo(lateralView, inputRR, semanticAnalyzer, frameworkConfig, cluster);
        HiveParserRexNodeConverter rexNodeConverter = new HiveParserRexNodeConverter(cluster, res.getRowType(), relToHiveColNameCalcitePosMap.get(res), 0, false, funcConverter);
        List<RexNode> operands = new ArrayList<>(info.getOperands().size());
        for (ExprNodeDesc exprDesc : info.getOperands()) {
            operands.add(rexNodeConverter.convert(exprDesc).accept(funcConverter));
        }
        res = genUDTFPlan(info.getSqlOperator(), info.getFuncName(), info.getTabAlias(), info.getColAliases(), qb, operands, info.getOperandColInfos(), res, false, isOuter);
    }
    return res;
}
Also used : HiveParserASTNode(org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode) ArrayList(java.util.ArrayList) RelNode(org.apache.calcite.rel.RelNode) HiveParserRowResolver(org.apache.flink.table.planner.delegation.hive.copy.HiveParserRowResolver) ExprNodeDesc(org.apache.hadoop.hive.ql.plan.ExprNodeDesc) Map(java.util.Map) LinkedHashMap(java.util.LinkedHashMap) HashMap(java.util.HashMap) HiveParserBaseSemanticAnalyzer.buildHiveToCalciteColumnMap(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.buildHiveToCalciteColumnMap) AbstractMap(java.util.AbstractMap) HiveParserBaseSemanticAnalyzer.buildHiveColNameToInputPosMap(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.buildHiveColNameToInputPosMap) RexNode(org.apache.calcite.rex.RexNode)
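
The argument and state checks above come from org.apache.flink.util.Preconditions. A minimal sketch of the failure behavior this example relies on (assumes flink-core on the classpath; the values are made up for illustration):

import java.util.Collections;
import java.util.List;
import org.apache.flink.util.Preconditions;

public class PreconditionsSketch {
    public static void main(String[] args) {
        List<String> lateralViews = Collections.singletonList("TOK_LATERAL_VIEW");
        // Fails with IllegalArgumentException carrying the given message.
        Preconditions.checkArgument(lateralViews.size() == 1,
                "We only support lateral views for 1 alias");
        Object lhs = new Object();
        // Fails with IllegalStateException instead, signalling a broken internal invariant.
        Preconditions.checkState(lhs != null, "Failed to decide LHS table for current lateral view");
    }
}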

Example 3 with HiveParserASTNode

use of org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode in project flink by apache.

the class HiveParserCalcitePlanner method genGBLogicalPlan.

// Generate GB plan.
private RelNode genGBLogicalPlan(HiveParserQB qb, RelNode srcRel) throws SemanticException {
    RelNode gbRel = null;
    HiveParserQBParseInfo qbp = qb.getParseInfo();
    // 1. Gather GB Expressions (AST) (GB + Aggregations)
    // NOTE: Multi Insert is not supported
    String detsClauseName = qbp.getClauseNames().iterator().next();
    HiveParserASTNode selExprList = qb.getParseInfo().getSelForClause(detsClauseName);
    HiveParserSubQueryUtils.checkForTopLevelSubqueries(selExprList);
    if (selExprList.getToken().getType() == HiveASTParser.TOK_SELECTDI && selExprList.getChildCount() == 1 && selExprList.getChild(0).getChildCount() == 1) {
        HiveParserASTNode node = (HiveParserASTNode) selExprList.getChild(0).getChild(0);
        if (node.getToken().getType() == HiveASTParser.TOK_ALLCOLREF) {
            srcRel = genSelectLogicalPlan(qb, srcRel, srcRel, null, null);
            HiveParserRowResolver rr = relToRowResolver.get(srcRel);
            qbp.setSelExprForClause(detsClauseName, HiveParserUtils.genSelectDIAST(rr));
        }
    }
    // Select DISTINCT + windowing; GBy handled by genSelectForWindowing
    if (selExprList.getToken().getType() == HiveASTParser.TOK_SELECTDI && !qb.getAllWindowingSpecs().isEmpty()) {
        return null;
    }
    List<HiveParserASTNode> gbAstExprs = getGroupByForClause(qbp, detsClauseName);
    HashMap<String, HiveParserASTNode> aggregationTrees = qbp.getAggregationExprsForClause(detsClauseName);
    boolean hasGrpByAstExprs = !gbAstExprs.isEmpty();
    boolean hasAggregationTrees = aggregationTrees != null && !aggregationTrees.isEmpty();
    final boolean cubeRollupGrpSetPresent = !qbp.getDestRollups().isEmpty() || !qbp.getDestGroupingSets().isEmpty() || !qbp.getDestCubes().isEmpty();
    // 2. Sanity check
    if (semanticAnalyzer.getConf().getBoolVar(HiveConf.ConfVars.HIVEGROUPBYSKEW) && qbp.getDistinctFuncExprsForClause(detsClauseName).size() > 1) {
        throw new SemanticException(ErrorMsg.UNSUPPORTED_MULTIPLE_DISTINCTS.getMsg());
    }
    if (hasGrpByAstExprs || hasAggregationTrees) {
        ArrayList<ExprNodeDesc> gbExprNodeDescs = new ArrayList<>();
        ArrayList<String> outputColNames = new ArrayList<>();
        // 3. Input, Output Row Resolvers
        HiveParserRowResolver inputRR = relToRowResolver.get(srcRel);
        HiveParserRowResolver outputRR = new HiveParserRowResolver();
        outputRR.setIsExprResolver(true);
        if (hasGrpByAstExprs) {
            // 4. Construct GB Keys (ExprNode)
            for (HiveParserASTNode gbAstExpr : gbAstExprs) {
                Map<HiveParserASTNode, ExprNodeDesc> astToExprNodeDesc = semanticAnalyzer.genAllExprNodeDesc(gbAstExpr, inputRR);
                ExprNodeDesc grpbyExprNDesc = astToExprNodeDesc.get(gbAstExpr);
                if (grpbyExprNDesc == null) {
                    throw new SemanticException("Invalid Column Reference: " + gbAstExpr.dump());
                }
                addToGBExpr(outputRR, inputRR, gbAstExpr, grpbyExprNDesc, gbExprNodeDescs, outputColNames);
            }
        }
        // 5. GroupingSets, Cube, Rollup
        int numGroupCols = gbExprNodeDescs.size();
        List<Integer> groupingSets = null;
        if (cubeRollupGrpSetPresent) {
            if (qbp.getDestRollups().contains(detsClauseName)) {
                groupingSets = getGroupingSetsForRollup(gbAstExprs.size());
            } else if (qbp.getDestCubes().contains(detsClauseName)) {
                groupingSets = getGroupingSetsForCube(gbAstExprs.size());
            } else if (qbp.getDestGroupingSets().contains(detsClauseName)) {
                groupingSets = getGroupingSets(gbAstExprs, qbp, detsClauseName);
            }
        }
        // 6. Construct aggregation function Info
        ArrayList<AggInfo> aggInfos = new ArrayList<>();
        if (hasAggregationTrees) {
            for (HiveParserASTNode value : aggregationTrees.values()) {
                // 6.1 Determine type of UDAF
                // This is the GenericUDAF name
                String aggName = unescapeIdentifier(value.getChild(0).getText());
                boolean isDistinct = value.getType() == HiveASTParser.TOK_FUNCTIONDI;
                boolean isAllColumns = value.getType() == HiveASTParser.TOK_FUNCTIONSTAR;
                // 6.2 Convert UDAF Params to ExprNodeDesc
                ArrayList<ExprNodeDesc> aggParameters = new ArrayList<>();
                for (int i = 1; i < value.getChildCount(); i++) {
                    HiveParserASTNode paraExpr = (HiveParserASTNode) value.getChild(i);
                    ExprNodeDesc paraExprNode = semanticAnalyzer.genExprNodeDesc(paraExpr, inputRR);
                    aggParameters.add(paraExprNode);
                }
                GenericUDAFEvaluator.Mode aggMode = HiveParserUtils.groupByDescModeToUDAFMode(GroupByDesc.Mode.COMPLETE, isDistinct);
                GenericUDAFEvaluator genericUDAFEvaluator = HiveParserUtils.getGenericUDAFEvaluator(aggName, aggParameters, value, isDistinct, isAllColumns, frameworkConfig.getOperatorTable());
                assert (genericUDAFEvaluator != null);
                HiveParserBaseSemanticAnalyzer.GenericUDAFInfo udaf = HiveParserUtils.getGenericUDAFInfo(genericUDAFEvaluator, aggMode, aggParameters);
                String aggAlias = null;
                if (value.getParent().getType() == HiveASTParser.TOK_SELEXPR && value.getParent().getChildCount() == 2) {
                    aggAlias = unescapeIdentifier(value.getParent().getChild(1).getText().toLowerCase());
                }
                AggInfo aggInfo = new AggInfo(aggParameters, udaf.returnType, aggName, isDistinct, isAllColumns, aggAlias);
                aggInfos.add(aggInfo);
                String field = aggAlias == null ? getColumnInternalName(numGroupCols + aggInfos.size() - 1) : aggAlias;
                outputColNames.add(field);
                outputRR.putExpression(value, new ColumnInfo(field, aggInfo.getReturnType(), "", false));
            }
        }
        // 7. If grouping sets are present, expose the virtual grouping-id column
        if (groupingSets != null && !groupingSets.isEmpty()) {
            String field = getColumnInternalName(numGroupCols + aggInfos.size());
            outputColNames.add(field);
            // Flink's grouping_id returns bigint
            outputRR.put(null, VirtualColumn.GROUPINGID.getName(), new ColumnInfo(field, TypeInfoFactory.longTypeInfo, null, true));
        }
        // 8. We create the group_by operator
        gbRel = genGBRelNode(gbExprNodeDescs, aggInfos, groupingSets, srcRel);
        relToHiveColNameCalcitePosMap.put(gbRel, buildHiveToCalciteColumnMap(outputRR));
        relToRowResolver.put(gbRel, outputRR);
    }
    return gbRel;
}
Also used : HiveParserASTNode(org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode) GenericUDAFEvaluator(org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator) ArrayList(java.util.ArrayList) HiveParserBaseSemanticAnalyzer.getHiveAggInfo(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.getHiveAggInfo) AggInfo(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.AggInfo) ColumnInfo(org.apache.hadoop.hive.ql.exec.ColumnInfo) HiveParserQBParseInfo(org.apache.flink.table.planner.delegation.hive.copy.HiveParserQBParseInfo) RelNode(org.apache.calcite.rel.RelNode) HiveParserRowResolver(org.apache.flink.table.planner.delegation.hive.copy.HiveParserRowResolver) ExprNodeDesc(org.apache.hadoop.hive.ql.plan.ExprNodeDesc) HiveParserBaseSemanticAnalyzer(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException)
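
When an aggregate has no explicit alias, the code falls back to Hive-style positional column names via getColumnInternalName. A minimal sketch of that naming scheme (Hive generates _col0, _col1, ...; the class here is illustrative):

public class InternalNameSketch {
    // Hive numbers unnamed output columns positionally: _col0, _col1, ...
    static String columnInternalName(int pos) {
        return "_col" + pos;
    }

    public static void main(String[] args) {
        int numGroupCols = 2; // group-by keys occupy positions 0 and 1
        int aggIndex = 0;     // the first aggregate is placed after the keys
        System.out.println(columnInternalName(numGroupCols + aggIndex)); // _col2
    }
}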

Example 4 with HiveParserASTNode

use of org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode in project flink by apache.

the class HiveParserCalcitePlanner method genSetOpLogicalPlan.

@SuppressWarnings("nls")
private RelNode genSetOpLogicalPlan(HiveParserQBExpr.Opcode opcode, String alias, String leftalias, RelNode leftRel, String rightalias, RelNode rightRel) throws SemanticException {
    // 1. Get Row Resolvers, Column map for original left and right input of SetOp Rel
    HiveParserRowResolver leftRR = relToRowResolver.get(leftRel);
    HiveParserRowResolver rightRR = relToRowResolver.get(rightRel);
    HashMap<String, ColumnInfo> leftMap = leftRR.getFieldMap(leftalias);
    HashMap<String, ColumnInfo> rightMap = rightRR.getFieldMap(rightalias);
    // 2. Validate that SetOp is feasible according to Hive (by using type info from RR)
    if (leftMap.size() != rightMap.size()) {
        throw new SemanticException("Schema of both sides of union should match.");
    }
    // 3. construct SetOp Output RR using original left & right Input
    HiveParserRowResolver setOpOutRR = new HiveParserRowResolver();
    Iterator<Map.Entry<String, ColumnInfo>> lIter = leftMap.entrySet().iterator();
    Iterator<Map.Entry<String, ColumnInfo>> rIter = rightMap.entrySet().iterator();
    while (lIter.hasNext()) {
        Map.Entry<String, ColumnInfo> lEntry = lIter.next();
        Map.Entry<String, ColumnInfo> rEntry = rIter.next();
        ColumnInfo lInfo = lEntry.getValue();
        ColumnInfo rInfo = rEntry.getValue();
        String field = lEntry.getKey();
        // try widening conversion, otherwise fail union
        TypeInfo commonTypeInfo = FunctionRegistry.getCommonClassForUnionAll(lInfo.getType(), rInfo.getType());
        if (commonTypeInfo == null) {
            HiveParserASTNode tabRef = getQB().getAliases().isEmpty() ? null : getQB().getParseInfo().getSrcForAlias(getQB().getAliases().get(0));
            throw new SemanticException(generateErrorMessage(tabRef, "Schema of both sides of setop should match: Column " + field + " is of type " + lInfo.getType().getTypeName() + " on first table and type " + rInfo.getType().getTypeName() + " on second table"));
        }
        ColumnInfo setOpColInfo = new ColumnInfo(lInfo);
        setOpColInfo.setType(commonTypeInfo);
        setOpOutRR.put(alias, field, setOpColInfo);
    }
    // 4. Determine which columns requires cast on left/right input (Calcite requires exact
    // types on both sides of SetOp)
    boolean leftNeedsTypeCast = false;
    boolean rightNeedsTypeCast = false;
    List<RexNode> leftProjs = new ArrayList<>();
    List<RexNode> rightProjs = new ArrayList<>();
    List<RelDataTypeField> leftFields = leftRel.getRowType().getFieldList();
    List<RelDataTypeField> rightFields = rightRel.getRowType().getFieldList();
    for (int i = 0; i < leftFields.size(); i++) {
        RelDataType leftFieldType = leftFields.get(i).getType();
        RelDataType rightFieldType = rightFields.get(i).getType();
        if (!leftFieldType.equals(rightFieldType)) {
            RelDataType unionFieldType = HiveParserUtils.toRelDataType(setOpOutRR.getColumnInfos().get(i).getType(), cluster.getTypeFactory());
            if (!unionFieldType.equals(leftFieldType)) {
                leftNeedsTypeCast = true;
            }
            leftProjs.add(cluster.getRexBuilder().ensureType(unionFieldType, cluster.getRexBuilder().makeInputRef(leftFieldType, i), true));
            if (!unionFieldType.equals(rightFieldType)) {
                rightNeedsTypeCast = true;
            }
            rightProjs.add(cluster.getRexBuilder().ensureType(unionFieldType, cluster.getRexBuilder().makeInputRef(rightFieldType, i), true));
        } else {
            leftProjs.add(cluster.getRexBuilder().ensureType(leftFieldType, cluster.getRexBuilder().makeInputRef(leftFieldType, i), true));
            rightProjs.add(cluster.getRexBuilder().ensureType(rightFieldType, cluster.getRexBuilder().makeInputRef(rightFieldType, i), true));
        }
    }
    // 5. Introduce a Project above the left/right input if casts are needed for type parity
    if (leftNeedsTypeCast) {
        leftRel = LogicalProject.create(leftRel, Collections.emptyList(), leftProjs, leftRel.getRowType().getFieldNames());
    }
    if (rightNeedsTypeCast) {
        rightRel = LogicalProject.create(rightRel, Collections.emptyList(), rightProjs, rightRel.getRowType().getFieldNames());
    }
    // 6. Construct SetOp Rel
    List<RelNode> leftAndRight = Arrays.asList(leftRel, rightRel);
    SetOp setOpRel;
    switch(opcode) {
        case UNION:
            setOpRel = LogicalUnion.create(leftAndRight, true);
            break;
        case INTERSECT:
            setOpRel = LogicalIntersect.create(leftAndRight, false);
            break;
        case INTERSECTALL:
            setOpRel = LogicalIntersect.create(leftAndRight, true);
            break;
        case EXCEPT:
            setOpRel = LogicalMinus.create(leftAndRight, false);
            break;
        case EXCEPTALL:
            setOpRel = LogicalMinus.create(leftAndRight, true);
            break;
        default:
            throw new SemanticException("Unsupported set operator " + opcode.toString());
    }
    relToRowResolver.put(setOpRel, setOpOutRR);
    relToHiveColNameCalcitePosMap.put(setOpRel, buildHiveToCalciteColumnMap(setOpOutRR));
    return setOpRel;
}
Also used : SetOp(org.apache.calcite.rel.core.SetOp) HiveParserASTNode(org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode) ArrayList(java.util.ArrayList) ColumnInfo(org.apache.hadoop.hive.ql.exec.ColumnInfo) RelDataType(org.apache.calcite.rel.type.RelDataType) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) RelDataTypeField(org.apache.calcite.rel.type.RelDataTypeField) RelNode(org.apache.calcite.rel.RelNode) HiveParserRowResolver(org.apache.flink.table.planner.delegation.hive.copy.HiveParserRowResolver) Map(java.util.Map) LinkedHashMap(java.util.LinkedHashMap) HashMap(java.util.HashMap) HiveParserBaseSemanticAnalyzer.buildHiveToCalciteColumnMap(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.buildHiveToCalciteColumnMap) AbstractMap(java.util.AbstractMap) HiveParserBaseSemanticAnalyzer.buildHiveColNameToInputPosMap(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.buildHiveColNameToInputPosMap) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException) RexNode(org.apache.calcite.rex.RexNode)
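
The cast-parity loop above reduces to a simple rule: as soon as any input column's type differs from the common type agreed for the set operation, the entire side is wrapped in a cast projection. A minimal type-agnostic sketch of that decision (strings stand in for Calcite's RelDataType):

import java.util.Arrays;
import java.util.List;

public class CastParitySketch {
    // True if any column's type differs from the common (set-op) type at that position.
    static boolean needsCastProjection(List<String> commonTypes, List<String> inputTypes) {
        for (int i = 0; i < commonTypes.size(); i++) {
            if (!commonTypes.get(i).equals(inputTypes.get(i))) {
                return true;
            }
        }
        return false;
    }

    public static void main(String[] args) {
        List<String> common = Arrays.asList("BIGINT", "VARCHAR");
        List<String> left = Arrays.asList("INTEGER", "VARCHAR");
        System.out.println(needsCastProjection(common, left)); // true: INTEGER widens to BIGINT
    }
}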

Example 5 with HiveParserASTNode

use of org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode in project flink by apache.

the class HiveParserCalcitePlanner method genOBLogicalPlan.

private Pair<Sort, RelNode> genOBLogicalPlan(HiveParserQB qb, RelNode srcRel, boolean outermostOB) throws SemanticException {
    Sort sortRel = null;
    RelNode originalOBInput = null;
    HiveParserQBParseInfo qbp = qb.getParseInfo();
    String dest = qbp.getClauseNames().iterator().next();
    HiveParserASTNode obAST = qbp.getOrderByForClause(dest);
    if (obAST != null) {
        // 1. OB Expr sanity test
        // in strict mode, in the presence of order by, limit must be specified
        Integer limit = qb.getParseInfo().getDestLimit(dest);
        if (limit == null) {
            String mapRedMode = semanticAnalyzer.getConf().getVar(HiveConf.ConfVars.HIVEMAPREDMODE);
            boolean banLargeQuery = Boolean.parseBoolean(semanticAnalyzer.getConf().get("hive.strict.checks.large.query", "false"));
            if ("strict".equalsIgnoreCase(mapRedMode) || banLargeQuery) {
                throw new SemanticException(generateErrorMessage(obAST, "Order by-s without limit"));
            }
        }
        // 2. Walk through OB exprs and extract field collations and additional
        // virtual columns needed
        final List<RexNode> virtualCols = new ArrayList<>();
        final List<RelFieldCollation> fieldCollations = new ArrayList<>();
        int fieldIndex;
        List<Node> obASTExprLst = obAST.getChildren();
        HiveParserASTNode obASTExpr;
        HiveParserASTNode nullOrderASTExpr;
        List<Pair<HiveParserASTNode, TypeInfo>> vcASTAndType = new ArrayList<>();
        HiveParserRowResolver inputRR = relToRowResolver.get(srcRel);
        HiveParserRowResolver outputRR = new HiveParserRowResolver();
        HiveParserRexNodeConverter converter = new HiveParserRexNodeConverter(cluster, srcRel.getRowType(), relToHiveColNameCalcitePosMap.get(srcRel), 0, false, funcConverter);
        int numSrcFields = srcRel.getRowType().getFieldCount();
        for (Node node : obASTExprLst) {
            // 2.1 Convert AST Expr to ExprNode
            obASTExpr = (HiveParserASTNode) node;
            nullOrderASTExpr = (HiveParserASTNode) obASTExpr.getChild(0);
            HiveParserASTNode ref = (HiveParserASTNode) nullOrderASTExpr.getChild(0);
            Map<HiveParserASTNode, ExprNodeDesc> astToExprNodeDesc = semanticAnalyzer.genAllExprNodeDesc(ref, inputRR);
            ExprNodeDesc obExprNodeDesc = astToExprNodeDesc.get(ref);
            if (obExprNodeDesc == null) {
                throw new SemanticException("Invalid order by expression: " + obASTExpr.toString());
            }
            // 2.2 Convert ExprNode to RexNode
            RexNode rexNode = converter.convert(obExprNodeDesc).accept(funcConverter);
            // 2.3 Determine the index of the OB expr: either it is already an input column, or it is not present in the child (and hence we add a child Project Rel for it below)
            if (rexNode instanceof RexInputRef) {
                fieldIndex = ((RexInputRef) rexNode).getIndex();
            } else {
                fieldIndex = numSrcFields + virtualCols.size();
                virtualCols.add(rexNode);
                vcASTAndType.add(new Pair<>(ref, obExprNodeDesc.getTypeInfo()));
            }
            // 2.4 Determine the Direction of order by
            RelFieldCollation.Direction direction = RelFieldCollation.Direction.DESCENDING;
            if (obASTExpr.getType() == HiveASTParser.TOK_TABSORTCOLNAMEASC) {
                direction = RelFieldCollation.Direction.ASCENDING;
            }
            RelFieldCollation.NullDirection nullOrder;
            if (nullOrderASTExpr.getType() == HiveASTParser.TOK_NULLS_FIRST) {
                nullOrder = RelFieldCollation.NullDirection.FIRST;
            } else if (nullOrderASTExpr.getType() == HiveASTParser.TOK_NULLS_LAST) {
                nullOrder = RelFieldCollation.NullDirection.LAST;
            } else {
                throw new SemanticException("Unexpected null ordering option: " + nullOrderASTExpr.getType());
            }
            // 2.5 Add to field collations
            fieldCollations.add(new RelFieldCollation(fieldIndex, direction, nullOrder));
        }
        // 3. Add Child Project Rel if needed, Generate Output RR, input Sel Rel
        // for top constraining Sel
        RelNode obInputRel = srcRel;
        if (!virtualCols.isEmpty()) {
            List<RexNode> originalInputRefs = srcRel.getRowType().getFieldList().stream().map(input -> new RexInputRef(input.getIndex(), input.getType())).collect(Collectors.toList());
            HiveParserRowResolver obSyntheticProjectRR = new HiveParserRowResolver();
            if (!HiveParserRowResolver.add(obSyntheticProjectRR, inputRR)) {
                throw new SemanticException("Duplicates detected when adding columns to RR: see previous message");
            }
            int vcolPos = inputRR.getRowSchema().getSignature().size();
            for (Pair<HiveParserASTNode, TypeInfo> astTypePair : vcASTAndType) {
                obSyntheticProjectRR.putExpression(astTypePair.getKey(), new ColumnInfo(getColumnInternalName(vcolPos), astTypePair.getValue(), null, false));
                vcolPos++;
            }
            obInputRel = genSelectRelNode(CompositeList.of(originalInputRefs, virtualCols), obSyntheticProjectRR, srcRel);
            if (outermostOB) {
                if (!HiveParserRowResolver.add(outputRR, inputRR)) {
                    throw new SemanticException("Duplicates detected when adding columns to RR: see previous message");
                }
            } else {
                if (!HiveParserRowResolver.add(outputRR, obSyntheticProjectRR)) {
                    throw new SemanticException("Duplicates detected when adding columns to RR: see previous message");
                }
            }
            originalOBInput = srcRel;
        } else {
            if (!HiveParserRowResolver.add(outputRR, inputRR)) {
                throw new SemanticException("Duplicates detected when adding columns to RR: see previous message");
            }
        }
        // 4. Construct SortRel
        RelTraitSet traitSet = cluster.traitSet();
        RelCollation canonizedCollation = traitSet.canonize(RelCollationImpl.of(fieldCollations));
        sortRel = LogicalSort.create(obInputRel, canonizedCollation, null, null);
        // 5. Update the maps
        Map<String, Integer> hiveColNameCalcitePosMap = buildHiveToCalciteColumnMap(outputRR);
        relToRowResolver.put(sortRel, outputRR);
        relToHiveColNameCalcitePosMap.put(sortRel, hiveColNameCalcitePosMap);
    }
    return (new Pair<>(sortRel, originalOBInput));
}
Also used : HiveParserASTNode(org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode) Node(org.apache.hadoop.hive.ql.lib.Node) RexNode(org.apache.calcite.rex.RexNode) RelNode(org.apache.calcite.rel.RelNode) ArrayList(java.util.ArrayList) ColumnInfo(org.apache.hadoop.hive.ql.exec.ColumnInfo) RelTraitSet(org.apache.calcite.plan.RelTraitSet) HiveParserRowResolver(org.apache.flink.table.planner.delegation.hive.copy.HiveParserRowResolver) LogicalSort(org.apache.calcite.rel.logical.LogicalSort) Sort(org.apache.calcite.rel.core.Sort) ExprNodeDesc(org.apache.hadoop.hive.ql.plan.ExprNodeDesc) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException) Pair(org.apache.calcite.util.Pair) ObjectPair(org.apache.hadoop.hive.common.ObjectPair) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) RelCollation(org.apache.calcite.rel.RelCollation) HiveParserQBParseInfo(org.apache.flink.table.planner.delegation.hive.copy.HiveParserQBParseInfo) RelFieldCollation(org.apache.calcite.rel.RelFieldCollation) RexInputRef(org.apache.calcite.rex.RexInputRef)
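
Step 4 canonizes a RelCollation built from the collected field collations. A minimal standalone sketch of constructing one such collation with Calcite's public API (assumes calcite-core on the classpath):

import java.util.Collections;
import org.apache.calcite.rel.RelCollation;
import org.apache.calcite.rel.RelCollations;
import org.apache.calcite.rel.RelFieldCollation;

public class CollationSketch {
    // ORDER BY <field at fieldIndex> ASC NULLS FIRST, expressed as a Calcite collation.
    static RelCollation ascNullsFirst(int fieldIndex) {
        RelFieldCollation fc = new RelFieldCollation(
                fieldIndex,
                RelFieldCollation.Direction.ASCENDING,
                RelFieldCollation.NullDirection.FIRST);
        return RelCollations.of(Collections.singletonList(fc));
    }
}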

Aggregations

HiveParserASTNode (org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode): 38
LinkedHashMap (java.util.LinkedHashMap): 18
HashMap (java.util.HashMap): 15
ArrayList (java.util.ArrayList): 14
RelNode (org.apache.calcite.rel.RelNode): 14
SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException): 14
ValidationException (org.apache.flink.table.api.ValidationException): 10
UniqueConstraint (org.apache.flink.table.api.constraints.UniqueConstraint): 10
NotNullConstraint (org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.NotNullConstraint): 10
HiveParserRowResolver (org.apache.flink.table.planner.delegation.hive.copy.HiveParserRowResolver): 10
RexNode (org.apache.calcite.rex.RexNode): 9
ColumnInfo (org.apache.hadoop.hive.ql.exec.ColumnInfo): 9
ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc): 9
Map (java.util.Map): 8
HiveParserTypeCheckCtx (org.apache.flink.table.planner.delegation.hive.copy.HiveParserTypeCheckCtx): 6
Table (org.apache.hadoop.hive.ql.metadata.Table): 6
CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable): 5
ObjectIdentifier (org.apache.flink.table.catalog.ObjectIdentifier): 5
HiveParserQBParseInfo (org.apache.flink.table.planner.delegation.hive.copy.HiveParserQBParseInfo): 5
RelDataType (org.apache.calcite.rel.type.RelDataType): 4