Search in sources:

Example 11 with HiveParserRowResolver

use of org.apache.flink.table.planner.delegation.hive.copy.HiveParserRowResolver in project flink by apache.

the class HiveParserCalcitePlanner method getWindowRexAndType.

private Pair<RexNode, TypeInfo> getWindowRexAndType(HiveParserWindowingSpec.WindowExpressionSpec winExprSpec, RelNode srcRel) throws SemanticException {
    RexNode window;
    if (winExprSpec instanceof HiveParserWindowingSpec.WindowFunctionSpec) {
        HiveParserWindowingSpec.WindowFunctionSpec wFnSpec = (HiveParserWindowingSpec.WindowFunctionSpec) winExprSpec;
        HiveParserASTNode windowProjAst = wFnSpec.getExpression();
        // TODO: do we need to get to child?
        int wndSpecASTIndx = getWindowSpecIndx(windowProjAst);
        // 2. Get Hive Aggregate Info
        AggInfo hiveAggInfo = getHiveAggInfo(windowProjAst, wndSpecASTIndx - 1, relToRowResolver.get(srcRel), (HiveParserWindowingSpec.WindowFunctionSpec) winExprSpec, semanticAnalyzer, frameworkConfig, cluster);
        // 3. Get Calcite Return type for Agg Fn
        RelDataType calciteAggFnRetType = HiveParserUtils.toRelDataType(hiveAggInfo.getReturnType(), cluster.getTypeFactory());
        // 4. Convert Agg Fn args to Calcite
        Map<String, Integer> posMap = relToHiveColNameCalcitePosMap.get(srcRel);
        HiveParserRexNodeConverter converter = new HiveParserRexNodeConverter(cluster, srcRel.getRowType(), posMap, 0, false, funcConverter);
        List<RexNode> calciteAggFnArgs = new ArrayList<>();
        List<RelDataType> calciteAggFnArgTypes = new ArrayList<>();
        for (int i = 0; i < hiveAggInfo.getAggParams().size(); i++) {
            calciteAggFnArgs.add(converter.convert(hiveAggInfo.getAggParams().get(i)));
            calciteAggFnArgTypes.add(HiveParserUtils.toRelDataType(hiveAggInfo.getAggParams().get(i).getTypeInfo(), cluster.getTypeFactory()));
        }
        // 5. Get Calcite Agg Fn
        final SqlAggFunction calciteAggFn = HiveParserSqlFunctionConverter.getCalciteAggFn(hiveAggInfo.getUdfName(), hiveAggInfo.isDistinct(), calciteAggFnArgTypes, calciteAggFnRetType);
        // 6. Translate Window spec
        HiveParserRowResolver inputRR = relToRowResolver.get(srcRel);
        HiveParserWindowingSpec.WindowSpec wndSpec = ((HiveParserWindowingSpec.WindowFunctionSpec) winExprSpec).getWindowSpec();
        List<RexNode> partitionKeys = getPartitionKeys(wndSpec.getPartition(), converter, inputRR, new HiveParserTypeCheckCtx(inputRR, frameworkConfig, cluster), semanticAnalyzer);
        List<RexFieldCollation> orderKeys = getOrderKeys(wndSpec.getOrder(), converter, inputRR, new HiveParserTypeCheckCtx(inputRR, frameworkConfig, cluster), semanticAnalyzer);
        RexWindowBound lowerBound = getBound(wndSpec.getWindowFrame().getStart(), cluster);
        RexWindowBound upperBound = getBound(wndSpec.getWindowFrame().getEnd(), cluster);
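        // ROWS frames count physical rows, while RANGE frames are defined on order-key values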
        boolean isRows = wndSpec.getWindowFrame().getWindowType() == HiveParserWindowingSpec.WindowType.ROWS;
        window = HiveParserUtils.makeOver(cluster.getRexBuilder(), calciteAggFnRetType, calciteAggFn, calciteAggFnArgs, partitionKeys, orderKeys, lowerBound, upperBound, isRows, true, false, false, false);
        window = window.accept(funcConverter);
    } else {
        throw new SemanticException("Unsupported window Spec");
    }
    return new Pair<>(window, HiveParserTypeConverter.convert(window.getType()));
}
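
A minimal caller sketch (hypothetical; assumes winExprSpec and srcRel are in scope) of how the returned pair is consumed. Calcite's Pair implements Map.Entry, so the window expression and its Hive type come out via getKey()/getValue():

Pair<RexNode, TypeInfo> rexAndType = getWindowRexAndType(winExprSpec, srcRel);
// the OVER(...) call, already rewritten by funcConverter
RexNode windowRex = rexAndType.getKey();
// the Hive-side type converted back from the Calcite type
TypeInfo windowType = rexAndType.getValue();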

Example 12 with HiveParserRowResolver

use of org.apache.flink.table.planner.delegation.hive.copy.HiveParserRowResolver in project flink by apache.

the class HiveParserCalcitePlanner method genDistSortBy.

// Generate plan for sort by, cluster by and distribute by. This is basically the
// same as generating the order-by plan; should refactor to combine them.
private Pair<RelNode, RelNode> genDistSortBy(HiveParserQB qb, RelNode srcRel, boolean outermostOB) throws SemanticException {
    RelNode res = null;
    RelNode originalInput = null;
    HiveParserQBParseInfo qbp = qb.getParseInfo();
    String destClause = qbp.getClauseNames().iterator().next();
    HiveParserASTNode sortAST = qbp.getSortByForClause(destClause);
    HiveParserASTNode distAST = qbp.getDistributeByForClause(destClause);
    HiveParserASTNode clusterAST = qbp.getClusterByForClause(destClause);
    if (sortAST != null || distAST != null || clusterAST != null) {
        List<RexNode> virtualCols = new ArrayList<>();
        List<Pair<HiveParserASTNode, TypeInfo>> vcASTAndType = new ArrayList<>();
        List<RelFieldCollation> fieldCollations = new ArrayList<>();
        List<Integer> distKeys = new ArrayList<>();
        HiveParserRowResolver inputRR = relToRowResolver.get(srcRel);
        HiveParserRexNodeConverter converter = new HiveParserRexNodeConverter(cluster, srcRel.getRowType(), relToHiveColNameCalcitePosMap.get(srcRel), 0, false, funcConverter);
        int numSrcFields = srcRel.getRowType().getFieldCount();
        // handle cluster by
        if (clusterAST != null) {
            if (sortAST != null) {
                throw new SemanticException("Cannot have both CLUSTER BY and SORT BY");
            }
            if (distAST != null) {
                throw new SemanticException("Cannot have both CLUSTER BY and DISTRIBUTE BY");
            }
            for (Node node : clusterAST.getChildren()) {
                HiveParserASTNode childAST = (HiveParserASTNode) node;
                Map<HiveParserASTNode, ExprNodeDesc> astToExprNodeDesc = semanticAnalyzer.genAllExprNodeDesc(childAST, inputRR);
                ExprNodeDesc childNodeDesc = astToExprNodeDesc.get(childAST);
                if (childNodeDesc == null) {
                    throw new SemanticException("Invalid CLUSTER BY expression: " + childAST.toString());
                }
                RexNode childRexNode = converter.convert(childNodeDesc).accept(funcConverter);
                int fieldIndex;
                if (childRexNode instanceof RexInputRef) {
                    fieldIndex = ((RexInputRef) childRexNode).getIndex();
                } else {
                    fieldIndex = numSrcFields + virtualCols.size();
                    virtualCols.add(childRexNode);
                    vcASTAndType.add(new Pair<>(childAST, childNodeDesc.getTypeInfo()));
                }
                // cluster by doesn't support specifying ASC/DESC or NULLS FIRST/LAST, so use
                // default values
                fieldCollations.add(new RelFieldCollation(fieldIndex, RelFieldCollation.Direction.ASCENDING, RelFieldCollation.NullDirection.FIRST));
                distKeys.add(fieldIndex);
            }
        } else {
            // handle sort by
            if (sortAST != null) {
                for (Node node : sortAST.getChildren()) {
                    HiveParserASTNode childAST = (HiveParserASTNode) node;
                    HiveParserASTNode nullOrderAST = (HiveParserASTNode) childAST.getChild(0);
                    HiveParserASTNode fieldAST = (HiveParserASTNode) nullOrderAST.getChild(0);
                    Map<HiveParserASTNode, ExprNodeDesc> astToExprNodeDesc = semanticAnalyzer.genAllExprNodeDesc(fieldAST, inputRR);
                    ExprNodeDesc fieldNodeDesc = astToExprNodeDesc.get(fieldAST);
                    if (fieldNodeDesc == null) {
                        throw new SemanticException("Invalid sort by expression: " + fieldAST.toString());
                    }
                    RexNode childRexNode = converter.convert(fieldNodeDesc).accept(funcConverter);
                    int fieldIndex;
                    if (childRexNode instanceof RexInputRef) {
                        fieldIndex = ((RexInputRef) childRexNode).getIndex();
                    } else {
                        fieldIndex = numSrcFields + virtualCols.size();
                        virtualCols.add(childRexNode);
                        vcASTAndType.add(new Pair<>(childAST, fieldNodeDesc.getTypeInfo()));
                    }
                    RelFieldCollation.Direction direction = RelFieldCollation.Direction.DESCENDING;
                    if (childAST.getType() == HiveASTParser.TOK_TABSORTCOLNAMEASC) {
                        direction = RelFieldCollation.Direction.ASCENDING;
                    }
                    RelFieldCollation.NullDirection nullOrder;
                    if (nullOrderAST.getType() == HiveASTParser.TOK_NULLS_FIRST) {
                        nullOrder = RelFieldCollation.NullDirection.FIRST;
                    } else if (nullOrderAST.getType() == HiveASTParser.TOK_NULLS_LAST) {
                        nullOrder = RelFieldCollation.NullDirection.LAST;
                    } else {
                        throw new SemanticException("Unexpected null ordering option: " + nullOrderAST.getType());
                    }
                    fieldCollations.add(new RelFieldCollation(fieldIndex, direction, nullOrder));
                }
            }
            // handle distribute by
            if (distAST != null) {
                for (Node node : distAST.getChildren()) {
                    HiveParserASTNode childAST = (HiveParserASTNode) node;
                    Map<HiveParserASTNode, ExprNodeDesc> astToExprNodeDesc = semanticAnalyzer.genAllExprNodeDesc(childAST, inputRR);
                    ExprNodeDesc childNodeDesc = astToExprNodeDesc.get(childAST);
                    if (childNodeDesc == null) {
                        throw new SemanticException("Invalid DISTRIBUTE BY expression: " + childAST.toString());
                    }
                    RexNode childRexNode = converter.convert(childNodeDesc).accept(funcConverter);
                    int fieldIndex;
                    if (childRexNode instanceof RexInputRef) {
                        fieldIndex = ((RexInputRef) childRexNode).getIndex();
                    } else {
                        fieldIndex = numSrcFields + virtualCols.size();
                        virtualCols.add(childRexNode);
                        vcASTAndType.add(new Pair<>(childAST, childNodeDesc.getTypeInfo()));
                    }
                    distKeys.add(fieldIndex);
                }
            }
        }
        Preconditions.checkState(!fieldCollations.isEmpty() || !distKeys.isEmpty(), "Both field collations and dist keys are empty");
        // add child SEL if needed
        RelNode realInput = srcRel;
        HiveParserRowResolver outputRR = new HiveParserRowResolver();
        if (!virtualCols.isEmpty()) {
            List<RexNode> originalInputRefs = srcRel.getRowType().getFieldList().stream().map(input -> new RexInputRef(input.getIndex(), input.getType())).collect(Collectors.toList());
            HiveParserRowResolver addedProjectRR = new HiveParserRowResolver();
            if (!HiveParserRowResolver.add(addedProjectRR, inputRR)) {
                throw new SemanticException("Duplicates detected when adding columns to RR: see previous message");
            }
            int vColPos = inputRR.getRowSchema().getSignature().size();
            for (Pair<HiveParserASTNode, TypeInfo> astTypePair : vcASTAndType) {
                addedProjectRR.putExpression(astTypePair.getKey(), new ColumnInfo(getColumnInternalName(vColPos), astTypePair.getValue(), null, false));
                vColPos++;
            }
            realInput = genSelectRelNode(CompositeList.of(originalInputRefs, virtualCols), addedProjectRR, srcRel);
            if (outermostOB) {
                if (!HiveParserRowResolver.add(outputRR, inputRR)) {
                    throw new SemanticException("Duplicates detected when adding columns to RR: see previous message");
                }
            } else {
                if (!HiveParserRowResolver.add(outputRR, addedProjectRR)) {
                    throw new SemanticException("Duplicates detected when adding columns to RR: see previous message");
                }
            }
            originalInput = srcRel;
        } else {
            if (!HiveParserRowResolver.add(outputRR, inputRR)) {
                throw new SemanticException("Duplicates detected when adding columns to RR: see previous message");
            }
        }
        // create rel node
        RelTraitSet traitSet = cluster.traitSet();
        RelCollation canonizedCollation = traitSet.canonize(RelCollationImpl.of(fieldCollations));
        res = LogicalDistribution.create(realInput, canonizedCollation, distKeys);
        Map<String, Integer> hiveColNameCalcitePosMap = buildHiveToCalciteColumnMap(outputRR);
        relToRowResolver.put(res, outputRR);
        relToHiveColNameCalcitePosMap.put(res, hiveColNameCalcitePosMap);
    }
    return (new Pair<>(res, originalInput));
}
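
For reference, a standalone sketch (an assumed helper, not part of the Flink source) of the collation mapping applied in the loops above: SORT BY defaults to DESCENDING unless TOK_TABSORTCOLNAMEASC is present, while CLUSTER BY always pins ASCENDING with NULLS FIRST:

private static RelFieldCollation sortByCollation(int fieldIndex, boolean asc, boolean nullsFirst) {
    // maps the parsed ASC/DESC and NULLS FIRST/LAST tokens onto Calcite's collation model
    return new RelFieldCollation(
            fieldIndex,
            asc ? RelFieldCollation.Direction.ASCENDING : RelFieldCollation.Direction.DESCENDING,
            nullsFirst ? RelFieldCollation.NullDirection.FIRST : RelFieldCollation.NullDirection.LAST);
}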

Example 13 with HiveParserRowResolver

use of org.apache.flink.table.planner.delegation.hive.copy.HiveParserRowResolver in project flink by apache.

the class HiveParserCalcitePlanner method genSelectLogicalPlan.

// NOTE: there can only be one select clause since we don't handle multi-destination inserts.
private RelNode genSelectLogicalPlan(HiveParserQB qb, RelNode srcRel, RelNode starSrcRel, Map<String, Integer> outerNameToPos, HiveParserRowResolver outerRR) throws SemanticException {
    // 0. Generate a Select Node for Windowing
    // Exclude the newly-generated select columns from */etc. resolution.
    HashSet<ColumnInfo> excludedColumns = new HashSet<>();
    RelNode selForWindow = genSelectForWindowing(qb, srcRel, excludedColumns);
    srcRel = (selForWindow == null) ? srcRel : selForWindow;
    ArrayList<ExprNodeDesc> exprNodeDescs = new ArrayList<>();
    // 1. Get Select Expression List
    HiveParserQBParseInfo qbp = qb.getParseInfo();
    String selClauseName = qbp.getClauseNames().iterator().next();
    HiveParserASTNode selExprList = qbp.getSelForClause(selClauseName);
    // make sure that if there is a subquery, it is a top-level expression
    HiveParserSubQueryUtils.checkForTopLevelSubqueries(selExprList);
    final boolean cubeRollupGrpSetPresent = !qbp.getDestRollups().isEmpty() || !qbp.getDestGroupingSets().isEmpty() || !qbp.getDestCubes().isEmpty();
    // 3. Query Hints
    int posn = 0;
    boolean hintPresent = selExprList.getChild(0).getType() == HiveASTParser.QUERY_HINT;
    if (hintPresent) {
        posn++;
    }
    // 4. Bailout if select involves Transform
    boolean isInTransform = selExprList.getChild(posn).getChild(0).getType() == HiveASTParser.TOK_TRANSFORM;
    if (isInTransform) {
        String msg = "SELECT TRANSFORM is currently not supported in CBO, turn off cbo to use TRANSFORM.";
        throw new SemanticException(msg);
    }
    // 2. Row resolvers for input and output
    HiveParserRowResolver outRR = new HiveParserRowResolver();
    Integer pos = 0;
    // TODO: will this also fix windowing? try
    HiveParserRowResolver inputRR = relToRowResolver.get(srcRel), starRR = inputRR;
    if (starSrcRel != null) {
        starRR = relToRowResolver.get(starSrcRel);
    }
    // 5. Check if select involves UDTF
    String udtfTableAlias = null;
    SqlOperator udtfOperator = null;
    String genericUDTFName = null;
    ArrayList<String> udtfColAliases = new ArrayList<>();
    HiveParserASTNode expr = (HiveParserASTNode) selExprList.getChild(posn).getChild(0);
    int exprType = expr.getType();
    if (exprType == HiveASTParser.TOK_FUNCTION || exprType == HiveASTParser.TOK_FUNCTIONSTAR) {
        String funcName = HiveParserTypeCheckProcFactory.DefaultExprProcessor.getFunctionText(expr, true);
        // we can't just try to get the table function here because the operator table
        // throws an exception if it's not a table function
        SqlOperator sqlOperator = HiveParserUtils.getAnySqlOperator(funcName, frameworkConfig.getOperatorTable());
        if (HiveParserUtils.isUDTF(sqlOperator)) {
            LOG.debug("Found UDTF " + funcName);
            udtfOperator = sqlOperator;
            genericUDTFName = funcName;
            if (!HiveParserUtils.isNative(sqlOperator)) {
                semanticAnalyzer.unparseTranslator.addIdentifierTranslation((HiveParserASTNode) expr.getChild(0));
            }
            if (exprType == HiveASTParser.TOK_FUNCTIONSTAR) {
                semanticAnalyzer.genColListRegex(".*", null, (HiveParserASTNode) expr.getChild(0), exprNodeDescs, null, inputRR, starRR, pos, outRR, qb.getAliases(), false);
            }
        }
    }
    if (udtfOperator != null) {
        // Only support a single expression when it's a UDTF
        if (selExprList.getChildCount() > 1) {
            throw new SemanticException(generateErrorMessage((HiveParserASTNode) selExprList.getChild(1), ErrorMsg.UDTF_MULTIPLE_EXPR.getMsg()));
        }
        HiveParserASTNode selExpr = (HiveParserASTNode) selExprList.getChild(posn);
        // column names can also be inferred from the result of the UDTF
        for (int i = 1; i < selExpr.getChildCount(); i++) {
            HiveParserASTNode selExprChild = (HiveParserASTNode) selExpr.getChild(i);
            switch(selExprChild.getType()) {
                case HiveASTParser.Identifier:
                    udtfColAliases.add(unescapeIdentifier(selExprChild.getText().toLowerCase()));
                    semanticAnalyzer.unparseTranslator.addIdentifierTranslation(selExprChild);
                    break;
                case HiveASTParser.TOK_TABALIAS:
                    assert (selExprChild.getChildCount() == 1);
                    udtfTableAlias = unescapeIdentifier(selExprChild.getChild(0).getText());
                    qb.addAlias(udtfTableAlias);
                    semanticAnalyzer.unparseTranslator.addIdentifierTranslation((HiveParserASTNode) selExprChild.getChild(0));
                    break;
                default:
                    throw new SemanticException("Find invalid token type " + selExprChild.getType() + " in UDTF.");
            }
        }
        LOG.debug("UDTF table alias is " + udtfTableAlias);
        LOG.debug("UDTF col aliases are " + udtfColAliases);
    }
    // 6. Iterate over all expression (after SELECT)
    HiveParserASTNode exprList;
    if (udtfOperator != null) {
        exprList = expr;
    } else {
        exprList = selExprList;
    }
    // For UDTF's, skip the function name to get the expressions
    int startPos = udtfOperator != null ? posn + 1 : posn;
    // track the col aliases provided by user
    List<String> colAliases = new ArrayList<>();
    for (int i = startPos; i < exprList.getChildCount(); ++i) {
        colAliases.add(null);
        // 6.1 child can be EXPR AS ALIAS, or EXPR.
        HiveParserASTNode child = (HiveParserASTNode) exprList.getChild(i);
        boolean hasAsClause = child.getChildCount() == 2;
        // 6.2 EXPR AS (ALIAS,...) parses, but is only allowed for UDTFs, whose ASTs are
        // slightly different.
        if (udtfOperator == null && child.getChildCount() > 2) {
            throw new SemanticException(generateErrorMessage((HiveParserASTNode) child.getChild(2), ErrorMsg.INVALID_AS.getMsg()));
        }
        String tabAlias;
        String colAlias;
        if (udtfOperator != null) {
            tabAlias = null;
            colAlias = semanticAnalyzer.getAutogenColAliasPrfxLbl() + i;
            expr = child;
        } else {
            // 6.3 Get rid of TOK_SELEXPR
            expr = (HiveParserASTNode) child.getChild(0);
            String[] colRef = HiveParserUtils.getColAlias(child, semanticAnalyzer.getAutogenColAliasPrfxLbl(), inputRR, semanticAnalyzer.autogenColAliasPrfxIncludeFuncName(), i);
            tabAlias = colRef[0];
            colAlias = colRef[1];
            if (hasAsClause) {
                colAliases.set(colAliases.size() - 1, colAlias);
                semanticAnalyzer.unparseTranslator.addIdentifierTranslation((HiveParserASTNode) child.getChild(1));
            }
        }
        Map<HiveParserASTNode, RelNode> subQueryToRelNode = new HashMap<>();
        boolean isSubQuery = genSubQueryRelNode(qb, expr, srcRel, false, subQueryToRelNode);
        if (isSubQuery) {
            ExprNodeDesc subQueryDesc = semanticAnalyzer.genExprNodeDesc(expr, relToRowResolver.get(srcRel), outerRR, subQueryToRelNode, false);
            exprNodeDescs.add(subQueryDesc);
            ColumnInfo colInfo = new ColumnInfo(getColumnInternalName(pos), subQueryDesc.getWritableObjectInspector(), tabAlias, false);
            if (!outRR.putWithCheck(tabAlias, colAlias, null, colInfo)) {
                throw new SemanticException("Cannot add column to RR: " + tabAlias + "." + colAlias + " => " + colInfo + " due to duplication, see previous warnings");
            }
        } else {
            // 6.4 Build ExprNode corresponding to columns
            if (expr.getType() == HiveASTParser.TOK_ALLCOLREF) {
                pos = semanticAnalyzer.genColListRegex(".*", expr.getChildCount() == 0 ? null : HiveParserBaseSemanticAnalyzer.getUnescapedName((HiveParserASTNode) expr.getChild(0)).toLowerCase(), expr, exprNodeDescs, excludedColumns, inputRR, starRR, pos, outRR, qb.getAliases(), false);
            } else if (expr.getType() == HiveASTParser.TOK_TABLE_OR_COL && !hasAsClause && !inputRR.getIsExprResolver() && HiveParserUtils.isRegex(unescapeIdentifier(expr.getChild(0).getText()), semanticAnalyzer.getConf())) {
                // In case the expression is a regex COL. This can only happen without AS clause
                // We don't allow this for ExprResolver - the Group By case
                pos = semanticAnalyzer.genColListRegex(unescapeIdentifier(expr.getChild(0).getText()), null, expr, exprNodeDescs, excludedColumns, inputRR, starRR, pos, outRR, qb.getAliases(), true);
            } else if (expr.getType() == HiveASTParser.DOT && expr.getChild(0).getType() == HiveASTParser.TOK_TABLE_OR_COL && inputRR.hasTableAlias(unescapeIdentifier(expr.getChild(0).getChild(0).getText().toLowerCase())) && !hasAsClause && !inputRR.getIsExprResolver() && HiveParserUtils.isRegex(unescapeIdentifier(expr.getChild(1).getText()), semanticAnalyzer.getConf())) {
                // In case the expression is TABLE.COL (col can be regex). This can only happen
                // without AS clause
                // We don't allow this for ExprResolver - the Group By case
                pos = semanticAnalyzer.genColListRegex(unescapeIdentifier(expr.getChild(1).getText()), unescapeIdentifier(expr.getChild(0).getChild(0).getText().toLowerCase()), expr, exprNodeDescs, excludedColumns, inputRR, starRR, pos, outRR, qb.getAliases(), false);
            } else if (HiveASTParseUtils.containsTokenOfType(expr, HiveASTParser.TOK_FUNCTIONDI) && !(srcRel instanceof Aggregate)) {
                // Likely a malformed query, e.g. select hash(distinct c1) from t1;
                throw new SemanticException("Distinct without an aggregation.");
            } else {
                // Case when this is an expression
                HiveParserTypeCheckCtx typeCheckCtx = new HiveParserTypeCheckCtx(inputRR, frameworkConfig, cluster);
                // We allow stateful functions in the SELECT list (but nowhere else)
                typeCheckCtx.setAllowStatefulFunctions(true);
                if (!qbp.getDestToGroupBy().isEmpty()) {
                    // Special handling of grouping function
                    expr = rewriteGroupingFunctionAST(getGroupByForClause(qbp, selClauseName), expr, !cubeRollupGrpSetPresent);
                }
                ExprNodeDesc exprDesc = semanticAnalyzer.genExprNodeDesc(expr, inputRR, typeCheckCtx);
                String recommended = semanticAnalyzer.recommendName(exprDesc, colAlias);
                if (recommended != null && outRR.get(null, recommended) == null) {
                    colAlias = recommended;
                }
                exprNodeDescs.add(exprDesc);
                ColumnInfo colInfo = new ColumnInfo(getColumnInternalName(pos), exprDesc.getWritableObjectInspector(), tabAlias, false);
                colInfo.setSkewedCol(exprDesc instanceof ExprNodeColumnDesc && ((ExprNodeColumnDesc) exprDesc).isSkewedCol());
                // Hive errors out in case of duplication. We allow it and see what happens.
                outRR.put(tabAlias, colAlias, colInfo);
                if (exprDesc instanceof ExprNodeColumnDesc) {
                    ExprNodeColumnDesc colExp = (ExprNodeColumnDesc) exprDesc;
                    String[] altMapping = inputRR.getAlternateMappings(colExp.getColumn());
                    if (altMapping != null) {
                        // TODO: this can overwrite the mapping. Should this be allowed?
                        outRR.put(altMapping[0], altMapping[1], colInfo);
                    }
                }
                pos++;
            }
        }
    }
    // 7. Convert Hive projections to Calcite
    List<RexNode> calciteColLst = new ArrayList<>();
    HiveParserRexNodeConverter rexNodeConverter = new HiveParserRexNodeConverter(cluster, srcRel.getRowType(), outerNameToPos, buildHiveColNameToInputPosMap(exprNodeDescs, inputRR), relToRowResolver.get(srcRel), outerRR, 0, false, subqueryId, funcConverter);
    for (ExprNodeDesc colExpr : exprNodeDescs) {
        RexNode calciteCol = rexNodeConverter.convert(colExpr);
        calciteCol = convertNullLiteral(calciteCol).accept(funcConverter);
        calciteColLst.add(calciteCol);
    }
    // 8. Build Calcite Rel
    RelNode res;
    if (udtfOperator != null) {
        // The basic idea for CBO support of UDTF is to treat UDTF as a special project.
        res = genUDTFPlan(udtfOperator, genericUDTFName, udtfTableAlias, udtfColAliases, qb, calciteColLst, outRR.getColumnInfos(), srcRel, true, false);
    } else {
        // for an identity project in a subquery, reuse the input rel directly; adding the
        // project confuses later rewrites and thus introduces an unnecessary agg node.
        if (HiveParserUtils.isIdentityProject(srcRel, calciteColLst, colAliases) && outerRR != null) {
            res = srcRel;
        } else {
            res = genSelectRelNode(calciteColLst, outRR, srcRel);
        }
    }
    // 9. Handle select distinct as GBY if there exist windowing functions
    if (selForWindow != null && selExprList.getToken().getType() == HiveASTParser.TOK_SELECTDI) {
        ImmutableBitSet groupSet = ImmutableBitSet.range(res.getRowType().getFieldList().size());
        res = LogicalAggregate.create(res, groupSet, Collections.emptyList(), Collections.emptyList());
        HiveParserRowResolver groupByOutputRowResolver = new HiveParserRowResolver();
        for (int i = 0; i < outRR.getColumnInfos().size(); i++) {
            ColumnInfo colInfo = outRR.getColumnInfos().get(i);
            ColumnInfo newColInfo = new ColumnInfo(colInfo.getInternalName(), colInfo.getType(), colInfo.getTabAlias(), colInfo.getIsVirtualCol());
            groupByOutputRowResolver.put(colInfo.getTabAlias(), colInfo.getAlias(), newColInfo);
        }
        relToHiveColNameCalcitePosMap.put(res, buildHiveToCalciteColumnMap(groupByOutputRowResolver));
        relToRowResolver.put(res, groupByOutputRowResolver);
    }
    return res;
}
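
The step-9 rewrite in isolation: SELECT DISTINCT becomes a LogicalAggregate whose group set spans every output column and which carries no aggregate calls, so the node purely deduplicates rows. A minimal sketch (input stands for any RelNode; same create signature as above):

ImmutableBitSet allColumns = ImmutableBitSet.range(input.getRowType().getFieldCount());
// grouping on every column with no agg calls is exactly DISTINCT over the input
RelNode distinct = LogicalAggregate.create(input, allColumns, Collections.emptyList(), Collections.emptyList());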

Example 14 with HiveParserRowResolver

use of org.apache.flink.table.planner.delegation.hive.copy.HiveParserRowResolver in project flink by apache.

the class HiveParserCalcitePlanner method genTableLogicalPlan.

private RelNode genTableLogicalPlan(String tableAlias, HiveParserQB qb) throws SemanticException {
    HiveParserRowResolver rowResolver = new HiveParserRowResolver();
    try {
        // 1. bail if a table sample is specified (not supported on the CBO path), or
        // 2. if returnpath is on and hivetestmode is on
        if (qb.getParseInfo().needTableSample(tableAlias) || semanticAnalyzer.getNameToSplitSampleMap().containsKey(tableAlias) || Boolean.parseBoolean(semanticAnalyzer.getConf().get("hive.cbo.returnpath.hiveop", "false")) && semanticAnalyzer.getConf().getBoolVar(HiveConf.ConfVars.HIVETESTMODE)) {
            String msg = String.format("Table Sample specified for %s." + " Currently we don't support Table Sample clauses in CBO," + " turn off cbo for queries on tableSamples.", tableAlias);
            LOG.debug(msg);
            throw new SemanticException(msg);
        }
        // 2. Get Table Metadata
        Table table = qb.getMetaData().getSrcForAlias(tableAlias);
        if (table.isTemporary()) {
            // Hive creates a temp table for VALUES; we need to convert it to LogicalValues
            RelNode values = genValues(tableAlias, table, rowResolver, cluster, getQB().getValuesTableToData().get(tableAlias));
            relToRowResolver.put(values, rowResolver);
            relToHiveColNameCalcitePosMap.put(values, buildHiveToCalciteColumnMap(rowResolver));
            return values;
        } else {
            // 3. Get Table Logical Schema (Row Type)
            // NOTE: Table logical schema = Non Partition Cols + Partition Cols + Virtual Cols
            // 3.1 Add Column info for non partition cols (Object Inspector fields)
            StructObjectInspector rowObjectInspector = (StructObjectInspector) table.getDeserializer().getObjectInspector();
            List<? extends StructField> fields = rowObjectInspector.getAllStructFieldRefs();
            ColumnInfo colInfo;
            String colName;
            for (StructField field : fields) {
                colName = field.getFieldName();
                colInfo = new ColumnInfo(field.getFieldName(), TypeInfoUtils.getTypeInfoFromObjectInspector(field.getFieldObjectInspector()), tableAlias, false);
                colInfo.setSkewedCol(HiveParserUtils.isSkewedCol(tableAlias, qb, colName));
                rowResolver.put(tableAlias, colName, colInfo);
            }
            // 3.2 Add column info corresponding to partition columns
            for (FieldSchema partCol : table.getPartCols()) {
                colName = partCol.getName();
                colInfo = new ColumnInfo(colName, TypeInfoFactory.getPrimitiveTypeInfo(partCol.getType()), tableAlias, true);
                rowResolver.put(tableAlias, colName, colInfo);
            }
            final TableType tableType = obtainTableType(table);
            Preconditions.checkArgument(tableType == TableType.NATIVE, "Only native tables are supported");
            // Build Hive Table Scan Rel
            RelNode tableRel = catalogReader.getTable(Arrays.asList(catalogManager.getCurrentCatalog(), table.getDbName(), table.getTableName())).toRel(ViewExpanders.toRelContext(flinkPlanner.createToRelContext(), cluster));
            // 6. Add Schema(RR) to RelNode-Schema map
            Map<String, Integer> hiveToCalciteColMap = buildHiveToCalciteColumnMap(rowResolver);
            relToRowResolver.put(tableRel, rowResolver);
            relToHiveColNameCalcitePosMap.put(tableRel, hiveToCalciteColMap);
            return tableRel;
        }
    } catch (Exception e) {
        if (e instanceof SemanticException) {
            throw (SemanticException) e;
        } else {
            throw (new RuntimeException(e));
        }
    }
}
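
Condensed, the schema assembly in steps 3.1 and 3.2 boils down to two loops (a sketch over the same objects, with the skewed-column bookkeeping omitted; the last ColumnInfo argument marks partition columns as virtual):

// non-partition columns come from the deserializer's ObjectInspector
for (StructField field : rowObjectInspector.getAllStructFieldRefs()) {
    rowResolver.put(tableAlias, field.getFieldName(),
            new ColumnInfo(field.getFieldName(),
                    TypeInfoUtils.getTypeInfoFromObjectInspector(field.getFieldObjectInspector()),
                    tableAlias, false));
}
// partition columns come from the table metadata and are flagged as virtual
for (FieldSchema partCol : table.getPartCols()) {
    rowResolver.put(tableAlias, partCol.getName(),
            new ColumnInfo(partCol.getName(),
                    TypeInfoFactory.getPrimitiveTypeInfo(partCol.getType()),
                    tableAlias, true));
}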

Example 15 with HiveParserRowResolver

use of org.apache.flink.table.planner.delegation.hive.copy.HiveParserRowResolver in project flink by apache.

the class HiveParserCalcitePlanner method genLogicalPlan.

private RelNode genLogicalPlan(HiveParserQB qb, boolean outerMostQB, Map<String, Integer> outerNameToPosMap, HiveParserRowResolver outerRR) throws SemanticException {
    RelNode res;
    // First generate all the opInfos for the elements in the from clause
    Map<String, RelNode> aliasToRel = new HashMap<>();
    // 0. Check if we can handle the SubQuery;
    // canHandleQbForCbo returns null if the query can be handled.
    String reason = HiveParserUtils.canHandleQbForCbo(semanticAnalyzer.getQueryProperties());
    if (reason != null) {
        String msg = "CBO can not handle Sub Query" + " because it: " + reason;
        throw new SemanticException(msg);
    }
    // 1.1. Recurse over the subqueries to fill the subquery part of the plan
    for (String subqAlias : qb.getSubqAliases()) {
        HiveParserQBExpr qbexpr = qb.getSubqForAlias(subqAlias);
        RelNode relNode = genLogicalPlan(qbexpr);
        aliasToRel.put(subqAlias, relNode);
        if (qb.getViewToTabSchema().containsKey(subqAlias)) {
            if (!(relNode instanceof Project)) {
                throw new SemanticException("View " + subqAlias + " is corresponding to " + relNode.toString() + ", rather than a Project.");
            }
        }
    }
    // 1.2 Recurse over all the source tables
    for (String tableAlias : qb.getTabAliases()) {
        RelNode op = genTableLogicalPlan(tableAlias, qb);
        aliasToRel.put(tableAlias, op);
    }
    if (aliasToRel.isEmpty()) {
        RelNode dummySrc = LogicalValues.createOneRow(cluster);
        aliasToRel.put(HiveParserSemanticAnalyzer.DUMMY_TABLE, dummySrc);
        HiveParserRowResolver dummyRR = new HiveParserRowResolver();
        dummyRR.put(HiveParserSemanticAnalyzer.DUMMY_TABLE, "dummy_col", new ColumnInfo(getColumnInternalName(0), TypeInfoFactory.intTypeInfo, HiveParserSemanticAnalyzer.DUMMY_TABLE, false));
        relToRowResolver.put(dummySrc, dummyRR);
        relToHiveColNameCalcitePosMap.put(dummySrc, buildHiveToCalciteColumnMap(dummyRR));
    }
    if (!qb.getParseInfo().getAliasToLateralViews().isEmpty()) {
        // process lateral views
        res = genLateralViewPlan(qb, aliasToRel);
    } else if (qb.getParseInfo().getJoinExpr() != null) {
        // 1.3 process join
        res = genJoinLogicalPlan(qb.getParseInfo().getJoinExpr(), aliasToRel);
    } else {
        // If no join then there should only be either 1 TS or 1 SubQuery
        res = aliasToRel.values().iterator().next();
    }
    // 2. Build Rel for where Clause
    RelNode filterRel = genFilterLogicalPlan(qb, res, outerNameToPosMap, outerRR);
    res = (filterRel == null) ? res : filterRel;
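    // remember the rel before group-by/select: '*' in the select list resolves against this schema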
    RelNode starSrcRel = res;
    // 3. Build Rel for GB Clause
    RelNode gbRel = genGBLogicalPlan(qb, res);
    res = gbRel == null ? res : gbRel;
    // 4. Build Rel for GB Having Clause
    RelNode gbHavingRel = genGBHavingLogicalPlan(qb, res);
    res = gbHavingRel == null ? res : gbHavingRel;
    // 5. Build Rel for Select Clause
    RelNode selectRel = genSelectLogicalPlan(qb, res, starSrcRel, outerNameToPosMap, outerRR);
    res = selectRel == null ? res : selectRel;
    // 6. Build Rel for OB Clause
    Pair<Sort, RelNode> obAndTopProj = genOBLogicalPlan(qb, res, outerMostQB);
    Sort orderRel = obAndTopProj.getKey();
    RelNode topConstrainingProjRel = obAndTopProj.getValue();
    res = orderRel == null ? res : orderRel;
    // Build Rel for SortBy/ClusterBy/DistributeBy. It can happen only if we don't have OrderBy.
    if (orderRel == null) {
        Pair<RelNode, RelNode> distAndTopProj = genDistSortBy(qb, res, outerMostQB);
        RelNode distRel = distAndTopProj.getKey();
        topConstrainingProjRel = distAndTopProj.getValue();
        res = distRel == null ? res : distRel;
    }
    // 7. Build Rel for Limit Clause
    Sort limitRel = genLimitLogicalPlan(qb, res);
    if (limitRel != null) {
        if (orderRel != null) {
            // merge limit into the order-by node
            HiveParserRowResolver orderRR = relToRowResolver.remove(orderRel);
            Map<String, Integer> orderColNameToPos = relToHiveColNameCalcitePosMap.remove(orderRel);
            res = LogicalSort.create(orderRel.getInput(), orderRel.collation, limitRel.offset, limitRel.fetch);
            relToRowResolver.put(res, orderRR);
            relToHiveColNameCalcitePosMap.put(res, orderColNameToPos);
            relToRowResolver.remove(limitRel);
            relToHiveColNameCalcitePosMap.remove(limitRel);
        } else {
            res = limitRel;
        }
    }
    // 8. Introduce top constraining select if needed.
    if (topConstrainingProjRel != null) {
        List<RexNode> originalInputRefs = topConstrainingProjRel.getRowType().getFieldList().stream().map(input -> new RexInputRef(input.getIndex(), input.getType())).collect(Collectors.toList());
        HiveParserRowResolver topConstrainingProjRR = new HiveParserRowResolver();
        if (!HiveParserRowResolver.add(topConstrainingProjRR, relToRowResolver.get(topConstrainingProjRel))) {
            LOG.warn("Duplicates detected when adding columns to RR: see previous message");
        }
        res = genSelectRelNode(originalInputRefs, topConstrainingProjRR, res);
    }
    // TODO: cleanup this
    if (qb.getParseInfo().getAlias() != null) {
        HiveParserRowResolver rr = relToRowResolver.get(res);
        HiveParserRowResolver newRR = new HiveParserRowResolver();
        String alias = qb.getParseInfo().getAlias();
        for (ColumnInfo colInfo : rr.getColumnInfos()) {
            String name = colInfo.getInternalName();
            String[] tmp = rr.reverseLookup(name);
            if ("".equals(tmp[0]) || tmp[1] == null) {
                // ast expression is not a valid column name for table
                tmp[1] = colInfo.getInternalName();
            }
            ColumnInfo newColInfo = new ColumnInfo(colInfo);
            newColInfo.setTabAlias(alias);
            newRR.put(alias, tmp[1], newColInfo);
        }
        relToRowResolver.put(res, newRR);
        relToHiveColNameCalcitePosMap.put(res, buildHiveToCalciteColumnMap(newRR));
    }
    if (LOG.isDebugEnabled()) {
        LOG.debug("Created Plan for Query Block " + qb.getId());
    }
    semanticAnalyzer.setQB(qb);
    return res;
}
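
Step 7's merge in isolation: when both ORDER BY and LIMIT are present, a single Sort rel carries the collation plus offset/fetch instead of stacking two sort nodes. A sketch reusing the same public Sort fields and LogicalSort.create signature as above:

// drop the sort-only node and recreate it with the limit's offset/fetch folded in
RelNode merged = LogicalSort.create(
        orderRel.getInput(), orderRel.collation, limitRel.offset, limitRel.fetch);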
Also used : DataType(org.apache.flink.table.types.DataType) Arrays(java.util.Arrays) LogicalSort(org.apache.calcite.rel.logical.LogicalSort) FlinkPlannerImpl(org.apache.flink.table.planner.calcite.FlinkPlannerImpl) JoinType(org.apache.hadoop.hive.ql.parse.JoinType) HiveParserBaseSemanticAnalyzer.getGroupByForClause(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.getGroupByForClause) HiveParserContext(org.apache.flink.table.planner.delegation.hive.copy.HiveParserContext) FunctionRegistry(org.apache.hadoop.hive.ql.exec.FunctionRegistry) RelCollationImpl(org.apache.calcite.rel.RelCollationImpl) BigDecimal(java.math.BigDecimal) HiveParserBaseSemanticAnalyzer.unescapeIdentifier(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.unescapeIdentifier) HiveInspectors(org.apache.flink.table.functions.hive.conversion.HiveInspectors) CorrelationId(org.apache.calcite.rel.core.CorrelationId) HiveParserASTNode(org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode) Map(java.util.Map) HiveParserASTBuilder(org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTBuilder) RelTraitSet(org.apache.calcite.plan.RelTraitSet) RexWindowBound(org.apache.calcite.rex.RexWindowBound) ImmutableBitSet(org.apache.calcite.util.ImmutableBitSet) ExprNodeDesc(org.apache.hadoop.hive.ql.plan.ExprNodeDesc) HiveParserSubQueryUtils(org.apache.flink.table.planner.delegation.hive.copy.HiveParserSubQueryUtils) HiveParserBaseSemanticAnalyzer.getHiveAggInfo(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.getHiveAggInfo) HiveTypeUtil(org.apache.flink.table.catalog.hive.util.HiveTypeUtil) HiveParserBaseSemanticAnalyzer(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer) org.apache.hadoop.hive.serde.serdeConstants(org.apache.hadoop.hive.serde.serdeConstants) Set(java.util.Set) HiveParserUtils.generateErrorMessage(org.apache.flink.table.planner.delegation.hive.HiveParserUtils.generateErrorMessage) RelFieldCollation(org.apache.calcite.rel.RelFieldCollation) HiveASTParseUtils(org.apache.flink.table.planner.delegation.hive.copy.HiveASTParseUtils) HiveParserBaseSemanticAnalyzer.getGroupingSetsForCube(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.getGroupingSetsForCube) SqlStdOperatorTable(org.apache.calcite.sql.fun.SqlStdOperatorTable) HiveParserPreCboCtx(org.apache.flink.table.planner.delegation.hive.copy.HiveParserPreCboCtx) RelCollation(org.apache.calcite.rel.RelCollation) HiveParserBaseSemanticAnalyzer.getGroupingSets(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.getGroupingSets) HiveParserBaseSemanticAnalyzer.getPartitionKeys(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.getPartitionKeys) HiveParserBaseSemanticAnalyzer.removeOBInSubQuery(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.removeOBInSubQuery) HiveASTParser(org.apache.flink.table.planner.delegation.hive.parse.HiveASTParser) ErrorMsg(org.apache.hadoop.hive.ql.ErrorMsg) RexCall(org.apache.calcite.rex.RexCall) StructField(org.apache.hadoop.hive.serde2.objectinspector.StructField) TypeInfoUtils(org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils) HiveShim(org.apache.flink.table.catalog.hive.client.HiveShim) HiveParserJoinTypeCheckCtx(org.apache.flink.table.planner.delegation.hive.copy.HiveParserJoinTypeCheckCtx) ArrayList(java.util.ArrayList) LinkedHashMap(java.util.LinkedHashMap) 
AggInfo(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.AggInfo) HiveParserBaseSemanticAnalyzer.getCorrelationUse(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.getCorrelationUse) HiveASTParseDriver(org.apache.flink.table.planner.delegation.hive.copy.HiveASTParseDriver) RexFieldCollation(org.apache.calcite.rex.RexFieldCollation) ViewExpanders(org.apache.calcite.plan.ViewExpanders) LogicalValues(org.apache.calcite.rel.logical.LogicalValues) LogicalCorrelate(org.apache.calcite.rel.logical.LogicalCorrelate) HiveParserBaseSemanticAnalyzer.validateNoHavingReferenceToAlias(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.validateNoHavingReferenceToAlias) HiveParserNamedJoinInfo(org.apache.flink.table.planner.delegation.hive.copy.HiveParserNamedJoinInfo) RelDataType(org.apache.calcite.rel.type.RelDataType) HiveParserUtils.rewriteGroupingFunctionAST(org.apache.flink.table.planner.delegation.hive.HiveParserUtils.rewriteGroupingFunctionAST) LogicalIntersect(org.apache.calcite.rel.logical.LogicalIntersect) HiveParserQBSubQuery(org.apache.flink.table.planner.delegation.hive.copy.HiveParserQBSubQuery) Table(org.apache.hadoop.hive.ql.metadata.Table) HiveParserRowResolver(org.apache.flink.table.planner.delegation.hive.copy.HiveParserRowResolver) Aggregate(org.apache.calcite.rel.core.Aggregate) HiveParserTypeCheckCtx(org.apache.flink.table.planner.delegation.hive.copy.HiveParserTypeCheckCtx) FrameworkConfig(org.apache.calcite.tools.FrameworkConfig) Node(org.apache.hadoop.hive.ql.lib.Node) HiveParserBaseSemanticAnalyzer.getBound(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.getBound) HiveParserBaseSemanticAnalyzer.getColumnInternalName(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.getColumnInternalName) HiveParserSqlFunctionConverter(org.apache.flink.table.planner.delegation.hive.copy.HiveParserSqlFunctionConverter) LogicalAggregate(org.apache.calcite.rel.logical.LogicalAggregate) JoinRelType(org.apache.calcite.rel.core.JoinRelType) AggregateCall(org.apache.calcite.rel.core.AggregateCall) SqlAggFunction(org.apache.calcite.sql.SqlAggFunction) ArrayDeque(java.util.ArrayDeque) RelDataTypeFactory(org.apache.calcite.rel.type.RelDataTypeFactory) ColumnAccessInfo(org.apache.hadoop.hive.ql.parse.ColumnAccessInfo) HiveParserBaseSemanticAnalyzer.obtainTableType(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.obtainTableType) HiveParserBaseSemanticAnalyzer.convert(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.convert) LogicalFilter(org.apache.calcite.rel.logical.LogicalFilter) RelFactories(org.apache.calcite.rel.core.RelFactories) LoggerFactory(org.slf4j.LoggerFactory) LogicalTableFunctionScan(org.apache.calcite.rel.logical.LogicalTableFunctionScan) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException) HiveParserBaseSemanticAnalyzer.processPositionAlias(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.processPositionAlias) HiveParserWindowingSpec(org.apache.flink.table.planner.delegation.hive.copy.HiveParserWindowingSpec) RexUtil(org.apache.calcite.rex.RexUtil) LogicalJoin(org.apache.calcite.rel.logical.LogicalJoin) HiveParserErrorMsg(org.apache.flink.table.planner.delegation.hive.parse.HiveParserErrorMsg) RexNode(org.apache.calcite.rex.RexNode) LogicalUnion(org.apache.calcite.rel.logical.LogicalUnion) 
RelOptCluster(org.apache.calcite.plan.RelOptCluster) LogicalDistribution(org.apache.flink.table.planner.plan.nodes.hive.LogicalDistribution) RexLiteral(org.apache.calcite.rex.RexLiteral) HiveParserQBParseInfo(org.apache.flink.table.planner.delegation.hive.copy.HiveParserQBParseInfo) HiveParserTypeConverter(org.apache.flink.table.planner.delegation.hive.copy.HiveParserTypeConverter) StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector) Preconditions(org.apache.flink.util.Preconditions) CompositeList(org.apache.calcite.util.CompositeList) Collectors(java.util.stream.Collectors) RexInputRef(org.apache.calcite.rex.RexInputRef) VirtualColumn(org.apache.hadoop.hive.ql.metadata.VirtualColumn) List(java.util.List) Type(java.lang.reflect.Type) Sort(org.apache.calcite.rel.core.Sort) RelDataTypeField(org.apache.calcite.rel.type.RelDataTypeField) GenericUDAFEvaluator(org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator) HiveParserQueryState(org.apache.flink.table.planner.delegation.hive.copy.HiveParserQueryState) Project(org.apache.calcite.rel.core.Project) CatalogManager(org.apache.flink.table.catalog.CatalogManager) HiveParserBaseSemanticAnalyzer.getGroupingSetsForRollup(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.getGroupingSetsForRollup) HiveParserBaseSemanticAnalyzer.getWindowSpecIndx(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.getWindowSpecIndx) HashMap(java.util.HashMap) SetOp(org.apache.calcite.rel.core.SetOp) Deque(java.util.Deque) RelOptUtil(org.apache.calcite.plan.RelOptUtil) LogicalMinus(org.apache.calcite.rel.logical.LogicalMinus) DeduplicateCorrelateVariables(org.apache.calcite.sql2rel.DeduplicateCorrelateVariables) HiveParserBaseSemanticAnalyzer.initPhase1Ctx(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.initPhase1Ctx) PlannerContext(org.apache.flink.table.planner.delegation.PlannerContext) HashSet(java.util.HashSet) HiveParserQBExpr(org.apache.flink.table.planner.delegation.hive.copy.HiveParserQBExpr) TableType(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.TableType) Pair(org.apache.calcite.util.Pair) HiveParserBaseSemanticAnalyzer.buildHiveToCalciteColumnMap(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.buildHiveToCalciteColumnMap) SqlOperator(org.apache.calcite.sql.SqlOperator) FlinkCalciteCatalogReader(org.apache.flink.table.planner.plan.FlinkCalciteCatalogReader) RelCollations(org.apache.calcite.rel.RelCollations) ColumnInfo(org.apache.hadoop.hive.ql.exec.ColumnInfo) ExprNodeColumnDesc(org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc) Logger(org.slf4j.Logger) HiveParserSemanticAnalyzer(org.apache.flink.table.planner.delegation.hive.copy.HiveParserSemanticAnalyzer) LogicalProject(org.apache.calcite.rel.logical.LogicalProject) SqlTypeName(org.apache.calcite.sql.type.SqlTypeName) TypeInfoFactory(org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory) Iterator(java.util.Iterator) RexBuilder(org.apache.calcite.rex.RexBuilder) ExprNodeConstantDesc(org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc) GroupByDesc(org.apache.hadoop.hive.ql.plan.GroupByDesc) HiveConf(org.apache.hadoop.hive.conf.HiveConf) HiveParserQB(org.apache.flink.table.planner.delegation.hive.copy.HiveParserQB) RelNode(org.apache.calcite.rel.RelNode) HiveParserBaseSemanticAnalyzer.genValues(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.genValues) 
HiveParserUtils.verifyCanHandleAst(org.apache.flink.table.planner.delegation.hive.HiveParserUtils.verifyCanHandleAst) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) AbstractMap(java.util.AbstractMap) ObjectPair(org.apache.hadoop.hive.common.ObjectPair) HiveParserBaseSemanticAnalyzer.buildHiveColNameToInputPosMap(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.buildHiveColNameToInputPosMap) HiveParserCreateViewInfo(org.apache.flink.table.planner.delegation.hive.parse.HiveParserCreateViewInfo) SqlUserDefinedTableFunction(org.apache.calcite.sql.validate.SqlUserDefinedTableFunction) HiveParserBaseSemanticAnalyzer.topLevelConjunctCheck(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.topLevelConjunctCheck) Util(org.apache.calcite.util.Util) HiveParserBaseSemanticAnalyzer.addToGBExpr(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.addToGBExpr) Collections(java.util.Collections) HiveParserBaseSemanticAnalyzer.getOrderKeys(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.getOrderKeys)

Aggregations

HiveParserRowResolver (org.apache.flink.table.planner.delegation.hive.copy.HiveParserRowResolver) 15
SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException) 12
ArrayList (java.util.ArrayList) 11
RexNode (org.apache.calcite.rex.RexNode) 11
HiveParserASTNode (org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode) 11
ColumnInfo (org.apache.hadoop.hive.ql.exec.ColumnInfo) 11
RelNode (org.apache.calcite.rel.RelNode) 10
HiveParserQBParseInfo (org.apache.flink.table.planner.delegation.hive.copy.HiveParserQBParseInfo) 8
ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc) 8
HashMap (java.util.HashMap) 6
LinkedHashMap (java.util.LinkedHashMap) 6
RelDataType (org.apache.calcite.rel.type.RelDataType) 6
JoinRelType (org.apache.calcite.rel.core.JoinRelType) 5
RelDataTypeField (org.apache.calcite.rel.type.RelDataTypeField) 5
SqlOperator (org.apache.calcite.sql.SqlOperator) 5
ImmutableBitSet (org.apache.calcite.util.ImmutableBitSet) 5
AggInfo (org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.AggInfo) 5
TableType (org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.TableType) 5
HiveParserBaseSemanticAnalyzer.getHiveAggInfo (org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.getHiveAggInfo) 5
HiveParserBaseSemanticAnalyzer.obtainTableType (org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.obtainTableType) 5
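
This index centers on HiveParserRowResolver, the structure that maps table/column aliases to Hive ColumnInfo objects while the planner resolves expressions. For orientation, here is a minimal, hypothetical sketch of that mapping. It assumes the class keeps the no-arg constructor and the put/get API of Hive's RowResolver, which it copies; verify the exact signatures against the Flink source before relying on them.

import org.apache.flink.table.planner.delegation.hive.copy.HiveParserRowResolver;
import org.apache.hadoop.hive.ql.exec.ColumnInfo;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class RowResolverSketch {
    public static void main(String[] args) throws SemanticException {
        // Assumed no-arg constructor, mirroring Hive's RowResolver.
        HiveParserRowResolver rr = new HiveParserRowResolver();
        // Register a resolved column: internal name, type, table alias, not a virtual column.
        rr.put("t1", "name", new ColumnInfo("_col0", TypeInfoFactory.stringTypeInfo, "t1", false));
        // Planner code later resolves the alias pair ("t1", "name") back to its ColumnInfo.
        ColumnInfo ci = rr.get("t1", "name");
        System.out.println(ci.getInternalName() + " : " + ci.getType());
    }
}

The class names and the "RowResolverSketch" wrapper are illustrative only; none of this code appears in the examples indexed above.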