Example 6 with HiveParserRowResolver

Use of org.apache.flink.table.planner.delegation.hive.copy.HiveParserRowResolver in project flink by apache.

The class HiveParserCalcitePlanner, method genOBLogicalPlan.

private Pair<Sort, RelNode> genOBLogicalPlan(HiveParserQB qb, RelNode srcRel, boolean outermostOB) throws SemanticException {
    Sort sortRel = null;
    RelNode originalOBInput = null;
    HiveParserQBParseInfo qbp = qb.getParseInfo();
    String dest = qbp.getClauseNames().iterator().next();
    HiveParserASTNode obAST = qbp.getOrderByForClause(dest);
    if (obAST != null) {
        // 1. OB Expr sanity test
        // in strict mode, in the presence of order by, limit must be specified
        Integer limit = qb.getParseInfo().getDestLimit(dest);
        if (limit == null) {
            String mapRedMode = semanticAnalyzer.getConf().getVar(HiveConf.ConfVars.HIVEMAPREDMODE);
            boolean banLargeQuery = Boolean.parseBoolean(semanticAnalyzer.getConf().get("hive.strict.checks.large.query", "false"));
            if ("strict".equalsIgnoreCase(mapRedMode) || banLargeQuery) {
                throw new SemanticException(generateErrorMessage(obAST, "Order by-s without limit"));
            }
        }
        // 2. Walk through OB exprs and extract field collations and additional
        // virtual columns needed
        final List<RexNode> virtualCols = new ArrayList<>();
        final List<RelFieldCollation> fieldCollations = new ArrayList<>();
        int fieldIndex;
        List<Node> obASTExprLst = obAST.getChildren();
        HiveParserASTNode obASTExpr;
        HiveParserASTNode nullOrderASTExpr;
        List<Pair<HiveParserASTNode, TypeInfo>> vcASTAndType = new ArrayList<>();
        HiveParserRowResolver inputRR = relToRowResolver.get(srcRel);
        HiveParserRowResolver outputRR = new HiveParserRowResolver();
        HiveParserRexNodeConverter converter = new HiveParserRexNodeConverter(cluster, srcRel.getRowType(), relToHiveColNameCalcitePosMap.get(srcRel), 0, false, funcConverter);
        int numSrcFields = srcRel.getRowType().getFieldCount();
        for (Node node : obASTExprLst) {
            // 2.1 Convert AST Expr to ExprNode
            obASTExpr = (HiveParserASTNode) node;
            nullOrderASTExpr = (HiveParserASTNode) obASTExpr.getChild(0);
            HiveParserASTNode ref = (HiveParserASTNode) nullOrderASTExpr.getChild(0);
            Map<HiveParserASTNode, ExprNodeDesc> astToExprNodeDesc = semanticAnalyzer.genAllExprNodeDesc(ref, inputRR);
            ExprNodeDesc obExprNodeDesc = astToExprNodeDesc.get(ref);
            if (obExprNodeDesc == null) {
                throw new SemanticException("Invalid order by expression: " + obASTExpr.toString());
            }
            // 2.2 Convert ExprNode to RexNode
            RexNode rexNode = converter.convert(obExprNodeDesc).accept(funcConverter);
            // 2.3 Determine the index of the OB expr in the child schema
            // NOTE: Calcite cannot take compound exprs in OB without them being
            // present in the child (& hence we add a child Project Rel)
            if (rexNode instanceof RexInputRef) {
                fieldIndex = ((RexInputRef) rexNode).getIndex();
            } else {
                fieldIndex = numSrcFields + virtualCols.size();
                virtualCols.add(rexNode);
                vcASTAndType.add(new Pair<>(ref, obExprNodeDesc.getTypeInfo()));
            }
            // 2.4 Determine the Direction of order by
            RelFieldCollation.Direction direction = RelFieldCollation.Direction.DESCENDING;
            if (obASTExpr.getType() == HiveASTParser.TOK_TABSORTCOLNAMEASC) {
                direction = RelFieldCollation.Direction.ASCENDING;
            }
            RelFieldCollation.NullDirection nullOrder;
            if (nullOrderASTExpr.getType() == HiveASTParser.TOK_NULLS_FIRST) {
                nullOrder = RelFieldCollation.NullDirection.FIRST;
            } else if (nullOrderASTExpr.getType() == HiveASTParser.TOK_NULLS_LAST) {
                nullOrder = RelFieldCollation.NullDirection.LAST;
            } else {
                throw new SemanticException("Unexpected null ordering option: " + nullOrderASTExpr.getType());
            }
            // 2.5 Add to field collations
            fieldCollations.add(new RelFieldCollation(fieldIndex, direction, nullOrder));
        }
        // 3. Add Child Project Rel if needed, Generate Output RR, input Sel Rel
        // for top constraining Sel
        RelNode obInputRel = srcRel;
        if (!virtualCols.isEmpty()) {
            List<RexNode> originalInputRefs =
                    srcRel.getRowType().getFieldList().stream()
                            .map(input -> new RexInputRef(input.getIndex(), input.getType()))
                            .collect(Collectors.toList());
            HiveParserRowResolver obSyntheticProjectRR = new HiveParserRowResolver();
            if (!HiveParserRowResolver.add(obSyntheticProjectRR, inputRR)) {
                throw new SemanticException("Duplicates detected when adding columns to RR: see previous message");
            }
            int vcolPos = inputRR.getRowSchema().getSignature().size();
            for (Pair<HiveParserASTNode, TypeInfo> astTypePair : vcASTAndType) {
                obSyntheticProjectRR.putExpression(astTypePair.getKey(), new ColumnInfo(getColumnInternalName(vcolPos), astTypePair.getValue(), null, false));
                vcolPos++;
            }
            obInputRel = genSelectRelNode(CompositeList.of(originalInputRefs, virtualCols), obSyntheticProjectRR, srcRel);
            if (outermostOB) {
                if (!HiveParserRowResolver.add(outputRR, inputRR)) {
                    throw new SemanticException("Duplicates detected when adding columns to RR: see previous message");
                }
            } else {
                if (!HiveParserRowResolver.add(outputRR, obSyntheticProjectRR)) {
                    throw new SemanticException("Duplicates detected when adding columns to RR: see previous message");
                }
            }
            originalOBInput = srcRel;
        } else {
            if (!HiveParserRowResolver.add(outputRR, inputRR)) {
                throw new SemanticException("Duplicates detected when adding columns to RR: see previous message");
            }
        }
        // 4. Construct SortRel
        RelTraitSet traitSet = cluster.traitSet();
        RelCollation canonizedCollation = traitSet.canonize(RelCollationImpl.of(fieldCollations));
        sortRel = LogicalSort.create(obInputRel, canonizedCollation, null, null);
        // 5. Update the maps
        Map<String, Integer> hiveColNameCalcitePosMap = buildHiveToCalciteColumnMap(outputRR);
        relToRowResolver.put(sortRel, outputRR);
        relToHiveColNameCalcitePosMap.put(sortRel, hiveColNameCalcitePosMap);
    }
    return (new Pair<>(sortRel, originalOBInput));
}
Also used : DataType(org.apache.flink.table.types.DataType) Arrays(java.util.Arrays) LogicalSort(org.apache.calcite.rel.logical.LogicalSort) FlinkPlannerImpl(org.apache.flink.table.planner.calcite.FlinkPlannerImpl) JoinType(org.apache.hadoop.hive.ql.parse.JoinType) HiveParserBaseSemanticAnalyzer.getGroupByForClause(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.getGroupByForClause) HiveParserContext(org.apache.flink.table.planner.delegation.hive.copy.HiveParserContext) FunctionRegistry(org.apache.hadoop.hive.ql.exec.FunctionRegistry) RelCollationImpl(org.apache.calcite.rel.RelCollationImpl) BigDecimal(java.math.BigDecimal) HiveParserBaseSemanticAnalyzer.unescapeIdentifier(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.unescapeIdentifier) HiveInspectors(org.apache.flink.table.functions.hive.conversion.HiveInspectors) CorrelationId(org.apache.calcite.rel.core.CorrelationId) HiveParserASTNode(org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode) Map(java.util.Map) HiveParserASTBuilder(org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTBuilder) RelTraitSet(org.apache.calcite.plan.RelTraitSet) RexWindowBound(org.apache.calcite.rex.RexWindowBound) ImmutableBitSet(org.apache.calcite.util.ImmutableBitSet) ExprNodeDesc(org.apache.hadoop.hive.ql.plan.ExprNodeDesc) HiveParserSubQueryUtils(org.apache.flink.table.planner.delegation.hive.copy.HiveParserSubQueryUtils) HiveParserBaseSemanticAnalyzer.getHiveAggInfo(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.getHiveAggInfo) HiveTypeUtil(org.apache.flink.table.catalog.hive.util.HiveTypeUtil) HiveParserBaseSemanticAnalyzer(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer) org.apache.hadoop.hive.serde.serdeConstants(org.apache.hadoop.hive.serde.serdeConstants) Set(java.util.Set) HiveParserUtils.generateErrorMessage(org.apache.flink.table.planner.delegation.hive.HiveParserUtils.generateErrorMessage) RelFieldCollation(org.apache.calcite.rel.RelFieldCollation) HiveASTParseUtils(org.apache.flink.table.planner.delegation.hive.copy.HiveASTParseUtils) HiveParserBaseSemanticAnalyzer.getGroupingSetsForCube(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.getGroupingSetsForCube) SqlStdOperatorTable(org.apache.calcite.sql.fun.SqlStdOperatorTable) HiveParserPreCboCtx(org.apache.flink.table.planner.delegation.hive.copy.HiveParserPreCboCtx) RelCollation(org.apache.calcite.rel.RelCollation) HiveParserBaseSemanticAnalyzer.getGroupingSets(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.getGroupingSets) HiveParserBaseSemanticAnalyzer.getPartitionKeys(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.getPartitionKeys) HiveParserBaseSemanticAnalyzer.removeOBInSubQuery(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.removeOBInSubQuery) HiveASTParser(org.apache.flink.table.planner.delegation.hive.parse.HiveASTParser) ErrorMsg(org.apache.hadoop.hive.ql.ErrorMsg) RexCall(org.apache.calcite.rex.RexCall) StructField(org.apache.hadoop.hive.serde2.objectinspector.StructField) TypeInfoUtils(org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils) HiveShim(org.apache.flink.table.catalog.hive.client.HiveShim) HiveParserJoinTypeCheckCtx(org.apache.flink.table.planner.delegation.hive.copy.HiveParserJoinTypeCheckCtx) ArrayList(java.util.ArrayList) LinkedHashMap(java.util.LinkedHashMap) 
AggInfo(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.AggInfo) HiveParserBaseSemanticAnalyzer.getCorrelationUse(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.getCorrelationUse) HiveASTParseDriver(org.apache.flink.table.planner.delegation.hive.copy.HiveASTParseDriver) RexFieldCollation(org.apache.calcite.rex.RexFieldCollation) ViewExpanders(org.apache.calcite.plan.ViewExpanders) LogicalValues(org.apache.calcite.rel.logical.LogicalValues) LogicalCorrelate(org.apache.calcite.rel.logical.LogicalCorrelate) HiveParserBaseSemanticAnalyzer.validateNoHavingReferenceToAlias(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.validateNoHavingReferenceToAlias) HiveParserNamedJoinInfo(org.apache.flink.table.planner.delegation.hive.copy.HiveParserNamedJoinInfo) RelDataType(org.apache.calcite.rel.type.RelDataType) HiveParserUtils.rewriteGroupingFunctionAST(org.apache.flink.table.planner.delegation.hive.HiveParserUtils.rewriteGroupingFunctionAST) LogicalIntersect(org.apache.calcite.rel.logical.LogicalIntersect) HiveParserQBSubQuery(org.apache.flink.table.planner.delegation.hive.copy.HiveParserQBSubQuery) Table(org.apache.hadoop.hive.ql.metadata.Table) HiveParserRowResolver(org.apache.flink.table.planner.delegation.hive.copy.HiveParserRowResolver) Aggregate(org.apache.calcite.rel.core.Aggregate) HiveParserTypeCheckCtx(org.apache.flink.table.planner.delegation.hive.copy.HiveParserTypeCheckCtx) FrameworkConfig(org.apache.calcite.tools.FrameworkConfig) Node(org.apache.hadoop.hive.ql.lib.Node) HiveParserBaseSemanticAnalyzer.getBound(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.getBound) HiveParserBaseSemanticAnalyzer.getColumnInternalName(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.getColumnInternalName) HiveParserSqlFunctionConverter(org.apache.flink.table.planner.delegation.hive.copy.HiveParserSqlFunctionConverter) LogicalAggregate(org.apache.calcite.rel.logical.LogicalAggregate) JoinRelType(org.apache.calcite.rel.core.JoinRelType) AggregateCall(org.apache.calcite.rel.core.AggregateCall) SqlAggFunction(org.apache.calcite.sql.SqlAggFunction) ArrayDeque(java.util.ArrayDeque) RelDataTypeFactory(org.apache.calcite.rel.type.RelDataTypeFactory) ColumnAccessInfo(org.apache.hadoop.hive.ql.parse.ColumnAccessInfo) HiveParserBaseSemanticAnalyzer.obtainTableType(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.obtainTableType) HiveParserBaseSemanticAnalyzer.convert(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.convert) LogicalFilter(org.apache.calcite.rel.logical.LogicalFilter) RelFactories(org.apache.calcite.rel.core.RelFactories) LoggerFactory(org.slf4j.LoggerFactory) LogicalTableFunctionScan(org.apache.calcite.rel.logical.LogicalTableFunctionScan) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException) HiveParserBaseSemanticAnalyzer.processPositionAlias(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.processPositionAlias) HiveParserWindowingSpec(org.apache.flink.table.planner.delegation.hive.copy.HiveParserWindowingSpec) RexUtil(org.apache.calcite.rex.RexUtil) LogicalJoin(org.apache.calcite.rel.logical.LogicalJoin) HiveParserErrorMsg(org.apache.flink.table.planner.delegation.hive.parse.HiveParserErrorMsg) RexNode(org.apache.calcite.rex.RexNode) LogicalUnion(org.apache.calcite.rel.logical.LogicalUnion) 
RelOptCluster(org.apache.calcite.plan.RelOptCluster) LogicalDistribution(org.apache.flink.table.planner.plan.nodes.hive.LogicalDistribution) RexLiteral(org.apache.calcite.rex.RexLiteral) HiveParserQBParseInfo(org.apache.flink.table.planner.delegation.hive.copy.HiveParserQBParseInfo) HiveParserTypeConverter(org.apache.flink.table.planner.delegation.hive.copy.HiveParserTypeConverter) StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector) Preconditions(org.apache.flink.util.Preconditions) CompositeList(org.apache.calcite.util.CompositeList) Collectors(java.util.stream.Collectors) RexInputRef(org.apache.calcite.rex.RexInputRef) VirtualColumn(org.apache.hadoop.hive.ql.metadata.VirtualColumn) List(java.util.List) Type(java.lang.reflect.Type) Sort(org.apache.calcite.rel.core.Sort) RelDataTypeField(org.apache.calcite.rel.type.RelDataTypeField) GenericUDAFEvaluator(org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator) HiveParserQueryState(org.apache.flink.table.planner.delegation.hive.copy.HiveParserQueryState) Project(org.apache.calcite.rel.core.Project) CatalogManager(org.apache.flink.table.catalog.CatalogManager) HiveParserBaseSemanticAnalyzer.getGroupingSetsForRollup(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.getGroupingSetsForRollup) HiveParserBaseSemanticAnalyzer.getWindowSpecIndx(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.getWindowSpecIndx) HashMap(java.util.HashMap) SetOp(org.apache.calcite.rel.core.SetOp) Deque(java.util.Deque) RelOptUtil(org.apache.calcite.plan.RelOptUtil) LogicalMinus(org.apache.calcite.rel.logical.LogicalMinus) DeduplicateCorrelateVariables(org.apache.calcite.sql2rel.DeduplicateCorrelateVariables) HiveParserBaseSemanticAnalyzer.initPhase1Ctx(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.initPhase1Ctx) PlannerContext(org.apache.flink.table.planner.delegation.PlannerContext) HashSet(java.util.HashSet) HiveParserQBExpr(org.apache.flink.table.planner.delegation.hive.copy.HiveParserQBExpr) TableType(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.TableType) Pair(org.apache.calcite.util.Pair) HiveParserBaseSemanticAnalyzer.buildHiveToCalciteColumnMap(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.buildHiveToCalciteColumnMap) SqlOperator(org.apache.calcite.sql.SqlOperator) FlinkCalciteCatalogReader(org.apache.flink.table.planner.plan.FlinkCalciteCatalogReader) RelCollations(org.apache.calcite.rel.RelCollations) ColumnInfo(org.apache.hadoop.hive.ql.exec.ColumnInfo) ExprNodeColumnDesc(org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc) Logger(org.slf4j.Logger) HiveParserSemanticAnalyzer(org.apache.flink.table.planner.delegation.hive.copy.HiveParserSemanticAnalyzer) LogicalProject(org.apache.calcite.rel.logical.LogicalProject) SqlTypeName(org.apache.calcite.sql.type.SqlTypeName) TypeInfoFactory(org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory) Iterator(java.util.Iterator) RexBuilder(org.apache.calcite.rex.RexBuilder) ExprNodeConstantDesc(org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc) GroupByDesc(org.apache.hadoop.hive.ql.plan.GroupByDesc) HiveConf(org.apache.hadoop.hive.conf.HiveConf) HiveParserQB(org.apache.flink.table.planner.delegation.hive.copy.HiveParserQB) RelNode(org.apache.calcite.rel.RelNode) HiveParserBaseSemanticAnalyzer.genValues(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.genValues) 
HiveParserUtils.verifyCanHandleAst(org.apache.flink.table.planner.delegation.hive.HiveParserUtils.verifyCanHandleAst) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) AbstractMap(java.util.AbstractMap) ObjectPair(org.apache.hadoop.hive.common.ObjectPair) HiveParserBaseSemanticAnalyzer.buildHiveColNameToInputPosMap(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.buildHiveColNameToInputPosMap) HiveParserCreateViewInfo(org.apache.flink.table.planner.delegation.hive.parse.HiveParserCreateViewInfo) SqlUserDefinedTableFunction(org.apache.calcite.sql.validate.SqlUserDefinedTableFunction) HiveParserBaseSemanticAnalyzer.topLevelConjunctCheck(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.topLevelConjunctCheck) Util(org.apache.calcite.util.Util) HiveParserBaseSemanticAnalyzer.addToGBExpr(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.addToGBExpr) Collections(java.util.Collections) HiveParserBaseSemanticAnalyzer.getOrderKeys(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.getOrderKeys)
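
A minimal, stand-alone sketch of the token-to-collation mapping in steps 2.4-2.5 above. The helper name and its boolean parameters are illustrative (the real code branches on HiveASTParser token types); it assumes calcite-core on the classpath:

import org.apache.calcite.rel.RelFieldCollation;

public final class CollationSketch {

    // Mirrors steps 2.4-2.5: ASC/DESC comes from the sort-column token
    // (TOK_TABSORTCOLNAMEASC), NULLS FIRST/LAST from its child token
    // (TOK_NULLS_FIRST / TOK_NULLS_LAST).
    static RelFieldCollation toCollation(int fieldIndex, boolean ascending, boolean nullsFirst) {
        RelFieldCollation.Direction direction =
                ascending ? RelFieldCollation.Direction.ASCENDING : RelFieldCollation.Direction.DESCENDING;
        RelFieldCollation.NullDirection nullOrder =
                nullsFirst ? RelFieldCollation.NullDirection.FIRST : RelFieldCollation.NullDirection.LAST;
        return new RelFieldCollation(fieldIndex, direction, nullOrder);
    }

    public static void main(String[] args) {
        // ORDER BY <field 2> DESC NULLS FIRST
        System.out.println(toCollation(2, false, true));
    }
}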

Example 7 with HiveParserRowResolver

Use of org.apache.flink.table.planner.delegation.hive.copy.HiveParserRowResolver in project flink by apache.

The class HiveParserCalcitePlanner, method subqueryRestrictionCheck.

private void subqueryRestrictionCheck(HiveParserQB qb, HiveParserASTNode searchCond, RelNode srcRel, boolean forHavingClause, Set<HiveParserASTNode> corrScalarQueries) throws SemanticException {
    List<HiveParserASTNode> subQueriesInOriginalTree = HiveParserSubQueryUtils.findSubQueries(searchCond);
    HiveParserASTNode clonedSearchCond = (HiveParserASTNode) HiveParserSubQueryUtils.ADAPTOR.dupTree(searchCond);
    List<HiveParserASTNode> subQueries = HiveParserSubQueryUtils.findSubQueries(clonedSearchCond);
    for (int i = 0; i < subQueriesInOriginalTree.size(); i++) {
        int sqIdx = qb.incrNumSubQueryPredicates();
        HiveParserASTNode originalSubQueryAST = subQueriesInOriginalTree.get(i);
        HiveParserASTNode subQueryAST = subQueries.get(i);
        // HiveParserSubQueryUtils.rewriteParentQueryWhere(clonedSearchCond, subQueryAST);
        ObjectPair<Boolean, Integer> subqInfo = new ObjectPair<>(false, 0);
        if (!topLevelConjunctCheck(clonedSearchCond, subqInfo)) {
            // Restriction.7.h :: SubQuery predicates can appear only as top level conjuncts.
            throw new SemanticException(HiveParserErrorMsg.getMsg(ErrorMsg.UNSUPPORTED_SUBQUERY_EXPRESSION, subQueryAST, "Only SubQuery expressions that are top level conjuncts are allowed"));
        }
        HiveParserASTNode outerQueryExpr = (HiveParserASTNode) subQueryAST.getChild(2);
        if (outerQueryExpr != null && outerQueryExpr.getType() == HiveASTParser.TOK_SUBQUERY_EXPR) {
            throw new SemanticException(HiveParserErrorMsg.getMsg(ErrorMsg.UNSUPPORTED_SUBQUERY_EXPRESSION, outerQueryExpr, "IN/NOT IN subqueries are not allowed in LHS"));
        }
        HiveParserQBSubQuery subQuery = HiveParserSubQueryUtils.buildSubQuery(sqIdx, subQueryAST, originalSubQueryAST, semanticAnalyzer.ctx, frameworkConfig, cluster);
        HiveParserRowResolver inputRR = relToRowResolver.get(srcRel);
        String havingInputAlias = null;
        boolean isCorrScalarWithAgg = subQuery.subqueryRestrictionsCheck(inputRR, forHavingClause, havingInputAlias);
        if (isCorrScalarWithAgg) {
            corrScalarQueries.add(originalSubQueryAST);
        }
    }
}
Also used : HiveParserASTNode(org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode) HiveParserRowResolver(org.apache.flink.table.planner.delegation.hive.copy.HiveParserRowResolver) HiveParserQBSubQuery(org.apache.flink.table.planner.delegation.hive.copy.HiveParserQBSubQuery) ObjectPair(org.apache.hadoop.hive.common.ObjectPair) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException)
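
The restriction enforced above (Restriction 7.h) is easiest to see with two WHERE clauses. A hedged illustration; the query strings are made up for this example and never executed:

public final class SubQueryConjunctDemo {
    public static void main(String[] args) {
        // Accepted: the subquery predicate is a top-level conjunct of the WHERE clause.
        String ok = "SELECT * FROM t WHERE a > 0 AND t.k IN (SELECT k FROM s)";
        // Rejected by subqueryRestrictionCheck: the subquery predicate sits under an OR,
        // so topLevelConjunctCheck fails and a SemanticException is thrown.
        String bad = "SELECT * FROM t WHERE a > 0 OR t.k IN (SELECT k FROM s)";
        System.out.println(ok);
        System.out.println(bad);
    }
}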

Example 8 with HiveParserRowResolver

Use of org.apache.flink.table.planner.delegation.hive.copy.HiveParserRowResolver in project flink by apache.

The class HiveParserCalcitePlanner, method genUDTFPlan.

private RelNode genUDTFPlan(SqlOperator sqlOperator, String genericUDTFName, String outputTableAlias, List<String> colAliases, HiveParserQB qb, List<RexNode> operands, List<ColumnInfo> opColInfos, RelNode input, boolean inSelect, boolean isOuter) throws SemanticException {
    Preconditions.checkState(!isOuter || !inSelect, "OUTER is not supported for SELECT UDTF");
    // No GROUP BY / DISTRIBUTE BY / SORT BY / CLUSTER BY
    HiveParserQBParseInfo qbp = qb.getParseInfo();
    if (inSelect && !qbp.getDestToGroupBy().isEmpty()) {
        throw new SemanticException(ErrorMsg.UDTF_NO_GROUP_BY.getMsg());
    }
    if (inSelect && !qbp.getDestToDistributeBy().isEmpty()) {
        throw new SemanticException(ErrorMsg.UDTF_NO_DISTRIBUTE_BY.getMsg());
    }
    if (inSelect && !qbp.getDestToSortBy().isEmpty()) {
        throw new SemanticException(ErrorMsg.UDTF_NO_SORT_BY.getMsg());
    }
    if (inSelect && !qbp.getDestToClusterBy().isEmpty()) {
        throw new SemanticException(ErrorMsg.UDTF_NO_CLUSTER_BY.getMsg());
    }
    if (inSelect && !qbp.getAliasToLateralViews().isEmpty()) {
        throw new SemanticException(ErrorMsg.UDTF_LATERAL_VIEW.getMsg());
    }
    LOG.debug("Table alias: " + outputTableAlias + " Col aliases: " + colAliases);
    // Create the object inspector for the input columns and initialize the UDTF
    RelDataType relDataType = HiveParserUtils.inferReturnTypeForOperands(sqlOperator, operands, cluster.getTypeFactory());
    DataType dataType = HiveParserUtils.toDataType(relDataType);
    StructObjectInspector outputOI = (StructObjectInspector) HiveInspectors.getObjectInspector(HiveTypeUtil.toHiveTypeInfo(dataType, false));
    // this should only happen for select udtf
    if (outputTableAlias == null) {
        Preconditions.checkState(inSelect, "Table alias not specified for lateral view");
        String prefix = "select_" + genericUDTFName + "_alias_";
        int i = 0;
        while (qb.getAliases().contains(prefix + i)) {
            i++;
        }
        outputTableAlias = prefix + i;
    }
    if (colAliases.isEmpty()) {
        // user did not specify alias names, infer names from outputOI
        for (StructField field : outputOI.getAllStructFieldRefs()) {
            colAliases.add(field.getFieldName());
        }
    }
    // Make sure that the number of column aliases in the AS clause matches the number of
    // columns output by the UDTF
    int numOutputCols = outputOI.getAllStructFieldRefs().size();
    int numSuppliedAliases = colAliases.size();
    if (numOutputCols != numSuppliedAliases) {
        throw new SemanticException(ErrorMsg.UDTF_ALIAS_MISMATCH.getMsg("expected " + numOutputCols + " aliases " + "but got " + numSuppliedAliases));
    }
    // Generate the output column infos / row resolver using internal names.
    ArrayList<ColumnInfo> udtfOutputCols = new ArrayList<>();
    Iterator<String> colAliasesIter = colAliases.iterator();
    for (StructField sf : outputOI.getAllStructFieldRefs()) {
        String colAlias = colAliasesIter.next();
        assert (colAlias != null);
        // Since the UDTF operator feeds into an LVJ operator that will rename all the
        // internal names, we can just use the field name from the UDTF's OI as the
        // internal name.
        ColumnInfo col = new ColumnInfo(sf.getFieldName(), TypeInfoUtils.getTypeInfoFromObjectInspector(sf.getFieldObjectInspector()), outputTableAlias, false);
        udtfOutputCols.add(col);
    }
    // Create the row resolver for the table function scan
    HiveParserRowResolver udtfOutRR = new HiveParserRowResolver();
    for (int i = 0; i < udtfOutputCols.size(); i++) {
        udtfOutRR.put(outputTableAlias, colAliases.get(i), udtfOutputCols.get(i));
    }
    // Build row type from field <type, name>
    RelDataType retType = HiveParserTypeConverter.getType(cluster, udtfOutRR, null);
    List<RelDataType> argTypes = new ArrayList<>();
    RelDataTypeFactory dtFactory = cluster.getRexBuilder().getTypeFactory();
    for (ColumnInfo ci : opColInfos) {
        argTypes.add(HiveParserUtils.toRelDataType(ci.getType(), dtFactory));
    }
    SqlOperator calciteOp = HiveParserSqlFunctionConverter.getCalciteFn(genericUDTFName, argTypes, retType, false);
    RexNode rexNode = cluster.getRexBuilder().makeCall(calciteOp, operands);
    // convert the rex call
    TableFunctionConverter udtfConverter = new TableFunctionConverter(cluster, input, frameworkConfig.getOperatorTable(), catalogReader.nameMatcher());
    RexCall convertedCall = (RexCall) rexNode.accept(udtfConverter);
    SqlOperator convertedOperator = convertedCall.getOperator();
    Preconditions.checkState(convertedOperator instanceof SqlUserDefinedTableFunction, "Expect operator to be " + SqlUserDefinedTableFunction.class.getSimpleName() + ", actually got " + convertedOperator.getClass().getSimpleName());
    // TODO: how to decide this?
    Type elementType = Object[].class;
    // create LogicalTableFunctionScan
    RelNode tableFunctionScan = LogicalTableFunctionScan.create(input.getCluster(), Collections.emptyList(), convertedCall, elementType, retType, null);
    // remember the table alias for the UDTF so that we can reference the cols later
    qb.addAlias(outputTableAlias);
    RelNode correlRel;
    RexBuilder rexBuilder = cluster.getRexBuilder();
    // find correlation in the converted call
    Pair<List<CorrelationId>, ImmutableBitSet> correlUse = getCorrelationUse(convertedCall);
    // create correlate node
    if (correlUse == null) {
        correlRel =
                plannerContext
                        .createRelBuilder(catalogManager.getCurrentCatalog(), catalogManager.getCurrentDatabase())
                        .push(input)
                        .push(tableFunctionScan)
                        .join(isOuter ? JoinRelType.LEFT : JoinRelType.INNER, rexBuilder.makeLiteral(true))
                        .build();
    } else {
        if (correlUse.left.size() > 1) {
            tableFunctionScan = DeduplicateCorrelateVariables.go(rexBuilder, correlUse.left.get(0), Util.skip(correlUse.left), tableFunctionScan);
        }
        correlRel = LogicalCorrelate.create(input, tableFunctionScan, correlUse.left.get(0), correlUse.right, isOuter ? JoinRelType.LEFT : JoinRelType.INNER);
    }
    // Add new rel & its RR to the maps
    relToHiveColNameCalcitePosMap.put(tableFunctionScan, buildHiveToCalciteColumnMap(udtfOutRR));
    relToRowResolver.put(tableFunctionScan, udtfOutRR);
    HiveParserRowResolver correlRR = HiveParserRowResolver.getCombinedRR(relToRowResolver.get(input), relToRowResolver.get(tableFunctionScan));
    relToHiveColNameCalcitePosMap.put(correlRel, buildHiveToCalciteColumnMap(correlRR));
    relToRowResolver.put(correlRel, correlRR);
    if (!inSelect) {
        return correlRel;
    }
    // create project node
    List<RexNode> projects = new ArrayList<>();
    HiveParserRowResolver projectRR = new HiveParserRowResolver();
    int j = 0;
    for (int i = input.getRowType().getFieldCount(); i < correlRel.getRowType().getFieldCount(); i++) {
        projects.add(cluster.getRexBuilder().makeInputRef(correlRel, i));
        ColumnInfo inputColInfo = correlRR.getRowSchema().getSignature().get(i);
        String colAlias = inputColInfo.getAlias();
        ColumnInfo colInfo = new ColumnInfo(getColumnInternalName(j++), inputColInfo.getObjectInspector(), null, false);
        projectRR.put(null, colAlias, colInfo);
    }
    RelNode projectNode = LogicalProject.create(correlRel, Collections.emptyList(), projects, tableFunctionScan.getRowType());
    relToHiveColNameCalcitePosMap.put(projectNode, buildHiveToCalciteColumnMap(projectRR));
    relToRowResolver.put(projectNode, projectRR);
    return projectNode;
}
Also used : ImmutableBitSet(org.apache.calcite.util.ImmutableBitSet) SqlOperator(org.apache.calcite.sql.SqlOperator) ArrayList(java.util.ArrayList) ColumnInfo(org.apache.hadoop.hive.ql.exec.ColumnInfo) RelDataType(org.apache.calcite.rel.type.RelDataType) RexCall(org.apache.calcite.rex.RexCall) StructField(org.apache.hadoop.hive.serde2.objectinspector.StructField) SqlUserDefinedTableFunction(org.apache.calcite.sql.validate.SqlUserDefinedTableFunction) HiveParserRowResolver(org.apache.flink.table.planner.delegation.hive.copy.HiveParserRowResolver) RelDataTypeFactory(org.apache.calcite.rel.type.RelDataTypeFactory) DataType(org.apache.flink.table.types.DataType) RexBuilder(org.apache.calcite.rex.RexBuilder) CompositeList(org.apache.calcite.util.CompositeList) List(java.util.List) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException) JoinType(org.apache.hadoop.hive.ql.parse.JoinType) JoinRelType(org.apache.calcite.rel.core.JoinRelType) HiveParserBaseSemanticAnalyzer.obtainTableType(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.obtainTableType) Type(java.lang.reflect.Type) TableType(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.TableType) HiveParserQBParseInfo(org.apache.flink.table.planner.delegation.hive.copy.HiveParserQBParseInfo) RelNode(org.apache.calcite.rel.RelNode) StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector) RexNode(org.apache.calcite.rex.RexNode)
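
The alias handling in genUDTFPlan (deriving a unique select_<udtf>_alias_<i> name and validating the AS-clause arity) is self-contained enough to sketch in isolation. Class and method names below are illustrative, not part of the Flink source:

import java.util.List;
import java.util.Set;

public final class UdtfAliasSketch {

    // Mirrors the loop above: probe select_<name>_alias_<i> until the alias
    // is not already taken in the query block.
    static String uniqueAlias(String udtfName, Set<String> takenAliases) {
        String prefix = "select_" + udtfName + "_alias_";
        int i = 0;
        while (takenAliases.contains(prefix + i)) {
            i++;
        }
        return prefix + i;
    }

    // Mirrors the arity check: AS-clause aliases must match the UDTF's output columns.
    static void checkAliasCount(int numOutputCols, List<String> colAliases) {
        if (numOutputCols != colAliases.size()) {
            throw new IllegalArgumentException(
                    "expected " + numOutputCols + " aliases but got " + colAliases.size());
        }
    }

    public static void main(String[] args) {
        System.out.println(uniqueAlias("explode", Set.of("select_explode_alias_0")));
        checkAliasCount(2, List.of("k", "v")); // passes
    }
}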

Example 9 with HiveParserRowResolver

Use of org.apache.flink.table.planner.delegation.hive.copy.HiveParserRowResolver in project flink by apache.

The class HiveParserTypeCheckProcFactory, method processGByExpr.

/**
 * Function to do group-by subexpression elimination. This is called by all the processors
 * initially. As an example, consider the query select a+b, count(1) from T group by a+b. Then
 * a+b is already precomputed in the group-by operator's key, so we substitute a+b in the select
 * list with the internal column name of the a+b expression that appears in the input row
 * resolver.
 *
 * @param nd The node that is being inspected.
 * @param procCtx The processor context.
 * @return exprNodeColumnDesc.
 */
public static ExprNodeDesc processGByExpr(Node nd, Object procCtx) throws SemanticException {
    // We recursively create the exprNodeDesc. Base cases: when we encounter
    // a column ref, we convert that into an exprNodeColumnDesc; when we
    // encounter
    // a constant, we convert that into an exprNodeConstantDesc. For others we
    // just
    // build the exprNodeFuncDesc with recursively built children.
    HiveParserASTNode expr = (HiveParserASTNode) nd;
    HiveParserTypeCheckCtx ctx = (HiveParserTypeCheckCtx) procCtx;
    // bypass only if outerRR is not null; otherwise we need to look for expressions in outerRR
    // for subqueries, e.g. select min(b.value) from table b group by b.key
    // having key in (select .. where a = min(b.value))
    if (!ctx.isUseCaching() && ctx.getOuterRR() == null) {
        return null;
    }
    HiveParserRowResolver input = ctx.getInputRR();
    ExprNodeDesc desc = null;
    if (input == null || !ctx.getAllowGBExprElimination()) {
        return null;
    }
    // If the current subExpression is pre-calculated, as in Group-By etc.
    ColumnInfo colInfo = input.getExpression(expr);
    // try outer row resolver
    HiveParserRowResolver outerRR = ctx.getOuterRR();
    if (colInfo == null && outerRR != null) {
        colInfo = outerRR.getExpression(expr);
    }
    if (colInfo != null) {
        desc = new ExprNodeColumnDesc(colInfo);
        HiveParserASTNode source = input.getExpressionSource(expr);
        if (source != null) {
            ctx.getUnparseTranslator().addCopyTranslation(expr, source);
        }
        return desc;
    }
    return desc;
}
Also used : HiveParserASTNode(org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode) HiveParserRowResolver(org.apache.flink.table.planner.delegation.hive.copy.HiveParserRowResolver) ExprNodeColumnDesc(org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc) ColumnInfo(org.apache.hadoop.hive.ql.exec.ColumnInfo) ExprNodeDesc(org.apache.hadoop.hive.ql.plan.ExprNodeDesc) HiveParserTypeCheckCtx(org.apache.flink.table.planner.delegation.hive.copy.HiveParserTypeCheckCtx)
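
The substitution processGByExpr performs can be pictured with a plain map from pre-computed expressions to internal column names. A minimal sketch under the assumption that the group-by key a+b was registered under the internal name _col0 (both strings are illustrative):

import java.util.HashMap;
import java.util.Map;

public final class GByElimSketch {
    public static void main(String[] args) {
        // For: select a+b, count(1) from T group by a+b
        // the group-by operator has already computed a+b and exposed it under an
        // internal column name, which the row resolver records.
        Map<String, String> exprToInternalName = new HashMap<>();
        exprToInternalName.put("(a + b)", "_col0");

        // Like processGByExpr, resolve the select-list expression to a reference
        // to the pre-computed column instead of re-evaluating it.
        String selectExpr = "(a + b)";
        System.out.println(exprToInternalName.getOrDefault(selectExpr, selectExpr)); // _col0
    }
}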

Example 10 with HiveParserRowResolver

Use of org.apache.flink.table.planner.delegation.hive.copy.HiveParserRowResolver in project flink by apache.

The class HiveParserCalcitePlanner, method genJoinRelNode.

private RelNode genJoinRelNode(RelNode leftRel, String leftTableAlias, RelNode rightRel, String rightTableAlias, JoinType hiveJoinType, HiveParserASTNode joinCondAst) throws SemanticException {
    HiveParserRowResolver leftRR = relToRowResolver.get(leftRel);
    HiveParserRowResolver rightRR = relToRowResolver.get(rightRel);
    // 1. Construct ExpressionNodeDesc representing Join Condition
    RexNode joinCondRex;
    List<String> namedColumns = null;
    if (joinCondAst != null) {
        HiveParserJoinTypeCheckCtx jCtx = new HiveParserJoinTypeCheckCtx(leftRR, rightRR, hiveJoinType, frameworkConfig, cluster);
        HiveParserRowResolver combinedRR = HiveParserRowResolver.getCombinedRR(leftRR, rightRR);
        if (joinCondAst.getType() == HiveASTParser.TOK_TABCOLNAME && !hiveJoinType.equals(JoinType.LEFTSEMI)) {
            namedColumns = new ArrayList<>();
            // We will transform the using clause to make it look like an on-clause.
            // So, let's generate a valid on-clause AST from the using clause.
            HiveParserASTNode and = (HiveParserASTNode) HiveASTParseDriver.ADAPTOR.create(HiveASTParser.KW_AND, "and");
            HiveParserASTNode equal = null;
            int count = 0;
            for (Node child : joinCondAst.getChildren()) {
                String columnName = ((HiveParserASTNode) child).getText();
                // dealing with views
                if (semanticAnalyzer.unparseTranslator != null && semanticAnalyzer.unparseTranslator.isEnabled()) {
                    semanticAnalyzer.unparseTranslator.addIdentifierTranslation((HiveParserASTNode) child);
                }
                namedColumns.add(columnName);
                HiveParserASTNode left = HiveParserASTBuilder.qualifiedName(leftTableAlias, columnName);
                HiveParserASTNode right = HiveParserASTBuilder.qualifiedName(rightTableAlias, columnName);
                equal = (HiveParserASTNode) HiveASTParseDriver.ADAPTOR.create(HiveASTParser.EQUAL, "=");
                HiveASTParseDriver.ADAPTOR.addChild(equal, left);
                HiveASTParseDriver.ADAPTOR.addChild(equal, right);
                HiveASTParseDriver.ADAPTOR.addChild(and, equal);
                count++;
            }
            joinCondAst = count > 1 ? and : equal;
        } else if (semanticAnalyzer.unparseTranslator != null && semanticAnalyzer.unparseTranslator.isEnabled()) {
            semanticAnalyzer.genAllExprNodeDesc(joinCondAst, combinedRR, jCtx);
        }
        Map<HiveParserASTNode, ExprNodeDesc> exprNodes = HiveParserUtils.genExprNode(joinCondAst, jCtx);
        if (jCtx.getError() != null) {
            throw new SemanticException(generateErrorMessage(jCtx.getErrorSrcNode(), jCtx.getError()));
        }
        ExprNodeDesc joinCondExprNode = exprNodes.get(joinCondAst);
        List<RelNode> inputRels = new ArrayList<>();
        inputRels.add(leftRel);
        inputRels.add(rightRel);
        joinCondRex = HiveParserRexNodeConverter.convert(cluster, joinCondExprNode, inputRels, relToRowResolver, relToHiveColNameCalcitePosMap, false, funcConverter).accept(funcConverter);
    } else {
        joinCondRex = cluster.getRexBuilder().makeLiteral(true);
    }
    // 3. Construct Join Rel Node and HiveParserRowResolver for the new Join Node
    boolean leftSemiJoin = false;
    JoinRelType calciteJoinType;
    switch(hiveJoinType) {
        case LEFTOUTER:
            calciteJoinType = JoinRelType.LEFT;
            break;
        case RIGHTOUTER:
            calciteJoinType = JoinRelType.RIGHT;
            break;
        case FULLOUTER:
            calciteJoinType = JoinRelType.FULL;
            break;
        case LEFTSEMI:
            calciteJoinType = JoinRelType.SEMI;
            leftSemiJoin = true;
            break;
        case INNER:
        default:
            calciteJoinType = JoinRelType.INNER;
            break;
    }
    RelNode topRel;
    HiveParserRowResolver topRR;
    if (leftSemiJoin) {
        List<RelDataTypeField> sysFieldList = new ArrayList<>();
        List<RexNode> leftJoinKeys = new ArrayList<>();
        List<RexNode> rightJoinKeys = new ArrayList<>();
        RexNode nonEquiConds = RelOptUtil.splitJoinCondition(sysFieldList, leftRel, rightRel, joinCondRex, leftJoinKeys, rightJoinKeys, null, null);
        if (!nonEquiConds.isAlwaysTrue()) {
            throw new SemanticException("Non equality condition not supported in Semi-Join" + nonEquiConds);
        }
        RelNode[] inputRels = new RelNode[] { leftRel, rightRel };
        final List<Integer> leftKeys = new ArrayList<>();
        final List<Integer> rightKeys = new ArrayList<>();
        joinCondRex = HiveParserUtils.projectNonColumnEquiConditions(RelFactories.DEFAULT_PROJECT_FACTORY, inputRels, leftJoinKeys, rightJoinKeys, 0, leftKeys, rightKeys);
        topRel = LogicalJoin.create(inputRels[0], inputRels[1], Collections.emptyList(), joinCondRex, Collections.emptySet(), calciteJoinType);
        // the previous call to projectNonColumnEquiConditions may have updated inputRels[0]
        if (inputRels[0] != leftRel) {
            HiveParserRowResolver newLeftRR = new HiveParserRowResolver();
            if (!HiveParserRowResolver.add(newLeftRR, leftRR)) {
                LOG.warn("Duplicates detected when adding columns to RR: see previous message");
            }
            for (int i = leftRel.getRowType().getFieldCount(); i < inputRels[0].getRowType().getFieldCount(); i++) {
                ColumnInfo oColInfo = new ColumnInfo(getColumnInternalName(i), HiveParserTypeConverter.convert(inputRels[0].getRowType().getFieldList().get(i).getType()), null, false);
                newLeftRR.put(oColInfo.getTabAlias(), oColInfo.getInternalName(), oColInfo);
            }
            HiveParserRowResolver joinRR = new HiveParserRowResolver();
            if (!HiveParserRowResolver.add(joinRR, newLeftRR)) {
                LOG.warn("Duplicates detected when adding columns to RR: see previous message");
            }
            relToHiveColNameCalcitePosMap.put(topRel, buildHiveToCalciteColumnMap(joinRR));
            relToRowResolver.put(topRel, joinRR);
            // Introduce top project operator to remove additional column(s) that have been
            // introduced
            List<RexNode> topFields = new ArrayList<>();
            List<String> topFieldNames = new ArrayList<>();
            for (int i = 0; i < leftRel.getRowType().getFieldCount(); i++) {
                final RelDataTypeField field = leftRel.getRowType().getFieldList().get(i);
                topFields.add(leftRel.getCluster().getRexBuilder().makeInputRef(field.getType(), i));
                topFieldNames.add(field.getName());
            }
            topRel = LogicalProject.create(topRel, Collections.emptyList(), topFields, topFieldNames);
        }
        topRR = new HiveParserRowResolver();
        if (!HiveParserRowResolver.add(topRR, leftRR)) {
            LOG.warn("Duplicates detected when adding columns to RR: see previous message");
        }
    } else {
        topRel = LogicalJoin.create(leftRel, rightRel, Collections.emptyList(), joinCondRex, Collections.emptySet(), calciteJoinType);
        topRR = HiveParserRowResolver.getCombinedRR(leftRR, rightRR);
        if (namedColumns != null) {
            List<String> tableAliases = new ArrayList<>();
            tableAliases.add(leftTableAlias);
            tableAliases.add(rightTableAlias);
            topRR.setNamedJoinInfo(new HiveParserNamedJoinInfo(tableAliases, namedColumns, hiveJoinType));
        }
    }
    relToHiveColNameCalcitePosMap.put(topRel, buildHiveToCalciteColumnMap(topRR));
    relToRowResolver.put(topRel, topRR);
    return topRel;
}
Also used : HiveParserJoinTypeCheckCtx(org.apache.flink.table.planner.delegation.hive.copy.HiveParserJoinTypeCheckCtx) HiveParserNamedJoinInfo(org.apache.flink.table.planner.delegation.hive.copy.HiveParserNamedJoinInfo) HiveParserASTNode(org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode) Node(org.apache.hadoop.hive.ql.lib.Node) RexNode(org.apache.calcite.rex.RexNode) RelNode(org.apache.calcite.rel.RelNode) ArrayList(java.util.ArrayList) ColumnInfo(org.apache.hadoop.hive.ql.exec.ColumnInfo) HiveParserRowResolver(org.apache.flink.table.planner.delegation.hive.copy.HiveParserRowResolver) ExprNodeDesc(org.apache.hadoop.hive.ql.plan.ExprNodeDesc) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException) JoinRelType(org.apache.calcite.rel.core.JoinRelType) RelDataTypeField(org.apache.calcite.rel.type.RelDataTypeField)
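
The USING-to-ON rewrite in the join-condition branch above amounts to expanding each named column into an equality conjunct. A hedged sketch at the string level (the real code assembles EQUAL and AND AST nodes, not strings):

import java.util.List;
import java.util.StringJoiner;

public final class UsingClauseSketch {

    // JOIN ... USING (c1, c2) becomes ON l.c1 = r.c1 AND l.c2 = r.c2,
    // matching the AND/EQUAL tree built in genJoinRelNode.
    static String rewriteUsing(String leftAlias, String rightAlias, List<String> columns) {
        StringJoiner on = new StringJoiner(" AND ");
        for (String c : columns) {
            on.add(leftAlias + "." + c + " = " + rightAlias + "." + c);
        }
        return on.toString();
    }

    public static void main(String[] args) {
        // prints: t1.k = t2.k AND t1.day = t2.day
        System.out.println(rewriteUsing("t1", "t2", List.of("k", "day")));
    }
}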

Aggregations

HiveParserRowResolver (org.apache.flink.table.planner.delegation.hive.copy.HiveParserRowResolver): 15 usages
SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException): 12 usages
ArrayList (java.util.ArrayList): 11 usages
RexNode (org.apache.calcite.rex.RexNode): 11 usages
HiveParserASTNode (org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode): 11 usages
ColumnInfo (org.apache.hadoop.hive.ql.exec.ColumnInfo): 11 usages
RelNode (org.apache.calcite.rel.RelNode): 10 usages
HiveParserQBParseInfo (org.apache.flink.table.planner.delegation.hive.copy.HiveParserQBParseInfo): 8 usages
ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc): 8 usages
HashMap (java.util.HashMap): 6 usages
LinkedHashMap (java.util.LinkedHashMap): 6 usages
RelDataType (org.apache.calcite.rel.type.RelDataType): 6 usages
JoinRelType (org.apache.calcite.rel.core.JoinRelType): 5 usages
RelDataTypeField (org.apache.calcite.rel.type.RelDataTypeField): 5 usages
SqlOperator (org.apache.calcite.sql.SqlOperator): 5 usages
ImmutableBitSet (org.apache.calcite.util.ImmutableBitSet): 5 usages
AggInfo (org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.AggInfo): 5 usages
TableType (org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.TableType): 5 usages
HiveParserBaseSemanticAnalyzer.getHiveAggInfo (org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.getHiveAggInfo): 5 usages
HiveParserBaseSemanticAnalyzer.obtainTableType (org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.obtainTableType): 5 usages