
Example 26 with HiveParserASTNode

Use of org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode in project flink by apache.

The class HiveParserCalcitePlanner, method genLogicalPlan.

// Given an AST, generate and return the RelNode plan. Returns null if nothing needs to be done.
public RelNode genLogicalPlan(HiveParserASTNode ast) throws SemanticException {
    LOG.info("Starting generating logical plan");
    HiveParserPreCboCtx cboCtx = new HiveParserPreCboCtx();
    // change the location of position alias process here
    processPositionAlias(ast, semanticAnalyzer.getConf());
    if (!semanticAnalyzer.genResolvedParseTree(ast, cboCtx)) {
        return null;
    }
    // strip ORDER BY from sub-queries before planning
    for (String alias : semanticAnalyzer.getQB().getSubqAliases()) {
        removeOBInSubQuery(semanticAnalyzer.getQB().getSubqForAlias(alias));
    }
    HiveParserASTNode queryForCbo = ast;
    if (cboCtx.type == HiveParserPreCboCtx.Type.CTAS
            || cboCtx.type == HiveParserPreCboCtx.Type.VIEW) {
        // nodeOfInterest is the query
        queryForCbo = cboCtx.nodeOfInterest;
    }
    verifyCanHandleAst(queryForCbo, getQB(), semanticAnalyzer.getQueryProperties());
    semanticAnalyzer.disableJoinMerge = true;
    return logicalPlan();
}
Also used : HiveParserASTNode(org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode) HiveParserPreCboCtx(org.apache.flink.table.planner.delegation.hive.copy.HiveParserPreCboCtx)
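
The null return above is part of the method's contract: when genResolvedParseTree reports that nothing needs planning, the caller must branch rather than dereference the result. Below is a minimal, self-contained Java sketch of that contract; genLogicalPlan, the blank-AST check, and the string "plan" are illustrative stand-ins, not Flink APIs.

final class PlanOrNothing {

    // Stand-in for genLogicalPlan: returns null when there is nothing to plan,
    // mirroring the early return after genResolvedParseTree above.
    static String genLogicalPlan(String ast) {
        if (ast.isBlank()) {
            // e.g. a statement fully handled during semantic analysis
            return null;
        }
        return "LogicalPlan(" + ast + ")";
    }

    public static void main(String[] args) {
        String plan = genLogicalPlan("SELECT 1");
        System.out.println(plan != null ? plan : "nothing to do");
    }
}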

Example 27 with HiveParserASTNode

Use of org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode in project flink by apache.

The class HiveParserCalcitePlanner, method genFilterRelNode.

private RelNode genFilterRelNode(
        HiveParserQB qb,
        HiveParserASTNode searchCond,
        RelNode srcRel,
        Map<String, Integer> outerNameToPosMap,
        HiveParserRowResolver outerRR,
        boolean forHavingClause)
        throws SemanticException {
    Map<HiveParserASTNode, RelNode> subQueryToRelNode = new HashMap<>();
    boolean isSubQuery =
            genSubQueryRelNode(qb, searchCond, srcRel, forHavingClause, subQueryToRelNode);
    if (isSubQuery) {
        ExprNodeDesc subQueryExpr =
                semanticAnalyzer.genExprNodeDesc(
                        searchCond,
                        relToRowResolver.get(srcRel),
                        outerRR,
                        subQueryToRelNode,
                        forHavingClause);
        Map<String, Integer> hiveColNameToCalcitePos = relToHiveColNameCalcitePosMap.get(srcRel);
        RexNode convertedFilterLHS =
                new HiveParserRexNodeConverter(
                                cluster,
                                srcRel.getRowType(),
                                outerNameToPosMap,
                                hiveColNameToCalcitePos,
                                relToRowResolver.get(srcRel),
                                outerRR,
                                0,
                                true,
                                subqueryId,
                                funcConverter)
                        .convert(subQueryExpr)
                        .accept(funcConverter);
        RelNode filterRel = LogicalFilter.create(srcRel, convertedFilterLHS);
        relToHiveColNameCalcitePosMap.put(filterRel, relToHiveColNameCalcitePosMap.get(srcRel));
        relToRowResolver.put(filterRel, relToRowResolver.get(srcRel));
        subqueryId++;
        return filterRel;
    } else {
        return genFilterRelNode(searchCond, srcRel, outerNameToPosMap, outerRR, forHavingClause);
    }
}
Also used : HiveParserASTNode(org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode) RelNode(org.apache.calcite.rel.RelNode) LinkedHashMap(java.util.LinkedHashMap) HashMap(java.util.HashMap) ExprNodeDesc(org.apache.hadoop.hive.ql.plan.ExprNodeDesc) RexNode(org.apache.calcite.rex.RexNode)
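
Note how the newly created filter node inherits the source node's entries in relToRowResolver and relToHiveColNameCalcitePosMap: the planner keeps side tables keyed by RelNode and copies the source entries to every derived node. A minimal, self-contained sketch of that side-table pattern, with Rel standing in for RelNode and a plain string standing in for the row resolver; an IdentityHashMap is used here to make the identity-based keying explicit, though the real planner uses ordinary maps.

import java.util.IdentityHashMap;
import java.util.Map;

final class SideTablePropagation {

    record Rel(String name) {}

    // Side table mapping each relational node to its metadata,
    // analogous to relToRowResolver above.
    static final Map<Rel, String> relToRowResolver = new IdentityHashMap<>();

    static Rel filter(Rel src) {
        Rel filtered = new Rel("Filter(" + src.name() + ")");
        // The derived node inherits the metadata of the node it wraps.
        relToRowResolver.put(filtered, relToRowResolver.get(src));
        return filtered;
    }

    public static void main(String[] args) {
        Rel scan = new Rel("Scan");
        relToRowResolver.put(scan, "rowResolver#scan");
        // prints rowResolver#scan
        System.out.println(relToRowResolver.get(filter(scan)));
    }
}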

Example 28 with HiveParserASTNode

Use of org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode in project flink by apache.

The class HiveParserCalcitePlanner, method genSubQueryRelNode.

private boolean genSubQueryRelNode(
        HiveParserQB qb,
        HiveParserASTNode node,
        RelNode srcRel,
        boolean forHavingClause,
        Map<HiveParserASTNode, RelNode> subQueryToRelNode)
        throws SemanticException {
    Set<HiveParserASTNode> corrScalarQueriesWithAgg = new HashSet<>();
    // disallow sub-queries which HIVE doesn't currently support
    subqueryRestrictionCheck(qb, node, srcRel, forHavingClause, corrScalarQueriesWithAgg);
    Deque<HiveParserASTNode> stack = new ArrayDeque<>();
    stack.push(node);
    boolean isSubQuery = false;
    while (!stack.isEmpty()) {
        HiveParserASTNode next = stack.pop();
        switch (next.getType()) {
            case HiveASTParser.TOK_SUBQUERY_EXPR:
                // Restriction 2.h Subquery is not allowed in LHS
                if (next.getChildren().size() == 3
                        && next.getChild(2).getType() == HiveASTParser.TOK_SUBQUERY_EXPR) {
                    throw new SemanticException(
                            HiveParserErrorMsg.getMsg(
                                    ErrorMsg.UNSUPPORTED_SUBQUERY_EXPRESSION,
                                    next.getChild(2),
                                    "SubQuery in LHS expressions are not supported."));
                }
                String sbQueryAlias = "sq_" + qb.incrNumSubQueryPredicates();
                HiveParserQB subQB = new HiveParserQB(qb.getId(), sbQueryAlias, true);
                HiveParserBaseSemanticAnalyzer.Phase1Ctx ctx1 = initPhase1Ctx();
                semanticAnalyzer.doPhase1((HiveParserASTNode) next.getChild(1), subQB, ctx1, null);
                semanticAnalyzer.getMetaData(subQB);
                RelNode subQueryRelNode =
                        genLogicalPlan(
                                subQB,
                                false,
                                relToHiveColNameCalcitePosMap.get(srcRel),
                                relToRowResolver.get(srcRel));
                subQueryToRelNode.put(next, subQueryRelNode);
                isSubQuery = true;
                break;
            default:
                int childCount = next.getChildCount();
                for (int i = childCount - 1; i >= 0; i--) {
                    stack.push((HiveParserASTNode) next.getChild(i));
                }
        }
    }
    return isSubQuery;
}
Also used : HiveParserASTNode(org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode) HiveParserQB(org.apache.flink.table.planner.delegation.hive.copy.HiveParserQB) ArrayDeque(java.util.ArrayDeque) RelNode(org.apache.calcite.rel.RelNode) HiveParserBaseSemanticAnalyzer(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer) HashSet(java.util.HashSet) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException)
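
The loop above is a standard non-recursive pre-order traversal: children are pushed right-to-left so they pop left-to-right, matching the visit order of a recursive walk without risking stack overflow on deep ASTs. A self-contained sketch of just that traversal, with a simple Node record standing in for HiveParserASTNode and println standing in for the per-node work.

import java.util.ArrayDeque;
import java.util.Deque;
import java.util.List;

final class PreOrderWalk {

    record Node(String token, List<Node> children) {}

    static void walk(Node root) {
        Deque<Node> stack = new ArrayDeque<>();
        stack.push(root);
        while (!stack.isEmpty()) {
            Node next = stack.pop();
            System.out.println(next.token()); // "visit" the node
            // Push children in reverse so the leftmost child is popped first.
            List<Node> kids = next.children();
            for (int i = kids.size() - 1; i >= 0; i--) {
                stack.push(kids.get(i));
            }
        }
    }

    public static void main(String[] args) {
        Node root = new Node("TOK_WHERE", List.of(
                new Node("TOK_TABREF", List.of()),
                new Node("TOK_SUBQUERY_EXPR", List.of())));
        // prints TOK_WHERE, TOK_TABREF, TOK_SUBQUERY_EXPR
        walk(root);
    }
}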

Example 29 with HiveParserASTNode

Use of org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode in project flink by apache.

The class HiveParserDDLSemanticAnalyzer, method convertAlterTableDropParts.

private Operation convertAlterTableDropParts(String[] qualified, HiveParserASTNode ast) {
    boolean ifExists = ast.getFirstChildWithType(HiveASTParser.TOK_IFEXISTS) != null;
    // If the drop has to fail on non-existent partitions, we cannot batch expressions.
    // That is because we actually have to check each separate expression for existence.
    // We could do a small optimization for the case where expr has all columns and all
    // operators are equality, if we assume those would always match one partition (which
    // may not be true with legacy, non-normalized column values). This is probably a
    // popular case but that's kinda hacky. Let's not do it for now.
    Table tab = getTable(new ObjectPath(qualified[0], qualified[1]));
    // hive represents drop partition specs with generic func desc, but what we need is just
    // spec maps
    List<Map<String, String>> partSpecs = new ArrayList<>();
    for (int i = 0; i < ast.getChildCount(); i++) {
        HiveParserASTNode child = (HiveParserASTNode) ast.getChild(i);
        if (child.getType() == HiveASTParser.TOK_PARTSPEC) {
            partSpecs.add(getPartSpec(child));
        }
    }
    validateAlterTableType(tab);
    ObjectIdentifier tableIdentifier =
            catalogManager.qualifyIdentifier(UnresolvedIdentifier.of(qualified[0], qualified[1]));
    List<CatalogPartitionSpec> specs =
            partSpecs.stream().map(CatalogPartitionSpec::new).collect(Collectors.toList());
    return new DropPartitionsOperation(tableIdentifier, ifExists, specs);
}
Also used : DropPartitionsOperation(org.apache.flink.table.operations.ddl.DropPartitionsOperation) ObjectPath(org.apache.flink.table.catalog.ObjectPath) CatalogTable(org.apache.flink.table.catalog.CatalogTable) SqlCreateHiveTable(org.apache.flink.sql.parser.hive.ddl.SqlCreateHiveTable) Table(org.apache.hadoop.hive.ql.metadata.Table) ContextResolvedTable(org.apache.flink.table.catalog.ContextResolvedTable) CatalogBaseTable(org.apache.flink.table.catalog.CatalogBaseTable) HiveParserASTNode(org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode) ArrayList(java.util.ArrayList) Map(java.util.Map) LinkedHashMap(java.util.LinkedHashMap) HashMap(java.util.HashMap) NotNullConstraint(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.NotNullConstraint) UniqueConstraint(org.apache.flink.table.api.constraints.UniqueConstraint) CatalogPartitionSpec(org.apache.flink.table.catalog.CatalogPartitionSpec) ObjectIdentifier(org.apache.flink.table.catalog.ObjectIdentifier)
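
The last two statements above reduce Hive's partition representation to plain spec maps and wrap each one in a CatalogPartitionSpec before building the operation. A minimal, self-contained sketch of that conversion, with PartitionSpec as a stand-in for Flink's CatalogPartitionSpec (which likewise wraps a Map<String, String>) and hard-coded spec maps in place of the TOK_PARTSPEC children collected above.

import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

final class PartSpecConversion {

    // Stand-in for org.apache.flink.table.catalog.CatalogPartitionSpec.
    record PartitionSpec(Map<String, String> spec) {}

    public static void main(String[] args) {
        // Spec maps as they would be collected from the AST children.
        List<Map<String, String>> partSpecs = List.of(
                Map.of("dt", "2024-01-01"),
                Map.of("dt", "2024-01-02"));
        List<PartitionSpec> specs = partSpecs.stream()
                .map(PartitionSpec::new)
                .collect(Collectors.toList());
        specs.forEach(s -> System.out.println(s.spec()));
    }
}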

Example 30 with HiveParserASTNode

Use of org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode in project flink by apache.

The class HiveParserDDLSemanticAnalyzer, method convertAlterTable.

private Operation convertAlterTable(HiveParserASTNode input) throws SemanticException {
    Operation operation = null;
    HiveParserASTNode ast = (HiveParserASTNode) input.getChild(1);
    String[] qualified =
            HiveParserBaseSemanticAnalyzer.getQualifiedTableName(
                    (HiveParserASTNode) input.getChild(0));
    String tableName = HiveParserBaseSemanticAnalyzer.getDotName(qualified);
    HashMap<String, String> partSpec = null;
    HiveParserASTNode partSpecNode = (HiveParserASTNode) input.getChild(2);
    if (partSpecNode != null) {
        partSpec = getPartSpec(partSpecNode);
    }
    CatalogBaseTable alteredTable = getAlteredTable(tableName, false);
    switch (ast.getType()) {
        case HiveASTParser.TOK_ALTERTABLE_RENAME:
            operation = convertAlterTableRename(tableName, ast, false);
            break;
        case HiveASTParser.TOK_ALTERTABLE_ADDCOLS:
            operation = convertAlterTableModifyCols(alteredTable, tableName, ast, false);
            break;
        case HiveASTParser.TOK_ALTERTABLE_REPLACECOLS:
            operation = convertAlterTableModifyCols(alteredTable, tableName, ast, true);
            break;
        case HiveASTParser.TOK_ALTERTABLE_RENAMECOL:
            operation = convertAlterTableChangeCol(alteredTable, qualified, ast);
            break;
        case HiveASTParser.TOK_ALTERTABLE_ADDPARTS:
            operation = convertAlterTableAddParts(qualified, ast);
            break;
        case HiveASTParser.TOK_ALTERTABLE_DROPPARTS:
            operation = convertAlterTableDropParts(qualified, ast);
            break;
        case HiveASTParser.TOK_ALTERTABLE_PROPERTIES:
            operation = convertAlterTableProps(alteredTable, tableName, null, ast, false, false);
            break;
        case HiveASTParser.TOK_ALTERTABLE_DROPPROPERTIES:
            operation = convertAlterTableProps(alteredTable, tableName, null, ast, false, true);
            break;
        case HiveASTParser.TOK_ALTERTABLE_UPDATESTATS:
            operation = convertAlterTableProps(alteredTable, tableName, partSpec, ast, false, false);
            break;
        case HiveASTParser.TOK_ALTERTABLE_FILEFORMAT:
            operation = convertAlterTableFileFormat(alteredTable, ast, tableName, partSpec);
            break;
        case HiveASTParser.TOK_ALTERTABLE_LOCATION:
            operation = convertAlterTableLocation(alteredTable, ast, tableName, partSpec);
            break;
        case HiveASTParser.TOK_ALTERTABLE_SERIALIZER:
            operation = convertAlterTableSerde(alteredTable, ast, tableName, partSpec);
            break;
        case HiveASTParser.TOK_ALTERTABLE_SERDEPROPERTIES:
            operation = convertAlterTableSerdeProps(alteredTable, ast, tableName, partSpec);
            break;
        case HiveASTParser.TOK_ALTERTABLE_TOUCH:
        case HiveASTParser.TOK_ALTERTABLE_ARCHIVE:
        case HiveASTParser.TOK_ALTERTABLE_UNARCHIVE:
        case HiveASTParser.TOK_ALTERTABLE_PARTCOLTYPE:
        case HiveASTParser.TOK_ALTERTABLE_SKEWED:
        case HiveASTParser.TOK_ALTERTABLE_EXCHANGEPARTITION:
        case HiveASTParser.TOK_ALTERTABLE_MERGEFILES:
        case HiveASTParser.TOK_ALTERTABLE_RENAMEPART:
        case HiveASTParser.TOK_ALTERTABLE_SKEWED_LOCATION:
        case HiveASTParser.TOK_ALTERTABLE_BUCKETS:
        case HiveASTParser.TOK_ALTERTABLE_CLUSTER_SORT:
        case HiveASTParser.TOK_ALTERTABLE_COMPACT:
        case HiveASTParser.TOK_ALTERTABLE_UPDATECOLSTATS:
        case HiveASTParser.TOK_ALTERTABLE_DROPCONSTRAINT:
        case HiveASTParser.TOK_ALTERTABLE_ADDCONSTRAINT:
            handleUnsupportedOperation(ast);
            break;
        default:
            throw new ValidationException("Unknown AST node for ALTER TABLE: " + ast);
    }
    return operation;
}
Also used : CatalogBaseTable(org.apache.flink.table.catalog.CatalogBaseTable) HiveParserASTNode(org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode) ValidationException(org.apache.flink.table.api.ValidationException) DropDatabaseOperation(org.apache.flink.table.operations.ddl.DropDatabaseOperation) AlterTableOptionsOperation(org.apache.flink.table.operations.ddl.AlterTableOptionsOperation) UseDatabaseOperation(org.apache.flink.table.operations.UseDatabaseOperation) CreateViewOperation(org.apache.flink.table.operations.ddl.CreateViewOperation) AlterDatabaseOperation(org.apache.flink.table.operations.ddl.AlterDatabaseOperation) HiveOperation(org.apache.hadoop.hive.ql.plan.HiveOperation) QueryOperation(org.apache.flink.table.operations.QueryOperation) DropCatalogFunctionOperation(org.apache.flink.table.operations.ddl.DropCatalogFunctionOperation) ShowTablesOperation(org.apache.flink.table.operations.ShowTablesOperation) DescribeTableOperation(org.apache.flink.table.operations.DescribeTableOperation) ShowFunctionsOperation(org.apache.flink.table.operations.ShowFunctionsOperation) CreateDatabaseOperation(org.apache.flink.table.operations.ddl.CreateDatabaseOperation) AlterPartitionPropertiesOperation(org.apache.flink.table.operations.ddl.AlterPartitionPropertiesOperation) ShowPartitionsOperation(org.apache.flink.table.operations.ShowPartitionsOperation) AlterViewPropertiesOperation(org.apache.flink.table.operations.ddl.AlterViewPropertiesOperation) Operation(org.apache.flink.table.operations.Operation) DropTempSystemFunctionOperation(org.apache.flink.table.operations.ddl.DropTempSystemFunctionOperation) ShowViewsOperation(org.apache.flink.table.operations.ShowViewsOperation) ShowDatabasesOperation(org.apache.flink.table.operations.ShowDatabasesOperation) AlterTableSchemaOperation(org.apache.flink.table.operations.ddl.AlterTableSchemaOperation) CreateTableASOperation(org.apache.flink.table.operations.ddl.CreateTableASOperation) DropTableOperation(org.apache.flink.table.operations.ddl.DropTableOperation) AlterViewAsOperation(org.apache.flink.table.operations.ddl.AlterViewAsOperation) CreateTableOperation(org.apache.flink.table.operations.ddl.CreateTableOperation) DropViewOperation(org.apache.flink.table.operations.ddl.DropViewOperation) AddPartitionsOperation(org.apache.flink.table.operations.ddl.AddPartitionsOperation) DropPartitionsOperation(org.apache.flink.table.operations.ddl.DropPartitionsOperation) AlterTableRenameOperation(org.apache.flink.table.operations.ddl.AlterTableRenameOperation) AlterViewRenameOperation(org.apache.flink.table.operations.ddl.AlterViewRenameOperation) CreateCatalogFunctionOperation(org.apache.flink.table.operations.ddl.CreateCatalogFunctionOperation) CreateTempSystemFunctionOperation(org.apache.flink.table.operations.ddl.CreateTempSystemFunctionOperation)
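
The method is a single dispatch over the AST token type: supported tokens are converted by dedicated convert* helpers, a block of grouped case labels funnels recognized-but-unsupported tokens into one handler, and an unknown token is a hard error. A compact sketch of that shape; the string tokens and operation names are purely illustrative, and the real method returns Operation instances rather than strings.

final class AlterTableDispatch {

    static String convert(String token) {
        switch (token) {
            case "TOK_ALTERTABLE_RENAME":
                return "AlterTableRenameOperation";
            case "TOK_ALTERTABLE_ADDCOLS":
                return "AlterTableSchemaOperation";
            // Grouped fall-through: tokens the parser accepts but this dialect rejects.
            case "TOK_ALTERTABLE_TOUCH":
            case "TOK_ALTERTABLE_ARCHIVE":
                throw new UnsupportedOperationException("Unsupported operation: " + token);
            default:
                throw new IllegalArgumentException("Unknown AST node for ALTER TABLE: " + token);
        }
    }

    public static void main(String[] args) {
        System.out.println(convert("TOK_ALTERTABLE_RENAME"));
    }
}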

Aggregations

HiveParserASTNode (org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode): 38
LinkedHashMap (java.util.LinkedHashMap): 18
HashMap (java.util.HashMap): 15
ArrayList (java.util.ArrayList): 14
RelNode (org.apache.calcite.rel.RelNode): 14
SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException): 14
ValidationException (org.apache.flink.table.api.ValidationException): 10
UniqueConstraint (org.apache.flink.table.api.constraints.UniqueConstraint): 10
NotNullConstraint (org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.NotNullConstraint): 10
HiveParserRowResolver (org.apache.flink.table.planner.delegation.hive.copy.HiveParserRowResolver): 10
RexNode (org.apache.calcite.rex.RexNode): 9
ColumnInfo (org.apache.hadoop.hive.ql.exec.ColumnInfo): 9
ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc): 9
Map (java.util.Map): 8
HiveParserTypeCheckCtx (org.apache.flink.table.planner.delegation.hive.copy.HiveParserTypeCheckCtx): 6
Table (org.apache.hadoop.hive.ql.metadata.Table): 6
CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable): 5
ObjectIdentifier (org.apache.flink.table.catalog.ObjectIdentifier): 5
HiveParserQBParseInfo (org.apache.flink.table.planner.delegation.hive.copy.HiveParserQBParseInfo): 5
RelDataType (org.apache.calcite.rel.type.RelDataType): 4