Search in sources:

Example 46 with ASTNode

use of org.apache.hadoop.hive.ql.parse.ASTNode in project hive by apache.

From the class AlterMaterializedViewRebuildAnalyzer: method fixUpASTJoinInsertDeleteIncrementalRebuild.

private void fixUpASTJoinInsertDeleteIncrementalRebuild(ASTNode newAST) throws SemanticException {
    // Rewrites the INSERT OVERWRITE AST produced for a materialized view rebuild into the
    // multi-insert (INSERT branch + DELETE branch) shape that a MERGE statement would produce,
    // so incremental maintenance with insert/delete semantics can be planned.
    // Here we need to do this complex AST rewriting that generates the same plan
    // that a MERGE clause would generate because CBO does not support MERGE yet.
    // TODO: Support MERGE as first class member in CBO to simplify this logic.
    // 1) Replace INSERT OVERWRITE (TOK_DESTINATION) by INSERT (TOK_INSERT_INTO),
    // reusing the destination's children (table spec) under the new node.
    ASTNode insertNode = new ASTSearcher().simpleBreadthFirstSearch(newAST, HiveParser.TOK_QUERY, HiveParser.TOK_INSERT);
    ASTNode destinationNode = (ASTNode) insertNode.getChild(0);
    ASTNode newInsertInto = (ASTNode) ParseDriver.adaptor.create(HiveParser.TOK_INSERT_INTO, "TOK_INSERT_INTO");
    newInsertInto.addChildren(destinationNode.getChildren());
    ASTNode destinationParentNode = (ASTNode) destinationNode.getParent();
    int childIndex = destinationNode.childIndex;
    destinationParentNode.deleteChild(childIndex);
    destinationParentNode.insertChild(childIndex, newInsertInto);
    // 1.1) Extract name as we will need it afterwards:
    // TOK_DESTINATION TOK_TAB TOK_TABNAME <materialization_name>
    ASTNode materializationNode = new ASTSearcher().simpleBreadthFirstSearch(newInsertInto, HiveParser.TOK_INSERT_INTO, HiveParser.TOK_TAB, HiveParser.TOK_TABNAME);
    // 1.2) Add a ROW__ID projection of the materialization to the SELECT list of the
    // subquery feeding the right outer join; the delete branch added below needs it.
    // NOTE(review): both searches are assumed to find a match in the rewritten AST
    // (a miss would NPE) — the rewriting that produced newAST guarantees this shape.
    ASTNode subqueryNodeInputROJ = new ASTSearcher().simpleBreadthFirstSearch(newAST, HiveParser.TOK_QUERY, HiveParser.TOK_FROM, HiveParser.TOK_RIGHTOUTERJOIN, HiveParser.TOK_SUBQUERY);
    ASTNode selectNodeInputROJ = new ASTSearcher().simpleBreadthFirstSearch(subqueryNodeInputROJ, HiveParser.TOK_SUBQUERY, HiveParser.TOK_QUERY, HiveParser.TOK_INSERT, HiveParser.TOK_SELECT);
    ASTNode selectExprNodeInputROJ = (ASTNode) ParseDriver.adaptor.create(HiveParser.TOK_SELEXPR, "TOK_SELEXPR");
    ParseDriver.adaptor.addChild(selectNodeInputROJ, selectExprNodeInputROJ);
    ParseDriver.adaptor.addChild(selectExprNodeInputROJ, createRowIdNode(TableName.getDbTable(materializationNode.getChild(0).getText(), materializationNode.getChild(1).getText())));
    ASTNode whereClauseInInsert = findWhereClause(insertNode);
    // The WHERE clause is expected to be an OR of two disjuncts: one selecting rows to
    // delete (a DOT expression, presumably referencing ROW__IS__DELETED — TODO confirm)
    // and one selecting rows to insert.
    if (whereClauseInInsert.getChild(0).getType() != HiveParser.KW_OR) {
        throw new SemanticException("OR clause expected below TOK_WHERE in incremental rewriting");
    }
    // We bypass the OR clause and select the first disjunct
    int indexDelete;
    int indexInsert;
    if (whereClauseInInsert.getChild(0).getChild(0).getType() == HiveParser.DOT) {
        indexDelete = 0;
        indexInsert = 1;
    } else if (whereClauseInInsert.getChild(0).getChild(1).getType() == HiveParser.DOT) {
        indexDelete = 1;
        indexInsert = 0;
    } else {
        throw new SemanticException("Unexpected condition in incremental rewriting");
    }
    // Keep only the insert-side disjunct in this branch's WHERE clause.
    ASTNode newCondInInsert = (ASTNode) whereClauseInInsert.getChild(0).getChild(indexInsert);
    ParseDriver.adaptor.setChild(whereClauseInInsert, 0, newCondInInsert);
    // 2) Add the DELETE branch, filtered by the delete-side disjunct.
    addDeleteBranch(insertNode, subqueryNodeInputROJ, (ASTNode) whereClauseInInsert.getChild(0).getChild(indexDelete));
    // 3) Add sort node to delete branch
    ASTNode sortNode = createSortNode(createRowIdNode((ASTNode) subqueryNodeInputROJ.getChild(1)));
    ParseDriver.adaptor.addChild(insertNode.getParent().getChild(2), sortNode);
    // 4) Now we set some tree properties related to multi-insert
    // operation with INSERT/UPDATE
    ctx.setOperation(Context.Operation.MERGE);
    ctx.addDestNamePrefix(1, Context.DestClausePrefix.INSERT);
    ctx.addDestNamePrefix(2, Context.DestClausePrefix.DELETE);
}
Also used : ASTNode(org.apache.hadoop.hive.ql.parse.ASTNode) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException)

Example 47 with ASTNode

use of org.apache.hadoop.hive.ql.parse.ASTNode in project hive by apache.

From the class AlterMaterializedViewRebuildAnalyzer: method getRewrittenAST.

/**
 * Looks up the materialized view identified by {@code tableName} and builds the rewritten
 * INSERT OVERWRITE AST from its expanded query text; this AST is what the rebuild analyzes.
 * For transactional tables (outside of EXPLAIN) it also acquires the per-view rebuild lock,
 * since only one rebuild per materialized view may run at a time.
 *
 * @param tableName fully qualified name of the materialized view to rebuild
 * @return the parsed AST of the rewritten INSERT statement
 * @throws SemanticException if the table is not a materialized view, its definition is
 *         empty, the rebuild lock cannot be acquired, or parsing fails
 */
private ASTNode getRewrittenAST(TableName tableName) throws SemanticException {
    ASTNode rewrittenAST;
    // We need to go lookup the table and get the select statement and then parse it.
    try {
        Table table = getTableObjectByName(tableName.getNotEmptyDbTable(), true);
        if (!table.isMaterializedView()) {
            // Cannot rebuild not materialized view
            throw new SemanticException(ErrorMsg.REBUILD_NO_MATERIALIZED_VIEW);
        }
        // We need to use the expanded text for the materialized view, as it will contain
        // the qualified table aliases, etc.
        String viewText = table.getViewExpandedText();
        if (viewText.trim().isEmpty()) {
            throw new SemanticException(ErrorMsg.MATERIALIZED_VIEW_DEF_EMPTY);
        }
        Context ctx = new Context(queryState.getConf());
        String rewrittenInsertStatement = String.format(REWRITTEN_INSERT_STATEMENT, tableName.getEscapedNotEmptyDbTable(), viewText);
        rewrittenAST = ParseUtils.parse(rewrittenInsertStatement, ctx);
        this.ctx.addSubContext(ctx);
        if (!this.ctx.isExplainPlan() && AcidUtils.isTransactionalTable(table)) {
            // Acquire lock for the given materialized view. Only one rebuild per materialized view can be triggered at a
            // given time, as otherwise we might produce incorrect results if incremental maintenance is triggered.
            HiveTxnManager txnManager = getTxnMgr();
            LockState state;
            try {
                state = txnManager.acquireMaterializationRebuildLock(tableName.getDb(), tableName.getTable(), txnManager.getCurrentTxnId()).getState();
            } catch (LockException e) {
                throw new SemanticException("Exception acquiring lock for rebuilding the materialized view", e);
            }
            if (state != LockState.ACQUIRED) {
                throw new SemanticException("Another process is rebuilding the materialized view " + tableName.getNotEmptyDbTable());
            }
        }
    } catch (SemanticException e) {
        // Fix: previously every SemanticException thrown above fell into the generic catch
        // below and was wrapped in another SemanticException, obscuring the original
        // ErrorMsg and message. Rethrow it unchanged instead.
        throw e;
    } catch (Exception e) {
        throw new SemanticException(e);
    }
    return rewrittenAST;
}
Also used : Context(org.apache.hadoop.hive.ql.Context) RelOptHiveTable(org.apache.hadoop.hive.ql.optimizer.calcite.RelOptHiveTable) Table(org.apache.hadoop.hive.ql.metadata.Table) LockException(org.apache.hadoop.hive.ql.lockmgr.LockException) ASTNode(org.apache.hadoop.hive.ql.parse.ASTNode) HiveTxnManager(org.apache.hadoop.hive.ql.lockmgr.HiveTxnManager) LockState(org.apache.hadoop.hive.metastore.api.LockState) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException) LockException(org.apache.hadoop.hive.ql.lockmgr.LockException) HiveException(org.apache.hadoop.hive.ql.metadata.HiveException) ColumnPropagationException(org.apache.hadoop.hive.ql.optimizer.calcite.rules.views.ColumnPropagationException) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException)

Example 48 with ASTNode

use of org.apache.hadoop.hive.ql.parse.ASTNode in project hive by apache.

From the class AlterMaterializedViewRebuildAnalyzer: method analyzeInternal.

@Override
public void analyzeInternal(ASTNode root) throws SemanticException {
    // Once a rebuild mode has already been chosen, delegate straight to the parent analyzer.
    if (mvRebuildMode != MaterializationRebuildMode.NONE) {
        super.analyzeInternal(root);
        return;
    }
    ASTNode tableNameNode = (ASTNode) root.getChild(0);
    TableName tableName = getQualifiedTableName(tableNameNode);
    // now. However query scheduler requires the fully qualified table name.
    if (ctx.isScheduledQuery()) {
        unparseTranslator.addTableNameTranslation(tableNameNode, SessionState.get().getCurrentDatabase());
        return;
    }
    // Skip the rebuild entirely when the view is known to be up to date.
    try {
        Boolean outdated = db.isOutdatedMaterializedView(getTxnMgr(), tableName);
        if (Boolean.FALSE.equals(outdated)) {
            String upToDateMsg = String.format("Materialized view %s.%s is up to date. Skipping rebuild.", tableName.getDb(), tableName.getTable());
            LOG.info(upToDateMsg);
            console.printInfo(upToDateMsg, false);
            return;
        }
    } catch (HiveException e) {
        LOG.warn("Error while checking materialized view " + tableName.getDb() + "." + tableName.getTable(), e);
    }
    // Parse the rewritten INSERT OVERWRITE statement and analyze it as the rebuild plan.
    ASTNode rewrittenAST = getRewrittenAST(tableName);
    mvRebuildMode = MaterializationRebuildMode.INSERT_OVERWRITE_REBUILD;
    mvRebuildDbName = tableName.getDb();
    mvRebuildName = tableName.getTable();
    LOG.debug("Rebuilding materialized view " + tableName.getNotEmptyDbTable());
    super.analyzeInternal(rewrittenAST);
    queryState.setCommandType(HiveOperation.ALTER_MATERIALIZED_VIEW_REBUILD);
}
Also used : TableName(org.apache.hadoop.hive.common.TableName) HiveException(org.apache.hadoop.hive.ql.metadata.HiveException) ASTNode(org.apache.hadoop.hive.ql.parse.ASTNode)

Example 49 with ASTNode

use of org.apache.hadoop.hive.ql.parse.ASTNode in project hive by apache.

From the class AlterMaterializedViewRebuildAnalyzer: method createSortNode.

/**
 * Wraps the given sort key in an ascending, nulls-first sort clause:
 *
 * <pre>
 * TOK_SORTBY
 *   TOK_TABSORTCOLNAMEASC
 *     TOK_NULLS_FIRST
 *       &lt;sortKeyNode&gt;
 * </pre>
 */
private ASTNode createSortNode(ASTNode sortKeyNode) {
    ASTNode sortByNode = (ASTNode) ParseDriver.adaptor.create(HiveParser.TOK_SORTBY, "TOK_SORTBY");
    ASTNode ascendingNode = (ASTNode) ParseDriver.adaptor.create(HiveParser.TOK_TABSORTCOLNAMEASC, "TOK_TABSORTCOLNAMEASC");
    ASTNode nullsFirstNode = (ASTNode) ParseDriver.adaptor.create(HiveParser.TOK_NULLS_FIRST, "TOK_NULLS_FIRST");
    // Chain the nodes bottom-up; each addChild only links parent and child,
    // so the attachment order does not affect the resulting tree.
    ParseDriver.adaptor.addChild(nullsFirstNode, sortKeyNode);
    ParseDriver.adaptor.addChild(ascendingNode, nullsFirstNode);
    ParseDriver.adaptor.addChild(sortByNode, ascendingNode);
    return sortByNode;
}
Also used : ASTNode(org.apache.hadoop.hive.ql.parse.ASTNode)

Example 50 with ASTNode

use of org.apache.hadoop.hive.ql.parse.ASTNode in project hive by apache.

From the class TestTransactionStatement: method testTxnStart.

@Test
public void testTxnStart() throws ParseException {
    // Each row pairs a START TRANSACTION statement with the string tree its AST must render to.
    String[][] statementsAndExpectedTrees = {
        { "START TRANSACTION", "tok_start_transaction" },
        { "START TRANSACTION ISOLATION LEVEL SNAPSHOT", "(tok_start_transaction (tok_isolation_level tok_isolation_snapshot))" },
        { "START TRANSACTION READ ONLY", "(tok_start_transaction (tok_txn_access_mode tok_txn_read_only))" },
        { "START TRANSACTION READ WRITE, ISOLATION LEVEL SNAPSHOT", "(tok_start_transaction (tok_txn_access_mode tok_txn_read_write) (tok_isolation_level tok_isolation_snapshot))" }
    };
    for (String[] testCase : statementsAndExpectedTrees) {
        ASTNode ast = parse(testCase[0]);
        Assert.assertEquals("AST doesn't match", testCase[1], ast.toStringTree());
    }
}
Also used : ASTNode(org.apache.hadoop.hive.ql.parse.ASTNode) Test(org.junit.Test)

Aggregations

ASTNode (org.apache.hadoop.hive.ql.parse.ASTNode)116 SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException)37 DDLWork (org.apache.hadoop.hive.ql.ddl.DDLWork)24 ArrayList (java.util.ArrayList)21 ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc)13 HashMap (java.util.HashMap)11 FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema)11 Table (org.apache.hadoop.hive.ql.metadata.Table)10 Node (org.apache.hadoop.hive.ql.lib.Node)9 HiveException (org.apache.hadoop.hive.ql.metadata.HiveException)8 TableName (org.apache.hadoop.hive.common.TableName)7 ColumnInfo (org.apache.hadoop.hive.ql.exec.ColumnInfo)7 RowResolver (org.apache.hadoop.hive.ql.parse.RowResolver)7 ReadEntity (org.apache.hadoop.hive.ql.hooks.ReadEntity)6 RelNode (org.apache.calcite.rel.RelNode)5 SQLCheckConstraint (org.apache.hadoop.hive.metastore.api.SQLCheckConstraint)5 Context (org.apache.hadoop.hive.ql.Context)5 ParseDriver (org.apache.hadoop.hive.ql.parse.ParseDriver)5 SemanticAnalyzer (org.apache.hadoop.hive.ql.parse.SemanticAnalyzer)5 WindowingException (com.sap.hadoop.windowing.WindowingException)4