Search in sources :

Example 51 with ASTNode

Use of org.apache.hadoop.hive.ql.parse.ASTNode in project hive by apache.

The class ExplainSQRewriteTask, method addRewrites.

/**
 * Rewrites the token stream so a hoisted SubQuery predicate appears as an explicit join.
 * The SubQuery predicate in the outer query's where/having clause is replaced by a
 * 'true' predicate ("1 = 1"), the join text is accumulated into {@code addedJoins},
 * and any post-join condition is appended to the WHERE clause.
 *
 * @param stream     token rewrite stream the edits are recorded into
 * @param sq         the SubQuery being rewritten
 * @param program    rewrite-program name used to group edits in {@code stream}
 * @param out        destination for the human-readable explanation
 * @param qbAlias    alias of the enclosing query block, or null
 * @param isWhere    true if the SubQuery came from a Where clause, false for Having
 * @param addedJoins accumulator for the join clauses produced by the rewrite
 */
void addRewrites(TokenRewriteStream stream, QBSubQuery sq, String program, PrintStream out, String qbAlias, boolean isWhere, StringBuilder addedJoins) {
    ASTNode sqNode = sq.getOriginalSubQueryASTForRewrite();
    ASTNode tokQry = getQueryASTNode(sqNode);
    // Child 1 of TOK_QUERY is TOK_INSERT (child 0 is TOK_FROM).
    ASTNode tokInsert = (ASTNode) tokQry.getChild(1);
    ASTNode tokWhere = null;
    for (int i = 0; i < tokInsert.getChildCount(); i++) {
        if (tokInsert.getChild(i).getType() == HiveParser.TOK_WHERE) {
            tokWhere = (ASTNode) tokInsert.getChild(i);
            break;
        }
    }
    SubQueryDiagnostic.QBSubQueryRewrite diag = sq.getDiagnostic();
    String sqStr = diag.getRewrittenQuery();
    String joinCond = diag.getJoiningCondition();
    /*
       * the SubQuery predicate has been hoisted as a Join. The SubQuery predicate is replaced
       * by a 'true' predicate in the Outer QB's where/having clause.
       */
    stream.replace(program, sqNode.getTokenStartIndex(), sqNode.getTokenStopIndex(), "1 = 1");
    String sqJoin = " " + getJoinKeyWord(sq) + " " + sqStr + " " + joinCond;
    addedJoins.append(" ").append(sqJoin);
    String postJoinCond = diag.getOuterQueryPostJoinCond();
    if (postJoinCond != null) {
        // Bug fix: tokWhere may still be null if TOK_INSERT carried no TOK_WHERE child;
        // fail with a descriptive message instead of a raw NullPointerException.
        if (tokWhere == null) {
            throw new IllegalStateException(
                "SubQuery rewrite produced a post-join condition but no WHERE clause was found in the query AST");
        }
        stream.insertAfter(program, tokWhere.getTokenStopIndex(), " and " + postJoinCond);
    }
    String qualifier = isWhere ? "Where Clause " : "Having Clause ";
    if (qbAlias != null) {
        qualifier = qualifier + "for Query Block '" + qbAlias + "' ";
    }
    out.println(String.format("\n%s Rewritten SubQuery:\n%s", qualifier, diag.getRewrittenQuery()));
    out.println(String.format("\n%s SubQuery Joining Condition:\n%s", qualifier, diag.getJoiningCondition()));
}
Also used : ASTNode(org.apache.hadoop.hive.ql.parse.ASTNode) SubQueryDiagnostic(org.apache.hadoop.hive.ql.parse.SubQueryDiagnostic)

Example 52 with ASTNode

Use of org.apache.hadoop.hive.ql.parse.ASTNode in project hive by apache.

The class ExplainSQRewriteTask, method execute.

@Override
public int execute() {
    // Writes the SubQuery-rewrite explanation to the work's result file.
    // Returns 0 on success, 1 on any failure (the exception is recorded via setException).
    try {
        Path resFile = new Path(work.getResFile());
        OutputStream outS = resFile.getFileSystem(conf).create(resFile);
        // try-with-resources replaces the original manual close()/null/IOUtils.closeStream
        // dance; the PrintStream (and the wrapped OutputStream) is closed on every path.
        try (PrintStream out = new PrintStream(outS)) {
            QB qb = work.getQb();
            TokenRewriteStream stream = work.getCtx().getTokenRewriteStream();
            String program = "sq rewrite";
            ASTNode ast = work.getAst();
            try {
                addRewrites(stream, qb, program, out);
                out.println("\nRewritten Query:\n" + stream.toString(program, ast.getTokenStartIndex(), ast.getTokenStopIndex()));
            } finally {
                // Always drop the rewrite program so the shared stream is left clean.
                stream.deleteProgram(program);
            }
        }
        return 0;
    } catch (Exception e) {
        setException(e);
        LOG.error("Failed to execute", e);
        return 1;
    }
}
Also used : Path(org.apache.hadoop.fs.Path) PrintStream(java.io.PrintStream) QB(org.apache.hadoop.hive.ql.parse.QB) TokenRewriteStream(org.antlr.runtime.TokenRewriteStream) OutputStream(java.io.OutputStream) ASTNode(org.apache.hadoop.hive.ql.parse.ASTNode)

Example 53 with ASTNode

Use of org.apache.hadoop.hive.ql.parse.ASTNode in project hive by apache.

The class Action, method fromMetastoreExpression.

/**
 * Parses a metastore trigger-action expression (e.g. KILL, MOVE TO poolPath)
 * into an {@link Action}.
 *
 * @param metastoreActionExpression the action expression text stored in the metastore
 * @return the parsed Action
 * @throws IllegalArgumentException if the expression cannot be parsed or is malformed
 */
public static Action fromMetastoreExpression(String metastoreActionExpression) {
    final ASTNode root;
    try {
        root = new ParseDriver().parseTriggerActionExpression(metastoreActionExpression);
    } catch (ParseException e) {
        throw new IllegalArgumentException("Invalid action expression: " + metastoreActionExpression, e);
    }
    // A well-formed parse yields exactly two children, the second being EOF.
    boolean wellFormed = root != null && root.getChildCount() == 2
            && root.getChild(1).getType() == HiveParser.EOF;
    if (!wellFormed) {
        throw new IllegalArgumentException("Invalid action expression: " + metastoreActionExpression);
    }
    ASTNode actionNode = (ASTNode) root.getChild(0);
    int actionType = actionNode.getType();
    if (actionType == HiveParser.KW_KILL) {
        // KILL takes no operands.
        if (actionNode.getChildCount() != 0) {
            throw new IllegalArgumentException("Invalid KILL action");
        }
        return new Action(Type.KILL_QUERY);
    }
    if (actionType == HiveParser.KW_MOVE) {
        // MOVE takes exactly one operand: the target pool path.
        if (actionNode.getChildCount() != 1) {
            throw new IllegalArgumentException("Invalid move to action, expected poolPath");
        }
        Tree poolNode = actionNode.getChild(0);
        // The pool path is the node's own text followed by each child's text.
        StringBuilder poolPath = new StringBuilder(poolNode.getText());
        for (int i = 0; i < poolNode.getChildCount(); ++i) {
            poolPath.append(poolNode.getChild(i).getText());
        }
        return new Action(Type.MOVE_TO_POOL, poolPath.toString());
    }
    throw new IllegalArgumentException("Unhandled action expression, type: " + actionNode.getType() + ": " + metastoreActionExpression);
}
Also used : ParseDriver(org.apache.hadoop.hive.ql.parse.ParseDriver) ASTNode(org.apache.hadoop.hive.ql.parse.ASTNode) Tree(org.antlr.runtime.tree.Tree) ParseException(org.apache.hadoop.hive.ql.parse.ParseException)

Example 54 with ASTNode

Use of org.apache.hadoop.hive.ql.parse.ASTNode in project hive by apache.

The class Context, method getDestNamePrefix.

/**
   * The suffix is always relative to a given ASTNode
   */
/**
 * Determines the destination-clause name prefix (INSERT/UPDATE/DELETE) for the given
 * destination or insert-into node, based on the current {@code operation} and the node's
 * position in the AST.
 *
 * <p>The suffix is always relative to a given ASTNode.</p>
 *
 * @param curNode a TOK_DESTINATION or TOK_INSERT_INTO node
 * @return the prefix to use for this destination clause
 */
public DestClausePrefix getDestNamePrefix(ASTNode curNode) {
    assert curNode != null : "must supply curNode";
    if (curNode.getType() != HiveParser.TOK_INSERT_INTO) {
        //select statement
        assert curNode.getType() == HiveParser.TOK_DESTINATION;
        if (operation == Operation.OTHER) {
            //not an 'interesting' op
            return DestClausePrefix.INSERT;
        }
        //if it is an 'interesting' op but it's a select it must be a sub-query or a derived table
        //it doesn't require a special Acid code path - the reset of the code here is to ensure
        //the tree structure is what we expect
        boolean thisIsInASubquery = false;
        // Walk up the ancestors until we find evidence that this destination belongs to a
        // sub-query or set operation; otherwise the tree is not shaped as expected.
        parentLoop: while (curNode.getParent() != null) {
            curNode = (ASTNode) curNode.getParent();
            switch(curNode.getType()) {
                case HiveParser.TOK_SUBQUERY_EXPR:
                //this is a real subquery (foo IN (select ...))
                case HiveParser.TOK_SUBQUERY:
                //strictly speaking SetOps should have a TOK_SUBQUERY parent so next 6 items are redundant
                case HiveParser.TOK_UNIONALL:
                case HiveParser.TOK_UNIONDISTINCT:
                case HiveParser.TOK_EXCEPTALL:
                case HiveParser.TOK_EXCEPTDISTINCT:
                case HiveParser.TOK_INTERSECTALL:
                case HiveParser.TOK_INTERSECTDISTINCT:
                    thisIsInASubquery = true;
                    break parentLoop;
            }
        }
        if (!thisIsInASubquery) {
            throw new IllegalStateException("Expected '" + getMatchedText(curNode) + "' to be in sub-query or set operation.");
        }
        return DestClausePrefix.INSERT;
    }
    // curNode is TOK_INSERT_INTO: pick the prefix for the current write operation.
    switch(operation) {
        case OTHER:
            return DestClausePrefix.INSERT;
        case UPDATE:
            return DestClausePrefix.UPDATE;
        case DELETE:
            return DestClausePrefix.DELETE;
        case MERGE:
            /* This is the structure expected here
        HiveParser.TOK_QUERY;
          HiveParser.TOK_FROM
          HiveParser.TOK_INSERT;
            HiveParser.TOK_INSERT_INTO;
          HiveParser.TOK_INSERT;
            HiveParser.TOK_INSERT_INTO;
          .....*/
            // For MERGE, each TOK_INSERT branch of the query maps to a different prefix;
            // find which branch this node belongs to and look up its mapping.
            ASTNode insert = (ASTNode) curNode.getParent();
            assert insert != null && insert.getType() == HiveParser.TOK_INSERT;
            ASTNode query = (ASTNode) insert.getParent();
            assert query != null && query.getType() == HiveParser.TOK_QUERY;
            for (int childIdx = 1; childIdx < query.getChildCount(); childIdx++) {
                //1st child is TOK_FROM
                assert query.getChild(childIdx).getType() == HiveParser.TOK_INSERT;
                if (insert == query.getChild(childIdx)) {
                    DestClausePrefix prefix = insertBranchToNamePrefix.get(childIdx);
                    if (prefix == null) {
                        throw new IllegalStateException("Found a node w/o branch mapping: '" + getMatchedText(insert) + "'");
                    }
                    return prefix;
                }
            }
            throw new IllegalStateException("Could not locate '" + getMatchedText(insert) + "'");
        default:
            throw new IllegalStateException("Unexpected operation: " + operation);
    }
}
Also used : ASTNode(org.apache.hadoop.hive.ql.parse.ASTNode)

Example 55 with ASTNode

Use of org.apache.hadoop.hive.ql.parse.ASTNode in project hive by apache.

The class HiveMaterializedViewsRegistry, method parseQuery.

/**
 * Best-effort: parses the given view definition and produces its logical plan.
 *
 * @param viewQuery the view's SQL text
 * @return the logical plan, or null if the query cannot be parsed/planned
 */
private static RelNode parseQuery(String viewQuery) {
    try {
        ASTNode ast = ParseUtils.parse(viewQuery);
        // Build a planner bound to the current session's configuration.
        CalcitePlanner planner = new CalcitePlanner(new QueryState(SessionState.get().getConf()));
        planner.initCtx(new Context(SessionState.get().getConf()));
        planner.init(false);
        return planner.genLogicalPlan(ast);
    } catch (Exception e) {
        // Deliberately swallowed: a view we cannot parse is simply not registered.
        return null;
    }
}
Also used : Context(org.apache.hadoop.hive.ql.Context) ASTNode(org.apache.hadoop.hive.ql.parse.ASTNode) CalcitePlanner(org.apache.hadoop.hive.ql.parse.CalcitePlanner) QueryState(org.apache.hadoop.hive.ql.QueryState) CalciteSemanticException(org.apache.hadoop.hive.ql.optimizer.calcite.CalciteSemanticException) SerDeException(org.apache.hadoop.hive.serde2.SerDeException)

Aggregations

ASTNode (org.apache.hadoop.hive.ql.parse.ASTNode)116 SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException)37 DDLWork (org.apache.hadoop.hive.ql.ddl.DDLWork)24 ArrayList (java.util.ArrayList)21 ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc)13 HashMap (java.util.HashMap)11 FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema)11 Table (org.apache.hadoop.hive.ql.metadata.Table)10 Node (org.apache.hadoop.hive.ql.lib.Node)9 HiveException (org.apache.hadoop.hive.ql.metadata.HiveException)8 TableName (org.apache.hadoop.hive.common.TableName)7 ColumnInfo (org.apache.hadoop.hive.ql.exec.ColumnInfo)7 RowResolver (org.apache.hadoop.hive.ql.parse.RowResolver)7 ReadEntity (org.apache.hadoop.hive.ql.hooks.ReadEntity)6 RelNode (org.apache.calcite.rel.RelNode)5 SQLCheckConstraint (org.apache.hadoop.hive.metastore.api.SQLCheckConstraint)5 Context (org.apache.hadoop.hive.ql.Context)5 ParseDriver (org.apache.hadoop.hive.ql.parse.ParseDriver)5 SemanticAnalyzer (org.apache.hadoop.hive.ql.parse.SemanticAnalyzer)5 WindowingException (com.sap.hadoop.windowing.WindowingException)4