Search in sources :

Example 26 with ASTNode

use of org.apache.hadoop.hive.ql.parse.ASTNode in project hive by apache.

From the class Context, the method getDestNamePrefix:

/**
   * Resolves which destination-clause name prefix (INSERT/UPDATE/DELETE, or a
   * merge-branch-specific prefix) applies to the given destination node.
   * The prefix is always resolved relative to the supplied {@code curNode}.
   */
public DestClausePrefix getDestNamePrefix(ASTNode curNode) {
    assert curNode != null : "must supply curNode";
    if (curNode.getType() != HiveParser.TOK_INSERT_INTO) {
        //select statement
        assert curNode.getType() == HiveParser.TOK_DESTINATION;
        if (operation == Operation.OTHER) {
            //not an 'interesting' op
            return DestClausePrefix.INSERT;
        }
        //if it is an 'interesting' op but it's a select it must be a sub-query or a derived table
        //it doesn't require a special Acid code path - the rest of the code here is to ensure
        //the tree structure is what we expect
        boolean thisIsInASubquery = false;
        //walk up the AST until a subquery or set-operation ancestor is found (or we hit the root)
        parentLoop: while (curNode.getParent() != null) {
            curNode = (ASTNode) curNode.getParent();
            switch(curNode.getType()) {
                case HiveParser.TOK_SUBQUERY_EXPR:
                //this is a real subquery (foo IN (select ...))
                case HiveParser.TOK_SUBQUERY:
                //strictly speaking SetOps should have a TOK_SUBQUERY parent so next 6 items are redundant
                case HiveParser.TOK_UNIONALL:
                case HiveParser.TOK_UNIONDISTINCT:
                case HiveParser.TOK_EXCEPTALL:
                case HiveParser.TOK_EXCEPTDISTINCT:
                case HiveParser.TOK_INTERSECTALL:
                case HiveParser.TOK_INTERSECTDISTINCT:
                    thisIsInASubquery = true;
                    break parentLoop;
            }
        }
        if (!thisIsInASubquery) {
            throw new IllegalStateException("Expected '" + getMatchedText(curNode) + "' to be in sub-query or set operation.");
        }
        return DestClausePrefix.INSERT;
    }
    //curNode is a TOK_INSERT_INTO: dispatch on the current operation
    switch(operation) {
        case OTHER:
            return DestClausePrefix.INSERT;
        case UPDATE:
            return DestClausePrefix.UPDATE;
        case DELETE:
            return DestClausePrefix.DELETE;
        case MERGE:
            /* This is the structure expected here
        HiveParser.TOK_QUERY;
          HiveParser.TOK_FROM
          HiveParser.TOK_INSERT;
            HiveParser.TOK_INSERT_INTO;
          HiveParser.TOK_INSERT;
            HiveParser.TOK_INSERT_INTO;
          .....*/
            ASTNode insert = (ASTNode) curNode.getParent();
            assert insert != null && insert.getType() == HiveParser.TOK_INSERT;
            ASTNode query = (ASTNode) insert.getParent();
            assert query != null && query.getType() == HiveParser.TOK_QUERY;
            //find which TOK_INSERT branch of the query this node belongs to, then map
            //that branch index to its recorded prefix (populated elsewhere for MERGE)
            for (int childIdx = 1; childIdx < query.getChildCount(); childIdx++) {
                //1st child is TOK_FROM
                assert query.getChild(childIdx).getType() == HiveParser.TOK_INSERT;
                if (insert == query.getChild(childIdx)) {
                    DestClausePrefix prefix = insertBranchToNamePrefix.get(childIdx);
                    if (prefix == null) {
                        throw new IllegalStateException("Found a node w/o branch mapping: '" + getMatchedText(insert) + "'");
                    }
                    return prefix;
                }
            }
            throw new IllegalStateException("Could not locate '" + getMatchedText(insert) + "'");
        default:
            throw new IllegalStateException("Unexpected operation: " + operation);
    }
}
Also used : ASTNode(org.apache.hadoop.hive.ql.parse.ASTNode)

Example 27 with ASTNode

use of org.apache.hadoop.hive.ql.parse.ASTNode in project hive by apache.

From the class HiveMaterializedViewsRegistry, the method parseQuery:

/**
 * Parses and logically plans the given view definition query.
 * Returns {@code null} (best-effort) if the query cannot be parsed or planned.
 */
private static RelNode parseQuery(String viewQuery) {
    try {
        final ASTNode ast = ParseUtils.parse(viewQuery);
        final QueryState queryState = new QueryState(SessionState.get().getConf());
        final CalcitePlanner planner = new CalcitePlanner(queryState);
        planner.initCtx(new Context(SessionState.get().getConf()));
        planner.init(false);
        return planner.genLogicalPlan(ast);
    } catch (Exception e) {
        // Best-effort: any parse/analysis failure means the view cannot be used.
        return null;
    }
}
Also used : Context(org.apache.hadoop.hive.ql.Context) ASTNode(org.apache.hadoop.hive.ql.parse.ASTNode) CalcitePlanner(org.apache.hadoop.hive.ql.parse.CalcitePlanner) QueryState(org.apache.hadoop.hive.ql.QueryState) CalciteSemanticException(org.apache.hadoop.hive.ql.optimizer.calcite.CalciteSemanticException) SerDeException(org.apache.hadoop.hive.serde2.SerDeException)

Example 28 with ASTNode

use of org.apache.hadoop.hive.ql.parse.ASTNode in project hive by apache.

From the class ExplainSQRewriteTask, the method execute:

@Override
public int execute(DriverContext driverContext) {
    PrintStream out = null;
    try {
        // Open the result file on the task's configured FileSystem.
        Path resFile = new Path(work.getResFile());
        OutputStream outS = resFile.getFileSystem(conf).create(resFile);
        out = new PrintStream(outS);
        QB qb = work.getQb();
        TokenRewriteStream stream = work.getCtx().getTokenRewriteStream();
        String program = "sq rewrite";
        ASTNode ast = work.getAst();
        try {
            // Register the sub-query rewrites under 'program', then render the rewritten query text.
            addRewrites(stream, qb, program, out);
            out.println("\nRewritten Query:\n" + stream.toString(program, ast.getTokenStartIndex(), ast.getTokenStopIndex()));
        } finally {
            // Always drop the rewrite program so the shared token stream is left clean.
            stream.deleteProgram(program);
        }
        out.close();
        // Null out so the closeStream() in the outer finally is a no-op on success.
        out = null;
        return (0);
    } catch (Exception e) {
        console.printError("Failed with exception " + e.getMessage(), "\n" + StringUtils.stringifyException(e));
        // Non-zero return code signals task failure to the driver.
        return (1);
    } finally {
        IOUtils.closeStream(out);
    }
}
Also used : Path(org.apache.hadoop.fs.Path) PrintStream(java.io.PrintStream) QB(org.apache.hadoop.hive.ql.parse.QB) TokenRewriteStream(org.antlr.runtime.TokenRewriteStream) OutputStream(java.io.OutputStream) ASTNode(org.apache.hadoop.hive.ql.parse.ASTNode)

Example 29 with ASTNode

use of org.apache.hadoop.hive.ql.parse.ASTNode in project hive by apache.

From the class ExplainSQRewriteTask, the method addRewrites:

/**
 * Registers rewrite operations on {@code stream} for the WHERE- and
 * HAVING-clause sub-query predicates of {@code qb}, then recurses into
 * every sub-query alias of this query block.
 */
void addRewrites(TokenRewriteStream stream, QB qb, String program, PrintStream out) {
    final QBSubQuery whereSubQuery = qb.getWhereClauseSubQueryPredicate();
    final QBSubQuery havingSubQuery = qb.getHavingClauseSubQueryPredicate();
    if (whereSubQuery != null || havingSubQuery != null) {
        final ASTNode subQueryNode = whereSubQuery != null
            ? whereSubQuery.getOriginalSubQueryASTForRewrite()
            : havingSubQuery.getOriginalSubQueryASTForRewrite();
        final ASTNode queryNode = getQueryASTNode(subQueryNode);
        final ASTNode fromNode = (ASTNode) queryNode.getChild(0);
        // Joins added by either rewrite accumulate here and are spliced in after the FROM clause.
        final StringBuilder joinText = new StringBuilder();
        if (whereSubQuery != null) {
            addRewrites(stream, whereSubQuery, program, out, qb.getId(), true, joinText);
        }
        if (havingSubQuery != null) {
            addRewrites(stream, havingSubQuery, program, out, qb.getId(), false, joinText);
        }
        stream.insertAfter(program, fromNode.getTokenStopIndex(), joinText);
    }
    // Recurse into nested query blocks.
    for (String alias : qb.getSubqAliases()) {
        addRewrites(stream, qb.getSubqForAlias(alias).getQB(), program, out);
    }
}
Also used : ASTNode(org.apache.hadoop.hive.ql.parse.ASTNode) QBSubQuery(org.apache.hadoop.hive.ql.parse.QBSubQuery)

Example 30 with ASTNode

use of org.apache.hadoop.hive.ql.parse.ASTNode in project hive by apache.

From the class RewriteParseContextGenerator, the method doSemanticAnalysis:

/**
   * For the input ASTNode tree, perform a semantic analysis and check metadata.
   * Generate an operator tree and return it.
   *
   * @param sem the semantic analyzer to drive
   * @param ast the sub-query AST to analyze
   * @param ctx the compilation context to install on the analyzer's parse context
   * @return the root operator of the generated plan
   * @throws SemanticException if any analysis phase fails
   */
private static Operator<?> doSemanticAnalysis(SemanticAnalyzer sem, ASTNode ast, Context ctx) throws SemanticException {
    QB qb = new QB(null, null, false);
    ParseContext subPCtx = sem.getParseContext();
    subPCtx.setContext(ctx);
    sem.initParseCtx(subPCtx);
    LOG.info("Starting Sub-query Semantic Analysis");
    // Phase 1 walks the AST and populates the QB (the former 'child' alias of
    // 'ast' served no purpose and has been removed).
    sem.doPhase1(ast, qb, sem.initPhase1Ctx(), null);
    LOG.info("Completed phase 1 of Sub-query Semantic Analysis");
    sem.getMetaData(qb);
    LOG.info("Completed getting MetaData in Sub-query Semantic Analysis");
    LOG.info("Sub-query Abstract syntax tree: " + ast.toStringTree());
    Operator<?> operator = sem.genPlan(qb);
    LOG.info("Sub-query Completed plan generation");
    return operator;
}
Also used : QB(org.apache.hadoop.hive.ql.parse.QB) ASTNode(org.apache.hadoop.hive.ql.parse.ASTNode) ParseContext(org.apache.hadoop.hive.ql.parse.ParseContext)

Aggregations

ASTNode (org.apache.hadoop.hive.ql.parse.ASTNode)44 ArrayList (java.util.ArrayList)8 DDLWork (org.apache.hadoop.hive.ql.plan.DDLWork)5 ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc)5 WindowingException (com.sap.hadoop.windowing.WindowingException)4 InputInfo (com.sap.hadoop.windowing.query2.translate.QueryTranslationInfo.InputInfo)4 SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException)4 PrincipalDesc (org.apache.hadoop.hive.ql.plan.PrincipalDesc)4 PrivilegeObjectDesc (org.apache.hadoop.hive.ql.plan.PrivilegeObjectDesc)4 HashMap (java.util.HashMap)3 LinkedHashMap (java.util.LinkedHashMap)3 RexNode (org.apache.calcite.rex.RexNode)3 FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema)3 HiveException (org.apache.hadoop.hive.ql.metadata.HiveException)3 ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector)3 ArgDef (com.sap.hadoop.windowing.query2.definition.ArgDef)2 QueryInputDef (com.sap.hadoop.windowing.query2.definition.QueryInputDef)2 IOException (java.io.IOException)2 BigDecimal (java.math.BigDecimal)2 RexInputRef (org.apache.calcite.rex.RexInputRef)2