Usage of org.apache.hadoop.hive.ql.parse.ASTNode in project hive (Apache).
Example from class Context, method getDestNamePrefix:
/**
 * Determines the prefix used to name the destination clause that contains
 * {@code curNode}, based on the current {@code operation}
 * (OTHER/UPDATE/DELETE/MERGE).  The prefix is always relative to a given ASTNode.
 */
public DestClausePrefix getDestNamePrefix(ASTNode curNode) {
assert curNode != null : "must supply curNode";
if (curNode.getType() != HiveParser.TOK_INSERT_INTO) {
//select statement
assert curNode.getType() == HiveParser.TOK_DESTINATION;
if (operation == Operation.OTHER) {
//not an 'interesting' op
return DestClausePrefix.INSERT;
}
//if it is an 'interesting' op but it's a select it must be a sub-query or a derived table
//it doesn't require a special Acid code path - the rest of the code here is to ensure
//the tree structure is what we expect
boolean thisIsInASubquery = false;
// walk up the parent chain until we find a node proving we are inside a
// sub-query or a set operation; curNode is intentionally re-bound as we climb
parentLoop: while (curNode.getParent() != null) {
curNode = (ASTNode) curNode.getParent();
switch(curNode.getType()) {
case HiveParser.TOK_SUBQUERY_EXPR:
//this is a real subquery (foo IN (select ...))
case HiveParser.TOK_SUBQUERY:
//strictly speaking SetOps should have a TOK_SUBQUERY parent so next 6 items are redundant
case HiveParser.TOK_UNIONALL:
case HiveParser.TOK_UNIONDISTINCT:
case HiveParser.TOK_EXCEPTALL:
case HiveParser.TOK_EXCEPTDISTINCT:
case HiveParser.TOK_INTERSECTALL:
case HiveParser.TOK_INTERSECTDISTINCT:
thisIsInASubquery = true;
break parentLoop;
}
}
if (!thisIsInASubquery) {
// at this point curNode has been walked up to the tree root
throw new IllegalStateException("Expected '" + getMatchedText(curNode) + "' to be in sub-query or set operation.");
}
return DestClausePrefix.INSERT;
}
// TOK_INSERT_INTO: map the operation directly to its clause prefix
switch(operation) {
case OTHER:
return DestClausePrefix.INSERT;
case UPDATE:
return DestClausePrefix.UPDATE;
case DELETE:
return DestClausePrefix.DELETE;
case MERGE:
/* This is the structure expected here
HiveParser.TOK_QUERY;
HiveParser.TOK_FROM
HiveParser.TOK_INSERT;
HiveParser.TOK_INSERT_INTO;
HiveParser.TOK_INSERT;
HiveParser.TOK_INSERT_INTO;
.....*/
ASTNode insert = (ASTNode) curNode.getParent();
assert insert != null && insert.getType() == HiveParser.TOK_INSERT;
ASTNode query = (ASTNode) insert.getParent();
assert query != null && query.getType() == HiveParser.TOK_QUERY;
// find which INSERT branch of the multi-insert MERGE this node belongs to,
// then look up the prefix recorded for that branch index
for (int childIdx = 1; childIdx < query.getChildCount(); childIdx++) {
//1st child is TOK_FROM
assert query.getChild(childIdx).getType() == HiveParser.TOK_INSERT;
if (insert == query.getChild(childIdx)) {
DestClausePrefix prefix = insertBranchToNamePrefix.get(childIdx);
if (prefix == null) {
throw new IllegalStateException("Found a node w/o branch mapping: '" + getMatchedText(insert) + "'");
}
return prefix;
}
}
throw new IllegalStateException("Could not locate '" + getMatchedText(insert) + "'");
default:
throw new IllegalStateException("Unexpected operation: " + operation);
}
}
Usage of org.apache.hadoop.hive.ql.parse.ASTNode in project hive (Apache).
Example from class HiveMaterializedViewsRegistry, method parseQuery:
/**
 * Parses the given view definition query and produces its Calcite logical plan.
 *
 * @param viewQuery the SQL text of the materialized view definition
 * @return the logical plan, or {@code null} if the query could not be
 *         parsed/analyzed
 */
private static RelNode parseQuery(String viewQuery) {
  try {
    final ASTNode ast = ParseUtils.parse(viewQuery);
    final QueryState queryState = new QueryState(SessionState.get().getConf());
    final CalcitePlanner planner = new CalcitePlanner(queryState);
    planner.initCtx(new Context(SessionState.get().getConf()));
    planner.init(false);
    return planner.genLogicalPlan(ast);
  } catch (Exception e) {
    // Best-effort: callers treat null as "view definition not parseable".
    return null;
  }
}
Usage of org.apache.hadoop.hive.ql.parse.ASTNode in project hive (Apache).
Example from class ExplainSQRewriteTask, method execute:
/**
 * Writes the sub-query-rewritten form of the query to the work's result file.
 *
 * @param driverContext the driver context (not used directly here)
 * @return 0 on success, 1 on failure
 */
@Override
public int execute(DriverContext driverContext) {
  try {
    Path resFile = new Path(work.getResFile());
    // try-with-resources replaces the original close()/null/IOUtils.closeStream()
    // juggling: the stream is now closed exactly once on every path, and the
    // underlying FS stream cannot leak if writing fails.
    try (PrintStream out = new PrintStream(resFile.getFileSystem(conf).create(resFile))) {
      QB qb = work.getQb();
      TokenRewriteStream stream = work.getCtx().getTokenRewriteStream();
      String program = "sq rewrite";
      ASTNode ast = work.getAst();
      try {
        addRewrites(stream, qb, program, out);
        out.println("\nRewritten Query:\n" + stream.toString(program, ast.getTokenStartIndex(), ast.getTokenStopIndex()));
      } finally {
        // always unregister the rewrite program so the shared token stream stays clean
        stream.deleteProgram(program);
      }
    }
    return 0;
  } catch (Exception e) {
    console.printError("Failed with exception " + e.getMessage(), "\n" + StringUtils.stringifyException(e));
    return 1;
  }
}
Usage of org.apache.hadoop.hive.ql.parse.ASTNode in project hive (Apache).
Example from class ExplainSQRewriteTask, method addRewrites:
/**
 * Records rewrite edits for the WHERE/HAVING sub-query predicates of
 * {@code qb} into the token rewrite program, then recurses into every
 * nested sub-query block of this QB.
 */
void addRewrites(TokenRewriteStream stream, QB qb, String program, PrintStream out) {
  QBSubQuery whereSubQuery = qb.getWhereClauseSubQueryPredicate();
  QBSubQuery havingSubQuery = qb.getHavingClauseSubQueryPredicate();
  if (whereSubQuery != null || havingSubQuery != null) {
    ASTNode subQueryAst = whereSubQuery != null
        ? whereSubQuery.getOriginalSubQueryASTForRewrite()
        : havingSubQuery.getOriginalSubQueryASTForRewrite();
    ASTNode tokQry = getQueryASTNode(subQueryAst);
    // first child of TOK_QUERY is the FROM clause; generated joins are
    // spliced in immediately after it
    ASTNode fromClause = (ASTNode) tokQry.getChild(0);
    StringBuilder joinText = new StringBuilder();
    if (whereSubQuery != null) {
      addRewrites(stream, whereSubQuery, program, out, qb.getId(), true, joinText);
    }
    if (havingSubQuery != null) {
      addRewrites(stream, havingSubQuery, program, out, qb.getId(), false, joinText);
    }
    stream.insertAfter(program, fromClause.getTokenStopIndex(), joinText);
  }
  for (String alias : qb.getSubqAliases()) {
    addRewrites(stream, qb.getSubqForAlias(alias).getQB(), program, out);
  }
}
Usage of org.apache.hadoop.hive.ql.parse.ASTNode in project hive (Apache).
Example from class RewriteParseContextGenerator, method doSemanticAnalysis:
/**
 * For the input ASTNode tree, performs semantic analysis (phase 1 plus
 * metadata resolution) and generates the corresponding operator tree.
 *
 * @param sem the semantic analyzer driving the analysis
 * @param ast the root of the sub-query AST to analyze
 * @param ctx the context to attach to the analyzer's ParseContext
 * @return the root operator of the generated plan
 * @throws SemanticException if phase-1 analysis, metadata lookup, or plan
 *         generation fails
 */
private static Operator<?> doSemanticAnalysis(SemanticAnalyzer sem, ASTNode ast, Context ctx) throws SemanticException {
  QB qb = new QB(null, null, false);
  ParseContext subPCtx = sem.getParseContext();
  subPCtx.setContext(ctx);
  sem.initParseCtx(subPCtx);
  LOG.info("Starting Sub-query Semantic Analysis");
  // the original 'ASTNode child = ast' alias was redundant; use ast directly
  sem.doPhase1(ast, qb, sem.initPhase1Ctx(), null);
  LOG.info("Completed phase 1 of Sub-query Semantic Analysis");
  sem.getMetaData(qb);
  LOG.info("Completed getting MetaData in Sub-query Semantic Analysis");
  LOG.info("Sub-query Abstract syntax tree: " + ast.toStringTree());
  Operator<?> operator = sem.genPlan(qb);
  LOG.info("Sub-query Completed plan generation");
  return operator;
}
Aggregations