Usage example of org.apache.flink.table.planner.delegation.hive.parse.HiveASTParser in the Apache Flink project:
the parse method of class HiveASTParseDriver.
/**
 * Parses a command into an AST, optionally registering the parser's token rewrite stream with
 * the given context.
 *
 * @param command the HiveQL command text to parse
 * @param ctx context with which to associate this parser's token stream, or null if either no
 *     context is available or the context already has an existing stream
 * @param viewFullyQualifiedName fully qualified name of the view being expanded, or null when
 *     parsing a top-level query
 * @return root node of the parsed AST, with token boundaries resolved
 * @throws HiveASTParseException if the lexer or parser reports any errors
 */
public HiveParserASTNode parse(String command, HiveParserContext ctx, String viewFullyQualifiedName) throws HiveASTParseException {
    if (LOG.isDebugEnabled()) {
        LOG.debug("Parsing command: " + command);
    }

    // Case-insensitive lexing, buffered through a rewrite stream so the original
    // token text can be recovered later (e.g. for view expansion).
    HiveLexerX lexer = new HiveLexerX(new ANTLRNoCaseStringStream(command));
    TokenRewriteStream tokenStream = new TokenRewriteStream(lexer);
    if (ctx != null) {
        if (viewFullyQualifiedName == null) {
            // Top level query
            ctx.setTokenRewriteStream(tokenStream);
        } else {
            // It is a view
            ctx.addViewTokenRewriteStream(viewFullyQualifiedName, tokenStream);
        }
        lexer.setHiveConf(ctx.getConf());
    }

    HiveASTParser parser = new HiveASTParser(tokenStream);
    if (ctx != null) {
        parser.setHiveConf(ctx.getConf());
    }
    parser.setTreeAdaptor(ADAPTOR);

    HiveASTParser.statement_return result;
    try {
        result = parser.statement();
    } catch (RecognitionException e) {
        // Recognition failures are surfaced through the parser's accumulated error list.
        throw new HiveASTParseException(parser.errors);
    }

    // Lexer errors take precedence over parser errors when both are present.
    if (!lexer.getErrors().isEmpty()) {
        throw new HiveASTParseException(lexer.getErrors());
    }
    if (!parser.errors.isEmpty()) {
        throw new HiveASTParseException(parser.errors);
    }
    LOG.debug("Parse Completed");

    HiveParserASTNode tree = result.getTree();
    tree.setUnknownTokenBoundaries();
    return tree;
}
Aggregations