Use of org.antlr.runtime.TokenRewriteStream in project hive by apache:
class SemanticAnalyzer, method getQueryStringForCache.
/**
 * Generate the query string for this query (with fully resolved table references).
 *
 * @param ast the already-parsed AST of the query; used as the source of the
 *            original query text via getQueryStringFromAst().
 * @return The query string with resolved references. NULL if an error occurred.
 */
private String getQueryStringForCache(ASTNode ast) {
  // Use the UnparseTranslator to resolve unqualified table names.
  String queryString = getQueryStringFromAst(ast);
  // Re-use the per-view TokenRewriteStream map so we do not overwrite the
  // current top-level TokenRewriteStream held by the Context.
  String rewriteStreamName = "__qualified_query_string__";
  try {
    ASTNode astNode = ParseUtils.parse(queryString, ctx, rewriteStreamName);
    TokenRewriteStream tokenRewriteStream = ctx.getViewTokenRewriteStream(rewriteStreamName);
    return rewriteQueryWithQualifiedNames(astNode, tokenRewriteStream);
  } catch (Exception err) {
    // Parameterized logging avoids eager string concatenation; passing the
    // Throwable as the last argument logs its stack trace.
    LOG.error("Unexpected error while reparsing the query string [{}]", queryString, err);
    // Don't fail the query - just return null (caller should skip cache lookup).
    return null;
  }
}
Use of org.antlr.runtime.TokenRewriteStream in project hive by apache:
class ParseDriver, method parse.
/**
 * Parses a command, optionally assigning the parser's token stream to the
 * given context.
 *
 * @param command
 *          command to parse
 *
 * @param ctx
 *          context with which to associate this parser's token stream, or
 *          null if either no context is available or the context already has
 *          an existing stream
 *
 * @param viewFullyQualifiedName
 *          fully qualified name of the view being parsed, or null when
 *          parsing the top-level query
 *
 * @return parsed AST
 * @throws ParseException if the lexer or the parser reported any errors
 */
public ASTNode parse(String command, Context ctx, String viewFullyQualifiedName) throws ParseException {
  // Parameterized logging makes the isDebugEnabled() guard unnecessary.
  LOG.debug("Parsing command: {}", command);
  HiveLexerX lexer = new HiveLexerX(new ANTLRNoCaseStringStream(command));
  TokenRewriteStream tokens = new TokenRewriteStream(lexer);
  if (ctx != null) {
    if (viewFullyQualifiedName == null) {
      // Top level query: the Context keeps a single primary rewrite stream.
      ctx.setTokenRewriteStream(tokens);
    } else {
      // It is a view: store its stream in the per-view map so the primary
      // stream is preserved.
      ctx.addViewTokenRewriteStream(viewFullyQualifiedName, tokens);
    }
    lexer.setHiveConf(ctx.getConf());
  }
  HiveParser parser = new HiveParser(tokens);
  if (ctx != null) {
    parser.setHiveConf(ctx.getConf());
  }
  parser.setTreeAdaptor(adaptor);
  HiveParser.statement_return r;
  try {
    r = parser.statement();
  } catch (RecognitionException e) {
    // Log through the logger instead of printStackTrace(); the collected
    // parser errors carry the user-facing details.
    LOG.error("Failed to recognize statement", e);
    throw new ParseException(parser.errors);
  }
  // Lexer errors take precedence over parser errors, matching the original
  // reporting order.
  if (!lexer.getErrors().isEmpty()) {
    throw new ParseException(lexer.getErrors());
  }
  if (!parser.errors.isEmpty()) {
    throw new ParseException(parser.errors);
  }
  LOG.debug("Parse Completed");
  ASTNode tree = (ASTNode) r.getTree();
  tree.setUnknownTokenBoundaries();
  return tree;
}
Use of org.antlr.runtime.TokenRewriteStream in project ceylon by eclipse:
class CeylonVersionTool, method updateModuleVersion.
/**
 * Updates the version declared in one module's descriptor, asking the user
 * for confirmation unless dependency confirmation is disabled.
 *
 * @return false when the user aborts, true otherwise
 */
private boolean updateModuleVersion(Module module, Map<String, String> updatedModuleVersions) throws IOException, RecognitionException {
    // Parse the descriptor through a TokenRewriteStream so the file text can
    // later be rewritten in place.
    String descriptorPath = module.getUnit().getFullPath();
    CeylonLexer lexer = new CeylonLexer(new ANTLRFileStream(descriptorPath, encoding));
    TokenRewriteStream tokens = new TokenRewriteStream(lexer);
    Tree.CompilationUnit cu = new CeylonParser(tokens).compilationUnit();
    fixModuleImportNames(cu);
    // Either take the preset version or ask the user interactively.
    String v;
    if (this.confirm == Confirm.dependencies) {
        v = this.newVersion;
    } else {
        v = confirm("update.module.version", module.getNameAsString(), module.getVersion(), this.newVersion);
    }
    if (v == null) {
        return false; // user aborted
    }
    if (!v.isEmpty()) {
        // record the new version for this module
        updatedModuleVersions.put(module.getNameAsString(), v);
        updateModuleVersion(descriptorPath, tokens, cu, v);
    }
    return true;
}
Use of org.antlr.runtime.TokenRewriteStream in project ceylon by eclipse:
class CeylonVersionTool, method updateModuleImports.
/**
 * Updates the versions of this module's imports that refer to modules whose
 * versions have been changed, confirming each update with the user.
 *
 * @return false when the user aborts, true otherwise
 */
private boolean updateModuleImports(Module module, Map<String, String> updatedModuleVersions) throws IOException, RecognitionException {
    // Parse the descriptor through a TokenRewriteStream so the file text can
    // later be rewritten in place.
    String descriptorPath = module.getUnit().getFullPath();
    CeylonLexer lexer = new CeylonLexer(new ANTLRFileStream(descriptorPath, encoding));
    TokenRewriteStream tokens = new TokenRewriteStream(lexer);
    Tree.CompilationUnit cu = new CeylonParser(tokens).compilationUnit();
    fixModuleImportNames(cu);
    for (Tree.ImportModule imported : findUpdatedImport(cu, updatedModuleVersions)) {
        String importedName = imported.getName();
        // Fall back to the tool-wide version when this dependency was not
        // itself updated.
        String candidate = updatedModuleVersions.get(importedName);
        if (candidate == null) {
            candidate = this.newVersion;
        }
        String v = confirm("update.dependency.version", importedName, module.getNameAsString(), module.getVersion(), candidate);
        if (v == null) {
            return false; // user aborted
        }
        if (!v.isEmpty()) {
            updateImportVersion(descriptorPath, tokens, imported, v);
        }
    }
    return true;
}
Use of org.antlr.runtime.TokenRewriteStream in project hive by apache:
class ExplainSQRewriteTask, method execute.
/**
 * Writes the subquery-rewritten form of the query to the work's result file.
 *
 * @return 0 on success, 1 on any failure (the exception is recorded via
 *         setException and logged)
 */
@Override
public int execute() {
  try {
    Path resFile = new Path(work.getResFile());
    // try-with-resources guarantees both streams are closed on every path,
    // replacing the manual close()/null/IOUtils.closeStream() dance (which
    // also leaked outS if the PrintStream constructor had failed).
    try (OutputStream outS = resFile.getFileSystem(conf).create(resFile);
         PrintStream out = new PrintStream(outS)) {
      QB qb = work.getQb();
      TokenRewriteStream stream = work.getCtx().getTokenRewriteStream();
      String program = "sq rewrite";
      ASTNode ast = work.getAst();
      try {
        addRewrites(stream, qb, program, out);
        out.println("\nRewritten Query:\n"
            + stream.toString(program, ast.getTokenStartIndex(), ast.getTokenStopIndex()));
      } finally {
        // Always remove the rewrite program so the shared stream is left clean.
        stream.deleteProgram(program);
      }
    }
    return (0);
  } catch (Exception e) {
    setException(e);
    LOG.error("Failed to execute", e);
    return (1);
  }
}
Aggregations