Use of io.prestosql.sql.tree.Statement in project hetu-core by openlookeng.
Class TestAnalyzer, method analyze:
private void analyze(Session clientSession, @Language("SQL") String query)
{
    transaction(transactionManager, accessControl)
            .readUncommitted()
            .readOnly()
            .execute(clientSession, session -> {
                Analyzer analyzer = createAnalyzer(session, metadata);
                Statement statement = SQL_PARSER.createStatement(query);
                analyzer.analyze(statement);
            });
}
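A minimal sketch of how this helper might be invoked from a test. CLIENT_SESSION and the table t1 are hypothetical fixtures, not part of the snippet above; analysis failures surface as exceptions thrown out of analyze.

@Test
public void testSimpleSelect()
{
    // CLIENT_SESSION and t1 are assumed test fixtures.
    analyze(CLIENT_SESSION, "SELECT * FROM t1");
}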
Use of io.prestosql.sql.tree.Statement in project hetu-core by openlookeng.
Class ImpalaParser, method invokeParser:
public JSONObject invokeParser(String sql, Function<ImpalaSqlParser, ParserRuleContext> parseFunction, ParsingOptions parsingOptions)
{
    try {
        ImpalaSqlLexer lexer = new ImpalaSqlLexer(new CaseInsensitiveStream(CharStreams.fromString(sql)));
        CommonTokenStream tokenStream = new CommonTokenStream(lexer);
        ImpalaSqlParser parser = new ImpalaSqlParser(tokenStream);

        // Override the default error strategy so it does not attempt to insert or delete
        // a token; token insertion and deletion would garble error reporting.
        parser.setErrorHandler(new DefaultErrorStrategy()
        {
            @Override
            public Token recoverInline(Parser recognizer)
                    throws RecognitionException
            {
                if (nextTokensContext == null) {
                    throw new InputMismatchException(recognizer);
                }
                else {
                    throw new InputMismatchException(recognizer, nextTokensState, nextTokensContext);
                }
            }
        });

        parser.addParseListener(new PostProcessor(Arrays.asList(parser.getRuleNames())));

        lexer.removeErrorListeners();
        lexer.addErrorListener(LEXER_ERROR_LISTENER);
        parser.removeErrorListeners();
        if (enhancedErrorHandlerEnabled) {
            parser.addErrorListener(PARSER_ERROR_HANDLER);
        }
        else {
            parser.addErrorListener(LEXER_ERROR_LISTENER);
        }

        String convertedSql = "";
        String conversionStatus = "";
        String errorMessage = "";
        JSONArray diffArray = new JSONArray();
        ImpalaAstBuilder impalaAstBuilder = null;
        try {
            ParserRuleContext tree;
            try {
                // First, try parsing with the potentially faster SLL mode.
                parser.getInterpreter().setPredictionMode(PredictionMode.SLL);
                tree = parseFunction.apply(parser);
            }
            catch (ParseCancellationException ex) {
                // If SLL fails, rewind the input stream and reparse with full LL mode.
                tokenStream.reset();
                parser.reset();
                parser.getInterpreter().setPredictionMode(PredictionMode.LL);
                tree = parseFunction.apply(parser);
            }

            impalaAstBuilder = new ImpalaAstBuilder(parsingOptions);
            Statement statement = (Statement) impalaAstBuilder.visit(tree);
            if (statement == null) {
                conversionStatus = Constants.FAILED;
                errorMessage = "The input SQL is invalid or empty.";
            }
            else {
                convertedSql = SqlFormatter.formatSql(statement, Optional.empty());
                // Downgrade to WARNING if any diff deleted content or flagged a function.
                conversionStatus = Constants.SUCCESS;
                for (ParserDiffs diffs : impalaAstBuilder.getParserDiffsList()) {
                    if (diffs.getDiffType().equals(DiffType.DELETED) || diffs.getDiffType().equals(DiffType.FUNCTION_WARNING)) {
                        conversionStatus = Constants.WARNING;
                    }
                    diffArray.put(diffs.toJsonObject());
                }
            }
        }
        catch (UnsupportedException e) {
            // Handle unsupported keywords: report each UNSUPPORTED diff and collect its message.
            conversionStatus = Constants.UNSUPPORTED;
            if (impalaAstBuilder != null) {
                for (ParserDiffs diffs : impalaAstBuilder.getParserDiffsList()) {
                    if (diffs.getDiffType().equals(DiffType.UNSUPPORTED)) {
                        diffArray.put(diffs.toJsonObject());
                        errorMessage += diffs.getMessage().orElse("");
                    }
                }
            }
            if (errorMessage.isEmpty()) {
                errorMessage = e.getMessage();
            }
        }
        catch (IllegalArgumentException | UnsupportedOperationException | ParsingException e) {
            errorMessage = e.getMessage();
            conversionStatus = Constants.FAILED;
        }

        // Assemble the JSON result.
        JSONObject result = new JSONObject();
        result.put(Constants.ORIGINAL_SQL, sql);
        result.put(Constants.ORIGINAL_SQL_TYPE, IMPALA.getValue());
        result.put(Constants.CONVERTED_SQL, convertedSql);
        result.put(Constants.STATUS, conversionStatus);
        result.put(Constants.MESSAGE, errorMessage);
        result.put(Constants.DIFFS, diffArray);
        return result;
    }
    catch (JSONException e) {
        throw new ParsingException("Construct parsing result failed: " + e.getMessage());
    }
    catch (StackOverflowError e) {
        throw new ParsingException("statement is too large (stack overflow while parsing)");
    }
}
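A hedged usage sketch. The no-argument constructor, the singleStatement start rule, and the JSON field reads are assumptions about the surrounding API, not confirmed by the snippet above:

public static void convertImpalaSql() throws JSONException
{
    ImpalaParser impalaParser = new ImpalaParser(); // constructor shape is an assumption
    JSONObject result = impalaParser.invokeParser(
            "SELECT name FROM users LIMIT 10",
            ImpalaSqlParser::singleStatement,       // assumed start rule of the generated grammar
            new ParsingOptions());
    System.out.println(result.getString(Constants.STATUS));        // SUCCESS, WARNING, UNSUPPORTED, or FAILED
    System.out.println(result.getString(Constants.CONVERTED_SQL)); // the rewritten SQL, when conversion succeeds
}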
Use of io.prestosql.sql.tree.Statement in project hetu-core by openlookeng.
Class HiveParser, method invokeParser:
public JSONObject invokeParser(String sql, Function<HiveSqlParser, ParserRuleContext> parseFunction, ParsingOptions parsingOptions)
{
    try {
        HiveSqlLexer lexer = new HiveSqlLexer(new CaseInsensitiveStream(CharStreams.fromString(sql)));
        CommonTokenStream tokenStream = new CommonTokenStream(lexer);
        HiveSqlParser parser = new HiveSqlParser(tokenStream);

        // Override the default error strategy so it does not attempt to insert or delete
        // a token; token insertion and deletion would garble error reporting.
        parser.setErrorHandler(new DefaultErrorStrategy()
        {
            @Override
            public Token recoverInline(Parser recognizer)
                    throws RecognitionException
            {
                if (nextTokensContext == null) {
                    throw new InputMismatchException(recognizer);
                }
                else {
                    throw new InputMismatchException(recognizer, nextTokensState, nextTokensContext);
                }
            }
        });

        parser.addParseListener(new PostProcessor(Arrays.asList(parser.getRuleNames())));

        lexer.removeErrorListeners();
        lexer.addErrorListener(LEXER_ERROR_LISTENER);
        parser.removeErrorListeners();
        if (enhancedErrorHandlerEnabled) {
            parser.addErrorListener(PARSER_ERROR_HANDLER);
        }
        else {
            parser.addErrorListener(LEXER_ERROR_LISTENER);
        }

        String convertedSql = "";
        String conversionStatus = "";
        String errorMessage = "";
        JSONArray diffArray = new JSONArray();
        HiveAstBuilder hiveAstBuilder = null;
        try {
            ParserRuleContext tree;
            try {
                // First, try parsing with the potentially faster SLL mode.
                parser.getInterpreter().setPredictionMode(PredictionMode.SLL);
                tree = parseFunction.apply(parser);
            }
            catch (ParseCancellationException e) {
                // If SLL fails, rewind the input stream and reparse with full LL mode.
                tokenStream.reset();
                parser.reset();
                parser.getInterpreter().setPredictionMode(PredictionMode.LL);
                tree = parseFunction.apply(parser);
            }

            hiveAstBuilder = new HiveAstBuilder(parsingOptions);
            Statement statement = (Statement) hiveAstBuilder.visit(tree);
            if (statement == null) {
                conversionStatus = Constants.FAILED;
                errorMessage = "The input SQL is invalid or empty.";
            }
            else {
                convertedSql = SqlFormatter.formatSql(statement, Optional.empty());
                // Downgrade to WARNING if any diff deleted content or flagged a function.
                conversionStatus = Constants.SUCCESS;
                for (ParserDiffs diffs : hiveAstBuilder.getParserDiffsList()) {
                    if (diffs.getDiffType().equals(DiffType.DELETED) || diffs.getDiffType().equals(DiffType.FUNCTION_WARNING)) {
                        conversionStatus = Constants.WARNING;
                    }
                    diffArray.put(diffs.toJsonObject());
                }
            }
        }
        catch (UnsupportedException e) {
            // Handle unsupported keywords: report each UNSUPPORTED diff and collect its message.
            conversionStatus = Constants.UNSUPPORTED;
            if (hiveAstBuilder != null) {
                for (ParserDiffs diffs : hiveAstBuilder.getParserDiffsList()) {
                    if (diffs.getDiffType().equals(DiffType.UNSUPPORTED)) {
                        diffArray.put(diffs.toJsonObject());
                        errorMessage += diffs.getMessage().orElse("");
                    }
                }
            }
            if (errorMessage.isEmpty()) {
                errorMessage = e.getMessage();
            }
        }
        catch (IllegalArgumentException | UnsupportedOperationException | ParsingException e) {
            errorMessage = e.getMessage();
            conversionStatus = Constants.FAILED;
        }

        // Assemble the JSON result.
        JSONObject result = new JSONObject();
        result.put(Constants.ORIGINAL_SQL, sql);
        result.put(Constants.ORIGINAL_SQL_TYPE, HIVE);
        result.put(Constants.CONVERTED_SQL, convertedSql);
        result.put(Constants.STATUS, conversionStatus);
        result.put(Constants.MESSAGE, errorMessage);
        result.put(Constants.DIFFS, diffArray);
        return result;
    }
    catch (JSONException e) {
        throw new ParsingException("Construct parsing result failed: " + e.getMessage());
    }
    catch (StackOverflowError e) {
        throw new ParsingException("statement is too large (stack overflow while parsing)");
    }
}
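Both invokeParser implementations above rely on ANTLR's two-stage parsing strategy: try the fast SLL prediction mode first and fall back to full LL only on failure. A self-contained sketch of that pattern, using the stock BailErrorStrategy to abort the first pass (the hetu-core code plugs in a custom error strategy instead); the class and parameter names are illustrative:

import java.util.function.Supplier;
import org.antlr.v4.runtime.BailErrorStrategy;
import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.DefaultErrorStrategy;
import org.antlr.v4.runtime.Parser;
import org.antlr.v4.runtime.ParserRuleContext;
import org.antlr.v4.runtime.atn.PredictionMode;
import org.antlr.v4.runtime.misc.ParseCancellationException;

final class TwoStageParse
{
    static ParserRuleContext parse(Parser parser, CommonTokenStream tokens, Supplier<ParserRuleContext> startRule)
    {
        try {
            // Stage 1: SLL prediction is faster and succeeds on the vast majority of inputs;
            // BailErrorStrategy cancels parsing at the first mismatch instead of recovering.
            parser.getInterpreter().setPredictionMode(PredictionMode.SLL);
            parser.setErrorHandler(new BailErrorStrategy());
            return startRule.get();
        }
        catch (ParseCancellationException e) {
            // Stage 2: rewind and reparse with full LL prediction, which either succeeds
            // or produces an accurate syntax error.
            tokens.seek(0);
            parser.reset();
            parser.getInterpreter().setPredictionMode(PredictionMode.LL);
            parser.setErrorHandler(new DefaultErrorStrategy());
            return startRule.get();
        }
    }
}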
Use of io.prestosql.sql.tree.Statement in project hetu-core by openlookeng.
Class Analyzer, method analyze:
public Analysis analyze(Statement statement, boolean isDescribe)
{
    Statement rewrittenStatement = StatementRewrite.rewrite(session, metadata, cubeManager, sqlParser, queryExplainer,
            statement, parameters, accessControl, warningCollector, heuristicIndexerManager);
    Analysis analysis = new Analysis(rewrittenStatement, parameters, isDescribe);
    analysis.setOriginalStatement(statement);

    StatementAnalyzer analyzer = new StatementAnalyzer(analysis, metadata, sqlParser, accessControl, session,
            warningCollector, heuristicIndexerManager, cubeManager);
    analyzer.analyze(rewrittenStatement, Optional.empty());

    // Check column access permissions for each referenced table.
    analysis.getTableColumnReferences().forEach((accessControlInfo, tableColumnReferences) ->
            tableColumnReferences.forEach((tableName, columns) ->
                    accessControlInfo.getAccessControl().checkCanSelectFromColumns(
                            session.getRequiredTransactionId(),
                            accessControlInfo.getIdentity(),
                            tableName,
                            columns)));
    return analysis;
}
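A sketch of driving this entry point, assuming sqlParser, parsingOptions, and a fully wired analyzer are in scope, and that Analysis exposes a getter matching setOriginalStatement (all of these are assumptions):

Statement statement = sqlParser.createStatement("SELECT orderkey FROM orders", parsingOptions);
Analysis analysis = analyzer.analyze(statement, false); // isDescribe = false
Statement original = analysis.getOriginalStatement();   // assumed accessor paired with setOriginalStatement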
Use of io.prestosql.sql.tree.Statement in project hetu-core by openlookeng.
Class TreeAssertions, method assertFormattedSql:
public static void assertFormattedSql(SqlParser sqlParser, ParsingOptions parsingOptions, Node expected)
{
    String formatted = formatSql(expected, Optional.empty());

    // Verify that already-formatted SQL round-trips through the formatter unchanged.
    Statement actual = parseFormatted(sqlParser, parsingOptions, formatted, expected);
    assertEquals(formatSql(actual, Optional.empty()), formatted);

    // Compare the parsed tree with the parsed tree of the formatted SQL.
    if (!actual.equals(expected)) {
        // Linearize both trees to make the non-equal node easier to find.
        assertListEquals(linearizeTree(actual), linearizeTree(expected));
    }
    assertEquals(actual, expected);
}
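A possible call site in a parser round-trip test; SQL_PARSER and parsingOptions are assumed fixtures:

Statement expected = SQL_PARSER.createStatement("SELECT a FROM t WHERE b > 1", parsingOptions);
assertFormattedSql(SQL_PARSER, parsingOptions, expected);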