Use of io.prestosql.sql.parser.CaseInsensitiveStream in project hetu-core by openlookeng.
In class TypeCalculation, method parseTypeCalculation:
private static ParserRuleContext parseTypeCalculation(String calculation) {
    TypeCalculationLexer lexer = new TypeCalculationLexer(new CaseInsensitiveStream(new ANTLRInputStream(calculation)));
    CommonTokenStream tokenStream = new CommonTokenStream(lexer);
    TypeCalculationParser parser = new TypeCalculationParser(tokenStream);

    lexer.removeErrorListeners();
    lexer.addErrorListener(ERROR_LISTENER);

    parser.removeErrorListeners();
    parser.addErrorListener(ERROR_LISTENER);

    ParserRuleContext tree;
    try {
        // first, try parsing with potentially faster SLL mode
        parser.getInterpreter().setPredictionMode(PredictionMode.SLL);
        tree = parser.typeCalculation();
    } catch (ParseCancellationException ex) {
        // if we fail, parse with LL mode
        // rewind input stream
        tokenStream.reset();
        parser.reset();
        parser.getInterpreter().setPredictionMode(PredictionMode.LL);
        tree = parser.typeCalculation();
    }
    return tree;
}
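The try/catch above is ANTLR's standard two-stage parsing strategy: SLL prediction is tried first because it is almost always faster, and the parser rewinds and retries with full LL prediction only if SLL bails out with a ParseCancellationException.

CaseInsensitiveStream itself is a thin CharStream decorator: it upper-cases every character the lexer looks ahead at through LA(), so keywords match regardless of input case, while getText() still returns the original input. A minimal sketch of such a wrapper (the actual hetu-core class may differ in detail):

import org.antlr.v4.runtime.CharStream;
import org.antlr.v4.runtime.IntStream;
import org.antlr.v4.runtime.misc.Interval;

public class CaseInsensitiveStream implements CharStream {
    private final CharStream stream;

    public CaseInsensitiveStream(CharStream stream) {
        this.stream = stream;
    }

    @Override
    public int LA(int i) {
        int result = stream.LA(i);
        // leave EOF and the null sentinel untouched; upper-case everything else
        switch (result) {
            case 0:
            case IntStream.EOF:
                return result;
            default:
                return Character.toUpperCase(result);
        }
    }

    // the remaining CharStream methods simply delegate
    @Override
    public String getText(Interval interval) { return stream.getText(interval); }

    @Override
    public void consume() { stream.consume(); }

    @Override
    public int mark() { return stream.mark(); }

    @Override
    public void release(int marker) { stream.release(marker); }

    @Override
    public int index() { return stream.index(); }

    @Override
    public void seek(int index) { stream.seek(index); }

    @Override
    public int size() { return stream.size(); }

    @Override
    public String getSourceName() { return stream.getSourceName(); }
}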
Use of io.prestosql.sql.parser.CaseInsensitiveStream in project hetu-core by openlookeng.
In class ImpalaParser, method invokeParser:
public JSONObject invokeParser(String sql, Function<ImpalaSqlParser, ParserRuleContext> parseFunction, ParsingOptions parsingOptions) {
    try {
        ImpalaSqlLexer lexer = new ImpalaSqlLexer(new CaseInsensitiveStream(CharStreams.fromString(sql)));
        CommonTokenStream tokenStream = new CommonTokenStream(lexer);
        ImpalaSqlParser parser = new ImpalaSqlParser(tokenStream);

        // Override the default error strategy to not attempt inserting or deleting a token.
        // Otherwise, it messes up error reporting
        parser.setErrorHandler(new DefaultErrorStrategy() {
            @Override
            public Token recoverInline(Parser recognizer) throws RecognitionException {
                if (nextTokensContext == null) {
                    throw new InputMismatchException(recognizer);
                } else {
                    throw new InputMismatchException(recognizer, nextTokensState, nextTokensContext);
                }
            }
        });

        parser.addParseListener(new PostProcessor(Arrays.asList(parser.getRuleNames())));

        lexer.removeErrorListeners();
        lexer.addErrorListener(LEXER_ERROR_LISTENER);

        parser.removeErrorListeners();
        if (enhancedErrorHandlerEnabled) {
            parser.addErrorListener(PARSER_ERROR_HANDLER);
        } else {
            parser.addErrorListener(LEXER_ERROR_LISTENER);
        }

        String convertedSql = "";
        String conversionStatus = "";
        String errorMessage = "";
        JSONArray diffArray = new JSONArray();
        ImpalaAstBuilder impalaAstBuilder = null;
        try {
            ParserRuleContext tree;
            try {
                // first, try parsing with potentially faster SLL mode
                parser.getInterpreter().setPredictionMode(PredictionMode.SLL);
                tree = parseFunction.apply(parser);
            } catch (ParseCancellationException ex) {
                // if we fail, parse with LL mode
                // rewind input stream
                tokenStream.reset();
                parser.reset();
                parser.getInterpreter().setPredictionMode(PredictionMode.LL);
                tree = parseFunction.apply(parser);
            }
            impalaAstBuilder = new ImpalaAstBuilder(parsingOptions);
            Statement statement = (Statement) impalaAstBuilder.visit(tree);
            if (statement == null) {
                conversionStatus = Constants.FAILED;
                errorMessage = "The input sql is not valid or empty.";
            } else {
                convertedSql = SqlFormatter.formatSql(statement, Optional.empty());
                if (impalaAstBuilder.getParserDiffsList().isEmpty()) {
                    conversionStatus = Constants.SUCCESS;
                } else {
                    conversionStatus = Constants.SUCCESS;
                    for (ParserDiffs diffs : impalaAstBuilder.getParserDiffsList()) {
                        if (diffs.getDiffType().equals(DiffType.DELETED) || diffs.getDiffType().equals(DiffType.FUNCTION_WARNING)) {
                            conversionStatus = Constants.WARNING;
                        }
                        diffArray.put(diffs.toJsonObject());
                    }
                }
            }
        } catch (UnsupportedException e) {
            // handle the unsupported keywords
            conversionStatus = Constants.UNSUPPORTED;
            if (impalaAstBuilder != null) {
                for (ParserDiffs diffs : impalaAstBuilder.getParserDiffsList()) {
                    if (diffs.getDiffType().equals(DiffType.UNSUPPORTED)) {
                        diffArray.put(diffs.toJsonObject());
                        errorMessage += diffs.getMessage().isPresent() ? diffs.getMessage().get() : "";
                    }
                }
            }
            if (errorMessage.isEmpty()) {
                errorMessage = e.getMessage();
            }
        } catch (IllegalArgumentException | UnsupportedOperationException | ParsingException e) {
            errorMessage = e.getMessage();
            conversionStatus = Constants.FAILED;
        }

        // Construct json format result
        JSONObject result = new JSONObject();
        result.put(Constants.ORIGINAL_SQL, sql);
        result.put(Constants.ORIGINAL_SQL_TYPE, IMPALA.getValue());
        result.put(Constants.CONVERTED_SQL, convertedSql);
        result.put(Constants.STATUS, conversionStatus);
        result.put(Constants.MESSAGE, errorMessage);
        result.put(Constants.DIFFS, diffArray);
        return result;
    } catch (JSONException e) {
        throw new ParsingException("Construct parsing result failed." + e.getMessage());
    } catch (StackOverflowError e) {
        throw new ParsingException("statement is too large (stack overflow while parsing)");
    }
}
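A hypothetical call site for illustration; the singleStatement entry rule, the no-arg constructors, and the input SQL are assumptions, not taken from the code above:

// Sketch only: rule name and constructors are assumed; check the actual hetu-core API.
ImpalaParser impalaParser = new ImpalaParser();
JSONObject result = impalaParser.invokeParser(
        "SELECT id FROM t LIMIT 10",
        ImpalaSqlParser::singleStatement,
        new ParsingOptions());
// toString() renders the JSON result with the original SQL, converted SQL, status, message, and diffs
System.out.println(result);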
Use of io.prestosql.sql.parser.CaseInsensitiveStream in project hetu-core by openlookeng.
In class HiveParser, method invokeParser:
public JSONObject invokeParser(String sql, Function<HiveSqlParser, ParserRuleContext> parseFunction, ParsingOptions parsingOptions) {
    try {
        HiveSqlLexer lexer = new HiveSqlLexer(new CaseInsensitiveStream(CharStreams.fromString(sql)));
        CommonTokenStream tokenStream = new CommonTokenStream(lexer);
        HiveSqlParser parser = new HiveSqlParser(tokenStream);

        // Override the default error strategy to not attempt inserting or deleting a token.
        // Otherwise, it messes up error reporting
        parser.setErrorHandler(new DefaultErrorStrategy() {
            @Override
            public Token recoverInline(Parser recognizer) throws RecognitionException {
                if (nextTokensContext == null) {
                    throw new InputMismatchException(recognizer);
                } else {
                    throw new InputMismatchException(recognizer, nextTokensState, nextTokensContext);
                }
            }
        });

        parser.addParseListener(new PostProcessor(Arrays.asList(parser.getRuleNames())));

        lexer.removeErrorListeners();
        lexer.addErrorListener(LEXER_ERROR_LISTENER);

        parser.removeErrorListeners();
        if (enhancedErrorHandlerEnabled) {
            parser.addErrorListener(PARSER_ERROR_HANDLER);
        } else {
            parser.addErrorListener(LEXER_ERROR_LISTENER);
        }

        String convertedSql = "";
        String conversionStatus = "";
        String errorMessage = "";
        JSONArray diffArray = new JSONArray();
        HiveAstBuilder hiveAstBuilder = null;
        try {
            ParserRuleContext tree;
            try {
                // first, try parsing with potentially faster SLL mode
                parser.getInterpreter().setPredictionMode(PredictionMode.SLL);
                tree = parseFunction.apply(parser);
            } catch (ParseCancellationException e) {
                // if we fail, parse with LL mode; rewind input stream first
                tokenStream.reset();
                parser.reset();
                parser.getInterpreter().setPredictionMode(PredictionMode.LL);
                tree = parseFunction.apply(parser);
            }
            hiveAstBuilder = new HiveAstBuilder(parsingOptions);
            Statement statement = (Statement) hiveAstBuilder.visit(tree);
            if (statement == null) {
                conversionStatus = Constants.FAILED;
                errorMessage = "The input sql is not valid or empty.";
            } else {
                convertedSql = SqlFormatter.formatSql(statement, Optional.empty());
                if (hiveAstBuilder.getParserDiffsList().isEmpty()) {
                    conversionStatus = Constants.SUCCESS;
                } else {
                    conversionStatus = Constants.SUCCESS;
                    for (ParserDiffs diffs : hiveAstBuilder.getParserDiffsList()) {
                        if (diffs.getDiffType().equals(DiffType.DELETED) || diffs.getDiffType().equals(DiffType.FUNCTION_WARNING)) {
                            conversionStatus = Constants.WARNING;
                        }
                        diffArray.put(diffs.toJsonObject());
                    }
                }
            }
        } catch (UnsupportedException e) {
            // handle the unsupported keywords
            conversionStatus = Constants.UNSUPPORTED;
            if (hiveAstBuilder != null) {
                for (ParserDiffs diffs : hiveAstBuilder.getParserDiffsList()) {
                    if (diffs.getDiffType().equals(DiffType.UNSUPPORTED)) {
                        diffArray.put(diffs.toJsonObject());
                        errorMessage += diffs.getMessage().isPresent() ? diffs.getMessage().get() : "";
                    }
                }
            }
            if (errorMessage.isEmpty()) {
                errorMessage = e.getMessage();
            }
        } catch (IllegalArgumentException | UnsupportedOperationException | ParsingException e) {
            errorMessage = e.getMessage();
            conversionStatus = Constants.FAILED;
        }

        // Construct json format result
        JSONObject result = new JSONObject();
        result.put(Constants.ORIGINAL_SQL, sql);
        result.put(Constants.ORIGINAL_SQL_TYPE, HIVE);
        result.put(Constants.CONVERTED_SQL, convertedSql);
        result.put(Constants.STATUS, conversionStatus);
        result.put(Constants.MESSAGE, errorMessage);
        result.put(Constants.DIFFS, diffArray);
        return result;
    } catch (JSONException e) {
        throw new ParsingException("Construct parsing result failed." + e.getMessage());
    } catch (StackOverflowError e) {
        throw new ParsingException("statement is too large (stack overflow while parsing)");
    }
}
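Both invokeParser variants install a LEXER_ERROR_LISTENER and an optional PARSER_ERROR_HANDLER that are defined elsewhere in their classes and not shown above. In the Presto lineage such a listener is conventionally a BaseErrorListener that rethrows ANTLR syntax errors as ParsingException. A sketch of that pattern, assuming a ParsingException(message, cause, line, column) constructor; the listener actually used by hetu-core may differ:

import org.antlr.v4.runtime.BaseErrorListener;
import org.antlr.v4.runtime.RecognitionException;
import org.antlr.v4.runtime.Recognizer;

// Sketch only: converts every lexer/parser syntax error into a ParsingException
// instead of letting ANTLR print to the console and attempt recovery.
private static final BaseErrorListener LEXER_ERROR_LISTENER = new BaseErrorListener() {
    @Override
    public void syntaxError(Recognizer<?, ?> recognizer, Object offendingSymbol,
            int line, int charPositionInLine, String message, RecognitionException e) {
        throw new ParsingException(message, e, line, charPositionInLine);
    }
};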