Use of io.hetu.core.migration.source.hive.HiveSqlParser in project hetu-core by openLooKeng.
The class HiveParser, method invokeParser (a hedged usage sketch follows the listing below):
public JSONObject invokeParser(String sql, Function<HiveSqlParser, ParserRuleContext> parseFunction, ParsingOptions parsingOptions) {
    try {
        HiveSqlLexer lexer = new HiveSqlLexer(new CaseInsensitiveStream(CharStreams.fromString(sql)));
        CommonTokenStream tokenStream = new CommonTokenStream(lexer);
        HiveSqlParser parser = new HiveSqlParser(tokenStream);
        // Override the default error strategy to not attempt inserting or deleting a token.
        // Otherwise, it messes up error reporting.
        parser.setErrorHandler(new DefaultErrorStrategy() {
            @Override
            public Token recoverInline(Parser recognizer) throws RecognitionException {
                if (nextTokensContext == null) {
                    throw new InputMismatchException(recognizer);
                } else {
                    throw new InputMismatchException(recognizer, nextTokensState, nextTokensContext);
                }
            }
        });
        parser.addParseListener(new PostProcessor(Arrays.asList(parser.getRuleNames())));
        lexer.removeErrorListeners();
        lexer.addErrorListener(LEXER_ERROR_LISTENER);
        parser.removeErrorListeners();
        if (enhancedErrorHandlerEnabled) {
            parser.addErrorListener(PARSER_ERROR_HANDLER);
        } else {
            parser.addErrorListener(LEXER_ERROR_LISTENER);
        }
        String convertedSql = "";
        String conversionStatus = "";
        String errorMessage = "";
        JSONArray diffArray = new JSONArray();
        HiveAstBuilder hiveAstBuilder = null;
        try {
            ParserRuleContext tree;
            try {
                // First, try parsing with the faster SLL prediction mode.
                parser.getInterpreter().setPredictionMode(PredictionMode.SLL);
                tree = parseFunction.apply(parser);
            } catch (ParseCancellationException e) {
                // If SLL parsing fails, rewind the token stream and re-parse with the full LL mode.
                tokenStream.reset();
                parser.reset();
                parser.getInterpreter().setPredictionMode(PredictionMode.LL);
                tree = parseFunction.apply(parser);
            }
            hiveAstBuilder = new HiveAstBuilder(parsingOptions);
            Statement statement = (Statement) hiveAstBuilder.visit(tree);
            if (statement == null) {
                conversionStatus = Constants.FAILED;
                errorMessage = "The input sql is not valid or empty.";
            } else {
                convertedSql = SqlFormatter.formatSql(statement, Optional.empty());
                if (hiveAstBuilder.getParserDiffsList().isEmpty()) {
                    conversionStatus = Constants.SUCCESS;
                } else {
                    conversionStatus = Constants.SUCCESS;
                    for (ParserDiffs diffs : hiveAstBuilder.getParserDiffsList()) {
                        if (diffs.getDiffType().equals(DiffType.DELETED) || diffs.getDiffType().equals(DiffType.FUNCTION_WARNING)) {
                            conversionStatus = Constants.WARNING;
                        }
                        diffArray.put(diffs.toJsonObject());
                    }
                }
            }
        } catch (UnsupportedException e) {
            // handle the unsupported keywords
            conversionStatus = Constants.UNSUPPORTED;
            if (hiveAstBuilder != null) {
                for (ParserDiffs diffs : hiveAstBuilder.getParserDiffsList()) {
                    if (diffs.getDiffType().equals(DiffType.UNSUPPORTED)) {
                        diffArray.put(diffs.toJsonObject());
                        errorMessage += diffs.getMessage().isPresent() ? diffs.getMessage().get() : "";
                    }
                }
            }
            if (errorMessage.isEmpty()) {
                errorMessage = e.getMessage();
            }
        } catch (IllegalArgumentException | UnsupportedOperationException | ParsingException e) {
            errorMessage = e.getMessage();
            conversionStatus = Constants.FAILED;
        }
        // Construct json format result
        JSONObject result = new JSONObject();
        result.put(Constants.ORIGINAL_SQL, sql);
        result.put(Constants.ORIGINAL_SQL_TYPE, HIVE);
        result.put(Constants.CONVERTED_SQL, convertedSql);
        result.put(Constants.STATUS, conversionStatus);
        result.put(Constants.MESSAGE, errorMessage);
        result.put(Constants.DIFFS, diffArray);
        return result;
    } catch (JSONException e) {
        throw new ParsingException("Construct parsing result failed." + e.getMessage());
    } catch (StackOverflowError e) {
        throw new ParsingException("statement is too large (stack overflow while parsing)");
    }
}
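
The listing can be exercised roughly as sketched below. This is a minimal, illustrative sketch, not code from the project: the no-arg HiveParser constructor, the singleStatement grammar entry rule, the ParsingOptions constructor, and the import paths are assumptions; only invokeParser's signature is taken from the listing above.

    // Hedged usage sketch: the imports, constructor, and entry rule below are assumptions.
    import io.prestosql.sql.parser.ParsingOptions;   // assumed import path for ParsingOptions
    import org.codehaus.jettison.json.JSONObject;    // assumed JSON library behind the result

    public class HiveSqlMigrationExample {
        public static void main(String[] args) throws Exception {
            HiveParser hiveParser = new HiveParser();    // assumed no-arg constructor
            String hiveSql = "SELECT name FROM employee WHERE age > 30";

            // Hand invokeParser the SQL text, the grammar entry rule to run
            // (assumed to be singleStatement), and the parsing options.
            JSONObject result = hiveParser.invokeParser(hiveSql, HiveSqlParser::singleStatement, new ParsingOptions());

            // The returned JSON carries the conversion status, the converted SQL, and any diffs.
            System.out.println(result.getString(Constants.STATUS));
            System.out.println(result.getString(Constants.CONVERTED_SQL));
        }
    }

If the input uses a construct the migration tool cannot translate, the status field comes back as UNSUPPORTED and the message and diffs entries of the result describe why.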