Search in sources:

Example 1 with CaseInsensitiveStream

Use of io.prestosql.sql.parser.CaseInsensitiveStream in project hetu-core by openlookeng.

From the class TypeCalculation, method parseTypeCalculation.

private static ParserRuleContext parseTypeCalculation(String calculation) {
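    // Note: ANTLRInputStream is deprecated since ANTLR 4.7; CharStreams.fromString,
    // used in Examples 2 and 3 below, is the modern replacement.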
    TypeCalculationLexer lexer = new TypeCalculationLexer(new CaseInsensitiveStream(new ANTLRInputStream(calculation)));
    CommonTokenStream tokenStream = new CommonTokenStream(lexer);
    TypeCalculationParser parser = new TypeCalculationParser(tokenStream);
    lexer.removeErrorListeners();
    lexer.addErrorListener(ERROR_LISTENER);
    parser.removeErrorListeners();
    parser.addErrorListener(ERROR_LISTENER);
    ParserRuleContext tree;
    try {
        // first, try parsing with potentially faster SLL mode
        parser.getInterpreter().setPredictionMode(PredictionMode.SLL);
        tree = parser.typeCalculation();
    } catch (ParseCancellationException ex) {
        // if we fail, parse with LL mode
        // rewind input stream
        tokenStream.reset();
        parser.reset();
        parser.getInterpreter().setPredictionMode(PredictionMode.LL);
        tree = parser.typeCalculation();
    }
    return tree;
}
Also used: CommonTokenStream(org.antlr.v4.runtime.CommonTokenStream) ParserRuleContext(org.antlr.v4.runtime.ParserRuleContext) ParseCancellationException(org.antlr.v4.runtime.misc.ParseCancellationException) CaseInsensitiveStream(io.prestosql.sql.parser.CaseInsensitiveStream) ANTLRInputStream(org.antlr.v4.runtime.ANTLRInputStream)
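
Each example wraps the lexer input in CaseInsensitiveStream so that SQL keywords match regardless of case while getText still returns the original input. The real io.prestosql.sql.parser.CaseInsensitiveStream is not reproduced on this page; the sketch below illustrates the underlying idea, assuming a plain delegating CharStream whose only change is upper-casing the lookahead (the class name is hypothetical).

import org.antlr.v4.runtime.CharStream;
import org.antlr.v4.runtime.misc.Interval;

// Minimal sketch of a case-insensitive CharStream (hypothetical class name).
// Every call is delegated; only LA() is changed to report upper-cased
// characters, so grammar keywords can be written in a single case while
// getText() still returns the original, unmodified input.
public class CaseInsensitiveStreamSketch implements CharStream {
    private final CharStream stream;

    public CaseInsensitiveStreamSketch(CharStream stream) {
        this.stream = stream;
    }

    @Override
    public int LA(int i) {
        int result = stream.LA(i);
        // EOF (-1) and other non-positive sentinels must pass through unchanged
        return result <= 0 ? result : Character.toUpperCase(result);
    }

    @Override
    public String getText(Interval interval) {
        return stream.getText(interval);
    }

    @Override
    public void consume() {
        stream.consume();
    }

    @Override
    public int mark() {
        return stream.mark();
    }

    @Override
    public void release(int marker) {
        stream.release(marker);
    }

    @Override
    public int index() {
        return stream.index();
    }

    @Override
    public void seek(int index) {
        stream.seek(index);
    }

    @Override
    public int size() {
        return stream.size();
    }

    @Override
    public String getSourceName() {
        return stream.getSourceName();
    }
}

Only the lookahead is case-folded; token positions, text, and the source name are untouched, so error messages and the converted SQL keep the user's original casing.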

Example 2 with CaseInsensitiveStream

Use of io.prestosql.sql.parser.CaseInsensitiveStream in project hetu-core by openlookeng.

From the class ImpalaParser, method invokeParser.

public JSONObject invokeParser(String sql, Function<ImpalaSqlParser, ParserRuleContext> parseFunction, ParsingOptions parsingOptions) {
    try {
        ImpalaSqlLexer lexer = new ImpalaSqlLexer(new CaseInsensitiveStream(CharStreams.fromString(sql)));
        CommonTokenStream tokenStream = new CommonTokenStream(lexer);
        ImpalaSqlParser parser = new ImpalaSqlParser(tokenStream);
        // Override the default error strategy to not attempt inserting or deleting a token.
        // Otherwise, it messes up error reporting
        parser.setErrorHandler(new DefaultErrorStrategy() {

            @Override
            public Token recoverInline(Parser recognizer) throws RecognitionException {
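                // nextTokensContext and nextTokensState are protected fields of
                // DefaultErrorStrategy, recorded by sync(); when set, they let the
                // exception report expected tokens from the enclosing context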
                if (nextTokensContext == null) {
                    throw new InputMismatchException(recognizer);
                } else {
                    throw new InputMismatchException(recognizer, nextTokensState, nextTokensContext);
                }
            }
        });
        parser.addParseListener(new PostProcessor(Arrays.asList(parser.getRuleNames())));
        lexer.removeErrorListeners();
        lexer.addErrorListener(LEXER_ERROR_LISTENER);
        parser.removeErrorListeners();
        if (enhancedErrorHandlerEnabled) {
            parser.addErrorListener(PARSER_ERROR_HANDLER);
        } else {
            parser.addErrorListener(LEXER_ERROR_LISTENER);
        }
        String convertedSql = "";
        String conversionStatus = "";
        String errorMessage = "";
        JSONArray diffArray = new JSONArray();
        ImpalaAstBuilder impalaAstBuilder = null;
        try {
            ParserRuleContext tree;
            try {
                // first, try parsing with potentially faster SLL mode
                parser.getInterpreter().setPredictionMode(PredictionMode.SLL);
                tree = parseFunction.apply(parser);
            } catch (ParseCancellationException ex) {
                // if we fail, parse with LL mode
                // rewind input stream
                tokenStream.reset();
                parser.reset();
                parser.getInterpreter().setPredictionMode(PredictionMode.LL);
                tree = parseFunction.apply(parser);
            }
            impalaAstBuilder = new ImpalaAstBuilder(parsingOptions);
            Statement statement = (Statement) impalaAstBuilder.visit(tree);
            if (statement == null) {
                conversionStatus = Constants.FAILED;
                errorMessage = "The input sql is not valid or empty.";
            } else {
                convertedSql = SqlFormatter.formatSql(statement, Optional.empty());
                if (impalaAstBuilder.getParserDiffsList().isEmpty()) {
                    conversionStatus = Constants.SUCCESS;
                } else {
                    conversionStatus = Constants.SUCCESS;
                    for (ParserDiffs diffs : impalaAstBuilder.getParserDiffsList()) {
                        if (diffs.getDiffType().equals(DiffType.DELETED) || diffs.getDiffType().equals(DiffType.FUNCTION_WARNING)) {
                            conversionStatus = Constants.WARNING;
                        }
                        diffArray.put(diffs.toJsonObject());
                    }
                }
            }
        } catch (UnsupportedException e) {
            // handle the unsupported keywords
            conversionStatus = Constants.UNSUPPORTED;
            if (impalaAstBuilder != null) {
                for (ParserDiffs diffs : impalaAstBuilder.getParserDiffsList()) {
                    if (diffs.getDiffType().equals(DiffType.UNSUPPORTED)) {
                        diffArray.put(diffs.toJsonObject());
                        errorMessage += diffs.getMessage().isPresent() ? diffs.getMessage().get() : "";
                    }
                }
            }
            if (errorMessage.isEmpty()) {
                errorMessage = e.getMessage();
            }
        } catch (IllegalArgumentException | UnsupportedOperationException | ParsingException e) {
            errorMessage = e.getMessage();
            conversionStatus = Constants.FAILED;
        }
        // Construct json format result
        JSONObject result = new JSONObject();
        result.put(Constants.ORIGINAL_SQL, sql);
        result.put(Constants.ORIGINAL_SQL_TYPE, IMPALA.getValue());
        result.put(Constants.CONVERTED_SQL, convertedSql);
        result.put(Constants.STATUS, conversionStatus);
        result.put(Constants.MESSAGE, errorMessage);
        result.put(Constants.DIFFS, diffArray);
        return result;
    } catch (JSONException e) {
        throw new ParsingException("Construct parsing result failed." + e.getMessage());
    } catch (StackOverflowError e) {
        throw new ParsingException("statement is too large (stack overflow while parsing)");
    }
}
Also used: Token(org.antlr.v4.runtime.Token) CommonToken(org.antlr.v4.runtime.CommonToken) DefaultErrorStrategy(org.antlr.v4.runtime.DefaultErrorStrategy) ParsingException(io.prestosql.sql.parser.ParsingException) CommonTokenStream(org.antlr.v4.runtime.CommonTokenStream) ParserRuleContext(org.antlr.v4.runtime.ParserRuleContext) Statement(io.prestosql.sql.tree.Statement) JSONArray(org.codehaus.jettison.json.JSONArray) JSONException(org.codehaus.jettison.json.JSONException) InputMismatchException(org.antlr.v4.runtime.InputMismatchException) ImpalaSqlParser(io.hetu.core.migration.source.impala.ImpalaSqlParser) Parser(org.antlr.v4.runtime.Parser) JSONObject(org.codehaus.jettison.json.JSONObject) ImpalaSqlLexer(io.hetu.core.migration.source.impala.ImpalaSqlLexer) ParseCancellationException(org.antlr.v4.runtime.misc.ParseCancellationException) CaseInsensitiveStream(io.prestosql.sql.parser.CaseInsensitiveStream) RecognitionException(org.antlr.v4.runtime.RecognitionException)
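
All three call sites repeat the same two-stage prediction idiom: try the potentially faster SLL mode first and fall back to full LL only when SLL bails out with a ParseCancellationException. A minimal, generic sketch of that idiom follows; the helper class name and its Function parameter are illustrative, not part of hetu-core.

import java.util.function.Function;
import org.antlr.v4.runtime.Parser;
import org.antlr.v4.runtime.ParserRuleContext;
import org.antlr.v4.runtime.atn.PredictionMode;
import org.antlr.v4.runtime.misc.ParseCancellationException;

// Illustrative helper capturing the SLL-then-LL pattern shared by all three examples.
public final class TwoStageParse {
    private TwoStageParse() {
    }

    public static <P extends Parser> ParserRuleContext parse(P parser, Function<P, ParserRuleContext> rule) {
        try {
            // SLL resolves most inputs quickly but can spuriously fail on
            // constructs that need full-context prediction
            parser.getInterpreter().setPredictionMode(PredictionMode.SLL);
            return rule.apply(parser);
        } catch (ParseCancellationException e) {
            // rewind the token stream and retry with the complete LL algorithm
            parser.getTokenStream().seek(0);
            parser.reset();
            parser.getInterpreter().setPredictionMode(PredictionMode.LL);
            return rule.apply(parser);
        }
    }
}

With such a helper, the try/catch in parseTypeCalculation above would reduce to TwoStageParse.parse(parser, TypeCalculationParser::typeCalculation).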

Example 3 with CaseInsensitiveStream

Use of io.prestosql.sql.parser.CaseInsensitiveStream in project hetu-core by openlookeng.

From the class HiveParser, method invokeParser.

public JSONObject invokeParser(String sql, Function<HiveSqlParser, ParserRuleContext> parseFunction, ParsingOptions parsingOptions) {
    try {
        HiveSqlLexer lexer = new HiveSqlLexer(new CaseInsensitiveStream(CharStreams.fromString(sql)));
        CommonTokenStream tokenStream = new CommonTokenStream(lexer);
        HiveSqlParser parser = new HiveSqlParser(tokenStream);
        // Override the default error strategy to not attempt inserting or deleting a token.
        // Otherwise, it messes up error reporting
        parser.setErrorHandler(new DefaultErrorStrategy() {

            @Override
            public Token recoverInline(Parser recognizer) throws RecognitionException {
                if (nextTokensContext == null) {
                    throw new InputMismatchException(recognizer);
                } else {
                    throw new InputMismatchException(recognizer, nextTokensState, nextTokensContext);
                }
            }
        });
        parser.addParseListener(new PostProcessor(Arrays.asList(parser.getRuleNames())));
        lexer.removeErrorListeners();
        lexer.addErrorListener(LEXER_ERROR_LISTENER);
        parser.removeErrorListeners();
        if (enhancedErrorHandlerEnabled) {
            parser.addErrorListener(PARSER_ERROR_HANDLER);
        } else {
            parser.addErrorListener(LEXER_ERROR_LISTENER);
        }
        String convertedSql = "";
        String conversionStatus = "";
        String errorMessage = "";
        JSONArray diffArray = new JSONArray();
        HiveAstBuilder hiveAstBuilder = null;
        try {
            ParserRuleContext tree;
            try {
                parser.getInterpreter().setPredictionMode(PredictionMode.SLL);
                tree = parseFunction.apply(parser);
            } catch (ParseCancellationException e) {
                tokenStream.reset();
                parser.reset();
                parser.getInterpreter().setPredictionMode(PredictionMode.LL);
                tree = parseFunction.apply(parser);
            }
            hiveAstBuilder = new HiveAstBuilder(parsingOptions);
            Statement statement = (Statement) hiveAstBuilder.visit(tree);
            if (statement == null) {
                conversionStatus = Constants.FAILED;
                errorMessage = "The input sql is not valid or empty.";
            } else {
                convertedSql = SqlFormatter.formatSql(statement, Optional.empty());
                if (hiveAstBuilder.getParserDiffsList().isEmpty()) {
                    conversionStatus = Constants.SUCCESS;
                } else {
                    conversionStatus = Constants.SUCCESS;
                    for (ParserDiffs diffs : hiveAstBuilder.getParserDiffsList()) {
                        if (diffs.getDiffType().equals(DiffType.DELETED) || diffs.getDiffType().equals(DiffType.FUNCTION_WARNING)) {
                            conversionStatus = Constants.WARNING;
                        }
                        diffArray.put(diffs.toJsonObject());
                    }
                }
            }
        } catch (UnsupportedException e) {
            // handle the unsupported keywords
            conversionStatus = Constants.UNSUPPORTED;
            if (hiveAstBuilder != null) {
                for (ParserDiffs diffs : hiveAstBuilder.getParserDiffsList()) {
                    if (diffs.getDiffType().equals(DiffType.UNSUPPORTED)) {
                        diffArray.put(diffs.toJsonObject());
                        errorMessage += diffs.getMessage().isPresent() ? diffs.getMessage().get() : "";
                    }
                }
            }
            if (errorMessage.isEmpty()) {
                errorMessage = e.getMessage();
            }
        } catch (IllegalArgumentException | UnsupportedOperationException | ParsingException e) {
            errorMessage = e.getMessage();
            conversionStatus = Constants.FAILED;
        }
        // Construct json format result
        JSONObject result = new JSONObject();
        result.put(Constants.ORIGINAL_SQL, sql);
        result.put(Constants.ORIGINAL_SQL_TYPE, HIVE);
        result.put(Constants.CONVERTED_SQL, convertedSql);
        result.put(Constants.STATUS, conversionStatus);
        result.put(Constants.MESSAGE, errorMessage);
        result.put(Constants.DIFFS, diffArray);
        return result;
    } catch (JSONException e) {
        throw new ParsingException("Construct parsing result failed." + e.getMessage());
    } catch (StackOverflowError e) {
        throw new ParsingException("statement is too large (stack overflow while parsing)");
    }
}
Also used: Token(org.antlr.v4.runtime.Token) CommonToken(org.antlr.v4.runtime.CommonToken) DefaultErrorStrategy(org.antlr.v4.runtime.DefaultErrorStrategy) ParsingException(io.prestosql.sql.parser.ParsingException) CommonTokenStream(org.antlr.v4.runtime.CommonTokenStream) ParserRuleContext(org.antlr.v4.runtime.ParserRuleContext) Statement(io.prestosql.sql.tree.Statement) HiveSqlLexer(io.hetu.core.migration.source.hive.HiveSqlLexer) JSONArray(org.codehaus.jettison.json.JSONArray) JSONException(org.codehaus.jettison.json.JSONException) InputMismatchException(org.antlr.v4.runtime.InputMismatchException) Parser(org.antlr.v4.runtime.Parser) HiveSqlParser(io.hetu.core.migration.source.hive.HiveSqlParser) JSONObject(org.codehaus.jettison.json.JSONObject) ParseCancellationException(org.antlr.v4.runtime.misc.ParseCancellationException) CaseInsensitiveStream(io.prestosql.sql.parser.CaseInsensitiveStream) RecognitionException(org.antlr.v4.runtime.RecognitionException)
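
The anonymous DefaultErrorStrategy subclass (identical in Examples 2 and 3) disables ANTLR's single-token insertion and deletion recovery: recoverInline throws immediately, so the reported error points at the actual offending token instead of a silently patched stream. ANTLR also ships a stricter built-in strategy in the same spirit; the sketch below shows that alternative for contrast. It is not what hetu-core uses, because BailErrorStrategy aborts on the first error instead of reporting through the registered listeners.

import org.antlr.v4.runtime.BailErrorStrategy;
import org.antlr.v4.runtime.Parser;

// Contrast sketch, not the hetu-core approach: BailErrorStrategy cancels the
// parse on the first syntax error and wraps it in ParseCancellationException,
// the same exception the SLL-to-LL fallback above already catches.
final class BailConfiguration {
    static void useBailStrategy(Parser parser) {
        parser.setErrorHandler(new BailErrorStrategy());
    }
}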

Aggregations

CaseInsensitiveStream (io.prestosql.sql.parser.CaseInsensitiveStream): 3
CommonTokenStream (org.antlr.v4.runtime.CommonTokenStream): 3
ParserRuleContext (org.antlr.v4.runtime.ParserRuleContext): 3
ParseCancellationException (org.antlr.v4.runtime.misc.ParseCancellationException): 3
ParsingException (io.prestosql.sql.parser.ParsingException): 2
Statement (io.prestosql.sql.tree.Statement): 2
CommonToken (org.antlr.v4.runtime.CommonToken): 2
DefaultErrorStrategy (org.antlr.v4.runtime.DefaultErrorStrategy): 2
InputMismatchException (org.antlr.v4.runtime.InputMismatchException): 2
Parser (org.antlr.v4.runtime.Parser): 2
RecognitionException (org.antlr.v4.runtime.RecognitionException): 2
Token (org.antlr.v4.runtime.Token): 2
JSONArray (org.codehaus.jettison.json.JSONArray): 2
JSONException (org.codehaus.jettison.json.JSONException): 2
JSONObject (org.codehaus.jettison.json.JSONObject): 2
HiveSqlLexer (io.hetu.core.migration.source.hive.HiveSqlLexer): 1
HiveSqlParser (io.hetu.core.migration.source.hive.HiveSqlParser): 1
ImpalaSqlLexer (io.hetu.core.migration.source.impala.ImpalaSqlLexer): 1
ImpalaSqlParser (io.hetu.core.migration.source.impala.ImpalaSqlParser): 1
ANTLRInputStream (org.antlr.v4.runtime.ANTLRInputStream): 1