
Example 11 with Statement

use of io.prestosql.sql.tree.Statement in project hetu-core by openlookeng.

the class TestAnalyzer method analyze.

private void analyze(Session clientSession, @Language("SQL") String query) {
    transaction(transactionManager, accessControl).readUncommitted().readOnly().execute(clientSession, session -> {
        Analyzer analyzer = createAnalyzer(session, metadata);
        Statement statement = SQL_PARSER.createStatement(query);
        analyzer.analyze(statement);
    });
}
Also used : Statement(io.prestosql.sql.tree.Statement)
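
A minimal sketch of how this helper might be exercised, assuming the surrounding TestAnalyzer class uses TestNG and defines a CLIENT_SESSION test fixture; the query text is illustrative and not taken from the snippet above.

// Hypothetical TestNG usage of the analyze(...) helper above; CLIENT_SESSION is an assumed
// session fixture created elsewhere in the test class.
@Test
public void testSimpleSelectAnalyzes() {
    // Should complete without throwing a SemanticException for a well-formed query.
    analyze(CLIENT_SESSION, "SELECT a, b FROM t1 WHERE a > 1");
}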

Example 12 with Statement

use of io.prestosql.sql.tree.Statement in project hetu-core by openlookeng.

the class ImpalaParser method invokeParser.

public JSONObject invokeParser(String sql, Function<ImpalaSqlParser, ParserRuleContext> parseFunction, ParsingOptions parsingOptions) {
    try {
        ImpalaSqlLexer lexer = new ImpalaSqlLexer(new CaseInsensitiveStream(CharStreams.fromString(sql)));
        CommonTokenStream tokenStream = new CommonTokenStream(lexer);
        ImpalaSqlParser parser = new ImpalaSqlParser(tokenStream);
        // Override the default error strategy to not attempt inserting or deleting a token.
        // Otherwise, it messes up error reporting
        parser.setErrorHandler(new DefaultErrorStrategy() {

            @Override
            public Token recoverInline(Parser recognizer) throws RecognitionException {
                if (nextTokensContext == null) {
                    throw new InputMismatchException(recognizer);
                } else {
                    throw new InputMismatchException(recognizer, nextTokensState, nextTokensContext);
                }
            }
        });
        parser.addParseListener(new PostProcessor(Arrays.asList(parser.getRuleNames())));
        lexer.removeErrorListeners();
        lexer.addErrorListener(LEXER_ERROR_LISTENER);
        parser.removeErrorListeners();
        if (enhancedErrorHandlerEnabled) {
            parser.addErrorListener(PARSER_ERROR_HANDLER);
        } else {
            parser.addErrorListener(LEXER_ERROR_LISTENER);
        }
        String convertedSql = "";
        String conversionStatus = "";
        String errorMessage = "";
        JSONArray diffArray = new JSONArray();
        ImpalaAstBuilder impalaAstBuilder = null;
        try {
            ParserRuleContext tree;
            try {
                // first, try parsing with potentially faster SLL mode
                parser.getInterpreter().setPredictionMode(PredictionMode.SLL);
                tree = parseFunction.apply(parser);
            } catch (ParseCancellationException ex) {
                // if we fail, parse with LL mode
                // rewind input stream
                tokenStream.reset();
                parser.reset();
                parser.getInterpreter().setPredictionMode(PredictionMode.LL);
                tree = parseFunction.apply(parser);
            }
            impalaAstBuilder = new ImpalaAstBuilder(parsingOptions);
            Statement statement = (Statement) impalaAstBuilder.visit(tree);
            if (statement == null) {
                conversionStatus = Constants.FAILED;
                errorMessage = "The input sql is not valid or empty.";
            } else {
                convertedSql = SqlFormatter.formatSql(statement, Optional.empty());
                if (impalaAstBuilder.getParserDiffsList().isEmpty()) {
                    conversionStatus = Constants.SUCCESS;
                } else {
                    conversionStatus = Constants.SUCCESS;
                    for (ParserDiffs diffs : impalaAstBuilder.getParserDiffsList()) {
                        if (diffs.getDiffType().equals(DiffType.DELETED) || diffs.getDiffType().equals(DiffType.FUNCTION_WARNING)) {
                            conversionStatus = Constants.WARNING;
                        }
                        diffArray.put(diffs.toJsonObject());
                    }
                }
            }
        } catch (UnsupportedException e) {
            // handle the unsupported keywords
            conversionStatus = Constants.UNSUPPORTED;
            if (impalaAstBuilder != null) {
                for (ParserDiffs diffs : impalaAstBuilder.getParserDiffsList()) {
                    if (diffs.getDiffType().equals(DiffType.UNSUPPORTED)) {
                        diffArray.put(diffs.toJsonObject());
                        errorMessage += diffs.getMessage().isPresent() ? diffs.getMessage().get() : "";
                    }
                }
            }
            if (errorMessage.isEmpty()) {
                errorMessage = e.getMessage();
            }
        } catch (IllegalArgumentException | UnsupportedOperationException | ParsingException e) {
            errorMessage = e.getMessage();
            conversionStatus = Constants.FAILED;
        }
        // Construct json format result
        JSONObject result = new JSONObject();
        result.put(Constants.ORIGINAL_SQL, sql);
        result.put(Constants.ORIGINAL_SQL_TYPE, IMPALA.getValue());
        result.put(Constants.CONVERTED_SQL, convertedSql);
        result.put(Constants.STATUS, conversionStatus);
        result.put(Constants.MESSAGE, errorMessage);
        result.put(Constants.DIFFS, diffArray);
        return result;
    } catch (JSONException e) {
        throw new ParsingException("Construct parsing result failed." + e.getMessage());
    } catch (StackOverflowError e) {
        throw new ParsingException("statement is too large (stack overflow while parsing)");
    }
}
Also used : Token(org.antlr.v4.runtime.Token) CommonToken(org.antlr.v4.runtime.CommonToken) DefaultErrorStrategy(org.antlr.v4.runtime.DefaultErrorStrategy) ParsingException(io.prestosql.sql.parser.ParsingException) CommonTokenStream(org.antlr.v4.runtime.CommonTokenStream) ParserRuleContext(org.antlr.v4.runtime.ParserRuleContext) Statement(io.prestosql.sql.tree.Statement) JSONArray(org.codehaus.jettison.json.JSONArray) JSONException(org.codehaus.jettison.json.JSONException) InputMismatchException(org.antlr.v4.runtime.InputMismatchException) ImpalaSqlParser(io.hetu.core.migration.source.impala.ImpalaSqlParser) Parser(org.antlr.v4.runtime.Parser) JSONObject(org.codehaus.jettison.json.JSONObject) ImpalaSqlLexer(io.hetu.core.migration.source.impala.ImpalaSqlLexer) ParseCancellationException(org.antlr.v4.runtime.misc.ParseCancellationException) CaseInsensitiveStream(io.prestosql.sql.parser.CaseInsensitiveStream) RecognitionException(org.antlr.v4.runtime.RecognitionException)
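
A minimal sketch of driving this converter (imports as in the list above); the ImpalaSqlParser::singleStatement parse function, the no-argument ParsingOptions constructor, and the way the ImpalaParser instance is obtained are assumptions, not shown in the snippet.

// Hypothetical caller; singleStatement is an assumed top-level grammar rule.
public static String convertImpalaSql(ImpalaParser impalaParser, String impalaSql) throws JSONException {
    JSONObject result = impalaParser.invokeParser(impalaSql, ImpalaSqlParser::singleStatement, new ParsingOptions());
    // CONVERTED_SQL stays empty when the status is Failed or Unsupported; MESSAGE and DIFFS carry the details.
    return result.getString(Constants.CONVERTED_SQL);
}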

Example 13 with Statement

use of io.prestosql.sql.tree.Statement in project hetu-core by openlookeng.

the class HiveParser method invokeParser.

public JSONObject invokeParser(String sql, Function<HiveSqlParser, ParserRuleContext> parseFunction, ParsingOptions parsingOptions) {
    try {
        HiveSqlLexer lexer = new HiveSqlLexer(new CaseInsensitiveStream(CharStreams.fromString(sql)));
        CommonTokenStream tokenStream = new CommonTokenStream(lexer);
        HiveSqlParser parser = new HiveSqlParser(tokenStream);
        // Override the default error strategy to not attempt inserting or deleting a token.
        // Otherwise, it messes up error reporting
        parser.setErrorHandler(new DefaultErrorStrategy() {

            @Override
            public Token recoverInline(Parser recognizer) throws RecognitionException {
                if (nextTokensContext == null) {
                    throw new InputMismatchException(recognizer);
                } else {
                    throw new InputMismatchException(recognizer, nextTokensState, nextTokensContext);
                }
            }
        });
        parser.addParseListener(new PostProcessor(Arrays.asList(parser.getRuleNames())));
        lexer.removeErrorListeners();
        lexer.addErrorListener(LEXER_ERROR_LISTENER);
        parser.removeErrorListeners();
        if (enhancedErrorHandlerEnabled) {
            parser.addErrorListener(PARSER_ERROR_HANDLER);
        } else {
            parser.addErrorListener(LEXER_ERROR_LISTENER);
        }
        String convertedSql = "";
        String conversionStatus = "";
        String errorMessage = "";
        JSONArray diffArray = new JSONArray();
        HiveAstBuilder hiveAstBuilder = null;
        try {
            ParserRuleContext tree;
            try {
                // first, try parsing with potentially faster SLL mode
                parser.getInterpreter().setPredictionMode(PredictionMode.SLL);
                tree = parseFunction.apply(parser);
            } catch (ParseCancellationException e) {
                // if we fail, parse with LL mode
                // rewind input stream
                tokenStream.reset();
                parser.reset();
                parser.getInterpreter().setPredictionMode(PredictionMode.LL);
                tree = parseFunction.apply(parser);
            }
            hiveAstBuilder = new HiveAstBuilder(parsingOptions);
            Statement statement = (Statement) hiveAstBuilder.visit(tree);
            if (statement == null) {
                conversionStatus = Constants.FAILED;
                errorMessage = "The input sql is not valid or empty.";
            } else {
                convertedSql = SqlFormatter.formatSql(statement, Optional.empty());
                if (hiveAstBuilder.getParserDiffsList().isEmpty()) {
                    conversionStatus = Constants.SUCCESS;
                } else {
                    conversionStatus = Constants.SUCCESS;
                    for (ParserDiffs diffs : hiveAstBuilder.getParserDiffsList()) {
                        if (diffs.getDiffType().equals(DiffType.DELETED) || diffs.getDiffType().equals(DiffType.FUNCTION_WARNING)) {
                            conversionStatus = Constants.WARNING;
                        }
                        diffArray.put(diffs.toJsonObject());
                    }
                }
            }
        } catch (UnsupportedException e) {
            // handle the unsupported keywords
            conversionStatus = Constants.UNSUPPORTED;
            if (hiveAstBuilder != null) {
                for (ParserDiffs diffs : hiveAstBuilder.getParserDiffsList()) {
                    if (diffs.getDiffType().equals(DiffType.UNSUPPORTED)) {
                        diffArray.put(diffs.toJsonObject());
                        errorMessage += diffs.getMessage().isPresent() ? diffs.getMessage().get() : "";
                    }
                }
            }
            if (errorMessage.isEmpty()) {
                errorMessage = e.getMessage();
            }
        } catch (IllegalArgumentException | UnsupportedOperationException | ParsingException e) {
            errorMessage = e.getMessage();
            conversionStatus = Constants.FAILED;
        }
        // Construct json format result
        JSONObject result = new JSONObject();
        result.put(Constants.ORIGINAL_SQL, sql);
        result.put(Constants.ORIGINAL_SQL_TYPE, HIVE);
        result.put(Constants.CONVERTED_SQL, convertedSql);
        result.put(Constants.STATUS, conversionStatus);
        result.put(Constants.MESSAGE, errorMessage);
        result.put(Constants.DIFFS, diffArray);
        return result;
    } catch (JSONException e) {
        throw new ParsingException("Construct parsing result failed." + e.getMessage());
    } catch (StackOverflowError e) {
        throw new ParsingException("statement is too large (stack overflow while parsing)");
    }
}
Also used : Token(org.antlr.v4.runtime.Token) CommonToken(org.antlr.v4.runtime.CommonToken) DefaultErrorStrategy(org.antlr.v4.runtime.DefaultErrorStrategy) ParsingException(io.prestosql.sql.parser.ParsingException) CommonTokenStream(org.antlr.v4.runtime.CommonTokenStream) ParserRuleContext(org.antlr.v4.runtime.ParserRuleContext) Statement(io.prestosql.sql.tree.Statement) HiveSqlLexer(io.hetu.core.migration.source.hive.HiveSqlLexer) JSONArray(org.codehaus.jettison.json.JSONArray) JSONException(org.codehaus.jettison.json.JSONException) InputMismatchException(org.antlr.v4.runtime.InputMismatchException) Parser(org.antlr.v4.runtime.Parser) HiveSqlParser(io.hetu.core.migration.source.hive.HiveSqlParser) JSONObject(org.codehaus.jettison.json.JSONObject) ParseCancellationException(org.antlr.v4.runtime.misc.ParseCancellationException) CaseInsensitiveStream(io.prestosql.sql.parser.CaseInsensitiveStream) RecognitionException(org.antlr.v4.runtime.RecognitionException)
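
A minimal sketch of inspecting the returned JSON (imports as in the list above); the HiveSqlParser::singleStatement parse function and the way the HiveParser instance is obtained are assumptions, not shown in the snippet.

// Hypothetical result handling built on the keys written by invokeParser above.
public static void printConversion(HiveParser hiveParser, String hiveSql) throws JSONException {
    JSONObject result = hiveParser.invokeParser(hiveSql, HiveSqlParser::singleStatement, new ParsingOptions());
    String status = result.getString(Constants.STATUS);
    if (Constants.FAILED.equals(status) || Constants.UNSUPPORTED.equals(status)) {
        System.out.println("Conversion failed: " + result.getString(Constants.MESSAGE));
    } else {
        // Success and Warning both carry a converted statement; Warning also records diff entries.
        System.out.println(result.getString(Constants.CONVERTED_SQL));
    }
    JSONArray diffs = result.getJSONArray(Constants.DIFFS);
    for (int i = 0; i < diffs.length(); i++) {
        System.out.println(diffs.getJSONObject(i));
    }
}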

Example 14 with Statement

use of io.prestosql.sql.tree.Statement in project hetu-core by openlookeng.

the class Analyzer method analyze.

public Analysis analyze(Statement statement, boolean isDescribe) {
    Statement rewrittenStatement = StatementRewrite.rewrite(session, metadata, cubeManager, sqlParser, queryExplainer, statement, parameters, accessControl, warningCollector, heuristicIndexerManager);
    Analysis analysis = new Analysis(rewrittenStatement, parameters, isDescribe);
    analysis.setOriginalStatement(statement);
    StatementAnalyzer analyzer = new StatementAnalyzer(analysis, metadata, sqlParser, accessControl, session, warningCollector, heuristicIndexerManager, cubeManager);
    analyzer.analyze(rewrittenStatement, Optional.empty());
    // check column access permissions for each table
    analysis.getTableColumnReferences().forEach((accessControlInfo, tableColumnReferences) -> tableColumnReferences.forEach((tableName, columns) -> accessControlInfo.getAccessControl().checkCanSelectFromColumns(session.getRequiredTransactionId(), accessControlInfo.getIdentity(), tableName, columns)));
    return analysis;
}
Also used : Iterables(com.google.common.collect.Iterables) SqlParser(io.prestosql.sql.parser.SqlParser) NOT_SUPPORTED(io.prestosql.sql.analyzer.SemanticErrorCode.NOT_SUPPORTED) Statement(io.prestosql.sql.tree.Statement) WarningCollector(io.prestosql.execution.warnings.WarningCollector) ExpressionTreeUtils.extractWindowFunctions(io.prestosql.sql.analyzer.ExpressionTreeUtils.extractWindowFunctions) ImmutableList(com.google.common.collect.ImmutableList) FunctionCall(io.prestosql.sql.tree.FunctionCall) Map(java.util.Map) Objects.requireNonNull(java.util.Objects.requireNonNull) Session(io.prestosql.Session) ExpressionTreeUtils.extractExpressions(io.prestosql.sql.analyzer.ExpressionTreeUtils.extractExpressions) StatementRewrite(io.prestosql.sql.rewrite.StatementRewrite) HeuristicIndexerManager(io.prestosql.heuristicindex.HeuristicIndexerManager) AccessControl(io.prestosql.security.AccessControl) GroupingOperation(io.prestosql.sql.tree.GroupingOperation) Metadata(io.prestosql.metadata.Metadata) NodeRef(io.prestosql.sql.tree.NodeRef) FunctionHandle(io.prestosql.spi.function.FunctionHandle) CubeManager(io.prestosql.cube.CubeManager) List(java.util.List) CANNOT_HAVE_AGGREGATIONS_WINDOWS_OR_GROUPING(io.prestosql.sql.analyzer.SemanticErrorCode.CANNOT_HAVE_AGGREGATIONS_WINDOWS_OR_GROUPING) ExpressionTreeUtils.extractExternalFunctions(io.prestosql.sql.analyzer.ExpressionTreeUtils.extractExternalFunctions) ExpressionTreeUtils.extractAggregateFunctions(io.prestosql.sql.analyzer.ExpressionTreeUtils.extractAggregateFunctions) Optional(java.util.Optional) FunctionAndTypeManager(io.prestosql.metadata.FunctionAndTypeManager) Expression(io.prestosql.sql.tree.Expression) Statement(io.prestosql.sql.tree.Statement)
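
The method expects an already-parsed Statement; a minimal sketch of a call site, assuming an Analyzer has been constructed with the session, metadata, parser and access-control collaborators listed above, and that sqlParser and parsingOptions instances are available.

// Hypothetical call site; 'analyzer', 'sqlParser' and 'parsingOptions' are assumed to exist.
Statement statement = sqlParser.createStatement("SELECT orderkey FROM orders", parsingOptions);
// isDescribe = false for an ordinary query; the rewritten statement is analyzed while the
// original statement is kept on the Analysis via setOriginalStatement.
Analysis analysis = analyzer.analyze(statement, false);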

Example 15 with Statement

use of io.prestosql.sql.tree.Statement in project hetu-core by openlookeng.

the class TreeAssertions method assertFormattedSql.

public static void assertFormattedSql(SqlParser sqlParser, ParsingOptions parsingOptions, Node expected) {
    String formatted = formatSql(expected, Optional.empty());
    // verify round-trip of formatting already-formatted SQL
    Statement actual = parseFormatted(sqlParser, parsingOptions, formatted, expected);
    assertEquals(formatSql(actual, Optional.empty()), formatted);
    // compare parsed tree with parsed tree of formatted SQL
    if (!actual.equals(expected)) {
        // simplify finding the non-equal part of the tree
        assertListEquals(linearizeTree(actual), linearizeTree(expected));
    }
    assertEquals(actual, expected);
}
Also used : Statement(io.prestosql.sql.tree.Statement)
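
A minimal sketch of using the round-trip assertion from a parser test; the query text is illustrative and the no-argument ParsingOptions constructor is an assumption.

// Hypothetical usage: parse once, then assert that formatting and re-parsing is a stable round trip.
SqlParser sqlParser = new SqlParser();
ParsingOptions parsingOptions = new ParsingOptions();
Statement expected = sqlParser.createStatement("SELECT a FROM t WHERE b = 1", parsingOptions);
assertFormattedSql(sqlParser, parsingOptions, expected);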

Aggregations

Statement (io.prestosql.sql.tree.Statement) 17
PrestoException (io.prestosql.spi.PrestoException) 5
List (java.util.List) 5
Session (io.prestosql.Session) 4
CubeManager (io.prestosql.cube.CubeManager) 4
WarningCollector (io.prestosql.execution.warnings.WarningCollector) 4
HeuristicIndexerManager (io.prestosql.heuristicindex.HeuristicIndexerManager) 4
Metadata (io.prestosql.metadata.Metadata) 4
AccessControl (io.prestosql.security.AccessControl) 4
SqlParser (io.prestosql.sql.parser.SqlParser) 4
Expression (io.prestosql.sql.tree.Expression) 4
Optional (java.util.Optional) 4
ImmutableList (com.google.common.collect.ImmutableList) 3
TransactionManager (io.prestosql.transaction.TransactionManager) 3
Inject (javax.inject.Inject) 3
Futures.immediateFuture (com.google.common.util.concurrent.Futures.immediateFuture) 2
ListenableFuture (com.google.common.util.concurrent.ListenableFuture) 2
MetadataUtil.createQualifiedObjectName (io.prestosql.metadata.MetadataUtil.createQualifiedObjectName) 2
QualifiedObjectName (io.prestosql.spi.connector.QualifiedObjectName) 2
TableHandle (io.prestosql.spi.metadata.TableHandle) 2