Search in sources :

Example 76 with ParserException

use of com.alibaba.druid.sql.parser.ParserException in project canal by alibaba.

The class SqlParser, method parse.

/**
 * Parses a MySQL-dialect SELECT statement into a {@link SchemaItem} view object.
 *
 * @param sql the SELECT statement to parse
 * @return the populated SchemaItem describing the referenced tables and select fields
 * @throws ParserException if the sql cannot be parsed, or parses to no tables / no fields
 */
public static SchemaItem parse(String sql) {
    try {
        SQLStatementParser parser = new MySqlStatementParser(sql);
        SQLSelectStatement statement = (SQLSelectStatement) parser.parseStatement();
        MySqlSelectQueryBlock sqlSelectQueryBlock = (MySqlSelectQueryBlock) statement.getSelect().getQuery();
        SchemaItem schemaItem = new SchemaItem();
        // Re-render the query so equivalent statements get a normalized sql string.
        schemaItem.setSql(SQLUtils.toMySqlString(sqlSelectQueryBlock));
        SQLTableSource sqlTableSource = sqlSelectQueryBlock.getFrom();
        List<TableItem> tableItems = new ArrayList<>();
        // Collect every table source (including joins) reachable from the FROM clause.
        SqlParser.visitSelectTable(schemaItem, sqlTableSource, tableItems, null);
        tableItems.forEach(tableItem -> schemaItem.getAliasTableItems().put(tableItem.getAlias(), tableItem));
        List<FieldItem> fieldItems = collectSelectQueryFields(sqlSelectQueryBlock);
        fieldItems.forEach(fieldItem -> schemaItem.getSelectFields().put(fieldItem.getFieldName(), fieldItem));
        schemaItem.init();
        // A usable view must reference at least one table and one select field.
        if (schemaItem.getAliasTableItems().isEmpty() || schemaItem.getSelectFields().isEmpty()) {
            throw new ParserException("Parse sql error, sql: " + sql);
        }
        return schemaItem;
    } catch (Exception e) {
        // Preserve the root cause and the offending sql instead of throwing a
        // bare ParserException that discards all diagnostic information.
        throw new ParserException("Parse sql error, sql: " + sql, e);
    }
}
Also used : ParserException(com.alibaba.druid.sql.parser.ParserException) SQLStatementParser(com.alibaba.druid.sql.parser.SQLStatementParser) TableItem(com.alibaba.otter.canal.client.adapter.es.core.config.SchemaItem.TableItem) ArrayList(java.util.ArrayList) MySqlSelectQueryBlock(com.alibaba.druid.sql.dialect.mysql.ast.statement.MySqlSelectQueryBlock) SQLTableSource(com.alibaba.druid.sql.ast.statement.SQLTableSource) ParserException(com.alibaba.druid.sql.parser.ParserException) SQLSelectStatement(com.alibaba.druid.sql.ast.statement.SQLSelectStatement) FieldItem(com.alibaba.otter.canal.client.adapter.es.core.config.SchemaItem.FieldItem) MySqlStatementParser(com.alibaba.druid.sql.dialect.mysql.parser.MySqlStatementParser)

Example 77 with ParserException

use of com.alibaba.druid.sql.parser.ParserException in project druid by alibaba.

The class PGCommentTest, method test_1.

/**
 * Verifies that a "--" line comment swallowing the remainder of the statement
 * (leaving a dangling select list with no FROM) raises a ParserException.
 */
public void test_1() {
    String sql = "select a.id,--single line comment test a.name from a;";
    PGSQLStatementParser parser;
    try {
        parser = new PGSQLStatementParser(sql);
        parser.parseStatementList();
        // Without this the test passes vacuously when no exception is thrown.
        // fail() throws AssertionError (an Error), so it is not caught below.
        fail("expected ParserException for sql: " + sql);
    } catch (Exception e) {
        assertTrue(e instanceof ParserException);
    }
}
Also used : ParserException(com.alibaba.druid.sql.parser.ParserException) ParserException(com.alibaba.druid.sql.parser.ParserException) PGSQLStatementParser(com.alibaba.druid.sql.dialect.postgresql.parser.PGSQLStatementParser)

Example 78 with ParserException

use of com.alibaba.druid.sql.parser.ParserException in project druid by alibaba.

The class AntsparkCreateTableParser, method parseCreateTable.

/**
 * Parses an AntSpark CREATE TABLE statement, including the Hive/Spark-style
 * clauses visible below: EXTERNAL/TEMPORARY modifiers, IF NOT EXISTS, the
 * column/constraint list, USING, COMMENT, MAPPED BY, PARTITIONED BY,
 * CLUSTERED BY, SKEWED BY ... ON, SORTED BY, ROW FORMAT, bucket count,
 * STORED AS, LOCATION, TBLPROPERTIES, META LIFECYCLE, AS SELECT and LIKE.
 *
 * @param acceptCreate whether the CREATE keyword still has to be consumed
 *                     (false when the caller has already consumed it)
 * @return the populated AntsparkCreateTableStatement AST node
 * @throws ParserException on unsupported or malformed syntax
 */
public SQLCreateTableStatement parseCreateTable(boolean acceptCreate) {
    AntsparkCreateTableStatement stmt = new AntsparkCreateTableStatement();
    if (acceptCreate) {
        // Attach any comments preceding CREATE to the statement when the lexer keeps them.
        if (lexer.hasComment() && lexer.isKeepComments()) {
            stmt.addBeforeComment(lexer.readAndResetComments());
        }
        accept(Token.CREATE);
    }
    // Optional EXTERNAL / TEMPORARY modifiers before TABLE.
    if (lexer.identifierEquals(FnvHash.Constants.EXTERNAL)) {
        lexer.nextToken();
        stmt.setExternal(true);
    }
    if (lexer.identifierEquals(FnvHash.Constants.TEMPORARY)) {
        lexer.nextToken();
        stmt.setType(SQLCreateTableStatement.Type.TEMPORARY);
    }
    accept(Token.TABLE);
    // IF NOT EXISTS clause (IF may arrive as a keyword token or a plain identifier).
    if (lexer.token() == Token.IF || lexer.identifierEquals(FnvHash.Constants.IF)) {
        lexer.nextToken();
        accept(Token.NOT);
        accept(Token.EXISTS);
        stmt.setIfNotExiists(true);
    }
    stmt.setName(this.exprParser.name());
    // Optional parenthesized element list: column definitions and table-level constraints.
    if (lexer.token() == Token.LPAREN) {
        lexer.nextToken();
        for (; ; ) {
            Token token = lexer.token();
            if (// 
            token == Token.IDENTIFIER || token == Token.LITERAL_ALIAS) {
                SQLColumnDefinition column = this.exprParser.parseColumn();
                stmt.getTableElementList().add(column);
            } else if (// 
            token == Token.PRIMARY || // 
            token == Token.UNIQUE || // 
            token == Token.CHECK || token == Token.CONSTRAINT || token == Token.FOREIGN) {
                SQLConstraint constraint = this.exprParser.parseConstaint();
                constraint.setParent(stmt);
                stmt.getTableElementList().add((SQLTableElement) constraint);
            } else if (token == Token.TABLESPACE) {
                // TABLESPACE inside the element list is not supported yet.
                throw new ParserException("TODO " + lexer.info());
            } else {
                // Fallback: treat anything else as a column definition.
                SQLColumnDefinition column = this.exprParser.parseColumn();
                stmt.getTableElementList().add(column);
            }
            if (lexer.token() == Token.COMMA) {
                lexer.nextToken();
                // Tolerate a trailing comma before the closing paren.
                if (lexer.token() == Token.RPAREN) {
                    // compatible for sql server
                    break;
                }
                continue;
            }
            break;
        }
        accept(Token.RPAREN);
    }
    // add using
    if (lexer.token() == Token.USING) {
        lexer.nextToken();
        SQLName expr = this.exprParser.name();
        stmt.setDatasource(expr);
    }
    // Table-level COMMENT clause.
    if (lexer.token() == Token.COMMENT) {
        lexer.nextToken();
        SQLExpr comment = this.exprParser.expr();
        stmt.setComment(comment);
    }
    // MAPPED BY (...) clause.
    if (lexer.identifierEquals(FnvHash.Constants.MAPPED)) {
        lexer.nextToken();
        accept(Token.BY);
        this.exprParser.parseAssignItem(stmt.getMappedBy(), stmt);
    }
    // PARTITIONED BY (col type, ...) clause; partition columns must be identifiers.
    if (lexer.token() == Token.PARTITIONED) {
        lexer.nextToken();
        accept(Token.BY);
        accept(Token.LPAREN);
        for (; ; ) {
            if (lexer.token() != Token.IDENTIFIER) {
                throw new ParserException("expect identifier. " + lexer.info());
            }
            SQLColumnDefinition column = this.exprParser.parseColumn();
            stmt.addPartitionColumn(column);
            // Comments after a partition column (or its comma) attach to that column.
            if (lexer.isKeepComments() && lexer.hasComment()) {
                column.addAfterComment(lexer.readAndResetComments());
            }
            if (lexer.token() != Token.COMMA) {
                break;
            } else {
                lexer.nextToken();
                if (lexer.isKeepComments() && lexer.hasComment()) {
                    column.addAfterComment(lexer.readAndResetComments());
                }
            }
        }
        accept(Token.RPAREN);
    }
    // CLUSTERED BY (item, ...) clause.
    if (lexer.identifierEquals(FnvHash.Constants.CLUSTERED)) {
        lexer.nextToken();
        accept(Token.BY);
        accept(Token.LPAREN);
        for (; ; ) {
            SQLSelectOrderByItem item = this.exprParser.parseSelectOrderByItem();
            stmt.addClusteredByItem(item);
            if (lexer.token() == Token.COMMA) {
                lexer.nextToken();
                continue;
            }
            break;
        }
        accept(Token.RPAREN);
    }
    // SKEWED BY (exprs) ON ((v, ...), ...) clause; each ON entry is either a
    // parenthesized value list or a single expression.
    if (lexer.identifierEquals(FnvHash.Constants.SKEWED)) {
        lexer.nextToken();
        accept(Token.BY);
        accept(Token.LPAREN);
        this.exprParser.exprList(stmt.getSkewedBy(), stmt);
        accept(Token.RPAREN);
        accept(Token.ON);
        accept(Token.LPAREN);
        for (; ; ) {
            if (lexer.token() == Token.LPAREN) {
                SQLListExpr list = new SQLListExpr();
                lexer.nextToken();
                this.exprParser.exprList(list.getItems(), list);
                accept(Token.RPAREN);
                stmt.addSkewedByOn(list);
            } else {
                SQLExpr expr = this.exprParser.expr();
                stmt.addSkewedByOn(expr);
            }
            if (lexer.token() == Token.COMMA) {
                lexer.nextToken();
                continue;
            }
            break;
        }
        accept(Token.RPAREN);
    }
    if (lexer.identifierEquals(FnvHash.Constants.SORTED)) {
        parseSortedBy(stmt);
    }
    if (lexer.token() == Token.ROW || lexer.identifierEquals(FnvHash.Constants.ROW)) {
        parseRowFormat(stmt);
    }
    // SORTED BY / ROW FORMAT are each probed twice to tolerate either clause order.
    if (lexer.identifierEquals(FnvHash.Constants.SORTED)) {
        parseSortedBy(stmt);
    }
    // When buckets apply (clustered or sorted), INTO <n> BUCKETS is mandatory.
    if (stmt.getClusteredBy().size() > 0 || stmt.getSortedBy().size() > 0) {
        accept(Token.INTO);
        if (lexer.token() == Token.LITERAL_INT) {
            stmt.setBuckets(lexer.integerValue().intValue());
            lexer.nextToken();
        } else {
            throw new ParserException("into buckets must be integer. " + lexer.info());
        }
        acceptIdentifier("BUCKETS");
    }
    if (lexer.token() == Token.ROW || lexer.identifierEquals(FnvHash.Constants.ROW)) {
        parseRowFormat(stmt);
    }
    // STORED AS: either an INPUTFORMAT/OUTPUTFORMAT pair or a single format name.
    if (lexer.identifierEquals(FnvHash.Constants.STORED)) {
        lexer.nextToken();
        accept(Token.AS);
        if (lexer.identifierEquals(FnvHash.Constants.INPUTFORMAT)) {
            HiveInputOutputFormat format = new HiveInputOutputFormat();
            lexer.nextToken();
            format.setInput(this.exprParser.primary());
            if (lexer.identifierEquals(FnvHash.Constants.OUTPUTFORMAT)) {
                lexer.nextToken();
                format.setOutput(this.exprParser.primary());
            }
            stmt.setStoredAs(format);
        } else {
            SQLName name = this.exprParser.name();
            stmt.setStoredAs(name);
        }
    }
    // LOCATION clause.
    if (lexer.identifierEquals(FnvHash.Constants.LOCATION)) {
        lexer.nextToken();
        SQLExpr location = this.exprParser.expr();
        stmt.setLocation(location);
    }
    // TBLPROPERTIES (name = value, ...) clause.
    if (lexer.identifierEquals(FnvHash.Constants.TBLPROPERTIES)) {
        lexer.nextToken();
        accept(Token.LPAREN);
        for (; ; ) {
            String name = lexer.stringVal();
            lexer.nextToken();
            accept(Token.EQ);
            SQLExpr value = this.exprParser.primary();
            stmt.addOption(name, value);
            if (lexer.token() == Token.COMMA) {
                lexer.nextToken();
                // Tolerate a trailing comma before the closing paren.
                if (lexer.token() == Token.RPAREN) {
                    break;
                }
                continue;
            }
            break;
        }
        accept(Token.RPAREN);
    }
    // META LIFECYCLE <n> clause.
    if (lexer.identifierEquals(FnvHash.Constants.META)) {
        lexer.nextToken();
        acceptIdentifier("LIFECYCLE");
        stmt.setMetaLifeCycle(this.exprParser.primary());
    }
    // CTAS: CREATE TABLE ... AS SELECT.
    if (lexer.token() == Token.AS) {
        lexer.nextToken();
        SQLSelect select = this.createSQLSelectParser().select();
        stmt.setSelect(select);
    }
    // CREATE TABLE ... LIKE <name>.
    if (lexer.token() == Token.LIKE) {
        lexer.nextToken();
        SQLName name = this.exprParser.name();
        stmt.setLike(name);
    }
    // A trailing COMMENT is also accepted after AS/LIKE.
    if (lexer.token() == Token.COMMENT) {
        lexer.nextToken();
        SQLExpr comment = this.exprParser.expr();
        stmt.setComment(comment);
    }
    return stmt;
}
Also used : ParserException(com.alibaba.druid.sql.parser.ParserException) SQLConstraint(com.alibaba.druid.sql.ast.statement.SQLConstraint) SQLSelect(com.alibaba.druid.sql.ast.statement.SQLSelect) SQLName(com.alibaba.druid.sql.ast.SQLName) Token(com.alibaba.druid.sql.parser.Token) SQLTableElement(com.alibaba.druid.sql.ast.statement.SQLTableElement) SQLColumnDefinition(com.alibaba.druid.sql.ast.statement.SQLColumnDefinition) SQLExpr(com.alibaba.druid.sql.ast.SQLExpr) HiveInputOutputFormat(com.alibaba.druid.sql.dialect.hive.ast.HiveInputOutputFormat) AntsparkCreateTableStatement(com.alibaba.druid.sql.dialect.antspark.ast.AntsparkCreateTableStatement) SQLSelectOrderByItem(com.alibaba.druid.sql.ast.statement.SQLSelectOrderByItem) SQLListExpr(com.alibaba.druid.sql.ast.expr.SQLListExpr)

Example 79 with ParserException

use of com.alibaba.druid.sql.parser.ParserException in project druid by alibaba.

The class BlinkCreateTableParser, method parseCreateTable.

/**
 * Parses a Blink CREATE TABLE statement: optional EXTERNAL modifier,
 * IF NOT EXISTS, a mandatory column/constraint list (which may end with a
 * PERIOD FOR clause), optional COMMENT, bucket count and WITH (...) options.
 *
 * @param acceptCreate whether the CREATE keyword still has to be consumed
 *                     (false when the caller has already consumed it)
 * @return the populated BlinkCreateTableStatement AST node
 * @throws ParserException on malformed syntax
 */
public SQLCreateTableStatement parseCreateTable(boolean acceptCreate) {
    BlinkCreateTableStatement stmt = new BlinkCreateTableStatement();
    if (acceptCreate) {
        accept(Token.CREATE);
    }
    // Optional EXTERNAL modifier before TABLE.
    if (lexer.identifierEquals(FnvHash.Constants.EXTERNAL)) {
        lexer.nextToken();
        stmt.setExternal(true);
    }
    accept(Token.TABLE);
    // IF NOT EXISTS clause (IF may arrive as a keyword token or a plain identifier).
    if (lexer.token() == Token.IF || lexer.identifierEquals("IF")) {
        lexer.nextToken();
        accept(Token.NOT);
        accept(Token.EXISTS);
        stmt.setIfNotExiists(true);
    }
    stmt.setName(this.exprParser.name());
    // The element list is mandatory in Blink DDL.
    accept(Token.LPAREN);
    if (lexer.isKeepComments() && lexer.hasComment()) {
        stmt.addBodyBeforeComment(lexer.readAndResetComments());
    }
    // Labeled loop so the PERIOD FOR branch can exit the element list directly.
    for_: for (; ; ) {
        SQLColumnDefinition column = null;
        switch(lexer.token()) {
            case IDENTIFIER:
            case KEY:
                column = this.exprParser.parseColumn();
                column.setParent(stmt);
                stmt.getTableElementList().add(column);
                break;
            case PRIMARY:
                SQLTableConstraint constraint = this.parseConstraint();
                constraint.setParent(stmt);
                stmt.getTableElementList().add(constraint);
                break;
            case PERIOD:
                // PERIOD FOR <expr> must be the last element in the list.
                lexer.nextToken();
                accept(Token.FOR);
                SQLExpr periodFor = this.exprParser.primary();
                stmt.setPeriodFor(periodFor);
                break for_;
            default:
                throw new ParserException("expect identifier. " + lexer.info());
        }
        // Comments after a column (or its comma) attach to that column.
        if (lexer.isKeepComments() && lexer.hasComment() && column != null) {
            column.addAfterComment(lexer.readAndResetComments());
        }
        if (!(lexer.token() == (Token.COMMA))) {
            break;
        } else {
            lexer.nextToken();
            if (lexer.isKeepComments() && lexer.hasComment() && column != null) {
                column.addAfterComment(lexer.readAndResetComments());
            }
        }
    }
    accept(Token.RPAREN);
    // Table-level COMMENT clause.
    if (lexer.token() == Token.COMMENT) {
        lexer.nextToken();
        stmt.setComment(this.exprParser.primary());
    }
    // NOTE(review): nothing in this method populates clusteredBy/sortedBy, so
    // this branch looks unreachable here — confirm whether a superclass or a
    // later extension fills those lists before removing it.
    if (stmt.getClusteredBy().size() > 0 || stmt.getSortedBy().size() > 0) {
        accept(Token.INTO);
        if (lexer.token() == Token.LITERAL_INT) {
            stmt.setBuckets(lexer.integerValue().intValue());
            lexer.nextToken();
        } else {
            throw new ParserException("into buckets must be integer. " + lexer.info());
        }
        acceptIdentifier("BUCKETS");
    }
    // WITH (key = value, ...) table options.
    if (lexer.token() == Token.WITH) {
        lexer.nextToken();
        accept(Token.LPAREN);
        parseAssignItems(stmt.getTableOptions(), stmt, true);
        accept(Token.RPAREN);
    }
    return stmt;
}
Also used : ParserException(com.alibaba.druid.sql.parser.ParserException) BlinkCreateTableStatement(com.alibaba.druid.sql.dialect.blink.ast.BlinkCreateTableStatement) SQLExpr(com.alibaba.druid.sql.ast.SQLExpr)

Example 80 with ParserException

use of com.alibaba.druid.sql.parser.ParserException in project druid by alibaba.

The class DB2CreateTableParser, method parseCreateTable.

/**
 * Parses a DB2 CREATE TABLE statement: GLOBAL/LOCAL TEMPORARY modifiers, the
 * column/constraint list, optional INHERITS, AS SELECT, and the trailing DB2
 * options (DATA CAPTURE, IN database/tablespace, PARTITIONING KEY, VALIDPROC,
 * COMPRESS, INDEX IN).
 *
 * @param acceptCreate whether the CREATE keyword still has to be consumed
 *                     (false when the caller has already consumed it)
 * @return the populated DB2CreateTableStatement AST node
 * @throws ParserException on unsupported or malformed syntax
 */
public SQLCreateTableStatement parseCreateTable(boolean acceptCreate) {
    DB2CreateTableStatement createTable = newCreateStatement();
    if (acceptCreate) {
        // Attach any comments preceding CREATE to the statement when the lexer keeps them.
        if (lexer.hasComment() && lexer.isKeepComments()) {
            createTable.addBeforeComment(lexer.readAndResetComments());
        }
        accept(Token.CREATE);
    }
    // GLOBAL TEMPORARY / LOCAL TEMPORARY modifiers.
    if (lexer.identifierEquals("GLOBAL")) {
        lexer.nextToken();
        if (lexer.identifierEquals("TEMPORARY")) {
            lexer.nextToken();
            createTable.setType(SQLCreateTableStatement.Type.GLOBAL_TEMPORARY);
        } else {
            throw new ParserException("syntax error " + lexer.info());
        }
    } else if (lexer.token() == Token.IDENTIFIER && lexer.stringVal().equalsIgnoreCase("LOCAL")) {
        lexer.nextToken();
        // BUG FIX: was equalsIgnoreCase("TEMPORAY") — the typo made
        // "CREATE LOCAL TEMPORARY TABLE" unparseable. Accept the correct
        // spelling, and keep the old misspelling for backward compatibility.
        if (lexer.token() == Token.IDENTIFIER
                && (lexer.stringVal().equalsIgnoreCase("TEMPORARY") || lexer.stringVal().equalsIgnoreCase("TEMPORAY"))) {
            lexer.nextToken();
            createTable.setType(SQLCreateTableStatement.Type.LOCAL_TEMPORARY);
        } else {
            throw new ParserException("syntax error. " + lexer.info());
        }
    }
    accept(Token.TABLE);
    createTable.setName(this.exprParser.name());
    // Optional parenthesized element list: column definitions and table-level constraints.
    if (lexer.token() == Token.LPAREN) {
        lexer.nextToken();
        for (; ; ) {
            Token token = lexer.token();
            if (// 
            token == Token.IDENTIFIER || token == Token.LITERAL_ALIAS) {
                SQLColumnDefinition column = this.exprParser.parseColumn();
                createTable.getTableElementList().add(column);
            } else if (// 
            token == Token.PRIMARY || // 
            token == Token.UNIQUE || // 
            token == Token.CHECK || token == Token.CONSTRAINT || token == Token.FOREIGN) {
                SQLConstraint constraint = this.exprParser.parseConstaint();
                constraint.setParent(createTable);
                createTable.getTableElementList().add((SQLTableElement) constraint);
            } else if (token == Token.TABLESPACE) {
                // TABLESPACE inside the element list is not supported yet.
                throw new ParserException("TODO " + lexer.info());
            } else {
                // Fallback: treat anything else as a column definition.
                SQLColumnDefinition column = this.exprParser.parseColumn();
                createTable.getTableElementList().add(column);
            }
            if (lexer.token() == Token.COMMA) {
                lexer.nextToken();
                // Tolerate a trailing comma before the closing paren.
                if (lexer.token() == Token.RPAREN) {
                    // compatible for sql server
                    break;
                }
                continue;
            }
            break;
        }
        accept(Token.RPAREN);
        // INHERITS (<name>) clause.
        if (lexer.identifierEquals("INHERITS")) {
            lexer.nextToken();
            accept(Token.LPAREN);
            SQLName inherits = this.exprParser.name();
            createTable.setInherits(new SQLExprTableSource(inherits));
            accept(Token.RPAREN);
        }
    }
    // CTAS: CREATE TABLE ... AS SELECT.
    if (lexer.token() == Token.AS) {
        lexer.nextToken();
        SQLSelect select = this.createSQLSelectParser().select();
        createTable.setSelect(select);
    }
    // Trailing DB2 table options, in any order, until none matches.
    for (; ; ) {
        if (lexer.identifierEquals(FnvHash.Constants.DATA)) {
            lexer.nextToken();
            acceptIdentifier("CAPTURE");
            if (lexer.identifierEquals(FnvHash.Constants.NONE)) {
                lexer.nextToken();
                createTable.setDataCaptureNone(true);
                continue;
            }
            // DATA CAPTURE CHANGES etc. not supported yet.
            throw new ParserException("TODO " + lexer.info());
        } else if (lexer.token() == Token.IN) {
            lexer.nextToken();
            if (lexer.token() == Token.DATABASE) {
                lexer.nextToken();
                SQLName database = this.exprParser.name();
                createTable.setDatabase(database);
            } else if (lexer.identifierEquals("tablespace")) {
                // Explicit "IN tablespace <name>" form not supported yet.
                throw new ParserException("TODO " + lexer.info());
            } else {
                // Bare "IN <name>" names the tablespace.
                SQLName tablespace = this.exprParser.name();
                createTable.setTablespace(tablespace);
            }
            continue;
        } else if (lexer.identifierEquals(FnvHash.Constants.PARTITIONING)) {
            // PARTITIONING KEY (cols) USING HASHING clause.
            SQLPartitionByHash partitionBy = new SQLPartitionByHash();
            lexer.nextToken();
            accept(Token.KEY);
            accept(Token.LPAREN);
            this.exprParser.exprList(partitionBy.getColumns(), partitionBy);
            accept(Token.RPAREN);
            accept(Token.USING);
            acceptIdentifier("HASHING");
            createTable.setPartitioning(partitionBy);
            continue;
        } else if (lexer.identifierEquals(FnvHash.Constants.VALIDPROC)) {
            lexer.nextToken();
            SQLName validproc = this.exprParser.name();
            createTable.setValidproc(validproc);
            continue;
        } else if (lexer.identifierEquals(FnvHash.Constants.COMPRESS)) {
            lexer.nextToken();
            createTable.setCompress(true);
            // Optional YES after COMPRESS.
            if (lexer.identifierEquals(FnvHash.Constants.YES)) {
                lexer.nextToken();
            }
            continue;
        } else if (lexer.token() == Token.INDEX) {
            lexer.nextToken();
            accept(Token.IN);
            SQLName indexIn = this.exprParser.name();
            createTable.setIndexIn(indexIn);
            continue;
        }
        break;
    }
    return createTable;
}
Also used : DB2CreateTableStatement(com.alibaba.druid.sql.dialect.db2.ast.stmt.DB2CreateTableStatement) ParserException(com.alibaba.druid.sql.parser.ParserException) SQLName(com.alibaba.druid.sql.ast.SQLName) Token(com.alibaba.druid.sql.parser.Token) SQLPartitionByHash(com.alibaba.druid.sql.ast.SQLPartitionByHash)

Aggregations

ParserException (com.alibaba.druid.sql.parser.ParserException)98 SQLExpr (com.alibaba.druid.sql.ast.SQLExpr)25 SQLStatement (com.alibaba.druid.sql.ast.SQLStatement)18 SQLName (com.alibaba.druid.sql.ast.SQLName)16 MySqlStatementParser (com.alibaba.druid.sql.dialect.mysql.parser.MySqlStatementParser)12 SQLIdentifierExpr (com.alibaba.druid.sql.ast.expr.SQLIdentifierExpr)10 SQLSelectStatement (com.alibaba.druid.sql.ast.statement.SQLSelectStatement)9 Token (com.alibaba.druid.sql.parser.Token)9 SQLCommentHint (com.alibaba.druid.sql.ast.SQLCommentHint)5 SQLCharExpr (com.alibaba.druid.sql.ast.expr.SQLCharExpr)5 SQLIntegerExpr (com.alibaba.druid.sql.ast.expr.SQLIntegerExpr)5 SQLSelect (com.alibaba.druid.sql.ast.statement.SQLSelect)5 SQLOrderBy (com.alibaba.druid.sql.ast.SQLOrderBy)4 SQLBinaryOpExpr (com.alibaba.druid.sql.ast.expr.SQLBinaryOpExpr)4 SQLVariantRefExpr (com.alibaba.druid.sql.ast.expr.SQLVariantRefExpr)4 MySqlSelectQueryBlock (com.alibaba.druid.sql.dialect.mysql.ast.statement.MySqlSelectQueryBlock)4 OracleConstraint (com.alibaba.druid.sql.dialect.oracle.ast.stmt.OracleConstraint)4 DbType (com.alibaba.druid.DbType)3 SQLDeclareItem (com.alibaba.druid.sql.ast.SQLDeclareItem)3 SQLPartitionByHash (com.alibaba.druid.sql.ast.SQLPartitionByHash)3