Example 16 with Token

Use of com.alibaba.druid.sql.parser.Token in the alibaba/druid project.

From the class AntsparkCreateTableParser, the method parseCreateTable:

public SQLCreateTableStatement parseCreateTable(boolean acceptCreate) {
    AntsparkCreateTableStatement stmt = new AntsparkCreateTableStatement();
    if (acceptCreate) {
        if (lexer.hasComment() && lexer.isKeepComments()) {
            stmt.addBeforeComment(lexer.readAndResetComments());
        }
        accept(Token.CREATE);
    }
    if (lexer.identifierEquals(FnvHash.Constants.EXTERNAL)) {
        lexer.nextToken();
        stmt.setExternal(true);
    }
    if (lexer.identifierEquals(FnvHash.Constants.TEMPORARY)) {
        lexer.nextToken();
        stmt.setType(SQLCreateTableStatement.Type.TEMPORARY);
    }
    accept(Token.TABLE);
    if (lexer.token() == Token.IF || lexer.identifierEquals(FnvHash.Constants.IF)) {
        lexer.nextToken();
        accept(Token.NOT);
        accept(Token.EXISTS);
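        // "setIfNotExiists" (sic) is the method's actual, misspelled name in druid's SQLCreateTableStatement API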
        stmt.setIfNotExiists(true);
    }
    stmt.setName(this.exprParser.name());
    if (lexer.token() == Token.LPAREN) {
        lexer.nextToken();
        for (; ; ) {
            Token token = lexer.token();
            if (token == Token.IDENTIFIER || token == Token.LITERAL_ALIAS) {
                SQLColumnDefinition column = this.exprParser.parseColumn();
                stmt.getTableElementList().add(column);
            } else if (token == Token.PRIMARY
                    || token == Token.UNIQUE
                    || token == Token.CHECK
                    || token == Token.CONSTRAINT
                    || token == Token.FOREIGN) {
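                // parseConstaint (sic) matches the misspelled method name in druid's SQLExprParser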
                SQLConstraint constraint = this.exprParser.parseConstaint();
                constraint.setParent(stmt);
                stmt.getTableElementList().add((SQLTableElement) constraint);
            } else if (token == Token.TABLESPACE) {
                throw new ParserException("TODO " + lexer.info());
            } else {
                SQLColumnDefinition column = this.exprParser.parseColumn();
                stmt.getTableElementList().add(column);
            }
            if (lexer.token() == Token.COMMA) {
                lexer.nextToken();
                if (lexer.token() == Token.RPAREN) {
                    // compatible with SQL Server: tolerate a trailing comma before ')'
                    break;
                }
                continue;
            }
            break;
        }
        accept(Token.RPAREN);
    }
    // add using
    if (lexer.token() == Token.USING) {
        lexer.nextToken();
        SQLName expr = this.exprParser.name();
        stmt.setDatasource(expr);
    }
    if (lexer.token() == Token.COMMENT) {
        lexer.nextToken();
        SQLExpr comment = this.exprParser.expr();
        stmt.setComment(comment);
    }
    if (lexer.identifierEquals(FnvHash.Constants.MAPPED)) {
        lexer.nextToken();
        accept(Token.BY);
        this.exprParser.parseAssignItem(stmt.getMappedBy(), stmt);
    }
    if (lexer.token() == Token.PARTITIONED) {
        lexer.nextToken();
        accept(Token.BY);
        accept(Token.LPAREN);
        for (; ; ) {
            if (lexer.token() != Token.IDENTIFIER) {
                throw new ParserException("expect identifier. " + lexer.info());
            }
            SQLColumnDefinition column = this.exprParser.parseColumn();
            stmt.addPartitionColumn(column);
            if (lexer.isKeepComments() && lexer.hasComment()) {
                column.addAfterComment(lexer.readAndResetComments());
            }
            if (lexer.token() != Token.COMMA) {
                break;
            } else {
                lexer.nextToken();
                if (lexer.isKeepComments() && lexer.hasComment()) {
                    column.addAfterComment(lexer.readAndResetComments());
                }
            }
        }
        accept(Token.RPAREN);
    }
    if (lexer.identifierEquals(FnvHash.Constants.CLUSTERED)) {
        lexer.nextToken();
        accept(Token.BY);
        accept(Token.LPAREN);
        for (; ; ) {
            SQLSelectOrderByItem item = this.exprParser.parseSelectOrderByItem();
            stmt.addClusteredByItem(item);
            if (lexer.token() == Token.COMMA) {
                lexer.nextToken();
                continue;
            }
            break;
        }
        accept(Token.RPAREN);
    }
    if (lexer.identifierEquals(FnvHash.Constants.SKEWED)) {
        lexer.nextToken();
        accept(Token.BY);
        accept(Token.LPAREN);
        this.exprParser.exprList(stmt.getSkewedBy(), stmt);
        accept(Token.RPAREN);
        accept(Token.ON);
        accept(Token.LPAREN);
        for (; ; ) {
            if (lexer.token() == Token.LPAREN) {
                SQLListExpr list = new SQLListExpr();
                lexer.nextToken();
                this.exprParser.exprList(list.getItems(), list);
                accept(Token.RPAREN);
                stmt.addSkewedByOn(list);
            } else {
                SQLExpr expr = this.exprParser.expr();
                stmt.addSkewedByOn(expr);
            }
            if (lexer.token() == Token.COMMA) {
                lexer.nextToken();
                continue;
            }
            break;
        }
        accept(Token.RPAREN);
    }
    if (lexer.identifierEquals(FnvHash.Constants.SORTED)) {
        parseSortedBy(stmt);
    }
    if (lexer.token() == Token.ROW || lexer.identifierEquals(FnvHash.Constants.ROW)) {
        parseRowFormat(stmt);
    }
    if (lexer.identifierEquals(FnvHash.Constants.SORTED)) {
        parseSortedBy(stmt);
    }
    if (stmt.getClusteredBy().size() > 0 || stmt.getSortedBy().size() > 0) {
        accept(Token.INTO);
        if (lexer.token() == Token.LITERAL_INT) {
            stmt.setBuckets(lexer.integerValue().intValue());
            lexer.nextToken();
        } else {
            throw new ParserException("into buckets must be integer. " + lexer.info());
        }
        acceptIdentifier("BUCKETS");
    }
    if (lexer.token() == Token.ROW || lexer.identifierEquals(FnvHash.Constants.ROW)) {
        parseRowFormat(stmt);
    }
    if (lexer.identifierEquals(FnvHash.Constants.STORED)) {
        lexer.nextToken();
        accept(Token.AS);
        if (lexer.identifierEquals(FnvHash.Constants.INPUTFORMAT)) {
            HiveInputOutputFormat format = new HiveInputOutputFormat();
            lexer.nextToken();
            format.setInput(this.exprParser.primary());
            if (lexer.identifierEquals(FnvHash.Constants.OUTPUTFORMAT)) {
                lexer.nextToken();
                format.setOutput(this.exprParser.primary());
            }
            stmt.setStoredAs(format);
        } else {
            SQLName name = this.exprParser.name();
            stmt.setStoredAs(name);
        }
    }
    if (lexer.identifierEquals(FnvHash.Constants.LOCATION)) {
        lexer.nextToken();
        SQLExpr location = this.exprParser.expr();
        stmt.setLocation(location);
    }
    if (lexer.identifierEquals(FnvHash.Constants.TBLPROPERTIES)) {
        lexer.nextToken();
        accept(Token.LPAREN);
        for (; ; ) {
            String name = lexer.stringVal();
            lexer.nextToken();
            accept(Token.EQ);
            SQLExpr value = this.exprParser.primary();
            stmt.addOption(name, value);
            if (lexer.token() == Token.COMMA) {
                lexer.nextToken();
                if (lexer.token() == Token.RPAREN) {
                    break;
                }
                continue;
            }
            break;
        }
        accept(Token.RPAREN);
    }
    if (lexer.identifierEquals(FnvHash.Constants.META)) {
        lexer.nextToken();
        acceptIdentifier("LIFECYCLE");
        stmt.setMetaLifeCycle(this.exprParser.primary());
    }
    if (lexer.token() == Token.AS) {
        lexer.nextToken();
        SQLSelect select = this.createSQLSelectParser().select();
        stmt.setSelect(select);
    }
    if (lexer.token() == Token.LIKE) {
        lexer.nextToken();
        SQLName name = this.exprParser.name();
        stmt.setLike(name);
    }
    if (lexer.token() == Token.COMMENT) {
        lexer.nextToken();
        SQLExpr comment = this.exprParser.expr();
        stmt.setComment(comment);
    }
    return stmt;
}
Also used : ParserException(com.alibaba.druid.sql.parser.ParserException) SQLConstraint(com.alibaba.druid.sql.ast.statement.SQLConstraint) SQLSelect(com.alibaba.druid.sql.ast.statement.SQLSelect) SQLName(com.alibaba.druid.sql.ast.SQLName) Token(com.alibaba.druid.sql.parser.Token) SQLTableElement(com.alibaba.druid.sql.ast.statement.SQLTableElement) SQLColumnDefinition(com.alibaba.druid.sql.ast.statement.SQLColumnDefinition) SQLExpr(com.alibaba.druid.sql.ast.SQLExpr) HiveInputOutputFormat(com.alibaba.druid.sql.dialect.hive.ast.HiveInputOutputFormat) AntsparkCreateTableStatement(com.alibaba.druid.sql.dialect.antspark.ast.AntsparkCreateTableStatement) SQLSelectOrderByItem(com.alibaba.druid.sql.ast.statement.SQLSelectOrderByItem) SQLListExpr(com.alibaba.druid.sql.ast.expr.SQLListExpr)
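To see this parser in action, the usual route is druid's generic entry point rather than constructing AntsparkCreateTableParser directly. A minimal usage sketch, assuming your druid version exposes DbType.antspark; the DDL string is illustrative:

import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.SQLUtils;
import com.alibaba.druid.sql.ast.SQLStatement;

import java.util.List;

public class AntsparkCreateTableDemo {
    public static void main(String[] args) {
        // illustrative DDL that exercises the EXTERNAL, IF NOT EXISTS, and USING branches above
        String sql = "CREATE EXTERNAL TABLE IF NOT EXISTS t1 (id INT, name STRING) USING parquet";
        // SQLUtils dispatches to the dialect's statement parser, which calls parseCreateTable
        List<SQLStatement> statements = SQLUtils.parseStatements(sql, DbType.antspark);
        System.out.println(statements.get(0));
    }
}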

Example 17 with Token

Use of com.alibaba.druid.sql.parser.Token in the alibaba/druid project.

From the class DB2CreateTableParser, the method parseCreateTable:

public SQLCreateTableStatement parseCreateTable(boolean acceptCreate) {
    DB2CreateTableStatement createTable = newCreateStatement();
    if (acceptCreate) {
        if (lexer.hasComment() && lexer.isKeepComments()) {
            createTable.addBeforeComment(lexer.readAndResetComments());
        }
        accept(Token.CREATE);
    }
    if (lexer.identifierEquals("GLOBAL")) {
        lexer.nextToken();
        if (lexer.identifierEquals("TEMPORARY")) {
            lexer.nextToken();
            createTable.setType(SQLCreateTableStatement.Type.GLOBAL_TEMPORARY);
        } else {
            throw new ParserException("syntax error " + lexer.info());
        }
    } else if (lexer.token() == Token.IDENTIFIER && lexer.stringVal().equalsIgnoreCase("LOCAL")) {
        lexer.nextToken();
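        // "TEMPORAY" (sic) is kept as it appears in the source; presumably a typo for TEMPORARY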
        if (lexer.token() == Token.IDENTIFIER && lexer.stringVal().equalsIgnoreCase("TEMPORAY")) {
            lexer.nextToken();
            createTable.setType(SQLCreateTableStatement.Type.LOCAL_TEMPORARY);
        } else {
            throw new ParserException("syntax error. " + lexer.info());
        }
    }
    accept(Token.TABLE);
    createTable.setName(this.exprParser.name());
    if (lexer.token() == Token.LPAREN) {
        lexer.nextToken();
        for (; ; ) {
            Token token = lexer.token();
            if (token == Token.IDENTIFIER || token == Token.LITERAL_ALIAS) {
                SQLColumnDefinition column = this.exprParser.parseColumn();
                createTable.getTableElementList().add(column);
            } else if (token == Token.PRIMARY
                    || token == Token.UNIQUE
                    || token == Token.CHECK
                    || token == Token.CONSTRAINT
                    || token == Token.FOREIGN) {
                SQLConstraint constraint = this.exprParser.parseConstaint();
                constraint.setParent(createTable);
                createTable.getTableElementList().add((SQLTableElement) constraint);
            } else if (token == Token.TABLESPACE) {
                throw new ParserException("TODO " + lexer.info());
            } else {
                SQLColumnDefinition column = this.exprParser.parseColumn();
                createTable.getTableElementList().add(column);
            }
            if (lexer.token() == Token.COMMA) {
                lexer.nextToken();
                if (lexer.token() == Token.RPAREN) {
                    // compatible with SQL Server: tolerate a trailing comma before ')'
                    break;
                }
                continue;
            }
            break;
        }
        accept(Token.RPAREN);
        if (lexer.identifierEquals("INHERITS")) {
            lexer.nextToken();
            accept(Token.LPAREN);
            SQLName inherits = this.exprParser.name();
            createTable.setInherits(new SQLExprTableSource(inherits));
            accept(Token.RPAREN);
        }
    }
    if (lexer.token() == Token.AS) {
        lexer.nextToken();
        SQLSelect select = this.createSQLSelectParser().select();
        createTable.setSelect(select);
    }
    for (; ; ) {
        if (lexer.identifierEquals(FnvHash.Constants.DATA)) {
            lexer.nextToken();
            acceptIdentifier("CAPTURE");
            if (lexer.identifierEquals(FnvHash.Constants.NONE)) {
                lexer.nextToken();
                createTable.setDataCaptureNone(true);
                continue;
            }
            throw new ParserException("TODO " + lexer.info());
        } else if (lexer.token() == Token.IN) {
            lexer.nextToken();
            if (lexer.token() == Token.DATABASE) {
                lexer.nextToken();
                SQLName database = this.exprParser.name();
                createTable.setDatabase(database);
            } else if (lexer.identifierEquals("tablespace")) {
                throw new ParserException("TODO " + lexer.info());
            } else {
                SQLName tablespace = this.exprParser.name();
                createTable.setTablespace(tablespace);
            }
            continue;
        } else if (lexer.identifierEquals(FnvHash.Constants.PARTITIONING)) {
            SQLPartitionByHash partitionBy = new SQLPartitionByHash();
            lexer.nextToken();
            accept(Token.KEY);
            accept(Token.LPAREN);
            this.exprParser.exprList(partitionBy.getColumns(), partitionBy);
            accept(Token.RPAREN);
            accept(Token.USING);
            acceptIdentifier("HASHING");
            createTable.setPartitioning(partitionBy);
            continue;
        } else if (lexer.identifierEquals(FnvHash.Constants.VALIDPROC)) {
            lexer.nextToken();
            SQLName validproc = this.exprParser.name();
            createTable.setValidproc(validproc);
            continue;
        } else if (lexer.identifierEquals(FnvHash.Constants.COMPRESS)) {
            lexer.nextToken();
            createTable.setCompress(true);
            if (lexer.identifierEquals(FnvHash.Constants.YES)) {
                lexer.nextToken();
            }
            continue;
        } else if (lexer.token() == Token.INDEX) {
            lexer.nextToken();
            accept(Token.IN);
            SQLName indexIn = this.exprParser.name();
            createTable.setIndexIn(indexIn);
            continue;
        }
        break;
    }
    return createTable;
}
Also used : DB2CreateTableStatement(com.alibaba.druid.sql.dialect.db2.ast.stmt.DB2CreateTableStatement) ParserException(com.alibaba.druid.sql.parser.ParserException) SQLName(com.alibaba.druid.sql.ast.SQLName) Token(com.alibaba.druid.sql.parser.Token) SQLPartitionByHash(com.alibaba.druid.sql.ast.SQLPartitionByHash)
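The DB2 variant can be exercised the same way through SQLUtils. A minimal sketch; the DDL is illustrative and touches the GLOBAL TEMPORARY and COMPRESS branches above:

import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.SQLUtils;
import com.alibaba.druid.sql.ast.SQLStatement;

import java.util.List;

public class Db2CreateTableDemo {
    public static void main(String[] args) {
        // illustrative DDL; GLOBAL TEMPORARY sets the statement type, COMPRESS YES sets the flag
        String sql = "CREATE GLOBAL TEMPORARY TABLE session.t1 (id INT, name VARCHAR(32)) COMPRESS YES";
        List<SQLStatement> statements = SQLUtils.parseStatements(sql, DbType.db2);
        System.out.println(statements.get(0));
    }
}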

Example 18 with Token

Use of com.alibaba.druid.sql.parser.Token in the alibaba/druid project.

From the class Lexer, the method scanString:

protected void scanString() {
    mark = pos;
    boolean hasSpecial = false;
    Token preToken = this.token;
    for (; ; ) {
        if (isEOF()) {
            lexError("unclosed.str.lit");
            return;
        }
        ch = charAt(++pos);
        if (ch == '\'') {
            scanChar();
            if (ch != '\'') {
                token = LITERAL_CHARS;
                break;
            } else {
                if (!hasSpecial) {
                    initBuff(bufPos);
                    arraycopy(mark + 1, buf, 0, bufPos);
                    hasSpecial = true;
                }
                putChar('\'');
                continue;
            }
        }
        if (!hasSpecial) {
            bufPos++;
            continue;
        }
        if (bufPos == buf.length) {
            putChar(ch);
        } else {
            buf[bufPos++] = ch;
        }
    }
    if (!hasSpecial) {
        if (preToken == Token.AS) {
            stringVal = subString(mark, bufPos + 2);
        } else {
            stringVal = subString(mark + 1, bufPos);
        }
    } else {
        stringVal = new String(buf, 0, bufPos);
    }
}
Also used : Token(com.alibaba.druid.sql.parser.Token)
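The doubled-quote branch is what folds '' inside a literal into a single quote. A minimal sketch that drives the base Lexer directly (dialect lexers may override scanString); the input SQL is illustrative:

import com.alibaba.druid.sql.parser.Lexer;
import com.alibaba.druid.sql.parser.Token;

public class ScanStringDemo {
    public static void main(String[] args) {
        Lexer lexer = new Lexer("SELECT 'it''s a test'");
        for (lexer.nextToken(); lexer.token() != Token.EOF; lexer.nextToken()) {
            if (lexer.token() == Token.LITERAL_CHARS) {
                // scanString has folded the doubled quote, so this prints: it's a test
                System.out.println(lexer.stringVal());
            }
        }
    }
}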

Example 19 with Token

Use of com.alibaba.druid.sql.parser.Token in the alibaba/druid project.

From the class Lexer, the method parameterize:

public static String parameterize(String sql, DbType dbType) {
    Lexer lexer = SQLParserUtils.createLexer(sql, dbType);
    // enable the lexer's parameterization-friendly scanning mode
    lexer.optimizedForParameterized = true;
    lexer.nextToken();
    StringBuffer buf = new StringBuffer();
    for_: for (; ; ) {
        Token token = lexer.token;
        switch(token) {
            case LITERAL_ALIAS:
            case LITERAL_FLOAT:
            case LITERAL_CHARS:
            case LITERAL_INT:
            case LITERAL_NCHARS:
            case LITERAL_HEX:
            case VARIANT:
                if (buf.length() != 0) {
                    buf.append(' ');
                }
                buf.append('?');
                break;
            case COMMA:
                buf.append(',');
                break;
            case EQ:
                buf.append('=');
                break;
            case EOF:
                break for_;
            case ERROR:
                return sql;
            case SELECT:
                buf.append("SELECT");
                break;
            case UPDATE:
                buf.append("UPDATE");
                break;
            default:
                if (buf.length() != 0) {
                    buf.append(' ');
                }
                lexer.stringVal(buf);
                break;
        }
        lexer.nextToken();
    }
    return buf.toString();
}
Also used : MySqlLexer(com.alibaba.druid.sql.dialect.mysql.parser.MySqlLexer) Token(com.alibaba.druid.sql.parser.Token)
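Usage is a single static call: literals and bind variants collapse to '?', while keywords and punctuation survive. A short sketch with an illustrative input:

import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.parser.Lexer;

public class ParameterizeDemo {
    public static void main(String[] args) {
        String sql = "SELECT name FROM users WHERE id = 42 AND city = 'Hangzhou'";
        // LITERAL_INT and LITERAL_CHARS both take the '?' branch above
        System.out.println(Lexer.parameterize(sql, DbType.mysql));
        // expected shape: SELECT name FROM users WHERE id = ? AND city = ?
    }
}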

Example 20 with Token

Use of com.alibaba.druid.sql.parser.Token in the alibaba/druid project.

From the class Lexer, the method scanMultiLineComment:

private void scanMultiLineComment() {
    Token lastToken = this.token;
    int depth = 1;
    scanChar();
    scanChar();
    mark = pos;
    bufPos = 0;
    for (; ; ) {
        if (ch == '/' && charAt(pos + 1) == '*') {
            scanChar();
            scanChar();
            if (ch == '!' || ch == '+') {
                scanChar();
                ++depth;
            }
        }
        if (ch == '*' && charAt(pos + 1) == '/') {
            scanChar();
            scanChar();
            if (0 == --depth) {
                break;
            }
        }
        // multi-line comment terminator error (unterminated comment)
        if (ch == EOI) {
            throw new ParserException("unterminated /* comment. " + info());
        }
        scanChar();
        bufPos++;
    }
    stringVal = subString(mark, bufPos);
    token = Token.MULTI_LINE_COMMENT;
    commentCount++;
    if (keepComments) {
        addComment(stringVal);
    }
    if (commentHandler != null && commentHandler.handle(lastToken, stringVal)) {
        return;
    }
    if (!isAllowComment() && !isSafeComment(stringVal)) {
        throw new NotAllowCommentException();
    }
}
Also used : Token(com.alibaba.druid.sql.parser.Token)
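The depth counter lets MySQL-style /*! ... */ and /*+ ... */ openers nest inside an outer comment; a plain nested /* does not increase depth. A sketch of collecting kept comments; setKeepComments is assumed to be the setter paired with the isKeepComments() getter seen in the earlier examples:

import com.alibaba.druid.sql.parser.Lexer;
import com.alibaba.druid.sql.parser.Token;

public class MultiLineCommentDemo {
    public static void main(String[] args) {
        Lexer lexer = new Lexer("/* leading comment */ SELECT 1");
        lexer.setKeepComments(true); // assumed setter; pairs with isKeepComments() above
        for (lexer.nextToken(); lexer.token() != Token.EOF; lexer.nextToken()) {
            if (lexer.hasComment()) {
                // drains whatever scanMultiLineComment stored via addComment
                System.out.println(lexer.readAndResetComments());
            }
        }
    }
}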

Aggregations

Token (com.alibaba.druid.sql.parser.Token): 23 usages
ParserException (com.alibaba.druid.sql.parser.ParserException): 9 usages
SQLExpr (com.alibaba.druid.sql.ast.SQLExpr): 4 usages
NotAllowCommentException (com.alibaba.druid.sql.parser.NotAllowCommentException): 4 usages
SQLName (com.alibaba.druid.sql.ast.SQLName): 3 usages
SQLIdentifierExpr (com.alibaba.druid.sql.ast.expr.SQLIdentifierExpr): 2 usages
SQLUnaryExpr (com.alibaba.druid.sql.ast.expr.SQLUnaryExpr): 2 usages
SQLVariantRefExpr (com.alibaba.druid.sql.ast.expr.SQLVariantRefExpr): 2 usages
OracleLexer (com.alibaba.druid.sql.dialect.oracle.parser.OracleLexer): 2 usages
ArrayList (java.util.ArrayList): 2 usages
SQLPartitionByHash (com.alibaba.druid.sql.ast.SQLPartitionByHash): 1 usage
SQLStatement (com.alibaba.druid.sql.ast.SQLStatement): 1 usage
SQLBinaryOpExpr (com.alibaba.druid.sql.ast.expr.SQLBinaryOpExpr): 1 usage
SQLCharExpr (com.alibaba.druid.sql.ast.expr.SQLCharExpr): 1 usage
SQLIntegerExpr (com.alibaba.druid.sql.ast.expr.SQLIntegerExpr): 1 usage
SQLListExpr (com.alibaba.druid.sql.ast.expr.SQLListExpr): 1 usage
SQLNumberExpr (com.alibaba.druid.sql.ast.expr.SQLNumberExpr): 1 usage
SQLColumnDefinition (com.alibaba.druid.sql.ast.statement.SQLColumnDefinition): 1 usage
SQLConstraint (com.alibaba.druid.sql.ast.statement.SQLConstraint): 1 usage
SQLForeignKeyConstraint (com.alibaba.druid.sql.ast.statement.SQLForeignKeyConstraint): 1 usage