Search in sources :

Example 1 with HiveInputOutputFormat

use of com.alibaba.druid.sql.dialect.hive.ast.HiveInputOutputFormat in project druid by alibaba.

From the class OdpsCreateTableParser, method parseCreateTable:

/**
 * Parses an ODPS {@code CREATE TABLE} statement.
 * <p>
 * Grammar handled (loosely): CREATE [EXTERNAL] TABLE [IF NOT EXISTS] name
 * [COMMENT ...] [TBLPROPERTIES | LIFECYCLE | STORED AS ...]*
 * [LIKE name | AS select | (column-list)]
 * followed by an unordered sequence of table options (COMMENT, PARTITIONED BY,
 * CLUSTERED/SORTED BY, ROW FORMAT, INTO n BUCKETS/SHARDS, STORED AS/BY,
 * WITH SERDEPROPERTIES, TBLPROPERTIES, LOCATION, USING, LIFECYCLE).
 *
 * @param acceptCreate whether the CREATE keyword still needs to be consumed
 * @return the parsed statement; may be returned early (header only) when the
 *         statement ends right after the name/comment (';' or EOF)
 * @throws ParserException on an unexpected token in the column or option lists
 */
public SQLCreateTableStatement parseCreateTable(boolean acceptCreate) {
    OdpsCreateTableStatement stmt = new OdpsCreateTableStatement();
    if (acceptCreate) {
        accept(Token.CREATE);
    }
    if (lexer.identifierEquals(FnvHash.Constants.EXTERNAL)) {
        lexer.nextToken();
        stmt.setExternal(true);
    }
    accept(Token.TABLE);
    // IF NOT EXISTS; IF may arrive either as a keyword token or as a plain
    // identifier depending on the lexer configuration. Use the hashed
    // constant for consistency with the sibling dialect parsers.
    if (lexer.token() == Token.IF || lexer.identifierEquals(FnvHash.Constants.IF)) {
        lexer.nextToken();
        accept(Token.NOT);
        accept(Token.EXISTS);
        stmt.setIfNotExiists(true);
    }
    stmt.setName(this.exprParser.name());
    if (lexer.token() == Token.COMMENT) {
        lexer.nextToken();
        stmt.setComment(this.exprParser.primary());
    }
    // A bare "CREATE TABLE name [COMMENT ...]" is a complete statement.
    if (lexer.token() == Token.SEMI || lexer.token() == Token.EOF) {
        return stmt;
    }
    // Options that may legally appear BEFORE the column list / LIKE / AS.
    for (; ; ) {
        if (lexer.identifierEquals(FnvHash.Constants.TBLPROPERTIES)) {
            parseTblProperties(stmt);
            continue;
        }
        if (lexer.identifierEquals(FnvHash.Constants.LIFECYCLE)) {
            lexer.nextToken();
            stmt.setLifecycle(this.exprParser.expr());
            continue;
        }
        if (lexer.identifierEquals(FnvHash.Constants.STORED)) {
            lexer.nextToken();
            accept(Token.AS);
            if (lexer.identifierEquals(FnvHash.Constants.INPUTFORMAT)) {
                // STORED AS INPUTFORMAT 'in' [OUTPUTFORMAT 'out']
                HiveInputOutputFormat format = new HiveInputOutputFormat();
                lexer.nextToken();
                format.setInput(this.exprParser.primary());
                if (lexer.identifierEquals(FnvHash.Constants.OUTPUTFORMAT)) {
                    lexer.nextToken();
                    format.setOutput(this.exprParser.primary());
                }
                stmt.setStoredAs(format);
            } else {
                // STORED AS <format-name>, e.g. ORC, PARQUET
                SQLName name = this.exprParser.name();
                stmt.setStoredAs(name);
            }
            continue;
        }
        break;
    }
    if (lexer.token() == Token.LIKE) {
        lexer.nextToken();
        SQLName name = this.exprParser.name();
        stmt.setLike(name);
    } else if (lexer.token() == Token.AS) {
        // CREATE TABLE ... AS SELECT ...
        lexer.nextToken();
        OdpsSelectParser selectParser = new OdpsSelectParser(this.exprParser);
        SQLSelect select = selectParser.select();
        stmt.setSelect(select);
    } else if (lexer.token() != Token.LPAREN && stmt.isExternal()) {
        // External tables may omit the column list entirely.
        // skip
    } else {
        // Explicit column definition list: ( col1 type ..., col2 type ... )
        accept(Token.LPAREN);
        if (lexer.isKeepComments() && lexer.hasComment()) {
            stmt.addBodyBeforeComment(lexer.readAndResetComments());
        }
        for (; ; ) {
            SQLColumnDefinition column;
            // Many SQL keywords are legal ODPS column names; whitelist them
            // here so parseColumn can treat them as identifiers.
            switch(lexer.token()) {
                case IDENTIFIER:
                case KEY:
                case SEQUENCE:
                case USER:
                case GROUP:
                case INDEX:
                case ENABLE:
                case DISABLE:
                case DESC:
                case ALL:
                case INTERVAL:
                case OPEN:
                case PARTITION:
                case SCHEMA:
                case CONSTRAINT:
                case COMMENT:
                case VIEW:
                case SHOW:
                case ORDER:
                case LEAVE:
                case UNIQUE:
                case DEFAULT:
                case EXPLAIN:
                case CHECK:
                case CLOSE:
                case IN:
                case OUT:
                case INOUT:
                case LIMIT:
                case FULL:
                case MINUS:
                case VALUES:
                case TRIGGER:
                case USE:
                case LIKE:
                case DISTRIBUTE:
                case DELETE:
                case UPDATE:
                case IS:
                case LEFT:
                case RIGHT:
                case REPEAT:
                case COMPUTE:
                case LOCK:
                case TABLE:
                case DO:
                case WHILE:
                case LOOP:
                case FOR:
                case RLIKE:
                case PROCEDURE:
                case GRANT:
                case EXCEPT:
                case CREATE:
                case PARTITIONED:
                case UNION:
                case PRIMARY:
                case INNER:
                case TO:
                case DECLARE:
                case REFERENCES:
                case FOREIGN:
                case ESCAPE:
                case BY:
                case ALTER:
                case SOME:
                case ASC:
                case NULL:
                case CURSOR:
                case FETCH:
                case OVER:
                case DATABASE:
                    column = this.exprParser.parseColumn(stmt);
                    break;
                default:
                    throw new ParserException("expect identifier. " + lexer.info());
            }
            stmt.getTableElementList().add(column);
            // Attach trailing comments (before and after the comma) to the
            // column they follow so round-tripping preserves them.
            if (lexer.isKeepComments() && lexer.hasComment()) {
                column.addAfterComment(lexer.readAndResetComments());
            }
            if (!(lexer.token() == (Token.COMMA))) {
                break;
            } else {
                lexer.nextToken();
                if (lexer.isKeepComments() && lexer.hasComment()) {
                    column.addAfterComment(lexer.readAndResetComments());
                }
            }
        }
        accept(Token.RPAREN);
    }
    // Unordered table-option loop: each clause consumes its tokens and
    // continues; an unrecognized token breaks out.
    for (; ; ) {
        if (lexer.token() == Token.COMMENT) {
            lexer.nextToken();
            stmt.setComment(this.exprParser.primary());
            continue;
        }
        if (lexer.token() == Token.PARTITIONED) {
            lexer.nextToken();
            accept(Token.BY);
            accept(Token.LPAREN);
            for (; ; ) {
                // Same keyword-as-identifier allowance (smaller whitelist)
                // for partition column names.
                switch(lexer.token()) {
                    case INDEX:
                    case KEY:
                    case CHECK:
                    case IDENTIFIER:
                    case GROUP:
                    case INTERVAL:
                    case LOOP:
                    case USER:
                    case TABLE:
                    case PARTITION:
                    case SEQUENCE:
                        break;
                    default:
                        throw new ParserException("expect identifier. " + lexer.info());
                }
                SQLColumnDefinition column = this.exprParser.parseColumn();
                stmt.addPartitionColumn(column);
                if (lexer.isKeepComments() && lexer.hasComment()) {
                    column.addAfterComment(lexer.readAndResetComments());
                }
                if (lexer.token() != Token.COMMA) {
                    break;
                } else {
                    lexer.nextToken();
                    if (lexer.isKeepComments() && lexer.hasComment()) {
                        column.addAfterComment(lexer.readAndResetComments());
                    }
                }
            }
            accept(Token.RPAREN);
            continue;
        }
        if (lexer.identifierEquals(FnvHash.Constants.RANGE)) {
            // RANGE CLUSTERED BY: only mark the clustering type here; the
            // CLUSTERED clause itself is consumed by the branch below on the
            // next loop iteration.
            lexer.nextToken();
            if (lexer.identifierEquals(FnvHash.Constants.CLUSTERED)) {
                stmt.setClusteringType(ClusteringType.Range);
            }
            continue;
        }
        if (lexer.identifierEquals(FnvHash.Constants.CLUSTERED)) {
            lexer.nextToken();
            accept(Token.BY);
            accept(Token.LPAREN);
            for (; ; ) {
                SQLSelectOrderByItem item = this.exprParser.parseSelectOrderByItem();
                stmt.addClusteredByItem(item);
                if (lexer.token() == Token.COMMA) {
                    lexer.nextToken();
                    continue;
                }
                break;
            }
            accept(Token.RPAREN);
            continue;
        }
        if (lexer.identifierEquals(FnvHash.Constants.ROW)) {
            SQLExternalRecordFormat recordFormat = this.exprParser.parseRowFormat();
            stmt.setRowFormat(recordFormat);
            continue;
        }
        if (lexer.identifierEquals(FnvHash.Constants.SORTED)) {
            lexer.nextToken();
            accept(Token.BY);
            accept(Token.LPAREN);
            for (; ; ) {
                SQLSelectOrderByItem item = this.exprParser.parseSelectOrderByItem();
                stmt.addSortedByItem(item);
                if (lexer.token() == Token.COMMA) {
                    lexer.nextToken();
                    continue;
                }
                break;
            }
            accept(Token.RPAREN);
            continue;
        }
        // Hash-clustered tables: INTO n BUCKETS [INTO m SHARDS], valid only
        // after a (non-range) CLUSTERED/SORTED BY clause.
        if (stmt.getClusteringType() != ClusteringType.Range && (stmt.getClusteredBy().size() > 0 || stmt.getSortedBy().size() > 0) && lexer.token() == Token.INTO) {
            lexer.nextToken();
            if (lexer.token() == Token.LITERAL_INT) {
                stmt.setBuckets(lexer.integerValue().intValue());
                lexer.nextToken();
            } else {
                throw new ParserException("into buckets must be integer. " + lexer.info());
            }
            acceptIdentifier("BUCKETS");
            if (lexer.token() == Token.INTO) {
                lexer.nextToken();
                if (lexer.token() == Token.LITERAL_INT) {
                    stmt.setShards(lexer.integerValue().intValue());
                    lexer.nextToken();
                } else {
                    throw new ParserException("into shards must be integer. " + lexer.info());
                }
                acceptIdentifier("SHARDS");
            }
            continue;
        }
        // Standalone INTO n BUCKETS (no clustering clause seen).
        if (lexer.token() == Token.INTO) {
            lexer.nextToken();
            if (lexer.token() == Token.LITERAL_INT) {
                stmt.setIntoBuckets(new SQLIntegerExpr(lexer.integerValue().intValue()));
                lexer.nextToken();
                acceptIdentifier("BUCKETS");
            } else {
                // NOTE: was "into shards must be integer." — this branch
                // parses BUCKETS, so report buckets.
                throw new ParserException("into buckets must be integer. " + lexer.info());
            }
            continue;
        }
        if (lexer.token() == Token.AS && stmt.getSelect() == null) {
            lexer.nextToken();
            OdpsSelectParser selectParser = new OdpsSelectParser(this.exprParser);
            SQLSelect select = selectParser.select();
            stmt.setSelect(select);
            continue;
        }
        if (lexer.identifierEquals(FnvHash.Constants.LIFECYCLE)) {
            lexer.nextToken();
            stmt.setLifecycle(this.exprParser.expr());
            continue;
        }
        if (lexer.identifierEquals(FnvHash.Constants.STORED)) {
            lexer.nextToken();
            if (lexer.token() == Token.AS) {
                lexer.nextToken();
                if (lexer.identifierEquals(FnvHash.Constants.INPUTFORMAT)) {
                    HiveInputOutputFormat format = new HiveInputOutputFormat();
                    lexer.nextToken();
                    format.setInput(this.exprParser.primary());
                    if (lexer.identifierEquals(FnvHash.Constants.OUTPUTFORMAT)) {
                        lexer.nextToken();
                        format.setOutput(this.exprParser.primary());
                    }
                    stmt.setStoredAs(format);
                } else {
                    SQLName storedAs = this.exprParser.name();
                    stmt.setStoredAs(storedAs);
                }
            } else {
                // STORED BY 'handler.class' (custom storage handler)
                accept(Token.BY);
                SQLExpr storedBy = this.exprParser.expr();
                stmt.setStoredBy(storedBy);
            }
            continue;
        }
        if (lexer.token() == Token.WITH) {
            lexer.nextToken();
            acceptIdentifier("SERDEPROPERTIES");
            accept(Token.LPAREN);
            this.exprParser.exprList(stmt.getWithSerdeproperties(), stmt);
            accept(Token.RPAREN);
            continue;
        }
        if (lexer.identifierEquals(FnvHash.Constants.TBLPROPERTIES)) {
            parseTblProperties(stmt);
            continue;
        }
        if (lexer.identifierEquals(FnvHash.Constants.LOCATION)) {
            lexer.nextToken();
            SQLExpr location = this.exprParser.expr();
            stmt.setLocation(location);
            continue;
        }
        if (lexer.identifierEquals(FnvHash.Constants.USING)) {
            lexer.nextToken();
            SQLExpr using = this.exprParser.expr();
            stmt.setUsing(using);
            continue;
        }
        break;
    }
    return stmt;
}
Also used : ParserException(com.alibaba.druid.sql.parser.ParserException) HiveInputOutputFormat(com.alibaba.druid.sql.dialect.hive.ast.HiveInputOutputFormat) OdpsCreateTableStatement(com.alibaba.druid.sql.dialect.odps.ast.OdpsCreateTableStatement) SQLName(com.alibaba.druid.sql.ast.SQLName) SQLIntegerExpr(com.alibaba.druid.sql.ast.expr.SQLIntegerExpr) SQLExpr(com.alibaba.druid.sql.ast.SQLExpr)

Example 2 with HiveInputOutputFormat

use of com.alibaba.druid.sql.dialect.hive.ast.HiveInputOutputFormat in project druid by alibaba.

From the class AntsparkCreateTableParser, method parseCreateTable:

/**
 * Parses an AntSpark {@code CREATE TABLE} statement.
 * <p>
 * Unlike the ODPS variant, clauses here are parsed in one fixed order:
 * column list, USING, COMMENT, MAPPED BY, PARTITIONED BY, CLUSTERED BY,
 * SKEWED BY ... ON, SORTED BY, ROW FORMAT, INTO n BUCKETS, STORED AS,
 * LOCATION, TBLPROPERTIES, META LIFECYCLE, AS select, LIKE, COMMENT.
 *
 * @param acceptCreate whether the CREATE keyword still needs to be consumed
 * @return the parsed statement
 * @throws ParserException on an unexpected token (e.g. TABLESPACE, or a
 *         non-identifier where a partition column name is expected)
 */
public SQLCreateTableStatement parseCreateTable(boolean acceptCreate) {
    AntsparkCreateTableStatement stmt = new AntsparkCreateTableStatement();
    if (acceptCreate) {
        // Preserve comments appearing before CREATE when comment-keeping is on.
        if (lexer.hasComment() && lexer.isKeepComments()) {
            stmt.addBeforeComment(lexer.readAndResetComments());
        }
        accept(Token.CREATE);
    }
    if (lexer.identifierEquals(FnvHash.Constants.EXTERNAL)) {
        lexer.nextToken();
        stmt.setExternal(true);
    }
    if (lexer.identifierEquals(FnvHash.Constants.TEMPORARY)) {
        lexer.nextToken();
        stmt.setType(SQLCreateTableStatement.Type.TEMPORARY);
    }
    accept(Token.TABLE);
    // IF NOT EXISTS; IF may arrive as a keyword token or a plain identifier.
    if (lexer.token() == Token.IF || lexer.identifierEquals(FnvHash.Constants.IF)) {
        lexer.nextToken();
        accept(Token.NOT);
        accept(Token.EXISTS);
        stmt.setIfNotExiists(true);
    }
    stmt.setName(this.exprParser.name());
    // Optional column / constraint list: ( col type ..., CONSTRAINT ..., ... )
    if (lexer.token() == Token.LPAREN) {
        lexer.nextToken();
        for (; ; ) {
            Token token = lexer.token();
            if (// 
            token == Token.IDENTIFIER || token == Token.LITERAL_ALIAS) {
                SQLColumnDefinition column = this.exprParser.parseColumn();
                stmt.getTableElementList().add(column);
            } else if (// 
            token == Token.PRIMARY || // 
            token == Token.UNIQUE || // 
            token == Token.CHECK || token == Token.CONSTRAINT || token == Token.FOREIGN) {
                // Table-level constraint element.
                SQLConstraint constraint = this.exprParser.parseConstaint();
                constraint.setParent(stmt);
                stmt.getTableElementList().add((SQLTableElement) constraint);
            } else if (token == Token.TABLESPACE) {
                throw new ParserException("TODO " + lexer.info());
            } else {
                // Fallback: treat anything else as a column definition.
                SQLColumnDefinition column = this.exprParser.parseColumn();
                stmt.getTableElementList().add(column);
            }
            if (lexer.token() == Token.COMMA) {
                lexer.nextToken();
                if (lexer.token() == Token.RPAREN) {
                    // compatible for sql server
                    break;
                }
                continue;
            }
            break;
        }
        accept(Token.RPAREN);
    }
    // add using
    if (lexer.token() == Token.USING) {
        lexer.nextToken();
        SQLName expr = this.exprParser.name();
        stmt.setDatasource(expr);
    }
    if (lexer.token() == Token.COMMENT) {
        lexer.nextToken();
        SQLExpr comment = this.exprParser.expr();
        stmt.setComment(comment);
    }
    if (lexer.identifierEquals(FnvHash.Constants.MAPPED)) {
        lexer.nextToken();
        accept(Token.BY);
        this.exprParser.parseAssignItem(stmt.getMappedBy(), stmt);
    }
    // PARTITIONED BY ( col type, ... ) — partition columns must be plain identifiers.
    if (lexer.token() == Token.PARTITIONED) {
        lexer.nextToken();
        accept(Token.BY);
        accept(Token.LPAREN);
        for (; ; ) {
            if (lexer.token() != Token.IDENTIFIER) {
                throw new ParserException("expect identifier. " + lexer.info());
            }
            SQLColumnDefinition column = this.exprParser.parseColumn();
            stmt.addPartitionColumn(column);
            // Attach trailing comments to the column they follow.
            if (lexer.isKeepComments() && lexer.hasComment()) {
                column.addAfterComment(lexer.readAndResetComments());
            }
            if (lexer.token() != Token.COMMA) {
                break;
            } else {
                lexer.nextToken();
                if (lexer.isKeepComments() && lexer.hasComment()) {
                    column.addAfterComment(lexer.readAndResetComments());
                }
            }
        }
        accept(Token.RPAREN);
    }
    if (lexer.identifierEquals(FnvHash.Constants.CLUSTERED)) {
        lexer.nextToken();
        accept(Token.BY);
        accept(Token.LPAREN);
        for (; ; ) {
            SQLSelectOrderByItem item = this.exprParser.parseSelectOrderByItem();
            stmt.addClusteredByItem(item);
            if (lexer.token() == Token.COMMA) {
                lexer.nextToken();
                continue;
            }
            break;
        }
        accept(Token.RPAREN);
    }
    // SKEWED BY (cols) ON ((v1, v2), ...) — each ON item may be a tuple or a scalar.
    if (lexer.identifierEquals(FnvHash.Constants.SKEWED)) {
        lexer.nextToken();
        accept(Token.BY);
        accept(Token.LPAREN);
        this.exprParser.exprList(stmt.getSkewedBy(), stmt);
        accept(Token.RPAREN);
        accept(Token.ON);
        accept(Token.LPAREN);
        for (; ; ) {
            if (lexer.token() == Token.LPAREN) {
                SQLListExpr list = new SQLListExpr();
                lexer.nextToken();
                this.exprParser.exprList(list.getItems(), list);
                accept(Token.RPAREN);
                stmt.addSkewedByOn(list);
            } else {
                SQLExpr expr = this.exprParser.expr();
                stmt.addSkewedByOn(expr);
            }
            if (lexer.token() == Token.COMMA) {
                lexer.nextToken();
                continue;
            }
            break;
        }
        accept(Token.RPAREN);
    }
    // SORTED BY / ROW FORMAT may appear in either order; check both twice.
    if (lexer.identifierEquals(FnvHash.Constants.SORTED)) {
        parseSortedBy(stmt);
    }
    if (lexer.token() == Token.ROW || lexer.identifierEquals(FnvHash.Constants.ROW)) {
        parseRowFormat(stmt);
    }
    if (lexer.identifierEquals(FnvHash.Constants.SORTED)) {
        parseSortedBy(stmt);
    }
    // A CLUSTERED/SORTED BY clause makes INTO n BUCKETS mandatory.
    if (stmt.getClusteredBy().size() > 0 || stmt.getSortedBy().size() > 0) {
        accept(Token.INTO);
        if (lexer.token() == Token.LITERAL_INT) {
            stmt.setBuckets(lexer.integerValue().intValue());
            lexer.nextToken();
        } else {
            throw new ParserException("into buckets must be integer. " + lexer.info());
        }
        acceptIdentifier("BUCKETS");
    }
    if (lexer.token() == Token.ROW || lexer.identifierEquals(FnvHash.Constants.ROW)) {
        parseRowFormat(stmt);
    }
    if (lexer.identifierEquals(FnvHash.Constants.STORED)) {
        lexer.nextToken();
        accept(Token.AS);
        if (lexer.identifierEquals(FnvHash.Constants.INPUTFORMAT)) {
            // STORED AS INPUTFORMAT 'in' [OUTPUTFORMAT 'out']
            HiveInputOutputFormat format = new HiveInputOutputFormat();
            lexer.nextToken();
            format.setInput(this.exprParser.primary());
            if (lexer.identifierEquals(FnvHash.Constants.OUTPUTFORMAT)) {
                lexer.nextToken();
                format.setOutput(this.exprParser.primary());
            }
            stmt.setStoredAs(format);
        } else {
            // STORED AS <format-name>, e.g. PARQUET, ORC
            SQLName name = this.exprParser.name();
            stmt.setStoredAs(name);
        }
    }
    if (lexer.identifierEquals(FnvHash.Constants.LOCATION)) {
        lexer.nextToken();
        SQLExpr location = this.exprParser.expr();
        stmt.setLocation(location);
    }
    // TBLPROPERTIES ( 'k' = v [, ...] ) — parsed inline as name/value options.
    if (lexer.identifierEquals(FnvHash.Constants.TBLPROPERTIES)) {
        lexer.nextToken();
        accept(Token.LPAREN);
        for (; ; ) {
            String name = lexer.stringVal();
            lexer.nextToken();
            accept(Token.EQ);
            SQLExpr value = this.exprParser.primary();
            stmt.addOption(name, value);
            if (lexer.token() == Token.COMMA) {
                lexer.nextToken();
                if (lexer.token() == Token.RPAREN) {
                    // Tolerate a trailing comma before ')'.
                    break;
                }
                continue;
            }
            break;
        }
        accept(Token.RPAREN);
    }
    if (lexer.identifierEquals(FnvHash.Constants.META)) {
        lexer.nextToken();
        acceptIdentifier("LIFECYCLE");
        stmt.setMetaLifeCycle(this.exprParser.primary());
    }
    if (lexer.token() == Token.AS) {
        // CREATE TABLE ... AS SELECT ...
        lexer.nextToken();
        SQLSelect select = this.createSQLSelectParser().select();
        stmt.setSelect(select);
    }
    if (lexer.token() == Token.LIKE) {
        lexer.nextToken();
        SQLName name = this.exprParser.name();
        stmt.setLike(name);
    }
    if (lexer.token() == Token.COMMENT) {
        lexer.nextToken();
        SQLExpr comment = this.exprParser.expr();
        stmt.setComment(comment);
    }
    return stmt;
}
Also used : ParserException(com.alibaba.druid.sql.parser.ParserException) SQLConstraint(com.alibaba.druid.sql.ast.statement.SQLConstraint) SQLSelect(com.alibaba.druid.sql.ast.statement.SQLSelect) SQLName(com.alibaba.druid.sql.ast.SQLName) Token(com.alibaba.druid.sql.parser.Token) SQLTableElement(com.alibaba.druid.sql.ast.statement.SQLTableElement) SQLColumnDefinition(com.alibaba.druid.sql.ast.statement.SQLColumnDefinition) SQLExpr(com.alibaba.druid.sql.ast.SQLExpr) HiveInputOutputFormat(com.alibaba.druid.sql.dialect.hive.ast.HiveInputOutputFormat) AntsparkCreateTableStatement(com.alibaba.druid.sql.dialect.antspark.ast.AntsparkCreateTableStatement) SQLSelectOrderByItem(com.alibaba.druid.sql.ast.statement.SQLSelectOrderByItem) SQLListExpr(com.alibaba.druid.sql.ast.expr.SQLListExpr)

Example 3 with HiveInputOutputFormat

use of com.alibaba.druid.sql.dialect.hive.ast.HiveInputOutputFormat in project druid by alibaba.

From the class HiveCreateTableParser, method parseCreateTable:

/**
 * Parses a Hive {@code CREATE TABLE} statement.
 * <p>
 * Clauses are parsed in a fixed order: column/constraint list or LIKE,
 * ENGINE/CHARSET (skipped), USING, OPTIONS, COMMENT, MAPPED BY,
 * PARTITIONED BY, CLUSTERED BY, SKEWED BY ... ON, SORTED BY, ROW FORMAT,
 * INTO n BUCKETS, STORED AS, LOCATION, LIKE, TBLPROPERTIES, META LIFECYCLE,
 * AS select, a second LIKE form (including LIKE SELECT / LIKE MAPPING),
 * COMMENT, USING, TBLPROPERTIES.
 *
 * @param acceptCreate whether the CREATE keyword still needs to be consumed
 * @return the parsed statement
 * @throws ParserException on an unexpected token (e.g. TABLESPACE, or a
 *         non-identifier where a partition column name is expected)
 */
public SQLCreateTableStatement parseCreateTable(boolean acceptCreate) {
    HiveCreateTableStatement stmt = newCreateStatement();
    if (acceptCreate) {
        // Preserve comments appearing before CREATE when comment-keeping is on.
        if (lexer.hasComment() && lexer.isKeepComments()) {
            stmt.addBeforeComment(lexer.readAndResetComments());
        }
        accept(Token.CREATE);
    }
    if (lexer.identifierEquals(FnvHash.Constants.EXTERNAL)) {
        lexer.nextToken();
        stmt.setExternal(true);
    }
    if (lexer.identifierEquals(FnvHash.Constants.TEMPORARY)) {
        lexer.nextToken();
        stmt.setType(SQLCreateTableStatement.Type.TEMPORARY);
    }
    accept(Token.TABLE);
    // IF NOT EXISTS; IF may arrive as a keyword token or a plain identifier.
    if (lexer.token() == Token.IF || lexer.identifierEquals(FnvHash.Constants.IF)) {
        lexer.nextToken();
        accept(Token.NOT);
        accept(Token.EXISTS);
        stmt.setIfNotExiists(true);
    }
    stmt.setName(this.exprParser.name());
    // Either an explicit column/constraint list or a LIKE clause follows the name.
    if (lexer.token() == Token.LPAREN) {
        lexer.nextToken();
        for (; ; ) {
            Token token = lexer.token();
            if (// 
            token == Token.IDENTIFIER || token == Token.LITERAL_ALIAS) {
                SQLColumnDefinition column = this.exprParser.parseColumn();
                stmt.getTableElementList().add(column);
            } else if (// 
            token == Token.PRIMARY || // 
            token == Token.UNIQUE || // 
            token == Token.CHECK || token == Token.CONSTRAINT || token == Token.FOREIGN) {
                // Table-level constraint element.
                SQLConstraint constraint = this.exprParser.parseConstaint();
                constraint.setParent(stmt);
                stmt.getTableElementList().add((SQLTableElement) constraint);
            } else if (token == Token.TABLESPACE) {
                throw new ParserException("TODO " + lexer.info());
            } else {
                // Fallback: treat anything else as a column definition.
                SQLColumnDefinition column = this.exprParser.parseColumn();
                stmt.getTableElementList().add(column);
            }
            if (lexer.token() == Token.COMMA) {
                lexer.nextToken();
                if (lexer.token() == Token.RPAREN) {
                    // compatible for sql server
                    break;
                }
                continue;
            }
            break;
        }
        accept(Token.RPAREN);
    } else if (lexer.token() == Token.LIKE) {
        parseLike(stmt);
    }
    if (lexer.identifierEquals(FnvHash.Constants.ENGINE)) {
        // skip engine=xxx
        lexer.nextToken();
        accept(Token.EQ);
        lexer.nextToken();
    }
    if (lexer.identifierEquals(FnvHash.Constants.CHARSET)) {
        // skip charset = xxx
        lexer.nextToken();
        accept(Token.EQ);
        lexer.nextToken();
    }
    if (lexer.identifierEquals(FnvHash.Constants.USING) || lexer.token() == Token.USING) {
        lexer.nextToken();
        SQLExpr using = this.exprParser.expr();
        stmt.setUsing(using);
    }
    if (lexer.identifierEquals(FnvHash.Constants.OPTIONS)) {
        lexer.nextToken();
        accept(Token.LPAREN);
        parseAssignItems(stmt.getTableOptions(), stmt, false);
        accept(Token.RPAREN);
    }
    if (lexer.token() == Token.COMMENT) {
        lexer.nextToken();
        SQLExpr comment = this.exprParser.expr();
        stmt.setComment(comment);
    }
    if (lexer.identifierEquals(FnvHash.Constants.MAPPED)) {
        lexer.nextToken();
        accept(Token.BY);
        this.exprParser.parseAssignItem(stmt.getMappedBy(), stmt);
    }
    // PARTITIONED BY ( col type, ... ) — partition columns must be plain identifiers.
    if (lexer.token() == Token.PARTITIONED) {
        lexer.nextToken();
        accept(Token.BY);
        accept(Token.LPAREN);
        for (; ; ) {
            if (lexer.token() != Token.IDENTIFIER) {
                throw new ParserException("expect identifier. " + lexer.info());
            }
            SQLColumnDefinition column = this.exprParser.parseColumn();
            stmt.addPartitionColumn(column);
            // Attach trailing comments to the column they follow.
            if (lexer.isKeepComments() && lexer.hasComment()) {
                column.addAfterComment(lexer.readAndResetComments());
            }
            if (lexer.token() != Token.COMMA) {
                break;
            } else {
                lexer.nextToken();
                if (lexer.isKeepComments() && lexer.hasComment()) {
                    column.addAfterComment(lexer.readAndResetComments());
                }
            }
        }
        accept(Token.RPAREN);
    }
    if (lexer.identifierEquals(FnvHash.Constants.CLUSTERED)) {
        lexer.nextToken();
        accept(Token.BY);
        accept(Token.LPAREN);
        for (; ; ) {
            SQLSelectOrderByItem item = this.exprParser.parseSelectOrderByItem();
            stmt.addClusteredByItem(item);
            if (lexer.token() == Token.COMMA) {
                lexer.nextToken();
                continue;
            }
            break;
        }
        accept(Token.RPAREN);
    }
    // SKEWED BY (cols) ON ((v1, v2), ...) — each ON item may be a tuple or a scalar.
    if (lexer.identifierEquals(FnvHash.Constants.SKEWED)) {
        lexer.nextToken();
        accept(Token.BY);
        accept(Token.LPAREN);
        this.exprParser.exprList(stmt.getSkewedBy(), stmt);
        accept(Token.RPAREN);
        accept(Token.ON);
        accept(Token.LPAREN);
        for (; ; ) {
            if (lexer.token() == Token.LPAREN) {
                SQLListExpr list = new SQLListExpr();
                lexer.nextToken();
                this.exprParser.exprList(list.getItems(), list);
                accept(Token.RPAREN);
                stmt.addSkewedByOn(list);
            } else {
                SQLExpr expr = this.exprParser.expr();
                stmt.addSkewedByOn(expr);
            }
            if (lexer.token() == Token.COMMA) {
                lexer.nextToken();
                continue;
            }
            break;
        }
        accept(Token.RPAREN);
    }
    // SORTED BY / ROW FORMAT may appear in either order; check both twice.
    if (lexer.identifierEquals(FnvHash.Constants.SORTED)) {
        parseSortedBy(stmt);
    }
    if (lexer.token() == Token.ROW || lexer.identifierEquals(FnvHash.Constants.ROW)) {
        parseRowFormat(stmt);
    }
    if (lexer.identifierEquals(FnvHash.Constants.SORTED)) {
        parseSortedBy(stmt);
    }
    // A CLUSTERED/SORTED BY clause makes INTO n BUCKETS mandatory.
    if (stmt.getClusteredBy().size() > 0 || stmt.getSortedBy().size() > 0) {
        accept(Token.INTO);
        if (lexer.token() == Token.LITERAL_INT) {
            stmt.setBuckets(lexer.integerValue().intValue());
            lexer.nextToken();
        } else {
            throw new ParserException("into buckets must be integer. " + lexer.info());
        }
        acceptIdentifier("BUCKETS");
    }
    if (lexer.token() == Token.ROW || lexer.identifierEquals(FnvHash.Constants.ROW)) {
        parseRowFormat(stmt);
    }
    if (lexer.identifierEquals(FnvHash.Constants.STORED)) {
        lexer.nextToken();
        accept(Token.AS);
        if (lexer.identifierEquals(FnvHash.Constants.INPUTFORMAT)) {
            // STORED AS INPUTFORMAT 'in' [OUTPUTFORMAT 'out']
            HiveInputOutputFormat format = new HiveInputOutputFormat();
            lexer.nextToken();
            format.setInput(this.exprParser.primary());
            if (lexer.identifierEquals(FnvHash.Constants.OUTPUTFORMAT)) {
                lexer.nextToken();
                format.setOutput(this.exprParser.primary());
            }
            stmt.setStoredAs(format);
        } else {
            // STORED AS <format-name>, e.g. ORC, PARQUET
            SQLName name = this.exprParser.name();
            stmt.setStoredAs(name);
        }
    }
    if (lexer.identifierEquals(FnvHash.Constants.LOCATION)) {
        lexer.nextToken();
        SQLExpr location = this.exprParser.primary();
        stmt.setLocation(location);
    }
    if (lexer.token() == Token.LIKE) {
        parseLike(stmt);
    }
    if (lexer.identifierEquals(FnvHash.Constants.TBLPROPERTIES)) {
        parseTblProperties(stmt);
    }
    if (lexer.identifierEquals(FnvHash.Constants.META)) {
        lexer.nextToken();
        acceptIdentifier("LIFECYCLE");
        stmt.setMetaLifeCycle(this.exprParser.primary());
    }
    if (lexer.token() == Token.AS) {
        // CREATE TABLE ... AS SELECT ...
        lexer.nextToken();
        SQLSelect select = this.createSQLSelectParser().select();
        stmt.setSelect(select);
    }
    // Late LIKE forms: LIKE SELECT ... (query), LIKE MAPPING(...) (expr),
    // or a plain LIKE table-name. A save point lets us back out after
    // peeking past LIKE.
    if (lexer.token() == Token.LIKE) {
        lexer.nextToken();
        Lexer.SavePoint mark = lexer.mark();
        if (lexer.token() == Token.SELECT) {
            stmt.setLikeQuery(true);
            SQLSelect select = this.createSQLSelectParser().select();
            stmt.setSelect(select);
        } else {
            lexer.reset(mark);
            if (lexer.identifierEquals(FnvHash.Constants.MAPPING)) {
                SQLExpr like = this.exprParser.primary();
                stmt.setLike(new SQLExprTableSource(like));
            } else {
                SQLName name = this.exprParser.name();
                stmt.setLike(name);
            }
        }
    }
    if (lexer.token() == Token.COMMENT) {
        lexer.nextToken();
        SQLExpr comment = this.exprParser.expr();
        stmt.setComment(comment);
    }
    if (lexer.identifierEquals(FnvHash.Constants.USING) || lexer.token() == Token.USING) {
        lexer.nextToken();
        SQLExpr using = this.exprParser.expr();
        stmt.setUsing(using);
    }
    if (lexer.identifierEquals(FnvHash.Constants.TBLPROPERTIES)) {
        lexer.nextToken();
        accept(Token.LPAREN);
        parseAssignItems(stmt.getTblProperties(), stmt, false);
        accept(Token.RPAREN);
    }
    return stmt;
}
Also used : SQLName(com.alibaba.druid.sql.ast.SQLName) SQLExpr(com.alibaba.druid.sql.ast.SQLExpr) HiveCreateTableStatement(com.alibaba.druid.sql.dialect.hive.stmt.HiveCreateTableStatement) HiveInputOutputFormat(com.alibaba.druid.sql.dialect.hive.ast.HiveInputOutputFormat) SQLListExpr(com.alibaba.druid.sql.ast.expr.SQLListExpr)

Aggregations

SQLExpr (com.alibaba.druid.sql.ast.SQLExpr)3 SQLName (com.alibaba.druid.sql.ast.SQLName)3 HiveInputOutputFormat (com.alibaba.druid.sql.dialect.hive.ast.HiveInputOutputFormat)3 SQLListExpr (com.alibaba.druid.sql.ast.expr.SQLListExpr)2 ParserException (com.alibaba.druid.sql.parser.ParserException)2 SQLIntegerExpr (com.alibaba.druid.sql.ast.expr.SQLIntegerExpr)1 SQLColumnDefinition (com.alibaba.druid.sql.ast.statement.SQLColumnDefinition)1 SQLConstraint (com.alibaba.druid.sql.ast.statement.SQLConstraint)1 SQLSelect (com.alibaba.druid.sql.ast.statement.SQLSelect)1 SQLSelectOrderByItem (com.alibaba.druid.sql.ast.statement.SQLSelectOrderByItem)1 SQLTableElement (com.alibaba.druid.sql.ast.statement.SQLTableElement)1 AntsparkCreateTableStatement (com.alibaba.druid.sql.dialect.antspark.ast.AntsparkCreateTableStatement)1 HiveCreateTableStatement (com.alibaba.druid.sql.dialect.hive.stmt.HiveCreateTableStatement)1 OdpsCreateTableStatement (com.alibaba.druid.sql.dialect.odps.ast.OdpsCreateTableStatement)1 Token (com.alibaba.druid.sql.parser.Token)1