Use of com.alibaba.druid.sql.ast.SQLPartitionByRange in project druid by alibaba.
The class MySqlCreateTableParser, method parseCrateTable:
public MySqlCreateTableStatement parseCrateTable(boolean acceptCreate) {
if (acceptCreate) {
accept(Token.CREATE);
}
MySqlCreateTableStatement stmt = new MySqlCreateTableStatement();
if (identifierEquals("TEMPORARY")) {
lexer.nextToken();
stmt.setType(SQLCreateTableStatement.Type.GLOBAL_TEMPORARY);
}
accept(Token.TABLE);
if (lexer.token() == Token.IF || identifierEquals("IF")) {
lexer.nextToken();
accept(Token.NOT);
accept(Token.EXISTS);
stmt.setIfNotExiists(true);
}
stmt.setName(this.exprParser.name());
if (lexer.token() == Token.LIKE) {
lexer.nextToken();
SQLName name = this.exprParser.name();
stmt.setLike(name);
}
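// table element list: column definitions, constraints, indexes and keys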
if (lexer.token() == (Token.LPAREN)) {
lexer.nextToken();
if (lexer.token() == Token.LIKE) {
lexer.nextToken();
SQLName name = this.exprParser.name();
stmt.setLike(name);
} else {
for (; ; ) {
if (lexer.token() == Token.IDENTIFIER || lexer.token() == Token.LITERAL_CHARS) {
SQLColumnDefinition column = this.exprParser.parseColumn();
stmt.getTableElementList().add(column);
} else if (lexer.token() == Token.CONSTRAINT || lexer.token() == Token.PRIMARY || lexer.token() == Token.UNIQUE) {
SQLTableConstraint constraint = this.parseConstraint();
stmt.getTableElementList().add(constraint);
} else if (lexer.token() == (Token.INDEX)) {
lexer.nextToken();
MySqlTableIndex idx = new MySqlTableIndex();
if (lexer.token() == Token.IDENTIFIER) {
if (!"USING".equalsIgnoreCase(lexer.stringVal())) {
idx.setName(this.exprParser.name());
}
}
if (identifierEquals("USING")) {
lexer.nextToken();
idx.setIndexType(lexer.stringVal());
lexer.nextToken();
}
accept(Token.LPAREN);
for (; ; ) {
idx.addColumn(this.exprParser.expr());
if (!(lexer.token() == (Token.COMMA))) {
break;
} else {
lexer.nextToken();
}
}
accept(Token.RPAREN);
if (identifierEquals("USING")) {
lexer.nextToken();
idx.setIndexType(lexer.stringVal());
lexer.nextToken();
}
stmt.getTableElementList().add(idx);
} else if (lexer.token() == (Token.KEY)) {
stmt.getTableElementList().add(parseConstraint());
} else if (lexer.token() == (Token.PRIMARY)) {
SQLTableConstraint pk = parseConstraint();
pk.setParent(stmt);
stmt.getTableElementList().add(pk);
} else if (lexer.token() == (Token.FOREIGN)) {
SQLForeignKeyConstraint fk = this.getExprParser().parseForeignKey();
fk.setParent(stmt);
stmt.getTableElementList().add(fk);
} else if (lexer.token() == Token.CHECK) {
SQLCheck check = this.exprParser.parseCheck();
stmt.getTableElementList().add(check);
} else {
SQLColumnDefinition column = this.exprParser.parseColumn();
stmt.getTableElementList().add(column);
}
if (!(lexer.token() == (Token.COMMA))) {
break;
} else {
lexer.nextToken();
}
}
}
accept(Token.RPAREN);
}
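// table options: ENGINE, AUTO_INCREMENT, charset/collation, COMMENT, UNION, TABLESPACE, partitioning, etc.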
for (; ; ) {
if (identifierEquals("ENGINE")) {
lexer.nextToken();
if (lexer.token() == Token.EQ) {
lexer.nextToken();
}
stmt.getTableOptions().put("ENGINE", this.exprParser.expr());
continue;
}
if (identifierEquals("AUTO_INCREMENT")) {
lexer.nextToken();
if (lexer.token() == Token.EQ) {
lexer.nextToken();
}
stmt.getTableOptions().put("AUTO_INCREMENT", this.exprParser.expr());
continue;
}
if (identifierEquals("AVG_ROW_LENGTH")) {
lexer.nextToken();
if (lexer.token() == Token.EQ) {
lexer.nextToken();
}
stmt.getTableOptions().put("AVG_ROW_LENGTH", this.exprParser.expr());
continue;
}
if (lexer.token() == Token.DEFAULT) {
lexer.nextToken();
parseTableOptionCharsetOrCollate(stmt);
continue;
}
if (parseTableOptionCharsetOrCollate(stmt)) {
continue;
}
if (identifierEquals("CHECKSUM")) {
lexer.nextToken();
if (lexer.token() == Token.EQ) {
lexer.nextToken();
}
stmt.getTableOptions().put("CHECKSUM", this.exprParser.expr());
continue;
}
if (lexer.token() == Token.COMMENT) {
lexer.nextToken();
if (lexer.token() == Token.EQ) {
lexer.nextToken();
}
stmt.getTableOptions().put("COMMENT", this.exprParser.expr());
continue;
}
if (identifierEquals("CONNECTION")) {
lexer.nextToken();
if (lexer.token() == Token.EQ) {
lexer.nextToken();
}
stmt.getTableOptions().put("CONNECTION", this.exprParser.expr());
continue;
}
if (identifierEquals("DATA")) {
lexer.nextToken();
acceptIdentifier("DIRECTORY");
if (lexer.token() == Token.EQ) {
lexer.nextToken();
}
stmt.getTableOptions().put("DATA DIRECTORY", this.exprParser.expr());
continue;
}
if (identifierEquals("DELAY_KEY_WRITE")) {
lexer.nextToken();
if (lexer.token() == Token.EQ) {
lexer.nextToken();
}
stmt.getTableOptions().put("DELAY_KEY_WRITE", this.exprParser.expr());
continue;
}
if (identifierEquals("INDEX")) {
lexer.nextToken();
acceptIdentifier("DIRECTORY");
if (lexer.token() == Token.EQ) {
lexer.nextToken();
}
stmt.getTableOptions().put("INDEX DIRECTORY", this.exprParser.expr());
continue;
}
if (identifierEquals("INSERT_METHOD")) {
lexer.nextToken();
if (lexer.token() == Token.EQ) {
lexer.nextToken();
}
stmt.getTableOptions().put("INSERT_METHOD", this.exprParser.expr());
continue;
}
if (identifierEquals("KEY_BLOCK_SIZE")) {
lexer.nextToken();
if (lexer.token() == Token.EQ) {
lexer.nextToken();
}
stmt.getTableOptions().put("KEY_BLOCK_SIZE", this.exprParser.expr());
continue;
}
if (identifierEquals("MAX_ROWS")) {
lexer.nextToken();
if (lexer.token() == Token.EQ) {
lexer.nextToken();
}
stmt.getTableOptions().put("MAX_ROWS", this.exprParser.expr());
continue;
}
if (identifierEquals("MIN_ROWS")) {
lexer.nextToken();
if (lexer.token() == Token.EQ) {
lexer.nextToken();
}
stmt.getTableOptions().put("MIN_ROWS", this.exprParser.expr());
continue;
}
if (identifierEquals("PACK_KEYS")) {
lexer.nextToken();
if (lexer.token() == Token.EQ) {
lexer.nextToken();
}
stmt.getTableOptions().put("PACK_KEYS", this.exprParser.expr());
continue;
}
if (identifierEquals("PASSWORD")) {
lexer.nextToken();
if (lexer.token() == Token.EQ) {
lexer.nextToken();
}
stmt.getTableOptions().put("PASSWORD", this.exprParser.expr());
continue;
}
if (identifierEquals("ROW_FORMAT")) {
lexer.nextToken();
if (lexer.token() == Token.EQ) {
lexer.nextToken();
}
stmt.getTableOptions().put("ROW_FORMAT", this.exprParser.expr());
continue;
}
if (identifierEquals("STATS_AUTO_RECALC")) {
lexer.nextToken();
if (lexer.token() == Token.EQ) {
lexer.nextToken();
}
stmt.getTableOptions().put("STATS_AUTO_RECALC", this.exprParser.expr());
continue;
}
if (identifierEquals("STATS_PERSISTENT")) {
lexer.nextToken();
if (lexer.token() == Token.EQ) {
lexer.nextToken();
}
stmt.getTableOptions().put("STATS_PERSISTENT", this.exprParser.expr());
continue;
}
if (identifierEquals("STATS_SAMPLE_PAGES")) {
lexer.nextToken();
if (lexer.token() == Token.EQ) {
lexer.nextToken();
}
stmt.getTableOptions().put("STATS_SAMPLE_PAGES", this.exprParser.expr());
continue;
}
if (lexer.token() == Token.UNION) {
lexer.nextToken();
if (lexer.token() == Token.EQ) {
lexer.nextToken();
}
accept(Token.LPAREN);
SQLTableSource tableSrc = this.createSQLSelectParser().parseTableSource();
stmt.getTableOptions().put("UNION", tableSrc);
accept(Token.RPAREN);
continue;
}
if (lexer.token() == Token.TABLESPACE) {
lexer.nextToken();
TableSpaceOption option = new TableSpaceOption();
option.setName(this.exprParser.name());
if (identifierEquals("STORAGE")) {
lexer.nextToken();
option.setStorage(this.exprParser.name());
}
stmt.getTableOptions().put("TABLESPACE", option);
continue;
}
if (identifierEquals("TABLEGROUP")) {
lexer.nextToken();
SQLName tableGroup = this.exprParser.name();
stmt.setTableGroup(tableGroup);
continue;
}
if (identifierEquals("TYPE")) {
lexer.nextToken();
accept(Token.EQ);
stmt.getTableOptions().put("TYPE", this.exprParser.expr());
lexer.nextToken();
continue;
}
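// PARTITION BY [LINEAR] KEY | HASH | RANGE | LIST, optionally followed by a list of partition definitions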
if (lexer.token() == Token.PARTITION) {
lexer.nextToken();
accept(Token.BY);
SQLPartitionBy partitionClause;
boolean linera = false;
if (identifierEquals("LINEAR")) {
lexer.nextToken();
linera = true;
}
if (lexer.token() == Token.KEY) {
MySqlPartitionByKey clause = new MySqlPartitionByKey();
lexer.nextToken();
if (linera) {
clause.setLinear(true);
}
accept(Token.LPAREN);
if (lexer.token() != Token.RPAREN) {
for (; ; ) {
clause.addColumn(this.exprParser.name());
if (lexer.token() == Token.COMMA) {
lexer.nextToken();
continue;
}
break;
}
}
accept(Token.RPAREN);
partitionClause = clause;
partitionClauseRest(clause);
} else if (identifierEquals("HASH")) {
lexer.nextToken();
SQLPartitionByHash clause = new SQLPartitionByHash();
if (linera) {
clause.setLinear(true);
}
if (lexer.token() == Token.KEY) {
lexer.nextToken();
clause.setKey(true);
}
accept(Token.LPAREN);
clause.setExpr(this.exprParser.expr());
accept(Token.RPAREN);
partitionClause = clause;
partitionClauseRest(clause);
} else if (identifierEquals("RANGE")) {
SQLPartitionByRange clause = partitionByRange();
partitionClause = clause;
partitionClauseRest(clause);
} else if (identifierEquals("LIST")) {
lexer.nextToken();
SQLPartitionByList clause = new SQLPartitionByList();
if (lexer.token() == Token.LPAREN) {
lexer.nextToken();
clause.setExpr(this.exprParser.expr());
accept(Token.RPAREN);
} else {
acceptIdentifier("COLUMNS");
accept(Token.LPAREN);
for (; ; ) {
clause.addColumn(this.exprParser.name());
if (lexer.token() == Token.COMMA) {
lexer.nextToken();
continue;
}
break;
}
accept(Token.RPAREN);
}
partitionClause = clause;
partitionClauseRest(clause);
} else {
throw new ParserException("TODO " + lexer.token() + " " + lexer.stringVal());
}
if (lexer.token() == Token.LPAREN) {
lexer.nextToken();
for (; ; ) {
SQLPartition partitionDef = this.getExprParser().parsePartition();
partitionClause.addPartition(partitionDef);
if (lexer.token() == Token.COMMA) {
lexer.nextToken();
continue;
} else {
break;
}
}
accept(Token.RPAREN);
}
stmt.setPartitioning(partitionClause);
continue;
}
break;
}
if (lexer.token() == (Token.ON)) {
throw new ParserException("TODO");
}
if (lexer.token() == (Token.AS)) {
lexer.nextToken();
}
if (lexer.token() == (Token.SELECT)) {
SQLSelect query = new MySqlSelectParser(this.exprParser).select();
stmt.setSelect(query);
}
while (lexer.token() == (Token.HINT)) {
this.exprParser.parseHints(stmt.getOptionHints());
}
return stmt;
}
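A minimal usage sketch (not part of the project source): it assumes the druid 1.x entry point MySqlStatementParser routes CREATE TABLE through the parser above, and that getPartitioning()/getPartitions() mirror the setters used in the listing; the SQL text is only illustrative.
import com.alibaba.druid.sql.ast.SQLPartitionBy;
import com.alibaba.druid.sql.ast.SQLPartitionByRange;
import com.alibaba.druid.sql.dialect.mysql.ast.statement.MySqlCreateTableStatement;
import com.alibaba.druid.sql.dialect.mysql.parser.MySqlStatementParser;
// parse a MySQL CREATE TABLE carrying a PARTITION BY RANGE clause
String sql = "CREATE TABLE t_order (id BIGINT, created DATE)"
+ " PARTITION BY RANGE (YEAR(created)) ("
+ " PARTITION p2015 VALUES LESS THAN (2016),"
+ " PARTITION p2016 VALUES LESS THAN (2017))";
MySqlStatementParser parser = new MySqlStatementParser(sql);
MySqlCreateTableStatement stmt = (MySqlCreateTableStatement) parser.parseStatementList().get(0);
SQLPartitionBy partitioning = stmt.getPartitioning();
if (partitioning instanceof SQLPartitionByRange) {
// the two partition definitions collected by parseCrateTable
System.out.println(((SQLPartitionByRange) partitioning).getPartitions().size());
}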
Use of com.alibaba.druid.sql.ast.SQLPartitionByRange in project druid by alibaba.
The class OracleCreateTableParser, method partitionByRange:
protected SQLPartitionByRange partitionByRange() {
acceptIdentifier("RANGE");
accept(Token.LPAREN);
SQLPartitionByRange clause = new SQLPartitionByRange();
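// RANGE (col [, col ...]) followed by an optional INTERVAL (expr) clause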
for (; ; ) {
SQLName column = this.exprParser.name();
clause.addColumn(column);
if (lexer.token() == Token.COMMA) {
lexer.nextToken();
continue;
}
break;
}
accept(Token.RPAREN);
if (lexer.token() == Token.INTERVAL) {
lexer.nextToken();
accept(Token.LPAREN);
clause.setInterval(this.exprParser.expr());
accept(Token.RPAREN);
}
parsePartitionByRest(clause);
return clause;
}
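The Oracle variant additionally recognizes interval partitioning. A hedged sketch, assuming getInterval() mirrors the setInterval() call above and that OracleStatementParser routes CREATE TABLE through this parser; the SQL is illustrative.
import com.alibaba.druid.sql.ast.SQLPartitionByRange;
import com.alibaba.druid.sql.dialect.oracle.ast.stmt.OracleCreateTableStatement;
import com.alibaba.druid.sql.dialect.oracle.parser.OracleStatementParser;
String sql = "CREATE TABLE t_log (id NUMBER, created DATE)"
+ " PARTITION BY RANGE (created)"
+ " INTERVAL (NUMTOYMINTERVAL(1, 'MONTH'))"
+ " (PARTITION p0 VALUES LESS THAN (TO_DATE('2016-01-01', 'YYYY-MM-DD')))";
OracleStatementParser parser = new OracleStatementParser(sql);
OracleCreateTableStatement stmt = (OracleCreateTableStatement) parser.parseStatementList().get(0);
SQLPartitionByRange byRange = (SQLPartitionByRange) stmt.getPartitioning();
// the INTERVAL expression captured by setInterval(...)
System.out.println(byRange.getInterval());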
Use of com.alibaba.druid.sql.ast.SQLPartitionByRange in project druid by alibaba.
The class OracleCreateTableParser, method parseCrateTable:
public OracleCreateTableStatement parseCrateTable(boolean acceptCreate) {
OracleCreateTableStatement stmt = (OracleCreateTableStatement) super.parseCrateTable(acceptCreate);
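// physical attributes and organization clauses: TABLESPACE, STORAGE, LOB, SEGMENT CREATION, PARTITION BY, ...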
for (; ; ) {
if (lexer.token() == Token.TABLESPACE) {
lexer.nextToken();
stmt.setTablespace(this.exprParser.name());
continue;
} else if (identifierEquals("IN_MEMORY_METADATA")) {
lexer.nextToken();
stmt.setInMemoryMetadata(true);
continue;
} else if (identifierEquals("CURSOR_SPECIFIC_SEGMENT")) {
lexer.nextToken();
stmt.setCursorSpecificSegment(true);
continue;
} else if (identifierEquals("NOPARALLEL")) {
lexer.nextToken();
stmt.setParallel(false);
continue;
} else if (lexer.token() == Token.LOGGING) {
lexer.nextToken();
stmt.setLogging(Boolean.TRUE);
continue;
} else if (lexer.token() == Token.CACHE) {
lexer.nextToken();
stmt.setCache(Boolean.TRUE);
continue;
} else if (lexer.token() == Token.NOCACHE) {
lexer.nextToken();
stmt.setCache(Boolean.FALSE);
continue;
} else if (lexer.token() == Token.NOCOMPRESS) {
lexer.nextToken();
stmt.setCompress(Boolean.FALSE);
continue;
} else if (lexer.token() == Token.ON) {
lexer.nextToken();
accept(Token.COMMIT);
stmt.setOnCommit(true);
continue;
} else if (identifierEquals("PRESERVE")) {
lexer.nextToken();
acceptIdentifier("ROWS");
stmt.setPreserveRows(true);
continue;
} else if (identifierEquals("STORAGE")) {
OracleStorageClause storage = ((OracleExprParser) this.exprParser).parseStorage();
stmt.setStorage(storage);
continue;
} else if (identifierEquals("organization")) {
lexer.nextToken();
accept(Token.INDEX);
stmt.setOrganizationIndex(true);
continue;
} else if (lexer.token() == Token.PCTFREE) {
lexer.nextToken();
stmt.setPtcfree(this.exprParser.expr());
continue;
} else if (identifierEquals("PCTUSED")) {
lexer.nextToken();
stmt.setPctused(this.exprParser.expr());
continue;
} else if (lexer.token() == Token.STORAGE) {
OracleStorageClause storage = ((OracleExprParser) this.exprParser).parseStorage();
stmt.setStorage(storage);
continue;
} else if (lexer.token() == Token.LOB) {
OracleLobStorageClause lobStorage = ((OracleExprParser) this.exprParser).parseLobStorage();
stmt.setLobStorage(lobStorage);
continue;
} else if (lexer.token() == Token.INITRANS) {
lexer.nextToken();
stmt.setInitrans(this.exprParser.expr());
continue;
} else if (lexer.token() == Token.MAXTRANS) {
lexer.nextToken();
stmt.setMaxtrans(this.exprParser.expr());
continue;
} else if (lexer.token() == Token.SEGMENT) {
lexer.nextToken();
accept(Token.CREATION);
if (lexer.token() == Token.IMMEDIATE) {
lexer.nextToken();
stmt.setDeferredSegmentCreation(DeferredSegmentCreation.IMMEDIATE);
} else {
accept(Token.DEFERRED);
stmt.setDeferredSegmentCreation(DeferredSegmentCreation.DEFERRED);
}
continue;
} else if (identifierEquals("PARTITION")) {
lexer.nextToken();
accept(Token.BY);
if (identifierEquals("RANGE")) {
SQLPartitionByRange partitionByRange = partitionByRange();
partitionClauseRest(partitionByRange);
stmt.setPartitioning(partitionByRange);
continue;
} else if (identifierEquals("HASH")) {
SQLPartitionByHash partitionByHash = partitionByHash();
partitionClauseRest(partitionByHash);
stmt.setPartitioning(partitionByHash);
continue;
} else if (identifierEquals("LIST")) {
SQLPartitionByList partitionByList = partitionByList();
partitionClauseRest(partitionByList);
stmt.setPartitioning(partitionByList);
continue;
} else {
throw new ParserException("TODO : " + lexer.token() + " " + lexer.stringVal());
}
}
break;
}
if (lexer.token() == Token.AS) {
lexer.nextToken();
OracleSelect select = new OracleSelectParser(exprParser).select();
stmt.setSelect(select);
}
return stmt;
}
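A sketch of exercising the Oracle physical-attribute loop above, again with accessor names (getTablespace(), getPtcfree()) inferred from the setters in the listing rather than confirmed against the API; the SQL is illustrative.
import com.alibaba.druid.sql.dialect.oracle.ast.stmt.OracleCreateTableStatement;
import com.alibaba.druid.sql.dialect.oracle.parser.OracleStatementParser;
String sql = "CREATE TABLE t_user (id NUMBER, name VARCHAR2(64))"
+ " TABLESPACE users PCTFREE 10 NOCACHE";
OracleStatementParser parser = new OracleStatementParser(sql);
OracleCreateTableStatement stmt = (OracleCreateTableStatement) parser.parseStatementList().get(0);
System.out.println(stmt.getTablespace()); // users
System.out.println(stmt.getPtcfree()); // 10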
Use of com.alibaba.druid.sql.ast.SQLPartitionByRange in project druid by alibaba.
The class MySqlCreateTableParser, method partitionByRange:
protected SQLPartitionByRange partitionByRange() {
acceptIdentifier("RANGE");
SQLPartitionByRange clause = new SQLPartitionByRange();
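// either RANGE (expr) or RANGE COLUMNS (col [, col ...])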
if (lexer.token() == Token.LPAREN) {
lexer.nextToken();
clause.setExpr(this.exprParser.expr());
accept(Token.RPAREN);
} else {
acceptIdentifier("COLUMNS");
accept(Token.LPAREN);
for (; ; ) {
clause.addColumn(this.exprParser.name());
if (lexer.token() == Token.COMMA) {
lexer.nextToken();
continue;
}
break;
}
accept(Token.RPAREN);
}
return clause;
}
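Unlike the Oracle helper, the MySQL form also accepts RANGE COLUMNS. A sketch of that branch, with getColumns() assumed to mirror addColumn() above and the SQL text only illustrative.
import com.alibaba.druid.sql.ast.SQLPartitionByRange;
import com.alibaba.druid.sql.dialect.mysql.ast.statement.MySqlCreateTableStatement;
import com.alibaba.druid.sql.dialect.mysql.parser.MySqlStatementParser;
String sql = "CREATE TABLE t_event (tenant_id INT, created DATE)"
+ " PARTITION BY RANGE COLUMNS (tenant_id, created) ("
+ " PARTITION p0 VALUES LESS THAN (10, '2016-01-01'),"
+ " PARTITION p1 VALUES LESS THAN (20, '2017-01-01'))";
MySqlStatementParser parser = new MySqlStatementParser(sql);
MySqlCreateTableStatement stmt = (MySqlCreateTableStatement) parser.parseStatementList().get(0);
SQLPartitionByRange byRange = (SQLPartitionByRange) stmt.getPartitioning();
System.out.println(byRange.getColumns().size()); // 2: tenant_id and created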