use of com.alibaba.druid.sql.parser.Token in project druid by alibaba.
the class AntsparkCreateTableParser method parseCreateTable.
public SQLCreateTableStatement parseCreateTable(boolean acceptCreate) {
    AntsparkCreateTableStatement stmt = new AntsparkCreateTableStatement();

    if (acceptCreate) {
        if (lexer.hasComment() && lexer.isKeepComments()) {
            stmt.addBeforeComment(lexer.readAndResetComments());
        }
        accept(Token.CREATE);
    }

    if (lexer.identifierEquals(FnvHash.Constants.EXTERNAL)) {
        lexer.nextToken();
        stmt.setExternal(true);
    }

    if (lexer.identifierEquals(FnvHash.Constants.TEMPORARY)) {
        lexer.nextToken();
        stmt.setType(SQLCreateTableStatement.Type.TEMPORARY);
    }

    accept(Token.TABLE);

    if (lexer.token() == Token.IF || lexer.identifierEquals(FnvHash.Constants.IF)) {
        lexer.nextToken();
        accept(Token.NOT);
        accept(Token.EXISTS);
        stmt.setIfNotExiists(true); // (sic: spelling follows the project's API)
    }

    stmt.setName(this.exprParser.name());

    // table element list: columns and table-level constraints
    if (lexer.token() == Token.LPAREN) {
        lexer.nextToken();
        for (;;) {
            Token token = lexer.token();
            if (token == Token.IDENTIFIER || token == Token.LITERAL_ALIAS) {
                SQLColumnDefinition column = this.exprParser.parseColumn();
                stmt.getTableElementList().add(column);
            } else if (token == Token.PRIMARY
                    || token == Token.UNIQUE
                    || token == Token.CHECK
                    || token == Token.CONSTRAINT
                    || token == Token.FOREIGN) {
                SQLConstraint constraint = this.exprParser.parseConstaint(); // (sic)
                constraint.setParent(stmt);
                stmt.getTableElementList().add((SQLTableElement) constraint);
            } else if (token == Token.TABLESPACE) {
                throw new ParserException("TODO " + lexer.info());
            } else {
                SQLColumnDefinition column = this.exprParser.parseColumn();
                stmt.getTableElementList().add(column);
            }

            if (lexer.token() == Token.COMMA) {
                lexer.nextToken();
                if (lexer.token() == Token.RPAREN) {
                    // compatible for sql server: tolerate a trailing comma
                    break;
                }
                continue;
            }
            break;
        }
        accept(Token.RPAREN);
    }

    // USING <datasource>
    if (lexer.token() == Token.USING) {
        lexer.nextToken();
        SQLName expr = this.exprParser.name();
        stmt.setDatasource(expr);
    }

    if (lexer.token() == Token.COMMENT) {
        lexer.nextToken();
        SQLExpr comment = this.exprParser.expr();
        stmt.setComment(comment);
    }

    if (lexer.identifierEquals(FnvHash.Constants.MAPPED)) {
        lexer.nextToken();
        accept(Token.BY);
        this.exprParser.parseAssignItem(stmt.getMappedBy(), stmt);
    }

    // PARTITIONED BY (col type, ...)
    if (lexer.token() == Token.PARTITIONED) {
        lexer.nextToken();
        accept(Token.BY);
        accept(Token.LPAREN);
        for (;;) {
            if (lexer.token() != Token.IDENTIFIER) {
                throw new ParserException("expect identifier. " + lexer.info());
            }
            SQLColumnDefinition column = this.exprParser.parseColumn();
            stmt.addPartitionColumn(column);

            if (lexer.isKeepComments() && lexer.hasComment()) {
                column.addAfterComment(lexer.readAndResetComments());
            }

            if (lexer.token() != Token.COMMA) {
                break;
            } else {
                lexer.nextToken();
                if (lexer.isKeepComments() && lexer.hasComment()) {
                    column.addAfterComment(lexer.readAndResetComments());
                }
            }
        }
        accept(Token.RPAREN);
    }

    // CLUSTERED BY (item, ...)
    if (lexer.identifierEquals(FnvHash.Constants.CLUSTERED)) {
        lexer.nextToken();
        accept(Token.BY);
        accept(Token.LPAREN);
        for (;;) {
            SQLSelectOrderByItem item = this.exprParser.parseSelectOrderByItem();
            stmt.addClusteredByItem(item);
            if (lexer.token() == Token.COMMA) {
                lexer.nextToken();
                continue;
            }
            break;
        }
        accept(Token.RPAREN);
    }

    // SKEWED BY (expr, ...) ON (value | (value, ...), ...)
    if (lexer.identifierEquals(FnvHash.Constants.SKEWED)) {
        lexer.nextToken();
        accept(Token.BY);
        accept(Token.LPAREN);
        this.exprParser.exprList(stmt.getSkewedBy(), stmt);
        accept(Token.RPAREN);
        accept(Token.ON);
        accept(Token.LPAREN);
        for (;;) {
            if (lexer.token() == Token.LPAREN) {
                SQLListExpr list = new SQLListExpr();
                lexer.nextToken();
                this.exprParser.exprList(list.getItems(), list);
                accept(Token.RPAREN);
                stmt.addSkewedByOn(list);
            } else {
                SQLExpr expr = this.exprParser.expr();
                stmt.addSkewedByOn(expr);
            }
            if (lexer.token() == Token.COMMA) {
                lexer.nextToken();
                continue;
            }
            break;
        }
        accept(Token.RPAREN);
    }

    // SORTED BY and ROW FORMAT may appear in either order
    if (lexer.identifierEquals(FnvHash.Constants.SORTED)) {
        parseSortedBy(stmt);
    }

    if (lexer.token() == Token.ROW || lexer.identifierEquals(FnvHash.Constants.ROW)) {
        parseRowFormat(stmt);
    }

    if (lexer.identifierEquals(FnvHash.Constants.SORTED)) {
        parseSortedBy(stmt);
    }

    // a CLUSTERED BY or SORTED BY clause requires INTO <n> BUCKETS
    if (stmt.getClusteredBy().size() > 0 || stmt.getSortedBy().size() > 0) {
        accept(Token.INTO);
        if (lexer.token() == Token.LITERAL_INT) {
            stmt.setBuckets(lexer.integerValue().intValue());
            lexer.nextToken();
        } else {
            throw new ParserException("into buckets must be integer. " + lexer.info());
        }
        acceptIdentifier("BUCKETS");
    }

    if (lexer.token() == Token.ROW || lexer.identifierEquals(FnvHash.Constants.ROW)) {
        parseRowFormat(stmt);
    }

    // STORED AS INPUTFORMAT ... [OUTPUTFORMAT ...] | STORED AS <name>
    if (lexer.identifierEquals(FnvHash.Constants.STORED)) {
        lexer.nextToken();
        accept(Token.AS);
        if (lexer.identifierEquals(FnvHash.Constants.INPUTFORMAT)) {
            HiveInputOutputFormat format = new HiveInputOutputFormat();
            lexer.nextToken();
            format.setInput(this.exprParser.primary());
            if (lexer.identifierEquals(FnvHash.Constants.OUTPUTFORMAT)) {
                lexer.nextToken();
                format.setOutput(this.exprParser.primary());
            }
            stmt.setStoredAs(format);
        } else {
            SQLName name = this.exprParser.name();
            stmt.setStoredAs(name);
        }
    }

    if (lexer.identifierEquals(FnvHash.Constants.LOCATION)) {
        lexer.nextToken();
        SQLExpr location = this.exprParser.expr();
        stmt.setLocation(location);
    }

    // TBLPROPERTIES (name = value, ...)
    if (lexer.identifierEquals(FnvHash.Constants.TBLPROPERTIES)) {
        lexer.nextToken();
        accept(Token.LPAREN);
        for (;;) {
            String name = lexer.stringVal();
            lexer.nextToken();
            accept(Token.EQ);
            SQLExpr value = this.exprParser.primary();
            stmt.addOption(name, value);
            if (lexer.token() == Token.COMMA) {
                lexer.nextToken();
                if (lexer.token() == Token.RPAREN) {
                    break;
                }
                continue;
            }
            break;
        }
        accept(Token.RPAREN);
    }

    if (lexer.identifierEquals(FnvHash.Constants.META)) {
        lexer.nextToken();
        acceptIdentifier("LIFECYCLE");
        stmt.setMetaLifeCycle(this.exprParser.primary());
    }

    // CREATE TABLE ... AS SELECT
    if (lexer.token() == Token.AS) {
        lexer.nextToken();
        SQLSelect select = this.createSQLSelectParser().select();
        stmt.setSelect(select);
    }

    if (lexer.token() == Token.LIKE) {
        lexer.nextToken();
        SQLName name = this.exprParser.name();
        stmt.setLike(name);
    }

    if (lexer.token() == Token.COMMENT) {
        lexer.nextToken();
        SQLExpr comment = this.exprParser.expr();
        stmt.setComment(comment);
    }

    return stmt;
}
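For orientation, a minimal usage sketch of the clause sequence this method accepts. The DDL below is parsed with Druid's Hive dialect, whose CREATE TABLE grammar covers the same clauses; this assumes a Druid version where SQLUtils.parseStatements(String, DbType) and DbType.hive are available (the AntSpark dialect's own entry point varies by version), and the table name and columns are hypothetical:

import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.SQLUtils;
import com.alibaba.druid.sql.ast.statement.SQLCreateTableStatement;

// Hive-style DDL matching the clause order accepted above: PARTITIONED BY,
// CLUSTERED BY ... INTO n BUCKETS, STORED AS, LOCATION, TBLPROPERTIES
String sql = "CREATE EXTERNAL TABLE IF NOT EXISTS t (id BIGINT, name STRING)"
        + " PARTITIONED BY (dt STRING)"
        + " CLUSTERED BY (id) INTO 4 BUCKETS"
        + " STORED AS ORC"
        + " LOCATION '/warehouse/t'"
        + " TBLPROPERTIES ('created.by' = 'demo')";
SQLCreateTableStatement create =
        (SQLCreateTableStatement) SQLUtils.parseStatements(sql, DbType.hive).get(0);
System.out.println(create.getName()); // t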
use of com.alibaba.druid.sql.parser.Token in project druid by alibaba.
the class DB2CreateTableParser method parseCreateTable.
public SQLCreateTableStatement parseCreateTable(boolean acceptCreate) {
    DB2CreateTableStatement createTable = newCreateStatement();

    if (acceptCreate) {
        if (lexer.hasComment() && lexer.isKeepComments()) {
            createTable.addBeforeComment(lexer.readAndResetComments());
        }
        accept(Token.CREATE);
    }

    if (lexer.identifierEquals("GLOBAL")) {
        lexer.nextToken();
        if (lexer.identifierEquals("TEMPORARY")) {
            lexer.nextToken();
            createTable.setType(SQLCreateTableStatement.Type.GLOBAL_TEMPORARY);
        } else {
            throw new ParserException("syntax error " + lexer.info());
        }
    } else if (lexer.token() == Token.IDENTIFIER && lexer.stringVal().equalsIgnoreCase("LOCAL")) {
        lexer.nextToken();
        if (lexer.token() == Token.IDENTIFIER && lexer.stringVal().equalsIgnoreCase("TEMPORARY")) {
            lexer.nextToken();
            createTable.setType(SQLCreateTableStatement.Type.LOCAL_TEMPORARY);
        } else {
            throw new ParserException("syntax error. " + lexer.info());
        }
    }

    accept(Token.TABLE);
    createTable.setName(this.exprParser.name());

    // table element list: columns and table-level constraints
    if (lexer.token() == Token.LPAREN) {
        lexer.nextToken();
        for (;;) {
            Token token = lexer.token();
            if (token == Token.IDENTIFIER || token == Token.LITERAL_ALIAS) {
                SQLColumnDefinition column = this.exprParser.parseColumn();
                createTable.getTableElementList().add(column);
            } else if (token == Token.PRIMARY
                    || token == Token.UNIQUE
                    || token == Token.CHECK
                    || token == Token.CONSTRAINT
                    || token == Token.FOREIGN) {
                SQLConstraint constraint = this.exprParser.parseConstaint(); // (sic)
                constraint.setParent(createTable);
                createTable.getTableElementList().add((SQLTableElement) constraint);
            } else if (token == Token.TABLESPACE) {
                throw new ParserException("TODO " + lexer.info());
            } else {
                SQLColumnDefinition column = this.exprParser.parseColumn();
                createTable.getTableElementList().add(column);
            }

            if (lexer.token() == Token.COMMA) {
                lexer.nextToken();
                if (lexer.token() == Token.RPAREN) {
                    // compatible for sql server: tolerate a trailing comma
                    break;
                }
                continue;
            }
            break;
        }
        accept(Token.RPAREN);

        if (lexer.identifierEquals("INHERITS")) {
            lexer.nextToken();
            accept(Token.LPAREN);
            SQLName inherits = this.exprParser.name();
            createTable.setInherits(new SQLExprTableSource(inherits));
            accept(Token.RPAREN);
        }
    }

    if (lexer.token() == Token.AS) {
        lexer.nextToken();
        SQLSelect select = this.createSQLSelectParser().select();
        createTable.setSelect(select);
    }

    // trailing DB2 table options, accepted in any order
    for (;;) {
        if (lexer.identifierEquals(FnvHash.Constants.DATA)) {
            lexer.nextToken();
            acceptIdentifier("CAPTURE");
            if (lexer.identifierEquals(FnvHash.Constants.NONE)) {
                lexer.nextToken();
                createTable.setDataCaptureNone(true);
                continue;
            }
            throw new ParserException("TODO " + lexer.info());
        } else if (lexer.token() == Token.IN) {
            lexer.nextToken();
            if (lexer.token() == Token.DATABASE) {
                lexer.nextToken();
                SQLName database = this.exprParser.name();
                createTable.setDatabase(database);
            } else if (lexer.identifierEquals("tablespace")) {
                throw new ParserException("TODO " + lexer.info());
            } else {
                SQLName tablespace = this.exprParser.name();
                createTable.setTablespace(tablespace);
            }
            continue;
        } else if (lexer.identifierEquals(FnvHash.Constants.PARTITIONING)) {
            SQLPartitionByHash partitionBy = new SQLPartitionByHash();
            lexer.nextToken();
            accept(Token.KEY);
            accept(Token.LPAREN);
            this.exprParser.exprList(partitionBy.getColumns(), partitionBy);
            accept(Token.RPAREN);
            accept(Token.USING);
            acceptIdentifier("HASHING");
            createTable.setPartitioning(partitionBy);
            continue;
        } else if (lexer.identifierEquals(FnvHash.Constants.VALIDPROC)) {
            lexer.nextToken();
            SQLName validproc = this.exprParser.name();
            createTable.setValidproc(validproc);
            continue;
        } else if (lexer.identifierEquals(FnvHash.Constants.COMPRESS)) {
            lexer.nextToken();
            createTable.setCompress(true);
            if (lexer.identifierEquals(FnvHash.Constants.YES)) {
                lexer.nextToken();
            }
            continue;
        } else if (lexer.token() == Token.INDEX) {
            lexer.nextToken();
            accept(Token.IN);
            SQLName indexIn = this.exprParser.name();
            createTable.setIndexIn(indexIn);
            continue;
        }
        break;
    }
    return createTable;
}
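A quick usage sketch against the DB2 dialect. DB2StatementParser is Druid's DB2 entry point; the table, column, and tablespace names are hypothetical:

import com.alibaba.druid.sql.ast.statement.SQLCreateTableStatement;
import com.alibaba.druid.sql.dialect.db2.parser.DB2StatementParser;

// the trailing options loop above accepts these clauses in any order
String sql = "CREATE TABLE emp (id INTEGER NOT NULL, name VARCHAR(64))"
        + " IN tsp01 COMPRESS YES DATA CAPTURE NONE";
DB2StatementParser parser = new DB2StatementParser(sql);
SQLCreateTableStatement create =
        (SQLCreateTableStatement) parser.parseStatementList().get(0);
System.out.println(create.getName()); // emp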
use of com.alibaba.druid.sql.parser.Token in project druid by alibaba.
the class Lexer method scanString.
protected void scanString() {
    mark = pos;
    boolean hasSpecial = false;
    Token preToken = this.token;

    for (;;) {
        if (isEOF()) {
            lexError("unclosed.str.lit");
            return;
        }

        ch = charAt(++pos);

        if (ch == '\'') {
            scanChar();
            if (ch != '\'') {
                // end of the literal
                token = LITERAL_CHARS;
                break;
            } else {
                // a doubled quote ('') escapes a single quote;
                // switch to buffered copying on the first escape
                if (!hasSpecial) {
                    initBuff(bufPos);
                    arraycopy(mark + 1, buf, 0, bufPos);
                    hasSpecial = true;
                }
                putChar('\'');
                continue;
            }
        }

        if (!hasSpecial) {
            bufPos++;
            continue;
        }

        if (bufPos == buf.length) {
            putChar(ch);
        } else {
            buf[bufPos++] = ch;
        }
    }

    if (!hasSpecial) {
        if (preToken == Token.AS) {
            // a quoted alias after AS keeps its surrounding quotes
            stringVal = subString(mark, bufPos + 2);
        } else {
            stringVal = subString(mark + 1, bufPos);
        }
    } else {
        stringVal = new String(buf, 0, bufPos);
    }
}
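The subtle part is the doubled-quote escape: the buffer is allocated lazily, only when the first '' appears, and otherwise the literal is taken as a plain substring of the input. A freestanding sketch of the same lazy-copy idea (a hypothetical helper, not part of Druid):

// hypothetical helper; input includes the surrounding quotes, e.g. 'it''s'
static String unquote(String s) {
    StringBuilder buf = null; // allocated only once the first '' is seen
    for (int i = 1; i < s.length() - 1; i++) {
        char ch = s.charAt(i);
        if (ch == '\'' && i + 1 < s.length() - 1 && s.charAt(i + 1) == '\'') {
            if (buf == null) {
                buf = new StringBuilder(s.substring(1, i)); // copy the plain prefix once
            }
            buf.append('\'');
            i++; // skip the second quote of the pair
        } else if (buf != null) {
            buf.append(ch);
        }
    }
    // fast path: no escape seen, reuse the original characters
    return buf != null ? buf.toString() : s.substring(1, s.length() - 1);
}

// unquote("'it''s'") returns "it's"; unquote("'abc'") returns "abc"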
use of com.alibaba.druid.sql.parser.Token in project druid by alibaba.
the class Lexer method parameterize.
public static String parameterize(String sql, DbType dbType) {
    Lexer lexer = SQLParserUtils.createLexer(sql, dbType);
    // optimized
    lexer.optimizedForParameterized = true;
    lexer.nextToken();

    StringBuffer buf = new StringBuffer();
    for_:
    for (;;) {
        Token token = lexer.token;
        switch (token) {
            case LITERAL_ALIAS:
            case LITERAL_FLOAT:
            case LITERAL_CHARS:
            case LITERAL_INT:
            case LITERAL_NCHARS:
            case LITERAL_HEX:
            case VARIANT:
                if (buf.length() != 0) {
                    buf.append(' ');
                }
                buf.append('?');
                break;
            case COMMA:
                buf.append(',');
                break;
            case EQ:
                buf.append('=');
                break;
            case EOF:
                break for_;
            case ERROR:
                return sql;
            case SELECT:
                buf.append("SELECT");
                break;
            case UPDATE:
                buf.append("UPDATE");
                break;
            default:
                if (buf.length() != 0) {
                    buf.append(' ');
                }
                lexer.stringVal(buf);
                break;
        }
        lexer.nextToken();
    }
    return buf.toString();
}
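Usage is direct, since the method is public and static. Every literal and bind variable collapses to a '?' placeholder; the exact casing and spacing of the output depend on the lexer, and the SQL string here is hypothetical:

import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.parser.Lexer;

String raw = "select * from orders where id = 42 and status = 'OPEN'";
String template = Lexer.parameterize(raw, DbType.mysql);
// template is roughly: SELECT * from orders where id = ? and status = ?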
use of com.alibaba.druid.sql.parser.Token in project druid by alibaba.
the class Lexer method scanMultiLineComment.
private void scanMultiLineComment() {
    Token lastToken = this.token;
    int depth = 1;
    scanChar();
    scanChar();
    mark = pos;
    bufPos = 0;

    for (;;) {
        // a nested hint-style opener (/*! or /*+) adds a level of depth
        if (ch == '/' && charAt(pos + 1) == '*') {
            scanChar();
            scanChar();
            if (ch == '!' || ch == '+') {
                scanChar();
                ++depth;
            }
        }

        if (ch == '*' && charAt(pos + 1) == '/') {
            scanChar();
            scanChar();
            if (0 == --depth) {
                break;
            }
        }

        // end of input reached before the closing */
        if (ch == EOI) {
            throw new ParserException("unterminated /* comment. " + info());
        }
        scanChar();
        bufPos++;
    }

    stringVal = subString(mark, bufPos);
    token = Token.MULTI_LINE_COMMENT;
    commentCount++;
    if (keepComments) {
        addComment(stringVal);
    }

    if (commentHandler != null && commentHandler.handle(lastToken, stringVal)) {
        return;
    }

    if (!isAllowComment() && !isSafeComment(stringVal)) {
        throw new NotAllowCommentException();
    }
}
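The depth counter exists because hint-style openers (/*! and /*+) can nest inside a plain comment. A freestanding sketch of the same depth logic over a plain string (hypothetical helper, simplified from the lexer above):

// hypothetical helper: scan a /* ... */ comment starting at 'start',
// treating /*! and /*+ as nested openers, as the lexer loop above does
static String scanComment(String src, int start) {
    int pos = start + 2; // skip the opening "/*"
    int depth = 1;
    while (pos < src.length()) {
        if (src.startsWith("/*", pos) && pos + 2 < src.length()
                && (src.charAt(pos + 2) == '!' || src.charAt(pos + 2) == '+')) {
            depth++;
            pos += 3;
        } else if (src.startsWith("*/", pos)) {
            if (--depth == 0) {
                return src.substring(start, pos + 2); // include the closer
            }
            pos += 2;
        } else {
            pos++;
        }
    }
    throw new IllegalStateException("unterminated /* comment");
}

// scanComment("select /* outer /*! hint */ still outer */ 1", 7)
// returns "/* outer /*! hint */ still outer */"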