Example use of com.alibaba.druid.sql.parser.Token in the alibaba/druid project: class MySqlExprParser, method primary().
/**
 * Parses a MySQL primary expression, handling MySQL-specific token kinds
 * (OUTFILE, user/system variables, VALUES(), BINARY, CACHE/GROUP used as
 * identifiers) before delegating everything else to the generic parser.
 *
 * @return the parsed expression, post-processed by {@code primaryRest}
 */
public SQLExpr primary() {
    final Token currentToken = lexer.token();

    // SELECT ... INTO OUTFILE 'file' — MySQL-specific extension.
    if (identifierEquals("outfile")) {
        lexer.nextToken();
        SQLExpr outFile = primary();
        return primaryRest(new MySqlOutFileExpr(outFile));
    }

    if (currentToken == Token.LITERAL_ALIAS) {
        // A quoted alias literal becomes a char expression in MySQL.
        String aliasText = lexer.stringVal();
        lexer.nextToken();
        return primaryRest(new SQLCharExpr(aliasText));
    }

    if (currentToken == Token.VARIANT) {
        // User (@x) or system (@@x) variable reference.
        SQLVariantRefExpr varRef = new SQLVariantRefExpr(lexer.stringVal());
        lexer.nextToken();
        String varName = varRef.getName();
        if (varName.equalsIgnoreCase("@@global")) {
            // @@global.<name> — global scope flag set on the new ref.
            accept(Token.DOT);
            varRef = new SQLVariantRefExpr(lexer.stringVal(), true);
            lexer.nextToken();
        } else if (varName.equals("@") && lexer.token() == Token.LITERAL_CHARS) {
            // @'quoted name' form.
            varRef.setName("@'" + lexer.stringVal() + "'");
            lexer.nextToken();
        } else if (varName.equals("@@") && lexer.token() == Token.LITERAL_CHARS) {
            // @@'quoted name' form.
            varRef.setName("@@'" + lexer.stringVal() + "'");
            lexer.nextToken();
        }
        return primaryRest(varRef);
    }

    if (currentToken == Token.VALUES) {
        // VALUES(col) inside ON DUPLICATE KEY UPDATE — must be a call.
        lexer.nextToken();
        if (lexer.token() != Token.LPAREN) {
            throw new ParserException("syntax error, illegal values clause");
        }
        return this.methodRest(new SQLIdentifierExpr("VALUES"), true);
    }

    if (currentToken == Token.BINARY) {
        lexer.nextToken();
        Token next = lexer.token();
        if (next == Token.COMMA || next == Token.SEMI || next == Token.EOF) {
            // Bare BINARY used as an identifier (e.g. a column named BINARY).
            return new SQLIdentifierExpr("BINARY");
        }
        // BINARY <expr> — unary cast-to-binary operator.
        return primaryRest(new SQLUnaryExpr(SQLUnaryOperator.BINARY, expr()));
    }

    if (currentToken == Token.CACHE || currentToken == Token.GROUP) {
        // Treat these keywords as plain identifiers in expression position.
        // NOTE(review): stringVal() is read after nextToken(), so it reflects
        // the token following CACHE/GROUP — confirm this is intended.
        lexer.nextToken();
        return primaryRest(new SQLIdentifierExpr(lexer.stringVal()));
    }

    return super.primary();
}
Example use of com.alibaba.druid.sql.parser.Token in the alibaba/druid project: class MySqlLexer, method scanComment().
/**
 * Scans a SQL comment starting at the current character, which must be
 * '-' or '/'. Produces one of: Token.SUB (a '-' that starts a negative
 * number, not a comment), Token.HINT (a /*+ ... or /*! ... optimizer
 * hint), Token.MULTI_LINE_COMMENT, Token.LINE_COMMENT, or Token.ERROR
 * (unterminated block comment). May throw NotAllowCommentException when
 * comments are disabled and the comment is not on the safe list.
 */
public void scanComment() {
// Remember the token preceding the comment for the comment handler.
Token lastToken = this.token;
if (ch == '-') {
// Look past the expected second '-': if a digit follows, this is
// a subtraction/negative-number context rather than a -- comment.
// NOTE(review): pos + 2 assumes ch is at pos and the next char is
// also '-' — confirm against the caller's invariants.
char next_2 = charAt(pos + 2);
if (isDigit(next_2)) {
scanChar();
token = Token.SUB;
return;
}
} else if (ch != '/') {
// Callers must only invoke this on '-' or '/'.
throw new IllegalStateException();
}
// Mark the start of the comment text.
mark = pos;
bufPos = 0;
scanChar();
// /*+ */
if (ch == '*') {
// Block comment: consume the '*' and any leading spaces.
scanChar();
bufPos++;
while (ch == ' ') {
scanChar();
bufPos++;
}
boolean isHint = false;
// Offset (relative to mark) where hint text begins.
int startHintSp = bufPos + 1;
if (//
ch == '!' || // oceanbase hints
ch == '+') {
// /*+ ... (Oracle-style hint) or /*! ... (MySQL/OceanBase hint).
isHint = true;
scanChar();
bufPos++;
}
// Consume until the closing star-slash or end of input.
for (; ; ) {
if (ch == EOI) {
// Unterminated block comment.
this.token = Token.ERROR;
return;
}
if (ch == '*' && charAt(pos + 1) == '/') {
// NOTE(review): += 3 (not 2) for the two-char terminator —
// bufPos appears to lag the consumed length by one here;
// the subString offsets below compensate. Verify carefully
// before changing.
bufPos += 3;
scanChar();
scanChar();
break;
}
scanChar();
bufPos++;
}
if (isHint) {
// Extract only the hint body, excluding the marker and the
// closing delimiter.
stringVal = subString(mark + startHintSp, (bufPos - startHintSp) - 2);
token = Token.HINT;
} else {
// Keep the whole comment text including delimiters.
stringVal = subString(mark, bufPos);
token = Token.MULTI_LINE_COMMENT;
commentCount++;
if (keepComments) {
addComment(stringVal);
}
}
endOfComment = isEOF();
// A registered handler may consume the comment entirely.
if (commentHandler != null && commentHandler.handle(lastToken, stringVal)) {
return;
}
// Hints are always allowed; plain comments may be rejected.
if (!isHint && !isAllowComment() && !isSafeComment(stringVal)) {
throw new NotAllowCommentException();
}
return;
}
// Line comment: "//" or "--" (the first '-'/'/' was consumed above).
if (ch == '/' || ch == '-') {
scanChar();
bufPos++;
// Consume to end of line, handling \r\n, lone \r, \n, and EOF.
for (; ; ) {
if (ch == '\r') {
if (charAt(pos + 1) == '\n') {
bufPos += 2;
scanChar();
break;
}
bufPos++;
break;
} else if (ch == EOI) {
break;
}
if (ch == '\n') {
scanChar();
bufPos++;
break;
}
scanChar();
bufPos++;
}
stringVal = subString(mark, bufPos + 1);
token = Token.LINE_COMMENT;
commentCount++;
if (keepComments) {
addComment(stringVal);
}
if (commentHandler != null && commentHandler.handle(lastToken, stringVal)) {
return;
}
endOfComment = isEOF();
// NOTE(review): unlike the block-comment path above, endOfComment is
// set here only after the handler check — confirm the ordering
// difference is intentional.
if (!isAllowComment() && (isEOF() || !isSafeComment(stringVal))) {
throw new NotAllowCommentException();
}
return;
}
}
Example use of com.alibaba.druid.sql.parser.Token in the alibaba/druid project: class OracleExprParser, method primary().
/**
 * Parses an Oracle primary expression, handling Oracle-specific tokens
 * (SYSDATE, PRIOR, bind variables via ':', BINARY_FLOAT/BINARY_DOUBLE
 * literals, CURSOR(...), unary +/- with literal folding, and a set of
 * storage-clause keywords usable as identifiers) before delegating to
 * the generic parser.
 *
 * @return the parsed expression, post-processed by {@code primaryRest}
 */
public SQLExpr primary() {
final Token tok = lexer.token();
SQLExpr sqlExpr = null;
switch(tok) {
case SYSDATE:
// SYSDATE, optionally followed by @! (remote-instance form).
lexer.nextToken();
OracleSysdateExpr sysdate = new OracleSysdateExpr();
if (lexer.token() == Token.MONKEYS_AT) {
lexer.nextToken();
accept(Token.BANG);
sysdate.setOption("!");
}
sqlExpr = sysdate;
return primaryRest(sqlExpr);
case PRIOR:
// CONNECT BY PRIOR <expr>.
lexer.nextToken();
sqlExpr = expr();
sqlExpr = new SQLUnaryExpr(SQLUnaryOperator.Prior, sqlExpr);
return primaryRest(sqlExpr);
case COLON:
// Bind variable: :<n> or :B<name> / :b<name>.
lexer.nextToken();
if (lexer.token() == Token.LITERAL_INT) {
String name = ":" + lexer.numberString();
lexer.nextToken();
return new SQLVariantRefExpr(name);
} else if (lexer.token() == Token.IDENTIFIER) {
String name = lexer.stringVal();
// Only identifiers starting with 'B'/'b' are accepted here.
if (name.charAt(0) == 'B' || name.charAt(0) == 'b') {
lexer.nextToken();
return new SQLVariantRefExpr(":" + name);
}
throw new ParserException("syntax error : " + lexer.token() + " " + lexer.stringVal());
} else {
throw new ParserException("syntax error : " + lexer.token());
}
case LITERAL_ALIAS:
// Re-wrap the quoted identifier with double quotes.
String alias = '"' + lexer.stringVal() + '"';
lexer.nextToken();
return primaryRest(new SQLIdentifierExpr(alias));
case BINARY_FLOAT:
OracleBinaryFloatExpr floatExpr = new OracleBinaryFloatExpr();
floatExpr.setValue(Float.parseFloat(lexer.numberString()));
lexer.nextToken();
return primaryRest(floatExpr);
case BINARY_DOUBLE:
OracleBinaryDoubleExpr doubleExpr = new OracleBinaryDoubleExpr();
doubleExpr.setValue(Double.parseDouble(lexer.numberString()));
lexer.nextToken();
return primaryRest(doubleExpr);
case TABLE:
// TABLE(...) collection expression — treated as an identifier here.
lexer.nextToken();
return primaryRest(new SQLIdentifierExpr("TABLE"));
case PLUS:
// Unary plus: fold into the literal where possible.
lexer.nextToken();
switch(lexer.token()) {
case LITERAL_INT:
sqlExpr = new SQLIntegerExpr(lexer.integerValue());
lexer.nextToken();
break;
case LITERAL_FLOAT:
sqlExpr = new SQLNumberExpr(lexer.decimalValue());
lexer.nextToken();
break;
case BINARY_FLOAT:
sqlExpr = new OracleBinaryFloatExpr(Float.parseFloat(lexer.numberString()));
lexer.nextToken();
break;
case BINARY_DOUBLE:
sqlExpr = new OracleBinaryDoubleExpr(Double.parseDouble(lexer.numberString()));
lexer.nextToken();
break;
case LPAREN:
// +(expr) keeps an explicit unary node.
lexer.nextToken();
sqlExpr = expr();
accept(Token.RPAREN);
sqlExpr = new SQLUnaryExpr(SQLUnaryOperator.Plus, sqlExpr);
break;
default:
throw new ParserException("TODO");
}
return primaryRest(sqlExpr);
case SUB:
// Unary minus: negate literals in place, handling the
// Integer/Long boundary values explicitly.
lexer.nextToken();
switch(lexer.token()) {
case LITERAL_INT:
Number integerValue = lexer.integerValue();
if (integerValue instanceof Integer) {
int intVal = ((Integer) integerValue).intValue();
if (intVal == Integer.MIN_VALUE) {
// -Integer.MIN_VALUE overflows int; widen to long.
integerValue = Long.valueOf(((long) intVal) * -1);
} else {
integerValue = Integer.valueOf(intVal * -1);
}
} else if (integerValue instanceof Long) {
long longVal = ((Long) integerValue).longValue();
if (longVal == 2147483648L) {
// -2147483648 fits back into int (Integer.MIN_VALUE).
integerValue = Integer.valueOf((int) (((long) longVal) * -1));
} else {
integerValue = Long.valueOf(longVal * -1);
}
} else {
integerValue = ((BigInteger) integerValue).negate();
}
sqlExpr = new SQLIntegerExpr(integerValue);
lexer.nextToken();
break;
case LITERAL_FLOAT:
sqlExpr = new SQLNumberExpr(lexer.decimalValue().negate());
lexer.nextToken();
break;
case BINARY_FLOAT:
sqlExpr = new OracleBinaryFloatExpr(Float.parseFloat(lexer.numberString()) * -1);
lexer.nextToken();
break;
case BINARY_DOUBLE:
sqlExpr = new OracleBinaryDoubleExpr(Double.parseDouble(lexer.numberString()) * -1);
lexer.nextToken();
break;
case VARIANT:
case IDENTIFIER:
// -<variable or column>: keep an explicit Negative node.
sqlExpr = expr();
sqlExpr = new SQLUnaryExpr(SQLUnaryOperator.Negative, sqlExpr);
break;
case LPAREN:
lexer.nextToken();
sqlExpr = expr();
accept(Token.RPAREN);
sqlExpr = new SQLUnaryExpr(SQLUnaryOperator.Negative, sqlExpr);
break;
default:
throw new ParserException("TODO " + lexer.token());
}
return primaryRest(sqlExpr);
case CURSOR:
// CURSOR(<subquery>).
lexer.nextToken();
accept(Token.LPAREN);
OracleSelect select = createSelectParser().select();
OracleCursorExpr cursorExpr = new OracleCursorExpr(select);
accept(Token.RPAREN);
sqlExpr = cursorExpr;
return primaryRest(sqlExpr);
// Oracle storage/physical-attribute keywords that may legitimately
// appear as identifiers in expression position.
case MODEL:
case PCTFREE:
case INITRANS:
case MAXTRANS:
case SEGMENT:
case CREATION:
case IMMEDIATE:
case DEFERRED:
case STORAGE:
case NEXT:
case MINEXTENTS:
case MAXEXTENTS:
case MAXSIZE:
case PCTINCREASE:
case FLASH_CACHE:
case CELL_FLASH_CACHE:
case KEEP:
case NONE:
case LOB:
case STORE:
case ROW:
case CHUNK:
case CACHE:
case NOCACHE:
case LOGGING:
case NOCOMPRESS:
case KEEP_DUPLICATES:
case EXCEPTIONS:
case PURGE:
sqlExpr = new SQLIdentifierExpr(lexer.stringVal());
lexer.nextToken();
return primaryRest(sqlExpr);
default:
// Anything else is handled by the dialect-independent parser.
return super.primary();
}
}
Example use of com.alibaba.druid.sql.parser.Token in the alibaba/druid project: class MySqlStatementParser, method parseInsert().
/**
 * Parses a MySQL INSERT statement: optional modifiers (LOW_PRIORITY,
 * DELAYED, HIGH_PRIORITY, IGNORE, ROLLBACK_ON_FAIL), hints, INTO /
 * OVERWRITE, the target table, optional PARTITION clause, the column
 * list (with an optional column-string cache), then one of
 * VALUES / SET / SELECT / (SELECT) / WITH, and finally an optional
 * ON DUPLICATE KEY UPDATE clause.
 *
 * @return the populated MySqlInsertStatement
 */
public SQLInsertStatement parseInsert() {
MySqlInsertStatement stmt = new MySqlInsertStatement();
SQLName tableName = null;
if (lexer.token() == Token.INSERT) {
lexer.nextToken();
// Consume any run of INSERT modifiers, in any order.
for (; ; ) {
if (lexer.token() == Token.IDENTIFIER) {
long hash = lexer.hash_lower();
if (hash == FnvHash.Constants.LOW_PRIORITY) {
stmt.setLowPriority(true);
lexer.nextToken();
continue;
}
if (hash == FnvHash.Constants.DELAYED) {
stmt.setDelayed(true);
lexer.nextToken();
continue;
}
if (hash == FnvHash.Constants.HIGH_PRIORITY) {
stmt.setHighPriority(true);
lexer.nextToken();
continue;
}
if (hash == FnvHash.Constants.IGNORE) {
stmt.setIgnore(true);
lexer.nextToken();
continue;
}
if (hash == FnvHash.Constants.ROLLBACK_ON_FAIL) {
stmt.setRollbackOnFail(true);
lexer.nextToken();
continue;
}
}
break;
}
if (lexer.token() == Token.HINT) {
List<SQLCommentHint> hints = this.exprParser.parseHints();
stmt.setHints(hints);
}
// INTO [TABLE] or OVERWRITE [TABLE|INTO] before the table name.
if (lexer.token() == Token.INTO) {
lexer.nextToken();
if (lexer.token() == Token.TABLE) {
lexer.nextToken();
}
} else if (lexer.identifierEquals(FnvHash.Constants.OVERWRITE)) {
lexer.nextToken();
stmt.setOverwrite(true);
if (lexer.token() == Token.TABLE) {
lexer.nextToken();
} else if (lexer.token() == Token.INTO) {
lexer.nextToken();
}
}
// Skip a stray line comment before the table name.
if (lexer.token() == Token.LINE_COMMENT) {
lexer.nextToken();
}
// FULLTEXT DICTIONARY target (AliSQL extension).
if (lexer.token() == Token.FULLTEXT) {
lexer.nextToken();
if (lexer.identifierEquals(FnvHash.Constants.DICTIONARY)) {
lexer.nextToken();
stmt.setFulltextDictionary(true);
}
}
tableName = this.exprParser.name();
stmt.setTableName(tableName);
// A hint right after the table name is preserved as a comment.
if (lexer.token() == Token.HINT) {
String comment = "/*" + lexer.stringVal() + "*/";
lexer.nextToken();
stmt.getTableSource().addAfterComment(comment);
}
// An identifier here is a table alias — unless it is VALUE, which
// introduces the row list.
if (lexer.token() == Token.IDENTIFIER && !lexer.identifierEquals(FnvHash.Constants.VALUE)) {
stmt.setAlias(lexer.stringVal());
lexer.nextToken();
}
if (lexer.token() == Token.WITH) {
SQLSelectStatement withStmt = (SQLSelectStatement) parseWith();
stmt.setQuery(withStmt.getSelect());
}
// PARTITION (p1 [= v1], ...) [IF NOT EXISTS].
if (lexer.token() == Token.PARTITION) {
lexer.nextToken();
accept(Token.LPAREN);
for (; ; ) {
SQLAssignItem ptExpr = new SQLAssignItem();
ptExpr.setTarget(this.exprParser.name());
if (lexer.token() == Token.EQ) {
lexer.nextToken();
SQLExpr ptValue = this.exprParser.expr();
ptExpr.setValue(ptValue);
}
stmt.addPartition(ptExpr);
if (lexer.token() != Token.COMMA) {
break;
} else {
lexer.nextToken();
}
}
accept(Token.RPAREN);
if (lexer.token() == Token.IF) {
lexer.nextToken();
accept(Token.NOT);
accept(Token.EXISTS);
stmt.setIfNotExists(true);
}
}
}
int columnSize = 0;
List<SQLColumnDefinition> columnDefinitionList = null;
if (lexer.token() == Token.LPAREN) {
// Optionally reuse a previously-parsed column list for this table
// to skip re-lexing identical "(c1, c2, ...)" text.
boolean useInsertColumnsCache = lexer.isEnabled(SQLParserFeature.UseInsertColumnsCache);
InsertColumnsCache insertColumnsCache = null;
long tableNameHash = 0;
InsertColumnsCache.Entry cachedColumns = null;
if (useInsertColumnsCache) {
insertColumnsCache = this.insertColumnsCache;
if (insertColumnsCache == null) {
insertColumnsCache = InsertColumnsCache.global;
}
if (tableName != null) {
tableNameHash = tableName.nameHashCode64();
cachedColumns = insertColumnsCache.get(tableNameHash);
}
}
SchemaObject tableObject = null;
int pos = lexer.pos();
// Fast path: the upcoming text matches the cached columns string
// exactly, so skip the lexer past it.
if (cachedColumns != null && lexer.text.startsWith(cachedColumns.columnsString, pos)) {
if (!lexer.isEnabled(SQLParserFeature.OptimizedForParameterized)) {
List<SQLExpr> columns = stmt.getColumns();
List<SQLExpr> cachedColumns2 = cachedColumns.columns;
for (int i = 0, size = cachedColumns2.size(); i < size; i++) {
columns.add(cachedColumns2.get(i).clone());
}
}
stmt.setColumnsString(cachedColumns.columnsFormattedString, cachedColumns.columnsFormattedStringHash);
int p2 = pos + cachedColumns.columnsString.length();
lexer.reset(p2);
lexer.nextToken();
} else {
// Slow path: could be "(SELECT ...)" or an explicit column list.
Lexer.SavePoint mark = lexer.mark();
lexer.nextToken();
if (lexer.token() == Token.SELECT) {
// "(SELECT ...)" — rewind and parse as the insert query.
lexer.reset(mark);
SQLSelect select = this.exprParser.createSelectParser().select();
select.setParent(stmt);
stmt.setQuery(select);
} else {
// Optionally resolve column definitions for value type checks.
if (repository != null && lexer.isEnabled(SQLParserFeature.InsertValueCheckType)) {
tableObject = repository.findTable(tableName.nameHashCode64());
}
if (tableObject != null) {
columnDefinitionList = new ArrayList<SQLColumnDefinition>();
}
List<SQLExpr> columns = stmt.getColumns();
if (lexer.token() != Token.RPAREN) {
for (; ; ) {
String identName;
long hash;
Token token = lexer.token();
if (token == Token.IDENTIFIER) {
identName = lexer.stringVal();
hash = lexer.hash_lower();
} else if (token == Token.LITERAL_CHARS) {
// 'quoted' column name; keep the quotes unless
// IgnoreNameQuotes is on.
if (lexer.isEnabled(SQLParserFeature.IgnoreNameQuotes)) {
identName = lexer.stringVal();
} else {
identName = '\'' + lexer.stringVal() + '\'';
}
hash = 0;
} else if (token == Token.LITERAL_ALIAS) {
identName = lexer.stringVal();
if (lexer.isEnabled(SQLParserFeature.IgnoreNameQuotes)) {
identName = SQLUtils.normalize(identName, dbType);
}
hash = 0;
} else {
// Keywords used as column names fall through here.
identName = lexer.stringVal();
hash = 0;
}
lexer.nextTokenComma();
SQLExpr expr = new SQLIdentifierExpr(identName, hash);
// Handle qualified names: t.c, db.t.c.
while (lexer.token() == Token.DOT) {
lexer.nextToken();
String propertyName = lexer.stringVal();
lexer.nextToken();
expr = new SQLPropertyExpr(expr, propertyName);
}
expr.setParent(stmt);
columns.add(expr);
columnSize++;
if (tableObject != null) {
SQLColumnDefinition columnDefinition = tableObject.findColumn(hash);
columnDefinitionList.add(columnDefinition);
}
if (lexer.token() == Token.COMMA) {
lexer.nextTokenIdent();
continue;
}
break;
}
columnSize = stmt.getColumns().size();
// Populate the cache for subsequent identical inserts.
if (insertColumnsCache != null && tableName != null) {
String columnsString = lexer.subString(pos, lexer.pos() - pos);
List<SQLExpr> clonedColumns = new ArrayList<SQLExpr>(columnSize);
for (int i = 0; i < columns.size(); i++) {
clonedColumns.add(columns.get(i).clone());
}
StringBuilder buf = new StringBuilder();
SQLASTOutputVisitor outputVisitor = SQLUtils.createOutputVisitor(buf, dbType);
outputVisitor.printInsertColumns(columns);
String formattedColumnsString = buf.toString();
long columnsFormattedStringHash = FnvHash.fnv1a_64_lower(formattedColumnsString);
// NOTE(review): the cache lookup above keys on
// tableName.nameHashCode64(), but this put keys on
// tableName.hashCode64() — if those differ, cached entries
// would never be found. Confirm the two hashes agree here.
insertColumnsCache.put(tableName.hashCode64(), columnsString, formattedColumnsString, clonedColumns);
stmt.setColumnsString(formattedColumnsString, columnsFormattedStringHash);
}
}
accept(Token.RPAREN);
}
}
}
List<SQLCommentHint> commentHints = null;
if (lexer.token() == Token.HINT) {
commentHints = this.exprParser.parseHints();
} else if (lexer.token() == Token.LINE_COMMENT) {
lexer.nextToken();
}
// Row source: VALUES/VALUE, SET, SELECT, (SELECT ...), or WITH.
if (lexer.token() == Token.VALUES || lexer.identifierEquals(FnvHash.Constants.VALUE)) {
lexer.nextTokenLParen();
// InsertReader mode stops here; values are streamed elsewhere.
if (lexer.isEnabled(SQLParserFeature.InsertReader)) {
return stmt;
}
if (lexer.isEnabled(SQLParserFeature.InsertValueNative)) {
parseValueClauseNative(stmt.getValuesList(), columnDefinitionList, columnSize, stmt);
} else {
parseValueClause(stmt.getValuesList(), columnDefinitionList, columnSize, stmt);
}
} else if (lexer.token() == Token.SET) {
// INSERT ... SET c1 = v1, c2 = v2 — folded into columns + one
// values clause.
lexer.nextToken();
SQLInsertStatement.ValuesClause values = new SQLInsertStatement.ValuesClause();
stmt.addValueCause(values);
for (; ; ) {
SQLName name = this.exprParser.name();
stmt.addColumn(name);
if (lexer.token() == Token.EQ) {
lexer.nextToken();
} else {
accept(Token.COLONEQ);
}
values.addValue(this.exprParser.expr());
if (lexer.token() == Token.COMMA) {
lexer.nextToken();
continue;
}
break;
}
} else if (lexer.token() == (Token.SELECT)) {
SQLSelect select = this.exprParser.createSelectParser().select();
if (commentHints != null && !commentHints.isEmpty()) {
select.setHeadHint(commentHints.get(0));
}
select.setParent(stmt);
stmt.setQuery(select);
} else if (lexer.token() == (Token.LPAREN)) {
lexer.nextToken();
SQLSelect select = this.exprParser.createSelectParser().select();
select.setParent(stmt);
stmt.setQuery(select);
accept(Token.RPAREN);
} else if (lexer.token() == WITH) {
SQLSelect query = this.exprParser.createSelectParser().select();
stmt.setQuery(query);
}
// ON DUPLICATE KEY UPDATE c = expr [, ...].
if (lexer.token() == Token.ON) {
lexer.nextToken();
acceptIdentifier("DUPLICATE");
accept(Token.KEY);
accept(Token.UPDATE);
List<SQLExpr> duplicateKeyUpdate = stmt.getDuplicateKeyUpdate();
for (; ; ) {
SQLName name = this.exprParser.name();
accept(Token.EQ);
SQLExpr value;
try {
value = this.exprParser.expr();
} catch (EOFParserException e) {
throw new ParserException("EOF, " + name + "=", e);
}
SQLBinaryOpExpr assignment = new SQLBinaryOpExpr(name, SQLBinaryOperator.Equality, value);
assignment.setParent(stmt);
duplicateKeyUpdate.add(assignment);
if (lexer.token() == Token.COMMA) {
lexer.nextTokenIdent();
continue;
}
break;
}
}
return stmt;
}
Example use of com.alibaba.druid.sql.parser.Token in the alibaba/druid project: class OracleCompatibleTest, method test_compatibleTest().
/**
 * Smoke test for Oracle/PPAS compatibility: lexes a simple query with a
 * ROWNUM predicate and prints each token (plus the raw text of
 * identifiers) until EOF.
 */
public void test_compatibleTest() throws Exception {
    // oracle ppas
    String sql = "select * from t where rownum < 10";
    OracleLexer oracleLexer = new OracleLexer(sql);
    while (true) {
        oracleLexer.nextToken();
        Token current = oracleLexer.token();
        if (current == Token.EOF) {
            break;
        }
        if (current == Token.IDENTIFIER) {
            // Show the identifier text before its token name.
            System.out.println(oracleLexer.stringVal());
        } else if (current == Token.LITERAL_CHARS || current == Token.LITERAL_INT || current == Token.LITERAL_ALIAS) {
            // Literals need no extra output beyond the token itself.
        }
        System.out.println(current);
    }
}
Aggregations