Usage example of com.alibaba.druid.sql.parser.ParserException in the druid project by Alibaba: class OdpsCreateTableParser, method parseCrateTable.
// Parses an ODPS CREATE TABLE statement (legacy entry point; the method name
// keeps the historical "Crate" typo for API compatibility).
// Grammar handled: CREATE TABLE [IF NOT EXISTS] name [LIFECYCLE n]
//   ( LIKE other | AS select | (column-list) ) [COMMENT ...]
//   [PARTITIONED BY (column-list)] [LIFECYCLE n]
// @param acceptCreate whether the CREATE keyword still needs to be consumed
// @return the populated OdpsCreateTableStatement
// @throws ParserException if a column or partition name is not an identifier
public SQLCreateTableStatement parseCrateTable(boolean acceptCreate) {
OdpsCreateTableStatement stmt = new OdpsCreateTableStatement();
if (acceptCreate) {
accept(Token.CREATE);
}
accept(Token.TABLE);
// Optional IF NOT EXISTS; IF may be tokenized as a keyword or a plain identifier.
if (lexer.token() == Token.IF || identifierEquals("IF")) {
lexer.nextToken();
accept(Token.NOT);
accept(Token.EXISTS);
// NOTE: the setter name carries a historical typo ("Exiists"); kept as-is.
stmt.setIfNotExiists(true);
}
stmt.setName(this.exprParser.name());
// LIFECYCLE may appear immediately after the table name ...
if (identifierEquals("LIFECYCLE")) {
lexer.nextToken();
stmt.setLifecycle(this.exprParser.expr());
}
if (lexer.token() == Token.LIKE) {
// CREATE TABLE ... LIKE other_table
lexer.nextToken();
SQLName name = this.exprParser.name();
stmt.setLike(name);
} else if (lexer.token() == Token.AS) {
// CREATE TABLE ... AS SELECT (CTAS)
lexer.nextToken();
OdpsSelectParser selectParser = new OdpsSelectParser(this.exprParser);
SQLSelect select = selectParser.select();
stmt.setSelect(select);
} else {
// Explicit column definition list.
accept(Token.LPAREN);
// Preserve comments between '(' and the first column when comment keeping is on.
if (lexer.isKeepComments() && lexer.hasComment()) {
stmt.addBodyBeforeComment(lexer.readAndResetComments());
}
for (; ; ) {
if (lexer.token() != Token.IDENTIFIER) {
throw new ParserException("expect identifier");
}
SQLColumnDefinition column = this.exprParser.parseColumn();
stmt.getTableElementList().add(column);
// A trailing comment on the column line belongs to that column.
if (lexer.isKeepComments() && lexer.hasComment()) {
column.addAfterComment(lexer.readAndResetComments());
}
if (!(lexer.token() == (Token.COMMA))) {
break;
} else {
lexer.nextToken();
// A comment right after the comma still attaches to the preceding column.
if (lexer.isKeepComments() && lexer.hasComment()) {
column.addAfterComment(lexer.readAndResetComments());
}
}
}
accept(Token.RPAREN);
}
// Optional table COMMENT clause.
if (lexer.token() == Token.COMMENT) {
lexer.nextToken();
stmt.setComment(this.exprParser.primary());
}
// Optional PARTITIONED BY (col type, ...) clause.
if (lexer.token() == Token.PARTITIONED) {
lexer.nextToken();
accept(Token.BY);
accept(Token.LPAREN);
for (; ; ) {
if (lexer.token() != Token.IDENTIFIER) {
throw new ParserException("expect identifier");
}
SQLColumnDefinition column = this.exprParser.parseColumn();
stmt.addPartitionColumn(column);
if (lexer.isKeepComments() && lexer.hasComment()) {
column.addAfterComment(lexer.readAndResetComments());
}
if (!(lexer.token() == (Token.COMMA))) {
break;
} else {
lexer.nextToken();
if (lexer.isKeepComments() && lexer.hasComment()) {
column.addAfterComment(lexer.readAndResetComments());
}
}
}
accept(Token.RPAREN);
}
// ... or after the full definition; the later value overwrites the earlier one.
if (identifierEquals("LIFECYCLE")) {
lexer.nextToken();
stmt.setLifecycle(this.exprParser.expr());
}
return stmt;
}
Usage example of com.alibaba.druid.sql.parser.ParserException in the druid project by Alibaba: class OdpsStatementParser, method parseShow.
/**
 * Parses an ODPS SHOW statement. Supported forms:
 * SHOW PARTITIONS expr, SHOW STATISTIC expr,
 * SHOW TABLES [FROM db] [LIKE pattern], and
 * SHOW GRANTS [FOR user] [ON TYPE objectType].
 *
 * @return the parsed SHOW statement
 * @throws ParserException for any unsupported SHOW variant
 */
public SQLStatement parseShow() {
    accept(Token.SHOW);

    if (identifierEquals("PARTITIONS")) {
        lexer.nextToken();
        OdpsShowPartitionsStmt showPartitions = new OdpsShowPartitionsStmt();
        showPartitions.setTableSource(new SQLExprTableSource(this.exprParser.expr()));
        return showPartitions;
    }

    if (identifierEquals("STATISTIC")) {
        lexer.nextToken();
        OdpsShowStatisticStmt showStatistic = new OdpsShowStatisticStmt();
        showStatistic.setTableSource(new SQLExprTableSource(this.exprParser.expr()));
        return showStatistic;
    }

    if (identifierEquals("TABLES")) {
        lexer.nextToken();
        SQLShowTablesStatement showTables = new SQLShowTablesStatement();
        // Optional FROM <database> qualifier.
        if (lexer.token() == Token.FROM) {
            lexer.nextToken();
            showTables.setDatabase(this.exprParser.name());
        }
        // Optional LIKE <pattern> filter.
        if (lexer.token() == Token.LIKE) {
            lexer.nextToken();
            showTables.setLike(this.exprParser.expr());
        }
        return showTables;
    }

    if (identifierEquals("GRANTS")) {
        lexer.nextToken();
        OdpsShowGrantsStmt showGrants = new OdpsShowGrantsStmt();
        // Optional FOR <user> qualifier.
        if (lexer.token() == Token.FOR) {
            lexer.nextToken();
            showGrants.setUser(this.exprParser.expr());
        }
        // Optional ON TYPE <objectType> qualifier.
        if (lexer.token() == Token.ON) {
            lexer.nextToken();
            acceptIdentifier("type");
            showGrants.setObjectType(this.exprParser.expr());
        }
        return showGrants;
    }

    // Anything else after SHOW is not implemented.
    throw new ParserException("TODO " + lexer.token() + " " + lexer.stringVal());
}
Usage example of com.alibaba.druid.sql.parser.ParserException in the druid project by Alibaba: class SQLServerStatementParser, method parseDeclare.
// Parses a SQL Server DECLARE statement with one or more comma-separated items.
// Each item is either a table variable (DECLARE @t [AS] TABLE (...)), a cursor
// (DECLARE @c CURSOR), or a local scalar variable with optional initializer
// (DECLARE @v type [= expr]).
// @return the populated SQLServerDeclareStatement
// @throws ParserException on an unsupported TABLESPACE element inside a table body
public SQLStatement parseDeclare() {
this.accept(Token.DECLARE);
SQLServerDeclareStatement declareStatement = new SQLServerDeclareStatement();
// One iteration per declared item; items are separated by commas.
for (; ; ) {
SQLDeclareItem item = new SQLDeclareItem();
declareStatement.addItem(item);
item.setName(this.exprParser.name());
// Optional AS between the name and its type (T-SQL allows "DECLARE @t AS TABLE").
if (lexer.token() == Token.AS) {
lexer.nextToken();
}
if (lexer.token() == Token.TABLE) {
// Table variable: parse the parenthesized table body (columns and constraints).
lexer.nextToken();
item.setType(SQLDeclareItem.Type.TABLE);
if (lexer.token() == Token.LPAREN) {
lexer.nextToken();
for (; ; ) {
if (//
lexer.token() == Token.IDENTIFIER || lexer.token() == Token.LITERAL_ALIAS) {
SQLColumnDefinition column = this.exprParser.parseColumn();
item.getTableElementList().add(column);
} else if (//
lexer.token() == Token.PRIMARY || //
lexer.token() == Token.UNIQUE || //
lexer.token() == Token.CHECK || lexer.token() == Token.CONSTRAINT) {
// Table-level constraint rather than a column definition.
SQLConstraint constraint = this.exprParser.parseConstaint();
constraint.setParent(item);
item.getTableElementList().add((SQLTableElement) constraint);
} else if (lexer.token() == Token.TABLESPACE) {
throw new ParserException("TODO " + lexer.token());
} else {
// Fallback: treat any other token as the start of a column definition.
SQLColumnDefinition column = this.exprParser.parseColumn();
item.getTableElementList().add(column);
}
if (lexer.token() == Token.COMMA) {
lexer.nextToken();
// Tolerate a trailing comma before the closing parenthesis.
if (lexer.token() == Token.RPAREN) {
break;
}
continue;
}
break;
}
accept(Token.RPAREN);
}
// A table variable terminates the whole DECLARE list.
break;
} else if (lexer.token() == Token.CURSOR) {
item.setType(SQLDeclareItem.Type.CURSOR);
lexer.nextToken();
} else {
// Local scalar variable with a data type and optional "= value" initializer.
item.setType(SQLDeclareItem.Type.LOCAL);
item.setDataType(this.exprParser.parseDataType());
if (lexer.token() == Token.EQ) {
lexer.nextToken();
item.setValue(this.exprParser.expr());
}
}
// Comma continues with the next declared item; anything else ends the statement.
if (lexer.token() == Token.COMMA) {
lexer.nextToken();
continue;
} else {
break;
}
}
return declareStatement;
}
Usage example of com.alibaba.druid.sql.parser.ParserException in the druid project by Alibaba: class OdpsCreateTableParser, method parseCreateTable.
/**
 * Parses an ODPS <code>CREATE [EXTERNAL] TABLE</code> statement, including the
 * optional column body, PARTITIONED BY clause, (range) clustering and sorting
 * clauses, bucket/shard counts, storage format, serde/table properties,
 * LOCATION, USING, LIFECYCLE, and CTAS select.
 *
 * Fix: the "INTO n BUCKETS" branch (without clustering) previously threw
 * "into shards must be integer." even though it parses BUCKETS; the message
 * now says "buckets".
 *
 * @param acceptCreate whether the CREATE keyword still needs to be consumed
 * @return the populated OdpsCreateTableStatement
 * @throws ParserException on a non-identifier column/partition name or a
 *         non-integer bucket/shard count
 */
public SQLCreateTableStatement parseCreateTable(boolean acceptCreate) {
    OdpsCreateTableStatement stmt = new OdpsCreateTableStatement();
    if (acceptCreate) {
        accept(Token.CREATE);
    }

    // Optional EXTERNAL modifier.
    if (lexer.identifierEquals(FnvHash.Constants.EXTERNAL)) {
        lexer.nextToken();
        stmt.setExternal(true);
    }

    accept(Token.TABLE);

    // Optional IF NOT EXISTS; IF may arrive as a keyword or a plain identifier.
    if (lexer.token() == Token.IF || lexer.identifierEquals("IF")) {
        lexer.nextToken();
        accept(Token.NOT);
        accept(Token.EXISTS);
        // NOTE: setter name carries a historical typo ("Exiists"); kept for compatibility.
        stmt.setIfNotExiists(true);
    }

    stmt.setName(this.exprParser.name());

    if (lexer.token() == Token.COMMENT) {
        lexer.nextToken();
        stmt.setComment(this.exprParser.primary());
    }

    // A bare "CREATE TABLE name [COMMENT ...]" is a complete statement.
    if (lexer.token() == Token.SEMI || lexer.token() == Token.EOF) {
        return stmt;
    }

    // Clauses permitted before the column list, in any order.
    for (;;) {
        if (lexer.identifierEquals(FnvHash.Constants.TBLPROPERTIES)) {
            parseTblProperties(stmt);
            continue;
        }

        if (lexer.identifierEquals(FnvHash.Constants.LIFECYCLE)) {
            lexer.nextToken();
            stmt.setLifecycle(this.exprParser.expr());
            continue;
        }

        if (lexer.identifierEquals(FnvHash.Constants.STORED)) {
            // STORED AS <format> or STORED AS INPUTFORMAT ... [OUTPUTFORMAT ...]
            lexer.nextToken();
            accept(Token.AS);
            if (lexer.identifierEquals(FnvHash.Constants.INPUTFORMAT)) {
                HiveInputOutputFormat format = new HiveInputOutputFormat();
                lexer.nextToken();
                format.setInput(this.exprParser.primary());
                if (lexer.identifierEquals(FnvHash.Constants.OUTPUTFORMAT)) {
                    lexer.nextToken();
                    format.setOutput(this.exprParser.primary());
                }
                stmt.setStoredAs(format);
            } else {
                SQLName name = this.exprParser.name();
                stmt.setStoredAs(name);
            }
            continue;
        }

        break;
    }

    if (lexer.token() == Token.LIKE) {
        // CREATE TABLE ... LIKE other_table
        lexer.nextToken();
        SQLName name = this.exprParser.name();
        stmt.setLike(name);
    } else if (lexer.token() == Token.AS) {
        // CREATE TABLE ... AS SELECT (CTAS)
        lexer.nextToken();
        OdpsSelectParser selectParser = new OdpsSelectParser(this.exprParser);
        SQLSelect select = selectParser.select();
        stmt.setSelect(select);
    } else if (lexer.token() != Token.LPAREN && stmt.isExternal()) {
        // External tables may omit the column list entirely.
        // skip
    } else {
        accept(Token.LPAREN);

        // Preserve comments between '(' and the first column when comment keeping is on.
        if (lexer.isKeepComments() && lexer.hasComment()) {
            stmt.addBodyBeforeComment(lexer.readAndResetComments());
        }

        // Column definitions; ODPS accepts many keywords as column names,
        // hence the large case list.
        for (;;) {
            SQLColumnDefinition column;
            switch (lexer.token()) {
                case IDENTIFIER:
                case KEY:
                case SEQUENCE:
                case USER:
                case GROUP:
                case INDEX:
                case ENABLE:
                case DISABLE:
                case DESC:
                case ALL:
                case INTERVAL:
                case OPEN:
                case PARTITION:
                case SCHEMA:
                case CONSTRAINT:
                case COMMENT:
                case VIEW:
                case SHOW:
                case ORDER:
                case LEAVE:
                case UNIQUE:
                case DEFAULT:
                case EXPLAIN:
                case CHECK:
                case CLOSE:
                case IN:
                case OUT:
                case INOUT:
                case LIMIT:
                case FULL:
                case MINUS:
                case VALUES:
                case TRIGGER:
                case USE:
                case LIKE:
                case DISTRIBUTE:
                case DELETE:
                case UPDATE:
                case IS:
                case LEFT:
                case RIGHT:
                case REPEAT:
                case COMPUTE:
                case LOCK:
                case TABLE:
                case DO:
                case WHILE:
                case LOOP:
                case FOR:
                case RLIKE:
                case PROCEDURE:
                case GRANT:
                case EXCEPT:
                case CREATE:
                case PARTITIONED:
                case UNION:
                case PRIMARY:
                case INNER:
                case TO:
                case DECLARE:
                case REFERENCES:
                case FOREIGN:
                case ESCAPE:
                case BY:
                case ALTER:
                case SOME:
                case ASC:
                case NULL:
                case CURSOR:
                case FETCH:
                case OVER:
                case DATABASE:
                    column = this.exprParser.parseColumn(stmt);
                    break;
                default:
                    throw new ParserException("expect identifier. " + lexer.info());
            }

            stmt.getTableElementList().add(column);

            // A trailing comment on the column line belongs to that column.
            if (lexer.isKeepComments() && lexer.hasComment()) {
                column.addAfterComment(lexer.readAndResetComments());
            }

            if (!(lexer.token() == (Token.COMMA))) {
                break;
            } else {
                lexer.nextToken();
                // A comment right after the comma still attaches to the preceding column.
                if (lexer.isKeepComments() && lexer.hasComment()) {
                    column.addAfterComment(lexer.readAndResetComments());
                }
            }
        }
        accept(Token.RPAREN);
    }

    // Trailing clauses, in any order.
    for (;;) {
        if (lexer.token() == Token.COMMENT) {
            lexer.nextToken();
            stmt.setComment(this.exprParser.primary());
            continue;
        }

        if (lexer.token() == Token.PARTITIONED) {
            // PARTITIONED BY (col type, ...); several keywords allowed as names.
            lexer.nextToken();
            accept(Token.BY);
            accept(Token.LPAREN);
            for (;;) {
                switch (lexer.token()) {
                    case INDEX:
                    case KEY:
                    case CHECK:
                    case IDENTIFIER:
                    case GROUP:
                    case INTERVAL:
                    case LOOP:
                    case USER:
                    case TABLE:
                    case PARTITION:
                    case SEQUENCE:
                        break;
                    default:
                        throw new ParserException("expect identifier. " + lexer.info());
                }

                SQLColumnDefinition column = this.exprParser.parseColumn();
                stmt.addPartitionColumn(column);

                if (lexer.isKeepComments() && lexer.hasComment()) {
                    column.addAfterComment(lexer.readAndResetComments());
                }

                if (lexer.token() != Token.COMMA) {
                    break;
                } else {
                    lexer.nextToken();
                    if (lexer.isKeepComments() && lexer.hasComment()) {
                        column.addAfterComment(lexer.readAndResetComments());
                    }
                }
            }
            accept(Token.RPAREN);
            continue;
        }

        if (lexer.identifierEquals(FnvHash.Constants.RANGE)) {
            lexer.nextToken();
            // RANGE CLUSTERED: record the type here; the CLUSTERED keyword itself
            // is consumed by the CLUSTERED branch on the next iteration.
            if (lexer.identifierEquals(FnvHash.Constants.CLUSTERED)) {
                stmt.setClusteringType(ClusteringType.Range);
            }
            continue;
        }

        if (lexer.identifierEquals(FnvHash.Constants.CLUSTERED)) {
            // CLUSTERED BY (item, ...)
            lexer.nextToken();
            accept(Token.BY);
            accept(Token.LPAREN);
            for (;;) {
                SQLSelectOrderByItem item = this.exprParser.parseSelectOrderByItem();
                stmt.addClusteredByItem(item);
                if (lexer.token() == Token.COMMA) {
                    lexer.nextToken();
                    continue;
                }
                break;
            }
            accept(Token.RPAREN);
            continue;
        }

        if (lexer.identifierEquals(FnvHash.Constants.ROW)) {
            // ROW FORMAT ... (delegated to the expression parser).
            SQLExternalRecordFormat recordFormat = this.exprParser.parseRowFormat();
            stmt.setRowFormat(recordFormat);
            continue;
        }

        if (lexer.identifierEquals(FnvHash.Constants.SORTED)) {
            // SORTED BY (item, ...)
            lexer.nextToken();
            accept(Token.BY);
            accept(Token.LPAREN);
            for (;;) {
                SQLSelectOrderByItem item = this.exprParser.parseSelectOrderByItem();
                stmt.addSortedByItem(item);
                if (lexer.token() == Token.COMMA) {
                    lexer.nextToken();
                    continue;
                }
                break;
            }
            accept(Token.RPAREN);
            continue;
        }

        // INTO n BUCKETS [INTO m SHARDS] — only after hash clustering/sorting.
        if (stmt.getClusteringType() != ClusteringType.Range
                && (stmt.getClusteredBy().size() > 0 || stmt.getSortedBy().size() > 0)
                && lexer.token() == Token.INTO) {
            lexer.nextToken();
            if (lexer.token() == Token.LITERAL_INT) {
                stmt.setBuckets(lexer.integerValue().intValue());
                lexer.nextToken();
            } else {
                throw new ParserException("into buckets must be integer. " + lexer.info());
            }
            acceptIdentifier("BUCKETS");

            if (lexer.token() == Token.INTO) {
                lexer.nextToken();
                if (lexer.token() == Token.LITERAL_INT) {
                    stmt.setShards(lexer.integerValue().intValue());
                    lexer.nextToken();
                } else {
                    throw new ParserException("into shards must be integer. " + lexer.info());
                }
                acceptIdentifier("SHARDS");
            }
            continue;
        }

        // Standalone INTO n BUCKETS (no clustering clause).
        if (lexer.token() == Token.INTO) {
            lexer.nextToken();
            if (lexer.token() == Token.LITERAL_INT) {
                stmt.setIntoBuckets(new SQLIntegerExpr(lexer.integerValue().intValue()));
                lexer.nextToken();
                acceptIdentifier("BUCKETS");
            } else {
                // FIX: this branch parses BUCKETS; the message previously said "shards".
                throw new ParserException("into buckets must be integer. " + lexer.info());
            }
            continue;
        }

        // CTAS select may also follow the trailing clauses (only once).
        if (lexer.token() == Token.AS && stmt.getSelect() == null) {
            lexer.nextToken();
            OdpsSelectParser selectParser = new OdpsSelectParser(this.exprParser);
            SQLSelect select = selectParser.select();
            stmt.setSelect(select);
            continue;
        }

        if (lexer.identifierEquals(FnvHash.Constants.LIFECYCLE)) {
            lexer.nextToken();
            stmt.setLifecycle(this.exprParser.expr());
            continue;
        }

        if (lexer.identifierEquals(FnvHash.Constants.STORED)) {
            // STORED AS <format | INPUTFORMAT ... [OUTPUTFORMAT ...]> or STORED BY <expr>.
            lexer.nextToken();
            if (lexer.token() == Token.AS) {
                lexer.nextToken();
                if (lexer.identifierEquals(FnvHash.Constants.INPUTFORMAT)) {
                    HiveInputOutputFormat format = new HiveInputOutputFormat();
                    lexer.nextToken();
                    format.setInput(this.exprParser.primary());
                    if (lexer.identifierEquals(FnvHash.Constants.OUTPUTFORMAT)) {
                        lexer.nextToken();
                        format.setOutput(this.exprParser.primary());
                    }
                    stmt.setStoredAs(format);
                } else {
                    SQLName storedAs = this.exprParser.name();
                    stmt.setStoredAs(storedAs);
                }
            } else {
                accept(Token.BY);
                SQLExpr storedBy = this.exprParser.expr();
                stmt.setStoredBy(storedBy);
            }
            continue;
        }

        // Redundant with the LIFECYCLE branch above; kept for behavioral parity.
        if (lexer.identifierEquals(FnvHash.Constants.LIFECYCLE)) {
            lexer.nextToken();
            stmt.setLifecycle(this.exprParser.expr());
            continue;
        }

        if (lexer.token() == Token.WITH) {
            // WITH SERDEPROPERTIES (k = v, ...)
            lexer.nextToken();
            acceptIdentifier("SERDEPROPERTIES");
            accept(Token.LPAREN);
            this.exprParser.exprList(stmt.getWithSerdeproperties(), stmt);
            accept(Token.RPAREN);
            continue;
        }

        if (lexer.identifierEquals(FnvHash.Constants.TBLPROPERTIES)) {
            parseTblProperties(stmt);
            continue;
        }

        if (lexer.identifierEquals(FnvHash.Constants.LOCATION)) {
            lexer.nextToken();
            SQLExpr location = this.exprParser.expr();
            stmt.setLocation(location);
            continue;
        }

        // Redundant with the TBLPROPERTIES branch above; kept for behavioral parity.
        if (lexer.identifierEquals(FnvHash.Constants.TBLPROPERTIES)) {
            parseTblProperties(stmt);
            continue;
        }

        if (lexer.identifierEquals(FnvHash.Constants.USING)) {
            lexer.nextToken();
            SQLExpr using = this.exprParser.expr();
            stmt.setUsing(using);
            continue;
        }

        // Redundant with the LIFECYCLE branches above; kept for behavioral parity.
        if (lexer.identifierEquals(FnvHash.Constants.LIFECYCLE)) {
            lexer.nextToken();
            stmt.setLifecycle(this.exprParser.expr());
            continue;
        }

        break;
    }
    return stmt;
}
Usage example of com.alibaba.druid.sql.parser.ParserException in the druid project by Alibaba: class Hex, method eval.
/**
 * Evaluates the SQL HEX() function for constant folding.
 *
 * A String argument is hex-encoded byte-by-byte (platform default charset —
 * NOTE(review): consider StandardCharsets.UTF_8 for determinism; kept as-is
 * to preserve behavior). A Number argument is rendered as its uppercase
 * hexadecimal long value. Any other (or unresolved) argument yields
 * {@link SQLEvalVisitor#EVAL_ERROR}.
 *
 * @param visitor the evaluating visitor used to resolve the argument
 * @param x       the HEX(...) method-invocation expression
 * @return the hex string, or EVAL_ERROR when the argument cannot be evaluated
 * @throws ParserException if the call does not have exactly one argument
 */
public Object eval(SQLEvalVisitor visitor, SQLMethodInvokeExpr x) {
    if (x.getArguments().size() != 1) {
        throw new ParserException("argument's != 1, " + x.getArguments().size());
    }

    SQLExpr argExpr = x.getArguments().get(0);
    argExpr.accept(visitor);

    // A null attribute (unresolved argument) fails both instanceof checks below.
    Object argValue = argExpr.getAttributes().get(EVAL_VALUE);
    if (argValue instanceof String) {
        return HexBin.encode(((String) argValue).getBytes());
    }
    if (argValue instanceof Number) {
        return Long.toHexString(((Number) argValue).longValue()).toUpperCase();
    }
    return SQLEvalVisitor.EVAL_ERROR;
}
Aggregations