Use of com.alibaba.druid.sql.dialect.hive.stmt.HiveCreateFunctionStatement in the project druid by alibaba.
The method parseStatementListDialect of the class OdpsStatementParser.
/**
 * Entry point for ODPS (MaxCompute) dialect-specific statement parsing.
 * Tries each dialect-only statement form against the current lexer token;
 * on a match the parsed statement is appended to {@code statementList} and
 * {@code true} is returned. Returns {@code false} when the token stream does
 * not begin an ODPS-specific statement, letting the generic parser proceed.
 *
 * @param statementList output list the recognized statement is appended to
 * @return true if an ODPS dialect statement was consumed, false otherwise
 * @throws ParserException when ADD/REMOVE is followed by an unsupported target
 */
public boolean parseStatementListDialect(List<SQLStatement> statementList) {
// "FROM tbl INSERT ..." — ODPS allows the FROM clause before INSERT; delegate.
if (lexer.token() == Token.FROM) {
SQLStatement stmt = this.parseInsert();
statementList.add(stmt);
return true;
}
// ANALYZE <table> ... statistics-gathering statement.
if (lexer.identifierEquals("ANALYZE")) {
SQLStatement stmt = parseAnalyze();
statementList.add(stmt);
return true;
}
// ADD STATISTIC | USER | ACCOUNTPROVIDER | TABLE | FILE/JAR/PY/ARCHIVE ...
if (lexer.identifierEquals("ADD")) {
lexer.nextToken();
// ADD STATISTIC <table> <statistic-clause>
if (lexer.identifierEquals("STATISTIC")) {
lexer.nextToken();
OdpsAddStatisticStatement stmt = new OdpsAddStatisticStatement();
stmt.setTable(this.exprParser.name());
stmt.setStatisticClause(parseStaticClause());
statementList.add(stmt);
return true;
}
// ADD USER <name>
if (lexer.token() == Token.USER) {
lexer.nextToken();
OdpsAddUserStatement stmt = new OdpsAddUserStatement();
stmt.setUser(this.exprParser.name());
statementList.add(stmt);
return true;
}
// ADD ACCOUNTPROVIDER <name>
if (lexer.identifierEquals("ACCOUNTPROVIDER")) {
lexer.nextToken();
OdpsAddAccountProviderStatement stmt = new OdpsAddAccountProviderStatement();
stmt.setProvider(this.exprParser.name());
statementList.add(stmt);
return true;
}
// ADD TABLE <name> [PARTITION (...)] [AS alias] [COMMENT ...] [-f]
//            [TO PACKAGE pkg [WITH PRIVILEGES ...]]
if (lexer.token() == Token.TABLE) {
lexer.nextToken();
OdpsAddTableStatement stmt = new OdpsAddTableStatement();
stmt.setTable(this.exprParser.name());
if (lexer.token() == Token.PARTITION) {
lexer.nextToken();
this.exprParser.parseAssignItem(stmt.getPartitions(), stmt);
}
if (lexer.token() == Token.AS) {
lexer.nextToken();
SQLName name = this.exprParser.name();
stmt.getTable().setAlias(name.toString());
}
if (lexer.token() == Token.COMMENT) {
lexer.nextToken();
stmt.setComment(this.exprParser.primary());
}
// "-f" force flag is lexed as SUB followed by identifier "f".
if (lexer.token() == Token.SUB) {
lexer.nextToken();
acceptIdentifier("f");
stmt.setForce(true);
}
if (lexer.token() == Token.TO) {
lexer.nextToken();
acceptIdentifier("PACKAGE");
SQLName packageName = this.exprParser.name();
stmt.setToPackage(packageName);
if (lexer.token() == Token.WITH) {
lexer.nextToken();
acceptIdentifier("PRIVILEGES");
parsePrivileages(stmt.getPrivileges(), stmt);
}
}
statementList.add(stmt);
return true;
}
// ADD FILE|JAR|PY|ARCHIVE <path> [AS alias] [COMMENT ...] [-f]
if (lexer.identifierEquals(FnvHash.Constants.FILE) || lexer.identifierEquals(FnvHash.Constants.JAR) || lexer.identifierEquals(FnvHash.Constants.PY) || lexer.identifierEquals(FnvHash.Constants.ARCHIVE)) {
OdpsAddFileStatement stmt = new OdpsAddFileStatement();
long hash = lexer.hash_lower();
// Default type is FILE; only the other three are set explicitly.
if (hash == FnvHash.Constants.JAR) {
stmt.setType(OdpsAddFileStatement.FileType.JAR);
} else if (hash == FnvHash.Constants.PY) {
stmt.setType(OdpsAddFileStatement.FileType.PY);
} else if (hash == FnvHash.Constants.ARCHIVE) {
stmt.setType(OdpsAddFileStatement.FileType.ARCHIVE);
}
// The resource path is lexed in raw-path mode since paths are not
// regular SQL tokens (may contain '/', '.', etc.).
lexer.nextPath();
String path = lexer.stringVal();
lexer.nextToken();
stmt.setFile(path);
if (lexer.token() == Token.AS) {
lexer.nextToken();
SQLName name = this.exprParser.name();
stmt.setAlias(name.toString());
}
if (lexer.token() == Token.COMMENT) {
lexer.nextToken();
stmt.setComment(this.exprParser.primary());
}
// "-f" force flag, same encoding as in ADD TABLE above.
if (lexer.token() == Token.SUB) {
lexer.nextToken();
acceptIdentifier("f");
stmt.setForce(true);
}
statementList.add(stmt);
return true;
}
// Any other ADD target is unsupported.
throw new ParserException("TODO " + lexer.info());
}
// REMOVE STATISTIC | USER
if (lexer.identifierEquals("REMOVE")) {
lexer.nextToken();
if (lexer.identifierEquals("STATISTIC")) {
lexer.nextToken();
OdpsRemoveStatisticStatement stmt = new OdpsRemoveStatisticStatement();
stmt.setTable(this.exprParser.name());
stmt.setStatisticClause(parseStaticClause());
statementList.add(stmt);
return true;
}
if (lexer.token() == Token.USER) {
lexer.nextToken();
OdpsRemoveUserStatement stmt = new OdpsRemoveUserStatement();
stmt.setUser((SQLIdentifierExpr) this.exprParser.name());
statementList.add(stmt);
return true;
}
throw new ParserException("TODO " + lexer.info());
}
// READ <table> [(cols)] [PARTITION (...)] [<row-count>]
if (lexer.identifierEquals("READ")) {
OdpsReadStatement stmt = new OdpsReadStatement();
// Attach any preceding comments before consuming the READ keyword.
if (lexer.hasComment() && lexer.isKeepComments()) {
stmt.addBeforeComment(lexer.readAndResetComments());
}
lexer.nextToken();
stmt.setTable(this.exprParser.name());
if (lexer.token() == Token.LPAREN) {
lexer.nextToken();
this.exprParser.names(stmt.getColumns(), stmt);
accept(Token.RPAREN);
}
if (lexer.token() == Token.PARTITION) {
lexer.nextToken();
accept(Token.LPAREN);
parseAssignItems(stmt.getPartition(), stmt);
accept(Token.RPAREN);
}
// Optional trailing row-count literal.
if (lexer.token() == Token.LITERAL_INT) {
stmt.setRowCount(this.exprParser.primary());
}
statementList.add(stmt);
return true;
}
// LIST <object>; "TENANT ROLES" and "TEMPORARY OUTPUT" are two-word objects
// that are recombined into a single identifier after the expression parse.
if (lexer.identifierEquals("LIST")) {
OdpsListStmt stmt = new OdpsListStmt();
lexer.nextToken();
stmt.setObject(this.exprParser.expr());
if (lexer.identifierEquals("ROLES") && stmt.getObject() instanceof SQLIdentifierExpr && ((SQLIdentifierExpr) stmt.getObject()).nameEquals("TENANT")) {
lexer.nextToken();
stmt.setObject(new SQLIdentifierExpr("TENANT ROLES"));
} else if (lexer.identifierEquals("OUTPUT") && stmt.getObject() instanceof SQLIdentifierExpr && ((SQLIdentifierExpr) stmt.getObject()).nameEquals("TEMPORARY")) {
lexer.nextToken();
stmt.setObject(new SQLIdentifierExpr("TEMPORARY OUTPUT"));
}
statementList.add(stmt);
return true;
}
// DESC / DESCRIBE
if (lexer.token() == Token.DESC || lexer.identifierEquals("DESCRIBE")) {
SQLStatement stmt = parseDescribe();
statementList.add(stmt);
return true;
}
// WHOAMI — no operands.
if (lexer.identifierEquals("WHOAMI")) {
lexer.nextToken();
SQLWhoamiStatement stmt = new SQLWhoamiStatement();
statementList.add(stmt);
return true;
}
// COUNT <table> [PARTITION (...)]
if (lexer.identifierEquals("COUNT")) {
lexer.nextToken();
OdpsCountStatement stmt = new OdpsCountStatement();
stmt.setTable(this.exprParser.name());
if (lexer.token() == Token.PARTITION) {
lexer.nextToken();
this.exprParser.parseAssignItem(stmt.getPartitions(), stmt);
}
statementList.add(stmt);
return true;
}
// MSCK (metastore consistency check) — delegated.
if (lexer.identifierEquals("MSCK")) {
SQLStatement stmt = parseMsck();
statementList.add(stmt);
return true;
}
// EXSTORE <table> PARTITION (...)
if (lexer.identifierEquals("EXSTORE")) {
lexer.nextToken();
OdpsExstoreStatement stmt = new OdpsExstoreStatement();
SQLExpr table = this.exprParser.expr();
stmt.setTable(new SQLExprTableSource(table));
accept(Token.PARTITION);
this.exprParser.parseAssignItem(stmt.getPartitions(), stmt);
statementList.add(stmt);
return true;
}
// INSTALL PACKAGE <name>
if (lexer.identifierEquals("INSTALL")) {
lexer.nextToken();
acceptIdentifier("PACKAGE");
OdpsInstallPackageStatement stmt = new OdpsInstallPackageStatement();
stmt.setPackageName(this.exprParser.name());
statementList.add(stmt);
return true;
}
// KILL / LOAD / MERGE / CLONE / UNLOAD / BEGIN — delegated parsers.
if (lexer.identifierEquals(FnvHash.Constants.KILL)) {
SQLStatement stmt = parseKill();
statementList.add(stmt);
return true;
}
if (lexer.identifierEquals(FnvHash.Constants.LOAD)) {
HiveLoadDataStatement stmt = parseLoad();
statementList.add(stmt);
return true;
}
if (lexer.identifierEquals(FnvHash.Constants.MERGE)) {
SQLStatement stmt = parseMerge();
statementList.add(stmt);
return true;
}
if (lexer.identifierEquals(FnvHash.Constants.CLONE)) {
SQLStatement stmt = parseClone();
statementList.add(stmt);
return true;
}
if (lexer.identifierEquals(FnvHash.Constants.UNLOAD)) {
SQLStatement stmt = parseUnload();
statementList.add(stmt);
return true;
}
if (lexer.identifierEquals(FnvHash.Constants.BEGIN)) {
SQLStatement stmt = parseBlock();
statementList.add(stmt);
return true;
}
// RESTORE TABLE <name> [(props)] [PARTITION (...)] [TO LSN <expr>] [AS alias]
if (lexer.identifierEquals(FnvHash.Constants.RESTORE)) {
lexer.nextToken();
accept(Token.TABLE);
OdpsRestoreStatement stmt = new OdpsRestoreStatement();
stmt.setTable(this.exprParser.name());
// NOTE(review): LPAREN is not consumed before parseAssignItem here —
// presumably parseAssignItem handles the paren itself; confirm.
if (lexer.token() == Token.LPAREN) {
this.exprParser.parseAssignItem(stmt.getProperties(), stmt);
}
if (lexer.token() == Token.PARTITION) {
lexer.nextToken();
this.exprParser.parseAssignItem(stmt.getPartitions(), stmt);
}
if (lexer.token() == Token.TO) {
lexer.nextToken();
acceptIdentifier("LSN");
stmt.setTo(this.exprParser.expr());
}
if (lexer.token() == Token.AS) {
lexer.nextToken();
stmt.setAlias(this.alias());
}
statementList.add(stmt);
return true;
}
// UNDO TABLE <name> [PARTITION (...)] TO <expr>
if (lexer.identifierEquals(FnvHash.Constants.UNDO)) {
lexer.nextToken();
accept(Token.TABLE);
OdpsUndoTableStatement stmt = new OdpsUndoTableStatement();
stmt.setTable(new SQLExprTableSource(this.exprParser.name()));
if (lexer.token() == Token.PARTITION) {
lexer.nextToken();
this.exprParser.parseAssignItem(stmt.getPartitions(), stmt);
}
accept(Token.TO);
stmt.setTo(this.exprParser.expr());
statementList.add(stmt);
return true;
}
// Bare FUNCTION ... — an inline function declaration (declare flag set).
if (lexer.token() == Token.FUNCTION) {
HiveCreateFunctionStatement stmt = (HiveCreateFunctionStatement) parseHiveCreateFunction();
stmt.setDeclare(true);
statementList.add(stmt);
return true;
}
// "@var ..." — either an assignment ("@var := expr|query") or a variable
// declaration ("@var [type] [= init]").
if (lexer.token() == Token.VARIANT && lexer.stringVal().startsWith("@")) {
Lexer.SavePoint mark = lexer.mark();
String variant = lexer.stringVal();
lexer.nextToken();
if (lexer.token() == Token.COLONEQ) {
lexer.nextToken();
// Optional "CACHE ON" prefix marks the aliased query as cacheable.
boolean cache = false;
if (lexer.identifierEquals(FnvHash.Constants.CACHE)) {
lexer.nextToken();
accept(Token.ON);
cache = true;
}
// Remember a leading "(" so we can rewind: it may start either a
// parenthesized expression or a parenthesized subquery.
Lexer.SavePoint lpMark = null;
if (lexer.token() == Token.LPAREN) {
lpMark = lexer.mark();
lexer.nextToken();
}
;
switch(lexer.token()) {
// Tokens that can only begin a scalar expression, not a query.
case LITERAL_INT:
case LITERAL_FLOAT:
case LITERAL_CHARS:
case LITERAL_ALIAS:
case IDENTIFIER:
case CASE:
case CAST:
case IF:
case VARIANT:
case REPLACE:
case NEW:
case SUB:
case TRUE:
case FALSE:
{
// Rewind to before "(" so the expression parser sees it.
if (lpMark != null) {
lexer.reset(lpMark);
}
SQLExpr expr = this.exprParser.expr();
SQLExprStatement stmt = new SQLExprStatement(new SQLAssignItem(new SQLIdentifierExpr(variant), expr));
statementList.add(stmt);
return true;
}
default:
if (lpMark != null) {
lexer.reset(lpMark);
}
// Try parsing the RHS as a SELECT; on failure (and if it was
// parenthesized) fall back to a plain expression.
boolean paren = lexer.token() == Token.LPAREN;
Lexer.SavePoint parenMark = lexer.mark();
SQLSelect select;
try {
select = new OdpsSelectParser(this.exprParser).select();
} catch (ParserException error) {
if (paren) {
lexer.reset(parenMark);
SQLExpr expr = this.exprParser.expr();
SQLExprStatement stmt = new SQLExprStatement(new SQLAssignItem(new SQLIdentifierExpr(variant), expr));
statementList.add(stmt);
return true;
}
throw error;
}
// A comparison operator after the subquery means the whole RHS
// is an expression containing the query, not a query alias.
switch(lexer.token()) {
case GT:
case GTEQ:
case EQ:
case LT:
case LTEQ:
statementList.add(new SQLExprStatement(new SQLAssignItem(new SQLIdentifierExpr(variant), this.exprParser.exprRest(new SQLQueryExpr(select)))));
return true;
default:
break;
}
// Plain "@var := (SELECT ...)" — record as a query alias.
SQLSelectStatement stmt = new SQLSelectStatement(select, dbType);
OdpsQueryAliasStatement aliasQueryStatement = new OdpsQueryAliasStatement(variant, stmt);
aliasQueryStatement.setCache(cache);
statementList.add(aliasQueryStatement);
return true;
}
}
// No ":=" — this is a variable declaration with optional type and init.
OdpsDeclareVariableStatement stmt = new OdpsDeclareVariableStatement();
if (lexer.token() != Token.EQ && lexer.token() != Token.SEMI && lexer.token() != Token.EOF) {
stmt.setDataType(this.exprParser.parseDataType());
}
if (lexer.token() == Token.EQ || lexer.token() == Token.COLONEQ) {
lexer.nextToken();
stmt.setInitValue(this.exprParser.expr());
}
if (lexer.token() == Token.SEMI) {
lexer.nextToken();
}
statementList.add(stmt);
return true;
}
// IF ... control-flow statement — delegated.
if (lexer.token() == Token.IF) {
SQLStatement stmt = parseIf();
statementList.add(stmt);
return true;
}
// A lone CODE token followed by ";" or EOF is consumed silently;
// otherwise rewind so the generic parser can handle it.
if (lexer.token() == Token.CODE) {
Lexer.SavePoint mark = lexer.mark();
lexer.nextToken();
if (lexer.token() == Token.EOF || lexer.token() == Token.SEMI) {
return true;
}
lexer.reset(mark);
}
// Not an ODPS dialect statement — let the base parser take over.
return false;
}
Use of com.alibaba.druid.sql.dialect.hive.stmt.HiveCreateFunctionStatement in the project druid by alibaba.
The method parseHiveCreateFunction of the class SQLStatementParser.
/**
 * Parses a Hive/ODPS-style CREATE FUNCTION statement:
 * [CREATE] [OR REPLACE] [TEMPORARY] [SQL] FUNCTION [IF NOT EXISTS] name
 * [(params)] [RETURNS type] [AS className] [LOCATION ...] [SYMBOL = ...]
 * [USING JAR|ARCHIVE|FILE <location> | USING <code-block>].
 *
 * @return the populated {@link HiveCreateFunctionStatement}
 */
public SQLCreateFunctionStatement parseHiveCreateFunction() {
HiveCreateFunctionStatement stmt = new HiveCreateFunctionStatement();
stmt.setDbType(dbType);
// CREATE is optional: the method is also invoked on a bare FUNCTION token
// (e.g. ODPS inline function declarations).
if (lexer.token == CREATE) {
lexer.nextToken();
}
if (lexer.token == OR) {
lexer.nextToken();
accept(REPLACE);
stmt.setOrReplace(true);
}
if (lexer.identifierEquals(FnvHash.Constants.TEMPORARY)) {
lexer.nextToken();
stmt.setTemporary(true);
}
// TODO for odps
// NOTE(review): the SQL keyword is consumed but the flag is never stored
// on the statement — the "TODO for odps" above appears to track this.
boolean sql = false;
if (lexer.identifierEquals(FnvHash.Constants.SQL)) {
lexer.nextToken();
sql = true;
}
accept(Token.FUNCTION);
if (lexer.token == IF) {
lexer.nextToken();
accept(NOT);
accept(EXISTS);
stmt.setIfNotExists(true);
}
SQLName name = this.exprParser.name();
stmt.setName(name);
// Optional parameter list: "(name type [, name type]...)".
if (lexer.token == LPAREN) {
lexer.nextToken();
while (lexer.token != RPAREN) {
SQLParameter param = new SQLParameter();
param.setName(this.exprParser.name());
param.setDataType(this.exprParser.parseDataType());
if (lexer.token == COMMA) {
lexer.nextToken();
}
stmt.getParameters().add(param);
param.setParent(stmt);
}
accept(RPAREN);
}
// RETURNS <type>; a VARIANT token (e.g. "@var") before the type is skipped.
if (lexer.identifierEquals(Constants.RETURNS)) {
lexer.nextToken();
if (lexer.token == VARIANT) {
// TODO
lexer.nextToken();
}
SQLDataType returnDataType = this.exprParser.parseDataType();
stmt.setReturnDataType(returnDataType);
}
// Handles "RETURNS@var type" lexed as one fused identifier token.
if (lexer.token == IDENTIFIER && lexer.stringVal().toUpperCase().startsWith("RETURNS@")) {
lexer.nextToken();
SQLDataType returnDataType = this.exprParser.parseDataType();
stmt.setReturnDataType(returnDataType);
}
// AS <className>; the AS keyword is re-tagged as a plain identifier before
// advancing — presumably to keep the expression parser from treating what
// follows as an alias clause; confirm against the lexer implementation.
if (lexer.token() == Token.AS) {
lexer.setToken(Token.IDENTIFIER);
lexer.nextToken();
SQLExpr className = this.exprParser.expr();
stmt.setClassName(className);
}
if (lexer.identifierEquals(FnvHash.Constants.LOCATION)) {
lexer.nextToken();
SQLExpr location = this.exprParser.primary();
stmt.setLocation(location);
}
// SYMBOL = <expr> (native-library entry point).
if (lexer.identifierEquals(FnvHash.Constants.SYMBOL)) {
lexer.nextToken();
accept(Token.EQ);
SQLExpr symbol = this.exprParser.primary();
stmt.setSymbol(symbol);
}
// USING JAR|ARCHIVE|FILE <location>, or USING <code-block> which returns
// immediately since an inline code resource has no separate location.
if (lexer.token() == Token.USING || lexer.hash_lower() == Constants.USING) {
lexer.nextToken();
if (lexer.identifierEquals(FnvHash.Constants.JAR)) {
lexer.nextToken();
stmt.setResourceType(HiveCreateFunctionStatement.ResourceType.JAR);
} else if (lexer.identifierEquals(FnvHash.Constants.ARCHIVE)) {
lexer.nextToken();
stmt.setResourceType(HiveCreateFunctionStatement.ResourceType.ARCHIVE);
} else if (lexer.identifierEquals(FnvHash.Constants.FILE)) {
lexer.nextToken();
stmt.setResourceType(HiveCreateFunctionStatement.ResourceType.FILE);
} else if (lexer.token == Token.CODE) {
stmt.setCode(lexer.stringVal());
lexer.nextToken();
stmt.setResourceType(HiveCreateFunctionStatement.ResourceType.CODE);
return stmt;
}
SQLExpr location = this.exprParser.primary();
stmt.setLocation(location);
}
return stmt;
}
Aggregations