Use of com.alibaba.druid.sql.dialect.hive.stmt.HiveCreateTableStatement in project druid by alibaba.
From the class MySqlSelectTest_203_dla, method test_0:
public void test_0() throws Exception {
String sql = "SELECT $1, $2 FROM\n" + "TABLE temp_1\n" + "(\n" + " $1 int,\n" + " $2 string\n" + ")\n" + "TBLPROPERTIES (\n" + " CATALOG='oss',\n" + " LOCATION='oss://oss-cn-hangzhou-for-openanalytics-dailybuild/jinluo/tbl1_part/kv1.txt',\n" + " SCHEMA='jinluo_test0810'\n" + ")\n" + "META LIFECYCLE 1";
// System.out.println(sql);
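// Parse with the MySQL dialect parser; the inline "TABLE temp_1 (...)" source below is
// handed off to the Hive create-table parser (see SQLSelectParser.parseTableSourceRest further down).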
MySqlStatementParser parser = new MySqlStatementParser(sql);
List<SQLStatement> statementList = parser.parseStatementList();
SQLSelectStatement stmt = (SQLSelectStatement) statementList.get(0);
assertEquals(1, statementList.size());
assertEquals("SELECT $1, $2\n" + "FROM TABLE temp_1 (\n" + "\t$1 int,\n" + "\t$2 string\n" + ")\n" + "TBLPROPERTIES (\n" + "\t'CATALOG' = 'oss',\n" + "\t'LOCATION' = 'oss://oss-cn-hangzhou-for-openanalytics-dailybuild/jinluo/tbl1_part/kv1.txt',\n" + "\t'SCHEMA' = 'jinluo_test0810'\n" + ")\n" + "META LIFECYCLE 1", stmt.toString());
assertEquals("select $1, $2\n" + "from table temp_1 (\n" + "\t$1 int,\n" + "\t$2 string\n" + ")\n" + "tblproperties (\n" + "\t'CATALOG' = 'oss',\n" + "\t'LOCATION' = 'oss://oss-cn-hangzhou-for-openanalytics-dailybuild/jinluo/tbl1_part/kv1.txt',\n" + "\t'SCHEMA' = 'jinluo_test0810'\n" + ")\n" + "meta lifecycle 1", stmt.toLowerCaseString());
final TempTableNameGen tempTableNameGen = new TempTableNameGen() {
@Override
public String generateName() {
return "__temp_table_1";
}
};
final List<SQLCreateTableStatement> createTableStatementList = new ArrayList<SQLCreateTableStatement>();
SQLASTVisitorAdapter v = new MySqlASTVisitorAdapter() {
public boolean visit(SQLAdhocTableSource x) {
final String tableName = tempTableNameGen.generateName();
HiveCreateTableStatement createStmt = (HiveCreateTableStatement) x.getDefinition();
createStmt.setParent(null);
createStmt.setTableName(tableName);
createStmt.setExternal(true);
SQLUtils.replaceInParent(x, new SQLExprTableSource(tableName));
createTableStatementList.add(createStmt);
return false;
}
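// DLA positional columns such as $1 lex as variant refs; replace them with plain
// identifiers so the rewritten query prints cleanly.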
public boolean visit(SQLVariantRefExpr x) {
String name = x.getName();
if (name != null && name.startsWith("$")) {
SQLUtils.replaceInParent(x, new SQLIdentifierExpr(name));
}
return false;
}
};
stmt.accept(v);
for (SQLCreateTableStatement createStmt : createTableStatementList) {
System.out.println(createStmt.toString(VisitorFeature.OutputNameQuote));
}
System.out.println();
System.out.println(stmt.toString(VisitorFeature.OutputNameQuote));
}
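For readers who want this extraction step outside JUnit, the following is a minimal, self-contained sketch assembled from the same druid calls used in the test above. The class name AdhocTableExtractor, the counter-based temp-table naming, the sample SQL in main, and the exact import paths are illustrative assumptions, not part of the druid test suite.

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;

import com.alibaba.druid.sql.SQLUtils;
import com.alibaba.druid.sql.ast.SQLStatement;
import com.alibaba.druid.sql.ast.expr.SQLIdentifierExpr;
import com.alibaba.druid.sql.ast.expr.SQLVariantRefExpr;
import com.alibaba.druid.sql.ast.statement.SQLAdhocTableSource;
import com.alibaba.druid.sql.ast.statement.SQLCreateTableStatement;
import com.alibaba.druid.sql.ast.statement.SQLExprTableSource;
import com.alibaba.druid.sql.dialect.hive.stmt.HiveCreateTableStatement;
import com.alibaba.druid.sql.dialect.mysql.parser.MySqlStatementParser;
import com.alibaba.druid.sql.dialect.mysql.visitor.MySqlASTVisitorAdapter;

public class AdhocTableExtractor {
    // Lifts every inline TABLE definition out of stmt, rewriting stmt in place,
    // and returns the extracted CREATE TABLE statements.
    public static List<SQLCreateTableStatement> extract(SQLStatement stmt) {
        final AtomicInteger seq = new AtomicInteger();
        final List<SQLCreateTableStatement> ddl = new ArrayList<SQLCreateTableStatement>();
        stmt.accept(new MySqlASTVisitorAdapter() {
            @Override
            public boolean visit(SQLAdhocTableSource x) {
                String name = "__temp_table_" + seq.incrementAndGet(); // illustrative naming
                HiveCreateTableStatement create = (HiveCreateTableStatement) x.getDefinition();
                create.setParent(null);
                create.setTableName(name);
                create.setExternal(true);
                // Swap the inline definition for a plain reference to the new name.
                SQLUtils.replaceInParent(x, new SQLExprTableSource(name));
                ddl.add(create);
                return false;
            }

            @Override
            public boolean visit(SQLVariantRefExpr x) {
                // $1, $2, ... are positional column refs here, not bind variables.
                if (x.getName() != null && x.getName().startsWith("$")) {
                    SQLUtils.replaceInParent(x, new SQLIdentifierExpr(x.getName()));
                }
                return false;
            }
        });
        return ddl;
    }

    public static void main(String[] args) {
        String sql = "SELECT $1 FROM TABLE t ($1 int) TBLPROPERTIES (CATALOG = 'oss')";
        SQLStatement stmt = new MySqlStatementParser(sql).parseStatementList().get(0);
        for (SQLCreateTableStatement create : extract(stmt)) {
            System.out.println(create);
        }
        System.out.println(stmt);
    }
}

If parsing succeeds, main should print the lifted CREATE EXTERNAL TABLE first, then the rewritten SELECT, which now reads FROM __temp_table_1.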
Use of com.alibaba.druid.sql.dialect.hive.stmt.HiveCreateTableStatement in project druid by alibaba.
From the class SQLSelectParser, method parseTableSourceRest:
public SQLTableSource parseTableSourceRest(SQLTableSource tableSource) {
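// Continues parsing after a primary table source: alias handling, join detection,
// USING/ON clauses, and several dialect-specific extensions.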
parseTableSourceSample(tableSource);
if (lexer.hasComment() && lexer.isKeepComments() && !(tableSource instanceof SQLSubqueryTableSource)) {
tableSource.addAfterComment(lexer.readAndResetComments());
}
if (tableSource.getAlias() == null || tableSource.getAlias().length() == 0) {
Token token = lexer.token;
long hash;
switch(token) {
case LEFT:
case RIGHT:
case FULL:
{
Lexer.SavePoint mark = lexer.mark();
String strVal = lexer.stringVal();
lexer.nextToken();
if (lexer.token == Token.OUTER || lexer.token == Token.JOIN || lexer.identifierEquals(FnvHash.Constants.ANTI) || lexer.identifierEquals(FnvHash.Constants.SEMI)) {
lexer.reset(mark);
} else {
tableSource.setAlias(strVal);
}
}
break;
case OUTER:
break;
default:
if (!(token == Token.IDENTIFIER && ((hash = lexer.hash_lower()) == FnvHash.Constants.STRAIGHT_JOIN || hash == FnvHash.Constants.CROSS))) {
boolean must = false;
if (lexer.token == Token.AS) {
lexer.nextToken();
must = true;
}
String alias = tableAlias(must);
if (alias != null) {
if (isEnabled(SQLParserFeature.IgnoreNameQuotes) && alias.length() > 1) {
alias = StringUtils.removeNameQuotes(alias);
}
tableSource.setAlias(alias);
if ((tableSource instanceof SQLValuesTableSource) && ((SQLValuesTableSource) tableSource).getColumns().size() == 0) {
SQLValuesTableSource values = (SQLValuesTableSource) tableSource;
accept(Token.LPAREN);
this.exprParser.names(values.getColumns(), values);
accept(Token.RPAREN);
} else if (tableSource instanceof SQLSubqueryTableSource) {
SQLSubqueryTableSource subQuery = (SQLSubqueryTableSource) tableSource;
if (lexer.token == Token.LPAREN) {
lexer.nextToken();
this.exprParser.names(subQuery.getColumns(), subQuery);
accept(Token.RPAREN);
}
} else if (tableSource instanceof SQLUnionQueryTableSource) {
SQLUnionQueryTableSource union = (SQLUnionQueryTableSource) tableSource;
if (lexer.token == Token.LPAREN) {
lexer.nextToken();
this.exprParser.names(union.getColumns(), union);
accept(Token.RPAREN);
}
} else if (lexer.token == Token.LPAREN && tableSource instanceof SQLExprTableSource && (((SQLExprTableSource) tableSource).getExpr() instanceof SQLVariantRefExpr || ((SQLExprTableSource) tableSource).getExpr() instanceof SQLIdentifierExpr)) {
lexer.nextToken();
SQLExprTableSource exprTableSource = (SQLExprTableSource) tableSource;
this.exprParser.names(exprTableSource.getColumns(), exprTableSource);
accept(Token.RPAREN);
}
if (lexer.token == Token.WHERE) {
return tableSource;
}
return parseTableSourceRest(tableSource);
}
}
break;
}
}
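// Detect a following join; joinType stays null when the next tokens do not start one.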
SQLJoinTableSource.JoinType joinType = null;
boolean natural = lexer.identifierEquals(FnvHash.Constants.NATURAL);
if (natural) {
lexer.nextToken();
}
boolean asof = false;
if (lexer.identifierEquals(FnvHash.Constants.ASOF) && dbType == DbType.clickhouse) {
lexer.nextToken();
asof = true;
}
if (lexer.token == Token.OUTER) {
Lexer.SavePoint mark = lexer.mark();
String str = lexer.stringVal();
lexer.nextToken();
if (tableSource.getAlias() == null && !lexer.identifierEquals(FnvHash.Constants.APPLY)) {
tableSource.setAlias(str);
} else {
lexer.reset(mark);
}
}
boolean global = false;
if (dbType == DbType.clickhouse) {
if (lexer.token == Token.GLOBAL) {
lexer.nextToken();
global = true;
}
}
switch(lexer.token) {
case LEFT:
lexer.nextToken();
if (lexer.identifierEquals(FnvHash.Constants.SEMI)) {
lexer.nextToken();
joinType = SQLJoinTableSource.JoinType.LEFT_SEMI_JOIN;
} else if (lexer.identifierEquals(FnvHash.Constants.ANTI)) {
lexer.nextToken();
joinType = SQLJoinTableSource.JoinType.LEFT_ANTI_JOIN;
} else if (lexer.token == Token.OUTER) {
lexer.nextToken();
joinType = SQLJoinTableSource.JoinType.LEFT_OUTER_JOIN;
} else {
joinType = SQLJoinTableSource.JoinType.LEFT_OUTER_JOIN;
}
if (dbType == DbType.odps && lexer.token == Token.IDENTIFIER && lexer.stringVal().startsWith("join@")) {
lexer.stringVal = lexer.stringVal().substring(5);
break;
}
accept(Token.JOIN);
break;
case RIGHT:
lexer.nextToken();
if (lexer.token == Token.OUTER) {
lexer.nextToken();
}
accept(Token.JOIN);
joinType = SQLJoinTableSource.JoinType.RIGHT_OUTER_JOIN;
break;
case FULL:
lexer.nextToken();
if (lexer.token == Token.OUTER) {
lexer.nextToken();
}
accept(Token.JOIN);
joinType = SQLJoinTableSource.JoinType.FULL_OUTER_JOIN;
break;
case INNER:
lexer.nextToken();
accept(Token.JOIN);
joinType = SQLJoinTableSource.JoinType.INNER_JOIN;
break;
case JOIN:
lexer.nextToken();
joinType = natural ? SQLJoinTableSource.JoinType.NATURAL_JOIN : SQLJoinTableSource.JoinType.JOIN;
break;
case COMMA:
lexer.nextToken();
joinType = SQLJoinTableSource.JoinType.COMMA;
break;
case OUTER:
lexer.nextToken();
if (lexer.identifierEquals(FnvHash.Constants.APPLY)) {
lexer.nextToken();
joinType = SQLJoinTableSource.JoinType.OUTER_APPLY;
}
break;
case STRAIGHT_JOIN:
case IDENTIFIER:
final long hash = lexer.hash_lower;
if (hash == FnvHash.Constants.STRAIGHT_JOIN) {
lexer.nextToken();
joinType = SQLJoinTableSource.JoinType.STRAIGHT_JOIN;
} else if (hash == FnvHash.Constants.STRAIGHT) {
lexer.nextToken();
accept(Token.JOIN);
joinType = SQLJoinTableSource.JoinType.STRAIGHT_JOIN;
} else if (hash == FnvHash.Constants.CROSS) {
lexer.nextToken();
if (lexer.token == Token.JOIN) {
lexer.nextToken();
joinType = natural ? SQLJoinTableSource.JoinType.NATURAL_CROSS_JOIN : SQLJoinTableSource.JoinType.CROSS_JOIN;
} else if (lexer.identifierEquals(FnvHash.Constants.APPLY)) {
lexer.nextToken();
joinType = SQLJoinTableSource.JoinType.CROSS_APPLY;
}
}
break;
default:
break;
}
if (joinType != null) {
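// A join keyword was consumed: wrap the current source as the left side and parse the right side.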
SQLJoinTableSource join = new SQLJoinTableSource();
join.setLeft(tableSource);
join.setJoinType(joinType);
join.setGlobal(global);
if (asof) {
join.setAsof(true);
}
boolean isBrace = false;
if (SQLJoinTableSource.JoinType.COMMA == joinType) {
if (lexer.token == Token.LBRACE) {
lexer.nextToken();
acceptIdentifier("OJ");
isBrace = true;
}
}
SQLTableSource rightTableSource = null;
if (lexer.token == Token.LPAREN) {
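// Parenthesized right-hand side: a subquery (SELECT, or FROM-first on odps/hive) or a
// nested table source, possibly followed by a set operator.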
lexer.nextToken();
if (lexer.token == Token.SELECT || (lexer.token == Token.FROM && (dbType == DbType.odps || dbType == DbType.hive))) {
SQLSelect select = this.select();
rightTableSource = new SQLSubqueryTableSource(select);
} else {
rightTableSource = this.parseTableSource();
}
if (lexer.token == Token.UNION || lexer.token == Token.EXCEPT || lexer.token == Token.MINUS || lexer.token == Token.INTERSECT) {
if (rightTableSource instanceof SQLSubqueryTableSource) {
SQLSelect select = ((SQLSubqueryTableSource) rightTableSource).getSelect();
SQLSelectQuery query = queryRest(select.getQuery(), true);
select.setQuery(query);
} else if (rightTableSource instanceof SQLUnionQueryTableSource) {
SQLUnionQueryTableSource unionTableSrc = (SQLUnionQueryTableSource) rightTableSource;
unionTableSrc.setUnion((SQLUnionQuery) queryRest(unionTableSrc.getUnion()));
}
}
accept(Token.RPAREN);
if (rightTableSource instanceof SQLValuesTableSource && (lexer.token == Token.AS || lexer.token == Token.IDENTIFIER) && rightTableSource.getAlias() == null && ((SQLValuesTableSource) rightTableSource).getColumns().size() == 0) {
if (lexer.token == Token.AS) {
lexer.nextToken();
}
rightTableSource.setAlias(tableAlias(true));
if (lexer.token == Token.LPAREN) {
lexer.nextToken();
this.exprParser.names(((SQLValuesTableSource) rightTableSource).getColumns(), rightTableSource);
accept(Token.RPAREN);
}
}
} else if (lexer.token() == Token.TABLE) {
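// Inline TABLE <name> (...) after a join keyword: delegate to the Hive create-table parser
// and wrap the resulting HiveCreateTableStatement in an ad-hoc table source.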
HiveCreateTableParser createTableParser = new HiveCreateTableParser(lexer);
HiveCreateTableStatement stmt = (HiveCreateTableStatement) createTableParser.parseCreateTable(false);
rightTableSource = new SQLAdhocTableSource(stmt);
primaryTableSourceRest(rightTableSource);
} else {
if (lexer.identifierEquals(FnvHash.Constants.UNNEST)) {
Lexer.SavePoint mark = lexer.mark();
lexer.nextToken();
if (lexer.token() == Token.LPAREN) {
lexer.nextToken();
SQLUnnestTableSource unnest = new SQLUnnestTableSource();
this.exprParser.exprList(unnest.getItems(), unnest);
accept(Token.RPAREN);
if (lexer.token() == Token.WITH) {
lexer.nextToken();
acceptIdentifier("ORDINALITY");
unnest.setOrdinality(true);
}
String alias = this.tableAlias();
unnest.setAlias(alias);
if (lexer.token() == Token.LPAREN) {
lexer.nextToken();
this.exprParser.names(unnest.getColumns(), unnest);
accept(Token.RPAREN);
}
SQLTableSource tableSrc = parseTableSourceRest(unnest);
rightTableSource = tableSrc;
} else {
lexer.reset(mark);
}
} else if (lexer.token == Token.VALUES) {
rightTableSource = this.parseValues();
}
if (rightTableSource == null) {
boolean aliasToken = lexer.token == Token.LITERAL_ALIAS;
SQLExpr expr;
switch(lexer.token) {
case ALL:
expr = this.exprParser.name();
break;
default:
expr = this.expr();
break;
}
if (aliasToken && expr instanceof SQLCharExpr) {
expr = new SQLIdentifierExpr(((SQLCharExpr) expr).getText());
}
SQLExprTableSource exprTableSource = new SQLExprTableSource(expr);
if (expr instanceof SQLMethodInvokeExpr && lexer.token == Token.AS) {
lexer.nextToken();
String alias = this.tableAlias(true);
exprTableSource.setAlias(alias);
if (lexer.token == Token.LPAREN) {
lexer.nextToken();
this.exprParser.names(exprTableSource.getColumns(), exprTableSource);
accept(Token.RPAREN);
}
}
rightTableSource = exprTableSource;
}
rightTableSource = primaryTableSourceRest(rightTableSource);
}
if (lexer.token == Token.USING || lexer.identifierEquals(FnvHash.Constants.USING)) {
Lexer.SavePoint savePoint = lexer.mark();
lexer.nextToken();
if (lexer.token == Token.LPAREN) {
lexer.nextToken();
join.setRight(rightTableSource);
this.exprParser.exprList(join.getUsing(), join);
accept(Token.RPAREN);
} else if (lexer.token == Token.IDENTIFIER) {
lexer.reset(savePoint);
join.setRight(rightTableSource);
return join;
} else {
join.setAlias(this.tableAlias());
}
} else if (lexer.token == Token.STRAIGHT_JOIN || lexer.identifierEquals(FnvHash.Constants.STRAIGHT_JOIN)) {
primaryTableSourceRest(rightTableSource);
} else if (rightTableSource.getAlias() == null && !(rightTableSource instanceof SQLValuesTableSource)) {
int line = lexer.line;
String tableAlias;
if (lexer.token == Token.AS) {
lexer.nextToken();
if (lexer.token != Token.ON) {
tableAlias = this.tableAlias(true);
} else {
tableAlias = null;
}
} else {
tableAlias = this.tableAlias(false);
}
if (tableAlias != null) {
rightTableSource.setAlias(tableAlias);
if (line + 1 == lexer.line && lexer.hasComment() && lexer.getComments().get(0).startsWith("--")) {
rightTableSource.addAfterComment(lexer.readAndResetComments());
}
if (lexer.token == Token.LPAREN) {
if (rightTableSource instanceof SQLSubqueryTableSource) {
lexer.nextToken();
List<SQLName> columns = ((SQLSubqueryTableSource) rightTableSource).getColumns();
this.exprParser.names(columns, rightTableSource);
accept(Token.RPAREN);
} else if (rightTableSource instanceof SQLExprTableSource && ((SQLExprTableSource) rightTableSource).getExpr() instanceof SQLMethodInvokeExpr) {
List<SQLName> columns = ((SQLExprTableSource) rightTableSource).getColumns();
if (columns.size() == 0) {
lexer.nextToken();
this.exprParser.names(columns, rightTableSource);
accept(Token.RPAREN);
}
}
}
}
rightTableSource = primaryTableSourceRest(rightTableSource);
}
if (lexer.token == Token.WITH) {
lexer.nextToken();
accept(Token.LPAREN);
for (; ; ) {
SQLExpr hintExpr = this.expr();
SQLExprHint hint = new SQLExprHint(hintExpr);
hint.setParent(tableSource);
rightTableSource.getHints().add(hint);
if (lexer.token == Token.COMMA) {
lexer.nextToken();
continue;
} else {
break;
}
}
accept(Token.RPAREN);
}
join.setRight(rightTableSource);
if (!natural) {
if (!StringUtils.isEmpty(tableSource.getAlias()) && tableSource.aliasHashCode64() == FnvHash.Constants.NATURAL && DbType.mysql == dbType) {
tableSource.setAlias(null);
natural = true;
}
}
join.setNatural(natural);
if (lexer.token == Token.ON) {
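// Parse the ON condition; repeated ON clauses are tolerated and collected as extra conditions.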
lexer.nextToken();
SQLExpr joinOn = expr();
join.setCondition(joinOn);
while (lexer.token == Token.ON) {
lexer.nextToken();
SQLExpr joinOn2 = expr();
join.addCondition(joinOn2);
}
if (dbType == DbType.odps && lexer.identifierEquals(FnvHash.Constants.USING)) {
SQLJoinTableSource.UDJ udj = new SQLJoinTableSource.UDJ();
lexer.nextToken();
udj.setFunction(this.exprParser.name());
accept(Token.LPAREN);
this.exprParser.exprList(udj.getArguments(), udj);
accept(Token.RPAREN);
if (lexer.token != Token.AS) {
udj.setAlias(alias());
}
accept(Token.AS);
accept(Token.LPAREN);
this.exprParser.names(udj.getColumns(), udj);
accept(Token.RPAREN);
if (lexer.identifierEquals(FnvHash.Constants.SORT)) {
lexer.nextToken();
accept(Token.BY);
this.exprParser.orderBy(udj.getSortBy(), udj);
}
if (lexer.token == Token.WITH) {
lexer.nextToken();
acceptIdentifier("UDFPROPERTIES");
this.exprParser.parseAssignItem(udj.getProperties(), udj);
}
join.setUdj(udj);
}
} else if (lexer.token == Token.USING || lexer.identifierEquals(FnvHash.Constants.USING)) {
Lexer.SavePoint savePoint = lexer.mark();
lexer.nextToken();
if (lexer.token == Token.LPAREN) {
lexer.nextToken();
this.exprParser.exprList(join.getUsing(), join);
accept(Token.RPAREN);
} else {
lexer.reset(savePoint);
}
}
SQLTableSource tableSourceReturn = parseTableSourceRest(join);
if (isBrace) {
accept(Token.RBRACE);
}
return parseTableSourceRest(tableSourceReturn);
}
if ((tableSource.aliasHashCode64() == FnvHash.Constants.LATERAL || lexer.token == Token.LATERAL) && lexer.token() == Token.VIEW) {
return parseLateralView(tableSource);
}
if (lexer.identifierEquals(FnvHash.Constants.LATERAL) || lexer.token == Token.LATERAL) {
lexer.nextToken();
return parseLateralView(tableSource);
}
return tableSource;
}
Use of com.alibaba.druid.sql.dialect.hive.stmt.HiveCreateTableStatement in project druid by alibaba.
From the class HiveCreateTableParser, method parseCreateTable:
public SQLCreateTableStatement parseCreateTable(boolean acceptCreate) {
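// When acceptCreate is false, parsing starts directly at the TABLE keyword, which is how
// the inline FROM TABLE sources above reuse this parser.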
HiveCreateTableStatement stmt = newCreateStatement();
if (acceptCreate) {
if (lexer.hasComment() && lexer.isKeepComments()) {
stmt.addBeforeComment(lexer.readAndResetComments());
}
accept(Token.CREATE);
}
if (lexer.identifierEquals(FnvHash.Constants.EXTERNAL)) {
lexer.nextToken();
stmt.setExternal(true);
}
if (lexer.identifierEquals(FnvHash.Constants.TEMPORARY)) {
lexer.nextToken();
stmt.setType(SQLCreateTableStatement.Type.TEMPORARY);
}
accept(Token.TABLE);
if (lexer.token() == Token.IF || lexer.identifierEquals(FnvHash.Constants.IF)) {
lexer.nextToken();
accept(Token.NOT);
accept(Token.EXISTS);
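// setIfNotExiists is the setter's actual (misspelled) name in druid's SQLCreateTableStatement API.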
stmt.setIfNotExiists(true);
}
stmt.setName(this.exprParser.name());
if (lexer.token() == Token.LPAREN) {
lexer.nextToken();
for (; ; ) {
Token token = lexer.token();
if (token == Token.IDENTIFIER || token == Token.LITERAL_ALIAS) {
SQLColumnDefinition column = this.exprParser.parseColumn();
stmt.getTableElementList().add(column);
} else if (token == Token.PRIMARY || token == Token.UNIQUE || token == Token.CHECK || token == Token.CONSTRAINT || token == Token.FOREIGN) {
SQLConstraint constraint = this.exprParser.parseConstaint();
constraint.setParent(stmt);
stmt.getTableElementList().add((SQLTableElement) constraint);
} else if (token == Token.TABLESPACE) {
throw new ParserException("TODO " + lexer.info());
} else {
SQLColumnDefinition column = this.exprParser.parseColumn();
stmt.getTableElementList().add(column);
}
if (lexer.token() == Token.COMMA) {
lexer.nextToken();
if (lexer.token() == Token.RPAREN) {
// compatible for sql server
break;
}
continue;
}
break;
}
accept(Token.RPAREN);
} else if (lexer.token() == Token.LIKE) {
parseLike(stmt);
}
if (lexer.identifierEquals(FnvHash.Constants.ENGINE)) {
// skip engine=xxx
lexer.nextToken();
accept(Token.EQ);
lexer.nextToken();
}
if (lexer.identifierEquals(FnvHash.Constants.CHARSET)) {
// skip charset = xxx
lexer.nextToken();
accept(Token.EQ);
lexer.nextToken();
}
if (lexer.identifierEquals(FnvHash.Constants.USING) || lexer.token() == Token.USING) {
lexer.nextToken();
SQLExpr using = this.exprParser.expr();
stmt.setUsing(using);
}
if (lexer.identifierEquals(FnvHash.Constants.OPTIONS)) {
lexer.nextToken();
accept(Token.LPAREN);
parseAssignItems(stmt.getTableOptions(), stmt, false);
accept(Token.RPAREN);
}
if (lexer.token() == Token.COMMENT) {
lexer.nextToken();
SQLExpr comment = this.exprParser.expr();
stmt.setComment(comment);
}
if (lexer.identifierEquals(FnvHash.Constants.MAPPED)) {
lexer.nextToken();
accept(Token.BY);
this.exprParser.parseAssignItem(stmt.getMappedBy(), stmt);
}
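// PARTITIONED BY (col type, ...): partition columns are kept apart from the main table element list.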
if (lexer.token() == Token.PARTITIONED) {
lexer.nextToken();
accept(Token.BY);
accept(Token.LPAREN);
for (; ; ) {
if (lexer.token() != Token.IDENTIFIER) {
throw new ParserException("expect identifier. " + lexer.info());
}
SQLColumnDefinition column = this.exprParser.parseColumn();
stmt.addPartitionColumn(column);
if (lexer.isKeepComments() && lexer.hasComment()) {
column.addAfterComment(lexer.readAndResetComments());
}
if (lexer.token() != Token.COMMA) {
break;
} else {
lexer.nextToken();
if (lexer.isKeepComments() && lexer.hasComment()) {
column.addAfterComment(lexer.readAndResetComments());
}
}
}
accept(Token.RPAREN);
}
if (lexer.identifierEquals(FnvHash.Constants.CLUSTERED)) {
lexer.nextToken();
accept(Token.BY);
accept(Token.LPAREN);
for (; ; ) {
SQLSelectOrderByItem item = this.exprParser.parseSelectOrderByItem();
stmt.addClusteredByItem(item);
if (lexer.token() == Token.COMMA) {
lexer.nextToken();
continue;
}
break;
}
accept(Token.RPAREN);
}
if (lexer.identifierEquals(FnvHash.Constants.SKEWED)) {
lexer.nextToken();
accept(Token.BY);
accept(Token.LPAREN);
this.exprParser.exprList(stmt.getSkewedBy(), stmt);
accept(Token.RPAREN);
accept(Token.ON);
accept(Token.LPAREN);
for (; ; ) {
if (lexer.token() == Token.LPAREN) {
SQLListExpr list = new SQLListExpr();
lexer.nextToken();
this.exprParser.exprList(list.getItems(), list);
accept(Token.RPAREN);
stmt.addSkewedByOn(list);
} else {
SQLExpr expr = this.exprParser.expr();
stmt.addSkewedByOn(expr);
}
if (lexer.token() == Token.COMMA) {
lexer.nextToken();
continue;
}
break;
}
accept(Token.RPAREN);
}
if (lexer.identifierEquals(FnvHash.Constants.SORTED)) {
parseSortedBy(stmt);
}
if (lexer.token() == Token.ROW || lexer.identifierEquals(FnvHash.Constants.ROW)) {
parseRowFormat(stmt);
}
if (lexer.identifierEquals(FnvHash.Constants.SORTED)) {
parseSortedBy(stmt);
}
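// CLUSTERED BY / SORTED BY tables must specify an INTO <n> BUCKETS clause.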
if (stmt.getClusteredBy().size() > 0 || stmt.getSortedBy().size() > 0) {
accept(Token.INTO);
if (lexer.token() == Token.LITERAL_INT) {
stmt.setBuckets(lexer.integerValue().intValue());
lexer.nextToken();
} else {
throw new ParserException("into buckets must be integer. " + lexer.info());
}
acceptIdentifier("BUCKETS");
}
if (lexer.token() == Token.ROW || lexer.identifierEquals(FnvHash.Constants.ROW)) {
parseRowFormat(stmt);
}
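// STORED AS takes either a named format or explicit INPUTFORMAT/OUTPUTFORMAT classes.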
if (lexer.identifierEquals(FnvHash.Constants.STORED)) {
lexer.nextToken();
accept(Token.AS);
if (lexer.identifierEquals(FnvHash.Constants.INPUTFORMAT)) {
HiveInputOutputFormat format = new HiveInputOutputFormat();
lexer.nextToken();
format.setInput(this.exprParser.primary());
if (lexer.identifierEquals(FnvHash.Constants.OUTPUTFORMAT)) {
lexer.nextToken();
format.setOutput(this.exprParser.primary());
}
stmt.setStoredAs(format);
} else {
SQLName name = this.exprParser.name();
stmt.setStoredAs(name);
}
}
if (lexer.identifierEquals(FnvHash.Constants.LOCATION)) {
lexer.nextToken();
SQLExpr location = this.exprParser.primary();
stmt.setLocation(location);
}
if (lexer.token() == Token.LIKE) {
parseLike(stmt);
}
if (lexer.identifierEquals(FnvHash.Constants.TBLPROPERTIES)) {
parseTblProperties(stmt);
}
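// META LIFECYCLE <n> is a DLA-specific clause (exercised by the MySqlSelectTest_20x_dla tests above).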
if (lexer.identifierEquals(FnvHash.Constants.META)) {
lexer.nextToken();
acceptIdentifier("LIFECYCLE");
stmt.setMetaLifeCycle(this.exprParser.primary());
}
if (lexer.token() == Token.AS) {
lexer.nextToken();
SQLSelect select = this.createSQLSelectParser().select();
stmt.setSelect(select);
}
if (lexer.token() == Token.LIKE) {
lexer.nextToken();
Lexer.SavePoint mark = lexer.mark();
if (lexer.token() == Token.SELECT) {
stmt.setLikeQuery(true);
SQLSelect select = this.createSQLSelectParser().select();
stmt.setSelect(select);
} else {
lexer.reset(mark);
if (lexer.identifierEquals(FnvHash.Constants.MAPPING)) {
SQLExpr like = this.exprParser.primary();
stmt.setLike(new SQLExprTableSource(like));
} else {
SQLName name = this.exprParser.name();
stmt.setLike(name);
}
}
}
if (lexer.token() == Token.COMMENT) {
lexer.nextToken();
SQLExpr comment = this.exprParser.expr();
stmt.setComment(comment);
}
if (lexer.identifierEquals(FnvHash.Constants.USING) || lexer.token() == Token.USING) {
lexer.nextToken();
SQLExpr using = this.exprParser.expr();
stmt.setUsing(using);
}
if (lexer.identifierEquals(FnvHash.Constants.TBLPROPERTIES)) {
lexer.nextToken();
accept(Token.LPAREN);
parseAssignItems(stmt.getTblProperties(), stmt, false);
accept(Token.RPAREN);
}
return stmt;
}
Use of com.alibaba.druid.sql.dialect.hive.stmt.HiveCreateTableStatement in project druid by alibaba.
From the class MySqlSelectParser, method parseFrom:
public void parseFrom(SQLSelectQueryBlock queryBlock) {
if (lexer.token() == Token.EOF || lexer.token() == Token.SEMI || lexer.token() == Token.ORDER || lexer.token() == Token.RPAREN || lexer.token() == Token.UNION) {
return;
}
if (lexer.token() != Token.FROM) {
for (SQLSelectItem item : queryBlock.getSelectList()) {
SQLExpr expr = item.getExpr();
if (expr instanceof SQLAggregateExpr) {
throw new ParserException("syntax error, expect " + Token.FROM + ", actual " + lexer.token() + ", " + lexer.info());
}
}
return;
}
lexer.nextTokenIdent();
while (lexer.token() == Token.HINT) {
lexer.nextToken();
}
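// FROM TABLE <name> (...): DLA inline table definition, parsed into a HiveCreateTableStatement
// and wrapped as an ad-hoc table source.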
if (lexer.token() == Token.TABLE) {
HiveCreateTableParser createTableParser = new HiveCreateTableParser(lexer);
HiveCreateTableStatement stmt = (HiveCreateTableStatement) createTableParser.parseCreateTable(false);
SQLAdhocTableSource tableSource = new SQLAdhocTableSource(stmt);
queryBlock.setFrom(parseTableSourceRest(tableSource));
return;
}
if (lexer.token() == Token.UPDATE) {
// Taobao's SELECT-from-UPDATE returning syntax (ugly, but supported).
// parseUpdateStatment is the method's actual (misspelled) name in druid.
updateStmt = this.parseUpdateStatment();
List<SQLExpr> returning = updateStmt.getReturning();
for (SQLSelectItem item : queryBlock.getSelectList()) {
SQLExpr itemExpr = item.getExpr();
itemExpr.setParent(updateStmt);
returning.add(itemExpr);
}
returningFlag = true;
return;
}
SQLTableSource from = parseTableSource(queryBlock);
queryBlock.setFrom(from);
}
Use of com.alibaba.druid.sql.dialect.hive.stmt.HiveCreateTableStatement in project druid by alibaba.
From the class MySqlSelectTest_204_dla, method test_0:
public void test_0() throws Exception {
String sql = "SELECT temp_1.$1, temp_2.smallint_col\n" + "FROM \n" + "TABLE temp_1\n" + "(\n" + " $1 int,\n" + " $2 int\n" + ")\n" + "TBLPROPERTIES (\n" + " TYPE='oss',\n" + " LOCATION='oss//x.x.x.x:xxx/test_db',\n" + " SCHEMA='test_db'\n" + ")\n" + "META LIFECYCLE 1\n" + "\n" + "JOIN\n" + "\n" + "TABLE temp_2\n" + "(\n" + " id INT COMMENT 'default',\n" + " bool_col BOOLEAN COMMENT 'default',\n" + " tinyint_col TINYINT COMMENT 'default',\n" + " smallint_col SMALLINT COMMENT 'default',\n" + " int_col INT COMMENT 'default',\n" + " bigint_col BIGINT COMMENT 'default',\n" + " float_col FLOAT COMMENT 'default',\n" + " double_col DOUBLE COMMENT 'default',\n" + " date_string_col STRING COMMENT 'default',\n" + " string_col STRING COMMENT 'default',\n" + " timestamp_col TIMESTAMP COMMENT 'default'\n" + ")\n" + "ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' \n" + "WITH SERDEPROPERTIES ('field.delim'='|', 'serialization.format'='|') \n" + "STORED AS INPUTFORMAT 'org.apache.hadoop.mapred.TextInputFormat'\n" + "OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'\n" + "LOCATION 'oss://xxx/xxx/xxx.csv'\n" + "TBLPROPERTIES ('recursive.directories'='false')\n" + "META LIFECYCLE 1\n" + "\n" + "ON temp_1.$1 = temp_2.id\n" + "WHERE temp_2.bool_col = true;";
// System.out.println(sql);
MySqlStatementParser parser = new MySqlStatementParser(sql);
List<SQLStatement> statementList = parser.parseStatementList();
SQLSelectStatement stmt = (SQLSelectStatement) statementList.get(0);
assertEquals(1, statementList.size());
assertEquals("SELECT temp_1.$1, temp_2.smallint_col\n" + "FROM TABLE temp_1 (\n" + "\t$1 int,\n" + "\t$2 int\n" + ")\n" + "TBLPROPERTIES (\n" + "\t'TYPE' = 'oss',\n" + "\t'LOCATION' = 'oss//x.x.x.x:xxx/test_db',\n" + "\t'SCHEMA' = 'test_db'\n" + ")\n" + "META LIFECYCLE 1\n" + "\tJOIN TABLE temp_2 (\n" + "\t\tid INT COMMENT 'default',\n" + "\t\tbool_col BOOLEAN COMMENT 'default',\n" + "\t\ttinyint_col TINYINT COMMENT 'default',\n" + "\t\tsmallint_col SMALLINT COMMENT 'default',\n" + "\t\tint_col INT COMMENT 'default',\n" + "\t\tbigint_col BIGINT COMMENT 'default',\n" + "\t\tfloat_col FLOAT COMMENT 'default',\n" + "\t\tdouble_col DOUBLE COMMENT 'default',\n" + "\t\tdate_string_col STRING COMMENT 'default',\n" + "\t\tstring_col STRING COMMENT 'default',\n" + "\t\ttimestamp_col TIMESTAMP COMMENT 'default'\n" + "\t)\n" + "\tROW FORMAT\n" + "\t\tSERDE 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'\n" + "\tWITH SERDEPROPERTIES (\n" + "\t\t'field.delim' = '|',\n" + "\t\t'serialization.format' = '|'\n" + "\t)\n" + "\tSTORED AS\n" + "\t\tINPUTFORMAT 'org.apache.hadoop.mapred.TextInputFormat'\n" + "\t\tOUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'\n" + "\tLOCATION 'oss://xxx/xxx/xxx.csv'\n" + "\tTBLPROPERTIES (\n" + "\t\t'recursive.directories' = 'false'\n" + "\t)\n" + "\tMETA LIFECYCLE 1 ON temp_1.$1 = temp_2.id\n" + "WHERE temp_2.bool_col = true;", stmt.toString());
assertEquals("select temp_1.$1, temp_2.smallint_col\n" + "from table temp_1 (\n" + "\t$1 int,\n" + "\t$2 int\n" + ")\n" + "tblproperties (\n" + "\t'TYPE' = 'oss',\n" + "\t'LOCATION' = 'oss//x.x.x.x:xxx/test_db',\n" + "\t'SCHEMA' = 'test_db'\n" + ")\n" + "meta lifecycle 1\n" + "\tjoin table temp_2 (\n" + "\t\tid INT comment 'default',\n" + "\t\tbool_col BOOLEAN comment 'default',\n" + "\t\ttinyint_col TINYINT comment 'default',\n" + "\t\tsmallint_col SMALLINT comment 'default',\n" + "\t\tint_col INT comment 'default',\n" + "\t\tbigint_col BIGINT comment 'default',\n" + "\t\tfloat_col FLOAT comment 'default',\n" + "\t\tdouble_col DOUBLE comment 'default',\n" + "\t\tdate_string_col STRING comment 'default',\n" + "\t\tstring_col STRING comment 'default',\n" + "\t\ttimestamp_col TIMESTAMP comment 'default'\n" + "\t)\n" + "\trow rowFormat\n" + "\t\tserde 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'\n" + "\twith serdeproperties (\n" + "\t\t'field.delim' = '|',\n" + "\t\t'serialization.format' = '|'\n" + "\t)\n" + "\tstored as\n" + "\t\tinputformat 'org.apache.hadoop.mapred.TextInputFormat'\n" + "\t\toutputformat 'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'\n" + "\tlocation 'oss://xxx/xxx/xxx.csv'\n" + "\ttblproperties (\n" + "\t\t'recursive.directories' = 'false'\n" + "\t)\n" + "\tmeta lifecycle 1 on temp_1.$1 = temp_2.id\n" + "where temp_2.bool_col = true;", stmt.toLowerCaseString());
final TempTableNameGen tempTableNameGen = new TempTableNameGen() {
@Override
public String generateName() {
return "__temp_table_1";
}
};
final List<SQLCreateTableStatement> createTableStatementList = new ArrayList<SQLCreateTableStatement>();
SQLASTVisitorAdapter v = new MySqlASTVisitorAdapter() {
public boolean visit(SQLAdhocTableSource x) {
final String tableName = tempTableNameGen.generateName();
HiveCreateTableStatement createStmt = (HiveCreateTableStatement) x.getDefinition();
createStmt.setParent(null);
createStmt.setTableName(tableName);
createStmt.setExternal(true);
SQLUtils.replaceInParent(x, new SQLExprTableSource(tableName));
createTableStatementList.add(createStmt);
return false;
}
public boolean visit(SQLVariantRefExpr x) {
String name = x.getName();
if (name != null && name.startsWith("$")) {
SQLUtils.replaceInParent(x, new SQLIdentifierExpr(name));
}
return false;
}
};
stmt.accept(v);
for (SQLCreateTableStatement createStmt : createTableStatementList) {
System.out.println(createStmt.toString(VisitorFeature.OutputNameQuote));
}
System.out.println();
System.out.println(stmt.toString(VisitorFeature.OutputNameQuote));
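// The extracted definition keeps its META LIFECYCLE value: an integer literal, or the identifier ALWAYS.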
HiveCreateTableStatement createTableStatement = (HiveCreateTableStatement) createTableStatementList.get(0);
SQLExpr lifeCycle = createTableStatement.getMetaLifeCycle();
if (lifeCycle instanceof SQLIntegerExpr) {
int intValue = ((SQLIntegerExpr) lifeCycle).getNumber().intValue();
} else if (lifeCycle instanceof SQLIdentifierExpr && ((SQLIdentifierExpr) lifeCycle).nameHashCode64() == FnvHash.Constants.ALWAYS) {
// always
}
}