Example usage of com.alibaba.druid.sql.ast.statement.SQLTableElement in the druid project by alibaba:
the AntsparkCreateTableParser class, parseCreateTable method.
/**
 * Parses an Antspark (Spark-flavored) CREATE TABLE statement, including the
 * Spark/Hive-specific clauses visible below: USING, MAPPED BY, PARTITIONED BY,
 * CLUSTERED BY, SKEWED BY, SORTED BY, ROW FORMAT, INTO n BUCKETS, STORED AS,
 * LOCATION, TBLPROPERTIES, META LIFECYCLE, AS &lt;select&gt; and LIKE.
 *
 * @param acceptCreate whether the CREATE keyword is still pending in the token
 *                     stream and must be consumed here
 * @return the populated AntsparkCreateTableStatement
 */
public SQLCreateTableStatement parseCreateTable(boolean acceptCreate) {
AntsparkCreateTableStatement stmt = new AntsparkCreateTableStatement();
if (acceptCreate) {
// Attach comments preceding CREATE to the statement when comment retention is on.
if (lexer.hasComment() && lexer.isKeepComments()) {
stmt.addBeforeComment(lexer.readAndResetComments());
}
accept(Token.CREATE);
}
// Optional EXTERNAL / TEMPORARY modifiers before TABLE.
if (lexer.identifierEquals(FnvHash.Constants.EXTERNAL)) {
lexer.nextToken();
stmt.setExternal(true);
}
if (lexer.identifierEquals(FnvHash.Constants.TEMPORARY)) {
lexer.nextToken();
stmt.setType(SQLCreateTableStatement.Type.TEMPORARY);
}
accept(Token.TABLE);
// Optional IF NOT EXISTS (IF may arrive as a keyword token or an identifier).
if (lexer.token() == Token.IF || lexer.identifierEquals(FnvHash.Constants.IF)) {
lexer.nextToken();
accept(Token.NOT);
accept(Token.EXISTS);
// NOTE(review): setter name is misspelled ("Exiists") in the project API; kept for compatibility.
stmt.setIfNotExiists(true);
}
stmt.setName(this.exprParser.name());
// Optional parenthesized table-element list: column definitions and table-level constraints.
if (lexer.token() == Token.LPAREN) {
lexer.nextToken();
for (; ; ) {
Token token = lexer.token();
if (//
token == Token.IDENTIFIER || token == Token.LITERAL_ALIAS) {
SQLColumnDefinition column = this.exprParser.parseColumn();
stmt.getTableElementList().add(column);
} else if (//
token == Token.PRIMARY || //
token == Token.UNIQUE || //
token == Token.CHECK || token == Token.CONSTRAINT || token == Token.FOREIGN) {
// Table-level constraint (PRIMARY KEY, UNIQUE, CHECK, CONSTRAINT, FOREIGN KEY).
// NOTE(review): method name "parseConstaint" is the project API's spelling.
SQLConstraint constraint = this.exprParser.parseConstaint();
constraint.setParent(stmt);
stmt.getTableElementList().add((SQLTableElement) constraint);
} else if (token == Token.TABLESPACE) {
// TABLESPACE inside the element list is not supported yet.
throw new ParserException("TODO " + lexer.info());
} else {
// Fall back to column parsing for keyword-like column names.
SQLColumnDefinition column = this.exprParser.parseColumn();
stmt.getTableElementList().add(column);
}
if (lexer.token() == Token.COMMA) {
lexer.nextToken();
if (lexer.token() == Token.RPAREN) {
// compatible for sql server: tolerate a trailing comma before ')'
break;
}
continue;
}
break;
}
accept(Token.RPAREN);
}
// add using: Spark data-source clause, e.g. USING parquet.
if (lexer.token() == Token.USING) {
lexer.nextToken();
SQLName expr = this.exprParser.name();
stmt.setDatasource(expr);
}
// Table comment (may also appear again at the end of the statement; see below).
if (lexer.token() == Token.COMMENT) {
lexer.nextToken();
SQLExpr comment = this.exprParser.expr();
stmt.setComment(comment);
}
// MAPPED BY (key = value, ...) assignment items.
if (lexer.identifierEquals(FnvHash.Constants.MAPPED)) {
lexer.nextToken();
accept(Token.BY);
this.exprParser.parseAssignItem(stmt.getMappedBy(), stmt);
}
// PARTITIONED BY (col type, ...): partition columns must start with a plain identifier.
if (lexer.token() == Token.PARTITIONED) {
lexer.nextToken();
accept(Token.BY);
accept(Token.LPAREN);
for (; ; ) {
if (lexer.token() != Token.IDENTIFIER) {
throw new ParserException("expect identifier. " + lexer.info());
}
SQLColumnDefinition column = this.exprParser.parseColumn();
stmt.addPartitionColumn(column);
// Keep comments that trail the column definition attached to the column.
if (lexer.isKeepComments() && lexer.hasComment()) {
column.addAfterComment(lexer.readAndResetComments());
}
if (lexer.token() != Token.COMMA) {
break;
} else {
lexer.nextToken();
// Comments after the comma also belong to the preceding column.
if (lexer.isKeepComments() && lexer.hasComment()) {
column.addAfterComment(lexer.readAndResetComments());
}
}
}
accept(Token.RPAREN);
}
// CLUSTERED BY (col, ...).
if (lexer.identifierEquals(FnvHash.Constants.CLUSTERED)) {
lexer.nextToken();
accept(Token.BY);
accept(Token.LPAREN);
for (; ; ) {
SQLSelectOrderByItem item = this.exprParser.parseSelectOrderByItem();
stmt.addClusteredByItem(item);
if (lexer.token() == Token.COMMA) {
lexer.nextToken();
continue;
}
break;
}
accept(Token.RPAREN);
}
// SKEWED BY (cols) ON (value-or-value-tuple, ...).
if (lexer.identifierEquals(FnvHash.Constants.SKEWED)) {
lexer.nextToken();
accept(Token.BY);
accept(Token.LPAREN);
this.exprParser.exprList(stmt.getSkewedBy(), stmt);
accept(Token.RPAREN);
accept(Token.ON);
accept(Token.LPAREN);
for (; ; ) {
if (lexer.token() == Token.LPAREN) {
// Parenthesized tuple of skew values.
SQLListExpr list = new SQLListExpr();
lexer.nextToken();
this.exprParser.exprList(list.getItems(), list);
accept(Token.RPAREN);
stmt.addSkewedByOn(list);
} else {
// Single skew value.
SQLExpr expr = this.exprParser.expr();
stmt.addSkewedByOn(expr);
}
if (lexer.token() == Token.COMMA) {
lexer.nextToken();
continue;
}
break;
}
accept(Token.RPAREN);
}
// SORTED BY is probed both before and after ROW FORMAT so the two clauses
// may appear in either order in the input.
if (lexer.identifierEquals(FnvHash.Constants.SORTED)) {
parseSortedBy(stmt);
}
if (lexer.token() == Token.ROW || lexer.identifierEquals(FnvHash.Constants.ROW)) {
parseRowFormat(stmt);
}
if (lexer.identifierEquals(FnvHash.Constants.SORTED)) {
parseSortedBy(stmt);
}
// A CLUSTERED BY or SORTED BY clause requires an INTO n BUCKETS clause.
if (stmt.getClusteredBy().size() > 0 || stmt.getSortedBy().size() > 0) {
accept(Token.INTO);
if (lexer.token() == Token.LITERAL_INT) {
stmt.setBuckets(lexer.integerValue().intValue());
lexer.nextToken();
} else {
throw new ParserException("into buckets must be integer. " + lexer.info());
}
acceptIdentifier("BUCKETS");
}
// ROW FORMAT may also follow the bucketing clause.
if (lexer.token() == Token.ROW || lexer.identifierEquals(FnvHash.Constants.ROW)) {
parseRowFormat(stmt);
}
// STORED AS: either INPUTFORMAT '...' [OUTPUTFORMAT '...'] or a single format name.
if (lexer.identifierEquals(FnvHash.Constants.STORED)) {
lexer.nextToken();
accept(Token.AS);
if (lexer.identifierEquals(FnvHash.Constants.INPUTFORMAT)) {
HiveInputOutputFormat format = new HiveInputOutputFormat();
lexer.nextToken();
format.setInput(this.exprParser.primary());
if (lexer.identifierEquals(FnvHash.Constants.OUTPUTFORMAT)) {
lexer.nextToken();
format.setOutput(this.exprParser.primary());
}
stmt.setStoredAs(format);
} else {
SQLName name = this.exprParser.name();
stmt.setStoredAs(name);
}
}
// LOCATION '<path>'.
if (lexer.identifierEquals(FnvHash.Constants.LOCATION)) {
lexer.nextToken();
SQLExpr location = this.exprParser.expr();
stmt.setLocation(location);
}
// TBLPROPERTIES (name = value, ...); key is taken from the raw token text.
if (lexer.identifierEquals(FnvHash.Constants.TBLPROPERTIES)) {
lexer.nextToken();
accept(Token.LPAREN);
for (; ; ) {
String name = lexer.stringVal();
lexer.nextToken();
accept(Token.EQ);
SQLExpr value = this.exprParser.primary();
stmt.addOption(name, value);
if (lexer.token() == Token.COMMA) {
lexer.nextToken();
// Tolerate a trailing comma before ')'.
if (lexer.token() == Token.RPAREN) {
break;
}
continue;
}
break;
}
accept(Token.RPAREN);
}
// META LIFECYCLE <expr>.
if (lexer.identifierEquals(FnvHash.Constants.META)) {
lexer.nextToken();
acceptIdentifier("LIFECYCLE");
stmt.setMetaLifeCycle(this.exprParser.primary());
}
// CREATE TABLE ... AS <select>.
if (lexer.token() == Token.AS) {
lexer.nextToken();
SQLSelect select = this.createSQLSelectParser().select();
stmt.setSelect(select);
}
// CREATE TABLE ... LIKE <other-table>.
if (lexer.token() == Token.LIKE) {
lexer.nextToken();
SQLName name = this.exprParser.name();
stmt.setLike(name);
}
// Trailing COMMENT clause; overwrites an earlier COMMENT if both are present.
if (lexer.token() == Token.COMMENT) {
lexer.nextToken();
SQLExpr comment = this.exprParser.expr();
stmt.setComment(comment);
}
return stmt;
}
Example usage of com.alibaba.druid.sql.ast.statement.SQLTableElement in the druid project by alibaba:
the AntsparkOutputVisitor class, visit method.
// Renders an AntsparkCreateTableStatement back to SQL text; extends the base
// CREATE TABLE output with the Spark-specific USING data-source statement.
@Override
public boolean visit(AntsparkCreateTableStatement x) {
print0(ucase ? "CREATE " : "create ");
if (x.isExternal()) {
print0(ucase ? "EXTERNAL " : "external ");
}
if (x.isIfNotExists()) {
print0(ucase ? "TABLE IF NOT EXISTS " : "table if not exists ");
} else {
print0(ucase ? "TABLE " : "table ");
}
x.getName().accept(this);
// LIKE is printed immediately after the table name.
if (x.getLike() != null) {
print0(ucase ? " LIKE " : " like ");
x.getLike().accept(this);
}
// Column/constraint list, one element per line, comma-separated.
final List<SQLTableElement> tableElementList = x.getTableElementList();
int size = tableElementList.size();
if (size > 0) {
print0(" (");
// In pretty mode, emit comments captured before the element list body.
if (this.isPrettyFormat() && x.hasBodyBeforeComment()) {
print(' ');
printlnComment(x.getBodyBeforeCommentsDirect());
}
this.indentCount++;
println();
for (int i = 0; i < size; ++i) {
SQLTableElement element = tableElementList.get(i);
element.accept(this);
// Comma after every element except the last.
if (i != size - 1) {
print(',');
}
// Trailing comments are printed after the comma so they stay on the same line.
if (this.isPrettyFormat() && element.hasAfterComment()) {
print(' ');
printlnComment(element.getAfterCommentsDirect());
}
if (i != size - 1) {
println();
}
}
this.indentCount--;
println();
print(')');
}
// Spark data-source clause.
// NOTE(review): the datasource is printed via toString() rather than accept(this);
// confirm this renders correctly for quoted identifiers.
if (x.getDatasource() != null) {
println();
print0(ucase ? "USING " : "using ");
print0(x.getDatasource().toString());
}
if (x.getComment() != null) {
println();
print0(ucase ? "COMMENT " : "comment ");
x.getComment().accept(this);
}
// PARTITIONED BY block mirrors the element-list formatting above.
int partitionSize = x.getPartitionColumns().size();
if (partitionSize > 0) {
println();
print0(ucase ? "PARTITIONED BY (" : "partitioned by (");
this.indentCount++;
println();
for (int i = 0; i < partitionSize; ++i) {
SQLColumnDefinition column = x.getPartitionColumns().get(i);
column.accept(this);
if (i != partitionSize - 1) {
print(',');
}
if (this.isPrettyFormat() && column.hasAfterComment()) {
print(' ');
printlnComment(column.getAfterCommentsDirect());
}
if (i != partitionSize - 1) {
println();
}
}
this.indentCount--;
println();
print(')');
}
List<SQLSelectOrderByItem> clusteredBy = x.getClusteredBy();
if (clusteredBy.size() > 0) {
println();
print0(ucase ? "CLUSTERED BY (" : "clustered by (");
printAndAccept(clusteredBy, ",");
print(')');
}
List<SQLSelectOrderByItem> sortedBy = x.getSortedBy();
if (sortedBy.size() > 0) {
println();
print0(ucase ? "SORTED BY (" : "sorted by (");
printAndAccept(sortedBy, ", ");
print(')');
}
// Bucket count of 0 means the clause was absent.
int buckets = x.getBuckets();
if (buckets > 0) {
println();
print0(ucase ? "INTO " : "into ");
print(buckets);
print0(ucase ? " BUCKETS" : " buckets");
}
SQLExpr storedAs = x.getStoredAs();
if (storedAs != null) {
println();
print0(ucase ? "STORED AS " : "stored as ");
storedAs.accept(this);
}
SQLSelect select = x.getSelect();
if (select != null) {
println();
print0(ucase ? "AS" : "as");
println();
select.accept(this);
}
// NOTE(review): TBLPROPERTIES is rendered from getSerdeProperties() — the
// accessor name suggests SERDE properties; confirm the intended mapping.
Map<String, SQLObject> serdeProperties = x.getSerdeProperties();
if (serdeProperties.size() > 0) {
println();
print0(ucase ? "TBLPROPERTIES (" : "tblproperties (");
String seperator = "";
for (Entry<String, SQLObject> entry : serdeProperties.entrySet()) {
print0("'" + entry.getKey() + "'='");
entry.getValue().accept(this);
print0("'" + seperator);
seperator = ",";
}
print(')');
}
SQLExpr location = x.getLocation();
if (location != null) {
println();
print0(ucase ? "LOCATION " : "location ");
location.accept(this);
}
// Returning false stops the generic visitor from re-walking the children.
return false;
}
Example usage of com.alibaba.druid.sql.ast.statement.SQLTableElement in the druid project by alibaba:
the MySqlSchemaStatVisitorTest_pk_0 class, test_0 method.
/**
 * Verifies that MySqlSchemaStatVisitor extracts the table, all 15 referenced
 * columns, and the primary-key / unique-key flags from a MySQL CREATE TABLE
 * statement with multiple indexes.
 */
public void test_0() throws Exception {
    String sql = "CREATE TABLE `m_dt` (\n" + " `id` int(10) unsigned NOT NULL AUTO_INCREMENT COMMENT '主键',\n" + " `gmt_create` datetime NOT NULL COMMENT '创建时间',\n" + " `gmt_modified` datetime NOT NULL COMMENT '修改时间',\n" + " `instance_id` int(11) NOT NULL COMMENT '实例id',\n" + " `schema_name` varchar(128) NOT NULL COMMENT '数据库schema名称',\n" + " `state` tinyint(4) NOT NULL DEFAULT '0' COMMENT ' x dd ',\n" + " `busi_user` varchar(64) DEFAULT NULL COMMENT 'JDBC业务用户',\n" + " `bzp` varchar(128) DEFAULT NULL COMMENT 'bcp',\n" + " `ecd` varchar(32) DEFAULT NULL COMMENT 'xxx',\n" + " `last_sync_time` datetime DEFAULT NULL COMMENT '最后同步时间',\n" + " `catalog_name` varchar(128) NOT NULL COMMENT '物理库名称',\n" + " `search_name` varchar(256) NOT NULL COMMENT '用于搜索,区分不同数据库的不同字段信息',\n" + " `db_type` tinyint(4) NOT NULL COMMENT '数据库类型,和meta_instance表一致',\n" + " `et` varchar(32) NOT NULL DEFAULT '' COMMENT 'et',\n" + " `ae` varchar(32) DEFAULT NULL COMMENT 'ae',\n" + " PRIMARY KEY (`id`),\n" + " UNIQUE KEY `uk_instanceid_schemaname` (`instance_id`,`catalog_name`,`schema_name`),\n" + " KEY `idx_schema_name` (`schema_name`),\n" + " KEY `idx_instance_id_state` (`instance_id`,`id`,`state`),\n" + " KEY `idx_search_name` (`search_name`(255))\n" + ") ENGINE=InnoDB AUTO_INCREMENT=408695 DEFAULT CHARSET=utf8 COMMENT='数据库表信息'";
    MySqlStatementParser parser = new MySqlStatementParser(sql);
    List<SQLStatement> statementList = parser.parseStatementList();
    // Assert the statement count BEFORE casting, so a bad parse fails with a
    // clear assertion instead of a ClassCastException / IndexOutOfBounds.
    Assert.assertEquals(1, statementList.size());
    SQLCreateTableStatement stmt = (SQLCreateTableStatement) statementList.get(0);
    MySqlSchemaStatVisitor visitor = new MySqlSchemaStatVisitor();
    stmt.accept(visitor);
    System.out.println(sql);
    System.out.println("Tables : " + visitor.getTables());
    System.out.println("fields : " + visitor.getColumns());
    Assert.assertEquals(1, visitor.getTables().size());
    Assert.assertEquals(true, visitor.containsTable("m_dt"));
    Assert.assertEquals(15, visitor.getColumns().size());
    {
        // `id` is the primary key.
        TableStat.Column column = visitor.getColumn("m_dt", "id");
        assertNotNull(column);
        assertTrue(column.isPrimaryKey());
    }
    {
        // `schema_name` participates in UNIQUE KEY `uk_instanceid_schemaname`.
        TableStat.Column column = visitor.getColumn("m_dt", "schema_name");
        assertNotNull(column);
        assertTrue(column.isUnique());
    }
    {
        // `instance_id` participates in the same unique key.
        TableStat.Column column = visitor.getColumn("m_dt", "instance_id");
        assertNotNull(column);
        assertTrue(column.isUnique());
    }
    // The original loop computed the unique-constraint name into a dead local
    // and asserted nothing; now verify every named unique constraint renders
    // a non-empty identifier.
    for (SQLTableElement element : stmt.getTableElementList()) {
        if (element instanceof SQLUnique) {
            SQLName name = ((SQLUnique) element).getName();
            if (name != null) {
                Assert.assertFalse(name.toString().isEmpty());
            }
        }
    }
}
End of aggregated SQLTableElement usage examples.