Example usage of com.alibaba.druid.sql.repository.SchemaObject in the druid project (alibaba): class MySqlStatementParser, method parseInsert.
/**
 * Parses a MySQL INSERT statement starting at the current lexer position.
 * <p>
 * Handles, in order: optional modifier keywords (LOW_PRIORITY, DELAYED,
 * HIGH_PRIORITY, IGNORE, ROLLBACK_ON_FAIL), optimizer hints, the
 * INTO/OVERWRITE target clause, the table name (with optional alias and
 * PARTITION clause), an optional cached or freshly parsed column list, and
 * finally one of the value sources: VALUES/VALUE, SET assignments, a SELECT,
 * a parenthesized SELECT, or a WITH query — plus an optional
 * ON DUPLICATE KEY UPDATE tail.
 *
 * @return the populated {@link MySqlInsertStatement} (typed as the
 *         {@link SQLInsertStatement} base class)
 */
public SQLInsertStatement parseInsert() {
    MySqlInsertStatement stmt = new MySqlInsertStatement();
    SQLName tableName = null;
    if (lexer.token() == Token.INSERT) {
        lexer.nextToken();
        // Consume any run of INSERT modifier keywords; the loop exits at the
        // first token that is not a recognized modifier identifier.
        for (; ; ) {
            if (lexer.token() == Token.IDENTIFIER) {
                // hash_lower gives a case-insensitive FNV hash of the token,
                // matched against precomputed keyword hashes.
                long hash = lexer.hash_lower();
                if (hash == FnvHash.Constants.LOW_PRIORITY) {
                    stmt.setLowPriority(true);
                    lexer.nextToken();
                    continue;
                }
                if (hash == FnvHash.Constants.DELAYED) {
                    stmt.setDelayed(true);
                    lexer.nextToken();
                    continue;
                }
                if (hash == FnvHash.Constants.HIGH_PRIORITY) {
                    stmt.setHighPriority(true);
                    lexer.nextToken();
                    continue;
                }
                if (hash == FnvHash.Constants.IGNORE) {
                    stmt.setIgnore(true);
                    lexer.nextToken();
                    continue;
                }
                if (hash == FnvHash.Constants.ROLLBACK_ON_FAIL) {
                    stmt.setRollbackOnFail(true);
                    lexer.nextToken();
                    continue;
                }
            }
            break;
        }
        // Optimizer hints placed directly after INSERT (e.g. /*+ ... */).
        if (lexer.token() == Token.HINT) {
            List<SQLCommentHint> hints = this.exprParser.parseHints();
            stmt.setHints(hints);
        }
        // Target clause: INSERT INTO [TABLE] ... or INSERT OVERWRITE [TABLE|INTO] ...
        if (lexer.token() == Token.INTO) {
            lexer.nextToken();
            if (lexer.token() == Token.TABLE) {
                lexer.nextToken();
            }
        } else if (lexer.identifierEquals(FnvHash.Constants.OVERWRITE)) {
            lexer.nextToken();
            stmt.setOverwrite(true);
            if (lexer.token() == Token.TABLE) {
                lexer.nextToken();
            } else if (lexer.token() == Token.INTO) {
                lexer.nextToken();
            }
        }
        // Skip a stray line comment between the target clause and the table name.
        if (lexer.token() == Token.LINE_COMMENT) {
            lexer.nextToken();
        }
        // FULLTEXT DICTIONARY target (dialect extension).
        if (lexer.token() == Token.FULLTEXT) {
            lexer.nextToken();
            if (lexer.identifierEquals(FnvHash.Constants.DICTIONARY)) {
                lexer.nextToken();
                stmt.setFulltextDictionary(true);
            }
        }
        tableName = this.exprParser.name();
        stmt.setTableName(tableName);
        // A hint right after the table name is preserved as a trailing comment
        // on the table source rather than parsed as an optimizer hint.
        if (lexer.token() == Token.HINT) {
            String comment = "/*" + lexer.stringVal() + "*/";
            lexer.nextToken();
            stmt.getTableSource().addAfterComment(comment);
        }
        // Bare identifier after the table name is a table alias — unless it is
        // the keyword-ish VALUE, which introduces the value list instead.
        if (lexer.token() == Token.IDENTIFIER && !lexer.identifierEquals(FnvHash.Constants.VALUE)) {
            stmt.setAlias(lexer.stringVal());
            lexer.nextToken();
        }
        // INSERT ... WITH cte AS (...) SELECT ... — the WITH query becomes the
        // insert source. NOTE(review): WITH is also handled again further down
        // (after the column list); presumably this branch covers WITH appearing
        // directly after the table name — confirm both paths are intended.
        if (lexer.token() == Token.WITH) {
            SQLSelectStatement withStmt = (SQLSelectStatement) parseWith();
            stmt.setQuery(withStmt.getSelect());
        }
        // PARTITION (p1 [= v1], p2, ...) [IF NOT EXISTS]
        if (lexer.token() == Token.PARTITION) {
            lexer.nextToken();
            accept(Token.LPAREN);
            for (; ; ) {
                SQLAssignItem ptExpr = new SQLAssignItem();
                ptExpr.setTarget(this.exprParser.name());
                // The "= value" part is optional per partition entry.
                if (lexer.token() == Token.EQ) {
                    lexer.nextToken();
                    SQLExpr ptValue = this.exprParser.expr();
                    ptExpr.setValue(ptValue);
                }
                stmt.addPartition(ptExpr);
                if (lexer.token() != Token.COMMA) {
                    break;
                } else {
                    lexer.nextToken();
                }
            }
            accept(Token.RPAREN);
            if (lexer.token() == Token.IF) {
                lexer.nextToken();
                accept(Token.NOT);
                accept(Token.EXISTS);
                stmt.setIfNotExists(true);
            }
        }
    }
    int columnSize = 0;
    List<SQLColumnDefinition> columnDefinitionList = null;
    // Optional parenthesized section after the table name: either the insert
    // column list or a parenthesized subquery "(SELECT ...)".
    if (lexer.token() == Token.LPAREN) {
        // Column-list cache: avoids re-parsing an identical "(c1, c2, ...)"
        // text for repeated inserts into the same table.
        boolean useInsertColumnsCache = lexer.isEnabled(SQLParserFeature.UseInsertColumnsCache);
        InsertColumnsCache insertColumnsCache = null;
        long tableNameHash = 0;
        InsertColumnsCache.Entry cachedColumns = null;
        if (useInsertColumnsCache) {
            insertColumnsCache = this.insertColumnsCache;
            if (insertColumnsCache == null) {
                insertColumnsCache = InsertColumnsCache.global;
            }
            if (tableName != null) {
                tableNameHash = tableName.nameHashCode64();
                cachedColumns = insertColumnsCache.get(tableNameHash);
            }
        }
        SchemaObject tableObject = null;
        int pos = lexer.pos();
        // Fast path: the raw SQL text at the current position literally starts
        // with the cached columns string, so skip tokenizing it entirely.
        if (cachedColumns != null && lexer.text.startsWith(cachedColumns.columnsString, pos)) {
            if (!lexer.isEnabled(SQLParserFeature.OptimizedForParameterized)) {
                List<SQLExpr> columns = stmt.getColumns();
                List<SQLExpr> cachedColumns2 = cachedColumns.columns;
                // Clone each cached expression so the statement owns its AST nodes.
                for (int i = 0, size = cachedColumns2.size(); i < size; i++) {
                    columns.add(cachedColumns2.get(i).clone());
                }
            }
            stmt.setColumnsString(cachedColumns.columnsFormattedString, cachedColumns.columnsFormattedStringHash);
            // Jump the lexer past the matched columns text and resume.
            int p2 = pos + cachedColumns.columnsString.length();
            lexer.reset(p2);
            lexer.nextToken();
        } else {
            // Look ahead one token to distinguish "(SELECT ..." from a column list.
            Lexer.SavePoint mark = lexer.mark();
            lexer.nextToken();
            if (lexer.token() == Token.SELECT) {
                // Rewind so the select parser sees the opening LPAREN itself.
                lexer.reset(mark);
                SQLSelect select = this.exprParser.createSelectParser().select();
                select.setParent(stmt);
                stmt.setQuery(select);
            } else {
                // Optional schema-aware mode: resolve the table so each parsed
                // column can be matched to its column definition for type checks.
                if (repository != null && lexer.isEnabled(SQLParserFeature.InsertValueCheckType)) {
                    tableObject = repository.findTable(tableName.nameHashCode64());
                }
                if (tableObject != null) {
                    columnDefinitionList = new ArrayList<SQLColumnDefinition>();
                }
                List<SQLExpr> columns = stmt.getColumns();
                if (lexer.token() != Token.RPAREN) {
                    for (; ; ) {
                        String identName;
                        long hash;
                        Token token = lexer.token();
                        if (token == Token.IDENTIFIER) {
                            identName = lexer.stringVal();
                            hash = lexer.hash_lower();
                        } else if (token == Token.LITERAL_CHARS) {
                            // Single-quoted column name: keep quotes unless the
                            // parser is configured to ignore name quoting.
                            if (lexer.isEnabled(SQLParserFeature.IgnoreNameQuotes)) {
                                identName = lexer.stringVal();
                            } else {
                                identName = '\'' + lexer.stringVal() + '\'';
                            }
                            hash = 0;
                        } else if (token == Token.LITERAL_ALIAS) {
                            identName = lexer.stringVal();
                            if (lexer.isEnabled(SQLParserFeature.IgnoreNameQuotes)) {
                                identName = SQLUtils.normalize(identName, dbType);
                            }
                            hash = 0;
                        } else {
                            // Fallback: accept whatever token text is present
                            // (e.g. non-reserved keywords used as column names).
                            identName = lexer.stringVal();
                            hash = 0;
                        }
                        lexer.nextTokenComma();
                        SQLExpr expr = new SQLIdentifierExpr(identName, hash);
                        // Qualified names: a.b.c becomes nested property exprs.
                        while (lexer.token() == Token.DOT) {
                            lexer.nextToken();
                            String propertyName = lexer.stringVal();
                            lexer.nextToken();
                            expr = new SQLPropertyExpr(expr, propertyName);
                        }
                        expr.setParent(stmt);
                        columns.add(expr);
                        columnSize++;
                        if (tableObject != null) {
                            // May add null when the column is not found in the
                            // schema; list stays index-aligned with columns.
                            SQLColumnDefinition columnDefinition = tableObject.findColumn(hash);
                            columnDefinitionList.add(columnDefinition);
                        }
                        if (lexer.token() == Token.COMMA) {
                            lexer.nextTokenIdent();
                            continue;
                        }
                        break;
                    }
                    columnSize = stmt.getColumns().size();
                    // Populate the columns cache with both the raw substring
                    // (for the fast path above) and a formatted rendering.
                    if (insertColumnsCache != null && tableName != null) {
                        String columnsString = lexer.subString(pos, lexer.pos() - pos);
                        List<SQLExpr> clonedColumns = new ArrayList<SQLExpr>(columnSize);
                        for (int i = 0; i < columns.size(); i++) {
                            clonedColumns.add(columns.get(i).clone());
                        }
                        StringBuilder buf = new StringBuilder();
                        SQLASTOutputVisitor outputVisitor = SQLUtils.createOutputVisitor(buf, dbType);
                        outputVisitor.printInsertColumns(columns);
                        String formattedColumnsString = buf.toString();
                        long columnsFormattedStringHash = FnvHash.fnv1a_64_lower(formattedColumnsString);
                        // NOTE(review): the cache is keyed here with
                        // tableName.hashCode64() but looked up above with
                        // tableName.nameHashCode64() — if those differ, cached
                        // entries would never be found. Confirm intentional.
                        insertColumnsCache.put(tableName.hashCode64(), columnsString, formattedColumnsString, clonedColumns);
                        stmt.setColumnsString(formattedColumnsString, columnsFormattedStringHash);
                    }
                }
                accept(Token.RPAREN);
            }
        }
    }
    // Hints (or a skipped line comment) between the column list and the value
    // source; a hint here may be attached to a following SELECT as its head hint.
    List<SQLCommentHint> commentHints = null;
    if (lexer.token() == Token.HINT) {
        commentHints = this.exprParser.parseHints();
    } else if (lexer.token() == Token.LINE_COMMENT) {
        lexer.nextToken();
    }
    // Value source alternatives.
    if (lexer.token() == Token.VALUES || lexer.identifierEquals(FnvHash.Constants.VALUE)) {
        lexer.nextTokenLParen();
        // InsertReader mode: caller consumes the raw values itself, so return
        // the statement with the values list left unparsed.
        if (lexer.isEnabled(SQLParserFeature.InsertReader)) {
            return stmt;
        }
        if (lexer.isEnabled(SQLParserFeature.InsertValueNative)) {
            parseValueClauseNative(stmt.getValuesList(), columnDefinitionList, columnSize, stmt);
        } else {
            parseValueClause(stmt.getValuesList(), columnDefinitionList, columnSize, stmt);
        }
    } else if (lexer.token() == Token.SET) {
        // INSERT ... SET col = expr, ... — modeled as one values clause whose
        // columns/values are appended pairwise.
        lexer.nextToken();
        SQLInsertStatement.ValuesClause values = new SQLInsertStatement.ValuesClause();
        stmt.addValueCause(values);
        for (; ; ) {
            SQLName name = this.exprParser.name();
            stmt.addColumn(name);
            if (lexer.token() == Token.EQ) {
                lexer.nextToken();
            } else {
                // Also tolerate ":=" as the assignment operator.
                accept(Token.COLONEQ);
            }
            values.addValue(this.exprParser.expr());
            if (lexer.token() == Token.COMMA) {
                lexer.nextToken();
                continue;
            }
            break;
        }
    } else if (lexer.token() == (Token.SELECT)) {
        // INSERT ... SELECT — a preceding hint becomes the select's head hint.
        SQLSelect select = this.exprParser.createSelectParser().select();
        if (commentHints != null && !commentHints.isEmpty()) {
            select.setHeadHint(commentHints.get(0));
        }
        select.setParent(stmt);
        stmt.setQuery(select);
    } else if (lexer.token() == (Token.LPAREN)) {
        // INSERT ... (SELECT ...) — parenthesized query source.
        lexer.nextToken();
        SQLSelect select = this.exprParser.createSelectParser().select();
        select.setParent(stmt);
        stmt.setQuery(select);
        accept(Token.RPAREN);
    } else if (lexer.token() == WITH) {
        // INSERT ... WITH ... — delegate the whole CTE query to the select parser.
        SQLSelect query = this.exprParser.createSelectParser().select();
        stmt.setQuery(query);
    }
    // Optional ON DUPLICATE KEY UPDATE assignment list.
    if (lexer.token() == Token.ON) {
        lexer.nextToken();
        acceptIdentifier("DUPLICATE");
        accept(Token.KEY);
        accept(Token.UPDATE);
        List<SQLExpr> duplicateKeyUpdate = stmt.getDuplicateKeyUpdate();
        for (; ; ) {
            SQLName name = this.exprParser.name();
            accept(Token.EQ);
            SQLExpr value;
            try {
                value = this.exprParser.expr();
            } catch (EOFParserException e) {
                // Re-throw with context naming the half-parsed assignment.
                throw new ParserException("EOF, " + name + "=", e);
            }
            SQLBinaryOpExpr assignment = new SQLBinaryOpExpr(name, SQLBinaryOperator.Equality, value);
            assignment.setParent(stmt);
            duplicateKeyUpdate.add(assignment);
            if (lexer.token() == Token.COMMA) {
                lexer.nextTokenIdent();
                continue;
            }
            break;
        }
    }
    return stmt;
}
Example usage of com.alibaba.druid.sql.repository.SchemaObject in the druid project (alibaba): class CanalSQLSchemaTest, method test_persistent.
@Test
public void test_persistent() throws Throwable {
    // Schema repository using the MySQL dialect.
    SchemaRepository repo = new SchemaRepository(JdbcConstants.MYSQL);
    String ddl = " create table example_vc_tbl( c1 int not null auto_increment primary key," + "c2 varchar(70), vc1 int as (length(c2)) virtual," + "DIM_SUM varchar(128) AS (MD5(UPPER(CONCAT(c2, c1)))) PERSISTENT)";
    repo.console(ddl);
    repo.setDefaultSchema("test");
    // The PERSISTENT generated column must be discoverable on the resolved table.
    SchemaObject resolved = repo.findTable("example_vc_tbl");
    SQLColumnDefinition dimSum = resolved.findColumn("DIM_SUM");
    Assert.assertTrue(dimSum != null);
}
Example usage of com.alibaba.druid.sql.repository.SchemaObject in the druid project (alibaba): class SchemaStatTest15, method test_schemaStat_2.
public void test_schemaStat_2() throws Exception {
    // Load a table and a dependent view into a MySQL-dialect repository.
    SchemaRepository repo = new SchemaRepository(JdbcConstants.MYSQL);
    String tableDdl = "create table table1 (fid bigint, f1 varchar(100), f2 varchar(100));";
    // NOTE(review): this view SQL looks like it is missing a FROM keyword
    // ("select fid table1"); kept verbatim since the test pins parser behavior.
    String viewDdl = "create view view1 as select fid table1 where fid = 1;";
    repo.acceptDDL(tableDdl);
    repo.acceptDDL(viewDdl);
    // The view must be resolvable by name and typed as a View object.
    SchemaObject view = repo.findView("view1");
    System.out.println(view);
    assertEquals("view1", view.getName());
    assertEquals(SchemaObjectType.View, view.getType());
}
Example usage of com.alibaba.druid.sql.repository.SchemaObject in the canal project (alibaba): class FastsqlSchemaTest, method testSimple.
@Test
public void testSimple() throws Throwable {
    // Apply two DDLs for the same table; the second uses IF NOT EXISTS and a
    // narrower column set, so the first definition should win.
    SchemaRepository repo = new SchemaRepository(JdbcConstants.MYSQL);
    String createSql = "CREATE TABLE `table_x1` ( `id` bigint(20) NOT NULL AUTO_INCREMENT, " + "`key1` longtext NOT NULL COMMENT 'key1', `value1` longtext NOT NULL COMMENT 'value1', PRIMARY KEY (`id`) )" + "ENGINE=InnoDB DEFAULT CHARSET=utf8mb4";
    String recreateSql = " CREATE TABLE IF NOT EXISTS `table_x1` ( `id` bigint(20) NOT NULL AUTO_INCREMENT," + "`key1` longtext NOT NULL COMMENT 'key1',PRIMARY KEY (`id`)) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4";
    repo.console(createSql);
    repo.console(recreateSql);
    repo.setDefaultSchema("test");
    SchemaObject resolved = repo.findTable("table_x1");
    System.out.println(resolved.getStatement().toString());
    // `value1` only exists in the first DDL — it must still be present.
    Assert.assertTrue(resolved.findColumn("value1") != null);
}
Example usage of com.alibaba.druid.sql.repository.SchemaObject in the canal project (alibaba): class FastsqlSchemaTest, method test_invisible.
@Test
public void test_invisible() throws Throwable {
    // DDL featuring an INVISIBLE index inside a version-gated comment
    // (/*!80000 ... */) plus a BLOCK_FORMAT table option; the parser must
    // still accept it and expose the table's columns.
    SchemaRepository repo = new SchemaRepository(JdbcConstants.MYSQL);
    String ddl = " CREATE TABLE `proposal_order_info` (`id` bigint(20) unsigned NOT NULL AUTO_INCREMENT," + "`created_at` timestamp NULL DEFAULT NULL, " + "PRIMARY KEY (`id`) , " + "KEY `idx_create_time` (`created_at`) /*!80000 INVISIBLE */" + ") ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 BLOCK_FORMAT=ENCRYPTED";
    repo.console(ddl);
    repo.setDefaultSchema("test");
    SchemaObject resolved = repo.findTable("proposal_order_info");
    System.out.println(resolved.getStatement().toString());
    Assert.assertTrue(resolved.findColumn("id") != null);
}
Aggregations