use of com.alibaba.druid.sql.ast.SQLDataType in project druid by alibaba.
the class OracleExprParser method parseDataType.
public SQLDataType parseDataType() {
    if (lexer.token() == Token.CONSTRAINT || lexer.token() == Token.COMMA) {
        return null;
    }

    if (lexer.token() == Token.DEFAULT || lexer.token() == Token.NOT || lexer.token() == Token.NULL) {
        return null;
    }

    if (lexer.token() == Token.INTERVAL) {
        lexer.nextToken();
        if (identifierEquals("YEAR")) {
            lexer.nextToken();
            OracleDataTypeIntervalYear interval = new OracleDataTypeIntervalYear();
            if (lexer.token() == Token.LPAREN) {
                lexer.nextToken();
                interval.addArgument(this.expr());
                accept(Token.RPAREN);
            }
            accept(Token.TO);
            acceptIdentifier("MONTH");
            return interval;
        } else {
            acceptIdentifier("DAY");
            OracleDataTypeIntervalDay interval = new OracleDataTypeIntervalDay();
            if (lexer.token() == Token.LPAREN) {
                lexer.nextToken();
                interval.addArgument(this.expr());
                accept(Token.RPAREN);
            }
            accept(Token.TO);
            acceptIdentifier("SECOND");
            if (lexer.token() == Token.LPAREN) {
                lexer.nextToken();
                interval.getFractionalSeconds().add(this.expr());
                accept(Token.RPAREN);
            }
            return interval;
        }
    }

    String typeName;
    if (identifierEquals("LONG")) {
        lexer.nextToken();
        acceptIdentifier("RAW");
        typeName = "LONG RAW";
    } else {
        SQLName typeExpr = name();
        typeName = typeExpr.toString();
    }

    if ("TIMESTAMP".equalsIgnoreCase(typeName)) {
        OracleDataTypeTimestamp timestamp = new OracleDataTypeTimestamp();
        if (lexer.token() == Token.LPAREN) {
            lexer.nextToken();
            timestamp.addArgument(this.expr());
            accept(Token.RPAREN);
        }
        if (lexer.token() == Token.WITH) {
            lexer.nextToken();
            if (identifierEquals("LOCAL")) {
                lexer.nextToken();
                timestamp.setWithLocalTimeZone(true);
            } else {
                timestamp.setWithTimeZone(true);
            }
            acceptIdentifier("TIME");
            acceptIdentifier("ZONE");
        }
        return timestamp;
    }

    if (isCharType(typeName)) {
        SQLCharacterDataType charType = new SQLCharacterDataType(typeName);
        if (lexer.token() == Token.LPAREN) {
            lexer.nextToken();
            charType.addArgument(this.expr());
            if (identifierEquals("CHAR")) {
                lexer.nextToken();
                charType.setCharType(SQLCharacterDataType.CHAR_TYPE_CHAR);
            } else if (identifierEquals("BYTE")) {
                lexer.nextToken();
                charType.setCharType(SQLCharacterDataType.CHAR_TYPE_BYTE);
            }
            accept(Token.RPAREN);
        }
        return parseCharTypeRest(charType);
    }

    if (lexer.token() == Token.PERCENT) {
        lexer.nextToken();
        if (identifierEquals("TYPE")) {
            lexer.nextToken();
            typeName += "%TYPE";
        } else if (identifierEquals("ROWTYPE")) {
            lexer.nextToken();
            typeName += "%ROWTYPE";
        } else {
            throw new ParserException("syntax error : " + lexer.token() + " " + lexer.stringVal());
        }
    }

    SQLDataType dataType = new SQLDataTypeImpl(typeName);
    return parseDataTypeRest(dataType);
}
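A minimal usage sketch, not part of the quoted source, that exercises the INTERVAL branch above; it assumes the same SQLParserUtils entry point used by the Oracle2PG test at the end of this page:

// Hedged sketch: parse an Oracle interval type with the Oracle expression parser.
SQLDataType dt = SQLParserUtils
        .createExprParser("INTERVAL DAY(2) TO SECOND(6)", JdbcConstants.ORACLE)
        .parseDataType();
// dt is expected to be an OracleDataTypeIntervalDay carrying the day precision (2)
// as an argument and the fractional-seconds precision (6) in getFractionalSeconds().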
use of com.alibaba.druid.sql.ast.SQLDataType in project druid by alibaba.
the class MySqlOutputVisitor method visit.
public boolean visit(SQLColumnDefinition x) {
    x.getName().accept(this);

    SQLDataType dataType = x.getDataType();
    if (dataType != null) {
        print(' ');
        dataType.accept(this);
    }

    if (x.getCharsetExpr() != null) {
        print0(ucase ? " CHARSET " : " charset ");
        x.getCharsetExpr().accept(this);
    }

    for (SQLColumnConstraint item : x.getConstraints()) {
        print(' ');
        item.accept(this);
    }

    if (x.getDefaultExpr() != null) {
        print0(ucase ? " DEFAULT " : " default ");
        x.getDefaultExpr().accept(this);
    }

    if (x.getStorage() != null) {
        print0(ucase ? " STORAGE " : " storage ");
        x.getStorage().accept(this);
    }

    if (x.getOnUpdate() != null) {
        print0(ucase ? " ON UPDATE " : " on update ");
        x.getOnUpdate().accept(this);
    }

    if (x.isAutoIncrement()) {
        print0(ucase ? " AUTO_INCREMENT" : " auto_increment");
    }

    if (x.getComment() != null) {
        print0(ucase ? " COMMENT " : " comment ");
        x.getComment().accept(this);
    }

    if (x.getAsExpr() != null) {
        print0(ucase ? " AS (" : " as (");
        x.getAsExpr().accept(this);
        print(')');
    }

    if (x.isSorted()) {
        print0(ucase ? " SORTED" : " sorted");
    }

    return false;
}
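The visitor is usually not invoked directly. A rough driver sketch (assumed, not from the quoted source): parsing a MySQL statement and printing it back routes every column definition through the visit method above, since SQLUtils.toMySqlString renders the AST with MySqlOutputVisitor.

// Sketch: round-trip a CREATE TABLE through the MySQL parser and output visitor.
String ddl = "CREATE TABLE t (id BIGINT NOT NULL AUTO_INCREMENT COMMENT 'pk')";
SQLStatement stmt = SQLUtils.parseStatements(ddl, JdbcConstants.MYSQL).get(0);
System.out.println(SQLUtils.toMySqlString(stmt));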
use of com.alibaba.druid.sql.ast.SQLDataType in project canal by alibaba.
the class MemoryTableMeta method processTableElement.
private void processTableElement(SQLTableElement element, TableMeta tableMeta) {
    if (element instanceof SQLColumnDefinition) {
        FieldMeta fieldMeta = new FieldMeta();
        SQLColumnDefinition column = (SQLColumnDefinition) element;
        String name = getSqlName(column.getName());
        // String charset = getSqlName(column.getCharsetExpr());
        SQLDataType dataType = column.getDataType();
        String dataTypStr = dataType.getName();
        if (StringUtils.equalsIgnoreCase(dataTypStr, "float")) {
            if (dataType.getArguments().size() == 1) {
                int num = Integer.valueOf(dataType.getArguments().get(0).toString());
                if (num > 24) {
                    dataTypStr = "double";
                }
            }
        }
        if (dataType.getArguments().size() > 0) {
            dataTypStr += "(";
            for (int i = 0; i < column.getDataType().getArguments().size(); i++) {
                if (i != 0) {
                    dataTypStr += ",";
                }
                SQLExpr arg = column.getDataType().getArguments().get(i);
                dataTypStr += arg.toString();
            }
            dataTypStr += ")";
        }
        if (dataType instanceof SQLDataTypeImpl) {
            SQLDataTypeImpl dataTypeImpl = (SQLDataTypeImpl) dataType;
            if (dataTypeImpl.isUnsigned()) {
                dataTypStr += " unsigned";
            }
            if (dataTypeImpl.isZerofill()) {
                // if zerofill is set, implicitly add the unsigned attribute to the column
                if (!dataTypeImpl.isUnsigned()) {
                    dataTypStr += " unsigned";
                }
                dataTypStr += " zerofill";
            }
        }
        if (column.getDefaultExpr() == null || column.getDefaultExpr() instanceof SQLNullExpr) {
            fieldMeta.setDefaultValue(null);
        } else {
            fieldMeta.setDefaultValue(DruidDdlParser.unescapeQuotaName(getSqlName(column.getDefaultExpr())));
        }
        fieldMeta.setColumnName(name);
        fieldMeta.setColumnType(dataTypStr);
        fieldMeta.setNullable(true);
        List<SQLColumnConstraint> constraints = column.getConstraints();
        for (SQLColumnConstraint constraint : constraints) {
            if (constraint instanceof SQLNotNullConstraint) {
                fieldMeta.setNullable(false);
            } else if (constraint instanceof SQLNullConstraint) {
                fieldMeta.setNullable(true);
            } else if (constraint instanceof SQLColumnPrimaryKey) {
                fieldMeta.setKey(true);
                fieldMeta.setNullable(false);
            } else if (constraint instanceof SQLColumnUniqueKey) {
                fieldMeta.setUnique(true);
            }
        }
        tableMeta.addFieldMeta(fieldMeta);
    } else if (element instanceof MySqlPrimaryKey) {
        MySqlPrimaryKey column = (MySqlPrimaryKey) element;
        List<SQLSelectOrderByItem> pks = column.getColumns();
        for (SQLSelectOrderByItem pk : pks) {
            String name = getSqlName(pk.getExpr());
            FieldMeta field = tableMeta.getFieldMetaByName(name);
            field.setKey(true);
            field.setNullable(false);
        }
    } else if (element instanceof MySqlUnique) {
        MySqlUnique column = (MySqlUnique) element;
        List<SQLSelectOrderByItem> uks = column.getColumns();
        for (SQLSelectOrderByItem uk : uks) {
            String name = getSqlName(uk.getExpr());
            FieldMeta field = tableMeta.getFieldMetaByName(name);
            field.setUnique(true);
        }
    }
}
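processTableElement is private to MemoryTableMeta, so it is not called from outside. For context, a rough sketch of how the SQLTableElement values it receives are typically produced with druid; the variable names and the direct call are assumptions for illustration only:

// Sketch: canal parses the DDL with druid and walks the table element list.
String ddl = "CREATE TABLE t (id BIGINT NOT NULL, PRIMARY KEY (id))";
SQLStatement stmt = SQLUtils.parseStatements(ddl, JdbcConstants.MYSQL).get(0);
SQLCreateTableStatement create = (SQLCreateTableStatement) stmt;
TableMeta tableMeta = new TableMeta();
for (SQLTableElement element : create.getTableElementList()) {
    // each element is a SQLColumnDefinition, MySqlPrimaryKey, MySqlUnique, ...
    processTableElement(element, tableMeta); // illustrative; the real call site is inside MemoryTableMeta
}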
use of com.alibaba.druid.sql.ast.SQLDataType in project canal by alibaba.
the class TypeUtil method getPhoenixType.
/**
 * Returns the Phoenix type definition for a column's SQL definition.
 * @see "https://dev.mysql.com/doc/connector-j/5.1/en/connector-j-reference-type-conversions.html"
 * @param definition the SQL column definition
 * @param limit whether to enforce the column length limit
 * @return the Phoenix column type definition
 */
public static String getPhoenixType(SQLColumnDefinition definition, boolean limit) {
    if (definition == null) {
        return "VARCHAR";
    }
    SQLDataType sqlDataType = definition.getDataType();
    SQLDataTypeImpl sqlDataType1 = sqlDataType instanceof SQLDataTypeImpl ? (SQLDataTypeImpl) sqlDataType : null;
    boolean isUnsigned = sqlDataType1 != null && sqlDataType1.isUnsigned();
    return getPhoenixType(sqlDataType.getName().toUpperCase(), sqlDataType.getArguments().toArray(), isUnsigned, limit);
}
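A hedged usage sketch, assumed rather than taken from the canal source, that maps each column of a parsed MySQL CREATE TABLE to its Phoenix type with the length limit enabled:

// Sketch: derive a Phoenix type per column definition of a parsed CREATE TABLE.
String ddl = "CREATE TABLE t (id INT UNSIGNED, name VARCHAR(32))";
SQLCreateTableStatement create = (SQLCreateTableStatement)
        SQLUtils.parseStatements(ddl, JdbcConstants.MYSQL).get(0);
for (SQLTableElement element : create.getTableElementList()) {
    if (element instanceof SQLColumnDefinition) {
        System.out.println(TypeUtil.getPhoenixType((SQLColumnDefinition) element, true));
    }
}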
use of com.alibaba.druid.sql.ast.SQLDataType in project druid by alibaba.
the class Oracle2PG_DataTypeTest_lob method test_oracle2pg_char.
public void test_oracle2pg_char() throws Exception {
    String sql = "blob";
    SQLDataType dataType = SQLParserUtils.createExprParser(sql, JdbcConstants.ORACLE).parseDataType();
    SQLDataType pgDataType = SQLTransformUtils.transformOracleToPostgresql(dataType);
    assertEquals("BYTEA", pgDataType.toString());
}