Use of liquibase.statement.core.RawSqlStatement in project irida by phac-nml:
the class AbsoluteToRelativePaths, method generateStatements.
@Override
public SqlStatement[] generateStatements(Database database) throws CustomChangeException {
    // Before rewriting anything, confirm the paths stored in the database
    // actually line up with the configured directories; refuse to apply the
    // change otherwise.
    final ValidationErrors validationErrors = testRelativePaths();
    if (validationErrors.hasErrors()) {
        for (final String message : validationErrors.getErrorMessages()) {
            logger.error(message);
        }
        throw new CustomChangeException("File locations did not validate. Change cannot be applied.");
    }

    // Directory prefixes (with trailing separator) to strip from each stored
    // absolute path, leaving a path relative to the configured base directory.
    final String sequencePrefix = appendPathSeparator(this.sequenceFileDirectory.toString());
    final String referencePrefix = appendPathSeparator(this.referenceFileDirectory.toString());
    final String outputPrefix = appendPathSeparator(this.outputFileDirectory.toString());

    // One UPDATE per table (and audit table) that stores file locations.
    return new SqlStatement[] {
            new RawSqlStatement(String.format(
                    "update sequence_file set file_path = replace(file_path, '%s', '') WHERE file_path IS NOT NULL",
                    sequencePrefix)),
            new RawSqlStatement(String.format(
                    "update sequence_file_AUD set file_path = replace(file_path, '%s', '') WHERE file_path IS NOT NULL",
                    sequencePrefix)),
            new RawSqlStatement(String.format(
                    "update reference_file set filePath = replace(filePath, '%s', '') WHERE filePath IS NOT NULL",
                    referencePrefix)),
            new RawSqlStatement(String.format(
                    "update reference_file_AUD set filePath = replace(filePath, '%s', '') WHERE filePath IS NOT NULL",
                    referencePrefix)),
            new RawSqlStatement(String.format(
                    "update analysis_output_file set file_path = replace(file_path, '%s', '') WHERE file_path IS NOT NULL",
                    outputPrefix)) };
}
Use of liquibase.statement.core.RawSqlStatement in project liquibase by liquibase:
the class PostgreSQLIntegrationTest, method testMissingDataGenerator.
@Test
public void testMissingDataGenerator() throws Exception {
    // Seed the database with two related tables plus a primary key, a
    // foreign key, an index, and a handful of rows.
    Scope.getCurrentScope().getSingleton(ExecutorService.class).getExecutor("jdbc", getDatabase())
            .execute(new RawSqlStatement(
                    "CREATE TABLE \"FIRST_TABLE\" (\"ID\" INT, \"NAME\" VARCHAR(20), \"LAST_NAME\" VARCHAR(20) DEFAULT 'Snow', "
                            + "\"AGE\" INT DEFAULT 25, \"REGISTRATION_DATE\" date DEFAULT TO_DATE('2014-08-11', 'YYYY-MM-DD'), "
                            + "\"COMPVALCOL\" INT DEFAULT 1*22)"));
    Scope.getCurrentScope().getSingleton(ExecutorService.class).getExecutor("jdbc", getDatabase())
            .execute(new RawSqlStatement("CREATE TABLE \"SECOND_TABLE\" (\"ID\" INT, \"NAME\" VARCHAR(20))"));
    Scope.getCurrentScope().getSingleton(ExecutorService.class).getExecutor("jdbc", getDatabase())
            .execute(new RawSqlStatement("ALTER TABLE \"FIRST_TABLE\" ADD CONSTRAINT \"FIRST_TABLE_PK\" PRIMARY KEY (\"ID\")"));
    Scope.getCurrentScope().getSingleton(ExecutorService.class).getExecutor("jdbc", getDatabase())
            .execute(new RawSqlStatement("ALTER TABLE \"SECOND_TABLE\" ADD CONSTRAINT \"FIRST_TABLE_FK\" FOREIGN KEY (\"ID\") REFERENCES \"FIRST_TABLE\"(\"ID\")"));
    Scope.getCurrentScope().getSingleton(ExecutorService.class).getExecutor("jdbc", getDatabase())
            .execute(new RawSqlStatement("CREATE INDEX \"IDX_FIRST_TABLE\" ON \"FIRST_TABLE\"(\"NAME\")"));
    Scope.getCurrentScope().getSingleton(ExecutorService.class).getExecutor("jdbc", getDatabase())
            .execute(new RawSqlStatement("INSERT INTO \"FIRST_TABLE\"(\"ID\", \"NAME\") VALUES (1, 'JOHN')"));
    Scope.getCurrentScope().getSingleton(ExecutorService.class).getExecutor("jdbc", getDatabase())
            .execute(new RawSqlStatement("INSERT INTO \"FIRST_TABLE\"(\"ID\", \"NAME\", \"LAST_NAME\", \"AGE\", \"REGISTRATION_DATE\", \"COMPVALCOL\") VALUES (2, 'JEREMY', 'IRONS', 71, TO_DATE('2020-04-01', 'YYYY-MM-DD'), 2*11 )"));
    Scope.getCurrentScope().getSingleton(ExecutorService.class).getExecutor("jdbc", getDatabase())
            .execute(new RawSqlStatement("INSERT INTO \"SECOND_TABLE\"(\"ID\", \"NAME\") VALUES (1, 'JOHN')"));
    Scope.getCurrentScope().getSingleton(ExecutorService.class).getExecutor("jdbc", getDatabase())
            .execute(new RawSqlStatement("INSERT INTO \"SECOND_TABLE\"(\"ID\", \"NAME\") VALUES (2, 'JEREMY')"));

    // Diff the live database against an empty reference and generate change sets.
    DiffResult diffResult = DiffGeneratorFactory.getInstance().compare(getDatabase(), null, new CompareControl());
    DiffToChangeLog changeLogWriter = new DiffToChangeLog(diffResult, new DiffOutputControl(false, false, false, null));
    List<ChangeSet> changeSets = changeLogWriter.generateChangeSets();

    // Look for a CreateTableChange that (re)creates FIRST_TABLE.
    boolean found = false;
    search:
    for (ChangeSet changeSet : changeSets) {
        for (Change change : changeSet.getChanges()) {
            if (change instanceof CreateTableChange
                    && "FIRST_TABLE".equals(((CreateTableChange) change).getTableName())) {
                found = true;
                break search;
            }
        }
    }
    Assert.assertTrue("There should be a table named \"FIRST_TABLE\"", found);
}
Use of liquibase.statement.core.RawSqlStatement in project liquibase by liquibase:
the class CockroachDBIntegrationTest, method descPrimaryKey.
@Test
public void descPrimaryKey() throws Exception {
    // Skip silently when no CockroachDB connection is configured.
    if (getDatabase() == null) {
        return;
    }
    final Executor jdbcExecutor =
            Scope.getCurrentScope().getSingleton(ExecutorService.class).getExecutor("jdbc", getDatabase());

    // Create a table whose primary key mixes ascending (a, b) and
    // descending (c) columns.
    jdbcExecutor.execute(new RawSqlStatement("DROP TABLE IF EXISTS pk"));
    jdbcExecutor.execute(new RawSqlStatement(
            "CREATE TABLE pk (\n"
                    + "a INT8 NOT NULL,\n"
                    + "b INT8 NOT NULL,\n"
                    + "c INT8 NOT NULL,\n"
                    + "d INT8 NOT NULL,\n"
                    + "CONSTRAINT \"primary\" PRIMARY KEY (a ASC, b ASC, c DESC)\n"
                    + ")"));

    // Snapshot the schema and verify the per-column direction flags:
    // ascending columns report null, the descending one reports true.
    DatabaseSnapshot databaseSnapshot = SnapshotGeneratorFactory.getInstance()
            .createSnapshot(getDatabase().getDefaultSchema(), getDatabase(), new SnapshotControl(getDatabase()));
    PrimaryKey primaryKey = databaseSnapshot.get(
            new PrimaryKey().setTable(new Table().setName("pk")).setName("primary"));
    List<Column> pkColumns = primaryKey.getColumns();
    assertEquals("a", pkColumns.get(0).getName());
    assertNull(pkColumns.get(0).getDescending());
    assertEquals("b", pkColumns.get(1).getName());
    assertNull(pkColumns.get(1).getDescending());
    assertEquals("c", pkColumns.get(2).getName());
    assertTrue(pkColumns.get(2).getDescending());
}
Use of liquibase.statement.core.RawSqlStatement in project liquibase by liquibase:
the class ColumnSnapshotGenerator, method setAutoIncrementDetails.
/**
 * Fills in MSSQL identity-column details (start value and increment) for {@code column}.
 * <p>
 * Only runs for live (non-offline) MSSQL connections when the column is already flagged as
 * auto-increment. The identity metadata for ALL columns is fetched once per snapshot from
 * {@code sys.identity_columns} and cached in the snapshot's scratch data under the key
 * {@code "autoIncrementColumns"}; subsequent calls reuse the cache.
 *
 * @param column   the column to enrich; looked up by "schema.table.column" in the cache
 * @param database the database the column came from; must be MSSQL for this to do anything
 * @param snapshot the snapshot whose scratch data caches the identity information
 */
protected void setAutoIncrementDetails(Column column, Database database, DatabaseSnapshot snapshot) {
    if ((column.getAutoIncrementInformation() != null) && (database instanceof MSSQLDatabase) && (database.getConnection() != null) && !(database.getConnection() instanceof OfflineConnection)) {
        // Fixed: raw `(Map)` cast and raw `Map` loop variable replaced with
        // parameterized types; the one genuinely unchecked cast (scratch data is
        // stored as Object) is suppressed at the narrowest possible scope.
        @SuppressWarnings("unchecked")
        Map<String, Column.AutoIncrementInformation> autoIncrementColumns =
                (Map<String, Column.AutoIncrementInformation>) snapshot.getScratchData("autoIncrementColumns");
        if (autoIncrementColumns == null) {
            autoIncrementColumns = new HashMap<>();
            Executor executor = Scope.getCurrentScope().getSingleton(ExecutorService.class).getExecutor("jdbc", database);
            try {
                // One query covers every identity column in the database.
                List<Map<String, ?>> rows = executor.queryForList(new RawSqlStatement(
                        "SELECT object_schema_name(object_id) AS schema_name, "
                                + "object_name(object_id) AS table_name, name AS column_name, "
                                + "CAST(seed_value AS bigint) AS start_value, "
                                + "CAST(increment_value AS bigint) AS increment_by "
                                + "FROM sys.identity_columns"));
                for (Map<String, ?> row : rows) {
                    String schemaName = (String) row.get("SCHEMA_NAME");
                    String tableName = (String) row.get("TABLE_NAME");
                    String columnName = (String) row.get("COLUMN_NAME");
                    Long startValue = (Long) row.get("START_VALUE");
                    Long incrementBy = (Long) row.get("INCREMENT_BY");
                    Column.AutoIncrementInformation info = new Column.AutoIncrementInformation(startValue, incrementBy);
                    autoIncrementColumns.put(schemaName + "." + tableName + "." + columnName, info);
                }
                snapshot.setScratchData("autoIncrementColumns", autoIncrementColumns);
            } catch (DatabaseException e) {
                // Best-effort: without this metadata the column simply keeps its
                // existing (less detailed) auto-increment information.
                Scope.getCurrentScope().getLog(getClass()).info("Could not read identity information", e);
            }
        }
        if ((column.getRelation() != null) && (column.getSchema() != null)) {
            Column.AutoIncrementInformation autoIncrementInformation =
                    autoIncrementColumns.get(column.getSchema().getName() + "." + column.getRelation().getName() + "." + column.getName());
            if (autoIncrementInformation != null) {
                column.setAutoIncrementInformation(autoIncrementInformation);
            }
        }
    }
}
Use of liquibase.statement.core.RawSqlStatement in project liquibase by liquibase:
the class ColumnSnapshotGenerator, method readDataType.
/**
 * Processes metadata of a column, e.g. name, type and default value. We start with the result of the JDBC
 * {@link DatabaseMetaData}.getColumns() method. Depending on Database, additional columns might be present.
 *
 * @param columnMetadataResultSet the result from the JDBC getColumns() call for the column
 * @param column logical definition of the column (object form)
 * @param database the database from which the column originates
 * @return a DataType object with detailed information about the type
 * @throws DatabaseException If an error occurs during processing (mostly caused by Exceptions in JDBC calls)
 */
protected DataType readDataType(CachedRow columnMetadataResultSet, Column column, Database database) throws DatabaseException {
    // Oracle exposes its type details through vendor-specific metadata columns
    // (DATA_TYPE_NAME, DATA_PRECISION, DATA_SCALE, CHAR_USED, ...), so it is
    // handled entirely in this branch and returns early.
    if (database instanceof OracleDatabase) {
        String dataType = columnMetadataResultSet.getString("DATA_TYPE_NAME");
        // Normalize Oracle's VARCHAR2/NVARCHAR2 spellings to the generic names.
        dataType = dataType.replace("VARCHAR2", "VARCHAR");
        dataType = dataType.replace("NVARCHAR2", "NVARCHAR");
        DataType type = new DataType(dataType);
        type.setDataTypeId(columnMetadataResultSet.getInt("DATA_TYPE"));
        if (dataType.equalsIgnoreCase("NUMBER")) {
            // NUMBER(precision, scale): size comes from DATA_PRECISION,
            // decimal digits from DATA_SCALE.
            type.setColumnSize(columnMetadataResultSet.getInt("DATA_PRECISION"));
        // if (type.getColumnSize() == null) {
        // type.setColumnSize(38);
        // }
            type.setDecimalDigits(columnMetadataResultSet.getInt("DATA_SCALE"));
        // if (type.getDecimalDigits() == null) {
        // type.setDecimalDigits(0);
        // }
        // type.setRadix(10);
        } else {
            if ("FLOAT".equalsIgnoreCase(dataType)) {
                // FLOAT [(precision)]
                type.setColumnSize(columnMetadataResultSet.getInt("DATA_PRECISION"));
            } else {
                type.setColumnSize(columnMetadataResultSet.getInt("DATA_LENGTH"));
            }
            boolean isTimeStampDataType = dataType.toUpperCase().contains("TIMESTAMP");
            if (isTimeStampDataType || dataType.equalsIgnoreCase("NCLOB") || dataType.equalsIgnoreCase("BLOB") || dataType.equalsIgnoreCase("CLOB")) {
                // TIMESTAMP and LOB types have no meaningful size here.
                type.setColumnSize(null);
            } else if (dataType.equalsIgnoreCase("NVARCHAR") || dataType.equalsIgnoreCase("NCHAR")) {
                // National character types are measured in characters, not bytes.
                type.setColumnSize(columnMetadataResultSet.getInt("CHAR_LENGTH"));
                type.setColumnSizeUnit(DataType.ColumnSizeUnit.CHAR);
            } else {
                // CHAR_USED distinguishes CHAR ('C') from BYTE ('B') length
                // semantics for character columns.
                String charUsed = columnMetadataResultSet.getString("CHAR_USED");
                DataType.ColumnSizeUnit unit = null;
                if ("C".equals(charUsed)) {
                    unit = DataType.ColumnSizeUnit.CHAR;
                    type.setColumnSize(columnMetadataResultSet.getInt("CHAR_LENGTH"));
                } else if ("B".equals(charUsed)) {
                    unit = DataType.ColumnSizeUnit.BYTE;
                }
                type.setColumnSizeUnit(unit);
            }
        }
        return type;
    }
    String columnTypeName = (String) columnMetadataResultSet.get("TYPE_NAME");
    // Per-database cleanup of the raw JDBC type name before building the DataType.
    if (database instanceof MSSQLDatabase) {
        // Strip MSSQL's "() identity" suffix; identity-ness is tracked separately.
        if ("numeric() identity".equalsIgnoreCase(columnTypeName)) {
            columnTypeName = "numeric";
        } else if ("decimal() identity".equalsIgnoreCase(columnTypeName)) {
            columnTypeName = "decimal";
        } else if ("xml".equalsIgnoreCase(columnTypeName)) {
            // xml has no size/scale.
            columnMetadataResultSet.set("COLUMN_SIZE", null);
            columnMetadataResultSet.set("DECIMAL_DIGITS", null);
        } else if ("datetimeoffset".equalsIgnoreCase(columnTypeName) || "time".equalsIgnoreCase(columnTypeName)) {
            // For these types the fractional-seconds precision is reported in
            // DECIMAL_DIGITS; move it into COLUMN_SIZE where later code expects it.
            columnMetadataResultSet.set("COLUMN_SIZE", columnMetadataResultSet.getInt("DECIMAL_DIGITS"));
            columnMetadataResultSet.set("DECIMAL_DIGITS", null);
        }
    } else if (database instanceof PostgresDatabase) {
        columnTypeName = database.unescapeDataTypeName(columnTypeName);
        // https://www.postgresql.org/message-id/20061016193942.GF23302%40svana.org says that internally array datatypes are defined with an underscore prefix.
        if (columnTypeName.startsWith("_")) {
            // Rewrite "_int4" style internal names to the user-facing "int4[]" form.
            columnTypeName = columnTypeName.replaceFirst("_", "").concat("[]");
        }
    }
    if (database instanceof FirebirdDatabase) {
        // Firebird reports LOBs as "BLOB SUB_TYPE n"; map to the standard names.
        if ("BLOB SUB_TYPE 0".equals(columnTypeName)) {
            columnTypeName = "BLOB";
        }
        if ("BLOB SUB_TYPE 1".equals(columnTypeName)) {
            columnTypeName = "CLOB";
        }
    }
    // MySQL ENUM/SET: the permitted values are not in the JDBC metadata, so query
    // INFORMATION_SCHEMA.COLUMNS and split COLUMN_TYPE's value list (up to 100
    // entries via the units/tens cross join) to rebuild e.g. ENUM('a','b').
    if ((database instanceof MySQLDatabase) && ("ENUM".equalsIgnoreCase(columnTypeName) || "SET".equalsIgnoreCase(columnTypeName))) {
        try {
            // Offset past the literal prefix: "ENUM('" is 7 chars in, "SET('" is 6.
            String boilerLength;
            if ("ENUM".equalsIgnoreCase(columnTypeName)) {
                boilerLength = "7";
            } else {
                // SET
                boilerLength = "6";
            }
            List<String> enumValues = Scope.getCurrentScope().getSingleton(ExecutorService.class).getExecutor("jdbc", database).queryForList(new RawSqlStatement("SELECT DISTINCT SUBSTRING_INDEX(SUBSTRING_INDEX(SUBSTRING(COLUMN_TYPE, " + boilerLength + ", LENGTH(COLUMN_TYPE) - " + boilerLength + " - 1 ), \"','\", 1 + units.i + tens.i * 10) , \"','\", -1)\n" + "FROM INFORMATION_SCHEMA.COLUMNS\n" + "CROSS JOIN (SELECT 0 AS i UNION SELECT 1 UNION SELECT 2 UNION SELECT 3 UNION SELECT 4 " + "UNION SELECT 5 UNION SELECT 6 UNION SELECT 7 UNION SELECT 8 UNION SELECT 9) units\n" + "CROSS JOIN (SELECT 0 AS i UNION SELECT 1 UNION SELECT 2 UNION SELECT 3 UNION SELECT 4 " + "UNION SELECT 5 UNION SELECT 6 UNION SELECT 7 UNION SELECT 8 UNION SELECT 9) tens\n" + "WHERE TABLE_NAME = '" + column.getRelation().getName() + "' \n" + "AND COLUMN_NAME = '" + column.getName() + "'"), String.class);
            String enumClause = "";
            for (String enumValue : enumValues) {
                enumClause += "'" + enumValue + "', ";
            }
            // Drop the trailing ", " separator.
            enumClause = enumClause.replaceFirst(", $", "");
            return new DataType(columnTypeName + "(" + enumClause + ")");
        } catch (DatabaseException e) {
            // Best-effort: fall through and report the plain type name without values.
            Scope.getCurrentScope().getLog(getClass()).warning("Error fetching enum values", e);
        }
    }
    DataType.ColumnSizeUnit columnSizeUnit = DataType.ColumnSizeUnit.BYTE;
    int dataType = columnMetadataResultSet.getInt("DATA_TYPE");
    Integer columnSize = null;
    Integer decimalDigits = null;
    if (!database.dataTypeIsNotModifiable(columnTypeName)) {
        // don't set size for types like int4, int8 etc
        columnSize = columnMetadataResultSet.getInt("COLUMN_SIZE");
        decimalDigits = columnMetadataResultSet.getInt("DECIMAL_DIGITS");
        if ((decimalDigits != null) && decimalDigits.equals(0)) {
            if (dataType == Types.TIME && database instanceof PostgresDatabase) {
                // that is allowed
            } else {
                // A scale of 0 carries no information; treat it as unspecified.
                decimalDigits = null;
            }
        }
    }
    Integer radix = columnMetadataResultSet.getInt("NUM_PREC_RADIX");
    Integer characterOctetLength = columnMetadataResultSet.getInt("CHAR_OCTET_LENGTH");
    if (database instanceof AbstractDb2Database) {
        String typeName = columnMetadataResultSet.getString("TYPE_NAME");
        if (("DBCLOB".equalsIgnoreCase(typeName) || "GRAPHIC".equalsIgnoreCase(typeName) || "VARGRAPHIC".equalsIgnoreCase(typeName)) && (columnSize != null)) {
            // Stored as double length chars
            columnSize = columnSize / 2;
        }
        if ("TIMESTAMP".equalsIgnoreCase(columnTypeName) && (decimalDigits == null)) {
            // Actually a date
            columnTypeName = "DATE";
            dataType = Types.DATE;
        }
    }
    if ((database instanceof PostgresDatabase) && columnSize != null) {
        // Postgres reports "unbounded" as Integer.MAX_VALUE and bare numeric as 0;
        // both mean "no explicit size".
        if (columnSize.equals(Integer.MAX_VALUE)) {
            columnSize = null;
        } else if (columnTypeName.equalsIgnoreCase("numeric") && columnSize.equals(0)) {
            columnSize = null;
        }
    }
    // The same applies to LONG(...) VARCHAR.
    if (database instanceof SybaseASADatabase && ("LONG BINARY".equalsIgnoreCase(columnTypeName) || "LONG VARCHAR".equalsIgnoreCase(columnTypeName))) {
        columnSize = null;
    }
    DataType type = new DataType(columnTypeName);
    type.setDataTypeId(dataType);
    /*
     * According to the description of DatabaseMetaData.getColumns, the content of the "COLUMN_SIZE" column is
     * pretty worthless for datetime/timestamp columns:
     *
     * "For datetime datatypes, this is the length in characters of the String representation
     * (assuming the maximum allowed precision of the fractional seconds component)."
     * In the case of TIMESTAMP columns, the information we are really looking for
     * (the fractional digits) is located in the column DECIMAL_DIGITS.
     */
    int jdbcType = columnMetadataResultSet.getInt("DATA_TYPE");
    // if jdbcType is TIMESTAMP_WITH_TIMEZONE (does not exist yet in JDK7)
    if (jdbcType == Types.TIMESTAMP) {
        if (decimalDigits == null) {
            type.setColumnSize(null);
        } else {
            // Only record a fractional-seconds precision that differs from the
            // database's default, so default precision round-trips as "unspecified".
            type.setColumnSize((decimalDigits != database.getDefaultFractionalDigitsForTimestamp()) ? decimalDigits : null);
        }
        type.setDecimalDigits(null);
    } else {
        type.setColumnSize(columnSize);
        type.setDecimalDigits(decimalDigits);
    }
    type.setRadix(radix);
    type.setCharacterOctetLength(characterOctetLength);
    type.setColumnSizeUnit(columnSizeUnit);
    return type;
}
Aggregations