Use of liquibase.listener.SqlListener in project liquibase by liquibase.
Class DerbyConnection, method checkPoint:
private void checkPoint() throws DatabaseException {
    Statement st = null;
    try {
        st = createStatement();
        final String sql = "CALL SYSCS_UTIL.SYSCS_CHECKPOINT_DATABASE()";
        for (SqlListener listener : Scope.getCurrentScope().getListeners(SqlListener.class)) {
            listener.writeSqlWillRun(sql);
        }
        st.execute(sql);
    } catch (SQLException e) {
        throw new DatabaseException(e);
    } finally {
        JdbcUtil.closeStatement(st);
    }
}
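Every usage on this page follows the same shape: look up the SqlListener instances registered in the current Scope, announce the SQL via writeSqlWillRun, then execute it. As a minimal sketch, that pattern could be pulled into a standalone helper; the SqlNotifyingExecutor class and its execute method below are hypothetical, not part of Liquibase, and rely only on the calls already shown above plus plain JDBC.

import java.sql.Connection;
import java.sql.SQLException;
import java.sql.Statement;

import liquibase.Scope;
import liquibase.listener.SqlListener;

// Hypothetical helper illustrating the notify-then-execute pattern used in the snippets on this page.
public final class SqlNotifyingExecutor {

    private SqlNotifyingExecutor() {
    }

    // Announces the SQL to every SqlListener in the current Scope, then runs it on the given connection.
    public static void execute(Connection connection, String sql) throws SQLException {
        for (SqlListener listener : Scope.getCurrentScope().getListeners(SqlListener.class)) {
            listener.writeSqlWillRun(sql);
        }
        try (Statement statement = connection.createStatement()) {
            statement.execute(sql);
        }
    }
}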
Use of liquibase.listener.SqlListener in project liquibase by liquibase.
Class AbstractIntegrationTest, method clearDatabase:
/**
 * Drops all supported object types in all testing schemas and the DATABASECHANGELOG table if it resides in a
 * different schema from the test schemas.
 *
 * @throws DatabaseException if something goes wrong during object deletion
 */
protected void clearDatabase() throws DatabaseException {
    wipeDatabase();
    try {
        Statement statement = null;
        try {
            // only drop the DATABASECHANGELOG table if it really exists.
            if (SnapshotGeneratorFactory.getInstance().has(new Table().setName(database.getDatabaseChangeLogTableName()).setSchema(new Schema(database.getLiquibaseCatalogName(), database.getLiquibaseSchemaName())), database)) {
                statement = ((JdbcConnection) database.getConnection()).getUnderlyingConnection().createStatement();
                final String sql = "DROP TABLE " + database.escapeTableName(database.getLiquibaseCatalogName(), database.getLiquibaseSchemaName(), database.getDatabaseChangeLogTableName());
                for (SqlListener listener : Scope.getCurrentScope().getListeners(SqlListener.class)) {
                    listener.writeSqlWillRun(sql);
                }
                statement.execute(sql);
                database.commit();
            }
        } catch (Exception e) {
            Scope.getCurrentScope().getLog(getClass()).warning("Probably expected error dropping databasechangelog table");
            e.printStackTrace();
            database.rollback();
        } finally {
            if (statement != null) {
                statement.close();
            }
        }
        // Now drop the DATABASECHANGELOGLOCK table (if it exists)
        try {
            if (SnapshotGeneratorFactory.getInstance().has(new Table().setName(database.getDatabaseChangeLogLockTableName()).setSchema(new Schema(database.getLiquibaseCatalogName(), database.getLiquibaseSchemaName())), database)) {
                statement = ((JdbcConnection) database.getConnection()).getUnderlyingConnection().createStatement();
                String sql = "DROP TABLE " + database.escapeTableName(database.getLiquibaseCatalogName(), database.getLiquibaseSchemaName(), database.getDatabaseChangeLogLockTableName());
                for (SqlListener listener : Scope.getCurrentScope().getListeners(SqlListener.class)) {
                    listener.writeSqlWillRun(sql);
                }
                statement.execute(sql);
                database.commit();
            }
        } catch (Exception e) {
            Scope.getCurrentScope().getLog(getClass()).warning("Probably expected error dropping databasechangeloglock table");
            e.printStackTrace();
            database.rollback();
        } finally {
            if (statement != null) {
                statement.close();
            }
        }
    } catch (SQLException e) {
        throw new DatabaseException(e);
    }
    SnapshotGeneratorFactory.resetAll();
    DatabaseFactory.reset();
}
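The two drop blocks in clearDatabase differ only in the table name. A sketch of how they could be collapsed into a single helper is below; TrackingTableCleaner and dropTrackingTableIfExists are hypothetical names, and the sketch assumes the same SnapshotGeneratorFactory existence check and SqlListener notification shown above.

import java.sql.Statement;

import liquibase.Scope;
import liquibase.database.Database;
import liquibase.database.jvm.JdbcConnection;
import liquibase.listener.SqlListener;
import liquibase.snapshot.SnapshotGeneratorFactory;
import liquibase.structure.core.Schema;
import liquibase.structure.core.Table;

// Hypothetical consolidation of the two near-identical drop blocks in clearDatabase().
public final class TrackingTableCleaner {

    private TrackingTableCleaner() {
    }

    public static void dropTrackingTableIfExists(Database database, String tableName) throws Exception {
        Schema liquibaseSchema = new Schema(database.getLiquibaseCatalogName(), database.getLiquibaseSchemaName());
        // Only drop the table if the snapshot machinery reports that it exists.
        if (!SnapshotGeneratorFactory.getInstance().has(new Table().setName(tableName).setSchema(liquibaseSchema), database)) {
            return;
        }
        String sql = "DROP TABLE " + database.escapeTableName(
                database.getLiquibaseCatalogName(), database.getLiquibaseSchemaName(), tableName);
        for (SqlListener listener : Scope.getCurrentScope().getListeners(SqlListener.class)) {
            listener.writeSqlWillRun(sql);
        }
        try (Statement statement = ((JdbcConnection) database.getConnection()).getUnderlyingConnection().createStatement()) {
            statement.execute(sql);
        }
        database.commit();
    }
}

In clearDatabase the helper would be called once with database.getDatabaseChangeLogTableName() and once with database.getDatabaseChangeLogLockTableName(), with the original catch/rollback handling wrapped around each call.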
Use of liquibase.listener.SqlListener in project liquibase by liquibase.
Class AbstractIntegrationTest, method wipeDatabase:
/**
 * Wipes all Liquibase schemas in the database before testing starts. This includes the DATABASECHANGELOG/LOCK
 * tables.
 */
protected void wipeDatabase() {
    emptySchemas.clear();
    try {
        // TODO the cleaner solution would be to have a noCachingHasObject() Method in SnapshotGeneratorFactory
        try {
            if (database.getConnection() != null) {
                String sql = "DROP TABLE " + database.getDatabaseChangeLogLockTableName();
                for (SqlListener listener : Scope.getCurrentScope().getListeners(SqlListener.class)) {
                    listener.writeSqlWillRun(sql);
                }
                ((JdbcConnection) database.getConnection()).getUnderlyingConnection().createStatement().executeUpdate(sql);
                database.commit();
            }
        } catch (SQLException e) {
            if (database instanceof PostgresDatabase) {
                // throws "current transaction is aborted" unless we roll back the connection
                database.rollback();
            }
        }
        SnapshotGeneratorFactory.resetAll();
        LockService lockService = LockServiceFactory.getInstance().getLockService(database);
        emptyTestSchema(CatalogAndSchema.DEFAULT.getCatalogName(), CatalogAndSchema.DEFAULT.getSchemaName(), database);
        SnapshotGeneratorFactory factory = SnapshotGeneratorFactory.getInstance();
        if (database.supportsSchemas()) {
            emptyTestSchema(null, ALT_SCHEMA, database);
        }
        if (supportsAltCatalogTests()) {
            if (database.supportsSchemas() && database.supportsCatalogs()) {
                emptyTestSchema(ALT_CATALOG, ALT_SCHEMA, database);
            }
        }
        /*
         * There is a special treatment for identifiers in the case when (a) the RDBMS does NOT support
         * schemas AND (b) the RDBMS DOES support catalogs AND (c) someone uses "schemaName=..." in a
         * Liquibase ChangeSet. In this case, AbstractJdbcDatabase.escapeObjectName assumes the author
         * was intending to write "catalog=..." and transparently rewrites the expression.
         * For us, this means that we have to wipe both ALT_SCHEMA and ALT_CATALOG to be sure we
         * are doing a thorough cleanup.
         */
        CatalogAndSchema[] alternativeLocations = new CatalogAndSchema[] {
            new CatalogAndSchema(ALT_CATALOG, null),
            new CatalogAndSchema(null, ALT_SCHEMA),
            new CatalogAndSchema("LBCAT2", database.getDefaultSchemaName()),
            new CatalogAndSchema(null, "LBCAT2"),
            new CatalogAndSchema("lbcat2", database.getDefaultSchemaName()),
            new CatalogAndSchema(null, "lbcat2")
        };
        for (CatalogAndSchema location : alternativeLocations) {
            emptyTestSchema(location.getCatalogName(), location.getSchemaName(), database);
        }
        database.commit();
        SnapshotGeneratorFactory.resetAll();
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}
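The snippets above show only the producer side of the contract: code that announces SQL to whichever SqlListener instances are registered in the current Scope. A sketch of the consumer side follows. It assumes that SqlListener declares both readSqlWillRun(String) and writeSqlWillRun(String) and that Scope provides a child(LiquibaseListener, ScopedRunner) overload for registering a listener for the duration of a block; both assumptions should be checked against the Liquibase version in use.

import java.util.ArrayList;
import java.util.List;

import liquibase.listener.SqlListener;

// Hypothetical listener that records every statement announced via writeSqlWillRun.
// Assumes SqlListener declares exactly readSqlWillRun(String) and writeSqlWillRun(String).
public class RecordingSqlListener implements SqlListener {

    private final List<String> statements = new ArrayList<>();

    @Override
    public void readSqlWillRun(String sql) {
        // Queries are ignored in this sketch.
    }

    @Override
    public void writeSqlWillRun(String sql) {
        statements.add(sql);
    }

    public List<String> getStatements() {
        return statements;
    }
}

A caller could then register the recorder around a block such as wipeDatabase(), for example Scope.child(recorder, () -> wipeDatabase()), and inspect recorder.getStatements() afterwards to see the DROP TABLE statements that were announced (again assuming the child(LiquibaseListener, ScopedRunner) overload exists).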
Use of liquibase.listener.SqlListener in project liquibase by liquibase.
Class AbstractIntegrationTest, method testRunUpdateOnOldChangelogTableFormat:
@Test
// Successful execution qualifies as test success.
@SuppressWarnings("squid:S2699")
public void testRunUpdateOnOldChangelogTableFormat() throws Exception {
    assumeNotNull(this.getDatabase());
    Liquibase liquibase = createLiquibase(completeChangeLog);
    clearDatabase();
    String nullableKeyword = database.requiresExplicitNullForColumns() ? " NULL" : "";
    String sql = "CREATE TABLE " + database.escapeTableName(database.getDefaultCatalogName(), database.getDefaultSchemaName(), "DATABASECHANGELOG") +
        " (id varchar(150) NOT NULL, " +
        "author VARCHAR(150) NOT NULL, " +
        "filename VARCHAR(255) NOT NULL, " +
        "dateExecuted " + DataTypeFactory.getInstance().fromDescription("datetime", database).toDatabaseDataType(database) + " NOT NULL, " +
        "md5sum VARCHAR(32)" + nullableKeyword + ", " +
        "description VARCHAR(255)" + nullableKeyword + ", " +
        "comments VARCHAR(255)" + nullableKeyword + ", " +
        "tag VARCHAR(255)" + nullableKeyword + ", " +
        "liquibase VARCHAR(10)" + nullableKeyword + ", " +
        "PRIMARY KEY (id, author, filename))";
    for (SqlListener listener : Scope.getCurrentScope().getListeners(SqlListener.class)) {
        listener.writeSqlWillRun(sql);
    }
    Connection conn = ((JdbcConnection) database.getConnection()).getUnderlyingConnection();
    boolean savedAcSetting = conn.getAutoCommit();
    conn.setAutoCommit(false);
    conn.createStatement().execute(sql);
    conn.commit();
    conn.setAutoCommit(savedAcSetting);
    liquibase = createLiquibase(completeChangeLog);
    liquibase.setChangeLogParameter("loginuser", testSystem.getUsername());
    liquibase.update(this.contexts);
}
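The test restores the connection's auto-commit flag only on the success path; if the CREATE TABLE or the commit throws, the connection is left with auto-commit disabled. A small sketch of the same steps with the restore moved into a finally block, reusing the conn, sql, and database variables from the test above and only plain JDBC:

Connection conn = ((JdbcConnection) database.getConnection()).getUnderlyingConnection();
boolean savedAcSetting = conn.getAutoCommit();
conn.setAutoCommit(false);
try (Statement statement = conn.createStatement()) {
    statement.execute(sql);
    conn.commit();
} finally {
    // Restore the caller's auto-commit setting even if the CREATE TABLE fails.
    conn.setAutoCommit(savedAcSetting);
}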