Use of org.dbflute.properties.DfReplaceSchemaProperties in project dbflute-core by dbflute.
The class DfTaskControlLogic, method showFinalMessage.
// ===================================================================================
//                                                                       Final Message
//                                                                       =============
public void showFinalMessage(long before, long after, boolean abort, String taskName, String finalInformation) {
    final String displayTaskName = getDisplayTaskName(taskName);
    final String envType = DfEnvironmentType.getInstance().getEnvironmentType();
    final StringBuilder sb = new StringBuilder();
    final String ln = ln();
    sb.append(ln).append("_/_/_/_/_/_/_/_/_/_/_/_/_/_/_/_/_/_/_/_/_/_/");
    sb.append(ln).append("[Final Message]: ").append(getPerformanceView(after - before));
    if (abort) {
        sb.append(" *Abort");
    }
    sb.append(ln);
    final DfConnectionMetaInfo metaInfo = getConnectionMetaInfo();
    final String productDisp = metaInfo != null ? " (" + metaInfo.getProductDisp() + ")" : "";
    final String databaseType = getDatabaseTypeFacadeProp().getTargetDatabase() + productDisp;
    sb.append(ln).append(" DBFLUTE_CLIENT: {").append(getBasicProperties().getProjectName()).append("}");
    sb.append(ln).append(" database = " + databaseType);
    sb.append(ln).append(" language = " + getBasicProperties().getTargetLanguage());
    sb.append(ln).append(" container = " + getBasicProperties().getTargetContainerName());
    sb.append(ln).append(" package = " + getBasicProperties().getPackageBase());
    sb.append(ln);
    sb.append(ln).append(" DBFLUTE_ENVIRONMENT_TYPE: {").append(envType != null ? envType : "").append("}");
    final String driver = _databaseResource.getDriver();
    if (driver != null) {
        // basically true except cancelled
        sb.append(ln).append(" driver = ").append(driver);
        sb.append(ln).append(" url = ").append(_databaseResource.getUrl());
        sb.append(ln).append(" schema = ").append(_databaseResource.getMainSchema());
        sb.append(ln).append(" user = ").append(_databaseResource.getUser());
        sb.append(ln).append(" props = ").append(_databaseResource.getConnectionProperties());
    }
    final String additionalSchemaDisp = buildAdditionalSchemaDisp();
    sb.append(ln).append(" additionalSchema = ").append(additionalSchemaDisp);
    final DfReplaceSchemaProperties replaceSchemaProp = getProperties().getReplaceSchemaProperties();
    sb.append(ln).append(" repsEnvType = ").append(replaceSchemaProp.getRepsEnvType());
    final String refreshProjectDisp = buildRefreshProjectDisp();
    sb.append(ln).append(" refreshProject = ").append(refreshProjectDisp);
    if (finalInformation != null) {
        sb.append(ln).append(ln);
        sb.append(finalInformation);
    }
    sb.append(ln).append("_/_/_/_/_/_/_/_/_/_/ {").append(displayTaskName).append("}");
    DfDBFluteTaskUtil.logFinalMessage(sb.toString());
}
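For context, a minimal sketch of how a task class might call showFinalMessage around its main work; the doExecute() helper, the controlLogic field, and the "jdbc" task name are assumptions for illustration, not taken from dbflute-core.
long before = System.currentTimeMillis();
boolean abort = false;
String finalInformation = null;
try {
    finalInformation = doExecute(); // hypothetical helper doing the actual task work
} catch (RuntimeException e) {
    abort = true; // mark the run as aborted so the final message shows "*Abort"
    throw e;
} finally {
    long after = System.currentTimeMillis();
    controlLogic.showFinalMessage(before, after, abort, "jdbc", finalInformation);
}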
Use of org.dbflute.properties.DfReplaceSchemaProperties in project dbflute-core by dbflute.
The class DfArrangeBeforeRepsProcess, method arrangeBeforeReps.
// ===================================================================================
//                                                                             Process
//                                                                             =======
public void arrangeBeforeReps() {
    final DfReplaceSchemaProperties prop = getReplaceSchemaProperties();
    final Map<String, String> copyMap = prop.getArrangeBeforeRepsCopyMap();
    if (!copyMap.isEmpty()) {
        _log.info("...Arranging resource files for ReplaceSchema");
    }
    for (Entry<String, String> entry : copyMap.entrySet()) {
        final String src = entry.getKey();
        final String dest = entry.getValue();
        arrangeCopy(src, dest);
    }
    final Map<String, String> scriptMap = prop.getArrangeBeforeRepsScriptMap();
    if (!scriptMap.isEmpty()) {
        _log.info("...Arranging by script files for ReplaceSchema");
    }
    for (Entry<String, String> entry : scriptMap.entrySet()) {
        final String path = entry.getKey();
        arrangeScript(path);
    }
}
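The two maps come from the ReplaceSchema settings; a rough sketch of their shape using java.util.LinkedHashMap (the paths below are hypothetical examples, not real configuration values):
// copyMap: source path (key) is copied to destination path (value) before ReplaceSchema runs
Map<String, String> copyMap = new LinkedHashMap<String, String>();
copyMap.put("../erd/schema.ddl", "./playsql/replace-schema-10-basic.sql"); // hypothetical paths

// scriptMap: only the key (script path) is used here; each script is run via arrangeScript()
Map<String, String> scriptMap = new LinkedHashMap<String, String>();
scriptMap.put("./playsql/arrange-before-reps.sh", "dummy"); // hypothetical path, value unused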
Use of org.dbflute.properties.DfReplaceSchemaProperties in project dbflute-core by dbflute.
The class DfCreateSchemaProcess, method getSqlFileRunner.
protected DfSqlFileRunner getSqlFileRunner(final DfRunnerInformation runInfo) {
    final DfReplaceSchemaProperties prop = getReplaceSchemaProperties();
    final DfSqlFileRunnerExecute execute = new DfSqlFileRunnerExecuteCreateSchema(runInfo, getDataSource());
    execute.setDispatcher(new DfSqlFileRunnerDispatcher() { // for additional user dispatch
        protected final Set<String> _skippedFileSet = new HashSet<String>();

        public DfRunnerDispatchResult dispatch(File sqlFile, Statement st, String sql) throws SQLException {
            if (_currentUser == null || _currentUser.trim().length() == 0) {
                return DfRunnerDispatchResult.NONE;
            }
            checkSkippedUser();
            if (isSkippedUser()) {
                return DfRunnerDispatchResult.SKIPPED;
            }
            Connection conn = _changeUserConnectionMap.get(_currentUser);
            if (conn == null) {
                _log.info("...Creating a connection to " + _currentUser);
                conn = prop.createAdditionalUserConnection(_currentUser);
                if (conn != null) {
                    _changeUserConnectionMap.put(_currentUser, conn);
                } else {
                    final StringBuilder sb = new StringBuilder();
                    sb.append("...Saying good-bye to the user '").append(_currentUser).append("'");
                    sb.append(" because of no definition");
                    _log.info(sb.toString());
                    _goodByeUserSet.add(_currentUser);
                    return DfRunnerDispatchResult.SKIPPED;
                }
            }
            final Statement dispatchStmt = conn.createStatement();
            try {
                dispatchStmt.execute(sql);
                return DfRunnerDispatchResult.DISPATCHED;
            } catch (SQLException e) {
                final List<String> argList = analyzeCheckUser(sql);
                if (argList != null) {
                    // means the command was found
                    if (argList.contains("mainSchema")) {
                        _alreadyExistsMainSchema = true;
                    }
                    final StringBuilder sb = new StringBuilder();
                    sb.append("...Saying good-bye to the user '").append(_currentUser).append("'");
                    sb.append(" because of checked: ").append(argList);
                    _log.info(sb.toString());
                    final String exmsg = e.getMessage();
                    _log.info(" -> " + (exmsg != null ? exmsg.trim() : null));
                    _goodByeUserSet.add(_currentUser);
                    return DfRunnerDispatchResult.SKIPPED;
                }
                throw e;
            } finally {
                if (dispatchStmt != null) {
                    dispatchStmt.close();
                }
            }
        }

        protected void checkSkippedUser() {
            if (_skippedFileSet.contains(_currentUser)) {
                return;
            }
            if (prop.isAdditionalUserSkipIfNotFoundPasswordFileAndDefault(_currentUser)) {
                _log.info("...Skipping the user since no password file: " + _currentUser);
                _skippedFileSet.add(_currentUser);
            }
        }

        protected boolean isSkippedUser() {
            return _skippedFileSet.contains(_currentUser);
        }
    });
    return execute;
}
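The dispatcher contract implied above: return NONE to let the default runner execute the SQL, DISPATCHED when the dispatcher executed it itself, and SKIPPED to drop it. A minimal sketch of a custom dispatcher under that assumption; the secondaryConn connection is hypothetical, not part of dbflute-core.
execute.setDispatcher(new DfSqlFileRunnerDispatcher() {
    public DfRunnerDispatchResult dispatch(File sqlFile, Statement st, String sql) throws SQLException {
        if (secondaryConn == null) {
            return DfRunnerDispatchResult.NONE; // fall back to the default statement
        }
        final Statement dispatchStmt = secondaryConn.createStatement();
        try {
            dispatchStmt.execute(sql); // run the SQL on the secondary connection instead
            return DfRunnerDispatchResult.DISPATCHED;
        } finally {
            dispatchStmt.close();
        }
    }
});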
Use of org.dbflute.properties.DfReplaceSchemaProperties in project dbflute-core by dbflute.
The class DfTakeFinallyProcess, method getSqlFileRunner4TakeFinally.
protected DfSqlFileRunner getSqlFileRunner4TakeFinally(final DfRunnerInformation runInfo) {
    final DfReplaceSchemaProperties prop = getReplaceSchemaProperties();
    final DfSqlFileRunnerExecute runnerExecute = new DfSqlFileRunnerExecute(runInfo, _dataSource) {
        @Override
        protected String filterSql(String sql) {
            sql = super.filterSql(sql);
            sql = prop.resolveFilterVariablesIfNeeds(sql);
            return sql;
        }

        @Override
        protected boolean isHandlingCommentOnLineSeparator() {
            return true;
        }

        @Override
        protected boolean isDbCommentLine(String line) {
            final boolean commentLine = super.isDbCommentLine(line);
            if (commentLine) {
                return commentLine;
            }
            // for irregular pattern
            return isDbCommentLineForIrregularPattern(line);
        }

        @Override
        protected String getTerminator4Tool() {
            return resolveTerminator4Tool();
        }

        @Override
        protected boolean isTargetFile(String sql) {
            return getReplaceSchemaProperties().isTargetRepsFile(sql);
        }
    };
    final String loadType = getReplaceSchemaProperties().getRepsEnvType();
    final DfDataAssertProvider dataAssertProvider = new DfDataAssertProvider(loadType);
    runnerExecute.setDispatcher(new DfSqlFileRunnerDispatcher() {
        public DfRunnerDispatchResult dispatch(File sqlFile, Statement st, String sql) throws SQLException {
            final DfDataAssertHandler dataAssertHandler = dataAssertProvider.provideDataAssertHandler(sql);
            if (dataAssertHandler == null) {
                if (_skipIfNonAssetionSql) {
                    _log.info("*Skipped the statement because of not assertion SQL");
                    return DfRunnerDispatchResult.SKIPPED;
                } else if (_restrictIfNonAssetionSql) {
                    throwTakeFinallyNonAssertionSqlFoundException(sqlFile, sql);
                } else {
                    return DfRunnerDispatchResult.NONE;
                }
            }
            try {
                dataAssertHandler.handle(sqlFile, st, sql);
            } catch (DfTakeFinallyAssertionFailureException e) {
                handleAssertionFailureException(e);
            }
            return DfRunnerDispatchResult.DISPATCHED;
        }
    });
    return runnerExecute;
}
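A minimal usage sketch of the assertion pieces shown above; the "ut" load type and the surrounding variables (sqlFile, st, sql) are assumptions for illustration. The provider returns a handler only when the SQL is an assertion statement, and the handler throws DfTakeFinallyAssertionFailureException when the assertion fails.
DfDataAssertProvider provider = new DfDataAssertProvider("ut"); // "ut" is a hypothetical load type
DfDataAssertHandler handler = provider.provideDataAssertHandler(sql);
if (handler == null) {
    // not an assertion statement; a caller may execute or skip it, as in the dispatcher above
} else {
    handler.handle(sqlFile, st, sql); // throws DfTakeFinallyAssertionFailureException on failure
}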
Use of org.dbflute.properties.DfReplaceSchemaProperties in project dbflute-core by dbflute.
The class DfSPolicyInRepsChecker, method checkSchemaPolicyInRepsIfNeeds.
// ===================================================================================
//                                                                               Check
//                                                                               =====
public void checkSchemaPolicyInRepsIfNeeds() {
    final DfReplaceSchemaProperties repsProp = getReplaceSchemaProperties();
    if (!repsProp.isCheckSchemaPolicyInReps()) {
        return;
    }
    final DfSchemaPolicyProperties policyProp = getSchemaPolicyProperties();
    if (!policyProp.hasPolicy()) {
        return;
    }
    _log.info("...Beginning schema policy check in replace-schema");
    final String schemaXml = repsProp.getSchemaPolicyInRepsSchemaXml();
    deleteTemporarySchemaXmlIfExists(schemaXml);
    final DfSchemaXmlSerializer serializer = createSchemaXmlSerializer(schemaXml);
    serializer.serialize();
    try {
        final DfSchemaXmlReader reader = createSchemaXmlReader(schemaXml);
        final AppData appData = reader.read();
        final Database database = appData.getDatabase();
        // for "then classification"
        initializeClassificationDeployment(database);
        final List<Table> tableList = database.getTableList();
        final DfSPolicyChecker checker = policyProp.createChecker(() -> tableList);
        checker.checkPolicyIfNeeds();
    } finally {
        deleteTemporarySchemaXmlIfExists(schemaXml);
    }
}