Use of org.dbflute.helper.jdbc.sqlfile.DfSqlFileRunnerExecute in project dbflute-core by dbflute.
Class DfAlterCheckProcess, method createSqlFileRunner:
protected DfSqlFileRunner createSqlFileRunner(final DfRunnerInformation runInfo) {
    final String loadType = getReplaceSchemaProperties().getRepsEnvType();
    final DfDataAssertProvider dataAssertProvider = new DfDataAssertProvider(loadType);
    final DfSqlFileRunnerExecute runnerExecute = new DfSqlFileRunnerExecute(runInfo, _dataSource);
    runnerExecute.setDispatcher(new DfSqlFileRunnerDispatcher() {
        public DfRunnerDispatchResult dispatch(File sqlFile, Statement st, String sql) throws SQLException {
            final DfDataAssertHandler dataAssertHandler = dataAssertProvider.provideDataAssertHandler(sql);
            if (dataAssertHandler == null) {
                return DfRunnerDispatchResult.NONE;
            }
            dataAssertHandler.handle(sqlFile, st, sql);
            return DfRunnerDispatchResult.DISPATCHED;
        }
    });
    return runnerExecute;
}
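As the snippet suggests, the dispatcher hook decides how each statement of an SQL file is handled: NONE appears to leave execution to DfSqlFileRunnerExecute itself, DISPATCHED signals that the dispatcher has already consumed the statement, and SKIPPED drops it. Below is a minimal sketch of a pass-through dispatcher that only logs, assuming the same dbflute types and fields (_dataSource, _log) as the snippet above; the method name is hypothetical.

// A minimal sketch, not dbflute code: log each file and fall back to the
// default execution by returning NONE.
protected DfSqlFileRunner createLoggingSqlFileRunner(final DfRunnerInformation runInfo) {
    final DfSqlFileRunnerExecute runnerExecute = new DfSqlFileRunnerExecute(runInfo, _dataSource);
    runnerExecute.setDispatcher(new DfSqlFileRunnerDispatcher() {
        public DfRunnerDispatchResult dispatch(File sqlFile, Statement st, String sql) throws SQLException {
            _log.info("...Running SQL of " + sqlFile.getName()); // illustrative logging only
            return DfRunnerDispatchResult.NONE; // let the runner execute the statement as usual
        }
    });
    return runnerExecute;
}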
Use of org.dbflute.helper.jdbc.sqlfile.DfSqlFileRunnerExecute in project dbflute-core by dbflute.
Class DfCreateSchemaProcess, method getSqlFileRunner:
protected DfSqlFileRunner getSqlFileRunner(final DfRunnerInformation runInfo) {
    final DfReplaceSchemaProperties prop = getReplaceSchemaProperties();
    final DfSqlFileRunnerExecute execute = new DfSqlFileRunnerExecuteCreateSchema(runInfo, getDataSource());
    execute.setDispatcher(new DfSqlFileRunnerDispatcher() { // for additional user dispatch
        protected final Set<String> _skippedFileSet = new HashSet<String>();

        public DfRunnerDispatchResult dispatch(File sqlFile, Statement st, String sql) throws SQLException {
            if (_currentUser == null || _currentUser.trim().length() == 0) {
                return DfRunnerDispatchResult.NONE;
            }
            checkSkippedUser();
            if (isSkippedUser()) {
                return DfRunnerDispatchResult.SKIPPED;
            }
            Connection conn = _changeUserConnectionMap.get(_currentUser);
            if (conn == null) {
                _log.info("...Creating a connection to " + _currentUser);
                conn = prop.createAdditionalUserConnection(_currentUser);
                if (conn != null) {
                    _changeUserConnectionMap.put(_currentUser, conn);
                } else {
                    final StringBuilder sb = new StringBuilder();
                    sb.append("...Saying good-bye to the user '").append(_currentUser).append("'");
                    sb.append(" because of no definition");
                    _log.info(sb.toString());
                    _goodByeUserSet.add(_currentUser);
                    return DfRunnerDispatchResult.SKIPPED;
                }
            }
            final Statement dispatchStmt = conn.createStatement();
            try {
                dispatchStmt.execute(sql);
                return DfRunnerDispatchResult.DISPATCHED;
            } catch (SQLException e) {
                final List<String> argList = analyzeCheckUser(sql);
                if (argList != null) {
                    // means the command was found
                    if (argList.contains("mainSchema")) {
                        _alreadyExistsMainSchema = true;
                    }
                    final StringBuilder sb = new StringBuilder();
                    sb.append("...Saying good-bye to the user '").append(_currentUser).append("'");
                    sb.append(" because of checked: ").append(argList);
                    _log.info(sb.toString());
                    final String exmsg = e.getMessage();
                    _log.info(" -> " + (exmsg != null ? exmsg.trim() : null));
                    _goodByeUserSet.add(_currentUser);
                    return DfRunnerDispatchResult.SKIPPED;
                }
                throw e;
            } finally {
                if (dispatchStmt != null) {
                    dispatchStmt.close();
                }
            }
        }

        protected void checkSkippedUser() {
            if (_skippedFileSet.contains(_currentUser)) {
                return;
            }
            if (prop.isAdditionalUserSkipIfNotFoundPasswordFileAndDefault(_currentUser)) {
                _log.info("...Skipping the user since no password file: " + _currentUser);
                _skippedFileSet.add(_currentUser);
            }
        }

        protected boolean isSkippedUser() {
            return _skippedFileSet.contains(_currentUser);
        }
    });
    return execute;
}
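One design note on the block above: the per-user Statement is closed by hand in a finally block. On Java 7 and later the same dispatch-and-execute step can use try-with-resources. Below is a minimal sketch of a hypothetical helper illustrating that idiom; the method name and the omission of the analyzeCheckUser() recovery are assumptions for illustration, not part of dbflute.

// A sketch only: execute one statement on the dispatched connection using
// try-with-resources instead of the manual finally/close.
protected DfRunnerDispatchResult executeOnUserConnection(Connection conn, String sql) throws SQLException {
    try (Statement dispatchStmt = conn.createStatement()) { // closed automatically
        dispatchStmt.execute(sql);
        return DfRunnerDispatchResult.DISPATCHED;
    }
}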
Use of org.dbflute.helper.jdbc.sqlfile.DfSqlFileRunnerExecute in project dbflute-core by dbflute.
Class DfOutsideSqlTestTask, method getSqlFileRunner:
protected DfSqlFileRunnerExecute getSqlFileRunner(final DfRunnerInformation runInfo) {
    final String nonTargetMark = "df:x";
    final DBDef currentDBDef = getDatabaseTypeFacadeProp().getCurrentDBDef();
    return new DfSqlFileRunnerExecute(runInfo, getDataSource()) {
        protected DfOutsideSqlChecker _outsideSqlChecker;

        @Override
        protected String filterSql(String sql) {
            // /- - - - - - - - - - - - - - - - - - - - - - - - - -
            // check parameter comments in the SQL before filtering
            // - - - - - - - - - -/
            checkParameterComment(_sqlFile, sql);
            // filter comments if it needs.
            if (!currentDBDef.dbway().isBlockCommentSupported()) {
                sql = removeBlockComment(sql);
            }
            if (!currentDBDef.dbway().isLineCommentSupported()) {
                sql = removeLineComment(sql);
            }
            return super.filterSql(sql);
        }

        protected String removeBlockComment(final String sql) {
            return Srl.removeBlockComment(sql);
        }

        protected String removeLineComment(final String sql) {
            return Srl.removeLineComment(sql);
        }

        @Override
        protected boolean isTargetSql(String sql) {
            final String entityName = getEntityName(sql);
            if (entityName != null && nonTargetMark.equalsIgnoreCase(entityName)) {
                // non-target SQL
                _nonTargetSqlFileSet.add(_sqlFile);
                _log.info("...Skipping the SQL by non-target mark '" + nonTargetMark + "'");
                return false;
            }
            return super.isTargetSql(sql);
        }

        @Override
        protected void traceSql(String sql) {
            _log.info("SQL:" + ln() + sql);
        }

        @Override
        protected void traceResult(int goodSqlCount, int totalSqlCount) {
            _log.info(" -> success=" + goodSqlCount + " failure=" + (totalSqlCount - goodSqlCount) + ln());
        }

        protected String getEntityName(final String sql) {
            return getTargetString(sql, "#");
        }

        protected String getTargetString(final String sql, final String mark) {
            final List<String> targetList = getTargetList(sql, mark);
            return !targetList.isEmpty() ? targetList.get(0) : null;
        }

        protected List<String> getTargetList(final String sql, final String mark) {
            if (sql == null || sql.trim().length() == 0) {
                String msg = "The sql is invalid: " + sql;
                throw new IllegalArgumentException(msg);
            }
            final List<String> betweenBeginEndMarkList = getListBetweenBeginEndMark(sql, "--" + mark, mark);
            if (!betweenBeginEndMarkList.isEmpty()) {
                return betweenBeginEndMarkList;
            } else {
                // basically for MySQL
                return getListBetweenBeginEndMark(sql, "-- " + mark, mark);
            }
        }

        protected List<String> getListBetweenBeginEndMark(String targetStr, String beginMark, String endMark) {
            final List<ScopeInfo> scopeList = Srl.extractScopeList(targetStr, beginMark, endMark);
            final List<String> resultList = DfCollectionUtil.newArrayList();
            for (ScopeInfo scope : scopeList) {
                resultList.add(scope.getContent());
            }
            return resultList;
        }

        protected void checkParameterComment(File sqlFile, String sql) {
            final DfOutsideSqlProperties outsideSqlProp = getOutsideSqlProperties();
            if (outsideSqlProp.isSuppressParameterCommentCheck()) {
                return;
            }
            if (_outsideSqlChecker == null) {
                _outsideSqlChecker = createOutsideSqlChecker(outsideSqlProp);
            }
            _outsideSqlChecker.check(sqlFile.getName(), sql);
        }
    };
}
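The non-target check above relies on the entity-name comment at the head of an outside SQL file: getEntityName() extracts the text enclosed by "--#" (or "-- #", the MySQL-friendly form) and "#", and the value "df:x" marks the file as not executable by the test. Below is a small sketch of that extraction, assuming Srl.extractScopeList() and ScopeInfo behave as they are used in getListBetweenBeginEndMark() above; the sample SQL text and the method name are illustrative only.

// A sketch only: detect the non-target mark in a hypothetical outside-SQL text.
protected boolean isNonTargetOutsideSql() {
    final String sql = "-- #df:x#\nselect * from MEMBER"; // hypothetical file content
    final List<ScopeInfo> scopeList = Srl.extractScopeList(sql, "-- #", "#");
    final String entityName = !scopeList.isEmpty() ? scopeList.get(0).getContent() : null; // => "df:x"
    return "df:x".equalsIgnoreCase(entityName); // => true, so the file would be skipped
}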
Use of org.dbflute.helper.jdbc.sqlfile.DfSqlFileRunnerExecute in project dbflute-core by dbflute.
Class DfCraftDiffAssertSqlFire, method getSqlFileRunner4CraftDiff:
protected DfSqlFileRunner getSqlFileRunner4CraftDiff(final DfRunnerInformation runInfo, final DfCraftDiffAssertProvider provider) {
    final DfSqlFileRunnerExecute runnerExecute = new DfSqlFileRunnerExecute(runInfo, _dataSource) {
        @Override
        protected String getTerminator4Tool() {
            return resolveTerminator4Tool();
        }

        @Override
        protected boolean isTargetFile(String sql) {
            return isTargetEnvTypeFile(sql);
        }
    };
    runnerExecute.setDispatcher(new DfSqlFileRunnerDispatcher() {
        public DfRunnerDispatchResult dispatch(File sqlFile, Statement st, String sql) throws SQLException {
            final DfCraftDiffAssertHandler handler = provider.provideCraftDiffAssertHandler(sqlFile, sql);
            if (handler == null) {
                throwCraftDiffNonAssertionSqlFoundException(sqlFile, sql);
            }
            handler.handle(sqlFile, st, sql);
            return DfRunnerDispatchResult.DISPATCHED;
        }
    });
    return runnerExecute;
}
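Unlike the AlterCheck dispatcher at the top, which returns NONE when no assert handler matches so the statement runs normally, the CraftDiff dispatcher treats a non-assertion statement as an error. A hypothetical, more lenient variant would return SKIPPED instead of throwing; below is a sketch reusing only the types shown above, with the terminator and target-file overrides dropped for brevity and a made-up method name.

// A sketch only, not dbflute behavior: skip non-assertion statements instead of failing.
protected DfSqlFileRunner createLenientSqlFileRunner4CraftDiff(final DfRunnerInformation runInfo, final DfCraftDiffAssertProvider provider) {
    final DfSqlFileRunnerExecute runnerExecute = new DfSqlFileRunnerExecute(runInfo, _dataSource);
    runnerExecute.setDispatcher(new DfSqlFileRunnerDispatcher() {
        public DfRunnerDispatchResult dispatch(File sqlFile, Statement st, String sql) throws SQLException {
            final DfCraftDiffAssertHandler handler = provider.provideCraftDiffAssertHandler(sqlFile, sql);
            if (handler == null) {
                return DfRunnerDispatchResult.SKIPPED; // tolerate plain SQL instead of throwing
            }
            handler.handle(sqlFile, st, sql);
            return DfRunnerDispatchResult.DISPATCHED;
        }
    });
    return runnerExecute;
}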