Use of org.dbflute.properties.DfOutsideSqlProperties in project dbflute-core by dbflute: the class DfOutsideSqlTestTask, method getSqlFileRunner.
protected DfSqlFileRunnerExecute getSqlFileRunner(final DfRunnerInformation runInfo) {
    final String nonTargetMark = "df:x";
    final DBDef currentDBDef = getDatabaseTypeFacadeProp().getCurrentDBDef();
    return new DfSqlFileRunnerExecute(runInfo, getDataSource()) {
        protected DfOutsideSqlChecker _outsideSqlChecker;

        @Override
        protected String filterSql(String sql) {
            // /- - - - - - - - - - - - - - - - - - - - - - - - - -
            // check parameter comments in the SQL before filtering
            // - - - - - - - - - -/
            checkParameterComment(_sqlFile, sql);
            // filter comments if needed
            if (!currentDBDef.dbway().isBlockCommentSupported()) {
                sql = removeBlockComment(sql);
            }
            if (!currentDBDef.dbway().isLineCommentSupported()) {
                sql = removeLineComment(sql);
            }
            return super.filterSql(sql);
        }
        protected String removeBlockComment(final String sql) {
            return Srl.removeBlockComment(sql);
        }

        protected String removeLineComment(final String sql) {
            return Srl.removeLineComment(sql);
        }

        @Override
        protected boolean isTargetSql(String sql) {
            final String entityName = getEntityName(sql);
            if (entityName != null && nonTargetMark.equalsIgnoreCase(entityName)) {
                // non-target SQL
                _nonTargetSqlFileSet.add(_sqlFile);
                _log.info("...Skipping the SQL by non-target mark '" + nonTargetMark + "'");
                return false;
            }
            return super.isTargetSql(sql);
        }

        @Override
        protected void traceSql(String sql) {
            _log.info("SQL:" + ln() + sql);
        }

        @Override
        protected void traceResult(int goodSqlCount, int totalSqlCount) {
            _log.info(" -> success=" + goodSqlCount + " failure=" + (totalSqlCount - goodSqlCount) + ln());
        }

        protected String getEntityName(final String sql) {
            return getTargetString(sql, "#");
        }

        protected String getTargetString(final String sql, final String mark) {
            final List<String> targetList = getTargetList(sql, mark);
            return !targetList.isEmpty() ? targetList.get(0) : null;
        }

        protected List<String> getTargetList(final String sql, final String mark) {
            if (sql == null || sql.trim().length() == 0) {
                String msg = "The sql is invalid: " + sql;
                throw new IllegalArgumentException(msg);
            }
            final List<String> betweenBeginEndMarkList = getListBetweenBeginEndMark(sql, "--" + mark, mark);
            if (!betweenBeginEndMarkList.isEmpty()) {
                return betweenBeginEndMarkList;
            } else {
                // basically for MySQL
                return getListBetweenBeginEndMark(sql, "-- " + mark, mark);
            }
        }

        protected List<String> getListBetweenBeginEndMark(String targetStr, String beginMark, String endMark) {
            final List<ScopeInfo> scopeList = Srl.extractScopeList(targetStr, beginMark, endMark);
            final List<String> resultList = DfCollectionUtil.newArrayList();
            for (ScopeInfo scope : scopeList) {
                resultList.add(scope.getContent());
            }
            return resultList;
        }

        protected void checkParameterComment(File sqlFile, String sql) {
            final DfOutsideSqlProperties outsideSqlProp = getOutsideSqlProperties();
            if (outsideSqlProp.isSuppressParameterCommentCheck()) {
                return;
            }
            if (_outsideSqlChecker == null) {
                _outsideSqlChecker = createOutsideSqlChecker(outsideSqlProp);
            }
            _outsideSqlChecker.check(sqlFile.getName(), sql);
        }
    };
}
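For reference, the non-target mark checked above is carried by an entity comment inside the SQL file itself, i.e. a line such as -- #df:x# (or --#df:x#), whose content between the marks is compared against "df:x". Below is a minimal, self-contained sketch of that extraction using only the JDK; it illustrates the mark format rather than the dbflute Srl helper, and the class and method names are made up for the example.

import java.util.ArrayList;
import java.util.List;

public class NonTargetMarkSketch {

    // pull out the text between "--#" (or "-- #") and the closing "#" on each line
    static List<String> extractEntityMarks(String sql) {
        final List<String> result = new ArrayList<>();
        for (String line : sql.split("\r?\n")) {
            final String trimmed = line.trim();
            final String beginMark = trimmed.startsWith("-- #") ? "-- #" : trimmed.startsWith("--#") ? "--#" : null;
            if (beginMark == null) {
                continue;
            }
            final int start = beginMark.length();
            final int end = trimmed.indexOf('#', start);
            if (end > start) {
                result.add(trimmed.substring(start, end));
            }
        }
        return result;
    }

    public static void main(String[] args) {
        final String sql = "-- #df:x#\nselect * from MEMBER";
        System.out.println(extractEntityMarks(sql)); // prints [df:x], i.e. the non-target mark
    }
}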
Use of org.dbflute.properties.DfOutsideSqlProperties in project dbflute-core by dbflute: the class Table, method switchSql2EntitySimpleDtoOutputDirectory.
public void switchSql2EntitySimpleDtoOutputDirectory() {
    final DfOutsideSqlProperties prop = getProperties().getOutsideSqlProperties();
    if (_sql2EntitySqlFile != null && _sql2EntitySqlFile.isSqlAp()) {
        prop.switchSql2EntityOutputDirectory(_sql2EntitySqlFile.getSql2EntityOutputDirectory());
    } else {
        final String outputDirectory = getProperties().getSimpleDtoProperties().getSimpleDtoOutputDirectory();
        prop.switchSql2EntityOutputDirectory(outputDirectory);
    }
}
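The switch is a two-way choice of output directory: a SQL file flagged by isSqlAp() (presumably an application outside-SQL file) keeps its own Sql2Entity output directory, and everything else falls back to the SimpleDto output directory. A condensed sketch of that decision follows; switchSql2EntityOutputDirectory is the real property method shown above, while the SqlFileView stand-in type and the sketch class are hypothetical.

public class OutputDirectorySwitchSketch {

    // stand-in for the Sql2Entity SQL file handle used in the method above (hypothetical type)
    interface SqlFileView {
        boolean isSqlAp();
        String getSql2EntityOutputDirectory();
    }

    // application outside-SQL keeps its own directory; otherwise the SimpleDto directory wins
    static String chooseOutputDirectory(SqlFileView sqlFile, String simpleDtoOutputDirectory) {
        if (sqlFile != null && sqlFile.isSqlAp()) {
            return sqlFile.getSql2EntityOutputDirectory();
        }
        return simpleDtoOutputDirectory;
    }
}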
Use of org.dbflute.properties.DfOutsideSqlProperties in project dbflute-core by dbflute: the class DfProcedureExecutionMetaExtractor, method doExtractExecutionMetaData.
protected void doExtractExecutionMetaData(DataSource dataSource, DfProcedureMeta procedure) throws SQLException {
    final List<DfProcedureColumnMeta> columnList = procedure.getProcedureColumnList();
    if (!needsToCall(columnList)) {
        final String name = procedure.buildProcedureLoggingName();
        _log.info("...Skipping unneeded call: " + name + " params=" + buildParameterTypeView(columnList));
        return;
    }
    final List<Object> testValueList = DfCollectionUtil.newArrayList();
    setupTestValueList(columnList, testValueList);
    final boolean existsReturn = existsReturnValue(columnList);
    final String sql = createSql(procedure, existsReturn, true);
    Connection conn = null;
    CallableStatement cs = null;
    boolean beginTransaction = false;
    try {
        _log.info("...Calling: " + sql);
        conn = dataSource.getConnection();
        conn.setAutoCommit(false);
        beginTransaction = true;
        cs = conn.prepareCall(sql);
        final List<DfProcedureColumnMeta> boundColumnList = DfCollectionUtil.newArrayList();
        setupBindParameter(conn, cs, columnList, testValueList, boundColumnList);
        boolean executed;
        try {
            executed = cs.execute();
        } catch (SQLException e) {
            // retry without escape because Oracle sometimes hates escape
            final String retrySql = createSql(procedure, existsReturn, false);
            try {
                try {
                    cs.close();
                } catch (SQLException ignored) {
                }
                cs = conn.prepareCall(retrySql);
                setupBindParameter(conn, cs, columnList, testValueList, boundColumnList);
                executed = cs.execute();
                _log.info(" (o) retry: " + retrySql);
            } catch (SQLException ignored) {
                _log.info(" (x) retry: " + retrySql);
                throw e;
            }
        }
        if (executed) {
            int closetIndex = 0;
            do {
                ResultSet rs = null;
                try {
                    rs = cs.getResultSet();
                    if (rs == null) {
                        break;
                    }
                    final Map<String, DfColumnMeta> columnMetaInfoMap = extractColumnMetaInfoMap(rs, sql);
                    final DfProcedureNotParamResultMeta notParamResult = new DfProcedureNotParamResultMeta();
                    final String propertyName;
                    if (procedure.isCalledBySelect() && closetIndex == 0) {
                        // for example, a table-valued function;
                        // a procedure of this type basically does not have
                        // a second or later result set,
                        // but check closetIndex just in case
                        propertyName = "returnResult";
                    } else {
                        // basically here
                        propertyName = "notParamResult" + (closetIndex + 1);
                    }
                    notParamResult.setPropertyName(propertyName);
                    notParamResult.setResultSetColumnInfoMap(columnMetaInfoMap);
                    procedure.addNotParamResult(notParamResult);
                    ++closetIndex;
                } finally {
                    closeResult(rs);
                }
            } while (cs.getMoreResults());
        }
        int index = 0;
        for (DfProcedureColumnMeta column : boundColumnList) {
            final DfProcedureColumnType columnType = column.getProcedureColumnType();
            if (DfProcedureColumnType.procedureColumnIn.equals(columnType)) {
                ++index;
                continue;
            }
            final int paramIndex = (index + 1);
            final Object obj;
            if (column.isPostgreSQLCursor()) {
                obj = _postgreSqlResultSetType.getValue(cs, paramIndex);
            } else if (column.isOracleCursor()) {
                obj = _oracleResultSetType.getValue(cs, paramIndex);
            } else {
                // as default
                obj = cs.getObject(paramIndex);
            }
            if (obj instanceof ResultSet) {
                ResultSet rs = null;
                try {
                    rs = (ResultSet) obj;
                    final Map<String, DfColumnMeta> columnMetaInfoMap = extractColumnMetaInfoMap(rs, sql);
                    column.setResultSetColumnInfoMap(columnMetaInfoMap);
                } finally {
                    closeResult(rs);
                }
            }
            ++index;
        }
    } catch (SQLException e) {
        final ExceptionMessageBuilder br = new ExceptionMessageBuilder();
        br.addNotice("Failed to execute the procedure for getting meta data.");
        br.addItem("SQL");
        br.addElement(sql);
        br.addItem("Parameter");
        for (DfProcedureColumnMeta column : columnList) {
            br.addElement(column.getColumnDisplayName());
        }
        br.addItem("Test Value");
        br.addElement(buildTestValueDisp(testValueList));
        br.addItem("Exception Message");
        br.addElement(DfJDBCException.extractMessage(e));
        final SQLException nextEx = e.getNextException();
        if (nextEx != null) {
            br.addElement(DfJDBCException.extractMessage(nextEx));
        }
        final String msg = br.buildExceptionMessage();
        final DfOutsideSqlProperties prop = getProperties().getOutsideSqlProperties();
        if (prop.hasSpecifiedExecutionMetaProcedure()) {
            throw new DfProcedureExecutionMetaGettingFailureException(msg, e);
        } else {
            // if not specified, it continues
            _continuedFailureMessageMap.put(procedure.getProcedureFullQualifiedName(), msg);
            _log.info("*Failed to call so read the warning message displayed later");
        }
    } finally {
        if (cs != null) {
            cs.close();
        }
        if (conn != null) {
            try {
                conn.rollback();
            } catch (SQLException continued) {
                // one day Oracle suddenly threw it (by socket trouble?)
                final String exp = DfJDBCException.extractMessage(continued);
                _log.info("*Failed to roll-back the procedure call but continued: " + exp);
            }
            if (beginTransaction) {
                try {
                    conn.setAutoCommit(true);
                } catch (SQLException continued) {
                    final String exp = DfJDBCException.extractMessage(continued);
                    _log.info("*Failed to set auto-commit true: " + exp);
                }
            }
        }
    }
}
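The inner retry above only swaps the statement text: the first attempt uses JDBC escape syntax and the retry uses a plain call, because, as the source comment puts it, Oracle sometimes rejects the escaped form. A stripped-down sketch of that pattern is shown below; the procedure name SP_EXAMPLE, its parameter shape, and the sketch class are assumptions for illustration.

import java.sql.CallableStatement;
import java.sql.Connection;
import java.sql.SQLException;
import javax.sql.DataSource;

public class ProcedureCallRetrySketch {

    // try the JDBC escape form first, then retry with a plain call if the driver rejects it
    static void callWithRetry(DataSource dataSource) throws SQLException {
        final String escaped = "{call SP_EXAMPLE(?, ?)}"; // hypothetical procedure
        final String plain = "call SP_EXAMPLE(?, ?)";
        try (Connection conn = dataSource.getConnection()) {
            conn.setAutoCommit(false); // meta-data call only, so roll back afterwards
            try {
                execute(conn, escaped);
            } catch (SQLException e) {
                execute(conn, plain); // retry without escape
            } finally {
                conn.rollback();
                conn.setAutoCommit(true);
            }
        }
    }

    private static void execute(Connection conn, String sql) throws SQLException {
        try (CallableStatement cs = conn.prepareCall(sql)) {
            cs.setInt(1, 0); // IN parameter (assumed)
            cs.registerOutParameter(2, java.sql.Types.INTEGER); // OUT parameter (assumed)
            cs.execute();
        }
    }
}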
Use of org.dbflute.properties.DfOutsideSqlProperties in project dbflute-core by dbflute: the class DfOutsideSqlPropertiesTest, method test_resolveFileSeparatorWithFlatDirectory_startsWith.
public void test_resolveFileSeparatorWithFlatDirectory_startsWith() {
    // ## Arrange ##
    final DfOutsideSqlProperties prop = new DfOutsideSqlProperties(new Properties());

    // ## Act ##
    final String actual = prop.resolveSqlPackageFileSeparatorWithFlatDirectory("abc.def.ghi.dbflute", "abc.def");

    // ## Assert ##
    log(actual);
    assertEquals("abc.def/ghi/dbflute", actual);
}
Use of org.dbflute.properties.DfOutsideSqlProperties in project dbflute-core by dbflute: the class DfOutsideSqlPropertiesTest, method test_resolveSqlPackageFileSeparatorWithFlatDirectory_contains.
public void test_resolveSqlPackageFileSeparatorWithFlatDirectory_contains() {
    // ## Arrange ##
    final DfOutsideSqlProperties prop = new DfOutsideSqlProperties(new Properties());

    // ## Act ##
    final String actual = prop.resolveSqlPackageFileSeparatorWithFlatDirectory("abc.def.ghi.dbflute", "def.ghi");

    // ## Assert ##
    log(actual);
    assertEquals("abc/def.ghi/dbflute", actual);
}
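Taken together, the two tests pin down the resolution rule: package dots become file separators except inside the configured flat-directory chunk, which keeps its dots. A rough, JDK-only sketch of that rule is shown below for orientation; it reproduces the two assertions above but is not the actual dbflute implementation, and the class and method names are hypothetical.

public class FlatDirectoryResolutionSketch {

    // keep the flat-directory chunk dotted, turn every other dot into a slash
    static String resolveWithFlatDirectory(String sqlPackage, String flatDirectory) {
        final int index = sqlPackage.indexOf(flatDirectory);
        if (index < 0) {
            return sqlPackage.replace('.', '/');
        }
        final String front = sqlPackage.substring(0, index).replace('.', '/');
        final String rear = sqlPackage.substring(index + flatDirectory.length()).replace('.', '/');
        return front + flatDirectory + rear;
    }

    public static void main(String[] args) {
        System.out.println(resolveWithFlatDirectory("abc.def.ghi.dbflute", "abc.def")); // abc.def/ghi/dbflute
        System.out.println(resolveWithFlatDirectory("abc.def.ghi.dbflute", "def.ghi")); // abc/def.ghi/dbflute
    }
}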