Use of org.dbflute.logic.jdbc.metadata.info.DfTableMeta in the project dbflute-core by dbflute:
the prepareTableCaseTranslationIfNeeds method of the class DfAbsractDataWriter.
/**
 * Prepares table-name case translation on the column handler, if not prepared yet.
 * Reads the table list from JDBC database meta data for the unified schema and
 * registers the table DB names with the column handler. No-op when translation
 * is already enabled.
 * @throws IllegalStateException When the JDBC meta data access fails. (wraps the SQLException)
 */
protected void prepareTableCaseTranslationIfNeeds() {
    if (_columnHandler.isEnableTableCaseTranslation()) {
        return; // already prepared, nothing to do
    }
    // try-with-resources: the original finally-block swallowed close() failures silently;
    // this closes the connection reliably and keeps any close() failure as a suppressed exception
    try (Connection conn = _dataSource.getConnection()) {
        final DatabaseMetaData metaData = conn.getMetaData();
        final List<DfTableMeta> tableList = _tableHandler.getTableList(metaData, _unifiedSchema);
        final List<String> tableNameList = new ArrayList<String>();
        for (DfTableMeta meta : tableList) {
            tableNameList.add(meta.getTableDbName());
        }
        _columnHandler.enableTableCaseTranslation(tableNameList);
    } catch (SQLException e) {
        String msg = "Failed to get meta data of tables.";
        throw new IllegalStateException(msg, e);
    }
}
Use of org.dbflute.logic.jdbc.metadata.info.DfTableMeta in the project dbflute-core by dbflute:
the callbackDropTableByJdbc method of the class DfSchemaInitializerJdbc.
/**
 * Drops the given tables via plain JDBC, one DROP statement per table meta.
 * Each DROP SQL is built by the callback and logged before execution; a failure
 * on a single table is first delegated to the dropping-retry handling.
 * @param conn The JDBC connection to execute on. (NotNull)
 * @param tableMetaList The list of tables to drop. (NotNull)
 * @param callback The callback that builds the DROP SQL per table. (NotNull)
 * @throws SQLFailureException When statement creation or the retry handling fails.
 */
protected void callbackDropTableByJdbc(Connection conn, List<DfTableMeta> tableMetaList, DfDropTableByJdbcCallback callback) {
    String currentSql = null; // kept for the failure message
    Statement st = null;
    try {
        st = conn.createStatement();
        for (DfTableMeta tableMeta : tableMetaList) {
            currentSql = callback.buildDropTableSql(tableMeta);
            logReplaceSql(currentSql);
            try {
                st.execute(currentSql);
            } catch (SQLException e) {
                // e.g. the table may have dependencies; give the retry hook a chance
                handleDroppingRetry(callback, st, tableMeta, e);
            }
        }
    } catch (SQLException e) {
        throw new SQLFailureException("Failed to drop the table: " + currentSql, e);
    } finally {
        closeStatement(st);
    }
}
Use of org.dbflute.logic.jdbc.metadata.info.DfTableMeta in the project dbflute-core by dbflute:
the initializeSchema method of the class DfSchemaInitializerJdbc.
// ===================================================================================
//                                                                   Initialize Schema
//                                                                   =================
/**
 * Initializes (drops) the schema objects of the unified schema.
 * Gets a connection, extracts the table meta data of droppable tables, and
 * delegates the actual object handling to executeObject().
 * When the connection cannot be established and connection failure is suppressed,
 * this quietly delegates to the suppressed-failure handling and returns.
 * @throws SQLFailureException When any JDBC access fails. (wraps the SQLException)
 */
public void initializeSchema() {
    Connection conn = null;
    try {
        try {
            conn = _dataSource.getConnection();
        } catch (SQLException e) {
            if (_suppressConnectionFailure) {
                // e.g. the database may not exist yet; treated as non-fatal by option
                handleSuppressedConnectionFailure(e);
                return;
            } else {
                throw e;
            }
        }
        // fix: the meta-data access used to wrap its SQLException in a raw RuntimeException,
        // inconsistent with the SQLFailureException used by every other path here;
        // now the outer catch handles it uniformly (with the schema in the message)
        final DatabaseMetaData metaData = conn.getMetaData();
        final DfTableExtractor tableExtractor = createDropTableExtractor();
        final List<DfTableMeta> tableMetaList = tableExtractor.getTableList(metaData, _unifiedSchema);
        executeObject(conn, tableMetaList);
    } catch (SQLException e) {
        String msg = "Failed to initialize the schema: " + _unifiedSchema; // fixed garbled message
        throw new SQLFailureException(msg, e);
    } finally {
        if (conn != null) {
            try {
                conn.close();
            } catch (SQLException ignored) {
                _log.info("connection.close() threw the exception!", ignored);
            }
        }
    }
}
Use of org.dbflute.logic.jdbc.metadata.info.DfTableMeta in the project dbflute-core by dbflute:
the doAssertEmptyTable method of the class DfConventionalTakeAsserter.
// ===================================================================================
//                                                                         Empty Table
//                                                                         ===========
/**
 * Asserts that no targeted table is empty, throwing when empty tables are found.
 * A table is collected when it is a target of the empty-table assertion and is
 * actually empty; when a first-date threshold is configured, only tables whose
 * first date is after that threshold (i.e. new tables) are kept.
 * @param propMap The property map of conventional take-assert. (NotNull)
 */
protected void doAssertEmptyTable(DfConventionalTakeAssertMap propMap) {
    final List<DfTableMeta> allTableList = extractTableList();
    final List<DfTableMeta> emptyTableList = DfCollectionUtil.newArrayList();
    final Date targetDate = propMap.getErrorIfFirstDateAfter(); // null allowed
    if (targetDate != null) {
        _log.info("...Using first-date for targeting of empty tables: targetDate=" + new HandyDate(targetDate).toString());
    }
    for (DfTableMeta tableMeta : allTableList) {
        // same short-circuit order: target check first, then the (heavier) emptiness check
        if (!propMap.isEmptyTableTarget(tableMeta.getTableDbName()) || !determineEmptyTable(tableMeta)) {
            continue;
        }
        // the table is empty here
        if (targetDate == null) {
            emptyTableList.add(tableMeta); // fixedly keep
        } else if (isTableFirstDateAfter(tableMeta, targetDate)) {
            emptyTableList.add(tableMeta); // new table: is target so keep
        } else {
            _log.info("...Skipping the empty table by first-date: old-table=" + tableMeta.toString());
        }
    }
    if (!emptyTableList.isEmpty()) {
        throwTakeFinallyAssertionFailureEmptyTableException(emptyTableList);
    }
}
Use of org.dbflute.logic.jdbc.metadata.info.DfTableMeta in the project dbflute-core by dbflute:
the incrementSequenceToDataMax method of the class DfRepsSequenceHandlerJdbc.
// ===================================================================================
//                                                                  Increment Sequence
//                                                                  ==================
/**
 * Increments each mapped sequence up to the max value of its table's data.
 * For every (table, sequence) entry: finds the table's single primary key,
 * selects the max PK value, and loops the sequence up to that value.
 * Tables with a compound primary key are skipped and reported at the end.
 * Throws an application exception (via the throw-helper) when any JDBC access fails,
 * with the state of the locals at the failing entry included in the message.
 * @param tableSequenceMap The map from table name to sequence name. (NotNull)
 */
public void incrementSequenceToDataMax(Map<String, String> tableSequenceMap) {
    final Map<String, List<String>> skippedMap = DfCollectionUtil.newLinkedHashMap();
    _log.info("...Incrementing sequences to max value of table data");
    // these locals are declared outside the loop on purpose:
    // on failure they feed the exception message with the state of the failing entry
    String tableName = null;
    String sequenceName = null;
    DfTableMeta tableInfo = null;
    DfPrimaryKeyMeta pkInfo = null;
    String tableSqlName = null;
    Integer actualValue = null;
    Connection conn = null;
    Statement st = null;
    try {
        conn = _dataSource.getConnection();
        initializeTableInfo(conn);
        st = conn.createStatement();
        final Set<Entry<String, String>> entrySet = tableSequenceMap.entrySet();
        for (Entry<String, String> entry : entrySet) {
            // clear elements that are also used exception message
            // (so a failure message never shows stale values from the previous entry)
            tableName = null;
            sequenceName = null;
            tableInfo = null;
            pkInfo = null;
            tableSqlName = null;
            actualValue = null;
            tableName = entry.getKey();
            sequenceName = entry.getValue();
            assertValidSequence(sequenceName, tableName);
            tableInfo = findTableInfo(conn, tableName);
            pkInfo = findPrimaryKeyInfo(conn, tableInfo);
            final List<String> pkList = pkInfo.getPrimaryKeyList();
            if (pkList.size() != 1) {
                // compound (or missing) primary key: unsupported, report later
                skippedMap.put(tableName, pkList);
                continue;
            }
            final String primaryKeyColumnName = pkList.get(0);
            tableSqlName = tableInfo.getTableSqlName();
            final Integer count = selectCount(st, tableSqlName);
            if (count == null || count == 0) {
                // It is not necessary to increment because the table has no data.
                continue;
            }
            actualValue = selectDataMax(st, tableInfo, primaryKeyColumnName);
            if (actualValue == null) {
                // It is not necessary to increment because the table has no data.
                continue;
            }
            // call the sequence repeatedly until it reaches the data max
            callSequenceLoop(st, sequenceName, actualValue);
        }
    } catch (SQLException e) {
        // the nulled-per-iteration locals identify exactly which entry failed
        throwIncrementSequenceToDataMaxFailureException(tableName, sequenceName, tableInfo, pkInfo, tableSqlName, actualValue, DfJDBCException.voice(e));
    } finally {
        if (st != null) {
            try {
                st.close();
            } catch (SQLException ignored) {
                _log.info("Statement.close() threw the exception!", ignored);
            }
        }
        if (conn != null) {
            try {
                conn.close();
            } catch (SQLException ignored) {
                _log.info("Connection.close() threw the exception!", ignored);
            }
        }
    }
    if (!skippedMap.isEmpty()) {
        // summary of tables that could not be handled (multiple-PK)
        _log.info("*Unsupported incrementing sequences(multiple-PK):");
        final Set<Entry<String, List<String>>> skippedEntrySet = skippedMap.entrySet();
        for (Entry<String, List<String>> skippedEntry : skippedEntrySet) {
            final String skippedTableName = skippedEntry.getKey();
            final List<String> pkList = skippedEntry.getValue();
            _log.info(" " + skippedTableName + ": pk=" + pkList);
        }
    }
}
Aggregations