Usage of org.dbflute.logic.jdbc.metadata.info.DfTableMeta in project dbflute-core (by dbflute): class DfSequenceHandlerJdbc, method incrementSequenceToDataMax.
// ===================================================================================
// Increment Sequence
// ==================
/**
 * Increments each sequence up to the max value of its table's data.
 * <p>
 * For every (table, sequence) pair, the single primary-key column's max value is
 * selected and the sequence is called in a loop until it reaches that value.
 * Tables with a compound primary key cannot be handled and are collected into a
 * skipped map that is reported at the end; tables with no rows are skipped silently
 * because no increment is needed.
 * @param tableSequenceMap The map of table name to sequence name. (NotNull)
 */
public void incrementSequenceToDataMax(Map<String, String> tableSequenceMap) {
    final Map<String, List<String>> skippedMap = DfCollectionUtil.newLinkedHashMap();
    _log.info("...Incrementing sequences to max value of table data");
    // declared outside the try so the failure exception can report how far
    // the current iteration progressed before the SQLException occurred
    String tableName = null;
    String sequenceName = null;
    DfTableMeta tableInfo = null;
    DfPrimaryKeyMeta pkInfo = null;
    String tableSqlName = null;
    Integer actualValue = null;
    Connection conn = null;
    Statement st = null;
    try {
        conn = _dataSource.getConnection();
        initializeTableInfo(conn);
        st = conn.createStatement();
        final Set<Entry<String, String>> entrySet = tableSequenceMap.entrySet();
        for (Entry<String, String> entry : entrySet) {
            // clear the elements that are also used in the exception message;
            // only the ones assigned after a throwing call need clearing
            // (tableName/sequenceName are reassigned immediately below with no
            //  SQLException-throwing call in between, so re-clearing them here
            //  would be dead code and has been removed)
            tableInfo = null;
            pkInfo = null;
            tableSqlName = null;
            actualValue = null;
            tableName = entry.getKey();
            sequenceName = entry.getValue();
            assertValidSequence(sequenceName, tableName);
            tableInfo = findTableInfo(conn, tableName);
            pkInfo = findPrimaryKeyInfo(conn, tableInfo);
            final List<String> pkList = pkInfo.getPrimaryKeyList();
            if (pkList.size() != 1) {
                // compound primary key: unsupported, report after the loop
                skippedMap.put(tableName, pkList);
                continue;
            }
            final String primaryKeyColumnName = pkList.get(0);
            tableSqlName = tableInfo.getTableSqlName();
            final Integer count = selectCount(st, tableSqlName);
            if (count == null || count == 0) {
                // it is not necessary to increment because the table has no data
                continue;
            }
            actualValue = selectDataMax(st, tableInfo, primaryKeyColumnName);
            if (actualValue == null) {
                // it is not necessary to increment because the table has no data
                continue;
            }
            callSequenceLoop(st, sequenceName, actualValue);
        }
    } catch (SQLException e) {
        throwIncrementSequenceToDataMaxFailureException(tableName, sequenceName, tableInfo, pkInfo, tableSqlName, actualValue, DfJDBCException.voice(e));
    } finally {
        if (st != null) {
            try {
                st.close();
            } catch (SQLException ignored) {
                _log.info("Statement.close() threw the exception!", ignored);
            }
        }
        if (conn != null) {
            try {
                conn.close();
            } catch (SQLException ignored) {
                _log.info("Connection.close() threw the exception!", ignored);
            }
        }
    }
    if (!skippedMap.isEmpty()) {
        _log.info("*Unsupported incrementing sequences(multiple-PK):");
        final Set<Entry<String, List<String>>> skippedEntrySet = skippedMap.entrySet();
        for (Entry<String, List<String>> skippedEntry : skippedEntrySet) {
            final String skippedTableName = skippedEntry.getKey();
            final List<String> pkList = skippedEntry.getValue();
            _log.info("  " + skippedTableName + ": pk=" + pkList);
        }
    }
}
Usage of org.dbflute.logic.jdbc.metadata.info.DfTableMeta in project dbflute-core (by dbflute): class DfSequenceHandlerJdbc, method findTableInfo.
/**
 * Finds the table meta for the specified table name among the generated target tables.
 * @param conn The connection to the database. (NotNull: unused here but kept for the signature)
 * @param tableName The name of the table to look up. (NotNull)
 * @return The found table meta. (NotNull)
 * @throws DfPropertySettingTableNotFoundException When the table is not a generated target.
 */
protected DfTableMeta findTableInfo(Connection conn, String tableName) throws SQLException {
    final DfTableMeta found = _tableMap.get(tableName);
    if (found != null) {
        return found;
    }
    final StringBuilder sb = new StringBuilder();
    sb.append("Failed to find the table in generated target tables:");
    sb.append(" table=").append(tableName).append(" target=").append(_tableMap.keySet());
    throw new DfPropertySettingTableNotFoundException(sb.toString());
}
Usage of org.dbflute.logic.jdbc.metadata.info.DfTableMeta in project dbflute-core (by dbflute): class DfSequenceHandlerPostgreSQL, method handleSerialTypeSequence.
/**
 * Increments PostgreSQL serial-type sequences up to the max value of their table data.
 * <p>
 * Scans every generated target table for a single-column, auto-increment primary key
 * whose column default is a {@code nextval('...')} expression, extracts the sequence
 * name from that default, and calls the sequence in a loop until it reaches the
 * table's current max key value. Sequences already covered by the explicit
 * table-sequence map are skipped so they are not incremented twice.
 * @param tableSequenceMap The map of table name to sequence name already handled. (NotNull)
 */
protected void handleSerialTypeSequence(Map<String, String> tableSequenceMap) {
    // sequences already processed via the explicit map must not be done again
    final StringSet doneSequenceSet = StringSet.createAsFlexibleOrdered();
    doneSequenceSet.addAll(tableSequenceMap.values());
    // declared outside the try so the failure exception can report how far
    // the current iteration progressed before the SQLException occurred
    DfTableMeta tableInfo = null;
    DfPrimaryKeyMeta pkInfo = null;
    String sequenceName = null;
    String tableSqlName = null;
    Integer actualValue = null;
    String sequenceSqlName = null;
    Connection conn = null;
    Statement st = null;
    try {
        conn = _dataSource.getConnection();
        st = conn.createStatement();
        final DatabaseMetaData metaData = conn.getMetaData();
        final DfColumnExtractor columnHandler = new DfColumnExtractor();
        final DfAutoIncrementExtractor autoIncrementHandler = new DfAutoIncrementExtractor();
        _log.info("...Incrementing serial type sequence");
        final Set<Entry<String, DfTableMeta>> entrySet = _tableMap.entrySet();
        for (Entry<String, DfTableMeta> entry : entrySet) {
            // clear elements that are also used exception message
            tableInfo = null;
            pkInfo = null;
            sequenceName = null;
            tableSqlName = null;
            actualValue = null;
            sequenceSqlName = null;
            tableInfo = entry.getValue();
            pkInfo = _uniqueKeyHandler.getPrimaryKey(metaData, tableInfo);
            final List<String> pkList = pkInfo.getPrimaryKeyList();
            if (pkList.size() != 1) {
                // compound primary key cannot be serial-backed: skip
                continue;
            }
            final String primaryKeyColumnName = pkList.get(0);
            if (!autoIncrementHandler.isAutoIncrementColumn(conn, tableInfo, primaryKeyColumnName)) {
                // only serial (auto-increment) keys are backed by a sequence
                continue;
            }
            final Map<String, DfColumnMeta> columnMap = columnHandler.getColumnMap(metaData, tableInfo);
            final DfColumnMeta columnInfo = columnMap.get(primaryKeyColumnName);
            if (columnInfo == null) {
                continue;
            }
            final String defaultValue = columnInfo.getDefaultValue();
            if (defaultValue == null) {
                continue;
            }
            // serial columns default to e.g. nextval('sequence_name'::regclass);
            // the sequence name is the text between the quotes
            final String prefix = "nextval('";
            if (!defaultValue.startsWith(prefix)) {
                continue;
            }
            final String excludedPrefixString = defaultValue.substring(prefix.length());
            final int endIndex = excludedPrefixString.indexOf("'");
            if (endIndex < 0) {
                // no closing quote: not a parseable nextval default
                continue;
            }
            sequenceName = excludedPrefixString.substring(0, endIndex);
            if (doneSequenceSet.contains(sequenceName)) {
                // already done
                continue;
            }
            tableSqlName = tableInfo.getTableSqlName();
            final Integer count = selectCount(st, tableSqlName);
            if (count == null || count == 0) {
                // It is not necessary to increment because the table has no data.
                continue;
            }
            actualValue = selectDataMax(st, tableInfo, primaryKeyColumnName);
            if (actualValue == null) {
                // It is not necessary to increment because the table has no data.
                continue;
            }
            // because sequence names of other schemas have already been qualified
            // sequenceSqlName = tableInfo.getUnifiedSchema().buildSqlName(sequenceName);
            sequenceSqlName = sequenceName;
            callSequenceLoop(st, sequenceSqlName, actualValue);
        }
    } catch (SQLException e) {
        throwSerialTypeSequenceHandlingFailureException(tableInfo, pkInfo, sequenceName, tableSqlName, actualValue, sequenceSqlName, e);
    } finally {
        if (st != null) {
            try {
                st.close();
            } catch (SQLException ignored) {
                _log.info("Statement.close() threw the exception!", ignored);
            }
        }
        if (conn != null) {
            try {
                conn.close();
            } catch (SQLException ignored) {
                _log.info("Connection.close() threw the exception!", ignored);
            }
        }
    }
}
Usage of org.dbflute.logic.jdbc.metadata.info.DfTableMeta in project dbflute-core (by dbflute): class DfCraftDiffAssertProvider, method toTableSqlMap.
/**
 * Builds the map of table name to its table-equals SQL for CraftDiff assertion.
 * <p>
 * The except expression may be {@code "except:COL1/COL2/..."}, whose columns are
 * excluded from the generated SQL. Tables for which no SQL can be built are omitted.
 * Each switched SQL statement is written to the log.
 * @param tableHint The hint to select target tables. (NullAllowed)
 * @param exceptExp The expression of excepted columns. (NullAllowed)
 * @return The map of table name to table-equals SQL. (NotNull)
 */
protected Map<String, String> toTableSqlMap(String tableHint, String exceptExp) {
    // parse "except:COL1/COL2/..." into a flexible (case-insensitive) column set
    final Set<String> exceptColumnSet = StringSet.createAsFlexible();
    final String exceptMark = "except:";
    if (exceptExp != null && exceptExp.startsWith(exceptMark)) {
        final String columnExp = Srl.substringFirstRear(exceptExp, exceptMark);
        exceptColumnSet.addAll(Srl.splitListTrimmed(columnExp, "/"));
    }
    final Map<String, String> tableSqlMap = new HashMap<String, String>();
    final StringBuilder logSb = new StringBuilder("...Switching table-equals SQL to:");
    for (DfTableMeta tableMeta : _tableList) {
        final String tableSql = buildTableEqualsSql(tableMeta, tableHint, exceptColumnSet);
        if (tableSql != null) { // null means the table is not a target
            logSb.append(ln()).append(tableSql).append(";");
            tableSqlMap.put(tableMeta.getTableName(), tableSql);
        }
    }
    _log.info(logSb.toString());
    return tableSqlMap;
}
Usage of org.dbflute.logic.jdbc.metadata.info.DfTableMeta in project dbflute-core (by dbflute): class DfForeignKeyExtractor, method judgeSameNameForeignKey.
/**
 * Judges which of two same-name foreign-key candidates to keep, by table type.
 * <p>
 * Prefers the candidate whose table type matches the local table's type.
 * When the metas cannot all be resolved, or neither candidate matches,
 * the first candidate wins.
 * @param localName The name of the local table. (NotNull)
 * @param firstName The name of the first candidate table. (NotNull)
 * @param secondName The name of the second candidate table. (NotNull)
 * @return true to use the first candidate, false to use the second.
 */
protected boolean judgeSameNameForeignKey(String localName, String firstName, String secondName) {
    final DfTableMeta localMeta = getTableMeta(localName);
    final DfTableMeta firstMeta = getTableMeta(firstName);
    final DfTableMeta secondMeta = getTableMeta(secondName);
    if (localMeta == null || firstMeta == null || secondMeta == null) {
        return true; // cannot compare types, fall back to the first
    }
    final String localType = localMeta.getTableType();
    if (localType.equals(firstMeta.getTableType())) {
        return true; // first candidate matches the local table type
    }
    if (localType.equals(secondMeta.getTableType())) {
        return false; // only the second candidate matches
    }
    return true; // neither matches, default to the first
}
Aggregations