Use of org.dbflute.logic.jdbc.metadata.info.DfTableMeta in project dbflute-core by dbflute.
The class DfCraftDiffAssertProvider, method createTableEqualsAssertHandler.
protected DfCraftDiffAssertHandler createTableEqualsAssertHandler(final String craftTitle, final String tableHint, final String exceptExp) {
    return new DfCraftDiffAssertHandler(_craftMetaDir, _nextDirection, craftTitle) {
        @Override
        protected List<Map<String, String>> selectDiffDataList(File sqlFile, Statement st, String sql) throws SQLException {
            final List<Map<String, String>> unifiedList = new ArrayList<Map<String, String>>();
            final Map<String, String> tableSqlMap = toTableSqlMap(tableHint, exceptExp);
            for (DfTableMeta tableMeta : _tableList) {
                final String tableSql = tableSqlMap.get(tableMeta.getTableName());
                if (tableSql == null) {
                    continue;
                }
                final List<Map<String, String>> selectedList = super.selectDiffDataList(sqlFile, st, tableSql);
                final List<DfColumnMeta> columnMetaList = tableMeta.getLazyColumnMetaList();
                if (columnMetaList == null) {
                    String msg = "Not found the column meta for the table: " + tableMeta;
                    throw new IllegalStateException(msg);
                }
                if (columnMetaList.isEmpty()) {
                    String msg = "Empty column meta for the table: " + tableMeta;
                    throw new IllegalStateException(msg);
                }
                // first column should be PK
                final DfColumnMeta pkCol = columnMetaList.get(0);
                final String pkName = pkCol.getColumnName();
                for (Map<String, String> recordMap : selectedList) {
                    final String pkValue = recordMap.remove(pkName);
                    final Map<String, String> adjustedMap = StringKeyMap.createAsFlexibleOrdered();
                    final String uniqueCode = tableMeta.getTableName() + "::" + pkValue;
                    adjustedMap.put(TABLE_EQUALS_UNIQUE_NAME, uniqueCode);
                    final StringBuilder valueSb = new StringBuilder();
                    int columnIndex = 0;
                    for (Entry<String, String> entry : recordMap.entrySet()) {
                        // loop over non-PK columns (the PK was removed above)
                        if (columnIndex > 0) {
                            valueSb.append("|");
                        }
                        final String columnValue = entry.getValue();
                        valueSb.append(columnValue);
                        ++columnIndex;
                    }
                    adjustedMap.put(TABLE_EQUALS_DATA_NAME, valueSb.toString());
                    unifiedList.add(adjustedMap);
                }
            }
            return unifiedList;
        }

        @Override
        protected void handleSQLException(SQLException e, String sql) throws SQLException {
            String msg = "Failed to execute the SQL for CraftDiff.";
            msg = msg + ln() + "The SQL has been switched, so see the SQL below:";
            msg = msg + ln() + sql;
            throw new DfJDBCException(msg, e);
        }
    };
}
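The notable part above is how each selected record is flattened into one comparable row: the primary-key value is folded into a unique code ("TABLE::pkValue") and the remaining column values are joined with "|". A minimal standalone sketch of that flattening, using a plain LinkedHashMap and hypothetical MEMBER data in place of dbflute's StringKeyMap:

import java.util.LinkedHashMap;
import java.util.Map;

public class TableEqualsFlatteningSketch {
    public static void main(String[] args) {
        // hypothetical record already selected from MEMBER; the first column is the PK
        final Map<String, String> recordMap = new LinkedHashMap<>();
        recordMap.put("MEMBER_ID", "3");
        recordMap.put("MEMBER_NAME", "stojkov");
        recordMap.put("MEMBER_STATUS_CODE", "FML");

        final String pkValue = recordMap.remove("MEMBER_ID"); // pull the PK out first
        final String uniqueCode = "MEMBER" + "::" + pkValue;  // e.g. "MEMBER::3"

        final StringBuilder valueSb = new StringBuilder();
        int columnIndex = 0;
        for (Map.Entry<String, String> entry : recordMap.entrySet()) { // non-PK columns only
            if (columnIndex > 0) {
                valueSb.append("|");
            }
            valueSb.append(entry.getValue());
            ++columnIndex;
        }
        System.out.println(uniqueCode + " -> " + valueSb); // prints "MEMBER::3 -> stojkov|FML"
    }
}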
Use of org.dbflute.logic.jdbc.metadata.info.DfTableMeta in project dbflute-core by dbflute.
The class DfRepsSequenceHandlerPostgreSQL, method handleSerialTypeSequence.
protected void handleSerialTypeSequence(Map<String, String> tableSequenceMap) {
    final StringSet doneSequenceSet = StringSet.createAsFlexibleOrdered();
    doneSequenceSet.addAll(tableSequenceMap.values());
    DfTableMeta tableInfo = null;
    DfPrimaryKeyMeta pkInfo = null;
    String sequenceName = null;
    String tableSqlName = null;
    Integer actualValue = null;
    String sequenceSqlName = null;
    Connection conn = null;
    Statement st = null;
    try {
        conn = _dataSource.getConnection();
        st = conn.createStatement();
        final DatabaseMetaData metaData = conn.getMetaData();
        final DfColumnExtractor columnHandler = new DfColumnExtractor();
        final DfAutoIncrementExtractor autoIncrementHandler = new DfAutoIncrementExtractor();
        _log.info("...Incrementing serial type sequence");
        final Set<Entry<String, DfTableMeta>> entrySet = _tableMap.entrySet();
        for (Entry<String, DfTableMeta> entry : entrySet) {
            // clear the elements that are also used in the exception message
            tableInfo = null;
            pkInfo = null;
            sequenceName = null;
            tableSqlName = null;
            actualValue = null;
            sequenceSqlName = null;
            tableInfo = entry.getValue();
            pkInfo = _uniqueKeyHandler.getPrimaryKey(metaData, tableInfo);
            final List<String> pkList = pkInfo.getPrimaryKeyList();
            if (pkList.size() != 1) {
                continue;
            }
            final String primaryKeyColumnName = pkList.get(0);
            if (!autoIncrementHandler.isAutoIncrementColumn(conn, tableInfo, primaryKeyColumnName)) {
                continue;
            }
            final Map<String, DfColumnMeta> columnMap = columnHandler.getColumnMap(metaData, tableInfo);
            final DfColumnMeta columnInfo = columnMap.get(primaryKeyColumnName);
            if (columnInfo == null) {
                continue;
            }
            final String defaultValue = columnInfo.getDefaultValue();
            if (defaultValue == null) {
                continue;
            }
            final String prefix = "nextval('";
            if (!defaultValue.startsWith(prefix)) {
                continue;
            }
            final String excludedPrefixString = defaultValue.substring(prefix.length());
            final int endIndex = excludedPrefixString.indexOf("'");
            if (endIndex < 0) {
                continue;
            }
            sequenceName = excludedPrefixString.substring(0, endIndex);
            if (doneSequenceSet.contains(sequenceName)) {
                // already done
                continue;
            }
            tableSqlName = tableInfo.getTableSqlName();
            final Integer count = selectCount(st, tableSqlName);
            if (count == null || count == 0) {
                // it is not necessary to increment because the table has no data
                continue;
            }
            actualValue = selectDataMax(st, tableInfo, primaryKeyColumnName);
            if (actualValue == null) {
                // it is not necessary to increment because the table has no data
                continue;
            }
            // no schema qualification because sequence names
            // of other schemas have already been qualified
            // sequenceSqlName = tableInfo.getUnifiedSchema().buildSqlName(sequenceName);
            sequenceSqlName = sequenceName;
            callSequenceLoop(st, sequenceSqlName, actualValue);
        }
    } catch (SQLException e) {
        throwSerialTypeSequenceHandlingFailureException(tableInfo, pkInfo, sequenceName, tableSqlName, actualValue, sequenceSqlName, e);
    } finally {
        if (st != null) {
            try {
                st.close();
            } catch (SQLException ignored) {
                _log.info("Statement.close() threw the exception!", ignored);
            }
        }
        if (conn != null) {
            try {
                conn.close();
            } catch (SQLException ignored) {
                _log.info("Connection.close() threw the exception!", ignored);
            }
        }
    }
}
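The serial-type detection hinges on parsing the sequence name out of the column default, which PostgreSQL reports in the form nextval('sequence_name'::regclass). A minimal standalone sketch of just that parsing step, with a hypothetical default value:

public class SerialSequenceNameSketch {
    public static void main(String[] args) {
        // hypothetical default value as PostgreSQL reports it for a serial column
        final String defaultValue = "nextval('member_member_id_seq'::regclass)";

        final String prefix = "nextval('";
        if (!defaultValue.startsWith(prefix)) {
            return; // not a serial-type column
        }
        final String excludedPrefixString = defaultValue.substring(prefix.length());
        final int endIndex = excludedPrefixString.indexOf("'");
        if (endIndex < 0) {
            return; // unexpected format
        }
        final String sequenceName = excludedPrefixString.substring(0, endIndex);
        System.out.println(sequenceName); // prints "member_member_id_seq"
    }
}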
Use of org.dbflute.logic.jdbc.metadata.info.DfTableMeta in project dbflute-core by dbflute.
The class DfSchemaInitializerJdbc, method callbackTruncateTableByJdbc.
protected void callbackTruncateTableByJdbc(Connection conn, List<DfTableMeta> tableMetaInfoList, DfTruncateTableByJdbcCallback callback) {
    for (DfTableMeta metaInfo : tableMetaInfoList) {
        final String truncateTableSql = callback.buildTruncateTableSql(metaInfo);
        Statement st = null;
        try {
            st = conn.createStatement();
            st.execute(truncateTableSql);
            logReplaceSql(truncateTableSql);
        } catch (Exception e) {
            // a failure to truncate one table does not stop the others
            continue;
        } finally {
            closeStatement(st);
        }
    }
}
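The method itself only executes and logs; the SQL text comes from the callback. A minimal sketch of how a caller might implement it, assuming a plain "truncate table" statement is acceptable for the target DBMS (real dialects may need cascade or identity options):

// a minimal sketch of the callback, assuming a plain "truncate table"
// statement works for the target DBMS
final DfTruncateTableByJdbcCallback callback = new DfTruncateTableByJdbcCallback() {
    public String buildTruncateTableSql(DfTableMeta metaInfo) {
        return "truncate table " + metaInfo.getTableSqlName();
    }
};
callbackTruncateTableByJdbc(conn, tableMetaInfoList, callback);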
Use of org.dbflute.logic.jdbc.metadata.info.DfTableMeta in project dbflute-core by dbflute.
The class DfSchemaInitializerJdbc, method dropTable.
// ===================================================================================
//                                                                          Drop Table
//                                                                          ==========
protected void dropTable(Connection conn, List<DfTableMeta> tableMetaList) {
    List<DfTableMeta> viewList = new ArrayList<DfTableMeta>();
    List<DfTableMeta> otherList = new ArrayList<DfTableMeta>();
    for (DfTableMeta tableMeta : tableMetaList) {
        if (tableMeta.isTableTypeView()) {
            viewList.add(tableMeta);
        } else {
            otherList.add(tableMeta);
        }
    }
    // drop views first, then the other tables
    final List<DfTableMeta> sortedList = prepareSortedTableList(conn, viewList, otherList);
    callbackDropTableByJdbc(conn, sortedList, new DfDropTableByJdbcCallback() {
        public String buildDropTableSql(DfTableMeta metaInfo) {
            final StringBuilder sb = new StringBuilder();
            setupDropTable(sb, metaInfo);
            return sb.toString();
        }

        public String buildDropMaterializedViewSql(DfTableMeta metaInfo) {
            final StringBuilder sb = new StringBuilder();
            sb.append("drop materialized view ").append(metaInfo.getTableName());
            return sb.toString();
        }
    });
}
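setupDropTable() is not shown in this excerpt. A sketch of what it could look like, under the assumption that it only emits a plain drop statement and distinguishes views from tables (quoting and dialect-specific options omitted):

// a minimal sketch, assuming setupDropTable() appends a plain drop statement;
// the real implementation may add quoting or dialect-specific handling
protected void setupDropTable(StringBuilder sb, DfTableMeta metaInfo) {
    if (metaInfo.isTableTypeView()) {
        sb.append("drop view ").append(metaInfo.getTableName());
    } else {
        sb.append("drop table ").append(metaInfo.getTableName());
    }
}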
Use of org.dbflute.logic.jdbc.metadata.info.DfTableMeta in project dbflute-core by dbflute.
The class DfSchemaInitializerJdbc, method callbackDropForeignKeyByJdbc.
protected void callbackDropForeignKeyByJdbc(Connection conn, List<DfTableMeta> tableMetaList, DfDropForeignKeyByJdbcCallback callback) {
    Statement st = null;
    try {
        st = conn.createStatement();
        for (DfTableMeta tableMeta : tableMetaList) {
            if (isSkipDropForeignKey(tableMeta)) {
                continue;
            }
            final DfForeignKeyExtractor extractor = new DfForeignKeyExtractor();
            extractor.suppressExceptTarget();
            final DatabaseMetaData dbMetaData = conn.getMetaData();
            final Map<String, DfForeignKeyMeta> foreignKeyMetaInfoMap = extractor.getForeignKeyMap(conn, dbMetaData, tableMeta);
            final Set<String> keySet = foreignKeyMetaInfoMap.keySet();
            for (String foreignKeyName : keySet) {
                final DfForeignKeyMeta foreignKeyMetaInfo = foreignKeyMetaInfoMap.get(foreignKeyName);
                final String dropForeignKeySql = callback.buildDropForeignKeySql(foreignKeyMetaInfo);
                logReplaceSql(dropForeignKeySql);
                st.execute(dropForeignKeySql);
            }
        }
    } catch (SQLException e) {
        String msg = "Failed to drop foreign keys!";
        throw new SQLFailureException(msg, e);
    } finally {
        closeStatement(st);
    }
}
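As with the truncate case, the drop statement itself comes from the callback. A sketch of one possible implementation, assuming standard "alter table ... drop constraint" syntax; the DfForeignKeyMeta accessor names here are assumptions for illustration, not the library's confirmed API:

// a minimal sketch of the callback; the accessor names on DfForeignKeyMeta
// are assumptions for illustration, not the confirmed dbflute API
final DfDropForeignKeyByJdbcCallback callback = new DfDropForeignKeyByJdbcCallback() {
    public String buildDropForeignKeySql(DfForeignKeyMeta metaInfo) {
        return "alter table " + metaInfo.getLocalTableName()
                + " drop constraint " + metaInfo.getForeignKeyName();
    }
};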