Search in sources:

Example 6 with DfTableMeta

use of org.dbflute.logic.jdbc.metadata.info.DfTableMeta in project dbflute-core by dbflute.

In the class DfCraftDiffAssertProvider, the method createTableEqualsAssertHandler:

/**
 * Create the assert handler for table-equals assertion of CraftDiff.
 * <p>
 * The returned handler selects diff data per table (only tables whose name is in the
 * hint map built from the table hint / except expression) and unifies all records into
 * two-column rows: a unique code ("tableName::pkValue") and the remaining column values
 * joined with "|". The first column of each table's meta is expected to be the PK.
 * </p>
 * @param craftTitle The title of the craft meta. (NotNull)
 * @param tableHint The hint to extract target tables. (NotNull)
 * @param exceptExp The expression of excepted tables. (NullAllowed: depends on toTableSqlMap())
 * @return The new-created assert handler for table-equals. (NotNull)
 */
protected DfCraftDiffAssertHandler createTableEqualsAssertHandler(final String craftTitle, final String tableHint, final String exceptExp) {
    return new DfCraftDiffAssertHandler(_craftMetaDir, _nextDirection, craftTitle) {

        @Override
        protected List<Map<String, String>> selectDiffDataList(File sqlFile, Statement st, String sql) throws SQLException {
            final List<Map<String, String>> unifiedList = new ArrayList<Map<String, String>>();
            final Map<String, String> tableSqlMap = toTableSqlMap(tableHint, exceptExp);
            for (DfTableMeta tableMeta : _tableList) {
                final String tableSql = tableSqlMap.get(tableMeta.getTableName());
                if (tableSql == null) {
                    // the table is out of the hint's scope
                    continue;
                }
                final List<Map<String, String>> selectedList = super.selectDiffDataList(sqlFile, st, tableSql);
                final List<DfColumnMeta> columnMetaList = tableMeta.getLazyColumnMetaList();
                if (columnMetaList == null) {
                    String msg = "Not found the column meta for the table: " + tableMeta;
                    throw new IllegalStateException(msg);
                }
                if (columnMetaList.isEmpty()) {
                    String msg = "Empty column meta for the table: " + tableMeta;
                    throw new IllegalStateException(msg);
                }
                // first column should be PK
                final DfColumnMeta pkCol = columnMetaList.get(0);
                final String pkName = pkCol.getColumnName();
                for (Map<String, String> recordMap : selectedList) {
                    // remove the PK from the record so the value loop below sees only data columns
                    final String pkValue = recordMap.remove(pkName);
                    final Map<String, String> adjustedMap = StringKeyMap.createAsFlexibleOrdered();
                    final String uniqueCode = tableMeta.getTableName() + "::" + pkValue;
                    adjustedMap.put(TABLE_EQUALS_UNIQUE_NAME, uniqueCode);
                    final StringBuilder valueSb = new StringBuilder();
                    int columnIndex = 0;
                    for (Entry<String, String> entry : recordMap.entrySet()) {
                        // no PK loop (the PK entry was removed above)
                        if (columnIndex > 0) {
                            valueSb.append("|");
                        }
                        final String columnValue = entry.getValue();
                        valueSb.append(columnValue);
                        ++columnIndex;
                    }
                    adjustedMap.put(TABLE_EQUALS_DATA_NAME, valueSb.toString());
                    unifiedList.add(adjustedMap);
                }
            }
            return unifiedList;
        }

        @Override
        protected void handleSQLException(SQLException e, String sql) throws SQLException {
            String msg = "Failed to execute the SQL for CraftDiff.";
            // fixed message typo: "bellow" -> "below"
            msg = msg + ln() + "The SQL has been switched so see the SQL below:";
            msg = msg + ln() + sql;
            throw new DfJDBCException(msg, e);
        }
    };
}
Also used : DfColumnMeta(org.dbflute.logic.jdbc.metadata.info.DfColumnMeta) SQLException(java.sql.SQLException) Statement(java.sql.Statement) ArrayList(java.util.ArrayList) DfJDBCException(org.dbflute.exception.DfJDBCException) DfTableMeta(org.dbflute.logic.jdbc.metadata.info.DfTableMeta) File(java.io.File) HashMap(java.util.HashMap) Map(java.util.Map) StringKeyMap(org.dbflute.helper.StringKeyMap)

Example 7 with DfTableMeta

use of org.dbflute.logic.jdbc.metadata.info.DfTableMeta in project dbflute-core by dbflute.

In the class DfRepsSequenceHandlerPostgreSQL, the method handleSerialTypeSequence:

/**
 * Increment PostgreSQL serial-type sequences so they pass the current max PK value.
 * <p>
 * Scans every table in {@code _tableMap}: for a table whose single PK column is
 * auto-increment and whose default value is {@code nextval('...')}, the sequence name
 * is parsed out of the default value and the sequence is incremented up to the actual
 * max value of the PK column. Sequences already handled via {@code tableSequenceMap}
 * are skipped.
 * </p>
 * @param tableSequenceMap The map of already-processed table-to-sequence names; its
 *        values are treated as done and skipped. (NotNull)
 */
protected void handleSerialTypeSequence(Map<String, String> tableSequenceMap) {
    final StringSet doneSequenceSet = StringSet.createAsFlexibleOrdered();
    doneSequenceSet.addAll(tableSequenceMap.values());
    // these locals are declared outside the loop on purpose: they carry the state of
    // the failing iteration into the exception message built in the catch block below
    DfTableMeta tableInfo = null;
    DfPrimaryKeyMeta pkInfo = null;
    String sequenceName = null;
    String tableSqlName = null;
    Integer actualValue = null;
    String sequenceSqlName = null;
    Connection conn = null;
    Statement st = null;
    try {
        conn = _dataSource.getConnection();
        st = conn.createStatement();
        final DatabaseMetaData metaData = conn.getMetaData();
        final DfColumnExtractor columnHandler = new DfColumnExtractor();
        final DfAutoIncrementExtractor autoIncrementHandler = new DfAutoIncrementExtractor();
        _log.info("...Incrementing serial type sequence");
        final Set<Entry<String, DfTableMeta>> entrySet = _tableMap.entrySet();
        for (Entry<String, DfTableMeta> entry : entrySet) {
            // clear elements that are also used exception message
            tableInfo = null;
            pkInfo = null;
            sequenceName = null;
            tableSqlName = null;
            actualValue = null;
            sequenceSqlName = null;
            tableInfo = entry.getValue();
            pkInfo = _uniqueKeyHandler.getPrimaryKey(metaData, tableInfo);
            final List<String> pkList = pkInfo.getPrimaryKeyList();
            if (pkList.size() != 1) {
                // serial handling targets single-column PKs only
                continue;
            }
            final String primaryKeyColumnName = pkList.get(0);
            if (!autoIncrementHandler.isAutoIncrementColumn(conn, tableInfo, primaryKeyColumnName)) {
                continue;
            }
            final Map<String, DfColumnMeta> columnMap = columnHandler.getColumnMap(metaData, tableInfo);
            final DfColumnMeta columnInfo = columnMap.get(primaryKeyColumnName);
            if (columnInfo == null) {
                continue;
            }
            final String defaultValue = columnInfo.getDefaultValue();
            if (defaultValue == null) {
                continue;
            }
            // serial columns default to nextval('<sequence>'...); parse the sequence name out
            final String prefix = "nextval('";
            if (!defaultValue.startsWith(prefix)) {
                continue;
            }
            final String excludedPrefixString = defaultValue.substring(prefix.length());
            final int endIndex = excludedPrefixString.indexOf("'");
            if (endIndex < 0) {
                // malformed default value; no closing quote for the sequence name
                continue;
            }
            sequenceName = excludedPrefixString.substring(0, endIndex);
            if (doneSequenceSet.contains(sequenceName)) {
                // already done
                continue;
            }
            tableSqlName = tableInfo.getTableSqlName();
            final Integer count = selectCount(st, tableSqlName);
            if (count == null || count == 0) {
                // It is not necessary to increment because the table has no data.
                continue;
            }
            actualValue = selectDataMax(st, tableInfo, primaryKeyColumnName);
            if (actualValue == null) {
                // It is not necessary to increment because the table has no data.
                continue;
            }
            // because sequence names of other schemas have already been qualified
            // sequenceSqlName = tableInfo.getUnifiedSchema().buildSqlName(sequenceName);
            sequenceSqlName = sequenceName;
            callSequenceLoop(st, sequenceSqlName, actualValue);
        }
    } catch (SQLException e) {
        // the loop-state locals above identify which table/sequence failed
        throwSerialTypeSequenceHandlingFailureException(tableInfo, pkInfo, sequenceName, tableSqlName, actualValue, sequenceSqlName, e);
    } finally {
        // manual close (pre try-with-resources style used in this codebase)
        if (st != null) {
            try {
                st.close();
            } catch (SQLException ignored) {
                _log.info("Statement.close() threw the exception!", ignored);
            }
        }
        if (conn != null) {
            try {
                conn.close();
            } catch (SQLException ignored) {
                _log.info("Connection.close() threw the exception!", ignored);
            }
        }
    }
}
Also used : DfPrimaryKeyMeta(org.dbflute.logic.jdbc.metadata.info.DfPrimaryKeyMeta) DfColumnMeta(org.dbflute.logic.jdbc.metadata.info.DfColumnMeta) SQLException(java.sql.SQLException) Statement(java.sql.Statement) DfAutoIncrementExtractor(org.dbflute.logic.jdbc.metadata.basic.DfAutoIncrementExtractor) Connection(java.sql.Connection) DatabaseMetaData(java.sql.DatabaseMetaData) Entry(java.util.Map.Entry) DfColumnExtractor(org.dbflute.logic.jdbc.metadata.basic.DfColumnExtractor) StringSet(org.dbflute.helper.StringSet) DfTableMeta(org.dbflute.logic.jdbc.metadata.info.DfTableMeta)

Example 8 with DfTableMeta

use of org.dbflute.logic.jdbc.metadata.info.DfTableMeta in project dbflute-core by dbflute.

In the class DfSchemaInitializerJdbc, the method callbackTruncateTableByJdbc:

/**
 * Truncate each table by JDBC as a best-effort operation: a failure on one table is
 * skipped (not rethrown) and processing continues with the remaining tables.
 * @param conn The connection to the database. (NotNull)
 * @param tableMetaInfoList The list of table meta to truncate. (NotNull)
 * @param callback The callback that builds the truncate SQL per table. (NotNull)
 */
protected void callbackTruncateTableByJdbc(Connection conn, List<DfTableMeta> tableMetaInfoList, DfTruncateTableByJdbcCallback callback) {
    for (DfTableMeta metaInfo : tableMetaInfoList) {
        final String truncateTableSql = callback.buildTruncateTableSql(metaInfo);
        Statement st = null;
        try {
            st = conn.createStatement();
            st.execute(truncateTableSql);
            logReplaceSql(truncateTableSql);
        } catch (Exception ignored) {
            // best-effort: the table may be missing, locked, or referenced by FKs,
            // so deliberately skip it and keep truncating the rest
            continue;
        } finally {
            closeStatement(st);
        }
    }
}
Also used : Statement(java.sql.Statement) DfTableMeta(org.dbflute.logic.jdbc.metadata.info.DfTableMeta) SQLException(java.sql.SQLException) SQLFailureException(org.dbflute.exception.SQLFailureException)

Example 9 with DfTableMeta

use of org.dbflute.logic.jdbc.metadata.info.DfTableMeta in project dbflute-core by dbflute.

In the class DfSchemaInitializerJdbc, the method dropTable:

// ===================================================================================
// Drop Table
// ==========
/**
 * Drop all given tables and views by JDBC. The table meta list is first partitioned
 * into views and non-views, handed to the sort preparation, and then each element is
 * dropped through the JDBC callback (plain tables via setupDropTable, materialized
 * views via an explicit "drop materialized view" statement).
 * @param conn The connection to the database. (NotNull)
 * @param tableMetaList The list of table meta to drop. (NotNull)
 */
protected void dropTable(Connection conn, List<DfTableMeta> tableMetaList) {
    List<DfTableMeta> viewList = new ArrayList<DfTableMeta>();
    List<DfTableMeta> otherList = new ArrayList<DfTableMeta>();
    for (DfTableMeta meta : tableMetaList) {
        // partition into views and everything else
        (meta.isTableTypeView() ? viewList : otherList).add(meta);
    }
    // Drop view and drop others
    final List<DfTableMeta> sortedList = prepareSortedTableList(conn, viewList, otherList);
    callbackDropTableByJdbc(conn, sortedList, new DfDropTableByJdbcCallback() {

        public String buildDropTableSql(DfTableMeta metaInfo) {
            final StringBuilder sqlSb = new StringBuilder();
            setupDropTable(sqlSb, metaInfo);
            return sqlSb.toString();
        }

        public String buildDropMaterializedViewSql(DfTableMeta metaInfo) {
            return new StringBuilder("drop materialized view ").append(metaInfo.getTableName()).toString();
        }
    });
}
Also used : ArrayList(java.util.ArrayList) DfTableMeta(org.dbflute.logic.jdbc.metadata.info.DfTableMeta)

Example 10 with DfTableMeta

use of org.dbflute.logic.jdbc.metadata.info.DfTableMeta in project dbflute-core by dbflute.

In the class DfSchemaInitializerJdbc, the method callbackDropForeignKeyByJdbc:

/**
 * Drop all foreign keys of the given tables by JDBC. For each non-skipped table the
 * foreign key meta is extracted and one drop statement per foreign key is executed
 * via the callback's SQL.
 * @param conn The connection to the database. (NotNull)
 * @param tableMetaList The list of table meta whose foreign keys are dropped. (NotNull)
 * @param callback The callback that builds the drop-FK SQL. (NotNull)
 * @throws SQLFailureException When any drop statement fails. (wraps the SQLException)
 */
protected void callbackDropForeignKeyByJdbc(Connection conn, List<DfTableMeta> tableMetaList, DfDropForeignKeyByJdbcCallback callback) {
    Statement st = null;
    try {
        st = conn.createStatement();
        // loop-invariant: fetch the database meta data once, not per table
        final DatabaseMetaData dbMetaData = conn.getMetaData();
        for (DfTableMeta tableMeta : tableMetaList) {
            if (isSkipDropForeignKey(tableMeta)) {
                continue;
            }
            final DfForeignKeyExtractor extractor = new DfForeignKeyExtractor();
            extractor.suppressExceptTarget();
            final Map<String, DfForeignKeyMeta> foreignKeyMetaInfoMap = extractor.getForeignKeyMap(conn, dbMetaData, tableMeta);
            // iterate values directly instead of keySet() + get()
            for (DfForeignKeyMeta foreignKeyMetaInfo : foreignKeyMetaInfoMap.values()) {
                final String dropForeignKeySql = callback.buildDropForeignKeySql(foreignKeyMetaInfo);
                logReplaceSql(dropForeignKeySql);
                st.execute(dropForeignKeySql);
            }
        }
    } catch (SQLException e) {
        String msg = "Failed to drop foreign keys!";
        throw new SQLFailureException(msg, e);
    } finally {
        closeStatement(st);
    }
}
Also used : SQLException(java.sql.SQLException) Statement(java.sql.Statement) DfForeignKeyMeta(org.dbflute.logic.jdbc.metadata.info.DfForeignKeyMeta) DfTableMeta(org.dbflute.logic.jdbc.metadata.info.DfTableMeta) DatabaseMetaData(java.sql.DatabaseMetaData) DfForeignKeyExtractor(org.dbflute.logic.jdbc.metadata.basic.DfForeignKeyExtractor) SQLFailureException(org.dbflute.exception.SQLFailureException)

Aggregations

DfTableMeta (org.dbflute.logic.jdbc.metadata.info.DfTableMeta)31 SQLException (java.sql.SQLException)12 Connection (java.sql.Connection)9 DatabaseMetaData (java.sql.DatabaseMetaData)8 Statement (java.sql.Statement)8 ArrayList (java.util.ArrayList)7 UnifiedSchema (org.apache.torque.engine.database.model.UnifiedSchema)6 Map (java.util.Map)4 Entry (java.util.Map.Entry)4 SQLFailureException (org.dbflute.exception.SQLFailureException)4 DfPrimaryKeyMeta (org.dbflute.logic.jdbc.metadata.info.DfPrimaryKeyMeta)4 StringKeyMap (org.dbflute.helper.StringKeyMap)3 DfColumnMeta (org.dbflute.logic.jdbc.metadata.info.DfColumnMeta)3 HashMap (java.util.HashMap)2 HashSet (java.util.HashSet)2 LinkedHashMap (java.util.LinkedHashMap)2 LinkedHashSet (java.util.LinkedHashSet)2 List (java.util.List)2 TreeMap (java.util.TreeMap)2 TypeMap (org.apache.torque.engine.database.model.TypeMap)2