Search in sources:

Example 1 with DfPrimaryKeyMeta

use of org.dbflute.logic.jdbc.metadata.info.DfPrimaryKeyMeta in project dbflute-core by dbflute.

From the class DfRepsSequenceHandlerPostgreSQL, method handleSerialTypeSequence:

/**
 * Increment serial-type sequences up to the max value of their table data. (for PostgreSQL)
 * A serial column is detected as a single-column auto-increment primary key whose
 * default value expression starts with "nextval('"; the sequence name is parsed
 * out of that default expression.
 * @param tableSequenceMap The map of table's explicitly-defined sequences; its values
 * are treated as already handled and are skipped here. (NotNull)
 */
protected void handleSerialTypeSequence(Map<String, String> tableSequenceMap) {
    final StringSet doneSequenceSet = StringSet.createAsFlexibleOrdered();
    doneSequenceSet.addAll(tableSequenceMap.values());
    // declared outside the try block so the catch clause can report
    // which element was being processed when a SQLException occurs
    DfTableMeta tableInfo = null;
    DfPrimaryKeyMeta pkInfo = null;
    String sequenceName = null;
    String tableSqlName = null;
    Integer actualValue = null;
    String sequenceSqlName = null;
    Connection conn = null;
    Statement st = null;
    try {
        conn = _dataSource.getConnection();
        st = conn.createStatement();
        final DatabaseMetaData metaData = conn.getMetaData();
        final DfColumnExtractor columnHandler = new DfColumnExtractor();
        final DfAutoIncrementExtractor autoIncrementHandler = new DfAutoIncrementExtractor();
        _log.info("...Incrementing serial type sequence");
        final Set<Entry<String, DfTableMeta>> entrySet = _tableMap.entrySet();
        for (Entry<String, DfTableMeta> entry : entrySet) {
            // clear elements that are also used in the exception message
            tableInfo = null;
            pkInfo = null;
            sequenceName = null;
            tableSqlName = null;
            actualValue = null;
            sequenceSqlName = null;
            tableInfo = entry.getValue();
            pkInfo = _uniqueKeyHandler.getPrimaryKey(metaData, tableInfo);
            final List<String> pkList = pkInfo.getPrimaryKeyList();
            if (pkList.size() != 1) {
                // serial columns are handled only as single-column primary keys
                continue;
            }
            final String primaryKeyColumnName = pkList.get(0);
            if (!autoIncrementHandler.isAutoIncrementColumn(conn, tableInfo, primaryKeyColumnName)) {
                continue;
            }
            final Map<String, DfColumnMeta> columnMap = columnHandler.getColumnMap(metaData, tableInfo);
            final DfColumnMeta columnInfo = columnMap.get(primaryKeyColumnName);
            if (columnInfo == null) {
                continue;
            }
            final String defaultValue = columnInfo.getDefaultValue();
            if (defaultValue == null) {
                continue;
            }
            // parse the sequence name out of the nextval default expression,
            // i.e. the text between "nextval('" and the next single quote
            final String prefix = "nextval('";
            if (!defaultValue.startsWith(prefix)) {
                continue;
            }
            final String excludedPrefixString = defaultValue.substring(prefix.length());
            final int endIndex = excludedPrefixString.indexOf("'");
            if (endIndex < 0) {
                continue;
            }
            sequenceName = excludedPrefixString.substring(0, endIndex);
            if (doneSequenceSet.contains(sequenceName)) {
                // already done (handled via tableSequenceMap)
                continue;
            }
            tableSqlName = tableInfo.getTableSqlName();
            final Integer count = selectCount(st, tableSqlName);
            if (count == null || count == 0) {
                // It is not necessary to increment because the table has no data.
                continue;
            }
            actualValue = selectDataMax(st, tableInfo, primaryKeyColumnName);
            if (actualValue == null) {
                // It is not necessary to increment because the table has no data.
                continue;
            }
            // used as-is without schema qualification,
            // because sequence names of other schemas have already been qualified
            // sequenceSqlName = tableInfo.getUnifiedSchema().buildSqlName(sequenceName);
            sequenceSqlName = sequenceName;
            callSequenceLoop(st, sequenceSqlName, actualValue);
        }
    } catch (SQLException e) {
        throwSerialTypeSequenceHandlingFailureException(tableInfo, pkInfo, sequenceName, tableSqlName, actualValue, sequenceSqlName, e);
    } finally {
        if (st != null) {
            try {
                st.close();
            } catch (SQLException ignored) {
                _log.info("Statement.close() threw the exception!", ignored);
            }
        }
        if (conn != null) {
            try {
                conn.close();
            } catch (SQLException ignored) {
                _log.info("Connection.close() threw the exception!", ignored);
            }
        }
    }
}
Also used : DfPrimaryKeyMeta(org.dbflute.logic.jdbc.metadata.info.DfPrimaryKeyMeta) DfColumnMeta(org.dbflute.logic.jdbc.metadata.info.DfColumnMeta) SQLException(java.sql.SQLException) Statement(java.sql.Statement) DfAutoIncrementExtractor(org.dbflute.logic.jdbc.metadata.basic.DfAutoIncrementExtractor) Connection(java.sql.Connection) DatabaseMetaData(java.sql.DatabaseMetaData) Entry(java.util.Map.Entry) DfColumnExtractor(org.dbflute.logic.jdbc.metadata.basic.DfColumnExtractor) StringSet(org.dbflute.helper.StringSet) DfTableMeta(org.dbflute.logic.jdbc.metadata.info.DfTableMeta)

Example 2 with DfPrimaryKeyMeta

use of org.dbflute.logic.jdbc.metadata.info.DfPrimaryKeyMeta in project dbflute-core by dbflute.

From the class DfSynonymExtractorOracle, method setupBasicConstraintInfo:

/**
 * Set up the basic constraint meta data (primary key, auto-increment, unique key,
 * foreign key and index) of the synonym's target table into the synonym meta.
 * @param info The synonym meta to set up. (NotNull)
 * @param tableOwner The unified schema that owns the target table. (NotNull)
 * @param tableName The name of the target table. (NotNull)
 * @param conn The connection used for the meta data extraction. (NotNull)
 * @throws SQLException When the meta data extraction fails.
 */
protected void setupBasicConstraintInfo(DfSynonymMeta info, UnifiedSchema tableOwner, String tableName, Connection conn) throws SQLException {
    final DatabaseMetaData metaData = conn.getMetaData();
    // primary key
    final DfPrimaryKeyMeta primaryKeyInfo = getPKList(metaData, tableOwner, tableName);
    info.setPrimaryKey(primaryKeyInfo);
    final List<String> primaryKeyList = primaryKeyInfo.getPrimaryKeyList();
    // auto increment: only when selectable, because the detection needs a select statement
    if (info.isSelectable()) {
        for (String primaryKeyName : primaryKeyList) {
            if (isAutoIncrement(conn, tableOwner, tableName, primaryKeyName)) {
                info.setAutoIncrement(true);
                break;
            }
        }
    }
    // unique key
    final Map<String, Map<Integer, String>> uniqueKeyMap = getUQMap(metaData, tableOwner, tableName, primaryKeyList);
    info.setUniqueKeyMap(uniqueKeyMap);
    // foreign key: it's tentative information at this timing!
    final Map<String, DfForeignKeyMeta> foreignKeyMap = getFKMap(conn, metaData, tableOwner, tableName);
    info.setForeignKeyMap(foreignKeyMap);
    // index: derived using the unique key map registered just above
    final Map<String, Map<Integer, String>> indexMap = getIndexMap(metaData, tableOwner, tableName, info.getUniqueKeyMap());
    info.setIndexMap(indexMap);
}
Also used : DfPrimaryKeyMeta(org.dbflute.logic.jdbc.metadata.info.DfPrimaryKeyMeta) DatabaseMetaData(java.sql.DatabaseMetaData) LinkedHashMap(java.util.LinkedHashMap) Map(java.util.Map) StringKeyMap(org.dbflute.helper.StringKeyMap)

Example 3 with DfPrimaryKeyMeta

use of org.dbflute.logic.jdbc.metadata.info.DfPrimaryKeyMeta in project dbflute-core by dbflute.

From the class DfSequenceHandlerJdbc, method incrementSequenceToDataMax:

// ===================================================================================
// Increment Sequence
// ==================
/**
 * Increment each mapped sequence up to the max value of its table's primary-key data.
 * Tables whose primary key is not a single column are skipped and reported at the end.
 * @param tableSequenceMap The map of table name to its sequence name. (NotNull)
 */
public void incrementSequenceToDataMax(Map<String, String> tableSequenceMap) {
    // table name to its (multiple-column) primary-key list, reported after the loop
    final Map<String, List<String>> skippedMap = DfCollectionUtil.newLinkedHashMap();
    _log.info("...Incrementing sequences to max value of table data");
    // declared outside the try block so the catch clause can report
    // which element was being processed when a SQLException occurs
    String tableName = null;
    String sequenceName = null;
    DfTableMeta tableInfo = null;
    DfPrimaryKeyMeta pkInfo = null;
    String tableSqlName = null;
    Integer actualValue = null;
    Connection conn = null;
    Statement st = null;
    try {
        conn = _dataSource.getConnection();
        initializeTableInfo(conn);
        st = conn.createStatement();
        final Set<Entry<String, String>> entrySet = tableSequenceMap.entrySet();
        for (Entry<String, String> entry : entrySet) {
            // clear elements that are also used in the exception message
            tableName = null;
            sequenceName = null;
            tableInfo = null;
            pkInfo = null;
            tableSqlName = null;
            actualValue = null;
            tableName = entry.getKey();
            sequenceName = entry.getValue();
            assertValidSequence(sequenceName, tableName);
            tableInfo = findTableInfo(conn, tableName);
            pkInfo = findPrimaryKeyInfo(conn, tableInfo);
            final List<String> pkList = pkInfo.getPrimaryKeyList();
            if (pkList.size() != 1) {
                // incrementing is supported only for single-column primary keys
                skippedMap.put(tableName, pkList);
                continue;
            }
            final String primaryKeyColumnName = pkList.get(0);
            tableSqlName = tableInfo.getTableSqlName();
            final Integer count = selectCount(st, tableSqlName);
            if (count == null || count == 0) {
                // It is not necessary to increment because the table has no data.
                continue;
            }
            actualValue = selectDataMax(st, tableInfo, primaryKeyColumnName);
            if (actualValue == null) {
                // It is not necessary to increment because the table has no data.
                continue;
            }
            callSequenceLoop(st, sequenceName, actualValue);
        }
    } catch (SQLException e) {
        throwIncrementSequenceToDataMaxFailureException(tableName, sequenceName, tableInfo, pkInfo, tableSqlName, actualValue, DfJDBCException.voice(e));
    } finally {
        if (st != null) {
            try {
                st.close();
            } catch (SQLException ignored) {
                _log.info("Statement.close() threw the exception!", ignored);
            }
        }
        if (conn != null) {
            try {
                conn.close();
            } catch (SQLException ignored) {
                _log.info("Connection.close() threw the exception!", ignored);
            }
        }
    }
    // report the tables that could not be handled because of multiple-column primary keys
    if (!skippedMap.isEmpty()) {
        _log.info("*Unsupported incrementing sequences(multiple-PK):");
        final Set<Entry<String, List<String>>> skippedEntrySet = skippedMap.entrySet();
        for (Entry<String, List<String>> skippedEntry : skippedEntrySet) {
            final String skippedTableName = skippedEntry.getKey();
            final List<String> pkList = skippedEntry.getValue();
            _log.info("    " + skippedTableName + ": pk=" + pkList);
        }
    }
}
Also used : DfPrimaryKeyMeta(org.dbflute.logic.jdbc.metadata.info.DfPrimaryKeyMeta) SQLException(java.sql.SQLException) Statement(java.sql.Statement) Connection(java.sql.Connection) Entry(java.util.Map.Entry) List(java.util.List) DfTableMeta(org.dbflute.logic.jdbc.metadata.info.DfTableMeta)

Example 4 with DfPrimaryKeyMeta

use of org.dbflute.logic.jdbc.metadata.info.DfPrimaryKeyMeta in project dbflute-core by dbflute.

From the class DfSequenceHandlerPostgreSQL, method handleSerialTypeSequence:

/**
 * Increment serial-type sequences up to the max value of their table data. (for PostgreSQL)
 * A serial column is detected as a single-column auto-increment primary key whose
 * default value expression starts with "nextval('"; the sequence name is parsed
 * out of that default expression.
 * @param tableSequenceMap The map of table's explicitly-defined sequences; its values
 * are treated as already handled and are skipped here. (NotNull)
 */
protected void handleSerialTypeSequence(Map<String, String> tableSequenceMap) {
    final StringSet doneSequenceSet = StringSet.createAsFlexibleOrdered();
    doneSequenceSet.addAll(tableSequenceMap.values());
    // declared outside the try block so the catch clause can report
    // which element was being processed when a SQLException occurs
    DfTableMeta tableInfo = null;
    DfPrimaryKeyMeta pkInfo = null;
    String sequenceName = null;
    String tableSqlName = null;
    Integer actualValue = null;
    String sequenceSqlName = null;
    Connection conn = null;
    Statement st = null;
    try {
        conn = _dataSource.getConnection();
        st = conn.createStatement();
        final DatabaseMetaData metaData = conn.getMetaData();
        final DfColumnExtractor columnHandler = new DfColumnExtractor();
        final DfAutoIncrementExtractor autoIncrementHandler = new DfAutoIncrementExtractor();
        _log.info("...Incrementing serial type sequence");
        final Set<Entry<String, DfTableMeta>> entrySet = _tableMap.entrySet();
        for (Entry<String, DfTableMeta> entry : entrySet) {
            // clear elements that are also used in the exception message
            tableInfo = null;
            pkInfo = null;
            sequenceName = null;
            tableSqlName = null;
            actualValue = null;
            sequenceSqlName = null;
            tableInfo = entry.getValue();
            pkInfo = _uniqueKeyHandler.getPrimaryKey(metaData, tableInfo);
            final List<String> pkList = pkInfo.getPrimaryKeyList();
            if (pkList.size() != 1) {
                // serial columns are handled only as single-column primary keys
                continue;
            }
            final String primaryKeyColumnName = pkList.get(0);
            if (!autoIncrementHandler.isAutoIncrementColumn(conn, tableInfo, primaryKeyColumnName)) {
                continue;
            }
            final Map<String, DfColumnMeta> columnMap = columnHandler.getColumnMap(metaData, tableInfo);
            final DfColumnMeta columnInfo = columnMap.get(primaryKeyColumnName);
            if (columnInfo == null) {
                continue;
            }
            final String defaultValue = columnInfo.getDefaultValue();
            if (defaultValue == null) {
                continue;
            }
            // parse the sequence name out of the nextval default expression,
            // i.e. the text between "nextval('" and the next single quote
            final String prefix = "nextval('";
            if (!defaultValue.startsWith(prefix)) {
                continue;
            }
            final String excludedPrefixString = defaultValue.substring(prefix.length());
            final int endIndex = excludedPrefixString.indexOf("'");
            if (endIndex < 0) {
                continue;
            }
            sequenceName = excludedPrefixString.substring(0, endIndex);
            if (doneSequenceSet.contains(sequenceName)) {
                // already done (handled via tableSequenceMap)
                continue;
            }
            tableSqlName = tableInfo.getTableSqlName();
            final Integer count = selectCount(st, tableSqlName);
            if (count == null || count == 0) {
                // It is not necessary to increment because the table has no data.
                continue;
            }
            actualValue = selectDataMax(st, tableInfo, primaryKeyColumnName);
            if (actualValue == null) {
                // It is not necessary to increment because the table has no data.
                continue;
            }
            // used as-is without schema qualification,
            // because sequence names of other schemas have already been qualified
            // sequenceSqlName = tableInfo.getUnifiedSchema().buildSqlName(sequenceName);
            sequenceSqlName = sequenceName;
            callSequenceLoop(st, sequenceSqlName, actualValue);
        }
    } catch (SQLException e) {
        throwSerialTypeSequenceHandlingFailureException(tableInfo, pkInfo, sequenceName, tableSqlName, actualValue, sequenceSqlName, e);
    } finally {
        if (st != null) {
            try {
                st.close();
            } catch (SQLException ignored) {
                _log.info("Statement.close() threw the exception!", ignored);
            }
        }
        if (conn != null) {
            try {
                conn.close();
            } catch (SQLException ignored) {
                _log.info("Connection.close() threw the exception!", ignored);
            }
        }
    }
}
Also used : DfPrimaryKeyMeta(org.dbflute.logic.jdbc.metadata.info.DfPrimaryKeyMeta) DfColumnMeta(org.dbflute.logic.jdbc.metadata.info.DfColumnMeta) SQLException(java.sql.SQLException) Statement(java.sql.Statement) DfAutoIncrementExtractor(org.dbflute.logic.jdbc.metadata.basic.DfAutoIncrementExtractor) Connection(java.sql.Connection) DatabaseMetaData(java.sql.DatabaseMetaData) Entry(java.util.Map.Entry) DfColumnExtractor(org.dbflute.logic.jdbc.metadata.basic.DfColumnExtractor) StringSet(org.dbflute.helper.StringSet) DfTableMeta(org.dbflute.logic.jdbc.metadata.info.DfTableMeta)

Example 5 with DfPrimaryKeyMeta

use of org.dbflute.logic.jdbc.metadata.info.DfPrimaryKeyMeta in project dbflute-core by dbflute.

From the class DfUniqueKeyExtractor, method doGetPrimaryKey:

/**
 * Extract the primary-key meta data of the specified table, ordered by key position.
 * Returns an empty meta when extraction is unsupported (except MS Access, which has
 * its own process) or when no meta data result is available.
 * @param metaData The JDBC database meta data. (NotNull)
 * @param unifiedSchema The unified schema that contains the table. (NotNull)
 * @param tableName The name of the target table. (NotNull)
 * @param retry Is this a retry of the extraction?
 * @return The meta information of the table's primary key. (NotNull)
 * @throws SQLException When the meta data extraction fails.
 */
protected DfPrimaryKeyMeta doGetPrimaryKey(DatabaseMetaData metaData, UnifiedSchema unifiedSchema, String tableName, boolean retry) throws SQLException {
    final DfPrimaryKeyMeta pkMeta = new DfPrimaryKeyMeta();
    if (isPrimaryKeyExtractingUnsupported()) {
        return isDatabaseMsAccess() ? processMSAccess(metaData, unifiedSchema, tableName, pkMeta) : pkMeta;
    }
    ResultSet rs = null;
    try {
        rs = extractPrimaryKeyMetaData(metaData, unifiedSchema, tableName, retry);
        if (rs == null) {
            return pkMeta;
        }
        // sort by position via the tree map because e.g. MySQL might return
        // (actually returned) an unordered list
        final TreeMap<Integer, String> columnNameByPosition = new TreeMap<Integer, String>();
        final Map<Integer, String> pkNameByPosition = new HashMap<Integer, String>();
        // getting ordinal was implemented recently (1.0.5G) so keep a fallback
        // position counter just in case the position column is not a number
        int fallbackPosition = 100001;
        while (rs.next()) {
            final String metaTableName = rs.getString(3);
            if (checkMetaTableDiffIfNeeds(tableName, metaTableName)) {
                continue;
            }
            final String columnName = rs.getString(4);
            final String positionExp = rs.getString(5);
            Integer pkPosition;
            try {
                pkPosition = Integer.valueOf(positionExp);
            } catch (NumberFormatException continued) {
                warnPrimaryKeyPositionNotNumberException(tableName, columnName, positionExp);
                pkPosition = fallbackPosition++; // just in case
            }
            final String pkName = rs.getString(6);
            columnNameByPosition.put(pkPosition, columnName);
            pkNameByPosition.put(pkPosition, pkName);
        }
        // register in position order
        for (Entry<Integer, String> entry : columnNameByPosition.entrySet()) {
            final Integer pkPosition = entry.getKey();
            final String columnName = entry.getValue();
            pkMeta.addPrimaryKey(columnName, pkNameByPosition.get(pkPosition), pkPosition);
        }
    } finally {
        if (rs != null) {
            rs.close();
        }
    }
    return pkMeta;
}
Also used : DfPrimaryKeyMeta(org.dbflute.logic.jdbc.metadata.info.DfPrimaryKeyMeta) HashMap(java.util.HashMap) ResultSet(java.sql.ResultSet) TreeMap(java.util.TreeMap)

Aggregations

DfPrimaryKeyMeta (org.dbflute.logic.jdbc.metadata.info.DfPrimaryKeyMeta)11 SQLException (java.sql.SQLException)5 Statement (java.sql.Statement)5 Connection (java.sql.Connection)4 Entry (java.util.Map.Entry)4 DfTableMeta (org.dbflute.logic.jdbc.metadata.info.DfTableMeta)4 DatabaseMetaData (java.sql.DatabaseMetaData)3 LinkedHashMap (java.util.LinkedHashMap)3 Map (java.util.Map)3 StringKeyMap (org.dbflute.helper.StringKeyMap)3 DfColumnMeta (org.dbflute.logic.jdbc.metadata.info.DfColumnMeta)3 ResultSet (java.sql.ResultSet)2 List (java.util.List)2 TreeMap (java.util.TreeMap)2 StringSet (org.dbflute.helper.StringSet)2 DfAutoIncrementExtractor (org.dbflute.logic.jdbc.metadata.basic.DfAutoIncrementExtractor)2 DfColumnExtractor (org.dbflute.logic.jdbc.metadata.basic.DfColumnExtractor)2 HashMap (java.util.HashMap)1 Constraint (org.apache.torque.engine.database.model.Constraint)1 TypeMap (org.apache.torque.engine.database.model.TypeMap)1