Example 21 with DfTableMeta

Use of org.dbflute.logic.jdbc.metadata.info.DfTableMeta in project dbflute-core by dbflute.

From class DfSchemaXmlSerializer, method doHelpTableDatePrecision:

protected void doHelpTableDatePrecision(List<DfTableMeta> tableList, UnifiedSchema unifiedSchema) {
    final DfDatetimePrecisionExtractor datetimePrecisionExtractor = createDatetimePrecisionExtractor(unifiedSchema);
    if (datetimePrecisionExtractor != null) {
        final Set<String> tableSet = new HashSet<String>();
        for (DfTableMeta table : tableList) {
            tableSet.add(table.getTableName());
        }
        try {
            if (_datetimePrecisionAllMap == null) {
                _datetimePrecisionAllMap = new LinkedHashMap<UnifiedSchema, Map<String, Map<String, Integer>>>();
            }
            final Map<String, Map<String, Integer>> datetimePrecisionMap = _datetimePrecisionAllMap.get(unifiedSchema);
            final Map<String, Map<String, Integer>> extractedMap = datetimePrecisionExtractor.extractDatetimePrecisionMap(tableSet);
            if (datetimePrecisionMap == null) {
                _datetimePrecisionAllMap.put(unifiedSchema, extractedMap);
            } else {
                // basically no way here since the schema is unique, but merge just in case
                datetimePrecisionMap.putAll(extractedMap);
            }
        } catch (RuntimeException continued) {
            _log.info("Failed to extract date-time precisions: extractor=" + datetimePrecisionExtractor, continued);
        }
    }
}
Also used : DfDatetimePrecisionExtractor(org.dbflute.logic.jdbc.metadata.supplement.DfDatetimePrecisionExtractor) UnifiedSchema(org.apache.torque.engine.database.model.UnifiedSchema) DfTableMeta(org.dbflute.logic.jdbc.metadata.info.DfTableMeta) Map(java.util.Map) TypeMap(org.apache.torque.engine.database.model.TypeMap) LinkedHashMap(java.util.LinkedHashMap) TreeMap(java.util.TreeMap) StringKeyMap(org.dbflute.helper.StringKeyMap) HashSet(java.util.HashSet) LinkedHashSet(java.util.LinkedHashSet)
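
The method above lazily builds a per-schema cache and merges newly extracted precision maps into it. Below is a minimal, standalone sketch of that cache-and-merge pattern; it uses plain String keys instead of UnifiedSchema, and SchemaPrecisionCache with its method names is illustrative, not a dbflute class.

import java.util.LinkedHashMap;
import java.util.Map;

public class SchemaPrecisionCache {

    // schema -> table -> column -> datetime precision
    private Map<String, Map<String, Map<String, Integer>>> _precisionAllMap;

    public void register(String schema, Map<String, Map<String, Integer>> extractedMap) {
        if (_precisionAllMap == null) { // lazy initialization, as in the serializer
            _precisionAllMap = new LinkedHashMap<>();
        }
        final Map<String, Map<String, Integer>> existing = _precisionAllMap.get(schema);
        if (existing == null) {
            _precisionAllMap.put(schema, extractedMap); // first (and normally only) entry per schema
        } else {
            existing.putAll(extractedMap); // defensive merge; schemas are expected to be unique
        }
    }

    public Integer find(String schema, String table, String column) {
        if (_precisionAllMap == null) {
            return null;
        }
        final Map<String, Map<String, Integer>> tableMap = _precisionAllMap.get(schema);
        if (tableMap == null) {
            return null;
        }
        final Map<String, Integer> columnMap = tableMap.get(table);
        return columnMap != null ? columnMap.get(column) : null;
    }
}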

Example 22 with DfTableMeta

Use of org.dbflute.logic.jdbc.metadata.info.DfTableMeta in project dbflute-core by dbflute.

From class DfSchemaXmlSerializer, method generateXML:

/**
 * Generates an XML database schema from JDBC meta data.
 * @throws SQLException When it fails to handle the SQL.
 */
protected void generateXML() throws SQLException {
    Connection conn = null;
    try {
        _log.info("...Getting DB connection");
        conn = _dataSource.getConnection();
        _log.info("...Getting DB meta data");
        final DatabaseMetaData metaData = conn.getMetaData();
        final List<DfTableMeta> tableList = getTableList(metaData);
        // initialize the map of generated tables; it is used by both synonym handling
        // and foreign key handling, so it must be built before those processes run
        _generatedTableMap = StringKeyMap.createAsCaseInsensitive();
        for (DfTableMeta meta : tableList) {
            _generatedTableMap.put(meta.getTableName(), meta);
        }
        // Load synonym information for merging additional meta data if needed.
        loadSupplementarySynonymInfoIfNeeds();
        // This must run after loading synonyms, which is why it is executed at this point.
        // The property 'outOfGenerateTarget' is set here.
        processSynonymTable(tableList);
        // The foreign key extractor needs to check whether each referenced table is a generate target.
        _foreignKeyExtractor.exceptForeignTableNotGenerated(_generatedTableMap);
        // Create database node. (The beginning of schema XML!)
        _databaseNode = _doc.createElement("database");
        // as main schema
        _databaseNode.setAttribute("name", _dataSource.getSchema().getPureSchema());
        processTable(conn, metaData, tableList);
        final boolean additionalTableExists = setupAddtionalTableIfNeeds();
        if (tableList.isEmpty() && !additionalTableExists) {
            throwSchemaEmptyException();
        }
        processSequence(conn, metaData);
        if (isProcedureMetaEnabled()) {
            processProcedure(conn, metaData);
        }
        if (isCraftMetaEnabled()) {
            processCraftMeta(tableList);
        }
        _doc.appendChild(_databaseNode);
    } finally {
        if (conn != null) {
            conn.close();
        }
    }
}
Also used : Connection(java.sql.Connection) DfTableMeta(org.dbflute.logic.jdbc.metadata.info.DfTableMeta) DatabaseMetaData(java.sql.DatabaseMetaData)
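
generateXML follows a strict acquire/use/close discipline on the JDBC connection. The sketch below shows the same flow with standard JDBC only, using try-with-resources instead of the explicit finally block; the DataSource parameter and the TableNameLister name are assumptions for illustration, not part of dbflute.

import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import javax.sql.DataSource;

public class TableNameLister {

    public List<String> listTableNames(DataSource dataSource, String schema) throws SQLException {
        final List<String> tableNames = new ArrayList<>();
        // try-with-resources gives the same guarantee as the finally { conn.close(); } above
        try (Connection conn = dataSource.getConnection()) {
            final DatabaseMetaData metaData = conn.getMetaData();
            try (ResultSet rs = metaData.getTables(null, schema, "%", new String[] { "TABLE" })) {
                while (rs.next()) {
                    tableNames.add(rs.getString("TABLE_NAME"));
                }
            }
        }
        return tableNames;
    }
}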

Example 23 with DfTableMeta

Use of org.dbflute.logic.jdbc.metadata.info.DfTableMeta in project dbflute-core by dbflute.

From class DfSchemaXmlSerializer, method assertDuplicateTable:

protected void assertDuplicateTable(List<DfTableMeta> tableList) {
    if (getLittleAdjustmentProperties().isSuppressOtherSchemaSameNameTableLimiter()) {
        return;
    }
    final Set<String> tableNameSet = StringSet.createAsCaseInsensitive();
    final Set<String> duplicateTableSet = StringSet.createAsCaseInsensitive();
    for (DfTableMeta info : tableList) {
        final String tableName = info.getTableName();
        if (tableNameSet.contains(tableName)) {
            duplicateTableSet.add(tableName);
        } else {
            tableNameSet.add(tableName);
        }
    }
    if (!duplicateTableSet.isEmpty()) {
        throwTableDuplicateException(duplicateTableSet);
    }
}
Also used : DfTableMeta(org.dbflute.logic.jdbc.metadata.info.DfTableMeta)
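
The duplicate check relies on a case-insensitive set. Below is a standalone sketch of the same idea using a TreeSet with String.CASE_INSENSITIVE_ORDER in place of dbflute's StringSet; the DuplicateTableChecker name is illustrative.

import java.util.List;
import java.util.Set;
import java.util.TreeSet;

public class DuplicateTableChecker {

    public Set<String> findDuplicates(List<String> tableNames) {
        final Set<String> seen = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
        final Set<String> duplicates = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
        for (String tableName : tableNames) {
            if (!seen.add(tableName)) { // add() returns false when the name was already present
                duplicates.add(tableName);
            }
        }
        return duplicates; // the caller can throw if this is non-empty, as the serializer does
    }
}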

Example 24 with DfTableMeta

Use of org.dbflute.logic.jdbc.metadata.info.DfTableMeta in project dbflute-core by dbflute.

From class DfSchemaXmlSerializer, method countDownRaceProcessTable:

protected void countDownRaceProcessTable(final List<DfTableMeta> tableList, int runnerCount, final DfFittingDataSource fittingDs) {
    final CountDownRace fireMan = new CountDownRace(runnerCount);
    fireMan.readyGo(new CountDownRaceExecution() {

        public void execute(CountDownRaceRunner resource) {
            final Object lockObj = resource.getLockObj();
            // for exception message
            String currentTable = null;
            Connection runnerConn = null;
            try {
                runnerConn = fittingDs.newConnection();
                prepareThreadDataSource(fittingDs, runnerConn);
                final DatabaseMetaData newMetaData = runnerConn.getMetaData();
                for (DfTableMeta tableMeta : tableList) {
                    final String tableKey = tableMeta.getTableFullQualifiedName();
                    synchronized (lockObj) {
                        if (_tableMetaDataSyncSet.contains(tableKey)) {
                            continue;
                        }
                        _tableMetaDataSyncSet.add(tableKey);
                    }
                    currentTable = tableKey;
                    doProcessTable(runnerConn, newMetaData, tableMeta);
                }
            } catch (SQLException e) {
                String msg = "Failed to get the table meta data: " + currentTable;
                throw new IllegalStateException(msg, e);
            } finally {
                if (runnerConn != null) {
                    try {
                        runnerConn.close();
                    } catch (SQLException e) {
                        // best-effort close; ignore failures here
                    }
                }
                DfDataSourceContext.clearDataSource();
            }
        }

        protected void prepareThreadDataSource(final DfFittingDataSource fittingDs, final Connection runnerConn) {
            if (DfDataSourceContext.isExistDataSource()) {
                return;
            }
            final Connection threadConn = new NotClosingConnectionWrapper(runnerConn);
            DfDataSourceContext.setDataSource(new HandlingDataSourceWrapper(fittingDs, new DataSourceHandler() {

                public Connection getConnection(DataSource dataSource) throws SQLException {
                    return threadConn;
                }
            }));
        }
    });
}
Also used : DataSourceHandler(org.dbflute.jdbc.DataSourceHandler) CountDownRaceRunner(org.dbflute.helper.thread.CountDownRaceRunner) SQLException(java.sql.SQLException) Connection(java.sql.Connection) DatabaseMetaData(java.sql.DatabaseMetaData) DfFittingDataSource(org.dbflute.helper.jdbc.connection.DfFittingDataSource) DataSource(javax.sql.DataSource) CountDownRace(org.dbflute.helper.thread.CountDownRace) CountDownRaceExecution(org.dbflute.helper.thread.CountDownRaceExecution) HandlingDataSourceWrapper(org.dbflute.jdbc.HandlingDataSourceWrapper) DfFittingDataSource(org.dbflute.helper.jdbc.connection.DfFittingDataSource) DfTableMeta(org.dbflute.logic.jdbc.metadata.info.DfTableMeta) NotClosingConnectionWrapper(org.dbflute.jdbc.NotClosingConnectionWrapper)
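
Each runner above claims tables from a shared, synchronized set so that no table is processed twice. The following is a minimal sketch of that claim-and-process pattern using a plain ExecutorService and a concurrent set instead of dbflute's CountDownRace; the processTable callback stands in for doProcessTable and is an assumption for illustration.

import java.util.List;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.function.Consumer;

public class ParallelTableProcessor {

    public void process(List<String> tableKeys, int runnerCount, Consumer<String> processTable)
            throws InterruptedException {
        final Set<String> claimed = ConcurrentHashMap.newKeySet(); // plays the role of _tableMetaDataSyncSet
        final ExecutorService service = Executors.newFixedThreadPool(runnerCount);
        try {
            for (int i = 0; i < runnerCount; i++) {
                service.submit(() -> {
                    for (String tableKey : tableKeys) {
                        if (!claimed.add(tableKey)) {
                            continue; // another runner already took this table
                        }
                        // the real code wraps SQLException into IllegalStateException here
                        processTable.accept(tableKey);
                    }
                });
            }
        } finally {
            service.shutdown();
            service.awaitTermination(1, TimeUnit.HOURS);
        }
    }
}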

Example 25 with DfTableMeta

Use of org.dbflute.logic.jdbc.metadata.info.DfTableMeta in project dbflute-core by dbflute.

From class DfSchemaXmlSerializer, method doHelpTableComment:

protected void doHelpTableComment(List<DfTableMeta> tableList, UnifiedSchema unifiedSchema) {
    final DfDbCommentExtractor dbCommentExtractor = createDbCommentExtractor(unifiedSchema);
    if (dbCommentExtractor != null) {
        final Set<String> tableSet = new HashSet<String>();
        for (DfTableMeta table : tableList) {
            tableSet.add(table.getTableName());
        }
        try {
            final Map<String, UserTabComments> tableCommentMap = dbCommentExtractor.extractTableComment(tableSet);
            for (DfTableMeta table : tableList) {
                table.acceptTableComment(tableCommentMap);
                // (synonym table comments are processed later, after loading synonyms)
            }
        } catch (RuntimeException ignored) {
            _log.info("Failed to extract table comments: extractor=" + dbCommentExtractor, ignored);
        }
        try {
            if (_columnCommentAllMap == null) {
                _columnCommentAllMap = new LinkedHashMap<UnifiedSchema, Map<String, Map<String, UserColComments>>>();
            }
            final Map<String, Map<String, UserColComments>> columnCommentMap = _columnCommentAllMap.get(unifiedSchema);
            final Map<String, Map<String, UserColComments>> extractedMap = dbCommentExtractor.extractColumnComment(tableSet);
            if (columnCommentMap == null) {
                _columnCommentAllMap.put(unifiedSchema, extractedMap);
            } else {
                // basically no way here since the schema is unique, but merge just in case
                columnCommentMap.putAll(extractedMap);
            }
        } catch (RuntimeException continued) {
            _log.info("Failed to extract column comments: extractor=" + dbCommentExtractor, continued);
        }
    }
}
Also used : UserColComments(org.dbflute.logic.jdbc.metadata.comment.DfDbCommentExtractor.UserColComments) UserTabComments(org.dbflute.logic.jdbc.metadata.comment.DfDbCommentExtractor.UserTabComments) DfDbCommentExtractor(org.dbflute.logic.jdbc.metadata.comment.DfDbCommentExtractor) UnifiedSchema(org.apache.torque.engine.database.model.UnifiedSchema) DfTableMeta(org.dbflute.logic.jdbc.metadata.info.DfTableMeta) Map(java.util.Map) TypeMap(org.apache.torque.engine.database.model.TypeMap) LinkedHashMap(java.util.LinkedHashMap) TreeMap(java.util.TreeMap) StringKeyMap(org.dbflute.helper.StringKeyMap) HashSet(java.util.HashSet) LinkedHashSet(java.util.LinkedHashSet)
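
Besides caching column comments per schema (the same merge pattern as in Example 21), the method applies the extracted table comments to each DfTableMeta. The sketch below shows only that application step with stand-in types; SimpleTableMeta, its Map<String, String> comment map, and acceptTableComment as written here are assumptions, not dbflute's actual DfTableMeta or UserTabComments API.

import java.util.List;
import java.util.Map;

public class TableCommentApplier {

    public static class SimpleTableMeta {
        private final String tableName;
        private String comment;

        public SimpleTableMeta(String tableName) {
            this.tableName = tableName;
        }

        public String getTableName() {
            return tableName;
        }

        public void acceptTableComment(Map<String, String> tableCommentMap) {
            final String found = tableCommentMap.get(tableName);
            if (found != null && !found.trim().isEmpty()) {
                comment = found; // keep only meaningful comments; the extracted map may contain blanks
            }
        }

        public String getComment() {
            return comment;
        }
    }

    public void apply(List<SimpleTableMeta> tableList, Map<String, String> tableCommentMap) {
        for (SimpleTableMeta table : tableList) {
            table.acceptTableComment(tableCommentMap);
        }
    }
}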

Aggregations

DfTableMeta (org.dbflute.logic.jdbc.metadata.info.DfTableMeta): 31
SQLException (java.sql.SQLException): 12
Connection (java.sql.Connection): 9
DatabaseMetaData (java.sql.DatabaseMetaData): 8
Statement (java.sql.Statement): 8
ArrayList (java.util.ArrayList): 7
UnifiedSchema (org.apache.torque.engine.database.model.UnifiedSchema): 6
Map (java.util.Map): 4
Entry (java.util.Map.Entry): 4
SQLFailureException (org.dbflute.exception.SQLFailureException): 4
DfPrimaryKeyMeta (org.dbflute.logic.jdbc.metadata.info.DfPrimaryKeyMeta): 4
StringKeyMap (org.dbflute.helper.StringKeyMap): 3
DfColumnMeta (org.dbflute.logic.jdbc.metadata.info.DfColumnMeta): 3
HashMap (java.util.HashMap): 2
HashSet (java.util.HashSet): 2
LinkedHashMap (java.util.LinkedHashMap): 2
LinkedHashSet (java.util.LinkedHashSet): 2
List (java.util.List): 2
TreeMap (java.util.TreeMap): 2
TypeMap (org.apache.torque.engine.database.model.TypeMap): 2