Search in sources :

Example 16 with DfColumnMeta

use of org.dbflute.logic.jdbc.metadata.info.DfColumnMeta in project dbflute-core by dbflute.

In class DfRepsSequenceHandlerPostgreSQL, the method handleSerialTypeSequence:

/**
 * Advances PostgreSQL serial-type (auto-increment) sequences so they are not behind
 * the current data, for tables whose sequence is not already covered by the given map.
 * <p>
 * For each table in {@code _tableMap}: if the table has exactly one primary-key column,
 * that column is auto-increment, and its default value is of the form
 * {@code nextval('sequenceName'...)}, the sequence name is extracted and — unless it was
 * already handled or the table is empty — the sequence is incremented up to the current
 * maximum value of the primary-key column via {@code callSequenceLoop()}.
 *
 * @param tableSequenceMap map of table names to sequence names that are already handled
 *        elsewhere; its values are treated as "done" and skipped here. (NotNull)
 */
protected void handleSerialTypeSequence(Map<String, String> tableSequenceMap) {
    final StringSet doneSequenceSet = StringSet.createAsFlexibleOrdered();
    doneSequenceSet.addAll(tableSequenceMap.values());
    // these locals are declared outside the loop so the catch block can report
    // the state of the iteration that failed in the exception message
    DfTableMeta tableInfo = null;
    DfPrimaryKeyMeta pkInfo = null;
    String sequenceName = null;
    String tableSqlName = null;
    Integer actualValue = null;
    String sequenceSqlName = null;
    Connection conn = null;
    Statement st = null;
    try {
        conn = _dataSource.getConnection();
        st = conn.createStatement();
        final DatabaseMetaData metaData = conn.getMetaData();
        final DfColumnExtractor columnHandler = new DfColumnExtractor();
        final DfAutoIncrementExtractor autoIncrementHandler = new DfAutoIncrementExtractor();
        _log.info("...Incrementing serial type sequence");
        final Set<Entry<String, DfTableMeta>> entrySet = _tableMap.entrySet();
        for (Entry<String, DfTableMeta> entry : entrySet) {
            // clear the elements that are also used in the exception message,
            // so a failure reports only the state of the current table
            tableInfo = null;
            pkInfo = null;
            sequenceName = null;
            tableSqlName = null;
            actualValue = null;
            sequenceSqlName = null;
            tableInfo = entry.getValue();
            pkInfo = _uniqueKeyHandler.getPrimaryKey(metaData, tableInfo);
            final List<String> pkList = pkInfo.getPrimaryKeyList();
            if (pkList.size() != 1) {
                // serial-type handling targets single-column primary keys only
                continue;
            }
            final String primaryKeyColumnName = pkList.get(0);
            if (!autoIncrementHandler.isAutoIncrementColumn(conn, tableInfo, primaryKeyColumnName)) {
                continue;
            }
            final Map<String, DfColumnMeta> columnMap = columnHandler.getColumnMap(metaData, tableInfo);
            final DfColumnMeta columnInfo = columnMap.get(primaryKeyColumnName);
            if (columnInfo == null) {
                continue;
            }
            final String defaultValue = columnInfo.getDefaultValue();
            if (defaultValue == null) {
                continue;
            }
            // a serial column's default looks like: nextval('sequenceName'::regclass)
            final String prefix = "nextval('";
            if (!defaultValue.startsWith(prefix)) {
                continue;
            }
            final String excludedPrefixString = defaultValue.substring(prefix.length());
            final int endIndex = excludedPrefixString.indexOf("'");
            if (endIndex < 0) {
                // unexpected default-value format; nothing to extract
                continue;
            }
            sequenceName = excludedPrefixString.substring(0, endIndex);
            if (doneSequenceSet.contains(sequenceName)) {
                // already done
                continue;
            }
            tableSqlName = tableInfo.getTableSqlName();
            final Integer count = selectCount(st, tableSqlName);
            if (count == null || count == 0) {
                // It is not necessary to increment because the table has no data.
                continue;
            }
            actualValue = selectDataMax(st, tableInfo, primaryKeyColumnName);
            if (actualValue == null) {
                // It is not necessary to increment because the table has no data.
                continue;
            }
            // because sequence names of other schemas have already been qualified
            // sequenceSqlName = tableInfo.getUnifiedSchema().buildSqlName(sequenceName);
            sequenceSqlName = sequenceName;
            callSequenceLoop(st, sequenceSqlName, actualValue);
        }
    } catch (SQLException e) {
        throwSerialTypeSequenceHandlingFailureException(tableInfo, pkInfo, sequenceName, tableSqlName, actualValue, sequenceSqlName, e);
    } finally {
        if (st != null) {
            try {
                st.close();
            } catch (SQLException ignored) {
                _log.info("Statement.close() threw the exception!", ignored);
            }
        }
        if (conn != null) {
            try {
                conn.close();
            } catch (SQLException ignored) {
                _log.info("Connection.close() threw the exception!", ignored);
            }
        }
    }
}
Also used : DfPrimaryKeyMeta(org.dbflute.logic.jdbc.metadata.info.DfPrimaryKeyMeta) DfColumnMeta(org.dbflute.logic.jdbc.metadata.info.DfColumnMeta) SQLException(java.sql.SQLException) Statement(java.sql.Statement) DfAutoIncrementExtractor(org.dbflute.logic.jdbc.metadata.basic.DfAutoIncrementExtractor) Connection(java.sql.Connection) DatabaseMetaData(java.sql.DatabaseMetaData) Entry(java.util.Map.Entry) DfColumnExtractor(org.dbflute.logic.jdbc.metadata.basic.DfColumnExtractor) StringSet(org.dbflute.helper.StringSet) DfTableMeta(org.dbflute.logic.jdbc.metadata.info.DfTableMeta)

Example 17 with DfColumnMeta

use of org.dbflute.logic.jdbc.metadata.info.DfColumnMeta in project dbflute-core by dbflute.

In class DfOutsideSqlAnalyzer, the method execSQL:

// ===================================================================================
// Execution
// =========
/**
 * Executes (or analyzes) one outside-SQL statement for the sql2entity task.
 * <p>
 * If the SQL is an entity-making target, it is actually executed and the result-set
 * metadata is turned into a customize-entity definition. If it is a parameter-bean
 * making target, a parameter bean is processed (possibly without executing the SQL,
 * in which case an informational message is logged). The good-SQL counter is
 * incremented at most once per call.
 *
 * @param sql the outside-SQL statement to process. (NotNull)
 */
@Override
protected void execSQL(String sql) {
    checkRequiredSqlComment(sql);
    ResultSet rs = null;
    try {
        DfCustomizeEntityInfo customizeEntityInfo = null;
        // guard so that a SQL targeted for both entity and parameter-bean
        // making is counted as "good" only once
        boolean alreadyIncrementGoodSqlCount = false;
        if (isTargetEntityMakingSql(sql)) {
            {
                final String executedSql = buildExecutedSql(sql);
                checkStatement(executedSql);
                rs = _currentStatement.executeQuery(executedSql);
            }
            _goodSqlCount++;
            alreadyIncrementGoodSqlCount = true;
            // for Customize Entity
            final Map<String, DfColumnMeta> columnMetaMap = extractColumnMetaMap(sql, rs);
            customizeEntityInfo = processCustomizeEntity(sql, columnMetaMap);
        }
        if (isTargetParameterBeanMakingSql(sql)) {
            if (customizeEntityInfo == null) {
                // entity making did not run, so the SQL itself was never executed
                _log.info("*Only parameter-bean is created: the SQL was not executed.");
            }
            if (!alreadyIncrementGoodSqlCount) {
                _goodSqlCount++;
            }
            // for Parameter Bean
            processParameterBean(sql, customizeEntityInfo);
        }
    } catch (SQLException e) {
        if (_runInfo.isErrorContinue()) {
            // error-continue mode: record the failure and keep going
            _log.warn("Failed to execute: " + sql, e);
            _sql2entityMeta.addExceptionInfo(_sqlFile.getName(), e.getMessage() + ln() + sql);
        } else {
            throwSQLFailureException(sql, e);
        }
    } finally {
        if (rs != null) {
            try {
                rs.close();
            } catch (SQLException ignored) {
                _log.warn("Ignored exception: " + ignored.getMessage());
            }
        }
    }
}
Also used : DfCustomizeEntityInfo(org.dbflute.logic.sql2entity.cmentity.DfCustomizeEntityInfo) DfColumnMeta(org.dbflute.logic.jdbc.metadata.info.DfColumnMeta) SQLException(java.sql.SQLException) ResultSet(java.sql.ResultSet)

Example 18 with DfColumnMeta

use of org.dbflute.logic.jdbc.metadata.info.DfColumnMeta in project dbflute-core by dbflute.

In class DfSchemaXmlSerializer, the method helpColumnDatetimePrecision:

/**
 * Applies datetime precision values to the given columns, looked up from the
 * schema-wide precision map ({@code _datetimePrecisionAllMap}) by the table's
 * unified schema and table name. Does nothing when no precision info is available.
 *
 * @param tableMeta the meta of the table the columns belong to. (NotNull)
 * @param columnList the columns to accept datetime precision. (NotNull)
 */
protected void helpColumnDatetimePrecision(DfTableMeta tableMeta, List<DfColumnMeta> columnList) {
    if (_datetimePrecisionAllMap == null) {
        return; // no precision info extracted for this schema at all
    }
    final String tableName = tableMeta.getTableName();
    final Map<String, Map<String, Integer>> schemaTableMap = _datetimePrecisionAllMap.get(tableMeta.getUnifiedSchema());
    if (schemaTableMap == null) {
        return; // just in case: schema not found
    }
    final Map<String, Integer> precisionByColumn = schemaTableMap.get(tableName);
    if (precisionByColumn == null) {
        return; // just in case: table not found
    }
    for (DfColumnMeta columnMeta : columnList) {
        columnMeta.acceptDatetimePrecision(precisionByColumn);
    }
}
Also used : DfColumnMeta(org.dbflute.logic.jdbc.metadata.info.DfColumnMeta) Map(java.util.Map) TypeMap(org.apache.torque.engine.database.model.TypeMap) LinkedHashMap(java.util.LinkedHashMap) TreeMap(java.util.TreeMap) StringKeyMap(org.dbflute.helper.StringKeyMap)

Example 19 with DfColumnMeta

use of org.dbflute.logic.jdbc.metadata.info.DfColumnMeta in project dbflute-core by dbflute.

In class DfSynonymExtractorOracle, the method getSynonymColumns:

// -----------------------------------------------------
// Supplementary Column Info
// -------------------------
/**
 * Extracts column metadata for an Oracle synonym by selecting zero rows from it
 * ({@code where 0=1}) and reading the result-set metadata.
 *
 * @param conn the connection to the database. (NotNull)
 * @param synonymOwner the unified schema that owns the synonym. (NotNull)
 * @param synonymName the pure name of the synonym. (NotNull)
 * @return the list of column meta built from the result-set metadata. (NotNull)
 * @throws SQLException when the metadata query fails.
 */
protected List<DfColumnMeta> getSynonymColumns(Connection conn, UnifiedSchema synonymOwner, String synonymName) throws SQLException {
    final List<DfColumnMeta> columnList = new ArrayList<DfColumnMeta>();
    Statement st = null;
    ResultSet rs = null;
    try {
        st = conn.createStatement();
        final String synonymSqlName = synonymOwner.buildSchemaQualifiedName(synonymName);
        // zero-row select: we only need the result-set metadata, not data
        final String sql = "select * from " + synonymSqlName + " where 0=1";
        rs = st.executeQuery(sql);
        final ResultSetMetaData metaData = rs.getMetaData();
        int count = metaData.getColumnCount();
        for (int i = 0; i < count; i++) {
            int index = i + 1; // JDBC metadata indexes are 1-origin
            String columnName = metaData.getColumnName(index);
            int columnType = metaData.getColumnType(index);
            String columnTypeName = metaData.getColumnTypeName(index);
            int precision = metaData.getPrecision(index);
            int scale = metaData.getScale(index);
            int nullableType = metaData.isNullable(index);
            DfColumnMeta column = new DfColumnMeta();
            column.setColumnName(columnName);
            column.setJdbcDefValue(columnType);
            column.setDbTypeName(columnTypeName);
            column.setColumnSize(precision);
            column.setDecimalDigits(scale);
            column.setRequired(nullableType == ResultSetMetaData.columnNoNulls);
            columnList.add(column);
        }
        return columnList;
    } finally {
        // close the ResultSet BEFORE its Statement: Statement.close() invalidates
        // its ResultSet, so the original st-first order made rs.close() meaningless
        if (rs != null) {
            try {
                rs.close();
            } catch (SQLException ignored) {
                // best-effort close; extraction result is already built
            }
        }
        if (st != null) {
            try {
                st.close();
            } catch (SQLException ignored) {
                // best-effort close; extraction result is already built
            }
        }
    }
}
Also used : ResultSetMetaData(java.sql.ResultSetMetaData) DfColumnMeta(org.dbflute.logic.jdbc.metadata.info.DfColumnMeta) SQLException(java.sql.SQLException) Statement(java.sql.Statement) ArrayList(java.util.ArrayList) ResultSet(java.sql.ResultSet)

Example 20 with DfColumnMeta

use of org.dbflute.logic.jdbc.metadata.info.DfColumnMeta in project dbflute-core by dbflute.

In class DfAbstractDataWriterTest, the method test_processBoolean:

// -----------------------------------------------------
// Boolean
// -------
/**
 * Verifies that processBoolean() returns false for a column whose bind type is
 * forced to BigDecimal (i.e. a NUMERIC column is not treated as boolean), via XlsData.
 */
public void test_processBoolean() throws Exception {
    // via XlsData
    // ## Arrange ##
    final DfXlsDataHandlingWriter writer = new DfXlsDataHandlingWriter(null, null) {

        @Override
        protected Class<?> getBindType(String tableName, DfColumnMeta columnMetaInfo) {
            // force a non-boolean bind type for every column
            return BigDecimal.class;
        }
    };
    final DfColumnMeta fooColumnMeta = new DfColumnMeta();
    fooColumnMeta.setColumnName("foo");
    fooColumnMeta.setColumnSize(3);
    fooColumnMeta.setJdbcDefValue(Types.NUMERIC);
    final Map<String, DfColumnMeta> columnMetaInfoMap = StringKeyMap.createAsCaseInsensitive();
    columnMetaInfoMap.put("foo", fooColumnMeta);
    // ## Act ##
    final boolean actual = writer.processBoolean("tbl", "foo", "0", null, null, 0, columnMetaInfoMap, 3);
    // ## Assert ##
    log("actual=" + actual);
    assertFalse(actual);
}
Also used : DfColumnMeta(org.dbflute.logic.jdbc.metadata.info.DfColumnMeta) DfXlsDataHandlingWriter(org.dbflute.logic.replaceschema.loaddata.xls.DfXlsDataHandlingWriter) BigDecimal(java.math.BigDecimal)

Aggregations

DfColumnMeta (org.dbflute.logic.jdbc.metadata.info.DfColumnMeta)71 SQLException (java.sql.SQLException)16 Connection (java.sql.Connection)10 LinkedHashMap (java.util.LinkedHashMap)9 Map (java.util.Map)8 File (java.io.File)7 StringKeyMap (org.dbflute.helper.StringKeyMap)7 ResultSet (java.sql.ResultSet)5 Statement (java.sql.Statement)5 ArrayList (java.util.ArrayList)5 DfJDBCException (org.dbflute.exception.DfJDBCException)5 FileInputStream (java.io.FileInputStream)4 BigDecimal (java.math.BigDecimal)4 DatabaseMetaData (java.sql.DatabaseMetaData)4 PreparedStatement (java.sql.PreparedStatement)4 Timestamp (java.sql.Timestamp)4 HashMap (java.util.HashMap)4 StringSet (org.dbflute.helper.StringSet)4 DfDataRow (org.dbflute.helper.dataset.DfDataRow)4 DfDataTable (org.dbflute.helper.dataset.DfDataTable)4