Example 51 with DfColumnMeta

Use of org.dbflute.logic.jdbc.metadata.info.DfColumnMeta in project dbflute-core by dbflute.

In class DfColumnExtractor, the method getColumnMap:

public Map<String, DfColumnMeta> getColumnMap(DatabaseMetaData metaData, DfTableMeta tableInfo) throws SQLException {
    final List<DfColumnMeta> columnList = getColumnList(metaData, tableInfo);
    final Map<String, DfColumnMeta> map = new LinkedHashMap<String, DfColumnMeta>();
    for (DfColumnMeta columnInfo : columnList) {
        map.put(columnInfo.getColumnName(), columnInfo);
    }
    return map;
}
Also used : DfColumnMeta(org.dbflute.logic.jdbc.metadata.info.DfColumnMeta), LinkedHashMap(java.util.LinkedHashMap)
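
Because the map is a LinkedHashMap keyed by column name, callers get both name-based lookup and JDBC definition order. A minimal, hypothetical usage sketch (dbflute imports omitted; the no-arg DfColumnExtractor constructor and the column name are assumptions, not part of the example above):

import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.SQLException;
import java.util.Map;

public class ColumnMapSketch {
    // Hypothetical caller: fetch the ordered column map and look one column up by name.
    public DfColumnMeta findColumn(Connection conn, DfTableMeta tableMeta) throws SQLException {
        final DatabaseMetaData metaData = conn.getMetaData(); // standard JDBC
        final DfColumnExtractor extractor = new DfColumnExtractor(); // no-arg constructor assumed
        final Map<String, DfColumnMeta> columnMap = extractor.getColumnMap(metaData, tableMeta);
        return columnMap.get("MEMBER_NAME"); // illustrative column name; insertion order is preserved
    }
}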

Example 52 with DfColumnMeta

Use of org.dbflute.logic.jdbc.metadata.info.DfColumnMeta in project dbflute-core by dbflute.

In class DfXlsDataHandlingWriter, the method setupDefaultValue:

protected void setupDefaultValue(String dataDirectory, final DfDataSet dataSet) {
    final Map<String, String> defaultValueMap = getDefaultValueMap(dataDirectory);
    for (int i = 0; i < dataSet.getTableSize(); i++) {
        final DfDataTable table = dataSet.getTable(i);
        final Set<String> defaultValueMapKeySet = defaultValueMap.keySet();
        final String tableName = table.getTableDbName();
        final Map<String, DfColumnMeta> metaMetaMap = getColumnMetaMap(tableName);
        for (String defaultTargetColumnName : defaultValueMapKeySet) {
            final String defaultValue = defaultValueMap.get(defaultTargetColumnName);
            if (metaMetaMap.containsKey(defaultTargetColumnName) && !table.hasColumn(defaultTargetColumnName)) {
                // values are resolved later so resolve type only here
                final DfDtsColumnType columnType;
                if (defaultValue.equalsIgnoreCase("sysdate")) {
                    columnType = DfDtsColumnTypes.TIMESTAMP;
                } else {
                    columnType = DfDtsColumnTypes.STRING;
                }
                table.addColumn(defaultTargetColumnName, columnType);
                for (int j = 0; j < table.getRowSize(); j++) {
                    final DfDataRow row = table.getRow(j);
                    // value is set later
                    row.addValue(defaultTargetColumnName, null);
                }
            }
        }
    }
}
Also used : DfColumnMeta(org.dbflute.logic.jdbc.metadata.info.DfColumnMeta), DfDtsColumnType(org.dbflute.helper.dataset.types.DfDtsColumnType), DfDataTable(org.dbflute.helper.dataset.DfDataTable), DfDataRow(org.dbflute.helper.dataset.DfDataRow)
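
The core pattern here is padding: any configured default-value column that exists in the database meta data but not in the xls sheet is appended to the table, and every existing row receives a null placeholder that is filled in later. A framework-free sketch of the same idea, with all names invented for illustration:

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;

public class DefaultColumnPadding {
    // Illustrative only: a table modeled as column-name -> cell values.
    public static void pad(Map<String, List<Object>> table, Map<String, String> defaultValueMap, int rowSize) {
        for (String columnName : defaultValueMap.keySet()) {
            if (!table.containsKey(columnName)) {
                // register the missing column and pad existing rows with null;
                // the actual default values are resolved in a later phase
                table.put(columnName, new ArrayList<>(Collections.nCopies(rowSize, (Object) null)));
            }
        }
    }
}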

Example 53 with DfColumnMeta

Use of org.dbflute.logic.jdbc.metadata.info.DfColumnMeta in project dbflute-core by dbflute.

In class DfXlsDataHandlingWriter, the method doWriteDataTable:

// -----------------------------------------------------
// DataTable
// ---------
protected int doWriteDataTable(DfXlsDataResource resource, File file, DfDataTable dataTable) {
    final String tableDbName = dataTable.getTableDbName();
    if (dataTable.getRowSize() == 0) {
        _log.info("*Not found row at the table: " + tableDbName);
        return 0;
    }
    final Map<String, DfColumnMeta> columnMetaMap = getColumnMetaMap(tableDbName);
    if (columnMetaMap.isEmpty()) {
        throwTableNotFoundException(file, tableDbName);
    }
    beforeHandlingTable(tableDbName, columnMetaMap);
    checkHeaderColumnIfNeeds(resource, file, dataTable, columnMetaMap);
    final List<String> columnNameList = extractColumnNameList(dataTable);
    final String dataDirectory = resource.getDataDirectory();
    final LoggingInsertType loggingInsertType = getLoggingInsertType(dataDirectory);
    final boolean suppressBatchUpdate = isMergedSuppressBatchUpdate(resource.getDataDirectory());
    Connection conn = null;
    PreparedStatement ps = null;
    String preparedSql = null;
    SQLException retryEx = null;
    DfDataRow retryDataRow = null;
    try {
        conn = _dataSource.getConnection();
        int loadedRowCount = 0;
        final int rowSize = dataTable.getRowSize();
        boolean existsEmptyRow = false;
        for (int i = 0; i < rowSize; i++) {
            final DfDataRow dataRow = dataTable.getRow(i);
            if (ps == null) {
                final MyCreatedState myCreatedState = new MyCreatedState();
                preparedSql = myCreatedState.buildPreparedSql(dataRow);
                ps = conn.prepareStatement(preparedSql);
            }
            if (doWriteDataRow(resource, file, dataTable, dataRow, // basic resources
                    columnMetaMap, // meta data
                    conn, ps, // JDBC resources
                    loggingInsertType, suppressBatchUpdate)) { // option
                ++loadedRowCount;
                if (existsEmptyRow) {
                    final int emptyRowNumber = dataRow.getRowNumber() - 1;
                    throwXlsDataEmptyRowDataException(dataDirectory, file, dataTable, emptyRowNumber);
                }
            } else {
                existsEmptyRow = true;
            }
        }
        if (existsEmptyRow) {
            _log.info("...Skipping the terminal garbage row");
        }
        if (!suppressBatchUpdate) {
            boolean beginTransaction = false;
            boolean transactionClosed = false;
            try {
                // transaction to retry after
                conn.setAutoCommit(false);
                beginTransaction = true;
                ps.executeBatch();
                conn.commit();
                transactionClosed = true;
            } catch (SQLException e) {
                conn.rollback();
                transactionClosed = true;
                if (!(e instanceof BatchUpdateException)) {
                    throw e;
                }
                _log.info("...Retrying by suppressing batch update: " + tableDbName);
                final PreparedStatement retryPs = conn.prepareStatement(preparedSql);
                for (int i = 0; i < rowSize; i++) {
                    final DfDataRow dataRow = dataTable.getRow(i);
                    try {
                        doWriteDataRow(resource, file, dataTable, dataRow, // basic resources
                                columnMetaMap, // meta data
                                conn, retryPs, // JDBC resources
                                LoggingInsertType.NONE, true); // option (no logging and suppress batch update)
                    } catch (SQLException rowEx) {
                        retryEx = rowEx;
                        retryDataRow = dataRow;
                        break;
                    }
                }
                try {
                    retryPs.close();
                } catch (SQLException ignored) {
                }
                throw e;
            } finally {
                if (!transactionClosed) {
                    // for other exceptions
                    conn.rollback();
                }
                if (beginTransaction) {
                    conn.setAutoCommit(true);
                }
            }
        }
        noticeLoadedRowSize(tableDbName, loadedRowCount);
        checkImplicitClassification(file, tableDbName, columnNameList);
        return loadedRowCount;
    } catch (RuntimeException e) {
        handleWriteTableFailureException(dataDirectory, file, tableDbName, e);
        // unreachable
        return -1;
    } catch (SQLException e) {
        handleWriteTableSQLException(dataDirectory, file, dataTable, e, retryEx, retryDataRow, columnNameList);
        // unreachable
        return -1;
    } finally {
        closeResource(conn, ps);
        // process after (finally) handling table
        finallyHandlingTable(tableDbName, columnMetaMap);
    }
}
Also used : LoggingInsertType(org.dbflute.logic.replaceschema.loaddata.base.dataprop.DfLoadingControlProp.LoggingInsertType), DfColumnMeta(org.dbflute.logic.jdbc.metadata.info.DfColumnMeta), SQLException(java.sql.SQLException), Connection(java.sql.Connection), PreparedStatement(java.sql.PreparedStatement), DfDataRow(org.dbflute.helper.dataset.DfDataRow), BatchUpdateException(java.sql.BatchUpdateException)
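
The notable part of this method is its failure handling: the whole table is inserted as one JDBC batch inside a transaction, and if the batch fails with a BatchUpdateException, the rows are replayed one at a time against a fresh statement so the exact failing row can be reported. A condensed, plain-JDBC sketch of that pattern (the SQL and row shape are assumptions, not dbflute API):

import java.sql.BatchUpdateException;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.List;

public class BatchRetrySketch {
    // Inserts all rows in one batch; on batch failure, replays row by row
    // to pinpoint the offending row, then rethrows the original exception.
    public static void insertAll(Connection conn, String sql, List<Object[]> rows) throws SQLException {
        conn.setAutoCommit(false);
        try (PreparedStatement ps = conn.prepareStatement(sql)) {
            for (Object[] row : rows) {
                for (int i = 0; i < row.length; i++) {
                    ps.setObject(i + 1, row[i]);
                }
                ps.addBatch();
            }
            ps.executeBatch(); // throws BatchUpdateException on any row failure
            conn.commit();
        } catch (BatchUpdateException e) {
            conn.rollback();
            try (PreparedStatement retryPs = conn.prepareStatement(sql)) {
                for (Object[] row : rows) {
                    for (int i = 0; i < row.length; i++) {
                        retryPs.setObject(i + 1, row[i]);
                    }
                    retryPs.executeUpdate(); // the failing row throws here, identifying itself
                }
            } finally {
                conn.rollback(); // the replay is diagnostic only; discard its work
            }
            throw e; // report the original batch failure
        } finally {
            conn.setAutoCommit(true);
        }
    }
}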

Example 54 with DfColumnMeta

Use of org.dbflute.logic.jdbc.metadata.info.DfColumnMeta in project dbflute-core by dbflute.

In class DfAbsractDataWriter, the method processArray:

// -----------------------------------------------------
// ARRAY
// -----
protected boolean processArray(String tableName, String columnName, String value, PreparedStatement ps, int bindCount, Map<String, DfColumnMeta> columnInfoMap, int rowNumber) throws SQLException {
    if (value == null || value.trim().length() == 0) {
        // cannot be array
        return false;
    }
    final DfColumnMeta columnInfo = columnInfoMap.get(columnName);
    if (columnInfo != null) {
        if (getBasicProperties().isDatabasePostgreSQL()) {
            // rsMeta#getColumnTypeName() returns value starts with "_" if
            // rsMeta#getColumnType() returns Types.ARRAY in PostgreSQL.
            // e.g. UUID[] -> _uuid
            final int jdbcDefValue = columnInfo.getJdbcDefValue();
            final String dbTypeName = columnInfo.getDbTypeName();
            if (jdbcDefValue != Types.ARRAY || !dbTypeName.startsWith("_")) {
                return false;
            }
            value = filterArrayValue(value);
            ps.setObject(bindCount, value, Types.OTHER);
            return true;
        }
    }
    // unsupported when meta data is not found
    return false;
}
Also used : DfColumnMeta(org.dbflute.logic.jdbc.metadata.info.DfColumnMeta)
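
In other words: PostgreSQL reports an array column as Types.ARRAY with a type name prefixed by an underscore (UUID[] becomes _uuid), and the writer binds the filtered literal as Types.OTHER so the server itself parses the array syntax. A stand-alone sketch of that binding, with an assumed table and an assumed text[] column:

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Types;

public class PgArrayBindSketch {
    // Binds a PostgreSQL array literal as Types.OTHER; the server parses "{...}".
    public static void insertTags(Connection conn) throws SQLException {
        // MEMBER and TAG_LIST are illustrative names; TAG_LIST is assumed to be text[]
        try (PreparedStatement ps = conn.prepareStatement("insert into MEMBER (TAG_LIST) values (?)")) {
            ps.setObject(1, "{red,green,blue}", Types.OTHER);
            ps.executeUpdate();
        }
    }
}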

Example 55 with DfColumnMeta

Use of org.dbflute.logic.jdbc.metadata.info.DfColumnMeta in project dbflute-core by dbflute.

In class DfLoadingControlProp, the method resolveRelativeDate:

// ===================================================================================
// Date Adjustment
// ===============
public void resolveRelativeDate(String dataDirectory, String tableName, Map<String, Object> columnValueMap, Map<String, DfColumnMeta> columnMetaMap, Set<String> sysdateColumnSet, DfColumnBindTypeProvider bindTypeProvider, int rowNumber) {
    // was born at LUXA
    if (!hasDateAdjustment(dataDirectory, tableName)) {
        return;
    }
    final Map<String, Object> resolvedMap = new HashMap<String, Object>();
    for (Entry<String, Object> entry : columnValueMap.entrySet()) {
        final String columnName = entry.getKey();
        if (isSysdateColumn(sysdateColumnSet, columnName)) {
            // keep sysdate as default value
            continue;
        }
        final Object value = entry.getValue();
        if (value == null) {
            continue;
        }
        if (!isDateAdjustmentAllowedValueType(value)) {
            // out of target type
            continue;
        }
        if (!hasDateAdjustmentExp(dataDirectory, tableName, columnName)) {
            // no-adjustment column
            continue;
        }
        final DfColumnMeta columnMeta = columnMetaMap.get(columnName);
        final Class<?> bindType = bindTypeProvider.provide(tableName, columnMeta);
        if (bindType == null) {
            // unknown column type
            continue;
        }
        if (!isDateAdjustmentAllowedBindType(dataDirectory, tableName, columnName, bindType)) {
            // cannot be date
            continue;
        }
        final String dateExp = toAdjustedResourceDateExp(tableName, columnName, bindType, value);
        if (dateExp == null) {
            // e.g. wrong value
            continue;
        }
        final String adjusted = adjustDateIfNeeds(dataDirectory, tableName, columnName, dateExp, rowNumber);
        resolvedMap.put(columnName, convertAdjustedValueToDateType(tableName, columnName, bindType, adjusted));
    }
    for (Entry<String, Object> entry : resolvedMap.entrySet()) {
        // to keep original map instance
        columnValueMap.put(entry.getKey(), entry.getValue());
    }
}
Also used : DfColumnMeta(org.dbflute.logic.jdbc.metadata.info.DfColumnMeta), HashMap(java.util.HashMap), LinkedHashMap(java.util.LinkedHashMap)
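
One detail worth noting: adjusted values are collected in a separate resolvedMap and only copied into columnValueMap after the loop, which avoids structural modification of the map during entry-set iteration and keeps the caller's map instance intact. A minimal sketch of that write-back pattern (the adjust step is a stand-in):

import java.util.HashMap;
import java.util.Map;

public class WriteBackSketch {
    // Collects adjusted values in a side map, then writes them back, so the
    // original map instance is kept and never restructured during iteration.
    public static void adjustInPlace(Map<String, Object> columnValueMap) {
        final Map<String, Object> resolvedMap = new HashMap<>();
        for (Map.Entry<String, Object> entry : columnValueMap.entrySet()) {
            final Object adjusted = adjust(entry.getValue()); // stand-in for the date adjustment
            if (adjusted != null) {
                resolvedMap.put(entry.getKey(), adjusted);
            }
        }
        columnValueMap.putAll(resolvedMap); // keep the original map instance
    }

    private static Object adjust(Object value) {
        return value instanceof String ? ((String) value).trim() : null; // illustrative only
    }
}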

Aggregations

DfColumnMeta (org.dbflute.logic.jdbc.metadata.info.DfColumnMeta): 71
SQLException (java.sql.SQLException): 16
Connection (java.sql.Connection): 10
LinkedHashMap (java.util.LinkedHashMap): 9
Map (java.util.Map): 8
File (java.io.File): 7
StringKeyMap (org.dbflute.helper.StringKeyMap): 7
ResultSet (java.sql.ResultSet): 5
Statement (java.sql.Statement): 5
ArrayList (java.util.ArrayList): 5
DfJDBCException (org.dbflute.exception.DfJDBCException): 5
FileInputStream (java.io.FileInputStream): 4
BigDecimal (java.math.BigDecimal): 4
DatabaseMetaData (java.sql.DatabaseMetaData): 4
PreparedStatement (java.sql.PreparedStatement): 4
Timestamp (java.sql.Timestamp): 4
HashMap (java.util.HashMap): 4
StringSet (org.dbflute.helper.StringSet): 4
DfDataRow (org.dbflute.helper.dataset.DfDataRow): 4
DfDataTable (org.dbflute.helper.dataset.DfDataTable): 4