Search in sources:

Example 41 with DfColumnMeta

Use of org.dbflute.logic.jdbc.metadata.info.DfColumnMeta in project dbflute-core by dbflute.

From the class DfDelimiterDataWriterImpl, the method writeData:

// ===================================================================================
// Write
// =====
public void writeData(DfDelimiterDataResultInfo resultInfo) throws IOException {
    _log.info("/= = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = ");
    _log.info("writeData(" + _fileName + ")");
    _log.info("= = = = = = =/");
    FileInputStream fis = null;
    InputStreamReader ir = null;
    BufferedReader br = null;
    final String dataDirectory = Srl.substringLastFront(_fileName, "/");
    final LoggingInsertType loggingInsertType = getLoggingInsertType(dataDirectory);
    final String tableDbName = extractTableDbName();
    final Map<String, DfColumnMeta> columnMetaMap = getColumnMetaMap(tableDbName);
    if (columnMetaMap.isEmpty()) {
        throwTableNotFoundException(_fileName, tableDbName);
    }
    // process before handling table
    beforeHandlingTable(tableDbName, columnMetaMap);
    String lineString = null;
    String preContinueString = null;
    String executedSql = null;
    final List<String> columnNameList = new ArrayList<String>();
    final List<String> valueList = new ArrayList<String>();
    final boolean canBatchUpdate = !isMergedSuppressBatchUpdate(dataDirectory);
    final File dataFile = new File(_fileName);
    Connection conn = null;
    PreparedStatement ps = null;
    try {
        fis = new FileInputStream(dataFile);
        ir = new InputStreamReader(fis, _encoding);
        br = new BufferedReader(ir);
        FirstLineInfo firstLineInfo = null;
        int loopIndex = -1;
        int rowNumber = 0;
        int addedBatchSize = 0;
        while (true) {
            ++loopIndex;
            lineString = br.readLine();
            if (lineString == null) {
                break;
            }
            // /- - - - - - - - - - - - - - - - - - - - - -
            // initialize column definition from first line
            // - - - - - - - - - -/
            if (loopIndex == 0) {
                firstLineInfo = analyzeFirstLineInfo(_delimiter, lineString);
                setupColumnNameList(dataDirectory, dataFile, tableDbName, columnMetaMap, firstLineInfo, columnNameList);
                continue;
            }
            // /- - - - - - - - - - - - - - -
            // analyze values in line strings
            // - - - - - - - - - -/
            lineString = filterLineString(lineString);
            {
                if (preContinueString != null && !preContinueString.equals("")) {
                    lineString = preContinueString + "\n" + lineString;
                }
                final ValueLineInfo valueLineInfo = arrangeValueList(lineString, _delimiter);
                // empty string resolved later
                final List<String> ls = valueLineInfo.getValueList();
                if (valueLineInfo.isContinueNextLine()) {
                    preContinueString = ls.remove(ls.size() - 1);
                    valueList.addAll(ls);
                    continue;
                }
                valueList.addAll(ls);
            }
            // /- - - - - - - - - - - - - - - - - - - - - - -
            // check column count difference from first line
            // - - - - - - - - - -/
            if (isDifferentColumnValueCount(firstLineInfo, valueList)) {
                handleDifferentColumnValueCount(resultInfo, dataDirectory, tableDbName, firstLineInfo, valueList);
                // clear temporary variables
                valueList.clear();
                preContinueString = null;
                continue;
            }
            // *valid record is prepared here
            ++rowNumber;
            // /- - - - - - - - - - - - - - - -
            // process registration to database
            // - - - - - - - - - -/
            final DfDelimiterDataWriteSqlBuilder sqlBuilder = createSqlBuilder(resultInfo, tableDbName, columnMetaMap, columnNameList, valueList);
            if (conn == null) {
                conn = _dataSource.getConnection();
            }
            if (ps == null) {
                // for performance (suppress implicit transaction per SQL)
                beginTransaction(conn);
                executedSql = sqlBuilder.buildSql();
                ps = prepareStatement(conn, executedSql);
            }
            final Map<String, Object> columnValueMap = sqlBuilder.setupParameter();
            final Set<String> sysdateColumnSet = sqlBuilder.getSysdateColumnSet();
            resolveRelativeDate(dataDirectory, tableDbName, columnValueMap, columnMetaMap, sysdateColumnSet, rowNumber);
            handleLoggingInsert(tableDbName, columnValueMap, loggingInsertType, rowNumber);
            int bindCount = 1;
            for (Entry<String, Object> entry : columnValueMap.entrySet()) {
                final String columnName = entry.getKey();
                final Object obj = entry.getValue();
                // /- - - - - - - - - -
                // process null value
                // - - - - - - - - - -/
                if (processNull(dataDirectory, tableDbName, columnName, obj, ps, bindCount, columnMetaMap, rowNumber)) {
                    bindCount++;
                    continue;
                }
                // register the value to the statement according to its type
                if (processNotNullNotString(dataDirectory, tableDbName, columnName, obj, conn, ps, bindCount, columnMetaMap, rowNumber)) {
                    bindCount++;
                    continue;
                }
                // /- - - - - - - - - - - - - - - - - -
                // process NotNull and StringExpression
                // - - - - - - - - - -/
                final String value = (String) obj;
                processNotNullString(dataDirectory, dataFile, tableDbName, columnName, value, conn, ps, bindCount, columnMetaMap, rowNumber);
                bindCount++;
            }
            if (canBatchUpdate) {
                // mainly here
                ps.addBatch();
            } else {
                ps.execute();
            }
            ++addedBatchSize;
            if (isBatchSizeLimit(addedBatchSize)) {
                // transaction scope
                if (canBatchUpdate) {
                    // mainly here
                    // only the delimiter data writer flushes per batch window,
                    // because delimiter data can be large
                    // and periodic flushing avoids OutOfMemory
                    ps.executeBatch();
                }
                commitTransaction(conn);
                addedBatchSize = 0;
                close(ps);
                ps = null;
            }
            // *one record is finished here
            // clear temporary variables
            // if an exception occurs from execute() or addBatch(),
            // this valueList serves as debug information
            valueList.clear();
            preContinueString = null;
        }
        if (ps != null && addedBatchSize > 0) {
            if (canBatchUpdate) {
                // mainly here
                ps.executeBatch();
            }
            commitTransaction(conn);
        }
        noticeLoadedRowSize(tableDbName, rowNumber);
        resultInfo.registerLoadedMeta(dataDirectory, _fileName, rowNumber);
        checkImplicitClassification(dataFile, tableDbName, columnNameList);
    } catch (FileNotFoundException e) {
        throw e;
    } catch (IOException e) {
        throw e;
    } catch (SQLException e) {
        DfJDBCException wrapped = DfJDBCException.voice(e);
        String msg = buildRegExpMessage(_fileName, tableDbName, executedSql, valueList, wrapped);
        throw new DfDelimiterDataRegistrationFailureException(msg, wrapped.getNextException());
    } catch (RuntimeException e) {
        String msg = buildRegExpMessage(_fileName, tableDbName, executedSql, valueList, null);
        throw new DfDelimiterDataRegistrationFailureException(msg, e);
    } finally {
        closeStream(fis, ir, br);
        commitJustInCase(conn);
        close(ps);
        close(conn);
        // process after (finally) handling table
        finallyHandlingTable(tableDbName, columnMetaMap);
    }
}
Also used: LoggingInsertType(org.dbflute.logic.replaceschema.loaddata.impl.dataprop.DfLoadingControlProp.LoggingInsertType) SQLException(java.sql.SQLException) ArrayList(java.util.ArrayList) FileNotFoundException(java.io.FileNotFoundException) DfJDBCException(org.dbflute.exception.DfJDBCException) List(java.util.List) DfDelimiterDataRegistrationFailureException(org.dbflute.exception.DfDelimiterDataRegistrationFailureException) DfColumnMeta(org.dbflute.logic.jdbc.metadata.info.DfColumnMeta) InputStreamReader(java.io.InputStreamReader) Connection(java.sql.Connection) PreparedStatement(java.sql.PreparedStatement) IOException(java.io.IOException) FileInputStream(java.io.FileInputStream) BufferedReader(java.io.BufferedReader) File(java.io.File)
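The pattern worth noting in this example is the bounded batch flush: rows are accumulated with addBatch() and flushed with executeBatch() plus a commit whenever the size limit is reached, so arbitrarily large delimiter files never hold a full batch in memory. Below is a minimal standalone sketch of that pattern in plain JDBC; the table name, column list, and BATCH_LIMIT are illustrative assumptions, not dbflute settings.

// a minimal sketch of the bounded batch-flush pattern, assuming a plain
// DataSource; the table, columns, and BATCH_LIMIT are illustrative
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.List;
import javax.sql.DataSource;

public class BatchFlushSketch {

    private static final int BATCH_LIMIT = 1000; // assumed flush threshold

    public void load(DataSource dataSource, List<String[]> rows) throws SQLException {
        final String sql = "insert into MEMBER (MEMBER_NAME, MEMBER_STATUS) values (?, ?)";
        try (Connection conn = dataSource.getConnection();
                PreparedStatement ps = conn.prepareStatement(sql)) {
            conn.setAutoCommit(false); // suppress the implicit transaction per SQL
            int addedBatchSize = 0;
            for (String[] row : rows) {
                ps.setString(1, row[0]);
                ps.setString(2, row[1]);
                ps.addBatch();
                if (++addedBatchSize >= BATCH_LIMIT) {
                    ps.executeBatch(); // flush: keeps memory bounded on large files
                    conn.commit();
                    addedBatchSize = 0;
                }
            }
            if (addedBatchSize > 0) { // flush the remainder
                ps.executeBatch();
                conn.commit();
            }
        }
    }
}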

Example 42 with DfColumnMeta

Use of org.dbflute.logic.jdbc.metadata.info.DfColumnMeta in project dbflute-core by dbflute.

From the class DfXlsDataHandlerImpl, the method setupDefaultValue:

protected void setupDefaultValue(String dataDirectory, final DfDataSet dataSet) {
    final Map<String, String> defaultValueMap = getDefaultValueMap(dataDirectory);
    for (int i = 0; i < dataSet.getTableSize(); i++) {
        final DfDataTable table = dataSet.getTable(i);
        final Set<String> defaultValueMapKeySet = defaultValueMap.keySet();
        final String tableName = table.getTableDbName();
        final Map<String, DfColumnMeta> metaMetaMap = getColumnMetaMap(tableName);
        for (String defaultTargetColumnName : defaultValueMapKeySet) {
            final String defaultValue = defaultValueMap.get(defaultTargetColumnName);
            if (metaMetaMap.containsKey(defaultTargetColumnName) && !table.hasColumn(defaultTargetColumnName)) {
                // values are resolved later, so resolve only the type here
                final DfDtsColumnType columnType;
                if (defaultValue.equalsIgnoreCase("sysdate")) {
                    columnType = DfDtsColumnTypes.TIMESTAMP;
                } else {
                    columnType = DfDtsColumnTypes.STRING;
                }
                table.addColumn(defaultTargetColumnName, columnType);
                for (int j = 0; j < table.getRowSize(); j++) {
                    final DfDataRow row = table.getRow(j);
                    // value is set later
                    row.addValue(defaultTargetColumnName, null);
                }
            }
        }
    }
}
Also used: DfColumnMeta(org.dbflute.logic.jdbc.metadata.info.DfColumnMeta) DfDtsColumnType(org.dbflute.helper.dataset.types.DfDtsColumnType) DfDataTable(org.dbflute.helper.dataset.DfDataTable) DfDataRow(org.dbflute.helper.dataset.DfDataRow)
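The type resolution above follows one simple rule: a default value of "sysdate" yields a timestamp column, anything else a string column, since the actual values are converted later. A one-method sketch of that rule; the helper name is hypothetical, while DfDtsColumnType and DfDtsColumnTypes are dbflute's own (see the imports above).

// hypothetical helper isolating the type-resolution rule from the snippet above
protected DfDtsColumnType resolveDefaultColumnType(String defaultValue) {
    if ("sysdate".equalsIgnoreCase(defaultValue)) {
        return DfDtsColumnTypes.TIMESTAMP; // date/time default
    }
    return DfDtsColumnTypes.STRING; // values are converted later anyway
}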

Example 43 with DfColumnMeta

Use of org.dbflute.logic.jdbc.metadata.info.DfColumnMeta in project dbflute-core by dbflute.

From the class DfXlsDataHandlerImpl, the method doWriteDataTable:

// -----------------------------------------------------
// DataTable
// ---------
protected int doWriteDataTable(DfXlsDataResource resource, File file, DfDataTable dataTable) {
    final String tableDbName = dataTable.getTableDbName();
    if (dataTable.getRowSize() == 0) {
        _log.info("*Not found row at the table: " + tableDbName);
        return 0;
    }
    final Map<String, DfColumnMeta> columnMetaMap = getColumnMetaMap(tableDbName);
    if (columnMetaMap.isEmpty()) {
        throwTableNotFoundException(file, tableDbName);
    }
    beforeHandlingTable(tableDbName, columnMetaMap);
    checkHeaderColumnIfNeeds(resource, file, dataTable, columnMetaMap);
    final List<String> columnNameList = extractColumnNameList(dataTable);
    final String dataDirectory = resource.getDataDirectory();
    final LoggingInsertType loggingInsertType = getLoggingInsertType(dataDirectory);
    final boolean suppressBatchUpdate = isMergedSuppressBatchUpdate(resource.getDataDirectory());
    Connection conn = null;
    PreparedStatement ps = null;
    String preparedSql = null;
    SQLException retryEx = null;
    DfDataRow retryDataRow = null;
    try {
        conn = _dataSource.getConnection();
        int loadedRowCount = 0;
        final int rowSize = dataTable.getRowSize();
        boolean existsEmptyRow = false;
        for (int i = 0; i < rowSize; i++) {
            final DfDataRow dataRow = dataTable.getRow(i);
            if (ps == null) {
                final MyCreatedState myCreatedState = new MyCreatedState();
                preparedSql = myCreatedState.buildPreparedSql(dataRow);
                ps = conn.prepareStatement(preparedSql);
            }
            if (doWriteDataRow(resource, file, dataTable, dataRow // basic resources
                    , columnMetaMap // meta data
                    , conn, ps // JDBC resources
                    , loggingInsertType, suppressBatchUpdate)) { // option
                ++loadedRowCount;
                if (existsEmptyRow) {
                    final int emptyRowNumber = dataRow.getRowNumber() - 1;
                    throwXlsDataEmptyRowDataException(dataDirectory, file, dataTable, emptyRowNumber);
                }
            } else {
                existsEmptyRow = true;
            }
        }
        if (existsEmptyRow) {
            _log.info("...Skipping the terminal garbage row");
        }
        if (!suppressBatchUpdate) {
            boolean beginTransaction = false;
            boolean transactionClosed = false;
            try {
                // use a transaction so the batch can be retried after rollback
                conn.setAutoCommit(false);
                beginTransaction = true;
                ps.executeBatch();
                conn.commit();
                transactionClosed = true;
            } catch (SQLException e) {
                conn.rollback();
                transactionClosed = true;
                if (!(e instanceof BatchUpdateException)) {
                    throw e;
                }
                _log.info("...Retrying by suppressing batch update: " + tableDbName);
                final PreparedStatement retryPs = conn.prepareStatement(preparedSql);
                for (int i = 0; i < rowSize; i++) {
                    final DfDataRow dataRow = dataTable.getRow(i);
                    try {
                        doWriteDataRow(resource, file, dataTable, dataRow // basic resources
                                , columnMetaMap // meta data
                                , conn, retryPs // JDBC resources
                                , LoggingInsertType.NONE, true); // option (no logging and suppress batch)
                    } catch (SQLException rowEx) {
                        retryEx = rowEx;
                        retryDataRow = dataRow;
                        break;
                    }
                }
                try {
                    retryPs.close();
                } catch (SQLException ignored) {
                }
                throw e;
            } finally {
                if (!transactionClosed) {
                    // for other exceptions
                    conn.rollback();
                }
                if (beginTransaction) {
                    conn.setAutoCommit(true);
                }
            }
        }
        noticeLoadedRowSize(tableDbName, loadedRowCount);
        checkImplicitClassification(file, tableDbName, columnNameList);
        return loadedRowCount;
    } catch (RuntimeException e) {
        handleXlsDataRegistartionFailureException(dataDirectory, file, tableDbName, e);
        // unreachable
        return -1;
    } catch (SQLException e) {
        handleWriteTableException(dataDirectory, file, dataTable, e, retryEx, retryDataRow, columnNameList);
        // unreachable
        return -1;
    } finally {
        closeResource(conn, ps);
        // process after (finally) handling table
        finallyHandlingTable(tableDbName, columnMetaMap);
    }
}
Also used: LoggingInsertType(org.dbflute.logic.replaceschema.loaddata.impl.dataprop.DfLoadingControlProp.LoggingInsertType) DfColumnMeta(org.dbflute.logic.jdbc.metadata.info.DfColumnMeta) SQLException(java.sql.SQLException) Connection(java.sql.Connection) PreparedStatement(java.sql.PreparedStatement) DfDataRow(org.dbflute.helper.dataset.DfDataRow) BatchUpdateException(java.sql.BatchUpdateException)
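The interesting part of this example is the recovery path: the batch insert runs inside an explicit transaction, and when executeBatch() fails with a BatchUpdateException the code rolls back and replays the rows one by one so the first failing row can be identified for the error message. A condensed standalone sketch of that strategy follows; the DataSource, SQL, and row shape are illustrative assumptions.

// a condensed sketch of the batch-then-retry strategy; names and SQL are illustrative
import java.sql.BatchUpdateException;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.List;
import javax.sql.DataSource;

public class BatchRetrySketch {

    public void insertAll(DataSource dataSource, List<String> names) throws SQLException {
        final String sql = "insert into MEMBER (MEMBER_NAME) values (?)";
        try (Connection conn = dataSource.getConnection()) {
            conn.setAutoCommit(false); // transaction so the batch can be retried after rollback
            try (PreparedStatement ps = conn.prepareStatement(sql)) {
                for (String name : names) {
                    ps.setString(1, name);
                    ps.addBatch();
                }
                ps.executeBatch();
                conn.commit();
            } catch (BatchUpdateException e) {
                conn.rollback();
                // replay row by row, only to locate the offender for the message
                try (PreparedStatement retryPs = conn.prepareStatement(sql)) {
                    for (String name : names) {
                        retryPs.setString(1, name);
                        try {
                            retryPs.executeUpdate();
                        } catch (SQLException rowEx) {
                            throw new SQLException("Failed row: " + name, rowEx);
                        }
                    }
                } finally {
                    conn.rollback(); // the replay was diagnostic only; discard it
                }
                throw e; // no single row failed on replay; rethrow the batch error
            } finally {
                conn.setAutoCommit(true);
            }
        }
    }
}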

Example 44 with DfColumnMeta

Use of org.dbflute.logic.jdbc.metadata.info.DfColumnMeta in project dbflute-core by dbflute.

From the class DfSequenceHandlerPostgreSQL, the method handleSerialTypeSequence:

protected void handleSerialTypeSequence(Map<String, String> tableSequenceMap) {
    final StringSet doneSequenceSet = StringSet.createAsFlexibleOrdered();
    doneSequenceSet.addAll(tableSequenceMap.values());
    DfTableMeta tableInfo = null;
    DfPrimaryKeyMeta pkInfo = null;
    String sequenceName = null;
    String tableSqlName = null;
    Integer actualValue = null;
    String sequenceSqlName = null;
    Connection conn = null;
    Statement st = null;
    try {
        conn = _dataSource.getConnection();
        st = conn.createStatement();
        final DatabaseMetaData metaData = conn.getMetaData();
        final DfColumnExtractor columnHandler = new DfColumnExtractor();
        final DfAutoIncrementExtractor autoIncrementHandler = new DfAutoIncrementExtractor();
        _log.info("...Incrementing serial type sequence");
        final Set<Entry<String, DfTableMeta>> entrySet = _tableMap.entrySet();
        for (Entry<String, DfTableMeta> entry : entrySet) {
            // clear elements that are also used in the exception message
            tableInfo = null;
            pkInfo = null;
            sequenceName = null;
            tableSqlName = null;
            actualValue = null;
            sequenceSqlName = null;
            tableInfo = entry.getValue();
            pkInfo = _uniqueKeyHandler.getPrimaryKey(metaData, tableInfo);
            final List<String> pkList = pkInfo.getPrimaryKeyList();
            if (pkList.size() != 1) {
                continue;
            }
            final String primaryKeyColumnName = pkList.get(0);
            if (!autoIncrementHandler.isAutoIncrementColumn(conn, tableInfo, primaryKeyColumnName)) {
                continue;
            }
            final Map<String, DfColumnMeta> columnMap = columnHandler.getColumnMap(metaData, tableInfo);
            final DfColumnMeta columnInfo = columnMap.get(primaryKeyColumnName);
            if (columnInfo == null) {
                continue;
            }
            final String defaultValue = columnInfo.getDefaultValue();
            if (defaultValue == null) {
                continue;
            }
            final String prefix = "nextval('";
            if (!defaultValue.startsWith(prefix)) {
                continue;
            }
            final String excludedPrefixString = defaultValue.substring(prefix.length());
            final int endIndex = excludedPrefixString.indexOf("'");
            if (endIndex < 0) {
                continue;
            }
            sequenceName = excludedPrefixString.substring(0, endIndex);
            if (doneSequenceSet.contains(sequenceName)) {
                // already done
                continue;
            }
            tableSqlName = tableInfo.getTableSqlName();
            final Integer count = selectCount(st, tableSqlName);
            if (count == null || count == 0) {
                // It is not necessary to increment because the table has no data.
                continue;
            }
            actualValue = selectDataMax(st, tableInfo, primaryKeyColumnName);
            if (actualValue == null) {
                // It is not necessary to increment because the table has no data.
                continue;
            }
            // because sequence names of other schemas have already been qualified
            // sequenceSqlName = tableInfo.getUnifiedSchema().buildSqlName(sequenceName);
            sequenceSqlName = sequenceName;
            callSequenceLoop(st, sequenceSqlName, actualValue);
        }
    } catch (SQLException e) {
        throwSerialTypeSequenceHandlingFailureException(tableInfo, pkInfo, sequenceName, tableSqlName, actualValue, sequenceSqlName, e);
    } finally {
        if (st != null) {
            try {
                st.close();
            } catch (SQLException ignored) {
                _log.info("Statement.close() threw the exception!", ignored);
            }
        }
        if (conn != null) {
            try {
                conn.close();
            } catch (SQLException ignored) {
                _log.info("Connection.close() threw the exception!", ignored);
            }
        }
    }
}
Also used: DfPrimaryKeyMeta(org.dbflute.logic.jdbc.metadata.info.DfPrimaryKeyMeta) DfColumnMeta(org.dbflute.logic.jdbc.metadata.info.DfColumnMeta) SQLException(java.sql.SQLException) Statement(java.sql.Statement) DfAutoIncrementExtractor(org.dbflute.logic.jdbc.metadata.basic.DfAutoIncrementExtractor) Connection(java.sql.Connection) DatabaseMetaData(java.sql.DatabaseMetaData) Entry(java.util.Map.Entry) DfColumnExtractor(org.dbflute.logic.jdbc.metadata.basic.DfColumnExtractor) StringSet(org.dbflute.helper.StringSet) DfTableMeta(org.dbflute.logic.jdbc.metadata.info.DfTableMeta)
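Two steps in this example are easy to lift out: extracting the sequence name from a serial column's default expression (nextval('...')), and moving the sequence up to the table's current maximum key. The sketch below mirrors the first step, but for the second it jumps the sequence directly with PostgreSQL's setval() instead of calling the sequence in a loop as dbflute's callSequenceLoop() does; it also assumes the table is non-empty, which the code above checks before incrementing.

// a PostgreSQL-only sketch; unlike the loop above, it jumps the sequence via setval()
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

public class SerialSequenceSketch {

    // returns the sequence name inside "nextval('...')", or null if the
    // default value is not a serial-style expression
    public String extractSequenceName(String defaultValue) {
        final String prefix = "nextval('";
        if (defaultValue == null || !defaultValue.startsWith(prefix)) {
            return null;
        }
        final String rear = defaultValue.substring(prefix.length());
        final int endIndex = rear.indexOf("'");
        return endIndex >= 0 ? rear.substring(0, endIndex) : null;
    }

    // sets the sequence to the table's current max key so the next value is max + 1;
    // assumes a non-empty table (setval() rejects a null value)
    // identifiers are interpolated: fine for a schema tool, not for untrusted input
    public void alignSequence(Connection conn, String sequenceName, String tableSqlName, String pkColumn) throws SQLException {
        final String sql = "select setval('" + sequenceName + "', (select max(" + pkColumn + ") from " + tableSqlName + "))";
        try (Statement st = conn.createStatement(); ResultSet rs = st.executeQuery(sql)) {
            rs.next(); // setval() returns the value it set
        }
    }
}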

Example 45 with DfColumnMeta

Use of org.dbflute.logic.jdbc.metadata.info.DfColumnMeta in project dbflute-core by dbflute.

From the class DfDataTable, the method setupMetaData:

public void setupMetaData(DatabaseMetaData metaData, UnifiedSchema unifiedSchema) throws SQLException {
    final Map<String, DfColumnMeta> metaMap = extractColumnMetaMap(metaData, unifiedSchema);
    final Set<String> primaryKeySet = getPrimaryKeySet(metaData, unifiedSchema);
    for (int i = 0; i < getColumnSize(); ++i) {
        final DfDataColumn column = getColumn(i);
        if (primaryKeySet.contains(column.getColumnDbName())) {
            column.setPrimaryKey(true);
        } else {
            column.setPrimaryKey(false);
        }
        final DfColumnMeta metaInfo = metaMap.get(column.getColumnDbName());
        if (metaInfo != null) {
            column.setWritable(true);
            final int jdbcDefValue = metaInfo.getJdbcDefValue();
            column.setColumnType(DfDtsColumnTypes.getColumnType(jdbcDefValue));
        } else {
            column.setWritable(false);
        }
    }
    _hasMetaData = true;
}
Also used: DfColumnMeta(org.dbflute.logic.jdbc.metadata.info.DfColumnMeta)
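For context, here is a sketch of the plain JDBC calls that helpers like extractColumnMetaMap() and getPrimaryKeySet() plausibly wrap: DatabaseMetaData.getColumns() supplies the java.sql.Types code that DfColumnMeta exposes as jdbcDefValue, and DatabaseMetaData.getPrimaryKeys() supplies the key column names. The schema and table parameters are illustrative.

// a sketch of the underlying JDBC metadata calls; parameters are illustrative
import java.sql.DatabaseMetaData;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.Map;
import java.util.Set;

public class MetaDataSketch {

    // column name -> java.sql.Types code (what DfColumnMeta exposes as jdbcDefValue)
    public Map<String, Integer> extractJdbcDefValueMap(DatabaseMetaData metaData, String schema, String table) throws SQLException {
        final Map<String, Integer> map = new LinkedHashMap<>();
        try (ResultSet rs = metaData.getColumns(null, schema, table, "%")) {
            while (rs.next()) {
                map.put(rs.getString("COLUMN_NAME"), rs.getInt("DATA_TYPE"));
            }
        }
        return map;
    }

    // primary key column names, as used for setPrimaryKey(true) above
    public Set<String> extractPrimaryKeySet(DatabaseMetaData metaData, String schema, String table) throws SQLException {
        final Set<String> set = new LinkedHashSet<>();
        try (ResultSet rs = metaData.getPrimaryKeys(null, schema, table)) {
            while (rs.next()) {
                set.add(rs.getString("COLUMN_NAME"));
            }
        }
        return set;
    }
}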

Aggregations

DfColumnMeta (org.dbflute.logic.jdbc.metadata.info.DfColumnMeta): 71 usages
SQLException (java.sql.SQLException): 16
Connection (java.sql.Connection): 10
LinkedHashMap (java.util.LinkedHashMap): 9
Map (java.util.Map): 8
File (java.io.File): 7
StringKeyMap (org.dbflute.helper.StringKeyMap): 7
ResultSet (java.sql.ResultSet): 5
Statement (java.sql.Statement): 5
ArrayList (java.util.ArrayList): 5
DfJDBCException (org.dbflute.exception.DfJDBCException): 5
FileInputStream (java.io.FileInputStream): 4
BigDecimal (java.math.BigDecimal): 4
DatabaseMetaData (java.sql.DatabaseMetaData): 4
PreparedStatement (java.sql.PreparedStatement): 4
Timestamp (java.sql.Timestamp): 4
HashMap (java.util.HashMap): 4
StringSet (org.dbflute.helper.StringSet): 4
DfDataRow (org.dbflute.helper.dataset.DfDataRow): 4
DfDataTable (org.dbflute.helper.dataset.DfDataTable): 4