Search in sources:

Example 1 with LoggingInsertType

Use of org.dbflute.logic.replaceschema.loaddata.impl.dataprop.DfLoadingControlProp.LoggingInsertType in project dbflute-core by dbflute.
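
Both examples resolve a LoggingInsertType from the data directory's loading control settings and pass it to a per-row logging hook. As orientation before the full methods, here is a minimal self-contained sketch of that shape; the enum constants, the row threshold, and the helper signature are assumptions for illustration, not the actual DfLoadingControlProp definitions.

// Hypothetical sketch: constants, threshold, and signature are assumed,
// not copied from DfLoadingControlProp.
import java.util.Map;

public class LoggingInsertSketch {

    // How much INSERT activity is echoed to the load log (assumed constants).
    enum LoggingInsertType { ALL, PART, NONE }

    // Log one row according to the resolved type.
    static void handleLoggingInsert(String tableDbName, Map<String, Object> columnValueMap,
            LoggingInsertType loggingInsertType, int rowNumber) {
        if (loggingInsertType == LoggingInsertType.NONE) {
            return; // row logging suppressed entirely
        }
        if (loggingInsertType == LoggingInsertType.PART && rowNumber > 10) {
            return; // log only the first rows (threshold is illustrative)
        }
        System.out.println(tableDbName + ":" + rowNumber + " " + columnValueMap);
    }

    public static void main(String[] args) {
        handleLoggingInsert("MEMBER", Map.of("MEMBER_NAME", "ann"), LoggingInsertType.PART, 1);
    }
}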

From the class DfDelimiterDataWriterImpl, the method writeData:

// ===================================================================================
// Write
// =====
public void writeData(DfDelimiterDataResultInfo resultInfo) throws IOException {
    _log.info("/= = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = ");
    _log.info("writeData(" + _fileName + ")");
    _log.info("= = = = = = =/");
    FileInputStream fis = null;
    InputStreamReader ir = null;
    BufferedReader br = null;
    final String dataDirectory = Srl.substringLastFront(_fileName, "/");
    final LoggingInsertType loggingInsertType = getLoggingInsertType(dataDirectory);
    final String tableDbName = extractTableDbName();
    final Map<String, DfColumnMeta> columnMetaMap = getColumnMetaMap(tableDbName);
    if (columnMetaMap.isEmpty()) {
        throwTableNotFoundException(_fileName, tableDbName);
    }
    // process before handling table
    beforeHandlingTable(tableDbName, columnMetaMap);
    String lineString = null;
    String preContinueString = null;
    String executedSql = null;
    final List<String> columnNameList = new ArrayList<String>();
    final List<String> valueList = new ArrayList<String>();
    final boolean canBatchUpdate = !isMergedSuppressBatchUpdate(dataDirectory);
    final File dataFile = new File(_fileName);
    Connection conn = null;
    PreparedStatement ps = null;
    try {
        fis = new FileInputStream(dataFile);
        ir = new InputStreamReader(fis, _encoding);
        br = new BufferedReader(ir);
        FirstLineInfo firstLineInfo = null;
        int loopIndex = -1;
        int rowNumber = 0;
        int addedBatchSize = 0;
        while (true) {
            ++loopIndex;
            lineString = br.readLine();
            if (lineString == null) {
                break;
            }
            if (loopIndex == 0) {
                firstLineInfo = analyzeFirstLineInfo(_delimiter, lineString);
                setupColumnNameList(dataDirectory, dataFile, tableDbName, columnMetaMap, firstLineInfo, columnNameList);
                continue;
            }
            // /- - - - - - - - - - - - - - -
            // analyze values in line strings
            // - - - - - - - - - -/
            lineString = filterLineString(lineString);
            {
                if (preContinueString != null && !preContinueString.equals("")) {
                    lineString = preContinueString + "\n" + lineString;
                }
                final ValueLineInfo valueLineInfo = arrangeValueList(lineString, _delimiter);
                // empty string resolved later
                final List<String> ls = valueLineInfo.getValueList();
                if (valueLineInfo.isContinueNextLine()) {
                    preContinueString = ls.remove(ls.size() - 1);
                    valueList.addAll(ls);
                    continue;
                }
                valueList.addAll(ls);
            }
            if (isDifferentColumnValueCount(firstLineInfo, valueList)) {
                handleDifferentColumnValueCount(resultInfo, dataDirectory, tableDbName, firstLineInfo, valueList);
                // clear temporary variables
                valueList.clear();
                preContinueString = null;
                continue;
            }
            // *valid record is prepared here
            ++rowNumber;
            // /- - - - - - - - - - - - - - - -
            // process registration to database
            // - - - - - - - - - -/
            final DfDelimiterDataWriteSqlBuilder sqlBuilder = createSqlBuilder(resultInfo, tableDbName, columnMetaMap, columnNameList, valueList);
            if (conn == null) {
                conn = _dataSource.getConnection();
            }
            if (ps == null) {
                // for performance (suppress implicit transaction per SQL)
                beginTransaction(conn);
                executedSql = sqlBuilder.buildSql();
                ps = prepareStatement(conn, executedSql);
            }
            final Map<String, Object> columnValueMap = sqlBuilder.setupParameter();
            final Set<String> sysdateColumnSet = sqlBuilder.getSysdateColumnSet();
            resolveRelativeDate(dataDirectory, tableDbName, columnValueMap, columnMetaMap, sysdateColumnSet, rowNumber);
            handleLoggingInsert(tableDbName, columnValueMap, loggingInsertType, rowNumber);
            int bindCount = 1;
            for (Entry<String, Object> entry : columnValueMap.entrySet()) {
                final String columnName = entry.getKey();
                final Object obj = entry.getValue();
                if (processNull(dataDirectory, tableDbName, columnName, obj, ps, bindCount, columnMetaMap, rowNumber)) {
                    bindCount++;
                    continue;
                }
                // register the value to the statement according to its type
                if (processNotNullNotString(dataDirectory, tableDbName, columnName, obj, conn, ps, bindCount, columnMetaMap, rowNumber)) {
                    bindCount++;
                    continue;
                }
                // /- - - - - - - - - - - - - - - - - -
                // process NotNull and StringExpression
                // - - - - - - - - - -/
                final String value = (String) obj;
                processNotNullString(dataDirectory, dataFile, tableDbName, columnName, value, conn, ps, bindCount, columnMetaMap, rowNumber);
                bindCount++;
            }
            if (canBatchUpdate) {
                // mainly here
                ps.addBatch();
            } else {
                ps.execute();
            }
            ++addedBatchSize;
            if (isBatchSizeLimit(addedBatchSize)) {
                // transaction scope
                if (canBatchUpdate) {
                    // mainly here
                    // chunked flushing is supported only by the delimiter data writer,
                    // because delimiter data can be large and flushing
                    // per chunk avoids OutOfMemoryError
                    ps.executeBatch();
                }
                commitTransaction(conn);
                addedBatchSize = 0;
                close(ps);
                ps = null;
            }
            // *one record is finished here
            // clear temporary variables
            // if an exception occurs in execute() or addBatch(),
            // this valueList serves as debug information
            valueList.clear();
            preContinueString = null;
        }
        if (ps != null && addedBatchSize > 0) {
            if (canBatchUpdate) {
                // mainly here
                ps.executeBatch();
            }
            commitTransaction(conn);
        }
        noticeLoadedRowSize(tableDbName, rowNumber);
        resultInfo.registerLoadedMeta(dataDirectory, _fileName, rowNumber);
        checkImplicitClassification(dataFile, tableDbName, columnNameList);
    } catch (FileNotFoundException e) {
        throw e;
    } catch (IOException e) {
        throw e;
    } catch (SQLException e) {
        DfJDBCException wrapped = DfJDBCException.voice(e);
        String msg = buildRegExpMessage(_fileName, tableDbName, executedSql, valueList, wrapped);
        throw new DfDelimiterDataRegistrationFailureException(msg, wrapped.getNextException());
    } catch (RuntimeException e) {
        String msg = buildRegExpMessage(_fileName, tableDbName, executedSql, valueList, null);
        throw new DfDelimiterDataRegistrationFailureException(msg, e);
    } finally {
        closeStream(fis, ir, br);
        commitJustInCase(conn);
        close(ps);
        close(conn);
        // process after (finally) handling table
        finallyHandlingTable(tableDbName, columnMetaMap);
    }
}
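
The method above is dense, so here is a stripped-down, self-contained sketch of just its batching pattern: accumulate addBatch() calls and, when the chunk limit is reached, flush with executeBatch() and commit, so memory stays bounded even for large delimiter files. The table, column, in-memory H2 URL, and chunk size are illustrative, not dbflute settings.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.List;

public class ChunkedBatchInsert {

    private static final int BATCH_SIZE_LIMIT = 1000; // illustrative chunk size

    public static void load(Connection conn, List<String> names) throws SQLException {
        conn.setAutoCommit(false); // one transaction per chunk, not per row
        try (PreparedStatement ps = conn.prepareStatement("insert into MEMBER (MEMBER_NAME) values (?)")) {
            int addedBatchSize = 0;
            for (String name : names) {
                ps.setString(1, name);
                ps.addBatch();
                if (++addedBatchSize >= BATCH_SIZE_LIMIT) {
                    ps.executeBatch(); // flush the chunk...
                    conn.commit(); // ...and make it durable before continuing
                    addedBatchSize = 0;
                }
            }
            if (addedBatchSize > 0) { // flush the final partial chunk
                ps.executeBatch();
                conn.commit();
            }
        } finally {
            conn.setAutoCommit(true);
        }
    }

    public static void main(String[] args) throws SQLException {
        // requires an H2 driver on the classpath; any JDBC database works the same way
        try (Connection conn = DriverManager.getConnection("jdbc:h2:mem:demo");
                Statement st = conn.createStatement()) {
            st.execute("create table MEMBER (MEMBER_NAME varchar(80))");
            load(conn, List.of("ann", "bob", "cid"));
        }
    }
}

Note that writeData() goes one step further and also closes and re-prepares the statement per chunk; the sketch keeps one statement for brevity.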
Also used:
LoggingInsertType (org.dbflute.logic.replaceschema.loaddata.impl.dataprop.DfLoadingControlProp.LoggingInsertType)
SQLException (java.sql.SQLException)
ArrayList (java.util.ArrayList)
FileNotFoundException (java.io.FileNotFoundException)
DfJDBCException (org.dbflute.exception.DfJDBCException)
List (java.util.List)
DfDelimiterDataRegistrationFailureException (org.dbflute.exception.DfDelimiterDataRegistrationFailureException)
DfColumnMeta (org.dbflute.logic.jdbc.metadata.info.DfColumnMeta)
InputStreamReader (java.io.InputStreamReader)
Connection (java.sql.Connection)
PreparedStatement (java.sql.PreparedStatement)
IOException (java.io.IOException)
FileInputStream (java.io.FileInputStream)
BufferedReader (java.io.BufferedReader)
File (java.io.File)

Example 2 with LoggingInsertType

Use of org.dbflute.logic.replaceschema.loaddata.impl.dataprop.DfLoadingControlProp.LoggingInsertType in project dbflute-core by dbflute.

From the class DfXlsDataHandlerImpl, the method doWriteDataTable:

// -----------------------------------------------------
// DataTable
// ---------
protected int doWriteDataTable(DfXlsDataResource resource, File file, DfDataTable dataTable) {
    final String tableDbName = dataTable.getTableDbName();
    if (dataTable.getRowSize() == 0) {
        _log.info("*Not found row at the table: " + tableDbName);
        return 0;
    }
    final Map<String, DfColumnMeta> columnMetaMap = getColumnMetaMap(tableDbName);
    if (columnMetaMap.isEmpty()) {
        throwTableNotFoundException(file, tableDbName);
    }
    beforeHandlingTable(tableDbName, columnMetaMap);
    checkHeaderColumnIfNeeds(resource, file, dataTable, columnMetaMap);
    final List<String> columnNameList = extractColumnNameList(dataTable);
    final String dataDirectory = resource.getDataDirectory();
    final LoggingInsertType loggingInsertType = getLoggingInsertType(dataDirectory);
    final boolean suppressBatchUpdate = isMergedSuppressBatchUpdate(resource.getDataDirectory());
    Connection conn = null;
    PreparedStatement ps = null;
    String preparedSql = null;
    SQLException retryEx = null;
    DfDataRow retryDataRow = null;
    try {
        conn = _dataSource.getConnection();
        int loadedRowCount = 0;
        final int rowSize = dataTable.getRowSize();
        boolean existsEmptyRow = false;
        for (int i = 0; i < rowSize; i++) {
            final DfDataRow dataRow = dataTable.getRow(i);
            if (ps == null) {
                final MyCreatedState myCreatedState = new MyCreatedState();
                preparedSql = myCreatedState.buildPreparedSql(dataRow);
                ps = conn.prepareStatement(preparedSql);
            }
            if (doWriteDataRow(resource, file, dataTable, dataRow, // basic resources
                    columnMetaMap, // meta data
                    conn, ps, // JDBC resources
                    loggingInsertType, suppressBatchUpdate)) { // options
                ++loadedRowCount;
                if (existsEmptyRow) {
                    final int emptyRowNumber = dataRow.getRowNumber() - 1;
                    throwXlsDataEmptyRowDataException(dataDirectory, file, dataTable, emptyRowNumber);
                }
            } else {
                existsEmptyRow = true;
            }
        }
        if (existsEmptyRow) {
            _log.info("...Skipping the terminal garbage row");
        }
        if (!suppressBatchUpdate) {
            boolean beginTransaction = false;
            boolean transactionClosed = false;
            try {
                // begin transaction (allows rollback and retry on failure)
                conn.setAutoCommit(false);
                beginTransaction = true;
                ps.executeBatch();
                conn.commit();
                transactionClosed = true;
            } catch (SQLException e) {
                conn.rollback();
                transactionClosed = true;
                if (!(e instanceof BatchUpdateException)) {
                    throw e;
                }
                _log.info("...Retrying by suppressing batch update: " + tableDbName);
                final PreparedStatement retryPs = conn.prepareStatement(preparedSql);
                for (int i = 0; i < rowSize; i++) {
                    final DfDataRow dataRow = dataTable.getRow(i);
                    try {
                        doWriteDataRow(resource, file, dataTable, dataRow, // basic resources
                                columnMetaMap, // meta data
                                conn, retryPs, // JDBC resources
                                LoggingInsertType.NONE, true); // options (no logging, suppress batch update)
                    } catch (SQLException rowEx) {
                        retryEx = rowEx;
                        retryDataRow = dataRow;
                        break;
                    }
                }
                try {
                    retryPs.close();
                } catch (SQLException ignored) {
                }
                throw e;
            } finally {
                if (!transactionClosed) {
                    // for other exceptions
                    conn.rollback();
                }
                if (beginTransaction) {
                    conn.setAutoCommit(true);
                }
            }
        }
        noticeLoadedRowSize(tableDbName, loadedRowCount);
        checkImplicitClassification(file, tableDbName, columnNameList);
        return loadedRowCount;
    } catch (RuntimeException e) {
        handleXlsDataRegistartionFailureException(dataDirectory, file, tableDbName, e);
        // unreachable
        return -1;
    } catch (SQLException e) {
        handleWriteTableException(dataDirectory, file, dataTable, e, retryEx, retryDataRow, columnNameList);
        // unreachable
        return -1;
    } finally {
        closeResource(conn, ps);
        // process after (finally) handling table
        finallyHandlingTable(tableDbName, columnMetaMap);
    }
}
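
The most interesting wrinkle in this second example is the recovery path: when executeBatch() fails with a BatchUpdateException, the transaction is rolled back and the same rows are replayed one statement at a time, so the first failing row can be identified and reported precisely. A self-contained sketch of that narrowing pattern follows; the table and column names are illustrative.

import java.sql.BatchUpdateException;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.List;

public class BatchRetryNarrowing {

    public static void insertAll(Connection conn, List<String> names) throws SQLException {
        final String sql = "insert into MEMBER (MEMBER_NAME) values (?)";
        conn.setAutoCommit(false);
        try (PreparedStatement ps = conn.prepareStatement(sql)) {
            for (String name : names) {
                ps.setString(1, name);
                ps.addBatch();
            }
            ps.executeBatch();
            conn.commit();
        } catch (BatchUpdateException e) {
            conn.rollback();
            // Replay one row at a time to pinpoint which row broke the batch.
            try (PreparedStatement retryPs = conn.prepareStatement(sql)) {
                int rowNumber = 0;
                for (String name : names) {
                    ++rowNumber;
                    try {
                        retryPs.setString(1, name);
                        retryPs.execute();
                    } catch (SQLException rowEx) {
                        conn.rollback();
                        throw new SQLException("row " + rowNumber + " failed: " + name, rowEx);
                    }
                }
            }
            conn.rollback(); // the replay was only for diagnosis; discard its rows
            throw e;
        } finally {
            conn.setAutoCommit(true);
        }
    }
}

Rethrowing the original batch exception after a clean replay mirrors the method above, which keeps retryEx and retryDataRow only as extra context for the final error report.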
Also used:
LoggingInsertType (org.dbflute.logic.replaceschema.loaddata.impl.dataprop.DfLoadingControlProp.LoggingInsertType)
DfColumnMeta (org.dbflute.logic.jdbc.metadata.info.DfColumnMeta)
SQLException (java.sql.SQLException)
Connection (java.sql.Connection)
PreparedStatement (java.sql.PreparedStatement)
DfDataRow (org.dbflute.helper.dataset.DfDataRow)
BatchUpdateException (java.sql.BatchUpdateException)

Aggregations

Connection (java.sql.Connection): 2
PreparedStatement (java.sql.PreparedStatement): 2
SQLException (java.sql.SQLException): 2
DfColumnMeta (org.dbflute.logic.jdbc.metadata.info.DfColumnMeta): 2
LoggingInsertType (org.dbflute.logic.replaceschema.loaddata.impl.dataprop.DfLoadingControlProp.LoggingInsertType): 2
BufferedReader (java.io.BufferedReader): 1
File (java.io.File): 1
FileInputStream (java.io.FileInputStream): 1
FileNotFoundException (java.io.FileNotFoundException): 1
IOException (java.io.IOException): 1
InputStreamReader (java.io.InputStreamReader): 1
BatchUpdateException (java.sql.BatchUpdateException): 1
ArrayList (java.util.ArrayList): 1
List (java.util.List): 1
DfDelimiterDataRegistrationFailureException (org.dbflute.exception.DfDelimiterDataRegistrationFailureException): 1
DfJDBCException (org.dbflute.exception.DfJDBCException): 1
DfDataRow (org.dbflute.helper.dataset.DfDataRow): 1