Example 6 with DfDataRow

Use of org.dbflute.helper.dataset.DfDataRow in project dbflute-core by dbflute.

In the class DfXlsDataHandlerImpl, the method doWriteDataTable:

// -----------------------------------------------------
// DataTable
// ---------
protected int doWriteDataTable(DfXlsDataResource resource, File file, DfDataTable dataTable) {
    final String tableDbName = dataTable.getTableDbName();
    if (dataTable.getRowSize() == 0) {
        _log.info("*Not found row at the table: " + tableDbName);
        return 0;
    }
    final Map<String, DfColumnMeta> columnMetaMap = getColumnMetaMap(tableDbName);
    if (columnMetaMap.isEmpty()) {
        throwTableNotFoundException(file, tableDbName);
    }
    beforeHandlingTable(tableDbName, columnMetaMap);
    checkHeaderColumnIfNeeds(resource, file, dataTable, columnMetaMap);
    final List<String> columnNameList = extractColumnNameList(dataTable);
    final String dataDirectory = resource.getDataDirectory();
    final LoggingInsertType loggingInsertType = getLoggingInsertType(dataDirectory);
    final boolean suppressBatchUpdate = isMergedSuppressBatchUpdate(resource.getDataDirectory());
    Connection conn = null;
    PreparedStatement ps = null;
    String preparedSql = null;
    SQLException retryEx = null;
    DfDataRow retryDataRow = null;
    try {
        conn = _dataSource.getConnection();
        int loadedRowCount = 0;
        final int rowSize = dataTable.getRowSize();
        boolean existsEmptyRow = false;
        for (int i = 0; i < rowSize; i++) {
            final DfDataRow dataRow = dataTable.getRow(i);
            if (ps == null) {
                final MyCreatedState myCreatedState = new MyCreatedState();
                preparedSql = myCreatedState.buildPreparedSql(dataRow);
                ps = conn.prepareStatement(preparedSql);
            }
            if (doWriteDataRow(resource, file, dataTable, dataRow // basic resources
                    , columnMetaMap // meta data
                    , conn, ps // JDBC resources
                    , loggingInsertType, suppressBatchUpdate)) { // option
                ++loadedRowCount;
                if (existsEmptyRow) {
                    final int emptyRowNumber = dataRow.getRowNumber() - 1;
                    throwXlsDataEmptyRowDataException(dataDirectory, file, dataTable, emptyRowNumber);
                }
            } else {
                existsEmptyRow = true;
            }
        }
        if (existsEmptyRow) {
            _log.info("...Skipping the terminal garbage row");
        }
        if (!suppressBatchUpdate) {
            boolean beginTransaction = false;
            boolean transactionClosed = false;
            try {
                // transaction to retry after
                conn.setAutoCommit(false);
                beginTransaction = true;
                ps.executeBatch();
                conn.commit();
                transactionClosed = true;
            } catch (SQLException e) {
                conn.rollback();
                transactionClosed = true;
                if (!(e instanceof BatchUpdateException)) {
                    throw e;
                }
                _log.info("...Retrying by suppressing batch update: " + tableDbName);
                final PreparedStatement retryPs = conn.prepareStatement(preparedSql);
                for (int i = 0; i < rowSize; i++) {
                    final DfDataRow dataRow = dataTable.getRow(i);
                    try {
                        doWriteDataRow(resource, file, dataTable, dataRow // basic resources
                                , columnMetaMap // meta data
                                , conn, retryPs // JDBC resources
                                , LoggingInsertType.NONE, true); // option (no logging and suppress batch)
                    } catch (SQLException rowEx) {
                        retryEx = rowEx;
                        retryDataRow = dataRow;
                        break;
                    }
                }
                try {
                    retryPs.close();
                } catch (SQLException ignored) {
                }
                throw e;
            } finally {
                if (!transactionClosed) {
                    // for other exceptions
                    conn.rollback();
                }
                if (beginTransaction) {
                    conn.setAutoCommit(true);
                }
            }
        }
        noticeLoadedRowSize(tableDbName, loadedRowCount);
        checkImplicitClassification(file, tableDbName, columnNameList);
        return loadedRowCount;
    } catch (RuntimeException e) {
        handleXlsDataRegistartionFailureException(dataDirectory, file, tableDbName, e);
        // unreachable
        return -1;
    } catch (SQLException e) {
        handleWriteTableException(dataDirectory, file, dataTable, e, retryEx, retryDataRow, columnNameList);
        // unreachable
        return -1;
    } finally {
        closeResource(conn, ps);
        // process after (finally) handling table
        finallyHandlingTable(tableDbName, columnMetaMap);
    }
}
Also used: LoggingInsertType(org.dbflute.logic.replaceschema.loaddata.impl.dataprop.DfLoadingControlProp.LoggingInsertType) DfColumnMeta(org.dbflute.logic.jdbc.metadata.info.DfColumnMeta) SQLException(java.sql.SQLException) Connection(java.sql.Connection) PreparedStatement(java.sql.PreparedStatement) DfDataRow(org.dbflute.helper.dataset.DfDataRow) BatchUpdateException(java.sql.BatchUpdateException)
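
The pattern above, minus the dbflute-specific plumbing, is: build the INSERT once from the first row, register every row, run a single batch inside a transaction, and on BatchUpdateException roll back and replay the rows one by one to locate the broken one. Below is a minimal plain-JDBC sketch of that flow, not dbflute code; the MEMBER table, the MEMBER_NAME column, and the class and method names are hypothetical.

import java.sql.BatchUpdateException;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.List;

public class BatchThenRetrySketch {

    // Hypothetical target table: MEMBER(MEMBER_NAME); "names" plays the role of the loaded rows.
    public int insertAll(Connection conn, List<String> names) throws SQLException {
        final String sql = "insert into MEMBER (MEMBER_NAME) values (?)";
        conn.setAutoCommit(false); // transaction so the whole batch can be retried or discarded
        try (PreparedStatement ps = conn.prepareStatement(sql)) {
            for (String name : names) {
                ps.setString(1, name);
                ps.addBatch();
            }
            try {
                ps.executeBatch();
                conn.commit();
            } catch (SQLException e) {
                conn.rollback();
                if (e instanceof BatchUpdateException) {
                    // replay row by row to pinpoint the broken row (probe only, never committed)
                    final int failedIndex = findFailingRow(conn, sql, names);
                    System.err.println("batch failed around row index: " + failedIndex);
                }
                throw e;
            }
        } finally {
            conn.setAutoCommit(true);
        }
        return names.size();
    }

    private int findFailingRow(Connection conn, String sql, List<String> names) throws SQLException {
        try (PreparedStatement retryPs = conn.prepareStatement(sql)) {
            for (int i = 0; i < names.size(); i++) {
                retryPs.setString(1, names.get(i));
                try {
                    retryPs.executeUpdate();
                } catch (SQLException rowEx) {
                    return i; // this row is the culprit
                }
            }
            return -1;
        } finally {
            conn.rollback(); // discard the probe inserts
        }
    }
}

Replaying row by row after a batch failure trades speed for a precise error report, which is why doWriteDataTable keeps the prepared SQL around and records retryEx and retryDataRow for the final exception message.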

Example 7 with DfDataRow

Use of org.dbflute.helper.dataset.DfDataRow in project dbflute-core by dbflute.

In the class DfDtsSqlTableWriter, the method doWrite:

protected void doWrite(DfDataTable table) {
    for (int i = 0; i < table.getRowSize(); ++i) {
        DfDataRow row = table.getRow(i);
        DfDtsRowState state = row.getState();
        state.update(_dataSource, row);
    }
}
Also used: DfDataRow(org.dbflute.helper.dataset.DfDataRow) DfDtsRowState(org.dbflute.helper.dataset.states.DfDtsRowState)
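
The write loop stays a one-liner per row because each DfDataRow carries a DfDtsRowState that knows how to persist it. A generic sketch of that state-dispatch idea, with illustrative RowState and StatefulRow interfaces that are not dbflute's API:

import java.util.List;
import javax.sql.DataSource;

// Illustrative interfaces, not dbflute's API: each row exposes a state object
// (e.g. created, modified, removed) that knows which SQL to issue for the row.
interface RowState {
    void update(DataSource dataSource, StatefulRow row);
}

interface StatefulRow {
    RowState getState();
}

class RowStateWriterSketch {
    void doWrite(DataSource dataSource, List<StatefulRow> rows) {
        for (StatefulRow row : rows) {
            row.getState().update(dataSource, row); // the state decides INSERT/UPDATE/DELETE
        }
    }
}

Pushing the INSERT/UPDATE/DELETE decision into the state object keeps the writer independent of how a row came to be created, modified, or removed.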

Example 8 with DfDataRow

Use of org.dbflute.helper.dataset.DfDataRow in project dbflute-core by dbflute.

In the class DfTableXlsWriter, the method setupTableSheet:

// ===================================================================================
// Table Sheet
// ===========
protected void setupTableSheet(DfDataSet dataSet) {
    for (int tableIndex = 0; tableIndex < dataSet.getTableSize(); ++tableIndex) {
        final DfDataTable table = dataSet.getTable(tableIndex);
        final Sheet sheet = _workbook.createSheet();
        final String tableName = table.getTableDbName();
        try {
            _workbook.setSheetName(tableIndex, tableName);
        } catch (RuntimeException e) {
            String msg = "Failed to set the sheet name: " + tableName;
            throw new IllegalStateException(msg, e);
        }
        final Row headerRow = sheet.createRow(0);
        for (int columnIndex = 0; columnIndex < table.getColumnSize(); ++columnIndex) {
            final Cell cell = headerRow.createCell(columnIndex);
            cell.setCellValue(createRichTextString(_workbook, table.getColumnName(columnIndex)));
        }
        for (int rowIndex = 0; rowIndex < table.getRowSize(); ++rowIndex) {
            final Row row = sheet.createRow(rowIndex + 1);
            for (int columnIndex = 0; columnIndex < table.getColumnSize(); ++columnIndex) {
                final DfDataRow dataRow = table.getRow(rowIndex);
                final Object value = dataRow.getValue(columnIndex);
                if (value != null) {
                    final Cell cell = row.createCell(columnIndex);
                    setupCellValueOfTableSheet(table, columnIndex, row, cell, value);
                }
            }
        }
    }
}
Also used: DfDataTable(org.dbflute.helper.dataset.DfDataTable) RichTextString(org.apache.poi.ss.usermodel.RichTextString) Row(org.apache.poi.ss.usermodel.Row) DfDataRow(org.dbflute.helper.dataset.DfDataRow) Sheet(org.apache.poi.ss.usermodel.Sheet) Cell(org.apache.poi.ss.usermodel.Cell)
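
Reduced to the POI calls, the sheet layout is: one header row at index 0 holding the column names, then one data row per record shifted down by one, skipping null cells. A standalone sketch of that layout, assuming poi-ooxml is on the classpath; the class name and the table-data.xlsx output file are hypothetical, not dbflute code.

import java.io.FileOutputStream;
import java.io.IOException;
import java.util.List;

import org.apache.poi.ss.usermodel.Cell;
import org.apache.poi.ss.usermodel.Row;
import org.apache.poi.ss.usermodel.Sheet;
import org.apache.poi.xssf.usermodel.XSSFWorkbook;

public class SimpleSheetWriterSketch {

    // Writes a header row at index 0, then one data row per record (rows 1..n),
    // mirroring the rowIndex + 1 offset used by setupTableSheet() above.
    public void write(String sheetName, List<String> columnNames, List<List<Object>> rows) throws IOException {
        try (XSSFWorkbook workbook = new XSSFWorkbook();
                FileOutputStream out = new FileOutputStream("table-data.xlsx")) {
            final Sheet sheet = workbook.createSheet(sheetName);
            final Row headerRow = sheet.createRow(0);
            for (int columnIndex = 0; columnIndex < columnNames.size(); columnIndex++) {
                headerRow.createCell(columnIndex).setCellValue(columnNames.get(columnIndex));
            }
            for (int rowIndex = 0; rowIndex < rows.size(); rowIndex++) {
                final Row row = sheet.createRow(rowIndex + 1);
                final List<Object> values = rows.get(rowIndex);
                for (int columnIndex = 0; columnIndex < values.size(); columnIndex++) {
                    final Object value = values.get(columnIndex);
                    if (value != null) { // skip null cells, as the writer above does
                        final Cell cell = row.createCell(columnIndex);
                        cell.setCellValue(String.valueOf(value));
                    }
                }
            }
            workbook.write(out);
        }
    }
}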

Example 9 with DfDataRow

Use of org.dbflute.helper.dataset.DfDataRow in project dbflute-core by dbflute.

In the class DfTableXlsReader, the method setupRow:

protected void setupRow(DfDataTable table, Row row) {
    final DfDataRow dataRow = table.addRow();
    Cell cell = null;
    Object value = null;
    DfDataColumn column = null;
    try {
        for (int columnIndex = 0; columnIndex < table.getColumnSize(); ++columnIndex) {
            cell = row.getCell(columnIndex);
            value = extractCellValue(table, columnIndex, row, cell);
            column = table.getColumn(columnIndex);
            final String columnName = column.getColumnDbName();
            try {
                dataRow.addValue(columnName, value);
            } catch (NumberFormatException e) {
                if (cell.getCellType() != Cell.CELL_TYPE_STRING) {
                    throw e;
                }
                _log.info("...Changing the column type to STRING type: name=" + columnName + " value=" + value);
                column.setColumnType(DfDtsColumnTypes.STRING);
                dataRow.addValue(columnName, value);
            }
        }
    } catch (RuntimeException e) {
        throwCellValueHandlingException(table, column, row, cell, value, e);
    }
}
Also used: DfDataColumn(org.dbflute.helper.dataset.DfDataColumn) RichTextString(org.apache.poi.ss.usermodel.RichTextString) DfDataRow(org.dbflute.helper.dataset.DfDataRow) Cell(org.apache.poi.ss.usermodel.Cell)
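
The notable part of setupRow is the fallback: when a value from a numeric column cannot be parsed and the cell is really a string cell, the whole column is demoted to STRING and the value is added again. A generic sketch of that relax-and-retry idea; the class and field names are illustrative, not dbflute's API.

import java.math.BigDecimal;
import java.util.LinkedHashMap;
import java.util.Map;

// Generic sketch of "relax the column type on NumberFormatException":
// columns start out numeric and are demoted to String the first time a
// non-numeric value shows up, after which the raw value is kept as text.
class TypeRelaxingRowSketch {

    private final Map<String, Object> values = new LinkedHashMap<>();
    private final Map<String, Class<?>> columnTypes = new LinkedHashMap<>();

    void addValue(String columnName, String rawCellValue) {
        final Class<?> type = columnTypes.getOrDefault(columnName, BigDecimal.class);
        if (type == BigDecimal.class) {
            try {
                values.put(columnName, new BigDecimal(rawCellValue));
                return;
            } catch (NumberFormatException e) {
                // the cell is really text: demote the whole column to String and retry
                columnTypes.put(columnName, String.class);
            }
        }
        values.put(columnName, rawCellValue);
    }
}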

Example 10 with DfDataRow

Use of org.dbflute.helper.dataset.DfDataRow in project dbflute-core by dbflute.

In the class DfTableXlsReaderTest, the method test_read_rtrim:

public void test_read_rtrim() throws IOException {
    // ## Arrange ##
    final File xlsFile = prepareTestBasicXlsFile();
    final DfTableXlsReader reader = createTableXlsReader(xlsFile, null, true);
    // ## Act ##
    final DfDataSet dataSet = reader.read();
    // ## Assert ##
    log("[DataSet]:" + ln() + dataSet);
    final int tableSize = dataSet.getTableSize();
    assertTrue(tableSize > 0);
    for (int tableIndex = 0; tableIndex < tableSize; tableIndex++) {
        final DfDataTable table = dataSet.getTable(tableIndex);
        final int columnSize = table.getColumnSize();
        assertTrue(columnSize > 0);
        final int rowSize = table.getRowSize();
        assertTrue(rowSize > 0);
        for (int rowIndex = 0; rowIndex < rowSize; rowIndex++) {
            final DfDataRow row = table.getRow(rowIndex);
            for (int columnIndex = 0; columnIndex < columnSize; columnIndex++) {
                final DfDataColumn column = table.getColumn(columnIndex);
                final String columnDbName = column.getColumnDbName();
                final Object value = row.getValue(columnDbName);
                if (columnDbName.equals("AAA")) {
                    assertNotNull(value);
                } else if (columnDbName.equals("BBB")) {
                    markHere("nullBBB");
                } else if (columnDbName.equals("CCC")) {
                    assertNotNull(value);
                } else if (columnDbName.equals("DDD")) {
                    assertNotNull(value);
                    String str = (String) value;
                    if (str.length() > str.trim().length()) {
                        fail();
                    }
                } else if (columnDbName.equals("EEE")) {
                    assertNotNull(value);
                    String str = (String) value;
                    if (str.length() > str.trim().length()) {
                        // because of not trimmed column
                        markHere("trimmed_EEE");
                    }
                }
            }
        }
    }
    assertMarked("nullBBB");
    assertMarked("trimmed_EEE");
}
Also used: DfDataSet(org.dbflute.helper.dataset.DfDataSet) DfDataColumn(org.dbflute.helper.dataset.DfDataColumn) DfDataTable(org.dbflute.helper.dataset.DfDataTable) File(java.io.File) DfDataRow(org.dbflute.helper.dataset.DfDataRow)
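
The check the assertions rely on, str.length() > str.trim().length(), flags any leading or trailing whitespace: the trimmed DDD column must not trip it, while the deliberately untrimmed EEE column must. A small standalone sketch of that check plus an explicit right-trim; the helper class is hypothetical, not dbflute code.

// Minimal sketch of the whitespace check used in the assertions above:
// comparing length() with trim().length() detects any leading or trailing
// whitespace; an explicit right-trim isolates trailing characters only.
public class RtrimCheckSketch {

    static boolean hasSurroundingWhitespace(String str) {
        return str.length() > str.trim().length();
    }

    static String rtrim(String str) {
        int end = str.length();
        while (end > 0 && Character.isWhitespace(str.charAt(end - 1))) {
            end--;
        }
        return str.substring(0, end);
    }

    public static void main(String[] args) {
        System.out.println(hasSurroundingWhitespace("trimmed"));      // false
        System.out.println(hasSurroundingWhitespace("not trimmed ")); // true
        System.out.println("[" + rtrim("not trimmed ") + "]");        // [not trimmed]
    }
}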

Aggregations

DfDataRow (org.dbflute.helper.dataset.DfDataRow): 13
DfDataTable (org.dbflute.helper.dataset.DfDataTable): 9
DfDataColumn (org.dbflute.helper.dataset.DfDataColumn): 5
DfColumnMeta (org.dbflute.logic.jdbc.metadata.info.DfColumnMeta): 4
File (java.io.File): 3
RichTextString (org.apache.poi.ss.usermodel.RichTextString): 3
DfDataSet (org.dbflute.helper.dataset.DfDataSet): 3
BatchUpdateException (java.sql.BatchUpdateException): 2
Connection (java.sql.Connection): 2
PreparedStatement (java.sql.PreparedStatement): 2
SQLException (java.sql.SQLException): 2
LinkedHashMap (java.util.LinkedHashMap): 2
Map (java.util.Map): 2
Cell (org.apache.poi.ss.usermodel.Cell): 2
Sheet (org.apache.poi.ss.usermodel.Sheet): 2
Column (org.apache.torque.engine.database.model.Column): 2
DfDtsColumnType (org.dbflute.helper.dataset.types.DfDtsColumnType): 2
Row (org.apache.poi.ss.usermodel.Row): 1
DfDtsRowState (org.dbflute.helper.dataset.states.DfDtsRowState): 1
LoggingInsertType (org.dbflute.logic.replaceschema.loaddata.base.dataprop.DfLoadingControlProp.LoggingInsertType): 1