
Example 26 with DatabaseConnectionSettings

Use of org.knime.core.node.port.database.DatabaseConnectionSettings in project knime-core by KNIME.

The updateTable method of the class DBWriterImpl.

/**
 * Create connection to update table in database.
 * @param data The data to write.
 * @param setColumns columns part of the SET clause
 * @param whereColumns columns part of the WHERE clause
 * @param updateStatus int array of length data#getRowCount; will be filled with
 *             update info from the database
 * @param schema name of the schema the table belongs to
 * @param table name of table to write
 * @param exec Used to cancel writing.
 * @param cp {@link CredentialsProvider} providing user/password
 * @param batchSize number of rows updated in one batch
 * @return error string or null, if none
 * @throws Exception if connection could not be established
 */
@Override
public final String updateTable(final String schema, final String table, final BufferedDataTable data, final String[] setColumns, final String[] whereColumns, final int[] updateStatus, final ExecutionMonitor exec, final CredentialsProvider cp, final int batchSize) throws Exception {
    final DatabaseConnectionSettings conSettings = getDatabaseConnectionSettings();
    // synchronized (conSettings.syncConnection(conn)) {
    return conSettings.execute(cp, conn -> {
        exec.setMessage("Start updating rows in database...");
        final DataTableSpec spec = data.getDataTableSpec();
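        // build the parameterized UPDATE statement; its '?' placeholders are filled per row below
        // (SET columns first, then WHERE columns)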
        final String updateStmt = createUpdateStatement(table, setColumns, whereColumns);
        // problems writing more than 13 columns. the prepare statement
        // ensures that we can set the columns directly row-by-row, the
        // database will handle the commit
        long rowCount = data.size();
        int cnt = 1;
        int errorCnt = 0;
        int allErrors = 0;
        // count number of rows added to current batch
        int curBatchSize = 0;
        // selected timezone
        final TimeZone timezone = conSettings.getTimeZone();
        LOGGER.debug("Executing SQL statement as prepareStatement: " + updateStmt);
        final PreparedStatement stmt = conn.prepareStatement(updateStmt);
        // remember auto-commit flag
        final boolean autoCommit = conn.getAutoCommit();
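        // switch off auto-commit; changes are committed explicitly once all rows are written
        // (the original flag is restored in the finally block below)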
        DatabaseConnectionSettings.setAutoCommit(conn, false);
        try {
            for (RowIterator it = data.iterator(); it.hasNext(); cnt++) {
                exec.checkCanceled();
                exec.setProgress(1.0 * cnt / rowCount, "Row " + "#" + cnt);
                final DataRow row = it.next();
                // SET columns
                for (int i = 0; i < setColumns.length; i++) {
                    final int dbIdx = i + 1;
                    final int columnIndex = spec.findColumnIndex(setColumns[i]);
                    final DataColumnSpec cspec = spec.getColumnSpec(columnIndex);
                    final DataCell cell = row.getCell(columnIndex);
                    fillStatement(stmt, dbIdx, cspec, cell, timezone, null);
                }
                // WHERE columns
                for (int i = 0; i < whereColumns.length; i++) {
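                    // WHERE parameters follow the SET parameters in the prepared statement, hence the offset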
                    final int dbIdx = i + 1 + setColumns.length;
                    final int columnIndex = spec.findColumnIndex(whereColumns[i]);
                    final DataColumnSpec cspec = spec.getColumnSpec(columnIndex);
                    final DataCell cell = row.getCell(columnIndex);
                    fillStatement(stmt, dbIdx, cspec, cell, timezone, null);
                }
                // if batch mode
                if (batchSize > 1) {
                    // a new row will be added
                    stmt.addBatch();
                }
                curBatchSize++;
                // flush if the batch is full or the end of the input table is reached
                if ((curBatchSize == batchSize) || !it.hasNext()) {
                    curBatchSize = 0;
                    try {
                        // write batch
                        if (batchSize > 1) {
                            int[] status = stmt.executeBatch();
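                            // map the driver's per-batch results back to the absolute row positions in updateStatus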
                            for (int i = 0; i < status.length; i++) {
                                updateStatus[cnt - status.length + i] = status[i];
                            }
                        } else {
                            // or write single row
                            int status = stmt.executeUpdate();
                            updateStatus[cnt - 1] = status;
                        }
                    } catch (Throwable t) {
                        // commit the rows written so far before recording the error
                        if (!conn.getAutoCommit()) {
                            conn.commit();
                        }
                        allErrors++;
                        if (errorCnt > -1) {
                            final String errorMsg;
                            if (batchSize > 1) {
                                errorMsg = "Error while updating rows #" + (cnt - batchSize) + " - #" + cnt + ", reason: " + t.getMessage();
                            } else {
                                errorMsg = "Error while updating row #" + cnt + " (" + row.getKey() + "), reason: " + t.getMessage();
                            }
                            exec.setMessage(errorMsg);
                            if (errorCnt++ < 10) {
                                LOGGER.warn(errorMsg);
                            } else {
                                errorCnt = -1;
                                LOGGER.warn(errorMsg + " - more errors...", t);
                            }
                        }
                    } finally {
                        // clear batch if in batch mode
                        if (batchSize > 1) {
                            stmt.clearBatch();
                        }
                    }
                }
            }
            if (!conn.getAutoCommit()) {
                conn.commit();
            }
            if (allErrors == 0) {
                return null;
            } else {
                return "Errors \"" + allErrors + "\" updating " + rowCount + " rows.";
            }
        } finally {
            DatabaseConnectionSettings.setAutoCommit(conn, autoCommit);
            stmt.close();
        }
    });
}
Also used : DataTableSpec(org.knime.core.data.DataTableSpec) DatabaseConnectionSettings(org.knime.core.node.port.database.DatabaseConnectionSettings) PreparedStatement(java.sql.PreparedStatement) DataRow(org.knime.core.data.DataRow) TimeZone(java.util.TimeZone) DataColumnSpec(org.knime.core.data.DataColumnSpec) RowIterator(org.knime.core.data.RowIterator) DataCell(org.knime.core.data.DataCell)
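
For context, here is a minimal sketch of how updateTable might be called from node code. It assumes the DBWriter interface declares updateTable with the signature shown above and that the import paths match the KNIME 3.x API; the table name "products", the columns "price" and "id", and the batch size of 100 are made-up values for illustration.

import org.knime.core.node.BufferedDataTable;
import org.knime.core.node.ExecutionMonitor;
import org.knime.core.node.port.database.writer.DBWriter;
import org.knime.core.node.workflow.CredentialsProvider;

final class UpdateTableSketch {

    /** Updates the "price" column of the hypothetical table "products", matching rows on "id". */
    static void runUpdate(final DBWriter writer, final BufferedDataTable data,
            final ExecutionMonitor exec, final CredentialsProvider cp) throws Exception {
        final String[] setColumns = {"price"};   // written in the SET clause
        final String[] whereColumns = {"id"};    // matched in the WHERE clause
        // one slot per input row, filled with the update count reported by the database
        final int[] updateStatus = new int[(int) data.size()];
        final String error = writer.updateTable(null /* schema, assumed optional */, "products",
            data, setColumns, whereColumns, updateStatus, exec, cp, 100 /* batch size */);
        if (error != null) {
            throw new IllegalStateException(error);
        }
    }
}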

Aggregations

DatabaseConnectionSettings (org.knime.core.node.port.database.DatabaseConnectionSettings) 26
InvalidSettingsException (org.knime.core.node.InvalidSettingsException) 14
DataTableSpec (org.knime.core.data.DataTableSpec) 12
DatabaseConnectionPortObject (org.knime.core.node.port.database.DatabaseConnectionPortObject) 10
SQLException (java.sql.SQLException) 7
DatabaseConnectionPortObjectSpec (org.knime.core.node.port.database.DatabaseConnectionPortObjectSpec) 7
PortObject (org.knime.core.node.port.PortObject) 6
ColumnRearranger (org.knime.core.data.container.ColumnRearranger) 4
PortObjectSpec (org.knime.core.node.port.PortObjectSpec) 4
DBWriter (org.knime.core.node.port.database.writer.DBWriter) 4
DataColumnSpecFilterConfiguration (org.knime.core.node.util.filter.column.DataColumnSpecFilterConfiguration) 4
PreparedStatement (java.sql.PreparedStatement) 3
ArrayList (java.util.ArrayList) 3
TimeZone (java.util.TimeZone) 3
DataCell (org.knime.core.data.DataCell) 3
DataColumnSpec (org.knime.core.data.DataColumnSpec) 3
DataRow (org.knime.core.data.DataRow) 3
BufferedDataTable (org.knime.core.node.BufferedDataTable) 3
SettingsModelString (org.knime.core.node.defaultnodesettings.SettingsModelString) 3
DatabasePortObjectSpec (org.knime.core.node.port.database.DatabasePortObjectSpec) 3