
Example 1 with DatabaseConnectionSettings

use of org.knime.core.node.port.database.DatabaseConnectionSettings in project knime-core by knime.

the class DBWriterImpl method writeData.

/**
 * {@inheritDoc}
 * @deprecated
 */
@Deprecated
@Override
public String writeData(final String table, final RowInput input, final long rowCount, final boolean appendData, final ExecutionMonitor exec, final Map<String, String> sqlTypes, final CredentialsProvider cp, final int batchSize, final boolean insertNullForMissingCols, final boolean failOnError) throws Exception {
    final DatabaseConnectionSettings conSettings = getDatabaseConnectionSettings();
    // final Connection conn = conSettings.createConnection(cp);
    return conSettings.execute(cp, conn -> {
        exec.setMessage("Waiting for free database connection...");
        final StringBuilder columnNamesForInsertStatement = new StringBuilder("(");
        // synchronized (conSettings.syncConnection(conn)) {
        exec.setMessage("Start writing rows in database...");
        DataTableSpec spec = input.getDataTableSpec();
        // mapping from spec columns to database columns
        final int[] mapping;
        // append data to existing table
        if (appendData) {
            if (conSettings.getUtility().tableExists(conn, table)) {
                String query = conSettings.getUtility().getStatementManipulator().forMetadataOnly("SELECT * FROM " + table);
                try (ResultSet rs = conn.createStatement().executeQuery(query)) {
                    ResultSetMetaData rsmd = rs.getMetaData();
                    final Map<String, Integer> columnNames = new LinkedHashMap<String, Integer>();
                    for (int i = 0; i < spec.getNumColumns(); i++) {
                        String colName = replaceColumnName(spec.getColumnSpec(i).getName());
                        columnNames.put(colName.toLowerCase(), i);
                    }
                    // sanity check to see whether all input columns exist in the database table
                    ArrayList<String> columnNotInSpec = new ArrayList<String>(columnNames.keySet());
                    for (int i = 0; i < rsmd.getColumnCount(); i++) {
                        String dbColName = replaceColumnName(rsmd.getColumnName(i + 1));
                        if (columnNames.containsKey(dbColName.toLowerCase())) {
                            columnNotInSpec.remove(dbColName.toLowerCase());
                            columnNamesForInsertStatement.append(dbColName).append(',');
                        } else if (insertNullForMissingCols) {
                            // append the column name of a missing column only if the insert null for missing
                            // column option is enabled
                            columnNamesForInsertStatement.append(dbColName).append(',');
                        }
                    }
                    if (rsmd.getColumnCount() > 0) {
                        columnNamesForInsertStatement.deleteCharAt(columnNamesForInsertStatement.length() - 1);
                    }
                    columnNamesForInsertStatement.append(')');
                    if (columnNotInSpec.size() > 0) {
                        throw new RuntimeException("No. of columns in input" + " table > in database; not existing columns: " + columnNotInSpec.toString());
                    }
                    mapping = new int[rsmd.getColumnCount()];
                    for (int i = 0; i < mapping.length; i++) {
                        String name = replaceColumnName(rsmd.getColumnName(i + 1)).toLowerCase();
                        if (!columnNames.containsKey(name)) {
                            mapping[i] = -1;
                            continue;
                        }
                        mapping[i] = columnNames.get(name);
                        DataColumnSpec cspec = spec.getColumnSpec(mapping[i]);
                        int type = rsmd.getColumnType(i + 1);
                        switch(type) {
                            // check all boolean compatible types
                            case Types.BIT:
                            case Types.BOOLEAN:
                                // types must be compatible with BooleanValue
                                if (!cspec.getType().isCompatible(BooleanValue.class)) {
                                    throw new RuntimeException("Column \"" + name + "\" of type \"" + cspec.getType() + "\" from input does not match type " + "\"" + rsmd.getColumnTypeName(i + 1) + "\" in database at position " + i);
                                }
                                break;
                            // check all int compatible types
                            case Types.TINYINT:
                            case Types.SMALLINT:
                            case Types.INTEGER:
                                // types must be compatible with IntValue
                                if (!cspec.getType().isCompatible(IntValue.class)) {
                                    throw new RuntimeException("Column \"" + name + "\" of type \"" + cspec.getType() + "\" from input does not match type " + "\"" + rsmd.getColumnTypeName(i + 1) + "\" in database at position " + i);
                                }
                                break;
                            case Types.BIGINT:
                                // types must be compatible with LongValue
                                if (!cspec.getType().isCompatible(LongValue.class)) {
                                    throw new RuntimeException("Column \"" + name + "\" of type \"" + cspec.getType() + "\" from input does not match type " + "\"" + rsmd.getColumnTypeName(i + 1) + "\" in database at position " + i);
                                }
                                break;
                            // check all double compatible types
                            case Types.FLOAT:
                            case Types.DOUBLE:
                            case Types.NUMERIC:
                            case Types.DECIMAL:
                            case Types.REAL:
                                // types must be compatible with DoubleValue
                                if (!cspec.getType().isCompatible(DoubleValue.class)) {
                                    throw new RuntimeException("Column \"" + name + "\" of type \"" + cspec.getType() + "\" from input does not match type " + "\"" + rsmd.getColumnTypeName(i + 1) + "\" in database at position " + i);
                                }
                                break;
                            // check for date-and-time compatible types
                            case Types.DATE:
                            case Types.TIME:
                            case Types.TIMESTAMP:
                                // types must be compatible with DateAndTimeValue
                                if (!cspec.getType().isCompatible(DateAndTimeValue.class)) {
                                    throw new RuntimeException("Column \"" + name + "\" of type \"" + cspec.getType() + "\" from input does not match type " + "\"" + rsmd.getColumnTypeName(i + 1) + "\" in database at position " + i);
                                }
                                break;
                            // check for blob compatible types
                            case Types.BLOB:
                            case Types.BINARY:
                            case Types.LONGVARBINARY:
                                // types must be compatible with BinaryObjectDataValue
                                if (!cspec.getType().isCompatible(BinaryObjectDataValue.class)) {
                                    throw new RuntimeException("Column \"" + name + "\" of type \"" + cspec.getType() + "\" from input does not match type " + "\"" + rsmd.getColumnTypeName(i + 1) + "\" in database at position " + i);
                                }
                                break;
                        }
                    }
                }
            } else {
                LOGGER.info("Table \"" + table + "\" does not exist in database, " + "will create new table.");
                // and create new table
                final String query = "CREATE TABLE " + table + " " + createTableStmt(spec, sqlTypes, columnNamesForInsertStatement);
                LOGGER.debug("Executing SQL statement as execute: " + query);
                try (Statement statement = conn.createStatement()) {
                    statement.execute(query);
                }
                if (!conn.getAutoCommit()) {
                    conn.commit();
                }
                mapping = new int[spec.getNumColumns()];
                for (int k = 0; k < mapping.length; k++) {
                    mapping[k] = k;
                }
            }
        } else {
            LOGGER.debug("Append not enabled. Table " + table + " will be dropped if exists.");
            mapping = new int[spec.getNumColumns()];
            for (int k = 0; k < mapping.length; k++) {
                mapping[k] = k;
            }
            Statement statement = null;
            try {
                statement = conn.createStatement();
                // remove existing table (if any)
                final String query = "DROP TABLE " + table;
                LOGGER.debug("Executing SQL statement as execute: " + query);
                statement.execute(query);
            } catch (Throwable t) {
                if (statement == null) {
                    throw new SQLException("Could not create SQL statement," + " reason: " + t.getMessage(), t);
                }
                LOGGER.info("Exception droping table \"" + table + "\": " + t.getMessage() + ". Will create new table.");
            } finally {
                if (!conn.getAutoCommit()) {
                    conn.commit();
                }
            }
            // and create new table
            final String query = "CREATE TABLE " + table + " " + createTableStmt(spec, sqlTypes, columnNamesForInsertStatement);
            LOGGER.debug("Executing SQL statement as execute: " + query);
            statement.execute(query);
            statement.close();
            if (!conn.getAutoCommit()) {
                conn.commit();
            }
        }
        // this is a (temporary) workaround for bug #5802: if there is a DataValue column in the input table
        // we need to use the SQL type for creating the insert statements.
        Map<Integer, Integer> columnTypes = null;
        for (DataColumnSpec cs : spec) {
            if (cs.getType().getPreferredValueClass() == DataValue.class) {
                columnTypes = getColumnTypes(conn, table);
                break;
            }
        }
        final String insertStatement = createInsertStatment(table, columnNamesForInsertStatement.toString(), mapping, insertNullForMissingCols);
        // a PreparedStatement avoids problems when writing more than 13 columns;
        // it lets us set the column values directly row-by-row while the
        // database handles the commit
        long cnt = 1;
        long errorCnt = 0;
        long allErrors = 0;
        // count number of rows added to current batch
        int curBatchSize = 0;
        LOGGER.debug("Executing SQL statement as prepareStatement: " + insertStamtement);
        final PreparedStatement stmt = conn.prepareStatement(insertStamtement);
        // remember auto-commit flag
        final boolean autoCommit = conn.getAutoCommit();
        DatabaseConnectionSettings.setAutoCommit(conn, false);
        try {
            final TimeZone timezone = conSettings.getTimeZone();
            // get the first row
            DataRow row;
            DataRow nextRow = input.poll();
            // iterate over all incoming data rows
            while (nextRow != null) {
                row = nextRow;
                cnt++;
                exec.checkCanceled();
                if (rowCount > 0) {
                    exec.setProgress(1.0 * cnt / rowCount, "Row " + "#" + cnt);
                } else {
                    exec.setProgress("Writing Row#" + cnt);
                }
                int dbIdx = 1;
                for (int i = 0; i < mapping.length; i++) {
                    if (mapping[i] < 0) {
                        if (insertNullForMissingCols) {
                            // insert only null if the insert null for missing col option is enabled
                            stmt.setNull(dbIdx++, Types.NULL);
                        }
                    } else {
                        final DataColumnSpec cspec = spec.getColumnSpec(mapping[i]);
                        final DataCell cell = row.getCell(mapping[i]);
                        fillStatement(stmt, dbIdx++, cspec, cell, timezone, columnTypes);
                    }
                }
                // if batch mode
                if (batchSize > 1) {
                    // a new row will be added
                    stmt.addBatch();
                }
                // get one more input row to check if 'row' is the last one
                nextRow = input.poll();
                curBatchSize++;
                // if batch size equals number of row in batch or input table at end
                if ((curBatchSize == batchSize) || nextRow == null) {
                    curBatchSize = 0;
                    try {
                        // write batch
                        if (batchSize > 1) {
                            stmt.executeBatch();
                        } else {
                            // or write single row
                            stmt.execute();
                        }
                    } catch (Throwable t) {
                        final String errorMsg;
                        if (batchSize > 1) {
                            errorMsg = "Error while adding rows #" + (cnt - batchSize) + " - #" + cnt + ", reason: " + t.getMessage();
                        } else {
                            errorMsg = "Error while adding row #" + cnt + " (" + row.getKey() + "), reason: " + t.getMessage();
                        }
                        // introduced in KNIME 3.3.2
                        if (failOnError) {
                            try {
                                // rollback all changes
                                conn.rollback();
                                LOGGER.debug("Rollback complete transaction with auto commit=" + autoCommit);
                            } catch (Throwable ex) {
                                LOGGER.info("Failed rollback after db exception with auto commit=" + autoCommit + ". Rollback error: " + ex.getMessage(), ex);
                            }
                            throw new Exception(errorMsg, t);
                        }
                        // even on error, commit the rows written so far and continue anyway.
                        if (!conn.getAutoCommit()) {
                            conn.commit();
                        }
                        allErrors++;
                        if (errorCnt > -1) {
                            exec.setMessage(errorMsg);
                            if (errorCnt++ < 10) {
                                LOGGER.warn(errorMsg);
                            } else {
                                errorCnt = -1;
                                LOGGER.warn(errorMsg + " - more errors...", t);
                            }
                        }
                    } finally {
                        // clear batch if in batch mode
                        if (batchSize > 1) {
                            stmt.clearBatch();
                        }
                    }
                }
            }
            if (!conn.getAutoCommit()) {
                conn.commit();
            }
            if (allErrors == 0) {
                return null;
            } else {
                return "Errors \"" + allErrors + "\" writing " + (cnt - 1) + " rows.";
            }
        } finally {
            DatabaseConnectionSettings.setAutoCommit(conn, autoCommit);
            stmt.close();
        }
    });
}
Also used : DataTableSpec(org.knime.core.data.DataTableSpec) SQLException(java.sql.SQLException) DateAndTimeValue(org.knime.core.data.date.DateAndTimeValue) ArrayList(java.util.ArrayList) DataRow(org.knime.core.data.DataRow) LinkedHashMap(java.util.LinkedHashMap) ResultSetMetaData(java.sql.ResultSetMetaData) DataColumnSpec(org.knime.core.data.DataColumnSpec) BinaryObjectDataValue(org.knime.core.data.blob.BinaryObjectDataValue) BooleanValue(org.knime.core.data.BooleanValue) ResultSet(java.sql.ResultSet) IntValue(org.knime.core.data.IntValue) DatabaseConnectionSettings(org.knime.core.node.port.database.DatabaseConnectionSettings) PreparedStatement(java.sql.PreparedStatement) Statement(java.sql.Statement) TimeZone(java.util.TimeZone) DoubleValue(org.knime.core.data.DoubleValue) LongValue(org.knime.core.data.LongValue) DataCell(org.knime.core.data.DataCell)
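
For orientation, a minimal sketch of how the deprecated writeData method above might be invoked. The DBWriter instance, RowInput, ExecutionMonitor and CredentialsProvider are assumed to be supplied by the surrounding KNIME node; the table name, batch size and the empty SQL-type map are illustrative assumptions, not taken from the example.

import java.util.LinkedHashMap;
import java.util.Map;

import org.knime.core.node.ExecutionMonitor;
import org.knime.core.node.port.database.writer.DBWriter;
import org.knime.core.node.streamable.RowInput;
import org.knime.core.node.workflow.CredentialsProvider;

final class WriteDataSketch {

    // Hypothetical helper; all parameters are assumed to come from the calling node.
    static void writeAllRows(final DBWriter writer, final RowInput input, final long rowCount,
            final ExecutionMonitor exec, final CredentialsProvider cp) throws Exception {
        // empty map: rely on the writer's default SQL type mapping (assumption)
        final Map<String, String> sqlTypes = new LinkedHashMap<>();
        final String error = writer.writeData("MY_TABLE", input, rowCount,
            /* appendData */ true, exec, sqlTypes, cp,
            /* batchSize */ 100,
            /* insertNullForMissingCols */ false,
            /* failOnError */ true);
        // writeData returns an error summary instead of throwing when failOnError is false
        if (error != null) {
            throw new IllegalStateException(error);
        }
    }
}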

Example 2 with DatabaseConnectionSettings

use of org.knime.core.node.port.database.DatabaseConnectionSettings in project knime-core by knime.

the class DBWriterImpl method deleteRows.

/**
 * Create connection to delete rows from a table in the database.
 * @param schema table schema name. Could be <code>null</code>.
 * @param data The data identifying the rows to delete.
 * @param whereColumns columns part of the WHERE clause
 * @param deleteStatus int array of length data#getRowCount; will be filled with
 *             the number of rows affected
 * @param table name of table to delete from
 * @param exec Used to cancel the operation.
 * @param cp {@link CredentialsProvider} providing user/password
 * @param batchSize number of rows deleted in one batch
 * @return error string, or <code>null</code> if none
 * @throws Exception if connection could not be established
 */
@Override
public final String deleteRows(final String schema, final String table, final BufferedDataTable data, final String[] whereColumns, final int[] deleteStatus, final ExecutionMonitor exec, final CredentialsProvider cp, final int batchSize) throws Exception {
    DatabaseConnectionSettings conSettings = getDatabaseConnectionSettings();
    // synchronized (conSettings.syncConnection(conn)) {
    return conSettings.execute(cp, conn -> {
        exec.setMessage("Start deleting rows from database...");
        final DataTableSpec spec = data.getDataTableSpec();
        // create query connection object
        final StringBuilder query = new StringBuilder("DELETE FROM " + table + " WHERE");
        for (int i = 0; i < whereColumns.length; i++) {
            if (i > 0) {
                query.append(" AND");
            }
            final String newColumnName = replaceColumnName(whereColumns[i]);
            query.append(" " + newColumnName + " = ?");
        }
        // a PreparedStatement avoids problems when writing more than 13 columns;
        // it lets us set the column values directly row-by-row while the
        // database handles the commit
        long rowCount = data.size();
        int cnt = 1;
        int errorCnt = 0;
        int allErrors = 0;
        // count number of rows added to current batch
        int curBatchSize = 0;
        // selected timezone
        final TimeZone timezone = conSettings.getTimeZone();
        LOGGER.debug("Executing SQL statement as prepareStatement: " + query);
        final PreparedStatement stmt = conn.prepareStatement(query.toString());
        // remember auto-commit flag
        final boolean autoCommit = conn.getAutoCommit();
        DatabaseConnectionSettings.setAutoCommit(conn, false);
        try {
            for (RowIterator it = data.iterator(); it.hasNext(); cnt++) {
                exec.checkCanceled();
                exec.setProgress(1.0 * cnt / rowCount, "Row " + "#" + cnt);
                final DataRow row = it.next();
                // WHERE columns
                for (int i = 0; i < whereColumns.length; i++) {
                    final int dbIdx = i + 1;
                    final int columnIndex = spec.findColumnIndex(whereColumns[i]);
                    final DataColumnSpec cspec = spec.getColumnSpec(columnIndex);
                    final DataCell cell = row.getCell(columnIndex);
                    fillStatement(stmt, dbIdx, cspec, cell, timezone, null);
                }
                // if batch mode
                if (batchSize > 1) {
                    // a new row will be added
                    stmt.addBatch();
                }
                curBatchSize++;
                // if batch size equals number of row in batch or input table at end
                if ((curBatchSize == batchSize) || !it.hasNext()) {
                    curBatchSize = 0;
                    try {
                        // write batch
                        if (batchSize > 1) {
                            int[] status = stmt.executeBatch();
                            for (int i = 0; i < status.length; i++) {
                                deleteStatus[cnt - status.length + i] = status[i];
                            }
                        } else {
                            // or write single row
                            int status = stmt.executeUpdate();
                            deleteStatus[cnt - 1] = status;
                        }
                    } catch (Throwable t) {
                        // even on error, commit the changes made so far and continue anyway.
                        if (!conn.getAutoCommit()) {
                            conn.commit();
                        }
                        allErrors++;
                        if (errorCnt > -1) {
                            final String errorMsg;
                            if (batchSize > 1) {
                                errorMsg = "Error while deleting rows #" + (cnt - batchSize) + " - #" + cnt + ", reason: " + t.getMessage();
                            } else {
                                errorMsg = "Error while deleting row #" + cnt + " (" + row.getKey() + "), reason: " + t.getMessage();
                            }
                            exec.setMessage(errorMsg);
                            if (errorCnt++ < 10) {
                                LOGGER.warn(errorMsg);
                            } else {
                                errorCnt = -1;
                                LOGGER.warn(errorMsg + " - more errors...", t);
                            }
                        }
                    } finally {
                        // clear batch if in batch mode
                        if (batchSize > 1) {
                            stmt.clearBatch();
                        }
                    }
                }
            }
            if (!conn.getAutoCommit()) {
                conn.commit();
            }
            if (allErrors == 0) {
                return null;
            } else {
                return "Errors \"" + allErrors + "\" deleting " + rowCount + " rows.";
            }
        } finally {
            DatabaseConnectionSettings.setAutoCommit(conn, autoCommit);
            stmt.close();
        }
    });
}
Also used : DataTableSpec(org.knime.core.data.DataTableSpec) DatabaseConnectionSettings(org.knime.core.node.port.database.DatabaseConnectionSettings) PreparedStatement(java.sql.PreparedStatement) DataRow(org.knime.core.data.DataRow) TimeZone(java.util.TimeZone) DataColumnSpec(org.knime.core.data.DataColumnSpec) RowIterator(org.knime.core.data.RowIterator) DataCell(org.knime.core.data.DataCell)
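
In the same spirit, a hedged sketch of a deleteRows call. The writer, input table, monitor and credentials provider are assumptions, and the WHERE column "id" is purely illustrative.

import org.knime.core.node.BufferedDataTable;
import org.knime.core.node.ExecutionMonitor;
import org.knime.core.node.port.database.writer.DBWriter;
import org.knime.core.node.workflow.CredentialsProvider;

final class DeleteRowsSketch {

    static void deleteById(final DBWriter writer, final BufferedDataTable data,
            final ExecutionMonitor exec, final CredentialsProvider cp) throws Exception {
        // one status entry per input row, filled by deleteRows with the update counts
        final int[] deleteStatus = new int[(int) data.size()];
        final String error = writer.deleteRows(null /* schema */, "MY_TABLE", data,
            new String[]{"id"}, deleteStatus, exec, cp, /* batchSize */ 100);
        if (error != null) {
            throw new IllegalStateException(error);
        }
    }
}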

Example 3 with DatabaseConnectionSettings

use of org.knime.core.node.port.database.DatabaseConnectionSettings in project knime-core by knime.

the class DBDialogPane method saveSettingsTo.

/**
 * Save settings.
 * @param settings to save into
 * @param credProvider a credentials provider, must not be <code>null</code>
 * @throws InvalidSettingsException if the connection could not be validated
 */
public void saveSettingsTo(final NodeSettingsWO settings, final CredentialsProvider credProvider) throws InvalidSettingsException {
    DatabaseConnectionSettings s = new DatabaseConnectionSettings();
    String driverName = m_driver.getSelectedItem().toString();
    s.setDriver(driverName);
    String url = m_db.getEditor().getItem().toString();
    s.setJDBCUrl(url);
    boolean useCredential = m_credCheckBox.isSelected();
    if (useCredential) {
        s.setCredentialName((String) m_credBox.getSelectedItem());
    } else {
        s.setUserName(m_user.getText().trim());
        if (m_passwordChanged) {
            try {
                s.setPassword(KnimeEncryption.encrypt(m_pass.getPassword()));
            } catch (InvalidKeyException | BadPaddingException | IllegalBlockSizeException | UnsupportedEncodingException ex) {
                LOGGER.error("Could not encrypt password, reason: " + ex.getMessage(), ex);
            }
        } else {
            s.setPassword(new String(m_pass.getPassword()));
        }
    }
    if (m_noCorrectionTZ.isSelected()) {
        s.setTimezone("none");
    } else if (m_currentTZ.isSelected()) {
        s.setTimezone("current");
    } else {
        final String timezone = (String) m_timezone.getSelectedItem();
        s.setTimezone(timezone);
    }
    s.setAllowSpacesInColumnNames(m_allowSpacesInColumnNames.isSelected());
    s.setValidateConnection(m_validateConnection.isSelected());
    s.setRetrieveMetadataInConfigure(m_retrieveMetadataInConfigure.isSelected());
    if (s.getValidateConnection()) {
        try {
            s.execute(credProvider, conn -> {
                return conn != null;
            });
        } catch (SQLException ex) {
            Throwable cause = ExceptionUtils.getRootCause(ex);
            if (cause == null) {
                cause = ex;
            }
            throw new InvalidSettingsException("Database connection could not be validated: " + cause.getMessage(), ex);
        }
    }
    s.saveConnection(settings);
}
Also used : DatabaseConnectionSettings(org.knime.core.node.port.database.DatabaseConnectionSettings) SQLException(java.sql.SQLException) InvalidSettingsException(org.knime.core.node.InvalidSettingsException) IllegalBlockSizeException(javax.crypto.IllegalBlockSizeException) UnsupportedEncodingException(java.io.UnsupportedEncodingException) BadPaddingException(javax.crypto.BadPaddingException) InvalidKeyException(java.security.InvalidKeyException)
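
The same settings object can be assembled and validated without the dialog. A sketch reusing only the setters and the execute-based validation shown above; the driver class and JDBC URL are illustrative assumptions.

import org.knime.core.node.InvalidSettingsException;
import org.knime.core.node.NodeSettings;
import org.knime.core.node.port.database.DatabaseConnectionSettings;
import org.knime.core.node.workflow.CredentialsProvider;

final class ConnectionSettingsSketch {

    static void saveValidated(final NodeSettings settings, final CredentialsProvider cp)
            throws InvalidSettingsException {
        final DatabaseConnectionSettings s = new DatabaseConnectionSettings();
        s.setDriver("org.postgresql.Driver");               // assumption: driver is registered in KNIME
        s.setJDBCUrl("jdbc:postgresql://localhost/knime");  // illustrative URL
        s.setUserName("knime");
        s.setTimezone("none");
        s.setValidateConnection(true);
        if (s.getValidateConnection()) {
            try {
                // open a connection once, only to verify that the settings work
                s.execute(cp, conn -> conn != null);
            } catch (Exception ex) {
                throw new InvalidSettingsException(
                    "Database connection could not be validated: " + ex.getMessage(), ex);
            }
        }
        s.saveConnection(settings);
    }
}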

Example 4 with DatabaseConnectionSettings

use of org.knime.core.node.port.database.DatabaseConnectionSettings in project knime-core by knime.

the class DBDialogPane method loadSettingsFrom.

/**
 * Load settings.
 * @param settings to load
 * @param specs input spec
 * @param credProvider a credentials provider, must not be <code>null</code>
 */
public void loadSettingsFrom(final NodeSettingsRO settings, final PortObjectSpec[] specs, final CredentialsProvider credProvider) {
    DatabaseConnectionSettings s = new DatabaseConnectionSettings();
    try {
        s.loadValidatedConnection(settings, null);
    } catch (InvalidSettingsException ex) {
    // use the available settings as they are
    }
    // update credentials
    m_credBox.removeAllItems();
    if (credProvider == null || credProvider.listNames().isEmpty()) {
        m_credCheckBox.setEnabled(false);
        m_credBox.setEnabled(false);
    } else {
        m_credCheckBox.setEnabled(true);
        m_credBox.setEnabled(true);
        for (String c : credProvider.listNames()) {
            m_credBox.addItem(c);
        }
    }
    // database driver and name
    m_driver.removeAllItems();
    // update list of registered drivers
    updateDriver();
    // check if at least one driver is selected and the list is not empty
    final Object selectedDriver = m_driver.getSelectedItem();
    if (selectedDriver != null) {
        m_driver.setSelectedItem(s.getDriver());
    }
    // update list of urls
    m_db.removeAllItems();
    for (String databaseURL : DatabaseConnectionSettings.DATABASE_URLS.getHistory()) {
        m_db.addItem(databaseURL);
    }
    if (s.getJDBCUrl() == null) {
        m_db.setSelectedItem("jdbc:odbc:<database_name>");
    } else {
        m_db.setSelectedItem(s.getJDBCUrl());
    }
    boolean useCredential = (s.getCredentialName() != null);
    enableCredentials(useCredential);
    if (useCredential) {
        m_credBox.setSelectedItem(s.getCredentialName());
        m_credCheckBox.setSelected(true);
    } else {
        // user
        String user = s.getUserName(null);
        m_user.setText(user == null ? "" : user);
        // password
        String password = s.getPassword(null);
        m_pass.setText(password == null ? "" : password);
        m_passwordChanged = false;
        m_credCheckBox.setSelected(false);
    }
    // read timezone
    final String timezone = s.getTimezone();
    if ("none".equals(timezone) || (timezone == null)) {
        m_noCorrectionTZ.setSelected(true);
    } else if ("current".equals(timezone)) {
        m_currentTZ.setSelected(true);
    } else {
        m_selectTZ.setSelected(true);
        m_timezone.setSelectedItem(timezone);
    }
    m_allowSpacesInColumnNames.setSelected(s.getAllowSpacesInColumnNames());
    m_validateConnection.setSelected(s.getValidateConnection());
    m_retrieveMetadataInConfigure.setSelected(s.getRetrieveMetadataInConfigure());
}
Also used : DatabaseConnectionSettings(org.knime.core.node.port.database.DatabaseConnectionSettings) InvalidSettingsException(org.knime.core.node.InvalidSettingsException)
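
Outside the dialog the stored connection can be read back with the same load call and getters used above; a small sketch, printing values instead of populating UI components.

import org.knime.core.node.InvalidSettingsException;
import org.knime.core.node.NodeSettingsRO;
import org.knime.core.node.port.database.DatabaseConnectionSettings;

final class LoadSettingsSketch {

    static DatabaseConnectionSettings load(final NodeSettingsRO settings) {
        final DatabaseConnectionSettings s = new DatabaseConnectionSettings();
        try {
            s.loadValidatedConnection(settings, null);
        } catch (InvalidSettingsException ex) {
            // as in the dialog: fall back to whatever could be read
        }
        System.out.println("Driver:   " + s.getDriver());
        System.out.println("JDBC URL: " + s.getJDBCUrl());
        System.out.println("User:     " + s.getUserName(null));
        System.out.println("Timezone: " + s.getTimezone());
        return s;
    }
}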

Example 5 with DatabaseConnectionSettings

use of org.knime.core.node.port.database.DatabaseConnectionSettings in project knime-core by knime.

the class DBWriterNodeModel method configure.

/**
 * {@inheritDoc}
 */
@Override
protected PortObjectSpec[] configure(final PortObjectSpec[] inSpecs) throws InvalidSettingsException {
    DataTableSpec tableSpec = (DataTableSpec) inSpecs[0];
    // check optional incoming connection
    if ((inSpecs.length > 1) && (inSpecs[1] instanceof DatabaseConnectionPortObjectSpec)) {
        DatabaseConnectionSettings connSettings = ((DatabaseConnectionPortObjectSpec) inSpecs[1]).getConnectionSettings(getCredentialsProvider());
        if ((connSettings.getJDBCUrl() == null) || connSettings.getJDBCUrl().isEmpty() || (connSettings.getDriver() == null) || connSettings.getDriver().isEmpty()) {
            throw new InvalidSettingsException("No valid database connection provided via second input port");
        }
        if (!connSettings.getUtility().supportsInsert()) {
            throw new InvalidSettingsException("Connected database does not support insert operations");
        }
    } else {
        if (!m_conn.getUtility().supportsInsert()) {
            throw new InvalidSettingsException("Selected database does not support insert operations");
        }
    }
    // check table name
    if ((m_tableName == null) || m_tableName.trim().isEmpty()) {
        throw new InvalidSettingsException("Configure node and enter a valid table name.");
    }
    // throw exception if no data provided
    if (tableSpec.getNumColumns() == 0) {
        throw new InvalidSettingsException("No columns in input data.");
    }
    // copy map to ensure it only contains columns that are present in the input data
    Map<String, String> map = new LinkedHashMap<>();
    // check that each column has an assigned SQL type
    for (int i = 0; i < tableSpec.getNumColumns(); i++) {
        final String name = tableSpec.getColumnSpec(i).getName();
        String sqlType = m_types.get(name);
        if (sqlType == null) {
            final DataType type = tableSpec.getColumnSpec(i).getType();
            if (type.isCompatible(IntValue.class)) {
                sqlType = DBWriterNodeModel.SQL_TYPE_INTEGER;
            } else if (type.isCompatible(DoubleValue.class)) {
                sqlType = DBWriterNodeModel.SQL_TYPE_DOUBLE;
            } else if (type.isCompatible(DateAndTimeValue.class)) {
                sqlType = DBWriterNodeModel.SQL_TYPE_DATEANDTIME;
            } else {
                sqlType = DBWriterNodeModel.SQL_TYPE_STRING;
            }
        }
        map.put(name, sqlType);
    }
    m_types.clear();
    m_types.putAll(map);
    if (!m_append) {
        super.setWarningMessage("Existing table \"" + m_tableName + "\" will be dropped!");
    }
    return new DataTableSpec[0];
}
Also used : DataTableSpec(org.knime.core.data.DataTableSpec) DatabaseConnectionSettings(org.knime.core.node.port.database.DatabaseConnectionSettings) InvalidSettingsException(org.knime.core.node.InvalidSettingsException) DoubleValue(org.knime.core.data.DoubleValue) DataType(org.knime.core.data.DataType) DatabaseConnectionPortObjectSpec(org.knime.core.node.port.database.DatabaseConnectionPortObjectSpec) LinkedHashMap(java.util.LinkedHashMap)
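
The SQL-type fallback in configure can be read as a standalone mapping step. A sketch of that logic; the literal type strings are placeholders for the DBWriterNodeModel.SQL_TYPE_* constants used in the example, not values confirmed by the source.

import java.util.LinkedHashMap;
import java.util.Map;

import org.knime.core.data.DataTableSpec;
import org.knime.core.data.DataType;
import org.knime.core.data.DoubleValue;
import org.knime.core.data.IntValue;
import org.knime.core.data.date.DateAndTimeValue;

final class SqlTypeMappingSketch {

    // Mirrors the fallback logic in configure(): user-assigned types win, otherwise the
    // column's KNIME type decides. The string values below are illustrative placeholders.
    static Map<String, String> defaultSqlTypes(final DataTableSpec tableSpec,
            final Map<String, String> userTypes) {
        final Map<String, String> map = new LinkedHashMap<>();
        for (int i = 0; i < tableSpec.getNumColumns(); i++) {
            final String name = tableSpec.getColumnSpec(i).getName();
            String sqlType = userTypes.get(name);
            if (sqlType == null) {
                final DataType type = tableSpec.getColumnSpec(i).getType();
                if (type.isCompatible(IntValue.class)) {
                    sqlType = "integer";
                } else if (type.isCompatible(DoubleValue.class)) {
                    sqlType = "numeric(30,10)";
                } else if (type.isCompatible(DateAndTimeValue.class)) {
                    sqlType = "timestamp";
                } else {
                    sqlType = "varchar(255)";
                }
            }
            map.put(name, sqlType);
        }
        return map;
    }
}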

Aggregations

DatabaseConnectionSettings (org.knime.core.node.port.database.DatabaseConnectionSettings): 26 usages
InvalidSettingsException (org.knime.core.node.InvalidSettingsException): 14 usages
DataTableSpec (org.knime.core.data.DataTableSpec): 12 usages
DatabaseConnectionPortObject (org.knime.core.node.port.database.DatabaseConnectionPortObject): 10 usages
SQLException (java.sql.SQLException): 7 usages
DatabaseConnectionPortObjectSpec (org.knime.core.node.port.database.DatabaseConnectionPortObjectSpec): 7 usages
PortObject (org.knime.core.node.port.PortObject): 6 usages
ColumnRearranger (org.knime.core.data.container.ColumnRearranger): 4 usages
PortObjectSpec (org.knime.core.node.port.PortObjectSpec): 4 usages
DBWriter (org.knime.core.node.port.database.writer.DBWriter): 4 usages
DataColumnSpecFilterConfiguration (org.knime.core.node.util.filter.column.DataColumnSpecFilterConfiguration): 4 usages
PreparedStatement (java.sql.PreparedStatement): 3 usages
ArrayList (java.util.ArrayList): 3 usages
TimeZone (java.util.TimeZone): 3 usages
DataCell (org.knime.core.data.DataCell): 3 usages
DataColumnSpec (org.knime.core.data.DataColumnSpec): 3 usages
DataRow (org.knime.core.data.DataRow): 3 usages
BufferedDataTable (org.knime.core.node.BufferedDataTable): 3 usages
SettingsModelString (org.knime.core.node.defaultnodesettings.SettingsModelString): 3 usages
DatabasePortObjectSpec (org.knime.core.node.port.database.DatabasePortObjectSpec): 3 usages