
Example 16 with IntValue

Use of org.knime.core.data.IntValue in project knime-core by knime.

Class: DatabaseWriterConnection, method: writeData.

/**
 * Creates a connection to write data into the database.
 * @param dbConn a database connection object
 * @param table name of table to write
 * @param input the data table as row input
 * @param rowCount number of rows of the table to write, or -1 if unknown
 * @param appendData if <code>true</code>, the data is appended to an existing table
 * @param exec used to cancel writing and to report progress
 * @param sqlTypes A mapping from column name to SQL-type.
 * @param cp {@link CredentialsProvider} providing user/password
 * @param batchSize number of rows written in one batch
 * @param insertNullForMissingCols <code>true</code> if <code>null</code> should be inserted for missing columns
 * @return error string, or <code>null</code> if none
 * @throws Exception if connection could not be established
 * @since 3.1
 */
public static final String writeData(final DatabaseConnectionSettings dbConn, final String table, final RowInput input, final long rowCount, final boolean appendData, final ExecutionMonitor exec, final Map<String, String> sqlTypes, final CredentialsProvider cp, final int batchSize, final boolean insertNullForMissingCols) throws Exception {
    // synchronized (dbConn.syncConnection(conn)) {
    return dbConn.execute(cp, conn -> {
        final StringBuilder columnNamesForInsertStatement = new StringBuilder("(");
        exec.setMessage("Start writing rows in database...");
        DataTableSpec spec = input.getDataTableSpec();
        // mapping from spec columns to database columns
        final int[] mapping;
        // append data to existing table
        if (appendData) {
            if (dbConn.getUtility().tableExists(conn, table)) {
                String query = dbConn.getUtility().getStatementManipulator().forMetadataOnly("SELECT * FROM " + table);
                try (ResultSet rs = conn.createStatement().executeQuery(query)) {
                    ResultSetMetaData rsmd = rs.getMetaData();
                    final Map<String, Integer> columnNames = new LinkedHashMap<String, Integer>();
                    for (int i = 0; i < spec.getNumColumns(); i++) {
                        String colName = replaceColumnName(spec.getColumnSpec(i).getName(), dbConn);
                        columnNames.put(colName.toLowerCase(), i);
                    }
                    // sanity check to verify that all input columns exist in the database
                    ArrayList<String> columnNotInSpec = new ArrayList<String>(columnNames.keySet());
                    for (int i = 0; i < rsmd.getColumnCount(); i++) {
                        String dbColName = replaceColumnName(rsmd.getColumnName(i + 1), dbConn);
                        if (columnNames.containsKey(dbColName.toLowerCase())) {
                            columnNotInSpec.remove(dbColName.toLowerCase());
                            columnNamesForInsertStatement.append(dbColName).append(',');
                        } else if (insertNullForMissingCols) {
                            // append the column name of a missing column only if the insert null for missing
                            // column option is enabled
                            columnNamesForInsertStatement.append(dbColName).append(',');
                        }
                    }
                    if (rsmd.getColumnCount() > 0) {
                        columnNamesForInsertStatement.deleteCharAt(columnNamesForInsertStatement.length() - 1);
                    }
                    columnNamesForInsertStatement.append(')');
                    if (columnNotInSpec.size() > 0) {
                        throw new RuntimeException("No. of columns in input" + " table > in database; not existing columns: " + columnNotInSpec.toString());
                    }
                    mapping = new int[rsmd.getColumnCount()];
                    for (int i = 0; i < mapping.length; i++) {
                        String name = replaceColumnName(rsmd.getColumnName(i + 1), dbConn).toLowerCase();
                        if (!columnNames.containsKey(name)) {
                            mapping[i] = -1;
                            continue;
                        }
                        mapping[i] = columnNames.get(name);
                        DataColumnSpec cspec = spec.getColumnSpec(mapping[i]);
                        int type = rsmd.getColumnType(i + 1);
                        switch(type) {
                            // check all boolean compatible types
                            case Types.BIT:
                            case Types.BOOLEAN:
                                // types must be compatible to BooleanValue
                                if (!cspec.getType().isCompatible(BooleanValue.class)) {
                                    throw new RuntimeException("Column \"" + name + "\" of type \"" + cspec.getType() + "\" from input does not match type " + "\"" + rsmd.getColumnTypeName(i + 1) + "\" in database at position " + i);
                                }
                                break;
                            // check all int compatible types
                            case Types.TINYINT:
                            case Types.SMALLINT:
                            case Types.INTEGER:
                                // types must be compatible to IntValue
                                if (!cspec.getType().isCompatible(IntValue.class)) {
                                    throw new RuntimeException("Column \"" + name + "\" of type \"" + cspec.getType() + "\" from input does not match type " + "\"" + rsmd.getColumnTypeName(i + 1) + "\" in database at position " + i);
                                }
                                break;
                            case Types.BIGINT:
                                // types must also be compatible to LongValue
                                if (!cspec.getType().isCompatible(LongValue.class)) {
                                    throw new RuntimeException("Column \"" + name + "\" of type \"" + cspec.getType() + "\" from input does not match type " + "\"" + rsmd.getColumnTypeName(i + 1) + "\" in database at position " + i);
                                }
                                break;
                            // check all double compatible types
                            case Types.FLOAT:
                            case Types.DOUBLE:
                            case Types.NUMERIC:
                            case Types.DECIMAL:
                            case Types.REAL:
                                // types must also be compatible to DoubleValue
                                if (!cspec.getType().isCompatible(DoubleValue.class)) {
                                    throw new RuntimeException("Column \"" + name + "\" of type \"" + cspec.getType() + "\" from input does not match type " + "\"" + rsmd.getColumnTypeName(i + 1) + "\" in database at position " + i);
                                }
                                break;
                            // check for date-and-time compatible types
                            case Types.DATE:
                            case Types.TIME:
                            case Types.TIMESTAMP:
                                // types must be compatible to DateAndTimeValue
                                if (!cspec.getType().isCompatible(DateAndTimeValue.class)) {
                                    throw new RuntimeException("Column \"" + name + "\" of type \"" + cspec.getType() + "\" from input does not match type " + "\"" + rsmd.getColumnTypeName(i + 1) + "\" in database at position " + i);
                                }
                                break;
                            // check for blob compatible types
                            case Types.BLOB:
                            case Types.BINARY:
                            case Types.LONGVARBINARY:
                                // types must be compatible to BinaryObjectDataValue
                                if (!cspec.getType().isCompatible(BinaryObjectDataValue.class)) {
                                    throw new RuntimeException("Column \"" + name + "\" of type \"" + cspec.getType() + "\" from input does not match type " + "\"" + rsmd.getColumnTypeName(i + 1) + "\" in database at position " + i);
                                }
                                break;
                        }
                    }
                }
            } else {
                LOGGER.info("Table \"" + table + "\" does not exist in database, " + "will create new table.");
                // and create new table
                final String query = "CREATE TABLE " + table + " " + createStmt(spec, sqlTypes, dbConn, columnNamesForInsertStatement);
                LOGGER.debug("Executing SQL statement as execute: " + query);
                Statement statement = conn.createStatement();
                statement.execute(query);
                mapping = new int[spec.getNumColumns()];
                for (int k = 0; k < mapping.length; k++) {
                    mapping[k] = k;
                }
            }
        } else {
            LOGGER.debug("Append not enabled. Table " + table + " will be dropped if exists.");
            mapping = new int[spec.getNumColumns()];
            for (int k = 0; k < mapping.length; k++) {
                mapping[k] = k;
            }
            Statement statement = null;
            try {
                statement = conn.createStatement();
                // remove existing table (if any)
                final String query = "DROP TABLE " + table;
                LOGGER.debug("Executing SQL statement as execute: " + query);
                statement.execute(query);
            } catch (Throwable t) {
                if (statement == null) {
                    throw new SQLException("Could not create SQL statement," + " reason: " + t.getMessage(), t);
                }
                LOGGER.info("Exception droping table \"" + table + "\": " + t.getMessage() + ". Will create new table.");
            } finally {
                if (!conn.getAutoCommit()) {
                    conn.commit();
                }
            }
            // and create new table
            final String query = "CREATE TABLE " + table + " " + createStmt(spec, sqlTypes, dbConn, columnNamesForInsertStatement);
            LOGGER.debug("Executing SQL statement as execute: " + query);
            statement.execute(query);
            statement.close();
        }
        // this is a (temporary) workaround for bug #5802: if there is a DataValue column in the input table
        // we need to use the SQL type for creating the insert statements.
        Map<Integer, Integer> columnTypes = null;
        for (DataColumnSpec cs : spec) {
            if (cs.getType().getPreferredValueClass() == DataValue.class) {
                columnTypes = getColumnTypes(conn, table);
                break;
            }
        }
        // creates the wild card string based on the number of columns
        // this string is used every time a new row is inserted into the db
        final StringBuilder wildcard = new StringBuilder("(");
        boolean first = true;
        for (int i = 0; i < mapping.length; i++) {
            if (mapping[i] >= 0 || insertNullForMissingCols) {
                // add a wildcard if the column exists in the input or the insert-null-for-missing-columns option is enabled
                if (first) {
                    first = false;
                } else {
                    wildcard.append(", ");
                }
                wildcard.append("?");
            }
        }
        wildcard.append(")");
        // a prepared statement avoids problems when writing more than 13 columns and
        // ensures that we can set the column values directly row-by-row; the
        // database will handle the commit
        long cnt = 1;
        long errorCnt = 0;
        long allErrors = 0;
        // count number of rows added to current batch
        int curBatchSize = 0;
        // create table meta data with empty column information
        final String query = "INSERT INTO " + table + " " + columnNamesForInsertStatement + " VALUES " + wildcard;
        LOGGER.debug("Executing SQL statement as prepareStatement: " + query);
        final PreparedStatement stmt = conn.prepareStatement(query);
        // remember auto-commit flag
        final boolean autoCommit = conn.getAutoCommit();
        DatabaseConnectionSettings.setAutoCommit(conn, false);
        try {
            final TimeZone timezone = dbConn.getTimeZone();
            // get the first row
            DataRow row;
            DataRow nextRow = input.poll();
            // iterate over all incoming data rows
            do {
                row = nextRow;
                cnt++;
                exec.checkCanceled();
                if (rowCount > 0) {
                    exec.setProgress(1.0 * cnt / rowCount, "Row " + "#" + cnt);
                } else {
                    exec.setProgress("Writing Row#" + cnt);
                }
                int dbIdx = 1;
                for (int i = 0; i < mapping.length; i++) {
                    if (mapping[i] < 0) {
                        if (insertNullForMissingCols) {
                            // insert only null if the insert null for missing col option is enabled
                            stmt.setNull(dbIdx++, Types.NULL);
                        }
                    } else {
                        final DataColumnSpec cspec = spec.getColumnSpec(mapping[i]);
                        final DataCell cell = row.getCell(mapping[i]);
                        fillStatement(stmt, dbIdx++, cspec, cell, timezone, columnTypes);
                    }
                }
                // if batch mode
                if (batchSize > 1) {
                    // a new row will be added
                    stmt.addBatch();
                }
                // get one more input row to check if 'row' is the last one
                nextRow = input.poll();
                curBatchSize++;
                // write when the batch is full or the end of the input table is reached
                if ((curBatchSize == batchSize) || nextRow == null) {
                    curBatchSize = 0;
                    try {
                        // write batch
                        if (batchSize > 1) {
                            stmt.executeBatch();
                        } else {
                            // or write single row
                            stmt.execute();
                        }
                    } catch (Throwable t) {
                        // commit what was written so far; the remaining rows are committed at the end anyway
                        if (!conn.getAutoCommit()) {
                            conn.commit();
                        }
                        allErrors++;
                        if (errorCnt > -1) {
                            final String errorMsg;
                            if (batchSize > 1) {
                                errorMsg = "Error while adding rows #" + (cnt - batchSize) + " - #" + cnt + ", reason: " + t.getMessage();
                            } else {
                                errorMsg = "Error while adding row #" + cnt + " (" + row.getKey() + "), reason: " + t.getMessage();
                            }
                            exec.setMessage(errorMsg);
                            if (errorCnt++ < 10) {
                                LOGGER.warn(errorMsg);
                            } else {
                                errorCnt = -1;
                                LOGGER.warn(errorMsg + " - more errors...", t);
                            }
                        }
                    } finally {
                        // clear batch if in batch mode
                        if (batchSize > 1) {
                            stmt.clearBatch();
                        }
                    }
                }
            } while (nextRow != null);
            if (!conn.getAutoCommit()) {
                conn.commit();
            }
            if (allErrors == 0) {
                return null;
            } else {
                return "Errors \"" + allErrors + "\" writing " + (cnt - 1) + " rows.";
            }
        } finally {
            DatabaseConnectionSettings.setAutoCommit(conn, autoCommit);
            stmt.close();
        }
    });
}
Also used : DataTableSpec(org.knime.core.data.DataTableSpec) SQLException(java.sql.SQLException) DateAndTimeValue(org.knime.core.data.date.DateAndTimeValue) ArrayList(java.util.ArrayList) DataRow(org.knime.core.data.DataRow) LinkedHashMap(java.util.LinkedHashMap) ResultSetMetaData(java.sql.ResultSetMetaData) DataColumnSpec(org.knime.core.data.DataColumnSpec) BinaryObjectDataValue(org.knime.core.data.blob.BinaryObjectDataValue) BooleanValue(org.knime.core.data.BooleanValue) ResultSet(java.sql.ResultSet) IntValue(org.knime.core.data.IntValue) PreparedStatement(java.sql.PreparedStatement) Statement(java.sql.Statement) PreparedStatement(java.sql.PreparedStatement) TimeZone(java.util.TimeZone) DoubleValue(org.knime.core.data.DoubleValue) LongValue(org.knime.core.data.LongValue) DataCell(org.knime.core.data.DataCell)
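
A minimal call-site sketch for writeData follows. It assumes the surrounding KNIME node already provides the settings, rowInput, exec and cp objects; the table name, batch size and the single SQL type override are placeholders, and the import locations are assumptions rather than taken from the example above.

import java.util.HashMap;
import java.util.Map;

import org.knime.core.node.ExecutionMonitor;
import org.knime.core.node.port.database.DatabaseConnectionSettings;
import org.knime.core.node.port.database.DatabaseWriterConnection;
import org.knime.core.node.streamable.RowInput;
import org.knime.core.node.workflow.CredentialsProvider;

/** Hypothetical helper illustrating one way to invoke writeData. */
final class WriteDataExample {

    static String appendRows(final DatabaseConnectionSettings settings, final RowInput rowInput,
            final ExecutionMonitor exec, final CredentialsProvider cp) throws Exception {
        // optional per-column SQL type overrides; only consulted when the table is created
        final Map<String, String> sqlTypes = new HashMap<>();
        sqlTypes.put("price", "DECIMAL(10,2)");
        return DatabaseWriterConnection.writeData(
            settings,   // database connection settings
            "MY_TABLE", // target table (placeholder name)
            rowInput,   // the KNIME table wrapped as a row input
            -1L,        // row count unknown, progress is reported without a fraction
            true,       // append to the table if it already exists
            exec,       // used for progress reporting and cancellation
            sqlTypes,   // column name to SQL type mapping
            cp,         // provides user name and password
            100,        // rows per batch
            true);      // insert NULL for table columns missing in the input
    }
}

A non-null return value is the error summary described in the Javadoc; a caller would typically surface it as a node warning.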

Example 17 with IntValue

Use of org.knime.core.data.IntValue in project knime-core by knime.

Class: AverageInterpolationStatisticMB, method: consumeRow.

/**
 * {@inheritDoc}
 */
@Override
protected void consumeRow(final DataRow dataRow) {
    DataCell cell = dataRow.getCell(m_colIdx);
    if (cell.isMissing()) {
        m_numMissing++;
    } else {
        DataCell res;
        if (m_previous.isMissing()) {
            res = cell;
        } else {
            if (m_isDateColumn) {
                DateAndTimeValue val = (DateAndTimeValue) cell;
                DateAndTimeValue prevVal = (DateAndTimeValue) m_previous;
                boolean hasDate = val.hasDate() | prevVal.hasDate();
                boolean hasTime = val.hasTime() | prevVal.hasTime();
                boolean hasMilis = val.hasMillis() | prevVal.hasMillis();
                long prev = prevVal.getUTCTimeInMillis();
                long next = val.getUTCTimeInMillis();
                long lin = Math.round((prev + next) / 2);
                res = new DateAndTimeCell(lin, hasDate, hasTime, hasMilis);
            } else {
                DoubleValue val = (DoubleValue) cell;
                double prev = ((DoubleValue) m_previous).getDoubleValue();
                double next = val.getDoubleValue();
                double lin = (prev + next) / 2;
                if (m_previous instanceof IntValue) {
                    // got an int, create an int
                    res = new IntCell((int) Math.round(lin));
                } else if (m_previous instanceof LongValue) {
                    // got a long, create a long
                    res = new LongCell(Math.round(lin));
                } else {
                    res = new DoubleCell(lin);
                }
            }
        }
        for (int i = 0; i < m_numMissing; i++) {
            m_values.add(res);
        }
        m_previous = cell;
        m_numMissing = 0;
    }
}
Also used : DateAndTimeValue(org.knime.core.data.date.DateAndTimeValue) DoubleCell(org.knime.core.data.def.DoubleCell) IntCell(org.knime.core.data.def.IntCell) LongCell(org.knime.core.data.def.LongCell) DoubleValue(org.knime.core.data.DoubleValue) LongValue(org.knime.core.data.LongValue) DataCell(org.knime.core.data.DataCell) DateAndTimeCell(org.knime.core.data.date.DateAndTimeCell) IntValue(org.knime.core.data.IntValue)
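
The numeric branch above is self-contained: the midpoint of the previous and the current value is computed as a double and then narrowed back to the type of the previous cell. A minimal stand-alone sketch of that branch, with a hypothetical class and method name:

import org.knime.core.data.DataCell;
import org.knime.core.data.DoubleValue;
import org.knime.core.data.IntValue;
import org.knime.core.data.LongValue;
import org.knime.core.data.def.DoubleCell;
import org.knime.core.data.def.IntCell;
import org.knime.core.data.def.LongCell;

/** Hypothetical helper mirroring the numeric interpolation branch above. */
final class MidpointExample {

    /** Both cells must be non-missing and compatible with DoubleValue. */
    static DataCell midpoint(final DataCell previous, final DataCell current) {
        final double prev = ((DoubleValue) previous).getDoubleValue();
        final double next = ((DoubleValue) current).getDoubleValue();
        final double lin = (prev + next) / 2;
        if (previous instanceof IntValue) {
            return new IntCell((int) Math.round(lin)); // int in, int out
        } else if (previous instanceof LongValue) {
            return new LongCell(Math.round(lin));      // long in, long out
        }
        return new DoubleCell(lin);                    // otherwise keep the double
    }
}

For example, midpoint(new IntCell(2), new IntCell(5)) returns an IntCell with value 4, because 3.5 is rounded up just as in the original code.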

Example 18 with IntValue

Use of org.knime.core.data.IntValue in project knime-core by knime.

Class: CategoryToNumberNodeDialogPane, method: loadSettingsFrom.

/**
 * {@inheritDoc}
 */
@Override
protected void loadSettingsFrom(final NodeSettingsRO settings, final PortObjectSpec[] specs) throws NotConfigurableException {
    DataTableSpec spec = (DataTableSpec) specs[0];
    m_settings.loadSettingsForDialog(settings, spec);
    DataColumnSpecFilterConfiguration config = m_settings.getFilterConfiguration();
    m_filterPanel.loadConfiguration(config, spec);
    m_appendColums.setSelected(m_settings.getAppendColumns());
    m_columnSuffix.setText(m_settings.getColumnSuffix());
    m_columnSuffix.setEnabled(m_appendColums.isSelected());
    m_startIndex.setValue(m_settings.getStartIndex());
    m_increment.setValue(m_settings.getIncrement());
    m_maxCategories.setValue(m_settings.getMaxCategories());
    if (!m_settings.getDefaultValue().isMissing()) {
        IntValue value = (IntValue) m_settings.getDefaultValue();
        m_defaultValue.setText(Integer.toString(value.getIntValue()));
    } else {
        m_defaultValue.setText("");
    }
    if (!m_settings.getMapMissingTo().isMissing()) {
        IntValue value = (IntValue) m_settings.getMapMissingTo();
        m_mapMissingTo.setText(Integer.toString(value.getIntValue()));
    } else {
        m_mapMissingTo.setText("");
    }
}
Also used : DataTableSpec(org.knime.core.data.DataTableSpec) DataColumnSpecFilterConfiguration(org.knime.core.node.util.filter.column.DataColumnSpecFilterConfiguration) IntValue(org.knime.core.data.IntValue)
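
The two if/else blocks at the end repeat the same pattern: display the integer value of a non-missing cell, or an empty string otherwise. A small hypothetical helper (class and method name are illustrative) that captures the pattern:

import org.knime.core.data.DataCell;
import org.knime.core.data.IntValue;

/** Hypothetical helper for the dialog pattern shown above. */
final class IntCellText {

    /** Returns the cell's integer value as text, or an empty string for a missing cell. */
    static String toText(final DataCell cell) {
        if (cell.isMissing()) {
            return "";
        }
        return Integer.toString(((IntValue) cell).getIntValue());
    }
}

With such a helper the two blocks collapse to m_defaultValue.setText(IntCellText.toText(m_settings.getDefaultValue())) and m_mapMissingTo.setText(IntCellText.toText(m_settings.getMapMissingTo())).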

Example 19 with IntValue

Use of org.knime.core.data.IntValue in project knime-core by knime.

Class: NodeViewUtil, method: renderDataCell.

/**
 * @param cell the data cell to render
 * @param buffer write to this buffer
 */
public static void renderDataCell(final DataCell cell, final StringBuilder buffer) {
    if (cell.isMissing()) {
        buffer.append("<td></td>");
        return;
    }
    if (cell.getType().isCompatible(IntValue.class)) {
        IntValue value = (IntValue) cell;
        buffer.append("<td class=\"numeric\">");
        buffer.append(value.getIntValue());
        buffer.append("</td>");
    } else if (cell.getType().isCompatible(DoubleValue.class)) {
        DoubleValue value = (DoubleValue) cell;
        buffer.append("<td class=\"numeric\">");
        buffer.append(DoubleFormat.formatDouble(value.getDoubleValue()));
        buffer.append("</td>");
    } else if (cell.getType().isCompatible(StringValue.class)) {
        StringValue value = (StringValue) cell;
        buffer.append("<td class=\"left\">");
        buffer.append(value.getStringValue());
        buffer.append("</td>");
    } else {
        buffer.append("<td class=\"left\">");
        buffer.append(cell.toString());
        buffer.append("</td>");
    }
}
Also used : DoubleValue(org.knime.core.data.DoubleValue) StringValue(org.knime.core.data.StringValue) IntValue(org.knime.core.data.IntValue)
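
A hypothetical usage sketch that renders a whole DataRow as a single HTML table row by calling renderDataCell for each cell (DataRow is iterable over its cells; the import for NodeViewUtil is omitted since it is the class shown above):

import org.knime.core.data.DataCell;
import org.knime.core.data.DataRow;

/** Hypothetical caller of NodeViewUtil.renderDataCell. */
final class RowHtmlExample {

    /** Renders all cells of the given row as one HTML table row. */
    static String renderRow(final DataRow row) {
        final StringBuilder buffer = new StringBuilder("<tr>");
        for (final DataCell cell : row) {
            NodeViewUtil.renderDataCell(cell, buffer);
        }
        return buffer.append("</tr>").toString();
    }
}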

Aggregations

IntValue (org.knime.core.data.IntValue): 19
DoubleValue (org.knime.core.data.DoubleValue): 14
DataCell (org.knime.core.data.DataCell): 12
LongValue (org.knime.core.data.LongValue): 10
DateAndTimeValue (org.knime.core.data.date.DateAndTimeValue): 10
DataRow (org.knime.core.data.DataRow): 6
DateAndTimeCell (org.knime.core.data.date.DateAndTimeCell): 6
DoubleCell (org.knime.core.data.def.DoubleCell): 6
IntCell (org.knime.core.data.def.IntCell): 6
LongCell (org.knime.core.data.def.LongCell): 6
SQLException (java.sql.SQLException): 4
BooleanValue (org.knime.core.data.BooleanValue): 4
DataTableSpec (org.knime.core.data.DataTableSpec): 4
BinaryObjectDataValue (org.knime.core.data.blob.BinaryObjectDataValue): 4
BufferedDataTable (org.knime.core.node.BufferedDataTable): 4
IOException (java.io.IOException): 3
DataColumnSpec (org.knime.core.data.DataColumnSpec): 3
StringValue (org.knime.core.data.StringValue): 3
InputStream (java.io.InputStream): 2
PreparedStatement (java.sql.PreparedStatement): 2