Example 11 with LongValue

Use of org.knime.core.data.LongValue in project knime-core by knime.

The class DatabaseWriterConnection, method fillStatement.

/**
 * Set given column value into SQL statement.
 * @param stmt statement used
 * @param dbIdx database index to update/write
 * @param cspec column spec to check type
 * @param cell the data cell to write into the statement
 * @param tz the {@link TimeZone} to use
 * @param columnTypes mapping from database column index to SQL type, used for columns whose KNIME
 *        type is not handled explicitly; may be <code>null</code>
 * @throws SQLException if the value can't be set
 */
private static void fillStatement(final PreparedStatement stmt, final int dbIdx, final DataColumnSpec cspec, final DataCell cell, final TimeZone tz, final Map<Integer, Integer> columnTypes) throws SQLException {
    if (cspec.getType().isCompatible(BooleanValue.class)) {
        if (cell.isMissing()) {
            stmt.setNull(dbIdx, Types.BOOLEAN);
        } else {
            boolean bool = ((BooleanValue) cell).getBooleanValue();
            stmt.setBoolean(dbIdx, bool);
        }
    } else if (cspec.getType().isCompatible(IntValue.class)) {
        if (cell.isMissing()) {
            stmt.setNull(dbIdx, Types.INTEGER);
        } else {
            int integer = ((IntValue) cell).getIntValue();
            stmt.setInt(dbIdx, integer);
        }
    } else if (cspec.getType().isCompatible(LongValue.class)) {
        if (cell.isMissing()) {
            stmt.setNull(dbIdx, Types.BIGINT);
        } else {
            long longValue = ((LongValue) cell).getLongValue();
            stmt.setLong(dbIdx, longValue);
        }
    } else if (cspec.getType().isCompatible(DoubleValue.class)) {
        if (cell.isMissing()) {
            stmt.setNull(dbIdx, Types.DOUBLE);
        } else {
            double dbl = ((DoubleValue) cell).getDoubleValue();
            if (Double.isNaN(dbl)) {
                stmt.setNull(dbIdx, Types.DOUBLE);
            } else {
                stmt.setDouble(dbIdx, dbl);
            }
        }
    } else if (cspec.getType().isCompatible(DateAndTimeValue.class)) {
        if (cell.isMissing()) {
            stmt.setNull(dbIdx, Types.DATE);
        } else {
            final DateAndTimeValue dateCell = (DateAndTimeValue) cell;
            final long corrDate = dateCell.getUTCTimeInMillis() - tz.getOffset(dateCell.getUTCTimeInMillis());
            if (!dateCell.hasTime() && !dateCell.hasMillis()) {
                java.sql.Date date = new java.sql.Date(corrDate);
                stmt.setDate(dbIdx, date);
            } else if (!dateCell.hasDate()) {
                java.sql.Time time = new java.sql.Time(corrDate);
                stmt.setTime(dbIdx, time);
            } else {
                java.sql.Timestamp timestamp = new java.sql.Timestamp(corrDate);
                stmt.setTimestamp(dbIdx, timestamp);
            }
        }
    } else if (cspec.getType().isCompatible(BinaryObjectDataValue.class)) {
        if (cell.isMissing()) {
            stmt.setNull(dbIdx, Types.BLOB);
        } else {
            try {
                BinaryObjectDataValue value = (BinaryObjectDataValue) cell;
                InputStream is = value.openInputStream();
                if (is == null) {
                    stmt.setNull(dbIdx, Types.BLOB);
                } else {
                    try {
                        // to be compatible with JDBC 3.0, the length of the stream is restricted to max integer,
                        // which is ~2GB; with JDBC 4.0 longs are supported and the respective method can be called
                        stmt.setBinaryStream(dbIdx, is, (int) value.length());
                    } catch (SQLException ex) {
                        // if not supported (e.g. SQLite), set a byte array instead
                        byte[] bytes = IOUtils.toByteArray(is);
                        stmt.setBytes(dbIdx, bytes);
                    }
                }
            } catch (IOException ioe) {
                stmt.setNull(dbIdx, Types.BLOB);
            }
        }
    } else if ((columnTypes == null) || cspec.getType().isCompatible(StringValue.class)) {
        if (cell.isMissing()) {
            stmt.setNull(dbIdx, Types.VARCHAR);
        } else {
            stmt.setString(dbIdx, cell.toString());
        }
    } else {
        Integer sqlType = columnTypes.get(dbIdx);
        if (sqlType == null) {
            sqlType = Types.VARCHAR;
        }
        if (cell.isMissing()) {
            stmt.setNull(dbIdx, sqlType);
        } else {
            stmt.setObject(dbIdx, cell.toString(), sqlType);
        }
    }
}
Also used : SQLException(java.sql.SQLException) DateAndTimeValue(org.knime.core.data.date.DateAndTimeValue) InputStream(java.io.InputStream) IOException(java.io.IOException) DoubleValue(org.knime.core.data.DoubleValue) BinaryObjectDataValue(org.knime.core.data.blob.BinaryObjectDataValue) BooleanValue(org.knime.core.data.BooleanValue) LongValue(org.knime.core.data.LongValue) StringValue(org.knime.core.data.StringValue) IntValue(org.knime.core.data.IntValue)
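
The only non-obvious step above is the date handling: the cell's UTC milliseconds are shifted by the configured time zone's offset before being wrapped into a java.sql.Date, Time, or Timestamp. Below is a minimal, self-contained illustration of that shift using only JDK classes; the timestamp value, zone, and class name are arbitrary examples and not taken from KNIME.

import java.sql.Timestamp;
import java.util.TimeZone;

public final class TimeZoneShiftSketch {

    public static void main(final String[] args) {
        long utcMillis = 1_500_000_000_000L;                 // arbitrary example instant
        TimeZone tz = TimeZone.getTimeZone("Europe/Berlin"); // stands in for the configured zone
        // same correction as in fillStatement: subtract the zone's offset at that instant
        long corrected = utcMillis - tz.getOffset(utcMillis);
        System.out.println(new Timestamp(utcMillis));   // rendered in the JVM's default zone
        System.out.println(new Timestamp(corrected));   // shifted by the configured zone's offset
    }
}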

Example 12 with LongValue

Use of org.knime.core.data.LongValue in project knime-core by knime.

The class DatabaseWriterConnection, method writeData.

/**
 * Writes the rows of the given input into the database table, opening the connection from the given settings.
 * @param dbConn a database connection object
 * @param table name of table to write
 * @param input the data table as row input
 * @param rowCount number of rows of the table to write, -1 if unknown
 * @param appendData if <code>true</code> the data is appended to an existing table
 * @param exec used to cancel writing and to report progress
 * @param sqlTypes A mapping from column name to SQL-type.
 * @param cp {@link CredentialsProvider} providing user/password
 * @param batchSize number of rows written in one batch
 * @param insertNullForMissingCols <code>true</code> if <code>null</code> should be inserted for missing columns
 * @return error string or null if none
 * @throws Exception if connection could not be established
 * @since 3.1
 */
public static final String writeData(final DatabaseConnectionSettings dbConn, final String table, final RowInput input, final long rowCount, final boolean appendData, final ExecutionMonitor exec, final Map<String, String> sqlTypes, final CredentialsProvider cp, final int batchSize, final boolean insertNullForMissingCols) throws Exception {
    // synchronized (dbConn.syncConnection(conn)) {
    return dbConn.execute(cp, conn -> {
        final StringBuilder columnNamesForInsertStatement = new StringBuilder("(");
        exec.setMessage("Start writing rows in database...");
        DataTableSpec spec = input.getDataTableSpec();
        // mapping from spec columns to database columns
        final int[] mapping;
        // append data to existing table
        if (appendData) {
            if (dbConn.getUtility().tableExists(conn, table)) {
                String query = dbConn.getUtility().getStatementManipulator().forMetadataOnly("SELECT * FROM " + table);
                try (ResultSet rs = conn.createStatement().executeQuery(query)) {
                    ResultSetMetaData rsmd = rs.getMetaData();
                    final Map<String, Integer> columnNames = new LinkedHashMap<String, Integer>();
                    for (int i = 0; i < spec.getNumColumns(); i++) {
                        String colName = replaceColumnName(spec.getColumnSpec(i).getName(), dbConn);
                        columnNames.put(colName.toLowerCase(), i);
                    }
                    // sanity check to ensure that all input columns exist in the database
                    ArrayList<String> columnNotInSpec = new ArrayList<String>(columnNames.keySet());
                    for (int i = 0; i < rsmd.getColumnCount(); i++) {
                        String dbColName = replaceColumnName(rsmd.getColumnName(i + 1), dbConn);
                        if (columnNames.containsKey(dbColName.toLowerCase())) {
                            columnNotInSpec.remove(dbColName.toLowerCase());
                            columnNamesForInsertStatement.append(dbColName).append(',');
                        } else if (insertNullForMissingCols) {
                            // append the column name of a missing column only if the insert null for missing
                            // column option is enabled
                            columnNamesForInsertStatement.append(dbColName).append(',');
                        }
                    }
                    if (rsmd.getColumnCount() > 0) {
                        columnNamesForInsertStatement.deleteCharAt(columnNamesForInsertStatement.length() - 1);
                    }
                    columnNamesForInsertStatement.append(')');
                    if (columnNotInSpec.size() > 0) {
                        throw new RuntimeException("Input table contains columns that do not exist in the database table: " + columnNotInSpec.toString());
                    }
                    mapping = new int[rsmd.getColumnCount()];
                    for (int i = 0; i < mapping.length; i++) {
                        String name = replaceColumnName(rsmd.getColumnName(i + 1), dbConn).toLowerCase();
                        if (!columnNames.containsKey(name)) {
                            mapping[i] = -1;
                            continue;
                        }
                        mapping[i] = columnNames.get(name);
                        DataColumnSpec cspec = spec.getColumnSpec(mapping[i]);
                        int type = rsmd.getColumnType(i + 1);
                        switch(type) {
                            // check all boolean compatible types
                            case Types.BIT:
                            case Types.BOOLEAN:
                                // types must be compatible with BooleanValue
                                if (!cspec.getType().isCompatible(BooleanValue.class)) {
                                    throw new RuntimeException("Column \"" + name + "\" of type \"" + cspec.getType() + "\" from input does not match type " + "\"" + rsmd.getColumnTypeName(i + 1) + "\" in database at position " + i);
                                }
                                break;
                            // check all int compatible types
                            case Types.TINYINT:
                            case Types.SMALLINT:
                            case Types.INTEGER:
                                // types must be compatible with IntValue
                                if (!cspec.getType().isCompatible(IntValue.class)) {
                                    throw new RuntimeException("Column \"" + name + "\" of type \"" + cspec.getType() + "\" from input does not match type " + "\"" + rsmd.getColumnTypeName(i + 1) + "\" in database at position " + i);
                                }
                                break;
                            case Types.BIGINT:
                                // types must be compatible with LongValue
                                if (!cspec.getType().isCompatible(LongValue.class)) {
                                    throw new RuntimeException("Column \"" + name + "\" of type \"" + cspec.getType() + "\" from input does not match type " + "\"" + rsmd.getColumnTypeName(i + 1) + "\" in database at position " + i);
                                }
                                break;
                            // check all double compatible types
                            case Types.FLOAT:
                            case Types.DOUBLE:
                            case Types.NUMERIC:
                            case Types.DECIMAL:
                            case Types.REAL:
                                // types must be compatible with DoubleValue
                                if (!cspec.getType().isCompatible(DoubleValue.class)) {
                                    throw new RuntimeException("Column \"" + name + "\" of type \"" + cspec.getType() + "\" from input does not match type " + "\"" + rsmd.getColumnTypeName(i + 1) + "\" in database at position " + i);
                                }
                                break;
                            // check for date-and-time compatible types
                            case Types.DATE:
                            case Types.TIME:
                            case Types.TIMESTAMP:
                                // types must be compatible with DateAndTimeValue
                                if (!cspec.getType().isCompatible(DateAndTimeValue.class)) {
                                    throw new RuntimeException("Column \"" + name + "\" of type \"" + cspec.getType() + "\" from input does not match type " + "\"" + rsmd.getColumnTypeName(i + 1) + "\" in database at position " + i);
                                }
                                break;
                            // check for blob compatible types
                            case Types.BLOB:
                            case Types.BINARY:
                            case Types.LONGVARBINARY:
                                // types must be compatible with BinaryObjectDataValue
                                if (!cspec.getType().isCompatible(BinaryObjectDataValue.class)) {
                                    throw new RuntimeException("Column \"" + name + "\" of type \"" + cspec.getType() + "\" from input does not match type " + "\"" + rsmd.getColumnTypeName(i + 1) + "\" in database at position " + i);
                                }
                                break;
                        }
                    }
                }
            } else {
                LOGGER.info("Table \"" + table + "\" does not exist in database, " + "will create new table.");
                // and create new table
                final String query = "CREATE TABLE " + table + " " + createStmt(spec, sqlTypes, dbConn, columnNamesForInsertStatement);
                LOGGER.debug("Executing SQL statement as execute: " + query);
                try (Statement statement = conn.createStatement()) {
                    statement.execute(query);
                }
                mapping = new int[spec.getNumColumns()];
                for (int k = 0; k < mapping.length; k++) {
                    mapping[k] = k;
                }
            }
        } else {
            LOGGER.debug("Append not enabled. Table " + table + " will be dropped if it exists.");
            mapping = new int[spec.getNumColumns()];
            for (int k = 0; k < mapping.length; k++) {
                mapping[k] = k;
            }
            Statement statement = null;
            try {
                statement = conn.createStatement();
                // remove existing table (if any)
                final String query = "DROP TABLE " + table;
                LOGGER.debug("Executing SQL statement as execute: " + query);
                statement.execute(query);
            } catch (Throwable t) {
                if (statement == null) {
                    throw new SQLException("Could not create SQL statement," + " reason: " + t.getMessage(), t);
                }
                LOGGER.info("Exception dropping table \"" + table + "\": " + t.getMessage() + ". Will create new table.");
            } finally {
                if (!conn.getAutoCommit()) {
                    conn.commit();
                }
            }
            // and create new table
            final String query = "CREATE TABLE " + table + " " + createStmt(spec, sqlTypes, dbConn, columnNamesForInsertStatement);
            LOGGER.debug("Executing SQL statement as execute: " + query);
            statement.execute(query);
            statement.close();
        }
        // this is a (temporary) workaround for bug #5802: if there is a DataValue column in the input table
        // we need to use the SQL type for creating the insert statements.
        Map<Integer, Integer> columnTypes = null;
        for (DataColumnSpec cs : spec) {
            if (cs.getType().getPreferredValueClass() == DataValue.class) {
                columnTypes = getColumnTypes(conn, table);
                break;
            }
        }
        // creates the wild card string based on the number of columns
        // this string is used every time a new row is inserted into the db
        final StringBuilder wildcard = new StringBuilder("(");
        boolean first = true;
        for (int i = 0; i < mapping.length; i++) {
            if (mapping[i] >= 0 || insertNullForMissingCols) {
                // add a wildcard for each mapped column, and for missing columns if the insert null for missing columns option is enabled
                if (first) {
                    first = false;
                } else {
                    wildcard.append(", ");
                }
                wildcard.append("?");
            }
        }
        wildcard.append(")");
        // a single prepared statement is reused for all rows, which avoids problems writing
        // more than 13 columns; it ensures that we can set the columns directly row-by-row,
        // and the database will handle the commit
        long cnt = 1;
        long errorCnt = 0;
        long allErrors = 0;
        // count number of rows added to current batch
        int curBatchSize = 0;
        // build the INSERT statement from the collected column names and the wildcard string
        final String query = "INSERT INTO " + table + " " + columnNamesForInsertStatement + " VALUES " + wildcard;
        LOGGER.debug("Executing SQL statement as prepareStatement: " + query);
        final PreparedStatement stmt = conn.prepareStatement(query);
        // remember auto-commit flag
        final boolean autoCommit = conn.getAutoCommit();
        DatabaseConnectionSettings.setAutoCommit(conn, false);
        try {
            final TimeZone timezone = dbConn.getTimeZone();
            // get the first row
            DataRow row;
            DataRow nextRow = input.poll();
            // iterate over all incoming data rows
            do {
                row = nextRow;
                cnt++;
                exec.checkCanceled();
                if (rowCount > 0) {
                    exec.setProgress(1.0 * cnt / rowCount, "Row " + "#" + cnt);
                } else {
                    exec.setProgress("Writing Row#" + cnt);
                }
                int dbIdx = 1;
                for (int i = 0; i < mapping.length; i++) {
                    if (mapping[i] < 0) {
                        if (insertNullForMissingCols) {
                            // insert only null if the insert null for missing col option is enabled
                            stmt.setNull(dbIdx++, Types.NULL);
                        }
                    } else {
                        final DataColumnSpec cspec = spec.getColumnSpec(mapping[i]);
                        final DataCell cell = row.getCell(mapping[i]);
                        fillStatement(stmt, dbIdx++, cspec, cell, timezone, columnTypes);
                    }
                }
                // if batch mode
                if (batchSize > 1) {
                    // a new row will be added
                    stmt.addBatch();
                }
                // get one more input row to check if 'row' is the last one
                nextRow = input.poll();
                curBatchSize++;
                // if the batch is full or the input table is at its end
                if ((curBatchSize == batchSize) || nextRow == null) {
                    curBatchSize = 0;
                    try {
                        // write batch
                        if (batchSize > 1) {
                            stmt.executeBatch();
                        } else {
                            // or write single row
                            stmt.execute();
                        }
                    } catch (Throwable t) {
                        // commit the rows written so far (if not in auto-commit mode) and count the error
                        if (!conn.getAutoCommit()) {
                            conn.commit();
                        }
                        allErrors++;
                        if (errorCnt > -1) {
                            final String errorMsg;
                            if (batchSize > 1) {
                                errorMsg = "Error while adding rows #" + (cnt - batchSize) + " - #" + cnt + ", reason: " + t.getMessage();
                            } else {
                                errorMsg = "Error while adding row #" + cnt + " (" + row.getKey() + "), reason: " + t.getMessage();
                            }
                            exec.setMessage(errorMsg);
                            if (errorCnt++ < 10) {
                                LOGGER.warn(errorMsg);
                            } else {
                                errorCnt = -1;
                                LOGGER.warn(errorMsg + " - more errors...", t);
                            }
                        }
                    } finally {
                        // clear batch if in batch mode
                        if (batchSize > 1) {
                            stmt.clearBatch();
                        }
                    }
                }
            } while (nextRow != null);
            if (!conn.getAutoCommit()) {
                conn.commit();
            }
            if (allErrors == 0) {
                return null;
            } else {
                return allErrors + " errors while writing " + (cnt - 1) + " rows.";
            }
        } finally {
            DatabaseConnectionSettings.setAutoCommit(conn, autoCommit);
            stmt.close();
        }
    });
}
Also used : DataTableSpec(org.knime.core.data.DataTableSpec) SQLException(java.sql.SQLException) DateAndTimeValue(org.knime.core.data.date.DateAndTimeValue) ArrayList(java.util.ArrayList) DataRow(org.knime.core.data.DataRow) LinkedHashMap(java.util.LinkedHashMap) ResultSetMetaData(java.sql.ResultSetMetaData) DataColumnSpec(org.knime.core.data.DataColumnSpec) BinaryObjectDataValue(org.knime.core.data.blob.BinaryObjectDataValue) BooleanValue(org.knime.core.data.BooleanValue) ResultSet(java.sql.ResultSet) IntValue(org.knime.core.data.IntValue) PreparedStatement(java.sql.PreparedStatement) Statement(java.sql.Statement) PreparedStatement(java.sql.PreparedStatement) TimeZone(java.util.TimeZone) DoubleValue(org.knime.core.data.DoubleValue) LongValue(org.knime.core.data.LongValue) DataCell(org.knime.core.data.DataCell)
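
Given the signature above, a caller (for example a node's execute method) only needs connection settings, the row input, an execution monitor, and a credentials provider. The following is a hedged sketch of such a call; the table name, batch size, helper class, and the import locations of the KNIME classes are assumptions for illustration, not taken from this listing.

import java.util.Map;

import org.knime.core.node.ExecutionMonitor;
import org.knime.core.node.port.database.DatabaseConnectionSettings;
import org.knime.core.node.port.database.DatabaseWriterConnection;
import org.knime.core.node.streamable.RowInput;
import org.knime.core.node.workflow.CredentialsProvider;

final class WriteDataCallSketch {

    /**
     * Appends all rows of the given input to the table "my_table", writing in
     * batches of 100 and inserting NULL for columns missing in the input.
     * Returns writeData's error summary, or null if every row was written.
     */
    static String appendRows(final DatabaseConnectionSettings dbConn, final RowInput input,
            final long rowCount, final ExecutionMonitor exec, final Map<String, String> sqlTypes,
            final CredentialsProvider cp) throws Exception {
        return DatabaseWriterConnection.writeData(dbConn, "my_table", input, rowCount,
            /* appendData */ true, exec, sqlTypes, cp,
            /* batchSize */ 100, /* insertNullForMissingCols */ true);
    }
}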

Example 13 with LongValue

Use of org.knime.core.data.LongValue in project knime-core by knime.

The class AverageInterpolationStatisticMB, method consumeRow.

/**
 * {@inheritDoc}
 */
@Override
protected void consumeRow(final DataRow dataRow) {
    DataCell cell = dataRow.getCell(m_colIdx);
    if (cell.isMissing()) {
        m_numMissing++;
    } else {
        DataCell res;
        if (m_previous.isMissing()) {
            res = cell;
        } else {
            if (m_isDateColumn) {
                DateAndTimeValue val = (DateAndTimeValue) cell;
                DateAndTimeValue prevVal = (DateAndTimeValue) m_previous;
                boolean hasDate = val.hasDate() || prevVal.hasDate();
                boolean hasTime = val.hasTime() || prevVal.hasTime();
                boolean hasMillis = val.hasMillis() || prevVal.hasMillis();
                long prev = prevVal.getUTCTimeInMillis();
                long next = val.getUTCTimeInMillis();
                // midpoint in milliseconds; divide as double so Math.round has an effect
                long lin = Math.round((prev + next) / 2.0);
                res = new DateAndTimeCell(lin, hasDate, hasTime, hasMillis);
            } else {
                DoubleValue val = (DoubleValue) cell;
                double prev = ((DoubleValue) m_previous).getDoubleValue();
                double next = val.getDoubleValue();
                double lin = (prev + next) / 2;
                if (m_previous instanceof IntValue) {
                    // got an int, create an int
                    res = new IntCell((int) Math.round(lin));
                } else if (m_previous instanceof LongValue) {
                    // got a long, create a long
                    res = new LongCell(Math.round(lin));
                } else {
                    res = new DoubleCell(lin);
                }
            }
        }
        for (int i = 0; i < m_numMissing; i++) {
            m_values.add(res);
        }
        m_previous = cell;
        m_numMissing = 0;
    }
}
Also used : DateAndTimeValue(org.knime.core.data.date.DateAndTimeValue) DoubleCell(org.knime.core.data.def.DoubleCell) IntCell(org.knime.core.data.def.IntCell) LongCell(org.knime.core.data.def.LongCell) DoubleValue(org.knime.core.data.DoubleValue) LongValue(org.knime.core.data.LongValue) DataCell(org.knime.core.data.DataCell) DateAndTimeCell(org.knime.core.data.date.DateAndTimeCell) IntValue(org.knime.core.data.IntValue)
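
The numeric branch above reduces to taking the midpoint of the previous and current non-missing values and rounding it back to the column's type. The following standalone sketch isolates that part; the class and method names are illustrative only and not part of knime-core.

import org.knime.core.data.DataCell;
import org.knime.core.data.DoubleValue;
import org.knime.core.data.IntValue;
import org.knime.core.data.LongValue;
import org.knime.core.data.def.DoubleCell;
import org.knime.core.data.def.IntCell;
import org.knime.core.data.def.LongCell;

final class MidpointSketch {

    /**
     * Returns the midpoint of two numeric cells, preserving the type of the
     * previous cell: IntValue -> IntCell, LongValue -> LongCell, otherwise DoubleCell.
     */
    static DataCell midpoint(final DataCell previous, final DataCell current) {
        final double prev = ((DoubleValue) previous).getDoubleValue();
        final double next = ((DoubleValue) current).getDoubleValue();
        final double lin = (prev + next) / 2;
        if (previous instanceof IntValue) {
            return new IntCell((int) Math.round(lin));
        } else if (previous instanceof LongValue) {
            return new LongCell(Math.round(lin));
        } else {
            return new DoubleCell(lin);
        }
    }
}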

Aggregations

LongValue (org.knime.core.data.LongValue)13 DoubleValue (org.knime.core.data.DoubleValue)11 IntValue (org.knime.core.data.IntValue)10 DateAndTimeValue (org.knime.core.data.date.DateAndTimeValue)10 DataCell (org.knime.core.data.DataCell)7 IntCell (org.knime.core.data.def.IntCell)7 LongCell (org.knime.core.data.def.LongCell)7 DateAndTimeCell (org.knime.core.data.date.DateAndTimeCell)6 DoubleCell (org.knime.core.data.def.DoubleCell)6 SQLException (java.sql.SQLException)4 BooleanValue (org.knime.core.data.BooleanValue)4 BinaryObjectDataValue (org.knime.core.data.blob.BinaryObjectDataValue)4 IOException (java.io.IOException)2 InputStream (java.io.InputStream)2 PreparedStatement (java.sql.PreparedStatement)2 ResultSet (java.sql.ResultSet)2 ResultSetMetaData (java.sql.ResultSetMetaData)2 Statement (java.sql.Statement)2 ArrayList (java.util.ArrayList)2 LinkedHashMap (java.util.LinkedHashMap)2