
Example 46 with CanceledExecutionException

Use of org.knime.core.node.CanceledExecutionException in project knime-core by knime.

Class DBReaderNodeModel, method execute:

/**
 * {@inheritDoc}
 */
@Override
protected PortObject[] execute(final PortObject[] inData, final ExecutionContext exec) throws CanceledExecutionException, Exception {
    exec.setProgress("Opening database connection...");
    try {
        exec.setProgress("Reading data from database...");
        DBReader load = loadConnectionSettings(inData[getNrInPorts() - 1]);
        final BufferedDataTable result = getResultTable(exec, inData, load);
        setLastSpec(result.getDataTableSpec());
        return new BufferedDataTable[] { result };
    } catch (CanceledExecutionException cee) {
        throw cee;
    } catch (Exception e) {
        setLastSpec(null);
        throw e;
    }
}
Also used: DBReader (org.knime.core.node.port.database.reader.DBReader), CanceledExecutionException (org.knime.core.node.CanceledExecutionException), BufferedDataTable (org.knime.core.node.BufferedDataTable), InvalidSettingsException (org.knime.core.node.InvalidSettingsException), SQLException (java.sql.SQLException), IOException (java.io.IOException)
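
The CanceledExecutionException caught here is typically raised by code that polls the ExecutionMonitor while materializing rows, much like the iterator examples further below. A minimal sketch of that polling pattern, assuming hypothetical hasMoreRows/nextRow/addRow helpers that are not part of the code above:

// Sketch only: hasMoreRows, nextRow and addRow are hypothetical helpers.
private void readRows(final ExecutionMonitor exec, final long rowCount)
        throws CanceledExecutionException {
    long read = 0;
    while (hasMoreRows()) {
        // throws CanceledExecutionException as soon as the user cancels the node
        exec.checkCanceled();
        addRow(nextRow());
        read++;
        if (rowCount > 0) {
            exec.setProgress(read / (double) rowCount, "Read " + read + " rows");
        }
    }
}

Catching the exception at the top of execute and re-throwing it unchanged, rather than wrapping it, is what lets the framework report the node as canceled instead of failed.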

Example 47 with CanceledExecutionException

Use of org.knime.core.node.CanceledExecutionException in project knime-core by knime.

Class DatabaseLoopingNodeModel, method execute:

/**
 * {@inheritDoc}
 */
@Override
protected PortObject[] execute(final PortObject[] inData, final ExecutionContext exec) throws Exception {
    final BufferedDataTable inputTable = (BufferedDataTable) inData[0];
    final long rowCount = inputTable.size();
    final String column = m_columnModel.getStringValue();
    final DataTableSpec spec = inputTable.getDataTableSpec();
    final int colIdx = spec.findColumnIndex(column);
    if (colIdx < 0) {
        throw new InvalidSettingsException("Column " + column + " not found in input table.");
    }
    final Set<DataCell> values = new HashSet<>();
    BufferedDataContainer buf = null;
    final String oQuery = getQuery();
    final Collection<DataCell> curSet = new LinkedHashSet<>();
    final DBReader load = loadConnectionSettings(inData[getNrInPorts() - 1]);
    try {
        final int noValues = m_noValues.getIntValue();
        MutableInteger rowCnt = new MutableInteger(0);
        for (Iterator<DataRow> it = inputTable.iterator(); it.hasNext(); ) {
            exec.checkCanceled();
            DataCell cell = it.next().getCell(colIdx);
            if (values.contains(cell) && !it.hasNext() && curSet.isEmpty()) {
                continue;
            }
            values.add(cell);
            curSet.add(cell);
            if (curSet.size() == noValues || !it.hasNext()) {
                StringBuilder queryValues = new StringBuilder();
                for (DataCell v : curSet) {
                    if (queryValues.length() > 0) {
                        queryValues.append("','");
                    }
                    queryValues.append(v.toString());
                }
                String newQuery = parseQuery(oQuery.replaceAll(IN_PLACE_HOLDER, queryValues.toString()));
                load.updateQuery(newQuery);
                exec.setProgress(values.size() * (double) noValues / rowCount, "Selecting all values \"" + queryValues + "\"...");
                final BufferedDataTable table = getResultTable(exec, inData, load);
                if (buf == null) {
                    DataTableSpec resSpec = table.getDataTableSpec();
                    buf = exec.createDataContainer(createSpec(resSpec, spec.getColumnSpec(column)));
                }
                if (m_aggByRow.getBooleanValue()) {
                    aggregate(table, rowCnt, buf, CollectionCellFactory.createListCell(curSet));
                } else {
                    notAggregate(table, rowCnt, buf, CollectionCellFactory.createListCell(curSet));
                }
                curSet.clear();
            }
        }
        if (buf == null) {
            // create empty dummy container with spec generated during #configure
            final PortObjectSpec[] inSpec;
            if ((inData.length > 1) && (inData[1] instanceof DatabaseConnectionPortObject)) {
                DatabaseConnectionPortObject dbPort = (DatabaseConnectionPortObject) inData[1];
                inSpec = new PortObjectSpec[] { inputTable.getSpec(), dbPort.getSpec() };
            } else {
                inSpec = new PortObjectSpec[] { inputTable.getSpec() };
            }
            final String newQuery = createDummyValueQuery(spec, colIdx, oQuery);
            setQuery(newQuery);
            final DataTableSpec resultSpec = getResultSpec(inSpec);
            final DataTableSpec outSpec = createSpec(resultSpec, spec.getColumnSpec(column));
            buf = exec.createDataContainer(outSpec);
        }
        buf.close();
    } catch (CanceledExecutionException cee) {
        throw cee;
    } catch (Exception e) {
        setLastSpec(null);
        throw e;
    } finally {
        // reset query to original
        setQuery(oQuery);
    }
    final BufferedDataTable resultTable = buf.getTable();
    setLastSpec(resultTable.getDataTableSpec());
    return new BufferedDataTable[] { resultTable };
}
Also used: LinkedHashSet (java.util.LinkedHashSet), DataTableSpec (org.knime.core.data.DataTableSpec), BufferedDataContainer (org.knime.core.node.BufferedDataContainer), MutableInteger (org.knime.core.util.MutableInteger), SettingsModelString (org.knime.core.node.defaultnodesettings.SettingsModelString), DataRow (org.knime.core.data.DataRow), InvalidSettingsException (org.knime.core.node.InvalidSettingsException), CanceledExecutionException (org.knime.core.node.CanceledExecutionException), SQLException (java.sql.SQLException), DatabaseConnectionPortObject (org.knime.core.node.port.database.DatabaseConnectionPortObject), DBReader (org.knime.core.node.port.database.reader.DBReader), BufferedDataTable (org.knime.core.node.BufferedDataTable), PortObjectSpec (org.knime.core.node.port.PortObjectSpec), DatabaseConnectionPortObjectSpec (org.knime.core.node.port.database.DatabaseConnectionPortObjectSpec), DataCell (org.knime.core.data.DataCell), HashSet (java.util.HashSet)
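
The heart of this example is batching: the distinct values of the selected column are collected into groups of at most noValues, joined into a quoted 'v1','v2',... list, and spliced into the query's placeholder before each sub-query runs. A simplified, self-contained sketch of just that batching step in plain Java; runQuery, the template string, and the "#values#" placeholder are illustrative stand-ins for load.updateQuery/getResultTable and IN_PLACE_HOLDER above:

import java.util.ArrayList;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.function.Consumer;

final class ValueBatcher {

    /** Splices each batch of distinct values into the query template and hands it to runQuery. */
    static void runInBatches(final Iterable<String> values, final int batchSize,
            final String queryTemplate, final String placeholder, final Consumer<String> runQuery) {
        final LinkedHashSet<String> distinct = new LinkedHashSet<>();
        values.forEach(distinct::add);
        final List<String> batch = new ArrayList<>(batchSize);
        for (final Iterator<String> it = distinct.iterator(); it.hasNext();) {
            batch.add(it.next());
            if (batch.size() == batchSize || !it.hasNext()) {
                // builds v1','v2','v3 like the StringBuilder loop above; the outer quotes
                // are assumed to live in the template's IN ('...') clause
                runQuery.accept(queryTemplate.replace(placeholder, String.join("','", batch)));
                batch.clear();
            }
        }
    }
}

For instance, ValueBatcher.runInBatches(colValues, 100, "SELECT * FROM t WHERE col IN ('#values#')", "#values#", System.out::println) prints one query per batch of 100 distinct values.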

Example 48 with CanceledExecutionException

Use of org.knime.core.node.CanceledExecutionException in project knime-core by knime.

Class DBPivotNodeModel, method configure:

/**
 * {@inheritDoc}
 */
@Override
protected PortObjectSpec[] configure(final PortObjectSpec[] inSpecs) throws InvalidSettingsException {
    final DatabasePortObjectSpec dbSpec = (DatabasePortObjectSpec) inSpecs[0];
    final DataTableSpec tableSpec = dbSpec.getDataTableSpec();
    final DatabaseQueryConnectionSettings connection = dbSpec.getConnectionSettings(getCredentialsProvider());
    final String dbIdentifier = connection.getDatabaseIdentifier();
    final List<DBColumnAggregationFunctionRow> columnFunctions = DBColumnAggregationFunctionRow.loadFunctions(m_settings, DBPivotNodeModel.CFG_AGGREGATION_FUNCTIONS, dbIdentifier, tableSpec);
    final ArrayList<DBColumnAggregationFunctionRow> invalidColAggrs = new ArrayList<>(1);
    final Set<String> usedColNames = new HashSet<>(tableSpec.getNumColumns());
    usedColNames.addAll(m_groupByCols.getIncludeList());
    usedColNames.addAll(m_pivotCols.getIncludeList());
    m_aggregationFunction2Use.clear();
    for (DBColumnAggregationFunctionRow row : columnFunctions) {
        final DataColumnSpec columnSpec = row.getColumnSpec();
        final DataColumnSpec inputSpec = tableSpec.getColumnSpec(columnSpec.getName());
        final AggregationFunction function = row.getFunction();
        if (inputSpec == null || !inputSpec.getType().equals(columnSpec.getType())) {
            invalidColAggrs.add(row);
            continue;
        }
        if (function instanceof InvalidAggregationFunction) {
            throw new InvalidSettingsException(((InvalidAggregationFunction) function).getErrorMessage());
        }
        if (function.hasOptionalSettings()) {
            try {
                function.configure(tableSpec);
            } catch (InvalidSettingsException e) {
                throw new InvalidSettingsException("Wrong aggregation function configuration '" + function.getLabel() + "' of column '" + row.getColumnSpec().getName() + "': " + e.getMessage(), e);
            }
        }
        usedColNames.add(row.getColumnSpec().getName());
        m_aggregationFunction2Use.add(row);
    }
    if (m_aggregationFunction2Use.isEmpty()) {
        throw new InvalidSettingsException("No aggregation columns selected.");
    }
    if (m_groupByCols.getIncludeList().isEmpty()) {
        setWarningMessage("No grouping column included. Aggregate complete table");
    }
    if (m_pivotCols.getIncludeList().isEmpty()) {
        throw new InvalidSettingsException("No pivot columns selected.");
    }
    if (!invalidColAggrs.isEmpty()) {
        setWarningMessage(invalidColAggrs.size() + " aggregation functions ignored due to incompatible columns.");
    }
    final DatabasePortObjectSpec resultSpec;
    if (connection.getRetrieveMetadataInConfigure()) {
        try {
            resultSpec = createDbOutSpec(dbSpec, new ExecutionMonitor());
        } catch (CanceledExecutionException e) {
            throw new InvalidSettingsException(e.getMessage());
        }
    } else {
        resultSpec = null;
    }
    return new PortObjectSpec[] { resultSpec };
}
Also used: DataTableSpec (org.knime.core.data.DataTableSpec), InvalidAggregationFunction (org.knime.core.node.port.database.aggregation.InvalidAggregationFunction), ArrayList (java.util.ArrayList), SettingsModelFilterString (org.knime.core.node.defaultnodesettings.SettingsModelFilterString), SettingsModelString (org.knime.core.node.defaultnodesettings.SettingsModelString), DBAggregationFunction (org.knime.core.node.port.database.aggregation.DBAggregationFunction), AggregationFunction (org.knime.core.node.port.database.aggregation.AggregationFunction), DatabaseQueryConnectionSettings (org.knime.core.node.port.database.DatabaseQueryConnectionSettings), DataColumnSpec (org.knime.core.data.DataColumnSpec), InvalidSettingsException (org.knime.core.node.InvalidSettingsException), CanceledExecutionException (org.knime.core.node.CanceledExecutionException), DBColumnAggregationFunctionRow (org.knime.base.node.io.database.groupby.dialog.column.DBColumnAggregationFunctionRow), DatabasePortObjectSpec (org.knime.core.node.port.database.DatabasePortObjectSpec), PortObjectSpec (org.knime.core.node.port.PortObjectSpec), ExecutionMonitor (org.knime.core.node.ExecutionMonitor), HashSet (java.util.HashSet)
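
One detail worth noting: configure() has no cancellation channel, so when the connection is set to retrieve metadata at configure time, a CanceledExecutionException thrown while computing the output spec is converted into an InvalidSettingsException, and a throwaway ExecutionMonitor is used because no ExecutionContext exists yet. A minimal sketch of that conversion, with computeOutSpec as a hypothetical stand-in for createDbOutSpec:

final DatabasePortObjectSpec resultSpec;
if (connection.getRetrieveMetadataInConfigure()) {
    try {
        // no ExecutionContext is available during configure, hence the fresh monitor
        resultSpec = computeOutSpec(dbSpec, new ExecutionMonitor());
    } catch (CanceledExecutionException e) {
        // configure() cannot report cancellation, so surface it as a settings error
        throw new InvalidSettingsException(e.getMessage());
    }
} else {
    resultSpec = null;
}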

Example 49 with CanceledExecutionException

Use of org.knime.core.node.CanceledExecutionException in project knime-core by knime.

Class MissingValueHandling2TableIterator, method push:

/**
 * pushes the internal iterator forward to the next row to return.
 */
private void push() {
    DataRow row;
    boolean hasMissing;
    boolean skipRow;
    // loop instead of recursing over skipped rows: with many consecutive rows
    // containing missing values, recursion would cause a StackOverflow, bug fix #350
    do {
        if (!m_internIt.hasNext()) {
            m_next = null;
            return;
        }
        row = m_internIt.next();
        if (m_finalCount > 0) {
            m_exec.setProgress(m_count / (double) m_finalCount);
        }
        try {
            m_exec.checkCanceled();
        } catch (CanceledExecutionException cee) {
            throw new RuntimeCanceledExecutionException(cee);
        }
        m_count++;
        // check once if we can get away easy
        hasMissing = false;
        skipRow = false;
        for (int i = 0; !skipRow && i < row.getNumCells(); i++) {
            if (row.getCell(i).isMissing()) {
                switch(m_table.getColSetting(i).getMethod()) {
                    case MissingValueHandling2ColSetting.METHOD_NO_HANDLING:
                        break;
                    case MissingValueHandling2ColSetting.METHOD_IGNORE_ROWS:
                        skipRow = true;
                        break;
                    default:
                        hasMissing = true;
                }
            }
        }
    } while (skipRow);
    if (hasMissing) {
        m_next = handleMissing(row);
    } else {
        m_next = row;
    }
}
Also used: CanceledExecutionException (org.knime.core.node.CanceledExecutionException), DataRow (org.knime.core.data.DataRow)

Example 50 with CanceledExecutionException

Use of org.knime.core.node.CanceledExecutionException in project knime-core by knime.

Class MissingValueHandling3TableIterator, method push:

/**
 * pushes the internal iterator forward to the next row to return.
 */
private void push() {
    DataRow row;
    boolean hasMissing;
    boolean skipRow;
    // loop instead of recursing over skipped rows: with many consecutive rows
    // containing missing values, recursion would cause a StackOverflow, bug fix #350
    do {
        if (!m_internIt.hasNext()) {
            m_next = null;
            return;
        }
        row = m_internIt.next();
        if (m_finalCount > 0) {
            m_exec.setProgress(m_count / (double) m_finalCount);
        }
        try {
            m_exec.checkCanceled();
        } catch (CanceledExecutionException cee) {
            throw new RuntimeCanceledExecutionException(cee);
        }
        m_count++;
        // check once if we can get away easy
        hasMissing = false;
        skipRow = false;
        for (int i = 0; !skipRow && i < row.getNumCells(); i++) {
            if (row.getCell(i).isMissing()) {
                switch(m_table.getColSetting(i).getMethod()) {
                    case MissingValueHandling2ColSetting.METHOD_NO_HANDLING:
                        break;
                    case MissingValueHandling2ColSetting.METHOD_IGNORE_ROWS:
                        skipRow = true;
                        break;
                    default:
                        hasMissing = true;
                }
            }
        }
    } while (skipRow);
    if (hasMissing) {
        m_next = handleMissing(row);
    } else {
        m_next = row;
    }
}
Also used: CanceledExecutionException (org.knime.core.node.CanceledExecutionException), DataRow (org.knime.core.data.DataRow)
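
Both push() variants (Examples 49 and 50) show the wrap-and-unwrap idiom: Iterator.hasNext()/next() cannot declare checked exceptions, so the checked CanceledExecutionException is wrapped in an unchecked RuntimeCanceledExecutionException, and whatever drives the iterator is expected to catch the wrapper and recover the original cause. A self-contained sketch of the idiom in plain Java; the two exception classes are illustrative stand-ins, not the KNIME types:

import java.util.Iterator;
import java.util.List;

class CanceledException extends Exception {
}

class RuntimeCanceledException extends RuntimeException {
    RuntimeCanceledException(final CanceledException cause) {
        super(cause);
    }
}

final class CancellableIterator implements Iterator<String> {

    private final Iterator<String> m_delegate;
    private volatile boolean m_canceled; // flipped from another thread when the user cancels

    CancellableIterator(final List<String> rows) {
        m_delegate = rows.iterator();
    }

    void cancel() {
        m_canceled = true;
    }

    @Override
    public boolean hasNext() {
        return m_delegate.hasNext();
    }

    @Override
    public String next() {
        try {
            checkCanceled(); // throws the checked exception
        } catch (CanceledException ce) {
            // next() cannot declare a checked exception, so wrap it
            throw new RuntimeCanceledException(ce);
        }
        return m_delegate.next();
    }

    private void checkCanceled() throws CanceledException {
        if (m_canceled) {
            throw new CanceledException();
        }
    }
}

The loop that consumes the iterator then catches RuntimeCanceledException and re-throws its cause as the original checked exception, which mirrors how callers of these table iterators are expected to treat RuntimeCanceledExecutionException.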

Aggregations

CanceledExecutionException (org.knime.core.node.CanceledExecutionException): 82
InvalidSettingsException (org.knime.core.node.InvalidSettingsException): 34
IOException (java.io.IOException): 32
File (java.io.File): 21
ExecutionMonitor (org.knime.core.node.ExecutionMonitor): 21
DataRow (org.knime.core.data.DataRow): 20
DataTableSpec (org.knime.core.data.DataTableSpec): 20
BufferedDataTable (org.knime.core.node.BufferedDataTable): 20
DataCell (org.knime.core.data.DataCell): 19
ArrayList (java.util.ArrayList): 11
DataColumnSpec (org.knime.core.data.DataColumnSpec): 11
BufferedDataContainer (org.knime.core.node.BufferedDataContainer): 10
LinkedHashMap (java.util.LinkedHashMap): 9
ExecutionException (java.util.concurrent.ExecutionException): 9
DefaultRow (org.knime.core.data.def.DefaultRow): 9
RowKey (org.knime.core.data.RowKey): 8
BufferedWriter (java.io.BufferedWriter): 7
FileInputStream (java.io.FileInputStream): 7
Map (java.util.Map): 7
Future (java.util.concurrent.Future): 7