Example 51 with ExecutionContext

Use of org.knime.core.node.ExecutionContext in project knime-core by knime.

The class VirtualSubNodeInputNodeModel, method createStreamableOperator.

@Override
public StreamableOperator createStreamableOperator(final PartitionInfo partitionInfo, final PortObjectSpec[] inSpecs) throws InvalidSettingsException {
    return new StreamableOperator() {

        @Override
        public void runFinal(final PortInput[] inputs, final PortOutput[] outputs, final ExecutionContext exec) throws Exception {
            assert inputs.length == 0;
            PortObject[] dataFromParent = ArrayUtils.remove(m_subNodeContainer.fetchInputDataFromParent(), 0);
            for (int i = 0; i < outputs.length; i++) {
                if (BufferedDataTable.TYPE.equals(getOutPortType(i))) {
                    // stream port content if it's data
                    BufferedDataTable bdt = (BufferedDataTable) (dataFromParent[i]);
                    RowOutput rowOutput = (RowOutput) outputs[i];
                    for (DataRow dr : bdt) {
                        rowOutput.push(dr);
                    }
                    rowOutput.close();
                } else {
                    ((PortObjectOutput) outputs[i]).setPortObject(dataFromParent[i]);
                }
            }
        }
    };
}
Also used : RowOutput(org.knime.core.node.streamable.RowOutput) ExecutionContext(org.knime.core.node.ExecutionContext) StreamableOperator(org.knime.core.node.streamable.StreamableOperator) BufferedDataTable(org.knime.core.node.BufferedDataTable) InactiveBranchPortObject(org.knime.core.node.port.inactive.InactiveBranchPortObject) PortObject(org.knime.core.node.port.PortObject) DataRow(org.knime.core.data.DataRow) PortObjectOutput(org.knime.core.node.streamable.PortObjectOutput)
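
The operator above pushes rows only for data ports; whether the framework hands it a RowOutput or a PortObjectOutput at a given index is decided by the output port roles the node declares. As a companion, here is a minimal sketch of such a declaration, assuming the common convention that data-table ports stream and all other port types are passed as whole objects (the role choice is an assumption, not code taken from VirtualSubNodeInputNodeModel):

@Override
public OutputPortRole[] getOutputPortRoles() {
    // Assumption: data-table ports can be streamed/distributed, every
    // other port type is handed over as a complete PortObject.
    OutputPortRole[] roles = new OutputPortRole[getNrOutPorts()];
    for (int i = 0; i < roles.length; i++) {
        roles[i] = BufferedDataTable.TYPE.equals(getOutPortType(i))
                ? OutputPortRole.DISTRIBUTED
                : OutputPortRole.NONDISTRIBUTED;
    }
    return roles;
}

With roles declared this way, runFinal can safely cast outputs[i] to RowOutput or PortObjectOutput exactly as the example does.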

Example 52 with ExecutionContext

Use of org.knime.core.node.ExecutionContext in project knime-core by knime.

The class JavaRowSplitterNodeModel, method execute.

/**
 * {@inheritDoc}
 */
@Override
protected BufferedDataTable[] execute(final BufferedDataTable[] inData, final ExecutionContext exec) throws Exception {
    final int rowCount = inData[0].getRowCount();
    m_rowCount = rowCount;
    DataTableRowInput input = new DataTableRowInput(inData[0]);
    DataTableSpec spec = inData[0].getDataTableSpec();
    BufferedDataContainer trueMatch = exec.createDataContainer(spec);
    BufferedDataTableRowOutput[] outputs;
    BufferedDataContainer falseMatch = null;
    if (getNrOutPorts() == 2) {
        falseMatch = exec.createDataContainer(spec);
    }
    outputs = Stream.of(trueMatch, falseMatch).filter(f -> f != null).map(f -> new BufferedDataTableRowOutput(f)).toArray(BufferedDataTableRowOutput[]::new);
    execute(input, outputs, exec);
    BufferedDataTable[] outTables = Stream.of(trueMatch, falseMatch).filter(f -> f != null).map(f -> f.getTable()).toArray(BufferedDataTable[]::new);
    return outTables;
}
Also used : IntStream(java.util.stream.IntStream) Arrays(java.util.Arrays) NodeSettingsRO(org.knime.core.node.NodeSettingsRO) DataTableSpec(org.knime.core.data.DataTableSpec) InvalidSettingsException(org.knime.core.node.InvalidSettingsException) CanceledExecutionException(org.knime.core.node.CanceledExecutionException) JavaScriptingCustomizer(org.knime.ext.sun.nodes.script.settings.JavaScriptingCustomizer) BufferedDataTableRowOutput(org.knime.core.node.streamable.BufferedDataTableRowOutput) Expression(org.knime.ext.sun.nodes.script.expression.Expression) ExecutionContext(org.knime.core.node.ExecutionContext) DataTableRowInput(org.knime.core.node.streamable.DataTableRowInput) StreamableOperatorInternals(org.knime.core.node.streamable.StreamableOperatorInternals) BooleanValue(org.knime.core.data.BooleanValue) DataCell(org.knime.core.data.DataCell) PortInput(org.knime.core.node.streamable.PortInput) ExecutionMonitor(org.knime.core.node.ExecutionMonitor) PartitionInfo(org.knime.core.node.streamable.PartitionInfo) RowInput(org.knime.core.node.streamable.RowInput) PortObjectSpec(org.knime.core.node.port.PortObjectSpec) ColumnCalculator(org.knime.ext.sun.nodes.script.calculator.ColumnCalculator) IOException(java.io.IOException) OutputPortRole(org.knime.core.node.streamable.OutputPortRole) BufferedDataContainer(org.knime.core.node.BufferedDataContainer) NodeModel(org.knime.core.node.NodeModel) File(java.io.File) DataRow(org.knime.core.data.DataRow) PortOutput(org.knime.core.node.streamable.PortOutput) NodeSettingsWO(org.knime.core.node.NodeSettingsWO) BufferedDataTable(org.knime.core.node.BufferedDataTable) Stream(java.util.stream.Stream) KnowsRowCountTable(org.knime.core.node.BufferedDataTable.KnowsRowCountTable) FlowVariableProvider(org.knime.ext.sun.nodes.script.calculator.FlowVariableProvider) InputPortRole(org.knime.core.node.streamable.InputPortRole) SimpleStreamableOperatorInternals(org.knime.core.node.streamable.simple.SimpleStreamableOperatorInternals) CheckUtils(org.knime.core.node.util.CheckUtils) MergeOperator(org.knime.core.node.streamable.MergeOperator) RowOutput(org.knime.core.node.streamable.RowOutput) StreamableOperator(org.knime.core.node.streamable.StreamableOperator) JavaScriptingSettings(org.knime.ext.sun.nodes.script.settings.JavaScriptingSettings) DataTableSpec(org.knime.core.data.DataTableSpec) BufferedDataContainer(org.knime.core.node.BufferedDataContainer) DataTableRowInput(org.knime.core.node.streamable.DataTableRowInput) BufferedDataTable(org.knime.core.node.BufferedDataTable) BufferedDataTableRowOutput(org.knime.core.node.streamable.BufferedDataTableRowOutput)
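
The method above adapts the classic execute signature to the node's streaming core by wrapping the input table in a DataTableRowInput and each container in a BufferedDataTableRowOutput. A minimal sketch of what such a shared row loop can look like follows; the matches(row) predicate is a hypothetical stand-in for the node's actual Java-snippet evaluation:

// Shared streaming core: pull rows, route each one to the true or
// false output, close all inputs/outputs once the source is drained.
private void execute(final RowInput input, final RowOutput[] outputs,
        final ExecutionMonitor exec) throws Exception {
    DataRow row;
    while ((row = input.poll()) != null) {
        exec.checkCanceled();
        if (matches(row)) { // hypothetical predicate, see lead-in
            outputs[0].push(row);
        } else if (outputs.length > 1) { // the false port is optional
            outputs[1].push(row);
        }
    }
    input.close();
    for (RowOutput out : outputs) {
        out.close();
    }
}

Because both the streaming and the non-streaming path funnel into one loop, the row-handling logic only has to be maintained once.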

Example 53 with ExecutionContext

Use of org.knime.core.node.ExecutionContext in project knime-core by knime.

The class PivotNodeModel, method execute.

/**
 * {@inheritDoc}
 */
@Override
protected BufferedDataTable[] execute(final BufferedDataTable[] inData, final ExecutionContext exec) throws Exception {
    final DataTableSpec inspec = inData[0].getDataTableSpec();
    final int group = inspec.findColumnIndex(m_group.getStringValue());
    final int pivot = inspec.findColumnIndex(m_pivot.getStringValue());
    final int aggre = (m_makeAgg.getStringValue().equals(PivotNodeDialogPane.MAKE_AGGREGATION[1]) ? inspec.findColumnIndex(m_agg.getStringValue()) : -1);
    PivotAggregationMethod aggMethod;
    if (aggre < 0) {
        aggMethod = PivotAggregationMethod.COUNT;
    } else {
        aggMethod = PivotAggregationMethod.METHODS.get(m_aggMethod.getStringValue());
    }
    // pair contains group and pivot plus the aggregation value
    final Map<Pair<String, String>, Double[]> map = new LinkedHashMap<Pair<String, String>, Double[]>();
    // list of pivot values
    final Set<String> pivotList = new LinkedHashSet<String>();
    final DataColumnSpec pivotSpec = inspec.getColumnSpec(pivot);
    if (pivotSpec.getDomain().hasValues()) {
        for (DataCell domValue : pivotSpec.getDomain().getValues()) {
            pivotList.add(domValue.toString());
        }
    }
    // list of group values
    final Set<String> groupList = new LinkedHashSet<String>();
    final LinkedHashMap<RowKey, Set<RowKey>> mapping = new LinkedHashMap<RowKey, Set<RowKey>>();
    final double nrRows = inData[0].getRowCount();
    int rowCnt = 0;
    ExecutionContext subExec = exec.createSubExecutionContext(0.75);
    // find all (group, pivot) pairs and aggregate the values of each group
    for (final DataRow row : inData[0]) {
        subExec.checkCanceled();
        subExec.setProgress(++rowCnt / nrRows, "Aggregating row: \"" + row.getKey().getString() + "\" (" + rowCnt + "/" + (int) nrRows + ")");
        final String groupString = row.getCell(group).toString();
        groupList.add(groupString);
        final DataCell pivotCell = row.getCell(pivot);
        // skip the row if missing pivot values should be ignored
        if (pivotCell.isMissing() && m_ignoreMissValues.getBooleanValue()) {
            continue;
        }
        final String pivotString = pivotCell.toString();
        pivotList.add(pivotString);
        final Pair<String, String> pair = new Pair<String, String>(groupString, pivotString);
        Double[] aggValue = map.get(pair);
        if (aggValue == null) {
            aggValue = aggMethod.init();
            map.put(pair, aggValue);
        }
        if (aggre < 0) {
            aggMethod.compute(aggValue, null);
        } else {
            final DataCell value = row.getCell(aggre);
            aggMethod.compute(aggValue, value);
        }
        if (m_hiliting.getBooleanValue()) {
            final RowKey groupKey = new RowKey(groupString);
            Set<RowKey> set = mapping.get(groupKey);
            if (set == null) {
                set = new LinkedHashSet<RowKey>();
                mapping.put(groupKey, set);
            }
            set.add(row.getKey());
        }
    }
    final DataTableSpec outspec = initSpec(pivotList);
    // will contain the final pivoting table
    final BufferedDataContainer buf = exec.createDataContainer(outspec);
    final double nrElements = groupList.size();
    int elementCnt = 0;
    subExec = exec.createSubExecutionContext(0.25);
    for (final String groupString : groupList) {
        subExec.checkCanceled();
        subExec.setProgress(++elementCnt / nrElements, "Computing aggregation of group \"" + groupString + "\" (" + elementCnt + "/" + (int) nrElements + ")");
        // contains the aggregated values
        final DataCell[] aggValues = new DataCell[pivotList.size()];
        // pivot index
        int idx = 0;
        for (final String pivotString : pivotList) {
            final Pair<String, String> newPair = new Pair<String, String>(groupString, pivotString);
            final Double[] aggValue = map.get(newPair);
            aggValues[idx] = aggMethod.done(aggValue);
            idx++;
        }
        // create new row with the given group id and aggregation values
        buf.addRowToTable(new DefaultRow(groupString, aggValues));
    }
    buf.close();
    if (m_hiliting.getBooleanValue()) {
        m_translator.setMapper(new DefaultHiLiteMapper(mapping));
    }
    return new BufferedDataTable[] { buf.getTable() };
}
Also used : LinkedHashSet(java.util.LinkedHashSet) DataTableSpec(org.knime.core.data.DataTableSpec) LinkedHashSet(java.util.LinkedHashSet) Set(java.util.Set) RowKey(org.knime.core.data.RowKey) SettingsModelString(org.knime.core.node.defaultnodesettings.SettingsModelString) DataRow(org.knime.core.data.DataRow) LinkedHashMap(java.util.LinkedHashMap) DataColumnSpec(org.knime.core.data.DataColumnSpec) BufferedDataTable(org.knime.core.node.BufferedDataTable) DefaultHiLiteMapper(org.knime.core.node.property.hilite.DefaultHiLiteMapper) Pair(org.knime.core.util.Pair) BufferedDataContainer(org.knime.core.node.BufferedDataContainer) ExecutionContext(org.knime.core.node.ExecutionContext) DataCell(org.knime.core.data.DataCell) DefaultRow(org.knime.core.data.def.DefaultRow)
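
Note how the node splits its progress bar with createSubExecutionContext: 75% for the aggregation scan and 25% for building the output table. The same pattern in isolation, as a minimal sketch (the table variable and the messages are illustrative, not taken from PivotNodeModel):

// Sketch: dividing one node's progress among two sequential phases.
ExecutionContext scanExec = exec.createSubExecutionContext(0.75);
final long total = table.size(); // 'table' is any BufferedDataTable
long done = 0;
for (DataRow row : table) {
    scanExec.checkCanceled(); // react to user cancellation per row
    scanExec.setProgress(++done / (double) total, "Scanning row " + done);
    // ... phase 1: collect aggregates ...
}
ExecutionContext writeExec = exec.createSubExecutionContext(0.25);
writeExec.setProgress(0.0, "Writing output");
// ... phase 2 reports into writeExec, which covers the remaining 25% ...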

Example 54 with ExecutionContext

Use of org.knime.core.node.ExecutionContext in project knime-core by knime.

The class CSVReaderNodeModel, method createFileTable.

protected FileTable createFileTable(final ExecutionContext exec) throws Exception {
    // prepare the settings for the file analyzer
    FileReaderNodeSettings settings = new FileReaderNodeSettings();
    CheckUtils.checkSourceFile(m_config.getLocation());
    URL url = FileUtil.toURL(m_config.getLocation());
    settings.setDataFileLocationAndUpdateTableName(url);
    String colDel = m_config.getColDelimiter();
    if (colDel != null && !colDel.isEmpty()) {
        settings.addDelimiterPattern(colDel, false, false, false);
    }
    settings.setDelimiterUserSet(true);
    String rowDel = m_config.getRowDelimiter();
    if (rowDel != null && !rowDel.isEmpty()) {
        settings.addRowDelimiter(rowDel, true);
    }
    String quote = m_config.getQuoteString();
    if (quote != null && !quote.isEmpty()) {
        settings.addQuotePattern(quote, quote);
    }
    settings.setQuoteUserSet(true);
    String commentStart = m_config.getCommentStart();
    if (commentStart != null && !commentStart.isEmpty()) {
        settings.addSingleLineCommentPattern(commentStart, false, false);
    }
    settings.setCommentUserSet(true);
    boolean hasColHeader = m_config.hasColHeader();
    settings.setFileHasColumnHeaders(hasColHeader);
    settings.setFileHasColumnHeadersUserSet(true);
    boolean hasRowHeader = m_config.hasRowHeader();
    settings.setFileHasRowHeaders(hasRowHeader);
    settings.setFileHasRowHeadersUserSet(true);
    settings.setWhiteSpaceUserSet(true);
    boolean supportShortLines = m_config.isSupportShortLines();
    settings.setSupportShortLines(supportShortLines);
    int skipFirstLinesCount = m_config.getSkipFirstLinesCount();
    settings.setSkipFirstLines(skipFirstLinesCount);
    final long limitRowsCount = m_config.getLimitRowsCount();
    settings.setMaximumNumberOfRowsToRead(limitRowsCount);
    settings.setCharsetName(m_config.getCharSetName());
    settings.setCharsetUserSet(true);
    settings.setConnectTimeout(m_config.getConnectTimeout());
    final int limitAnalysisCount = m_config.getLimitAnalysisCount();
    final ExecutionMonitor analyseExec = exec.createSubProgress(0.5);
    final ExecutionContext readExec = exec.createSubExecutionContext(0.5);
    exec.setMessage("Analyzing file");
    if (limitAnalysisCount >= 0) {
        final FileReaderExecutionMonitor fileReaderExec = new FileReaderExecutionMonitor();
        fileReaderExec.getProgressMonitor().addProgressListener(new NodeProgressListener() {

            @Override
            public void progressChanged(final NodeProgressEvent pe) {
                try {
                    // if the node was canceled, cancel (interrupt) the analysis
                    analyseExec.checkCanceled();
                    // otherwise update the node progress
                    NodeProgress nodeProgress = pe.getNodeProgress();
                    analyseExec.setProgress(nodeProgress.getProgress(), nodeProgress.getMessage());
                } catch (CanceledExecutionException e) {
                    fileReaderExec.setExecuteInterrupted();
                }
            }
        });
        fileReaderExec.setShortCutLines(limitAnalysisCount);
        fileReaderExec.setExecuteCanceled();
        settings = FileAnalyzer.analyze(settings, fileReaderExec);
    } else {
        settings = FileAnalyzer.analyze(settings, analyseExec);
    }
    SettingsStatus status = settings.getStatusOfSettings();
    if (status.getNumOfErrors() > 0) {
        throw new IllegalStateException(status.getErrorMessage(0));
    }
    final DataTableSpec tableSpec = settings.createDataTableSpec();
    if (tableSpec == null) {
        final SettingsStatus status2 = settings.getStatusOfSettings(true, null);
        if (status2.getNumOfErrors() > 0) {
            throw new IllegalStateException(status2.getErrorMessage(0));
        } else {
            throw new IllegalStateException("Unknown error during file analysis.");
        }
    }
    exec.setMessage("Buffering file");
    return new FileTable(tableSpec, settings, readExec);
}
Also used : DataTableSpec(org.knime.core.data.DataTableSpec) NodeProgressListener(org.knime.core.node.workflow.NodeProgressListener) NodeProgress(org.knime.core.node.workflow.NodeProgress) FileTable(org.knime.base.node.io.filereader.FileTable) SettingsStatus(org.knime.core.util.tokenizer.SettingsStatus) URL(java.net.URL) FileReaderNodeSettings(org.knime.base.node.io.filereader.FileReaderNodeSettings) ExecutionContext(org.knime.core.node.ExecutionContext) NodeProgressEvent(org.knime.core.node.workflow.NodeProgressEvent) CanceledExecutionException(org.knime.core.node.CanceledExecutionException) FileReaderExecutionMonitor(org.knime.base.node.io.filereader.FileReaderExecutionMonitor) ExecutionMonitor(org.knime.core.node.ExecutionMonitor) FileReaderExecutionMonitor(org.knime.base.node.io.filereader.FileReaderExecutionMonitor)
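
createFileTable only prepares a lazily read FileTable; the caller still has to materialize it. A minimal sketch of that hand-off, assuming the caller uses ExecutionContext.createBufferedDataTable (the real node's execute performs additional bookkeeping around this step):

@Override
protected BufferedDataTable[] execute(final BufferedDataTable[] inData,
        final ExecutionContext exec) throws Exception {
    FileTable fileTable = createFileTable(exec);
    // Copies the rows into KNIME's buffered storage; the FileTable reads
    // the file on the fly while the container buffers the result.
    BufferedDataTable out = exec.createBufferedDataTable(fileTable, exec);
    return new BufferedDataTable[]{out};
}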

Example 55 with ExecutionContext

Use of org.knime.core.node.ExecutionContext in project knime-core by knime.

The class CSVWriterNodeModel, method createStreamableOperator.

/**
 * {@inheritDoc}
 */
@Override
public StreamableOperator createStreamableOperator(final PartitionInfo partitionInfo, final PortObjectSpec[] inSpecs) throws InvalidSettingsException {
    return new StreamableOperator() {

        @Override
        public void runFinal(final PortInput[] inputs, final PortOutput[] outputs, final ExecutionContext exec) throws Exception {
            assert outputs.length == 0;
            RowInput input = (RowInput) inputs[0];
            doIt(null, input, exec);
        }
    };
}
Also used : ExecutionContext(org.knime.core.node.ExecutionContext) StreamableOperator(org.knime.core.node.streamable.StreamableOperator) RowInput(org.knime.core.node.streamable.RowInput)
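
A sink like this consumes one streamed input and produces no data outputs, which the node advertises through its input port roles. A minimal sketch of that declaration, assuming the usual choice for file writers (all rows must end up in a single file, so the input is streamable but not distributable):

@Override
public InputPortRole[] getInputPortRoles() {
    // Assumption: streamed but non-distributed, since a single
    // process must write the complete file.
    return new InputPortRole[]{InputPortRole.NONDISTRIBUTED_STREAMABLE};
}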

Aggregations

ExecutionContext (org.knime.core.node.ExecutionContext): 107
DataTableSpec (org.knime.core.data.DataTableSpec): 61
StreamableOperator (org.knime.core.node.streamable.StreamableOperator): 57
ColumnRearranger (org.knime.core.data.container.ColumnRearranger): 45
BufferedDataTable (org.knime.core.node.BufferedDataTable): 44
DataRow (org.knime.core.data.DataRow): 35
RowInput (org.knime.core.node.streamable.RowInput): 26
RowOutput (org.knime.core.node.streamable.RowOutput): 24
StreamableFunction (org.knime.core.node.streamable.StreamableFunction): 23
ExecutionMonitor (org.knime.core.node.ExecutionMonitor): 20
InvalidSettingsException (org.knime.core.node.InvalidSettingsException): 20
DataColumnSpec (org.knime.core.data.DataColumnSpec): 19
DataCell (org.knime.core.data.DataCell): 18
BufferedDataContainer (org.knime.core.node.BufferedDataContainer): 15
NodeModel (org.knime.core.node.NodeModel): 14
PortObject (org.knime.core.node.port.PortObject): 14
RowKey (org.knime.core.data.RowKey): 13
CanceledExecutionException (org.knime.core.node.CanceledExecutionException): 13
PMMLPortObject (org.knime.core.node.port.pmml.PMMLPortObject): 13
SettingsModelString (org.knime.core.node.defaultnodesettings.SettingsModelString): 12