
Example 1 with LoopStartNode

Use of org.knime.core.node.workflow.LoopStartNode in project knime-core by knime.

From the class LoopEndJoinNodeModel, method execute().

/**
 * {@inheritDoc}
 */
@Override
protected BufferedDataTable[] execute(final BufferedDataTable[] inData, final ExecutionContext exec) throws Exception {
    boolean hasSameRowsInEachIteration = m_configuration.hasSameRowsInEachIteration();
    LoopStartNode startNode = getLoopStartNode();
    if (!(startNode instanceof LoopStartNodeTerminator)) {
        throw new IllegalStateException("Loop end is not connected" + " to matching/corresponding loop start node. You" + " are trying to create an infinite loop!");
    }
    boolean continueLoop = !((LoopStartNodeTerminator) startNode).terminateLoop();
    if (m_currentAppendTable == null) {
        m_currentAppendTable = copy(inData[0], false, exec);
    } else if (hasSameRowsInEachIteration) {
        boolean isCacheNew = m_iteration % 50 == 0;
        double amount = isCacheNew ? (1.0 / 3.0) : (1.0 / 2.0);
        ExecutionContext copyCtx = exec.createSubExecutionContext(amount);
        ExecutionContext joinCtx = exec.createSubExecutionContext(amount);
        exec.setProgress("Copying input");
        BufferedDataTable t = copy(inData[0], true, copyCtx);
        copyCtx.setProgress(1.0);
        exec.setProgress("Joining with previous input");
        m_currentAppendTable = exec.createJoinedTable(m_currentAppendTable, t, joinCtx);
        joinCtx.setProgress(1.0);
        if (isCacheNew) {
            exec.setProgress("Caching intermediate results (iteration " + m_iteration + ")");
            ExecutionContext ctx = exec.createSubExecutionContext(amount);
            // copy the whole table every 50 iterations (avoids wrapping too many individual tables)
            // In this case the whole table is copied and column names DON'T need to be made unique (bugfix 6544)
            m_currentAppendTable = copy(m_currentAppendTable, m_appendIterSuffixForBackwardComp, ctx);
            ctx.setProgress(1.0);
        }
    } else {
        Joiner2Settings settings = new Joiner2Settings();
        settings.setCompositionMode(CompositionMode.MatchAll);
        settings.setDuplicateColumnSuffix(" (Iter #" + m_iteration + ")");
        settings.setDuplicateHandling(DuplicateHandling.AppendSuffix);
        settings.setEnableHiLite(false);
        // joining on RowIDs, this should not generate new row IDs but
        // only fill missing rows in either table
        settings.setJoinMode(JoinMode.FullOuterJoin);
        settings.setLeftIncludeAll(true);
        settings.setRightIncludeAll(true);
        // TODO to be replaced by Joiner2Settings.ROW_KEY_IDENTIFIER
        // once that is public
        settings.setLeftJoinColumns(new String[] { "$RowID$" });
        settings.setRightJoinColumns(new String[] { "$RowID$" });
        BufferedDataTable left = m_currentAppendTable;
        BufferedDataTable right = copy(inData[0], true, exec.createSubExecutionContext(0.1));
        Joiner joiner = new Joiner(left.getDataTableSpec(), right.getDataTableSpec(), settings);
        m_currentAppendTable = joiner.computeJoinTable(left, right, exec.createSubExecutionContext(0.9));
    }
    m_iteration += 1;
    if (continueLoop) {
        super.continueLoop();
        return null;
    } else {
        return new BufferedDataTable[] { m_currentAppendTable };
    }
}
Also used: ExecutionContext (org.knime.core.node.ExecutionContext), Joiner (org.knime.base.node.preproc.joiner.Joiner), BufferedDataTable (org.knime.core.node.BufferedDataTable), LoopStartNodeTerminator (org.knime.core.node.workflow.LoopStartNodeTerminator), LoopStartNode (org.knime.core.node.workflow.LoopStartNode), Joiner2Settings (org.knime.base.node.preproc.joiner.Joiner2Settings)
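Example 1 calls a private copy(...) helper that is not part of the snippet. Purely as orientation, a minimal sketch of what such a helper could look like is given below. It is a guess, not the actual LoopEndJoinNodeModel implementation: it assumes the boolean flag controls whether an iteration suffix is appended to the column names, and it assumes access to the model's m_iteration field. The classes used (BufferedDataContainer, DataColumnSpecCreator, CanceledExecutionException) come from org.knime.core.node and org.knime.core.data.

private BufferedDataTable copy(final BufferedDataTable table, final boolean uniquifyColumnNames,
        final ExecutionContext exec) throws CanceledExecutionException {
    // Hypothetical helper: materialize 'table' into a fresh BufferedDataTable,
    // optionally renaming columns so that repeated joins do not collide.
    DataTableSpec spec = table.getDataTableSpec();
    if (uniquifyColumnNames) {
        DataColumnSpec[] cols = new DataColumnSpec[spec.getNumColumns()];
        for (int i = 0; i < cols.length; i++) {
            DataColumnSpecCreator creator = new DataColumnSpecCreator(spec.getColumnSpec(i));
            creator.setName(spec.getColumnSpec(i).getName() + " (Iter #" + m_iteration + ")");
            cols[i] = creator.createSpec();
        }
        spec = new DataTableSpec(cols);
    }
    BufferedDataContainer container = exec.createDataContainer(spec);
    for (DataRow row : table) {
        exec.checkCanceled();
        container.addRowToTable(row);
    }
    container.close();
    return container.getTable();
}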

Example 2 with LoopStartNode

Use of org.knime.core.node.workflow.LoopStartNode in project knime-core by knime.

From the class LoopEndConditionNodeModel, method execute().

/**
 * {@inheritDoc}
 */
@Override
protected BufferedDataTable[] execute(final BufferedDataTable[] inData, final ExecutionContext exec) throws Exception {
    int count = peekFlowVariableInt("currentIteration");
    exec.setMessage("Iteration " + count);
    DataTableSpec spec1 = createSpec1(inData[0].getDataTableSpec());
    if (m_collectContainer == null) {
        assert m_variableContainer == null;
        m_startTime = System.currentTimeMillis();
        // first time we are getting to this: open container
        m_collectContainer = exec.createDataContainer(spec1);
        m_variableContainer = exec.createDataContainer(createSpec2());
    } else if (!spec1.equalStructure(m_collectContainer.getTableSpec())) {
        DataTableSpec predSpec = m_collectContainer.getTableSpec();
        StringBuilder error = new StringBuilder("Input table's structure differs from reference " + "(first iteration) table: ");
        if (spec1.getNumColumns() != predSpec.getNumColumns()) {
            error.append("different column counts ");
            error.append(spec1.getNumColumns());
            error.append(" vs. ").append(predSpec.getNumColumns());
        } else {
            for (int i = 0; i < spec1.getNumColumns(); i++) {
                DataColumnSpec inCol = spec1.getColumnSpec(i);
                DataColumnSpec predCol = predSpec.getColumnSpec(i);
                if (!inCol.equalStructure(predCol)) {
                    error.append("Column ").append(i).append(" [");
                    error.append(inCol).append("] vs. [");
                    error.append(predCol).append("]");
                }
            }
        }
        throw new IllegalArgumentException(error.toString());
    }
    RowKey rk = new RowKey("Iteration " + count);
    if (m_settings.variableType() == Type.DOUBLE) {
        m_variableContainer.addRowToTable(new DefaultRow(rk, new DoubleCell(peekFlowVariableDouble(m_settings.variableName()))));
    } else if (m_settings.variableType() == Type.INTEGER) {
        m_variableContainer.addRowToTable(new DefaultRow(rk, new IntCell(peekFlowVariableInt(m_settings.variableName()))));
    } else {
        m_variableContainer.addRowToTable(new DefaultRow(rk, new StringCell(peekFlowVariableString(m_settings.variableName()))));
    }
    LoopStartNode lsn = getLoopStartNode();
    boolean stop = checkCondition() || ((lsn instanceof LoopStartNodeTerminator) && ((LoopStartNodeTerminator) lsn).terminateLoop());
    if ((m_settings.addLastRows() && !m_settings.addLastRowsOnly()) || ((stop == m_settings.addLastRows()) && (stop == m_settings.addLastRowsOnly()))) {
        exec.setMessage("Collecting rows from current iteration");
        int k = 0;
        final double max = inData[0].size();
        IntCell currIterCell = new IntCell(count);
        for (DataRow row : inData[0]) {
            exec.checkCanceled();
            if (k++ % 10 == 0) {
                exec.setProgress(k / max);
            }
            DataRow newRow = new DefaultRow(new RowKey(row.getKey() + "#" + count), row);
            if (m_settings.addIterationColumn()) {
                newRow = new AppendedColumnRow(newRow, currIterCell);
            }
            m_collectContainer.addRowToTable(newRow);
        }
    }
    if (stop) {
        m_collectContainer.close();
        m_variableContainer.close();
        BufferedDataTable out1 = m_collectContainer.getTable();
        BufferedDataTable out2 = m_variableContainer.getTable();
        LOGGER.debug("Total loop execution time: " + (System.currentTimeMillis() - m_startTime) + "ms");
        m_startTime = 0;
        return new BufferedDataTable[] { out1, out2 };
    } else {
        continueLoop();
        return new BufferedDataTable[2];
    }
}
Also used: DataTableSpec (org.knime.core.data.DataTableSpec), RowKey (org.knime.core.data.RowKey), DoubleCell (org.knime.core.data.def.DoubleCell), DataRow (org.knime.core.data.DataRow), IntCell (org.knime.core.data.def.IntCell), DataColumnSpec (org.knime.core.data.DataColumnSpec), StringCell (org.knime.core.data.def.StringCell), BufferedDataTable (org.knime.core.node.BufferedDataTable), LoopStartNodeTerminator (org.knime.core.node.workflow.LoopStartNodeTerminator), DefaultRow (org.knime.core.data.def.DefaultRow), LoopStartNode (org.knime.core.node.workflow.LoopStartNode), AppendedColumnRow (org.knime.base.data.append.column.AppendedColumnRow)
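Example 2 relies on the private helpers createSpec1(...), createSpec2() and checkCondition(), none of which are shown. As a rough, hedged sketch, createSpec2() probably just builds the one-column spec of the flow-variable table that is filled a few lines further down; the column name used here is made up for illustration, and DataType comes from org.knime.core.data.

private DataTableSpec createSpec2() {
    // Hypothetical sketch: one column whose type matches the monitored flow variable.
    final DataType type;
    if (m_settings.variableType() == Type.DOUBLE) {
        type = DoubleCell.TYPE;
    } else if (m_settings.variableType() == Type.INTEGER) {
        type = IntCell.TYPE;
    } else {
        type = StringCell.TYPE;
    }
    DataColumnSpec col = new DataColumnSpecCreator("Variable value", type).createSpec();
    return new DataTableSpec(col);
}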

Example 3 with LoopStartNode

Use of org.knime.core.node.workflow.LoopStartNode in project knime-core by knime.

From the class NodeModel, method executeModel().

/**
 * Invokes the abstract <code>#execute()</code> method of this model. In
 * addition, this method notifies all assigned views of the model about the
 * changes.
 *
 * @param rawData An array of <code>PortObject</code> objects holding the data
 *            from the inputs (includes flow variable port).
 * @param exEnv The execution environment used for execution of this model.
 * @param exec The execution monitor which is passed to the execute method
 *            of this model.
 * @return The result of the execution in form of an array with
 *         <code>DataTable</code> elements, as many as the node has
 *         outputs.
 * @throws Exception any exception or error thrown in the derived
 *             model is simply forwarded. It may throw a
 *             CanceledExecutionException if the user pressed cancel during
 *             execution. Even if the derived model doesn't check, the
 *             result will be discarded and the exception thrown.
 * @throws IllegalStateException If the number of <code>PortObject</code>
 *             objects returned by the derived <code>NodeModel</code>
 *             does not match the number of outputs. Or if any of them is
 *             null.
 * @see #execute(PortObject[],ExecutionContext)
 * @since 2.8
 * @noreference This method is not intended to be referenced by clients
 *              (use Node class instead)
 */
PortObject[] executeModel(final PortObject[] rawData, final ExecutionEnvironment exEnv, final ExecutionContext exec) throws Exception {
    final PortObject[] data = ArrayUtils.remove(rawData, 0);
    assert (data != null && data.length == getNrInPorts());
    assert (exec != null);
    setWarningMessage(null);
    // check for compatible input PortObjects
    for (int i = 0; i < data.length; i++) {
        PortType thisType = getInPortType(i);
        if (thisType.isOptional() && data[i] == null) {
        // ignore non-populated optional input
        } else if (data[i] instanceof InactiveBranchPortObject) {
            assert this instanceof InactiveBranchConsumer;
        // allow Inactive POs at InactiveBranchConsumer
        } else if (!(thisType.getPortObjectClass().isInstance(data[i]))) {
            m_logger.error("  (Wanted: " + thisType.getPortObjectClass().getName() + ", " + "actual: " + data[i].getClass().getName() + ")");
            throw new IllegalStateException("Connection Error: Mismatch" + " of input port types (port " + (i) + ").");
        }
    }
    // temporary storage for result of derived model.
    // EXECUTE DERIVED MODEL
    PortObject[] outData;
    if (!exEnv.reExecute()) {
        outData = execute(data, exec);
    } else {
        // FIXME: implement reexecution with loading view content and execute
        if (this instanceof InteractiveNode) {
            InteractiveNode iThis = (InteractiveNode) this;
            ViewContent viewContent = exEnv.getPreExecuteViewContent();
            iThis.loadViewValue(viewContent, exEnv.getUseAsDefault());
            outData = execute(data, exec);
        } else if (this instanceof LoopStartNode) {
            outData = execute(data, exec);
        } else {
            m_logger.coding("Cannot re-execute non interactive node. Using normal execute instead.");
            outData = execute(data, exec);
        }
    }
    // if execution was canceled without an exception being thrown, discard the result here
    if (exec.isCanceled()) {
        throw new CanceledExecutionException("Result discarded due to user cancel");
    }
    if (outData == null) {
        outData = new PortObject[getNrOutPorts()];
    }
    /* Cleanup operation for nodes that just pass on their input
     * data table. We need to wrap those here so that the framework
     * explicitly references them (instead of copying) */
    for (int i = 0; i < outData.length; i++) {
        if (outData[i] instanceof BufferedDataTable) {
            for (int j = 0; j < data.length; j++) {
                if (outData[i] == data[j]) {
                    outData[i] = exec.createWrappedTable((BufferedDataTable) data[j]);
                }
            }
        } else if (outData[i] instanceof FileStorePortObject) {
            // file stores can be 'external', e.g. when a model reader node reads an external model file
            FileStorePortObject fsPO = (FileStorePortObject) outData[i];
            FileStoreHandlerRepository expectedRep = exec.getFileStoreHandler().getFileStoreHandlerRepository();
            FileStoreHandlerRepository actualRep = FileStoreUtil.getFileStores(fsPO).stream().map(FileStoreUtil::getFileStoreHandler).map(h -> h.getFileStoreHandlerRepository()).findFirst().orElse(expectedRep);
            if (actualRep != expectedRep) {
                outData[i] = Node.copyPortObject(fsPO, exec);
            }
        }
    }
    // if number of out tables does not match: fail
    if (outData.length != getNrOutPorts()) {
        throw new IllegalStateException("Invalid result. Execution failed. " + "Reason: Incorrect implementation; the execute" + " method in " + this.getClass().getSimpleName() + " returned null or an incorrect number of output" + " tables.");
    }
    // check the result, data tables must not be null
    for (int i = 0; i < outData.length; i++) {
        // do not check for null output tables if this is the end node
        // of a loop and another loop iteration is requested
        if ((getLoopContext() == null) && (outData[i] == null)) {
            m_logger.error("Execution failed: Incorrect implementation;" + " the execute method in " + this.getClass().getSimpleName() + "returned a null data table at port: " + i);
            throw new IllegalStateException("Invalid result. " + "Execution failed, reason: data at output " + i + " is null.");
        }
    }
    // warn if the node produced only empty data tables
    // - only if the execute didn't issue a warning already
    if ((m_warningMessage == null) || (m_warningMessage.length() == 0)) {
        boolean hasData = false;
        // number of BDT ports
        int bdtPortCount = 0;
        for (int i = 0; i < outData.length; i++) {
            if (outData[i] instanceof BufferedDataTable) {
                // do some sanity checks on PortObjects holding data tables
                bdtPortCount += 1;
                BufferedDataTable outDataTable = (BufferedDataTable) outData[i];
                if (outDataTable.size() > 0) {
                    hasData = true;
                } else {
                    m_logger.info("The result table at port " + i + " contains no rows");
                }
            }
        }
        if (!hasData && bdtPortCount > 0) {
            if (bdtPortCount == 1) {
                setWarningMessage("Node created an empty data table.");
            } else {
                setWarningMessage("Node created empty data tables on all out-ports.");
            }
        }
    }
    setHasContent(true);
    PortObject[] rawOutData = new PortObject[getNrOutPorts() + 1];
    rawOutData[0] = FlowVariablePortObject.INSTANCE;
    System.arraycopy(outData, 0, rawOutData, 1, outData.length);
    return rawOutData;
}
Also used: Arrays (java.util.Arrays), FileStoreUtil (org.knime.core.data.filestore.FileStoreUtil), InteractiveNode (org.knime.core.node.interactive.InteractiveNode), ExecutionEnvironment (org.knime.core.node.workflow.ExecutionEnvironment), Map (java.util.Map), FlowTryCatchContext (org.knime.core.node.workflow.FlowTryCatchContext), InactiveBranchPortObjectSpec (org.knime.core.node.port.inactive.InactiveBranchPortObjectSpec), FileStoreHandlerRepository (org.knime.core.data.filestore.internal.FileStoreHandlerRepository), PortType (org.knime.core.node.port.PortType), PortInput (org.knime.core.node.streamable.PortInput), PartitionInfo (org.knime.core.node.streamable.PartitionInfo), CopyOnWriteArraySet (java.util.concurrent.CopyOnWriteArraySet), PortObjectOutput (org.knime.core.node.streamable.PortObjectOutput), InputPortRole (org.knime.core.node.streamable.InputPortRole), HiLiteHandler (org.knime.core.node.property.hilite.HiLiteHandler), PortObject (org.knime.core.node.port.PortObject), RowOutput (org.knime.core.node.streamable.RowOutput), CopyOnWriteArrayList (java.util.concurrent.CopyOnWriteArrayList), FileStorePortObject (org.knime.core.data.filestore.FileStorePortObject), FlowVariablePortObject (org.knime.core.node.port.flowvariable.FlowVariablePortObject), DataTableSpec (org.knime.core.data.DataTableSpec), LoopEndNode (org.knime.core.node.workflow.LoopEndNode), FlowVariable (org.knime.core.node.workflow.FlowVariable), PortObjectInput (org.knime.core.node.streamable.PortObjectInput), ArrayUtils (org.apache.commons.lang3.ArrayUtils), LinkedHashMap (java.util.LinkedHashMap), StreamableOperatorInternals (org.knime.core.node.streamable.StreamableOperatorInternals), CredentialsProvider (org.knime.core.node.workflow.CredentialsProvider), NoSuchElementException (java.util.NoSuchElementException), InactiveBranchPortObject (org.knime.core.node.port.inactive.InactiveBranchPortObject), FlowObjectStack (org.knime.core.node.workflow.FlowObjectStack), ScopeStartNode (org.knime.core.node.workflow.ScopeStartNode), RowInput (org.knime.core.node.streamable.RowInput), InteractiveView (org.knime.core.node.interactive.InteractiveView), PortObjectSpec (org.knime.core.node.port.PortObjectSpec), FlowScopeContext (org.knime.core.node.workflow.FlowScopeContext), IOException (java.io.IOException), InactiveBranchConsumer (org.knime.core.node.port.inactive.InactiveBranchConsumer), OutputPortRole (org.knime.core.node.streamable.OutputPortRole), NodeContext (org.knime.core.node.workflow.NodeContext), File (java.io.File), ViewContent (org.knime.core.node.interactive.ViewContent), PortOutput (org.knime.core.node.streamable.PortOutput), ICredentials (org.knime.core.node.workflow.ICredentials), LoopStartNode (org.knime.core.node.workflow.LoopStartNode), FlowLoopContext (org.knime.core.node.workflow.FlowLoopContext), Collections (java.util.Collections), MergeOperator (org.knime.core.node.streamable.MergeOperator), ViewableModel (org.knime.core.node.AbstractNodeView.ViewableModel), StreamableOperator (org.knime.core.node.streamable.StreamableOperator)
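All three examples query the loop start through getLoopStartNode() and the LoopStartNodeTerminator sub-interface. For context, the sketch below shows what a heavily simplified counting loop start could look like. It illustrates the interface contract only and is not KNIME's actual LoopStartCountNodeModel; the class name, the fixed iteration limit and the empty settings/internals stubs are assumptions made for this sketch.

import java.io.File;

import org.knime.core.data.DataTableSpec;
import org.knime.core.node.BufferedDataTable;
import org.knime.core.node.ExecutionContext;
import org.knime.core.node.ExecutionMonitor;
import org.knime.core.node.NodeModel;
import org.knime.core.node.NodeSettingsRO;
import org.knime.core.node.NodeSettingsWO;
import org.knime.core.node.workflow.LoopStartNodeTerminator;

public class SimpleCountingLoopStartNodeModel extends NodeModel implements LoopStartNodeTerminator {

    private int m_iteration = 0;

    // fixed limit for the sketch; a real node would read this from its settings
    private final int m_maxIterations = 10;

    protected SimpleCountingLoopStartNodeModel() {
        super(1, 1); // one data in-port, one data out-port
    }

    @Override
    protected DataTableSpec[] configure(final DataTableSpec[] inSpecs) {
        return inSpecs; // the table structure is passed through unchanged
    }

    @Override
    protected BufferedDataTable[] execute(final BufferedDataTable[] inData, final ExecutionContext exec)
            throws Exception {
        // expose the loop counter; Example 2 reads it via peekFlowVariableInt("currentIteration")
        pushFlowVariableInt("currentIteration", m_iteration);
        m_iteration++;
        return inData; // pass the input through; the framework wraps such tables (see the cleanup loop above)
    }

    @Override
    public boolean terminateLoop() {
        // the matching loop end calls this to decide whether another iteration is needed
        return m_iteration >= m_maxIterations;
    }

    @Override
    protected void reset() {
        m_iteration = 0;
    }

    // no settings or internals in this sketch
    @Override
    protected void saveSettingsTo(final NodeSettingsWO settings) { }

    @Override
    protected void validateSettings(final NodeSettingsRO settings) { }

    @Override
    protected void loadValidatedSettingsFrom(final NodeSettingsRO settings) { }

    @Override
    protected void loadInternals(final File nodeInternDir, final ExecutionMonitor exec) { }

    @Override
    protected void saveInternals(final File nodeInternDir, final ExecutionMonitor exec) { }
}

A matching loop end, such as the two models shown above, would then alternate between continueLoop() and producing its final output once terminateLoop() reports true.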

Aggregations

LoopStartNode (org.knime.core.node.workflow.LoopStartNode): 3
DataTableSpec (org.knime.core.data.DataTableSpec): 2
BufferedDataTable (org.knime.core.node.BufferedDataTable): 2
File (java.io.File): 1
IOException (java.io.IOException): 1
Arrays (java.util.Arrays): 1
Collections (java.util.Collections): 1
LinkedHashMap (java.util.LinkedHashMap): 1
Map (java.util.Map): 1
NoSuchElementException (java.util.NoSuchElementException): 1
CopyOnWriteArrayList (java.util.concurrent.CopyOnWriteArrayList): 1
CopyOnWriteArraySet (java.util.concurrent.CopyOnWriteArraySet): 1
ArrayUtils (org.apache.commons.lang3.ArrayUtils): 1
AppendedColumnRow (org.knime.base.data.append.column.AppendedColumnRow): 1
Joiner (org.knime.base.node.preproc.joiner.Joiner): 1
Joiner2Settings (org.knime.base.node.preproc.joiner.Joiner2Settings): 1
DataColumnSpec (org.knime.core.data.DataColumnSpec): 1
DataRow (org.knime.core.data.DataRow): 1
RowKey (org.knime.core.data.RowKey): 1
DefaultRow (org.knime.core.data.def.DefaultRow): 1