Example 66 with CanceledExecutionException

use of org.knime.core.node.CanceledExecutionException in project knime-core by knime.

the class WorkflowManager method postLoad.

private void postLoad(final Map<NodeID, NodeContainerPersistor> persistorMap, final Map<Integer, BufferedDataTable> tblRep, final boolean mustWarnOnDataLoadError, final ExecutionMonitor exec, final LoadResult loadResult, final boolean keepNodeMessage) throws CanceledExecutionException {
    // linked set because we need reverse order later on
    Collection<NodeID> failedNodes = new LinkedHashSet<NodeID>();
    boolean isStateChangePredictable = false;
    final Set<NodeID> nodeIDsInPersistorSet = persistorMap.keySet();
    // had NPE below - adding this line to get better debug information
    CheckUtils.checkArgumentNotNull(nodeIDsInPersistorSet, "NodeID list from persistor must not be null for workflow %s", getNameWithID());
    for (NodeID bfsID : m_workflow.createBreadthFirstSortedList(nodeIDsInPersistorSet, true).keySet()) {
        NodeContainer cont = getNodeContainer(bfsID);
        // initialize node container with CredentialsStore
        if (cont instanceof SingleNodeContainer) {
            SingleNodeContainer snc = (SingleNodeContainer) cont;
            snc.setCredentialsStore(m_credentialsStore);
        }
        LoadResult subResult = new LoadResult(cont.getNameWithID());
        boolean isFullyConnected = isFullyConnected(bfsID);
        boolean needsReset;
        switch(cont.getInternalState()) {
            case IDLE:
            case UNCONFIGURED_MARKEDFOREXEC:
                needsReset = false;
                break;
            default:
                // we reset everything which is not fully connected
                needsReset = !isFullyConnected;
                break;
        }
        NodeOutPort[] predPorts = assemblePredecessorOutPorts(bfsID);
        final int predCount = predPorts.length;
        PortObject[] portObjects = new PortObject[predCount];
        boolean inPortsContainNull = false;
        FlowObjectStack[] predStacks = new FlowObjectStack[predCount];
        for (int i = 0; i < predCount; i++) {
            NodeOutPort p = predPorts[i];
            if (cont instanceof SingleNodeContainer && p != null) {
                SingleNodeContainer snc = (SingleNodeContainer) cont;
                snc.setInHiLiteHandler(i, p.getHiLiteHandler());
            }
            if (p != null) {
                predStacks[i] = p.getFlowObjectStack();
                portObjects[i] = p.getPortObject();
                // any missing port object means the node's input data is incomplete
                inPortsContainNull |= portObjects[i] == null;
            }
        }
        FlowObjectStack inStack;
        try {
            if (isSourceNode(bfsID)) {
                predStacks = new FlowObjectStack[] { getWorkflowVariableStack() };
            }
            inStack = new FlowObjectStack(cont.getID(), predStacks);
        } catch (IllegalFlowObjectStackException ex) {
            subResult.addError("Errors creating flow object stack for " + "node \"" + cont.getNameWithID() + "\", (resetting " + "flow variables): " + ex.getMessage());
            needsReset = true;
            inStack = new FlowObjectStack(cont.getID());
        }
        NodeContainerPersistor persistor = persistorMap.get(bfsID);
        InternalNodeContainerState loadState = persistor.getMetaPersistor().getState();
        exec.setMessage(cont.getNameWithID());
        exec.checkCanceled();
        // two steps below: loadNodeContainer and loadContent
        ExecutionMonitor sub1 = exec.createSubProgress(1.0 / (2 * m_workflow.getNrNodes()));
        ExecutionMonitor sub2 = exec.createSubProgress(1.0 / (2 * m_workflow.getNrNodes()));
        NodeContext.pushContext(cont);
        try {
            persistor.loadNodeContainer(tblRep, sub1, subResult);
        } catch (CanceledExecutionException e) {
            throw e;
        } catch (Exception e) {
            if (!(e instanceof InvalidSettingsException) && !(e instanceof IOException)) {
                LOGGER.error("Caught unexpected \"" + e.getClass().getSimpleName() + "\" during node loading", e);
            }
            subResult.addError("Errors loading, skipping it: " + e.getMessage());
            needsReset = true;
        } finally {
            NodeContext.removeLastContext();
        }
        sub1.setProgress(1.0);
        // loadContent requires the (sub) workflow lock to be held
        // (that is being asserted in methods which get called indirectly)
        try (WorkflowLock lock = cont instanceof WorkflowManager ? ((WorkflowManager) cont).lock() : lock()) {
            cont.loadContent(persistor, tblRep, inStack, sub2, subResult, keepNodeMessage);
        }
        sub2.setProgress(1.0);
        if (persistor.isDirtyAfterLoad()) {
            cont.setDirty();
        }
        boolean hasPredecessorFailed = false;
        for (ConnectionContainer cc : m_workflow.getConnectionsByDest(bfsID)) {
            NodeID s = cc.getSource();
            if (s.equals(getID())) {
                // don't consider WFM_IN connections
                continue;
            }
            if (failedNodes.contains(s)) {
                hasPredecessorFailed = true;
            }
        }
        needsReset |= persistor.needsResetAfterLoad();
        needsReset |= hasPredecessorFailed;
        boolean isExecuted = cont.getInternalState().equals(EXECUTED);
        boolean remoteExec = persistor.getMetaPersistor().getExecutionJobSettings() != null;
        // node is executed but at least one of its predecessors has been loaded as IDLE
        if (!needsReset && isExecuted && inPortsContainNull) {
            needsReset = true;
            subResult.addError("Predecessor ports have no data", true);
        }
        if (needsReset && cont instanceof SingleNodeContainer && cont.isResetable()) {
            // we don't care for successors because they are not loaded yet
            invokeResetOnSingleNodeContainer((SingleNodeContainer) cont);
            isExecuted = false;
        }
        if (needsReset) {
            failedNodes.add(bfsID);
        }
        if (!isExecuted && cont instanceof SingleNodeContainer) {
            configureSingleNodeContainer((SingleNodeContainer) cont, keepNodeMessage);
        }
        if (persistor.mustComplainIfStateDoesNotMatch() && !cont.getInternalState().equals(loadState) && !hasPredecessorFailed) {
            isStateChangePredictable = true;
            String warning = "State has changed from " + loadState + " to " + cont.getInternalState();
            switch(subResult.getType()) {
                case DataLoadError:
                    // data load errors cause state changes
                    subResult.addError(warning, true);
                    break;
                default:
                    subResult.addWarning(warning);
            }
            cont.setDirty();
        }
        // node was saved in executing state (e.g. grid job); request to reconnect
        if (remoteExec) {
            if (needsReset) {
                subResult.addError("Can't continue execution " + "due to load errors");
            }
            if (inPortsContainNull) {
                subResult.addError("Can't continue execution; no data in inport");
            }
            if (!cont.getInternalState().equals(EXECUTINGREMOTELY)) {
                subResult.addError("Can't continue execution; node is not " + "configured but " + cont.getInternalState());
            }
            try {
                if (!continueExecutionOnLoad(cont, persistor)) {
                    cont.cancelExecution();
                    cont.setDirty();
                    subResult.addError("Can't continue execution; unknown reason");
                }
            } catch (Exception exc) {
                StringBuilder error = new StringBuilder("Can't continue execution");
                if (exc instanceof NodeExecutionJobReconnectException || exc instanceof InvalidSettingsException) {
                    error.append(": ").append(exc.getMessage());
                } else {
                    error.append(" due to ");
                    error.append(exc.getClass().getSimpleName());
                    error.append(": ").append(exc.getMessage());
                }
                LOGGER.error(error, exc);
                cont.cancelExecution();
                cont.setDirty();
                subResult.addError(error.toString());
            }
        }
        loadResult.addChildError(subResult);
        loadResult.addMissingNodes(subResult.getMissingNodes());
        // errors or warnings collected for this node must be reported via its node message
        switch(subResult.getType()) {
            case Ok:
            case Warning:
                break;
            case DataLoadError:
                if (!mustWarnOnDataLoadError) {
                    break;
                }
            default:
                NodeMessage oldMessage = cont.getNodeMessage();
                StringBuilder messageBuilder = new StringBuilder(oldMessage.getMessage());
                if (messageBuilder.length() != 0) {
                    messageBuilder.append("\n");
                }
                NodeMessage.Type type;
                switch(oldMessage.getMessageType()) {
                    case RESET:
                    case WARNING:
                        type = NodeMessage.Type.WARNING;
                        break;
                    default:
                        type = NodeMessage.Type.ERROR;
                }
                messageBuilder.append(subResult.getFilteredError("", LoadResultEntryType.Warning));
                cont.setNodeMessage(new NodeMessage(type, messageBuilder.toString()));
        }
    }
    if (!sweep(nodeIDsInPersistorSet, false) && !isStateChangePredictable) {
        loadResult.addWarning("Some node states were invalid");
    }
}
Also used : LinkedHashSet(java.util.LinkedHashSet) CanceledExecutionException(org.knime.core.node.CanceledExecutionException) ExecutionMonitor(org.knime.core.node.ExecutionMonitor) FlowVariablePortObject(org.knime.core.node.port.flowvariable.FlowVariablePortObject) PortObject(org.knime.core.node.port.PortObject) Type(org.knime.core.node.workflow.NodeMessage.Type) IOException(java.io.IOException) WorkflowLoadResult(org.knime.core.node.workflow.WorkflowPersistor.WorkflowLoadResult) LoadResult(org.knime.core.node.workflow.WorkflowPersistor.LoadResult) IExtensionPoint(org.eclipse.core.runtime.IExtensionPoint) CoreException(org.eclipse.core.runtime.CoreException) LockFailedException(org.knime.core.util.LockFailedException) NoSuchAlgorithmException(java.security.NoSuchAlgorithmException) NotConfigurableException(org.knime.core.node.NotConfigurableException) InvalidSettingsException(org.knime.core.node.InvalidSettingsException) URISyntaxException(java.net.URISyntaxException)
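
The postLoad code above shows the standard cooperative-cancellation contract: the caller hands in an ExecutionMonitor, the loader periodically calls checkCanceled() and reports progress through createSubProgress/setProgress, and any CanceledExecutionException is simply propagated. A minimal sketch of that pattern, assuming nothing beyond the org.knime.core.node API; the class and method names below are hypothetical and not part of knime-core.

import java.util.List;

import org.knime.core.node.CanceledExecutionException;
import org.knime.core.node.ExecutionMonitor;

// Hypothetical helper illustrating the checkCanceled()/sub-progress pattern.
final class CancellableLoader {

    static void loadAll(final List<String> items, final ExecutionMonitor exec)
            throws CanceledExecutionException {
        final int count = items.size();
        for (int i = 0; i < count; i++) {
            // throws CanceledExecutionException as soon as cancel was requested
            exec.checkCanceled();
            exec.setMessage("Loading " + items.get(i));
            // one sub-monitor per item, each covering an equal share of the overall progress
            ExecutionMonitor sub = exec.createSubProgress(1.0 / count);
            // ... per-item work goes here, reporting against 'sub' ...
            sub.setProgress(1.0);
        }
    }
}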

Example 67 with CanceledExecutionException

use of org.knime.core.node.CanceledExecutionException in project knime-core by knime.

the class AbstractSimplePortObject method getViews.

/**
 * {@inheritDoc}
 */
@Override
public JComponent[] getViews() {
    try {
        ModelContent model = new ModelContent("Model Content");
        save(model, new ExecutionMonitor());
        return new JComponent[] { new ModelContentOutPortView(model) };
    } catch (CanceledExecutionException cee) {
        // should not be possible
    }
    return null;
}
Also used : CanceledExecutionException(org.knime.core.node.CanceledExecutionException) ModelContent(org.knime.core.node.ModelContent) JComponent(javax.swing.JComponent) ExecutionMonitor(org.knime.core.node.ExecutionMonitor) ModelContentOutPortView(org.knime.core.node.workflow.ModelContentOutPortView)
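
The empty catch block above is safe because a freshly constructed ExecutionMonitor has no cancel source, so checkCanceled() inside save(...) can never throw. A minimal sketch of that idiom, assuming only the org.knime.core.node API; PortObjectLike is a hypothetical stand-in for AbstractSimplePortObject's protected save method, not a KNIME type.

import org.knime.core.node.CanceledExecutionException;
import org.knime.core.node.ExecutionMonitor;
import org.knime.core.node.ModelContent;

// Hypothetical sketch: save model content with a throwaway monitor.
final class ModelSnapshot {

    /** Stand-in for AbstractSimplePortObject#save(ModelContentWO, ExecutionMonitor). */
    interface PortObjectLike {
        void save(ModelContent model, ExecutionMonitor exec) throws CanceledExecutionException;
    }

    static ModelContent snapshot(final PortObjectLike po) {
        ModelContent model = new ModelContent("Model Content");
        try {
            po.save(model, new ExecutionMonitor());
        } catch (CanceledExecutionException cee) {
            // cannot happen for a standalone monitor; fail loudly rather than swallow it
            throw new IllegalStateException(cee);
        }
        return model;
    }
}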

Example 68 with CanceledExecutionException

use of org.knime.core.node.CanceledExecutionException in project knime-core by knime.

the class PMMLPortObjectSerializer method loadPortObject.

/**
 * {@inheritDoc}
 */
@Override
public PMMLPortObject loadPortObject(final PortObjectZipInputStream in, final PortObjectSpec spec, final ExecutionMonitor exec) throws IOException, CanceledExecutionException {
    String entryName = in.getNextEntry().getName();
    PMMLPortObject portObj = null;
    if (entryName.equals(CLAZZ_FILE_NAME)) {
        /* Skip the first entry. It only exists in old versions of the
         * PMMLPortObject. */
        entryName = in.getNextEntry().getName();
    }
    if (!entryName.equals(FILE_NAME)) {
        throw new IOException("Found unexpected zip entry " + entryName + "! Expected " + FILE_NAME);
    }
    try {
        portObj = new PMMLPortObject();
        portObj.loadFrom((PMMLPortObjectSpec) spec, in);
    } catch (Exception e) {
        throw new IOException(e);
    }
    return portObj;
}
Also used : IOException(java.io.IOException) CanceledExecutionException(org.knime.core.node.CanceledExecutionException)
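
loadPortObject above lets IOExceptions from the zip stream pass through and wraps anything thrown while parsing the PMML content into an IOException. A slightly generalized sketch of that wrapping idiom, assuming only the org.knime.core.node API; Parser is a hypothetical stand-in for PMMLPortObject#loadFrom.

import java.io.IOException;

import org.knime.core.node.CanceledExecutionException;
import org.knime.core.node.ExecutionMonitor;

// Hypothetical sketch of the wrap-into-IOException idiom used by the serializer.
final class LoadHelper {

    /** Stand-in for the parsing step, e.g. portObj.loadFrom(spec, in). */
    interface Parser {
        void parse() throws Exception;
    }

    static void loadChecked(final Parser parser, final ExecutionMonitor exec)
            throws IOException, CanceledExecutionException {
        exec.checkCanceled();
        try {
            parser.parse();
        } catch (IOException | CanceledExecutionException e) {
            // keep the exception types declared in the method signature
            throw e;
        } catch (Exception e) {
            // anything else is reported as an I/O problem, mirroring the catch block above
            throw new IOException(e);
        }
    }
}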

Example 69 with CanceledExecutionException

use of org.knime.core.node.CanceledExecutionException in project knime-core by knime.

the class PMMLPreprocPortObject method save.

/**
 * {@inheritDoc}
 */
@Override
protected void save(final PortObjectZipOutputStream out, final ExecutionMonitor exec) throws IOException, CanceledExecutionException {
    if (m_operations.size() == 0) {
        return;
    }
    try {
        int size = m_operations.size();
        final double subProgress = 1.0 / size;
        int n = 0;
        for (PMMLPreprocOperation op : m_operations) {
            out.putNextEntry(new ZipEntry(op.getClass().getName()));
            PortObjectZipOutputStreamAndString sout = new PortObjectZipOutputStreamAndString(out);
            TransformerHandler handler = createTransformerHandlerForSave(sout);
            String writeElement = op.getTransformElement().toString();
            handler.startElement("", "", writeElement, null);
            op.save(handler, exec.createSubProgress(subProgress));
            handler.endElement("", "", writeElement);
            handler.endDocument();
            out.closeEntry();
            exec.setProgress(subProgress * ++n);
            exec.checkCanceled();
        }
        out.close();
    } catch (Exception e) {
        throw new IOException(e);
    }
}
Also used : TransformerHandler(javax.xml.transform.sax.TransformerHandler) ZipEntry(java.util.zip.ZipEntry) IOException(java.io.IOException) CanceledExecutionException(org.knime.core.node.CanceledExecutionException) TransformerConfigurationException(javax.xml.transform.TransformerConfigurationException) SAXException(org.xml.sax.SAXException)
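
The save method above splits the progress range evenly across the preprocessing operations and checks for cancellation once per written entry. A stripped-down sketch of that bookkeeping, with Runnable standing in for the actual PMMLPreprocOperation.save call; only the ExecutionMonitor calls are real KNIME API.

import java.util.List;

import org.knime.core.node.CanceledExecutionException;
import org.knime.core.node.ExecutionMonitor;

// Hypothetical sketch: equal progress share per entry, cancellation checked between entries.
final class ProgressPerEntry {

    static void writeAll(final List<Runnable> operations, final ExecutionMonitor exec)
            throws CanceledExecutionException {
        final int size = operations.size();
        if (size == 0) {
            return;
        }
        final double share = 1.0 / size;
        int done = 0;
        for (Runnable op : operations) {
            op.run();                          // stand-in for op.save(handler, subProgress)
            exec.setProgress(share * ++done);  // overall progress after this entry
            exec.checkCanceled();              // abort between entries if requested
        }
    }
}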

Example 70 with CanceledExecutionException

use of org.knime.core.node.CanceledExecutionException in project knime-core by knime.

the class BatchExecutor method executeWorkflow.

/**
 * Executes a workflow.
 *
 * @param wfm the workflow manager
 * @param config the corresponding workflow configuration
 * @return <code>true</code> if the workflow executed successfully, <code>false</code> otherwise
 * @throws CanceledExecutionException if execution has been canceled by the user
 * @throws BatchException may be thrown by subclass implementations; the real exception is available via the cause
 *             of the batch exception
 * @since 2.7
 */
protected boolean executeWorkflow(final WorkflowManager wfm, final WorkflowConfiguration config) throws CanceledExecutionException, BatchException {
    LOGGER.debug("Status of workflow before execution:");
    LOGGER.debug("------------------------------------");
    dumpWorkflowToDebugLog(wfm);
    LOGGER.debug("------------------------------------");
    boolean successful = true;
    final MutableBoolean executionCanceled = new MutableBoolean(false);
    if (!config.noExecute) {
        // get workspace dir
        File wsFile = ResourcesPlugin.getWorkspace().getRoot().getLocation().toFile();
        // file to be checked for
        final File cancelFile = new File(wsFile, ".cancel");
        // create new timer task
        TimerTask task = new TimerTask() {

            /**
             * {@inheritDoc}
             */
            @Override
            public void run() {
                if (cancelFile.exists()) {
                    // CANCEL workflow manager
                    wfm.cancelExecution();
                    // delete cancel file
                    cancelFile.delete();
                    executionCanceled.setValue(true);
                    // cancel this timer
                    this.cancel();
                }
            }
        };
        KNIMETimer.getInstance().schedule(task, 1000, 1000);
        successful = wfm.executeAllAndWaitUntilDone();
        task.cancel();
    }
    if (executionCanceled.booleanValue()) {
        throw new CanceledExecutionException();
    }
    return successful;
}
Also used : TimerTask(java.util.TimerTask) CanceledExecutionException(org.knime.core.node.CanceledExecutionException) MutableBoolean(org.knime.core.util.MutableBoolean) File(java.io.File)
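
executeWorkflow turns user cancellation (here signalled by a .cancel file in the workspace) into a CanceledExecutionException, so callers can distinguish it from a run that merely finished with failed nodes. A hypothetical caller-side sketch; WorkflowRun and the exit codes are illustrative only and not part of BatchExecutor.

import org.knime.core.node.CanceledExecutionException;

// Hypothetical sketch: map the outcomes of a batch run to exit codes.
final class BatchRunner {

    /** Stand-in for a call into executeWorkflow(wfm, config). */
    interface WorkflowRun {
        boolean run() throws CanceledExecutionException;
    }

    static int runAndMapToExitCode(final WorkflowRun run) {
        try {
            return run.run() ? 0 : 1;  // 0 = executed successfully, 1 = finished with failures
        } catch (CanceledExecutionException cee) {
            return 2;                  // 2 = canceled by the user (e.g. via the .cancel file)
        }
    }
}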

Aggregations

CanceledExecutionException (org.knime.core.node.CanceledExecutionException): 82 usages
InvalidSettingsException (org.knime.core.node.InvalidSettingsException): 34 usages
IOException (java.io.IOException): 32 usages
File (java.io.File): 21 usages
ExecutionMonitor (org.knime.core.node.ExecutionMonitor): 21 usages
DataRow (org.knime.core.data.DataRow): 20 usages
DataTableSpec (org.knime.core.data.DataTableSpec): 20 usages
BufferedDataTable (org.knime.core.node.BufferedDataTable): 20 usages
DataCell (org.knime.core.data.DataCell): 19 usages
ArrayList (java.util.ArrayList): 11 usages
DataColumnSpec (org.knime.core.data.DataColumnSpec): 11 usages
BufferedDataContainer (org.knime.core.node.BufferedDataContainer): 10 usages
LinkedHashMap (java.util.LinkedHashMap): 9 usages
ExecutionException (java.util.concurrent.ExecutionException): 9 usages
DefaultRow (org.knime.core.data.def.DefaultRow): 9 usages
RowKey (org.knime.core.data.RowKey): 8 usages
BufferedWriter (java.io.BufferedWriter): 7 usages
FileInputStream (java.io.FileInputStream): 7 usages
Map (java.util.Map): 7 usages
Future (java.util.concurrent.Future): 7 usages