Search in sources :

Example 6 with LoadResult

use of org.knime.core.node.workflow.WorkflowPersistor.LoadResult in project knime-core by knime.

In class WorkflowManager: method updateMetaNodeLinks.

/**
 * Updates metanode links, recursively collecting all linked metanodes (but not descending into
 * metanodes nested inside other metanodes).
 *
 * @param lH Load helper.
 * @param failOnLoadError Whether to fail when errors occur while updating the links
 * @param exec Progress monitor
 * @return The update summary
 * @throws CanceledExecutionException If canceled
 * @throws IOException Special errors during update (not accessible)
 * @noreference This method is not intended to be referenced by clients.
 */
public NodeContainerTemplateLinkUpdateResult updateMetaNodeLinks(final WorkflowLoadHelper lH,
        final boolean failOnLoadError, final ExecutionMonitor exec)
        throws IOException, CanceledExecutionException {
    // All linked metanodes that need to be checked.
    final Map<NodeID, NodeContainerTemplate> candidates =
        fillLinkedTemplateNodesList(new LinkedHashMap<NodeID, NodeContainerTemplate>(), true, true);
    final NodeContainerTemplateLinkUpdateResult updateSummary =
        new NodeContainerTemplateLinkUpdateResult(
            "Update on " + candidates.size() + " node(s) in " + getNameWithID());
    // Templates already instantiated during this pass, keyed by their source URI, so each
    // distinct link target is loaded only once.
    final HashMap<URI, NodeContainerTemplate> templateCache = new HashMap<URI, NodeContainerTemplate>();
    int checkedCount = 0;
    int updatedCount = 0;
    try {
        final double total = candidates.size();
        for (final NodeContainerTemplate candidate : candidates.values()) {
            checkedCount++;
            final WorkflowManager owner = candidate.getParent();
            exec.setProgress(checkedCount / total, "node " + candidate.getNameWithID());
            exec.checkCanceled();
            final LoadResult checkResult = new LoadResult("update check");
            final boolean hasUpdate = owner.checkUpdateMetaNodeLinkWithCache(
                candidate.getID(), lH, checkResult, templateCache, true);
            if (failOnLoadError && checkResult.hasErrors()) {
                LOGGER.error(checkResult.getFilteredError("", LoadResultEntryType.Error));
                throw new IOException("Error(s) while updating metanode links");
            }
            if (!hasUpdate) {
                continue;
            }
            final NodeContainerTemplateLinkUpdateResult childResult = owner.updateMetaNodeLinkWithCache(
                candidate.getID(), exec.createSubProgress(1.0 / total), lH, templateCache);
            updateSummary.addChildError(childResult);
            updatedCount++;
            if (failOnLoadError && childResult.hasErrors()) {
                LOGGER.error(childResult.getFilteredError("", LoadResultEntryType.Error));
                throw new IOException("Error(s) while updating metanode links");
            }
        }
        if (checkedCount == 0) {
            LOGGER.debug("No metanode links in workflow, nothing updated");
        } else {
            LOGGER.debug("Workflow contains " + checkedCount + " metanode link(s), "
                + updatedCount + " were updated");
        }
        return updateSummary;
    } finally {
        // The cached template instances were created only for comparison/update; remove them again.
        for (final NodeContainerTemplate cached : templateCache.values()) {
            cached.getParent().removeNode(cached.getID());
        }
    }
}
Also used : LinkedHashMap(java.util.LinkedHashMap) HashMap(java.util.HashMap) IOException(java.io.IOException) URI(java.net.URI) IExtensionPoint(org.eclipse.core.runtime.IExtensionPoint) WorkflowLoadResult(org.knime.core.node.workflow.WorkflowPersistor.WorkflowLoadResult) LoadResult(org.knime.core.node.workflow.WorkflowPersistor.LoadResult) NodeContainerTemplateLinkUpdateResult(org.knime.core.node.workflow.WorkflowPersistor.NodeContainerTemplateLinkUpdateResult)

Example 7 with LoadResult

use of org.knime.core.node.workflow.WorkflowPersistor.LoadResult in project knime-core by knime.

In class WorkflowManager: method checkUpdateMetaNodeLinkWithCache.

/**
 * Implementation of #checkUpdateMetaNodeLink that consults a cache of metanode links which
 * were already checked during the current operation.
 *
 * @param loadResult Errors while loading the template are added here
 * @param visitedTemplateMap avoids repeated checks for copies of the same metanode link.
 * @param recurseInto Should linked metanodes contained in the metanode also be checked.
 */
private boolean checkUpdateMetaNodeLinkWithCache(final NodeID id, final WorkflowLoadHelper loadHelper,
        final LoadResult loadResult, final Map<URI, NodeContainerTemplate> visitedTemplateMap,
        final boolean recurseInto) throws IOException {
    final NodeContainer node = m_workflow.getNode(id);
    if (!(node instanceof NodeContainerTemplate)) {
        // not a (meta)node template at all -- nothing to check
        return false;
    }
    final NodeContainerTemplate template = (NodeContainerTemplate) node;
    // Start with the node itself (when it is a link), optionally adding all nested links.
    Map<NodeID, NodeContainerTemplate> toCheck = new LinkedHashMap<NodeID, NodeContainerTemplate>();
    if (template.getTemplateInformation().getRole().equals(Role.Link)) {
        toCheck.put(id, template);
    }
    if (recurseInto) {
        toCheck = template.fillLinkedTemplateNodesList(toCheck, true, false);
    }
    boolean anyUpdate = false;
    for (final NodeContainerTemplate link : toCheck.values()) {
        final MetaNodeTemplateInformation linkInfo = link.getTemplateInformation();
        final URI sourceURI = linkInfo.getSourceURI();
        NodeContainerTemplate loadedTemplate = visitedTemplateMap.get(sourceURI);
        if (loadedTemplate == null) {
            // First time this URI is seen: load the template and remember it for later links.
            try {
                final LoadResult templateLoadResult = new LoadResult("Template to " + sourceURI);
                loadedTemplate = loadMetaNodeTemplate(link, loadHelper, templateLoadResult);
                loadResult.addChildError(templateLoadResult);
                visitedTemplateMap.put(sourceURI, loadedTemplate);
            } catch (Exception e) {
                // Mark the link as broken before propagating the failure.
                if (linkInfo.setUpdateStatusInternal(UpdateStatus.Error)) {
                    link.notifyTemplateConnectionChangedListener();
                }
                if (e instanceof IOException) {
                    throw new IOException("Could not update metanode '" + template + "': " + e.getMessage(), e);
                } else if (e instanceof CanceledExecutionException) {
                    throw new IOException("Canceled while loading from template", e);
                } else if (e instanceof RuntimeException) {
                    throw (RuntimeException) e;
                } else {
                    throw new RuntimeException(e);
                }
            }
        }
        final boolean linkHasUpdate = loadedTemplate.getTemplateInformation().isNewerThan(linkInfo);
        anyUpdate = anyUpdate || linkHasUpdate;
        final UpdateStatus status = linkHasUpdate ? UpdateStatus.HasUpdate : UpdateStatus.UpToDate;
        if (linkInfo.setUpdateStatusInternal(status)) {
            link.notifyTemplateConnectionChangedListener();
        }
    }
    return anyUpdate;
}
Also used : UpdateStatus(org.knime.core.node.workflow.MetaNodeTemplateInformation.UpdateStatus) IOException(java.io.IOException) URI(java.net.URI) WorkflowLoadResult(org.knime.core.node.workflow.WorkflowPersistor.WorkflowLoadResult) LoadResult(org.knime.core.node.workflow.WorkflowPersistor.LoadResult) CanceledExecutionException(org.knime.core.node.CanceledExecutionException) CoreException(org.eclipse.core.runtime.CoreException) LockFailedException(org.knime.core.util.LockFailedException) NoSuchAlgorithmException(java.security.NoSuchAlgorithmException) NotConfigurableException(org.knime.core.node.NotConfigurableException) IOException(java.io.IOException) InvalidSettingsException(org.knime.core.node.InvalidSettingsException) URISyntaxException(java.net.URISyntaxException) LinkedHashMap(java.util.LinkedHashMap) CanceledExecutionException(org.knime.core.node.CanceledExecutionException)

Example 8 with LoadResult

use of org.knime.core.node.workflow.WorkflowPersistor.LoadResult in project knime-core by knime.

In class WorkflowManager: method postLoad.

/**
 * Restores all nodes from their persistors after the workflow structure has been created:
 * loads node content in breadth-first order, rebuilds flow object stacks, resets nodes whose
 * state can no longer be trusted (load errors, missing input data, failed predecessors),
 * configures nodes where necessary and attempts to reconnect to remotely executing jobs.
 *
 * @param persistorMap the persistor of each node to load, keyed by node ID
 * @param tblRep table repository used while restoring port data
 * @param mustWarnOnDataLoadError if data load errors must surface as node messages
 * @param exec progress monitor, also used for cancelation checks
 * @param loadResult collects errors and warnings of all loaded nodes
 * @param keepNodeMessage whether node messages saved with the workflow are retained
 * @throws CanceledExecutionException if the execution monitor was canceled
 */
private void postLoad(final Map<NodeID, NodeContainerPersistor> persistorMap, final Map<Integer, BufferedDataTable> tblRep, final boolean mustWarnOnDataLoadError, final ExecutionMonitor exec, final LoadResult loadResult, final boolean keepNodeMessage) throws CanceledExecutionException {
    // linked set because we need reverse order later on
    Collection<NodeID> failedNodes = new LinkedHashSet<NodeID>();
    boolean isStateChangePredictable = false;
    final Set<NodeID> nodeIDsInPersistorSet = persistorMap.keySet();
    // had NPE below - adding this line to get better debug information
    CheckUtils.checkArgumentNotNull(nodeIDsInPersistorSet, "NodeID list from persistor must not be null for workflow %s", getNameWithID());
    for (NodeID bfsID : m_workflow.createBreadthFirstSortedList(nodeIDsInPersistorSet, true).keySet()) {
        NodeContainer cont = getNodeContainer(bfsID);
        // initialize node container with CredentialsStore
        if (cont instanceof SingleNodeContainer) {
            SingleNodeContainer snc = (SingleNodeContainer) cont;
            snc.setCredentialsStore(m_credentialsStore);
        }
        LoadResult subResult = new LoadResult(cont.getNameWithID());
        boolean isFullyConnected = isFullyConnected(bfsID);
        boolean needsReset;
        switch(cont.getInternalState()) {
            case IDLE:
            case UNCONFIGURED_MARKEDFOREXEC:
                needsReset = false;
                break;
            default:
                // we reset everything which is not fully connected
                needsReset = !isFullyConnected;
                break;
        }
        NodeOutPort[] predPorts = assemblePredecessorOutPorts(bfsID);
        final int predCount = predPorts.length;
        PortObject[] portObjects = new PortObject[predCount];
        boolean inPortsContainNull = false;
        FlowObjectStack[] predStacks = new FlowObjectStack[predCount];
        for (int i = 0; i < predCount; i++) {
            NodeOutPort p = predPorts[i];
            if (cont instanceof SingleNodeContainer && p != null) {
                SingleNodeContainer snc = (SingleNodeContainer) cont;
                snc.setInHiLiteHandler(i, p.getHiLiteHandler());
            }
            if (p != null) {
                predStacks[i] = p.getFlowObjectStack();
                portObjects[i] = p.getPortObject();
                // FIX: was '&=' -- combined with the initial value 'false' the flag could
                // never become true, leaving the "Predecessor ports have no data" reset and
                // the remote-execution inport check below unreachable. '|=' records whether
                // ANY connected input port is missing its data.
                inPortsContainNull |= portObjects[i] == null;
            }
        }
        FlowObjectStack inStack;
        try {
            if (isSourceNode(bfsID)) {
                // source nodes see the workflow variables instead of predecessor stacks
                predStacks = new FlowObjectStack[] { getWorkflowVariableStack() };
            }
            inStack = new FlowObjectStack(cont.getID(), predStacks);
        } catch (IllegalFlowObjectStackException ex) {
            subResult.addError("Errors creating flow object stack for " + "node \"" + cont.getNameWithID() + "\", (resetting " + "flow variables): " + ex.getMessage());
            needsReset = true;
            inStack = new FlowObjectStack(cont.getID());
        }
        NodeContainerPersistor persistor = persistorMap.get(bfsID);
        InternalNodeContainerState loadState = persistor.getMetaPersistor().getState();
        exec.setMessage(cont.getNameWithID());
        exec.checkCanceled();
        // two steps below: loadNodeContainer and loadContent
        ExecutionMonitor sub1 = exec.createSubProgress(1.0 / (2 * m_workflow.getNrNodes()));
        ExecutionMonitor sub2 = exec.createSubProgress(1.0 / (2 * m_workflow.getNrNodes()));
        NodeContext.pushContext(cont);
        try {
            persistor.loadNodeContainer(tblRep, sub1, subResult);
        } catch (CanceledExecutionException e) {
            throw e;
        } catch (Exception e) {
            if (!(e instanceof InvalidSettingsException) && !(e instanceof IOException)) {
                LOGGER.error("Caught unexpected \"" + e.getClass().getSimpleName() + "\" during node loading", e);
            }
            subResult.addError("Errors loading, skipping it: " + e.getMessage());
            needsReset = true;
        } finally {
            NodeContext.removeLastContext();
        }
        sub1.setProgress(1.0);
        // (that is being asserted in methods which get called indirectly)
        try (WorkflowLock lock = cont instanceof WorkflowManager ? ((WorkflowManager) cont).lock() : lock()) {
            cont.loadContent(persistor, tblRep, inStack, sub2, subResult, keepNodeMessage);
        }
        sub2.setProgress(1.0);
        if (persistor.isDirtyAfterLoad()) {
            cont.setDirty();
        }
        boolean hasPredecessorFailed = false;
        for (ConnectionContainer cc : m_workflow.getConnectionsByDest(bfsID)) {
            NodeID s = cc.getSource();
            if (s.equals(getID())) {
                // don't consider WFM_IN connections
                continue;
            }
            if (failedNodes.contains(s)) {
                hasPredecessorFailed = true;
            }
        }
        needsReset |= persistor.needsResetAfterLoad();
        needsReset |= hasPredecessorFailed;
        boolean isExecuted = cont.getInternalState().equals(EXECUTED);
        boolean remoteExec = persistor.getMetaPersistor().getExecutionJobSettings() != null;
        // predecessors has been loaded as IDLE
        if (!needsReset && isExecuted && inPortsContainNull) {
            needsReset = true;
            subResult.addError("Predecessor ports have no data", true);
        }
        if (needsReset && cont instanceof SingleNodeContainer && cont.isResetable()) {
            // we don't care for successors because they are not loaded yet
            invokeResetOnSingleNodeContainer((SingleNodeContainer) cont);
            isExecuted = false;
        }
        if (needsReset) {
            failedNodes.add(bfsID);
        }
        if (!isExecuted && cont instanceof SingleNodeContainer) {
            configureSingleNodeContainer((SingleNodeContainer) cont, keepNodeMessage);
        }
        if (persistor.mustComplainIfStateDoesNotMatch() && !cont.getInternalState().equals(loadState) && !hasPredecessorFailed) {
            isStateChangePredictable = true;
            String warning = "State has changed from " + loadState + " to " + cont.getInternalState();
            switch(subResult.getType()) {
                case DataLoadError:
                    // data load errors cause state changes
                    subResult.addError(warning, true);
                    break;
                default:
                    subResult.addWarning(warning);
            }
            cont.setDirty();
        }
        // saved in executing state (e.g. grid job), request to reconnect
        if (remoteExec) {
            if (needsReset) {
                subResult.addError("Can't continue execution " + "due to load errors");
            }
            if (inPortsContainNull) {
                subResult.addError("Can't continue execution; no data in inport");
            }
            if (!cont.getInternalState().equals(EXECUTINGREMOTELY)) {
                subResult.addError("Can't continue execution; node is not " + "configured but " + cont.getInternalState());
            }
            try {
                if (!continueExecutionOnLoad(cont, persistor)) {
                    cont.cancelExecution();
                    cont.setDirty();
                    subResult.addError("Can't continue execution; unknown reason");
                }
            } catch (Exception exc) {
                StringBuilder error = new StringBuilder("Can't continue execution");
                if (exc instanceof NodeExecutionJobReconnectException || exc instanceof InvalidSettingsException) {
                    error.append(": ").append(exc.getMessage());
                } else {
                    error.append(" due to ");
                    error.append(exc.getClass().getSimpleName());
                    error.append(": ").append(exc.getMessage());
                }
                LOGGER.error(error, exc);
                cont.cancelExecution();
                cont.setDirty();
                subResult.addError(error.toString());
            }
        }
        loadResult.addChildError(subResult);
        loadResult.addMissingNodes(subResult.getMissingNodes());
        // which must be reported.
        switch(subResult.getType()) {
            case Ok:
            case Warning:
                break;
            case DataLoadError:
                // intentional fall-through: data load errors become node messages only
                // when the caller asked to be warned about them
                if (!mustWarnOnDataLoadError) {
                    break;
                }
            default:
                NodeMessage oldMessage = cont.getNodeMessage();
                StringBuilder messageBuilder = new StringBuilder(oldMessage.getMessage());
                if (messageBuilder.length() != 0) {
                    messageBuilder.append("\n");
                }
                NodeMessage.Type type;
                switch(oldMessage.getMessageType()) {
                    case RESET:
                    case WARNING:
                        type = NodeMessage.Type.WARNING;
                        break;
                    default:
                        type = NodeMessage.Type.ERROR;
                }
                messageBuilder.append(subResult.getFilteredError("", LoadResultEntryType.Warning));
                cont.setNodeMessage(new NodeMessage(type, messageBuilder.toString()));
        }
    }
    if (!sweep(nodeIDsInPersistorSet, false) && !isStateChangePredictable) {
        loadResult.addWarning("Some node states were invalid");
    }
}
Also used : LinkedHashSet(java.util.LinkedHashSet) CanceledExecutionException(org.knime.core.node.CanceledExecutionException) ExecutionMonitor(org.knime.core.node.ExecutionMonitor) FlowVariablePortObject(org.knime.core.node.port.flowvariable.FlowVariablePortObject) PortObject(org.knime.core.node.port.PortObject) Type(org.knime.core.node.workflow.NodeMessage.Type) IOException(java.io.IOException) WorkflowLoadResult(org.knime.core.node.workflow.WorkflowPersistor.WorkflowLoadResult) LoadResult(org.knime.core.node.workflow.WorkflowPersistor.LoadResult) IExtensionPoint(org.eclipse.core.runtime.IExtensionPoint) CanceledExecutionException(org.knime.core.node.CanceledExecutionException) CoreException(org.eclipse.core.runtime.CoreException) LockFailedException(org.knime.core.util.LockFailedException) NoSuchAlgorithmException(java.security.NoSuchAlgorithmException) NotConfigurableException(org.knime.core.node.NotConfigurableException) IOException(java.io.IOException) InvalidSettingsException(org.knime.core.node.InvalidSettingsException) URISyntaxException(java.net.URISyntaxException) InvalidSettingsException(org.knime.core.node.InvalidSettingsException)

Aggregations

LoadResult (org.knime.core.node.workflow.WorkflowPersistor.LoadResult)8 IOException (java.io.IOException)7 WorkflowLoadResult (org.knime.core.node.workflow.WorkflowPersistor.WorkflowLoadResult)5 URI (java.net.URI)4 HashMap (java.util.HashMap)4 LinkedHashMap (java.util.LinkedHashMap)4 CanceledExecutionException (org.knime.core.node.CanceledExecutionException)4 InvalidSettingsException (org.knime.core.node.InvalidSettingsException)4 LinkedHashSet (java.util.LinkedHashSet)3 IExtensionPoint (org.eclipse.core.runtime.IExtensionPoint)3 ExecutionMonitor (org.knime.core.node.ExecutionMonitor)3 PortObject (org.knime.core.node.port.PortObject)3 PortType (org.knime.core.node.port.PortType)3 FlowVariablePortObject (org.knime.core.node.port.flowvariable.FlowVariablePortObject)3 File (java.io.File)2 URISyntaxException (java.net.URISyntaxException)2 NoSuchAlgorithmException (java.security.NoSuchAlgorithmException)2 Arrays (java.util.Arrays)2 Collections (java.util.Collections)2 HashSet (java.util.HashSet)2