Use of org.knime.core.node.workflow.WorkflowPersistor.LoadResult in project knime-core by knime.
In the class FileNativeNodeContainerPersistor, the method guessPortTypesFromConnectedNodes:
/**
 * {@inheritDoc}
 */
@Override
public void guessPortTypesFromConnectedNodes(final NodeAndBundleInformation nodeInfo,
    final NodeSettingsRO additionalFactorySettings, final ArrayList<PersistorWithPortIndex> upstreamNodes,
    final ArrayList<List<PersistorWithPortIndex>> downstreamNodes) {
    if (m_node == null) {
        /* Input ports from the connection table. */
        // first is flow var port
        PortType[] inPortTypes = new PortType[Math.max(upstreamNodes.size() - 1, 0)];
        // default to BDT for unconnected ports
        Arrays.fill(inPortTypes, BufferedDataTable.TYPE);
        for (int i = 0; i < inPortTypes.length; i++) {
            // first is flow var port
            PersistorWithPortIndex p = upstreamNodes.get(i + 1);
            if (p != null) {
                PortType portTypeFromUpstreamNode = p.getPersistor().getUpstreamPortType(p.getPortIndex());
                if (portTypeFromUpstreamNode != null) {
                    // null if upstream is missing, too
                    inPortTypes[i] = portTypeFromUpstreamNode;
                }
            }
        }
        /* Output ports from node settings (saved ports) -- if possible (executed) */
        String nodeName = nodeInfo.getNodeNameNotNull();
        PortType[] outPortTypes;
        try {
            LoadResult guessLoadResult = new LoadResult("Port type guessing for missing node \"" + nodeName + "\"");
            NodeSettingsRO settingsForNode = loadSettingsForNode(guessLoadResult);
            FileNodePersistor nodePersistor = createNodePersistor(settingsForNode);
            outPortTypes = nodePersistor.guessOutputPortTypes(guessLoadResult, nodeName);
            if (guessLoadResult.hasErrors()) {
                getLogger().debug("Errors guessing port types for missing node \"" + nodeName + "\": "
                    + guessLoadResult.getFilteredError("", LoadResultEntryType.Error));
            }
        } catch (Exception e) {
            getLogger().debug("Unable to guess port types for missing node \"" + nodeName + "\"", e);
            outPortTypes = null;
        }
        if (outPortTypes == null) {
            // couldn't guess port types from looking at node settings (e.g. not executed)
            // default to BDT for unconnected ports
            outPortTypes = new PortType[Math.max(downstreamNodes.size() - 1, 0)];
        }
        for (int i = 0; i < outPortTypes.length; i++) {
            PortType type = outPortTypes[i];
            // output types may be partially filled by settings guessing above, list may be empty or too short
            List<PersistorWithPortIndex> list = i < downstreamNodes.size() - 1 ? downstreamNodes.get(i + 1) : null;
            if (list != null) {
                assert !list.isEmpty();
                for (PersistorWithPortIndex p : list) {
                    PortType current = p.getPersistor().getDownstreamPortType(p.getPortIndex());
                    if (current == null) {
                        // ignore, downstream node is also missing
                    } else if (type == null) {
                        type = current;
                    } else if (type.equals(current)) {
                        // keep type
                    } else {
                        // this shouldn't really happen - someone changed port types between versions
                        type = PortObject.TYPE;
                    }
                }
                outPortTypes[i] = type;
            }
            if (outPortTypes[i] == null) {
                // might still be null if missing node is only connected to missing node, fallback: BDT
                outPortTypes[i] = BufferedDataTable.TYPE;
            }
        }
        MissingNodeFactory nodefactory =
            new MissingNodeFactory(nodeInfo, additionalFactorySettings, inPortTypes, outPortTypes);
        if (getLoadVersion().ordinal() < FileWorkflowPersistor.VERSION_LATEST.ordinal()) {
            nodefactory.setCopyInternDirForWorkflowVersionChange(true);
        }
        nodefactory.init();
        m_node = new Node((NodeFactory) nodefactory);
    }
}
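The method above shows the recurring LoadResult idiom in these examples: create a named LoadResult, pass it into an operation that records problems instead of throwing, then check hasErrors() and render the collected entries with getFilteredError(). Below is a minimal, hedged sketch of that idiom in isolation; the reportingOperation helper is a placeholder (not KNIME API) and the import paths of the nested LoadResult classes are assumptions based on the class name given in the heading.

import org.knime.core.node.NodeLogger;
import org.knime.core.node.workflow.WorkflowPersistor.LoadResult;
import org.knime.core.node.workflow.WorkflowPersistor.LoadResultEntry.LoadResultEntryType;

final class LoadResultIdiomSketch {

    private static final NodeLogger LOGGER = NodeLogger.getLogger(LoadResultIdiomSketch.class);

    /** Placeholder for any routine that reports problems into the passed LoadResult. */
    private static void reportingOperation(final LoadResult lr) {
        lr.addError("could not determine port type, falling back to BufferedDataTable");
    }

    static void run() {
        // collect problems instead of failing hard
        LoadResult lr = new LoadResult("Port type guessing for missing node \"Example\"");
        reportingOperation(lr);
        if (lr.hasErrors()) {
            // same call pattern as in guessPortTypesFromConnectedNodes above
            LOGGER.debug("Errors guessing port types: " + lr.getFilteredError("", LoadResultEntryType.Error));
        }
    }
}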
Use of org.knime.core.node.workflow.WorkflowPersistor.LoadResult in project knime-core by knime.
In the class Node, the method createNodeExecutionResult:
/**
 * Creates an execution result containing all calculated values in an
 * execution. The returned value is suitable to be used in
 * {@link #loadDataAndInternals(NodeContentPersistor, ExecutionMonitor, LoadResult)}.
 * If this node is not executed, it will assign null values to the fields
 * in the returned execution result.
 *
 * @param exec For progress information.
 * @return A new execution result containing the values being calculated.
 * @throws CanceledExecutionException If canceled
 */
public NodeExecutionResult createNodeExecutionResult(final ExecutionMonitor exec)
        throws CanceledExecutionException {
    NodeExecutionResult result = new NodeExecutionResult();
    result.setWarningMessage(m_model.getWarningMessage());
    if (hasContent()) {
        File internTempDir;
        try {
            internTempDir = FileUtil.createTempDir("knime_node_internDir");
            exec.setMessage("Saving internals");
            saveInternals(internTempDir, exec.createSubProgress(0.0));
            result.setNodeInternDir(new ReferencedFile(internTempDir));
        } catch (IOException ioe) {
            LOGGER.error("Unable to save internals", ioe);
        }
    }
    if (m_internalHeldPortObjects != null) {
        PortObject[] internalHeldPortObjects =
            Arrays.copyOf(m_internalHeldPortObjects, m_internalHeldPortObjects.length);
        result.setInternalHeldPortObjects(internalHeldPortObjects);
    }
    PortObject[] pos = new PortObject[getNrOutPorts()];
    PortObjectSpec[] poSpecs = new PortObjectSpec[getNrOutPorts()];
    for (int i = 0; i < pos.length; i++) {
        PortObject po = getOutputObject(i);
        if (po != null) {
            pos[i] = po;
            poSpecs[i] = po.getSpec();
        }
    }
    result.setPortObjects(pos);
    result.setPortObjectSpecs(poSpecs);
    // add the outgoing flow variables to the execution result
    FlowObjectStack outgoingStack = m_model.getOutgoingFlowObjectStack();
    List<FlowVariable> nodeFlowVars = outgoingStack.getAvailableFlowVariables().values().stream()
        .filter(f -> f.getScope().equals(FlowVariable.Scope.Flow))
        .collect(Collectors.toList());
    // the bottom-most element should remain at the bottom of the stack
    Collections.reverse(nodeFlowVars);
    result.setFlowVariables(nodeFlowVars);
    return result;
}
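The Javadoc above says the returned NodeExecutionResult is meant to be fed back through loadDataAndInternals(NodeContentPersistor, ExecutionMonitor, LoadResult). The following is a hedged sketch of that round trip, assuming access to two Node instances; the import paths and the visibility and exact checked exceptions of loadDataAndInternals are not shown in the excerpt and are assumptions here.

import org.knime.core.node.CanceledExecutionException;
import org.knime.core.node.ExecutionMonitor;
import org.knime.core.node.Node;
import org.knime.core.node.workflow.WorkflowPersistor.LoadResult;
import org.knime.core.node.workflow.WorkflowPersistor.LoadResultEntry.LoadResultEntryType;
import org.knime.core.node.workflow.execresult.NodeExecutionResult;

final class ExecutionResultRoundTripSketch {

    /** Copies the executed state of {@code source} into {@code target}, collecting problems in a LoadResult. */
    static void copyExecutedState(final Node source, final Node target) throws CanceledExecutionException {
        ExecutionMonitor exec = new ExecutionMonitor();
        // snapshot data, internals, port objects and flow variables of the executed node
        NodeExecutionResult result = source.createNodeExecutionResult(exec);
        LoadResult loadResult = new LoadResult("replay execution result");
        // replay the snapshot; signature taken from the Javadoc above
        target.loadDataAndInternals(result, exec, loadResult);
        if (loadResult.hasErrors()) {
            throw new IllegalStateException(loadResult.getFilteredError("", LoadResultEntryType.Error));
        }
    }
}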
Use of org.knime.core.node.workflow.WorkflowPersistor.LoadResult in project knime-core by knime.
In the class CopyContentIntoTempFlowNodeExecutionJob, the method mainExecute:
/**
 * {@inheritDoc}
 */
@Override
protected NodeContainerExecutionStatus mainExecute() {
    LoadResult lR = new LoadResult("load data into temp flow");
    getNodeContainer().loadExecutionResult(m_ncResult, new ExecutionMonitor(), lR);
    if (lR.hasErrors()) {
        LOGGER.error("Errors loading temporary data into workflow (to be submitted to cluster):\n"
            + lR.getFilteredError("", LoadResultEntryType.Warning));
    }
    return m_ncResult;
}
Use of org.knime.core.node.workflow.WorkflowPersistor.LoadResult in project knime-core by knime.
In the class WorkflowManager, the method loadExecutionResult:
/**
 * {@inheritDoc}
 */
@Override
public void loadExecutionResult(final NodeContainerExecutionResult result, final ExecutionMonitor exec,
    final LoadResult loadResult) {
    CheckUtils.checkArgument(result instanceof WorkflowExecutionResult, "Argument must be instance of \"%s\": %s",
        WorkflowExecutionResult.class.getSimpleName(),
        result == null ? "null" : result.getClass().getSimpleName());
    WorkflowExecutionResult r = (WorkflowExecutionResult) result;
    try (WorkflowLock lock = lock()) {
        super.loadExecutionResult(result, exec, loadResult);
        Map<NodeID, NodeContainerExecutionResult> map = r.getExecutionResultMap();
        final int count = map.size();
        // contains the corrected NodeID in this workflow (the node ids in
        // the execution result refer to the base id of the remote workflow)
        Map<NodeID, NodeID> transMap = new HashMap<NodeID, NodeID>();
        NodeID otherIDPrefix = r.getBaseID();
        for (NodeID otherID : map.keySet()) {
            assert otherID.hasSamePrefix(otherIDPrefix);
            transMap.put(new NodeID(getID(), otherID.getIndex()), otherID);
        }
        for (NodeID id : m_workflow.createBreadthFirstSortedList(transMap.keySet(), true).keySet()) {
            NodeID otherID = transMap.get(id);
            NodeContainer nc = m_workflow.getNode(id);
            NodeContainerExecutionResult exResult = map.get(otherID);
            if (exResult == null) {
                loadResult.addError("No execution result for node " + nc.getNameWithID());
                continue;
            }
            exec.setMessage(nc.getNameWithID());
            ExecutionMonitor subExec = exec.createSubProgress(1.0 / count);
            // propagate the flow variables
            if (nc instanceof SingleNodeContainer) {
                NodeOutPort[] predecessorOutPorts = assemblePredecessorOutPorts(id);
                FlowObjectStack[] sos = Arrays.stream(predecessorOutPorts)
                    .map(p -> p != null ? p.getFlowObjectStack() : null).toArray(FlowObjectStack[]::new);
                createAndSetFlowObjectStackFor((SingleNodeContainer) nc, sos);
            }
            nc.loadExecutionResult(exResult, subExec, loadResult);
            subExec.setProgress(1.0);
        }
    }
}
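From the caller's side (compare mainExecute in CopyContentIntoTempFlowNodeExecutionJob above), this workflow-level overload is driven the same way: build a LoadResult, hand it to loadExecutionResult, then decide what to do with the collected errors. A hedged sketch follows, assuming a WorkflowManager and a WorkflowExecutionResult obtained elsewhere (for example from a remote executor); the import paths of WorkflowExecutionResult and the nested LoadResult classes are assumptions.

import org.knime.core.node.ExecutionMonitor;
import org.knime.core.node.workflow.WorkflowManager;
import org.knime.core.node.workflow.WorkflowPersistor.LoadResult;
import org.knime.core.node.workflow.WorkflowPersistor.LoadResultEntry.LoadResultEntryType;
import org.knime.core.node.workflow.execresult.WorkflowExecutionResult;

final class LoadExecutionResultCallerSketch {

    static void loadBack(final WorkflowManager wfm, final WorkflowExecutionResult result) {
        LoadResult loadResult = new LoadResult("load remote execution result");
        // per-node results are matched by NodeID; nodes without a result are recorded as errors
        wfm.loadExecutionResult(result, new ExecutionMonitor(), loadResult);
        if (loadResult.hasErrors()) {
            throw new IllegalStateException("Errors loading execution result:\n"
                + loadResult.getFilteredError("", LoadResultEntryType.Error));
        }
    }
}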
Use of org.knime.core.node.workflow.WorkflowPersistor.LoadResult in project knime-core by knime.
In the class WorkflowManager, the method checkUpdateMetaNodeLink:
/**
 * Queries the template of the linked metanode with the given ID and checks whether a newer version is available.
 *
 * @param id The ID of the linked metanode
 * @param loadHelper The load helper used to load the template
 * @return true if a newer revision is available, false if not or if this is not a metanode link.
 * @throws IOException If the check fails (e.g. the template is not accessible)
 */
public boolean checkUpdateMetaNodeLink(final NodeID id, final WorkflowLoadHelper loadHelper) throws IOException {
    final HashMap<URI, NodeContainerTemplate> visitedTemplateMap = new HashMap<URI, NodeContainerTemplate>();
    try {
        final LoadResult loadResult = new LoadResult("ignored");
        boolean result = checkUpdateMetaNodeLinkWithCache(id, loadHelper, loadResult, visitedTemplateMap, true);
        if (loadResult.hasErrors()) {
            throw new IOException(
                "Errors checking updates:\n" + loadResult.getFilteredError(" ", LoadResultEntryType.Error));
        }
        return result;
    } finally {
        for (NodeContainerTemplate tempLink : visitedTemplateMap.values()) {
            tempLink.getParent().removeNode(tempLink.getID());
        }
    }
}
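A short, hedged sketch of how a caller might use checkUpdateMetaNodeLink; the errors collected in the method's internal LoadResult surface as the message of the thrown IOException. The load helper is passed in rather than constructed, since the excerpt does not show how one is created.

import java.io.IOException;
import org.knime.core.node.workflow.NodeID;
import org.knime.core.node.workflow.WorkflowLoadHelper;
import org.knime.core.node.workflow.WorkflowManager;

final class MetaNodeLinkUpdateSketch {

    /** Returns true if the linked metanode has a newer template revision, false otherwise. */
    static boolean hasUpdate(final WorkflowManager wfm, final NodeID linkedMetaNodeId,
        final WorkflowLoadHelper loadHelper) {
        try {
            return wfm.checkUpdateMetaNodeLink(linkedMetaNodeId, loadHelper);
        } catch (IOException e) {
            // template not accessible or the update check reported errors
            return false;
        }
    }
}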