Use of org.knime.core.node.workflow.action.CollapseIntoMetaNodeResult in project knime-core by KNIME.
From the class TestCollapseAndWrapMetaNodeActions, method testCollapseIntoMetaNodeThenUndo.
/**
 * Collect nodes, collapse them, undo.
 */
@Test
public void testCollapseIntoMetaNodeThenUndo() throws Exception {
    WorkflowManager mgr = getManager();
    executeAllAndWait();
    checkState(mgr, EXECUTED);
    mgr.resetAndConfigureAll();
    // there is only one annotation in the workflow
    WorkflowAnnotation annotation = mgr.getWorkflowAnnotations().stream().findFirst().get();
    final NodeID[] nodes = new NodeID[] { m_columnFilter_2, m_columnFilter_3, m_columnSplitter_4 };
    CollapseIntoMetaNodeResult collapseResult =
        mgr.collapseIntoMetaNode(nodes, new WorkflowAnnotation[] { annotation }, "Test-Meta/Wrap Node");
    NodeID metaSubID = collapseResult.getCollapsedMetanodeID();
    mgr.getNodeContainer(metaSubID, WorkflowManager.class, true);
    assertFalse("Should have removed node: " + m_columnFilter_2, mgr.containsNodeContainer(m_columnFilter_2));
    assertTrue("No annotation expected", mgr.getWorkflowAnnotations().isEmpty());
    executeAllAndWait();
    checkState(mgr, EXECUTED);
    mgr.resetAndConfigureNode(metaSubID);
    assertTrue("Should be able to undo collapse", collapseResult.canUndo());
    collapseResult.undo();
    assertTrue("Should have restored node: " + m_columnFilter_2, mgr.containsNodeContainer(m_columnFilter_2));
    assertFalse("Annotation expected", mgr.getWorkflowAnnotations().isEmpty());
}
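The test above exercises the full round trip through the public API. Distilled to its essentials, the pattern looks like the sketch below; mgr is an existing WorkflowManager, and n1 and n2 are hypothetical node IDs standing in for the test's member fields.

// Minimal sketch of the collapse/undo round trip; 'mgr', 'n1' and 'n2'
// are hypothetical placeholders, not identifiers from the test above.
NodeID[] toCollapse = new NodeID[] { n1, n2 };
CollapseIntoMetaNodeResult result =
    mgr.collapseIntoMetaNode(toCollapse, new WorkflowAnnotation[0], "My Metanode");
NodeID metanodeID = result.getCollapsedMetanodeID();
// ... work with the collapsed metanode ...
if (result.canUndo()) {
    result.undo(); // restores the original nodes, connections, and annotations
}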
Use of org.knime.core.node.workflow.action.CollapseIntoMetaNodeResult in project knime-core by KNIME.
From the class Bug6336_LoopsInServer_States, method collapseToMetaNode.
/**
 * Collapses all nodes except the data generator and the table view into a metanode.
 *
 * @param manager the workflow manager to operate on
 * @return the newly created metanode
 */
private WorkflowManager collapseToMetaNode(final WorkflowManager manager) {
    Set<NodeID> ids = new LinkedHashSet<>(manager.getWorkflow().getNodeIDs());
    assertTrue(ids.remove(m_dataGenerator1));
    assertTrue(ids.remove(m_tableView12));
    final CollapseIntoMetaNodeResult collapseResult =
        manager.collapseIntoMetaNode(ids.toArray(new NodeID[0]), new WorkflowAnnotation[0], "Collapsed Content");
    WorkflowManager metaNode =
        manager.getNodeContainer(collapseResult.getCollapsedMetanodeID(), WorkflowManager.class, true);
    // remaining: data generator, table view, and the new metanode
    assertEquals("Node count after collapse incorrect", 3, manager.getWorkflow().getNrNodes());
    assertTrue(manager.containsNodeContainer(m_dataGenerator1));
    assertTrue(manager.containsNodeContainer(m_tableView12));
    assertFalse(manager.containsNodeContainer(m_loopStart3));
    assertFalse(manager.containsNodeContainer(m_loopEnd4));
    return metaNode;
}
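Note that collapseIntoMetaNode validates its input and throws IllegalArgumentException if the nodes cannot be collapsed (see the method body later in this listing). Callers that want to fail with a clearer message can run the same check up front; a small sketch, reusing the manager and ids from the helper above:

// Optional pre-check; collapseIntoMetaNode performs the same validation
// internally via canCollapseNodesIntoMetaNode and throws on failure.
// A null return value means the collapse is allowed.
String problem = manager.canCollapseNodesIntoMetaNode(ids.toArray(new NodeID[0]));
if (problem != null) {
    fail("Cannot collapse: " + problem); // JUnit's fail(), assumed statically imported
}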
Use of org.knime.core.node.workflow.action.CollapseIntoMetaNodeResult in project knime-core by KNIME.
From the class WorkflowManager, method collapseIntoMetaNode.
/**
 * Collapse the selected set of nodes into a metanode. Makes sure connections from and to nodes not contained in
 * this set are passed through appropriate ports of the new metanode.
 *
 * @param orgIDs the ids of the nodes to be moved to the new metanode
 * @param orgAnnos the workflow annotations to be moved
 * @param name the name of the new metanode
 * @return the newly created metanode
 * @throws IllegalArgumentException if the collapse cannot be done
 */
public CollapseIntoMetaNodeResult collapseIntoMetaNode(final NodeID[] orgIDs, final WorkflowAnnotation[] orgAnnos,
    final String name) {
    try (WorkflowLock lock = lock()) {
        // make sure this is still true:
        String res = canCollapseNodesIntoMetaNode(orgIDs);
        CheckUtils.checkArgument(res == null, res);
        // for quick search:
        HashSet<NodeID> orgIDsHash = new HashSet<NodeID>(Arrays.asList(orgIDs));
        // find outside Nodes/Ports that have connections to the inside.
        // Map will hold SourceNodeID/PortIndex + Index of new MetanodeInport.
        HashMap<Pair<NodeID, Integer>, VerticalPortIndex> exposedIncomingPorts = new HashMap<>();
        // second Map holds list of affected connections
        HashMap<ConnectionContainer, VerticalPortIndex> inportConnections =
            new HashMap<ConnectionContainer, VerticalPortIndex>();
        for (NodeID id : orgIDs) {
            if (m_workflow.getConnectionsByDest(id) != null) {
                for (ConnectionContainer cc : m_workflow.getConnectionsByDest(id)) {
                    if (!orgIDsHash.contains(cc.getSource())) {
                        Pair<NodeID, Integer> npi = Pair.create(cc.getSource(), cc.getSourcePort());
                        if (!exposedIncomingPorts.containsKey(npi)) {
                            int yPos = npi.getSecond();
                            if (npi.getFirst().equals(this.getID())) {
                                // connection from a metanode inport
                                // TODO: determine ypos of the port!
                            } else {
                                // connected to another node in this workflow
                                NodeContainer nc = getNodeContainer(npi.getFirst());
                                NodeUIInformation uii = nc.getUIInformation();
                                // also include source port index into the ypos
                                // to make sure ports of the same node are sorted
                                // correctly!
                                if (uii != null) {
                                    int[] x = uii.getBounds();
                                    if ((x != null) && (x.length >= 2)) {
                                        // add node y position to port index
                                        yPos += x[1];
                                    }
                                }
                            }
                            VerticalPortIndex vpi = new VerticalPortIndex(yPos);
                            exposedIncomingPorts.put(npi, vpi);
                        }
                        VerticalPortIndex inportIndex = exposedIncomingPorts.get(npi);
                        inportConnections.put(cc, inportIndex);
                    }
                }
            }
        }
        // sort new input ports by vertical position of source nodes
        VerticalPortIndex[] vpis = new VerticalPortIndex[exposedIncomingPorts.size()];
        int vi = 0;
        for (VerticalPortIndex vpi : exposedIncomingPorts.values()) {
            vpis[vi] = vpi;
            vi++;
        }
        Arrays.sort(vpis);
        for (int i = 0; i < vpis.length; i++) {
            vpis[i].setIndex(i);
        }
        // find Nodes/Ports that have outgoing connections to the outside.
        // Map will hold SourceNodeID/PortIndex + Index of new MetanodeOutport.
        HashMap<Pair<NodeID, Integer>, VerticalPortIndex> exposedOutports = new HashMap<>();
        for (NodeID id : orgIDs) {
            for (ConnectionContainer cc : m_workflow.getConnectionsBySource(id)) {
                if (!orgIDsHash.contains(cc.getDest())) {
                    Pair<NodeID, Integer> npi = Pair.create(cc.getSource(), cc.getSourcePort());
                    if (!exposedOutports.containsKey(npi)) {
                        NodeContainer nc = getNodeContainer(npi.getFirst());
                        NodeUIInformation uii = nc.getUIInformation();
                        // also include source port index into the ypos
                        // to make sure ports of the same node are sorted
                        // correctly!
                        int yPos = npi.getSecond();
                        if (uii != null) {
                            int[] x = uii.getBounds();
                            if ((x != null) && (x.length >= 2)) {
                                // add node y position to port index
                                yPos += x[1];
                            }
                        }
                        VerticalPortIndex vpi = new VerticalPortIndex(yPos);
                        exposedOutports.put(npi, vpi);
                    }
                }
            }
        }
        // also sort new output ports by vertical position of source nodes
        vpis = new VerticalPortIndex[exposedOutports.size()];
        vi = 0;
        for (VerticalPortIndex vpi : exposedOutports.values()) {
            vpis[vi] = vpi;
            vi++;
        }
        Arrays.sort(vpis);
        for (int i = 0; i < vpis.length; i++) {
            vpis[i].setIndex(i);
        }
        // determine types of the new metanode in- and outports
        // (note that we reach directly into the Node to get the port type,
        // so we need to correct the index for the - then missing - variable port)
        PortType[] exposedIncomingPortTypes = new PortType[exposedIncomingPorts.size()];
        for (Map.Entry<Pair<NodeID, Integer>, VerticalPortIndex> entry : exposedIncomingPorts.entrySet()) {
            Pair<NodeID, Integer> npi = entry.getKey();
            int index = entry.getValue().getIndex();
            NodeID nID = npi.getFirst();
            int portIndex = npi.getSecond();
            if (nID.equals(this.getID())) {
                // this connection comes directly from a metanode inport:
                exposedIncomingPortTypes[index] = this.getInPort(portIndex).getPortType();
            } else {
                // otherwise reach into the NodeContainer to find out the port type:
                NodeContainer nc = getNodeContainer(nID);
                exposedIncomingPortTypes[index] = nc.getOutPort(portIndex).getPortType();
            }
        }
        PortType[] exposedOutportTypes = new PortType[exposedOutports.size()];
        for (Pair<NodeID, Integer> npi : exposedOutports.keySet()) {
            int index = exposedOutports.get(npi).getIndex();
            int portIndex = npi.getSecond();
            NodeContainer nc = getNodeContainer(npi.getFirst());
            exposedOutportTypes[index] = nc.getOutPort(portIndex).getPortType();
        }
        // create the new metanode
        WorkflowManager newWFM = createAndAddSubWorkflow(exposedIncomingPortTypes, exposedOutportTypes, name);
        // move it into the center of the nodes it replaces...
        int x = 0;
        int y = 0;
        int count = 0;
        if (orgIDs.length >= 1) {
            for (int i = 0; i < orgIDs.length; i++) {
                NodeContainer nc = getNodeContainer(orgIDs[i]);
                NodeUIInformation uii = nc.getUIInformation();
                if (uii != null) {
                    int[] bounds = uii.getBounds();
                    if (bounds.length >= 2) {
                        x += bounds[0];
                        y += bounds[1];
                        count++;
                    }
                }
            }
        }
        if (count >= 1) {
            NodeUIInformation newUii =
                NodeUIInformation.builder().setNodeLocation(x / count, y / count, -1, -1).build();
            newWFM.setUIInformation(newUii);
        }
        // copy the nodes into the newly created WFM:
        WorkflowCopyContent.Builder orgContentBuilder = WorkflowCopyContent.builder();
        orgContentBuilder.setNodeIDs(orgIDs);
        orgContentBuilder.setAnnotation(orgAnnos);
        orgContentBuilder.setIncludeInOutConnections(true);
        final WorkflowPersistor undoPersistor = copy(true, orgContentBuilder.build());
        orgContentBuilder.setIncludeInOutConnections(false);
        WorkflowCopyContent newContent = newWFM.copyFromAndPasteHere(this, orgContentBuilder.build());
        NodeID[] newIDs = newContent.getNodeIDs();
        Map<NodeID, NodeID> oldIDsHash = new HashMap<NodeID, NodeID>();
        for (int i = 0; i < orgIDs.length; i++) {
            oldIDsHash.put(orgIDs[i], newIDs[i]);
        }
        // move the subworkflow's nodes into the upper left corner but keep the
        // original layout (important for undo!)
        // ATTENTION: if you change this, make sure it is revertible
        // by extractMetanode (and correctly so!)
        int xmin = Integer.MAX_VALUE;
        int ymin = Integer.MAX_VALUE;
        if (newIDs.length >= 1) {
            // calculate the shift
            for (int i = 0; i < newIDs.length; i++) {
                NodeContainer nc = newWFM.getNodeContainer(newIDs[i]);
                NodeUIInformation uii = nc.getUIInformation();
                if (uii != null) {
                    int[] bounds = uii.getBounds();
                    if (bounds.length >= 2) {
                        xmin = Math.min(bounds[0], xmin);
                        ymin = Math.min(bounds[1], ymin);
                    }
                }
            }
            int xshift = 150 - Math.max(xmin, 70);
            int yshift = 120 - Math.max(ymin, 20);
            // move the new nodes
            for (int i = 0; i < newIDs.length; i++) {
                NodeContainer nc = newWFM.getNodeContainer(newIDs[i]);
                NodeUIInformation uii = nc.getUIInformation();
                if (uii != null) {
                    NodeUIInformation newUii =
                        NodeUIInformation.builder(uii).translate(new int[] { xshift, yshift }).build();
                    nc.setUIInformation(newUii);
                }
            }
            // move the new annotations
            for (Annotation anno : newWFM.m_annotations) {
                anno.shiftPosition(xshift, yshift);
            }
            // move the bendpoints of all internal connections
            for (ConnectionContainer cc : newWFM.getConnectionContainers()) {
                if ((!cc.getSource().equals(newWFM.getID())) && (!cc.getDest().equals(newWFM.getID()))) {
                    ConnectionUIInformation uii = cc.getUIInfo();
                    if (uii != null) {
                        ConnectionUIInformation newUI =
                            ConnectionUIInformation.builder(uii).translate(new int[] { xshift, yshift }).build();
                        cc.setUIInfo(newUI);
                    }
                }
            }
        }
        // create connections INSIDE the new workflow (from incoming ports)
        for (ConnectionContainer cc : inportConnections.keySet()) {
            int portIndex = inportConnections.get(cc).getIndex();
            NodeID newID = oldIDsHash.get(cc.getDest());
            newWFM.addConnection(newWFM.getID(), portIndex, newID, cc.getDestPort());
            this.removeConnection(cc);
        }
        // create connections INSIDE the new workflow (to outgoing ports)
        for (Pair<NodeID, Integer> npi : exposedOutports.keySet()) {
            int index = exposedOutports.get(npi).getIndex();
            NodeID newID = oldIDsHash.get(npi.getFirst());
            newWFM.addConnection(newID, npi.getSecond(), newWFM.getID(), index);
        }
        // create OUTSIDE connections to the new workflow
        for (Pair<NodeID, Integer> npi : exposedIncomingPorts.keySet()) {
            int index = exposedIncomingPorts.get(npi).getIndex();
            this.addConnection(npi.getFirst(), npi.getSecond(), newWFM.getID(), index);
        }
        // create OUTSIDE connections from the new workflow
        for (NodeID id : orgIDs) {
            // convert to a separate array so we can delete connections!
            ConnectionContainer[] cca = new ConnectionContainer[0];
            cca = m_workflow.getConnectionsBySource(id).toArray(cca);
            for (ConnectionContainer cc : cca) {
                if (!orgIDsHash.contains(cc.getDest())) {
                    Pair<NodeID, Integer> npi = new Pair<NodeID, Integer>(cc.getSource(), cc.getSourcePort());
                    int newPort = exposedOutports.get(npi).getIndex();
                    this.removeConnection(cc);
                    this.addConnection(newWFM.getID(), newPort, cc.getDest(), cc.getDestPort());
                }
            }
        }
        // and finally: delete the original nodes and annotations
        Stream.of(orgIDs).forEach(id -> removeNode(id));
        Stream.of(orgAnnos).forEach(anno -> removeAnnotation(anno));
        return new CollapseIntoMetaNodeResult(this, newWFM.getID(), undoPersistor);
    }
}
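The port-ordering logic above relies on VerticalPortIndex, a private helper of WorkflowManager that is not part of this listing. Inferred from its usage (constructed with a y coordinate, sorted with Arrays.sort, then assigned a final port index via setIndex), a plausible minimal shape might look like the following; this is an illustrative reconstruction, not the actual KNIME source:

// Illustrative reconstruction of the private VerticalPortIndex helper;
// inferred from its usage in collapseIntoMetaNode, not the actual source.
private static final class VerticalPortIndex implements Comparable<VerticalPortIndex> {
    private final int m_yPos; // vertical screen position used for sorting
    private int m_index = -1; // final port index, assigned after sorting

    VerticalPortIndex(final int y) {
        m_yPos = y;
    }

    int getIndex() {
        return m_index;
    }

    void setIndex(final int index) {
        m_index = index;
    }

    @Override
    public int compareTo(final VerticalPortIndex other) {
        return Integer.compare(m_yPos, other.m_yPos);
    }
}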
Use of org.knime.core.node.workflow.action.CollapseIntoMetaNodeResult in project knime-core by KNIME.
From the class Bug3673_CredentialsInputNode_Test1_SimpleNodeWithSavedPassword, method testCollapseToSubnodeThenSaveLoad.
@Test
public void testCollapseToSubnodeThenSaveLoad() throws Exception {
    // WorkflowCopyContent cnt = new WorkflowCopyContent();
    // cnt.setNodeIDs(m_credentialsInput_1);
    // getManager().copyFromAndPasteHere(getManager(), cnt);
    CollapseIntoMetaNodeResult collapseResult = getManager().collapseIntoMetaNode(
        new NodeID[] { m_credentialsInput_1 }, new WorkflowAnnotation[0], "Collapsed-by-Testflow");
    WorkflowManager metaNode =
        getManager().getNodeContainer(collapseResult.getCollapsedMetanodeID(), WorkflowManager.class, true);
    getManager().convertMetaNodeToSubNode(metaNode.getID());
    assertFalse("Expected to be removed", getManager().containsNodeContainer(m_credentialsInput_1));
    SubNodeContainer subNode = getManager().getNodeContainer(metaNode.getID(), SubNodeContainer.class, true);
    subNode.updateOutputConfigurationToIncludeAllFlowVariables();
    NodeID subnodeID = subNode.getID();
    final int subNodeIDIndex = subnodeID.getIndex();
    ConnectionContainer inConnection = findInConnection(m_credentialsValidate_2, 1);
    assertEquals("Source should be subnode", subnodeID, inConnection.getSource());
    getManager().save(m_workflowDirTemp, new ExecutionMonitor(), true);
    executeAndWait(m_credentialsValidate_2);
    checkState(m_credentialsValidate_2, EXECUTED);
    closeWorkflow();
    initFlow();
    subnodeID = getManager().getID().createChild(subNodeIDIndex);
    checkState(subnodeID, CONFIGURED);
    executeAndWait(subnodeID);
    getManager().save(m_workflowDirTemp, new ExecutionMonitor(), true);
    executeAndWait(m_credentialsValidate_2);
    checkState(m_credentialsValidate_2, EXECUTED);
    closeWorkflow();
    initFlow();
    subnodeID = getManager().getID().createChild(subNodeIDIndex);
    checkState(subnodeID, EXECUTED);
    checkState(m_credentialsValidate_2, CONFIGURED);
    executeAndWait(m_credentialsValidate_2);
    checkState(m_credentialsValidate_2, EXECUTED);
}
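This test and the one that follows share the same two-step recipe: collapse the node into a metanode, then convert that metanode into a subnode. Condensed, with wfm and nodeID as hypothetical placeholders for the test fixtures:

// Collapse-then-convert recipe shared by both Bug3673 tests;
// 'wfm' and 'nodeID' are hypothetical placeholders.
CollapseIntoMetaNodeResult collapsed = wfm.collapseIntoMetaNode(
    new NodeID[] { nodeID }, new WorkflowAnnotation[0], "Wrapped");
NodeID metaID = collapsed.getCollapsedMetanodeID();
wfm.convertMetaNodeToSubNode(metaID);
// the resulting subnode keeps the metanode's ID
SubNodeContainer subNode = wfm.getNodeContainer(metaID, SubNodeContainer.class, true);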
Use of org.knime.core.node.workflow.action.CollapseIntoMetaNodeResult in project knime-core by KNIME.
From the class Bug3673_CredentialsInputNode_Test2_SimpleNodeWithNoSavedPassword, method testCollapseToSubnodeThenSaveLoad.
@Test
public void testCollapseToSubnodeThenSaveLoad() throws Exception {
    TestWorkflowLoadHelper loadHelper = initFlow(new TestWorkflowLoadHelper("some-fixed-password"));
    assertTrue("No password prompted", loadHelper.hasBeenPrompted());
    /* Collapse into subnode - make sure it's there */
    CollapseIntoMetaNodeResult collapseResult = getManager().collapseIntoMetaNode(
        new NodeID[] { m_credentialsInput_1 }, new WorkflowAnnotation[0], "Collapsed-by-Testflow");
    WorkflowManager metaNode =
        getManager().getNodeContainer(collapseResult.getCollapsedMetanodeID(), WorkflowManager.class, true);
    getManager().convertMetaNodeToSubNode(metaNode.getID());
    assertFalse("Expected to be removed", getManager().containsNodeContainer(m_credentialsInput_1));
    SubNodeContainer subNode = getManager().getNodeContainer(metaNode.getID(), SubNodeContainer.class, true);
    subNode.updateOutputConfigurationToIncludeAllFlowVariables();
    NodeID subnodeID = subNode.getID();
    final int subNodeIDIndex = subnodeID.getIndex();
    ConnectionContainer inConnection = findInConnection(m_credentialsValidate_2, 1);
    assertEquals("Source should be subnode", subnodeID, inConnection.getSource());
    getManager().save(m_workflowDirTemp, new ExecutionMonitor(), true);
    executeAndWait(m_credentialsValidate_2);
    checkState(m_credentialsValidate_2, EXECUTED);
    closeWorkflow();
    /* Load: subnode contained but not executed - prompt expected, enter wrong password */
    loadHelper = initFlow(new TestWorkflowLoadHelper("some-wrong-password"));
    subnodeID = getManager().getID().createChild(subNodeIDIndex);
    checkState(subnodeID, CONFIGURED);
    assertTrue("No password prompted", loadHelper.hasBeenPrompted());
    getManager().save(m_workflowDirTemp, new ExecutionMonitor(), true);
    executeAndWait(m_credentialsValidate_2);
    // the subnode itself executes, but downstream validation fails on the wrong password
    checkState(subnodeID, EXECUTED);
    checkState(m_credentialsValidate_2, IDLE);
    closeWorkflow();
    /* Load: subnode contained but not executed - prompt expected, enter correct password */
    loadHelper = initFlow(new TestWorkflowLoadHelper("some-fixed-password"));
    assertFalse("Expected to be removed", getManager().containsNodeContainer(m_credentialsInput_1));
    subnodeID = getManager().getID().createChild(subNodeIDIndex);
    checkState(subnodeID, CONFIGURED);
    assertTrue("Password prompt expected", loadHelper.hasBeenPrompted());
    executeAndWait(subnodeID);
    checkState(subnodeID, EXECUTED);
    getManager().save(m_workflowDirTemp, new ExecutionMonitor(), true);
    executeAndWait(m_credentialsValidate_2);
    checkState(m_credentialsValidate_2, EXECUTED);
    closeWorkflow();
    /* Load: subnode contained and executed - prompt not expected, downstream nodes need to fail */
    loadHelper = initFlow(new TestWorkflowLoadHelper("some-fixed-password"));
    subnodeID = getManager().getID().createChild(subNodeIDIndex);
    checkState(subnodeID, EXECUTED);
    assertFalse("Password prompt not expected", loadHelper.hasBeenPrompted());
    executeAndWait(m_credentialsValidate_2);
    checkState(m_credentialsValidate_2, IDLE);
    executeAndWait(m_credentialsValidate_4);
    checkState(m_credentialsValidate_4, EXECUTED);
}