Use of org.knime.core.node.workflow.ConnectionContainer in project knime-core by KNIME.
The class ReplaceHelper, method reconnect:
/**
 * Transfers the connections of the replaced node to the given replacement node, compensating for the
 * hidden optional flow variable ports that "normal" nodes have but metanodes do not.
 *
 * @param container new node container
 */
public void reconnect(final NodeContainer container) {
    // place the replacement at the old node's position
    NodeUIInformation oldUiInfo = m_oldNode.getUIInformation();
    int[] oldBounds = oldUiInfo.getBounds();
    NodeUIInformation newUiInfo = NodeUIInformation.builder()
        .setNodeLocation(oldBounds[0], oldBounds[1], -1, -1)
        .setHasAbsoluteCoordinates(true)
        .setSnapToGrid(oldUiInfo.getSnapToGrid())
        .setIsDropLocation(false).build();
    container.setUIInformation(newUiInfo);
    int destPortOffset = 0;
    int sourcePortOffset = 0;
    if (m_oldNode instanceof WorkflowManager && !(container instanceof WorkflowManager)) {
        // replacing a metanode (no optional flow variable ports) with a "normal" node (that has them)
        if (m_oldNode.getNrInPorts() > 0 && container.getNrInPorts() > 1) {
            // shift ports by one index - unless we need to use the invisible optional flow var port of the new node
            boolean oldFirstInIsFlowVar =
                m_oldNode.getInPort(0).getPortType().equals(FlowVariablePortObject.TYPE);
            boolean newSecondInIsFlowVar =
                container.getInPort(1).getPortType().equals(FlowVariablePortObject.TYPE);
            if (!oldFirstInIsFlowVar || newSecondInIsFlowVar) {
                destPortOffset = 1;
            }
        }
        if (m_oldNode.getNrOutPorts() > 0 && container.getNrOutPorts() > 1) {
            boolean oldFirstOutIsFlowVar =
                m_oldNode.getOutPort(0).getPortType().equals(FlowVariablePortObject.TYPE);
            boolean newSecondOutIsFlowVar =
                container.getOutPort(1).getPortType().equals(FlowVariablePortObject.TYPE);
            if (!oldFirstOutIsFlowVar || newSecondOutIsFlowVar) {
                sourcePortOffset = 1;
            }
        }
    } else if (!(m_oldNode instanceof WorkflowManager) && container instanceof WorkflowManager) {
        // replacing a "normal" node with a metanode: the flow variable port at index 0 disappears,
        // so shift everything down by one unless some connection actually used port 0
        boolean usesFlowVarInPort = false;
        for (ConnectionContainer conn : m_incomingConnections) {
            usesFlowVarInPort |= conn.getDestPort() == 0;
        }
        destPortOffset = usesFlowVarInPort ? 0 : -1;
        boolean usesFlowVarOutPort = false;
        for (ConnectionContainer conn : m_outgoingConnections) {
            usesFlowVarOutPort |= conn.getSourcePort() == 0;
        }
        sourcePortOffset = usesFlowVarOutPort ? 0 : -1;
    }
    NodeID newId = container.getID();
    // restore incoming connections; stop at the first one that can no longer be added
    for (ConnectionContainer conn : m_incomingConnections) {
        int destPort = conn.getDestPort() + destPortOffset;
        if (!m_wfm.canAddConnection(conn.getSource(), conn.getSourcePort(), newId, destPort)) {
            break;
        }
        m_wfm.addConnection(conn.getSource(), conn.getSourcePort(), newId, destPort);
    }
    // restore outgoing connections; stop at the first one that can no longer be added
    for (ConnectionContainer conn : m_outgoingConnections) {
        int sourcePort = conn.getSourcePort() + sourcePortOffset;
        if (!m_wfm.canAddConnection(newId, sourcePort, conn.getDest(), conn.getDestPort())) {
            break;
        }
        m_wfm.addConnection(newId, sourcePort, conn.getDest(), conn.getDestPort());
    }
}
Use of org.knime.core.node.workflow.ConnectionContainer in project knime-core by KNIME.
The class ShiftConnectionCommand, method canExecute:
/**
 * {@inheritDoc}
 */
@Override
public boolean canExecute() {
    // the last (non-flow-variable) input port with an incoming connection; there must be room to shift it up
    final int lastPort = getLastConnectedPort();
    final int maxShiftablePort = m_node.getNodeContainer().getNrInPorts() - 1;
    if (lastPort < 0 || lastPort >= maxShiftablePort) {
        return false;
    }
    final ConnectionContainer connection = getHostWFM().getIncomingConnectionFor(m_nodeID, lastPort);
    assert connection != null;
    // executable only if the connection may be removed and a compatible target port exists
    return getHostWFM().canRemoveConnection(connection) && getNextMatchingPort(connection) != -1;
}
Use of org.knime.core.node.workflow.ConnectionContainer in project knime-core by KNIME.
The class NodeRecommendationManager, method getNodeRecommendationFor:
/**
 * Determines lists of node recommendations based on the given nodes (e.g. those selected in the workflow
 * editor). The {@link NodeRecommendation}s are derived from the statistics of {@link NodeTriple}s (i.e.
 * predecessor, node, successor, count -> (p,n,s,c)) that are provided by the {@link NodeTripleProvider}s.
 *
 * Given the lists of node triples, {(predecessor, node, successor, count/frequency)} = {(p,n,s,c)}, and a
 * selected node 'sn', the recommendations are determined for each node-triple-list as follows:
 *
 * (1) all node triples (p,n,s,c) with n==sn contribute their successor 's' as a recommendation; the
 * predecessor is essentially ignored here. Since multiple triples for the same 'n' yield duplicate
 * successors, those are joined by taking the mean of the respective frequencies 'c'. (2) all current
 * predecessors 'sp' of the selected node 'sn' are determined and all node triples matching the
 * predecessor-node pair ('sp','sn') contribute their successor 's' to the same list as in (1). (3)
 * post-processing: duplicate recommendations are resolved by removing the ones with smaller
 * counts/frequencies.
 *
 * If the array of given nodes is empty, all potential source nodes are recommended, i.e. all nodes 'n'
 * appearing in the node triple lists without a predecessor 'p'.
 *
 * @param nnc if it's an empty array, source nodes only will be recommended; if more than one node is given,
 *            the node recommendations for different nodes will end up in the same list
 * @return an array of lists of node recommendations, i.e. one list per used {@link NodeTripleProvider}. It
 *         will return <code>null</code> if something went wrong with loading the node statistics!
 */
public List<NodeRecommendation>[] getNodeRecommendationFor(final NativeNodeContainer... nnc) {
    if (m_recommendations == null) {
        // node statistics could not be loaded
        return null;
    }
    @SuppressWarnings("unchecked") List<NodeRecommendation>[] res = new List[m_recommendations.size()];
    for (int listIdx = 0; listIdx < res.length; listIdx++) {
        if (nnc.length == 0) {
            // no node given -> recommend source nodes
            List<NodeRecommendation> sources = m_recommendations.get(listIdx).get(SOURCE_NODES_KEY);
            res[listIdx] = sources == null ? Collections.emptyList() : sources;
        } else if (nnc.length == 1) {
            String nodeKey = getKey(nnc[0]);
            Set<NodeRecommendation> collected = new HashSet<>();
            /* recommendations based on the given node combined with each of its predecessors */
            for (int portIdx = 0; portIdx < nnc[0].getNrInPorts(); portIdx++) {
                ConnectionContainer inConn =
                    nnc[0].getParent().getIncomingConnectionFor(nnc[0].getID(), portIdx);
                // skip connections leaving the workflow (e.g. the actual predecessor is outside of a metanode)
                if (inConn != null && inConn.getType() != ConnectionType.WFMIN) {
                    NodeContainer predecessor = nnc[0].getParent().getNodeContainer(inConn.getSource());
                    if (predecessor instanceof NativeNodeContainer) {
                        List<NodeRecommendation> matches = m_recommendations.get(listIdx)
                            .get(getKey((NativeNodeContainer)predecessor) + NODE_NAME_SEP + nodeKey);
                        if (matches != null) {
                            collected.addAll(matches);
                        }
                    }
                }
            }
            /* recommendations based on the given node only */
            List<NodeRecommendation> nodeOnly = m_recommendations.get(listIdx).get(nodeKey);
            if (nodeOnly != null) {
                collected.addAll(nodeOnly);
            }
            res[listIdx] = new ArrayList<>(collected);
        } else {
            throw new UnsupportedOperationException("Recommendations for more than one node are not supported, yet.");
        }
        /* post-process result */
        Collections.sort(res[listIdx]);
        if (nnc.length == 1) {
            // drop the node the recommendations were requested for; [NodeFactory]#[NodeName] keys must be
            // compared so that dynamically generated nodes are matched correctly, too
            res[listIdx] = res[listIdx].stream()
                .filter(nr -> !getKey(nr.getNodeTemplate()).equals(getKey(nnc[0])))
                .collect(Collectors.toList());
        }
        // update the total frequencies on every recommendation
        int frequencySum = 0;
        for (NodeRecommendation rec : res[listIdx]) {
            frequencySum += rec.getFrequency();
        }
        for (NodeRecommendation rec : res[listIdx]) {
            rec.setTotalFrequency(frequencySum);
        }
    }
    return res;
}
Aggregations