Use of org.knime.core.node.workflow.virtual.parchunk.VirtualParallelizedChunkPortObjectInNodeFactory in project knime-core by knime.
Class JoinerJoinAnyTest, method setUp:
/**
 * @throws java.lang.Exception
 */
@Before
public void setUp() throws Exception {
    NodeFactory<NodeModel> dummyFactory =
        (NodeFactory)new VirtualParallelizedChunkPortObjectInNodeFactory(new PortType[0]);
    m_exec = new ExecutionContext(new DefaultNodeProgressMonitor(), new Node(dummyFactory),
        SingleNodeContainer.MemoryPolicy.CacheOnDisc, new HashMap<Integer, ContainerTable>());
}
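The raw cast to NodeFactory lets the parallel-chunk virtual node factory stand in for a real node, and the resulting ExecutionContext is fully usable for creating tables. A minimal sketch of how a test could use the m_exec field from this setUp to materialize a one-row BufferedDataTable; the test-method name, column name, and values are illustrative and not taken from the KNIME sources:

@Test
public void buildSmallTable() throws Exception {
    // Sketch only: relies on the m_exec field initialized in setUp() above;
    // the "Value" column and its content are made up for illustration.
    DataTableSpec spec = new DataTableSpec(
        new DataColumnSpecCreator("Value", DoubleCell.TYPE).createSpec());
    BufferedDataContainer cont = m_exec.createDataContainer(spec);
    cont.addRowToTable(new DefaultRow(RowKey.createRowKey(0L), new DoubleCell(1.0)));
    cont.close();
    BufferedDataTable table = cont.getTable();
    // JUnit assertion: the container produced exactly one row
    assertEquals(1, table.size());
}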
Use of org.knime.core.node.workflow.virtual.parchunk.VirtualParallelizedChunkPortObjectInNodeFactory in project knime-core by knime.
Class AbstractColumnTableSorterTest, method setUp:
/**
 * @throws java.lang.Exception
 */
@SuppressWarnings("rawtypes")
@Before
public void setUp() throws Exception {
    @SuppressWarnings("unchecked")
    NodeFactory<NodeModel> dummyFactory =
        (NodeFactory)new VirtualParallelizedChunkPortObjectInNodeFactory(new PortType[0]);
    m_exec = new ExecutionContext(new DefaultNodeProgressMonitor(), new Node(dummyFactory),
        SingleNodeContainer.MemoryPolicy.CacheOnDisc, new HashMap<Integer, ContainerTable>());
    DataColumnSpec[] colSpecs = new DataColumnSpec[]{
        new DataColumnSpecCreator(FEATURE1, DoubleCell.TYPE).createSpec(),
        new DataColumnSpecCreator(FEATURE2, DoubleCell.TYPE).createSpec(),
        new DataColumnSpecCreator(STRING_FEATURE, StringCell.TYPE).createSpec(),
        new DataColumnSpecCreator(CLASS, StringCell.TYPE).createSpec()};
    DataTableSpec spec = new DataTableSpec(colSpecs);
    final BufferedDataContainer container = m_exec.createDataContainer(spec);
    int i = 0;
    container.addRowToTable(creatRow(i++, 1, 8, "A", "AClass8"));
    container.addRowToTable(creatRow(i++, 2, 2, "Z", "ZClass2"));
    container.addRowToTable(creatRow(i++, 3, 5, "B", "BClass5"));
    container.addRowToTable(creatRow(i++, 4, 0, "E", "EClass0"));
    container.addRowToTable(creatRow(i++, 5, 1, "F", "FClass1"));
    container.addRowToTable(creatRow(i++, 6, 7, "G", "GClass7"));
    container.addRowToTable(creatRow(i++, 7, 9, "H", "HClass9"));
    container.addRowToTable(creatRow(i++, 8, 8, null, "Class8"));
    container.close();
    testTable = container.getTable();
    final BufferedDataContainer emptyContainer = m_exec.createDataContainer(spec);
    emptyContainer.close();
    emptyTestTable = emptyContainer.getTable();
    MemoryAlertSystemTest.forceGC();
}
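The creatRow helper called above is not included in this excerpt. A plausible sketch of such a helper, assuming it maps its arguments onto the four columns declared in setUp (a row key from the index, two double features, an optional string feature where null becomes a missing cell, and a class label); the actual KNIME implementation may differ:

private static DataRow creatRow(final int rowIndex, final double feature1, final double feature2,
    final String stringFeature, final String clazz) {
    // null string feature -> missing cell, everything else maps 1:1 onto the declared columns
    DataCell stringCell = stringFeature == null ? DataType.getMissingCell() : new StringCell(stringFeature);
    return new DefaultRow(RowKey.createRowKey((long)rowIndex),
        new DoubleCell(feature1), new DoubleCell(feature2), stringCell, new StringCell(clazz));
}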
Use of org.knime.core.node.workflow.virtual.parchunk.VirtualParallelizedChunkPortObjectInNodeFactory in project knime-core by knime.
Class CovarianceMatrixCalculatorTest, method setUp:
/**
 * @throws java.lang.Exception
 */
@Before
public void setUp() throws Exception {
    @SuppressWarnings({"unchecked", "rawtypes"})
    NodeFactory<NodeModel> dummyFactory =
        (NodeFactory)new VirtualParallelizedChunkPortObjectInNodeFactory(new PortType[0]);
    m_exec = new ExecutionContext(new DefaultNodeProgressMonitor(), new Node(dummyFactory),
        SingleNodeContainer.MemoryPolicy.CacheOnDisc, new HashMap<Integer, ContainerTable>());
}
Use of org.knime.core.node.workflow.virtual.parchunk.VirtualParallelizedChunkPortObjectInNodeFactory in project knime-core by knime.
Class JoinerTest, method setUp:
/**
 * @throws java.lang.Exception
 */
@Before
public void setUp() throws Exception {
    NodeFactory<NodeModel> dummyFactory =
        (NodeFactory)new VirtualParallelizedChunkPortObjectInNodeFactory(new PortType[0]);
    m_exec = new ExecutionContext(new DefaultNodeProgressMonitor(), new Node(dummyFactory),
        SingleNodeContainer.MemoryPolicy.CacheOnDisc, new HashMap<Integer, ContainerTable>());
}
Use of org.knime.core.node.workflow.virtual.parchunk.VirtualParallelizedChunkPortObjectInNodeFactory in project knime-core by knime.
Class WorkflowManager, method duplicateLoopBodyInSubWFMandAttach:
/*
 * ...
 * @param subWFM already prepared subworkflow with appropriate inports.
 *            If subWFM == this then the subworkflows are simply added to
 *            the same workflow.
 * @param extInConnections map of incoming connections
 *            (NodeID + PortIndex) => WFM-Inport. Can be null if subWFM == this.
 * ...
 */
private ParallelizedChunkContent duplicateLoopBodyInSubWFMandAttach(final WorkflowManager subWFM,
    final HashMap<Pair<NodeID, Integer>, Integer> extInConnections, final NodeID startID,
    final NodeID endID, final NodeID[] oldIDs, final int chunkIndex) {
    assert m_workflowLock.isHeldByCurrentThread();
    // compute offset for new nodes (shifted in case of same
    // workflow, otherwise just underneath each other)
    final int[] moveUIDist;
    if (subWFM == this) {
        moveUIDist = new int[]{(chunkIndex + 1) * 10, (chunkIndex + 1) * 80, 0, 0};
    } else {
        moveUIDist = new int[]{(chunkIndex + 1) * 0, (chunkIndex + 1) * 150, 0, 0};
    }
    // create virtual start node
    NodeContainer startNode = getNodeContainer(startID);
    // find port types (ignore Variable Port "ear")
    PortType[] outTypes = new PortType[startNode.getNrOutPorts() - 1];
    for (int i = 0; i < outTypes.length; i++) {
        outTypes[i] = startNode.getOutPort(i + 1).getPortType();
    }
    NodeID virtualStartID = subWFM.createAndAddNode(new VirtualParallelizedChunkPortObjectInNodeFactory(outTypes));
    NodeUIInformation startUIPlain = startNode.getUIInformation();
    if (startUIPlain != null) {
        NodeUIInformation startUI = NodeUIInformation.builder(startUIPlain).translate(moveUIDist).build();
        subWFM.getNodeContainer(virtualStartID).setUIInformation(startUI);
    }
    // create virtual end node
    NodeContainer endNode = getNodeContainer(endID);
    assert endNode instanceof SingleNodeContainer;
    // find port types (ignore Variable Port "ear")
    PortType[] realInTypes = new PortType[endNode.getNrInPorts() - 1];
    for (int i = 0; i < realInTypes.length; i++) {
        realInTypes[i] = endNode.getInPort(i + 1).getPortType();
    }
    NodeID virtualEndID = subWFM.createAndAddNode(new VirtualParallelizedChunkPortObjectOutNodeFactory(realInTypes));
    NodeUIInformation endUIPlain = endNode.getUIInformation();
    if (endUIPlain != null) {
        NodeUIInformation endUI = NodeUIInformation.builder(endUIPlain).translate(moveUIDist).build();
        subWFM.getNodeContainer(virtualEndID).setUIInformation(endUI);
    }
    // copy nodes in loop body
    WorkflowCopyContent.Builder copyContent = WorkflowCopyContent.builder();
    copyContent.setNodeIDs(oldIDs);
    WorkflowCopyContent newBody = subWFM.copyFromAndPasteHere(this, copyContent.build());
    NodeID[] newIDs = newBody.getNodeIDs();
    Map<NodeID, NodeID> oldIDsHash = new HashMap<NodeID, NodeID>();
    for (int i = 0; i < oldIDs.length; i++) {
        oldIDsHash.put(oldIDs[i], newIDs[i]);
        NodeContainer nc = subWFM.getNodeContainer(newIDs[i]);
        NodeUIInformation uiInfo = nc.getUIInformation();
        if (uiInfo != null) {
            nc.setUIInformation(NodeUIInformation.builder(uiInfo).translate(moveUIDist).build());
        }
    }
    // restore connections to nodes outside the loop body (only incoming)
    for (int i = 0; i < newIDs.length; i++) {
        NodeContainer oldNode = getNodeContainer(oldIDs[i]);
        for (int p = 0; p < oldNode.getNrInPorts(); p++) {
            ConnectionContainer c = getIncomingConnectionFor(oldIDs[i], p);
            if (c == null) {
                // ignore: no incoming connection
            } else if (oldIDsHash.containsKey(c.getSource())) {
                // ignore: connection already retained by paste persistor
            } else if (c.getSource().equals(startID)) {
                // used to connect to start node, connect to virtual in now
                subWFM.addConnection(virtualStartID, c.getSourcePort(), newIDs[i], c.getDestPort());
            } else {
                // source node not part of loop:
                if (subWFM == this) {
                    addConnection(c.getSource(), c.getSourcePort(), newIDs[i], c.getDestPort());
                } else {
                    // find new replacement port
                    int subWFMportIndex = extInConnections.get(new Pair<NodeID, Integer>(c.getDest(), c.getDestPort()));
                    subWFM.addConnection(subWFM.getID(), subWFMportIndex, newIDs[i], c.getDestPort());
                }
            }
        }
    }
    // attach incoming connections of new Virtual End Node
    for (int p = 0; p < endNode.getNrInPorts(); p++) {
        ConnectionContainer c = getIncomingConnectionFor(endID, p);
        if (c == null) {
            // ignore: no incoming connection
        } else if (oldIDsHash.containsKey(c.getSource())) {
            // connects to node in loop - connect to copy
            NodeID source = oldIDsHash.get(c.getSource());
            subWFM.addConnection(source, c.getSourcePort(), virtualEndID, c.getDestPort());
        } else if (c.getSource().equals(startID)) {
            // used to connect to start node, connect to virtual in now
            subWFM.addConnection(virtualStartID, c.getSourcePort(), virtualEndID, c.getDestPort());
        } else {
            // source node not part of loop
            if (subWFM == this) {
                addConnection(c.getSource(), c.getSourcePort(), virtualEndID, c.getDestPort());
            } else {
                // find new replacement port (same (dest, destPort) key as above)
                int subWFMportIndex = extInConnections.get(new Pair<NodeID, Integer>(c.getDest(), c.getDestPort()));
                subWFM.addConnection(subWFM.getID(), subWFMportIndex, virtualEndID, c.getDestPort());
            }
        }
    }
    if (subWFM == this) {
        // connect start node var port with virtual start node
        addConnection(startID, 0, virtualStartID, 0);
    } else {
        // add variable connection to port 0 of WFM!
        if (this.canAddConnection(startID, 0, subWFM.getID(), 0)) {
            // only add this one the first time...
            this.addConnection(startID, 0, subWFM.getID(), 0);
        }
        subWFM.addConnection(subWFM.getID(), 0, virtualStartID, 0);
    }
    // set chunk of table to be processed in new virtual start node
    LoopStartParallelizeNode startModel = castNodeModel(startID, LoopStartParallelizeNode.class);
    VirtualParallelizedChunkNodeInput data = startModel.getVirtualNodeInput(chunkIndex);
    VirtualParallelizedChunkPortObjectInNodeModel virtualInModel =
        subWFM.castNodeModel(virtualStartID, VirtualParallelizedChunkPortObjectInNodeModel.class);
    virtualInModel.setVirtualNodeInput(data);
    return new ParallelizedChunkContent(subWFM, virtualStartID, virtualEndID, newIDs);
}
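For context, a hedged sketch of how a caller could populate the extInConnections map this method expects. The collections loopBodyNodesPlusEnd and loopBodyNodeSet, the starting inport index, and the surrounding loop are assumptions for illustration, not the actual KNIME caller; only the (destination node, destination port) keying and the flow-variable connection on port 0 are taken from the code and comments above:

// Sketch only: keys are the (destination node, destination port) pairs of
// connections entering the loop body (or the loop end node) from outside;
// values are inport indices of the prepared sub-workflow. Port 0 is assumed
// to be reserved for the flow-variable connection, as wired above.
HashMap<Pair<NodeID, Integer>, Integer> extInConnections = new HashMap<Pair<NodeID, Integer>, Integer>();
int nextFreeInport = 1;
for (NodeID bodyNodeOrEnd : loopBodyNodesPlusEnd) { // assumed: oldIDs plus endID
    NodeContainer nc = getNodeContainer(bodyNodeOrEnd);
    for (int p = 0; p < nc.getNrInPorts(); p++) {
        ConnectionContainer c = getIncomingConnectionFor(bodyNodeOrEnd, p);
        if (c != null && !loopBodyNodeSet.contains(c.getSource()) && !c.getSource().equals(startID)) {
            extInConnections.put(new Pair<NodeID, Integer>(c.getDest(), c.getDestPort()), nextFreeInport++);
        }
    }
}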