Use of org.knime.core.util.Pair in project knime-core by knime.
Class AppendedRowsRowInput, method create.
/**
 * Concatenates a set of inputs. Duplicates are handled according to the policy argument. Columns present in one
 * input but not the others, or conflicting column types, are handled as described in {@link AppendedRowsTable}.
 *
 * @param ins all inputs to be appended (non-null and no null values allowed)
 * @param duplPolicy how to deal with duplicate keys; must not be null
 * @param suffix suffix to append to duplicate keys (must not be null if policy is
 *            {@link DuplicatePolicy#AppendSuffix})
 * @param exec (optional) execution monitor that is used to report progress and check for cancellation; can be null
 * @param totalRowCount the number of rows to expect (sum over all row counts in the inputs); only used for
 *            progress reporting -- can be negative to report no progress
 * @return a new row input whose iteration scans all argument inputs
 */
public static AppendedRowsRowInput create(final RowInput[] ins, final DuplicatePolicy duplPolicy,
    final String suffix, final ExecutionMonitor exec, final long totalRowCount) {
    DataTableSpec[] specs = new DataTableSpec[ins.length];
    for (int i = 0; i < specs.length; i++) {
        specs[i] = ins[i].getDataTableSpec();
    }
    DataTableSpec spec = AppendedRowsTable.generateDataTableSpec(specs);
    CheckUtils.checkArgumentNotNull(duplPolicy, "Arg must not be null");
    if (DuplicatePolicy.AppendSuffix.equals(duplPolicy)) {
        CheckUtils.checkArgument(StringUtils.isNotEmpty(suffix), "Suffix must not be an empty string.");
    }
    PairSupplier[] suppliers = new PairSupplier[ins.length];
    for (int i = 0; i < suppliers.length; i++) {
        suppliers[i] = new PairSupplier(
            new Pair<RowIterator, DataTableSpec>(new RowInputIterator(ins[i]), ins[i].getDataTableSpec()));
    }
    AppendedRowsIterator it = new AppendedRowsIterator(suppliers, duplPolicy, suffix, spec, exec, totalRowCount);
    return new AppendedRowsRowInput(spec, it);
}
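
The following is not from the KNIME sources but a minimal sketch of how this factory might be called, assuming two table-backed inputs wrapped via DataTableRowInput and the DuplicatePolicy enum nested in AppendedRowsTable; the helper name and the "_dup" suffix are invented for the example.

static long countConcatenatedRows(final BufferedDataTable t1, final BufferedDataTable t2,
    final ExecutionMonitor exec) throws InterruptedException {
    // wrap both tables as streaming row inputs
    RowInput[] ins = {new DataTableRowInput(t1), new DataTableRowInput(t2)};
    // duplicate row keys from the second table get "_dup" appended
    RowInput concat = AppendedRowsRowInput.create(ins, DuplicatePolicy.AppendSuffix,
        "_dup", exec, t1.size() + t2.size());
    long count = 0;
    // poll() returns null once all argument inputs are exhausted
    for (DataRow row = concat.poll(); row != null; row = concat.poll()) {
        count++;
    }
    return count;
}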
Use of org.knime.core.util.Pair in project knime-core by knime.
Class VariableToTable2NodeModel, method execute.
/**
* {@inheritDoc}
*/
@Override
protected PortObject[] execute(final PortObject[] inData, final ExecutionContext exec) throws Exception {
    DataTableSpec spec = createOutSpec();
    BufferedDataContainer cont = exec.createDataContainer(spec);
    List<Pair<String, FlowVariable.Type>> vars = getVariablesOfInterest();
    DataCell[] cells = new DataCell[vars.size()];
    List<String> lostVariables = new ArrayList<String>();
    for (int i = 0; i < vars.size(); i++) {
        Pair<String, FlowVariable.Type> c = vars.get(i);
        String name = c.getFirst();
        // fallback if the variable has disappeared
        DataCell cell = DataType.getMissingCell();
        switch (c.getSecond()) {
            case DOUBLE:
                try {
                    double dValue = peekFlowVariableDouble(c.getFirst());
                    cell = new DoubleCell(dValue);
                } catch (NoSuchElementException e) {
                    lostVariables.add(name + " (Double)");
                }
                break;
            case INTEGER:
                try {
                    int iValue = peekFlowVariableInt(c.getFirst());
                    cell = new IntCell(iValue);
                } catch (NoSuchElementException e) {
                    lostVariables.add(name + " (Integer)");
                }
                break;
            case STRING:
                try {
                    String sValue = peekFlowVariableString(c.getFirst());
                    sValue = sValue == null ? "" : sValue;
                    cell = new StringCell(sValue);
                } catch (NoSuchElementException e) {
                    lostVariables.add(name + " (String)");
                }
                break;
        }
        cells[i] = cell;
    }
    if (!lostVariables.isEmpty()) {
        // surface variables that could not be resolved (their cells stay missing)
        setWarningMessage("Using missing cell for non-existing variable(s): " + lostVariables);
    }
    cont.addRowToTable(new DefaultRow(m_rowID.getStringValue(), cells));
    cont.close();
    return new BufferedDataTable[]{cont.getTable()};
}
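
getVariablesOfInterest() is not part of this excerpt; the sketch below is a hypothetical stand-in showing how such a list of name/type pairs could be assembled with Pair.create (the real implementation presumably filters by the user's selection in the dialog).

private List<Pair<String, FlowVariable.Type>> getVariablesOfInterest() {
    List<Pair<String, FlowVariable.Type>> result = new ArrayList<>();
    for (FlowVariable v : getAvailableFlowVariables().values()) {
        // Pair.create infers both type arguments; getFirst()/getSecond() read them back
        result.add(Pair.create(v.getName(), v.getType()));
    }
    return result;
}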
Use of org.knime.core.util.Pair in project knime-core by knime.
Class VariableToTable2NodeModel, method createOutSpec.
private DataTableSpec createOutSpec() throws InvalidSettingsException {
    List<Pair<String, FlowVariable.Type>> vars = getVariablesOfInterest();
    if (vars.isEmpty()) {
        throw new InvalidSettingsException("No variables selected");
    }
    DataColumnSpec[] specs = new DataColumnSpec[vars.size()];
    for (int i = 0; i < vars.size(); i++) {
        Pair<String, FlowVariable.Type> c = vars.get(i);
        DataType type;
        switch (c.getSecond()) {
            case DOUBLE:
                type = DoubleCell.TYPE;
                break;
            case INTEGER:
                type = IntCell.TYPE;
                break;
            case STRING:
                type = StringCell.TYPE;
                break;
            default:
                throw new InvalidSettingsException("Unsupported variable type: " + c.getSecond());
        }
        specs[i] = new DataColumnSpecCreator(c.getFirst(), type).createSpec();
    }
    return new DataTableSpec(specs);
}
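
For illustration only: two hypothetical variables of interest, "rate" (DOUBLE) and "count" (INTEGER), would yield a spec equivalent to this hand-built one, mirroring the switch above.

// hand-built equivalent of createOutSpec() for "rate" (DOUBLE) and "count" (INTEGER)
DataColumnSpec rateCol = new DataColumnSpecCreator("rate", DoubleCell.TYPE).createSpec();
DataColumnSpec countCol = new DataColumnSpecCreator("count", IntCell.TYPE).createSpec();
DataTableSpec expected = new DataTableSpec(rateCol, countCol);  // column order follows the pair list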
Use of org.knime.core.util.Pair in project knime-core by knime.
Class WorkflowManager, method setInputNodes.
/**
 * Counterpart to {@link #getInputNodes()} - it sets new values into quickform nodes on the root level. All nodes
 * in the map argument will be reset as part of this call.
 *
 * @param input a map from {@link org.knime.core.node.dialog.DialogNode#getParameterName() node's parameter name}
 *            to its value (JSON or string object); invalid entries cause an exception
 * @throws InvalidSettingsException if a parameter name is not valid or not uniquely defined in the workflow
 * @since 2.12
 */
public void setInputNodes(final Map<String, ExternalNodeData> input) throws InvalidSettingsException {
    try (WorkflowLock lock = lock()) {
        CheckUtils.checkState(!getNodeContainerState().isExecutionInProgress(),
            "Cannot apply new parameters - workflow still in execution");
        List<ExternalNodeDataHandle> inputNodes = getExternalNodeDataHandles(InputNode.class, i -> i.getInputData());
        // will contain all nodes that need a new data object
        List<Pair<NativeNodeContainer, ExternalNodeData>> valuesToSetList = new LinkedList<>();
        // find all the nodes, remember them and do some validation -- do not set new value yet.
        for (Map.Entry<String, ExternalNodeData> entry : input.entrySet()) {
            final String userParameter = entry.getKey();
            Matcher parameterNameMatcher = ExternalNodeData.PARAMETER_NAME_PATTERN.matcher(userParameter);
            Optional<ExternalNodeDataHandle> matchingNodeOptional;
            if (parameterNameMatcher.matches()) {
                // fully qualified (e.g. "param-name-32:34")
                matchingNodeOptional = inputNodes.stream()
                    .filter(e -> e.getParameterNameFullyQualified().equals(userParameter)).findFirst();
            } else {
                // short notation, e.g. "param-name"
                matchingNodeOptional = inputNodes.stream()
                    .filter(e -> e.getParameterNameShort().equals(userParameter)).findFirst();
            }
            ExternalNodeDataHandle matchingNode = matchingNodeOptional.orElseThrow(
                () -> new InvalidSettingsException(String.format(
                    "Parameter name \"%s\" doesn't match any node in the workflow, valid parameter names are: %s",
                    userParameter, inputNodes.stream().map(e -> "\"" + e.getParameterNameShort() + "\"")
                        .collect(Collectors.joining(", ", "[", "]")))));
            NativeNodeContainer nnc = matchingNode.getOwnerNodeContainer();
            ((InputNode)nnc.getNodeModel()).validateInputData(entry.getValue());
            valuesToSetList.add(Pair.create(nnc, entry.getValue()));
        }
        // finally set the new (validated) value
        for (Pair<NativeNodeContainer, ExternalNodeData> t : valuesToSetList) {
            NativeNodeContainer inputNodeNC = t.getFirst();
            ExternalNodeData data = t.getSecond();
            LOGGER.debugWithFormat("Setting new parameter for node \"%s\"", inputNodeNC.getNameWithID());
            ((InputNode)inputNodeNC.getNodeModel()).setInputData(data);
            inputNodeNC.getParent().resetAndConfigureNode(inputNodeNC.getID());
        }
    }
}
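
A hypothetical call site, assuming ExternalNodeData's builder API (builder(id).stringValue(...).build()) for constructing the values; the parameter name and value are invented.

Map<String, ExternalNodeData> params = new HashMap<>();
// "input-file" uses the short notation; a suffix like "-32:34" would make it fully qualified
params.put("input-file",
    ExternalNodeData.builder("input-file").stringValue("/tmp/sales.csv").build());
workflowManager.setInputNodes(params);  // validates all entries first, then sets and resets each node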
Use of org.knime.core.util.Pair in project knime-core by knime.
Class WorkflowManager, method collapseIntoMetaNode.
/**
 * Collapses a selected set of nodes into a metanode. Makes sure connections from and to nodes not contained in
 * this set are passed through appropriate ports of the new metanode.
 *
 * @param orgIDs the ids of the nodes to be moved to the new metanode
 * @param orgAnnos the workflow annotations to be moved
 * @param name the name of the new metanode
 * @return the result holding the newly created metanode and undo information
 * @throws IllegalArgumentException if the collapse cannot be done
 */
public CollapseIntoMetaNodeResult collapseIntoMetaNode(final NodeID[] orgIDs,
    final WorkflowAnnotation[] orgAnnos, final String name) {
    try (WorkflowLock lock = lock()) {
        // make sure this is still true:
        String res = canCollapseNodesIntoMetaNode(orgIDs);
        CheckUtils.checkArgument(res == null, res);
        // for quick search:
        HashSet<NodeID> orgIDsHash = new HashSet<NodeID>(Arrays.asList(orgIDs));
        // find outside Nodes/Ports that have connections to the inside.
        // Map will hold SourceNodeID/PortIndex + Index of new MetanodeInport.
        HashMap<Pair<NodeID, Integer>, VerticalPortIndex> exposedIncomingPorts = new HashMap<>();
        // second Map holds list of affected connections
        HashMap<ConnectionContainer, VerticalPortIndex> inportConnections =
            new HashMap<ConnectionContainer, VerticalPortIndex>();
        for (NodeID id : orgIDs) {
            if (m_workflow.getConnectionsByDest(id) != null) {
                for (ConnectionContainer cc : m_workflow.getConnectionsByDest(id)) {
                    if (!orgIDsHash.contains(cc.getSource())) {
                        Pair<NodeID, Integer> npi = Pair.create(cc.getSource(), cc.getSourcePort());
                        if (!exposedIncomingPorts.containsKey(npi)) {
                            int yPos = npi.getSecond();
                            if (npi.getFirst().equals(this.getID())) {
                                // connection from metanode inport
                                // TODO: determine ypos of the port!
                            } else {
                                // connected to other node in this workflow
                                NodeContainer nc = getNodeContainer(npi.getFirst());
                                NodeUIInformation uii = nc.getUIInformation();
                                // also include source port index into the ypos
                                // to make sure ports of the same node are sorted
                                // correctly!
                                if (uii != null) {
                                    int[] x = uii.getBounds();
                                    if ((x != null) && (x.length >= 2)) {
                                        // add node y position to port index
                                        yPos += x[1];
                                    }
                                }
                            }
                            VerticalPortIndex vpi = new VerticalPortIndex(yPos);
                            exposedIncomingPorts.put(npi, vpi);
                        }
                        VerticalPortIndex inportIndex = exposedIncomingPorts.get(npi);
                        inportConnections.put(cc, inportIndex);
                    }
                }
            }
        }
        // sort new input ports by vertical position of source nodes
        VerticalPortIndex[] vpis = new VerticalPortIndex[exposedIncomingPorts.size()];
        int vi = 0;
        for (VerticalPortIndex vpi : exposedIncomingPorts.values()) {
            vpis[vi] = vpi;
            vi++;
        }
        Arrays.sort(vpis);
        for (int i = 0; i < vpis.length; i++) {
            vpis[i].setIndex(i);
        }
        // find Nodes/Ports that have outgoing connections to the outside.
        // Map will hold SourceNodeID/PortIndex + Index of new MetanodeOutport.
        HashMap<Pair<NodeID, Integer>, VerticalPortIndex> exposedOutports = new HashMap<>();
        for (NodeID id : orgIDs) {
            for (ConnectionContainer cc : m_workflow.getConnectionsBySource(id)) {
                if (!orgIDsHash.contains(cc.getDest())) {
                    Pair<NodeID, Integer> npi = Pair.create(cc.getSource(), cc.getSourcePort());
                    if (!exposedOutports.containsKey(npi)) {
                        NodeContainer nc = getNodeContainer(npi.getFirst());
                        NodeUIInformation uii = nc.getUIInformation();
                        // also include source port index into the ypos
                        // to make sure ports of the same node are sorted
                        // correctly!
                        int yPos = npi.getSecond();
                        if (uii != null) {
                            int[] x = uii.getBounds();
                            if ((x != null) && (x.length >= 2)) {
                                // add node y position to port index
                                yPos += x[1];
                            }
                        }
                        VerticalPortIndex vpi = new VerticalPortIndex(yPos);
                        exposedOutports.put(npi, vpi);
                    }
                }
            }
        }
        // also sort new output ports by vertical position of source nodes
        vpis = new VerticalPortIndex[exposedOutports.size()];
        vi = 0;
        for (VerticalPortIndex vpi : exposedOutports.values()) {
            vpis[vi] = vpi;
            vi++;
        }
        Arrays.sort(vpis);
        for (int i = 0; i < vpis.length; i++) {
            vpis[i].setIndex(i);
        }
        // determine types of new Metanode in- and outports:
        // (note that we reach directly into the Node to get the port type
        // so we need to correct the index for the - then missing - var
        // port.)
        PortType[] exposedIncomingPortTypes = new PortType[exposedIncomingPorts.size()];
        for (Map.Entry<Pair<NodeID, Integer>, VerticalPortIndex> entry : exposedIncomingPorts.entrySet()) {
            Pair<NodeID, Integer> npi = entry.getKey();
            int index = entry.getValue().getIndex();
            NodeID nID = npi.getFirst();
            int portIndex = npi.getSecond();
            if (nID.equals(this.getID())) {
                // if this connection comes directly from a Metanode Inport:
                exposedIncomingPortTypes[index] = this.getInPort(portIndex).getPortType();
            } else {
                // otherwise reach into Nodecontainer to find out port type:
                NodeContainer nc = getNodeContainer(nID);
                exposedIncomingPortTypes[index] = nc.getOutPort(portIndex).getPortType();
            }
        }
        PortType[] exposedOutportTypes = new PortType[exposedOutports.size()];
        for (Pair<NodeID, Integer> npi : exposedOutports.keySet()) {
            int index = exposedOutports.get(npi).getIndex();
            int portIndex = npi.getSecond();
            NodeContainer nc = getNodeContainer(npi.getFirst());
            exposedOutportTypes[index] = nc.getOutPort(portIndex).getPortType();
        }
        // create the new Metanode
        WorkflowManager newWFM = createAndAddSubWorkflow(exposedIncomingPortTypes, exposedOutportTypes, name);
        // move into center of nodes this one replaces...
        int x = 0;
        int y = 0;
        int count = 0;
        if (orgIDs.length >= 1) {
            for (int i = 0; i < orgIDs.length; i++) {
                NodeContainer nc = getNodeContainer(orgIDs[i]);
                NodeUIInformation uii = nc.getUIInformation();
                if (uii != null) {
                    int[] bounds = uii.getBounds();
                    if (bounds.length >= 2) {
                        x += bounds[0];
                        y += bounds[1];
                        count++;
                    }
                }
            }
        }
        if (count >= 1) {
            NodeUIInformation newUii =
                NodeUIInformation.builder().setNodeLocation(x / count, y / count, -1, -1).build();
            newWFM.setUIInformation(newUii);
        }
        // copy the nodes into the newly created WFM:
        WorkflowCopyContent.Builder orgContentBuilder = WorkflowCopyContent.builder();
        orgContentBuilder.setNodeIDs(orgIDs);
        orgContentBuilder.setAnnotation(orgAnnos);
        orgContentBuilder.setIncludeInOutConnections(true);
        final WorkflowPersistor undoPersistor = copy(true, orgContentBuilder.build());
        orgContentBuilder.setIncludeInOutConnections(false);
        WorkflowCopyContent newContent = newWFM.copyFromAndPasteHere(this, orgContentBuilder.build());
        NodeID[] newIDs = newContent.getNodeIDs();
        Map<NodeID, NodeID> oldIDsHash = new HashMap<NodeID, NodeID>();
        for (int i = 0; i < orgIDs.length; i++) {
            oldIDsHash.put(orgIDs[i], newIDs[i]);
        }
        // move subworkflows into upper left corner but keep
        // original layout (important for undo!)
        // ATTENTION: if you change this, make sure it is revertible
        // by extractMetanode (and correctly so!).
        int xmin = Integer.MAX_VALUE;
        int ymin = Integer.MAX_VALUE;
        if (newIDs.length >= 1) {
            // calculate shift
            for (int i = 0; i < newIDs.length; i++) {
                NodeContainer nc = newWFM.getNodeContainer(newIDs[i]);
                NodeUIInformation uii = nc.getUIInformation();
                if (uii != null) {
                    int[] bounds = uii.getBounds();
                    if (bounds.length >= 2) {
                        xmin = Math.min(bounds[0], xmin);
                        ymin = Math.min(bounds[1], ymin);
                    }
                }
            }
            int xshift = 150 - Math.max(xmin, 70);
            int yshift = 120 - Math.max(ymin, 20);
            // move new nodes
            for (int i = 0; i < newIDs.length; i++) {
                NodeContainer nc = newWFM.getNodeContainer(newIDs[i]);
                NodeUIInformation uii = nc.getUIInformation();
                if (uii != null) {
                    NodeUIInformation newUii =
                        NodeUIInformation.builder(uii).translate(new int[] {xshift, yshift}).build();
                    nc.setUIInformation(newUii);
                }
            }
            // move new annotations
            for (Annotation anno : newWFM.m_annotations) {
                anno.shiftPosition(xshift, yshift);
            }
            // move bendpoints of all internal connections
            for (ConnectionContainer cc : newWFM.getConnectionContainers()) {
                if ((!cc.getSource().equals(newWFM.getID())) && (!cc.getDest().equals(newWFM.getID()))) {
                    ConnectionUIInformation uii = cc.getUIInfo();
                    if (uii != null) {
                        ConnectionUIInformation newUI =
                            ConnectionUIInformation.builder(uii).translate(new int[] {xshift, yshift}).build();
                        cc.setUIInfo(newUI);
                    }
                }
            }
        }
        // create connections INSIDE the new workflow (from incoming ports)
        for (ConnectionContainer cc : inportConnections.keySet()) {
            int portIndex = inportConnections.get(cc).getIndex();
            NodeID newID = oldIDsHash.get(cc.getDest());
            newWFM.addConnection(newWFM.getID(), portIndex, newID, cc.getDestPort());
            this.removeConnection(cc);
        }
        // create connections INSIDE the new workflow (to outgoing ports)
        for (Pair<NodeID, Integer> npi : exposedOutports.keySet()) {
            int index = exposedOutports.get(npi).getIndex();
            NodeID newID = oldIDsHash.get(npi.getFirst());
            newWFM.addConnection(newID, npi.getSecond(), newWFM.getID(), index);
        }
        // create OUTSIDE connections to the new workflow
        for (Pair<NodeID, Integer> npi : exposedIncomingPorts.keySet()) {
            int index = exposedIncomingPorts.get(npi).getIndex();
            this.addConnection(npi.getFirst(), npi.getSecond(), newWFM.getID(), index);
        }
        // create OUTSIDE connections from the new workflow
        for (NodeID id : orgIDs) {
            // convert to a separate array so we can delete connections!
            ConnectionContainer[] cca = new ConnectionContainer[0];
            cca = m_workflow.getConnectionsBySource(id).toArray(cca);
            for (ConnectionContainer cc : cca) {
                if (!orgIDsHash.contains(cc.getDest())) {
                    Pair<NodeID, Integer> npi = new Pair<NodeID, Integer>(cc.getSource(), cc.getSourcePort());
                    int newPort = exposedOutports.get(npi).getIndex();
                    this.removeConnection(cc);
                    this.addConnection(newWFM.getID(), newPort, cc.getDest(), cc.getDestPort());
                }
            }
        }
        // and finally: delete the original nodes and annotations.
        Stream.of(orgIDs).forEach(id -> removeNode(id));
        Stream.of(orgAnnos).forEach(anno -> removeAnnotation(anno));
        return new CollapseIntoMetaNodeResult(this, newWFM.getID(), undoPersistor);
    }
}
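
Finally, a hypothetical invocation; nodeA, nodeB, and annotation stand in for a real selection, and the result wraps the new metanode's ID together with the undo persistor captured above.

NodeID[] selected = {nodeA, nodeB};
WorkflowAnnotation[] annos = {annotation};
// collapses the selection into a metanode named "Preprocessing" and rewires all
// crossing connections through the new metanode's ports
CollapseIntoMetaNodeResult result =
    workflowManager.collapseIntoMetaNode(selected, annos, "Preprocessing");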