Use of org.knime.core.node.port.PortObject in project knime-core by knime.
The class VirtualSubNodeInputNodeModel, method createStreamableOperator.
@Override
public StreamableOperator createStreamableOperator(final PartitionInfo partitionInfo, final PortObjectSpec[] inSpecs) throws InvalidSettingsException {
    return new StreamableOperator() {

        @Override
        public void runFinal(final PortInput[] inputs, final PortOutput[] outputs, final ExecutionContext exec) throws Exception {
            assert inputs.length == 0;
            PortObject[] dataFromParent = ArrayUtils.remove(m_subNodeContainer.fetchInputDataFromParent(), 0);
            for (int i = 0; i < outputs.length; i++) {
                if (BufferedDataTable.TYPE.equals(getOutPortType(i))) {
                    // stream port content if it's data
                    BufferedDataTable bdt = (BufferedDataTable)(dataFromParent[i]);
                    RowOutput rowOutput = (RowOutput)outputs[i];
                    for (DataRow dr : bdt) {
                        rowOutput.push(dr);
                    }
                    rowOutput.close();
                } else {
                    ((PortObjectOutput)outputs[i]).setPortObject(dataFromParent[i]);
                }
            }
        }
    };
}
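The inner loop is the canonical push-and-close pattern for streaming an already materialized table. As a minimal, self-contained sketch (not taken from the KNIME sources; the helper name streamTable is made up for illustration), the same idea looks like this:

// Forward every row of an existing BufferedDataTable into a RowOutput.
// Closing the output signals downstream consumers that no further rows will arrive.
private static void streamTable(final BufferedDataTable table, final RowOutput out) throws Exception {
    for (DataRow row : table) {
        out.push(row);
    }
    out.close();
}

Note that the node above only streams ports whose type is BufferedDataTable.TYPE; all other ports are forwarded as whole objects via PortObjectOutput.setPortObject.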
Use of org.knime.core.node.port.PortObject in project knime-core by knime.
The class DecisionTreeLearnerNodeModel, method execute.
/**
 * Start of decision tree induction.
 *
 * @param exec the execution context for this run
 * @param data the input data to build the decision tree from
 * @return the learned model as a PMML port object (no data table is created)
 * @throws Exception any type of exception, e.g. for cancellation or invalid input
 * @see NodeModel#execute(BufferedDataTable[],ExecutionContext)
 */
@Override
protected PortObject[] execute(final PortObject[] data, final ExecutionContext exec) throws Exception {
    // holds the warning message displayed after execution
    StringBuilder warningMessageSb = new StringBuilder();
    ParallelProcessing parallelProcessing = new ParallelProcessing(m_parallelProcessing.getIntValue());
    if (LOGGER.isDebugEnabled()) {
        LOGGER.debug("Number of available threads: " + parallelProcessing.getMaxNumberThreads() + " used threads: " + parallelProcessing.getCurrentThreadsInUse());
    }
    exec.setProgress("Preparing...");
    // check input data
    assert (data != null && data[DATA_INPORT] != null);
    BufferedDataTable inData = (BufferedDataTable)data[DATA_INPORT];
    // get column with color information
    String colorColumn = null;
    for (DataColumnSpec s : inData.getDataTableSpec()) {
        if (s.getColorHandler() != null) {
            colorColumn = s.getName();
            break;
        }
    }
    // the data table must contain at least 2 records
    if (inData.getRowCount() <= 1) {
        throw new IllegalArgumentException("Input data table must have at least 2 records!");
    }
    // get class column index
    int classColumnIndex = inData.getDataTableSpec().findColumnIndex(m_classifyColumn.getStringValue());
    assert classColumnIndex > -1;
    // create initial in-memory table
    exec.setProgress("Create initial In-Memory table...");
    InMemoryTableCreator tableCreator = new InMemoryTableCreator(inData, classColumnIndex, m_minNumberRecordsPerNode.getIntValue(), m_skipColumns.getBooleanValue());
    InMemoryTable initialTable = tableCreator.createInMemoryTable(exec.createSubExecutionContext(0.05));
    int removedRows = tableCreator.getRemovedRowsDueToMissingClassValue();
    if (removedRows == inData.getRowCount()) {
        throw new IllegalArgumentException("Class column contains only missing values");
    }
    if (removedRows > 0) {
        warningMessageSb.append(removedRows);
        warningMessageSb.append(" rows removed due to missing class value;");
    }
    // the overall row count is used to report progress
    m_alloverRowCount = initialTable.getSumOfWeights();
    // set the finishing counter; it is incremented whenever a leaf node is
    // created, which marks the end of a recursion branch and can therefore
    // be used for progress indication
    m_finishedCounter = new AtomicDouble(0);
    // get the number of attributes
    m_numberAttributes = initialTable.getNumAttributes();
    // create the quality measure
    final SplitQualityMeasure splitQualityMeasure;
    if (m_splitQualityMeasureType.getStringValue().equals(SPLIT_QUALITY_GINI)) {
        splitQualityMeasure = new SplitQualityGini();
    } else {
        splitQualityMeasure = new SplitQualityGainRatio();
    }
    // build the tree; before that, set the node counter to 0
    m_counter.set(0);
    exec.setMessage("Building tree...");
    DecisionTreeNode root = buildTree(initialTable, exec, 0, splitQualityMeasure, parallelProcessing);
    boolean isBinaryNominal = m_binaryNominalSplitMode.getBooleanValue();
    boolean isFilterInvalidAttributeValues = m_filterNominalValuesFromParent.getBooleanValue();
    if (isBinaryNominal && isFilterInvalidAttributeValues) {
        // traverse tree nodes and remove from the children the attribute
        // values that were filtered out further up in the tree. "Bug" 3124
        root.filterIllegalAttributes(Collections.EMPTY_MAP);
    }
    // the decision tree model saved as PMML at the second out-port;
    // the missing value strategy has to be set explicitly because the PMML
    // default is 'none', which means rows with missing values are not classified
    DecisionTree decisionTree = new DecisionTree(root, m_classifyColumn.getStringValue(), PMMLMissingValueStrategy.LAST_PREDICTION);
    decisionTree.setColorColumn(colorColumn);
    // prune the tree
    exec.setMessage("Prune tree with " + m_pruningMethod.getStringValue() + "...");
    pruneTree(decisionTree);
    // add highlight patterns and color information
    exec.setMessage("Adding hilite and color info to tree...");
    addHiliteAndColorInfo(inData, decisionTree);
    LOGGER.info("Decision tree consisting of " + decisionTree.getNumberNodes() + " nodes created with pruning method " + m_pruningMethod.getStringValue());
    // set the warning message if available
    if (warningMessageSb.length() > 0) {
        setWarningMessage(warningMessageSb.toString());
    }
    // reset the number of available threads
    parallelProcessing.reset();
    parallelProcessing = null;
    // no data table is created -> only the PMML model port is returned
    exec.setMessage("Creating PMML decision tree model...");
    // handle the optional PMML input
    PMMLPortObject inPMMLPort = (PMMLPortObject)data[1];
    DataTableSpec inSpec = inData.getSpec();
    PMMLPortObjectSpec outPortSpec = createPMMLPortObjectSpec(inPMMLPort == null ? null : inPMMLPort.getSpec(), inSpec);
    PMMLPortObject outPMMLPort = new PMMLPortObject(outPortSpec, inPMMLPort, inData.getSpec());
    outPMMLPort.addModelTranslater(new PMMLDecisionTreeTranslator(decisionTree));
    m_decisionTree = decisionTree;
    return new PortObject[]{ outPMMLPort };
}
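The optional PMML input port deserves a brief note: data[1] is null when nothing is connected to it, which is why the spec creation guards with a ternary. A minimal sketch of the same handling written out as plain branches (createPMMLPortObjectSpec is the node's own helper referenced above; the restructuring is purely illustrative):

// null when the optional PMML port is unconnected
PMMLPortObject inPMMLPort = (PMMLPortObject)data[1];
PMMLPortObjectSpec outPortSpec;
if (inPMMLPort != null) {
    // merge the incoming PMML spec with the training table's spec
    outPortSpec = createPMMLPortObjectSpec(inPMMLPort.getSpec(), inData.getSpec());
} else {
    // no incoming PMML: build the spec from the training table alone
    outPortSpec = createPMMLPortObjectSpec(null, inData.getSpec());
}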
Use of org.knime.core.node.port.PortObject in project knime-core by knime.
The class SubNodeContainer, method performLoadContent.
/**
 * {@inheritDoc}
 */
@Override
WorkflowCopyContent performLoadContent(final SingleNodeContainerPersistor nodePersistor, final Map<Integer, BufferedDataTable> tblRep, final FlowObjectStack inStack, final ExecutionMonitor exec, final LoadResult loadResult, final boolean preserveNodeMessage) throws CanceledExecutionException {
    SubNodeContainerPersistor subNodePersistor = (SubNodeContainerPersistor)nodePersistor;
    WorkflowPersistor workflowPersistor = subNodePersistor.getWorkflowPersistor();
    // TODO pass in a filter input stack
    m_wfm.loadContent(workflowPersistor, tblRep, inStack, exec, loadResult, preserveNodeMessage);
    if (workflowPersistor.isDirtyAfterLoad() || m_wfm.isDirty()) {
        setDirty();
    }
    InternalNodeContainerState loadState = nodePersistor.getMetaPersistor().getState();
    if (!m_wfm.getInternalState().equals(loadState)) {
        // can happen for workflows that were exported without data;
        // the same check is done by the caller (WorkflowManager#postLoad) and handled appropriately
        setInternalState(m_wfm.getInternalState(), false);
    }
    NodeSettingsRO modelSettings = subNodePersistor.getSNCSettings().getModelSettings();
    if (modelSettings != null) {
        try {
            loadModelSettingsIntoDialogNodes(modelSettings, false);
        } catch (InvalidSettingsException e) {
            final String msg = "Could not load Wrapped Metanode configuration into dialog-nodes: " + e.getMessage();
            LOGGER.error(msg, e);
            loadResult.addError(msg);
            setDirty();
        }
    }
    checkInOutNodesAfterLoad(subNodePersistor, loadResult);
    // put data into the output node if it was executed
    final NativeNodeContainer virtualOutNode = getVirtualOutNode();
    LoadVersion l = nodePersistor instanceof FileSingleNodeContainerPersistor ? ((FileSingleNodeContainerPersistor)nodePersistor).getLoadVersion() : LoadVersion.V3010;
    if (l.isOlderThan(LoadVersion.V3010) && virtualOutNode.getInternalState().isExecuted()) {
        VirtualSubNodeOutputNodeModel outNodeModel = getVirtualOutNodeModel();
        PortObject[] outputData = new PortObject[virtualOutNode.getNrInPorts()];
        m_wfm.assembleInputData(getVirtualOutNodeID(), outputData);
        outNodeModel.postLoadExecute(ArrayUtils.removeAll(outputData, 0));
        // allow the node to receive the internally held objects so that the next save operation also persists the
        // array of internally held objects - otherwise we get strange errors with nodes saved in 2.x, then loaded
        // and saved in 3.1+ (and converted ... although unmodified)
        getVirtualOutNode().getNode().assignInternalHeldObjects(outputData, null, getVirtualOutNode().createExecutionContext(), new PortObject[0]);
    }
    setVirtualOutputIntoOutport(m_wfm.getInternalState());
    m_wfmStateChangeListener = createAndAddStateListener();
    getInPort(0).setPortName("Variable Inport");
    getOutPort(0).setPortName("Variable Outport");
    getVirtualInNode().addNodeStateChangeListener(new RefreshPortNamesListener());
    getVirtualOutNode().addNodeStateChangeListener(new RefreshPortNamesListener());
    refreshPortNames();
    return null;
}
Use of org.knime.core.node.port.PortObject in project knime-core by knime.
The class SubNodeContainer, method setVirtualOutputIntoOutport.
/**
 * Copies data from the virtual output node into m_outputs and notifies state listeners
 * unless the call originates from the parent.
 *
 * @param newState state of the internal WFM, used to decide whether to publish ports and/or specs
 * @return whether any output spec or object changed
 */
@SuppressWarnings("null")
private boolean setVirtualOutputIntoOutport(final InternalNodeContainerState newState) {
    // retrieve results and copy to outports
    final VirtualSubNodeOutputNodeModel virtualOutNodeModel = getVirtualOutNodeModel();
    final boolean isInactive = getVirtualOutNode().isInactive();
    final VirtualSubNodeExchange outputExchange = virtualOutNodeModel.getOutputExchange();
    // put objects into output if state of WFM is executed
    boolean publishObjects = newState.isExecuted();
    // publishObjects implies that the output node has data or is inactive
    assert !publishObjects || (isInactive || (outputExchange != null && outputExchange.getPortObjects() != null)) : String.format(
        "output node must have data or be inactive, status: %s, inactive: %b, exchange is null: %b, exchange content is null: %s",
        newState, isInactive, outputExchange == null, outputExchange == null ? "<invalid>" : String.valueOf(outputExchange.getPortObjects() == null));
    boolean publishSpecs = (isInactive || outputExchange != null) && (newState.isConfigured() || newState.isExecuted() || newState.isExecutionInProgress());
    boolean changed = false;
    for (int i = 1; i < m_outputs.length; i++) {
        // no published spec: null output
        // inactive output node: inactive branch port object
        // otherwise: use data from the output node
        final PortObjectSpec spec = publishSpecs ? (isInactive ? InactiveBranchPortObjectSpec.INSTANCE : outputExchange.getPortSpecs()[i - 1]) : null;
        changed = m_outputs[i].setSpec(spec) || changed;
        final PortObject object = publishObjects ? (isInactive ? InactiveBranchPortObject.INSTANCE : outputExchange.getPortObjects()[i - 1]) : null;
        changed = m_outputs[i].setObject(object) || changed;
    }
    final PortObjectSpec spec = publishSpecs ? (isInactive ? InactiveBranchPortObjectSpec.INSTANCE : FlowVariablePortObjectSpec.INSTANCE) : null;
    changed = m_outputs[0].setSpec(spec) || changed;
    final PortObject object = publishObjects ? (isInactive ? InactiveBranchPortObject.INSTANCE : FlowVariablePortObject.INSTANCE) : null;
    changed = m_outputs[0].setObject(object) || changed;
    final FlowObjectStack outgoingFlowObjectStack = getOutgoingFlowObjectStack();
    // TODO API to remove variables from stack, then remove variables no longer in output node and update "changed"
    if (publishObjects && !isInactive) {
        for (FlowVariable f : outputExchange.getFlowVariables()) {
            outgoingFlowObjectStack.push(f.cloneAndUnsetOwner());
        }
    } else {
        // outgoing stack may be null if reset is called twice in a row (or once but no configure was called)
        if (outgoingFlowObjectStack != null) {
            while (!outgoingFlowObjectStack.isEmpty()) {
                outgoingFlowObjectStack.pop(FlowObject.class);
            }
        }
    }
    if (changed && !m_isPerformingActionCalledFromParent) {
        // updates port views
        notifyStateChangeListeners(new NodeStateEvent(this));
    }
    return changed;
}
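The nested ternaries inside the loop compress a three-way decision. As a minimal sketch (not from the KNIME sources), the per-port choice for one output index i reads like this when spelled out:

// Decide which spec to publish for output port i.
final PortObjectSpec portSpec;
if (!publishSpecs) {
    portSpec = null;                                    // nothing to publish yet
} else if (isInactive) {
    portSpec = InactiveBranchPortObjectSpec.INSTANCE;   // mark the port as inactive
} else {
    portSpec = outputExchange.getPortSpecs()[i - 1];    // real spec from the virtual output node
}

The port object is chosen the same way, using publishObjects, InactiveBranchPortObject.INSTANCE and outputExchange.getPortObjects()[i - 1].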
Use of org.knime.core.node.port.PortObject in project knime-core by knime.
The class SubNodeContainer, method fetchInputDataFromParent.
/* -------------------- Virtual node callbacks -------------- */
/**
 * Called from the virtual input node when executed - it possibly executes nodes in the
 * parent WFM and then fetches the data from them.
 *
 * @return the subnode's data input (incl. the mandatory flow variable port object)
 * @throws ExecutionException any exception thrown while waiting for upstream nodes to finish execution
 */
public PortObject[] fetchInputDataFromParent() throws ExecutionException {
    Callable<PortObject[]> c = new Callable<PortObject[]>() {

        @Override
        public PortObject[] call() throws Exception {
            final WorkflowManager parent = getParent();
            // might not be in the workflow yet, or no longer (e.g. while the subnode is being constructed)
            if (parent.containsNodeContainer(getID())) {
                PortObject[] results = new PortObject[getNrInPorts()];
                parent.executePredecessorsAndWait(getID());
                if (parent.assembleInputData(getID(), results)) {
                    return results;
                }
            }
            return null;
        }
    };
    ThreadPool currentPool = ThreadPool.currentPool();
    if (currentPool != null) {
        return currentPool.runInvisible(c);
    } else {
        try {
            return c.call();
        } catch (Exception e) {
            throw new ExecutionException(e);
        }
    }
}
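The ThreadPool handling at the end is the general pattern for blocking work triggered from code that may itself run on a KNIME pool thread: the callable is run "invisibly" so the pool slot is released while waiting for upstream nodes. A minimal sketch (not from the KNIME sources; the helper name runBlocking is made up for illustration):

// Run potentially blocking work without occupying a pool slot; fall back to a
// direct call when the current thread is not managed by a KNIME ThreadPool.
static PortObject[] runBlocking(final Callable<PortObject[]> work) throws Exception {
    ThreadPool pool = ThreadPool.currentPool();
    if (pool != null) {
        return pool.runInvisible(work);
    }
    return work.call();
}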