Use of org.knime.core.node.port.PortObject in project knime-core by knime:
the class FileNodePersistor, method loadPorts.
/**
 * Loads the persisted output-port contents (specs, summaries, port objects / data tables) of the
 * given node from the saved settings, dispatching on the workflow file-format load version:
 * workflows older than version 2.0 use per-index settings keyed by the old port numbering, newer
 * workflows store one sub-settings entry per port with an explicit port directory.
 *
 * @param node the node whose output ports are being restored
 * @param exec monitor for progress reporting and cancellation
 * @param settings the node's persisted settings to read port information from
 * @param loadTblRep repository of already-loaded {@link BufferedDataTable}s (shared across the load)
 * @param tblRep repository of container tables — presumably shared across the workflow load; TODO confirm
 * @param fileStoreHandlerRepository repository used to resolve file-store-backed content
 * @throws IOException if port content cannot be read (including model ports of 1.x workflows)
 * @throws InvalidSettingsException if the settings are malformed (e.g. an out-of-range port index)
 * @throws CanceledExecutionException if the user cancels while loading
 * @noreference
 * @nooverride
 */
void loadPorts(final Node node, final ExecutionMonitor exec, final NodeSettingsRO settings, final Map<Integer, BufferedDataTable> loadTblRep, final HashMap<Integer, ContainerTable> tblRep, final FileStoreHandlerRepository fileStoreHandlerRepository) throws IOException, InvalidSettingsException, CanceledExecutionException {
final int nrOutPorts = node.getNrOutPorts();
if (getLoadVersion().isOlderThan(FileWorkflowPersistor.LoadVersion.V200)) {
// Pre-2.0 format: ports are stored under their old (pre flow-variable-port) indices.
// skip flow variables port (introduced in v2.2)
for (int i = 1; i < nrOutPorts; i++) {
// Map the current port index back to the index used in the old file format.
int oldIndex = getOldPortIndex(i);
ExecutionMonitor execPort = exec.createSubProgress(1.0 / nrOutPorts);
exec.setMessage("Port " + oldIndex);
PortType type = node.getOutputType(i);
// Only table ports can be restored from 1.x files; model ports cannot (see throw below).
boolean isDataPort = BufferedDataTable.class.isAssignableFrom(type.getPortObjectClass());
if (m_isConfigured) {
PortObjectSpec spec = loadPortObjectSpec(node, settings, oldIndex);
setPortObjectSpec(i, spec);
}
if (m_isExecuted) {
PortObject object;
if (isDataPort) {
object = loadBufferedDataTable(node, settings, execPort, loadTblRep, oldIndex, tblRep, fileStoreHandlerRepository);
} else {
// 1.x model ports have no loadable representation; the node must be re-executed.
throw new IOException("Can't restore model ports of " + "old 1.x workflows. Execute node again.");
}
// object may legitimately be null here; guard before asking for its summary.
String summary = object != null ? object.getSummary() : null;
setPortObject(i, object);
setPortObjectSummary(i, summary);
}
execPort.setProgress(1.0);
}
} else {
// 2.0+ format: one sub-settings entry per port, each naming its own data directory.
if (nrOutPorts == 1) {
// only the mandatory flow variable port
return;
}
NodeSettingsRO portsSettings = loadPortsSettings(settings);
exec.setMessage("Reading outport data");
for (String key : portsSettings.keySet()) {
NodeSettingsRO singlePortSetting = portsSettings.getNodeSettings(key);
ExecutionMonitor subProgress = exec.createSubProgress(1 / (double) nrOutPorts);
// The port index is stored explicitly in the settings, not derived from the key.
int index = loadPortIndex(singlePortSetting);
if (index < 0 || index >= nrOutPorts) {
throw new InvalidSettingsException("Invalid outport index in settings: " + index);
}
String portDirN = singlePortSetting.getString("port_dir_location");
// A null directory means no persisted content for this port; skip loading silently.
if (portDirN != null) {
ReferencedFile portDir = new ReferencedFile(getNodeDirectory(), portDirN);
subProgress.setMessage("Port " + index);
loadPort(node, portDir, singlePortSetting, subProgress, index, loadTblRep, tblRep, fileStoreHandlerRepository);
}
subProgress.setProgress(1.0);
}
}
}
Use of org.knime.core.node.port.PortObject in project knime-core by knime:
the class RandomForestClassificationLearnerNodeModel, method execute.
/**
 * {@inheritDoc}
 *
 * Learns a classification tree ensemble from the first input table, computes the
 * out-of-bag predictions and per-column statistics, and returns the out-of-bag table,
 * the column-statistics table and the learned model port object.
 */
@Override
protected PortObject[] execute(final PortObject[] inObjects, final ExecutionContext exec) throws Exception {
    final BufferedDataTable inTable = (BufferedDataTable) inObjects[0];
    final DataTableSpec inSpec = inTable.getDataTableSpec();
    // Restrict the table to the configured learn columns; the rearranger may carry a warning.
    final FilterLearnColumnRearranger columnFilter = m_configuration.filterLearnColumns(inSpec);
    String warningMsg = columnFilter.getWarning();
    final BufferedDataTable learnTable = exec.createColumnRearrangeTable(inTable, columnFilter, exec.createSubProgress(0.0));
    final DataTableSpec learnSpec = learnTable.getDataTableSpec();
    final TreeEnsembleModelPortObjectSpec ensembleSpec = m_configuration.createPortObjectSpec(learnSpec);
    // Classification requires a possible-values domain on the target column.
    final Map<String, DataCell> targetValueMap = ensembleSpec.getTargetColumnPossibleValueMap();
    if (targetValueMap == null) {
        throw new InvalidSettingsException("The target column does not " + "have possible values assigned. Most likely it " + "has too many different distinct values (learning an ID " + "column?) Fix it by preprocessing the table using " + "a \"Domain Calculator\".");
    }
    // Progress split: 10% data read-in, 80% learning, 10% out-of-bag prediction.
    final ExecutionMonitor readProgress = exec.createSubProgress(0.1);
    final ExecutionMonitor learnProgress = exec.createSubProgress(0.8);
    final ExecutionMonitor oobProgress = exec.createSubProgress(0.1);
    final TreeDataCreator dataCreator = new TreeDataCreator(m_configuration, learnSpec, learnTable.getRowCount());
    exec.setProgress("Reading data into memory");
    final TreeData data = dataCreator.readData(learnTable, m_configuration, readProgress);
    m_hiliteRowSample = dataCreator.getDataRowsForHilite();
    m_viewMessage = dataCreator.getViewMessage();
    // Append any read-in warning to the column-filter warning.
    final String dataWarning = dataCreator.getAndClearWarningMessage();
    if (dataWarning != null) {
        warningMsg = (warningMsg == null) ? dataWarning : (warningMsg + "\n" + dataWarning);
    }
    readProgress.setProgress(1.0);
    exec.setMessage("Learning trees");
    final TreeEnsembleLearner learner = new TreeEnsembleLearner(m_configuration, data);
    final TreeEnsembleModel model;
    try {
        model = learner.learnEnsemble(learnProgress);
    } catch (ExecutionException e) {
        // Unwrap the worker exception where possible so callers see the real cause.
        final Throwable cause = e.getCause();
        if (cause instanceof Exception) {
            throw (Exception) cause;
        }
        throw e;
    }
    final TreeEnsembleModelPortObject modelPortObject = new TreeEnsembleModelPortObject(ensembleSpec, model);
    learnProgress.setProgress(1.0);
    exec.setMessage("Out of bag prediction");
    // Predict on the full input table, filtered to out-of-bag rows per tree.
    final TreeEnsemblePredictor oobPredictor = createOutOfBagPredictor(ensembleSpec, modelPortObject, inSpec);
    oobPredictor.setOutofBagFilter(learner.getRowSamples(), data.getTargetColumn());
    final ColumnRearranger oobRearranger = oobPredictor.getPredictionRearranger();
    final BufferedDataTable outOfBagTable = exec.createColumnRearrangeTable(inTable, oobRearranger, oobProgress);
    final BufferedDataTable colStatsTable = learner.createColumnStatisticTable(exec.createSubExecutionContext(0.0));
    m_ensembleModelPortObject = modelPortObject;
    if (warningMsg != null) {
        setWarningMessage(warningMsg);
    }
    return new PortObject[] { outOfBagTable, colStatsTable, modelPortObject };
}
Use of org.knime.core.node.port.PortObject in project knime-core by knime:
the class TreeEnsembleClassificationLearnerNodeModel, method execute.
/**
 * {@inheritDoc}
 *
 * Learns a classification tree ensemble from the first input table, computes the
 * out-of-bag predictions and per-column statistics, and returns the out-of-bag table,
 * the column-statistics table and the learned model port object.
 */
@Override
protected PortObject[] execute(final PortObject[] inObjects, final ExecutionContext exec) throws Exception {
    final BufferedDataTable inTable = (BufferedDataTable) inObjects[0];
    final DataTableSpec inSpec = inTable.getDataTableSpec();
    // Restrict the table to the configured learn columns; the rearranger may carry a warning.
    final FilterLearnColumnRearranger columnFilter = m_configuration.filterLearnColumns(inSpec);
    String warningMsg = columnFilter.getWarning();
    final BufferedDataTable learnTable = exec.createColumnRearrangeTable(inTable, columnFilter, exec.createSubProgress(0.0));
    final DataTableSpec learnSpec = learnTable.getDataTableSpec();
    final TreeEnsembleModelPortObjectSpec ensembleSpec = m_configuration.createPortObjectSpec(learnSpec);
    // Classification requires a possible-values domain on the target column.
    final Map<String, DataCell> targetValueMap = ensembleSpec.getTargetColumnPossibleValueMap();
    if (targetValueMap == null) {
        throw new InvalidSettingsException("The target column does not " + "have possible values assigned. Most likely it " + "has too many different distinct values (learning an ID " + "column?) Fix it by preprocessing the table using " + "a \"Domain Calculator\".");
    }
    // Progress split: 10% data read-in, 80% learning, 10% out-of-bag prediction.
    final ExecutionMonitor readProgress = exec.createSubProgress(0.1);
    final ExecutionMonitor learnProgress = exec.createSubProgress(0.8);
    final ExecutionMonitor oobProgress = exec.createSubProgress(0.1);
    final TreeDataCreator dataCreator = new TreeDataCreator(m_configuration, learnSpec, learnTable.getRowCount());
    exec.setProgress("Reading data into memory");
    final TreeData data = dataCreator.readData(learnTable, m_configuration, readProgress);
    m_hiliteRowSample = dataCreator.getDataRowsForHilite();
    m_viewMessage = dataCreator.getViewMessage();
    // Append any read-in warning to the column-filter warning.
    final String dataWarning = dataCreator.getAndClearWarningMessage();
    if (dataWarning != null) {
        warningMsg = (warningMsg == null) ? dataWarning : (warningMsg + "\n" + dataWarning);
    }
    readProgress.setProgress(1.0);
    exec.setMessage("Learning trees");
    final TreeEnsembleLearner learner = new TreeEnsembleLearner(m_configuration, data);
    final TreeEnsembleModel model;
    try {
        model = learner.learnEnsemble(learnProgress);
    } catch (ExecutionException e) {
        // Unwrap the worker exception where possible so callers see the real cause.
        final Throwable cause = e.getCause();
        if (cause instanceof Exception) {
            throw (Exception) cause;
        }
        throw e;
    }
    final TreeEnsembleModelPortObject modelPortObject = new TreeEnsembleModelPortObject(ensembleSpec, model);
    learnProgress.setProgress(1.0);
    exec.setMessage("Out of bag prediction");
    // Predict on the full input table, filtered to out-of-bag rows per tree.
    final TreeEnsemblePredictor oobPredictor = createOutOfBagPredictor(ensembleSpec, modelPortObject, inSpec);
    oobPredictor.setOutofBagFilter(learner.getRowSamples(), data.getTargetColumn());
    final ColumnRearranger oobRearranger = oobPredictor.getPredictionRearranger();
    final BufferedDataTable outOfBagTable = exec.createColumnRearrangeTable(inTable, oobRearranger, oobProgress);
    final BufferedDataTable colStatsTable = learner.createColumnStatisticTable(exec.createSubExecutionContext(0.0));
    m_ensembleModelPortObject = modelPortObject;
    if (warningMsg != null) {
        setWarningMessage(warningMsg);
    }
    return new PortObject[] { outOfBagTable, colStatsTable, modelPortObject };
}
Use of org.knime.core.node.port.PortObject in project knime-core by knime:
the class RandomForestRegressionLearnerNodeModel, method execute.
/**
 * {@inheritDoc}
 *
 * Learns a regression tree ensemble from the first input table, computes the
 * out-of-bag predictions and per-column statistics, and returns the out-of-bag table,
 * the column-statistics table and the learned model port object.
 */
@Override
protected PortObject[] execute(final PortObject[] inObjects, final ExecutionContext exec) throws Exception {
    final BufferedDataTable inTable = (BufferedDataTable) inObjects[0];
    final DataTableSpec inSpec = inTable.getDataTableSpec();
    // Restrict the table to the configured learn columns; the rearranger may carry a warning.
    final FilterLearnColumnRearranger columnFilter = m_configuration.filterLearnColumns(inSpec);
    String warningMsg = columnFilter.getWarning();
    final BufferedDataTable learnTable = exec.createColumnRearrangeTable(inTable, columnFilter, exec.createSubProgress(0.0));
    final DataTableSpec learnSpec = learnTable.getDataTableSpec();
    final TreeEnsembleModelPortObjectSpec ensembleSpec = m_configuration.createPortObjectSpec(learnSpec);
    // Progress split: 10% data read-in, 80% learning, 10% out-of-bag prediction.
    final ExecutionMonitor readProgress = exec.createSubProgress(0.1);
    final ExecutionMonitor learnProgress = exec.createSubProgress(0.8);
    final ExecutionMonitor oobProgress = exec.createSubProgress(0.1);
    final TreeDataCreator dataCreator = new TreeDataCreator(m_configuration, learnSpec, learnTable.getRowCount());
    exec.setProgress("Reading data into memory");
    final TreeData data = dataCreator.readData(learnTable, m_configuration, readProgress);
    m_hiliteRowSample = dataCreator.getDataRowsForHilite();
    m_viewMessage = dataCreator.getViewMessage();
    // Append any read-in warning to the column-filter warning.
    final String dataWarning = dataCreator.getAndClearWarningMessage();
    if (dataWarning != null) {
        warningMsg = (warningMsg == null) ? dataWarning : (warningMsg + "\n" + dataWarning);
    }
    readProgress.setProgress(1.0);
    exec.setMessage("Learning trees");
    final TreeEnsembleLearner learner = new TreeEnsembleLearner(m_configuration, data);
    final TreeEnsembleModel model;
    try {
        model = learner.learnEnsemble(learnProgress);
    } catch (ExecutionException e) {
        // Unwrap the worker exception where possible so callers see the real cause.
        final Throwable cause = e.getCause();
        if (cause instanceof Exception) {
            throw (Exception) cause;
        }
        throw e;
    }
    final TreeEnsembleModelPortObject modelPortObject = new TreeEnsembleModelPortObject(ensembleSpec, model);
    learnProgress.setProgress(1.0);
    exec.setMessage("Out of bag prediction");
    // Predict on the full input table, filtered to out-of-bag rows per tree.
    final TreeEnsemblePredictor oobPredictor = createOutOfBagPredictor(ensembleSpec, modelPortObject, inSpec);
    oobPredictor.setOutofBagFilter(learner.getRowSamples(), data.getTargetColumn());
    final ColumnRearranger oobRearranger = oobPredictor.getPredictionRearranger();
    final BufferedDataTable outOfBagTable = exec.createColumnRearrangeTable(inTable, oobRearranger, oobProgress);
    final BufferedDataTable colStatsTable = learner.createColumnStatisticTable(exec.createSubExecutionContext(0.0));
    m_ensembleModelPortObject = modelPortObject;
    if (warningMsg != null) {
        setWarningMessage(warningMsg);
    }
    return new PortObject[] { outOfBagTable, colStatsTable, modelPortObject };
}
Use of org.knime.core.node.port.PortObject in project knime-core by knime:
the class TreeEnsembleShrinkerNodeModel, method execute.
/**
 * Shrinks the input tree ensemble to the configured (or automatically determined) size,
 * publishes the achieved prediction accuracy as a flow variable, and returns the result
 * converted to a PMML port object.
 */
@Override
protected PortObject[] execute(final PortObject[] inObjects, final ExecutionContext exec) throws Exception {
    final TreeEnsembleModelPortObject inPortObject = (TreeEnsembleModelPortObject) inObjects[0];
    final TreeEnsembleModel inEnsemble = inPortObject.getEnsembleModel();
    int targetSize = m_config.getResultSize(inEnsemble.getNrModels());
    boolean needsShrinking = true;
    if (!m_config.isResultSizeAutomatic()) {
        // Validate the manually configured result size against the input ensemble.
        if (targetSize < 1) {
            // Too small: clamp to a single tree.
            setWarningMessage("The configured result size is smaller than 1, defaulting to 1");
            targetSize = 1;
        } else if (targetSize > inEnsemble.getNrModels()) {
            // Too big: keep the input ensemble unchanged.
            setWarningMessage("The configured result size is bigger than the size of the input ensemble, defaulting to the input ensembles size");
            needsShrinking = false;
        } else if (targetSize == inEnsemble.getNrModels()) {
            // Already the requested size: nothing to do.
            needsShrinking = false;
        }
    }
    final TreeEnsembleModelPortObject resultEnsemble;
    // Skip the (potentially expensive) shrinking pass when the result would equal the input.
    if (needsShrinking) {
        final BufferedDataTable inData = (BufferedDataTable) inObjects[1];
        final TreeEnsembleShrinker shrinker = new TreeEnsembleShrinker(inEnsemble, inData, m_config.getTargetColumn(), exec);
        if (m_config.isResultSizeAutomatic()) {
            shrinker.autoShrink();
        } else {
            shrinker.shrinkTo(targetSize);
        }
        final TreeEnsembleModel shrunkEnsemble = shrinker.getModel();
        // Expose the achieved accuracy so downstream nodes can react to it.
        pushFlowVariableDouble("Tree Ensemble Shrinker Prediction Accuracy", shrinker.getAccuracy());
        resultEnsemble = new TreeEnsembleModelPortObject(inPortObject.getSpec(), shrunkEnsemble);
    } else {
        resultEnsemble = inPortObject;
    }
    // The node's single output is the PMML representation of the (possibly unshrunk) ensemble.
    final PMMLPortObject pmmlEnsemble = convertToPmmlEnsemble(resultEnsemble, exec);
    return new PortObject[] { pmmlEnsemble };
}
Aggregations