Use of org.knime.core.node.ExecutionContext in project knime-core by knime.
Class NumericOutliersIntervalsCalculator, method calculatePermittedIntervals.
/**
 * Calculates the permitted intervals.
 *
 * @param inTable the data table for which the outliers have to be detected
 * @param exec the execution context
 * @return the mapping between groups and the permitted intervals for each outlier column
 * @throws Exception if the execution failed, due to internal reasons or cancellation from the outside
 */
NumericOutliersModel calculatePermittedIntervals(final BufferedDataTable inTable, final ExecutionContext exec)
    throws Exception {
    // the quartile calculation progress
    final double quartilesProgress = 0.8;
    // the interval calculation progress
    final double intervalsProgress = 1 - quartilesProgress;
    // start the computation of the first and third quartile (and some additional stuff)
    exec.setMessage(STATISTICS_MSG);
    GroupByTable t;
    // an OutOfMemoryError may be thrown while initializing/cloning the aggregators; however, this is very unlikely
    try {
        ExecutionContext quartilesCalcExec = exec.createSubExecutionContext(quartilesProgress);
        t = getGroupByTable(inTable, quartilesCalcExec);
        quartilesCalcExec.setProgress(1.0);
    } catch (final OutOfMemoryError e) {
        throw new IllegalArgumentException(MEMORY_EXCEPTION, e);
    }
    // non-empty skipped groups indicate that the computation could not be carried out in memory
    if (!t.getSkippedGroupsByColName().isEmpty()) {
        throw new IllegalArgumentException(MEMORY_EXCEPTION);
    }
    // start the permitted interval calculation
    exec.setMessage(INTERVAL_MSG);
    // interval sub-execution context
    ExecutionContext intervalExec = exec.createSubExecutionContext(intervalsProgress);
    // calculate the permitted intervals and store them in the model
    final NumericOutliersModel model = calcPermittedIntervals(intervalExec, t.getBufferedTable());
    // update the progress and return the permitted intervals
    exec.setProgress(1);
    return model;
}
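A minimal sketch of the progress-splitting pattern used above, relying only on the public ExecutionContext/ExecutionMonitor API (createSubExecutionContext, setMessage, setProgress, checkCanceled); the class and method names are hypothetical and not part of knime-core.

import org.knime.core.node.CanceledExecutionException;
import org.knime.core.node.ExecutionContext;

/** Minimal sketch (not part of knime-core): splitting one progress bar between two phases. */
final class SubProgressSketch {

    /** Gives 80% of the parent progress to a hypothetical first phase and the rest to a second one. */
    static void runTwoPhases(final ExecutionContext exec) throws CanceledExecutionException {
        final double firstPhaseShare = 0.8;

        final ExecutionContext firstExec = exec.createSubExecutionContext(firstPhaseShare);
        firstExec.setMessage("Phase 1");
        firstExec.checkCanceled();      // honor cancellation from the outside
        // ... phase 1 work would go here ...
        firstExec.setProgress(1.0);     // finishing the sub-context moves the parent to ~80%

        final ExecutionContext secondExec = exec.createSubExecutionContext(1.0 - firstPhaseShare);
        secondExec.setMessage("Phase 2");
        secondExec.checkCanceled();
        // ... phase 2 work would go here ...
        secondExec.setProgress(1.0);

        exec.setProgress(1.0);          // overall progress complete
    }
}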
Use of org.knime.core.node.ExecutionContext in project knime-core by knime.
Class NewToOldTimeNodeModel, method createStreamableOperator.
/**
* {@inheritDoc}
*/
@Override
public StreamableOperator createStreamableOperator(final PartitionInfo partitionInfo,
    final PortObjectSpec[] inSpecs) throws InvalidSettingsException {
    return new StreamableOperator() {

        @Override
        public StreamableOperatorInternals saveInternals() {
            return null;
        }

        @Override
        public void runFinal(final PortInput[] inputs, final PortOutput[] outputs, final ExecutionContext exec)
            throws Exception {
            final RowInput in = (RowInput)inputs[0];
            final RowOutput out = (RowOutput)outputs[0];
            final DataTableSpec inSpec = in.getDataTableSpec();
            String[] includeList = m_colSelect.applyTo(inSpec).getIncludes();
            final int[] includeIndeces =
                Arrays.stream(includeList).mapToInt(s -> inSpec.findColumnIndex(s)).toArray();
            DataRow row;
            while ((row = in.poll()) != null) {
                exec.checkCanceled();
                DataCell[] datacells = new DataCell[includeIndeces.length];
                for (int i = 0; i < includeIndeces.length; i++) {
                    if (m_isReplaceOrAppend.getStringValue().equals(OPTION_REPLACE)) {
                        final DataColumnSpecCreator dataColumnSpecCreator =
                            new DataColumnSpecCreator(includeList[i], DateAndTimeCell.TYPE);
                        final ConvertTimeCellFactory cellFac =
                            new ConvertTimeCellFactory(dataColumnSpecCreator.createSpec(), includeIndeces[i]);
                        datacells[i] = cellFac.getCells(row)[0];
                    } else {
                        final DataColumnSpec dataColSpec = new UniqueNameGenerator(inSpec)
                            .newColumn(includeList[i] + m_suffix.getStringValue(), DateAndTimeCell.TYPE);
                        final ConvertTimeCellFactory cellFac =
                            new ConvertTimeCellFactory(dataColSpec, includeIndeces[i]);
                        datacells[i] = cellFac.getCells(row)[0];
                    }
                }
                if (m_isReplaceOrAppend.getStringValue().equals(OPTION_REPLACE)) {
                    out.push(new ReplacedColumnsDataRow(row, datacells, includeIndeces));
                } else {
                    out.push(new AppendedColumnRow(row, datacells));
                }
            }
            in.close();
            out.close();
        }
    };
}
Use of org.knime.core.node.ExecutionContext in project knime-core by knime.
Class DateTimeToStringNodeModel, method createStreamableOperator.
/**
* {@inheritDoc}
*/
@Override
public StreamableOperator createStreamableOperator(final PartitionInfo partitionInfo,
    final PortObjectSpec[] inSpecs) throws InvalidSettingsException {
    return new StreamableOperator() {

        @Override
        public void runFinal(final PortInput[] inputs, final PortOutput[] outputs, final ExecutionContext exec)
            throws Exception {
            final RowInput in = (RowInput)inputs[0];
            final RowOutput out = (RowOutput)outputs[0];
            final DataTableSpec inSpec = in.getDataTableSpec();
            final String[] includeList = m_colSelect.applyTo(inSpec).getIncludes();
            final int[] includeIndeces =
                Arrays.stream(includeList).mapToInt(s -> inSpec.findColumnIndex(s)).toArray();
            final boolean isReplace = m_isReplaceOrAppend.getStringValue().equals(OPTION_REPLACE);
            DataRow row;
            while ((row = in.poll()) != null) {
                exec.checkCanceled();
                DataCell[] datacells = new DataCell[includeIndeces.length];
                for (int i = 0; i < includeIndeces.length; i++) {
                    if (isReplace) {
                        final DataColumnSpecCreator dataColumnSpecCreator =
                            new DataColumnSpecCreator(includeList[i], StringCell.TYPE);
                        final TimeToStringCellFactory cellFac =
                            new TimeToStringCellFactory(dataColumnSpecCreator.createSpec(), includeIndeces[i]);
                        datacells[i] = cellFac.getCell(row);
                    } else {
                        final DataColumnSpec dataColSpec = new UniqueNameGenerator(inSpec)
                            .newColumn(includeList[i] + m_suffix.getStringValue(), StringCell.TYPE);
                        final TimeToStringCellFactory cellFac =
                            new TimeToStringCellFactory(dataColSpec, includeIndeces[i]);
                        datacells[i] = cellFac.getCell(row);
                    }
                }
                if (isReplace) {
                    out.push(new ReplacedColumnsDataRow(row, datacells, includeIndeces));
                } else {
                    out.push(new AppendedColumnRow(row, datacells));
                }
            }
            in.close();
            out.close();
        }
    };
}
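Both converter operators above follow the same row-by-row streaming skeleton. A minimal sketch of that skeleton, assuming only the standard streamable API (RowInput.poll, RowOutput.push, ExecutionContext.checkCanceled); the class name is hypothetical, and the sketch simply forwards rows instead of converting cells.

import org.knime.core.data.DataRow;
import org.knime.core.node.ExecutionContext;
import org.knime.core.node.streamable.PortInput;
import org.knime.core.node.streamable.PortOutput;
import org.knime.core.node.streamable.RowInput;
import org.knime.core.node.streamable.RowOutput;
import org.knime.core.node.streamable.StreamableOperator;

/** Minimal sketch (not knime-core code): the row-by-row skeleton shared by the operators above. */
final class RowStreamingSketch extends StreamableOperator {

    @Override
    public void runFinal(final PortInput[] inputs, final PortOutput[] outputs, final ExecutionContext exec)
        throws Exception {
        final RowInput in = (RowInput)inputs[0];
        final RowOutput out = (RowOutput)outputs[0];
        DataRow row;
        while ((row = in.poll()) != null) {   // null signals that the input is exhausted
            exec.checkCanceled();             // honor user cancellation between rows
            out.push(row);                    // a real operator would transform the row here
        }
        in.close();
        out.close();
    }
}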
Use of org.knime.core.node.ExecutionContext in project knime-core by knime.
Class TreeEnsembleClassificationPredictorNodeModel, method createStreamableOperator.
/**
* {@inheritDoc}
*/
@Override
public StreamableOperator createStreamableOperator(final PartitionInfo partitionInfo,
    final PortObjectSpec[] inSpecs) throws InvalidSettingsException {
    return new StreamableOperator() {

        @Override
        public void runFinal(final PortInput[] inputs, final PortOutput[] outputs, final ExecutionContext exec)
            throws Exception {
            TreeEnsembleModelPortObject model =
                (TreeEnsembleModelPortObject)((PortObjectInput)inputs[0]).getPortObject();
            TreeEnsembleModelPortObjectSpec modelSpec = model.getSpec();
            DataTableSpec dataSpec = (DataTableSpec)inSpecs[1];
            final TreeEnsemblePredictor pred = new TreeEnsemblePredictor(modelSpec, model, dataSpec, m_configuration);
            ColumnRearranger rearranger = pred.getPredictionRearranger();
            StreamableFunction func = rearranger.createStreamableFunction(1, 0);
            func.runFinal(inputs, outputs, exec);
        }
    };
}
Use of org.knime.core.node.ExecutionContext in project knime-core by knime.
Class TreeEnsembleRegressionPredictorNodeModel, method createStreamableOperator.
/**
* {@inheritDoc}
*/
@Override
public StreamableOperator createStreamableOperator(final PartitionInfo partitionInfo,
    final PortObjectSpec[] inSpecs) throws InvalidSettingsException {
    return new StreamableOperator() {

        @Override
        public void runFinal(final PortInput[] inputs, final PortOutput[] outputs, final ExecutionContext exec)
            throws Exception {
            TreeEnsembleModelPortObject model =
                (TreeEnsembleModelPortObject)((PortObjectInput)inputs[0]).getPortObject();
            TreeEnsembleModelPortObjectSpec modelSpec = model.getSpec();
            DataTableSpec dataSpec = (DataTableSpec)inSpecs[1];
            final TreeEnsemblePredictor pred = new TreeEnsemblePredictor(modelSpec, model, dataSpec, m_configuration);
            ColumnRearranger rearranger = pred.getPredictionRearranger();
            StreamableFunction func = rearranger.createStreamableFunction(1, 0);
            func.runFinal(inputs, outputs, exec);
        }
    };
}
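Both predictor snippets delegate the actual row processing to a StreamableFunction created from a ColumnRearranger. A minimal sketch of how such a function might be obtained, assuming the standard ColumnRearranger/SingleCellFactory API; the class, column name, and constant cell are hypothetical, and the port indices mirror the createStreamableFunction(1, 0) call above (model object at port 0, data at port 1).

import org.knime.core.data.DataCell;
import org.knime.core.data.DataColumnSpec;
import org.knime.core.data.DataColumnSpecCreator;
import org.knime.core.data.DataRow;
import org.knime.core.data.DataTableSpec;
import org.knime.core.data.container.ColumnRearranger;
import org.knime.core.data.container.SingleCellFactory;
import org.knime.core.data.def.StringCell;
import org.knime.core.node.streamable.StreamableFunction;

/** Minimal sketch (not knime-core code): turning a ColumnRearranger into a StreamableFunction. */
final class RearrangerSketch {

    /** Appends a placeholder string column; a real predictor derives the cell from the model and the row. */
    static StreamableFunction createFunction(final DataTableSpec inSpec) {
        final DataColumnSpec newCol =
            new DataColumnSpecCreator("Prediction", StringCell.TYPE).createSpec();
        final ColumnRearranger rearranger = new ColumnRearranger(inSpec);
        rearranger.append(new SingleCellFactory(newCol) {
            @Override
            public DataCell getCell(final DataRow row) {
                return new StringCell("value");   // placeholder cell for the sketch
            }
        });
        // data input arrives at port 1, the single table output is port 0, as in the predictor nodes above
        return rearranger.createStreamableFunction(1, 0);
    }
}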