Example usage of org.knime.core.node.BufferedDataContainer in the knime-core project (by KNIME).
Taken from the class NumericRowSplitterNodeModel, method execute.
/**
 * {@inheritDoc}
 */
@Override
protected BufferedDataTable[] execute(final BufferedDataTable[] inData, final ExecutionContext exec) throws Exception {
    DataTableSpec inSpec = inData[0].getDataTableSpec();
    // Two output containers with the unchanged input spec: rows matching the
    // numeric condition go to the first port, all others to the second.
    BufferedDataContainer buf1 = exec.createDataContainer(inSpec);
    BufferedDataContainer buf2 = exec.createDataContainer(inSpec);
    final int columnIndex = inSpec.findColumnIndex(m_columnSelection.getStringValue());
    long count = 0;
    final long nrRows = inData[0].size();
    try {
        for (DataRow row : inData[0]) {
            if (matches(row.getCell(columnIndex))) {
                buf1.addRowToTable(row);
            } else {
                buf2.addRowToTable(row);
            }
            // checkCanceled() may throw CanceledExecutionException; the
            // containers are closed in the finally block below so they are
            // not leaked on cancellation (same pattern as PartitionNodeModel).
            exec.checkCanceled();
            exec.setProgress(count / (double) nrRows, "Added row " + (++count) + "/" + nrRows + " (\"" + row.getKey() + "\")");
        }
    } finally {
        buf1.close();
        buf2.close();
    }
    BufferedDataTable outData1 = buf1.getTable();
    BufferedDataTable outData2 = buf2.getTable();
    return new BufferedDataTable[] { outData1, outData2 };
}
Example usage of org.knime.core.node.BufferedDataContainer in the knime-core project (by KNIME).
Taken from the class Normalizer3NodeModel, method calculate.
/**
 * Creates the new normalized {@link org.knime.core.data.DataTable}; which
 * normalization is applied depends on the configured mode (min/max, z-score,
 * or decimal scaling).
 *
 * @param inData The input data.
 * @param exec For BufferedDataTable creation and progress.
 * @return the result of the calculation
 * @throws Exception If the node calculation fails for any reason.
 */
protected CalculationResult calculate(final PortObject[] inData, final ExecutionContext exec) throws Exception {
    BufferedDataTable inTable = (BufferedDataTable) inData[0];
    DataTableSpec inSpec = inTable.getSpec();
    // extract selected numeric columns
    String[] includedColumns = getIncludedComlumns(inSpec);
    Normalizer2 ntable = new Normalizer2(inTable, includedColumns);
    long rowcount = inTable.size();
    // Preparation (computing the affine transformation) gets 30% of the
    // overall progress; the row-copy loop at the end gets the remaining 70%.
    ExecutionContext prepareExec = exec.createSubExecutionContext(0.3);
    AffineTransTable outTable;
    // Only min/max normalization yields exactly known output bounds, so only
    // then can the output domain be pinned to [min, max] below.
    boolean fixDomainBounds = false;
    switch(m_config.getMode()) {
        case MINMAX:
            fixDomainBounds = true;
            outTable = ntable.doMinMaxNorm(m_config.getMax(), m_config.getMin(), prepareExec);
            break;
        case Z_SCORE:
            outTable = ntable.doZScoreNorm(prepareExec);
            break;
        case DECIMALSCALING:
            outTable = ntable.doDecimalScaling(prepareExec);
            break;
        default:
            throw new InvalidSettingsException("No mode set");
    }
    if (outTable.getErrorMessage() != null) {
        // something went wrong, report and throw an exception
        throw new Exception(outTable.getErrorMessage());
    }
    if (ntable.getErrorMessage() != null) {
        // something went wrong during initialization, report (non-fatal).
        setWarningMessage(ntable.getErrorMessage());
    }
    // Model spec contains only the normalized columns; used by the output model.
    DataTableSpec modelSpec = FilterColumnTable.createFilterTableSpec(inSpec, includedColumns);
    AffineTransConfiguration configuration = outTable.getConfiguration();
    DataTableSpec spec = outTable.getDataTableSpec();
    // For min/max mode, overwrite the domain of each normalized column with the
    // configured [min, max] bounds (the transformation itself is not guaranteed
    // to snap the computed domain exactly to min/max).
    if (fixDomainBounds) {
        DataColumnSpec[] newColSpecs = new DataColumnSpec[spec.getNumColumns()];
        for (int i = 0; i < newColSpecs.length; i++) {
            newColSpecs[i] = spec.getColumnSpec(i);
        }
        for (int i = 0; i < includedColumns.length; i++) {
            int index = spec.findColumnIndex(includedColumns[i]);
            DataColumnSpecCreator creator = new DataColumnSpecCreator(newColSpecs[index]);
            DataColumnDomainCreator domCreator = new DataColumnDomainCreator(newColSpecs[index].getDomain());
            domCreator.setLowerBound(new DoubleCell(m_config.getMin()));
            domCreator.setUpperBound(new DoubleCell(m_config.getMax()));
            creator.setDomain(domCreator.createDomain());
            newColSpecs[index] = creator.createSpec();
        }
        spec = new DataTableSpec(spec.getName(), newColSpecs);
    }
    // Materialize the (lazily computed) AffineTransTable into a BufferedDataTable.
    ExecutionMonitor normExec = exec.createSubProgress(.7);
    BufferedDataContainer container = exec.createDataContainer(spec);
    long count = 1;
    for (DataRow row : outTable) {
        normExec.checkCanceled();
        normExec.setProgress(count / (double) rowcount, "Normalizing row no. " + count + " of " + rowcount + " (\"" + row.getKey() + "\")");
        container.addRowToTable(row);
        count++;
    }
    container.close();
    return new CalculationResult(container.getTable(), modelSpec, configuration);
}
Example usage of org.knime.core.node.BufferedDataContainer in the knime-core project (by KNIME).
Taken from the class MemoryGroupByTable, method createResultTable.
/**
 * Writes one result row per group (group values first, then one aggregation
 * result per column aggregator) into a new table with the given spec.
 *
 * @param exec for table creation, progress reporting and cancellation checks
 * @param resultSpec spec of the result table (group columns + aggregation columns)
 * @return the materialized group-by result table
 * @throws CanceledExecutionException if the user canceled the execution
 */
private BufferedDataTable createResultTable(final ExecutionContext exec, final DataTableSpec resultSpec) throws CanceledExecutionException {
    final BufferedDataContainer dc = exec.createDataContainer(resultSpec);
    int groupCounter = 0;
    final int size = m_vals.size();
    try {
        for (final Entry<GroupKey, ColumnAggregator[]> entry : m_vals.entrySet()) {
            // may throw CanceledExecutionException; dc is closed in finally below
            exec.checkCanceled();
            exec.setProgress(groupCounter / (double) size, "Writing group " + groupCounter + " of " + size);
            final GroupKey groupVals = entry.getKey();
            final ColumnAggregator[] colAggregators = entry.getValue();
            final RowKey rowKey = RowKey.createRowKey(groupCounter++);
            final DataCell[] rowVals = new DataCell[groupVals.size() + colAggregators.length];
            // add the group values first
            int valIdx = 0;
            for (final DataCell groupCell : groupVals.getGroupVals()) {
                rowVals[valIdx++] = groupCell;
            }
            // add the aggregation values
            for (final ColumnAggregator colAggr : colAggregators) {
                final AggregationOperator operator = colAggr.getOperator(getGlobalSettings());
                rowVals[valIdx++] = operator.getResult();
                if (operator.isSkipped()) {
                    // add skipped groups and the column that causes the skipping
                    // into the skipped groups map
                    addSkippedGroup(colAggr.getOriginalColName(), operator.getSkipMessage(), groupVals.getGroupVals());
                }
                // reset the operator for the next group
                operator.reset();
            }
            final DataRow newRow = new DefaultRow(rowKey, rowVals);
            dc.addRowToTable(newRow);
            // add hilite mappings if enabled
            if (isEnableHilite()) {
                final Set<RowKey> oldKeys = m_rowKeys.get(groupVals);
                addHiliteMapping(rowKey, oldKeys);
            }
        }
    } finally {
        // close even on cancellation so the container is not leaked
        // (same pattern as PartitionNodeModel#execute)
        dc.close();
    }
    return dc.getTable();
}
Example usage of org.knime.core.node.BufferedDataContainer in the knime-core project (by KNIME).
Taken from the class PartitionNodeModel, method execute.
/**
 * {@inheritDoc}
 */
// NOTE(review): the declared CanceledExecutionException is redundant (already
// covered by Exception) but kept to preserve the exact signature.
@Override
protected BufferedDataTable[] execute(final BufferedDataTable[] inData, final ExecutionContext exec) throws CanceledExecutionException, Exception {
    BufferedDataTable in = inData[0];
    BufferedDataTable[] outs = new BufferedDataTable[2];
    IRowFilter filter = getSamplingRowFilter(in, exec);
    // two containers with the unchanged input spec: sampled rows -> first port,
    // remaining rows -> second port
    BufferedDataContainer firstOutCont = exec.createDataContainer(in.getDataTableSpec());
    BufferedDataContainer secondOutCont = exec.createDataContainer(in.getDataTableSpec());
    // floating point op. below
    final double rowCount = in.size();
    // The filter can signal a definitive decision for ALL remaining rows by
    // throwing one of two control-flow exceptions; one of these flags is set
    // when that happens, so filter.matches() is no longer consulted afterwards.
    boolean putRestInOut1 = false;
    boolean putRestInOut2 = false;
    try {
        int count = 0;
        for (DataRow row : in) {
            boolean matches = putRestInOut1;
            try {
                // conditional check, will call "matches" only if necessary:
                // skipped entirely once either flag has been set
                matches |= (!putRestInOut2 && filter.matches(row, count));
            } catch (IncludeFromNowOn icf) {
                // filter decided: this row and every following row is included
                assert !putRestInOut2;
                putRestInOut1 = true;
                matches = true;
            } catch (EndOfTableException ete) {
                // filter decided: this row and every following row is excluded
                assert !putRestInOut1;
                putRestInOut2 = true;
                matches = false;
            }
            if (matches) {
                firstOutCont.addRowToTable(row);
            } else {
                secondOutCont.addRowToTable(row);
            }
            exec.setProgress(count / rowCount, "Processed row " + count + " (\"" + row.getKey() + "\")");
            exec.checkCanceled();
            count++;
        }
    } finally {
        // always close the containers, even on cancellation
        firstOutCont.close();
        secondOutCont.close();
    }
    outs[0] = firstOutCont.getTable();
    outs[1] = secondOutCont.getTable();
    // warn if stratified sampling produced fewer rows than there are classes
    // (some classes then cannot be represented in the sample)
    if (filter instanceof StratifiedSamplingRowFilter) {
        int classCount = ((StratifiedSamplingRowFilter) filter).getClassCount();
        if (classCount > outs[0].size()) {
            setWarningMessage("Class column contains more classes (" + classCount + ") than sampled rows (" + outs[0].size() + ")");
        }
    }
    return outs;
}
Example usage of org.knime.core.node.BufferedDataContainer in the knime-core project (by KNIME).
Taken from the class RowFilter2PortNodeModel, method execute.
/**
 * {@inheritDoc}
 */
@Override
protected BufferedDataTable[] execute(final BufferedDataTable[] inData, final ExecutionContext exec) throws Exception {
    final BufferedDataTable table = inData[0];
    // If the node was configured, the workflow closed (and saved), the row
    // filter is not configured after reloading; let it configure itself here
    // (e.g. to find its column index again).
    m_rowFilter.configure(table.getDataTableSpec());
    // one container per output port: matching rows and missed rows
    final BufferedDataContainer matchCont = exec.createDataContainer(table.getDataTableSpec());
    final BufferedDataContainer missCont = exec.createDataContainer(table.getDataTableSpec());
    final RowInput input = new DataTableRowInput(table);
    final RowOutput matchOut = new BufferedDataTableRowOutput(matchCont);
    final RowOutput missOut = new BufferedDataTableRowOutput(missCont);
    // delegate the actual filtering to the streaming implementation;
    // both output tables are closed inside that private execute method
    this.execute(input, matchOut, missOut, table.size(), exec);
    return new BufferedDataTable[] { matchCont.getTable(), missCont.getTable() };
}
Aggregations