Use of org.knime.core.node.BufferedDataTable in project knime-core by knime.
The class SimpleStreamableFunctionNodeModel, method execute.
/**
* {@inheritDoc}
*/
@Override
protected BufferedDataTable[] execute(final BufferedDataTable[] inData, final ExecutionContext exec) throws Exception {
    BufferedDataTable in = inData[0];
    ColumnRearranger r = createColumnRearranger(in.getDataTableSpec());
    BufferedDataTable out = exec.createColumnRearrangeTable(in, r, exec);
    return new BufferedDataTable[] { out };
}
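The actual transformation is supplied by createColumnRearranger, which subclasses of SimpleStreamableFunctionNodeModel override. Below is a minimal sketch of such an override that appends an upper-cased copy of a string column; the input column name "text" and the appended column's name are assumptions made for illustration, not part of the original snippet.

@Override
protected ColumnRearranger createColumnRearranger(final DataTableSpec spec) throws InvalidSettingsException {
    ColumnRearranger rearranger = new ColumnRearranger(spec);
    // hypothetical input column; a real node would read the name from its dialog settings
    final int colIndex = spec.findColumnIndex("text");
    DataColumnSpec appended = new DataColumnSpecCreator("text (upper case)", StringCell.TYPE).createSpec();
    rearranger.append(new SingleCellFactory(appended) {
        @Override
        public DataCell getCell(final DataRow row) {
            DataCell cell = row.getCell(colIndex);
            if (cell.isMissing()) {
                return DataType.getMissingCell();
            }
            return new StringCell(((StringValue) cell).getStringValue().toUpperCase());
        }
    });
    return rearranger;
}

Because the whole node is expressed as a single ColumnRearranger, the base class can also run it row by row in streaming mode without any additional code in execute.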
Use of org.knime.core.node.BufferedDataTable in project knime-core by knime.
The class JoinedTable, method load.
/**
* Method being called when the workflow is restored and the table shall be
* recreated.
* @param s The settings object, contains the table IDs.
* @param spec The final spec.
* @param tblRep The table repository.
* @return The restored table.
* @throws InvalidSettingsException If the settings can't be read.
*/
public static JoinedTable load(final NodeSettingsRO s, final DataTableSpec spec, final Map<Integer, BufferedDataTable> tblRep) throws InvalidSettingsException {
    NodeSettingsRO subSettings = s.getNodeSettings(CFG_INTERNAL_META);
    int leftID = subSettings.getInt(CFG_LEFT_TABLE_ID);
    int rightID = subSettings.getInt(CFG_RIGHT_TABLE_ID);
    BufferedDataTable leftTable = BufferedDataTable.getDataTable(tblRep, leftID);
    BufferedDataTable rightTable = BufferedDataTable.getDataTable(tblRep, rightID);
    return new JoinedTable(leftTable, rightTable, spec);
}
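The settings read here are expected to mirror what was written when the workflow was saved. A minimal sketch of that writing side, assuming the two table IDs are passed in by the caller (the actual persistence code of JoinedTable may differ):

private static void save(final NodeSettingsWO s, final int leftId, final int rightId) {
    // write the IDs under the same keys that load() reads back
    NodeSettingsWO subSettings = s.addNodeSettings(CFG_INTERNAL_META);
    subSettings.addInt(CFG_LEFT_TABLE_ID, leftId);
    subSettings.addInt(CFG_RIGHT_TABLE_ID, rightId);
}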
Use of org.knime.core.node.BufferedDataTable in project knime-core by knime.
The class TableContentModel, method setDataTableIntern.
/**
* Sets new data for this table. The table argument may be
* <code>null</code> to indicate invalid data (nothing displayed).
*/
private void setDataTableIntern(final DataTable originalData, final DataTable data, final TableSortOrder sortOrder) {
    assert SwingUtilities.isEventDispatchThread();
    if (m_data == data) {
        // do not start event storm
        return;
    }
    boolean clearOldTable = m_tableSortOrder != null;
    if (m_tableSorterWorker != null) {
        m_tableSorterWorker.cancel(true);
        m_tableSorterWorker = null;
    }
    m_tableSortOrder = sortOrder;
    cancelRowCountingInBackground();
    int oldColCount = getColumnCount();
    int newColCount = data != null ? data.getDataTableSpec().getNumColumns() : 0;
    int oldRowCount = getRowCount();
    DataTable oldData = m_data;
    m_originalUnsortedTable = originalData;
    m_data = data;
    m_cachedRows = null;
    m_hilitSet = null;
    if (m_iterator instanceof CloseableRowIterator) {
        ((CloseableRowIterator) m_iterator).close();
    }
    m_iterator = null;
    m_rowCountOfInterestInIterator = 0;
    m_rowCountOfInterest = 0;
    m_maxRowCount = 0;
    cancelRowCountingInBackground();
    m_isMaxRowCountFinal = true;
    m_isRowCountOfInterestFinal = true;
    boolean structureChanged = oldColCount != newColCount;
    if (oldColCount == newColCount) {
        if (oldRowCount > 0) {
            fireTableRowsDeleted(0, oldRowCount - 1);
        }
        if (newColCount > 0) {
            structureChanged = !data.getDataTableSpec().equalStructure(oldData.getDataTableSpec());
        }
    }
    if (data != null) {
        // new data available, release old stuff
        // assume that there are rows, may change in cacheNextRow() below
        m_isMaxRowCountFinal = false;
        m_isRowCountOfInterestFinal = false;
        final long rowCountFromTable;
        if (data instanceof BufferedDataTable) {
            rowCountFromTable = ((BufferedDataTable) data).size();
        } else if (data instanceof ContainerTable) {
            rowCountFromTable = ((ContainerTable) data).size();
        } else {
            // unknown
            rowCountFromTable = -1;
        }
        if (rowCountFromTable >= 0) {
            m_isMaxRowCountFinal = true;
            if (rowCountFromTable > Integer.MAX_VALUE) {
                NodeLogger.getLogger(getClass()).warn("Table view will show only the first " + Integer.MAX_VALUE + " rows of " + rowCountFromTable + ".");
                m_maxRowCount = Integer.MAX_VALUE;
            } else {
                m_maxRowCount = (int) rowCountFromTable;
            }
            if (!m_tableFilter.performsFiltering()) {
                m_rowCountOfInterest = m_maxRowCount;
                m_isRowCountOfInterestFinal = true;
            }
        }
        int cacheSize = getCacheSize();
        m_cachedRows = new DataRow[cacheSize];
        m_hilitSet = new BitSet(cacheSize);
        // will instantiate a new iterator.
        clearCache();
        // will also set m_isRowCountOfInterestFinal etc. accordingly
        cacheNextRow();
    }
    if (structureChanged) {
        // notify listeners
        fireTableStructureChanged();
    } else {
        int newRowCount = getRowCount();
        if (newRowCount > 0) {
            fireTableRowsInserted(0, newRowCount);
        }
    }
    m_propertySupport.firePropertyChange(PROPERTY_DATA, oldData, m_data);
    if (clearOldTable && oldData instanceof ContainerTable) {
        ((ContainerTable) oldData).clear();
    }
}
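Because the method asserts that it runs on the Event Dispatch Thread, callers that compute a table on a worker thread should hand it over via the EDT. A small sketch using the public setDataTable method; the field name m_contentModel is an assumption for illustration:

void updateView(final BufferedDataTable table) {
    if (SwingUtilities.isEventDispatchThread()) {
        m_contentModel.setDataTable(table);
    } else {
        // defer to the EDT so the assertion in setDataTableIntern() holds
        SwingUtilities.invokeLater(() -> m_contentModel.setDataTable(table));
    }
}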
Use of org.knime.core.node.BufferedDataTable in project knime-core by knime.
The class TreeDataCreator, method readData.
/**
* Reads the data from <b>learnData</b> into memory.
* Each column is represented by a TreeColumnData object corresponding to its type
* and whether it is an attribute or a target column.
*
* @param learnData the table holding the learning data
* @param configuration the tree ensemble learner configuration
* @param exec monitor for progress reporting and cancellation
* @return the TreeData object that holds all data in memory
* @throws CanceledExecutionException if the execution is canceled
*/
public TreeData readData(final BufferedDataTable learnData, final TreeEnsembleLearnerConfiguration configuration, final ExecutionMonitor exec) throws CanceledExecutionException {
    if (learnData.size() <= 1) {
        throw new IllegalArgumentException("The input table must contain at least 2 rows!");
    }
    int index = 0;
    final long nrRows = learnData.size();
    final int nrLearnCols = m_attrColCreators.length;
    final boolean[] supportMissings = new boolean[nrLearnCols];
    for (int i = 0; i < nrLearnCols; i++) {
        supportMissings[i] = m_attrColCreators[i].acceptsMissing();
    }
    int rejectedMissings = 0;
    final int nrHilitePatterns = m_configuration.getNrHilitePatterns();
    // sort learnData according to the target column to enable equal size sampling
    final int targetColIdx = learnData.getDataTableSpec().findColumnIndex(m_configuration.getTargetColumn());
    Comparator<DataCell> targetComp = learnData.getDataTableSpec().getColumnSpec(targetColIdx).getType().getComparator();
    DataTableSorter sorter = new DataTableSorter(learnData, learnData.size(), new Comparator<DataRow>() {
        @Override
        public int compare(final DataRow arg0, final DataRow arg1) {
            return targetComp.compare(arg0.getCell(targetColIdx), arg1.getCell(targetColIdx));
        }
    });
    final ExecutionMonitor sortExec = exec.createSubProgress(0.5);
    final DataTable sortedTable = sorter.sort(sortExec);
    final ExecutionMonitor readExec = exec.createSubProgress(0.5);
    for (DataRow r : sortedTable) {
        double progress = index / (double) nrRows;
        readExec.setProgress(progress, "Row " + index + " of " + nrRows + " (\"" + r.getKey() + "\")");
        readExec.checkCanceled();
        boolean shouldReject = false;
        for (int i = 0; i < nrLearnCols; i++) {
            DataCell c = r.getCell(i);
            if (c.isMissing() && !supportMissings[i]) {
                shouldReject = true;
                break;
            }
        }
        DataCell targetCell = r.getCell(nrLearnCols);
        if (targetCell.isMissing()) {
            shouldReject = true;
        }
        if (shouldReject) {
            rejectedMissings += 1;
            continue;
        }
        if (index < nrHilitePatterns) {
            m_dataRowsForHiliteContainer.addRowToTable(r);
        }
        final RowKey key = r.getKey();
        for (int i = 0; i < nrLearnCols; i++) {
            DataCell c = r.getCell(i);
            m_attrColCreators[i].add(key, c);
        }
        m_targetColCreator.add(key, targetCell);
        index++;
    }
    if (nrHilitePatterns > 0 && index > nrHilitePatterns) {
        m_viewMessage = "Hilite (& color graphs) are based on a subset of " + "the data (" + nrHilitePatterns + "/" + index + ")";
    }
    if (rejectedMissings > 0) {
        StringBuffer warnMsgBuilder = new StringBuffer();
        warnMsgBuilder.append(rejectedMissings).append("/");
        warnMsgBuilder.append(learnData.size());
        warnMsgBuilder.append(" row(s) were ignored because they ");
        warnMsgBuilder.append("contain missing values.");
        m_warningMessage = warnMsgBuilder.toString();
    }
    CheckUtils.checkArgument(rejectedMissings < learnData.size(), "No rows left after removing missing values (table has %d row(s))", learnData.size());
    int nrLearnAttributes = 0;
    for (int i = 0; i < m_attrColCreators.length; i++) {
        nrLearnAttributes += m_attrColCreators[i].getNrAttributes();
    }
    TreeAttributeColumnData[] columns = new TreeAttributeColumnData[nrLearnAttributes];
    int learnAttributeIndex = 0;
    for (int i = 0; i < m_attrColCreators.length; i++) {
        TreeAttributeColumnDataCreator creator = m_attrColCreators[i];
        for (int a = 0; a < creator.getNrAttributes(); a++) {
            final TreeAttributeColumnData columnData = creator.createColumnData(a, configuration);
            columnData.getMetaData().setAttributeIndex(learnAttributeIndex);
            columns[learnAttributeIndex++] = columnData;
        }
    }
    TreeTargetColumnData targetCol = m_targetColCreator.createColumnData();
    return new TreeData(columns, targetCol, m_treeType);
}
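The sorting step above is worth isolating: DataTableSorter orders rows with an arbitrary Comparator&lt;DataRow&gt;, here built from the target column's own type comparator. A stripped-down sketch of that pattern outside the tree-learning context (method and parameter names are illustrative):

DataTable sortByColumn(final BufferedDataTable table, final String columnName, final ExecutionMonitor exec)
        throws CanceledExecutionException {
    final int colIdx = table.getDataTableSpec().findColumnIndex(columnName);
    final Comparator<DataCell> comp = table.getDataTableSpec().getColumnSpec(colIdx).getType().getComparator();
    // compare rows by the chosen column only
    DataTableSorter sorter = new DataTableSorter(table, table.size(),
        (r1, r2) -> comp.compare(r1.getCell(colIdx), r2.getCell(colIdx)));
    return sorter.sort(exec);
}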
Use of org.knime.core.node.BufferedDataTable in project knime-core by knime.
The class TreeEnsembleRegressionPredictorNodeModel, method execute.
/**
* {@inheritDoc}
*/
@Override
protected PortObject[] execute(final PortObject[] inObjects, final ExecutionContext exec) throws Exception {
    TreeEnsembleModelPortObject model = (TreeEnsembleModelPortObject) inObjects[0];
    TreeEnsembleModelPortObjectSpec modelSpec = model.getSpec();
    BufferedDataTable data = (BufferedDataTable) inObjects[1];
    DataTableSpec dataSpec = data.getDataTableSpec();
    final TreeEnsemblePredictor pred = new TreeEnsemblePredictor(modelSpec, model, dataSpec, m_configuration);
    ColumnRearranger rearranger = pred.getPredictionRearranger();
    BufferedDataTable outTable = exec.createColumnRearrangeTable(data, rearranger, exec);
    return new BufferedDataTable[] { outTable };
}
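In a ColumnRearranger-based predictor node like this one, the matching configure method usually builds the same rearranger from the input specs alone, so the output table spec is known before execution. A generic sketch of that pattern; createPredictionRearranger is a hypothetical helper standing in for whatever the actual TreeEnsemblePredictor setup looks like at configure time:

@Override
protected PortObjectSpec[] configure(final PortObjectSpec[] inSpecs) throws InvalidSettingsException {
    TreeEnsembleModelPortObjectSpec modelSpec = (TreeEnsembleModelPortObjectSpec) inSpecs[0];
    DataTableSpec dataSpec = (DataTableSpec) inSpecs[1];
    // hypothetical helper that creates the same rearranger used in execute()
    ColumnRearranger rearranger = createPredictionRearranger(modelSpec, dataSpec);
    return new PortObjectSpec[]{rearranger.createSpec()};
}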