use of org.knime.core.data.DataTable in project knime-core by knime.
the class ReadSysPropertyNodeModel method execute.
/**
* {@inheritDoc}
*/
@Override
protected BufferedDataTable[] execute(final BufferedDataTable[] inData, final ExecutionContext exec) throws Exception {
    Result result = m_config.createResult();
    String message = result.getWarningMessage();
    if (message != null) {
        setWarningMessage(message);
    }
    DataTable table = result.getTable();
    BufferedDataTable bdtTable = exec.createBufferedDataTable(table, exec);
    return new BufferedDataTable[] { bdtTable };
}
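The call to createBufferedDataTable is the part that materializes the plain DataTable into a BufferedDataTable; the node above simply reuses exec as the progress monitor for the copy. As a minimal sketch, assuming a hypothetical helper createPlainTable and a dedicated sub-progress for the caching step, the same pattern could look like this:

@Override
protected BufferedDataTable[] execute(final BufferedDataTable[] inData, final ExecutionContext exec) throws Exception {
    // createPlainTable is a hypothetical helper producing an arbitrary, non-buffered DataTable
    DataTable plainTable = createPlainTable(inData[0]);
    // reserve the remaining progress for caching the rows into a BufferedDataTable
    ExecutionMonitor copyProgress = exec.createSubProgress(1.0);
    BufferedDataTable cached = exec.createBufferedDataTable(plainTable, copyProgress);
    return new BufferedDataTable[] { cached };
}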
use of org.knime.core.data.DataTable in project knime-core by knime.
the class MissingValueHandling2Table method createMissingValueHandlingTable.
// getColSetting(DataTableSpec, ColSetting[])
/**
* Does missing value handling to the argument table given the col settings
* in an array and also reports progress.
*
* @param table the table to do missing value handling on
* @param colSettings the settings
* @param exec for progress/cancel and to create the buffered data table
* @param warningBuffer the buffer to which potential warning messages are added
* @return a cache table, cleaned up
* @throws CanceledExecutionException if canceled
*/
public static BufferedDataTable createMissingValueHandlingTable(final DataTable table, final MissingValueHandling2ColSetting[] colSettings, final ExecutionContext exec, final StringBuffer warningBuffer) throws CanceledExecutionException {
    MissingValueHandling2ColSetting[] colSetting;
    try {
        colSetting = getColSetting(table.getDataTableSpec(), colSettings, false);
    } catch (InvalidSettingsException ise) {
        LOGGER.coding("getColSetting method is not supposed to throw an exception, ignoring settings", ise);
        DataTableSpec s = table.getDataTableSpec();
        colSetting = new MissingValueHandling2ColSetting[s.getNumColumns()];
        for (int i = 0; i < s.getNumColumns(); i++) {
            colSetting[i] = new MissingValueHandling2ColSetting(s.getColumnSpec(i));
            colSetting[i].setMethod(MissingValueHandling2ColSetting.METHOD_NO_HANDLING);
        }
    }
    boolean needStatistics = false;
    int mostFrequentColCount = 0;
    for (int i = 0; i < colSetting.length; i++) {
        MissingValueHandling2ColSetting c = colSetting[i];
        switch (c.getMethod()) {
            case MissingValueHandling2ColSetting.METHOD_MOST_FREQUENT:
                mostFrequentColCount++;
                // intentional fall-through: most-frequent columns also need statistics
            case MissingValueHandling2ColSetting.METHOD_MAX:
            case MissingValueHandling2ColSetting.METHOD_MIN:
            case MissingValueHandling2ColSetting.METHOD_MEAN:
                needStatistics = true;
                break;
            default:
        }
    }
    int[] mostFrequentCols = new int[mostFrequentColCount];
    if (mostFrequentColCount > 0) {
        int index = 0;
        for (int i = 0; i < colSetting.length; i++) {
            MissingValueHandling2ColSetting c = colSetting[i];
            switch (c.getMethod()) {
                case MissingValueHandling2ColSetting.METHOD_MOST_FREQUENT:
                    mostFrequentCols[index++] = i;
                    break;
                default:
            }
        }
    }
    DataTable t;
    ExecutionMonitor e;
    if (needStatistics && !(table instanceof StatisticsTable)) {
        // for creating statistics table
        ExecutionMonitor subExec = exec.createSubProgress(0.5);
        t = new MyStatisticsTable(table, subExec, mostFrequentCols);
        if (((MyStatisticsTable) t).m_warningMessage != null) {
            warningBuffer.append(((MyStatisticsTable) t).m_warningMessage);
        }
        // for the iterator
        e = exec.createSubProgress(0.5);
    } else {
        t = table;
        e = exec;
    }
    MissingValueHandling2Table mvht = new MissingValueHandling2Table(t, colSetting);
    BufferedDataContainer container = exec.createDataContainer(mvht.getDataTableSpec());
    e.setMessage("Adding rows...");
    int count = 0;
    try {
        MissingValueHandling2TableIterator it = new MissingValueHandling2TableIterator(mvht, e);
        while (it.hasNext()) {
            DataRow next = it.next();
            e.setMessage("Adding row " + (count + 1) + " (\"" + next.getKey() + "\")");
            container.addRowToTable(next);
            count++;
        }
    } catch (MissingValueHandling2TableIterator.RuntimeCanceledExecutionException rcee) {
        throw rcee.getCause();
    } finally {
        container.close();
    }
    return container.getTable();
}
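For orientation, a sketch of how a node could delegate to this factory method from its execute implementation; the m_colSettings field is an assumption standing in for whatever the node's dialog produced, not part of the class shown above:

@Override
protected BufferedDataTable[] execute(final BufferedDataTable[] inData, final ExecutionContext exec) throws Exception {
    // m_colSettings: assumed field holding one MissingValueHandling2ColSetting per configured column
    StringBuffer warningBuffer = new StringBuffer();
    BufferedDataTable out = MissingValueHandling2Table.createMissingValueHandlingTable(inData[0], m_colSettings, exec, warningBuffer);
    if (warningBuffer.length() > 0) {
        // forward warnings collected during missing value handling to the framework
        setWarningMessage(warningBuffer.toString());
    }
    return new BufferedDataTable[] { out };
}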
use of org.knime.core.data.DataTable in project knime-core by knime.
the class AppendedRowsTableTest method testGetRowIterator.
/**
* Test method for getRowIterator().
*/
public void testGetRowIterator() {
    DataTable firstTable = new DefaultTable(DATA, DATA_H, DATA_TYPES);
    DataTable firstTableShuffle = new DefaultTable(DATA_SHUFFLE, DATA_SHUFFLE_H, DATA_SHUFFLE_TYPES);
    DataTable ap = new AppendedRowsTable(new DataTable[] { firstTable, firstTableShuffle });
    RowIterator apIt = ap.iterator();
    for (RowIterator fiIt = firstTable.iterator(); fiIt.hasNext();) {
        assertTrue(apIt.hasNext());
        DataRow apRow = apIt.next();
        DataRow fiRow = fiIt.next();
        assertEquals(apRow.getKey(), fiRow.getKey());
        assertEquals(apRow.getCell(0), fiRow.getCell(0));
        assertEquals(apRow.getCell(1), fiRow.getCell(1));
        assertEquals(apRow.getCell(2), fiRow.getCell(2));
    }
    for (RowIterator seIt = firstTableShuffle.iterator(); seIt.hasNext();) {
        assertTrue(apIt.hasNext());
        DataRow apRow = apIt.next();
        DataRow seRow = seIt.next();
        assertEquals(apRow.getKey(), seRow.getKey());
        // first and second are swapped!
        assertEquals(apRow.getCell(0), seRow.getCell(1));
        assertEquals(apRow.getCell(1), seRow.getCell(0));
        assertEquals(apRow.getCell(2), seRow.getCell(2));
    }
    assertFalse(apIt.hasNext());
    DataTable duplicateTable = new AppendedRowsTable(new DataTable[] { firstTable, firstTable });
    RowIterator dupIt = duplicateTable.iterator();
    for (RowIterator fiIt = firstTable.iterator(); fiIt.hasNext();) {
        dupIt.next();
        fiIt.next();
    }
    // it should not return duplicate keys.
    assertFalse(dupIt.hasNext());
}
use of org.knime.core.data.DataTable in project knime-core by knime.
the class DefaultVisualizationNodeModel method execute.
/**
* Converts the input data at inport 0 into a
* {@link org.knime.base.node.util.DataArray} with maximum number of rows as
* defined in the {@link DefaultVisualizationNodeDialog}. Nominal columns
* whose set of possible values is null or contains more than 60 values are ignored.
*
* {@inheritDoc}
*/
@Override
protected BufferedDataTable[] execute(final BufferedDataTable[] inData, final ExecutionContext exec) throws Exception {
    // generate list of excluded columns, suppressing warning
    findCompatibleColumns(inData[0].getDataTableSpec(), false);
    DataTable filter = new FilterColumnTable(inData[0], false, getExcludedColumns());
    m_input = new DefaultDataArray(filter, 1, m_maxRows.getIntValue(), exec);
    if (m_maxRows.getIntValue() < inData[0].size()) {
        setWarningMessage("Only the first " + m_maxRows.getIntValue() + " rows are displayed.");
    }
    return new BufferedDataTable[0];
}
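The exclusion rule from the Javadoc (nominal columns with an unknown domain or more than 60 possible values are dropped) is hidden inside findCompatibleColumns. A rough sketch of that kind of check, written against the DataTableSpec and not taken from the actual implementation, might look like:

// Sketch only: approximates the rule described above, not the real findCompatibleColumns
List<String> excluded = new ArrayList<String>();
for (DataColumnSpec colSpec : inData[0].getDataTableSpec()) {
    if (colSpec.getType().isCompatible(NominalValue.class)) {
        Set<DataCell> possibleValues = colSpec.getDomain().getValues();
        if (possibleValues == null || possibleValues.size() > 60) {
            excluded.add(colSpec.getName());
        }
    }
}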
use of org.knime.core.data.DataTable in project knime-core by knime.
the class EntropyCalculator method load.
/**
* Factory method to restore this object given a directory in which the
* content is saved.
*
* @param dir the dir to read from
* @param exec for cancellation.
* @return a new object as read from dir
* @throws IOException if that fails
* @throws InvalidSettingsException if the internals don't match
* @throws CanceledExecutionException if canceled
*/
public static EntropyCalculator load(final File dir, final ExecutionMonitor exec) throws IOException, InvalidSettingsException, CanceledExecutionException {
    File scorerFile = new File(dir, FILE_SCORER_TABLE);
    File tempFile = DataContainer.createTempFile(".table");
    FileUtil.copy(scorerFile, tempFile);
    DataTable scorerTable = DataContainer.readFromZip(tempFile);
    File settingsFile = new File(dir, FILE_SETTINGS);
    NodeSettingsRO config = NodeSettings.loadFromXML(new BufferedInputStream(new GZIPInputStream(new FileInputStream(settingsFile))));
    double entropy = config.getDouble(CFG_ENTROPY);
    double quality = config.getDouble(CFG_QUALITY);
    int patternsInCluster = config.getInt(CFG_PAT_IN_CLUSTER);
    int patternsInReference = config.getInt(CFG_PAT_IN_REFERENCE);
    int nrClusters = config.getInt(CFG_NR_CLUSTER);
    int nrReferences = config.getInt(CFG_NR_REFERENCES);
    NodeSettingsRO subConfig = config.getNodeSettings(CFG_CLUSTERING_MAP);
    LinkedHashMap<RowKey, Set<RowKey>> map = new LinkedHashMap<RowKey, Set<RowKey>>();
    for (String key : subConfig.keySet()) {
        exec.checkCanceled();
        NodeSettingsRO keySettings = subConfig.getNodeSettings(key);
        Set<RowKey> rowKeys;
        RowKey keyCell;
        try {
            keyCell = new RowKey(keySettings.getDataCell(key).toString());
            // load settings before 2.0
            DataCell[] mappedKeys = keySettings.getDataCellArray(CFG_MAPPED_KEYS);
            rowKeys = new LinkedHashSet<RowKey>();
            for (DataCell dc : mappedKeys) {
                rowKeys.add(new RowKey(dc.toString()));
            }
        } catch (InvalidSettingsException ise) {
            keyCell = keySettings.getRowKey(key);
            RowKey[] mappedKeys = keySettings.getRowKeyArray(CFG_MAPPED_KEYS);
            rowKeys = new LinkedHashSet<RowKey>(Arrays.asList(mappedKeys));
        }
        map.put(keyCell, rowKeys);
    }
    return new EntropyCalculator(entropy, quality, patternsInCluster, nrClusters, patternsInReference, nrReferences, scorerTable, map);
}
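A sketch of the restoring side as a caller might write it, for instance inside a NodeModel's loadInternals; the m_calculator field is assumed, and since loadInternals does not declare InvalidSettingsException, it is wrapped into an IOException here:

@Override
protected void loadInternals(final File nodeInternDir, final ExecutionMonitor exec) throws IOException, CanceledExecutionException {
    try {
        // m_calculator: assumed field holding the node's entropy statistics
        m_calculator = EntropyCalculator.load(nodeInternDir, exec);
    } catch (InvalidSettingsException ise) {
        throw new IOException("Unable to restore entropy calculator", ise);
    }
}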