use of org.knime.core.data.RowIterator in project knime-core by knime.
the class DefaultDataArray method init.
private void init(final DataTable dTable, final int firstRow, final int numOfRows,
        final ExecutionMonitor execMon) throws CanceledExecutionException {
    if (dTable == null) {
        throw new IllegalArgumentException("Must provide non-null data table" + " for DataArray");
    }
    if (firstRow < 1) {
        throw new IllegalArgumentException("Starting row must be greater" + " than zero");
    }
    if (numOfRows < 0) {
        throw new IllegalArgumentException("Number of rows to read must be" + " greater than or equal zero");
    }
    DataTableSpec tSpec = dTable.getDataTableSpec();
    DataTableDomainCreator domainCreator = new DataTableDomainCreator(tSpec, true);
    int numOfColumns = tSpec.getNumColumns();
    m_firstRow = firstRow;
    m_rows = new ArrayList<DataRow>(numOfColumns);
    // now fill our data structures
    RowIterator rIter = dTable.iterator();
    int rowNumber = 0;
    while ((rIter.hasNext()) && (m_rows.size() < numOfRows)) {
        // get the next row
        DataRow row = rIter.next();
        rowNumber++;
        if (rowNumber < firstRow) {
            // skip all rows until we see the specified first row
            continue;
        }
        // store it.
        m_rows.add(row);
        domainCreator.updateDomain(row);
        // see if user wants us to stop
        if (execMon != null) {
            // will throw an exception if we are supposed to cancel
            execMon.checkCanceled();
            execMon.setProgress((double) m_rows.size() / (double) numOfRows,
                "read row " + m_rows.size() + " of max. " + numOfRows);
        }
    }
    if (rIter instanceof CloseableRowIterator) {
        ((CloseableRowIterator) rIter).close();
    }
    m_tSpec = domainCreator.createSpec();
}
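The method above illustrates the basic RowIterator read loop, including the check for CloseableRowIterator so that buffered tables can release their underlying resources. A minimal, self-contained sketch of that pattern, assuming only that the caller supplies a DataTable (the helper class and method below are hypothetical, not part of the KNIME API):

import org.knime.core.data.DataTable;
import org.knime.core.data.RowIterator;
import org.knime.core.data.container.CloseableRowIterator;

/** Hypothetical helper, not part of the KNIME API. */
public final class RowIterationSketch {

    private RowIterationSketch() {
    }

    /** Counts the rows of the given table, closing the iterator when possible. */
    public static long countRows(final DataTable table) {
        final RowIterator it = table.iterator();
        long count = 0;
        try {
            while (it.hasNext()) {
                it.next();
                count++;
            }
        } finally {
            // buffered tables hand out closeable iterators; close them to free resources
            if (it instanceof CloseableRowIterator) {
                ((CloseableRowIterator) it).close();
            }
        }
        return count;
    }
}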
use of org.knime.core.data.RowIterator in project knime-core by knime.
the class TableContentModelTest method testIsHiLit.
/**
 * Method being tested: boolean isHiLit(int).
 */
public final void testIsHiLit() {
    final HiLiteHandler hiliteHdl = new HiLiteHandler();
    final TableContentModel m = new TableContentModel(DATA, hiliteHdl);
    // hilite every other in DATA and check if it is correctly reflected
    // in m
    final HashSet<RowKey> set = new HashSet<RowKey>();
    boolean isEvenNumber = true;
    for (RowIterator it = DATA.iterator(); it.hasNext();) {
        RowKey cell = it.next().getKey();
        if (isEvenNumber) {
            hiliteHdl.fireHiLiteEvent(cell);
            set.add(cell);
        }
        isEvenNumber = !isEvenNumber;
    }
    flushEDTQueue();
    for (int i = 0; i < m.getRowCount(); i++) {
        RowKey key = m.getRow(i).getKey();
        boolean isHiLit = m.isHiLit(i);
        assertEquals(set.contains(key), isHiLit);
    }
    try {
        m.isHiLit(-1);
        fail("Expected " + IndexOutOfBoundsException.class + " not thrown");
    } catch (IndexOutOfBoundsException e) {
        NodeLogger.getLogger(getClass()).debug("Got expected exception: " + e.getClass().getName(), e);
    }
    try {
        m.isHiLit(OBJECT_DATA.length);
        fail("Expected " + IndexOutOfBoundsException.class + " not thrown");
    } catch (IndexOutOfBoundsException e) {
        NodeLogger.getLogger(getClass()).debug("Got expected exception: " + e.getClass().getName(), e);
    }
}
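The test collects the keys of every other row by walking the table with a RowIterator and then compares that set against the model's hilite state. The key-collection idiom itself can be written in isolation; the following sketch assumes a DataTable comes from elsewhere and uses a made-up helper name:

import java.util.LinkedHashSet;
import java.util.Set;

import org.knime.core.data.DataTable;
import org.knime.core.data.RowIterator;
import org.knime.core.data.RowKey;

/** Hypothetical utility; collects the row keys of a table in iteration order. */
final class RowKeyCollector {

    private RowKeyCollector() {
    }

    static Set<RowKey> collectKeys(final DataTable table) {
        final Set<RowKey> keys = new LinkedHashSet<RowKey>();
        // RowIterator extends Iterator<DataRow>, so the classic for-loop form works
        for (RowIterator it = table.iterator(); it.hasNext();) {
            keys.add(it.next().getKey());
        }
        return keys;
    }
}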
use of org.knime.core.data.RowIterator in project knime-core by knime.
the class AbstractParallelNodeModel method execute.
/**
 * {@inheritDoc}
 */
@Override
protected final BufferedDataTable[] execute(final BufferedDataTable[] data, final ExecutionContext exec)
        throws Exception {
    final DataTableSpec[] outSpecs = prepareExecute(data);
    final List<Future<BufferedDataContainer[]>> futures = new ArrayList<>();
    final BufferedDataTable[] additionalTables = new BufferedDataTable[Math.max(0, data.length - 1)];
    System.arraycopy(data, 1, additionalTables, 0, additionalTables.length);
    // do some consistency checks to bail out as early as possible
    if (outSpecs == null) {
        throw new NullPointerException("Implementation Error: The "
            + "array of generated output table specs can't be null.");
    }
    if (outSpecs.length != getNrOutPorts()) {
        throw new IllegalStateException("Implementation Error: Number of"
            + " provided DataTableSpecs doesn't match number of output" + " ports");
    }
    for (DataTableSpec outSpec : outSpecs) {
        if (outSpec == null) {
            throw new IllegalStateException("Implementation Error: The"
                + " generated output DataTableSpec is null.");
        }
    }
    final double max = data[0].size();
    final Callable<Void> submitter = new Callable<Void>() {

        @Override
        public Void call() throws Exception {
            final RowIterator it = data[0].iterator();
            BufferedDataContainer container = null;
            int count = 0, chunks = 0;
            while (true) {
                if ((count++ % m_chunkSize == 0) || !it.hasNext()) {
                    exec.checkCanceled();
                    if (container != null) {
                        container.close();
                        final BufferedDataContainer temp = container;
                        chunks++;
                        final int temp2 = chunks;
                        futures.add(m_workers.submit(new Callable<BufferedDataContainer[]>() {

                            @Override
                            public BufferedDataContainer[] call() throws Exception {
                                ExecutionMonitor subProg =
                                    exec.createSilentSubProgress((m_chunkSize > max) ? 1 : m_chunkSize / max);
                                exec.setMessage("Processing chunk " + temp2);
                                BufferedDataContainer[] result = new BufferedDataContainer[outSpecs.length];
                                for (int i = 0; i < outSpecs.length; i++) {
                                    result[i] = exec.createDataContainer(outSpecs[i], true, 0);
                                }
                                executeByChunk(temp.getTable(), additionalTables, result, subProg);
                                for (DataContainer c : result) {
                                    c.close();
                                }
                                exec.setProgress(temp2 * m_chunkSize / max);
                                return result;
                            }
                        }));
                    }
                    if (!it.hasNext()) {
                        break;
                    }
                    container = exec.createDataContainer(data[0].getDataTableSpec());
                }
                container.addRowToTable(it.next());
            }
            return null;
        }
    };
    try {
        m_workers.runInvisible(submitter);
    } catch (IllegalThreadStateException ex) {
        // this node has not been started by a thread from a thread pool.
        // This is odd, but may happen
        submitter.call();
    }
    final BufferedDataTable[][] tempTables = new BufferedDataTable[outSpecs.length][futures.size()];
    int k = 0;
    for (Future<BufferedDataContainer[]> results : futures) {
        try {
            exec.checkCanceled();
        } catch (CanceledExecutionException ex) {
            for (Future<BufferedDataContainer[]> cancel : futures) {
                cancel.cancel(true);
            }
            throw ex;
        }
        final BufferedDataContainer[] temp = results.get();
        if ((temp == null) || (temp.length != getNrOutPorts())) {
            throw new IllegalStateException("Invalid result. Execution " + " failed, reason: data is null or number "
                + "of outputs wrong.");
        }
        for (int i = 0; i < temp.length; i++) {
            tempTables[i][k] = temp[i].getTable();
        }
        k++;
    }
    final BufferedDataTable[] resultTables = new BufferedDataTable[outSpecs.length];
    for (int i = 0; i < resultTables.length; i++) {
        resultTables[i] = exec.createConcatenateTable(exec, tempTables[i]);
    }
    return resultTables;
}
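The method above interleaves RowIterator-based chunking with container handling and thread-pool bookkeeping. Stripped of the parallel submission, the core splitting step looks roughly like the following sequential sketch; the helper class, its name, and the decision to return plain BufferedDataTables are assumptions for illustration, not part of the framework:

import java.util.ArrayList;
import java.util.List;

import org.knime.core.data.RowIterator;
import org.knime.core.node.BufferedDataContainer;
import org.knime.core.node.BufferedDataTable;
import org.knime.core.node.CanceledExecutionException;
import org.knime.core.node.ExecutionContext;

/** Hypothetical helper that splits a table into chunks of at most chunkSize rows. */
final class TableChunker {

    private TableChunker() {
    }

    static List<BufferedDataTable> split(final BufferedDataTable table, final int chunkSize,
            final ExecutionContext exec) throws CanceledExecutionException {
        final List<BufferedDataTable> chunks = new ArrayList<BufferedDataTable>();
        final RowIterator it = table.iterator();
        BufferedDataContainer container = null;
        int count = 0;
        while (it.hasNext()) {
            if (count++ % chunkSize == 0) {
                exec.checkCanceled();
                if (container != null) {
                    // finish the previous chunk before starting a new one
                    container.close();
                    chunks.add(container.getTable());
                }
                container = exec.createDataContainer(table.getDataTableSpec());
            }
            container.addRowToTable(it.next());
        }
        if (container != null) {
            container.close();
            chunks.add(container.getTable());
        }
        return chunks;
    }
}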
use of org.knime.core.data.RowIterator in project knime-core by knime.
the class EntropyCalculator method getMap.
private static HashMap<RowKey, RowKey> getMap(final DataTable table, final int colIndex, final ExecutionMonitor ex) throws CanceledExecutionException {
HashMap<RowKey, RowKey> result = new LinkedHashMap<RowKey, RowKey>();
int rowCount = -1;
if (table instanceof BufferedDataTable) {
rowCount = ((BufferedDataTable) table).getRowCount();
}
// row counter
int i = 1;
final String name = table.getDataTableSpec().getName();
for (RowIterator it = table.iterator(); it.hasNext(); i++) {
DataRow row = it.next();
String m = "Scanning row " + i + " of table \"" + name + "\".";
if (rowCount >= 0) {
ex.setProgress(i / (double) rowCount, m);
} else {
ex.setMessage(m);
}
ex.checkCanceled();
RowKey id = row.getKey();
RowKey clusterMember = new RowKey(row.getCell(colIndex).toString());
result.put(id, clusterMember);
}
return result;
}
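Besides building the key-to-cluster map, the method shows the usual way of reporting progress while iterating: a fraction is only reported when the row count is known (i.e. for a BufferedDataTable), otherwise only a message is set. A compact sketch of that progress idiom, assuming the table and the ExecutionMonitor are supplied by the caller (the helper is illustrative only):

import org.knime.core.data.DataTable;
import org.knime.core.data.RowIterator;
import org.knime.core.node.BufferedDataTable;
import org.knime.core.node.CanceledExecutionException;
import org.knime.core.node.ExecutionMonitor;

/** Hypothetical helper demonstrating progress reporting during row iteration. */
final class ProgressAwareScan {

    private ProgressAwareScan() {
    }

    static void scan(final DataTable table, final ExecutionMonitor mon) throws CanceledExecutionException {
        // the total row count is only available for buffered tables
        final long total = (table instanceof BufferedDataTable) ? ((BufferedDataTable) table).size() : -1;
        long i = 0;
        for (RowIterator it = table.iterator(); it.hasNext(); i++) {
            it.next();
            final String msg = "Scanning row " + (i + 1);
            if (total > 0) {
                mon.setProgress((i + 1) / (double) total, msg);
            } else {
                mon.setMessage(msg);
            }
            mon.checkCanceled();
        }
    }
}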
use of org.knime.core.data.RowIterator in project knime-core by knime.
the class MDSProjectionManager method doEpoch.
/**
* Computing one epoch if the iterative mds. In one epoch all points are
* adjusted according to all fixed points and if <code>projectOnly</code>
* is set <code>false</code> to all other points too.
*
* @param epoch The current epoch.
* @param exec The execution monitor to show the progress and enable
* canceling.
* @throws CanceledExecutionException If the process was canceled.
*/
protected void doEpoch(final int epoch, final ExecutionMonitor exec) throws CanceledExecutionException {
    // through all data points
    RowIterator it1 = m_inData.iterator();
    while (it1.hasNext()) {
        exec.checkCanceled();
        DataRow r1 = it1.next();
        DataPoint p1 = m_points.get(r1.getKey());
        // first adjust point at the fixed points
        RowIterator fit = m_fixedDataPoints.iterator();
        while (fit.hasNext()) {
            DataRow fixedRow = fit.next();
            DataPoint p2 = m_fixedPoints.get(fixedRow.getKey());
            adjustDataPoint(p1, p2, r1, fixedRow);
        }
        // through all data points again
        if (!m_projectOnly) {
            RowIterator it2 = m_inData.iterator();
            while (it2.hasNext()) {
                DataRow r2 = it2.next();
                DataPoint p2 = m_points.get(r2.getKey());
                adjustDataPoint(p1, p2, r1, r2);
            }
        }
    }
    adjustLearningRate(epoch);
}
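The epoch loop relies on the fact that DataTable.iterator() can be called repeatedly to obtain independent iterators, which is what makes the nested pass over the same table possible. A minimal sketch of that nested-iteration idiom, with a made-up callback standing in for adjustDataPoint:

import org.knime.core.data.DataRow;
import org.knime.core.data.DataTable;
import org.knime.core.data.RowIterator;

/** Hypothetical example of visiting all ordered pairs of rows of one table. */
final class PairwiseVisit {

    /** Callback invoked for every ordered pair of rows; illustrative only. */
    interface PairVisitor {
        void visit(DataRow outer, DataRow inner);
    }

    static void visitAllPairs(final DataTable table, final PairVisitor visitor) {
        RowIterator outerIt = table.iterator();
        while (outerIt.hasNext()) {
            DataRow outer = outerIt.next();
            // a fresh iterator gives an independent, second pass over the same table
            RowIterator innerIt = table.iterator();
            while (innerIt.hasNext()) {
                visitor.visit(outer, innerIt.next());
            }
        }
    }

    private PairwiseVisit() {
    }
}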