Use of org.knime.core.node.streamable.RowOutput in project knime-core by knime.
The class VirtualSubNodeInputNodeModel, method createStreamableOperator:
@Override
public StreamableOperator createStreamableOperator(final PartitionInfo partitionInfo, final PortObjectSpec[] inSpecs) throws InvalidSettingsException {
    return new StreamableOperator() {
        @Override
        public void runFinal(final PortInput[] inputs, final PortOutput[] outputs, final ExecutionContext exec) throws Exception {
            assert inputs.length == 0;
            PortObject[] dataFromParent = ArrayUtils.remove(m_subNodeContainer.fetchInputDataFromParent(), 0);
            for (int i = 0; i < outputs.length; i++) {
                if (BufferedDataTable.TYPE.equals(getOutPortType(i))) {
                    // stream port content if it's data
                    BufferedDataTable bdt = (BufferedDataTable) (dataFromParent[i]);
                    RowOutput rowOutput = (RowOutput) outputs[i];
                    for (DataRow dr : bdt) {
                        rowOutput.push(dr);
                    }
                    rowOutput.close();
                } else {
                    ((PortObjectOutput) outputs[i]).setPortObject(dataFromParent[i]);
                }
            }
        }
    };
}
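The pattern above (push every row of an already materialized BufferedDataTable into a RowOutput, then close the output) can be factored into a small helper. The sketch below is illustrative only: the names RowOutputUtil and pushAll are made up and not part of the KNIME API, and a per-row cancellation check is added on the assumption that the table may be large. Closing the output signals end-of-data to downstream consumers of the stream.

import org.knime.core.data.DataRow;
import org.knime.core.node.BufferedDataTable;
import org.knime.core.node.ExecutionContext;
import org.knime.core.node.streamable.RowOutput;

final class RowOutputUtil {

    private RowOutputUtil() { }

    /**
     * Pushes every row of an in-memory table into a streaming output,
     * checking for cancellation between rows, then closes the output.
     * (Hypothetical helper, not part of the KNIME API.)
     */
    static void pushAll(final BufferedDataTable table, final RowOutput out,
            final ExecutionContext exec) throws Exception {
        for (DataRow row : table) {
            exec.checkCanceled();
            out.push(row);
        }
        out.close(); // signal downstream operators that no more rows follow
    }
}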
Use of org.knime.core.node.streamable.RowOutput in project knime-core by knime.
The class JavaRowSplitterNodeModel, method execute:
private void execute(final RowInput inData, final RowOutput[] outputs, final ExecutionContext exec) throws Exception {
    DataTableSpec spec = inData.getDataTableSpec();
    m_settings.setInputAndCompile(spec);
    ColumnCalculator cc = new ColumnCalculator(m_settings, this);
    int rowIndex = 0;
    DataRow r;
    RowOutput trueMatch = outputs[0];
    RowOutput falseMatch = outputs.length > 1 ? outputs[1] : null;
    while ((r = inData.poll()) != null) {
        cc.setProgress(rowIndex, m_rowCount, r.getKey(), exec);
        DataCell result = cc.calculate(r);
        boolean b;
        if (result.isMissing()) {
            b = false;
            setWarningMessage("Expression returned missing value for some rows (interpreted as no match)");
        } else {
            b = ((BooleanValue) result).getBooleanValue();
        }
        if (b) {
            trueMatch.push(r);
        } else if (falseMatch != null) {
            falseMatch.push(r);
        }
        exec.checkCanceled();
        rowIndex++;
    }
    trueMatch.close();
    if (falseMatch != null) {
        falseMatch.close();
    }
}
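The splitter above follows a generic poll-and-route loop: read rows with RowInput.poll() until it returns null, push each row to one of the outputs, and close every output at the end. A minimal, self-contained sketch of that loop follows; RowSplitter and split are hypothetical names, and a java.util.function.Predicate stands in for the compiled Java expression used by the real node.

import java.util.function.Predicate;
import org.knime.core.data.DataRow;
import org.knime.core.node.ExecutionContext;
import org.knime.core.node.streamable.RowInput;
import org.knime.core.node.streamable.RowOutput;

final class RowSplitter {

    private RowSplitter() { }

    /**
     * Streams rows from the input and routes each row to the first or second
     * output depending on a predicate. The second output may be null when the
     * node has only one outport. (Illustrative helper, not a KNIME class.)
     */
    static void split(final RowInput in, final RowOutput matches, final RowOutput misses,
            final Predicate<DataRow> test, final ExecutionContext exec) throws Exception {
        DataRow row;
        while ((row = in.poll()) != null) { // poll() returns null once the input is exhausted
            if (test.test(row)) {
                matches.push(row);
            } else if (misses != null) {
                misses.push(row);
            }
            exec.checkCanceled();
        }
        matches.close();
        if (misses != null) {
            misses.close();
        }
    }
}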
Use of org.knime.core.node.streamable.RowOutput in project knime-core by knime.
The class FileReaderNodeModel, method createStreamableOperator:
@Override
public StreamableOperator createStreamableOperator(final PartitionInfo partitionInfo, final PortObjectSpec[] inSpecs) throws InvalidSettingsException {
    return new StreamableOperator() {
        @Override
        public void runFinal(final PortInput[] inputs, final PortOutput[] outputs, final ExecutionContext exec) throws Exception {
            assert inputs.length == 0;
            LOGGER.info("Preparing to read from '" + m_frSettings.getDataFileLocation().toString() + "'.");
            // check the settings again - especially file existence (under Linux,
            // files could have been deleted/renamed since the last config call)
            SettingsStatus status = m_frSettings.getStatusOfSettings(true, null);
            if (status.getNumOfErrors() > 0) {
                throw new InvalidSettingsException(status.getAllErrorMessages(10));
            }
            DataTableSpec tSpec = m_frSettings.createDataTableSpec();
            FileTable fTable = new FileTable(tSpec, m_frSettings, m_frSettings.getSkippedColumns(), exec);
            // data output port
            RowOutput rowOutput = (RowOutput) outputs[0];
            int row = 0;
            FileRowIterator it = fTable.iterator();
            try {
                if (it.getZipEntryName() != null) {
                    // seems we are reading a ZIP archive
                    LOGGER.info("Reading entry '" + it.getZipEntryName() + "' from the specified ZIP archive.");
                }
                while (it.hasNext()) {
                    row++;
                    DataRow next = it.next();
                    final int finalRow = row;
                    exec.setMessage(() -> "Reading row #" + finalRow + " (\"" + next.getKey() + "\")");
                    exec.checkCanceled();
                    rowOutput.push(next);
                }
                rowOutput.close();
                if (it.zippedSourceHasMoreEntries()) {
                    // only after reading until the end of the file does this return a valid result
                    setWarningMessage("Source is a ZIP archive with multiple entries. Only reading first entry!");
                }
            } catch (DuplicateKeyException dke) {
                String msg = dke.getMessage();
                if (msg == null) {
                    msg = "Duplicate row IDs";
                }
                msg += ". Consider making IDs unique in the advanced settings.";
                DuplicateKeyException newDKE = new DuplicateKeyException(msg);
                newDKE.initCause(dke);
                throw newDKE;
            }
            // user settings allow for truncating the table
            if (it.iteratorEndedEarly()) {
                setWarningMessage("Data was truncated due to user settings.");
            }
            // closes all sources
            fTable.dispose();
        }
    };
}
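One detail worth noting in the reader snippet is that rowOutput.close() is called inside the try block, so the output is not closed if an exception is thrown before the loop finishes. The sketch below shows a try/finally variant of the push loop for a source-style operator (no inputs). SourcePushExample and pushGeneratedRows are hypothetical names, and the generated single-cell rows are placeholders for rows read from a real source.

import org.knime.core.data.DataRow;
import org.knime.core.data.RowKey;
import org.knime.core.data.def.DefaultRow;
import org.knime.core.data.def.StringCell;
import org.knime.core.node.ExecutionContext;
import org.knime.core.node.streamable.RowOutput;

final class SourcePushExample {

    private SourcePushExample() { }

    /**
     * Pushes a handful of generated rows into the output and guarantees that
     * the output is closed even if producing a row fails. (The rows here are
     * placeholders; a real reader would pull rows from its source, as the
     * FileReader snippet above does.)
     */
    static void pushGeneratedRows(final RowOutput out, final ExecutionContext exec)
            throws Exception {
        try {
            for (long i = 0; i < 10; i++) {
                exec.checkCanceled();
                exec.setMessage("Reading row #" + (i + 1));
                out.push(new DefaultRow(RowKey.createRowKey(i), new StringCell("value " + i)));
            }
        } finally {
            out.close(); // always release downstream consumers
        }
    }
}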
Use of org.knime.core.node.streamable.RowOutput in project knime-core by knime.
The class UngroupNodeModel, method createStreamableOperator:
/**
 * {@inheritDoc}
 */
@Override
public StreamableOperator createStreamableOperator(final PartitionInfo partitionInfo, final PortObjectSpec[] inSpecs) throws InvalidSettingsException {
    DataTableSpec spec = (DataTableSpec) inSpecs[0];
    int[] idxs = getSelectedColIdxs(spec, getColumnNames(spec));
    UngroupOperation2 ugO = createUngroupOperation(spec, idxs);
    return new StreamableOperator() {
        @Override
        public void runFinal(final PortInput[] inputs, final PortOutput[] outputs, final ExecutionContext exec) throws Exception {
            RowInput input = (RowInput) inputs[0];
            RowOutput output = (RowOutput) outputs[0];
            ugO.compute(input, output, exec, -1, m_trans);
            input.close();
            output.close();
        }
    };
}
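UngroupOperation2.compute() hides the actual row handling, but at the streaming level it reduces to the same poll/push/close cycle seen in the other snippets. For reference, here is a minimal identity pass-through built only from the calls that appear above; PassThrough and copy are hypothetical names.

import org.knime.core.data.DataRow;
import org.knime.core.node.ExecutionContext;
import org.knime.core.node.streamable.RowInput;
import org.knime.core.node.streamable.RowOutput;

final class PassThrough {

    private PassThrough() { }

    /**
     * The simplest possible streaming body: poll rows from the input and push
     * them unchanged to the output, then close both ends. A real node would
     * transform each row before pushing, as UngroupOperation2 does above.
     */
    static void copy(final RowInput in, final RowOutput out, final ExecutionContext exec)
            throws Exception {
        DataRow row;
        while ((row = in.poll()) != null) {
            exec.checkCanceled();
            out.push(row);
        }
        in.close();
        out.close();
    }
}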
Use of org.knime.core.node.streamable.RowOutput in project knime-core by knime.
The class RowKeyUtil2, method changeRowKey:
/**
 * <p>Replaces the row key with the values of the column with the given name
 * and appends a new column holding the old key values if the
 * <code>newColName</code> variable is a non-empty <code>String</code>.</p>
 * <p>
 * Call the {@link RowKeyUtil2#getDuplicatesCounter()} and
 * {@link RowKeyUtil2#getMissingValueCounter()}
 * methods to get information about the replaced duplicates and missing
 * values after this method has completed.
 * </p>
 * @param inData the {@link BufferedDataTable} with the input data
 * @param exec the {@link ExecutionContext} to check for cancellation and to
 * provide status messages
 * @param selRowKeyColName the name of the column which should replace
 * the row key or <code>null</code> if a new one should be created
 * @param appendColumn <code>true</code> if a new column should be created
 * @param newColSpec the {@link DataColumnSpec} of the new column or
 * <code>null</code> if no column should be created at all
 * @param ensureUniqueness if set to <code>true</code> the method ensures
 * the uniqueness of the row key even if the values of the selected column
 * aren't unique
 * @param replaceMissingVals if set to <code>true</code> the method
 * replaces missing values with '?'
 * @param removeRowKeyCol removes the selected row key column if set
 * to <code>true</code>
 * @param hiliteMap <code>true</code> if a map should be maintained that
 * maps the new row ID to the old row ID
 * @return the {@link BufferedDataTable} with the replaced row key and
 * the optionally appended new column with the old row keys
 * @throws Exception if the cancel button was pressed or the input data
 * isn't valid
 */
public BufferedDataTable changeRowKey(final BufferedDataTable inData, final ExecutionContext exec, final String selRowKeyColName, final boolean appendColumn, final DataColumnSpec newColSpec, final boolean ensureUniqueness, final boolean replaceMissingVals, final boolean removeRowKeyCol, final boolean hiliteMap) throws Exception {
    LOGGER.debug("Entering changeRowKey(inData, exec, selRowKeyColName, newColName) of class RowKeyUtil.");
    DataTableSpec outSpec = inData.getDataTableSpec();
    if (removeRowKeyCol) {
        outSpec = createTableSpec(outSpec, selRowKeyColName);
    }
    if (appendColumn) {
        if (newColSpec == null) {
            throw new NullPointerException("NewColumnSpec must not be null");
        }
        outSpec = AppendedColumnTable.getTableSpec(outSpec, newColSpec);
    }
    final BufferedDataContainer newContainer = exec.createDataContainer(outSpec, true);
    RowInput rowInput = new DataTableRowInput(inData);
    RowOutput rowOutput = new BufferedDataTableRowOutput(newContainer);
    changeRowKey(rowInput, rowOutput, exec, selRowKeyColName, appendColumn, newColSpec, ensureUniqueness, replaceMissingVals, removeRowKeyCol, hiliteMap, outSpec.getNumColumns(), inData.getRowCount());
    newContainer.close();
    return newContainer.getTable();
}
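This last snippet shows the bridge between the classic table-based API and the streaming API: a BufferedDataTable is wrapped in a DataTableRowInput and a BufferedDataContainer in a BufferedDataTableRowOutput, so the same row-by-row code can serve both execution modes. Below is a condensed sketch of that bridge with an identity copy standing in for the real row processing; StreamingBridge and runThroughStreamingApi are hypothetical names.

import org.knime.core.data.DataRow;
import org.knime.core.node.BufferedDataContainer;
import org.knime.core.node.BufferedDataTable;
import org.knime.core.node.ExecutionContext;
import org.knime.core.node.streamable.BufferedDataTableRowOutput;
import org.knime.core.node.streamable.DataTableRowInput;
import org.knime.core.node.streamable.RowInput;
import org.knime.core.node.streamable.RowOutput;

final class StreamingBridge {

    private StreamingBridge() { }

    /**
     * Wraps a BufferedDataTable in the streaming interfaces so the same
     * row-by-row code path can be reused from a classic execute() method.
     * The identity copy below is a stand-in for the real row processing.
     */
    static BufferedDataTable runThroughStreamingApi(final BufferedDataTable inData,
            final ExecutionContext exec) throws Exception {
        RowInput in = new DataTableRowInput(inData);
        BufferedDataContainer container = exec.createDataContainer(inData.getDataTableSpec());
        RowOutput out = new BufferedDataTableRowOutput(container);
        DataRow row;
        while ((row = in.poll()) != null) {
            exec.checkCanceled();
            out.push(row);
        }
        out.close();       // flushes the rows into the container
        container.close(); // defensive close, mirroring the snippet above
        return container.getTable();
    }
}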