Use of org.knime.core.node.streamable.RowInput in project knime-core by knime.
Class AbstractColumnRefNodeModel, method createStreamableOperator:
/**
 * {@inheritDoc}
 */
@Override
public StreamableOperator createStreamableOperator(final PartitionInfo partitionInfo, final PortObjectSpec[] inSpecs) throws InvalidSettingsException {
    return new StreamableOperator() {

        @Override
        public void runFinal(final PortInput[] inputs, final PortOutput[] outputs, final ExecutionContext exec) throws Exception {
            ColumnRearranger[] cr = createRearranger((DataTableSpec)inSpecs[0], (DataTableSpec)inSpecs[1]);
            StreamableFunction func1 = cr[0].createStreamableFunction(0, 0);
            if (m_isSplitter) {
                StreamableFunction func2 = cr[1].createStreamableFunction(0, 1);
                RowInput rowInput = ((RowInput)inputs[0]);
                RowOutput rowOutput1 = ((RowOutput)outputs[0]);
                RowOutput rowOutput2 = ((RowOutput)outputs[1]);
                StreamableFunction.runFinalInterwoven(rowInput, func1, rowOutput1, func2, rowOutput2, exec);
            } else {
                func1.runFinal(inputs, outputs, exec);
            }
        }
    };
}
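When the node is a splitter, a single RowInput feeds two output streams: StreamableFunction.runFinalInterwoven reads each row once and routes it through both rearranger functions. The following is a minimal sketch of that pattern, not the KNIME implementation; it assumes StreamableFunction's per-row compute(DataRow) method, omits any lifecycle and progress handling the real helper performs, and routeThroughBoth is a hypothetical name.

import org.knime.core.data.DataRow;
import org.knime.core.node.streamable.RowInput;
import org.knime.core.node.streamable.RowOutput;
import org.knime.core.node.streamable.StreamableFunction;

final class InterwovenSketch {

    // Hypothetical helper illustrating the interwoven pattern: each row is polled
    // exactly once, transformed by both functions, and pushed to both outputs.
    static void routeThroughBoth(final RowInput in, final StreamableFunction f1, final RowOutput out1,
        final StreamableFunction f2, final RowOutput out2) throws Exception {
        DataRow row;
        while ((row = in.poll()) != null) {   // poll() returns null once the input is exhausted
            out1.push(f1.compute(row));
            out2.push(f2.compute(row));
        }
        in.close();
        out1.close();
        out2.close();
    }
}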
Use of org.knime.core.node.streamable.RowInput in project knime-core by knime.
Class RowFilter2PortNodeModel, method execute:
/**
 * {@inheritDoc}
 */
@Override
protected BufferedDataTable[] execute(final BufferedDataTable[] inData, final ExecutionContext exec) throws Exception {
    BufferedDataTable in = inData[0];
    // in case the node was configured and the workflow is closed
    // (and saved), the row filter isn't configured upon reloading.
    // here, we give it a chance to configure itself (e.g. find the column index)
    m_rowFilter.configure(in.getDataTableSpec());
    BufferedDataContainer match = exec.createDataContainer(in.getDataTableSpec());
    BufferedDataContainer miss = exec.createDataContainer(in.getDataTableSpec());
    RowOutput rowOutput1 = new BufferedDataTableRowOutput(match);
    RowOutput rowOutput2 = new BufferedDataTableRowOutput(miss);
    RowInput rowInput = new DataTableRowInput(inData[0]);
    // do it
    this.execute(rowInput, rowOutput1, rowOutput2, inData[0].size(), exec);
    // note: tables are closed in the private execute method
    return new BufferedDataTable[]{match.getTable(), miss.getTable()};
}
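Here DataTableRowInput and BufferedDataTableRowOutput adapt buffered tables to the streaming interfaces, so the non-streaming execute can reuse the same row-by-row logic as the streamable operator. The private execute it calls is not shown; a simplified sketch of such a match/miss routing loop, with a hypothetical RowPredicate standing in for the node's row filter, might look like this:

import org.knime.core.data.DataRow;
import org.knime.core.node.ExecutionContext;
import org.knime.core.node.streamable.RowInput;
import org.knime.core.node.streamable.RowOutput;

final class RowRoutingSketch {

    // Hypothetical stand-in for the node's row filter.
    interface RowPredicate {
        boolean matches(DataRow row, long index);
    }

    // Simplified routing loop: matching rows go to one output, all others to the second.
    // The real private execute is more involved; this only shows the RowInput/RowOutput handling.
    static void route(final RowInput in, final RowOutput matches, final RowOutput misses,
        final long rowCount, final ExecutionContext exec, final RowPredicate filter) throws Exception {
        DataRow row;
        long index = 0;
        while ((row = in.poll()) != null) {
            exec.checkCanceled();
            if (rowCount > 0) {
                exec.setProgress(index / (double)rowCount);
            }
            if (filter.matches(row, index)) {
                matches.push(row);
            } else {
                misses.push(row);
            }
            index++;
        }
        in.close();
        matches.close();
        misses.close();
    }
}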
Use of org.knime.core.node.streamable.RowInput in project knime-core by knime.
Class CSVWriterNodeModel, method createStreamableOperator:
/**
 * {@inheritDoc}
 */
@Override
public StreamableOperator createStreamableOperator(final PartitionInfo partitionInfo, final PortObjectSpec[] inSpecs) throws InvalidSettingsException {
    return new StreamableOperator() {

        @Override
        public void runFinal(final PortInput[] inputs, final PortOutput[] outputs, final ExecutionContext exec) throws Exception {
            assert outputs.length == 0;
            RowInput input = (RowInput)inputs[0];
            doIt(null, input, exec);
            return;
        }
    };
}
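The writer node produces no row output (hence the assert on outputs.length) and hands the RowInput to the node's private doIt method, which drains it. The actual CSV formatting is not shown here; below is a hedged sketch of how such a consumer typically polls a RowInput until it returns null, with a deliberately naive comma-joining loop in place of the real writer.

import org.knime.core.data.DataCell;
import org.knime.core.data.DataRow;
import org.knime.core.node.ExecutionContext;
import org.knime.core.node.streamable.RowInput;

final class CsvConsumerSketch {

    // Hypothetical consumer loop; the node's actual doIt method delegates the
    // formatting (quoting, separators, missing-value handling) to its CSV writer.
    static void writeRows(final RowInput input, final Appendable out, final ExecutionContext exec) throws Exception {
        DataRow row;
        while ((row = input.poll()) != null) {   // null signals end of input
            exec.checkCanceled();
            StringBuilder line = new StringBuilder(row.getKey().getString());
            for (DataCell cell : row) {
                line.append(',').append(cell.isMissing() ? "" : cell.toString());
            }
            out.append(line).append(System.lineSeparator());
        }
        input.close();
    }
}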
Use of org.knime.core.node.streamable.RowInput in project knime-core by knime.
Class DBWriterNodeModel, method createStreamableOperator:
/**
 * {@inheritDoc}
 */
@Override
public StreamableOperator createStreamableOperator(final PartitionInfo partitionInfo, final PortObjectSpec[] inSpecs) throws InvalidSettingsException {
    return new StreamableOperator() {

        @Override
        public void runFinal(final PortInput[] inputs, final PortOutput[] outputs, final ExecutionContext exec) throws Exception {
            exec.setProgress("Opening database connection to write data...");
            DatabaseConnectionSettings connSettings;
            PortObject portObj = ((PortObjectInput)inputs[1]).getPortObject();
            if (portObj != null && (portObj instanceof DatabaseConnectionPortObject)) {
                connSettings = ((DatabaseConnectionPortObject)portObj).getConnectionSettings(getCredentialsProvider());
            } else {
                connSettings = m_conn;
            }
            DBWriter writer = connSettings.getUtility().getWriter(connSettings);
            // write entire data
            m_errorMessage = writer.writeData(m_tableName, (RowInput)inputs[0], -1, m_append, exec, m_types,
                getCredentialsProvider(), m_batchSize, m_insertNullForMissingCols);
        }
    };
}
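The RowInput from the first port is passed straight to DBWriter.writeData; the -1 argument indicates that the total row count is unknown in a streamed execution. Presumably the non-streamed path can feed the same method by wrapping its BufferedDataTable and passing the table's actual size. The sketch below only illustrates that wrapping; writeAll is a hypothetical stand-in for the writeData call and its many node-specific arguments.

import org.knime.core.node.BufferedDataTable;
import org.knime.core.node.ExecutionContext;
import org.knime.core.node.streamable.DataTableRowInput;
import org.knime.core.node.streamable.RowInput;

final class DbWriteSketch {

    // Sketch only: wrapping a BufferedDataTable as a RowInput so the same row-consuming
    // write routine serves both the streamed and the non-streamed path.
    static void writeTable(final BufferedDataTable table, final ExecutionContext exec) throws Exception {
        RowInput input = new DataTableRowInput(table);
        long rowCount = table.size();   // known here; -1 in the streamed case
        writeAll(input, rowCount, exec);
    }

    // Hypothetical stand-in for DBWriter.writeData.
    static void writeAll(final RowInput input, final long rowCount, final ExecutionContext exec) throws Exception {
        // ... poll rows in batches, forward them to the database, report progress via exec ...
        input.close();
    }
}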
Use of org.knime.core.node.streamable.RowInput in project knime-core by knime.
Class UngroupNodeModel, method createStreamableOperator:
/**
 * {@inheritDoc}
 */
@Override
public StreamableOperator createStreamableOperator(final PartitionInfo partitionInfo, final PortObjectSpec[] inSpecs) throws InvalidSettingsException {
    DataTableSpec spec = (DataTableSpec)inSpecs[0];
    int[] idxs = getSelectedColIdxs(spec, getColumnNames(spec));
    UngroupOperation2 ugO = createUngroupOperation(spec, idxs);
    return new StreamableOperator() {

        @Override
        public void runFinal(final PortInput[] inputs, final PortOutput[] outputs, final ExecutionContext exec) throws Exception {
            RowInput input = (RowInput)inputs[0];
            RowOutput output = (RowOutput)outputs[0];
            ugO.compute(input, output, exec, -1, m_trans);
            input.close();
            output.close();
        }
    };
}
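UngroupOperation2.compute consumes the RowInput and pushes the expanded rows to the RowOutput, again with -1 as the unknown row count; the operator then closes both streams. As a generic reference, the following sketch shows the skeleton of a one-in/one-out StreamableOperator that processes rows itself; the per-row transform is a hypothetical placeholder, not the ungroup logic (which emits several rows per input row).

import org.knime.core.data.DataRow;
import org.knime.core.node.ExecutionContext;
import org.knime.core.node.streamable.PortInput;
import org.knime.core.node.streamable.PortOutput;
import org.knime.core.node.streamable.RowInput;
import org.knime.core.node.streamable.RowOutput;
import org.knime.core.node.streamable.StreamableOperator;

// Generic 1-in/1-out skeleton; transform(...) is a hypothetical per-row mapping.
abstract class OneToOneOperatorSketch extends StreamableOperator {

    abstract DataRow transform(DataRow row) throws Exception;

    @Override
    public void runFinal(final PortInput[] inputs, final PortOutput[] outputs, final ExecutionContext exec)
        throws Exception {
        RowInput input = (RowInput)inputs[0];
        RowOutput output = (RowOutput)outputs[0];
        DataRow row;
        while ((row = input.poll()) != null) {
            exec.checkCanceled();
            output.push(transform(row));
        }
        input.close();
        output.close();
    }
}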