Use of org.knime.core.node.streamable.StreamableOperator in project knime-core by KNIME.
The class CSVWriterNodeModel, method createStreamableOperator:
/**
 * {@inheritDoc}
 */
@Override
public StreamableOperator createStreamableOperator(final PartitionInfo partitionInfo, final PortObjectSpec[] inSpecs) throws InvalidSettingsException {
    return new StreamableOperator() {
        /**
         * Streams all incoming rows to the CSV writer.
         *
         * @param inputs streaming inputs; index 0 is the data table to write
         * @param outputs unused — this node is a sink and declares no outputs
         * @param exec context for progress reporting and cancellation
         * @throws Exception if writing fails or execution is canceled
         */
        @Override
        public void runFinal(final PortInput[] inputs, final PortOutput[] outputs, final ExecutionContext exec) throws Exception {
            // Sink node: nothing is produced downstream.
            assert outputs.length == 0;
            final RowInput input = (RowInput) inputs[0];
            // First argument is null in streaming mode — presumably doIt then
            // pulls rows from 'input' instead of a buffered table; confirm
            // against the doIt implementation.
            doIt(null, input, exec);
            // (removed a redundant 'return;' that ended this void method)
        }
    };
}
Use of org.knime.core.node.streamable.StreamableOperator in project knime-core by KNIME.
The class DBWriterNodeModel, method createStreamableOperator:
/**
 * {@inheritDoc}
 */
@Override
public StreamableOperator createStreamableOperator(final PartitionInfo partitionInfo, final PortObjectSpec[] inSpecs) throws InvalidSettingsException {
    return new StreamableOperator() {
        /**
         * Streams the incoming rows into the target database table.
         *
         * @param inputs index 0 carries the data rows, index 1 the optional
         *            database connection port object
         * @param outputs unused by this sink node
         * @param exec context for progress messages and cancellation
         * @throws Exception if the database write fails
         */
        @Override
        public void runFinal(final PortInput[] inputs, final PortOutput[] outputs, final ExecutionContext exec) throws Exception {
            exec.setProgress("Opening database connection to write data...");
            // Prefer the connection delivered on the optional connection port;
            // fall back to the node's own configured settings otherwise.
            // (instanceof is false for null, so no separate null check needed)
            final PortObject connectionPort = ((PortObjectInput) inputs[1]).getPortObject();
            final DatabaseConnectionSettings settings;
            if (connectionPort instanceof DatabaseConnectionPortObject) {
                settings = ((DatabaseConnectionPortObject) connectionPort).getConnectionSettings(getCredentialsProvider());
            } else {
                settings = m_conn;
            }
            final DBWriter dbWriter = settings.getUtility().getWriter(settings);
            // Push the entire streamed input into the table; any problem text
            // reported by the writer is kept for the node's warning message.
            m_errorMessage = dbWriter.writeData(m_tableName, (RowInput) inputs[0], -1, m_append, exec, m_types, getCredentialsProvider(), m_batchSize, m_insertNullForMissingCols);
        }
    };
}
Use of org.knime.core.node.streamable.StreamableOperator in project knime-core by KNIME.
The class FileReaderNodeModel, method createStreamableOperator:
@Override
public StreamableOperator createStreamableOperator(final PartitionInfo partitionInfo, final PortObjectSpec[] inSpecs) throws InvalidSettingsException {
    // Source node: reads the configured file and pushes rows to output 0.
    return new StreamableOperator() {
        /**
         * Reads the configured data file row by row and streams every row to
         * the single data output. Re-validates the reader settings first,
         * because the file may have changed since configuration.
         *
         * @param inputs unused — this node is a source and has no inputs
         * @param outputs index 0 receives the rows read from the file
         * @param exec context for progress messages and cancellation
         * @throws InvalidSettingsException if the settings no longer validate
         * @throws DuplicateKeyException if the file yields duplicate row IDs
         * @throws Exception on read errors or cancellation
         */
        @Override
        public void runFinal(final PortInput[] inputs, final PortOutput[] outputs, final ExecutionContext exec) throws Exception {
            assert inputs.length == 0;
            LOGGER.info("Preparing to read from '" + m_frSettings.getDataFileLocation().toString() + "'.");
            // check again the settings - especially file existence (under Linux
            // files could be deleted/renamed since last config-call...
            SettingsStatus status = m_frSettings.getStatusOfSettings(true, null);
            if (status.getNumOfErrors() > 0) {
                // abort with (up to 10) collected error messages
                throw new InvalidSettingsException(status.getAllErrorMessages(10));
            }
            DataTableSpec tSpec = m_frSettings.createDataTableSpec();
            FileTable fTable = new FileTable(tSpec, m_frSettings, m_frSettings.getSkippedColumns(), exec);
            // data output port
            RowOutput rowOutput = (RowOutput) outputs[0];
            int row = 0;
            FileRowIterator it = fTable.iterator();
            try {
                if (it.getZipEntryName() != null) {
                    // seems we are reading a ZIP archive.
                    LOGGER.info("Reading entry '" + it.getZipEntryName() + "' from the specified ZIP archive.");
                }
                while (it.hasNext()) {
                    row++;
                    DataRow next = it.next();
                    // capture the counter for the lazily-evaluated message lambda
                    final int finalRow = row;
                    exec.setMessage(() -> "Reading row #" + finalRow + " (\"" + next.getKey() + "\")");
                    exec.checkCanceled();
                    rowOutput.push(next);
                }
                // close the output before inspecting the source for leftovers
                rowOutput.close();
                if (it.zippedSourceHasMoreEntries()) {
                    // after reading til the end of the file this returns a valid
                    // result
                    setWarningMessage("Source is a ZIP archive with multiple " + "entries. Only reading first entry!");
                }
            } catch (DuplicateKeyException dke) {
                // rewrap with a hint pointing the user at the fix, keeping the
                // original exception as the cause
                String msg = dke.getMessage();
                if (msg == null) {
                    msg = "Duplicate row IDs";
                }
                msg += ". Consider making IDs unique in the advanced settings.";
                DuplicateKeyException newDKE = new DuplicateKeyException(msg);
                newDKE.initCause(dke);
                throw newDKE;
            }
            // user settings allow for truncating the table
            if (it.iteratorEndedEarly()) {
                setWarningMessage("Data was truncated due to user settings.");
            }
            // closes all sources.
            fTable.dispose();
        }
    };
}
Use of org.knime.core.node.streamable.StreamableOperator in project knime-core by KNIME.
The class UngroupNodeModel, method createStreamableOperator:
/**
 * {@inheritDoc}
 */
@Override
public StreamableOperator createStreamableOperator(final PartitionInfo partitionInfo, final PortObjectSpec[] inSpecs) throws InvalidSettingsException {
    // Resolve the selected columns and build the ungroup operation up front,
    // so configuration errors surface before streaming starts.
    final DataTableSpec inSpec = (DataTableSpec) inSpecs[0];
    final int[] selectedIndices = getSelectedColIdxs(inSpec, getColumnNames(inSpec));
    final UngroupOperation2 operation = createUngroupOperation(inSpec, selectedIndices);
    return new StreamableOperator() {
        /**
         * Runs the prepared ungroup operation over the streamed input and
         * closes both ports when done.
         */
        @Override
        public void runFinal(final PortInput[] inputs, final PortOutput[] outputs, final ExecutionContext exec) throws Exception {
            final RowInput in = (RowInput) inputs[0];
            final RowOutput out = (RowOutput) outputs[0];
            // -1: total row count is unknown in streaming mode
            operation.compute(in, out, exec, -1, m_trans);
            in.close();
            out.close();
        }
    };
}
Use of org.knime.core.node.streamable.StreamableOperator in project knime-core by KNIME.
The class SplitNodeModel2, method createStreamableOperator:
/**
 * {@inheritDoc}
 */
@Override
public StreamableOperator createStreamableOperator(final PartitionInfo partitionInfo, final PortObjectSpec[] inSpecs) throws InvalidSettingsException {
    // Lazily create the column-filter configuration if it does not exist yet.
    if (m_conf == null) {
        m_conf = createColFilterConf();
    }
    final DataTableSpec inSpec = (DataTableSpec) inSpecs[0];
    return new StreamableOperator() {
        /**
         * Splits the streamed input into the two output tables by running the
         * two column rearrangers interwoven over the same input.
         *
         * @param inputs index 0 carries the rows to split
         * @param outputs index 0 and 1 receive the two column partitions
         * @param exec context for progress and cancellation
         * @throws Exception if streaming fails
         */
        @Override
        public void runFinal(final PortInput[] inputs, final PortOutput[] outputs, final ExecutionContext exec) throws Exception {
            final ColumnRearranger[] rearrangers = createColumnRearrangers(inSpec);
            final StreamableFunction topFunction = rearrangers[0].createStreamableFunction(0, 0);
            final StreamableFunction bottomFunction = rearrangers[1].createStreamableFunction(0, 1);
            final RowInput in = (RowInput) inputs[0];
            final RowOutput topOut = (RowOutput) outputs[0];
            final RowOutput bottomOut = (RowOutput) outputs[1];
            // Drive both functions over the single input in one pass.
            StreamableFunction.runFinalInterwoven(in, topFunction, topOut, bottomFunction, bottomOut, exec);
        }
    };
}
Aggregations