Use of org.knime.core.node.streamable.RowInput in project knime-core by knime.
In class DateTimeToStringNodeModel, method createStreamableOperator:
/**
 * {@inheritDoc}
 */
@Override
public StreamableOperator createStreamableOperator(final PartitionInfo partitionInfo, final PortObjectSpec[] inSpecs)
    throws InvalidSettingsException {
    return new StreamableOperator() {

        @Override
        public void runFinal(final PortInput[] inputs, final PortOutput[] outputs, final ExecutionContext exec)
            throws Exception {
            final RowInput in = (RowInput) inputs[0];
            final RowOutput out = (RowOutput) outputs[0];
            final DataTableSpec inSpec = in.getDataTableSpec();
            final String[] includeList = m_colSelect.applyTo(inSpec).getIncludes();
            final int[] includeIndeces = Arrays.stream(m_colSelect.applyTo(inSpec).getIncludes())
                .mapToInt(s -> inSpec.findColumnIndex(s)).toArray();
            final boolean isReplace = m_isReplaceOrAppend.getStringValue().equals(OPTION_REPLACE);
            DataRow row;
            // poll rows one at a time, convert each included column to a StringCell and push the modified row
            while ((row = in.poll()) != null) {
                exec.checkCanceled();
                DataCell[] datacells = new DataCell[includeIndeces.length];
                for (int i = 0; i < includeIndeces.length; i++) {
                    if (isReplace) {
                        // replace mode: keep the original column name
                        final DataColumnSpecCreator dataColumnSpecCreator =
                            new DataColumnSpecCreator(includeList[i], StringCell.TYPE);
                        final TimeToStringCellFactory cellFac =
                            new TimeToStringCellFactory(dataColumnSpecCreator.createSpec(), includeIndeces[i]);
                        datacells[i] = cellFac.getCell(row);
                    } else {
                        // append mode: create a uniquely named column using the configured suffix
                        final DataColumnSpec dataColSpec = new UniqueNameGenerator(inSpec)
                            .newColumn(includeList[i] + m_suffix.getStringValue(), StringCell.TYPE);
                        final TimeToStringCellFactory cellFac =
                            new TimeToStringCellFactory(dataColSpec, includeIndeces[i]);
                        datacells[i] = cellFac.getCell(row);
                    }
                }
                if (isReplace) {
                    out.push(new ReplacedColumnsDataRow(row, datacells, includeIndeces));
                } else {
                    out.push(new AppendedColumnRow(row, datacells));
                }
            }
            in.close();
            out.close();
        }
    };
}
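Every usage on this page follows the same basic contract visible above: cast inputs[0] to RowInput, poll rows until poll() returns null, push the processed rows to the RowOutput, and close both ports when done. As a reference point, here is a minimal sketch of that skeleton; it is not taken from knime-core, transformRow is a hypothetical placeholder for the node-specific row processing, and imports are omitted as in the snippets on this page:

@Override
public StreamableOperator createStreamableOperator(final PartitionInfo partitionInfo, final PortObjectSpec[] inSpecs)
    throws InvalidSettingsException {
    return new StreamableOperator() {

        @Override
        public void runFinal(final PortInput[] inputs, final PortOutput[] outputs, final ExecutionContext exec)
            throws Exception {
            final RowInput in = (RowInput) inputs[0];
            final RowOutput out = (RowOutput) outputs[0];
            DataRow row;
            // poll() blocks until the next row is available and returns null once the input is exhausted
            while ((row = in.poll()) != null) {
                exec.checkCanceled();
                out.push(transformRow(row)); // transformRow(...) is a placeholder, not a KNIME API
            }
            in.close();
            out.close();
        }
    };
}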
Use of org.knime.core.node.streamable.RowInput in project knime-core by knime.
In class ColumnAppenderNodeModel, method createStreamableOperator:
// ////////////// STREAMING FUNCTIONS ////////////////

/**
 * {@inheritDoc}
 */
@Override
public StreamableOperator createStreamableOperator(final PartitionInfo partitionInfo, final PortObjectSpec[] inSpecs)
    throws InvalidSettingsException {
    return new StreamableOperator() {

        @Override
        public void runFinal(final PortInput[] inputs, final PortOutput[] outputs, final ExecutionContext exec)
            throws Exception {
            RowInput in1 = (RowInput) inputs[0];
            RowInput in2 = (RowInput) inputs[1];
            RowOutput out = (RowOutput) outputs[0];
            CustomRowIterator tableIt1 = new CustomRowIteratorImpl2(in1);
            CustomRowIterator tableIt2 = new CustomRowIteratorImpl2(in2);
            compute(tableIt1, tableIt2,
                in1.getDataTableSpec().getNumColumns() + in2.getDataTableSpec().getNumColumns(), row -> {
                    out.push(row);
                }, exec, -1, -1);
            // poll all the remaining rows if there are any but don't do anything with them
            while (tableIt1.hasNext()) {
                tableIt1.next();
            }
            while (tableIt2.hasNext()) {
                tableIt2.next();
            }
            in1.close();
            in2.close();
            out.close();
        }
    };
}
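CustomRowIterator and CustomRowIteratorImpl2 are private helpers of ColumnAppenderNodeModel and are not shown on this page; conceptually they adapt the pull-style RowInput.poll() to a hasNext()/next() style so that the shared compute(...) routine can iterate streaming and non-streaming inputs uniformly. A hypothetical adapter illustrating that idea (names and interface are assumptions, not the actual KNIME classes) could look like this:

// Hypothetical look-ahead adapter that exposes a RowInput through hasNext()/next() semantics.
private static final class RowInputIterator {

    private final RowInput m_input;

    private DataRow m_next; // one-row look-ahead, null once the input is exhausted

    RowInputIterator(final RowInput input) throws InterruptedException {
        m_input = input;
        m_next = input.poll();
    }

    boolean hasNext() {
        return m_next != null;
    }

    DataRow next() throws InterruptedException {
        final DataRow current = m_next;
        m_next = m_input.poll();
        return current;
    }
}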
Use of org.knime.core.node.streamable.RowInput in project knime-core by knime.
In class LagColumnStreamableOperator, method runFinal:
/**
 * {@inheritDoc}
 */
@Override
public void runFinal(final PortInput[] inputs, final PortOutput[] outputs, final ExecutionContext exec) throws Exception {
    long counter = 0;
    int maxLag = m_configuration.getLagInterval() * m_configuration.getLag();
    RingBuffer ringBuffer = new RingBuffer(maxLag);
    RowInput input = (RowInput) inputs[0];
    RowOutput output = (RowOutput) outputs[0];
    int skippedFirstCount = !m_configuration.isSkipInitialIncompleteRows() ? -1
        : m_configuration.getLagInterval() * m_configuration.getLag();
    DataRow row;
    while ((row = input.poll()) != null) {
        if (counter >= skippedFirstCount) {
            DataCell[] newCells = getAdditionalCells(ringBuffer);
            output.push(copyWithNewCells(row, newCells));
        }
        DataCell toBeCached = m_columnIndex < 0 ? new StringCell(row.getKey().toString()) : row.getCell(m_columnIndex);
        ringBuffer.add(toBeCached);
        setProgress(exec, counter, row);
        counter += 1;
    }
    if (!m_configuration.isSkipLastIncompleteRows()) {
        DataCell[] missings = new DataCell[input.getDataTableSpec().getNumColumns()];
        Arrays.fill(missings, DataType.getMissingCell());
        for (int i = 0; i < maxLag; i++) {
            DataRow missingRow = new DefaultRow("overflow-" + i, missings);
            DataCell[] newCells = getAdditionalCells(ringBuffer);
            output.push(copyWithNewCells(missingRow, newCells));
            ringBuffer.add(DataType.getMissingCell());
        }
    }
    output.close();
}
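RingBuffer, getAdditionalCells and copyWithNewCells are internals of LagColumnStreamableOperator rather than part of the streaming API. The underlying idea is a fixed-capacity buffer that always holds the last maxLag cached cells, so the lagged value(s) for the current row can be read back while new values keep overwriting the oldest slots. A minimal hypothetical ring buffer showing the idea (not the actual KNIME class) might look like this:

// Hypothetical fixed-capacity ring buffer of DataCells; slots start out as missing cells.
private static final class SimpleRingBuffer {

    private final DataCell[] m_cells;

    private int m_next; // index of the slot that will be overwritten next, i.e. the oldest entry

    SimpleRingBuffer(final int capacity) {
        m_cells = new DataCell[capacity];
        Arrays.fill(m_cells, DataType.getMissingCell());
    }

    void add(final DataCell cell) {
        m_cells[m_next] = cell;
        m_next = (m_next + 1) % m_cells.length;
    }

    /** @return the cell added 'lag' insertions ago (1 = most recently added). */
    DataCell get(final int lag) {
        return m_cells[((m_next - lag) % m_cells.length + m_cells.length) % m_cells.length];
    }
}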
Use of org.knime.core.node.streamable.RowInput in project knime-core by knime.
In class RowFilterNodeModel, method createStreamableOperator:
/**
 * {@inheritDoc}
 */
@Override
public StreamableOperator createStreamableOperator(final PartitionInfo partitionInfo, final PortObjectSpec[] inSpecs)
    throws InvalidSettingsException {
    return new StreamableOperator() {

        @Override
        public StreamableOperatorInternals saveInternals() {
            return null;
        }

        @Override
        public void runFinal(final PortInput[] inputs, final PortOutput[] outputs, final ExecutionContext ctx)
            throws Exception {
            RowInput in = (RowInput) inputs[0];
            RowOutput out = (RowOutput) outputs[0];
            RowFilterNodeModel.this.execute(in, out, ctx);
        }
    };
}
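This snippet contains no row handling at all because RowFilterNodeModel shares one execute(RowInput, RowOutput, ExecutionContext) method between the streaming path and the regular BufferedDataTable path. Such a shared method boils down to a poll/test/push loop; a hedged sketch, where both the method name filterRows and the matches(row) predicate are placeholders standing in for the node's actual filter settings, could be:

// Sketch of a shared filter loop over the streaming abstractions; matches(...) is a placeholder predicate.
private void filterRows(final RowInput in, final RowOutput out, final ExecutionContext ctx) throws Exception {
    DataRow row;
    while ((row = in.poll()) != null) {
        ctx.checkCanceled();
        if (matches(row)) {
            out.push(row);
        }
    }
    in.close();
    out.close();
}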
Use of org.knime.core.node.streamable.RowInput in project knime-core by knime.
In class ReferenceColumnResorterNodeModel, method createStreamableOperator:
/**
 * {@inheritDoc}
 */
@Override
public StreamableOperator createStreamableOperator(final PartitionInfo partitionInfo, final PortObjectSpec[] inSpecs)
    throws InvalidSettingsException {
    return new StreamableOperator() {

        private String[] m_streamableOperatorOrder;

        /**
         * {@inheritDoc}
         */
        @Override
        public void runIntermediate(final PortInput[] inputs, final ExecutionContext exec) throws Exception {
            BufferedDataTable orderTable = (BufferedDataTable) ((PortObjectInput) inputs[1]).getPortObject();
            m_streamableOperatorOrder = readOrderFromTable(orderTable);
        }

        @Override
        public void runFinal(final PortInput[] inputs, final PortOutput[] outputs, final ExecutionContext exec)
            throws Exception {
            RowInput dataInput = (RowInput) inputs[0];
            DataTableSpec dataInputSpec = dataInput.getDataTableSpec();
            ColumnRearranger rearranger = createColumnRearranger(dataInputSpec, m_streamableOperatorOrder);
            StreamableFunction streamableFunction = rearranger.createStreamableFunction();
            streamableFunction.runFinal(new PortInput[] { dataInput }, outputs, exec);
        }

        /**
         * {@inheritDoc}
         */
        @Override
        public StreamableOperatorInternals saveInternals() {
            return createStreamableOperatorInternalsFromOrder(m_streamableOperatorOrder);
        }

        /**
         * {@inheritDoc}
         */
        @Override
        public void loadInternals(final StreamableOperatorInternals internals) {
            m_streamableOperatorOrder = readOrderFromStreamableOperatorInternals(internals);
        }
    };
}
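The three extra callbacks are what make this operator usable in a distributed streaming setting: runIntermediate reads the desired column order once from the non-streamed reference table at port 1, saveInternals/loadInternals let that order travel between operator instances as StreamableOperatorInternals, and runFinal delegates the actual reordering of the streamed data port to the ColumnRearranger's streamable function. The helpers createStreamableOperatorInternalsFromOrder and readOrderFromStreamableOperatorInternals are not shown on this page; a hypothetical round-trip under the assumption that SimpleStreamableOperatorInternals from org.knime.core.node.streamable.simple is used (method names, key, and exact calls are assumptions to be checked against the real helpers) might look like this:

// Hypothetical sketch of the order round-trip via operator internals; the real helper methods may differ.
private static StreamableOperatorInternals saveOrderToInternals(final String[] order) {
    SimpleStreamableOperatorInternals internals = new SimpleStreamableOperatorInternals();
    internals.getConfig().addStringArray("columnOrder", order); // "columnOrder" is an assumed key
    return internals;
}

private static String[] loadOrderFromInternals(final StreamableOperatorInternals internals) {
    try {
        return ((SimpleStreamableOperatorInternals) internals).getConfig().getStringArray("columnOrder");
    } catch (InvalidSettingsException e) {
        throw new IllegalStateException("Column order has not been stored in the operator internals", e);
    }
}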