Use of org.knime.core.node.streamable.RowOutput in project knime-core by knime.
Class RuleEngineFilter2PortsNodeModel, method createStreamableOperator:
/**
 * {@inheritDoc}
 */
@Override
public StreamableOperator createStreamableOperator(final PartitionInfo partitionInfo, final PortObjectSpec[] inSpecs) throws InvalidSettingsException {
    return new StreamableOperator() {
        private SimpleStreamableOperatorInternals m_internals;

        /**
         * {@inheritDoc}
         */
        @Override
        public void loadInternals(final StreamableOperatorInternals internals) {
            m_internals = (SimpleStreamableOperatorInternals) internals;
        }

        /**
         * {@inheritDoc}
         */
        @Override
        public void runIntermediate(final PortInput[] inputs, final ExecutionContext exec) throws Exception {
            // count number of rows (first pass; the count is stored in the operator internals for progress reporting)
            long count = 0;
            RowInput rowInput = (RowInput) inputs[DATA_PORT];
            while (rowInput.poll() != null) {
                count++;
            }
            m_internals.getConfig().addLong(CFG_ROW_COUNT, count);
        }

        /**
         * {@inheritDoc}
         */
        @Override
        public StreamableOperatorInternals saveInternals() {
            return m_internals;
        }

        @Override
        public void runFinal(final PortInput[] inputs, final PortOutput[] outputs, final ExecutionContext exec) throws Exception {
            long rowCount = -1L;
            if (m_internals.getConfig().containsKey(CFG_ROW_COUNT)) {
                rowCount = m_internals.getConfig().getLong(CFG_ROW_COUNT);
            }
            m_rulesList.clear();
            final PortInput rulePort = inputs[RULE_PORT];
            if (rulePort instanceof PortObjectInput) {
                PortObjectInput poRule = (PortObjectInput) rulePort;
                m_rulesList.addAll(RuleEngineVariable2PortsNodeModel.rules((BufferedDataTable) poRule.getPortObject(), m_settings, RuleNodeSettings.RuleFilter));
            } else if (rulePort instanceof RowInput) {
                RowInput riRule = (RowInput) rulePort;
                m_rulesList.addAll(RuleEngineVariable2PortsNodeModel.rules(riRule, m_settings, RuleNodeSettings.RuleFilter));
            }
            final DataTableSpec spec = (DataTableSpec) inSpecs[DATA_PORT];
            try {
                parseRules(spec, RuleNodeSettings.RuleSplitter);
            } catch (final ParseException e) {
                throw new InvalidSettingsException(e);
            }
            final RowInput inputPartitions = (RowInput) inputs[DATA_PORT];
            final List<Rule> rules = parseRules(inputPartitions.getDataTableSpec(), RuleNodeSettings.RuleFilter);
            final RowOutput first = (RowOutput) outputs[0];
            final int nrOutPorts = getNrOutPorts();
            // in filter mode there is only one output port, so rows routed to the "second" output are silently discarded
            final RowOutput second = nrOutPorts > 1 ? (RowOutput) outputs[1] : new RowOutput() {
                @Override
                public void push(final DataRow row) throws InterruptedException {
                    // do nothing
                }

                @Override
                public void close() throws InterruptedException {
                    // do nothing
                }
            };
            final RowOutput[] containers = new RowOutput[] { first, second };
            // decide whether matching rows go to the first or the second output
            final int matchIndex = m_includeOnMatch ? 0 : 1;
            final int otherIndex = 1 - matchIndex;
            try {
                final MutableLong rowIdx = new MutableLong(0L);
                final long rows = rowCount;
                final VariableProvider provider = new VariableProvider() {
                    @Override
                    public Object readVariable(final String name, final Class<?> type) {
                        return RuleEngineFilter2PortsNodeModel.this.readVariable(name, type);
                    }

                    @Override
                    @Deprecated
                    public int getRowCount() {
                        throw new UnsupportedOperationException();
                    }

                    @Override
                    public long getRowCountLong() {
                        return rows;
                    }

                    @Override
                    @Deprecated
                    public int getRowIndex() {
                        throw new UnsupportedOperationException();
                    }

                    @Override
                    public long getRowIndexLong() {
                        return rowIdx.longValue();
                    }
                };
                DataRow row;
                while ((row = inputPartitions.poll()) != null) {
                    rowIdx.increment();
                    if (rows > 0) {
                        exec.setProgress(rowIdx.longValue() / (double) rows, () -> "Adding row " + rowIdx.longValue() + " of " + rows);
                    } else {
                        exec.setMessage(() -> "Adding row " + rowIdx.longValue() + " of " + rows);
                    }
                    exec.checkCanceled();
                    boolean wasMatch = false;
                    for (Rule r : rules) {
                        if (r.getCondition().matches(row, provider).getOutcome() == MatchState.matchedAndStop) {
                            // r.getSideEffect().perform(row, provider);
                            DataValue value = r.getOutcome().getComputedResult(row, provider);
                            if (value instanceof BooleanValue) {
                                final BooleanValue bv = (BooleanValue) value;
                                containers[bv.getBooleanValue() ? matchIndex : otherIndex].push(row);
                            } else {
                                containers[matchIndex].push(row);
                            }
                            wasMatch = true;
                            break;
                        }
                    }
                    if (!wasMatch) {
                        containers[otherIndex].push(row);
                    }
                }
            } finally {
                try {
                    second.close();
                } finally {
                    first.close();
                }
            }
        }
    };
}
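The row count collected in runIntermediate above only reaches runFinal if the node also asks the streaming framework for that extra pass and merges the partial counts of distributed partitions. A minimal sketch of those companion hooks is shown below; it is not copied from RuleEngineFilter2PortsNodeModel, and it assumes the standard createInitialStreamableOperatorInternals/iterate/createMergeOperator methods of the KNIME streaming API.

// Sketch only: companion hooks for the row-counting runIntermediate pass above.
@Override
public StreamableOperatorInternals createInitialStreamableOperatorInternals() {
    return new SimpleStreamableOperatorInternals();
}

@Override
public boolean iterate(final StreamableOperatorInternals internals) {
    // ask for one intermediate (counting) pass until the row count has been stored
    return !((SimpleStreamableOperatorInternals) internals).getConfig().containsKey(CFG_ROW_COUNT);
}

@Override
public MergeOperator createMergeOperator() {
    return new MergeOperator() {
        @Override
        public StreamableOperatorInternals mergeIntermediate(final StreamableOperatorInternals[] operators) {
            return sumRowCounts(operators);
        }

        @Override
        public StreamableOperatorInternals mergeFinal(final StreamableOperatorInternals[] operators) {
            return sumRowCounts(operators);
        }

        private StreamableOperatorInternals sumRowCounts(final StreamableOperatorInternals[] operators) {
            // add up the partial row counts reported by the individual partitions
            long count = 0;
            for (StreamableOperatorInternals o : operators) {
                count += ((SimpleStreamableOperatorInternals) o).getConfig().getLong(CFG_ROW_COUNT, 0L);
            }
            SimpleStreamableOperatorInternals merged = new SimpleStreamableOperatorInternals();
            merged.getConfig().addLong(CFG_ROW_COUNT, count);
            return merged;
        }
    };
}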
Use of org.knime.core.node.streamable.RowOutput in project knime-core by knime.
Class NewToOldTimeNodeModel, method createStreamableOperator:
/**
 * {@inheritDoc}
 */
@Override
public StreamableOperator createStreamableOperator(final PartitionInfo partitionInfo, final PortObjectSpec[] inSpecs) throws InvalidSettingsException {
    return new StreamableOperator() {
        @Override
        public StreamableOperatorInternals saveInternals() {
            return null;
        }

        @Override
        public void runFinal(final PortInput[] inputs, final PortOutput[] outputs, final ExecutionContext exec) throws Exception {
            final RowInput in = (RowInput) inputs[0];
            final RowOutput out = (RowOutput) outputs[0];
            final DataTableSpec inSpec = in.getDataTableSpec();
            String[] includeList = m_colSelect.applyTo(inSpec).getIncludes();
            final int[] includeIndeces = Arrays.stream(includeList).mapToInt(s -> inSpec.findColumnIndex(s)).toArray();
            DataRow row;
            while ((row = in.poll()) != null) {
                exec.checkCanceled();
                // convert each included column of the current row to a legacy DateAndTimeCell
                DataCell[] datacells = new DataCell[includeIndeces.length];
                for (int i = 0; i < includeIndeces.length; i++) {
                    if (m_isReplaceOrAppend.getStringValue().equals(OPTION_REPLACE)) {
                        final DataColumnSpecCreator dataColumnSpecCreator = new DataColumnSpecCreator(includeList[i], DateAndTimeCell.TYPE);
                        final ConvertTimeCellFactory cellFac = new ConvertTimeCellFactory(dataColumnSpecCreator.createSpec(), includeIndeces[i]);
                        datacells[i] = cellFac.getCells(row)[0];
                    } else {
                        final DataColumnSpec dataColSpec = new UniqueNameGenerator(inSpec).newColumn(includeList[i] + m_suffix.getStringValue(), DateAndTimeCell.TYPE);
                        final ConvertTimeCellFactory cellFac = new ConvertTimeCellFactory(dataColSpec, includeIndeces[i]);
                        datacells[i] = cellFac.getCells(row)[0];
                    }
                }
                if (m_isReplaceOrAppend.getStringValue().equals(OPTION_REPLACE)) {
                    out.push(new ReplacedColumnsDataRow(row, datacells, includeIndeces));
                } else {
                    out.push(new AppendedColumnRow(row, datacells));
                }
            }
            in.close();
            out.close();
        }
    };
}
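For a plain one-in/one-out row transformer like this, the node also declares which ports may be streamed, so that the framework hands it RowInput/RowOutput objects instead of fully materialized tables. A typical declaration is sketched below; the concrete role choices are an assumption for illustration, not copied from NewToOldTimeNodeModel.

// Sketch: typical port role declarations for a row-wise transformer node.
@Override
public InputPortRole[] getInputPortRoles() {
    // rows can be processed one by one and the work can be split across partitions
    return new InputPortRole[]{InputPortRole.DISTRIBUTED_STREAMABLE};
}

@Override
public OutputPortRole[] getOutputPortRoles() {
    return new OutputPortRole[]{OutputPortRole.DISTRIBUTED};
}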
Use of org.knime.core.node.streamable.RowOutput in project knime-core by knime.
Class DateTimeToStringNodeModel, method createStreamableOperator:
/**
 * {@inheritDoc}
 */
@Override
public StreamableOperator createStreamableOperator(final PartitionInfo partitionInfo, final PortObjectSpec[] inSpecs) throws InvalidSettingsException {
    return new StreamableOperator() {
        @Override
        public void runFinal(final PortInput[] inputs, final PortOutput[] outputs, final ExecutionContext exec) throws Exception {
            final RowInput in = (RowInput) inputs[0];
            final RowOutput out = (RowOutput) outputs[0];
            final DataTableSpec inSpec = in.getDataTableSpec();
            final String[] includeList = m_colSelect.applyTo(inSpec).getIncludes();
            final int[] includeIndeces = Arrays.stream(m_colSelect.applyTo(inSpec).getIncludes()).mapToInt(s -> inSpec.findColumnIndex(s)).toArray();
            final boolean isReplace = m_isReplaceOrAppend.getStringValue().equals(OPTION_REPLACE);
            DataRow row;
            while ((row = in.poll()) != null) {
                exec.checkCanceled();
                DataCell[] datacells = new DataCell[includeIndeces.length];
                for (int i = 0; i < includeIndeces.length; i++) {
                    if (isReplace) {
                        final DataColumnSpecCreator dataColumnSpecCreator = new DataColumnSpecCreator(includeList[i], StringCell.TYPE);
                        final TimeToStringCellFactory cellFac = new TimeToStringCellFactory(dataColumnSpecCreator.createSpec(), includeIndeces[i]);
                        datacells[i] = cellFac.getCell(row);
                    } else {
                        final DataColumnSpec dataColSpec = new UniqueNameGenerator(inSpec).newColumn(includeList[i] + m_suffix.getStringValue(), StringCell.TYPE);
                        final TimeToStringCellFactory cellFac = new TimeToStringCellFactory(dataColSpec, includeIndeces[i]);
                        datacells[i] = cellFac.getCell(row);
                    }
                }
                if (isReplace) {
                    out.push(new ReplacedColumnsDataRow(row, datacells, includeIndeces));
                } else {
                    out.push(new AppendedColumnRow(row, datacells));
                }
            }
            in.close();
            out.close();
        }
    };
}
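Both this node and the previous one build a fresh column spec and cell factory for every row, although neither depends on the row being processed. A behaviorally equivalent arrangement would create the factories once before the poll loop, roughly as sketched below (an alternative, not the code of DateTimeToStringNodeModel):

// Sketch of a possible alternative: build the cell factories once, outside the row loop.
final TimeToStringCellFactory[] factories = new TimeToStringCellFactory[includeIndeces.length];
for (int i = 0; i < includeIndeces.length; i++) {
    final DataColumnSpec colSpec = isReplace
        ? new DataColumnSpecCreator(includeList[i], StringCell.TYPE).createSpec()
        : new UniqueNameGenerator(inSpec).newColumn(includeList[i] + m_suffix.getStringValue(), StringCell.TYPE);
    factories[i] = new TimeToStringCellFactory(colSpec, includeIndeces[i]);
}
DataRow row;
while ((row = in.poll()) != null) {
    exec.checkCanceled();
    final DataCell[] datacells = new DataCell[factories.length];
    for (int i = 0; i < factories.length; i++) {
        datacells[i] = factories[i].getCell(row);
    }
    out.push(isReplace ? new ReplacedColumnsDataRow(row, datacells, includeIndeces)
        : new AppendedColumnRow(row, datacells));
}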
Use of org.knime.core.node.streamable.RowOutput in project knime-core by knime.
Class SampleDataNodeModel, method createStreamableOperator:
/**
 * {@inheritDoc}
 */
@Override
public StreamableOperator createStreamableOperator(final PartitionInfo partitionInfo, final PortObjectSpec[] inSpecs) throws InvalidSettingsException {
    return new StreamableOperator() {
        @Override
        public void runFinal(final PortInput[] inputs, final PortOutput[] outputs, final ExecutionContext exec) throws Exception {
            DataTableSpec[] outSpecs = configure(new DataTableSpec[2]);
            DataTableSpec dataSpec = outSpecs[0];
            DataTableSpec clusterSpec = outSpecs[1];
            RowOutput dataOut = (RowOutput) outputs[0];
            RowOutput clusterOut = (RowOutput) outputs[1];
            run(dataSpec, dataOut, clusterSpec, clusterOut, exec);
        }
    };
}
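The actual row generation happens in run(...) (not shown here), which writes directly into the two RowOutputs. Purely as an illustration of the producer side of the RowOutput API, a generator loop could look like the sketch below; the helper name and the row contents are made up and are not part of SampleDataNodeModel.

// Hypothetical helper: push generated rows into a RowOutput and close it when done.
private static void pushSampleRows(final RowOutput dataOut, final long rowCount, final ExecutionContext exec)
        throws InterruptedException, CanceledExecutionException {
    for (long i = 0; i < rowCount; i++) {
        exec.checkCanceled();
        // a DefaultRow consists of a row key and the cells of the row
        dataOut.push(new DefaultRow(new RowKey("Row" + i), new DoubleCell(Math.random())));
    }
    dataOut.close(); // signals that no more rows will follow
}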
Use of org.knime.core.node.streamable.RowOutput in project knime-core by knime.
Class ColumnAppenderNodeModel, method createStreamableOperator:
// ////////////// STREAMING FUNCTIONS ////////////////
/**
 * {@inheritDoc}
 */
@Override
public StreamableOperator createStreamableOperator(final PartitionInfo partitionInfo, final PortObjectSpec[] inSpecs) throws InvalidSettingsException {
    return new StreamableOperator() {
        @Override
        public void runFinal(final PortInput[] inputs, final PortOutput[] outputs, final ExecutionContext exec) throws Exception {
            RowInput in1 = (RowInput) inputs[0];
            RowInput in2 = (RowInput) inputs[1];
            RowOutput out = (RowOutput) outputs[0];
            CustomRowIterator tableIt1 = new CustomRowIteratorImpl2(in1);
            CustomRowIterator tableIt2 = new CustomRowIteratorImpl2(in2);
            compute(tableIt1, tableIt2, in1.getDataTableSpec().getNumColumns() + in2.getDataTableSpec().getNumColumns(), row -> {
                out.push(row);
            }, exec, -1, -1);
            // poll all the remaining rows if there are any but don't do anything with them
            while (tableIt1.hasNext()) {
                tableIt1.next();
            }
            while (tableIt2.hasNext()) {
                tableIt2.next();
            }
            in1.close();
            in2.close();
            out.close();
        }
    };
}
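Note that the close() calls above only run if compute(...) and the draining loops finish normally. A slightly more defensive variant guarantees them with try/finally, roughly as sketched below (a variation, not the code of ColumnAppenderNodeModel):

// Sketch: same logic as above, but with the close() calls guaranteed via try/finally.
try {
    compute(tableIt1, tableIt2,
        in1.getDataTableSpec().getNumColumns() + in2.getDataTableSpec().getNumColumns(),
        row -> out.push(row), exec, -1, -1);
    // drain whatever is left on either input without using it
    while (tableIt1.hasNext()) {
        tableIt1.next();
    }
    while (tableIt2.hasNext()) {
        tableIt2.next();
    }
} finally {
    in1.close();
    in2.close();
    out.close();
}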