Use of org.knime.core.node.streamable.PartitionInfo in project knime-core by knime.
The class RuleEngineFilter2PortsNodeModel, method execute().
/**
* {@inheritDoc}
*/
@Override
protected BufferedDataTable[] execute(final BufferedDataTable[] inData, final ExecutionContext exec) throws Exception {
BufferedDataTable ruleTable = inData[RuleEngine2PortsNodeModel.RULE_PORT];
BufferedDataTable dataTable = inData[RuleEngine2PortsNodeModel.DATA_PORT];
// m_rulesList.clear();
// m_rulesList.addAll(RuleEngineVariable2PortsNodeModel.rules(ruleTable, m_settings, RuleNodeSettings.RuleFilter));
// final List<Rule> rules = parseRules(dataTable.getDataTableSpec(), RuleNodeSettings.RuleFilter);
final BufferedDataContainer first = exec.createDataContainer(dataTable.getDataTableSpec(), true);
final int nrOutPorts = getNrOutPorts();
final RowAppender second = nrOutPorts > 1 ? exec.createDataContainer(dataTable.getDataTableSpec(), true) : new RowAppender() {
@Override
public void addRowToTable(final DataRow row) {
// do nothing
}
};
// final RowAppender[] containers = new RowAppender[]{first, second};
// final int matchIndex = m_includeOnMatch ? 0 : 1;
// final int otherIndex = 1 - matchIndex;
//
final BufferedDataTable[] ret = new BufferedDataTable[nrOutPorts];
// try {
// final MutableLong rowIdx = new MutableLong();
// final long rows = inData[DATA_PORT].size();
// final VariableProvider provider = new VariableProvider() {
// @Override
// public Object readVariable(final String name, final Class<?> type) {
// return RuleEngineFilter2PortsNodeModel.this.readVariable(name, type);
// }
//
// @Override
// @Deprecated
// public int getRowCount() {
// throw new UnsupportedOperationException();
// }
//
// @Override
// public long getRowCountLong() {
// return rows;
// }
//
// @Override
// @Deprecated
// public int getRowIndex() {
// throw new UnsupportedOperationException();
// }
//
// @Override
// public long getRowIndexLong() {
// return rowIdx.longValue();
// }
// };
// for (DataRow row : inData[DATA_PORT]) {
// rowIdx.increment();
// exec.setProgress(rowIdx.longValue() / (double)rows, "Adding row " + rowIdx.longValue() + " of " + rows);
// exec.checkCanceled();
// boolean wasMatch = false;
// for (final Rule r : rules) {
// if (r.getCondition().matches(row, provider).getOutcome() == MatchState.matchedAndStop) {
// // r.getSideEffect().perform(row, provider);
// DataValue value = r.getOutcome().getComputedResult(row, provider);
// if (value instanceof BooleanValue) {
// final BooleanValue bv = (BooleanValue)value;
// containers[bv.getBooleanValue() ? matchIndex : otherIndex].addRowToTable(row);
// } else {
// containers[matchIndex].addRowToTable(row);
// }
// wasMatch = true;
// break;
// }
// }
// if (!wasMatch) {
// containers[otherIndex].addRowToTable(row);
// }
// }
// } finally {
// first.close();
// ret[0] = first.getTable();
// if (second instanceof BufferedDataContainer) {
// BufferedDataContainer container = (BufferedDataContainer)second;
// container.close();
// ret[1] = container.getTable();
// }
// }
final PortOutput[] outputs = new PortOutput[] { new BufferedDataTableRowOutput(first), new RowAppenderRowOutput(second) };
final StreamableOperator streamableOperator = createStreamableOperator(new PartitionInfo(0, 1), new DataTableSpec[] { inData[0].getSpec(), inData[1].getSpec() });
final PortInput[] inputs = new PortInput[] { new DataTableRowInput(dataTable), new DataTableRowInput(ruleTable) };
final SimpleStreamableOperatorInternals internals = new SimpleStreamableOperatorInternals();
internals.getConfig().addLong(CFG_ROW_COUNT, dataTable.size());
streamableOperator.loadInternals(internals);
streamableOperator.runFinal(inputs, outputs, exec);
ret[0] = first.getTable();
if (ret.length > 1) {
ret[1] = ((BufferedDataContainer) second).getTable();
}
return ret;
}
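The method above runs the node's streaming implementation even in the classic execute() path: the materialized tables are wrapped in DataTableRowInput and BufferedDataTableRowOutput, a single-partition PartitionInfo(0, 1) is handed to createStreamableOperator, and the row count is passed through SimpleStreamableOperatorInternals. The following is a minimal sketch of that delegation pattern for a hypothetical one-in/one-out node; everything outside the streaming API shown above (the node class, the identity output spec) is an assumption for illustration.
// Minimal sketch (not the original source): delegating execute() to the
// streaming code path with a single local partition.
@Override
protected BufferedDataTable[] execute(final BufferedDataTable[] inData, final ExecutionContext exec) throws Exception {
    // assume for this sketch that the output spec equals the input spec
    final BufferedDataContainer container = exec.createDataContainer(inData[0].getDataTableSpec(), true);
    // wrap the buffered tables in the streaming abstractions
    final PortInput[] inputs = new PortInput[]{new DataTableRowInput(inData[0])};
    final PortOutput[] outputs = new PortOutput[]{new BufferedDataTableRowOutput(container)};
    // PartitionInfo(0, 1) marks this as partition 0 of 1, i.e. a plain local run
    final StreamableOperator op = createStreamableOperator(new PartitionInfo(0, 1), new DataTableSpec[]{inData[0].getDataTableSpec()});
    // runFinal is expected to close the outputs, which in turn closes the container
    op.runFinal(inputs, outputs, exec);
    return new BufferedDataTable[]{container.getTable()};
}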
Use of org.knime.core.node.streamable.PartitionInfo in project knime-core by knime.
The class NewToOldTimeNodeModel, method createStreamableOperator().
/**
* {@inheritDoc}
*/
@Override
public StreamableOperator createStreamableOperator(final PartitionInfo partitionInfo, final PortObjectSpec[] inSpecs) throws InvalidSettingsException {
return new StreamableOperator() {
@Override
public StreamableOperatorInternals saveInternals() {
return null;
}
@Override
public void runFinal(final PortInput[] inputs, final PortOutput[] outputs, final ExecutionContext exec) throws Exception {
final RowInput in = (RowInput) inputs[0];
final RowOutput out = (RowOutput) outputs[0];
final DataTableSpec inSpec = in.getDataTableSpec();
String[] includeList = m_colSelect.applyTo(inSpec).getIncludes();
final int[] includeIndeces = Arrays.stream(includeList).mapToInt(s -> inSpec.findColumnIndex(s)).toArray();
DataRow row;
while ((row = in.poll()) != null) {
exec.checkCanceled();
DataCell[] datacells = new DataCell[includeIndeces.length];
for (int i = 0; i < includeIndeces.length; i++) {
if (m_isReplaceOrAppend.getStringValue().equals(OPTION_REPLACE)) {
final DataColumnSpecCreator dataColumnSpecCreator = new DataColumnSpecCreator(includeList[i], DateAndTimeCell.TYPE);
final ConvertTimeCellFactory cellFac = new ConvertTimeCellFactory(dataColumnSpecCreator.createSpec(), includeIndeces[i]);
datacells[i] = cellFac.getCells(row)[0];
} else {
final DataColumnSpec dataColSpec = new UniqueNameGenerator(inSpec).newColumn(includeList[i] + m_suffix.getStringValue(), DateAndTimeCell.TYPE);
final ConvertTimeCellFactory cellFac = new ConvertTimeCellFactory(dataColSpec, includeIndeces[i]);
datacells[i] = cellFac.getCells(row)[0];
}
}
if (m_isReplaceOrAppend.getStringValue().equals(OPTION_REPLACE)) {
out.push(new ReplacedColumnsDataRow(row, datacells, includeIndeces));
} else {
out.push(new AppendedColumnRow(row, datacells));
}
}
in.close();
out.close();
}
};
}
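All of the operators in this listing share the same RowInput/RowOutput contract: poll rows until null, convert, push, then close both ends. Stripped of the date-and-time specifics, the loop reduces to the sketch below; the convert(...) helper is a placeholder, not part of the KNIME API.
// Skeleton of the streaming row loop (single data in-port, single data out-port;
// convert(...) is a hypothetical per-row transformation).
@Override
public void runFinal(final PortInput[] inputs, final PortOutput[] outputs, final ExecutionContext exec) throws Exception {
    final RowInput in = (RowInput)inputs[0];
    final RowOutput out = (RowOutput)outputs[0];
    DataRow row;
    while ((row = in.poll()) != null) { // poll() returns null once the input is exhausted
        exec.checkCanceled();           // keep the node cancelable while streaming
        out.push(convert(row));         // hypothetical per-row conversion
    }
    in.close();                         // release the input
    out.close();                        // tell downstream nodes that no more rows follow
}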
Use of org.knime.core.node.streamable.PartitionInfo in project knime-core by knime.
The class DateTimeToStringNodeModel, method createStreamableOperator().
/**
* {@inheritDoc}
*/
@Override
public StreamableOperator createStreamableOperator(final PartitionInfo partitionInfo, final PortObjectSpec[] inSpecs) throws InvalidSettingsException {
return new StreamableOperator() {
@Override
public void runFinal(final PortInput[] inputs, final PortOutput[] outputs, final ExecutionContext exec) throws Exception {
final RowInput in = (RowInput) inputs[0];
final RowOutput out = (RowOutput) outputs[0];
final DataTableSpec inSpec = in.getDataTableSpec();
final String[] includeList = m_colSelect.applyTo(inSpec).getIncludes();
final int[] includeIndeces = Arrays.stream(m_colSelect.applyTo(inSpec).getIncludes()).mapToInt(s -> inSpec.findColumnIndex(s)).toArray();
final boolean isReplace = m_isReplaceOrAppend.getStringValue().equals(OPTION_REPLACE);
DataRow row;
while ((row = in.poll()) != null) {
exec.checkCanceled();
DataCell[] datacells = new DataCell[includeIndeces.length];
for (int i = 0; i < includeIndeces.length; i++) {
if (isReplace) {
final DataColumnSpecCreator dataColumnSpecCreator = new DataColumnSpecCreator(includeList[i], StringCell.TYPE);
final TimeToStringCellFactory cellFac = new TimeToStringCellFactory(dataColumnSpecCreator.createSpec(), includeIndeces[i]);
datacells[i] = cellFac.getCell(row);
} else {
final DataColumnSpec dataColSpec = new UniqueNameGenerator(inSpec).newColumn(includeList[i] + m_suffix.getStringValue(), StringCell.TYPE);
final TimeToStringCellFactory cellFac = new TimeToStringCellFactory(dataColSpec, includeIndeces[i]);
datacells[i] = cellFac.getCell(row);
}
}
if (isReplace) {
out.push(new ReplacedColumnsDataRow(row, datacells, includeIndeces));
} else {
out.push(new AppendedColumnRow(row, datacells));
}
}
in.close();
out.close();
}
};
}
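Note that the loop above rebuilds the target DataColumnSpec and the TimeToStringCellFactory for every row, although both depend only on the column. A possible variant, sketched here under the assumption that the factory is stateless across rows and reusing the local variables declared in the snippet (isReplace, includeList, includeIndeces, inSpec, in, out), hoists that work out of the loop:
// Sketch (assumption, not the original source): create the per-column factories once.
final TimeToStringCellFactory[] factories = new TimeToStringCellFactory[includeIndeces.length];
for (int i = 0; i < includeIndeces.length; i++) {
    final DataColumnSpec outSpec = isReplace
        ? new DataColumnSpecCreator(includeList[i], StringCell.TYPE).createSpec()
        : new UniqueNameGenerator(inSpec).newColumn(includeList[i] + m_suffix.getStringValue(), StringCell.TYPE);
    factories[i] = new TimeToStringCellFactory(outSpec, includeIndeces[i]);
}
DataRow row;
while ((row = in.poll()) != null) {
    exec.checkCanceled();
    final DataCell[] cells = new DataCell[factories.length];
    for (int i = 0; i < factories.length; i++) {
        cells[i] = factories[i].getCell(row); // same per-row conversion as above
    }
    out.push(isReplace ? new ReplacedColumnsDataRow(row, cells, includeIndeces) : new AppendedColumnRow(row, cells));
}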
Use of org.knime.core.node.streamable.PartitionInfo in project knime-core by knime.
The class StringToDurationPeriodNodeModel, method createStreamableOperator().
/**
* {@inheritDoc}
*/
@Override
public StreamableOperator createStreamableOperator(final PartitionInfo partitionInfo, final PortObjectSpec[] inSpecs) throws InvalidSettingsException {
return new StreamableOperator() {
private SimpleStreamableOperatorInternals m_internals = new SimpleStreamableOperatorInternals();
/**
* {@inheritDoc}
*/
@Override
public void runIntermediate(final PortInput[] inputs, final ExecutionContext exec) throws Exception {
final RowInput rowInput = (RowInput) inputs[0];
final DataRow row = rowInput.poll();
if (row != null) {
final DataTableSpec inSpec = rowInput.getDataTableSpec();
final int[] includeIndexes = Arrays.stream(m_colSelect.applyTo(inSpec).getIncludes()).mapToInt(s -> inSpec.findColumnIndex(s)).toArray();
final Config config = m_internals.getConfig();
// detect types
detectTypes(new OneRowAdditionalRowInput(rowInput, row));
for (int i = 0; i < m_detectedTypes.length; i++) {
config.addDataType("detected_type" + i, m_detectedTypes[i]);
}
// write detected types and column names into config
if (m_isReplaceOrAppend.getStringValue().equals(OPTION_REPLACE)) {
for (int i = 0; i < rowInput.getDataTableSpec().getNumColumns(); i++) {
final int searchIdx = Arrays.binarySearch(includeIndexes, i);
config.addString("colname" + i, inSpec.getColumnNames()[i]);
if (searchIdx < 0) {
config.addDataType("type" + i, inSpec.getColumnSpec(i).getType());
} else {
config.addDataType("type" + i, m_detectedTypes[searchIdx] != null ? m_detectedTypes[searchIdx] : null);
}
}
config.addInt("sizeRow", rowInput.getDataTableSpec().getNumColumns());
} else {
for (int i = 0; i < inSpec.getNumColumns(); i++) {
config.addString("colname" + i, inSpec.getColumnNames()[i]);
config.addDataType("type" + i, inSpec.getColumnSpec(i).getType());
}
for (int i = 0; i < m_detectedTypes.length; i++) {
config.addString("colname" + (i + inSpec.getNumColumns()), new UniqueNameGenerator(inSpec).newName(inSpec.getColumnSpec(includeIndexes[i]).getName() + m_suffix.getStringValue()));
config.addDataType("type" + (i + inSpec.getNumColumns()), m_detectedTypes[i]);
}
config.addInt("sizeRow", inSpec.getNumColumns() + m_detectedTypes.length);
}
config.addBoolean("needsIteration", false);
} else {
m_internals.getConfig().addInt("sizeRow", 0);
}
rowInput.close();
}
/**
* {@inheritDoc}
*/
@Override
public StreamableOperatorInternals saveInternals() {
return m_internals;
}
/**
* {@inheritDoc}
*/
@Override
public void loadInternals(final StreamableOperatorInternals internals) {
m_internals = (SimpleStreamableOperatorInternals) internals;
}
@Override
public void runFinal(final PortInput[] inputs, final PortOutput[] outputs, final ExecutionContext exec) throws Exception {
final RowInput in = (RowInput) inputs[0];
final RowOutput out = (RowOutput) outputs[0];
final DataTableSpec inSpec = in.getDataTableSpec();
final int[] includeIndexes = Arrays.stream(m_colSelect.applyTo(inSpec).getIncludes()).mapToInt(s -> inSpec.findColumnIndex(s)).toArray();
// read detected types from config
final DataType[] detectedTypes = new DataType[includeIndexes.length];
final Config config = m_internals.getConfig();
for (int i = 0; i < includeIndexes.length; i++) {
detectedTypes[i] = config.getDataType("detected_type" + i, null);
}
// compute every row
DataRow row;
while ((row = in.poll()) != null) {
exec.checkCanceled();
DataCell[] datacells = new DataCell[includeIndexes.length];
for (int i = 0; i < includeIndexes.length; i++) {
if (m_isReplaceOrAppend.getStringValue().equals(OPTION_REPLACE)) {
final StringToDurationPeriodCellFactory cellFac = new StringToDurationPeriodCellFactory(new DataColumnSpecCreator(inSpec.getColumnNames()[includeIndexes[i]], detectedTypes[i]).createSpec(), includeIndexes[i]);
datacells[i] = cellFac.getCells(row)[0];
} else {
final DataColumnSpec dataColSpec = new UniqueNameGenerator(inSpec).newColumn(inSpec.getColumnNames()[includeIndexes[i]] + m_suffix.getStringValue(), detectedTypes[i]);
final StringToDurationPeriodCellFactory cellFac = new StringToDurationPeriodCellFactory(dataColSpec, includeIndexes[i]);
datacells[i] = cellFac.getCells(row)[0];
}
}
if (m_isReplaceOrAppend.getStringValue().equals(OPTION_REPLACE)) {
out.push(new ReplacedColumnsDataRow(row, datacells, includeIndexes));
} else {
out.push(new AppendedColumnRow(row, datacells));
}
}
in.close();
out.close();
}
};
}
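The interesting part of this operator is how the types detected in runIntermediate reach runFinal: they are written into the Config of a SimpleStreamableOperatorInternals object and read back with a null default. A reduced sketch of that round trip follows; the key names match the snippet, while the placeholder type (StringCell.TYPE) and the standalone context are assumptions.
// Sketch of the internals round trip used above.
final SimpleStreamableOperatorInternals internals = new SimpleStreamableOperatorInternals();

// written during runIntermediate, once per included column
internals.getConfig().addDataType("detected_type0", StringCell.TYPE); // placeholder type for the sketch
internals.getConfig().addBoolean("needsIteration", false);            // no further pass over the input needed

// read back in runFinal, possibly after the framework merged the per-partition internals
final DataType detected = internals.getConfig().getDataType("detected_type0", null); // null if detection never ran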
Use of org.knime.core.node.streamable.PartitionInfo in project knime-core by knime.
The class OldToNewTimeNodeModel, method createStreamableOperator().
/**
* {@inheritDoc}
*/
@Override
public StreamableOperator createStreamableOperator(final PartitionInfo partitionInfo, final PortObjectSpec[] inSpecs) throws InvalidSettingsException {
return new StreamableOperator() {
SimpleStreamableOperatorInternals m_internals = new SimpleStreamableOperatorInternals();
/**
* {@inheritDoc}
*/
@Override
public void runIntermediate(final PortInput[] inputs, final ExecutionContext exec) throws Exception {
if (partitionInfo.getPartitionIndex() == 0) {
final RowInput rowInput = (RowInput) inputs[0];
final DataRow row = rowInput.poll();
if (row != null) {
if (m_isReplaceOrAppend.getStringValue().equals(OPTION_REPLACE)) {
final DataColumnSpec[] colSpecs = new DataColumnSpec[row.getNumCells()];
final DataTableSpec inSpec = rowInput.getDataTableSpec();
final DataColumnSpec[] newColumnSpecs = getNewIncludedColumnSpecs(inSpec, row);
final int[] includeIndexes = Arrays.stream(m_colSelect.applyTo(inSpec).getIncludes()).mapToInt(s -> inSpec.findColumnIndex(s)).toArray();
for (int i = 0; i < inSpec.getNumColumns(); i++) {
final int searchIdx = Arrays.binarySearch(includeIndexes, i);
if (searchIdx < 0) {
colSpecs[i] = inSpec.getColumnSpec(i);
} else {
colSpecs[i] = newColumnSpecs[searchIdx];
}
}
final Config config = m_internals.getConfig();
config.addBoolean("hasIterated", false);
for (int i = 0; i < inSpec.getNumColumns(); i++) {
config.addDataType("type" + i, colSpecs[i].getType());
config.addString("colname" + i, colSpecs[i].getName());
}
config.addInt("sizeRow", colSpecs.length);
} else {
final DataTableSpec inSpec = rowInput.getDataTableSpec();
final DataColumnSpec[] newColumnSpecs = getNewIncludedColumnSpecs(inSpec, row);
final int[] includeIndexes = Arrays.stream(m_colSelect.applyTo(inSpec).getIncludes()).mapToInt(s -> inSpec.findColumnIndex(s)).toArray();
final DataColumnSpec[] colSpecs = new DataColumnSpec[row.getNumCells() + includeIndexes.length];
for (int i = 0; i < inSpec.getNumColumns(); i++) {
colSpecs[i] = inSpec.getColumnSpec(i);
}
for (int i = 0; i < newColumnSpecs.length; i++) {
colSpecs[i + inSpec.getNumColumns()] = new UniqueNameGenerator(inSpec).newColumn(newColumnSpecs[i].getName() + m_suffix.getStringValue(), newColumnSpecs[i].getType());
}
final Config config = m_internals.getConfig();
config.addBoolean("hasIterated", false);
for (int i = 0; i < colSpecs.length; i++) {
config.addDataType("type" + i, colSpecs[i].getType());
config.addString("colname" + i, colSpecs[i].getName());
}
config.addInt("sizeRow", colSpecs.length);
}
} else {
m_internals.getConfig().addInt("sizeRow", 0);
}
}
}
/**
* {@inheritDoc}
*/
@Override
public StreamableOperatorInternals saveInternals() {
return m_internals;
}
@Override
public void runFinal(final PortInput[] inputs, final PortOutput[] outputs, final ExecutionContext exec) throws Exception {
final RowInput in = (RowInput) inputs[0];
final RowOutput out = (RowOutput) outputs[0];
final DataTableSpec inSpec = in.getDataTableSpec();
final int[] includeIndexes = Arrays.stream(m_colSelect.applyTo(inSpec).getIncludes()).mapToInt(s -> inSpec.findColumnIndex(s)).toArray();
DataRow row;
while ((row = in.poll()) != null) {
exec.checkCanceled();
final DataColumnSpec[] newColumnSpecs = getNewIncludedColumnSpecs(inSpec, row);
DataCell[] datacells = new DataCell[includeIndexes.length];
for (int i = 0; i < includeIndexes.length; i++) {
if (m_isReplaceOrAppend.getStringValue().equals(OPTION_REPLACE)) {
ConvertTimeCellFactory cellFac = new ConvertTimeCellFactory(newColumnSpecs[i], i, includeIndexes[i]);
datacells[i] = cellFac.getCells(row)[0];
} else {
final DataColumnSpec dataColSpec = new UniqueNameGenerator(inSpec).newColumn(newColumnSpecs[i].getName() + m_suffix.getStringValue(), newColumnSpecs[i].getType());
ConvertTimeCellFactory cellFac = new ConvertTimeCellFactory(dataColSpec, i, includeIndexes[i]);
datacells[i] = cellFac.getCells(row)[0];
}
}
if (m_isReplaceOrAppend.getStringValue().equals(OPTION_REPLACE)) {
out.push(new ReplacedColumnsDataRow(row, datacells, includeIndexes));
} else {
out.push(new AppendedColumnRow(row, datacells));
}
}
in.close();
out.close();
}
};
}
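Here PartitionInfo is used at run time rather than only at construction: the partitionInfo.getPartitionIndex() == 0 guard ensures that only one partition of a distributed streaming run performs the one-off spec detection and fills the internals. A minimal sketch of that guard, with a placeholder payload instead of the full column bookkeeping:
// Sketch (assumption): restrict one-off work to the first partition; partitionInfo and
// m_internals come from the enclosing createStreamableOperator scope as in the snippet.
@Override
public void runIntermediate(final PortInput[] inputs, final ExecutionContext exec) throws Exception {
    if (partitionInfo.getPartitionIndex() == 0) {
        final RowInput rowInput = (RowInput)inputs[0];
        final DataRow first = rowInput.poll();            // peek at one row to derive the output structure
        if (first != null) {
            m_internals.getConfig().addInt("sizeRow", first.getNumCells()); // placeholder payload
        }
        rowInput.close();
    }
    // other partitions leave the internals empty; the framework can merge them later
}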