Use of org.knime.core.node.ExecutionContext in project knime-core by knime.
From class FileReaderNodeModel, method createStreamableOperator.
@Override
public StreamableOperator createStreamableOperator(final PartitionInfo partitionInfo,
    final PortObjectSpec[] inSpecs) throws InvalidSettingsException {
    return new StreamableOperator() {
        @Override
        public void runFinal(final PortInput[] inputs, final PortOutput[] outputs,
            final ExecutionContext exec) throws Exception {
            assert inputs.length == 0;
            LOGGER.info("Preparing to read from '" + m_frSettings.getDataFileLocation().toString() + "'.");
            // Check the settings again - especially file existence (under Linux,
            // files could have been deleted/renamed since the last configure call).
            SettingsStatus status = m_frSettings.getStatusOfSettings(true, null);
            if (status.getNumOfErrors() > 0) {
                throw new InvalidSettingsException(status.getAllErrorMessages(10));
            }
            DataTableSpec tSpec = m_frSettings.createDataTableSpec();
            FileTable fTable = new FileTable(tSpec, m_frSettings, m_frSettings.getSkippedColumns(), exec);
            // data output port
            RowOutput rowOutput = (RowOutput)outputs[0];
            int row = 0;
            FileRowIterator it = fTable.iterator();
            try {
                if (it.getZipEntryName() != null) {
                    // seems we are reading a ZIP archive
                    LOGGER.info("Reading entry '" + it.getZipEntryName() + "' from the specified ZIP archive.");
                }
                while (it.hasNext()) {
                    row++;
                    DataRow next = it.next();
                    final int finalRow = row;
                    exec.setMessage(() -> "Reading row #" + finalRow + " (\"" + next.getKey() + "\")");
                    exec.checkCanceled();
                    rowOutput.push(next);
                }
                rowOutput.close();
                if (it.zippedSourceHasMoreEntries()) {
                    // after reading to the end of the file this returns a valid result
                    setWarningMessage("Source is a ZIP archive with multiple entries. Only reading first entry!");
                }
            } catch (DuplicateKeyException dke) {
                String msg = dke.getMessage();
                if (msg == null) {
                    msg = "Duplicate row IDs";
                }
                msg += ". Consider making IDs unique in the advanced settings.";
                DuplicateKeyException newDKE = new DuplicateKeyException(msg);
                newDKE.initCause(dke);
                throw newDKE;
            }
            // user settings allow for truncating the table
            if (it.iteratorEndedEarly()) {
                setWarningMessage("Data was truncated due to user settings.");
            }
            // closes all sources
            fTable.dispose();
        }
    };
}
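Distilled from the snippet above, the general source-node streaming pattern is: no inputs, push each row, report progress lazily, honor cancellation, and close the output. A minimal sketch of such an operator, where rows is a hypothetical Iterator<DataRow> standing in for a concrete source like FileRowIterator (placeholder, not KNIME API):

// A hedged sketch of a source-node StreamableOperator; "rows" is an
// assumed Iterator<DataRow> provided by the enclosing node model.
return new StreamableOperator() {
    @Override
    public void runFinal(final PortInput[] inputs, final PortOutput[] outputs,
        final ExecutionContext exec) throws Exception {
        RowOutput out = (RowOutput)outputs[0];
        long count = 0;
        while (rows.hasNext()) {
            DataRow r = rows.next();
            final long c = ++count;
            exec.setMessage(() -> "Reading row #" + c); // lazy, evaluated only when displayed
            exec.checkCanceled();                       // throws if the user cancels the node
            out.push(r);                                // hand the row downstream
        }
        out.close();                                    // mandatory: signals end of the stream
    }
};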
Use of org.knime.core.node.ExecutionContext in project knime-core by knime.
From class CovarianceMatrixCalculatorTest, method setUp.
/**
 * @throws java.lang.Exception
 */
@Before
public void setUp() throws Exception {
    @SuppressWarnings({"unchecked", "rawtypes"})
    NodeFactory<NodeModel> dummyFactory =
        (NodeFactory)new VirtualParallelizedChunkPortObjectInNodeFactory(new PortType[0]);
    m_exec = new ExecutionContext(new DefaultNodeProgressMonitor(), new Node(dummyFactory),
        SingleNodeContainer.MemoryPolicy.CacheOnDisc, new HashMap<Integer, ContainerTable>());
}
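All three test fixtures in this section build an ExecutionContext the same way: a dummy Node wrapping a factory, a DefaultNodeProgressMonitor, and an empty table repository. Once constructed, the context can materialize BufferedDataTables for the code under test. A hedged usage sketch (the column name "x" and the values are purely illustrative):

// Illustrative only: materialize a one-column table through the test context.
DataTableSpec spec = new DataTableSpec(
    new DataColumnSpecCreator("x", DoubleCell.TYPE).createSpec());
BufferedDataContainer cont = m_exec.createDataContainer(spec);
cont.addRowToTable(new DefaultRow(new RowKey("Row0"), new DoubleCell(1.0)));
cont.close();
BufferedDataTable table = cont.getTable(); // pass to the class under test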
Use of org.knime.core.node.ExecutionContext in project knime-core by knime.
From class LearnerTest, method setUp.
/**
 * @throws java.lang.Exception
 */
@Before
public void setUp() throws Exception {
    @SuppressWarnings({"unchecked", "rawtypes"})
    NodeFactory nodeFactory = new LogRegLearnerNodeFactory();
    Node node = new Node(nodeFactory);
    m_exec = new ExecutionContext(new DefaultNodeProgressMonitor(), node,
        SingleNodeContainer.MemoryPolicy.CacheOnDisc, new HashMap<Integer, ContainerTable>());
}
Use of org.knime.core.node.ExecutionContext in project knime-core by knime.
From class JoinerTest, method setUp.
/**
 * @throws java.lang.Exception
 */
@Before
public void setUp() throws Exception {
    @SuppressWarnings({"unchecked", "rawtypes"})
    NodeFactory<NodeModel> dummyFactory =
        (NodeFactory)new VirtualParallelizedChunkPortObjectInNodeFactory(new PortType[0]);
    m_exec = new ExecutionContext(new DefaultNodeProgressMonitor(), new Node(dummyFactory),
        SingleNodeContainer.MemoryPolicy.CacheOnDisc, new HashMap<Integer, ContainerTable>());
}
Use of org.knime.core.node.ExecutionContext in project knime-core by knime.
From class UngroupNodeModel, method createStreamableOperator.
/**
 * {@inheritDoc}
 */
@Override
public StreamableOperator createStreamableOperator(final PartitionInfo partitionInfo,
    final PortObjectSpec[] inSpecs) throws InvalidSettingsException {
    DataTableSpec spec = (DataTableSpec)inSpecs[0];
    int[] idxs = getSelectedColIdxs(spec, getColumnNames(spec));
    UngroupOperation2 ugO = createUngroupOperation(spec, idxs);
    return new StreamableOperator() {
        @Override
        public void runFinal(final PortInput[] inputs, final PortOutput[] outputs,
            final ExecutionContext exec) throws Exception {
            RowInput input = (RowInput)inputs[0];
            RowOutput output = (RowOutput)outputs[0];
            ugO.compute(input, output, exec, -1, m_trans);
            input.close();
            output.close();
        }
    };
}
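Unlike the FileReader source above, this operator transforms one streamed input into one streamed output. A NodeModel that supports this mode typically also overrides the port-role methods so the framework knows the node can be streamed; a minimal sketch of those companion overrides, assuming a single one-in/one-out table configuration (they are not shown in the snippet above):

// Hedged sketch: port-role overrides that usually accompany a
// streamable createStreamableOperator implementation.
@Override
public InputPortRole[] getInputPortRoles() {
    return new InputPortRole[]{InputPortRole.DISTRIBUTED_STREAMABLE};
}

@Override
public OutputPortRole[] getOutputPortRoles() {
    return new OutputPortRole[]{OutputPortRole.DISTRIBUTED};
}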