Use of org.knime.core.data.container.ContainerTable in the knime-core project (by KNIME).
From the class DataTableSorter, method clearTable:
/**
 * {@inheritDoc}
 */
@Override
void clearTable(final DataTable table) {
    // Tables produced by DataContainer are ContainerTable instances; only those
    // expose clear(). Anything else is logged and left untouched.
    if (table instanceof ContainerTable) {
        ((ContainerTable) table).clear();
    } else {
        NodeLogger.getLogger(getClass()).warn("Can't clear table instance " + "of \"" + table.getClass().getSimpleName() + "\" - expected \"" + ContainerTable.class.getSimpleName() + "\"");
    }
}
Use of org.knime.core.data.container.ContainerTable in the knime-core project (by KNIME).
From the class FileNodePersistor, method loadBufferedDataTable:
/**
 * Restores the {@link BufferedDataTable} for one output port from the node's on-disk
 * representation, supporting both the legacy 1.1.x layout and the 1.2.0+ layout.
 *
 * @param node the owning node; the loaded table is attached to it recursively
 * @param settings node settings used to detect the format version and locate the data files
 * @param execMon monitor for progress reporting and cancellation
 * @param loadTblRep repository of already-loaded tables, shared during workflow load
 * @param index the output port index whose table is to be loaded
 * @param tblRep container table repository (ignored for 1.1.x workflows, which predate blobs)
 * @param fileStoreHandlerRepository file store repository (ignored for 1.1.x workflows)
 * @return the loaded table for the given port
 * @throws InvalidSettingsException if expected settings keys are missing or malformed
 * @throws IOException if the data files cannot be read
 * @throws CanceledExecutionException if the user cancels during load
 */
private BufferedDataTable loadBufferedDataTable(final Node node, final NodeSettingsRO settings, final ExecutionMonitor execMon, final Map<Integer, BufferedDataTable> loadTblRep, final int index, final HashMap<Integer, ContainerTable> tblRep, final FileStoreHandlerRepository fileStoreHandlerRepository) throws InvalidSettingsException, IOException, CanceledExecutionException {
// in 1.1.x and before the settings.xml contained the location
// of the data table specs file (spec_0.xml, e.g.). From 1.2.0 on,
// the spec is saved in data/data_0/spec.xml
// Presence of the legacy spec-files key is the version discriminator.
boolean isVersion11x = settings.containsKey(CFG_SPEC_FILES);
ExecutionMonitor execSubData = execMon.createSubProgress(0.25);
ReferencedFile nodeDirectory = getNodeDirectory();
if (isVersion11x) {
/* In version 1.1.x the data was stored in a different way. The
 * data.xml that is now contained in the data/data_x/ directory was
 * aggregated in a data.xml file directly in the m_nodeDir. Also the
 * spec was located at a different location.
 */
String dataConfigFileName = settings.getString(CFG_DATA_FILE);
File nodeDir = nodeDirectory.getFile();
// dataConfigFile = data.xml in node dir
File dataConfigFile = new File(nodeDir, dataConfigFileName);
// Legacy aggregated settings: one data.xml describing all ports.
NodeSettingsRO dataSettings = NodeSettings.loadFromXML(new BufferedInputStream(new FileInputStream(dataConfigFile)));
String dataPath = dataSettings.getString(CFG_DATA_FILE_DIR);
// dataDir = /data
ReferencedFile dataDirRef = new ReferencedFile(nodeDirectory, dataPath);
// note: we do not check for existence here - in some cases
// this directory may not exist (when exported and empty
// directories are pruned)
NodeSettingsRO portSettings = dataSettings.getNodeSettings(CFG_OUTPUT_PREFIX + index);
String dataName = portSettings.getString(CFG_DATA_FILE_DIR);
// dir = /data/data_i
ReferencedFile dirRef = new ReferencedFile(dataDirRef, dataName);
BufferedDataTable t = BufferedDataTable.loadFromFile(dirRef, portSettings, execSubData, loadTblRep, // no blobs or file stores in 1.1.x
new HashMap<Integer, ContainerTable>(), new WorkflowFileStoreHandlerRepository());
t.setOwnerRecursively(node);
return t;
} else {
// 1.2.0+ layout: per-port settings live under data/data_<index>/.
NodeSettingsRO dataSettings = settings.getNodeSettings(CFG_DATA_FILE);
String dataDirStr = dataSettings.getString(CFG_DATA_FILE_DIR);
ReferencedFile dataDirRef = new ReferencedFile(nodeDirectory, dataDirStr);
NodeSettingsRO portSettings = dataSettings.getNodeSettings(CFG_OUTPUT_PREFIX + index);
String dataName = portSettings.getString(CFG_DATA_FILE_DIR);
ReferencedFile dirRef = new ReferencedFile(dataDirRef, dataName);
File dir = dirRef.getFile();
readDirectory(dir);
BufferedDataTable t = BufferedDataTable.loadFromFile(dirRef, /* ignored in 1.2.0+ */
null, execMon, loadTblRep, tblRep, fileStoreHandlerRepository);
t.setOwnerRecursively(node);
return t;
}
}
Use of org.knime.core.data.container.ContainerTable in the knime-core project (by KNIME).
From the class Bug5405_WorkflowLocationAfterSaveAs, method testExecAfterSaveAs:
/**
 * Loads the workflow, saves it to a new location, then executes it there.
 */
@Test
public void testExecAfterSaveAs() throws Exception {
    WorkflowManager wfm = getManager();
    ContainerTable readerTable = getExecuteFileReaderTable();
    // Lazy init: after load, tables are not yet extracted to workflow temp space.
    Assert.assertFalse(readerTable.isOpen());
    Assert.assertNotNull(wfm.getContext());
    Assert.assertEquals(wfm.getNodeContainerDirectory().getFile(), wfm.getContext().getCurrentLocation());
    // Create a fresh, non-existing target directory for save-as.
    File newLocation = FileUtil.createTempDir(getClass().getName());
    newLocation.delete();
    WorkflowContext.Factory contextFactory = new WorkflowContext.Factory(wfm.getContext()).setCurrentLocation(newLocation);
    wfm.saveAs(contextFactory.createContext(), new ExecutionMonitor());
    // Both the container directory and the context must now point at the new location.
    Assert.assertEquals(newLocation, wfm.getNodeContainerDirectory().getFile());
    Assert.assertEquals(newLocation, wfm.getContext().getCurrentLocation());
    // If this fails (= assertion thrown) the workflow format has changed and all
    // nodes are dirty when save-as is called.
    Assert.assertFalse(readerTable.isOpen());
    executeAndWait(m_diffChecker3);
    checkStateOfMany(InternalNodeContainerState.EXECUTED, m_fileReader2, m_fileReader1, m_diffChecker3);
    Assert.assertTrue(readerTable.isOpen());
}
Use of org.knime.core.data.container.ContainerTable in the knime-core project (by KNIME).
From the class DoubleVectorCellTest, method testSerialization:
/**
 * Round-trips a {@code DoubleVectorCell} through the stream serialization of
 * {@link DataContainer} and verifies the deserialized cell equals the original
 * while being a distinct object instance.
 */
@Test
public void testSerialization() throws Exception {
    double[] d = IntStream.range(0, 10000).mapToDouble(i -> i).toArray();
    DataCell cell = DoubleVectorCellFactory.createCell(d);
    DataContainer c = new DataContainer(new DataTableSpec(new DataColumnSpecCreator("foo", DoubleVectorCellFactory.TYPE).createSpec()));
    c.addRowToTable(new DefaultRow("row", cell));
    c.close();
    DataTable table = c.getTable();
    byte[] bytes;
    // try-with-resources closes the stream; no explicit close() needed.
    try (ByteArrayOutputStream output = new ByteArrayOutputStream()) {
        DataContainer.writeToStream(table, output, new ExecutionMonitor());
        bytes = output.toByteArray();
    }
    ContainerTable containerTable;
    try (ByteArrayInputStream input = new ByteArrayInputStream(bytes)) {
        containerTable = DataContainer.readFromStream(input);
    }
    DataCell cell2 = containerTable.iterator().next().getCell(0);
    // BUG FIX: previously compared the DataContainer 'c' with 'cell2', which is
    // trivially not the same object. The meaningful check is that deserialization
    // produced a NEW cell instance that is nonetheless equal to the original.
    Assert.assertNotSame(cell, cell2);
    Assert.assertEquals(cell, cell2);
}
Use of org.knime.core.data.container.ContainerTable in the knime-core project (by KNIME).
From the class Rule2DNodeModel, method loadInternals:
/**
 * Loads the node's internal tables (the fuzzy rules and the data) back from the
 * internals directory.
 *
 * @param internDir the internal node directory to read from
 * @param exec used to report progress or to cancel loading
 * @throws IOException if the internal zip files cannot be read
 * @throws CanceledExecutionException if the user aborts the operation
 * @see org.knime.core.node.NodeModel
 * #loadInternals(java.io.File,ExecutionMonitor)
 */
@Override
protected void loadInternals(final File internDir, final ExecutionMonitor exec) throws IOException, CanceledExecutionException {
    // Restore the rule table from its zipped container file.
    ContainerTable ruleTable = DataContainer.readFromZip(new File(internDir, RULE_FILE_NAME));
    m_fuzzyRules = new DefaultDataArray(ruleTable, 1, ruleTable.getRowCount(), exec);
    // Restore the data table the same way.
    ContainerTable dataTable = DataContainer.readFromZip(new File(internDir, DATA_FILE_NAME));
    m_data = new DefaultDataArray(dataTable, 1, dataTable.getRowCount(), exec);
}
Aggregations