Use of org.knime.core.data.container.ContainerTable in project knime-core (KNIME).
Example: class HistogramColumn, method loadHistograms.
/**
* Loads the histograms from the saved internal files.
*
* @param histogramsGz The file for the histograms.
* @param dataArrayGz The data array file for the row keys.
* @param nominalColumns The nominal columns.
* @param strategy The strategy used to compute the bins.
* @param means The mean values for the numeric columns.
* @return A triple (Pair(Pair(,),)) of histograms, numeric and nominal row keys.
* @throws IOException Failed to read the files.
* @throws InvalidSettingsException Something went wrong.
*/
/**
 * Loads the histograms from the saved internal files.
 *
 * @param histogramsGz The file for the histograms.
 * @param dataArrayGz The data array file for the row keys.
 * @param nominalColumns The nominal columns.
 * @param strategy The strategy used to compute the bins.
 * @param means The mean values for the numeric columns.
 * @return A triple (Pair(Pair(,),)) of histograms, numeric and nominal row keys.
 * @throws IOException Failed to read the files.
 * @throws InvalidSettingsException Something went wrong.
 */
public static Pair<Pair<Map<Integer, ? extends HistogramModel<?>>, Map<Integer, Map<Integer, Set<RowKey>>>>, Map<Integer, Map<DataValue, Set<RowKey>>>> loadHistograms(final File histogramsGz, final File dataArrayGz, final Set<String> nominalColumns, final BinNumberSelectionStrategy strategy, final double[] means) throws IOException, InvalidSettingsException {
    // Numeric histograms and their (bin index -> row keys) maps are produced by the helper;
    // numericKeys is filled as an out-parameter.
    Map<Integer, Map<Integer, Set<RowKey>>> numericKeys = new HashMap<Integer, Map<Integer, Set<RowKey>>>();
    Map<Integer, HistogramNumericModel> histograms = loadHistogramsPrivate(histogramsGz, numericKeys, strategy, means);
    Map<Integer, Map<DataValue, Set<RowKey>>> nominalKeys = new HashMap<Integer, Map<DataValue, Set<RowKey>>>();
    // NOTE(review): this table is read but never cleared here - confirm whether the caller
    // owns its lifecycle or whether a clear()/close is needed after the scan below.
    ContainerTable table = DataContainer.readFromZip(dataArrayGz);
    Set<Integer> numericColIndices = numericKeys.keySet();
    // Register an (initially empty) value -> row-keys map for every nominal column that is
    // actually present in the stored table; names not found in the spec are skipped.
    for (String colName : nominalColumns) {
        int colIndex = table.getDataTableSpec().findColumnIndex(colName);
        if (colIndex < 0) {
            continue;
        }
        nominalKeys.put(Integer.valueOf(colIndex), new HashMap<DataValue, Set<RowKey>>());
    }
    // Single pass over the stored rows: each row key is assigned to its numeric bin and to
    // its nominal value bucket.
    for (DataRow dataRow : table) {
        for (Integer col : numericColIndices) {
            HistogramNumericModel hd = histograms.get(col);
            Map<Integer, Set<RowKey>> map = numericKeys.get(col);
            DataCell cell = dataRow.getCell(col.intValue());
            // Missing or non-numeric cells contribute to no bin.
            if (!cell.isMissing() && cell instanceof DoubleValue) {
                DoubleValue dv = (DoubleValue) cell;
                Integer bin = Integer.valueOf(hd.findBin(dv));
                // Single map lookup instead of the former containsKey/put/get triple.
                Set<RowKey> binKeys = map.get(bin);
                if (binKeys == null) {
                    binKeys = new HashSet<RowKey>();
                    map.put(bin, binKeys);
                }
                binKeys.add(dataRow.getKey());
            }
        }
        for (Entry<Integer, Map<DataValue, Set<RowKey>>> entry : nominalKeys.entrySet()) {
            // The cell itself (including a missing-value cell) is the bucket key here.
            DataCell value = dataRow.getCell(entry.getKey().intValue());
            Map<DataValue, Set<RowKey>> map = entry.getValue();
            Set<RowKey> valueKeys = map.get(value);
            if (valueKeys == null) {
                valueKeys = new HashSet<RowKey>();
                map.put(value, valueKeys);
            }
            valueKeys.add(dataRow.getKey());
        }
    }
    return Pair.create(new Pair<Map<Integer, ? extends HistogramModel<?>>, Map<Integer, Map<Integer, Set<RowKey>>>>(histograms, numericKeys), nominalKeys);
}
Use of org.knime.core.data.container.ContainerTable in project knime-core (KNIME).
Example: class DataTableSpecView, method dispose.
/**
* {@inheritDoc}
* @since 3.6
*/
@Override
public void dispose() {
TableContentModel contentModel = m_specView.getContentModel();
DataTable dataTable = contentModel.getDataTable();
contentModel.setDataTable(null);
if (dataTable instanceof ContainerTable) {
((ContainerTable) dataTable).clear();
}
}
Use of org.knime.core.data.container.ContainerTable in project knime-core (KNIME).
Example: class FileNativeNodeContainerPersistor, method loadNCAndWashModelSettings.
/**
* {@inheritDoc}
*/
@Override
/**
 * {@inheritDoc}
 */
@Override
NodeSettingsRO loadNCAndWashModelSettings(final NodeSettingsRO settingsForNode, final NodeSettingsRO modelSettings, final Map<Integer, BufferedDataTable> tblRep, final ExecutionMonitor exec, final LoadResult result) throws InvalidSettingsException, CanceledExecutionException, IOException {
    // Run the node persistor's pre-load phase before touching any settings.
    final FileNodePersistor nodePersistor = createNodePersistor(settingsForNode);
    nodePersistor.preLoad(m_node, result);
    // Default to the raw settings; replaced below only if the wash round-trip succeeds.
    NodeSettingsRO washedModelSettings = modelSettings;
    try {
        if (modelSettings != null) {
            // null if the node never had settings - no reason to load them
            m_node.validateModelSettings(modelSettings);
            m_node.loadModelSettingsFrom(modelSettings);
            // previous versions of KNIME (2.7 and before) kept the model settings only in the node;
            // NodeModel#saveSettingsTo was always called before the dialog was opened (some dialog implementations
            // rely on the exact structure of the NodeSettings ... which may change between versions).
            // We wash the settings through the node so that the model settings are updated (they possibly
            // no longer map to the variable settings loaded further down below - if so, the inconsistency
            // is warned later during configuration)
            NodeSettings washedSettings = new NodeSettings("model");
            m_node.saveModelSettingsTo(washedSettings);
            washedModelSettings = washedSettings;
        }
    } catch (Exception e) {
        // Unexpected exception types are called out by class name in the message.
        final String error;
        if (e instanceof InvalidSettingsException) {
            error = "Loading model settings failed: " + e.getMessage();
        } else {
            error = "Caught \"" + e.getClass().getSimpleName() + "\", " + "Loading model settings failed: " + e.getMessage();
        }
        // How the failure is surfaced depends on the node's saved state and inactivity.
        final LoadNodeModelSettingsFailPolicy pol = getModelSettingsFailPolicy(getMetaPersistor().getState(), nodePersistor.isInactive());
        switch(pol) {
            case IGNORE:
                // Silently ignore expected settings problems; log only surprising types.
                if (!(e instanceof InvalidSettingsException)) {
                    getLogger().coding(error, e);
                }
                break;
            case FAIL:
                result.addError(error);
                m_node.createErrorMessageAndNotify(error, e);
                setNeedsResetAfterLoad();
                break;
            case WARN:
                m_node.createWarningMessageAndNotify(error, e);
                result.addWarning(error);
                setDirtyAfterLoad();
                break;
        }
    }
    try {
        // Load the node's internal content (data, internals) via the node persistor.
        HashMap<Integer, ContainerTable> globalTableRepository = getGlobalTableRepository();
        WorkflowFileStoreHandlerRepository fileStoreHandlerRepository = getFileStoreHandlerRepository();
        nodePersistor.load(m_node, getParentPersistor(), exec, tblRep, globalTableRepository, fileStoreHandlerRepository, result);
    } catch (final Exception e) {
        String error = "Error loading node content: " + e.getMessage();
        getLogger().warn(error, e);
        // NOTE(review): this invokes needsResetAfterLoad() and discards its result; the
        // FAIL branch above uses setNeedsResetAfterLoad() - confirm whether the setter
        // was intended here.
        needsResetAfterLoad();
        result.addError(error);
    }
    // Propagate the node persistor's dirty/reset flags up to this persistor.
    if (nodePersistor.isDirtyAfterLoad()) {
        setDirtyAfterLoad();
    }
    if (nodePersistor.needsResetAfterLoad()) {
        setNeedsResetAfterLoad();
    }
    return washedModelSettings;
}
Use of org.knime.core.data.container.ContainerTable in project knime-core (KNIME).
Example: class BufferedDataTable, method loadFromFile.
/**
* Factory method to restore a table that has been written using
* the save method.
* @param dirRef The directory to load from.
* @param settings The settings to load from.
* @param exec The exec mon for progress/cancel
* @param tblRep The table repository
* @param bufferRep The buffer repository (needed for blobs).
* @param fileStoreHandlerRepository ...
* @return The table as written by save.
* @throws IOException If reading fails.
* @throws CanceledExecutionException If canceled.
* @throws InvalidSettingsException If settings are invalid.
*/
/**
 * Factory method to restore a table that has been written using
 * the save method.
 * @param dirRef The directory to load from.
 * @param settings The settings to load from.
 * @param exec The exec mon for progress/cancel
 * @param tblRep The table repository
 * @param bufferRep The buffer repository (needed for blobs).
 * @param fileStoreHandlerRepository The file store handler repository.
 * @return The table as written by save.
 * @throws IOException If reading fails.
 * @throws CanceledExecutionException If canceled.
 * @throws InvalidSettingsException If settings are invalid.
 */
static BufferedDataTable loadFromFile(final ReferencedFile dirRef, final NodeSettingsRO settings, final ExecutionMonitor exec, final Map<Integer, BufferedDataTable> tblRep, final HashMap<Integer, ContainerTable> bufferRep, final FileStoreHandlerRepository fileStoreHandlerRepository) throws IOException, CanceledExecutionException, InvalidSettingsException {
    File dir = dirRef.getFile();
    NodeSettingsRO s;
    // in version 1.1.x and before, the information was stored in
    // an external data.xml (directly in the node dir)
    boolean isVersion11x;
    File dataXML = new File(dir, TABLE_DESCRIPTION_FILE);
    // loading an exported workflow without data
    if (!dataXML.exists() && settings == null) {
        throw new IOException("No such data file: " + dataXML.getAbsolutePath());
    }
    DataTableSpec spec;
    if (dataXML.exists()) {
        // version 1.2.0 and later
        s = NodeSettings.loadFromXML(new BufferedInputStream(new FileInputStream(dataXML)));
        spec = loadSpec(dirRef);
        isVersion11x = false;
    } else {
        // version 1.1.x
        s = settings.getNodeSettings(CFG_TABLE_META);
        // needs to be read from zip file!
        spec = null;
        isVersion11x = true;
    }
    int id = s.getInt(CFG_TABLE_ID);
    // Keep the global ID counter strictly ahead of every restored table ID.
    LAST_ID.set(Math.max(LAST_ID.get(), id + 1));
    String fileName = s.getString(CFG_TABLE_FILE_NAME);
    ReferencedFile fileRef;
    if (fileName != null) {
        fileRef = new ReferencedFile(dirRef, fileName);
        File file = fileRef.getFile();
        if (!file.exists()) {
            throw new IOException("No such data file: " + fileRef);
        }
        if (!file.isFile() || !file.canRead()) {
            throw new IOException("Cannot read file " + fileRef);
        }
    } else {
        // for instance for a column filter node this is null.
        // NOTE(review): branches below for container and 1.1.x new-spec tables dereference
        // fileRef without a null check - presumably save() always writes a data file for
        // those table types; confirm.
        fileRef = null;
    }
    String tableType = s.getString(CFG_TABLE_TYPE);
    BufferedDataTable t;
    if (tableType.equals(TABLE_TYPE_REFERENCE_IN_SAME_NODE)) {
        // Same node saved this table earlier; reuse the instance from the load map.
        t = tblRep.get(id);
        if (t == null) {
            throw new InvalidSettingsException("Table reference with ID " + id + " not found in load map");
        }
        return t;
    } else if (tableType.equals(TABLE_TYPE_CONTAINER)) {
        ContainerTable fromContainer;
        if (isVersion11x) {
            fromContainer = DataContainer.readFromZip(fileRef.getFile());
        } else {
            fromContainer = BufferedDataContainer.readFromZipDelayed(fileRef, spec, id, bufferRep, fileStoreHandlerRepository);
        }
        t = new BufferedDataTable(fromContainer, id);
    } else {
        String[] referenceDirs;
        // (no concatenate table in those versions)
        if (s.containsKey("table_reference")) {
            String refDir = s.getString("table_reference");
            referenceDirs = refDir == null ? new String[0] : new String[] { refDir };
        } else {
            referenceDirs = s.getStringArray(CFG_TABLE_REFERENCE);
        }
        // Restore all referenced tables first (recursively) so tblRep contains them
        // before the derived table below is constructed.
        for (String reference : referenceDirs) {
            if (reference == null) {
                throw new InvalidSettingsException("Reference dir is \"null\"");
            }
            ReferencedFile referenceDirRef = new ReferencedFile(dirRef, reference);
            loadFromFile(referenceDirRef, s, exec, tblRep, bufferRep, fileStoreHandlerRepository);
        }
        // Dispatch on the remaining derived-table types.
        if (tableType.equals(TABLE_TYPE_REARRANGE_COLUMN)) {
            t = new BufferedDataTable(new RearrangeColumnsTable(fileRef, s, tblRep, spec, id, bufferRep, fileStoreHandlerRepository));
        } else if (tableType.equals(TABLE_TYPE_JOINED)) {
            JoinedTable jt = JoinedTable.load(s, spec, tblRep);
            t = new BufferedDataTable(jt);
        } else if (tableType.equals(TABLE_TYPE_VOID)) {
            VoidTable jt = VoidTable.load(spec);
            t = new BufferedDataTable(jt);
        } else if (tableType.equals(TABLE_TYPE_CONCATENATE)) {
            ConcatenateTable ct = ConcatenateTable.load(s, spec, tblRep);
            t = new BufferedDataTable(ct);
        } else if (tableType.equals(TABLE_TYPE_WRAPPED)) {
            WrappedTable wt = WrappedTable.load(s, tblRep);
            t = new BufferedDataTable(wt);
        } else if (tableType.equals(TABLE_TYPE_NEW_SPEC)) {
            TableSpecReplacerTable replTable;
            if (isVersion11x) {
                replTable = TableSpecReplacerTable.load11x(fileRef.getFile(), s, tblRep);
            } else {
                replTable = TableSpecReplacerTable.load(s, spec, tblRep);
            }
            t = new BufferedDataTable(replTable);
        } else if (tableType.equals(TABLE_TYPE_EXTENSION)) {
            ExtensionTable et = ExtensionTable.loadExtensionTable(fileRef, spec, s, tblRep, exec);
            t = new BufferedDataTable(et);
        } else {
            throw new InvalidSettingsException("Unknown table identifier: " + tableType);
        }
    }
    t.m_tableID = id;
    // Register the restored table so later sibling loads can reference it by ID.
    tblRep.put(id, t);
    return t;
}
Use of org.knime.core.data.container.ContainerTable in project knime-core (KNIME).
Example: class BufferedDataTable, method save.
/**
* Saves the table to a directory and writes some settings to the argument
* NodeSettingsWO object. It will also write the reference table in case
* this node is responsible for it (i.e. this node created the reference
* table).
* @param dir The directory to write to.
* @param savedTableIDs Ids of tables that were previously saved, used to identify
* tables that are referenced by the same nodes multiple times.
* @param exec The progress monitor for cancellation.
* @throws IOException If writing fails.
* @throws CanceledExecutionException If canceled.
*/
/**
 * Saves the table to a directory and writes some settings to the argument
 * NodeSettingsWO object. It will also write the reference table in case
 * this node is responsible for it (i.e. this node created the reference
 * table).
 * @param dir The directory to write to.
 * @param savedTableIDs Ids of tables that were previously saved, used to identify
 * tables that are referenced by the same nodes multiple times.
 * @param exec The progress monitor for cancellation.
 * @throws IOException If writing fails.
 * @throws CanceledExecutionException If canceled.
 */
void save(final File dir, final Set<Integer> savedTableIDs, final ExecutionMonitor exec) throws IOException, CanceledExecutionException {
    NodeSettings s = new NodeSettings(CFG_TABLE_META);
    Integer bufferedTableID = getBufferedTableId();
    s.addInt(CFG_TABLE_ID, bufferedTableID);
    File outFile = new File(dir, TABLE_FILE);
    if (!savedTableIDs.add(bufferedTableID)) {
        // This table was already saved by the same node: store a reference, no data file.
        s.addString(CFG_TABLE_TYPE, TABLE_TYPE_REFERENCE_IN_SAME_NODE);
    } else if (m_delegate instanceof ContainerTable) {
        s.addString(CFG_TABLE_TYPE, TABLE_TYPE_CONTAINER);
        m_delegate.saveToFile(outFile, s, exec);
    } else {
        // Derived-table types: record the concrete type, then save reference tables
        // this node owns, then the delegate itself.
        if (m_delegate instanceof RearrangeColumnsTable) {
            s.addString(CFG_TABLE_TYPE, TABLE_TYPE_REARRANGE_COLUMN);
        } else if (m_delegate instanceof TableSpecReplacerTable) {
            s.addString(CFG_TABLE_TYPE, TABLE_TYPE_NEW_SPEC);
        } else if (m_delegate instanceof WrappedTable) {
            s.addString(CFG_TABLE_TYPE, TABLE_TYPE_WRAPPED);
        } else if (m_delegate instanceof JoinedTable) {
            s.addString(CFG_TABLE_TYPE, TABLE_TYPE_JOINED);
        } else if (m_delegate instanceof VoidTable) {
            s.addString(CFG_TABLE_TYPE, TABLE_TYPE_VOID);
        } else if (m_delegate instanceof ConcatenateTable) {
            s.addString(CFG_TABLE_TYPE, TABLE_TYPE_CONCATENATE);
        } else {
            assert m_delegate instanceof ExtensionTable;
            s.addString(CFG_TABLE_TYPE, TABLE_TYPE_EXTENSION);
        }
        BufferedDataTable[] references = m_delegate.getReferenceTables();
        ArrayList<String> referenceDirs = new ArrayList<String>();
        for (BufferedDataTable reference : references) {
            // Save a reference table only if this node owns it and it wasn't saved yet;
            // each one goes into its own "r<index>" sub-directory.
            if (reference.getOwner() == getOwner() && !savedTableIDs.contains(reference.getBufferedTableId())) {
                int index = referenceDirs.size();
                String dirName = "r" + index;
                File subDir = new File(dir, dirName);
                if (!subDir.mkdir() && !subDir.isDirectory()) {
                    throw new IOException("Could not create directory " + subDir.getAbsolutePath());
                }
                if (!subDir.canWrite()) {
                    throw new IOException("Unable to write directory " + subDir.getAbsolutePath());
                }
                referenceDirs.add(dirName);
                reference.save(subDir, savedTableIDs, exec);
            }
        }
        s.addStringArray(CFG_TABLE_REFERENCE, referenceDirs.toArray(new String[referenceDirs.size()]));
        m_delegate.saveToFile(outFile, s, exec);
    }
    // only write the data file to the settings if it has been created
    if (outFile.exists()) {
        s.addString(CFG_TABLE_FILE_NAME, TABLE_FILE);
    } else {
        s.addString(CFG_TABLE_FILE_NAME, null);
    }
    saveSpec(getDataTableSpec(), dir);
    File dataXML = new File(dir, TABLE_DESCRIPTION_FILE);
    // Ensure the stream is closed even if saveToXML throws mid-write; previously the
    // stream leaked on the exception path. A second close after a successful save is
    // harmless (the buffer is empty by then).
    final BufferedOutputStream dataXMLOut = new BufferedOutputStream(new FileOutputStream(dataXML));
    try {
        s.saveToXML(dataXMLOut);
    } finally {
        dataXMLOut.close();
    }
}
Aggregations