use of org.knime.core.data.container.JoinedTable in project knime-core by knime.
the class BufferedDataTable method loadFromFile.
/**
* Factory method to restore a table that has been written using
* the save method.
* @param dirRef The directory to load from.
* @param settings The settings to load from.
* @param exec The execution monitor for progress reporting and cancellation.
* @param tblRep The table repository
* @param bufferRep The buffer repository (needed for blobs).
* @param fileStoreHandlerRepository ...
* @return The table as written by save.
* @throws IOException If reading fails.
* @throws CanceledExecutionException If canceled.
* @throws InvalidSettingsException If settings are invalid.
*/
static BufferedDataTable loadFromFile(final ReferencedFile dirRef, final NodeSettingsRO settings, final ExecutionMonitor exec, final Map<Integer, BufferedDataTable> tblRep, final HashMap<Integer, ContainerTable> bufferRep, final FileStoreHandlerRepository fileStoreHandlerRepository) throws IOException, CanceledExecutionException, InvalidSettingsException {
File dir = dirRef.getFile();
NodeSettingsRO s;
// in version 1.1.x and before, the information was stored in
// an external data.xml (directly in the node dir)
boolean isVersion11x;
File dataXML = new File(dir, TABLE_DESCRIPTION_FILE);
// loading an exported workflow without data
if (!dataXML.exists() && settings == null) {
throw new IOException("No such data file: " + dataXML.getAbsolutePath());
}
DataTableSpec spec;
if (dataXML.exists()) {
// version 1.2.0 and later
s = NodeSettings.loadFromXML(new BufferedInputStream(new FileInputStream(dataXML)));
spec = loadSpec(dirRef);
isVersion11x = false;
} else {
// version 1.1.x
s = settings.getNodeSettings(CFG_TABLE_META);
// needs to be read from zip file!
spec = null;
isVersion11x = true;
}
int id = s.getInt(CFG_TABLE_ID);
LAST_ID.set(Math.max(LAST_ID.get(), id + 1));
String fileName = s.getString(CFG_TABLE_FILE_NAME);
ReferencedFile fileRef;
if (fileName != null) {
fileRef = new ReferencedFile(dirRef, fileName);
File file = fileRef.getFile();
if (!file.exists()) {
throw new IOException("No such data file: " + fileRef);
}
if (!file.isFile() || !file.canRead()) {
throw new IOException("Cannot read file " + fileRef);
}
} else {
// for instance for a column filter node this is null.
fileRef = null;
}
String tableType = s.getString(CFG_TABLE_TYPE);
BufferedDataTable t;
if (tableType.equals(TABLE_TYPE_REFERENCE_IN_SAME_NODE)) {
t = tblRep.get(id);
if (t == null) {
throw new InvalidSettingsException("Table reference with ID " + id + " not found in load map");
}
return t;
} else if (tableType.equals(TABLE_TYPE_CONTAINER)) {
ContainerTable fromContainer;
if (isVersion11x) {
fromContainer = DataContainer.readFromZip(fileRef.getFile());
} else {
fromContainer = BufferedDataContainer.readFromZipDelayed(fileRef, spec, id, bufferRep, fileStoreHandlerRepository);
}
t = new BufferedDataTable(fromContainer, id);
} else {
String[] referenceDirs;
// (no concatenate table in those versions)
if (s.containsKey("table_reference")) {
String refDir = s.getString("table_reference");
referenceDirs = refDir == null ? new String[0] : new String[] { refDir };
} else {
referenceDirs = s.getStringArray(CFG_TABLE_REFERENCE);
}
for (String reference : referenceDirs) {
if (reference == null) {
throw new InvalidSettingsException("Reference dir is \"null\"");
}
ReferencedFile referenceDirRef = new ReferencedFile(dirRef, reference);
loadFromFile(referenceDirRef, s, exec, tblRep, bufferRep, fileStoreHandlerRepository);
}
if (tableType.equals(TABLE_TYPE_REARRANGE_COLUMN)) {
t = new BufferedDataTable(new RearrangeColumnsTable(fileRef, s, tblRep, spec, id, bufferRep, fileStoreHandlerRepository));
} else if (tableType.equals(TABLE_TYPE_JOINED)) {
JoinedTable jt = JoinedTable.load(s, spec, tblRep);
t = new BufferedDataTable(jt);
} else if (tableType.equals(TABLE_TYPE_VOID)) {
VoidTable jt = VoidTable.load(spec);
t = new BufferedDataTable(jt);
} else if (tableType.equals(TABLE_TYPE_CONCATENATE)) {
ConcatenateTable ct = ConcatenateTable.load(s, spec, tblRep);
t = new BufferedDataTable(ct);
} else if (tableType.equals(TABLE_TYPE_WRAPPED)) {
WrappedTable wt = WrappedTable.load(s, tblRep);
t = new BufferedDataTable(wt);
} else if (tableType.equals(TABLE_TYPE_NEW_SPEC)) {
TableSpecReplacerTable replTable;
if (isVersion11x) {
replTable = TableSpecReplacerTable.load11x(fileRef.getFile(), s, tblRep);
} else {
replTable = TableSpecReplacerTable.load(s, spec, tblRep);
}
t = new BufferedDataTable(replTable);
} else if (tableType.equals(TABLE_TYPE_EXTENSION)) {
ExtensionTable et = ExtensionTable.loadExtensionTable(fileRef, spec, s, tblRep, exec);
t = new BufferedDataTable(et);
} else {
throw new InvalidSettingsException("Unknown table identifier: " + tableType);
}
}
t.m_tableID = id;
tblRep.put(id, t);
return t;
}
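For orientation, the lines below sketch how this factory method might be invoked when a saved port directory is restored. This is a hypothetical, minimal sketch: the directory layout, the repositories, the execution monitor and the same-package (framework-internal) access are assumptions for illustration, not the actual KNIME call site.
// Hypothetical sketch only -- the real call sites live inside the KNIME workflow loader.
File portDir = new File(nodeDir, "port_1"); // assumed on-disk layout
ReferencedFile dirRef = new ReferencedFile(portDir);
Map<Integer, BufferedDataTable> tblRep = new HashMap<>(); // shared for one load pass
HashMap<Integer, ContainerTable> bufferRep = new HashMap<>(); // blob buffers
// settings may be null for 1.2.0+ directories, where data.xml is read instead
BufferedDataTable restored = BufferedDataTable.loadFromFile(dirRef, null, exec, tblRep, bufferRep, fileStoreHandlerRepository);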
use of org.knime.core.data.container.JoinedTable in project knime-core by knime.
the class BufferedDataTable method save.
/**
* Saves the table to a directory and writes some settings to the argument
* NodeSettingsWO object. It will also write the reference table in case
* this node is responsible for it (i.e. this node created the reference
* table).
* @param dir The directory to write to.
* @param savedTableIDs IDs of tables that were previously saved, used to identify
* tables that are referenced multiple times by the same node.
* @param exec The progress monitor for cancellation.
* @throws IOException If writing fails.
* @throws CanceledExecutionException If canceled.
*/
void save(final File dir, final Set<Integer> savedTableIDs, final ExecutionMonitor exec) throws IOException, CanceledExecutionException {
NodeSettings s = new NodeSettings(CFG_TABLE_META);
Integer bufferedTableID = getBufferedTableId();
s.addInt(CFG_TABLE_ID, bufferedTableID);
File outFile = new File(dir, TABLE_FILE);
if (!savedTableIDs.add(bufferedTableID)) {
s.addString(CFG_TABLE_TYPE, TABLE_TYPE_REFERENCE_IN_SAME_NODE);
} else if (m_delegate instanceof BufferedContainerTable) {
final TableStoreFormat format = ((BufferedContainerTable) m_delegate).getTableStoreFormat();
if (!DefaultTableStoreFormat.class.equals(format.getClass())) {
// use different identifier to cause old versions of KNIME to fail loading newer workflows
s.addString(CFG_TABLE_TYPE, TABLE_TYPE_CONTAINER_CUSTOM);
s.addString(CFG_TABLE_CONTAINER_FORMAT, format.getClass().getName());
s.addString(CFG_TABLE_CONTAINER_FORMAT_VERSION, format.getVersion());
} else {
final DefaultTableStoreFormat defaultFormat = (DefaultTableStoreFormat) format;
if (!Arrays.asList(NONE, GZIP).contains(defaultFormat.getCompressionFormat())) {
s.addString(CFG_TABLE_TYPE, TABLE_TYPE_CONTAINER_COMPRESS);
s.addString(CFG_TABLE_COMPRESSION_FORMAT, defaultFormat.getCompressionFormat().toString());
} else {
s.addString(CFG_TABLE_TYPE, TABLE_TYPE_CONTAINER);
}
}
m_delegate.saveToFile(outFile, s, exec);
} else {
if (m_delegate instanceof RearrangeColumnsTable) {
final ContainerTable containerTable = ((RearrangeColumnsTable) m_delegate).getAppendTable();
if (containerTable != null && containerTable instanceof BufferedContainerTable) {
final BufferedContainerTable appendTable = (BufferedContainerTable) containerTable;
final TableStoreFormat format = appendTable.getTableStoreFormat();
if (!DefaultTableStoreFormat.class.equals(format.getClass())) {
// use different identifier to cause old versions of KNIME to fail loading newer workflows
s.addString(CFG_TABLE_TYPE, TABLE_TYPE_REARRANGE_COLUMN_CUSTOM);
s.addString(CFG_TABLE_CONTAINER_FORMAT, appendTable.getTableStoreFormat().getClass().getName());
s.addString(CFG_TABLE_CONTAINER_FORMAT_VERSION, appendTable.getTableStoreFormat().getVersion());
} else {
final DefaultTableStoreFormat defaultFormat = (DefaultTableStoreFormat) format;
if (!Arrays.asList(NONE, GZIP).contains(defaultFormat.getCompressionFormat())) {
s.addString(CFG_TABLE_TYPE, TABLE_TYPE_REARRANGE_COLUMN_COMPRESS);
s.addString(CFG_TABLE_COMPRESSION_FORMAT, defaultFormat.getCompressionFormat().toString());
} else {
s.addString(CFG_TABLE_TYPE, TABLE_TYPE_REARRANGE_COLUMN);
}
}
} else {
s.addString(CFG_TABLE_TYPE, TABLE_TYPE_REARRANGE_COLUMN);
}
} else if (m_delegate instanceof TableSpecReplacerTable) {
s.addString(CFG_TABLE_TYPE, TABLE_TYPE_NEW_SPEC);
} else if (m_delegate instanceof WrappedTable) {
s.addString(CFG_TABLE_TYPE, TABLE_TYPE_WRAPPED);
} else if (m_delegate instanceof JoinedTable) {
s.addString(CFG_TABLE_TYPE, TABLE_TYPE_JOINED);
} else if (m_delegate instanceof VoidTable) {
s.addString(CFG_TABLE_TYPE, TABLE_TYPE_VOID);
} else if (m_delegate instanceof ConcatenateTable) {
s.addString(CFG_TABLE_TYPE, TABLE_TYPE_CONCATENATE);
} else {
assert m_delegate instanceof ExtensionTable;
s.addString(CFG_TABLE_TYPE, TABLE_TYPE_EXTENSION);
}
BufferedDataTable[] references = m_delegate.getReferenceTables();
ArrayList<String> referenceDirs = new ArrayList<String>();
for (BufferedDataTable reference : references) {
if (reference.getOwner() == getOwner() && !savedTableIDs.contains(reference.getBufferedTableId())) {
int index = referenceDirs.size();
String dirName = "r" + index;
File subDir = new File(dir, dirName);
if (!subDir.mkdir() && !subDir.isDirectory()) {
throw new IOException("Could not create directory " + subDir.getAbsolutePath());
}
if (!subDir.canWrite()) {
throw new IOException("Unable to write directory " + subDir.getAbsolutePath());
}
referenceDirs.add(dirName);
reference.save(subDir, savedTableIDs, exec);
}
}
s.addStringArray(CFG_TABLE_REFERENCE, referenceDirs.toArray(new String[referenceDirs.size()]));
m_delegate.saveToFile(outFile, s, exec);
}
// only write the data file to the settings if it has been created
if (outFile.exists()) {
s.addString(CFG_TABLE_FILE_NAME, TABLE_FILE);
} else {
s.addString(CFG_TABLE_FILE_NAME, null);
}
saveSpec(getDataTableSpec(), dir);
File dataXML = new File(dir, TABLE_DESCRIPTION_FILE);
try (OutputStream out = new BufferedOutputStream(new FileOutputStream(dataXML))) {
s.saveToXML(out);
}
}
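As a hedged illustration of what save leaves on disk, the following hypothetical snippet stores a table and reads the resulting data.xml back; the directory name, the execution monitor, the same-package access and the omitted exception handling are assumptions for illustration only.
// Hypothetical sketch, not an actual framework call site.
File portDir = new File(nodeDir, "port_1"); // assumed layout
table.save(portDir, new HashSet<Integer>(), exec); // writes data.xml and, if present, the table file
try (InputStream in = new BufferedInputStream(new FileInputStream(new File(portDir, TABLE_DESCRIPTION_FILE)))) {
NodeSettingsRO written = NodeSettings.loadFromXML(in);
int tableId = written.getInt(CFG_TABLE_ID); // keys written by save above
String tableType = written.getString(CFG_TABLE_TYPE); // e.g. TABLE_TYPE_CONTAINER
}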
use of org.knime.core.data.container.JoinedTable in project knime-core by knime.
the class BufferedDataTable method loadFromFile.
/**
* Factory method to restore a table that has been written using
* the save method.
* @param dirRef The directory to load from.
* @param settings The settings to load from.
* @param exec The execution monitor for progress reporting and cancellation.
* @param tblRep The table repository
* @param dataRepository The data repository (needed for blobs and file stores).
* @return The table as written by save.
* @throws IOException If reading fails.
* @throws CanceledExecutionException If canceled.
* @throws InvalidSettingsException If settings are invalid.
*/
static BufferedDataTable loadFromFile(final ReferencedFile dirRef, final NodeSettingsRO settings, final ExecutionMonitor exec, final Map<Integer, BufferedDataTable> tblRep, final WorkflowDataRepository dataRepository) throws IOException, CanceledExecutionException, InvalidSettingsException {
File dir = dirRef.getFile();
NodeSettingsRO s;
// in version 1.1.x and before, the information was stored in
// an external data.xml (directly in the node dir)
boolean isVersion11x;
File dataXML = new File(dir, TABLE_DESCRIPTION_FILE);
// loading an exported workflow without data
if (!dataXML.exists() && settings == null) {
throw new IOException("No such data file: " + dataXML.getAbsolutePath());
}
DataTableSpec spec;
if (dataXML.exists()) {
// version 1.2.0 and later
try (InputStream input = new BufferedInputStream(new FileInputStream(dataXML))) {
s = NodeSettings.loadFromXML(input);
}
spec = loadSpec(dirRef);
isVersion11x = false;
} else {
// version 1.1.x
s = settings.getNodeSettings(CFG_TABLE_META);
// needs to be read from zip file!
spec = null;
isVersion11x = true;
}
int id = s.getInt(CFG_TABLE_ID);
dataRepository.updateLastId(id);
String fileName = s.getString(CFG_TABLE_FILE_NAME);
ReferencedFile fileRef;
if (fileName != null) {
fileRef = new ReferencedFile(dirRef, fileName);
File file = fileRef.getFile();
if (!file.exists()) {
throw new IOException("No such data file: " + fileRef);
}
if (!file.isFile() || !file.canRead()) {
throw new IOException("Cannot read file " + fileRef);
}
} else {
// for instance for a column filter node this is null.
fileRef = null;
}
String tableType = CheckUtils.checkSettingNotNull(s.getString(CFG_TABLE_TYPE), "Table type must not be null");
BufferedDataTable t;
if (Arrays.asList(TABLE_TYPE_CONTAINER_CUSTOM, TABLE_TYPE_REARRANGE_COLUMN_CUSTOM).contains(tableType)) {
checkFormat(s);
}
if (Arrays.asList(TABLE_TYPE_CONTAINER_COMPRESS, TABLE_TYPE_REARRANGE_COLUMN_COMPRESS).contains(tableType)) {
checkCompression(s);
}
switch(tableType) {
case TABLE_TYPE_REFERENCE_IN_SAME_NODE:
return CheckUtils.checkSettingNotNull(tblRep.get(id), "Table reference with ID %d not found in load map", id);
case TABLE_TYPE_CONTAINER:
if (isVersion11x) {
if (fileRef == null) {
throw new NullPointerException("Reference on file to load from has not been set.");
}
final ContainerTable cont = DataContainer.readFromZip(fileRef.getFile());
t = new BufferedDataTable(cont, id);
break;
}
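// (assumption: intentional fall-through) tables written by version 1.2.0 and later
// continue into the delayed-read container cases below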
// added in 3.6
case TABLE_TYPE_CONTAINER_CUSTOM:
// added in 4.0
case TABLE_TYPE_CONTAINER_COMPRESS:
final ContainerTable cont = BufferedDataContainer.readFromZipDelayed(fileRef, spec, id, dataRepository);
t = new BufferedDataTable(cont, id);
break;
case TABLE_TYPE_REARRANGE_COLUMN_CUSTOM:
case TABLE_TYPE_REARRANGE_COLUMN_COMPRESS:
case TABLE_TYPE_REARRANGE_COLUMN:
case TABLE_TYPE_JOINED:
case TABLE_TYPE_VOID:
case TABLE_TYPE_CONCATENATE:
case TABLE_TYPE_WRAPPED:
case TABLE_TYPE_NEW_SPEC:
case TABLE_TYPE_EXTENSION:
String[] referenceDirs;
// (no concatenate table in those versions)
if (s.containsKey("table_reference")) {
String refDir = s.getString("table_reference");
referenceDirs = refDir == null ? new String[0] : new String[] { refDir };
} else {
referenceDirs = s.getStringArray(CFG_TABLE_REFERENCE);
}
for (String reference : referenceDirs) {
CheckUtils.checkSettingNotNull(reference, "Reference dir is \"null\"");
ReferencedFile referenceDirRef = new ReferencedFile(dirRef, reference);
loadFromFile(referenceDirRef, s, exec, tblRep, dataRepository);
}
if (Arrays.asList(TABLE_TYPE_REARRANGE_COLUMN, TABLE_TYPE_REARRANGE_COLUMN_CUSTOM, TABLE_TYPE_REARRANGE_COLUMN_COMPRESS).contains(tableType)) {
t = new BufferedDataTable(new RearrangeColumnsTable(fileRef, s, tblRep, spec, id, dataRepository, exec), dataRepository);
} else if (tableType.equals(TABLE_TYPE_JOINED)) {
JoinedTable jt = JoinedTable.load(s, spec, tblRep, dataRepository);
t = new BufferedDataTable(jt, dataRepository);
} else if (tableType.equals(TABLE_TYPE_VOID)) {
VoidTable jt = VoidTable.load(spec);
t = new BufferedDataTable(jt, dataRepository);
} else if (tableType.equals(TABLE_TYPE_CONCATENATE)) {
ConcatenateTable ct = ConcatenateTable.load(s, spec, tblRep, dataRepository);
t = new BufferedDataTable(ct, dataRepository);
} else if (tableType.equals(TABLE_TYPE_WRAPPED)) {
WrappedTable wt = WrappedTable.load(s, tblRep, dataRepository);
t = new BufferedDataTable(wt, dataRepository);
} else if (tableType.equals(TABLE_TYPE_NEW_SPEC)) {
TableSpecReplacerTable replTable;
if (isVersion11x) {
if (fileRef == null) {
throw new NullPointerException("Reference on file to load from has not been set.");
}
replTable = TableSpecReplacerTable.load11x(fileRef.getFile(), s, tblRep, dataRepository);
} else {
replTable = TableSpecReplacerTable.load(s, spec, tblRep, dataRepository);
}
t = new BufferedDataTable(replTable, dataRepository);
} else if (tableType.equals(TABLE_TYPE_EXTENSION)) {
ExtensionTable et = ExtensionTable.loadExtensionTable(fileRef, spec, s, tblRep, exec, dataRepository);
t = new BufferedDataTable(et, dataRepository);
} else {
assert false : "Insufficient case switch: " + tableType;
throw new InvalidSettingsException("Unknown table identifier: " + tableType);
}
break;
default:
throw new InvalidSettingsException("Unknown table identifier: " + tableType);
}
t.m_tableID = id;
tblRep.put(id, t);
return t;
}
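To make the recursion explicit: save stores every reference table this node owns in a sub-directory named "r" + index, and loadFromFile descends into those directories first so that the composite table can resolve its parts from tblRep by ID. The lines below are a simplified, hypothetical restatement of the loop above, for illustration only.
// Simplified sketch (assumption), mirroring the reference-directory loop in the method above.
for (int i = 0; i < referenceDirs.length; i++) {
ReferencedFile refDirRef = new ReferencedFile(dirRef, referenceDirs[i]); // e.g. <dir>/r0
loadFromFile(refDirRef, s, exec, tblRep, dataRepository); // registers the table in tblRep by its ID
}
// JoinedTable.load, ConcatenateTable.load, etc. then look their reference tables up in tblRep.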