Use of org.knime.core.internal.ReferencedFile in project knime-core (by KNIME):
class FileNodePersistor, method loadPorts.
/**
 * Restores the output port content (specs and, if executed, port objects) of the given node
 * from the passed settings, dispatching on the workflow's load version.
 *
 * @noreference
 * @nooverride
 */
void loadPorts(final Node node, final ExecutionMonitor exec, final NodeSettingsRO settings,
    final Map<Integer, BufferedDataTable> loadTblRep, final WorkflowDataRepository dataRepository)
    throws IOException, InvalidSettingsException, CanceledExecutionException {
    final int portCount = node.getNrOutPorts();
    if (getLoadVersion().isOlderThan(LoadVersion.V200)) {
        // Pre-2.0 workflow: start at 1 to skip the flow variables port (introduced in v2.2).
        for (int port = 1; port < portCount; port++) {
            final int oldIndex = getOldPortIndex(port);
            final ExecutionMonitor portProgress = exec.createSubProgress(1.0 / portCount);
            exec.setMessage("Port " + oldIndex);
            final PortType portType = node.getOutputType(port);
            final boolean holdsTable = BufferedDataTable.class.isAssignableFrom(portType.getPortObjectClass());
            if (m_isConfigured) {
                setPortObjectSpec(port, loadPortObjectSpec(node, settings, oldIndex));
            }
            if (m_isExecuted) {
                if (!holdsTable) {
                    // model ports of 1.x workflows cannot be restored
                    throw new IOException("Can't restore model ports of old 1.x workflows. Execute node again.");
                }
                final PortObject table =
                    loadBufferedDataTable(node, settings, portProgress, loadTblRep, oldIndex, dataRepository);
                final String summary = table != null ? table.getSummary() : null;
                setPortObject(port, table);
                setPortObjectSummary(port, summary);
            }
            portProgress.setProgress(1.0);
        }
    } else {
        if (portCount == 1) {
            // nothing to restore -- only the mandatory flow variable port exists
            return;
        }
        final NodeSettingsRO portsSettings = loadPortsSettings(settings);
        exec.setMessage("Reading outport data");
        for (final String key : portsSettings.keySet()) {
            final NodeSettingsRO portSetting = portsSettings.getNodeSettings(key);
            final ExecutionMonitor portProgress = exec.createSubProgress(1 / (double)portCount);
            final int index = loadPortIndex(portSetting);
            if (index < 0 || index >= portCount) {
                throw new InvalidSettingsException("Invalid outport index in settings: " + index);
            }
            final String portDirName = portSetting.getString("port_dir_location");
            if (portDirName != null) {
                final ReferencedFile portDir = new ReferencedFile(getNodeDirectory(), portDirName);
                portProgress.setMessage("Port " + index);
                loadPort(node, portDir, portSetting, portProgress, index, loadTblRep, dataRepository);
            }
            portProgress.setProgress(1.0);
        }
    }
}
Use of org.knime.core.internal.ReferencedFile in project knime-core (by KNIME):
class BufferedDataTable, method loadFromFile.
/**
 * Factory method to restore a table that has been written using
 * the save method.
 *
 * <p>Handles two historical layouts: in 1.1.x and before the table meta
 * information was stored in an external data.xml passed in via
 * {@code settings}; from 1.2.0 on each table directory carries its own
 * description file ({@code TABLE_DESCRIPTION_FILE}).</p>
 *
 * @param dirRef The directory to load from.
 * @param settings The settings to load from (only consulted for pre-1.2.0
 *        workflows; may be null, e.g. for an exported workflow without data).
 * @param exec The exec mon for progress/cancel
 * @param tblRep The table repository
 * @param dataRepository The data repository (needed for blobs and file stores).
 * @return The table as written by save.
 * @throws IOException If reading fails.
 * @throws CanceledExecutionException If canceled.
 * @throws InvalidSettingsException If settings are invalid.
 */
static BufferedDataTable loadFromFile(final ReferencedFile dirRef, final NodeSettingsRO settings,
    final ExecutionMonitor exec, final Map<Integer, BufferedDataTable> tblRep,
    final WorkflowDataRepository dataRepository)
    throws IOException, CanceledExecutionException, InvalidSettingsException {
    File dir = dirRef.getFile();
    NodeSettingsRO s;
    // in version 1.1.x and before, the information was stored in
    // an external data.xml (directly in the node dir)
    boolean isVersion11x;
    File dataXML = new File(dir, TABLE_DESCRIPTION_FILE);
    // loading an exported workflow without data
    if (!dataXML.exists() && settings == null) {
        throw new IOException("No such data file: " + dataXML.getAbsolutePath());
    }
    DataTableSpec spec;
    if (dataXML.exists()) {
        // version 1.2.0 and later: meta info lives in the table directory itself
        try (InputStream input = new BufferedInputStream(new FileInputStream(dataXML))) {
            s = NodeSettings.loadFromXML(input);
        }
        spec = loadSpec(dirRef);
        isVersion11x = false;
    } else {
        // version 1.1.x: meta info comes from the node's settings
        s = settings.getNodeSettings(CFG_TABLE_META);
        // needs to be read from zip file!
        spec = null;
        isVersion11x = true;
    }
    int id = s.getInt(CFG_TABLE_ID);
    // make sure subsequently generated table IDs do not collide with this one
    dataRepository.updateLastId(id);
    String fileName = s.getString(CFG_TABLE_FILE_NAME);
    ReferencedFile fileRef;
    if (fileName != null) {
        fileRef = new ReferencedFile(dirRef, fileName);
        File file = fileRef.getFile();
        if (!file.exists()) {
            throw new IOException("No such data file: " + fileRef);
        }
        if (!file.isFile() || !file.canRead()) {
            throw new IOException("Cannot read file " + fileRef);
        }
    } else {
        // for instance for a column filter node this is null.
        fileRef = null;
    }
    String tableType = CheckUtils.checkSettingNotNull(s.getString(CFG_TABLE_TYPE), "Table type must not be null");
    BufferedDataTable t;
    // "custom" table types carry an explicit format entry that must be validated first
    if (Arrays.asList(TABLE_TYPE_CONTAINER_CUSTOM, TABLE_TYPE_REARRANGE_COLUMN_CUSTOM).contains(tableType)) {
        checkFormat(s);
    }
    // "compress" table types carry an explicit compression entry that must be validated first
    if (Arrays.asList(TABLE_TYPE_CONTAINER_COMPRESS, TABLE_TYPE_REARRANGE_COLUMN_COMPRESS).contains(tableType)) {
        checkCompression(s);
    }
    switch (tableType) {
        case TABLE_TYPE_REFERENCE_IN_SAME_NODE:
            // table is shared with another port of the same node and must
            // already have been registered in the load map
            return CheckUtils.checkSettingNotNull(tblRep.get(id), "Table reference with ID %d not found in load map", id);
        case TABLE_TYPE_CONTAINER:
            if (isVersion11x) {
                if (fileRef == null) {
                    throw new NullPointerException("Reference on file to load from has not been set.");
                }
                final ContainerTable cont = DataContainer.readFromZip(fileRef.getFile());
                t = new BufferedDataTable(cont, id);
                break;
            }
            // deliberate fall-through: >= 1.2.0 container tables are read
            // lazily, exactly like the custom/compressed variants below
        case TABLE_TYPE_CONTAINER_CUSTOM: // added in 3.6
        case TABLE_TYPE_CONTAINER_COMPRESS: // added in 4.0
            final ContainerTable cont = BufferedDataContainer.readFromZipDelayed(fileRef, spec, id, dataRepository);
            t = new BufferedDataTable(cont, id);
            break;
        case TABLE_TYPE_REARRANGE_COLUMN_CUSTOM:
        case TABLE_TYPE_REARRANGE_COLUMN_COMPRESS:
        case TABLE_TYPE_REARRANGE_COLUMN:
        case TABLE_TYPE_JOINED:
        case TABLE_TYPE_VOID:
        case TABLE_TYPE_CONCATENATE:
        case TABLE_TYPE_WRAPPED:
        case TABLE_TYPE_NEW_SPEC:
        case TABLE_TYPE_EXTENSION:
            // all of these derive from one or more reference tables that must
            // be loaded (recursively) before the table itself can be built
            String[] referenceDirs;
            // older workflows store a single reference dir under "table_reference"
            // (no concatenate table in those versions)
            if (s.containsKey("table_reference")) {
                String refDir = s.getString("table_reference");
                referenceDirs = refDir == null ? new String[0] : new String[] { refDir };
            } else {
                referenceDirs = s.getStringArray(CFG_TABLE_REFERENCE);
            }
            for (String reference : referenceDirs) {
                CheckUtils.checkSettingNotNull(reference, "Reference dir is \"null\"");
                ReferencedFile referenceDirRef = new ReferencedFile(dirRef, reference);
                // return value intentionally ignored: the recursive call
                // registers the reference table in tblRep as a side effect
                loadFromFile(referenceDirRef, s, exec, tblRep, dataRepository);
            }
            if (Arrays.asList(TABLE_TYPE_REARRANGE_COLUMN, TABLE_TYPE_REARRANGE_COLUMN_CUSTOM, TABLE_TYPE_REARRANGE_COLUMN_COMPRESS).contains(tableType)) {
                t = new BufferedDataTable(new RearrangeColumnsTable(fileRef, s, tblRep, spec, id, dataRepository, exec), dataRepository);
            } else if (tableType.equals(TABLE_TYPE_JOINED)) {
                JoinedTable jt = JoinedTable.load(s, spec, tblRep, dataRepository);
                t = new BufferedDataTable(jt, dataRepository);
            } else if (tableType.equals(TABLE_TYPE_VOID)) {
                VoidTable jt = VoidTable.load(spec);
                t = new BufferedDataTable(jt, dataRepository);
            } else if (tableType.equals(TABLE_TYPE_CONCATENATE)) {
                ConcatenateTable ct = ConcatenateTable.load(s, spec, tblRep, dataRepository);
                t = new BufferedDataTable(ct, dataRepository);
            } else if (tableType.equals(TABLE_TYPE_WRAPPED)) {
                WrappedTable wt = WrappedTable.load(s, tblRep, dataRepository);
                t = new BufferedDataTable(wt, dataRepository);
            } else if (tableType.equals(TABLE_TYPE_NEW_SPEC)) {
                TableSpecReplacerTable replTable;
                if (isVersion11x) {
                    if (fileRef == null) {
                        throw new NullPointerException("Reference on file to load from has not been set.");
                    }
                    replTable = TableSpecReplacerTable.load11x(fileRef.getFile(), s, tblRep, dataRepository);
                } else {
                    replTable = TableSpecReplacerTable.load(s, spec, tblRep, dataRepository);
                }
                t = new BufferedDataTable(replTable, dataRepository);
            } else if (tableType.equals(TABLE_TYPE_EXTENSION)) {
                ExtensionTable et = ExtensionTable.loadExtensionTable(fileRef, spec, s, tblRep, exec, dataRepository);
                t = new BufferedDataTable(et, dataRepository);
            } else {
                // unreachable: every label of this case group is handled above
                assert false : "Insufficent case switch: " + tableType;
                throw new InvalidSettingsException("Unknown table identifier: " + tableType);
            }
            break;
        default:
            throw new InvalidSettingsException("Unknown table identifier: " + tableType);
    }
    // restore the persisted ID and publish the table for later references
    t.m_tableID = id;
    tblRep.put(id, t);
    return t;
}
Use of org.knime.core.internal.ReferencedFile in project knime-core (by KNIME):
class FileNodeContainerMetaPersistor, method load.
/**
 * {@inheritDoc}
 *
 * <p>Loads the node container meta information (annotation, description, job
 * manager, state, message, locks). Each piece is loaded independently: a
 * failure is added to the load result, marks the persistor dirty, and loading
 * continues with a sensible fallback.</p>
 *
 * @return {@code true} if the node must be reset after loading (some state
 *         could not be restored), {@code false} otherwise
 */
@Override
public boolean load(final NodeSettingsRO settings, final NodeSettingsRO parentSettings, final LoadResult loadResult) {
    boolean isResetRequired = false;
    try {
        m_nodeAnnotationData = loadNodeAnnotationData(settings, parentSettings);
    } catch (InvalidSettingsException e) {
        String error = "Can't load node annotation: " + e.getMessage();
        loadResult.addError(error);
        getLogger().debug(error, e);
        setDirtyAfterLoad();
        m_nodeAnnotationData = null;
    }
    try {
        m_customDescription = loadCustomDescription(settings, parentSettings);
    } catch (InvalidSettingsException e) {
        String error = "Invalid custom description in settings: " + e.getMessage();
        loadResult.addError(error);
        getLogger().debug(error, e);
        setDirtyAfterLoad();
        m_customDescription = null;
    }
    try {
        m_jobManager = loadNodeExecutionJobManager(settings);
    } catch (InvalidSettingsException e) {
        String error = "Can't restore node execution job manager: " + e.getMessage();
        loadResult.addError(error);
        getLogger().debug(error, e);
        isResetRequired = true;
        setDirtyAfterLoad();
    }
    // once the job manager (or its settings) failed to load, the dependent
    // pieces below are skipped to avoid follow-up errors
    boolean hasJobManagerLoadFailed = m_jobManager == null;
    try {
        if (!hasJobManagerLoadFailed) {
            m_executionJobSettings = loadNodeExecutionJobSettings(settings);
        }
    } catch (InvalidSettingsException e) {
        // NOTE(review): message mentions the job manager although the job
        // *settings* failed to load -- looks like a copy-paste; confirm
        // before changing the user-facing text.
        String error = "Can't restore node execution job manager: " + e.getMessage();
        loadResult.addError(error);
        getLogger().debug(error, e);
        setDirtyAfterLoad();
        isResetRequired = true;
        hasJobManagerLoadFailed = true;
    }
    try {
        if (!hasJobManagerLoadFailed) {
            ReferencedFile jobManagerInternalsDirectory = loadJobManagerInternalsDirectory(getNodeContainerDirectory(), settings);
            if (jobManagerInternalsDirectory != null) {
                m_jobManager.loadInternals(jobManagerInternalsDirectory);
            }
        }
    } catch (Throwable e) {
        // catch Throwable: loadInternals is third-party job manager code
        String error = "Can't restore node execution job manager internals directory " + e.getMessage();
        loadResult.addError(error);
        getLogger().debug(error, e);
        setDirtyAfterLoad();
        // (dead store removed: hasJobManagerLoadFailed is not read beyond this point)
    }
    try {
        m_state = loadState(settings, parentSettings);
        switch (m_state) {
            case EXECUTED:
            case EXECUTINGREMOTELY:
                // template flows carry no data, so demote to CONFIGURED
                if (getLoadHelper().isTemplateFlow()) {
                    m_state = InternalNodeContainerState.CONFIGURED;
                }
                break;
            default:
                // all other states are kept as loaded
        }
    } catch (InvalidSettingsException e) {
        String error = "Can't restore node's state, fallback to " + InternalNodeContainerState.IDLE + ": " + e.getMessage();
        loadResult.addError(error);
        getLogger().debug(error, e);
        setDirtyAfterLoad();
        isResetRequired = true;
        m_state = InternalNodeContainerState.IDLE;
    }
    try {
        if (!getLoadHelper().isTemplateFlow()) {
            m_nodeMessage = loadNodeMessage(settings);
        }
    } catch (InvalidSettingsException ise) {
        String e = "Unable to load node message: " + ise.getMessage();
        loadResult.addError(e);
        getLogger().warn(e, ise);
    }
    m_nodeLock = loadNodeLocks(settings);
    return isResetRequired;
}
Use of org.knime.core.internal.ReferencedFile in project knime-core (by KNIME):
class FileSubNodeContainerPersistor, method loadExampleInputSpecs.
/**
 * Helper to load the example data spec stored with a component.
 *
 * @param settings settings that contain information required to restore the data, i.e. the location, file names,
 *            port types etc.
 * @param nodeDir the directory of the component to load the data spec from
 * @return the loaded port object specs (entries may be null for ports without a persisted spec), or {@code null}
 *         if a table spec file does not start with the expected "spec.xml" entry
 * @throws IOException if the data files couldn't be read
 * @throws InvalidSettingsException if the provided settings are not as expected
 */
static PortObjectSpec[] loadExampleInputSpecs(final NodeSettingsRO settings, final ReferencedFile nodeDir)
    throws IOException, InvalidSettingsException {
    String subDirName = settings.getString("location");
    ReferencedFile subDirFile = new ReferencedFile(nodeDir, subDirName);
    NodeSettingsRO portSettings = settings.getNodeSettings("content");
    Set<String> keySet = portSettings.keySet();
    PortObjectSpec[] result = new PortObjectSpec[keySet.size()];
    for (String s : keySet) {
        NodeSettingsRO singlePortSetting = portSettings.getNodeSettings(s);
        int index = singlePortSetting.getInt("index");
        if (index < 0 || index >= result.length) {
            throw new InvalidSettingsException("Invalid index: " + index);
        }
        String type = singlePortSetting.getString("type");
        PortObjectSpec spec = null;
        if ("null".equals(type)) {
            // object stays null
        } else if ("table".equals(type)) {
            String fileName = singlePortSetting.getString("table_file");
            if (fileName != null) {
                File portFile = new File(subDirFile.getFile(), fileName);
                // open the raw stream inside try-with-resources so it is closed even if
                // wrapping it fails; manual buffering works around
                // http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=5077277
                try (InputStream in = FileUtil.openInputStream(portFile.toString());
                        ZipInputStream zipIn = new ZipInputStream(new BufferedInputStream(in))) {
                    ZipEntry entry = zipIn.getNextEntry();
                    // only the spec is read here -- not the full table content
                    if ("spec.xml".equals(entry != null ? entry.getName() : "")) {
                        NodeSettingsRO settingsFile = NodeSettings.loadFromXML(zipIn);
                        try {
                            NodeSettingsRO specSettings = settingsFile.getNodeSettings("table.spec");
                            spec = DataTableSpec.load(specSettings);
                        } catch (InvalidSettingsException ise) {
                            // chain the cause directly instead of the legacy initCause() idiom
                            throw new IOException("Unable to read spec from file", ise);
                        }
                    } else {
                        // NOTE(review): this aborts the whole method (returns null for all
                        // ports), not just this port -- kept as-is for compatibility.
                        return null;
                    }
                }
            }
        } else if ("flow-vars".equals(type)) {
            spec = FlowVariablePortObjectSpec.INSTANCE;
        } else if ("non-table".equals(type)) {
            String fileName = singlePortSetting.getString("port_file");
            if (fileName != null) {
                File portFile = new File(subDirFile.getFile(), fileName);
                spec = PortUtil.readObjectSpecFromFile(portFile);
            }
        } else {
            CheckUtils.checkSetting(false, "Unknown object reference %s", type);
        }
        result[index] = spec;
    }
    return result;
}
Use of org.knime.core.internal.ReferencedFile in project knime-core (by KNIME):
class FileSubNodeContainerPersistor, method loadExampleInputData.
/**
 * Helper to load example data stored with a component.
 *
 * @param settings settings that contain information required to restore the data, i.e. the location, file names,
 *            port types etc.
 * @param nodeDir the component directory to load the data from
 * @param exec to report progress, listen to cancel events, and, most important, to create the new buffered data
 *            tables
 * @return the loaded port objects (entries may be null for ports without persisted data)
 * @throws IOException if the data files couldn't be read
 * @throws InvalidSettingsException if the provided settings are not as expected
 * @throws CanceledExecutionException if loading was canceled
 */
static PortObject[] loadExampleInputData(final NodeSettingsRO settings, final ReferencedFile nodeDir,
    final ExecutionContext exec) throws IOException, InvalidSettingsException, CanceledExecutionException {
    final String location = settings.getString("location");
    final ReferencedFile dataDir = new ReferencedFile(nodeDir, location);
    final NodeSettingsRO contentSettings = settings.getNodeSettings("content");
    final Set<String> portKeys = contentSettings.keySet();
    final PortObject[] portObjects = new PortObject[portKeys.size()];
    for (final String portKey : portKeys) {
        final ExecutionMonitor portProgress = exec.createSubProgress(1.0 / portObjects.length);
        final NodeSettingsRO portSetting = contentSettings.getNodeSettings(portKey);
        final int portIndex = portSetting.getInt("index");
        if (portIndex < 0 || portIndex >= portObjects.length) {
            throw new InvalidSettingsException("Invalid index: " + portIndex);
        }
        final String portType = portSetting.getString("type");
        PortObject loaded = null;
        if ("null".equals(portType)) {
            // no data was persisted for this port
        } else if ("table".equals(portType)) {
            final String tableFileName = portSetting.getString("table_file");
            if (tableFileName != null) {
                final File tableFile = new File(dataDir.getFile(), tableFileName);
                final ContainerTable containerTable = DataContainer.readFromZip(tableFile);
                loaded = exec.createBufferedDataTable(containerTable, portProgress);
                // the intermediate container is no longer needed once copied
                containerTable.clear();
            }
        } else if ("flow-vars".equals(portType)) {
            loaded = FlowVariablePortObject.INSTANCE;
        } else if ("non-table".equals(portType)) {
            final String portFileName = portSetting.getString("port_file");
            if (portFileName != null) {
                final File portFile = new File(dataDir.getFile(), portFileName);
                loaded = PortUtil.readObjectFromFile(portFile, exec);
            }
        } else {
            CheckUtils.checkSetting(false, "Unknown object reference %s", portType);
        }
        portObjects[portIndex] = loaded;
        portProgress.setProgress(1.0);
    }
    return portObjects;
}
Aggregations