Usage of org.knime.core.internal.ReferencedFile in the knime-core project (by KNIME): class FileWorkflowPersistor, method saveContent.
/**
 * Saves the workflow content (nodes, connections, port bars, editor info) of the given
 * workflow manager into the target directory, writing the main settings file
 * (workflow.knime or workflow.knime.encrypted) last.
 *
 * @param wm The WFM to save.
 * @param preFilledSettings The settings eventually written to workflow.knime (or workflow.knime.encrypted).
 * For workflows it contains the version number, cipher, template infos etc. The name of the setting defines the
 * output file name (so it's important!)
 * @param rawWorkflowDirRef Directory to save to; root must be lockable for this VM.
 * @param execMon For progress/cancellation reporting.
 * @param saveHelper Determines auto-save vs. regular save and whether data is included.
 * @throws IOException If the directory cannot be created or a file cannot be written.
 * @throws CanceledExecutionException If the user canceled.
 * @throws LockFailedException If the target directory cannot be locked.
 */
private static void saveContent(final WorkflowManager wm, final NodeSettings preFilledSettings,
        final ReferencedFile rawWorkflowDirRef, final ExecutionMonitor execMon,
        final WorkflowSaveHelper saveHelper) throws IOException, CanceledExecutionException, LockFailedException {
    ReferencedFile workflowDirRef = rawWorkflowDirRef;
    Role r = wm.getTemplateInformation().getRole();
    // the key of the settings object doubles as the output file name
    final String fName = preFilledSettings.getKey();
    if (!workflowDirRef.fileLockRootForVM()) {
        throw new LockFailedException("Can't write workflow to \"" + workflowDirRef
            + "\" because the directory can't be locked");
    }
    try {
        final ReferencedFile nodeContainerDirectory = wm.getNodeContainerDirectory();
        final ReferencedFile autoSaveDirectory = wm.getAutoSaveDirectory();
        if (!saveHelper.isAutoSave() && workflowDirRef.equals(nodeContainerDirectory)) {
            if (!nodeContainerDirectory.isDirty()) {
                // nothing changed since the last regular save -- skip writing entirely
                return;
            } else {
                // update variable assignment to do changes on member
                workflowDirRef = nodeContainerDirectory;
                // delete "old" node directories if not saving to the working
                // directory -- do this before saving the nodes (dirs newly created)
                WorkflowManager.deleteObsoleteNodeDirs(nodeContainerDirectory.getDeletedNodesFileLocations());
            }
        }
        if (saveHelper.isAutoSave() && workflowDirRef.equals(autoSaveDirectory)) {
            if (!autoSaveDirectory.isDirty()) {
                // auto-save target unchanged -- skip writing
                return;
            } else {
                workflowDirRef = autoSaveDirectory;
                WorkflowManager.deleteObsoleteNodeDirs(autoSaveDirectory.getDeletedNodesFileLocations());
            }
        }
        File workflowDir = workflowDirRef.getFile();
        workflowDir.mkdirs();
        if (!workflowDir.isDirectory()) {
            // FIX: error message previously rendered as `directory ": <dir>"` (misplaced quote)
            throw new IOException("Unable to create or write directory \"" + workflowDir + "\"");
        }
        saveWorkflowName(preFilledSettings, wm.getNameField());
        saveAuthorInformation(wm.getAuthorInformation(), preFilledSettings);
        saveWorkflowCipher(preFilledSettings, wm.getWorkflowCipher());
        FileNodeContainerMetaPersistor.save(preFilledSettings, wm, workflowDirRef);
        saveWorkflowVariables(wm, preFilledSettings);
        saveCredentials(wm, preFilledSettings);
        saveWorkflowAnnotations(wm, preFilledSettings);
        NodeSettingsWO nodesSettings = saveSettingsForNodes(preFilledSettings);
        Collection<NodeContainer> nodes = wm.getNodeContainers();
        // +1 reserves a progress share for the trailing bookkeeping after the node loop
        double progRatio = 1.0 / (nodes.size() + 1);
        for (NodeContainer nextNode : nodes) {
            int id = nextNode.getID().getIndex();
            ExecutionMonitor subExec = execMon.createSubProgress(progRatio);
            execMon.setMessage(nextNode.getNameWithID());
            NodeSettingsWO sub = nodesSettings.addNodeSettings("node_" + id);
            saveNodeContainer(sub, workflowDirRef, nextNode, subExec, saveHelper);
            subExec.setProgress(1.0);
        }
        execMon.setMessage("connection information");
        NodeSettingsWO connSettings = saveSettingsForConnections(preFilledSettings);
        int connectionNumber = 0;
        for (ConnectionContainer cc : wm.getConnectionContainers()) {
            NodeSettingsWO nextConnectionConfig = connSettings.addNodeSettings("connection_" + connectionNumber);
            saveConnection(nextConnectionConfig, cc);
            connectionNumber += 1;
        }
        int inCount = wm.getNrInPorts();
        NodeSettingsWO inPortsSetts = inCount > 0 ? saveInPortsSetting(preFilledSettings) : null;
        NodeSettingsWO inPortsSettsEnum = null;
        if (inPortsSetts != null) {
            // TODO actually not neccessary to save the class name
            saveInportsBarUIInfoClassName(inPortsSetts, wm.getInPortsBarUIInfo());
            saveInportsBarUIInfoSettings(inPortsSetts, wm.getInPortsBarUIInfo());
            inPortsSettsEnum = saveInPortsEnumSetting(inPortsSetts);
        }
        for (int i = 0; i < inCount; i++) {
            NodeSettingsWO sPort = saveInPortSetting(inPortsSettsEnum, i);
            saveInPort(sPort, wm, i);
        }
        int outCount = wm.getNrOutPorts();
        NodeSettingsWO outPortsSetts = outCount > 0 ? saveOutPortsSetting(preFilledSettings) : null;
        NodeSettingsWO outPortsSettsEnum = null;
        if (outPortsSetts != null) {
            saveOutportsBarUIInfoClassName(outPortsSetts, wm.getOutPortsBarUIInfo());
            saveOutportsBarUIInfoSettings(outPortsSetts, wm.getOutPortsBarUIInfo());
            outPortsSettsEnum = saveOutPortsEnumSetting(outPortsSetts);
        }
        for (int i = 0; i < outCount; i++) {
            NodeSettingsWO singlePort = saveOutPortSetting(outPortsSettsEnum, i);
            saveOutPort(singlePort, wm, i);
        }
        saveEditorUIInformation(wm, preFilledSettings);
        File workflowFile = new File(workflowDir, fName);
        // remove the counterpart file of the "other" role so at most one of
        // workflow.knime / template.knime (and its cipher variant) remains
        String toBeDeletedFileName = Role.Template.equals(r) ? TEMPLATE_FILE : WORKFLOW_FILE;
        new File(workflowDir, toBeDeletedFileName).delete();
        new File(workflowDir, WorkflowCipher.getCipherFileName(toBeDeletedFileName)).delete();
        // FIX: stream was previously never closed (leak on exception during saveToXML)
        try (OutputStream os = wm.getDirectNCParent().cipherOutput(new FileOutputStream(workflowFile))) {
            preFilledSettings.saveToXML(os);
        }
        if (saveHelper.isSaveData()) {
            File saveWithDataFile = new File(workflowDir, SAVED_WITH_DATA_FILE);
            // FIX: writer was previously closed only on the happy path
            // NOTE(review): FileWriter uses the platform default charset -- kept for
            // backward compatibility; content is essentially ASCII
            try (BufferedWriter o = new BufferedWriter(new FileWriter(saveWithDataFile))) {
                o.write("Do not delete this file!");
                o.newLine();
                o.write("This file serves to indicate that the workflow was written as part of the usual save "
                    + "routine (not exported).");
                o.newLine();
                o.newLine();
                o.write("Workflow was last saved by user ");
                o.write(System.getProperty("user.name"));
                o.write(" on " + new Date());
            }
        }
        // remember the directory on first (auto-)save so subsequent saves can detect dirtiness
        if (saveHelper.isAutoSave() && autoSaveDirectory == null) {
            wm.setAutoSaveDirectory(workflowDirRef);
        }
        if (!saveHelper.isAutoSave() && nodeContainerDirectory == null) {
            wm.setNodeContainerDirectory(workflowDirRef);
        }
        NodeContainerState wmState = wm.getNodeContainerState();
        // non remote executions
        boolean isExecutingLocally = wmState.isExecutionInProgress() && !wmState.isExecutingRemotely();
        if (workflowDirRef.equals(nodeContainerDirectory) && !isExecutingLocally) {
            wm.unsetDirty();
        }
        // a locally executing workflow will change again, so keep the dir marked dirty
        workflowDirRef.setDirty(isExecutingLocally);
        execMon.setProgress(1.0);
    } finally {
        workflowDirRef.fileUnlockRootForVM();
    }
}
Usage of org.knime.core.internal.ReferencedFile in the knime-core project (by KNIME): class FileWorkflowPersistor, method loadNodeContainer.
/**
 * {@inheritDoc}
 *
 * Loads all nodes and connections described by the previously parsed workflow settings
 * ({@code m_workflowSett}), registering per-node persistors in {@code m_nodeContainerLoaderMap}
 * and connection templates in {@code m_connectionSet}. Errors are collected in
 * {@code loadResult}; loading continues past individual failures where possible.
 */
@Override
public void loadNodeContainer(final Map<Integer, BufferedDataTable> tblRep, final ExecutionMonitor exec,
        final LoadResult loadResult) throws CanceledExecutionException, IOException {
    ReferencedFile workflowKNIMEFile = getWorkflowKNIMEFile();
    if (workflowKNIMEFile == null || m_workflowSett == null) {
        setDirtyAfterLoad();
        throw new IllegalStateException("The method preLoadNodeContainer has either not been called or failed");
    }
    /* read nodes */
    NodeSettingsRO nodes;
    try {
        nodes = loadSettingsForNodes(m_workflowSett);
    } catch (InvalidSettingsException e) {
        String error = "Can't load nodes in workflow, config not found: " + e.getMessage();
        getLogger().debug(error, e);
        loadResult.addError(error);
        setDirtyAfterLoad();
        setNeedsResetAfterLoad();
        // stop loading here
        return;
    }
    // ids of nodes that failed to load. Used to suppress superfluous errors when reading the connections
    Set<Integer> failingNodeIDSet = new HashSet<Integer>();
    // ids of nodes whose factory can't be loaded (e.g. node extension not installed)
    Map<Integer, NodeFactoryUnknownException> missingNodeIDMap = new HashMap<Integer, NodeFactoryUnknownException>();
    exec.setMessage("node information");
    // node directories are resolved relative to the workflow.knime parent directory
    final ReferencedFile workflowDirRef = workflowKNIMEFile.getParent();
    /* Load nodes */
    for (String nodeKey : nodes.keySet()) {
        exec.checkCanceled();
        NodeSettingsRO nodeSetting;
        try {
            nodeSetting = nodes.getNodeSettings(nodeKey);
        } catch (InvalidSettingsException e) {
            String error = "Unable to load settings for node with internal " + "id \"" + nodeKey + "\": "
                + e.getMessage();
            getLogger().debug(error, e);
            setDirtyAfterLoad();
            loadResult.addError(error);
            continue;
        }
        if (shouldSkipThisNode(nodeSetting)) {
            continue;
        }
        int nodeIDSuffix;
        try {
            nodeIDSuffix = loadNodeIDSuffix(nodeSetting);
        } catch (InvalidSettingsException e) {
            nodeIDSuffix = getRandomNodeID();
            // FIX: message previously read e.g. "trying random number 5instead" (missing space)
            String error = "Unable to load node ID (internal id \"" + nodeKey + "\"), trying random number "
                + nodeIDSuffix + " instead: " + e.getMessage();
            getLogger().debug(error, e);
            setDirtyAfterLoad();
            loadResult.addError(error);
        }
        NodeType nodeType;
        try {
            nodeType = loadNodeType(nodeSetting);
        } catch (InvalidSettingsException e) {
            String error = "Can't retrieve node type for contained node with id suffix " + nodeIDSuffix
                + ", attempting to read ordinary (native) node: " + e.getMessage();
            getLogger().debug(error, e);
            setDirtyAfterLoad();
            loadResult.addError(error);
            // best-effort fallback: treat the entry as a native node
            nodeType = NodeType.NativeNode;
        }
        NodeUIInformation nodeUIInfo = null;
        String uiInfoClassName;
        try {
            uiInfoClassName = loadUIInfoClassName(nodeSetting);
        } catch (InvalidSettingsException e) {
            String error = "Unable to load UI information class name " + "to node with ID suffix " + nodeIDSuffix
                + ", no UI information available: " + e.getMessage();
            getLogger().debug(error, e);
            setDirtyAfterLoad();
            loadResult.addError(error);
            uiInfoClassName = null;
        }
        if (uiInfoClassName != null) {
            try {
                // load node ui info
                nodeUIInfo = loadNodeUIInformation(nodeSetting);
            } catch (InvalidSettingsException e) {
                String error = "Unable to load UI information to " + "node with ID suffix " + nodeIDSuffix
                    + ", no UI information available: " + e.getMessage();
                getLogger().debug(error, e);
                setDirtyAfterLoad();
                loadResult.addError(error);
            }
        }
        ReferencedFile nodeFile;
        try {
            nodeFile = loadNodeFile(nodeSetting, workflowDirRef);
        } catch (InvalidSettingsException e) {
            String error = "Unable to load settings for node " + "with ID suffix " + nodeIDSuffix + ": "
                + e.getMessage();
            getLogger().debug(error, e);
            setDirtyAfterLoad();
            loadResult.addError(error);
            failingNodeIDSet.add(nodeIDSuffix);
            continue;
        }
        FromFileNodeContainerPersistor persistor;
        switch(nodeType) {
        case MetaNode:
            persistor = createWorkflowPersistorLoad(nodeFile);
            break;
        case NativeNode:
            persistor = createNativeNodeContainerPersistorLoad(nodeFile);
            break;
        case SubNode:
            persistor = createSubNodeContainerPersistorLoad(nodeFile);
            break;
        default:
            throw new IllegalStateException("Unknown node type: " + nodeType);
        }
        try {
            LoadResult childResult = new LoadResult(nodeType.toString() + " with ID suffix " + nodeIDSuffix);
            persistor.preLoadNodeContainer(this, nodeSetting, childResult);
            loadResult.addChildError(childResult);
        } catch (Throwable e) {
            String error = "Unable to load node with ID suffix " + nodeIDSuffix + " into workflow, skipping it: "
                + e.getMessage();
            String loadErrorString;
            if (e instanceof NodeFactoryUnknownException) {
                loadErrorString = e.getMessage();
            } else {
                loadErrorString = error;
            }
            // expected load problems are only logged at debug level; anything else is unexpected
            if (e instanceof InvalidSettingsException || e instanceof IOException
                || e instanceof NodeFactoryUnknownException) {
                getLogger().debug(error, e);
            } else {
                getLogger().error(error, e);
            }
            loadResult.addError(loadErrorString);
            if (e instanceof NodeFactoryUnknownException) {
                missingNodeIDMap.put(nodeIDSuffix, (NodeFactoryUnknownException) e);
                // don't set dirty -- a placeholder persistor is still registered below
            } else {
                setDirtyAfterLoad();
                failingNodeIDSet.add(nodeIDSuffix);
                // node directory is the parent of the settings.xml
                m_obsoleteNodeDirectories.add(nodeFile.getParent());
                continue;
            }
        }
        NodeContainerMetaPersistor meta = persistor.getMetaPersistor();
        if (m_nodeContainerLoaderMap.containsKey(nodeIDSuffix)) {
            int randomID = getRandomNodeID();
            setDirtyAfterLoad();
            loadResult.addError("Duplicate id encountered in workflow: " + nodeIDSuffix
                + ", uniquifying to random id " + randomID + ", this possibly screws the connections");
            nodeIDSuffix = randomID;
        }
        meta.setNodeIDSuffix(nodeIDSuffix);
        meta.setUIInfo(nodeUIInfo);
        if (persistor.isDirtyAfterLoad()) {
            setDirtyAfterLoad();
        }
        m_nodeContainerLoaderMap.put(nodeIDSuffix, persistor);
    }
    /* read connections */
    exec.setMessage("connection information");
    NodeSettingsRO connections;
    try {
        connections = loadSettingsForConnections(m_workflowSett);
        if (connections == null) {
            connections = EMPTY_SETTINGS;
        }
    } catch (InvalidSettingsException e) {
        String error = "Can't load workflow connections, config not found: " + e.getMessage();
        getLogger().debug(error, e);
        setDirtyAfterLoad();
        loadResult.addError(error);
        connections = EMPTY_SETTINGS;
    }
    for (String connectionKey : connections.keySet()) {
        exec.checkCanceled();
        ConnectionContainerTemplate c;
        try {
            c = loadConnection(connections.getNodeSettings(connectionKey));
        } catch (InvalidSettingsException e) {
            String error = "Can't load connection with internal ID \"" + connectionKey + "\": " + e.getMessage();
            getLogger().debug(error, e);
            setDirtyAfterLoad();
            loadResult.addError(error);
            continue;
        }
        int sourceIDSuffix = c.getSourceSuffix();
        NodeContainerPersistor sourceNodePersistor = m_nodeContainerLoaderMap.get(sourceIDSuffix);
        // suffix -1 denotes this (meta)node's own port bar, which has no persistor entry
        if (sourceNodePersistor == null && sourceIDSuffix != -1) {
            setDirtyAfterLoad();
            if (!failingNodeIDSet.contains(sourceIDSuffix)) {
                loadResult.addError("Unable to load node connection " + c + ", source node does not exist");
            }
            continue;
        }
        fixSourcePortIfNecessary(sourceNodePersistor, c);
        int destIDSuffix = c.getDestSuffix();
        NodeContainerPersistor destNodePersistor = m_nodeContainerLoaderMap.get(destIDSuffix);
        if (destNodePersistor == null && destIDSuffix != -1) {
            setDirtyAfterLoad();
            if (!failingNodeIDSet.contains(destIDSuffix)) {
                loadResult.addError("Unable to load node connection " + c + ", destination node does not exist");
            }
            continue;
        }
        fixDestPortIfNecessary(destNodePersistor, c);
        if (!m_connectionSet.add(c)) {
            setDirtyAfterLoad();
            loadResult.addError("Duplicate connection information: " + c);
        }
    }
    // for nodes whose factory is missing, reconstruct their port types from the
    // connections of their neighbors so a placeholder node can be instantiated
    for (Map.Entry<Integer, NodeFactoryUnknownException> missingNode : missingNodeIDMap.entrySet()) {
        exec.checkCanceled();
        int missingNodeSuffix = missingNode.getKey();
        NodeAndBundleInformation nodeInfo = missingNode.getValue().getNodeAndBundleInformation();
        loadResult.addMissingNode(nodeInfo);
        NodeSettingsRO additionalFactorySettings = missingNode.getValue().getAdditionalFactorySettings();
        ArrayList<PersistorWithPortIndex> upstreamNodes = new ArrayList<PersistorWithPortIndex>();
        ArrayList<List<PersistorWithPortIndex>> downstreamNodes = new ArrayList<List<PersistorWithPortIndex>>();
        for (ConnectionContainerTemplate t : m_connectionSet) {
            // check upstream nodes
            int sourceSuffix = t.getSourceSuffix();
            int destSuffix = t.getDestSuffix();
            int sourcePort = t.getSourcePort();
            int destPort = t.getDestPort();
            if (destSuffix == missingNodeSuffix) {
                FromFileNodeContainerPersistor persistor;
                if (sourceSuffix == -1) {
                    // connected to this metanode's input port bar
                    persistor = this;
                } else {
                    persistor = m_nodeContainerLoaderMap.get(sourceSuffix);
                }
                ensureArrayListIndexValid(upstreamNodes, destPort);
                upstreamNodes.set(destPort, new PersistorWithPortIndex(persistor, sourcePort));
            }
            // check downstream nodes
            if (sourceSuffix == missingNodeSuffix) {
                FromFileNodeContainerPersistor persistor;
                if (destSuffix == -1) {
                    // connect to this metanode's output port bar
                    persistor = this;
                } else {
                    persistor = m_nodeContainerLoaderMap.get(destSuffix);
                }
                ensureArrayListIndexValid(downstreamNodes, sourcePort);
                List<PersistorWithPortIndex> downstreamNodesAtPort = downstreamNodes.get(sourcePort);
                if (downstreamNodesAtPort == null) {
                    downstreamNodesAtPort = new ArrayList<PersistorWithPortIndex>();
                    downstreamNodes.set(sourcePort, downstreamNodesAtPort);
                }
                downstreamNodesAtPort.add(new PersistorWithPortIndex(persistor, destPort));
            }
        }
        FromFileNodeContainerPersistor failingNodePersistor = m_nodeContainerLoaderMap.get(missingNodeSuffix);
        failingNodePersistor.guessPortTypesFromConnectedNodes(nodeInfo, additionalFactorySettings, upstreamNodes,
            downstreamNodes);
    }
    exec.setProgress(1.0);
}
Usage of org.knime.core.internal.ReferencedFile in the knime-core project (by KNIME): class BufferedDataTable, method loadFromFile.
/**
 * Factory method to restore a table that has been written using
 * the save method. Dispatches on the stored table type (container, rearrange,
 * joined, void, concatenate, wrapped, spec-replacer, extension) and recursively
 * restores referenced tables first.
 * @param dirRef The directory to load from.
 * @param settings The settings to load from (pre-1.2.0 format only; may be null for newer tables).
 * @param exec The exec mon for progress/cancel
 * @param tblRep The table repository
 * @param bufferRep The buffer repository (needed for blobs).
 * @param fileStoreHandlerRepository Repository for file store handlers.
 * @return The table as written by save.
 * @throws IOException If reading fails.
 * @throws CanceledExecutionException If canceled.
 * @throws InvalidSettingsException If settings are invalid.
 */
static BufferedDataTable loadFromFile(final ReferencedFile dirRef, final NodeSettingsRO settings, final ExecutionMonitor exec, final Map<Integer, BufferedDataTable> tblRep, final HashMap<Integer, ContainerTable> bufferRep, final FileStoreHandlerRepository fileStoreHandlerRepository) throws IOException, CanceledExecutionException, InvalidSettingsException {
File dir = dirRef.getFile();
NodeSettingsRO s;
// in version 1.1.x and before, the information was stored in
// an external data.xml (directly in the node dir)
boolean isVersion11x;
File dataXML = new File(dir, TABLE_DESCRIPTION_FILE);
// loading an exported workflow without data
if (!dataXML.exists() && settings == null) {
throw new IOException("No such data file: " + dataXML.getAbsolutePath());
}
DataTableSpec spec;
if (dataXML.exists()) {
// version 1.2.0 and later
s = NodeSettings.loadFromXML(new BufferedInputStream(new FileInputStream(dataXML)));
spec = loadSpec(dirRef);
isVersion11x = false;
} else {
// version 1.1.x
s = settings.getNodeSettings(CFG_TABLE_META);
// needs to be read from zip file!
spec = null;
isVersion11x = true;
}
int id = s.getInt(CFG_TABLE_ID);
// keep the global id counter ahead of every id seen during load
LAST_ID.set(Math.max(LAST_ID.get(), id + 1));
String fileName = s.getString(CFG_TABLE_FILE_NAME);
ReferencedFile fileRef;
if (fileName != null) {
fileRef = new ReferencedFile(dirRef, fileName);
File file = fileRef.getFile();
if (!file.exists()) {
throw new IOException("No such data file: " + fileRef);
}
if (!file.isFile() || !file.canRead()) {
throw new IOException("Cannot read file " + fileRef);
}
} else {
// for instance for a column filter node this is null.
fileRef = null;
}
String tableType = s.getString(CFG_TABLE_TYPE);
BufferedDataTable t;
if (tableType.equals(TABLE_TYPE_REFERENCE_IN_SAME_NODE)) {
// table is shared with another port of the same node; must already be in the repository
t = tblRep.get(id);
if (t == null) {
throw new InvalidSettingsException("Table reference with ID " + id + " not found in load map");
}
return t;
} else if (tableType.equals(TABLE_TYPE_CONTAINER)) {
ContainerTable fromContainer;
if (isVersion11x) {
fromContainer = DataContainer.readFromZip(fileRef.getFile());
} else {
// delayed read: content is pulled from the zip lazily on first access
fromContainer = BufferedDataContainer.readFromZipDelayed(fileRef, spec, id, bufferRep, fileStoreHandlerRepository);
}
t = new BufferedDataTable(fromContainer, id);
} else {
// all remaining table types are derived tables; restore their reference tables first
String[] referenceDirs;
// (no concatenate table in those versions)
if (s.containsKey("table_reference")) {
// legacy single-reference key
String refDir = s.getString("table_reference");
referenceDirs = refDir == null ? new String[0] : new String[] { refDir };
} else {
referenceDirs = s.getStringArray(CFG_TABLE_REFERENCE);
}
for (String reference : referenceDirs) {
if (reference == null) {
throw new InvalidSettingsException("Reference dir is \"null\"");
}
ReferencedFile referenceDirRef = new ReferencedFile(dirRef, reference);
// recursion registers the referenced table in tblRep for the loads below
loadFromFile(referenceDirRef, s, exec, tblRep, bufferRep, fileStoreHandlerRepository);
}
if (tableType.equals(TABLE_TYPE_REARRANGE_COLUMN)) {
t = new BufferedDataTable(new RearrangeColumnsTable(fileRef, s, tblRep, spec, id, bufferRep, fileStoreHandlerRepository));
} else if (tableType.equals(TABLE_TYPE_JOINED)) {
JoinedTable jt = JoinedTable.load(s, spec, tblRep);
t = new BufferedDataTable(jt);
} else if (tableType.equals(TABLE_TYPE_VOID)) {
VoidTable jt = VoidTable.load(spec);
t = new BufferedDataTable(jt);
} else if (tableType.equals(TABLE_TYPE_CONCATENATE)) {
ConcatenateTable ct = ConcatenateTable.load(s, spec, tblRep);
t = new BufferedDataTable(ct);
} else if (tableType.equals(TABLE_TYPE_WRAPPED)) {
WrappedTable wt = WrappedTable.load(s, tblRep);
t = new BufferedDataTable(wt);
} else if (tableType.equals(TABLE_TYPE_NEW_SPEC)) {
TableSpecReplacerTable replTable;
if (isVersion11x) {
replTable = TableSpecReplacerTable.load11x(fileRef.getFile(), s, tblRep);
} else {
replTable = TableSpecReplacerTable.load(s, spec, tblRep);
}
t = new BufferedDataTable(replTable);
} else if (tableType.equals(TABLE_TYPE_EXTENSION)) {
ExtensionTable et = ExtensionTable.loadExtensionTable(fileRef, spec, s, tblRep, exec);
t = new BufferedDataTable(et);
} else {
throw new InvalidSettingsException("Unknown table identifier: " + tableType);
}
}
t.m_tableID = id;
// register so later sibling/derived tables can resolve this id
tblRep.put(id, t);
return t;
}
Usage of org.knime.core.internal.ReferencedFile in the knime-core project (by KNIME): class Node, method createNodeExecutionResult.
/**
 * Creates an execution result containing all calculated values in a
 * execution. The returned value is suitable to be used in
 * {@link #loadDataAndInternals(
 * NodeContentPersistor, ExecutionMonitor, LoadResult)}.
 * If this node is not executed, it will assign null values to the fields
 * in the returned execution result.
 * @param exec For progress information.
 * @return A new execution result containing the values being calculated.
 * @throws CanceledExecutionException If canceled
 */
public NodeExecutionResult createNodeExecutionResult(final ExecutionMonitor exec)
        throws CanceledExecutionException {
    final NodeExecutionResult execResult = new NodeExecutionResult();
    execResult.setWarningMessage(m_model.getWarningMessage());
    // persist node internals into a temp dir; failures are logged but non-fatal
    if (hasContent()) {
        try {
            final File tempInternDir = FileUtil.createTempDir("knime_node_internDir");
            exec.setMessage("Saving internals");
            saveInternals(tempInternDir, exec.createSubProgress(0.0));
            execResult.setNodeInternDir(new ReferencedFile(tempInternDir));
        } catch (IOException ioe) {
            LOGGER.error("Unable to save internals", ioe);
        }
    }
    // snapshot internally held port objects (defensive copy of the array)
    if (m_internalHeldPortObjects != null) {
        execResult.setInternalHeldPortObjects(
            Arrays.copyOf(m_internalHeldPortObjects, m_internalHeldPortObjects.length));
    }
    final int portCount = getNrOutPorts();
    final PortObject[] outObjects = new PortObject[portCount];
    final PortObjectSpec[] outSpecs = new PortObjectSpec[portCount];
    for (int port = 0; port < portCount; port++) {
        final PortObject outObject = getOutputObject(port);
        if (outObject != null) {
            outObjects[port] = outObject;
            outSpecs[port] = outObject.getSpec();
        }
    }
    execResult.setPortObjects(outObjects);
    execResult.setPortObjectSpecs(outSpecs);
    // Add the outgoing flow variables to the execution result;
    // prepending while iterating keeps the bottom-most element at the bottom of the stack
    final FlowObjectStack outgoingStack = m_model.getOutgoingFlowObjectStack();
    final List<FlowVariable> flowScopedVars = new ArrayList<FlowVariable>();
    for (FlowVariable variable : outgoingStack.getAvailableFlowVariables().values()) {
        if (variable.getScope().equals(FlowVariable.Scope.Flow)) {
            flowScopedVars.add(0, variable);
        }
    }
    execResult.setFlowVariables(flowScopedVars);
    return execResult;
}
Usage of org.knime.core.internal.ReferencedFile in the knime-core project (by KNIME): class FileNodePersistor, method loadPortObjectSpec.
/**
 * Loads the output spec of the given (old-format) port index, handling both the
 * pre-1.2.0 layout (spec_&lt;i&gt;.xml referenced from settings.xml) and the
 * 1.2.0+ layout (data/data_&lt;i&gt;/spec.xml).
 *
 * @param node the node whose output type determines whether a spec exists
 * @param settings the persisted node settings
 * @param index the port index in the old numbering
 * @return the loaded spec, or null for model ports / missing spec files
 * @throws InvalidSettingsException if required settings entries are missing
 * @throws IOException if a spec file is expected but cannot be loaded
 */
private PortObjectSpec loadPortObjectSpec(final Node node, final NodeSettingsRO settings, final int index)
        throws InvalidSettingsException, IOException {
    final PortType portType = node.getOutputType(getNewPortIndex(index));
    if (!BufferedDataTable.class.isAssignableFrom(portType.getPortObjectClass())) {
        // port is a model port, no spec available in 1.x.x
        return null;
    }
    // in 1.1.x and before the settings.xml contained the location
    // of the data table specs file (spec_0.xml, e.g.). From 1.2.0 on,
    // the spec is saved in data/data_0/spec.xml
    final ReferencedFile nodeDir = getNodeDirectory();
    if (settings.containsKey(CFG_SPEC_FILES)) {
        // pre-1.2.0 layout
        final NodeSettingsRO specLocations = settings.getNodeSettings(CFG_SPEC_FILES);
        final String specFileName = specLocations.getString(CFG_OUTPUT_PREFIX + index);
        final File specFile = new ReferencedFile(nodeDir, specFileName).getFile();
        if (!specFile.exists()) {
            // missing spec file is tolerated in the legacy format
            return null;
        }
        final NodeSettingsRO specXML =
            NodeSettings.loadFromXML(new BufferedInputStream(new FileInputStream(specFile)));
        return DataTableSpec.load(specXML);
    }
    // 1.2.0+ layout
    final NodeSettingsRO dataSettings = settings.getNodeSettings(CFG_DATA_FILE);
    final ReferencedFile dataDirRef = new ReferencedFile(nodeDir, dataSettings.getString(CFG_DATA_FILE_DIR));
    final NodeSettingsRO portSettings = dataSettings.getNodeSettings(CFG_OUTPUT_PREFIX + index);
    if (!portSettings.getBoolean(CFG_HAS_SPEC_FILE, true)) {
        return null;
    }
    final ReferencedFile portDirRef = new ReferencedFile(dataDirRef, portSettings.getString(CFG_DATA_FILE_DIR));
    readDirectory(portDirRef.getFile());
    final DataTableSpec tableSpec = BufferedDataTable.loadSpec(portDirRef);
    // an explicit "has spec" flag with no readable spec is an error
    if (portSettings.containsKey(CFG_HAS_SPEC_FILE) && tableSpec == null) {
        throw new IOException("No spec file available for" + " outport " + index + ".");
    }
    return tableSpec;
}
Aggregations