Use of org.knime.core.node.workflow.FileWorkflowPersistor.LoadVersion in project knime-core by knime.
The class BugAP7982_FutureKNIMEVersion_AllCompatible, method loadWorkflow:
/**
 * Load workflow, expect no errors.
 */
@Test
public void loadWorkflow() throws Exception {
    File wkfDir = getDefaultWorkflowDirectory();
    WorkflowLoadResult loadWorkflow = loadWorkflow(wkfDir, new ExecutionMonitor(), new WorkflowLoadHelper(wkfDir) {
        @Override
        public UnknownKNIMEVersionLoadPolicy getUnknownKNIMEVersionLoadPolicy(
                final LoadVersion workflowKNIMEVersion, final Version createdByKNIMEVersion,
                final boolean isNightlyBuild) {
            throw new AssertionFailedError("Not to be called - workflow is expected to be compatible");
        }
    });
    setManager(loadWorkflow.getWorkflowManager());
    assertThat("Expected to load without errors", loadWorkflow.getType(), is(LoadResultEntryType.Ok));
    assertThat("Workflow version incorrect", getManager().getLoadVersion(), is(LoadVersion.V280));
}
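For context, the LoadVersion returned by the manager can also be compared to branch on backward-compatibility code; a minimal sketch, assuming the manager set up by the test above:

// Minimal sketch (assumption: getManager() is the manager loaded in the test above).
LoadVersion v = getManager().getLoadVersion();
if (v.isOlderThan(LoadVersion.V3010)) {
    // workflow file was written by a KNIME release older than 3.1
}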
Use of org.knime.core.node.workflow.FileWorkflowPersistor.LoadVersion in project knime-core by knime.
The class BugAP7982_FutureKNIMEVersion_FutureVersion, method loadWorkflow:
private WorkflowLoadResult loadWorkflow(final boolean tryToLoadInsteadOfFail) throws Exception {
    File wkfDir = getDefaultWorkflowDirectory();
    WorkflowLoadResult loadWorkflow = loadWorkflow(wkfDir, new ExecutionMonitor(), new WorkflowLoadHelper(wkfDir) {
        @Override
        public UnknownKNIMEVersionLoadPolicy getUnknownKNIMEVersionLoadPolicy(
                final LoadVersion workflowKNIMEVersion, final Version createdByKNIMEVersion,
                final boolean isNightlyBuild) {
            assertThat("Unexpected KNIME version in file", workflowKNIMEVersion, is(LoadVersion.FUTURE));
            assertThat("Nightly flag wrong", isNightlyBuild, is(m_isExpectNightly));
            if (tryToLoadInsteadOfFail) {
                return UnknownKNIMEVersionLoadPolicy.Try;
            } else {
                return UnknownKNIMEVersionLoadPolicy.Abort;
            }
        }
    });
    return loadWorkflow;
}
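A hedged sketch of how such a helper could be exercised from tests. The test method names and the expected-exception behaviour are illustrative assumptions, not taken from the project:

// Illustrative only: 'loadWorkflowTry' / 'loadWorkflowFail' are hypothetical test names.
@Test
public void loadWorkflowTry() throws Exception {
    // policy returns Try -> loading is attempted despite the unknown (future) version
    setManager(loadWorkflow(true).getWorkflowManager());
}

@Test(expected = UnsupportedWorkflowVersionException.class)
public void loadWorkflowFail() throws Exception {
    // policy returns Abort -> assumed to surface as an UnsupportedWorkflowVersionException
    loadWorkflow(false);
}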
Use of org.knime.core.node.workflow.FileWorkflowPersistor.LoadVersion in project knime-core by knime.
The class SubNodeContainer, method performLoadContent:
/**
 * {@inheritDoc}
 */
@Override
WorkflowCopyContent performLoadContent(final SingleNodeContainerPersistor nodePersistor,
    final Map<Integer, BufferedDataTable> tblRep, final FlowObjectStack inStack, final ExecutionMonitor exec,
    final LoadResult loadResult, final boolean preserveNodeMessage) throws CanceledExecutionException {
    SubNodeContainerPersistor subNodePersistor = (SubNodeContainerPersistor)nodePersistor;
    WorkflowPersistor workflowPersistor = subNodePersistor.getWorkflowPersistor();
    // TODO pass in a filter input stack
    m_wfm.loadContent(workflowPersistor, tblRep, inStack, exec, loadResult, preserveNodeMessage);
    if (workflowPersistor.isDirtyAfterLoad() || m_wfm.isDirty()) {
        setDirty();
    }
    InternalNodeContainerState loadState = nodePersistor.getMetaPersistor().getState();
    if (!m_wfm.getInternalState().equals(loadState)) {
        // can happen for workflows that were exported without data;
        // the same check is done by the caller (WorkflowManager#postLoad) and handled appropriately
        setInternalState(m_wfm.getInternalState(), false);
    }
    NodeSettingsRO modelSettings = subNodePersistor.getSNCSettings().getModelSettings();
    if (modelSettings != null) {
        try {
            loadModelSettingsIntoDialogNodes(modelSettings, false);
        } catch (InvalidSettingsException e) {
            final String msg = "Could not load Wrapped Metanode configuration into dialog-nodes: " + e.getMessage();
            LOGGER.error(msg, e);
            loadResult.addError(msg);
            setDirty();
        }
    }
    checkInOutNodesAfterLoad(subNodePersistor, loadResult);
    // put data into the virtual output node if it was executed
    final NativeNodeContainer virtualOutNode = getVirtualOutNode();
    LoadVersion l = nodePersistor instanceof FileSingleNodeContainerPersistor
        ? ((FileSingleNodeContainerPersistor)nodePersistor).getLoadVersion() : LoadVersion.V3010;
    if (l.isOlderThan(LoadVersion.V3010) && virtualOutNode.getInternalState().isExecuted()) {
        VirtualSubNodeOutputNodeModel outNodeModel = getVirtualOutNodeModel();
        PortObject[] outputData = new PortObject[virtualOutNode.getNrInPorts()];
        m_wfm.assembleInputData(getVirtualOutNodeID(), outputData);
        outNodeModel.postLoadExecute(ArrayUtils.removeAll(outputData, 0));
        // allow node to receive the internal held objects so that the next save operation also persists the
        // array of internal held objects - otherwise we get strange errors with nodes saved in 2.x, then loaded
        // and saved in 3.1+ (and converted ... although unmodified)
        getVirtualOutNode().getNode().assignInternalHeldObjects(outputData, null,
            getVirtualOutNode().createExecutionContext(), new PortObject[0]);
    }
    setVirtualOutputIntoOutport(m_wfm.getInternalState());
    m_wfmStateChangeListener = createAndAddStateListener();
    getInPort(0).setPortName("Variable Inport");
    getOutPort(0).setPortName("Variable Outport");
    getVirtualInNode().addNodeStateChangeListener(new RefreshPortNamesListener());
    getVirtualOutNode().addNodeStateChangeListener(new RefreshPortNamesListener());
    refreshPortNames();
    return null;
}
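The version guard above is a recurring backward-compatibility pattern: derive the persistor's LoadVersion (falling back to V3010 when the persistor is not file-based) and run migration code only for workflows saved by older releases. A condensed sketch of that pattern, with a hypothetical migrateLegacyData() standing in for the version-specific work:

// Sketch of the version-guard pattern used above; migrateLegacyData() is hypothetical.
LoadVersion loadVersion = nodePersistor instanceof FileSingleNodeContainerPersistor
    ? ((FileSingleNodeContainerPersistor)nodePersistor).getLoadVersion() : LoadVersion.V3010;
if (loadVersion.isOlderThan(LoadVersion.V3010)) {
    migrateLegacyData(); // hypothetical: only needed for workflows saved before KNIME 3.1
}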
Use of org.knime.core.node.workflow.FileWorkflowPersistor.LoadVersion in project knime-core by knime.
The class WorkflowManager, method load:
/**
 * Implementation of {@link #load(FileWorkflowPersistor, ExecutionMonitor, boolean)}.
 *
 * @noreference This method is not intended to be referenced by clients.
 */
public void load(final TemplateNodeContainerPersistor persistor, final MetaNodeLinkUpdateResult result,
    final ExecutionMonitor exec, final boolean keepNodeMessages)
    throws IOException, InvalidSettingsException, CanceledExecutionException, UnsupportedWorkflowVersionException {
    final ReferencedFile refDirectory = persistor.getMetaPersistor().getNodeContainerDirectory();
    exec.setMessage("Loading workflow structure from \"" + refDirectory + "\"");
    exec.checkCanceled();
    LoadVersion version = persistor.getLoadVersion();
    LOGGER.debug("Loading workflow from \"" + refDirectory + "\" (version \"" + version + "\" with loader class \""
        + persistor.getClass().getSimpleName() + "\")");
    // data files are loaded using a repository of reference tables
    Map<Integer, BufferedDataTable> tblRep = new HashMap<Integer, BufferedDataTable>();
    persistor.preLoadNodeContainer(null, null, result);
    NodeContainerTemplate loadedInstance = null;
    boolean isIsolatedProject = persistor.isProject();
    InsertWorkflowPersistor insertPersistor = new InsertWorkflowPersistor(persistor);
    ReentrantLock lock = isIsolatedProject ? new ReentrantLock() : m_workflowLock.getReentrantLock();
    lock.lock();
    try {
        m_loadVersion = persistor.getLoadVersion();
        NodeID[] newIDs = loadContent(insertPersistor, tblRep, null, exec, result, keepNodeMessages).getNodeIDs();
        if (newIDs.length != 1) {
            throw new InvalidSettingsException(
                "Loading workflow failed, couldn't identify child sub flow (typically a project)");
        }
        loadedInstance = (NodeContainerTemplate)getNodeContainer(newIDs[0]);
    } finally {
        lock.unlock();
    }
    exec.setProgress(1.0);
    result.setLoadedInstance(loadedInstance);
    result.setGUIMustReportDataLoadErrors(persistor.mustWarnOnDataLoadError());
}
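A hedged sketch of invoking this method; the persistor and the MetaNodeLinkUpdateResult are assumed to be prepared by the surrounding template-update code, and their construction is not shown here:

// Illustrative caller; 'persistor' and 'updateResult' are assumed to be prepared elsewhere.
void loadTemplate(final WorkflowManager manager, final TemplateNodeContainerPersistor persistor,
        final MetaNodeLinkUpdateResult updateResult) throws Exception {
    manager.load(persistor, updateResult, new ExecutionMonitor(), /* keepNodeMessages= */ true);
    // updateResult now carries the loaded instance and the data-load-error reporting flag
}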
Use of org.knime.core.node.workflow.FileWorkflowPersistor.LoadVersion in project knime-core by knime.
The class WorkflowManager, method save:
/**
 * @param directory The directory to save in
 * @param exec The execution monitor
 * @param saveHelper ...
 * @throws IOException If an IO error occurred
 * @throws CanceledExecutionException If the execution was canceled
 * @throws LockFailedException If locking failed
 * @since 2.10
 */
public void save(final File directory, final WorkflowSaveHelper saveHelper, final ExecutionMonitor exec)
    throws IOException, CanceledExecutionException, LockFailedException {
    if (this == ROOT) {
        throw new IOException("Can't save root workflow");
    }
    if (m_isWorkflowDirectoryReadonly) {
        throw new IOException("Workflow is read-only, can't save");
    }
    try (WorkflowLock lock = lock()) {
        ReferencedFile directoryReference = new ReferencedFile(directory);
        // if it's the location associated with the workflow we will use the same instance (due to VM lock)
        if (directoryReference.equals(getNodeContainerDirectory())) {
            directoryReference = getNodeContainerDirectory();
        } else if (saveHelper.isAutoSave() && directoryReference.equals(getAutoSaveDirectory())) {
            directoryReference = getAutoSaveDirectory();
        }
        directoryReference.writeLock();
        try {
            final boolean isWorkingDirectory = directoryReference.equals(getNodeContainerDirectory());
            final LoadVersion saveVersion = FileWorkflowPersistor.VERSION_LATEST;
            if (m_loadVersion != null && !m_loadVersion.equals(saveVersion)) {
                LOGGER.info("Workflow was created with another version of KNIME (workflow version " + m_loadVersion
                    + "), converting to current version. This may take some time.");
                setDirtyAll();
            }
            if (isWorkingDirectory) {
                m_loadVersion = saveVersion;
            }
            if (m_authorInformation == null) {
                m_authorInformation = new AuthorInformation();
            } else {
                m_authorInformation = new AuthorInformation(m_authorInformation);
            }
            final File workflowDir = directoryReference.getFile();
            workflowDir.mkdirs();
            boolean isTemplate = getTemplateInformation().getRole().equals(Role.Template);
            if (isTemplate) {
                FileWorkflowPersistor.saveAsTemplate(this, directoryReference, exec, saveHelper);
            } else {
                FileWorkflowPersistor.save(this, directoryReference, exec, saveHelper);
                WorkflowSaveHook.runHooks(this, saveHelper.isSaveData(), workflowDir);
            }
        } finally {
            directoryReference.writeUnlock();
        }
    }
}
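A hedged usage sketch: saving a workflow that was loaded with an older LoadVersion converts it to FileWorkflowPersistor.VERSION_LATEST (after marking everything dirty, as the log message above notes). The WorkflowSaveHelper constructor arguments below are an assumption, mirroring the isSaveData()/isAutoSave() flags the method queries, and the target directory is a placeholder:

// Sketch only; the WorkflowSaveHelper constructor arguments are assumed and the path is a placeholder.
File targetDir = new File("/path/to/workflow");
WorkflowSaveHelper saveHelper = new WorkflowSaveHelper(true /* save data */, false /* not an auto-save */);
manager.save(targetDir, saveHelper, new ExecutionMonitor());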