Example usage of org.knime.core.util.LockFailedException in the knime-core project (by KNIME):
the LoadWorkflowRunnable class, method run.
/**
 * {@inheritDoc}
 *
 * Loads the workflow contained in {@code m_workflowFile}'s parent directory into the
 * editor ({@code m_editor}). If the workflow file is empty, a brand-new empty project
 * is created instead (see the {@code finally} block). Failures are recorded in
 * {@code m_throwable} or {@code m_loadingCanceledMessage} for the caller to inspect.
 * All field references to the editor/file are released at the end to avoid leaks.
 */
@Override
public void run(final IProgressMonitor pm) {
    // indicates whether to create an empty workflow
    // this is done if the file is empty
    boolean createEmptyWorkflow = false;
    // name of workflow will be null (uses directory name then)
    String name = null;
    m_throwable = null;
    try {
        // create progress monitor (101 ticks; cancellation is forwarded from the Eclipse monitor)
        ProgressHandler progressHandler = new ProgressHandler(pm, 101, "Loading workflow...");
        final CheckCancelNodeProgressMonitor progressMonitor = new CheckCancelNodeProgressMonitor(pm);
        progressMonitor.addProgressListener(progressHandler);
        File workflowDirectory = m_workflowFile.getParentFile();
        Display d = Display.getDefault();
        GUIWorkflowLoadHelper loadHelper = new GUIWorkflowLoadHelper(d, workflowDirectory.getName(), m_mountpointURI, workflowDirectory, m_mountpointRoot);
        final WorkflowLoadResult result = WorkflowManager.loadProject(workflowDirectory, new ExecutionMonitor(progressMonitor), loadHelper);
        final WorkflowManager wm = result.getWorkflowManager();
        m_editor.setWorkflowManager(wm);
        pm.subTask("Finished.");
        pm.done();
        // a freshly loaded workflow can already be dirty (e.g. after a version migration)
        if (wm.isDirty()) {
            m_editor.markDirty();
        }
        final IStatus status = createStatus(result, !result.getGUIMustReportDataLoadErrors());
        String message;
        switch(status.getSeverity()) {
            case IStatus.OK:
                message = "No problems during load.";
                break;
            case IStatus.WARNING:
                message = "Warnings during load";
                logPreseveLineBreaks("Warnings during load: " + result.getFilteredError("", LoadResultEntryType.Warning), false);
                break;
            default:
                message = "Errors during load";
                // NOTE(review): filter level is Warning here as well — presumably the filter is a
                // minimum severity so Error entries are included too; confirm against
                // LoadResultEntryType/getFilteredError semantics.
                logPreseveLineBreaks("Errors during load: " + result.getFilteredError("", LoadResultEntryType.Warning), true);
        }
        if (!status.isOK()) {
            showLoadErrorDialog(result, status, message);
        }
        // offer linked-metanode updates only after the editor has finished opening
        final List<NodeID> linkedMNs = wm.getLinkedMetaNodes(true);
        if (!linkedMNs.isEmpty()) {
            final WorkflowEditor editor = m_editor;
            m_editor.addAfterOpenRunnable(new Runnable() {
                @Override
                public void run() {
                    postLoadCheckForMetaNodeUpdates(editor, wm, linkedMNs);
                }
            });
        }
    } catch (FileNotFoundException fnfe) {
        m_throwable = fnfe;
        LOGGER.fatal("File not found", fnfe);
    } catch (IOException ioe) {
        m_throwable = ioe;
        if (m_workflowFile.length() == 0) {
            LOGGER.info("New workflow created.");
            // this is the only place to set this flag to true: we have an
            // empty workflow file, i.e. a new project was created
            // bugfix 1555: if an exception is thrown DO NOT create empty
            // workflow
            createEmptyWorkflow = true;
        } else {
            LOGGER.error("Could not load workflow from: " + m_workflowFile.getName(), ioe);
        }
    } catch (InvalidSettingsException ise) {
        LOGGER.error("Could not load workflow from: " + m_workflowFile.getName(), ise);
        m_throwable = ise;
    } catch (UnsupportedWorkflowVersionException uve) {
        // workflow was written by an unknown (newer) version
        m_loadingCanceledMessage = INCOMPATIBLE_VERSION_MSG;
        LOGGER.info(m_loadingCanceledMessage, uve);
        m_editor.setWorkflowManager(null);
    } catch (CanceledExecutionException cee) {
        m_loadingCanceledMessage = "Canceled loading workflow: " + m_workflowFile.getParentFile().getName();
        LOGGER.info(m_loadingCanceledMessage, cee);
        m_editor.setWorkflowManager(null);
    } catch (LockFailedException lfe) {
        // workflow directory is locked by another instance, or it vanished
        StringBuilder error = new StringBuilder();
        error.append("Unable to load workflow \"");
        error.append(m_workflowFile.getParentFile().getName());
        if (m_workflowFile.getParentFile().exists()) {
            error.append("\"\nIt is in use by another user/instance.");
        } else {
            error.append("\"\nLocation does not exist.");
        }
        m_loadingCanceledMessage = error.toString();
        LOGGER.info(m_loadingCanceledMessage, lfe);
        m_editor.setWorkflowManager(null);
    } catch (Throwable e) {
        // catch-all: nothing may escape this runnable, record for the caller instead
        m_throwable = e;
        LOGGER.error("Workflow could not be loaded. " + e.getMessage(), e);
        m_editor.setWorkflowManager(null);
    } finally {
        // (empty workflow file)
        if (createEmptyWorkflow) {
            WorkflowCreationHelper creationHelper = new WorkflowCreationHelper();
            WorkflowContext.Factory fac = new WorkflowContext.Factory(m_workflowFile.getParentFile());
            fac.setMountpointRoot(m_mountpointRoot);
            fac.setMountpointURI(m_mountpointURI);
            creationHelper.setWorkflowContext(fac.createContext());
            // name is null here, so the project uses the directory name (see above)
            m_editor.setWorkflowManager(WorkflowManager.ROOT.createAndAddProject(name, creationHelper));
            // save empty project immediately
            // bugfix 1341 -> see WorkflowEditor line 1294
            // (resource delta visitor movedTo)
            Display.getDefault().syncExec(new Runnable() {
                @Override
                public void run() {
                    m_editor.doSave(new NullProgressMonitor());
                }
            });
            m_editor.setIsDirty(false);
        }
        // IMPORTANT: Remove the reference to the file and the
        // editor!!! Otherwise the memory cannot be freed later
        m_editor = null;
        m_workflowFile = null;
        m_mountpointRoot = null;
    }
}
Example usage of org.knime.core.util.LockFailedException in the knime-core project (by KNIME):
the FileWorkflowPersistor class, method saveContent.
/**
 * Saves the workflow's content (name, cipher, variables, credentials, annotations, nodes,
 * connections, port bars, editor UI info) below the given directory and writes the settings
 * file, locking the directory for this VM for the duration of the save.
 *
 * @param wm The WFM to save.
 * @param preFilledSettings The settings eventually written to workflow.knime (or workflow.knime.encrypted).
 * For workflows it contains the version number, cipher, template infos etc. The name of the setting defines the
 * output file name (so it's important!)
 * @param rawWorkflowDirRef To save to.
 * @param execMon for progress reporting and cancellation checks
 * @param saveHelper distinguishes auto-save from a regular save and says whether data is included
 * @throws IOException if the target directory cannot be created/written or a file write fails
 * @throws CanceledExecutionException if canceled via {@code execMon}
 * @throws LockFailedException if the target directory cannot be locked for this VM
 */
private static void saveContent(final WorkflowManager wm, final NodeSettings preFilledSettings, final ReferencedFile rawWorkflowDirRef, final ExecutionMonitor execMon, final WorkflowSaveHelper saveHelper) throws IOException, CanceledExecutionException, LockFailedException {
    ReferencedFile workflowDirRef = rawWorkflowDirRef;
    Role r = wm.getTemplateInformation().getRole();
    // the settings key doubles as the output file name (workflow.knime / template.knime / ...)
    final String fName = preFilledSettings.getKey();
    if (!workflowDirRef.fileLockRootForVM()) {
        throw new LockFailedException("Can't write workflow to \"" + workflowDirRef + "\" because the directory can't be locked");
    }
    try {
        final ReferencedFile nodeContainerDirectory = wm.getNodeContainerDirectory();
        final ReferencedFile autoSaveDirectory = wm.getAutoSaveDirectory();
        if (!saveHelper.isAutoSave() && workflowDirRef.equals(nodeContainerDirectory)) {
            // regular save into the working directory: skip entirely if nothing changed
            if (!nodeContainerDirectory.isDirty()) {
                return;
            } else {
                // update variable assignment to do changes on member
                workflowDirRef = nodeContainerDirectory;
                // delete "old" node directories if not saving to the working
                // directory -- do this before saving the nodes (dirs newly created)
                WorkflowManager.deleteObsoleteNodeDirs(nodeContainerDirectory.getDeletedNodesFileLocations());
            }
        }
        if (saveHelper.isAutoSave() && workflowDirRef.equals(autoSaveDirectory)) {
            // same skip/cleanup logic for the auto-save directory
            if (!autoSaveDirectory.isDirty()) {
                return;
            } else {
                workflowDirRef = autoSaveDirectory;
                WorkflowManager.deleteObsoleteNodeDirs(autoSaveDirectory.getDeletedNodesFileLocations());
            }
        }
        File workflowDir = workflowDirRef.getFile();
        workflowDir.mkdirs();
        if (!workflowDir.isDirectory()) {
            // FIX: the original message had a misplaced quote ("directory \": ...")
            throw new IOException("Unable to create or write directory \"" + workflowDir + "\"");
        }
        saveWorkflowName(preFilledSettings, wm.getNameField());
        saveAuthorInformation(wm.getAuthorInformation(), preFilledSettings);
        saveWorkflowCipher(preFilledSettings, wm.getWorkflowCipher());
        FileNodeContainerMetaPersistor.save(preFilledSettings, wm, workflowDirRef);
        saveWorkflowVariables(wm, preFilledSettings);
        saveCredentials(wm, preFilledSettings);
        saveWorkflowAnnotations(wm, preFilledSettings);
        NodeSettingsWO nodesSettings = saveSettingsForNodes(preFilledSettings);
        Collection<NodeContainer> nodes = wm.getNodeContainers();
        // one progress slice per node plus one for the surrounding bookkeeping
        double progRatio = 1.0 / (nodes.size() + 1);
        for (NodeContainer nextNode : nodes) {
            int id = nextNode.getID().getIndex();
            ExecutionMonitor subExec = execMon.createSubProgress(progRatio);
            execMon.setMessage(nextNode.getNameWithID());
            NodeSettingsWO sub = nodesSettings.addNodeSettings("node_" + id);
            saveNodeContainer(sub, workflowDirRef, nextNode, subExec, saveHelper);
            subExec.setProgress(1.0);
        }
        execMon.setMessage("connection information");
        NodeSettingsWO connSettings = saveSettingsForConnections(preFilledSettings);
        int connectionNumber = 0;
        for (ConnectionContainer cc : wm.getConnectionContainers()) {
            NodeSettingsWO nextConnectionConfig = connSettings.addNodeSettings("connection_" + connectionNumber);
            saveConnection(nextConnectionConfig, cc);
            connectionNumber += 1;
        }
        // metanode input ports (only present if the WFM has inputs)
        int inCount = wm.getNrInPorts();
        NodeSettingsWO inPortsSetts = inCount > 0 ? saveInPortsSetting(preFilledSettings) : null;
        NodeSettingsWO inPortsSettsEnum = null;
        if (inPortsSetts != null) {
            // TODO actually not neccessary to save the class name
            saveInportsBarUIInfoClassName(inPortsSetts, wm.getInPortsBarUIInfo());
            saveInportsBarUIInfoSettings(inPortsSetts, wm.getInPortsBarUIInfo());
            inPortsSettsEnum = saveInPortsEnumSetting(inPortsSetts);
        }
        for (int i = 0; i < inCount; i++) {
            NodeSettingsWO sPort = saveInPortSetting(inPortsSettsEnum, i);
            saveInPort(sPort, wm, i);
        }
        // metanode output ports (mirrors the input-port handling above)
        int outCount = wm.getNrOutPorts();
        NodeSettingsWO outPortsSetts = outCount > 0 ? saveOutPortsSetting(preFilledSettings) : null;
        NodeSettingsWO outPortsSettsEnum = null;
        if (outPortsSetts != null) {
            saveOutportsBarUIInfoClassName(outPortsSetts, wm.getOutPortsBarUIInfo());
            saveOutportsBarUIInfoSettings(outPortsSetts, wm.getOutPortsBarUIInfo());
            outPortsSettsEnum = saveOutPortsEnumSetting(outPortsSetts);
        }
        for (int i = 0; i < outCount; i++) {
            NodeSettingsWO singlePort = saveOutPortSetting(outPortsSettsEnum, i);
            saveOutPort(singlePort, wm, i);
        }
        saveEditorUIInformation(wm, preFilledSettings);
        File workflowFile = new File(workflowDir, fName);
        // remove the "other" role's settings file (workflow vs. template) plus its cipher variant
        String toBeDeletedFileName = Role.Template.equals(r) ? TEMPLATE_FILE : WORKFLOW_FILE;
        new File(workflowDir, toBeDeletedFileName).delete();
        new File(workflowDir, WorkflowCipher.getCipherFileName(toBeDeletedFileName)).delete();
        OutputStream os = new FileOutputStream(workflowFile);
        // possibly wrap in an encrypting stream (workflow.knime.encrypted)
        os = wm.getDirectNCParent().cipherOutput(os);
        preFilledSettings.saveToXML(os);
        if (saveHelper.isSaveData()) {
            File saveWithDataFile = new File(workflowDir, SAVED_WITH_DATA_FILE);
            // FIX: try-with-resources — the writer was leaked if any write threw.
            // NOTE(review): FileWriter uses the platform default charset; content is ASCII-only here.
            try (BufferedWriter o = new BufferedWriter(new FileWriter(saveWithDataFile))) {
                o.write("Do not delete this file!");
                o.newLine();
                o.write("This file serves to indicate that the workflow was written as part of the usual save " + "routine (not exported).");
                o.newLine();
                o.newLine();
                o.write("Workflow was last saved by user ");
                o.write(System.getProperty("user.name"));
                o.write(" on " + new Date());
            }
        }
        // remember the directory on first save so subsequent saves can detect "same location"
        if (saveHelper.isAutoSave() && autoSaveDirectory == null) {
            wm.setAutoSaveDirectory(workflowDirRef);
        }
        if (!saveHelper.isAutoSave() && nodeContainerDirectory == null) {
            wm.setNodeContainerDirectory(workflowDirRef);
        }
        NodeContainerState wmState = wm.getNodeContainerState();
        // non remote executions
        boolean isExecutingLocally = wmState.isExecutionInProgress() && !wmState.isExecutingRemotely();
        if (workflowDirRef.equals(nodeContainerDirectory) && !isExecutingLocally) {
            wm.unsetDirty();
        }
        // keep the directory flagged dirty while a local execution is still running
        workflowDirRef.setDirty(isExecutingLocally);
        execMon.setProgress(1.0);
    } finally {
        workflowDirRef.fileUnlockRootForVM();
    }
}
Example usage of org.knime.core.util.LockFailedException in the knime-core project (by KNIME):
the SandboxedNodeCreator class, method createSandbox.
/**
 * Creates that temporary mini workflow that is executed remotely on the cluster/stream executor.
 * The returned value should be {@link SandboxedNode#close()} when done (using try-with-resources). After this
 * method is called no other set-method should be called.
 *
 * @param exec for progress/cancelation
 * @return a handle on the temporary mini workflow together with the ID of the node (within that
 *         workflow) that represents the node to execute
 * @throws InvalidSettingsException if settings for the virtual input nodes cannot be saved/loaded
 * @throws IOException if the sandbox directory cannot be created or the flow cannot be saved
 * @throws CanceledExecutionException if canceled via {@code exec}
 * @throws LockFailedException if the local workflow directory cannot be locked for saving
 * @throws InterruptedException if waiting for the virtual workflow's execution is interrupted
 */
public SandboxedNode createSandbox(final ExecutionMonitor exec) throws InvalidSettingsException, IOException, CanceledExecutionException, LockFailedException, InterruptedException {
    exec.setMessage("Creating virtual workflow");
    final WorkflowManager parent = m_nc.getParent();
    // derive workflow context via NodeContext as the parent could only a be a metanode in a metanode...
    final WorkflowContext origContext = NodeContext.getContext().getWorkflowManager().getContext();
    WorkflowContext.Factory ctxFactory;
    // (specifically reading knime://knime.workflow files)
    if (!m_copyDataIntoNewContext) {
        // reuse the original context; optionally relocate to the local job dir
        ctxFactory = new WorkflowContext.Factory(origContext);
        if (m_localWorkflowDir != null) {
            ctxFactory.setOriginalLocation(origContext.getCurrentLocation()).setCurrentLocation(m_localWorkflowDir);
        }
    } else if (m_localWorkflowDir != null) {
        ctxFactory = new WorkflowContext.Factory(m_localWorkflowDir);
    } else {
        // no location given: fall back to a fresh temp directory
        ctxFactory = new WorkflowContext.Factory(FileUtil.createTempDir("sandbox-" + m_nc.getNameWithID()));
    }
    // We have to use the same location for the temporary files
    ctxFactory.setTempLocation(origContext.getTempLocation());
    origContext.getMountpointURI().ifPresent(u -> ctxFactory.setMountpointURI(u));
    WorkflowCreationHelper creationHelper = new WorkflowCreationHelper();
    creationHelper.setWorkflowContext(ctxFactory.createContext());
    if (!m_copyDataIntoNewContext) {
        // share the parent's table/file-store repositories instead of copying the data
        creationHelper.setDataHandlers(parent.getGlobalTableRepository(), parent.getFileStoreHandlerRepository());
    }
    WorkflowManager tempWFM = m_rootWFM.createAndAddProject("Sandbox Exec on " + m_nc.getNameWithID(), creationHelper);
    // Add the workflow variables
    List<FlowVariable> workflowVariables = parent.getProjectWFM().getWorkflowVariables();
    tempWFM.addWorkflowVariables(true, workflowVariables.toArray(new FlowVariable[workflowVariables.size()]));
    // update credentials store of the workflow
    CredentialsStore cs = tempWFM.getCredentialsStore();
    workflowVariables.stream().filter(f -> f.getType().equals(FlowVariable.Type.CREDENTIALS)).filter(f -> !cs.contains(f.getName())).forEach(cs::addFromFlowVariable);
    final int inCnt = m_inData.length;
    // port object IDs in static port object map, one entry for
    // each connected input (no value for unconnected optional inputs)
    List<Integer> portObjectRepositoryIDs = new ArrayList<Integer>(inCnt);
    try {
        NodeID[] ins = new NodeID[inCnt];
        for (int i = 0; i < inCnt; i++) {
            final PortObject in = m_inData[i];
            final NodeInPort inPort = m_nc.getInPort(i);
            final PortType portType = inPort.getPortType();
            if (in == null) {
                // unconnected optional input
                CheckUtils.checkState(portType.isOptional(), "No data at port %d, although port is mandatory (port type %s)", i, portType.getName());
                continue;
            }
            // register the port object globally so the reader node can pick it up by ID
            int portObjectRepositoryID = PortObjectRepository.add(in);
            portObjectRepositoryIDs.add(portObjectRepositoryID);
            // tables get a dedicated reader-node type, everything else the generic one
            boolean isTable = BufferedDataTable.TYPE.equals(portType);
            NodeID inID = tempWFM.createAndAddNode(isTable ? TABLE_READ_NODE_FACTORY : OBJECT_READ_NODE_FACTORY);
            NodeSettings s = new NodeSettings("temp_data_in");
            tempWFM.saveNodeSettings(inID, s);
            List<FlowVariable> flowVars = getFlowVariablesOnPort(i);
            PortObjectInNodeModel.setInputNodeSettings(s, portObjectRepositoryID, flowVars, m_copyDataIntoNewContext);
            // update credentials store of the workflow
            flowVars.stream().filter(f -> f.getType().equals(FlowVariable.Type.CREDENTIALS)).filter(f -> !cs.contains(f.getName())).forEach(cs::addFromFlowVariable);
            tempWFM.loadNodeSettings(inID, s);
            ins[i] = inID;
        }
        // execute inPort object nodes to store the input data in them
        if (ins.length > 0 && !tempWFM.executeAllAndWaitUntilDoneInterruptibly()) {
            String error = "Unable to execute virtual workflow, status sent to log facilities";
            LOGGER.debug(error + ":");
            LOGGER.debug(tempWFM.toString());
            throw new RuntimeException(error);
        }
        // add the target node to the workflow
        WorkflowCopyContent.Builder content = WorkflowCopyContent.builder();
        content.setNodeIDs(m_nc.getID());
        final NodeID targetNodeID = tempWFM.copyFromAndPasteHere(parent, content.build()).getNodeIDs()[0];
        NodeContainer targetNode = tempWFM.getNodeContainer(targetNodeID);
        // connect target node to inPort object nodes, skipping unconnected (optional) inputs
        IntStream.range(0, inCnt).filter(i -> ins[i] != null).forEach(i -> tempWFM.addConnection(ins[i], 1, targetNodeID, i));
        if (m_forwardConnectionProgressEvents) {
            setupConnectionProgressEventListeners(m_nc, targetNode);
        }
        // copy the existing tables into the (meta) node (e.g. an executed file reader that's necessary
        // for other nodes to execute)
        exec.setMessage("Copying tables into temp flow");
        NodeContainerExecutionResult origResult = m_nc.createExecutionResult(exec);
        ExecutionMonitor copyExec = exec.createSubProgress(0.0);
        copyExistingTablesIntoSandboxContainer(origResult, m_nc, targetNode, copyExec, m_copyDataIntoNewContext);
        // temporarily install a job manager whose only job is to copy the result content in
        CopyContentIntoTempFlowNodeExecutionJobManager copyDataIntoTmpFlow = new CopyContentIntoTempFlowNodeExecutionJobManager(origResult);
        NodeExecutionJobManager oldJobManager = targetNode.getJobManager();
        tempWFM.setJobManager(targetNodeID, copyDataIntoTmpFlow);
        tempWFM.executeAllAndWaitUntilDoneInterruptibly();
        // NOTE(review): the old job manager is restored and then immediately overwritten with
        // the default one below — presumably intentional (clears cluster-specific managers);
        // confirm before simplifying.
        tempWFM.setJobManager(targetNodeID, oldJobManager);
        // do not use the cluster executor on the cluster...
        tempWFM.setJobManager(targetNodeID, NodeExecutionJobManagerPool.getDefaultJobManagerFactory().getInstance());
        if (!m_copyDataIntoNewContext) {
            copyFileStoreHandlerReference(targetNode, parent, false);
        }
        // save workflow in the local job dir
        if (m_localWorkflowDir != null) {
            tempWFM.save(m_localWorkflowDir, exec, true);
            deepCopyFilesInWorkflowDir(m_nc, tempWFM);
        }
        return new SandboxedNode(tempWFM, targetNodeID);
    } finally {
        // always release the globally registered port objects, even on failure
        portObjectRepositoryIDs.stream().forEach(PortObjectRepository::remove);
    }
}
Example usage of org.knime.core.util.LockFailedException in the knime-core project (by KNIME):
the WorkflowManager class, method load.
/**
 * Loads the workflow contained in the directory as node into this workflow instance. Loading a whole new project is
 * usually done using {@link WorkflowManager#loadProject(File, ExecutionMonitor, WorkflowLoadHelper)}.
 *
 * <p>Unless the flow is a template, the root directory is locked for this VM for the duration of
 * the load and unlocked again afterwards.
 *
 * @param directory to load from
 * @param exec For progress/cancellation (currently not supported)
 * @param loadHelper callback to load credentials and such (if available) during load of the underlying
 *            <code>SingleNodeContainer</code> (may be null).
 * @param keepNodeMessages Whether to keep the messages that are associated with the nodes in the loaded workflow
 *            (mostly false but true when remotely computed results are loaded).
 * @return A workflow load result, which also contains the loaded workflow.
 * @throws IOException If errors reading the "important" files fails due to I/O problems (file not present, e.g.)
 * @throws InvalidSettingsException If parsing the "important" files fails.
 * @throws CanceledExecutionException If canceled.
 * @throws UnsupportedWorkflowVersionException If the version of the workflow is unknown (future version)
 * @throws LockFailedException if the flow can't be locked for opening
 */
public WorkflowLoadResult load(final File directory, final ExecutionMonitor exec, final WorkflowLoadHelper loadHelper, final boolean keepNodeMessages) throws IOException, InvalidSettingsException, CanceledExecutionException, UnsupportedWorkflowVersionException, LockFailedException {
    final ReferencedFile rootFile = new ReferencedFile(directory);
    // don't lock read-only templates (as we don't have r/o locks yet)
    final boolean isTemplate = loadHelper.isTemplateFlow();
    if (!isTemplate && !rootFile.fileLockRootForVM()) {
        // distinguish "someone else holds the lock" from "the directory is gone"
        final String reason = rootFile.getFile().exists()
            ? "It is in use by another user/instance." : "Location does not exist.";
        throw new LockFailedException("Unable to lock workflow from \"" + rootFile + "\". " + reason);
    }
    try {
        return load(createLoadPersistor(directory, loadHelper), exec, keepNodeMessages);
    } finally {
        if (!isTemplate) {
            rootFile.fileUnlockRootForVM();
        }
    }
}
Aggregations