Use of org.knime.core.node.CanceledExecutionException in project knime-core by KNIME.
Class RepositoryFactory, method loadMetaNode:
private static WorkflowManagerUI loadMetaNode(final String pluginId, final String workflowDir) {
    LOGGER.debug("found pre-installed template " + workflowDir);
    Bundle bundle = Platform.getBundle(pluginId);
    URL url = FileLocator.find(bundle, new Path(workflowDir), null);
    if (url != null) {
        try {
            File f = new File(FileLocator.toFileURL(url).getFile());
            LOGGER.debug("meta node template name: " + f.getName());
            WorkflowLoadHelper loadHelper = new WorkflowLoadHelper(true) {
                /**
                 * {@inheritDoc}
                 */
                @Override
                public String getDotKNIMEFileName() {
                    return WorkflowPersistor.WORKFLOW_FILE;
                }
            };
            // don't lock workflow dir
            FileWorkflowPersistor persistor = WorkflowManager.createLoadPersistor(f, loadHelper);
            WorkflowManager metaNode = WorkflowManager.META_NODE_ROOT
                .load(persistor, new ExecutionMonitor(), false).getWorkflowManager();
            return WorkflowManagerWrapper.wrap(metaNode);
        } catch (CanceledExecutionException cee) {
            LOGGER.error("Unexpected canceled execution exception", cee);
        } catch (Exception e) {
            LOGGER.error("Failed to load meta workflow repository", e);
        }
    }
    return null;
}
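The catch of CanceledExecutionException above is logged as unexpected because loadMetaNode passes a fresh ExecutionMonitor that nothing else can cancel. As a minimal sketch (not part of knime-core, and assuming only the public ExecutionMonitor/DefaultNodeProgressMonitor API), this is how the exception normally surfaces: the underlying progress monitor is flagged as canceled and the next checkCanceled() call throws.

import org.knime.core.node.CanceledExecutionException;
import org.knime.core.node.DefaultNodeProgressMonitor;
import org.knime.core.node.ExecutionMonitor;

final class CancellationSketch {

    public static void main(final String[] args) {
        DefaultNodeProgressMonitor progress = new DefaultNodeProgressMonitor();
        ExecutionMonitor exec = new ExecutionMonitor(progress);

        // Simulate a cancel request arriving from elsewhere (e.g. a UI action).
        progress.setExecuteCanceled();

        try {
            // Long-running code such as a workflow load is expected to poll this regularly.
            exec.checkCanceled();
        } catch (CanceledExecutionException cee) {
            // This is the exception loadMetaNode treats as "unexpected", because it never
            // hands its ExecutionMonitor to anything that could cancel it.
            System.out.println("Load would abort here.");
        }
    }
}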
Use of org.knime.core.node.CanceledExecutionException in project knime-core by KNIME.
Class AbstractSaveRunnable, method run:
@Override
public final void run(final IProgressMonitor pm) {
    File workflowDir = getSaveLocation();
    try {
        final WorkflowManager wfm = m_editor.getWorkflowManager().get();
        ProgressHandler progressHandler = new ProgressHandler(pm, wfm.getNodeContainers().size(),
            "Saving workflow... (cannot be canceled)");
        final CheckCancelNodeProgressMonitor progressMonitor = new CheckCancelNodeProgressMonitor(pm);
        progressMonitor.addProgressListener(progressHandler);
        final ExecutionMonitor exec = new ExecutionMonitor(progressMonitor);
        save(wfm, exec);
        // the refresh used to take place in WorkflowEditor#saveTo but
        // was moved to this runnable as part of bug fix 3028
        IResource r = KnimeResourceUtil.getResourceForURI(workflowDir.toURI());
        if (r != null) {
            String pName = r.getName();
            pm.setTaskName("Refreshing " + pName + "...");
            r.refreshLocal(IResource.DEPTH_INFINITE, pm);
        }
    } catch (FileNotFoundException fnfe) {
        m_logger.fatal("File not found", fnfe);
        m_exceptionMessage.append("File access problems: " + fnfe.getMessage());
        m_monitor.setCanceled(true);
    } catch (IOException ioe) {
        if (new File(workflowDir, WorkflowPersistor.WORKFLOW_FILE).length() == 0) {
            m_logger.info("New workflow created.");
        } else {
            m_logger.error("Could not save workflow: " + workflowDir.getName(), ioe);
            m_exceptionMessage.append("File access problems: " + ioe.getMessage());
            m_monitor.setCanceled(true);
        }
    } catch (CanceledExecutionException cee) {
        m_logger.info("Canceled saving workflow: " + workflowDir.getName());
        m_exceptionMessage.append("Saving workflow was canceled.");
        m_monitor.setCanceled(true);
    } catch (Exception e) {
        m_logger.error("Could not save workflow", e);
        m_exceptionMessage.append("Could not save workflow: " + e.getMessage());
        m_monitor.setCanceled(true);
    } finally {
        pm.subTask("Finished.");
        pm.done();
        m_editor = null;
        m_exceptionMessage = null;
        m_monitor = null;
    }
}
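The runnable above relies on CheckCancelNodeProgressMonitor to bridge the Eclipse IProgressMonitor into KNIME's progress API; if a CanceledExecutionException does escape save(), it is recorded and mapped to m_monitor.setCanceled(true) rather than treated as an error. A hedged sketch of that mapping as a standalone helper (the name runCancelable and the CancelableTask interface are made up for illustration, not knime-core API):

import org.eclipse.core.runtime.IProgressMonitor;
import org.knime.core.node.CanceledExecutionException;
import org.knime.core.node.ExecutionMonitor;

final class SaveCancellationSketch {

    interface CancelableTask {
        void run(ExecutionMonitor exec) throws Exception;
    }

    // Run a task and translate KNIME cancellation into Eclipse progress-monitor state.
    static void runCancelable(final CancelableTask task, final ExecutionMonitor exec,
            final IProgressMonitor pm, final StringBuilder errorMessage) {
        try {
            task.run(exec);
        } catch (CanceledExecutionException cee) {
            // user canceled: record it and flag the Eclipse monitor, but do not log an error
            errorMessage.append("Saving workflow was canceled.");
            pm.setCanceled(true);
        } catch (Exception e) {
            errorMessage.append("Could not save workflow: " + e.getMessage());
            pm.setCanceled(true);
        }
    }
}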
Use of org.knime.core.node.CanceledExecutionException in project knime-core by KNIME.
Class LoadWorkflowRunnable, method run:
/**
 * {@inheritDoc}
 */
@Override
public void run(final IProgressMonitor pm) {
    // indicates whether to create an empty workflow;
    // this is done if the workflow file is empty
    boolean createEmptyWorkflow = false;
    // name of the workflow will be null (the directory name is used then)
    String name = null;
    m_throwable = null;
    try {
        // create progress monitor
        ProgressHandler progressHandler = new ProgressHandler(pm, 101, "Loading workflow...");
        final CheckCancelNodeProgressMonitor progressMonitor = new CheckCancelNodeProgressMonitor(pm);
        progressMonitor.addProgressListener(progressHandler);
        File workflowDirectory = m_workflowFile.getParentFile();
        Display d = Display.getDefault();
        GUIWorkflowLoadHelper loadHelper = new GUIWorkflowLoadHelper(d, workflowDirectory.getName(),
            m_mountpointURI, workflowDirectory, m_mountpointRoot);
        final WorkflowLoadResult result = WorkflowManager.loadProject(workflowDirectory,
            new ExecutionMonitor(progressMonitor), loadHelper);
        final WorkflowManager wm = result.getWorkflowManager();
        m_editor.setWorkflowManager(wm);
        pm.subTask("Finished.");
        pm.done();
        if (wm.isDirty()) {
            m_editor.markDirty();
        }
        final IStatus status = createStatus(result, !result.getGUIMustReportDataLoadErrors());
        String message;
        switch (status.getSeverity()) {
            case IStatus.OK:
                message = "No problems during load.";
                break;
            case IStatus.WARNING:
                message = "Warnings during load";
                logPreseveLineBreaks("Warnings during load: "
                    + result.getFilteredError("", LoadResultEntryType.Warning), false);
                break;
            default:
                message = "Errors during load";
                logPreseveLineBreaks("Errors during load: "
                    + result.getFilteredError("", LoadResultEntryType.Warning), true);
        }
        if (!status.isOK()) {
            showLoadErrorDialog(result, status, message);
        }
        final List<NodeID> linkedMNs = wm.getLinkedMetaNodes(true);
        if (!linkedMNs.isEmpty()) {
            final WorkflowEditor editor = m_editor;
            m_editor.addAfterOpenRunnable(new Runnable() {
                @Override
                public void run() {
                    postLoadCheckForMetaNodeUpdates(editor, wm, linkedMNs);
                }
            });
        }
    } catch (FileNotFoundException fnfe) {
        m_throwable = fnfe;
        LOGGER.fatal("File not found", fnfe);
    } catch (IOException ioe) {
        m_throwable = ioe;
        if (m_workflowFile.length() == 0) {
            LOGGER.info("New workflow created.");
            // this is the only place to set this flag to true: we have an
            // empty workflow file, i.e. a new project was created
            // bugfix 1555: if an exception is thrown DO NOT create an empty workflow
            createEmptyWorkflow = true;
        } else {
            LOGGER.error("Could not load workflow from: " + m_workflowFile.getName(), ioe);
        }
    } catch (InvalidSettingsException ise) {
        LOGGER.error("Could not load workflow from: " + m_workflowFile.getName(), ise);
        m_throwable = ise;
    } catch (UnsupportedWorkflowVersionException uve) {
        m_loadingCanceledMessage = INCOMPATIBLE_VERSION_MSG;
        LOGGER.info(m_loadingCanceledMessage, uve);
        m_editor.setWorkflowManager(null);
    } catch (CanceledExecutionException cee) {
        m_loadingCanceledMessage = "Canceled loading workflow: " + m_workflowFile.getParentFile().getName();
        LOGGER.info(m_loadingCanceledMessage, cee);
        m_editor.setWorkflowManager(null);
    } catch (LockFailedException lfe) {
        StringBuilder error = new StringBuilder();
        error.append("Unable to load workflow \"");
        error.append(m_workflowFile.getParentFile().getName());
        if (m_workflowFile.getParentFile().exists()) {
            error.append("\"\nIt is in use by another user/instance.");
        } else {
            error.append("\"\nLocation does not exist.");
        }
        m_loadingCanceledMessage = error.toString();
        LOGGER.info(m_loadingCanceledMessage, lfe);
        m_editor.setWorkflowManager(null);
    } catch (Throwable e) {
        m_throwable = e;
        LOGGER.error("Workflow could not be loaded. " + e.getMessage(), e);
        m_editor.setWorkflowManager(null);
    } finally {
        // create an empty workflow if the workflow file was empty
        if (createEmptyWorkflow) {
            WorkflowCreationHelper creationHelper = new WorkflowCreationHelper();
            WorkflowContext.Factory fac = new WorkflowContext.Factory(m_workflowFile.getParentFile());
            fac.setMountpointRoot(m_mountpointRoot);
            fac.setMountpointURI(m_mountpointURI);
            creationHelper.setWorkflowContext(fac.createContext());
            m_editor.setWorkflowManager(WorkflowManager.ROOT.createAndAddProject(name, creationHelper));
            // save empty project immediately
            // bugfix 1341 -> see WorkflowEditor line 1294
            // (resource delta visitor movedTo)
            Display.getDefault().syncExec(new Runnable() {
                @Override
                public void run() {
                    m_editor.doSave(new NullProgressMonitor());
                }
            });
            m_editor.setIsDirty(false);
        }
        // IMPORTANT: remove the references to the file and the editor,
        // otherwise the memory cannot be freed later
        m_editor = null;
        m_workflowFile = null;
        m_mountpointRoot = null;
    }
}
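The catch blocks above implement a triage: CanceledExecutionException (like incompatible versions or lock failures) only sets m_loadingCanceledMessage and logs at INFO, an IOException on an empty workflow file triggers creation of a fresh empty workflow, and everything else is stored in m_throwable for later error reporting. A sketch of that triage as a small helper (class and method names are illustrative, not knime-core API):

import java.io.IOException;
import org.knime.core.node.CanceledExecutionException;

final class LoadOutcome {

    enum Kind { CANCELED, EMPTY_FILE, ERROR }

    // Classify a failure from WorkflowManager.loadProject the way the runnable above does.
    static Kind classify(final Throwable t, final long workflowFileLength) {
        if (t instanceof CanceledExecutionException) {
            return Kind.CANCELED;     // user canceled: INFO log, no error dialog
        }
        if (t instanceof IOException && workflowFileLength == 0) {
            return Kind.EMPTY_FILE;   // empty workflow file: create a new project instead
        }
        return Kind.ERROR;            // everything else: remember the throwable and report it
    }
}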
Use of org.knime.core.node.CanceledExecutionException in project knime-core by KNIME.
Class CreateMetaNodeTemplateCommand, method execute:
/**
 * {@inheritDoc}
 */
@Override
public void execute() {
    // Add node to workflow and get the container
    LoadMetaNodeTemplateRunnable loadRunnable = null;
    try {
        IWorkbench wb = PlatformUI.getWorkbench();
        IProgressService ps = wb.getProgressService();
        // this one sets the workflow manager in the editor
        loadRunnable = new LoadMetaNodeTemplateRunnable(getHostWFM(), m_templateKNIMEFolder);
        ps.run(false, true, loadRunnable);
        MetaNodeLinkUpdateResult result = loadRunnable.getLoadResult();
        m_container = (NodeContainer) result.getLoadedInstance();
        if (m_container == null) {
            throw new RuntimeException("No template returned by load routine, see log for details");
        }
        // create extra info and set it
        NodeUIInformation info = NodeUIInformation.builder()
            .setNodeLocation(m_location.x, m_location.y, -1, -1)
            .setHasAbsoluteCoordinates(false)
            .setSnapToGrid(m_snapToGrid)
            .setIsDropLocation(true)
            .build();
        m_container.setUIInformation(info);
    } catch (Throwable t) {
        Throwable cause = t;
        while ((cause.getCause() != null) && (cause.getCause() != cause)) {
            cause = cause.getCause();
        }
        String error = "The selected node could not be created";
        if (cause instanceof FileNotFoundException) {
            error += " because a file could not be found: " + cause.getMessage();
            MessageDialog.openError(Display.getDefault().getActiveShell(), "Node cannot be created.", error);
        } else if (cause instanceof IOException) {
            error += " because of an I/O error: " + cause.getMessage();
            MessageDialog.openError(Display.getDefault().getActiveShell(), "Node cannot be created.", error);
        } else if (cause instanceof InvalidSettingsException) {
            error += " because the metanode contains invalid settings: " + cause.getMessage();
            MessageDialog.openError(Display.getDefault().getActiveShell(), "Node cannot be created.", error);
        } else if (cause instanceof UnsupportedWorkflowVersionException) {
            error += " because the metanode version is incompatible: " + cause.getMessage();
            MessageDialog.openError(Display.getDefault().getActiveShell(), "Node cannot be created.", error);
        } else if ((cause instanceof CanceledExecutionException) || (cause instanceof InterruptedException)) {
            LOGGER.info("Metanode loading was canceled by the user", cause);
        } else {
            LOGGER.error(String.format("Metanode loading failed with %s: %s",
                cause.getClass().getSimpleName(), cause.getMessage()), cause);
            error += ": " + cause.getMessage();
            MessageDialog.openError(Display.getDefault().getActiveShell(), "Node cannot be created.", error);
        }
    }
}
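Because IProgressService#run wraps failures thrown by the runnable, the command first walks the cause chain to the root cause and only then decides whether it is a user cancellation (CanceledExecutionException or InterruptedException, logged at INFO) or an error worth a dialog. A sketch of that cause-chain walk (the Causes class and rootCause method are illustrative, not knime-core API):

import java.io.FileNotFoundException;
import java.lang.reflect.InvocationTargetException;

final class Causes {

    // Walk the cause chain to its end, guarding against self-referencing causes.
    static Throwable rootCause(final Throwable t) {
        Throwable cause = t;
        while (cause.getCause() != null && cause.getCause() != cause) {
            cause = cause.getCause();
        }
        return cause;
    }

    public static void main(final String[] args) {
        Throwable wrapped = new InvocationTargetException(new FileNotFoundException("missing template"));
        System.out.println(rootCause(wrapped));   // prints the FileNotFoundException
    }
}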
Use of org.knime.core.node.CanceledExecutionException in project knime-core by KNIME.
Class DecTreePredictorNodeModel, method execute:
/**
 * {@inheritDoc}
 */
@Override
public PortObject[] execute(final PortObject[] inPorts, final ExecutionContext exec)
        throws CanceledExecutionException, Exception {
    exec.setMessage("Decision Tree Predictor: Loading predictor...");
    PMMLPortObject port = (PMMLPortObject) inPorts[INMODELPORT];
    List<Node> models = port.getPMMLValue().getModels(PMMLModelType.TreeModel);
    if (models.isEmpty()) {
        String msg = "Decision Tree evaluation failed: No tree model found.";
        LOGGER.error(msg);
        throw new RuntimeException(msg);
    }
    PMMLDecisionTreeTranslator trans = new PMMLDecisionTreeTranslator();
    port.initializeModelTranslator(trans);
    DecisionTree decTree = trans.getDecisionTree();
    decTree.resetColorInformation();
    BufferedDataTable inData = (BufferedDataTable) inPorts[INDATAPORT];
    // get column with color information
    String colorColumn = null;
    for (DataColumnSpec s : inData.getDataTableSpec()) {
        if (s.getColorHandler() != null) {
            colorColumn = s.getName();
            break;
        }
    }
    decTree.setColorColumn(colorColumn);
    exec.setMessage("Decision Tree Predictor: start execution.");
    PortObjectSpec[] inSpecs = new PortObjectSpec[] { inPorts[0].getSpec(), inPorts[1].getSpec() };
    DataTableSpec outSpec = createOutTableSpec(inSpecs);
    BufferedDataContainer outData = exec.createDataContainer(outSpec);
    long coveredPattern = 0;
    long nrPattern = 0;
    long rowCount = 0;
    long numberRows = inData.size();
    exec.setMessage("Classifying...");
    for (DataRow thisRow : inData) {
        DataCell cl = null;
        LinkedHashMap<String, Double> classDistrib = null;
        try {
            Pair<DataCell, LinkedHashMap<DataCell, Double>> pair =
                decTree.getWinnerAndClasscounts(thisRow, inData.getDataTableSpec());
            cl = pair.getFirst();
            LinkedHashMap<DataCell, Double> classCounts = pair.getSecond();
            classDistrib = getDistribution(classCounts);
            if (coveredPattern < m_maxNumCoveredPattern.getIntValue()) {
                // remember this one for HiLite support
                decTree.addCoveredPattern(thisRow, inData.getDataTableSpec());
                coveredPattern++;
            } else {
                // too many patterns for HiLite - at least remember the color
                decTree.addCoveredColor(thisRow, inData.getDataTableSpec());
            }
            nrPattern++;
        } catch (Exception e) {
            LOGGER.error("Decision Tree evaluation failed: " + e.getMessage());
            throw e;
        }
        if (cl == null) {
            LOGGER.error("Decision Tree evaluation failed: result empty");
            throw new Exception("Decision Tree evaluation failed.");
        }
        DataCell[] newCells = new DataCell[outSpec.getNumColumns()];
        int numInCells = thisRow.getNumCells();
        for (int i = 0; i < numInCells; i++) {
            newCells[i] = thisRow.getCell(i);
        }
        if (m_showDistribution.getBooleanValue()) {
            for (int i = numInCells; i < newCells.length - 1; i++) {
                String predClass = outSpec.getColumnSpec(i).getName();
                if (classDistrib != null && classDistrib.get(predClass) != null) {
                    newCells[i] = new DoubleCell(classDistrib.get(predClass));
                } else {
                    newCells[i] = new DoubleCell(0.0);
                }
            }
        }
        newCells[newCells.length - 1] = cl;
        outData.addRowToTable(new DefaultRow(thisRow.getKey(), newCells));
        rowCount++;
        if (rowCount % 100 == 0) {
            exec.setProgress(rowCount / (double) numberRows,
                "Classifying... Row " + rowCount + " of " + numberRows);
        }
        exec.checkCanceled();
    }
    if (coveredPattern < nrPattern) {
        // let the user know that we did not store all available patterns for HiLiting
        this.setWarningMessage("Tree only stored first " + m_maxNumCoveredPattern.getIntValue()
            + " (of " + nrPattern + ") rows for HiLiting!");
    }
    outData.close();
    m_decTree = decTree;
    exec.setMessage("Decision Tree Predictor: end execution.");
    return new BufferedDataTable[] { outData.getTable() };
}
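The classification loop above shows the standard cooperative-cancellation contract for NodeModel#execute: report progress via exec.setProgress(...) and call exec.checkCanceled() once per row, so a user cancel request turns into a CanceledExecutionException that aborts execution. A reduced sketch of that loop (the PredictLoopSketch class is illustrative; the row is simply copied as a stand-in for the actual prediction work):

import org.knime.core.data.DataRow;
import org.knime.core.node.BufferedDataContainer;
import org.knime.core.node.BufferedDataTable;
import org.knime.core.node.CanceledExecutionException;
import org.knime.core.node.ExecutionContext;

final class PredictLoopSketch {

    static BufferedDataTable process(final BufferedDataTable inData, final ExecutionContext exec)
            throws CanceledExecutionException {
        BufferedDataContainer out = exec.createDataContainer(inData.getDataTableSpec());
        final long total = inData.size();
        long done = 0;
        for (DataRow row : inData) {
            out.addRowToTable(row);   // stand-in for the per-row prediction
            done++;
            // report progress occasionally; checkCanceled() throws CanceledExecutionException
            // as soon as the user has requested cancellation
            if (done % 100 == 0) {
                exec.setProgress(done / (double) total, "Processed " + done + " of " + total);
            }
            exec.checkCanceled();
        }
        out.close();
        return out.getTable();
    }
}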