Use of ca.corefacility.bioinformatics.irida.exceptions.ExecutionManagerException in project irida by phac-nml.
In class AnalysisController, the method getAjaxStatusUpdateForAnalysisSubmission:
/**
 * Get the current status for a given {@link AnalysisSubmission}.
 *
 * @param submissionId The {@link Long} identifier for a given {@link AnalysisSubmission}
 * @param locale       The user's current {@link Locale}, used to localize the state label
 * @return a {@link Map} containing the state, the localized state label, and the
 *         percent complete for the {@link AnalysisSubmission}
 */
@RequestMapping(value = "/ajax/status/{submissionId}", produces = MediaType.APPLICATION_JSON_VALUE)
@ResponseBody
public Map<String, String> getAjaxStatusUpdateForAnalysisSubmission(@PathVariable Long submissionId, Locale locale) {
	Map<String, String> result = new HashMap<>();
	AnalysisSubmission analysisSubmission = analysisSubmissionService.read(submissionId);
	AnalysisState state = analysisSubmission.getAnalysisState();
	result.put("state", state.toString());
	result.put("stateLang", messageSource.getMessage("analysis.state." + state.toString(), null, locale));
	// Percent complete is only meaningful for submissions that have not errored.
	if (!state.equals(AnalysisState.ERROR)) {
		try {
			float percentComplete = analysisSubmissionService
					.getPercentCompleteForAnalysisSubmission(analysisSubmission.getId());
			result.put("percentComplete", Float.toString(percentComplete));
		} catch (ExecutionManagerException e) {
			logger.error("Error getting the percentage complete", e);
			// FIX: the error path previously wrote "percentageComplete", a key the
			// success path never uses; emit the same "percentComplete" key so
			// clients always find the value (empty on failure) under one name.
			result.put("percentComplete", "");
		}
	}
	return result;
}
Use of ca.corefacility.bioinformatics.irida.exceptions.ExecutionManagerException in project irida by phac-nml.
In class AnalysisSubmissionServiceImplTest, the method testDeleteSubmissionWorkflowError:
/**
 * Verifies that deleting a submission still deletes the record even when
 * cleaning up the workflow in the execution manager fails: the
 * {@link ExecutionManagerException} is handled internally and the repository
 * delete proceeds.
 *
 * @throws ExecutionManagerException declared for the stubbed cleanup call
 */
@Test
public void testDeleteSubmissionWorkflowError() throws ExecutionManagerException {
	// Arrange: an existing, not-yet-cleaned submission whose cleanup blows up.
	when(analysisSubmissionRepository.exists(ID)).thenReturn(true);
	when(analysisSubmissionRepository.findOne(ID)).thenReturn(analysisSubmission);
	when(analysisSubmission.getAnalysisCleanedState()).thenReturn(AnalysisCleanedState.NOT_CLEANED);
	when(analysisExecutionService.cleanupSubmission(analysisSubmission)).thenThrow(new ExecutionManagerException());

	// Act.
	analysisSubmissionServiceImpl.delete(ID);

	// Assert: cleanup was attempted and, despite its failure, the row was deleted.
	verify(analysisExecutionService).cleanupSubmission(analysisSubmission);
	verify(analysisSubmissionRepository).delete(ID);
}
Use of ca.corefacility.bioinformatics.irida.exceptions.ExecutionManagerException in project irida by phac-nml.
In class AnalysisProvenanceServiceGalaxyTest, the method testHistoriesFailure:
/**
 * Verifies that a failure reading history contents from Galaxy surfaces as an
 * {@link ExecutionManagerException} when building tool execution provenance
 * for an output file.
 */
@Test(expected = ExecutionManagerException.class)
public void testHistoriesFailure() throws ExecutionManagerException {
	// Any history-contents lookup fails at the Galaxy layer.
	when(galaxyHistoriesService.showHistoryContents(any(String.class))).thenThrow(new ExecutionManagerException());

	final AnalysisSubmission submission = analysisSubmission();
	final AnalysisOutputFile outputFile = analysisOutputFile();
	provenanceService.buildToolExecutionForOutputFile(submission, outputFile);
}
Use of ca.corefacility.bioinformatics.irida.exceptions.ExecutionManagerException in project irida by phac-nml.
In class AnalysisProvenanceServiceGalaxy, the method buildToolExecutionForHistoryStep:
/**
 * Build up a complete *tree* of ToolExecution from Galaxy's history
 * contents provenance objects. Recursively follows predecessors from the
 * current history.
 *
 * @param toolDetails
 *            the details of the current tool to build up tool execution
 *            details for.
 * @param currentProvenance
 *            the provenance that corresponds to the tool details.
 * @param historyId
 *            the Galaxy ID we should use to extract tool execution
 *            information.
 * @return the entire tree of ToolExecutions for the tool and its
 *         provenance.
 * @throws ExecutionManagerException
 *             if we could not get the history contents provenance or the
 *             tool details for a predecessor of the current tool details or
 *             provenance.
 */
private ToolExecution buildToolExecutionForHistoryStep(final Tool toolDetails,
		final HistoryContentsProvenance currentProvenance, final String historyId) throws ExecutionManagerException {
	final Map<String, Set<String>> predecessors = getPredecessors(currentProvenance);
	final Map<String, Object> parameters = currentProvenance.getParameters();
	// remove keys from parameters that are Galaxy-related (and thus
	// ignorable), or keys that *match* input keys (as mentioned in
	// getPredecessors, the input keys are going to have a numeric
	// suffix and so don't equal the key that we want to remove from the
	// key set):
	/* @formatter:off */
	final Set<String> parameterKeys = parameters.keySet().stream()
			.filter(k -> !PARAMETERS_TO_IGNORE.contains(k))
			// noneMatch is the direct idiom for the former !anyMatch(...)
			.filter(k -> predecessors.keySet().stream().noneMatch(k::contains))
			.collect(Collectors.toSet());
	/* @formatter:on */
	final Map<String, Object> paramValues = new HashMap<>();
	for (final String parameterKey : parameterKeys) {
		paramValues.put(parameterKey, parameters.get(parameterKey));
	}
	final Set<ToolExecution> prevSteps = new HashSet<>();
	final String toolName = toolDetails.getName();
	final String toolVersion = toolDetails.getVersion();
	final String jobId = currentProvenance.getJobId();
	final JobDetails jobDetails = jobsClient.showJob(jobId);
	final String commandLine = jobDetails.getCommandLine();
	final Map<String, String> paramStrings = buildParamMap(paramValues);
	// the map keys are unused here, so iterate the value sets directly
	for (final Set<String> predecessorSet : predecessors.values()) {
		// arbitrarily select one of the predecessors from the set, then
		// recurse on that predecessor:
		final String predecessor = predecessorSet.iterator().next();
		final HistoryContentsProvenance previousProvenance = galaxyHistoriesService.showProvenance(historyId,
				predecessor);
		final Tool previousToolDetails = toolsClient.showTool(previousProvenance.getToolId());
		final ToolExecution toolExecution = buildToolExecutionForHistoryStep(previousToolDetails, previousProvenance,
				historyId);
		prevSteps.add(toolExecution);
	}
	return new ToolExecution(prevSteps, toolName, toolVersion, jobId, paramStrings, commandLine);
}
Use of ca.corefacility.bioinformatics.irida.exceptions.ExecutionManagerException in project irida by phac-nml.
In class AnalysisExecutionScheduledTaskImpl, the method transferAnalysesResults:
/**
 * {@inheritDoc}
 */
@Override
public Set<Future<AnalysisSubmission>> transferAnalysesResults() {
	synchronized (transferAnalysesResultsLock) {
		logger.trace("Running transferAnalysesResults");
		// Pick up every submission whose Galaxy pipeline has finished running.
		List<AnalysisSubmission> finishedSubmissions = analysisSubmissionRepository
				.findByAnalysisState(AnalysisState.FINISHED_RUNNING);
		Set<Future<AnalysisSubmission>> transfers = Sets.newHashSet();
		for (AnalysisSubmission submission : finishedSubmissions) {
			logger.debug("Transferring results for " + submission);
			try {
				Future<AnalysisSubmission> transfer = analysisExecutionService.transferAnalysisResults(submission);
				transfers.add(transfer);
			} catch (ExecutionManagerException | IOException | IridaWorkflowException e) {
				// A failed transfer for one submission must not stop the rest.
				logger.error("Error transferring submission " + submission, e);
			}
		}
		return transfers;
	}
}
Aggregations