use of ca.corefacility.bioinformatics.irida.model.enums.AnalysisState in project irida by phac-nml.
the class AnalysisController method getAjaxStatusUpdateForAnalysisSubmission.
/**
* Get the current status for a given {@link AnalysisSubmission}
*
* @param submissionId The {@link Long} id for a given {@link AnalysisSubmission}
* @param locale The user's current {@link Locale}
* @return {@link HashMap} containing the status and the percent complete for the {@link AnalysisSubmission}
*/
@RequestMapping(value = "/ajax/status/{submissionId}", produces = MediaType.APPLICATION_JSON_VALUE)
@ResponseBody
public Map<String, String> getAjaxStatusUpdateForAnalysisSubmission(@PathVariable Long submissionId, Locale locale) {
Map<String, String> result = new HashMap<>();
AnalysisSubmission analysisSubmission = analysisSubmissionService.read(submissionId);
AnalysisState state = analysisSubmission.getAnalysisState();
result.put("state", state.toString());
result.put("stateLang", messageSource.getMessage("analysis.state." + state.toString(), null, locale));
if (!state.equals(AnalysisState.ERROR)) {
float percentComplete = 0;
try {
percentComplete = analysisSubmissionService.getPercentCompleteForAnalysisSubmission(analysisSubmission.getId());
result.put("percentComplete", Float.toString(percentComplete));
} catch (ExecutionManagerException e) {
logger.error("Error getting the percentage complete", e);
result.put("percentageComplete", "");
}
}
return result;
}
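For context, a hedged usage sketch of this handler follows. The analysisController instance, the submission id 1L, and the example values in the comments are illustrative assumptions; only the map keys come from the method above.
// Hedged sketch: calling the handler directly (assumes an analysisController whose services are wired or mocked elsewhere).
Map<String, String> status = analysisController.getAjaxStatusUpdateForAnalysisSubmission(1L, Locale.ENGLISH);
String state = status.get("state");                      // e.g. "RUNNING" (illustrative value)
String stateLang = status.get("stateLang");              // localized label resolved via messageSource
String percentComplete = status.get("percentComplete");  // e.g. "45.0"; empty string if an ExecutionManagerException occurred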
use of ca.corefacility.bioinformatics.irida.model.enums.AnalysisState in project irida by phac-nml.
the class AnalysesListingService method createDataTablesAnalysis.
/**
* Convert a {@link AnalysisSubmission} to a {@link DTAnalysis} to be used by DataTables on the client.
*
* @param submission {@link AnalysisSubmission}
* @param locale {@link Locale}
* @return {@link DTAnalysis}
* @throws IridaWorkflowNotFoundException If the requested workflow doesn't exist
* @throws ExecutionManagerException If the submission cannot be read properly
*/
private DTAnalysis createDataTablesAnalysis(AnalysisSubmission submission, Locale locale) throws IridaWorkflowNotFoundException, ExecutionManagerException {
Long id = submission.getId();
String name = submission.getName();
String submitter = submission.getSubmitter().getLabel();
Date createdDate = submission.getCreatedDate();
float percentComplete = 0;
AnalysisState analysisState = submission.getAnalysisState();
JobError error = null;
if (analysisState.equals(AnalysisState.ERROR)) {
error = getFirstJobError(submission);
} else {
percentComplete = analysisSubmissionService.getPercentCompleteForAnalysisSubmission(submission.getId());
}
String workflowType = iridaWorkflowsService.getIridaWorkflow(submission.getWorkflowId()).getWorkflowDescription().getAnalysisType().toString();
String workflow = messageSource.getMessage("workflow." + workflowType + ".title", null, locale);
String state = messageSource.getMessage("analysis.state." + analysisState.toString(), null, locale);
Long duration = 0L;
if (analysisState.equals(AnalysisState.COMPLETED)) {
duration = getDurationInMilliseconds(submission.getCreatedDate(), submission.getAnalysis().getCreatedDate());
}
Authentication authentication = SecurityContextHolder.getContext().getAuthentication();
boolean updatePermission = updateAnalysisPermission.isAllowed(authentication, submission);
return new DTAnalysis(id, name, submitter, percentComplete, createdDate, workflow, state, error, duration, updatePermission);
}
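The getDurationInMilliseconds helper called above is not part of this snippet; a minimal sketch, assuming it is simply the difference between the two Date timestamps, might look like the following (the real IRIDA helper may be implemented differently):
// Hypothetical sketch of the duration helper: a plain millisecond difference between two java.util.Date values.
private Long getDurationInMilliseconds(Date start, Date end) {
    return end.getTime() - start.getTime();
}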
use of ca.corefacility.bioinformatics.irida.model.enums.AnalysisState in project irida by phac-nml.
the class AnalysisSubmissionServiceImplIT method searchAnalyses.
@Test
@WithMockUser(username = "aaron", roles = "ADMIN")
public void searchAnalyses() {
Page<AnalysisSubmission> paged = analysisSubmissionService.listAllSubmissions(null, null, null, null, new PageRequest(0, 10, new Sort(Direction.ASC, "createdDate")));
assertEquals(10, paged.getContent().size());
// Try filtering by name
String name = "My";
paged = analysisSubmissionService.listAllSubmissions(null, name, null, null, new PageRequest(0, 10, new Sort(Direction.ASC, "createdDate")));
assertEquals(8, paged.getContent().size());
// Add a state filter
AnalysisState state = AnalysisState.COMPLETED;
paged = analysisSubmissionService.listAllSubmissions(null, name, state, null, new PageRequest(0, 10, new Sort(Direction.ASC, "createdDate")));
assertEquals(2, paged.getContent().size());
}
use of ca.corefacility.bioinformatics.irida.model.enums.AnalysisState in project irida by phac-nml.
the class AnalysisSubmissionCleanupServiceImpl method switchInconsistentSubmissionsToError.
/**
* {@inheritDoc}
*/
@Override
@PreAuthorize("hasRole('ROLE_ADMIN')")
public int switchInconsistentSubmissionsToError() {
if (ranSwitchInconsistentSubmissionsToError) {
throw new RuntimeException("already ran this method once");
} else {
int numberSubmissionsSwitched = 0;
ranSwitchInconsistentSubmissionsToError = true;
for (AnalysisState state : inconsistentStates) {
List<AnalysisSubmission> submissions = analysisSubmissionRepository.findByAnalysisState(state);
for (AnalysisSubmission submission : submissions) {
logger.error("AnalysisSubmission [id=" + submission.getId() + ", name=" + submission.getName() + ", state=" + submission.getAnalysisState() + "] left in inconsistent state. Switching to " + AnalysisState.ERROR + ".");
submission.setAnalysisState(AnalysisState.ERROR);
analysisSubmissionRepository.save(submission);
numberSubmissionsSwitched++;
}
}
return numberSubmissionsSwitched;
}
}
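The inconsistentStates collection iterated above is defined elsewhere in the class; one plausible, hypothetical definition covering states between submission and completion is sketched below (the exact set used by IRIDA may differ):
// Hypothetical definition of the states treated as inconsistent on startup; the actual set in IRIDA may differ.
// Requires java.util.Arrays and java.util.List.
private static final List<AnalysisState> inconsistentStates = Arrays.asList(AnalysisState.PREPARING,
    AnalysisState.SUBMITTING, AnalysisState.COMPLETING);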
use of ca.corefacility.bioinformatics.irida.model.enums.AnalysisState in project irida by phac-nml.
the class AnalysisSubmissionServiceImpl method getPercentCompleteForAnalysisSubmission.
/**
* {@inheritDoc}
*/
@Override
@PreAuthorize("hasRole('ROLE_ADMIN') or hasPermission(#id, 'canReadAnalysisSubmission')")
public float getPercentCompleteForAnalysisSubmission(Long id) throws EntityNotFoundException, ExecutionManagerException, NoPercentageCompleteException {
AnalysisSubmission analysisSubmission = read(id);
AnalysisState analysisState = analysisSubmission.getAnalysisState();
switch(analysisState) {
case NEW:
case PREPARING:
case PREPARED:
case SUBMITTING:
return STATE_PERCENTAGE.get(analysisState);
/**
* If the analysis is in a state of {@link AnalysisState#RUNNING}
* then we are able to ask Galaxy for the proportion of jobs that
* are complete. We can scale this value between RUNNING_PERCENT
* (10%) and FINISHED_RUNNING_PERCENT (90%) so that after all jobs
* are complete we are only at 90%. The remaining 10% involves
* transferring the result files from Galaxy back to IRIDA.
*
* For example, if 10 out of 20 jobs are finished on Galaxy,
* then the proportion of jobs complete is 10/20 = 0.5. So, the
* percent complete for the overall analysis is: percentComplete =
* 10 + (90 - 10) * 0.5 = 50%.
*
* If 20 out of 20 jobs are finished on Galaxy, then the
* percent complete is: percentComplete = 10 + (90 - 10) * 1.0 =
* 90%.
*/
case RUNNING:
String workflowHistoryId = analysisSubmission.getRemoteAnalysisId();
GalaxyWorkflowStatus workflowStatus = galaxyHistoriesService.getStatusForHistory(workflowHistoryId);
return RUNNING_PERCENT + (FINISHED_RUNNING_PERCENT - RUNNING_PERCENT) * workflowStatus.getProportionComplete();
case FINISHED_RUNNING:
case COMPLETING:
case TRANSFERRED:
case POST_PROCESSING:
case COMPLETED:
return STATE_PERCENTAGE.get(analysisState);
default:
throw new NoPercentageCompleteException("No valid percent complete for state " + analysisState);
}
}
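To make the RUNNING-state scaling concrete, here is a small standalone example reproducing the arithmetic from the comment above; the literals 10 and 90 mirror the RUNNING_PERCENT and FINISHED_RUNNING_PERCENT values that the comment states, not the constants themselves.
// Worked example of the RUNNING-state percent-complete scaling described in the comment above.
public class PercentCompleteExample {
    public static void main(String[] args) {
        float runningPercent = 10.0f;             // value of RUNNING_PERCENT as stated in the comment
        float finishedRunningPercent = 90.0f;     // value of FINISHED_RUNNING_PERCENT as stated in the comment
        float proportionComplete = 10.0f / 20.0f; // 10 of 20 Galaxy jobs finished
        float percentComplete = runningPercent + (finishedRunningPercent - runningPercent) * proportionComplete;
        System.out.println(percentComplete);      // prints 50.0, matching the worked example in the comment
    }
}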