Use of ca.corefacility.bioinformatics.irida.exceptions.IridaWorkflowNotFoundException in project irida by phac-nml.
From the class AnalysisExecutionScheduledTaskImpl, method prepareAnalyses:
/**
 * {@inheritDoc}
 */
@Override
public Set<Future<AnalysisSubmission>> prepareAnalyses() {
	synchronized (prepareAnalysesLock) {
		logger.trace("Running prepareAnalyses");

		List<AnalysisSubmission> newSubmissions = analysisSubmissionRepository
				.findByAnalysisState(AnalysisState.NEW);

		// Highest-priority submissions are prepared first (descending sort).
		newSubmissions.sort((first, second) -> second.getPriority().compareTo(first.getPriority()));

		Set<Future<AnalysisSubmission>> prepared = Sets.newHashSet();

		// Ask the execution service how many additional jobs it will accept.
		int capacity = analysisExecutionService.getCapacity();
		if (capacity <= 0) {
			logger.trace("AnalysisExecutionService at max capacity. No jobs updated.");
		} else {
			if (capacity < newSubmissions.size()) {
				logger.debug("Attempting to submit more jobs than capacity, list will be trimmed: " + newSubmissions.size() + "=>" + capacity);
				// only submit up to capacity
				newSubmissions = newSubmissions.subList(0, capacity);
			}
			for (AnalysisSubmission submission : newSubmissions) {
				logger.debug("Preparing " + submission);
				try {
					prepared.add(analysisExecutionService.prepareSubmission(submission));
				} catch (ExecutionManagerException | IridaWorkflowNotFoundException | IOException e) {
					// A single failed preparation is logged and skipped; the rest proceed.
					logger.error("Error preparing submission " + submission, e);
				}
			}
		}

		return prepared;
	}
}
Use of ca.corefacility.bioinformatics.irida.exceptions.IridaWorkflowNotFoundException in project irida by phac-nml.
From the class PipelineController, method ajaxStartPipeline:
// ************************************************************************************************
// AJAX
// ************************************************************************************************
/**
 * Launch a pipeline.
 *
 * Validates the request (a name must be provided, a reference file must be
 * present when the workflow requires one, and the submitted file ids must
 * resolve to the expected sequencing-object types), resolves the selected
 * pipeline parameters, then creates a single-sample or multiple-sample
 * analysis submission depending on the workflow description.
 *
 * @param locale the locale that the browser is using for the current request.
 * @param parameters DTO of pipeline start parameters
 * @return a JSON response with the status and any messages: {"success": true}
 *         on success, otherwise a single key ("error", "parameterError" or
 *         "pipelineError") mapped to a localized message.
 */
@RequestMapping(value = "/ajax/start", method = RequestMethod.POST)
@ResponseBody
public Map<String, Object> ajaxStartPipeline(Locale locale, @RequestBody final PipelineStartParameters parameters) {
try {
IridaWorkflow flow = workflowsService.getIridaWorkflow(parameters.getWorkflowId());
IridaWorkflowDescription description = flow.getWorkflowDescription();
// The pipeline needs to have a name.
String name = parameters.getName();
if (Strings.isNullOrEmpty(name)) {
return ImmutableMap.of("error", messageSource.getMessage("workflow.no-name-provided", null, locale));
}
// Check to see if a reference file is required.
Long ref = parameters.getRef();
if (description.requiresReference() && ref == null) {
return ImmutableMap.of("error", messageSource.getMessage("pipeline.error.no-reference.pipeline-start", null, locale));
}
// Get a list of the files to submit
List<SingleEndSequenceFile> singleEndFiles = new ArrayList<>();
List<SequenceFilePair> sequenceFilePairs = new ArrayList<>();
List<Long> single = parameters.getSingle();
if (single != null) {
Iterable<SequencingObject> readMultiple = sequencingObjectService.readMultiple(single);
// NOTE(review): the IllegalArgumentException thrown for a mismatched type is
// not caught in this method, so it propagates to the framework — confirm intended.
readMultiple.forEach(f -> {
if (!(f instanceof SingleEndSequenceFile)) {
throw new IllegalArgumentException("file " + f.getId() + " not a SingleEndSequenceFile");
}
singleEndFiles.add((SingleEndSequenceFile) f);
});
// Check the single files for duplicates in a sample, throws SampleAnalysisDuplicateException
sequencingObjectService.getUniqueSamplesForSequencingObjects(Sets.newHashSet(singleEndFiles));
}
List<Long> paired = parameters.getPaired();
if (paired != null) {
Iterable<SequencingObject> readMultiple = sequencingObjectService.readMultiple(paired);
readMultiple.forEach(f -> {
if (!(f instanceof SequenceFilePair)) {
throw new IllegalArgumentException("file " + f.getId() + " not a SequenceFilePair");
}
sequenceFilePairs.add((SequenceFilePair) f);
});
// Check the pair files for duplicates in a sample, throws SampleAnalysisDuplicateException
sequencingObjectService.getUniqueSamplesForSequencingObjects(Sets.newHashSet(sequenceFilePairs));
}
// Get the pipeline parameters
Map<String, String> params = new HashMap<>();
IridaWorkflowNamedParameters namedParameters = null;
Map<String, Object> selectedParameters = parameters.getSelectedParameters();
if (selectedParameters != null) {
try {
final String selectedParametersId = selectedParameters.get("id").toString();
if (!DEFAULT_WORKFLOW_PARAMETERS_ID.equals(selectedParametersId) && !CUSTOM_UNSAVED_WORKFLOW_PARAMETERS_ID.equals(selectedParametersId)) {
// this means that a named parameter set was selected
// and unmodified, so load up that named parameter set
// to pass along.
namedParameters = namedParameterService.read(Long.valueOf(selectedParametersId));
} else {
// Default or custom-unsaved parameters: copy the individual
// name/value pairs out of the request payload.
@SuppressWarnings("unchecked") final List<Map<String, String>> unnamedParameters = (List<Map<String, String>>) selectedParameters.get("parameters");
for (final Map<String, String> parameter : unnamedParameters) {
params.put(parameter.get("name"), parameter.get("value"));
}
}
// Broad catch by design: any malformed parameter payload (missing "id",
// non-numeric id, wrong collection shape) becomes one localized error.
} catch (Exception e) {
return ImmutableMap.of("parameterError", messageSource.getMessage("pipeline.parameters.error", null, locale));
}
}
// Projects the analysis results should be shared with, if any were selected.
List<Project> projectsToShare = new ArrayList<>();
List<Long> sharedProjects = parameters.getSharedProjects();
if (sharedProjects != null && !sharedProjects.isEmpty()) {
projectsToShare = Lists.newArrayList(projectService.readMultiple(sharedProjects));
}
String analysisDescription = parameters.getDescription();
Boolean writeResultsToSamples = parameters.getWriteResultsToSamples();
// The workflow description decides whether each sample gets its own
// submission or all samples are combined into one.
if (description.getInputs().requiresSingleSample()) {
analysisSubmissionService.createSingleSampleSubmission(flow, ref, singleEndFiles, sequenceFilePairs, params, namedParameters, name, analysisDescription, projectsToShare, writeResultsToSamples);
} else {
analysisSubmissionService.createMultipleSampleSubmission(flow, ref, singleEndFiles, sequenceFilePairs, params, namedParameters, name, analysisDescription, projectsToShare, writeResultsToSamples);
}
} catch (IridaWorkflowNotFoundException e) {
logger.error("Cannot find IridaWorkflow [" + parameters.getWorkflowId() + "]", e);
return ImmutableMap.of("pipelineError", messageSource.getMessage("pipeline.error.invalid-pipeline", null, locale));
} catch (DuplicateSampleException e) {
logger.error("Multiple files for Sample found", e);
return ImmutableMap.of("pipelineError", messageSource.getMessage("pipeline.error.duplicate-samples", null, locale));
}
return ImmutableMap.of("success", true);
}
Use of ca.corefacility.bioinformatics.irida.exceptions.IridaWorkflowNotFoundException in project irida by phac-nml.
From the class PipelineController, method getSpecifiedPipelinePage:
/**
 * Get a generic pipeline page.
 *
 * Builds the launch-page model for the given pipeline from the current cart
 * contents: available reference files, the projects/samples and their
 * sequence files, default and saved named parameters, and — when the
 * workflow requires one — Galaxy dynamic source options.
 *
 * @param model
 * the model for the current request
 * @param principal
 * the user in the current request
 * @param locale
 * the locale that the user is using
 * @param pipelineId
 * the pipeline to load
 * @return a page reference or redirect to load.
 */
@RequestMapping(value = "/{pipelineId}")
public String getSpecifiedPipelinePage(final Model model, Principal principal, Locale locale, @PathVariable UUID pipelineId) {
String response = URL_EMPTY_CART_REDIRECT;
boolean canUpdateAllSamples;
Map<Project, Set<Sample>> cartMap = cartController.getSelected();
// Cannot run a pipeline on an empty cart!
if (!cartMap.isEmpty()) {
Authentication authentication = SecurityContextHolder.getContext().getAuthentication();
IridaWorkflow flow = null;
try {
flow = workflowsService.getIridaWorkflow(pipelineId);
} catch (IridaWorkflowNotFoundException e) {
logger.error("Workflow not found - See stack:", e);
return "redirect:errors/not_found";
}
// Check if there even is functionality to update samples from results for this pipeline
canUpdateAllSamples = analysisSubmissionSampleProcessor.hasRegisteredAnalysisSampleUpdater(flow.getWorkflowDescription().getAnalysisType());
User user = userService.getUserByUsername(principal.getName());
// Get all the reference files that could be used for this pipeline.
List<Map<String, Object>> referenceFileList = new ArrayList<>();
List<Map<String, Object>> projectList = new ArrayList<>();
// Projects with no reference file that the user could add one to.
List<Map<String, Object>> addRefList = new ArrayList<>();
IridaWorkflowDescription description = flow.getWorkflowDescription();
final String workflowName = description.getName().toLowerCase();
for (Project project : cartMap.keySet()) {
// Check to see if it requires a reference file.
if (description.requiresReference()) {
List<Join<Project, ReferenceFile>> joinList = referenceFileService.getReferenceFilesForProject(project);
for (Join<Project, ReferenceFile> join : joinList) {
referenceFileList.add(ImmutableMap.of("project", project, "file", join.getObject()));
}
// NOTE(review): this checks the accumulated list across all projects, so
// an "add reference" prompt only appears while no project has contributed
// a reference file yet — confirm this is intended vs. a per-project check.
if (referenceFileList.size() == 0) {
if (user.getSystemRole().equals(Role.ROLE_ADMIN) || projectService.userHasProjectRole(user, project, ProjectRole.PROJECT_OWNER)) {
addRefList.add(ImmutableMap.of("name", project.getLabel(), "id", project.getId()));
}
}
}
Set<Sample> samples = cartMap.get(project);
Map<String, Object> projectMap = new HashMap<>();
List<Map<String, Object>> sampleList = new ArrayList<>();
for (Sample sample : samples) {
Map<String, Object> sampleMap = new HashMap<>();
sampleMap.put("name", sample.getLabel());
sampleMap.put("id", sample.getId().toString());
Map<String, List<? extends Object>> files = new HashMap<>();
// Paired end reads
if (description.acceptsPairedSequenceFiles()) {
Collection<SampleSequencingObjectJoin> pairs = sequencingObjectService.getSequencesForSampleOfType(sample, SequenceFilePair.class);
files.put("paired_end", pairs.stream().map(SampleSequencingObjectJoin::getObject).collect(Collectors.toList()));
}
// Single end reads
if (description.acceptsSingleSequenceFiles()) {
Collection<SampleSequencingObjectJoin> singles = sequencingObjectService.getSequencesForSampleOfType(sample, SingleEndSequenceFile.class);
files.put("single_end", singles.stream().map(SampleSequencingObjectJoin::getObject).collect(Collectors.toList()));
}
sampleMap.put("files", files);
sampleList.add(sampleMap);
}
projectMap.put("id", project.getId().toString());
projectMap.put("name", project.getLabel());
projectMap.put("samples", sampleList);
projectList.add(projectMap);
// ANDed per project: stays true only if the user may update every selected sample.
canUpdateAllSamples &= updateSamplePermission.isAllowed(authentication, samples);
}
// Need to add the pipeline parameters
final List<IridaWorkflowParameter> defaultWorkflowParameters = flow.getWorkflowDescription().getParameters();
final List<Map<String, Object>> parameters = new ArrayList<>();
if (defaultWorkflowParameters != null) {
final List<Map<String, String>> defaultParameters = new ArrayList<>();
for (IridaWorkflowParameter p : defaultWorkflowParameters) {
// Required parameters are not user-editable defaults, so skip them here.
if (p.isRequired()) {
continue;
}
defaultParameters.add(ImmutableMap.of("label", messageSource.getMessage("pipeline.parameters." + workflowName + "." + p.getName(), null, locale), "value", p.getDefaultValue(), "name", p.getName()));
}
parameters.add(ImmutableMap.of("id", DEFAULT_WORKFLOW_PARAMETERS_ID, "label", messageSource.getMessage("workflow.parameters.named.default", null, locale), "parameters", defaultParameters));
// Saved (named) parameter sets previously stored for this workflow.
final List<IridaWorkflowNamedParameters> namedParameters = namedParameterService.findNamedParametersForWorkflow(pipelineId);
for (final IridaWorkflowNamedParameters p : namedParameters) {
final List<Map<String, String>> namedParametersList = new ArrayList<>();
for (final Map.Entry<String, String> parameter : p.getInputParameters().entrySet()) {
namedParametersList.add(ImmutableMap.of("label", messageSource.getMessage("pipeline.parameters." + workflowName + "." + parameter.getKey(), null, locale), "value", parameter.getValue(), "name", parameter.getKey()));
}
parameters.add(ImmutableMap.of("id", p.getId(), "label", p.getLabel(), "parameters", namedParametersList));
}
model.addAttribute("parameterModalTitle", messageSource.getMessage("pipeline.parameters.modal-title." + workflowName, null, locale));
} else {
model.addAttribute("noParameters", messageSource.getMessage("pipeline.no-parameters", null, locale));
}
// Parameters should be added no matter what, even if they are empty.
model.addAttribute("parameters", parameters);
model.addAttribute("title", messageSource.getMessage("pipeline.title." + description.getName(), null, locale));
model.addAttribute("mainTitle", messageSource.getMessage("pipeline.h1." + description.getName(), null, locale));
model.addAttribute("name", description.getName());
model.addAttribute("pipelineId", pipelineId.toString());
model.addAttribute("referenceFiles", referenceFileList);
model.addAttribute("referenceRequired", description.requiresReference());
model.addAttribute("addRefProjects", addRefList);
model.addAttribute("projects", projectList);
model.addAttribute("canUpdateSamples", canUpdateAllSamples);
model.addAttribute("workflowName", workflowName);
model.addAttribute("dynamicSourceRequired", description.requiresDynamicSource());
// Dynamic sources: selection options pulled from Galaxy tool data tables for
// required parameters that declare a dynamic source.
final List<Map<String, Object>> dynamicSources = new ArrayList<>();
if (description.requiresDynamicSource()) {
TabularToolDataTable galaxyToolDataTable = new TabularToolDataTable();
IridaWorkflowDynamicSourceGalaxy dynamicSource = new IridaWorkflowDynamicSourceGalaxy();
for (IridaWorkflowParameter parameter : description.getParameters()) {
if (parameter.isRequired() && parameter.hasDynamicSource()) {
try {
dynamicSource = parameter.getDynamicSource();
} catch (IridaWorkflowParameterException e) {
// NOTE(review): on failure the previous (or empty) dynamicSource is reused
// below — confirm this is intended rather than skipping the parameter.
logger.debug("Dynamic Source error: ", e);
}
List<Object> parametersList = new ArrayList<>();
String dynamicSourceName;
Map<String, Object> toolDataTable = new HashMap<>();
try {
dynamicSourceName = dynamicSource.getName();
toolDataTable.put("id", dynamicSourceName);
toolDataTable.put("label", messageSource.getMessage("dynamicsource.label." + dynamicSourceName, null, locale));
toolDataTable.put("parameters", parametersList);
galaxyToolDataTable = galaxyToolDataService.getToolDataTable(dynamicSourceName);
// Pair the display column with the value column row by row.
List<String> labels = galaxyToolDataTable.getFieldsForColumn(dynamicSource.getDisplayColumn());
Iterator<String> labelsIterator = labels.iterator();
List<String> values = galaxyToolDataTable.getFieldsForColumn(dynamicSource.getParameterColumn());
Iterator<String> valuesIterator = values.iterator();
while (labelsIterator.hasNext() && valuesIterator.hasNext()) {
String label = labelsIterator.next();
String value = valuesIterator.next();
HashMap<String, String> toolDataTableFieldsMap = new HashMap<>();
toolDataTableFieldsMap.put("label", label);
toolDataTableFieldsMap.put("value", value);
toolDataTableFieldsMap.put("name", parameter.getName());
parametersList.add(toolDataTableFieldsMap);
}
dynamicSources.add(toolDataTable);
// Broad catch: a missing/unreadable tool data table only drops this
// dynamic source from the page rather than failing the whole request.
} catch (Exception e) {
logger.debug("Tool Data Table not found: ", e);
}
}
}
model.addAttribute("dynamicSources", dynamicSources);
}
response = URL_GENERIC_PIPELINE;
}
return response;
}
Use of ca.corefacility.bioinformatics.irida.exceptions.IridaWorkflowNotFoundException in project irida by phac-nml.
From the class PipelineController, method getPipelineLaunchPage:
/**
 * Get the Pipeline Selection Page.
 *
 * Builds one display entry (name, id, localized title and description) per
 * registered workflow type, sorted alphabetically by name, plus the cart
 * summary counts.
 *
 * @param model
 *            {@link Model}
 * @param locale
 *            Current users {@link Locale}
 *
 * @return location of the pipeline selection page.
 */
@RequestMapping
public String getPipelineLaunchPage(final Model model, Locale locale) {
	Set<AnalysisType> workflows = workflowsService.getRegisteredWorkflowTypes();
	List<Map<String, String>> flows = new ArrayList<>(workflows.size());
	// Plain loop instead of a side-effecting stream().forEach: clearer intent,
	// and the workflow variable can be scoped to the try block.
	for (AnalysisType type : workflows) {
		try {
			IridaWorkflow flow = workflowsService.getDefaultWorkflowByType(type);
			IridaWorkflowDescription description = flow.getWorkflowDescription();
			String name = type.toString();
			String key = "workflow." + name;
			flows.add(ImmutableMap.of(
					"name", name,
					"id", description.getId().toString(),
					"title", messageSource.getMessage(key + ".title", null, locale),
					"description", messageSource.getMessage(key + ".description", null, locale)));
		} catch (IridaWorkflowNotFoundException e) {
			// A type with no default workflow is skipped rather than failing the page.
			logger.error("Workflow not found - See stack:", e);
		}
	}
	// Present pipelines alphabetically by name.
	flows.sort((f1, f2) -> f1.get("name").compareTo(f2.get("name")));
	model.addAttribute("counts", getCartSummaryMap());
	model.addAttribute("workflows", flows);
	return URL_LAUNCH;
}
Aggregations