
Example 46 with IridaWorkflow

use of ca.corefacility.bioinformatics.irida.model.workflow.IridaWorkflow in project irida by phac-nml.

the class IridaWorkflowLoaderService method loadAllWorkflowImplementations.

/**
 * Loads up a set of {@link IridaWorkflow}s from the given directory.
 *
 * @param workflowDirectory
 *            The directory containing the different workflow
 *            implementations and files.
 * @return A set of {@link IridaWorkflow}s for all implementations.
 * @throws IOException
 *             If there was an issue reading one of the workflow files.
 * @throws IridaWorkflowLoadException
 *             If there was an issue when loading up the workflows.
 */
public Set<IridaWorkflow> loadAllWorkflowImplementations(Path workflowDirectory) throws IOException, IridaWorkflowLoadException {
    checkNotNull(workflowDirectory, "workflowDirectory is null");
    checkArgument(Files.isDirectory(workflowDirectory), "workflowDirectory is not a directory");
    Set<IridaWorkflow> workflowImplementations = new HashSet<>();
    // Open the directory stream in try-with-resources so it is always closed.
    try (DirectoryStream<Path> stream = Files.newDirectoryStream(workflowDirectory)) {
        for (Path implementationDirectory : stream) {
            if (!Files.isDirectory(implementationDirectory)) {
                logger.warn("Workflow directory " + workflowDirectory + " contains a file " + implementationDirectory + " that is not a proper workflow directory");
            } else {
                IridaWorkflow iridaWorkflow = loadIridaWorkflowFromDirectory(implementationDirectory);
                workflowImplementations.add(iridaWorkflow);
            }
        }
    }
    return workflowImplementations;
}
Also used : Path(java.nio.file.Path) IridaWorkflow(ca.corefacility.bioinformatics.irida.model.workflow.IridaWorkflow) HashSet(java.util.HashSet)
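
A minimal usage sketch for the loader above. The helper below is not part of IRIDA; the workflowLoaderService and logger fields and the method name are assumptions made for illustration, while loadAllWorkflowImplementations and getWorkflowIdentifier come from the sources shown here.

/**
 * Hypothetical helper: loads every workflow implementation under a directory and logs
 * each identifier. Assumes an injected IridaWorkflowLoaderService (workflowLoaderService)
 * and an SLF4J-style logger field in the surrounding class.
 */
public Set<IridaWorkflow> loadAndLogWorkflows(Path workflowDirectory) throws IOException, IridaWorkflowLoadException {
    Set<IridaWorkflow> workflows = workflowLoaderService.loadAllWorkflowImplementations(workflowDirectory);
    for (IridaWorkflow workflow : workflows) {
        // getWorkflowIdentifier() returns the workflow's UUID.
        logger.debug("Loaded workflow " + workflow.getWorkflowIdentifier());
    }
    return workflows;
}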

Example 47 with IridaWorkflow

use of ca.corefacility.bioinformatics.irida.model.workflow.IridaWorkflow in project irida by phac-nml.

the class AnalysesListingService method getPagedSubmissions.

/**
 * Get a {@link DataTablesResponse} for {@link AnalysisSubmission}s based upon the {@link User} and the {@link Project}
 *
 * @param params  {@link DataTablesParams}
 * @param locale  {@link Locale}
 * @param user    {@link User}
 * @param project {@link Project}
 * @return {@link DataTablesResponse}
 * @throws IridaWorkflowNotFoundException If the requested workflow doesn't exist
 * @throws ExecutionManagerException      If the submission cannot be read properly
 */
public DataTablesResponse getPagedSubmissions(DataTablesParams params, Locale locale, User user, Project project) throws IridaWorkflowNotFoundException, ExecutionManagerException {
    /*
     * Check the DataTablesParams to see if any search conditions are present.
     */
    Map<String, String> searchMap = params.getSearchMap();
    AnalysisState state = searchMap.containsKey("analysisState") ? AnalysisState.valueOf(searchMap.get("analysisState")) : null;
    String name = searchMap.getOrDefault("name", null);
    /*
     * Workflow IDs are a special consideration: the actual IDs need to be
     * looked up based on the workflow name passed in.
     */
    Set<UUID> workflowIds = null;
    if (searchMap.containsKey("workflow")) {
        AnalysisType workflowType = AnalysisType.fromString(searchMap.get("workflow"));
        Set<IridaWorkflow> workflows = iridaWorkflowsService.getAllWorkflowsByType(workflowType);
        workflowIds = workflows.stream().map(IridaWorkflow::getWorkflowIdentifier).collect(Collectors.toSet());
    }
    Page<AnalysisSubmission> page;
    PageRequest pageRequest = new PageRequest(params.getCurrentPage(), params.getLength(), params.getSort());
    if (user != null) {
        // if user is set, get submissions for the user
        page = analysisSubmissionService.listSubmissionsForUser(params.getSearchValue(), name, state, user, workflowIds, pageRequest);
    } else if (project != null) {
        // if the project is set, get submissions for the project
        page = analysisSubmissionService.listSubmissionsForProject(params.getSearchValue(), name, state, workflowIds, project, pageRequest);
    } else {
        // if neither is set, get admin page
        page = analysisSubmissionService.listAllSubmissions(params.getSearchValue(), name, state, workflowIds, pageRequest);
    }
    /*
     * The IRIDA DataTables response expects an object that implements the
     * DataTablesResponseModel interface.
     */
    List<DataTablesResponseModel> data = new ArrayList<>();
    for (AnalysisSubmission submission : page.getContent()) {
        // Each AnalysisSubmission needs to be converted into a DTAnalysis.
        data.add(createDataTablesAnalysis(submission, locale));
    }
    return new DataTablesResponse(params, page, data);
}
Also used : AnalysisType(ca.corefacility.bioinformatics.irida.model.enums.AnalysisType) AnalysisState(ca.corefacility.bioinformatics.irida.model.enums.AnalysisState) DataTablesResponseModel(ca.corefacility.bioinformatics.irida.ria.web.components.datatables.models.DataTablesResponseModel) IridaWorkflow(ca.corefacility.bioinformatics.irida.model.workflow.IridaWorkflow) AnalysisSubmission(ca.corefacility.bioinformatics.irida.model.workflow.submission.AnalysisSubmission) PageRequest(org.springframework.data.domain.PageRequest) DataTablesResponse(ca.corefacility.bioinformatics.irida.ria.web.components.datatables.DataTablesResponse)
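
A short caller sketch for getPagedSubmissions. The helper below is hypothetical (the analysesListingService field and the method name are not taken from the IRIDA controllers); it only illustrates how the user and project arguments select the listing scope in the method above.

/**
 * Hypothetical helper: delegates to AnalysesListingService#getPagedSubmissions.
 * Passing a non-null user lists that user's submissions, a non-null project lists the
 * project's submissions, and null for both returns the admin-wide listing.
 */
public DataTablesResponse listSubmissions(DataTablesParams params, Locale locale, User user, Project project)
        throws IridaWorkflowNotFoundException, ExecutionManagerException {
    return analysesListingService.getPagedSubmissions(params, locale, user, project);
}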

Example 48 with IridaWorkflow

use of ca.corefacility.bioinformatics.irida.model.workflow.IridaWorkflow in project irida by phac-nml.

the class PipelineController method ajaxStartPipeline.

// ************************************************************************************************
// AJAX
// ************************************************************************************************
/**
 * Launch a pipeline
 *
 * @param locale     the locale that the browser is using for the current request.
 * @param parameters DTO of pipeline start parameters
 * @return a JSON response with the status and any messages.
 */
@RequestMapping(value = "/ajax/start", method = RequestMethod.POST)
@ResponseBody
public Map<String, Object> ajaxStartPipeline(Locale locale, @RequestBody final PipelineStartParameters parameters) {
    try {
        IridaWorkflow flow = workflowsService.getIridaWorkflow(parameters.getWorkflowId());
        IridaWorkflowDescription description = flow.getWorkflowDescription();
        // The pipeline needs to have a name.
        String name = parameters.getName();
        if (Strings.isNullOrEmpty(name)) {
            return ImmutableMap.of("error", messageSource.getMessage("workflow.no-name-provided", null, locale));
        }
        // Check to see if a reference file is required.
        Long ref = parameters.getRef();
        if (description.requiresReference() && ref == null) {
            return ImmutableMap.of("error", messageSource.getMessage("pipeline.error.no-reference.pipeline-start", null, locale));
        }
        // Get a list of the files to submit
        List<SingleEndSequenceFile> singleEndFiles = new ArrayList<>();
        List<SequenceFilePair> sequenceFilePairs = new ArrayList<>();
        List<Long> single = parameters.getSingle();
        if (single != null) {
            Iterable<SequencingObject> readMultiple = sequencingObjectService.readMultiple(single);
            readMultiple.forEach(f -> {
                if (!(f instanceof SingleEndSequenceFile)) {
                    throw new IllegalArgumentException("file " + f.getId() + " not a SingleEndSequenceFile");
                }
                singleEndFiles.add((SingleEndSequenceFile) f);
            });
            // Check the single files for duplicates in a sample, throws SampleAnalysisDuplicateException
            sequencingObjectService.getUniqueSamplesForSequencingObjects(Sets.newHashSet(singleEndFiles));
        }
        List<Long> paired = parameters.getPaired();
        if (paired != null) {
            Iterable<SequencingObject> readMultiple = sequencingObjectService.readMultiple(paired);
            readMultiple.forEach(f -> {
                if (!(f instanceof SequenceFilePair)) {
                    throw new IllegalArgumentException("file " + f.getId() + " not a SequenceFilePair");
                }
                sequenceFilePairs.add((SequenceFilePair) f);
            });
            // Check the pair files for duplicates in a sample, throws SampleAnalysisDuplicateException
            sequencingObjectService.getUniqueSamplesForSequencingObjects(Sets.newHashSet(sequenceFilePairs));
        }
        // Get the pipeline parameters
        Map<String, String> params = new HashMap<>();
        IridaWorkflowNamedParameters namedParameters = null;
        Map<String, Object> selectedParameters = parameters.getSelectedParameters();
        if (selectedParameters != null) {
            try {
                final String selectedParametersId = selectedParameters.get("id").toString();
                if (!DEFAULT_WORKFLOW_PARAMETERS_ID.equals(selectedParametersId) && !CUSTOM_UNSAVED_WORKFLOW_PARAMETERS_ID.equals(selectedParametersId)) {
                    // this means that a named parameter set was selected
                    // and unmodified, so load up that named parameter set
                    // to pass along.
                    namedParameters = namedParameterService.read(Long.valueOf(selectedParametersId));
                } else {
                    @SuppressWarnings("unchecked") final List<Map<String, String>> unnamedParameters = (List<Map<String, String>>) selectedParameters.get("parameters");
                    for (final Map<String, String> parameter : unnamedParameters) {
                        params.put(parameter.get("name"), parameter.get("value"));
                    }
                }
            } catch (Exception e) {
                return ImmutableMap.of("parameterError", messageSource.getMessage("pipeline.parameters.error", null, locale));
            }
        }
        List<Project> projectsToShare = new ArrayList<>();
        List<Long> sharedProjects = parameters.getSharedProjects();
        if (sharedProjects != null && !sharedProjects.isEmpty()) {
            projectsToShare = Lists.newArrayList(projectService.readMultiple(sharedProjects));
        }
        String analysisDescription = parameters.getDescription();
        Boolean writeResultsToSamples = parameters.getWriteResultsToSamples();
        if (description.getInputs().requiresSingleSample()) {
            analysisSubmissionService.createSingleSampleSubmission(flow, ref, singleEndFiles, sequenceFilePairs, params, namedParameters, name, analysisDescription, projectsToShare, writeResultsToSamples);
        } else {
            analysisSubmissionService.createMultipleSampleSubmission(flow, ref, singleEndFiles, sequenceFilePairs, params, namedParameters, name, analysisDescription, projectsToShare, writeResultsToSamples);
        }
    } catch (IridaWorkflowNotFoundException e) {
        logger.error("Cannot find IridaWorkflow [" + parameters.getWorkflowId() + "]", e);
        return ImmutableMap.of("pipelineError", messageSource.getMessage("pipeline.error.invalid-pipeline", null, locale));
    } catch (DuplicateSampleException e) {
        logger.error("Multiple files for Sample found", e);
        return ImmutableMap.of("pipelineError", messageSource.getMessage("pipeline.error.duplicate-samples", null, locale));
    }
    return ImmutableMap.of("success", true);
}
Also used : SequencingObject(ca.corefacility.bioinformatics.irida.model.sequenceFile.SequencingObject) HashMap(java.util.HashMap) IridaWorkflow(ca.corefacility.bioinformatics.irida.model.workflow.IridaWorkflow) ArrayList(java.util.ArrayList) IridaWorkflowNamedParameters(ca.corefacility.bioinformatics.irida.model.workflow.submission.IridaWorkflowNamedParameters) SingleEndSequenceFile(ca.corefacility.bioinformatics.irida.model.sequenceFile.SingleEndSequenceFile) DuplicateSampleException(ca.corefacility.bioinformatics.irida.exceptions.DuplicateSampleException) SequenceFilePair(ca.corefacility.bioinformatics.irida.model.sequenceFile.SequenceFilePair) List(java.util.List) IridaWorkflowNotFoundException(ca.corefacility.bioinformatics.irida.exceptions.IridaWorkflowNotFoundException) IridaWorkflowParameterException(ca.corefacility.bioinformatics.irida.exceptions.IridaWorkflowParameterException) IOException(java.io.IOException) Project(ca.corefacility.bioinformatics.irida.model.project.Project) IridaWorkflowDescription(ca.corefacility.bioinformatics.irida.model.workflow.description.IridaWorkflowDescription) Map(java.util.Map) ImmutableMap(com.google.common.collect.ImmutableMap) RequestMapping(org.springframework.web.bind.annotation.RequestMapping) ResponseBody(org.springframework.web.bind.annotation.ResponseBody)
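
A minimal sketch of how a caller or test might interpret the map returned by ajaxStartPipeline. The response keys ("success", "error", "parameterError", "pipelineError") come from the method above; the helper itself, along with the pipelineController and logger fields it assumes, is illustrative only.

/**
 * Hypothetical helper: starts a pipeline and reports whether the submission was created.
 */
public boolean startPipeline(Locale locale, PipelineStartParameters parameters) {
    Map<String, Object> result = pipelineController.ajaxStartPipeline(locale, parameters);
    if (result.containsKey("success")) {
        // One or more AnalysisSubmissions were created.
        return true;
    }
    if (result.containsKey("error")) {
        // Validation failure: missing name or missing required reference file.
        logger.warn("Pipeline not started: " + result.get("error"));
    } else if (result.containsKey("parameterError")) {
        // The selected parameter set could not be parsed.
        logger.warn("Pipeline not started: " + result.get("parameterError"));
    } else if (result.containsKey("pipelineError")) {
        // Unknown workflow id or duplicate samples among the selected files.
        logger.warn("Pipeline not started: " + result.get("pipelineError"));
    }
    return false;
}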

Example 49 with IridaWorkflow

use of ca.corefacility.bioinformatics.irida.model.workflow.IridaWorkflow in project irida by phac-nml.

the class PipelineController method getSpecifiedPipelinePage.

/**
 * Get a generic pipeline page.
 *
 * @param model
 *            the model for the current request
 * @param principal
 *            the user in the current request
 * @param locale
 *            the locale that the user is using
 * @param pipelineId
 *            the pipeline to load
 * @return a page reference or redirect to load.
 */
@RequestMapping(value = "/{pipelineId}")
public String getSpecifiedPipelinePage(final Model model, Principal principal, Locale locale, @PathVariable UUID pipelineId) {
    String response = URL_EMPTY_CART_REDIRECT;
    boolean canUpdateAllSamples;
    Map<Project, Set<Sample>> cartMap = cartController.getSelected();
    // Cannot run a pipeline on an empty cart!
    if (!cartMap.isEmpty()) {
        Authentication authentication = SecurityContextHolder.getContext().getAuthentication();
        IridaWorkflow flow = null;
        try {
            flow = workflowsService.getIridaWorkflow(pipelineId);
        } catch (IridaWorkflowNotFoundException e) {
            logger.error("Workflow not found - See stack:", e);
            return "redirect:errors/not_found";
        }
        // Check if there even is functionality to update samples from results for this pipeline
        canUpdateAllSamples = analysisSubmissionSampleProcessor.hasRegisteredAnalysisSampleUpdater(flow.getWorkflowDescription().getAnalysisType());
        User user = userService.getUserByUsername(principal.getName());
        // Get all the reference files that could be used for this pipeline.
        List<Map<String, Object>> referenceFileList = new ArrayList<>();
        List<Map<String, Object>> projectList = new ArrayList<>();
        List<Map<String, Object>> addRefList = new ArrayList<>();
        IridaWorkflowDescription description = flow.getWorkflowDescription();
        final String workflowName = description.getName().toLowerCase();
        for (Project project : cartMap.keySet()) {
            // Check to see if it requires a reference file.
            if (description.requiresReference()) {
                List<Join<Project, ReferenceFile>> joinList = referenceFileService.getReferenceFilesForProject(project);
                for (Join<Project, ReferenceFile> join : joinList) {
                    referenceFileList.add(ImmutableMap.of("project", project, "file", join.getObject()));
                }
                if (referenceFileList.size() == 0) {
                    if (user.getSystemRole().equals(Role.ROLE_ADMIN) || projectService.userHasProjectRole(user, project, ProjectRole.PROJECT_OWNER)) {
                        addRefList.add(ImmutableMap.of("name", project.getLabel(), "id", project.getId()));
                    }
                }
            }
            Set<Sample> samples = cartMap.get(project);
            Map<String, Object> projectMap = new HashMap<>();
            List<Map<String, Object>> sampleList = new ArrayList<>();
            for (Sample sample : samples) {
                Map<String, Object> sampleMap = new HashMap<>();
                sampleMap.put("name", sample.getLabel());
                sampleMap.put("id", sample.getId().toString());
                Map<String, List<? extends Object>> files = new HashMap<>();
                // Paired end reads
                if (description.acceptsPairedSequenceFiles()) {
                    Collection<SampleSequencingObjectJoin> pairs = sequencingObjectService.getSequencesForSampleOfType(sample, SequenceFilePair.class);
                    files.put("paired_end", pairs.stream().map(SampleSequencingObjectJoin::getObject).collect(Collectors.toList()));
                }
                // Single-end reads
                if (description.acceptsSingleSequenceFiles()) {
                    Collection<SampleSequencingObjectJoin> singles = sequencingObjectService.getSequencesForSampleOfType(sample, SingleEndSequenceFile.class);
                    files.put("single_end", singles.stream().map(SampleSequencingObjectJoin::getObject).collect(Collectors.toList()));
                }
                sampleMap.put("files", files);
                sampleList.add(sampleMap);
            }
            projectMap.put("id", project.getId().toString());
            projectMap.put("name", project.getLabel());
            projectMap.put("samples", sampleList);
            projectList.add(projectMap);
            canUpdateAllSamples &= updateSamplePermission.isAllowed(authentication, samples);
        }
        // Need to add the pipeline parameters
        final List<IridaWorkflowParameter> defaultWorkflowParameters = flow.getWorkflowDescription().getParameters();
        final List<Map<String, Object>> parameters = new ArrayList<>();
        if (defaultWorkflowParameters != null) {
            final List<Map<String, String>> defaultParameters = new ArrayList<>();
            for (IridaWorkflowParameter p : defaultWorkflowParameters) {
                if (p.isRequired()) {
                    continue;
                }
                defaultParameters.add(ImmutableMap.of("label", messageSource.getMessage("pipeline.parameters." + workflowName + "." + p.getName(), null, locale), "value", p.getDefaultValue(), "name", p.getName()));
            }
            parameters.add(ImmutableMap.of("id", DEFAULT_WORKFLOW_PARAMETERS_ID, "label", messageSource.getMessage("workflow.parameters.named.default", null, locale), "parameters", defaultParameters));
            final List<IridaWorkflowNamedParameters> namedParameters = namedParameterService.findNamedParametersForWorkflow(pipelineId);
            for (final IridaWorkflowNamedParameters p : namedParameters) {
                final List<Map<String, String>> namedParametersList = new ArrayList<>();
                for (final Map.Entry<String, String> parameter : p.getInputParameters().entrySet()) {
                    namedParametersList.add(ImmutableMap.of("label", messageSource.getMessage("pipeline.parameters." + workflowName + "." + parameter.getKey(), null, locale), "value", parameter.getValue(), "name", parameter.getKey()));
                }
                parameters.add(ImmutableMap.of("id", p.getId(), "label", p.getLabel(), "parameters", namedParametersList));
            }
            model.addAttribute("parameterModalTitle", messageSource.getMessage("pipeline.parameters.modal-title." + workflowName, null, locale));
        } else {
            model.addAttribute("noParameters", messageSource.getMessage("pipeline.no-parameters", null, locale));
        }
        // Parameters should be added no matter what, even if they are empty.
        model.addAttribute("parameters", parameters);
        model.addAttribute("title", messageSource.getMessage("pipeline.title." + description.getName(), null, locale));
        model.addAttribute("mainTitle", messageSource.getMessage("pipeline.h1." + description.getName(), null, locale));
        model.addAttribute("name", description.getName());
        model.addAttribute("pipelineId", pipelineId.toString());
        model.addAttribute("referenceFiles", referenceFileList);
        model.addAttribute("referenceRequired", description.requiresReference());
        model.addAttribute("addRefProjects", addRefList);
        model.addAttribute("projects", projectList);
        model.addAttribute("canUpdateSamples", canUpdateAllSamples);
        model.addAttribute("workflowName", workflowName);
        model.addAttribute("dynamicSourceRequired", description.requiresDynamicSource());
        final List<Map<String, Object>> dynamicSources = new ArrayList<>();
        if (description.requiresDynamicSource()) {
            TabularToolDataTable galaxyToolDataTable = new TabularToolDataTable();
            IridaWorkflowDynamicSourceGalaxy dynamicSource = new IridaWorkflowDynamicSourceGalaxy();
            for (IridaWorkflowParameter parameter : description.getParameters()) {
                if (parameter.isRequired() && parameter.hasDynamicSource()) {
                    try {
                        dynamicSource = parameter.getDynamicSource();
                    } catch (IridaWorkflowParameterException e) {
                        logger.debug("Dynamic Source error: ", e);
                    }
                    List<Object> parametersList = new ArrayList<>();
                    String dynamicSourceName;
                    Map<String, Object> toolDataTable = new HashMap<>();
                    try {
                        dynamicSourceName = dynamicSource.getName();
                        toolDataTable.put("id", dynamicSourceName);
                        toolDataTable.put("label", messageSource.getMessage("dynamicsource.label." + dynamicSourceName, null, locale));
                        toolDataTable.put("parameters", parametersList);
                        galaxyToolDataTable = galaxyToolDataService.getToolDataTable(dynamicSourceName);
                        List<String> labels = galaxyToolDataTable.getFieldsForColumn(dynamicSource.getDisplayColumn());
                        Iterator<String> labelsIterator = labels.iterator();
                        List<String> values = galaxyToolDataTable.getFieldsForColumn(dynamicSource.getParameterColumn());
                        Iterator<String> valuesIterator = values.iterator();
                        while (labelsIterator.hasNext() && valuesIterator.hasNext()) {
                            String label = labelsIterator.next();
                            String value = valuesIterator.next();
                            HashMap<String, String> toolDataTableFieldsMap = new HashMap<>();
                            toolDataTableFieldsMap.put("label", label);
                            toolDataTableFieldsMap.put("value", value);
                            toolDataTableFieldsMap.put("name", parameter.getName());
                            parametersList.add(toolDataTableFieldsMap);
                        }
                        dynamicSources.add(toolDataTable);
                    } catch (Exception e) {
                        logger.debug("Tool Data Table not found: ", e);
                    }
                }
            }
            model.addAttribute("dynamicSources", dynamicSources);
        }
        response = URL_GENERIC_PIPELINE;
    }
    return response;
}
Also used : ReferenceFile(ca.corefacility.bioinformatics.irida.model.project.ReferenceFile) Set(java.util.Set) User(ca.corefacility.bioinformatics.irida.model.user.User) HashMap(java.util.HashMap) IridaWorkflow(ca.corefacility.bioinformatics.irida.model.workflow.IridaWorkflow) ArrayList(java.util.ArrayList) IridaWorkflowNamedParameters(ca.corefacility.bioinformatics.irida.model.workflow.submission.IridaWorkflowNamedParameters) IridaWorkflowParameterException(ca.corefacility.bioinformatics.irida.exceptions.IridaWorkflowParameterException) IridaWorkflowParameter(ca.corefacility.bioinformatics.irida.model.workflow.description.IridaWorkflowParameter) List(java.util.List) Sample(ca.corefacility.bioinformatics.irida.model.sample.Sample) Join(ca.corefacility.bioinformatics.irida.model.joins.Join) SampleSequencingObjectJoin(ca.corefacility.bioinformatics.irida.model.sample.SampleSequencingObjectJoin) TabularToolDataTable(com.github.jmchilton.blend4j.galaxy.beans.TabularToolDataTable) IridaWorkflowNotFoundException(ca.corefacility.bioinformatics.irida.exceptions.IridaWorkflowNotFoundException) IOException(java.io.IOException) DuplicateSampleException(ca.corefacility.bioinformatics.irida.exceptions.DuplicateSampleException) Project(ca.corefacility.bioinformatics.irida.model.project.Project) Authentication(org.springframework.security.core.Authentication) IridaWorkflowDescription(ca.corefacility.bioinformatics.irida.model.workflow.description.IridaWorkflowDescription) SequencingObject(ca.corefacility.bioinformatics.irida.model.sequenceFile.SequencingObject) Map(java.util.Map) ImmutableMap(com.google.common.collect.ImmutableMap) RequestMapping(org.springframework.web.bind.annotation.RequestMapping)
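
For clarity, the snippet below reproduces the shape of a single entry in the "parameters" model attribute built above for a named parameter set. Only the map keys ("id", "label", "parameters", "name", "value") mirror the code; the concrete values are invented for the example.

// Illustrative only: what one named-parameter-set entry added to the model looks like.
// ImmutableList is com.google.common.collect.ImmutableList; the values are made up.
Map<String, Object> namedParameterSetEntry = ImmutableMap.of(
        "id", 1L,
        "label", "Stringent settings",
        "parameters", ImmutableList.of(
                ImmutableMap.of(
                        "label", "Minimum coverage",
                        "value", "10",
                        "name", "coverage-min")));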

Example 50 with IridaWorkflow

use of ca.corefacility.bioinformatics.irida.model.workflow.IridaWorkflow in project irida by phac-nml.

the class PipelineController method getPipelineLaunchPage.

/**
 * Get the Pipeline Selection Page
 *
 * @param model
 * 		{@link Model}
 * @param locale
 * 		Current user's {@link Locale}
 *
 * @return location of the pipeline selection page.
 */
@RequestMapping
public String getPipelineLaunchPage(final Model model, Locale locale) {
    Set<AnalysisType> workflows = workflowsService.getRegisteredWorkflowTypes();
    List<Map<String, String>> flows = new ArrayList<>(workflows.size());
    workflows.stream().forEach(type -> {
        IridaWorkflow flow = null;
        try {
            flow = workflowsService.getDefaultWorkflowByType(type);
            IridaWorkflowDescription description = flow.getWorkflowDescription();
            String name = type.toString();
            String key = "workflow." + name;
            flows.add(ImmutableMap.of("name", name, "id", description.getId().toString(), "title", messageSource.getMessage(key + ".title", null, locale), "description", messageSource.getMessage(key + ".description", null, locale)));
        } catch (IridaWorkflowNotFoundException e) {
            logger.error("Workflow not found - See stack:", e);
        }
    });
    flows.sort((f1, f2) -> f1.get("name").compareTo(f2.get("name")));
    model.addAttribute("counts", getCartSummaryMap());
    model.addAttribute("workflows", flows);
    return URL_LAUNCH;
}
Also used : AnalysisType(ca.corefacility.bioinformatics.irida.model.enums.AnalysisType) IridaWorkflowNotFoundException(ca.corefacility.bioinformatics.irida.exceptions.IridaWorkflowNotFoundException) IridaWorkflow(ca.corefacility.bioinformatics.irida.model.workflow.IridaWorkflow) ArrayList(java.util.ArrayList) IridaWorkflowDescription(ca.corefacility.bioinformatics.irida.model.workflow.description.IridaWorkflowDescription) Map(java.util.Map) ImmutableMap(com.google.common.collect.ImmutableMap) HashMap(java.util.HashMap) RequestMapping(org.springframework.web.bind.annotation.RequestMapping)
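
As an illustration of the model data produced above, one entry in the "workflows" list might look like the following. The keys ("name", "id", "title", "description") match the code; the values are invented, since the real ones come from the registered workflow types and the message source.

// Illustrative only: a single entry of the "workflows" model attribute.
Map<String, String> workflowEntry = ImmutableMap.of(
        "name", "ASSEMBLY_ANNOTATION",
        "id", "a1b2c3d4-e5f6-7890-abcd-ef1234567890", // description.getId().toString()
        "title", "Assembly and Annotation",
        "description", "Assembles and annotates the selected reads");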

Aggregations

IridaWorkflow (ca.corefacility.bioinformatics.irida.model.workflow.IridaWorkflow) 50
AnalysisSubmission (ca.corefacility.bioinformatics.irida.model.workflow.submission.AnalysisSubmission) 27
Test (org.junit.Test) 27
Path (java.nio.file.Path) 25
History (com.github.jmchilton.blend4j.galaxy.beans.History) 19
HistoriesClient (com.github.jmchilton.blend4j.galaxy.HistoriesClient) 18
WorkflowsClient (com.github.jmchilton.blend4j.galaxy.WorkflowsClient) 18
Workflow (com.github.jmchilton.blend4j.galaxy.beans.Workflow) 18
WithMockUser (org.springframework.security.test.context.support.WithMockUser) 18
AnalysisType (ca.corefacility.bioinformatics.irida.model.enums.AnalysisType) 10
SequenceFilePair (ca.corefacility.bioinformatics.irida.model.sequenceFile.SequenceFilePair) 10
Analysis (ca.corefacility.bioinformatics.irida.model.workflow.analysis.Analysis) 9
ArrayList (java.util.ArrayList) 9
SingleEndSequenceFile (ca.corefacility.bioinformatics.irida.model.sequenceFile.SingleEndSequenceFile) 8
IridaWorkflowDescription (ca.corefacility.bioinformatics.irida.model.workflow.description.IridaWorkflowDescription) 8
ToolsClient (com.github.jmchilton.blend4j.galaxy.ToolsClient) 8
IridaWorkflowNotFoundException (ca.corefacility.bioinformatics.irida.exceptions.IridaWorkflowNotFoundException) 7
PreparedWorkflowGalaxy (ca.corefacility.bioinformatics.irida.model.workflow.execution.galaxy.PreparedWorkflowGalaxy) 7
WorkflowInputsGalaxy (ca.corefacility.bioinformatics.irida.model.workflow.execution.galaxy.WorkflowInputsGalaxy) 6
HistoryContents (com.github.jmchilton.blend4j.galaxy.beans.HistoryContents) 6