Use of ca.corefacility.bioinformatics.irida.model.workflow.IridaWorkflow in project irida by phac-nml.
From class AnalysisWorkspaceServiceGalaxyIT, method testPrepareAnalysisFilesSingleSuccess:
/**
* Tests out successfully preparing single workflow input files for
* execution.
*
* @throws InterruptedException
* @throws ExecutionManagerException
* @throws IOException
* @throws IridaWorkflowException
*/
@Test
@WithMockUser(username = "aaron", roles = "ADMIN")
public void testPrepareAnalysisFilesSingleSuccess() throws InterruptedException, ExecutionManagerException,
        IOException, IridaWorkflowException {
    History history = new History();
    history.setName("testPrepareAnalysisFilesSingleSuccess");
    HistoriesClient historiesClient = localGalaxy.getGalaxyInstanceAdmin().getHistoriesClient();
    WorkflowsClient workflowsClient = localGalaxy.getGalaxyInstanceAdmin().getWorkflowsClient();
    LibrariesClient librariesClient = localGalaxy.getGalaxyInstanceAdmin().getLibrariesClient();
    History createdHistory = historiesClient.create(history);
    IridaWorkflow iridaWorkflow = iridaWorkflowsService.getIridaWorkflow(validWorkflowIdSingle);
    Path workflowPath = iridaWorkflow.getWorkflowStructure().getWorkflowFile();
    String workflowString = new String(Files.readAllBytes(workflowPath), StandardCharsets.UTF_8);
    Workflow galaxyWorkflow = workflowsClient.importWorkflow(workflowString);
    AnalysisSubmission analysisSubmission = analysisExecutionGalaxyITService.setupSubmissionInDatabase(1L,
            sequenceFilePathA, referenceFilePath, validWorkflowIdSingle, false);
    analysisSubmission.setRemoteAnalysisId(createdHistory.getId());
    analysisSubmission.setRemoteWorkflowId(galaxyWorkflow.getId());
    PreparedWorkflowGalaxy preparedWorkflow = analysisWorkspaceService.prepareAnalysisFiles(analysisSubmission);
    assertEquals("the response history id should match the input history id", createdHistory.getId(),
            preparedWorkflow.getRemoteAnalysisId());
    assertNotNull("the returned workflow inputs should not be null", preparedWorkflow.getWorkflowInputs());
    assertNotNull("the returned library id should not be null", preparedWorkflow.getRemoteDataId());

    // verify correct library is created
    List<LibraryContent> libraryContents = librariesClient.getLibraryContents(preparedWorkflow.getRemoteDataId());
    Map<String, List<LibraryContent>> libraryContentsMap = libraryContents.stream()
            .collect(Collectors.groupingBy(LibraryContent::getName));
    assertFalse("the returned library should exist in Galaxy", libraryContentsMap.isEmpty());
    String sequenceFileALibraryName = "/" + sequenceFilePathA.getFileName().toString();
    assertEquals("the returned library does not contain the correct number of elements", 2, libraryContentsMap.size());
    assertTrue("the returned library does not contain a root folder", libraryContentsMap.containsKey("/"));
    assertTrue("the returned library does not contain the correct sequence file",
            libraryContentsMap.containsKey(sequenceFileALibraryName));
    assertEquals("the returned library does not contain the correct sequence file", 1,
            libraryContentsMap.get(sequenceFileALibraryName).size());

    // verify correct files have been uploaded
    List<HistoryContents> historyContents = historiesClient.showHistoryContents(createdHistory.getId());
    assertEquals("the created history should contain 3 entries", 3, historyContents.size());
    Map<String, HistoryContents> contentsMap = historyContentsAsMap(historyContents);
    assertTrue("the created history should contain the file " + sequenceFilePathA.toFile().getName(),
            contentsMap.containsKey(sequenceFilePathA.toFile().getName()));
    assertTrue("the created history should contain the file " + referenceFilePath.toFile().getName(),
            contentsMap.containsKey(referenceFilePath.toFile().getName()));
    assertTrue("the created history should contain the collection with name " + INPUTS_SINGLE_NAME,
            contentsMap.containsKey(INPUTS_SINGLE_NAME));

    // make sure workflow inputs contains correct information
    Map<String, WorkflowInput> workflowInputsMap = preparedWorkflow.getWorkflowInputs().getInputsObject().getInputs();
    assertEquals("the created workflow inputs has an invalid number of elements", 2, workflowInputsMap.size());
}
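The historyContentsAsMap helper called above is defined elsewhere in AnalysisWorkspaceServiceGalaxyIT and is not part of this snippet. A minimal sketch of what such a helper could look like, assuming it simply keys each HistoryContents entry by its dataset name (the project's actual implementation may differ):

private Map<String, HistoryContents> historyContentsAsMap(List<HistoryContents> historyContents) {
    // Hypothetical sketch: index each history entry by name so the assertions above
    // can look up uploaded datasets and collections directly.
    return historyContents.stream().collect(Collectors.toMap(HistoryContents::getName, content -> content));
}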
From class AnalysisWorkspaceServiceGalaxyIT, method testPrepareAnalysisFilesSinglePairFail:
/**
* Tests out failing to prepare paired and single workflow input files for
* execution (duplicate samples among single and paired input files).
*
* @throws InterruptedException
* @throws ExecutionManagerException
* @throws IOException
* @throws IridaWorkflowException
*/
@Test(expected = SampleAnalysisDuplicateException.class)
@WithMockUser(username = "aaron", roles = "ADMIN")
public void testPrepareAnalysisFilesSinglePairFail() throws InterruptedException, ExecutionManagerException,
        IOException, IridaWorkflowException {
    History history = new History();
    history.setName("testPrepareAnalysisFilesSinglePairFail");
    HistoriesClient historiesClient = localGalaxy.getGalaxyInstanceAdmin().getHistoriesClient();
    WorkflowsClient workflowsClient = localGalaxy.getGalaxyInstanceAdmin().getWorkflowsClient();
    History createdHistory = historiesClient.create(history);
    IridaWorkflow iridaWorkflow = iridaWorkflowsService.getIridaWorkflow(validWorkflowIdSinglePaired);
    Path workflowPath = iridaWorkflow.getWorkflowStructure().getWorkflowFile();
    String workflowString = new String(Files.readAllBytes(workflowPath), StandardCharsets.UTF_8);
    Workflow galaxyWorkflow = workflowsClient.importWorkflow(workflowString);
    AnalysisSubmission analysisSubmission = analysisExecutionGalaxyITService.setupSinglePairSubmissionInDatabaseSameSample(
            1L, pairSequenceFiles1A, pairSequenceFiles2A, sequenceFilePath3, referenceFilePath,
            validWorkflowIdSinglePaired);
    analysisSubmission.setRemoteAnalysisId(createdHistory.getId());
    analysisSubmission.setRemoteWorkflowId(galaxyWorkflow.getId());
    analysisWorkspaceService.prepareAnalysisFiles(analysisSubmission);
}
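The @Test(expected = ...) annotation passes as long as SampleAnalysisDuplicateException is thrown anywhere in the method, including during setup. If the project is on JUnit 4.13 or later, the expectation could instead be scoped to the prepareAnalysisFiles call itself; a sketch of an alternative final line, assuming org.junit.Assert.assertThrows is available:

// Sketch only: scopes the expected failure to the prepareAnalysisFiles call.
assertThrows("duplicate samples across single and paired inputs should be rejected",
        SampleAnalysisDuplicateException.class,
        () -> analysisWorkspaceService.prepareAnalysisFiles(analysisSubmission));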
From class AnalysisWorkspaceServiceGalaxyIT, method testGetAnalysisResultsTestAnalysisSinglePairedSuccess:
/**
* Tests out successfully getting results for an analysis (TestAnalysis)
* consisting of both single and paired sequence reads.
*
* @throws InterruptedException
* @throws ExecutionManagerException
* @throws IridaWorkflowNotFoundException
* @throws IOException
* @throws IridaWorkflowAnalysisTypeException
* @throws TimeoutException
*/
@Test
@WithMockUser(username = "aaron", roles = "ADMIN")
public void testGetAnalysisResultsTestAnalysisSinglePairedSuccess() throws InterruptedException,
        ExecutionManagerException, IridaWorkflowNotFoundException, IOException, IridaWorkflowAnalysisTypeException,
        TimeoutException {
    History history = new History();
    history.setName("testGetAnalysisResultsTestAnalysisSinglePairedSuccess");
    HistoriesClient historiesClient = localGalaxy.getGalaxyInstanceAdmin().getHistoriesClient();
    WorkflowsClient workflowsClient = localGalaxy.getGalaxyInstanceAdmin().getWorkflowsClient();
    ToolsClient toolsClient = localGalaxy.getGalaxyInstanceAdmin().getToolsClient();
    History createdHistory = historiesClient.create(history);

    // upload test outputs
    uploadFileToHistory(sequenceFilePathA, OUTPUT1_NAME, createdHistory.getId(), toolsClient);
    uploadFileToHistory(sequenceFilePathA, OUTPUT2_NAME, createdHistory.getId(), toolsClient);

    // wait for history
    Util.waitUntilHistoryComplete(createdHistory.getId(), galaxyHistoriesService, 60);

    IridaWorkflow iridaWorkflow = iridaWorkflowsService.getIridaWorkflow(validWorkflowIdSinglePaired);
    Path workflowPath = iridaWorkflow.getWorkflowStructure().getWorkflowFile();
    String workflowString = new String(Files.readAllBytes(workflowPath), StandardCharsets.UTF_8);
    Workflow galaxyWorkflow = workflowsClient.importWorkflow(workflowString);
    List<Path> paths1 = new ArrayList<>();
    paths1.add(sequenceFilePathA);
    List<Path> paths2 = new ArrayList<>();
    paths2.add(sequenceFilePath2A);
    AnalysisSubmission analysisSubmission = analysisExecutionGalaxyITService.setupSinglePairSubmissionInDatabaseSameSample(
            1L, paths1, paths2, sequenceFilePath3, referenceFilePath, validWorkflowIdSinglePaired);
    Set<SingleEndSequenceFile> singleFiles = sequencingObjectService.getSequencingObjectsOfTypeForAnalysisSubmission(
            analysisSubmission, SingleEndSequenceFile.class);
    Set<SequenceFilePair> pairedFiles = sequencingObjectService.getSequencingObjectsOfTypeForAnalysisSubmission(
            analysisSubmission, SequenceFilePair.class);
    assertEquals("invalid number of single end input files", 1, singleFiles.size());
    assertEquals("invalid number of paired end inputs", 1, pairedFiles.size());
    SequenceFilePair submittedSp = pairedFiles.iterator().next();
    Set<SequenceFile> submittedSf = submittedSp.getFiles();
    assertEquals("invalid number of files for paired input", 2, submittedSf.size());
    analysisSubmission.setRemoteAnalysisId(createdHistory.getId());
    analysisSubmission.setRemoteWorkflowId(galaxyWorkflow.getId());
    analysisSubmission.setAnalysisState(AnalysisState.COMPLETING);
    analysisSubmissionRepository.save(analysisSubmission);

    Analysis analysis = analysisWorkspaceService.getAnalysisResults(analysisSubmission);
    assertNotNull("the analysis results were not properly created", analysis);
    assertEquals("the Analysis results class is invalid", Analysis.class, analysis.getClass());
    assertEquals("the analysis results has an invalid number of output files", 2,
            analysis.getAnalysisOutputFiles().size());
    assertEquals("the analysis results output file has an invalid name", Paths.get(OUTPUT1_NAME),
            analysis.getAnalysisOutputFile(OUTPUT1_KEY).getFile().getFileName());
    assertEquals("the analysis results output file has an invalid label", OUTPUT1_NAME,
            analysis.getAnalysisOutputFile(OUTPUT1_KEY).getLabel());
    assertEquals("the analysis results output file has an invalid name", Paths.get(OUTPUT2_NAME),
            analysis.getAnalysisOutputFile(OUTPUT2_KEY).getFile().getFileName());
    assertEquals("the analysis results output file has an invalid label", OUTPUT2_NAME,
            analysis.getAnalysisOutputFile(OUTPUT2_KEY).getLabel());
}
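Like historyContentsAsMap, the uploadFileToHistory helper is not shown in this snippet. A hedged sketch of how it could be written with blend4j, assuming ToolsClient.FileUploadRequest, its setDatasetName setter, and ToolsClient#uploadRequest behave as described here (the real helper in the IRIDA test class may differ):

private void uploadFileToHistory(Path filePath, String fileName, String historyId, ToolsClient toolsClient) {
    // Hypothetical sketch: upload the file into the given history and rename the
    // dataset so the test can later find it under OUTPUT1_NAME/OUTPUT2_NAME.
    ToolsClient.FileUploadRequest uploadRequest = new ToolsClient.FileUploadRequest(historyId, filePath.toFile());
    uploadRequest.setDatasetName(fileName);
    toolsClient.uploadRequest(uploadRequest);
}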
From class AnalysisControllerTest, method testGetAnalysisDetailsNotCompleted:
@Test
public void testGetAnalysisDetailsNotCompleted() throws IOException, IridaWorkflowNotFoundException {
    Long submissionId = 1L;
    ExtendedModelMap model = new ExtendedModelMap();
    Locale locale = Locale.ENGLISH;
    final IridaWorkflowInput input = new IridaWorkflowInput("single", "paired", "reference", true);
    AnalysisSubmission submission = TestDataFactory.constructAnalysisSubmission();
    IridaWorkflowDescription description = new IridaWorkflowDescription(submission.getWorkflowId(), "My Workflow",
            "V1", AnalysisType.PHYLOGENOMICS, input, Lists.newArrayList(), Lists.newArrayList(), Lists.newArrayList());
    IridaWorkflow iridaWorkflow = new IridaWorkflow(description, null);
    submission.setAnalysisState(AnalysisState.RUNNING);
    when(analysisSubmissionServiceMock.read(submissionId)).thenReturn(submission);
    when(iridaWorkflowsServiceMock.getIridaWorkflow(submission.getWorkflowId())).thenReturn(iridaWorkflow);
    String detailsPage = analysisController.getDetailsPage(submissionId, model, locale);
    assertEquals("should be details page", AnalysisController.PAGE_DETAILS_DIRECTORY + "tree", detailsPage);
    assertFalse("No preview should be available", model.containsAttribute("preview"));
    assertEquals("submission should be in model", submission, model.get("analysisSubmission"));
}
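Because analysisSubmissionServiceMock and iridaWorkflowsServiceMock are Mockito mocks, the test could additionally confirm that the controller consulted them; a sketch of two extra lines at the end of the test, assuming a static import of org.mockito.Mockito.verify:

// Sketch only: assert the controller read the submission and looked up its workflow.
verify(analysisSubmissionServiceMock).read(submissionId);
verify(iridaWorkflowsServiceMock).getIridaWorkflow(submission.getWorkflowId());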
From class TestDataFactory, method getIridaWorkflow:
public static IridaWorkflow getIridaWorkflow(UUID id) {
    IridaWorkflowInput input = new IridaWorkflowInput();
    List<IridaWorkflowOutput> outputs = ImmutableList.of(new IridaWorkflowOutput());
    List<IridaWorkflowToolRepository> tools = ImmutableList.of();
    List<IridaWorkflowParameter> parameters = ImmutableList.of();
    IridaWorkflowDescription description = new IridaWorkflowDescription(id, "My Workflow", "V1",
            AnalysisType.DEFAULT, input, outputs, tools, parameters);
    IridaWorkflowStructure structure = new IridaWorkflowStructure(null);
    return new IridaWorkflow(description, structure);
}
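A minimal usage sketch for this factory method, assuming the IridaWorkflow#getWorkflowDescription and IridaWorkflowDescription#getId accessors:

// Hypothetical usage in a unit test: the returned workflow carries the id passed in.
UUID workflowId = UUID.randomUUID();
IridaWorkflow workflow = TestDataFactory.getIridaWorkflow(workflowId);
assertEquals(workflowId, workflow.getWorkflowDescription().getId());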