Use of ca.corefacility.bioinformatics.irida.model.workflow.analysis.Analysis in project irida by phac-nml.
The class AnalysisWorkspaceServiceGalaxyIT, method testGetAnalysisResultsTestAnalysisPairedSuccess:
/**
* Tests out successfully getting results for an analysis (TestAnalysis)
* consisting only of paired sequence reads.
*
* @throws InterruptedException
* @throws ExecutionManagerException
* @throws IridaWorkflowNotFoundException
* @throws IOException
* @throws IridaWorkflowAnalysisTypeException
* @throws TimeoutException
*/
@Test
@WithMockUser(username = "aaron", roles = "ADMIN")
public void testGetAnalysisResultsTestAnalysisPairedSuccess() throws InterruptedException, ExecutionManagerException,
        IridaWorkflowNotFoundException, IOException, IridaWorkflowAnalysisTypeException, TimeoutException {
    History history = new History();
    history.setName("testGetAnalysisResultsTestAnalysisPairedSuccess");
    HistoriesClient historiesClient = localGalaxy.getGalaxyInstanceAdmin().getHistoriesClient();
    WorkflowsClient workflowsClient = localGalaxy.getGalaxyInstanceAdmin().getWorkflowsClient();
    ToolsClient toolsClient = localGalaxy.getGalaxyInstanceAdmin().getToolsClient();
    History createdHistory = historiesClient.create(history);

    // upload test outputs
    uploadFileToHistory(sequenceFilePathA, OUTPUT1_NAME, createdHistory.getId(), toolsClient);
    uploadFileToHistory(sequenceFilePathA, OUTPUT2_NAME, createdHistory.getId(), toolsClient);

    // wait for history
    Util.waitUntilHistoryComplete(createdHistory.getId(), galaxyHistoriesService, 60);

    IridaWorkflow iridaWorkflow = iridaWorkflowsService.getIridaWorkflow(validWorkflowIdPaired);
    Path workflowPath = iridaWorkflow.getWorkflowStructure().getWorkflowFile();
    String workflowString = new String(Files.readAllBytes(workflowPath), StandardCharsets.UTF_8);
    Workflow galaxyWorkflow = workflowsClient.importWorkflow(workflowString);

    List<Path> paths1 = new ArrayList<>();
    paths1.add(sequenceFilePathA);
    List<Path> paths2 = new ArrayList<>();
    paths2.add(sequenceFilePath2A);

    AnalysisSubmission analysisSubmission = analysisExecutionGalaxyITService.setupPairSubmissionInDatabase(1L, paths1,
            paths2, referenceFilePath, validWorkflowIdPaired, false);

    Set<SingleEndSequenceFile> submittedSingleFiles = sequencingObjectService
            .getSequencingObjectsOfTypeForAnalysisSubmission(analysisSubmission, SingleEndSequenceFile.class);
    Set<SequenceFilePair> pairedFiles = sequencingObjectService
            .getSequencingObjectsOfTypeForAnalysisSubmission(analysisSubmission, SequenceFilePair.class);

    assertEquals("the created submission should have no single input files", 0, submittedSingleFiles.size());
    assertEquals("the created submission has an invalid number of paired input files", 1, pairedFiles.size());
    SequenceFilePair submittedSp = pairedFiles.iterator().next();
    Set<SequenceFile> submittedSf = submittedSp.getFiles();
    assertEquals("the paired input should have 2 files", 2, submittedSf.size());

    analysisSubmission.setRemoteAnalysisId(createdHistory.getId());
    analysisSubmission.setRemoteWorkflowId(galaxyWorkflow.getId());
    analysisSubmission.setAnalysisState(AnalysisState.COMPLETING);
    analysisSubmissionRepository.save(analysisSubmission);

    Analysis analysis = analysisWorkspaceService.getAnalysisResults(analysisSubmission);

    assertNotNull("the analysis results were not properly created", analysis);
    assertEquals("the Analysis results class is invalid", Analysis.class, analysis.getClass());
    assertEquals("the analysis results has an invalid number of output files", 2,
            analysis.getAnalysisOutputFiles().size());
    assertEquals("the analysis results output file has an invalid name", Paths.get(OUTPUT1_NAME),
            analysis.getAnalysisOutputFile(OUTPUT1_KEY).getFile().getFileName());
    assertEquals("the analysis results output file has an invalid label", OUTPUT1_NAME,
            analysis.getAnalysisOutputFile(OUTPUT1_KEY).getLabel());
    assertEquals("the analysis results output file has an invalid name", Paths.get(OUTPUT2_NAME),
            analysis.getAnalysisOutputFile(OUTPUT2_KEY).getFile().getFileName());
    assertEquals("the analysis results output file has an invalid label", OUTPUT2_NAME,
            analysis.getAnalysisOutputFile(OUTPUT2_KEY).getLabel());
}
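
For context, here is a minimal caller-side sketch (not taken from IRIDA) that walks the output files of a returned Analysis using only the accessors exercised by the assertions above: getAnalysisOutputFiles(), getLabel(), and getFile(). It assumes getAnalysisOutputFiles() yields AnalysisOutputFile instances, consistent with how the tests use it; the helper class name is hypothetical and imports are omitted, as in the other snippets on this page.

// Hypothetical helper, not part of IRIDA; it relies only on accessors used in the test above.
public class AnalysisResultsPrinter {

    // Print the label and on-disk path of every output file attached to an Analysis.
    public static void printOutputs(Analysis analysis) {
        for (AnalysisOutputFile outputFile : analysis.getAnalysisOutputFiles()) {
            System.out.println(outputFile.getLabel() + " -> " + outputFile.getFile());
        }
    }
}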
Use of ca.corefacility.bioinformatics.irida.model.workflow.analysis.Analysis in project irida by phac-nml.
The class AnalysisServiceTest, method testCreateAnalysisWithMultipleOutputFile:
@Test
public void testCreateAnalysisWithMultipleOutputFile() throws IOException {
    Path outputFile1 = Files.createTempFile(null, null);
    Path outputFile2 = Files.createTempFile(null, null);
    Path outputFile3 = Files.createTempFile(null, null);
    AnalysisOutputFile report1 = new AnalysisOutputFile(outputFile1, "", "", null);
    AnalysisOutputFile report2 = new AnalysisOutputFile(outputFile2, "", "", null);
    AnalysisOutputFile report3 = new AnalysisOutputFile(outputFile3, "", "", null);
    Map<String, AnalysisOutputFile> analysisOutputFiles = new ImmutableMap.Builder<String, AnalysisOutputFile>()
            .put("tree", report1)
            .put("matrix", report2)
            .put("table", report3)
            .build();
    Analysis analysis = new Analysis("something", analysisOutputFiles, AnalysisType.PHYLOGENOMICS);

    analysisService.create(analysis);

    verify(analysisOutputFileRepository, times(3)).save(any(AnalysisOutputFile.class));
    verify(analysisRepository).save(analysis);
}
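
The verify calls check that each of the three attached AnalysisOutputFile objects is saved and that the Analysis itself is saved once. Below is a rough sketch of a create method with that shape; the repository interfaces are simplified stand-ins for the mocks in this test, not IRIDA's actual Spring Data repositories, the real AnalysisServiceImpl may do considerably more, and imports are again omitted.

// Simplified, hypothetical repositories mirroring the mocks verified above.
interface AnalysisOutputFileRepository { AnalysisOutputFile save(AnalysisOutputFile file); }
interface AnalysisRepository { Analysis save(Analysis analysis); }

// Sketch of a create method with the behaviour the test verifies; not the actual AnalysisServiceImpl.
class AnalysisPersistenceSketch {

    private final AnalysisOutputFileRepository analysisOutputFileRepository;
    private final AnalysisRepository analysisRepository;

    AnalysisPersistenceSketch(AnalysisOutputFileRepository analysisOutputFileRepository,
            AnalysisRepository analysisRepository) {
        this.analysisOutputFileRepository = analysisOutputFileRepository;
        this.analysisRepository = analysisRepository;
    }

    Analysis create(Analysis analysis) {
        // persist every attached output file, then the analysis itself
        for (AnalysisOutputFile outputFile : analysis.getAnalysisOutputFiles()) {
            analysisOutputFileRepository.save(outputFile);
        }
        return analysisRepository.save(analysis);
    }
}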
Use of ca.corefacility.bioinformatics.irida.model.workflow.analysis.Analysis in project irida by phac-nml.
The class AnalysisServiceImplIT, method testCreatePhylogenomicsAnalysis:
@Test
@WithMockUser(username = "admin", roles = "ADMIN")
public void testCreatePhylogenomicsAnalysis() throws IOException {
    Path treePath = Files.createTempFile(null, null);
    Path tablePath = Files.createTempFile(null, null);
    Path matrixPath = Files.createTempFile(null, null);
    Map<String, String> params = new HashMap<>();
    params.put("param", "value");
    ToolExecution toolExecutionTree = new ToolExecution(null, "ls", "1.0", "executionManagerId", params, "/bin/ls -lrth");
    ToolExecution toolExecutionTable = new ToolExecution(null, "ls", "1.0", "executionManagerId", params, "/bin/ls -lrth");
    ToolExecution toolExecutionMatrix = new ToolExecution(null, "ls", "1.0", "executionManagerId", params, "/bin/ls -lrth");
    AnalysisOutputFile tree = new AnalysisOutputFile(treePath, "internal-galaxy-tree-identifier", "", toolExecutionTree);
    AnalysisOutputFile table = new AnalysisOutputFile(tablePath, "internal-galaxy-table-identifier", "", toolExecutionTable);
    AnalysisOutputFile matrix = new AnalysisOutputFile(matrixPath, "internal-galaxy-matrix-identifier", "", toolExecutionMatrix);
    Map<String, AnalysisOutputFile> analysisOutputFiles = new ImmutableMap.Builder<String, AnalysisOutputFile>()
            .put("tree", tree)
            .put("matrix", matrix)
            .put("table", table)
            .build();
    Analysis pipeline = new Analysis(EXECUTION_MANAGER_ID, analysisOutputFiles, AnalysisType.PHYLOGENOMICS);

    // before the analysis is created, the output files should not already live
    // under the output file base directory
    assertFalse("file was stored in the wrong directory.",
            pipeline.getAnalysisOutputFile(TREE_KEY).getFile().startsWith(outputFileBaseDirectory));
    assertFalse("file was stored in the wrong directory.",
            pipeline.getAnalysisOutputFile(MATRIX_KEY).getFile().startsWith(outputFileBaseDirectory));
    assertFalse("file was stored in the wrong directory.",
            pipeline.getAnalysisOutputFile(TABLE_KEY).getFile().startsWith(outputFileBaseDirectory));

    Analysis analysis = analysisService.create(pipeline);

    // creating the analysis should move the output files into the correct directory
    assertEquals("returned analysis was of the wrong type.", AnalysisType.PHYLOGENOMICS, analysis.getAnalysisType());
    assertTrue("file was stored in the wrong directory.",
            analysis.getAnalysisOutputFile(TREE_KEY).getFile().startsWith(outputFileBaseDirectory));
    assertTrue("file was stored in the wrong directory.",
            analysis.getAnalysisOutputFile(MATRIX_KEY).getFile().startsWith(outputFileBaseDirectory));
    assertTrue("file was stored in the wrong directory.",
            analysis.getAnalysisOutputFile(TABLE_KEY).getFile().startsWith(outputFileBaseDirectory));
}
Use of ca.corefacility.bioinformatics.irida.model.workflow.analysis.Analysis in project irida by phac-nml.
The class AnalysisWorkspaceServiceGalaxy, method getAnalysisResults:
/**
 * {@inheritDoc}
 */
@Override
public Analysis getAnalysisResults(AnalysisSubmission analysisSubmission) throws ExecutionManagerException,
        IridaWorkflowNotFoundException, IOException, IridaWorkflowAnalysisTypeException {
    checkNotNull(analysisSubmission, "analysisSubmission is null");
    checkNotNull(analysisSubmission.getWorkflowId(), "workflowId is null");
    checkNotNull(analysisSubmission.getRemoteWorkflowId(), "remoteWorkflowId is null");

    Path outputDirectory = Files.createTempDirectory("analysis-output");
    logger.trace("Created temporary directory " + outputDirectory + " for analysis output files");

    IridaWorkflow iridaWorkflow = iridaWorkflowsService.getIridaWorkflow(analysisSubmission.getWorkflowId());
    String analysisId = analysisSubmission.getRemoteAnalysisId();
    Map<String, IridaWorkflowOutput> outputsMap = iridaWorkflow.getWorkflowDescription().getOutputsMap();
    String labelPrefix = getLabelPrefix(analysisSubmission, iridaWorkflow);

    Map<String, AnalysisOutputFile> analysisOutputFiles = Maps.newHashMap();
    for (String analysisOutputName : outputsMap.keySet()) {
        String outputFileName = outputsMap.get(analysisOutputName).getFileName();
        Dataset outputDataset = galaxyHistoriesService.getDatasetForFileInHistory(outputFileName, analysisId);
        AnalysisOutputFile analysisOutput = buildOutputFile(analysisId, labelPrefix, outputDataset, outputDirectory);
        analysisOutputFiles.put(analysisOutputName, analysisOutput);
    }

    AnalysisType analysisType = iridaWorkflow.getWorkflowDescription().getAnalysisType();
    return new Analysis(analysisId, analysisOutputFiles, analysisType);
}
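
A condensed caller-side view, lifted from the integration test earlier on this page: before getAnalysisResults(...) is invoked, the submission must already point at the remote Galaxy history and workflow, which is why the method checks remoteWorkflowId and reads remoteAnalysisId above. The helper below is a hypothetical sketch of that flow; the AnalysisSubmissionRepository parameter type is assumed from the field name used in the test, the collaborators are passed in explicitly rather than injected, and imports are omitted.

// Hypothetical helper condensing the flow from the integration test above; not IRIDA code.
public static Analysis collectResults(AnalysisSubmission analysisSubmission, String galaxyHistoryId,
        String galaxyWorkflowId, AnalysisSubmissionRepository analysisSubmissionRepository,
        AnalysisWorkspaceServiceGalaxy analysisWorkspaceService) throws ExecutionManagerException,
        IridaWorkflowNotFoundException, IOException, IridaWorkflowAnalysisTypeException {
    // point the submission at the Galaxy history holding the outputs and the imported workflow
    analysisSubmission.setRemoteAnalysisId(galaxyHistoryId);
    analysisSubmission.setRemoteWorkflowId(galaxyWorkflowId);
    analysisSubmission.setAnalysisState(AnalysisState.COMPLETING);
    analysisSubmissionRepository.save(analysisSubmission);

    // build the Analysis, with output files keyed by workflow output name
    return analysisWorkspaceService.getAnalysisResults(analysisSubmission);
}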
Use of ca.corefacility.bioinformatics.irida.model.workflow.analysis.Analysis in project irida by phac-nml.
The class AnalysisSubmissionSampleProcessorImpl, method updateSamples:
/**
 * {@inheritDoc}
 */
@Override
@RunAsUser("#analysisSubmission.getSubmitter()")
@Transactional(propagation = Propagation.REQUIRES_NEW)
@PreAuthorize("hasPermission(#analysisSubmission, 'canUpdateSamplesFromAnalysisSubmission')")
public void updateSamples(AnalysisSubmission analysisSubmission) throws PostProcessingException {
    if (!analysisSubmission.getUpdateSamples()) {
        logger.trace("Will not update samples from results for submission=" + analysisSubmission);
    } else {
        logger.debug("Updating sample from results for submission=" + analysisSubmission);

        Set<Sample> samples = sampleRepository.findSamplesForAnalysisSubmission(analysisSubmission);
        Analysis analysis = analysisSubmission.getAnalysis();
        checkNotNull(analysis, "No analysis associated with submission " + analysisSubmission);
        checkNotNull(samples, "No samples associated with submission " + analysisSubmission);

        AnalysisSampleUpdater analysisSampleUpdaterService = analysisSampleUpdaterMap.get(analysis.getAnalysisType());
        if (analysisSampleUpdaterService != null) {
            // re-reading submission to ensure file paths are correct
            analysisSubmission = analysisSubmissionService.read(analysisSubmission.getId());
            analysisSampleUpdaterService.update(samples, analysisSubmission);
        } else {
            logger.debug("No associated object for updating samples for analysis of type " + analysis.getAnalysisType());
        }
    }
}
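
The inner branch above dispatches to whichever AnalysisSampleUpdater is registered for the submission's AnalysisType. The skeleton below sketches what such an updater's entry point might look like, inferred only from the update(samples, analysisSubmission) call above; the real interface may declare a different parameter type or additional methods, the class name is hypothetical, and imports are omitted.

// Hypothetical updater skeleton; the signature is inferred from the call site above and is
// not copied from IRIDA's AnalysisSampleUpdater interface.
public class ExampleSampleUpdater {

    public void update(Set<Sample> samples, AnalysisSubmission analysisSubmission) throws PostProcessingException {
        Analysis analysis = analysisSubmission.getAnalysis();
        checkNotNull(analysis, "No analysis associated with submission " + analysisSubmission);
        for (Sample sample : samples) {
            // read values from analysis.getAnalysisOutputFiles() and write derived
            // results back onto this sample here
        }
    }
}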