Use of ca.corefacility.bioinformatics.irida.model.workflow.submission.AnalysisSubmission in project irida by phac-nml.
From the class AnalysisExecutionServiceGalaxyIT, method testTransferAnalysisResultsSuccessPhylogenomicsPairedNoParameters.
/**
* Tests out getting analysis results successfully for phylogenomics
* pipeline (paired test version with no parameters, using defaults).
*
* @throws Exception
*/
@Test
@WithMockUser(username = "aaron", roles = "ADMIN")
public void testTransferAnalysisResultsSuccessPhylogenomicsPairedNoParameters() throws Exception {
String validCoverageFromProvenance = "\"10\"";
String validMidCoverageFromProvenance = "10";
// I verify parameters were set correctly by checking output file (where parameters were printed).
String validTreeFile = "10 10 10";
AnalysisSubmission analysisSubmission = analysisExecutionGalaxyITService.setupPairSubmissionInDatabase(1L, pairedPaths1, pairedPaths2, referenceFilePath, iridaPhylogenomicsPairedParametersWorkflowId, false);
Future<AnalysisSubmission> analysisSubmittedFuture = analysisExecutionService.prepareSubmission(analysisSubmission);
AnalysisSubmission analysisSubmitted = analysisSubmittedFuture.get();
Future<AnalysisSubmission> analysisExecutionFuture = analysisExecutionService.executeAnalysis(analysisSubmitted);
AnalysisSubmission analysisExecuted = analysisExecutionFuture.get();
analysisExecutionGalaxyITService.waitUntilSubmissionComplete(analysisExecuted);
analysisExecuted.setAnalysisState(AnalysisState.FINISHED_RUNNING);
Future<AnalysisSubmission> analysisSubmissionCompletedFuture = analysisExecutionService.transferAnalysisResults(analysisExecuted);
analysisSubmissionCompletedFuture.get();
AnalysisSubmission analysisSubmissionCompletedDatabase = analysisSubmissionService.read(analysisSubmission.getId());
assertEquals("analysis state is not completed", AnalysisState.COMPLETED, analysisSubmissionCompletedDatabase.getAnalysisState());
Analysis analysisResults = analysisSubmissionCompletedDatabase.getAnalysis();
assertEquals("analysis results is an invalid class", AnalysisType.PHYLOGENOMICS, analysisResults.getAnalysisType());
assertEquals("invalid number of output files", 3, analysisResults.getAnalysisOutputFiles().size());
AnalysisOutputFile phylogeneticTree = analysisResults.getAnalysisOutputFile(TREE_KEY);
AnalysisOutputFile snpMatrix = analysisResults.getAnalysisOutputFile(MATRIX_KEY);
AnalysisOutputFile snpTable = analysisResults.getAnalysisOutputFile(TABLE_KEY);
// verify parameters were set properly by checking contents of file
@SuppressWarnings("resource") String treeContent = new Scanner(phylogeneticTree.getFile().toFile()).useDelimiter("\\Z").next();
assertEquals("phylogenetic trees containing the parameters should be equal", validTreeFile, treeContent);
// phy tree
final ToolExecution phyTreeCoreInputs = phylogeneticTree.getCreatedByTool();
assertEquals("The first tool execution should be by core_pipeline_outputs_paired_with_parameters v0.1.0", "core_pipeline_outputs_paired_with_parameters", phyTreeCoreInputs.getToolName());
assertEquals("The first tool execution should be by core_pipeline_outputs_paired_with_parameters v0.1.0", "0.1.0", phyTreeCoreInputs.getToolVersion());
Map<String, String> phyTreeCoreParameters = phyTreeCoreInputs.getExecutionTimeParameters();
assertEquals("incorrect number of non-file parameters", 4, phyTreeCoreParameters.size());
assertEquals("parameter coverageMin set incorrectly", validCoverageFromProvenance, phyTreeCoreParameters.get("coverageMin"));
assertEquals("parameter coverageMid set incorrectly", validMidCoverageFromProvenance, phyTreeCoreParameters.get("conditional.coverageMid"));
assertEquals("parameter coverageMax set incorrectly", validCoverageFromProvenance, phyTreeCoreParameters.get("coverageMax"));
assertEquals("parameter conditional_select set incorrectly", "all", phyTreeCoreParameters.get("conditional.conditional_select"));
Set<ToolExecution> phyTreeCorePreviousSteps = phyTreeCoreInputs.getPreviousSteps();
assertEquals("there should exist 2 previous steps", 2, phyTreeCorePreviousSteps.size());
Set<String> uploadedFileTypesPhy = Sets.newHashSet();
for (ToolExecution previousStep : phyTreeCorePreviousSteps) {
assertTrue("previous steps should be input tools.", previousStep.isInputTool());
uploadedFileTypesPhy.add(previousStep.getExecutionTimeParameters().get("file_type"));
}
assertEquals("uploaded files should have correct types", Sets.newHashSet("\"fastqsanger\"", "\"fasta\""), uploadedFileTypesPhy);
// snp matrix
final ToolExecution matrixCoreInputs = snpMatrix.getCreatedByTool();
assertEquals("The first tool execution should be by core_pipeline_outputs_paired_with_parameters v0.1.0", "core_pipeline_outputs_paired_with_parameters", matrixCoreInputs.getToolName());
assertEquals("The first tool execution should be by core_pipeline_outputs_paired_with_parameters v0.1.0", "0.1.0", matrixCoreInputs.getToolVersion());
Map<String, String> matrixCoreParameters = matrixCoreInputs.getExecutionTimeParameters();
assertEquals("incorrect number of non-file parameters", 4, matrixCoreParameters.size());
assertEquals("parameter coverageMin set incorrectly", validCoverageFromProvenance, matrixCoreParameters.get("coverageMin"));
assertEquals("parameter coverageMid set incorrectly", validMidCoverageFromProvenance, phyTreeCoreParameters.get("conditional.coverageMid"));
assertEquals("parameter coverageMax set incorrectly", validCoverageFromProvenance, matrixCoreParameters.get("coverageMax"));
assertEquals("parameter conditional_select set incorrectly", "all", phyTreeCoreParameters.get("conditional.conditional_select"));
Set<ToolExecution> matrixCorePreviousSteps = matrixCoreInputs.getPreviousSteps();
assertEquals("there should exist 2 previous steps", 2, matrixCorePreviousSteps.size());
Set<String> uploadedFileTypesMatrix = Sets.newHashSet();
for (ToolExecution previousStep : matrixCorePreviousSteps) {
assertTrue("previous steps should be input tools.", previousStep.isInputTool());
uploadedFileTypesMatrix.add(previousStep.getExecutionTimeParameters().get("file_type"));
}
assertEquals("uploaded files should have correct types", Sets.newHashSet("\"fastqsanger\"", "\"fasta\""), uploadedFileTypesMatrix);
// snp table
final ToolExecution tableCoreInputs = snpTable.getCreatedByTool();
assertEquals("The first tool execution should be by core_pipeline_outputs_paired_with_parameters v0.1.0", "core_pipeline_outputs_paired_with_parameters", tableCoreInputs.getToolName());
assertEquals("The first tool execution should be by core_pipeline_outputs_paired_with_parameters v0.1.0", "0.1.0", tableCoreInputs.getToolVersion());
Map<String, String> tableCoreParameters = tableCoreInputs.getExecutionTimeParameters();
assertEquals("incorrect number of non-file parameters", 4, tableCoreParameters.size());
assertEquals("parameter coverageMin set incorrectly", validCoverageFromProvenance, tableCoreParameters.get("coverageMin"));
assertEquals("parameter coverageMid set incorrectly", validMidCoverageFromProvenance, phyTreeCoreParameters.get("conditional.coverageMid"));
assertEquals("parameter coverageMax set incorrectly", validCoverageFromProvenance, tableCoreParameters.get("coverageMax"));
assertEquals("parameter conditional_select set incorrectly", "all", phyTreeCoreParameters.get("conditional.conditional_select"));
Set<ToolExecution> tablePreviousSteps = tableCoreInputs.getPreviousSteps();
assertEquals("there should exist 2 previous steps", 2, tablePreviousSteps.size());
Set<String> uploadedFileTypesTable = Sets.newHashSet();
for (ToolExecution previousStep : tablePreviousSteps) {
assertTrue("previous steps should be input tools.", previousStep.isInputTool());
uploadedFileTypesTable.add(previousStep.getExecutionTimeParameters().get("file_type"));
}
assertEquals("uploaded files should have correct types", Sets.newHashSet("\"fastqsanger\"", "\"fasta\""), uploadedFileTypesTable);
}
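The provenance checks above are repeated verbatim for the tree, matrix and table outputs. Purely as an illustrative sketch (this helper does not exist in the test class), the shared assertions could be factored into a private method built only from the types already used above:

// Hypothetical helper, not part of the IRIDA test: verifies the provenance of one
// output file exactly as the tree/matrix/table blocks above do.
private void assertCoreOutputProvenance(AnalysisOutputFile outputFile, String expectedCoverage,
        String expectedMidCoverage) {
    ToolExecution createdBy = outputFile.getCreatedByTool();
    assertEquals("unexpected tool name", "core_pipeline_outputs_paired_with_parameters", createdBy.getToolName());
    assertEquals("unexpected tool version", "0.1.0", createdBy.getToolVersion());
    Map<String, String> parameters = createdBy.getExecutionTimeParameters();
    assertEquals("incorrect number of non-file parameters", 4, parameters.size());
    assertEquals("parameter coverageMin set incorrectly", expectedCoverage, parameters.get("coverageMin"));
    assertEquals("parameter coverageMid set incorrectly", expectedMidCoverage, parameters.get("conditional.coverageMid"));
    assertEquals("parameter coverageMax set incorrectly", expectedCoverage, parameters.get("coverageMax"));
    assertEquals("parameter conditional_select set incorrectly", "all", parameters.get("conditional.conditional_select"));
    Set<ToolExecution> previousSteps = createdBy.getPreviousSteps();
    assertEquals("there should exist 2 previous steps", 2, previousSteps.size());
    Set<String> uploadedFileTypes = Sets.newHashSet();
    for (ToolExecution previousStep : previousSteps) {
        assertTrue("previous steps should be input tools", previousStep.isInputTool());
        uploadedFileTypes.add(previousStep.getExecutionTimeParameters().get("file_type"));
    }
    assertEquals("uploaded files should have correct types", Sets.newHashSet("\"fastqsanger\"", "\"fasta\""), uploadedFileTypes);
}

Calling the helper once per AnalysisOutputFile would replace the three duplicated blocks without changing what is asserted.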
Use of ca.corefacility.bioinformatics.irida.model.workflow.submission.AnalysisSubmission in project irida by phac-nml.
From the class AnalysisExecutionServiceGalaxyIT, method testCleanupErrorAnalysisFailGalaxy.
/**
* Tests out cleaning up an analysis in error and failing due to an error in
* Galaxy.
*
* @throws Throwable
*/
@Test(expected = ExecutionManagerException.class)
@WithMockUser(username = "aaron", roles = "ADMIN")
public void testCleanupErrorAnalysisFailGalaxy() throws Throwable {
AnalysisSubmission analysisSubmission = analysisExecutionGalaxyITService.setupSubmissionInDatabase(1L, sequenceFilePath, referenceFilePath, iridaTestAnalysisWorkflowId, AnalysisState.NEW, false);
Future<AnalysisSubmission> analysisSubmittedFuture = analysisExecutionService.prepareSubmission(analysisSubmission);
AnalysisSubmission analysisSubmitted = analysisSubmittedFuture.get();
analysisSubmitted.setAnalysisState(AnalysisState.ERROR);
analysisSubmitted.setRemoteWorkflowId("invalid");
analysisSubmissionRepository.save(analysisSubmitted);
// Once analysis is complete, attempt to clean up
Future<AnalysisSubmission> analysisSubmissionCleanedFuture = analysisExecutionService.cleanupSubmission(analysisSubmitted);
try {
analysisSubmissionCleanedFuture.get();
fail("No exception thrown");
} catch (ExecutionException e) {
assertEquals("The AnalysisState was changed from ERROR", AnalysisState.ERROR, analysisSubmissionService.read(analysisSubmission.getId()).getAnalysisState());
assertEquals("The AnalysisCleanedState was not changed to error", AnalysisCleanedState.CLEANING_ERROR, analysisSubmissionService.read(analysisSubmission.getId()).getAnalysisCleanedState());
// pull out real exception
throw e.getCause();
}
}
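The catch block above asserts the persisted states and then rethrows e.getCause() so that @Test(expected = ExecutionManagerException.class) matches the real failure rather than the wrapping ExecutionException from Future.get(). As a hypothetical sketch only (no such helper exists in IRIDA), the pattern could be captured once:

// Hypothetical helper, not part of the IRIDA test: waits on the future, asserts the
// persisted submission states, then rethrows the underlying cause so @Test(expected = ...)
// can match the real exception type.
private void assertFailsAndLeavesStates(Future<AnalysisSubmission> future, Long submissionId,
        AnalysisState expectedState, AnalysisCleanedState expectedCleanedState) throws Throwable {
    try {
        future.get();
        fail("No exception thrown");
    } catch (ExecutionException e) {
        AnalysisSubmission saved = analysisSubmissionService.read(submissionId);
        assertEquals("unexpected AnalysisState", expectedState, saved.getAnalysisState());
        assertEquals("unexpected AnalysisCleanedState", expectedCleanedState, saved.getAnalysisCleanedState());
        throw e.getCause();
    }
}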
Use of ca.corefacility.bioinformatics.irida.model.workflow.submission.AnalysisSubmission in project irida by phac-nml.
From the class AnalysisExecutionServiceGalaxyIT, method testExecuteAnalysisFailRemoteWorkflowId.
/**
* Tests out attempting to execute an analysis with an invalid remote
* workflow id.
*
* @throws Throwable
*/
@Test(expected = WorkflowException.class)
@WithMockUser(username = "aaron", roles = "ADMIN")
public void testExecuteAnalysisFailRemoteWorkflowId() throws Throwable {
AnalysisSubmission analysisSubmission = analysisExecutionGalaxyITService.setupSubmissionInDatabase(1L, sequenceFilePath, referenceFilePath, validIridaWorkflowId, false);
Future<AnalysisSubmission> analysisSubmittedFuture = analysisExecutionService.prepareSubmission(analysisSubmission);
AnalysisSubmission analysisSubmitted = analysisSubmittedFuture.get();
analysisSubmitted.setRemoteWorkflowId(localGalaxy.getInvalidWorkflowId());
analysisSubmissionService.update(analysisSubmitted);
Future<AnalysisSubmission> analysisExecutedFuture = analysisExecutionService.executeAnalysis(analysisSubmitted);
try {
analysisExecutedFuture.get();
} catch (ExecutionException e) {
// check to make sure the submission is in the error state
AnalysisSubmission savedSubmission = analysisSubmissionRepository.findOne(analysisSubmitted.getId());
assertEquals(AnalysisState.ERROR, savedSubmission.getAnalysisState());
throw e.getCause();
}
}
Use of ca.corefacility.bioinformatics.irida.model.workflow.submission.AnalysisSubmission in project irida by phac-nml.
From the class AnalysisExecutionServiceGalaxyIT, method testCleanupCleanedAnalysisError.
/**
* Tests out cleaning up an analysis that's already been cleaned and
* failing.
*
* @throws Exception
*/
@Test(expected = IllegalArgumentException.class)
@WithMockUser(username = "aaron", roles = "ADMIN")
public void testCleanupCleanedAnalysisError() throws Exception {
AnalysisSubmission analysisSubmission = analysisExecutionGalaxyITService.setupSubmissionInDatabase(1L, sequenceFilePath, referenceFilePath, iridaTestAnalysisWorkflowId, false);
Future<AnalysisSubmission> analysisSubmittedFuture = analysisExecutionService.prepareSubmission(analysisSubmission);
AnalysisSubmission analysisSubmitted = analysisSubmittedFuture.get();
analysisSubmitted.setAnalysisState(AnalysisState.ERROR);
analysisSubmissionRepository.save(analysisSubmitted);
Future<AnalysisSubmission> analysisSubmissionCleanedFuture = analysisExecutionService.cleanupSubmission(analysisSubmitted);
AnalysisSubmission cleanedSubmission = analysisSubmissionCleanedFuture.get();
analysisExecutionService.cleanupSubmission(cleanedSubmission);
}
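The second cleanupSubmission call above is expected to fail with IllegalArgumentException because the submission has already been cleaned. Purely as an illustration of that precondition, and not IRIDA's actual implementation (the CLEANED enum value is assumed here), a guard of roughly this shape would produce the expected exception:

// Hypothetical guard, not IRIDA's code: reject cleaning a submission that is already CLEANED.
private static void checkNotAlreadyCleaned(AnalysisSubmission submission) {
    if (AnalysisCleanedState.CLEANED.equals(submission.getAnalysisCleanedState())) {
        throw new IllegalArgumentException(
                "Submission " + submission.getId() + " has already been cleaned");
    }
}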
Use of ca.corefacility.bioinformatics.irida.model.workflow.submission.AnalysisSubmission in project irida by phac-nml.
From the class AnalysisExecutionServiceGalaxyIT, method testTransferAnalysisResultsFailTestAnalysisMissingOutput.
/**
* Tests failure to get analysis results due to a missing output file.
*
* @throws Throwable
*/
@Test(expected = GalaxyDatasetNotFoundException.class)
@WithMockUser(username = "aaron", roles = "ADMIN")
public void testTransferAnalysisResultsFailTestAnalysisMissingOutput() throws Throwable {
AnalysisSubmission analysisSubmission = analysisExecutionGalaxyITService.setupSubmissionInDatabase(1L, sequenceFilePath, referenceFilePath, iridaTestAnalysisWorkflowIdMissingOutput, false);
Future<AnalysisSubmission> analysisSubmittedFuture = analysisExecutionService.prepareSubmission(analysisSubmission);
AnalysisSubmission analysisSubmitted = analysisSubmittedFuture.get();
Future<AnalysisSubmission> analysisExecutionFuture = analysisExecutionService.executeAnalysis(analysisSubmitted);
AnalysisSubmission analysisExecuted = analysisExecutionFuture.get();
analysisExecutionGalaxyITService.waitUntilSubmissionComplete(analysisExecuted);
analysisExecuted.setAnalysisState(AnalysisState.FINISHED_RUNNING);
Future<AnalysisSubmission> analysisSubmissionCompletedFuture = analysisExecutionService.transferAnalysisResults(analysisExecuted);
try {
analysisSubmissionCompletedFuture.get();
} catch (ExecutionException e) {
logger.debug("Submission on exception=" + analysisSubmissionService.read(analysisSubmission.getId()));
assertEquals(AnalysisState.ERROR, analysisSubmissionService.read(analysisSubmission.getId()).getAnalysisState());
// pull out real exception
throw e.getCause();
}
}
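The idiom above, @Test(expected = ...) combined with rethrowing e.getCause(), is JUnit 4 style. As an alternative sketch only, assuming a JUnit version that provides org.junit.Assert.assertThrows (4.13 or later, which this project may not be using), the same check could be written without rethrowing:

// Alternative sketch, assumes assertThrows is available; variable names reuse those from the test above.
ExecutionException e = assertThrows(ExecutionException.class, () -> analysisSubmissionCompletedFuture.get());
assertEquals(AnalysisState.ERROR, analysisSubmissionService.read(analysisSubmission.getId()).getAnalysisState());
assertTrue("cause should be a missing Galaxy dataset", e.getCause() instanceof GalaxyDatasetNotFoundException);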