Use of ca.corefacility.bioinformatics.irida.model.workflow.execution.galaxy.PreparedWorkflowGalaxy in project irida by phac-nml.
From the class AnalysisExecutionServiceGalaxyTest, method setup().
/**
 * Setup variables for tests.
 *
 * <p>Builds one {@link AnalysisSubmission} per analysis lifecycle state exercised by the
 * tests, wires the service under test against the mocked collaborators, and stubs the
 * common mock interactions.</p>
 *
 * @throws IridaWorkflowNotFoundException
 * @throws IOException
 * @throws ExecutionManagerException
 * @throws NoSuchValueException
 * @throws IridaWorkflowAnalysisTypeException
 */
@Before
public void setup() throws IridaWorkflowNotFoundException, IOException, ExecutionManagerException,
		NoSuchValueException, IridaWorkflowAnalysisTypeException, AnalysisAlreadySetException {
	MockitoAnnotations.initMocks(this);

	String submissionName = "name";
	Set<SequencingObject> submissionInputFiles = Sets.newHashSet(new SingleEndSequenceFile(new SequenceFile()));

	// One submission per lifecycle state. NOTE(review): the "intial" spelling is kept
	// as-is since it is only a label; confirm no test asserts on it before correcting.
	analysisSubmission = buildSubmission(submissionName + "intial", submissionInputFiles);
	analysisPreparing = buildSubmission(submissionName + "preparing", submissionInputFiles);
	analysisPrepared = buildSubmission(submissionName + "prepared", submissionInputFiles);
	analysisSubmitting = buildSubmission(submissionName + "submitting", submissionInputFiles);
	analysisRunning = buildSubmission(submissionName + "running", submissionInputFiles);
	analysisFinishedRunning = buildSubmission(submissionName + "finishedrunning", submissionInputFiles);
	analysisCompleting = buildSubmission(submissionName + "completing", submissionInputFiles);
	analysisCompleted = buildSubmission(submissionName + "completed", submissionInputFiles);
	analysisCompletedCleaning = buildSubmission(submissionName + "cleaning", submissionInputFiles);
	analysisCompletedCleaned = buildSubmission(submissionName + "cleaned", submissionInputFiles);
	analysisError = buildSubmission(submissionName + "error", submissionInputFiles);
	analysisErrorCleaning = buildSubmission(submissionName + "errorcleaning", submissionInputFiles);
	analysisErrorCleaned = buildSubmission(submissionName + "errorcleaned", submissionInputFiles);

	// Construct the service under test from the mocked collaborators.
	AnalysisExecutionServiceGalaxyAsync workflowManagementAsync = new AnalysisExecutionServiceGalaxyAsync(
			analysisSubmissionService, analysisService, galaxyWorkflowService, analysisWorkspaceService,
			iridaWorkflowsService, analysisSubmissionSampleProcessor);
	AnalysisExecutionServiceGalaxyCleanupAsync analysisExecutionServiceGalaxyCleanupAsync = new AnalysisExecutionServiceGalaxyCleanupAsync(
			analysisSubmissionService, galaxyWorkflowService, galaxyHistoriesService, galaxyLibrariesService);
	workflowManagement = new AnalysisExecutionServiceGalaxy(analysisSubmissionService, galaxyHistoriesService,
			workflowManagementAsync, analysisExecutionServiceGalaxyCleanupAsync);

	// Common stubbings shared by most tests.
	when(iridaWorkflowsService.getIridaWorkflow(WORKFLOW_ID)).thenReturn(iridaWorkflow);
	when(iridaWorkflow.getWorkflowStructure()).thenReturn(iridaWorkflowStructure);
	when(iridaWorkflowStructure.getWorkflowFile()).thenReturn(workflowFile);
	when(analysisSubmissionService.create(analysisSubmission)).thenReturn(analysisSubmission);
	when(analysisSubmissionService.read(INTERNAL_ANALYSIS_ID)).thenReturn(analysisSubmission);

	analysisSubmission.setId(INTERNAL_ANALYSIS_ID);

	analysisPreparing.setId(INTERNAL_ANALYSIS_ID);
	analysisPreparing.setAnalysisState(AnalysisState.PREPARING);
	when(analysisWorkspaceService.prepareAnalysisWorkspace(analysisPreparing)).thenReturn(ANALYSIS_ID);

	analysisPrepared.setId(INTERNAL_ANALYSIS_ID);
	analysisPrepared.setAnalysisState(AnalysisState.PREPARED);
	analysisPrepared.setRemoteWorkflowId(REMOTE_WORKFLOW_ID);
	analysisPrepared.setRemoteAnalysisId(ANALYSIS_ID);

	analysisSubmitting.setId(INTERNAL_ANALYSIS_ID);
	analysisSubmitting.setAnalysisState(AnalysisState.SUBMITTING);
	analysisSubmitting.setRemoteWorkflowId(REMOTE_WORKFLOW_ID);
	analysisSubmitting.setRemoteAnalysisId(ANALYSIS_ID);

	analysisRunning.setId(INTERNAL_ANALYSIS_ID);
	analysisRunning.setAnalysisState(AnalysisState.RUNNING);
	analysisRunning.setRemoteWorkflowId(REMOTE_WORKFLOW_ID);
	analysisRunning.setRemoteAnalysisId(ANALYSIS_ID);
	analysisRunning.setRemoteInputDataId(LIBRARY_ID);

	analysisFinishedRunning.setId(INTERNAL_ANALYSIS_ID);
	analysisFinishedRunning.setAnalysisState(AnalysisState.FINISHED_RUNNING);
	analysisFinishedRunning.setRemoteWorkflowId(REMOTE_WORKFLOW_ID);
	analysisFinishedRunning.setRemoteAnalysisId(ANALYSIS_ID);
	analysisFinishedRunning.setRemoteInputDataId(LIBRARY_ID);

	analysisCompleting.setId(INTERNAL_ANALYSIS_ID);
	analysisCompleting.setAnalysisState(AnalysisState.COMPLETING);
	analysisCompleting.setRemoteWorkflowId(REMOTE_WORKFLOW_ID);
	analysisCompleting.setRemoteAnalysisId(ANALYSIS_ID);
	analysisCompleting.setRemoteInputDataId(LIBRARY_ID);
	when(analysisWorkspaceService.getAnalysisResults(analysisCompleting)).thenReturn(analysisResults);
	when(analysisService.create(analysisResults)).thenReturn(analysisResults);

	analysisCompleted.setId(INTERNAL_ANALYSIS_ID);
	analysisCompleted.setAnalysisState(AnalysisState.COMPLETED);
	analysisCompleted.setRemoteWorkflowId(REMOTE_WORKFLOW_ID);
	analysisCompleted.setRemoteAnalysisId(ANALYSIS_ID);
	analysisCompleted.setAnalysisCleanedState(AnalysisCleanedState.NOT_CLEANED);
	analysisCompleted.setAnalysis(analysisResults);
	analysisCompleted.setRemoteInputDataId(LIBRARY_ID);
	analysisCompleted.setUpdateSamples(true);

	analysisCompletedCleaning.setId(INTERNAL_ANALYSIS_ID);
	analysisCompletedCleaning.setAnalysisState(AnalysisState.COMPLETED);
	analysisCompletedCleaning.setRemoteWorkflowId(REMOTE_WORKFLOW_ID);
	analysisCompletedCleaning.setRemoteAnalysisId(ANALYSIS_ID);
	analysisCompletedCleaning.setAnalysisCleanedState(AnalysisCleanedState.CLEANING);
	analysisCompletedCleaning.setAnalysis(analysisResults);
	analysisCompletedCleaning.setRemoteInputDataId(LIBRARY_ID);

	analysisCompletedCleaned.setId(INTERNAL_ANALYSIS_ID);
	analysisCompletedCleaned.setAnalysisState(AnalysisState.COMPLETED);
	analysisCompletedCleaned.setRemoteWorkflowId(REMOTE_WORKFLOW_ID);
	analysisCompletedCleaned.setRemoteAnalysisId(ANALYSIS_ID);
	analysisCompletedCleaned.setAnalysisCleanedState(AnalysisCleanedState.CLEANED);
	analysisCompletedCleaned.setAnalysis(analysisResults);
	analysisCompletedCleaned.setRemoteInputDataId(LIBRARY_ID);

	analysisError.setId(INTERNAL_ANALYSIS_ID);
	analysisError.setAnalysisState(AnalysisState.ERROR);
	analysisError.setRemoteWorkflowId(REMOTE_WORKFLOW_ID);
	analysisError.setAnalysisCleanedState(AnalysisCleanedState.NOT_CLEANED);

	analysisErrorCleaning.setId(INTERNAL_ANALYSIS_ID);
	analysisErrorCleaning.setAnalysisState(AnalysisState.ERROR);
	analysisErrorCleaning.setRemoteWorkflowId(REMOTE_WORKFLOW_ID);
	analysisErrorCleaning.setAnalysisCleanedState(AnalysisCleanedState.CLEANING);

	analysisErrorCleaned.setId(INTERNAL_ANALYSIS_ID);
	analysisErrorCleaned.setAnalysisState(AnalysisState.ERROR);
	analysisErrorCleaned.setRemoteWorkflowId(REMOTE_WORKFLOW_ID);
	analysisErrorCleaned.setAnalysisCleanedState(AnalysisCleanedState.CLEANED);

	// (A redundant duplicate of analysisPrepared.setRemoteAnalysisId(ANALYSIS_ID) was removed;
	// it is already set above.)
	preparedWorkflow = new PreparedWorkflowGalaxy(ANALYSIS_ID, LIBRARY_ID, workflowInputsGalaxy);

	when(galaxyWorkflowService.uploadGalaxyWorkflow(workflowFile)).thenReturn(REMOTE_WORKFLOW_ID);
	when(galaxyHistoriesService.deleteHistory(ANALYSIS_ID)).thenReturn(new HistoryDeleteResponse());
}

/**
 * Builds an {@link AnalysisSubmission} for {@code WORKFLOW_ID} with the given name and input files.
 *
 * @param name       the name to give the submission
 * @param inputFiles the sequencing objects to attach as input
 * @return the built submission
 */
private AnalysisSubmission buildSubmission(String name, Set<SequencingObject> inputFiles) {
	return AnalysisSubmission.builder(WORKFLOW_ID).name(name).inputFiles(inputFiles).build();
}
Use of ca.corefacility.bioinformatics.irida.model.workflow.execution.galaxy.PreparedWorkflowGalaxy in project irida by phac-nml.
From the class AnalysisWorkspaceServiceGalaxyIT, method testPrepareAnalysisFilesSinglePairSuccess().
/**
 * Tests out successfully preparing paired and single workflow input files
 * for execution.
 *
 * @throws InterruptedException
 * @throws ExecutionManagerException
 * @throws IOException
 * @throws IridaWorkflowException
 */
@Test
@WithMockUser(username = "aaron", roles = "ADMIN")
public void testPrepareAnalysisFilesSinglePairSuccess()
		throws InterruptedException, ExecutionManagerException, IOException, IridaWorkflowException {
	HistoriesClient histories = localGalaxy.getGalaxyInstanceAdmin().getHistoriesClient();
	WorkflowsClient workflows = localGalaxy.getGalaxyInstanceAdmin().getWorkflowsClient();

	// Create a fresh remote history to upload into.
	History newHistory = new History();
	newHistory.setName("testPrepareAnalysisFilesPairSuccess");
	History remoteHistory = histories.create(newHistory);

	// Import the single/paired workflow definition into Galaxy.
	IridaWorkflow iridaWorkflow = iridaWorkflowsService.getIridaWorkflow(validWorkflowIdSinglePaired);
	Path definitionPath = iridaWorkflow.getWorkflowStructure().getWorkflowFile();
	String definition = new String(Files.readAllBytes(definitionPath), StandardCharsets.UTF_8);
	Workflow remoteWorkflow = workflows.importWorkflow(definition);

	// Persist a submission with both single and paired inputs, pointed at the remote objects.
	AnalysisSubmission submission = analysisExecutionGalaxyITService
			.setupSinglePairSubmissionInDatabaseDifferentSample(1L, 2L, pairSequenceFiles1A, pairSequenceFiles2A,
					sequenceFilePath3, referenceFilePath, validWorkflowIdSinglePaired);
	submission.setRemoteAnalysisId(remoteHistory.getId());
	submission.setRemoteWorkflowId(remoteWorkflow.getId());

	PreparedWorkflowGalaxy prepared = analysisWorkspaceService.prepareAnalysisFiles(submission);
	assertEquals("the response history id should match the input history id", remoteHistory.getId(),
			prepared.getRemoteAnalysisId());
	assertNotNull("the returned workflow inputs should not be null", prepared.getWorkflowInputs());

	// verify correct files have been uploaded
	List<HistoryContents> uploaded = histories.showHistoryContents(remoteHistory.getId());
	assertEquals("the created history has an invalid number of elements", 6, uploaded.size());
	Map<String, HistoryContents> uploadedByName = historyContentsAsMap(uploaded);
	assertTrue("the created history should contain the file " + sequenceFilePathA.toFile().getName(),
			uploadedByName.containsKey(sequenceFilePathA.toFile().getName()));
	assertTrue("the created history should contain the file " + sequenceFilePath2A.toFile().getName(),
			uploadedByName.containsKey(sequenceFilePath2A.toFile().getName()));
	assertTrue("the created history should contain the file " + sequenceFilePath3.toFile().getName(),
			uploadedByName.containsKey(sequenceFilePath3.toFile().getName()));
	assertTrue("the created history should contain the file " + referenceFilePath.toFile().getName(),
			uploadedByName.containsKey(referenceFilePath.toFile().getName()));
	assertTrue("the created history should contain a dataset collection with the name " + INPUTS_SINGLE_NAME,
			uploadedByName.containsKey(INPUTS_SINGLE_NAME));
	assertTrue("the created history should contain a dataset collection with the name " + INPUTS_PAIRED_NAME,
			uploadedByName.containsKey(INPUTS_PAIRED_NAME));

	// make sure workflow inputs contains correct information
	Map<String, WorkflowInput> inputsById = prepared.getWorkflowInputs().getInputsObject().getInputs();
	assertEquals("the created workflow inputs has an invalid number of elements", 3, inputsById.size());
}
Use of ca.corefacility.bioinformatics.irida.model.workflow.execution.galaxy.PreparedWorkflowGalaxy in project irida by phac-nml.
From the class AnalysisWorkspaceServiceGalaxyIT, method testPrepareAnalysisFilesParametersSuccess().
/**
 * Tests out successfully preparing paired workflow input files for
 * execution with parameters.
 *
 * <p>Verifies that the user-supplied {@code coverage} parameter is propagated to all three
 * tool parameters it maps to: {@code coverageMin}, the conditional {@code coverageMid}, and
 * {@code coverageMax}.</p>
 *
 * @throws InterruptedException
 * @throws ExecutionManagerException
 * @throws IOException
 * @throws IridaWorkflowException
 */
@Test
@WithMockUser(username = "aaron", roles = "ADMIN")
public void testPrepareAnalysisFilesParametersSuccess()
		throws InterruptedException, ExecutionManagerException, IOException, IridaWorkflowException {
	History history = new History();
	history.setName("testPrepareAnalysisFilesParametersSuccess");
	HistoriesClient historiesClient = localGalaxy.getGalaxyInstanceAdmin().getHistoriesClient();
	WorkflowsClient workflowsClient = localGalaxy.getGalaxyInstanceAdmin().getWorkflowsClient();
	History createdHistory = historiesClient.create(history);

	// Import the parameterized workflow definition into Galaxy.
	IridaWorkflow iridaWorkflow = iridaWorkflowsService.getIridaWorkflow(validWorkflowIdPairedWithParameters);
	Path workflowPath = iridaWorkflow.getWorkflowStructure().getWorkflowFile();
	String workflowString = new String(Files.readAllBytes(workflowPath), StandardCharsets.UTF_8);
	Workflow galaxyWorkflow = workflowsClient.importWorkflow(workflowString);

	Map<String, String> parameters = ImmutableMap.of("coverage", "20");
	AnalysisSubmission analysisSubmission = analysisExecutionGalaxyITService.setupPairSubmissionInDatabase(1L,
			pairSequenceFiles1A, pairSequenceFiles2A, referenceFilePath, parameters,
			validWorkflowIdPairedWithParameters);
	analysisSubmission.setRemoteAnalysisId(createdHistory.getId());
	analysisSubmission.setRemoteWorkflowId(galaxyWorkflow.getId());

	PreparedWorkflowGalaxy preparedWorkflow = analysisWorkspaceService.prepareAnalysisFiles(analysisSubmission);
	assertEquals("the response history id should match the input history id", createdHistory.getId(),
			preparedWorkflow.getRemoteAnalysisId());
	WorkflowInputsGalaxy workflowInputsGalaxy = preparedWorkflow.getWorkflowInputs();
	assertNotNull("the returned workflow inputs should not be null", workflowInputsGalaxy);
	assertNotNull("the returned library id should not be null", preparedWorkflow.getRemoteDataId());

	// verify correct files have been uploaded
	List<HistoryContents> historyContents = historiesClient.showHistoryContents(createdHistory.getId());
	assertEquals("the created history has an invalid number of elements", 4, historyContents.size());

	WorkflowInputs workflowInputs = preparedWorkflow.getWorkflowInputs().getInputsObject();
	assertNotNull("created workflowInputs is null", workflowInputs);
	Map<String, Object> toolParameters = workflowInputs.getParameters()
			.get("core_pipeline_outputs_paired_with_parameters");
	assertNotNull("toolParameters is null", toolParameters);

	String coverageMinValue = (String) toolParameters.get("coverageMin");
	assertEquals("coverageMinValue should have been changed", "20", coverageMinValue);
	assertEquals("coverageMidValue should have been changed", ImmutableMap.of("coverageMid", "20"),
			toolParameters.get("conditional"));
	// BUGFIX: previously read "coverageMin" a second time, so coverageMax was never verified.
	String coverageMaxValue = (String) toolParameters.get("coverageMax");
	assertEquals("coverageMaxValue should have been changed", "20", coverageMaxValue);
}
Use of ca.corefacility.bioinformatics.irida.model.workflow.execution.galaxy.PreparedWorkflowGalaxy in project irida by phac-nml.
From the class AnalysisWorkspaceServiceGalaxyTest, method testPrepareAnalysisFilesSingleSuccess().
/**
 * Tests out successfully to preparing an analysis with single files.
 *
 * <p>Stubs the Galaxy collaborators so that preparation succeeds, then verifies that the
 * prepared workflow references the expected history/library and that only the single-end
 * upload path was exercised.</p>
 *
 * @throws ExecutionManagerException
 * @throws IridaWorkflowException
 */
@SuppressWarnings("unchecked")
@Test
public void testPrepareAnalysisFilesSingleSuccess() throws ExecutionManagerException, IridaWorkflowException {
	Set<SingleEndSequenceFile> singleFiles = Sets.newHashSet(sampleSingleSequenceFileMap.values());

	submission = AnalysisSubmission.builder(workflowId).name("my analysis")
			.inputFiles(Sets.newHashSet(singleFiles)).referenceFile(referenceFile).build();
	submission.setRemoteAnalysisId(HISTORY_ID);
	submission.setRemoteWorkflowId(WORKFLOW_ID);

	// Stub the collaborators used along the single-end preparation path.
	when(sequencingObjectService.getSequencingObjectsOfTypeForAnalysisSubmission(submission,
			SingleEndSequenceFile.class)).thenReturn(singleFiles);
	when(iridaWorkflowsService.getIridaWorkflow(workflowId)).thenReturn(iridaWorkflowSingle);
	when(galaxyHistoriesService.findById(HISTORY_ID)).thenReturn(workflowHistory);
	when(galaxyLibrariesService.buildEmptyLibrary(any(GalaxyProjectName.class))).thenReturn(workflowLibrary);
	when(sequencingObjectService.getUniqueSamplesForSequencingObjects(singleFiles))
			.thenReturn(sampleSingleSequenceFileMap);
	when(galaxyHistoriesService.fileToHistory(refFile, InputFileType.FASTA, workflowHistory)).thenReturn(refDataset);
	when(galaxyWorkflowService.getWorkflowDetails(WORKFLOW_ID)).thenReturn(workflowDetails);
	when(analysisParameterServiceGalaxy.prepareAnalysisParameters(any(Map.class), any(IridaWorkflow.class)))
			.thenReturn(new WorkflowInputsGalaxy(new WorkflowInputs()));
	when(galaxyWorkflowService.getWorkflowInputId(workflowDetails, SEQUENCE_FILE_SINGLE_LABEL))
			.thenReturn(SEQUENCE_FILE_SINGLE_ID);
	when(galaxyWorkflowService.getWorkflowInputId(workflowDetails, REFERENCE_FILE_LABEL))
			.thenReturn(REFERENCE_FILE_ID);
	when(analysisCollectionServiceGalaxy.uploadSequenceFilesSingleEnd(any(Map.class), eq(workflowHistory),
			eq(workflowLibrary))).thenReturn(collectionResponseSingle);

	PreparedWorkflowGalaxy preparedWorkflow = workflowPreparation.prepareAnalysisFiles(submission);
	// BUGFIX: assertion message previously misspelled "preparedWorflow".
	assertEquals("preparedWorkflow history id not equal to " + HISTORY_ID, HISTORY_ID,
			preparedWorkflow.getRemoteAnalysisId());
	assertEquals("preparedWorkflow library is invalid", LIBRARY_ID, preparedWorkflow.getRemoteDataId());
	assertNotNull("workflowInputs in preparedWorkflow is null", preparedWorkflow.getWorkflowInputs());

	Map<String, WorkflowInput> workflowInputsMap = preparedWorkflow.getWorkflowInputs().getInputsObject().getInputs();
	assertEquals("workflow inputs has invalid size", 2, workflowInputsMap.size());
	assertTrue("workflow inputs should contain reference file entry",
			workflowInputsMap.containsKey(REFERENCE_FILE_ID));
	assertTrue("workflow inputs should contain sequence file single entry",
			workflowInputsMap.containsKey(SEQUENCE_FILE_SINGLE_ID));

	// Only the single-end upload path should have been taken.
	verify(analysisCollectionServiceGalaxy).uploadSequenceFilesSingleEnd(any(Map.class), any(History.class),
			any(Library.class));
	verify(analysisCollectionServiceGalaxy, never()).uploadSequenceFilesPaired(any(Map.class), any(History.class),
			any(Library.class));
}
Use of ca.corefacility.bioinformatics.irida.model.workflow.execution.galaxy.PreparedWorkflowGalaxy in project irida by phac-nml.
From the class AnalysisExecutionServiceGalaxyAsync, method executeAnalysis().
/**
 * Executes the passed prepared {@link AnalysisSubmission} in an execution
 * manager.
 *
 * @param analysisSubmission
 *            The {@link AnalysisSubmission} to execute.
 * @return A {@link Future} with an {@link AnalysisSubmission} for the
 *         analysis submitted.
 * @throws ExecutionManagerException
 *             If there was an exception submitting the analysis to the
 *             execution manager.
 * @throws IridaWorkflowException If there was an issue with the IRIDA workflow.
 */
@RunAsUser("#analysisSubmission.getSubmitter()")
public Future<AnalysisSubmission> executeAnalysis(AnalysisSubmission analysisSubmission)
		throws ExecutionManagerException, IridaWorkflowException {
	// Preconditions: the submission must already be prepared (remote analysis id set).
	checkNotNull(analysisSubmission, "analysisSubmission is null");
	// BUGFIX: corrected "analyis" typo in the precondition failure message.
	checkNotNull(analysisSubmission.getRemoteAnalysisId(), "remote analysis id is null");
	checkNotNull(analysisSubmission.getWorkflowId(), "workflowId is null");

	logger.debug("Running submission for " + analysisSubmission);

	// Stage the input files into the execution manager (Galaxy) for this submission.
	logger.trace("Preparing files for " + analysisSubmission);
	PreparedWorkflowGalaxy preparedWorkflow = workspaceService.prepareAnalysisFiles(analysisSubmission);
	WorkflowInputsGalaxy input = preparedWorkflow.getWorkflowInputs();
	String libraryId = preparedWorkflow.getRemoteDataId();

	// Launch the workflow, then persist the new state and remote input data id.
	logger.trace("Executing " + analysisSubmission);
	galaxyWorkflowService.runWorkflow(input);
	analysisSubmission.setAnalysisState(AnalysisState.RUNNING);
	analysisSubmission.setRemoteInputDataId(libraryId);
	analysisSubmission = analysisSubmissionService.update(analysisSubmission);

	return new AsyncResult<>(analysisSubmission);
}
Aggregations