Usage example of com.github.jmchilton.blend4j.galaxy.beans.WorkflowDetails in the project irida by phac-nml.
From the class GalaxyWorkflowsIT, method runSingleCollectionWorkflow.
/**
 * Starts the execution of a workflow with lists of paired-end fastq files and the given workflow id.
 * The forward and reverse files are uploaded to a fresh history and combined into a paired dataset
 * collection, which is then passed to the workflow input identified by {@code workflowInputLabel}.
 *
 * @param inputFilesForward A list of forward read fastq files to start the workflow.
 * @param inputFilesReverse A list of reverse read fastq files to start the workflow.
 * @param inputFileType The file type of the input files.
 * @param workflowId The id of the workflow to start.
 * @param workflowInputLabel The label of a workflow input in Galaxy.
 * @return The {@link WorkflowOutputs} describing the running workflow.
 * @throws ExecutionManagerException If there was an error executing the workflow.
 */
private WorkflowOutputs runSingleCollectionWorkflow(List<Path> inputFilesForward, List<Path> inputFilesReverse, InputFileType inputFileType, String workflowId, String workflowInputLabel) throws ExecutionManagerException {
	checkNotNull(inputFilesForward, "inputFilesForward is null");
	checkNotNull(inputFilesReverse, "inputFilesReverse is null");
	checkArgument(inputFilesForward.size() == inputFilesReverse.size(), "inputFiles have different number of elements");
	checkNotNull(inputFileType, "inputFileType is null");
	// workflowId was previously the only unchecked parameter; validate it for consistency
	checkNotNull(workflowId, "workflowId is null");
	checkNotNull(workflowInputLabel, "workflowInputLabel is null");
	for (Path file : inputFilesForward) {
		checkArgument(Files.exists(file), "inputFilesForward " + file + " does not exist");
	}
	for (Path file : inputFilesReverse) {
		checkArgument(Files.exists(file), "inputFilesReverse " + file + " does not exist");
	}
	History workflowHistory = galaxyHistory.newHistoryForWorkflow();
	WorkflowDetails workflowDetails = workflowsClient.showWorkflow(workflowId);
	// upload datasets (forward and reverse reads) to the new history
	List<Dataset> inputDatasetsForward = uploadFilesListToHistory(inputFilesForward, inputFileType, workflowHistory);
	List<Dataset> inputDatasetsReverse = uploadFilesListToHistory(inputFilesReverse, inputFileType, workflowHistory);
	assertEquals(inputFilesForward.size(), inputDatasetsForward.size());
	assertEquals(inputDatasetsForward.size(), inputDatasetsReverse.size());
	// pair up forward/reverse datasets into a single dataset collection
	CollectionResponse collection = constructPairedFileCollection(inputDatasetsForward, inputDatasetsReverse, workflowHistory);
	logger.debug("Constructed dataset collection: id=" + collection.getId() + ", " + collection.getName());
	String workflowInputId = galaxyWorkflowService.getWorkflowInputId(workflowDetails, workflowInputLabel);
	WorkflowInputs inputs = new WorkflowInputs();
	inputs.setDestination(new WorkflowInputs.ExistingHistory(workflowHistory.getId()));
	inputs.setWorkflowId(workflowDetails.getId());
	// HDCA = history dataset collection association: the collection is the workflow input
	inputs.setInput(workflowInputId, new WorkflowInputs.WorkflowInput(collection.getId(), WorkflowInputs.InputSourceType.HDCA));
	// execute workflow
	WorkflowOutputs output = workflowsClient.runWorkflow(inputs);
	logger.debug("Running workflow in history " + output.getHistoryId());
	return output;
}
Usage example of com.github.jmchilton.blend4j.galaxy.beans.WorkflowDetails in the project irida by phac-nml.
From the class GalaxyWorkflowsIT, method testDeleteWorkflowSuccess.
/**
 * Tests that deleting an uploaded workflow succeeds: the workflow's details
 * report it as not deleted before the delete call and deleted afterwards.
 *
 * @throws WorkflowUploadException
 * @throws IOException
 * @throws WorkflowException
 * @throws DeleteGalaxyObjectFailedException
 */
@Test
public void testDeleteWorkflowSuccess() throws WorkflowUploadException, IOException, WorkflowException, DeleteGalaxyObjectFailedException {
	final String uploadedWorkflowId = galaxyWorkflowService.uploadGalaxyWorkflow(workflowPath);

	// a freshly uploaded workflow must not be flagged as deleted
	WorkflowDetails workflowDetails = galaxyWorkflowService.getWorkflowDetails(uploadedWorkflowId);
	assertFalse(workflowDetails.isDeleted());

	// after deletion, re-fetched details must report the workflow as deleted
	galaxyWorkflowService.deleteWorkflow(uploadedWorkflowId);
	workflowDetails = galaxyWorkflowService.getWorkflowDetails(uploadedWorkflowId);
	assertTrue(workflowDetails.isDeleted());
}
Usage example of com.github.jmchilton.blend4j.galaxy.beans.WorkflowDetails in the project irida by phac-nml.
From the class GalaxyWorkflowsIT, method runSingleFileTabularWorkflow.
/**
 * Runs a test workflow with the given parameters and input file.
 *
 * @param workflowId
 *            The id of the workflow to run.
 * @param workflowInputLabel
 *            The label of the input for the workflow.
 * @param history
 *            The history to run the workflow in.
 * @param inputFile
 *            The file to run the workflow on.
 * @param toolName
 *            The toolName of a parameter to override.
 * @param toolParameter
 *            The overridden tool parameter.
 * @return A {@link WorkflowOutputs} for this workflow.
 * @throws ExecutionManagerException
 *             If there was an error executing the workflow.
 */
private WorkflowOutputs runSingleFileTabularWorkflow(String workflowId, String workflowInputLabel, History history, Path inputFile, String toolName, ToolParameter toolParameter) throws ExecutionManagerException {
	// validate arguments up front, consistent with the other runSingle*Workflow helpers
	checkNotNull(workflowId, "workflowId is null");
	checkNotNull(workflowInputLabel, "workflowInputLabel is null");
	checkNotNull(history, "history is null");
	checkNotNull(inputFile, "inputFile is null");
	checkArgument(Files.exists(inputFile), "inputFile " + inputFile + " does not exist");
	WorkflowDetails workflowDetails = workflowsClient.showWorkflow(workflowId);
	// upload dataset to history
	Dataset inputDataset = fileToHistory(inputFile, "tabular", history.getId());
	assertNotNull(inputDataset);
	String workflowInputId = galaxyWorkflowService.getWorkflowInputId(workflowDetails, workflowInputLabel);
	WorkflowInputs inputs = new WorkflowInputs();
	inputs.setDestination(new WorkflowInputs.ExistingHistory(history.getId()));
	inputs.setWorkflowId(workflowDetails.getId());
	// HDA = history dataset association: a single dataset is the workflow input
	inputs.setInput(workflowInputId, new WorkflowInputs.WorkflowInput(inputDataset.getId(), WorkflowInputs.InputSourceType.HDA));
	inputs.setToolParameter(toolName, toolParameter);
	// execute workflow
	WorkflowOutputs output = workflowsClient.runWorkflow(inputs);
	logger.debug("Running workflow in history " + output.getHistoryId());
	return output;
}
Usage example of com.github.jmchilton.blend4j.galaxy.beans.WorkflowDetails in the project irida by phac-nml.
From the class GalaxyWorkflowsIT, method runSingleFileWorkflow.
/**
 * Starts the execution of a workflow with a single fastq file and the given workflow id.
 *
 * @param inputFile An input file to start the workflow.
 * @param inputFileType The file type of the input file.
 * @param workflowId The id of the workflow to start.
 * @param workflowInputLabel The label of a workflow input in Galaxy.
 * @param toolParameters A map of tool parameters to set; may be null for no overrides.
 * @return The {@link WorkflowOutputs} describing the running workflow.
 * @throws ExecutionManagerException If there was an error executing the workflow.
 */
private WorkflowOutputs runSingleFileWorkflow(Path inputFile, InputFileType inputFileType, String workflowId, String workflowInputLabel, Map<String, ToolParameter> toolParameters) throws ExecutionManagerException {
	checkNotNull(inputFile, "file is null");
	checkNotNull(inputFileType, "inputFileType is null");
	// workflowId was previously the only unchecked parameter; validate it for consistency
	checkNotNull(workflowId, "workflowId is null");
	checkNotNull(workflowInputLabel, "workflowInputLabel is null");
	checkArgument(Files.exists(inputFile), "inputFile " + inputFile + " does not exist");
	History workflowHistory = galaxyHistory.newHistoryForWorkflow();
	WorkflowDetails workflowDetails = workflowsClient.showWorkflow(workflowId);
	// upload dataset to history
	Dataset inputDataset = galaxyHistory.fileToHistory(inputFile, inputFileType, workflowHistory);
	assertNotNull(inputDataset);
	String workflowInputId = galaxyWorkflowService.getWorkflowInputId(workflowDetails, workflowInputLabel);
	WorkflowInputs inputs = new WorkflowInputs();
	inputs.setDestination(new WorkflowInputs.ExistingHistory(workflowHistory.getId()));
	inputs.setWorkflowId(workflowDetails.getId());
	// HDA = history dataset association: a single dataset is the workflow input
	inputs.setInput(workflowInputId, new WorkflowInputs.WorkflowInput(inputDataset.getId(), WorkflowInputs.InputSourceType.HDA));
	if (toolParameters != null) {
		// iterate entries directly instead of keySet() + get() (avoids a second lookup per key)
		for (Map.Entry<String, ToolParameter> entry : toolParameters.entrySet()) {
			inputs.setToolParameter(entry.getKey(), entry.getValue());
		}
	}
	// execute workflow
	WorkflowOutputs output = workflowsClient.runWorkflow(inputs);
	logger.debug("Running workflow in history " + output.getHistoryId());
	return output;
}
Usage example of com.github.jmchilton.blend4j.galaxy.beans.WorkflowDetails in the project irida by phac-nml.
From the class AnalysisExecutionServiceGalaxyIT, method testCleanupErrorAnalysisSuccess.
/**
 * Tests that a submission in the ERROR state can be cleaned up successfully:
 * after cleanup, the submission is CLEANED and its remote workflow is deleted.
 *
 * @throws Exception
 */
@Test
@WithMockUser(username = "aaron", roles = "ADMIN")
public void testCleanupErrorAnalysisSuccess() throws Exception {
	// set up and prepare a submission, then force it into the ERROR state
	AnalysisSubmission submission = analysisExecutionGalaxyITService.setupSubmissionInDatabase(1L, sequenceFilePath, referenceFilePath, iridaTestAnalysisWorkflowId, false);
	Future<AnalysisSubmission> preparedFuture = analysisExecutionService.prepareSubmission(submission);
	AnalysisSubmission prepared = preparedFuture.get();
	prepared.setAnalysisState(AnalysisState.ERROR);
	analysisSubmissionRepository.save(prepared);

	// precondition: the remote workflow should still exist (not deleted)
	WorkflowDetails remoteDetails = workflowsClient.showWorkflow(prepared.getRemoteWorkflowId());
	assertFalse("Workflow is already deleted", remoteDetails.isDeleted());

	// Once analysis is complete, attempt to clean up
	Future<AnalysisSubmission> cleanedFuture = analysisExecutionService.cleanupSubmission(prepared);
	AnalysisSubmission cleaned = cleanedFuture.get();
	assertEquals("Analysis submission not properly cleaned", AnalysisCleanedState.CLEANED, cleaned.getAnalysisCleanedState());

	// postcondition: cleanup must have deleted the remote workflow
	remoteDetails = workflowsClient.showWorkflow(prepared.getRemoteWorkflowId());
	assertTrue("Workflow is not deleted", remoteDetails.isDeleted());
}
End of aggregated usage examples.