use of com.github.jmchilton.blend4j.galaxy.beans.WorkflowOutputs in project irida by phac-nml.
the class GalaxyWorkflowsIT method testExecuteCollectionsPairedList.
/**
* Tests executing a collections paired list workflow.
* @throws ExecutionManagerException
*/
@Test
public void testExecuteCollectionsPairedList() throws ExecutionManagerException {
    String workflowId = localGalaxy.getWorklowCollectionListId();
    String workflowInputLabel = localGalaxy.getWorkflowCollectionListLabel();

    List<Path> dataFilesForward = new LinkedList<Path>();
    dataFilesForward.add(dataFile1);
    dataFilesForward.add(dataFile2);

    List<Path> dataFilesReverse = new LinkedList<Path>();
    dataFilesReverse.add(dataFile3);
    dataFilesReverse.add(dataFile4);

    WorkflowOutputs workflowOutput = runSingleCollectionWorkflow(dataFilesForward, dataFilesReverse, FILE_TYPE,
            workflowId, workflowInputLabel);
    assertNotNull(workflowOutput);
    assertNotNull(workflowOutput.getHistoryId());

    // history should exist
    HistoryDetails historyDetails = historiesClient.showHistory(workflowOutput.getHistoryId());
    assertNotNull(historyDetails);

    // outputs should exist
    assertNotNull(workflowOutput.getOutputIds());
    assertEquals(1, workflowOutput.getOutputIds().size());
    String outputId = workflowOutput.getOutputIds().get(0);

    // output dataset should exist
    Dataset outputDataset = historiesClient.showDataset(workflowOutput.getHistoryId(), outputId);
    assertNotNull(outputDataset);

    // test get workflow status
    GalaxyWorkflowStatus workflowStatus = galaxyHistory.getStatusForHistory(workflowOutput.getHistoryId());
    float percentComplete = workflowStatus.getProportionComplete();
    assertTrue(0.0f <= percentComplete && percentComplete <= 1.0f);
}
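The proportion-complete figure asserted above comes from GalaxyWorkflowStatus, whose construction is not shown in this entry. The fragment below is a minimal sketch of how such a figure could be derived from the per-state dataset ids reported by blend4j's HistoryDetails; it is an illustration only, not IRIDA's actual GalaxyWorkflowStatus logic, and counting only the "ok" state (plus the java.util.Map/List/Collections types) is an assumption made for the sketch.

    // Illustration only: derive a rough proportion-complete value from Galaxy's per-state dataset ids.
    HistoryDetails details = historiesClient.showHistory(workflowOutput.getHistoryId());
    Map<String, List<String>> stateIds = details.getStateIds(); // dataset ids grouped by state, e.g. "ok", "running", "queued"
    int totalDatasets = stateIds.values().stream().mapToInt(List::size).sum();
    int okDatasets = stateIds.getOrDefault("ok", Collections.emptyList()).size();
    float proportion = (totalDatasets == 0) ? 0.0f : (float) okDatasets / totalDatasets;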
use of com.github.jmchilton.blend4j.galaxy.beans.WorkflowOutputs in project irida by phac-nml.
the class GalaxyWorkflowsIT method testExecuteWorkflowChangeToolParameter.
/**
* Tests executing a single workflow in Galaxy and changing a single tool
* parameter.
*
* @throws ExecutionManagerException
*/
@Test
public void testExecuteWorkflowChangeToolParameter() throws ExecutionManagerException {
    String toolId = "Grep1";
    String workflowId = localGalaxy.getSingleInputWorkflowId();
    String workflowInputLabel = localGalaxy.getSingleInputWorkflowLabel();
    Map<String, ToolParameter> toolParameters = ImmutableMap.of(toolId, new ToolParameter("pattern", "^#"));

    WorkflowOutputs workflowOutput = runSingleFileWorkflow(dataFile1, FILE_TYPE, workflowId, workflowInputLabel,
            toolParameters);
    assertNotNull("workflowOutput should not be null", workflowOutput);
    assertNotNull("workflowOutput history id should not be null", workflowOutput.getHistoryId());

    // history should exist
    HistoryDetails historyDetails = historiesClient.showHistory(workflowOutput.getHistoryId());
    assertNotNull("historyDetails for the history for the workflow should not be null", historyDetails);

    // outputs should exist
    assertNotNull("outputIds for the workflow should not be null", workflowOutput.getOutputIds());
    assertTrue("there should exist output dataset ids for the workflow", workflowOutput.getOutputIds().size() > 0);

    // each output dataset should exist
    for (String outputId : workflowOutput.getOutputIds()) {
        Dataset dataset = historiesClient.showDataset(workflowOutput.getHistoryId(), outputId);
        assertNotNull("the output dataset should exist", dataset);

        HistoryContentsProvenance provenance = historiesClient.showProvenance(workflowOutput.getHistoryId(),
                dataset.getId());
        if (toolId.equals(provenance.getToolId())) {
            Map<String, Object> parametersMap = provenance.getParameters();
            assertEquals("pattern parameter is correct", "\"^#\"", parametersMap.get("pattern"));
        }
    }

    // test get workflow status
    GalaxyWorkflowStatus workflowStatus = galaxyHistory.getStatusForHistory(workflowOutput.getHistoryId());
    float proportionComplete = workflowStatus.getProportionComplete();
    assertTrue("the workflow proportion complete should be between 0 and 1",
            0.0f <= proportionComplete && proportionComplete <= 1.0f);
}
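The helper runSingleFileWorkflow used above is not reproduced in this entry. The fragment below is a hedged sketch of how the tool-parameter map could be forwarded to the workflow invocation through blend4j's WorkflowInputs; the variables historyId, datasetId and workflowInputId are placeholders for values the real helper would resolve, and the use of setToolParameter is the standard blend4j call for overriding a tool parameter rather than necessarily IRIDA's exact code.

    // Sketch: forward each ToolParameter override to the workflow invocation.
    // historyId, datasetId and workflowInputId are placeholders for values resolved by the real helper.
    WorkflowInputs inputs = new WorkflowInputs();
    inputs.setWorkflowId(workflowId);
    inputs.setDestination(new WorkflowInputs.ExistingHistory(historyId));
    inputs.setInput(workflowInputId, new WorkflowInputs.WorkflowInput(datasetId, WorkflowInputs.InputSourceType.HDA));
    for (Map.Entry<String, ToolParameter> entry : toolParameters.entrySet()) {
        inputs.setToolParameter(entry.getKey(), entry.getValue());
    }
    WorkflowOutputs workflowOutputs = workflowsClient.runWorkflow(inputs);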
use of com.github.jmchilton.blend4j.galaxy.beans.WorkflowOutputs in project irida by phac-nml.
the class GalaxyWorkflowsIT method runSingleCollectionWorkflow.
/**
 * Starts the execution of a workflow with lists of paired fastq files and the given workflow id.
 * @param inputFilesForward A list of forward-read fastq files to start the workflow with.
 * @param inputFilesReverse A list of reverse-read fastq files to start the workflow with.
 * @param inputFileType The file type of the input files.
 * @param workflowId The id of the workflow to start.
 * @param workflowInputLabel The label of a workflow input in Galaxy.
 * @return The WorkflowOutputs describing the started workflow.
 * @throws ExecutionManagerException If there was an error executing the workflow.
 */
private WorkflowOutputs runSingleCollectionWorkflow(List<Path> inputFilesForward, List<Path> inputFilesReverse,
        InputFileType inputFileType, String workflowId, String workflowInputLabel) throws ExecutionManagerException {
    checkNotNull(inputFilesForward, "inputFilesForward is null");
    checkNotNull(inputFilesReverse, "inputFilesReverse is null");
    checkArgument(inputFilesForward.size() == inputFilesReverse.size(), "inputFiles have different number of elements");
    checkNotNull(inputFileType, "inputFileType is null");
    checkNotNull(workflowInputLabel, "workflowInputLabel is null");

    for (Path file : inputFilesForward) {
        checkArgument(Files.exists(file), "inputFileForward " + file + " does not exist");
    }

    for (Path file : inputFilesReverse) {
        checkArgument(Files.exists(file), "inputFilesReverse " + file + " does not exist");
    }

    History workflowHistory = galaxyHistory.newHistoryForWorkflow();
    WorkflowDetails workflowDetails = workflowsClient.showWorkflow(workflowId);

    // upload dataset to history
    List<Dataset> inputDatasetsForward = uploadFilesListToHistory(inputFilesForward, inputFileType, workflowHistory);
    List<Dataset> inputDatasetsReverse = uploadFilesListToHistory(inputFilesReverse, inputFileType, workflowHistory);
    assertEquals(inputFilesForward.size(), inputDatasetsForward.size());
    assertEquals(inputDatasetsForward.size(), inputDatasetsReverse.size());

    // construct list of datasets
    CollectionResponse collection = constructPairedFileCollection(inputDatasetsForward, inputDatasetsReverse,
            workflowHistory);
    logger.debug("Constructed dataset collection: id=" + collection.getId() + ", " + collection.getName());

    String workflowInputId = galaxyWorkflowService.getWorkflowInputId(workflowDetails, workflowInputLabel);

    WorkflowInputs inputs = new WorkflowInputs();
    inputs.setDestination(new WorkflowInputs.ExistingHistory(workflowHistory.getId()));
    inputs.setWorkflowId(workflowDetails.getId());
    inputs.setInput(workflowInputId, new WorkflowInputs.WorkflowInput(collection.getId(),
            WorkflowInputs.InputSourceType.HDCA));

    // execute workflow
    WorkflowOutputs output = workflowsClient.runWorkflow(inputs);
    logger.debug("Running workflow in history " + output.getHistoryId());

    return output;
}
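The constructPairedFileCollection helper called above is also not shown in this entry. The following is a minimal sketch of such a helper, assuming blend4j's collection request beans (CollectionDescription, CollectionElement, HistoryDatasetElement), HistoriesClient.createDatasetCollection, and the Galaxy collection type strings "list:paired" and "paired"; the element names "forward", "reverse", "paired_collection" and "pair_i" are chosen for illustration and are not necessarily IRIDA's exact conventions.

    // Sketch: build a list:paired dataset collection from matched forward/reverse datasets.
    private CollectionResponse constructPairedFileCollection(List<Dataset> forward, List<Dataset> reverse,
            History history) {
        CollectionDescription description = new CollectionDescription();
        description.setCollectionType("list:paired");
        description.setName("paired_collection"); // collection name chosen for illustration

        for (int i = 0; i < forward.size(); i++) {
            HistoryDatasetElement forwardElement = new HistoryDatasetElement();
            forwardElement.setId(forward.get(i).getId());
            forwardElement.setName("forward");

            HistoryDatasetElement reverseElement = new HistoryDatasetElement();
            reverseElement.setId(reverse.get(i).getId());
            reverseElement.setName("reverse");

            CollectionElement pair = new CollectionElement();
            pair.setName("pair_" + i); // per-pair name chosen for illustration
            pair.setCollectionType("paired");
            pair.addCollectionElement(forwardElement);
            pair.addCollectionElement(reverseElement);

            description.addDatasetElement(pair);
        }

        return historiesClient.createDatasetCollection(history.getId(), description);
    }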
use of com.github.jmchilton.blend4j.galaxy.beans.WorkflowOutputs in project irida by phac-nml.
the class GalaxyWorkflowsIT method testGetWorkflowStatusComplete.
/**
* Tests executing a single workflow in Galaxy and getting the status after completion.
* @throws ExecutionManagerException
* @throws InterruptedException
* @throws TimeoutException
*/
@Test
public void testGetWorkflowStatusComplete() throws ExecutionManagerException, TimeoutException, InterruptedException {
    History history = galaxyHistory.newHistoryForWorkflow();

    WorkflowOutputs workflowOutput = runSingleFileTabularWorkflowFilterTool(history, dataFile1, VALID_FILTER_PARAMETER);

    Util.waitUntilHistoryComplete(workflowOutput.getHistoryId(), galaxyHistory, 60);

    // test get workflow status
    GalaxyWorkflowStatus workflowStatus = galaxyHistory.getStatusForHistory(workflowOutput.getHistoryId());
    assertEquals("final workflow state is invalid", GalaxyWorkflowState.OK, workflowStatus.getState());
    assertTrue("final workflow state is invalid", workflowStatus.completedSuccessfully());
}
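Util.waitUntilHistoryComplete is an IRIDA test utility whose implementation is not shown on this page. Below is a hedged sketch of what such a polling helper might look like, built only from the status API used in these tests; the two-second polling interval and the GalaxyHistoriesService parameter type are assumptions, not the actual utility code.

    // Sketch of a polling helper: wait until the history reaches a terminal state or the timeout expires.
    private static void waitUntilHistoryComplete(String historyId, GalaxyHistoriesService galaxyHistory, int timeoutSeconds)
            throws ExecutionManagerException, InterruptedException, TimeoutException {
        long deadline = System.currentTimeMillis() + timeoutSeconds * 1000L;
        GalaxyWorkflowStatus status = galaxyHistory.getStatusForHistory(historyId);
        while (!status.completedSuccessfully() && !status.errorOccurred()) {
            if (System.currentTimeMillis() > deadline) {
                throw new TimeoutException("history " + historyId + " did not reach a terminal state in "
                        + timeoutSeconds + " seconds");
            }
            Thread.sleep(2000); // polling interval chosen for illustration
            status = galaxyHistory.getStatusForHistory(historyId);
        }
    }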
use of com.github.jmchilton.blend4j.galaxy.beans.WorkflowOutputs in project irida by phac-nml.
the class GalaxyWorkflowsIT method testGetWorkflowStatusError.
/**
* Tests executing a single workflow in Galaxy and getting an error status after completion.
* @throws ExecutionManagerException
* @throws InterruptedException
* @throws TimeoutException
*/
@Test
public void testGetWorkflowStatusError() throws ExecutionManagerException, TimeoutException, InterruptedException {
    History history = galaxyHistory.newHistoryForWorkflow();

    // no column c2 for this input file, so should give an error
    WorkflowOutputs workflowOutput = runSingleFileTabularWorkflowFilterTool(history, dataFile1, INVALID_FILTER_PARAMETER);

    Util.waitUntilHistoryComplete(workflowOutput.getHistoryId(), galaxyHistory, 60);

    // test get workflow status
    GalaxyWorkflowStatus workflowStatus = galaxyHistory.getStatusForHistory(workflowOutput.getHistoryId());
    assertEquals("final workflow state is invalid", GalaxyWorkflowState.ERROR, workflowStatus.getState());
    assertTrue("final workflow state is invalid", workflowStatus.errorOccurred());
}
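As a short sketch of how calling code might branch on the two terminal states exercised by these tests, the fragment below uses only the status and history methods already shown on this page; the logging is illustrative rather than taken from the test class.

    // Sketch: react to the terminal workflow state once polling has finished.
    GalaxyWorkflowStatus status = galaxyHistory.getStatusForHistory(workflowOutput.getHistoryId());
    if (status.errorOccurred()) {
        logger.error("Workflow in history " + workflowOutput.getHistoryId() + " failed with state " + status.getState());
    } else if (status.completedSuccessfully()) {
        for (String outputId : workflowOutput.getOutputIds()) {
            Dataset output = historiesClient.showDataset(workflowOutput.getHistoryId(), outputId);
            // consume the finished output dataset, e.g. record or download it
        }
    }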