use of com.github.jmchilton.blend4j.galaxy.beans.Dataset in project irida by phac-nml.
the class GalaxyHistoriesServiceIT method testGetDatasetForFileInHistoryFail.
/**
 * Verifies that looking up a dataset by a name that does not exist in the
 * history throws {@link GalaxyDatasetNotFoundException}.
 * @throws UploadException
 * @throws GalaxyDatasetException
 */
@Test(expected = GalaxyDatasetNotFoundException.class)
public void testGetDatasetForFileInHistoryFail() throws UploadException, GalaxyDatasetException {
	History workflowHistory = galaxyHistory.newHistoryForWorkflow();
	Dataset uploadedDataset = galaxyHistory.fileToHistory(dataFile, InputFileType.FASTQ_SANGER, workflowHistory);
	// Append a suffix so the name is guaranteed not to match the uploaded dataset.
	String nonExistentName = uploadedDataset.getName() + "invalid";
	galaxyHistory.getDatasetForFileInHistory(nonExistentName, workflowHistory.getId());
}
use of com.github.jmchilton.blend4j.galaxy.beans.Dataset in project irida by phac-nml.
the class GalaxyHistoriesServiceIT method testDownloadDatasetFailHistoryId.
/**
 * Tests failing to download a dataset (invalid history id).
 * @throws IOException
 * @throws InterruptedException
 * @throws ExecutionManagerException
 * @throws TimeoutException
 */
@Test(expected = ExecutionManagerDownloadException.class)
@Ignore("Ignored because of inconsistent behaviour between Galaxy revisions.")
public void testDownloadDatasetFailHistoryId() throws IOException, TimeoutException, ExecutionManagerException, InterruptedException {
	History history = galaxyHistory.newHistoryForWorkflow();
	Dataset dataset = galaxyHistory.fileToHistory(dataFile, InputFileType.FASTQ_SANGER, history);
	// Wait (up to 60s) for the upload job to finish before attempting the download.
	Util.waitUntilHistoryComplete(history.getId(), galaxyHistory, 60);
	// Corrupt the history id so the download lookup must fail.
	String invalidHistoryId = history.getId() + "a";
	Path datasetPath = Files.createTempFile("data", "fastq");
	galaxyHistory.downloadDatasetTo(invalidHistoryId, dataset.getId(), datasetPath);
}
use of com.github.jmchilton.blend4j.galaxy.beans.Dataset in project irida by phac-nml.
the class GalaxyWorkflowsIT method constructPairedFileCollection.
/**
 * Builds a paired-list dataset collection in Galaxy from matched forward and
 * reverse datasets.
 * @param inputDatasetsForward The forward datasets to construct a collection from.
 * @param inputDatasetsReverse The reverse datasets to construct a collection from.
 * @param history The history to construct the collection within.
 * @return A CollectionResponse describing the dataset collection.
 * @throws ExecutionManagerException If an exception occurred constructing the collection.
 */
public CollectionResponse constructPairedFileCollection(List<Dataset> inputDatasetsForward, List<Dataset> inputDatasetsReverse, History history) throws ExecutionManagerException {
	checkNotNull(inputDatasetsForward, "inputDatasetsForward is null");
	checkNotNull(inputDatasetsReverse, "inputDatasetsReverse is null");
	checkNotNull(history, "history is null");
	checkNotNull(history.getId(), "history does not have an associated id");
	checkArgument(inputDatasetsForward.size() == inputDatasetsReverse.size(), "inputDatasets do not have equal sizes");
	CollectionDescription description = new CollectionDescription();
	description.setCollectionType(DatasetCollectionType.LIST_PAIRED.toString());
	description.setName(COLLECTION_NAME);
	int pairCount = inputDatasetsForward.size();
	for (int pairIndex = 0; pairIndex < pairCount; pairIndex++) {
		HistoryDatasetElement forwardElement = new HistoryDatasetElement();
		forwardElement.setId(inputDatasetsForward.get(pairIndex).getId());
		forwardElement.setName(FORWARD_PAIR_NAME);
		HistoryDatasetElement reverseElement = new HistoryDatasetElement();
		reverseElement.setId(inputDatasetsReverse.get(pairIndex).getId());
		reverseElement.setName(REVERSE_PAIR_NAME);
		// Link the forward and reverse reads together as a single paired element.
		CollectionElement pairedElement = new CollectionElement();
		pairedElement.setName(BASE_NAME + pairIndex);
		pairedElement.setCollectionType(DatasetCollectionType.PAIRED.toString());
		pairedElement.addCollectionElement(forwardElement);
		pairedElement.addCollectionElement(reverseElement);
		description.addDatasetElement(pairedElement);
	}
	try {
		return historiesClient.createDatasetCollection(history.getId(), description);
	} catch (RuntimeException e) {
		// Wrap blend4j client failures in the project's checked exception type.
		throw new ExecutionManagerException("Could not construct dataset collection", e);
	}
}
use of com.github.jmchilton.blend4j.galaxy.beans.Dataset in project irida by phac-nml.
the class GalaxyWorkflowsIT method testExecuteWorkflow.
/**
 * Tests executing a single workflow in Galaxy and verifies the resulting
 * history, outputs, and workflow status.
 * @throws ExecutionManagerException
 */
@Test
public void testExecuteWorkflow() throws ExecutionManagerException {
	String singleInputWorkflowId = localGalaxy.getSingleInputWorkflowId();
	String inputLabel = localGalaxy.getSingleInputWorkflowLabel();
	WorkflowOutputs outputs = runSingleFileWorkflow(dataFile1, FILE_TYPE, singleInputWorkflowId, inputLabel);
	assertNotNull(outputs);
	assertNotNull(outputs.getHistoryId());
	// The history created for the workflow run should be retrievable.
	HistoryDetails details = historiesClient.showHistory(outputs.getHistoryId());
	assertNotNull(details);
	// The run should report at least one output dataset id.
	assertNotNull(outputs.getOutputIds());
	assertTrue(outputs.getOutputIds().size() > 0);
	// Every reported output id should resolve to an actual dataset.
	for (String outputDatasetId : outputs.getOutputIds()) {
		assertNotNull(historiesClient.showDataset(outputs.getHistoryId(), outputDatasetId));
	}
	// The workflow status should report a completion proportion in [0, 1].
	GalaxyWorkflowStatus status = galaxyHistory.getStatusForHistory(outputs.getHistoryId());
	float proportionComplete = status.getProportionComplete();
	assertTrue(0.0f <= proportionComplete && proportionComplete <= 1.0f);
}
use of com.github.jmchilton.blend4j.galaxy.beans.Dataset in project irida by phac-nml.
the class GalaxyWorkflowsIT method runSingleFileTabularWorkflow.
/**
 * Runs a test workflow on a single tabular input file, overriding one tool
 * parameter.
 *
 * @param workflowId
 *            The id of the workflow to run.
 * @param workflowInputLabel
 *            The label of the input for the workflow.
 * @param history
 *            The history to run the workflow in.
 * @param inputFile
 *            The file to run the workflow on.
 * @param toolName
 *            The toolName of a parameter to override.
 * @param toolParameter
 *            The overridden tool parameter.
 * @return A {@link WorkflowOutputs} for this workflow.
 * @throws ExecutionManagerException
 */
private WorkflowOutputs runSingleFileTabularWorkflow(String workflowId, String workflowInputLabel, History history, Path inputFile, String toolName, ToolParameter toolParameter) throws ExecutionManagerException {
	checkArgument(Files.exists(inputFile), "inputFile " + inputFile + " does not exist");
	WorkflowDetails details = workflowsClient.showWorkflow(workflowId);
	// Upload the input file into the target history as a tabular dataset.
	Dataset uploadedDataset = fileToHistory(inputFile, "tabular", history.getId());
	assertNotNull(uploadedDataset);
	String inputId = galaxyWorkflowService.getWorkflowInputId(details, workflowInputLabel);
	// Wire the uploaded dataset into the workflow input and apply the tool override.
	WorkflowInputs workflowInputs = new WorkflowInputs();
	workflowInputs.setDestination(new WorkflowInputs.ExistingHistory(history.getId()));
	workflowInputs.setWorkflowId(details.getId());
	workflowInputs.setInput(inputId, new WorkflowInputs.WorkflowInput(uploadedDataset.getId(), WorkflowInputs.InputSourceType.HDA));
	workflowInputs.setToolParameter(toolName, toolParameter);
	// Execute the workflow and report where its outputs will land.
	WorkflowOutputs workflowOutputs = workflowsClient.runWorkflow(workflowInputs);
	logger.debug("Running workflow in history " + workflowOutputs.getHistoryId());
	return workflowOutputs;
}
Aggregations