Example usage of com.github.jmchilton.blend4j.galaxy.beans.collection.response.CollectionResponse in project irida by phac-nml, taken from the class GalaxyHistoriesServiceIT, method testConstructCollectionSuccess:
/**
 * Tests out successfully constructing a collection of datasets.
 *
 * @throws ExecutionManagerException
 *             If an error occurred interacting with Galaxy.
 */
@Test
public void testConstructCollectionSuccess() throws ExecutionManagerException {
	History history = galaxyHistory.newHistoryForWorkflow();

	// Upload the two input files into the new history.
	Dataset uploadedA = galaxyHistory.fileToHistory(dataFile, FILE_TYPE, history);
	Dataset uploadedB = galaxyHistory.fileToHistory(dataFile2, FILE_TYPE, history);
	assertNotNull(uploadedA);
	assertNotNull(uploadedB);

	// Describe a flat list collection containing both uploaded datasets.
	CollectionDescription collectionDescription = new CollectionDescription();
	collectionDescription.setName("collection");
	collectionDescription.setCollectionType(DatasetCollectionType.LIST.toString());
	for (Dataset uploaded : new Dataset[] { uploadedA, uploadedB }) {
		HistoryDatasetElement element = new HistoryDatasetElement();
		element.setId(uploaded.getId());
		element.setName(uploaded.getName());
		collectionDescription.addDatasetElement(element);
	}

	CollectionResponse response = galaxyHistory.constructCollection(collectionDescription, history);
	assertNotNull(response);
	assertEquals(DatasetCollectionType.LIST.toString(), response.getCollectionType());
	assertEquals(history.getId(), response.getHistoryId());
	assertEquals(2, response.getElements().size());
}
Example usage of com.github.jmchilton.blend4j.galaxy.beans.collection.response.CollectionResponse in project irida by phac-nml, taken from the class GalaxyWorkflowsIT, method runSingleCollectionWorkflow:
/**
 * Starts the execution of a workflow with lists of paired-end fastq files and the given workflow id.
 *
 * @param inputFilesForward A list of forward read fastq files to start the workflow with.
 * @param inputFilesReverse A list of reverse read fastq files to start the workflow with.
 * @param inputFileType The file type of the input files.
 * @param workflowId The id of the workflow to start.
 * @param workflowInputLabel The label of a workflow input in Galaxy.
 * @return The {@link WorkflowOutputs} describing the running workflow.
 * @throws ExecutionManagerException If there was an error executing the workflow.
 */
private WorkflowOutputs runSingleCollectionWorkflow(List<Path> inputFilesForward, List<Path> inputFilesReverse,
		InputFileType inputFileType, String workflowId, String workflowInputLabel) throws ExecutionManagerException {
	checkNotNull(inputFilesForward, "inputFilesForward is null");
	checkNotNull(inputFilesReverse, "inputFilesReverse is null");
	// Paired-end data requires a forward/reverse file for every sample.
	checkArgument(inputFilesForward.size() == inputFilesReverse.size(), "inputFiles have different number of elements");
	checkNotNull(inputFileType, "inputFileType is null");
	// Fixed: workflowId was the only parameter not null-checked, but is dereferenced below.
	checkNotNull(workflowId, "workflowId is null");
	checkNotNull(workflowInputLabel, "workflowInputLabel is null");

	for (Path file : inputFilesForward) {
		// Fixed: message previously said "inputFileForward", inconsistent with the parameter name.
		checkArgument(Files.exists(file), "inputFilesForward " + file + " does not exist");
	}
	for (Path file : inputFilesReverse) {
		checkArgument(Files.exists(file), "inputFilesReverse " + file + " does not exist");
	}

	History workflowHistory = galaxyHistory.newHistoryForWorkflow();
	WorkflowDetails workflowDetails = workflowsClient.showWorkflow(workflowId);

	// upload datasets to history
	List<Dataset> inputDatasetsForward = uploadFilesListToHistory(inputFilesForward, inputFileType, workflowHistory);
	List<Dataset> inputDatasetsReverse = uploadFilesListToHistory(inputFilesReverse, inputFileType, workflowHistory);
	assertEquals(inputFilesForward.size(), inputDatasetsForward.size());
	assertEquals(inputDatasetsForward.size(), inputDatasetsReverse.size());

	// construct a paired dataset collection from the uploaded datasets
	CollectionResponse collection = constructPairedFileCollection(inputDatasetsForward, inputDatasetsReverse,
			workflowHistory);
	logger.debug("Constructed dataset collection: id=" + collection.getId() + ", " + collection.getName());

	// wire the collection to the named workflow input (HDCA = history dataset collection association)
	String workflowInputId = galaxyWorkflowService.getWorkflowInputId(workflowDetails, workflowInputLabel);
	WorkflowInputs inputs = new WorkflowInputs();
	inputs.setDestination(new WorkflowInputs.ExistingHistory(workflowHistory.getId()));
	inputs.setWorkflowId(workflowDetails.getId());
	inputs.setInput(workflowInputId,
			new WorkflowInputs.WorkflowInput(collection.getId(), WorkflowInputs.InputSourceType.HDCA));

	// execute workflow
	WorkflowOutputs output = workflowsClient.runWorkflow(inputs);
	logger.debug("Running workflow in history " + output.getHistoryId());

	return output;
}
Example usage of com.github.jmchilton.blend4j.galaxy.beans.collection.response.CollectionResponse in project irida by phac-nml, taken from the class GalaxyHistoriesServiceTest, method testConstructCollectionSuccess:
/**
 * Tests successful construction of a dataset collection.
 *
 * @throws ExecutionManagerException
 *             If an error occurred constructing the collection.
 */
@Test
public void testConstructCollectionSuccess() throws ExecutionManagerException {
	History history = new History();
	history.setId(HISTORY_ID);

	HistoryDatasetElement element = new HistoryDatasetElement();
	element.setId(DATA_ID);

	CollectionDescription collectionDescription = new CollectionDescription();
	collectionDescription.addDatasetElement(element);

	// Stub the Galaxy client so any description against this history yields our response.
	CollectionResponse expectedResponse = new CollectionResponse();
	when(historiesClient.createDatasetCollection(eq(HISTORY_ID), any(CollectionDescription.class)))
			.thenReturn(expectedResponse);

	assertEquals(expectedResponse, galaxyHistory.constructCollection(collectionDescription, history));
}
Example usage of com.github.jmchilton.blend4j.galaxy.beans.collection.response.CollectionResponse in project irida by phac-nml, taken from the class AnalysisWorkspaceServiceGalaxyTest, method setup:
/**
 * Sets up mocks and fixture objects shared by all tests in this class.
 *
 * @throws IOException
 * @throws GalaxyDatasetException
 * @throws UploadException
 */
@Before
public void setup() throws IOException, UploadException, GalaxyDatasetException {
MockitoAnnotations.initMocks(this);
// Temporary fastq files backing the sequencing objects used by the tests.
sFileA = new SequenceFile(createTempFile("fileA", "fastq"));
sFileB = new SequenceFile(createTempFile("fileB", "fastq"));
sFileC = new SequenceFile(createTempFile("fileC", "fastq"));
// One single-end object per file; B and C also form one paired-end object.
sObjA = new SingleEndSequenceFile(sFileA);
sObjB = new SingleEndSequenceFile(sFileB);
sObjC = new SingleEndSequenceFile(sFileC);
sequenceFilePair = new SequenceFilePair(sFileB, sFileC);
singleEndSequenceFile = sObjA;
// Samples that own the sequencing objects in the maps below.
Sample sampleA = new Sample();
sampleA.setSampleName("SampleA");
Sample sampleB = new Sample();
sampleB.setSampleName("SampleB");
Sample sampleC = new Sample();
sampleC.setSampleName("SampleC");
sampleSingleSequenceFileMap = ImmutableMap.of(sampleA, singleEndSequenceFile);
sampleSequenceFilePairMap = ImmutableMap.of(sampleB, sequenceFilePair);
// Same pair keyed under SampleA — presumably used to test sample/pair mismatch cases; verify against tests.
sampleSequenceFilePairMapSampleA = ImmutableMap.of(sampleA, sequenceFilePair);
refFile = createTempFile("reference", "fasta");
referenceFile = new ReferenceFile(refFile);
inputFiles = new HashSet<>();
inputFiles.addAll(Arrays.asList(sObjA, sObjB, sObjC));
// Submission combining all input files with the reference file.
submission = AnalysisSubmission.builder(workflowId).name("my analysis").inputFiles(inputFiles).referenceFile(referenceFile).build();
// Galaxy-side fixture objects with fixed ids used in assertions.
workflowHistory = new History();
workflowHistory.setId(HISTORY_ID);
workflowLibrary = new Library();
workflowLibrary.setId(LIBRARY_ID);
workflowDetails = new WorkflowDetails();
workflowDetails.setId(WORKFLOW_ID);
// Class under test, wired with all mocked collaborators.
workflowPreparation = new AnalysisWorkspaceServiceGalaxy(galaxyHistoriesService, galaxyWorkflowService, galaxyLibrariesService, iridaWorkflowsService, analysisCollectionServiceGalaxy, analysisProvenanceServiceGalaxy, analysisParameterServiceGalaxy, sequencingObjectService);
// Expected output datasets and collection responses returned by mocks.
output1Dataset = new Dataset();
output1Dataset.setId("1");
output1Dataset.setName("output1.txt");
output2Dataset = new Dataset();
output2Dataset.setId("2");
output2Dataset.setName("output2.txt");
collectionResponseSingle = new CollectionResponse();
collectionResponseSingle.setId(COLLECTION_SINGLE_ID);
collectionResponsePaired = new CollectionResponse();
collectionResponsePaired.setId(COLLECTION_PAIRED_ID);
singleInputFiles = Sets.newHashSet(singleEndSequenceFile);
pairedInputFiles = Sets.newHashSet(sequenceFilePair);
}
Example usage of com.github.jmchilton.blend4j.galaxy.beans.collection.response.CollectionResponse in project irida by phac-nml, taken from the class AnalysisCollectionServiceGalaxyIT, method testUploadSequenceFilesPairedSuccess:
/**
 * Tests successfully uploading a paired-end sequence file to Galaxy and
 * constructing a collection.
 *
 * @throws ExecutionManagerException
 */
@Test
@WithMockUser(username = "aaron", roles = "ADMIN")
public void testUploadSequenceFilesPairedSuccess() throws ExecutionManagerException {
// Create a fresh Galaxy history and library to upload into.
History history = new History();
history.setName("testUploadSequenceFilesPaired");
HistoriesClient historiesClient = localGalaxy.getGalaxyInstanceAdmin().getHistoriesClient();
LibrariesClient librariesClient = localGalaxy.getGalaxyInstanceAdmin().getLibrariesClient();
History createdHistory = historiesClient.create(history);
Library library = new Library();
library.setName("testUploadSequenceFilesPaired");
Library createdLibrary = librariesClient.createLibrary(library);
// Persist one paired-end sample in the database and map it to its sample.
Set<SequenceFilePair> sequenceFiles = Sets.newHashSet(databaseSetupGalaxyITService.setupSampleSequenceFileInDatabase(1L, pairSequenceFiles1A, pairSequenceFiles2A));
Map<Sample, IridaSequenceFilePair> sampleSequenceFilePairs = new HashMap<>(sequencingObjectService.getUniqueSamplesForSequencingObjects(sequenceFiles));
Sample sample1 = sampleRepository.findOne(1L);
// Method under test: upload the paired files and build the dataset collection.
CollectionResponse collectionResponse = analysisCollectionServiceGalaxy.uploadSequenceFilesPaired(sampleSequenceFilePairs, createdHistory, createdLibrary);
// verify correct files have been uploaded (2 files + 1 collection = 3 history items)
List<HistoryContents> historyContents = historiesClient.showHistoryContents(createdHistory.getId());
assertEquals("history does not have correct number of files", 3, historyContents.size());
Map<String, HistoryContents> contentsMap = historyContentsAsMap(historyContents);
assertTrue("the history should have a sequence file with name " + sequenceFilePathA.toFile().getName(), contentsMap.containsKey(sequenceFilePathA.toFile().getName()));
assertTrue("the history should have a file with name " + sequenceFilePath2A.toFile().getName(), contentsMap.containsKey(sequenceFilePath2A.toFile().getName()));
assertTrue("the history should have a dataset collection with name " + INPUTS_PAIRED_NAME, contentsMap.containsKey(INPUTS_PAIRED_NAME));
// verify correct collection has been created: a list-of-pairs with one element named after the sample
assertEquals("invalid type of dataset collection created", DatasetCollectionType.LIST_PAIRED.toString(), collectionResponse.getCollectionType());
List<CollectionElementResponse> collectionElements = collectionResponse.getElements();
assertEquals("invalid number of elements in the dataset collection", 1, collectionElements.size());
Map<String, CollectionElementResponse> collectionElementsMap = collectionElementsAsMap(collectionElements);
assertTrue("the dataset collection element should have name " + sample1.getSampleName(), collectionElementsMap.containsKey(sample1.getSampleName()));
CollectionElementResponse sample1Response = collectionElementsMap.get(sample1.getSampleName());
// verify collection has 2 files (paired end data), nested as a "paired" sub-collection
ElementResponse subElements = sample1Response.getResponseElement();
assertEquals("invalid class for sub-element in dataset collection", CollectionResponse.class, subElements.getClass());
CollectionResponse subElementsCollection = (CollectionResponse) subElements;
assertEquals("invalid type for sub-element in dataset collection", DatasetCollectionType.PAIRED.toString(), subElementsCollection.getCollectionType());
List<CollectionElementResponse> subCollectionElements = subElementsCollection.getElements();
assertEquals("invalid number of files for paired dataset collection element", 2, subCollectionElements.size());
Map<String, CollectionElementResponse> subCollectionElementsMap = collectionElementsAsMap(subCollectionElements);
assertTrue("dataset collection should have a sub-element with name " + FORWARD_NAME, subCollectionElementsMap.containsKey(FORWARD_NAME));
assertTrue("dataset collection should have a sub-element with name " + REVERSE_NAME, subCollectionElementsMap.containsKey(REVERSE_NAME));
// verify paired-end files are correct type in collection (history datasets, not nested collections)
CollectionElementResponse sequenceFile1 = subCollectionElementsMap.get(FORWARD_NAME);
CollectionElementResponse sequenceFile2 = subCollectionElementsMap.get(REVERSE_NAME);
assertEquals("the " + FORWARD_NAME + " sub-element should be a history dataset", HISTORY_DATASET_NAME, sequenceFile1.getElementType());
assertEquals("the " + REVERSE_NAME + " sub-element should be a history dataset", HISTORY_DATASET_NAME, sequenceFile2.getElementType());
// verify paired-end files are in correct order in collection (forward/reverse names match the source files)
ElementResponse sequenceFile1Response = sequenceFile1.getResponseElement();
assertEquals("the " + FORWARD_NAME + " element is not of the correct type", Dataset.class, sequenceFile1Response.getClass());
ElementResponse sequenceFile2Response = sequenceFile2.getResponseElement();
assertEquals("the " + REVERSE_NAME + " element is not of the correct type", Dataset.class, sequenceFile2Response.getClass());
Dataset sequenceFile1Dataset = (Dataset) sequenceFile1Response;
assertEquals("forward file in Galaxy is named incorrectly", sequenceFilePathA.getFileName().toString(), sequenceFile1Dataset.getName());
Dataset sequenceFile2Dataset = (Dataset) sequenceFile2Response;
assertEquals("reverse file in Galaxy is named incorrectly", sequenceFilePath2A.getFileName().toString(), sequenceFile2Dataset.getName());
}
End of aggregated CollectionResponse usage examples.