Use of ca.corefacility.bioinformatics.irida.model.irida.IridaSingleEndSequenceFile in project irida by phac-nml.
From the class AnalysisCollectionServiceGalaxyIT, method testUploadSequenceFilesSingleSuccess.
/**
* Tests successfully uploading a single end sequence file to Galaxy and
* constructing a collection.
*
* @throws ExecutionManagerException
*/
@Test
@WithMockUser(username = "aaron", roles = "ADMIN")
public void testUploadSequenceFilesSingleSuccess() throws ExecutionManagerException {
    History history = new History();
    history.setName("testUploadSequenceFilesSingleSuccess");
    HistoriesClient historiesClient = localGalaxy.getGalaxyInstanceAdmin().getHistoriesClient();
    LibrariesClient librariesClient = localGalaxy.getGalaxyInstanceAdmin().getLibrariesClient();
    History createdHistory = historiesClient.create(history);

    Library library = new Library();
    library.setName("testUploadSequenceFilesSingleSuccess");
    Library createdLibrary = librariesClient.createLibrary(library);

    Set<SingleEndSequenceFile> sequenceFiles = Sets
            .newHashSet(databaseSetupGalaxyITService.setupSequencingObjectInDatabase(1L, sequenceFilePathA));

    Map<Sample, IridaSingleEndSequenceFile> sampleSequenceFiles = new HashMap<>(
            sequencingObjectService.getUniqueSamplesForSequencingObjects(sequenceFiles));

    Sample sample1 = sampleRepository.findOne(1L);

    CollectionResponse collectionResponse = analysisCollectionServiceGalaxy
            .uploadSequenceFilesSingleEnd(sampleSequenceFiles, createdHistory, createdLibrary);

    // verify correct files have been uploaded
    List<HistoryContents> historyContents = historiesClient.showHistoryContents(createdHistory.getId());
    assertEquals("historyContents should have size 2", 2, historyContents.size());
    Map<String, HistoryContents> contentsMap = historyContentsAsMap(historyContents);
    assertTrue("sequenceFile should have been uploaded to history",
            contentsMap.containsKey(sequenceFilePathA.toFile().getName()));
    assertTrue("dataset collection with name " + INPUTS_SINGLE_NAME + " should have been created in history",
            contentsMap.containsKey(INPUTS_SINGLE_NAME));

    // verify correct collection has been created
    assertEquals(
            "constructed dataset collection should have been " + DatasetCollectionType.LIST + " but is instead "
                    + collectionResponse.getCollectionType(),
            DatasetCollectionType.LIST.toString(), collectionResponse.getCollectionType());
    List<CollectionElementResponse> collectionElements = collectionResponse.getElements();
    assertEquals("dataset collection should have only 1 element", 1, collectionElements.size());
    Map<String, CollectionElementResponse> collectionElementsMap = collectionElementsAsMap(collectionElements);
    assertTrue("dataset collection should have an element with the name " + sample1.getSampleName(),
            collectionElementsMap.containsKey(sample1.getSampleName()));
    CollectionElementResponse sample1Response = collectionElementsMap.get(sample1.getSampleName());
    assertEquals("invalid type for dataset element", HISTORY_DATASET_NAME, sample1Response.getElementType());
}
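
The assertions above rely on two helper methods, historyContentsAsMap and collectionElementsAsMap, that are not part of this snippet. A minimal sketch of what they could look like follows, assuming HistoryContents entries are indexed by name and CollectionElementResponse entries by their element identifier; the actual helpers in AnalysisCollectionServiceGalaxyIT may differ.

// Hypothetical sketch of the helpers used above; the real test class may differ.
private Map<String, HistoryContents> historyContentsAsMap(List<HistoryContents> historyContents) {
    Map<String, HistoryContents> contentsMap = new HashMap<>();
    for (HistoryContents content : historyContents) {
        // index each history item by its dataset name
        contentsMap.put(content.getName(), content);
    }
    return contentsMap;
}

private Map<String, CollectionElementResponse> collectionElementsAsMap(
        List<CollectionElementResponse> collectionElements) {
    Map<String, CollectionElementResponse> elementsMap = new HashMap<>();
    for (CollectionElementResponse element : collectionElements) {
        // getElementIdentifier() is assumed to return the element name set when the collection was built
        elementsMap.put(element.getElementIdentifier(), element);
    }
    return elementsMap;
}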
Use of ca.corefacility.bioinformatics.irida.model.irida.IridaSingleEndSequenceFile in project irida by phac-nml.
From the class AnalysisCollectionServiceGalaxy, method uploadSequenceFilesSingleEnd.
/**
 * Uploads a list of single-end sequence files belonging to the given samples to Galaxy.
 *
 * @param sampleSequenceFiles
 *            A map between {@link Sample} and {@link IridaSingleEndSequenceFile}.
 * @param workflowHistory
 *            The history to upload the sequence files into.
 * @param workflowLibrary
 *            A temporary library to upload files into.
 * @return A {@link CollectionResponse} for the dataset collection constructed from the given files.
 * @throws ExecutionManagerException
 *             If there was an error uploading the files.
 */
public CollectionResponse uploadSequenceFilesSingleEnd(
        Map<Sample, ? extends IridaSingleEndSequenceFile> sampleSequenceFiles, History workflowHistory,
        Library workflowLibrary) throws ExecutionManagerException {
    CollectionDescription description = new CollectionDescription();
    description.setCollectionType(DatasetCollectionType.LIST.toString());
    description.setName(COLLECTION_NAME_SINGLE);

    // map each file path back to its sample so uploaded datasets can be named after the sample
    Map<Path, Sample> samplesMap = new HashMap<>();
    for (Sample sample : sampleSequenceFiles.keySet()) {
        IridaSingleEndSequenceFile sequenceFile = sampleSequenceFiles.get(sample);
        samplesMap.put(sequenceFile.getSequenceFile().getFile(), sample);
    }

    // upload files to library and then to a history
    Set<Path> pathsToUpload = samplesMap.keySet();
    Map<Path, String> pathHistoryDatasetId = galaxyHistoriesService.filesToLibraryToHistory(pathsToUpload,
            InputFileType.FASTQ_SANGER, workflowHistory, workflowLibrary, DataStorage.LOCAL);

    // add one collection element per uploaded dataset, named after its sample
    for (Path sequenceFilePath : samplesMap.keySet()) {
        if (!pathHistoryDatasetId.containsKey(sequenceFilePath)) {
            throw new UploadException("Error, no corresponding history item found for " + sequenceFilePath);
        }

        Sample sample = samplesMap.get(sequenceFilePath);
        String datasetHistoryId = pathHistoryDatasetId.get(sequenceFilePath);

        HistoryDatasetElement datasetElement = new HistoryDatasetElement();
        datasetElement.setId(datasetHistoryId);
        datasetElement.setName(sample.getSampleName());

        description.addDatasetElement(datasetElement);
    }

    return galaxyHistoriesService.constructCollection(description, workflowHistory);
}
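
The final call hands the assembled CollectionDescription to galaxyHistoriesService.constructCollection, which asks Galaxy to build the collection inside the workflow history. As a rough sketch of the same idea expressed directly against blend4j, assuming HistoriesClient.createDatasetCollection is the underlying call that constructCollection wraps; the method name and placeholder values below are assumptions, not taken from the IRIDA code.

// Hypothetical sketch: build a one-element "list" dataset collection directly with blend4j.
// "single-end-inputs" and the parameter names are placeholders for illustration only.
private CollectionResponse buildSingleElementListCollection(HistoriesClient historiesClient,
        History workflowHistory, String datasetHistoryId, String sampleName) {
    CollectionDescription description = new CollectionDescription();
    description.setCollectionType(DatasetCollectionType.LIST.toString());
    description.setName("single-end-inputs");

    HistoryDatasetElement element = new HistoryDatasetElement();
    element.setId(datasetHistoryId);
    element.setName(sampleName);
    description.addDatasetElement(element);

    // createDatasetCollection is assumed to be the blend4j call used behind constructCollection
    return historiesClient.createDatasetCollection(workflowHistory.getId(), description);
}

Naming each HistoryDatasetElement after its sample, as the loop above does with sample.getSampleName(), is what lets the integration test look up collection elements by sample name.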