Use of com.github.jmchilton.blend4j.galaxy.beans.HistoryContents in project irida by phac-nml.
The class AnalysisCollectionServiceGalaxyIT, method testUploadSequenceFilesPairedSuccess.
/**
* Tests successfully uploading a set of paired-end sequence files to Galaxy and
* constructing a dataset collection.
*
* @throws ExecutionManagerException
*/
@Test
@WithMockUser(username = "aaron", roles = "ADMIN")
public void testUploadSequenceFilesPairedSuccess() throws ExecutionManagerException {
History history = new History();
history.setName("testUploadSequenceFilesPaired");
HistoriesClient historiesClient = localGalaxy.getGalaxyInstanceAdmin().getHistoriesClient();
LibrariesClient librariesClient = localGalaxy.getGalaxyInstanceAdmin().getLibrariesClient();
History createdHistory = historiesClient.create(history);
Library library = new Library();
library.setName("testUploadSequenceFilesPaired");
Library createdLibrary = librariesClient.createLibrary(library);
Set<SequenceFilePair> sequenceFiles = Sets.newHashSet(databaseSetupGalaxyITService.setupSampleSequenceFileInDatabase(1L, pairSequenceFiles1A, pairSequenceFiles2A));
Map<Sample, IridaSequenceFilePair> sampleSequenceFilePairs = new HashMap<>(sequencingObjectService.getUniqueSamplesForSequencingObjects(sequenceFiles));
Sample sample1 = sampleRepository.findOne(1L);
CollectionResponse collectionResponse = analysisCollectionServiceGalaxy.uploadSequenceFilesPaired(sampleSequenceFilePairs, createdHistory, createdLibrary);
// verify correct files have been uploaded
List<HistoryContents> historyContents = historiesClient.showHistoryContents(createdHistory.getId());
assertEquals("history does not have correct number of files", 3, historyContents.size());
Map<String, HistoryContents> contentsMap = historyContentsAsMap(historyContents);
assertTrue("the history should have a sequence file with name " + sequenceFilePathA.toFile().getName(), contentsMap.containsKey(sequenceFilePathA.toFile().getName()));
assertTrue("the history should have a file with name " + sequenceFilePath2A.toFile().getName(), contentsMap.containsKey(sequenceFilePath2A.toFile().getName()));
assertTrue("the history should have a dataset collection with name " + INPUTS_PAIRED_NAME, contentsMap.containsKey(INPUTS_PAIRED_NAME));
// verify correct collection has been created
assertEquals("invalid type of dataset collection created", DatasetCollectionType.LIST_PAIRED.toString(), collectionResponse.getCollectionType());
List<CollectionElementResponse> collectionElements = collectionResponse.getElements();
assertEquals("invalid number of elements in the dataset collection", 1, collectionElements.size());
Map<String, CollectionElementResponse> collectionElementsMap = collectionElementsAsMap(collectionElements);
assertTrue("the dataset collection element should have name " + sample1.getSampleName(), collectionElementsMap.containsKey(sample1.getSampleName()));
CollectionElementResponse sample1Response = collectionElementsMap.get(sample1.getSampleName());
// verify collection has 2 files (paired end data)
ElementResponse subElements = sample1Response.getResponseElement();
assertEquals("invalid class for sub-element in dataset collection", CollectionResponse.class, subElements.getClass());
CollectionResponse subElementsCollection = (CollectionResponse) subElements;
assertEquals("invalid type for sub-element in dataset collection", DatasetCollectionType.PAIRED.toString(), subElementsCollection.getCollectionType());
List<CollectionElementResponse> subCollectionElements = subElementsCollection.getElements();
assertEquals("invalid number of files for paired dataset collection element", 2, subCollectionElements.size());
Map<String, CollectionElementResponse> subCollectionElementsMap = collectionElementsAsMap(subCollectionElements);
assertTrue("dataset collection should have a sub-element with name " + FORWARD_NAME, subCollectionElementsMap.containsKey(FORWARD_NAME));
assertTrue("dataset collection should have a sub-element with name " + REVERSE_NAME, subCollectionElementsMap.containsKey(REVERSE_NAME));
// verify paired-end files are correct type in collection
CollectionElementResponse sequenceFile1 = subCollectionElementsMap.get(FORWARD_NAME);
CollectionElementResponse sequenceFile2 = subCollectionElementsMap.get(REVERSE_NAME);
assertEquals("the " + FORWARD_NAME + " sub-element should be a history dataset", HISTORY_DATASET_NAME, sequenceFile1.getElementType());
assertEquals("the " + REVERSE_NAME + " sub-element should be a history dataset", HISTORY_DATASET_NAME, sequenceFile2.getElementType());
// verify paired-end files are in correct order in collection
ElementResponse sequenceFile1Response = sequenceFile1.getResponseElement();
assertEquals("the " + FORWARD_NAME + " element is not of the correct type", Dataset.class, sequenceFile1Response.getClass());
ElementResponse sequenceFile2Response = sequenceFile2.getResponseElement();
assertEquals("the " + REVERSE_NAME + " element is not of the correct type", Dataset.class, sequenceFile2Response.getClass());
Dataset sequenceFile1Dataset = (Dataset) sequenceFile1Response;
assertEquals("forward file in Galaxy is named incorrectly", sequenceFilePathA.getFileName().toString(), sequenceFile1Dataset.getName());
Dataset sequenceFile2Dataset = (Dataset) sequenceFile2Response;
assertEquals("reverse file in Galaxy is named incorrectly", sequenceFilePath2A.getFileName().toString(), sequenceFile2Dataset.getName());
}
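The lookup helpers used throughout these assertions, historyContentsAsMap and collectionElementsAsMap, are not shown on this page. A minimal sketch of what they might look like follows, assuming HistoryContents exposes getName() and CollectionElementResponse exposes getElementIdentifier(), since those are the keys the assertions above query by:
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.stream.Collectors;
// (blend4j bean imports are assumed to match the test class above.)

// Index history contents by dataset/collection name so assertions can look them up directly.
private Map<String, HistoryContents> historyContentsAsMap(List<HistoryContents> historyContents) {
	return historyContents.stream()
			.collect(Collectors.toMap(HistoryContents::getName, Function.identity()));
}

// Index collection elements by their element identifier (the sample name in these tests).
private Map<String, CollectionElementResponse> collectionElementsAsMap(List<CollectionElementResponse> elements) {
	return elements.stream()
			.collect(Collectors.toMap(CollectionElementResponse::getElementIdentifier, Function.identity()));
}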
Use of com.github.jmchilton.blend4j.galaxy.beans.HistoryContents in project irida by phac-nml.
The class AnalysisCollectionServiceGalaxyIT, method testUploadSequenceFilesSingleSuccess.
/**
* Tests successfully uploading a single-end sequence file to Galaxy and
* constructing a collection.
*
* @throws ExecutionManagerException
*/
@Test
@WithMockUser(username = "aaron", roles = "ADMIN")
public void testUploadSequenceFilesSingleSuccess() throws ExecutionManagerException {
History history = new History();
history.setName("testUploadSequenceFilesSingleSuccess");
HistoriesClient historiesClient = localGalaxy.getGalaxyInstanceAdmin().getHistoriesClient();
LibrariesClient librariesClient = localGalaxy.getGalaxyInstanceAdmin().getLibrariesClient();
History createdHistory = historiesClient.create(history);
Library library = new Library();
library.setName("testUploadSequenceFilesSingleSuccess");
Library createdLibrary = librariesClient.createLibrary(library);
Set<SingleEndSequenceFile> sequenceFiles = Sets.newHashSet(databaseSetupGalaxyITService.setupSequencingObjectInDatabase(1L, sequenceFilePathA));
Map<Sample, IridaSingleEndSequenceFile> sampleSequenceFiles = new HashMap<>(sequencingObjectService.getUniqueSamplesForSequencingObjects(sequenceFiles));
Sample sample1 = sampleRepository.findOne(1L);
CollectionResponse collectionResponse = analysisCollectionServiceGalaxy.uploadSequenceFilesSingleEnd(sampleSequenceFiles, createdHistory, createdLibrary);
// verify correct files have been uploaded
List<HistoryContents> historyContents = historiesClient.showHistoryContents(createdHistory.getId());
assertEquals("historyContents should have size 2", 2, historyContents.size());
Map<String, HistoryContents> contentsMap = historyContentsAsMap(historyContents);
assertTrue("sequenceFile should have been uploaded to history", contentsMap.containsKey(sequenceFilePathA.toFile().getName()));
assertTrue("dataset collection with name " + INPUTS_SINGLE_NAME + " should have been created in history", contentsMap.containsKey(INPUTS_SINGLE_NAME));
// verify correct collection has been created
assertEquals("constructed dataset collection should have been " + DatasetCollectionType.LIST + " but is instead " + collectionResponse.getCollectionType(), DatasetCollectionType.LIST.toString(), collectionResponse.getCollectionType());
List<CollectionElementResponse> collectionElements = collectionResponse.getElements();
assertEquals("dataset collection should have only 1 element", 1, collectionElements.size());
Map<String, CollectionElementResponse> collectionElementsMap = collectionElementsAsMap(collectionElements);
assertTrue("dataset collection should have an element with the name " + sample1.getSampleName(), collectionElementsMap.containsKey(sample1.getSampleName()));
CollectionElementResponse sample1Response = collectionElementsMap.get(sample1.getSampleName());
assertEquals("invalid type for dataset element", HISTORY_DATASET_NAME, sample1Response.getElementType());
}
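Once a collection has been built this way, the natural next step is to wire it into a Galaxy workflow invocation, which is essentially what the AnalysisWorkspaceServiceGalaxyIT tests below exercise through prepareAnalysisFiles. A hedged sketch of that step, assuming the blend4j version in use exposes WorkflowInputs.InputSourceType.HDCA for history dataset collections; "workflowId" and "workflowInputId" are placeholders for an imported Galaxy workflow and the id of its input step:
// Attach the uploaded collection to a workflow input, targeting the existing history.
WorkflowInputs inputs = new WorkflowInputs();
inputs.setWorkflowId(workflowId); // placeholder: id of an imported Galaxy workflow
inputs.setDestination(new WorkflowInputs.ExistingHistory(createdHistory.getId()));
inputs.setInput(workflowInputId, // placeholder: input step id from the workflow details
		new WorkflowInputs.WorkflowInput(collectionResponse.getId(), WorkflowInputs.InputSourceType.HDCA));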
Use of com.github.jmchilton.blend4j.galaxy.beans.HistoryContents in project irida by phac-nml.
The class AnalysisWorkspaceServiceGalaxyIT, method testPrepareAnalysisFilesParametersSuccessWithNoParameters.
/**
* Tests successfully preparing paired workflow input files for
* execution with no parameters set.
*
* @throws InterruptedException
* @throws ExecutionManagerException
* @throws IOException
* @throws IridaWorkflowException
*/
@Test
@WithMockUser(username = "aaron", roles = "ADMIN")
public void testPrepareAnalysisFilesParametersSuccessWithNoParameters() throws InterruptedException, ExecutionManagerException, IOException, IridaWorkflowException {
History history = new History();
history.setName("testPrepareAnalysisFilesParametersSuccessWithNoParameters");
HistoriesClient historiesClient = localGalaxy.getGalaxyInstanceAdmin().getHistoriesClient();
WorkflowsClient workflowsClient = localGalaxy.getGalaxyInstanceAdmin().getWorkflowsClient();
History createdHistory = historiesClient.create(history);
IridaWorkflow iridaWorkflow = iridaWorkflowsService.getIridaWorkflow(validWorkflowIdPairedWithParameters);
Path workflowPath = iridaWorkflow.getWorkflowStructure().getWorkflowFile();
String workflowString = new String(Files.readAllBytes(workflowPath), StandardCharsets.UTF_8);
Workflow galaxyWorkflow = workflowsClient.importWorkflow(workflowString);
AnalysisSubmission analysisSubmission = analysisExecutionGalaxyITService.setupPairSubmissionInDatabase(1L, pairSequenceFiles1A, pairSequenceFiles2A, referenceFilePath, validWorkflowIdPairedWithParameters, false);
analysisSubmission.setRemoteAnalysisId(createdHistory.getId());
analysisSubmission.setRemoteWorkflowId(galaxyWorkflow.getId());
PreparedWorkflowGalaxy preparedWorkflow = analysisWorkspaceService.prepareAnalysisFiles(analysisSubmission);
assertEquals("the response history id should match the input history id", createdHistory.getId(), preparedWorkflow.getRemoteAnalysisId());
WorkflowInputsGalaxy workflowInputsGalaxy = preparedWorkflow.getWorkflowInputs();
assertNotNull("the returned workflow inputs should not be null", workflowInputsGalaxy);
assertNotNull("the returned library id should not be null", preparedWorkflow.getRemoteDataId());
// verify correct files have been uploaded
List<HistoryContents> historyContents = historiesClient.showHistoryContents(createdHistory.getId());
assertEquals("the created history has an invalid number of elements", 4, historyContents.size());
WorkflowInputs workflowInputs = preparedWorkflow.getWorkflowInputs().getInputsObject();
assertNotNull("created workflowInputs is null", workflowInputs);
Map<String, Object> toolParameters = workflowInputs.getParameters().get("core_pipeline_outputs_paired_with_parameters");
assertNotNull("toolParameters is null", toolParameters);
String coverageMinValue = (String) toolParameters.get("coverageMin");
assertEquals("coverageMinValue should have been changed to default", "10", coverageMinValue);
assertEquals("coverageMidValue should have been changed to default", ImmutableMap.of("coverageMid", "10"), toolParameters.get("conditional"));
String coverageMaxValue = (String) toolParameters.get("coverageMax");
assertEquals("coverageMaxValue should have been changed to default", "10", coverageMaxValue);
}
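Read together, the assertions above expect the tool parameters for core_pipeline_outputs_paired_with_parameters to have been filled in with the workflow's defaults. Sketched as a single structure (using Guava's ImmutableMap, as the test does; only the asserted keys are shown):
// Expected shape of the prepared tool parameters once defaults are applied.
Map<String, Object> expectedToolParameters = ImmutableMap.<String, Object>of(
		"coverageMin", "10",
		"conditional", ImmutableMap.of("coverageMid", "10"),
		"coverageMax", "10");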
Use of com.github.jmchilton.blend4j.galaxy.beans.HistoryContents in project irida by phac-nml.
The class AnalysisWorkspaceServiceGalaxyIT, method testPrepareAnalysisFilesPairSuccess.
/**
* Tests successfully preparing paired workflow input files for
* execution.
*
* @throws InterruptedException
* @throws ExecutionManagerException
* @throws IOException
* @throws IridaWorkflowException
*/
@Test
@WithMockUser(username = "aaron", roles = "ADMIN")
public void testPrepareAnalysisFilesPairSuccess() throws InterruptedException, ExecutionManagerException, IOException, IridaWorkflowException {
History history = new History();
history.setName("testPrepareAnalysisFilesPairSuccess");
HistoriesClient historiesClient = localGalaxy.getGalaxyInstanceAdmin().getHistoriesClient();
WorkflowsClient workflowsClient = localGalaxy.getGalaxyInstanceAdmin().getWorkflowsClient();
LibrariesClient librariesClient = localGalaxy.getGalaxyInstanceAdmin().getLibrariesClient();
History createdHistory = historiesClient.create(history);
IridaWorkflow iridaWorkflow = iridaWorkflowsService.getIridaWorkflow(validWorkflowIdPaired);
Path workflowPath = iridaWorkflow.getWorkflowStructure().getWorkflowFile();
String workflowString = new String(Files.readAllBytes(workflowPath), StandardCharsets.UTF_8);
Workflow galaxyWorkflow = workflowsClient.importWorkflow(workflowString);
AnalysisSubmission analysisSubmission = analysisExecutionGalaxyITService.setupPairSubmissionInDatabase(1L, pairSequenceFiles1A, pairSequenceFiles2A, referenceFilePath, validWorkflowIdPaired, false);
analysisSubmission.setRemoteAnalysisId(createdHistory.getId());
analysisSubmission.setRemoteWorkflowId(galaxyWorkflow.getId());
PreparedWorkflowGalaxy preparedWorkflow = analysisWorkspaceService.prepareAnalysisFiles(analysisSubmission);
assertEquals("the response history id should match the input history id", createdHistory.getId(), preparedWorkflow.getRemoteAnalysisId());
WorkflowInputsGalaxy workflowInputsGalaxy = preparedWorkflow.getWorkflowInputs();
assertNotNull("the returned workflow inputs should not be null", workflowInputsGalaxy);
assertNotNull("the returned library id should not be null", preparedWorkflow.getRemoteDataId());
// verify correct library is created
List<LibraryContent> libraryContents = librariesClient.getLibraryContents(preparedWorkflow.getRemoteDataId());
Map<String, List<LibraryContent>> libraryContentsMap = libraryContents.stream().collect(Collectors.groupingBy(LibraryContent::getName));
assertFalse("the returned library should exist in Galaxy", libraryContentsMap.isEmpty());
String sequenceFile1ALibraryName = "/" + sequenceFilePathA.getFileName().toString();
String sequenceFile2ALibraryName = "/" + sequenceFilePath2A.getFileName().toString();
assertEquals("the returned library does not contain the correct number of elements", 3, libraryContentsMap.size());
assertTrue("the returned library does not contain a root folder", libraryContentsMap.containsKey("/"));
assertTrue("the returned library does not contain the correct sequence file", libraryContentsMap.containsKey(sequenceFile1ALibraryName));
assertEquals("the returned library does not contain the correct sequence file", 1, libraryContentsMap.get(sequenceFile1ALibraryName).size());
assertTrue("the returned library does not contain the correct sequence file", libraryContentsMap.containsKey(sequenceFile2ALibraryName));
assertEquals("the returned library does not contain the correct sequence file", 1, libraryContentsMap.get(sequenceFile2ALibraryName).size());
// verify correct files have been uploaded
List<HistoryContents> historyContents = historiesClient.showHistoryContents(createdHistory.getId());
assertEquals("the created history has an invalid number of elements", 4, historyContents.size());
Map<String, HistoryContents> contentsMap = historyContentsAsMap(historyContents);
assertTrue("the created history should contain the file " + sequenceFilePathA.toFile().getName(), contentsMap.containsKey(sequenceFilePathA.toFile().getName()));
assertTrue("the created history should contain the file " + sequenceFilePath2A.toFile().getName(), contentsMap.containsKey(sequenceFilePath2A.toFile().getName()));
assertTrue("the created history should contain the file " + referenceFilePath.toFile().getName(), contentsMap.containsKey(referenceFilePath.toFile().getName()));
assertTrue("the created history should contain the collection with name " + INPUTS_PAIRED_NAME, contentsMap.containsKey(INPUTS_PAIRED_NAME));
// make sure workflow inputs contains correct information
Map<String, WorkflowInput> workflowInputsMap = preparedWorkflow.getWorkflowInputs().getInputsObject().getInputs();
assertEquals("the created workflow inputs has an invalid number of elements", 2, workflowInputsMap.size());
}
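With the files staged and the two workflow inputs prepared, the natural follow-on step (not part of this test) is to launch the workflow. A minimal sketch, assuming the blend4j WorkflowsClient#runWorkflow(WorkflowInputs) call available on the client used above:
// Launch the prepared workflow; Galaxy returns the ids of the datasets it will create.
WorkflowOutputs outputs = workflowsClient.runWorkflow(preparedWorkflow.getWorkflowInputs().getInputsObject());
outputs.getOutputIds().forEach(outputId -> System.out.println("created output dataset: " + outputId));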
Use of com.github.jmchilton.blend4j.galaxy.beans.HistoryContents in project irida by phac-nml.
The class AnalysisWorkspaceServiceGalaxyIT, method testPrepareAnalysisFilesParametersSuccessIgnoreDefaultParameters.
/**
* Tests successfully preparing paired workflow input files for
* execution while ignoring default parameters.
*
* @throws InterruptedException
* @throws ExecutionManagerException
* @throws IOException
* @throws IridaWorkflowException
*/
@Test
@WithMockUser(username = "aaron", roles = "ADMIN")
public void testPrepareAnalysisFilesParametersSuccessIgnoreDefaultParameters() throws InterruptedException, ExecutionManagerException, IOException, IridaWorkflowException {
History history = new History();
history.setName("testPrepareAnalysisFilesParametersSuccessIgnoreDefaultParameters");
HistoriesClient historiesClient = localGalaxy.getGalaxyInstanceAdmin().getHistoriesClient();
WorkflowsClient workflowsClient = localGalaxy.getGalaxyInstanceAdmin().getWorkflowsClient();
History createdHistory = historiesClient.create(history);
IridaWorkflow iridaWorkflow = iridaWorkflowsService.getIridaWorkflow(validWorkflowIdPairedWithParameters);
Path workflowPath = iridaWorkflow.getWorkflowStructure().getWorkflowFile();
String workflowString = new String(Files.readAllBytes(workflowPath), StandardCharsets.UTF_8);
Workflow galaxyWorkflow = workflowsClient.importWorkflow(workflowString);
Map<String, String> parameters = ImmutableMap.of("coverage", IridaWorkflowParameter.IGNORE_DEFAULT_VALUE);
AnalysisSubmission analysisSubmission = analysisExecutionGalaxyITService.setupPairSubmissionInDatabase(1L, pairSequenceFiles1A, pairSequenceFiles2A, referenceFilePath, parameters, validWorkflowIdPairedWithParameters);
analysisSubmission.setRemoteAnalysisId(createdHistory.getId());
analysisSubmission.setRemoteWorkflowId(galaxyWorkflow.getId());
PreparedWorkflowGalaxy preparedWorkflow = analysisWorkspaceService.prepareAnalysisFiles(analysisSubmission);
assertEquals("the response history id should match the input history id", createdHistory.getId(), preparedWorkflow.getRemoteAnalysisId());
WorkflowInputsGalaxy workflowInputsGalaxy = preparedWorkflow.getWorkflowInputs();
assertNotNull("the returned workflow inputs should not be null", workflowInputsGalaxy);
assertNotNull("the returned library id should not be null", preparedWorkflow.getRemoteDataId());
// verify correct files have been uploaded
List<HistoryContents> historyContents = historiesClient.showHistoryContents(createdHistory.getId());
assertEquals("the created history has an invalid number of elements", 4, historyContents.size());
WorkflowInputs workflowInputs = preparedWorkflow.getWorkflowInputs().getInputsObject();
assertNotNull("created workflowInputs is null", workflowInputs);
Map<String, Object> toolParameters = workflowInputs.getParameters().get("core_pipeline_outputs_paired_with_parameters");
assertNull("toolParameters is not null", toolParameters);
}
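Since ignoring the default means no override entry is written for the tool at all, a closely related check (sketched here, not part of the original test) can be phrased against the parameters map itself:
// No parameter overrides should be present for this tool when defaults are ignored.
assertFalse("no parameter overrides should be set for the tool",
		workflowInputs.getParameters().containsKey("core_pipeline_outputs_paired_with_parameters"));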