Use of com.github.jmchilton.blend4j.galaxy.beans.HistoryContentsProvenance in project irida by phac-nml.
The class GalaxyWorkflowsIT, method testExecuteWorkflowChangeToolParameter.
/**
 * Tests executing a single workflow in Galaxy and changing a single tool
 * parameter, then verifying the overridden parameter value is recorded in
 * the output dataset's provenance.
 *
 * @throws ExecutionManagerException
 *             if communicating with Galaxy fails.
 */
@Test
public void testExecuteWorkflowChangeToolParameter() throws ExecutionManagerException {
	final String grepToolId = "Grep1";
	final String workflowId = localGalaxy.getSingleInputWorkflowId();
	final String inputLabel = localGalaxy.getSingleInputWorkflowLabel();

	// override the "pattern" parameter on the Grep1 tool for this run
	final Map<String, ToolParameter> toolParameters = ImmutableMap.of(grepToolId, new ToolParameter("pattern", "^#"));

	final WorkflowOutputs workflowOutput = runSingleFileWorkflow(dataFile1, FILE_TYPE, workflowId, inputLabel, toolParameters);
	assertNotNull("workflowOutput should not be null", workflowOutput);
	assertNotNull("workflowOutput history id should not be null", workflowOutput.getHistoryId());

	// the history created for the workflow run must exist in Galaxy
	final HistoryDetails historyDetails = historiesClient.showHistory(workflowOutput.getHistoryId());
	assertNotNull("historyDetails for the history for the workflow should not be null", historyDetails);

	// the run must have produced at least one output dataset id
	assertNotNull("outputIds for the workflow should not be null", workflowOutput.getOutputIds());
	assertTrue("there should exist output dataset ids for the workflow", !workflowOutput.getOutputIds().isEmpty());

	for (final String outputId : workflowOutput.getOutputIds()) {
		// each output dataset must be retrievable from the history
		final Dataset outputDataset = historiesClient.showDataset(workflowOutput.getHistoryId(), outputId);
		assertNotNull("the output dataset should exist", outputDataset);

		final HistoryContentsProvenance provenance = historiesClient.showProvenance(workflowOutput.getHistoryId(), outputDataset.getId());
		// only check the parameter on the tool whose parameter we overrode
		if (grepToolId.equals(provenance.getToolId())) {
			assertEquals("pattern parameter is correct", "\"^#\"", provenance.getParameters().get("pattern"));
		}
	}

	// the reported completion proportion must be a valid fraction [0, 1]
	final GalaxyWorkflowStatus workflowStatus = galaxyHistory.getStatusForHistory(workflowOutput.getHistoryId());
	final float proportionComplete = workflowStatus.getProportionComplete();
	assertTrue("the workflow proportion complete should be between 0 and 1",
			proportionComplete >= 0.0f && proportionComplete <= 1.0f);
}
Use of com.github.jmchilton.blend4j.galaxy.beans.HistoryContentsProvenance in project irida by phac-nml.
The class AnalysisProvenanceServiceGalaxyTest, method testBuildSingleStepToolExecutionStrangeDataStructureDoToString.
/**
 * Verifies that a provenance parameter whose value is a nested-list-style
 * string is carried through to the built {@link ToolExecution} unchanged.
 *
 * @throws ExecutionManagerException
 *             if building the tool execution fails.
 */
@Test
public void testBuildSingleStepToolExecutionStrangeDataStructureDoToString() throws ExecutionManagerException {
	final HistoryContents historyContents = new HistoryContents();
	historyContents.setName(FILENAME);

	// provenance carrying a single oddly-structured parameter value
	final HistoryContentsProvenance provenance = new HistoryContentsProvenance();
	provenance.setParameters(ImmutableMap.of("akey", "[[\"avalue\"]]"));

	final JobDetails jobDetails = new JobDetails();
	jobDetails.setCommandLine("");

	// stub out every Galaxy interaction used by the provenance service
	when(galaxyHistoriesService.showHistoryContents(any(String.class))).thenReturn(Lists.newArrayList(historyContents));
	when(galaxyHistoriesService.showProvenance(any(String.class), any(String.class))).thenReturn(provenance);
	when(toolsClient.showTool(any(String.class))).thenReturn(new Tool());
	when(jobsClient.showJob(any(String.class))).thenReturn(jobDetails);

	final ToolExecution toolExecution = provenanceService.buildToolExecutionForOutputFile(analysisSubmission(), analysisOutputFile());

	assertTrue("tool execution should have the specified parameter.",
			toolExecution.getExecutionTimeParameters().containsKey("akey"));
	assertEquals("tool execution parameter should be specified value.", "[[\"avalue\"]]",
			toolExecution.getExecutionTimeParameters().get("akey"));
}
Use of com.github.jmchilton.blend4j.galaxy.beans.HistoryContentsProvenance in project irida by phac-nml.
The class AnalysisProvenanceServiceGalaxyTest, method testCantFindTools.
/**
 * Verifies that a failure to look up tool details from Galaxy is surfaced
 * to the caller as an {@link ExecutionManagerException}.
 *
 * @throws ExecutionManagerException
 *             always; this is the expected outcome of the test.
 */
@Test(expected = ExecutionManagerException.class)
public void testCantFindTools() throws ExecutionManagerException {
	final HistoryContents historyContents = new HistoryContents();
	historyContents.setName(FILENAME);

	when(galaxyHistoriesService.showHistoryContents(any(String.class))).thenReturn(Lists.newArrayList(historyContents));
	when(galaxyHistoriesService.showProvenance(any(String.class), any(String.class))).thenReturn(new HistoryContentsProvenance());
	// simulate the tools client failing when asked for tool details
	when(toolsClient.showTool(any(String.class))).thenThrow(new RuntimeException());

	provenanceService.buildToolExecutionForOutputFile(analysisSubmission(), analysisOutputFile());
}
Use of com.github.jmchilton.blend4j.galaxy.beans.HistoryContentsProvenance in project irida by phac-nml.
The class AnalysisProvenanceServiceGalaxy, method buildToolExecutionForHistoryStep.
/**
 * Build up a complete *tree* of {@link ToolExecution} from Galaxy's history
 * contents provenance objects, recursively following predecessors within
 * the current history.
 *
 * @param toolDetails
 *            the details of the current tool to build up tool execution
 *            details for.
 * @param currentProvenance
 *            the provenance that corresponds to the tool details.
 * @param historyId
 *            the Galaxy ID we should use to extract tool execution
 *            information.
 * @return the entire tree of ToolExecutions for the tool and its
 *         provenance.
 * @throws ExecutionManagerException
 *             if we could not get the history contents provenance or the
 *             tool details for a predecessor of the current tool details or
 *             provenance.
 */
private ToolExecution buildToolExecutionForHistoryStep(final Tool toolDetails, final HistoryContentsProvenance currentProvenance, final String historyId) throws ExecutionManagerException {
	final Map<String, Set<String>> predecessors = getPredecessors(currentProvenance);
	final Map<String, Object> parameters = currentProvenance.getParameters();

	// Keep only parameters that are neither Galaxy bookkeeping keys nor
	// references to predecessor inputs. As noted in getPredecessors, input
	// keys carry a numeric suffix, so a contains() check is used rather
	// than an equality check against the predecessor key set.
	final Map<String, Object> paramValues = new HashMap<>();
	for (final Map.Entry<String, Object> parameter : parameters.entrySet()) {
		final String key = parameter.getKey();
		if (PARAMETERS_TO_IGNORE.contains(key)) {
			continue;
		}
		if (!predecessors.keySet().stream().noneMatch(key::contains)) {
			continue;
		}
		paramValues.put(key, parameter.getValue());
	}

	final String jobId = currentProvenance.getJobId();
	final JobDetails jobDetails = jobsClient.showJob(jobId);
	final Map<String, String> paramStrings = buildParamMap(paramValues);

	final Set<ToolExecution> prevSteps = new HashSet<>();
	for (final Set<String> predecessorSet : predecessors.values()) {
		// arbitrarily select one of the predecessors from the set, then
		// recurse on that predecessor:
		final String predecessorId = predecessorSet.iterator().next();
		final HistoryContentsProvenance previousProvenance = galaxyHistoriesService.showProvenance(historyId, predecessorId);
		final Tool previousToolDetails = toolsClient.showTool(previousProvenance.getToolId());
		prevSteps.add(buildToolExecutionForHistoryStep(previousToolDetails, previousProvenance, historyId));
	}

	return new ToolExecution(prevSteps, toolDetails.getName(), toolDetails.getVersion(), jobId, paramStrings,
			jobDetails.getCommandLine());
}
Use of com.github.jmchilton.blend4j.galaxy.beans.HistoryContentsProvenance in project irida by phac-nml.
The class GalaxyJobErrorsService, method createNewJobErrors.
/**
 * Get any {@link JobError} associated with an {@link AnalysisSubmission}
 *
 * @param analysisSubmission {@link AnalysisSubmission} to search for job failures
 * @return List of {@link JobError} objects associated with {@link AnalysisSubmission};
 *         empty if no datasets in the remote history are in the error state
 */
public List<JobError> createNewJobErrors(AnalysisSubmission analysisSubmission) {
	String historyId = analysisSubmission.getRemoteAnalysisId();
	HistoryDetails historyDetails = historiesClient.showHistory(historyId);

	// Map.get returns null when the state-ids map has no entry for the
	// ERROR state; guard against that so we return an empty list instead
	// of throwing a NullPointerException from the stream below.
	List<String> erroredDatasetIds = historyDetails.getStateIds().get(GalaxyWorkflowState.ERROR.toString());
	if (erroredDatasetIds == null) {
		return new ArrayList<>();
	}

	// A single failed job can produce several errored datasets; group the
	// provenance entries by job id so each failed job yields one JobError.
	List<HistoryContentsProvenance> provenances = erroredDatasetIds.stream()
			.map((datasetId) -> historiesClient.showProvenance(historyId, datasetId))
			.collect(Collectors.toList());
	Map<String, List<HistoryContentsProvenance>> jobIdProvenancesMap = provenances.stream()
			.collect(Collectors.groupingBy(HistoryContentsProvenance::getJobId));

	List<JobError> jobErrors = new ArrayList<>();
	for (Map.Entry<String, List<HistoryContentsProvenance>> entry : jobIdProvenancesMap.entrySet()) {
		String jobId = entry.getKey();
		JobDetails jobDetails = jobsClient.showJob(jobId);
		// every provenance in the group shares the same job id; any one of
		// them supplies the tool id for that job
		HistoryContentsProvenance provenance = entry.getValue().iterator().next();
		Tool tool = toolsClient.showTool(provenance.getToolId());
		jobErrors.add(new JobError(analysisSubmission, jobDetails, provenance, tool));
	}
	return jobErrors;
}
Aggregations