Use of com.hartwig.pipeline.PipelineState in project pipeline5 by hartwigmedical.
The class PipelineResultsTest, method onlyWritesStagedFileWhenPipelineFailsSingleSample.
@Test
public void onlyWritesStagedFileWhenPipelineFailsSingleSample() {
    ArgumentCaptor<String> createBlobCaptor = ArgumentCaptor.forClass(String.class);
    PipelineState state = new PipelineState();
    state.add(BamMetricsOutput.builder().sample("reference").status(PipelineStatus.FAILED).build());
    victim.compose(TestInputs.referenceRunMetadata(), false, state);
    verify(outputBucket, times(1)).create(createBlobCaptor.capture(), (byte[]) any());
    assertThat(createBlobCaptor.getAllValues().get(0)).isEqualTo("reference-tag/STAGED");
}
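The tests on this page drive PipelineState only through add(...), status() and stageOutputs(). As a rough mental model, a minimal, self-contained sketch of such an accumulator is shown below; the local Status enum and StageResult interface are stand-ins, not the real pipeline5 types, and the "any failed stage fails the pipeline" rule is inferred from these tests rather than taken from the source.

import java.util.ArrayList;
import java.util.List;

// Minimal sketch of a stage-output accumulator, assuming the semantics implied by the
// tests above: outputs are added per stage, and the combined status is FAILED as soon
// as any stage failed. This is NOT the real com.hartwig.pipeline.PipelineState.
public class PipelineStateSketch {

    enum Status { SUCCESS, FAILED }

    interface StageResult {
        Status status();
    }

    private final List<StageResult> stageOutputs = new ArrayList<>();

    public void add(final StageResult output) {
        stageOutputs.add(output);
    }

    public List<StageResult> stageOutputs() {
        return stageOutputs;
    }

    public Status status() {
        // Assumption: any failed stage fails the whole pipeline.
        return stageOutputs.stream().anyMatch(o -> o.status() == Status.FAILED)
                ? Status.FAILED
                : Status.SUCCESS;
    }
}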
Use of com.hartwig.pipeline.PipelineState in project pipeline5 by hartwigmedical.
The class SmokeTest, method runFullPipelineAndCheckFinalStatus.
public void runFullPipelineAndCheckFinalStatus(final String inputMode, final PipelineStatus expectedStatus) throws Exception {
    PipelineMain victim = new PipelineMain();
    String version = version();
    String setName = noDots(inputMode + "-" + version);
    final String fixtureDir = "smoke_test/" + inputMode + "/";
    final String randomRunId = noDots(RandomStringUtils.random(5, true, false));
    final ImmutableArguments.Builder builder = Arguments.defaultsBuilder(Arguments.DefaultsProfile.DEVELOPMENT.toString())
            .sampleJson(Resources.testResource(fixtureDir + "samples.json"))
            .cloudSdkPath(CLOUD_SDK_PATH)
            .setId(setName)
            .runId(randomRunId)
            .runGermlineCaller(false)
            .cleanup(true)
            .outputBucket("smoketest-pipeline-output-pilot-1")
            .context(Context.DIAGNOSTIC);
    final String username = System.getProperty("user.name");
    if (username.equals("root")) {
        String privateKeyPath = workingDir() + "/google-key.json";
        builder.privateKeyPath(privateKeyPath).uploadPrivateKeyPath(privateKeyPath);
    } else {
        builder.cloudSdkPath(String.format("/Users/%s/google-cloud-sdk/bin", username));
    }
    Arguments arguments = builder.build();
    Storage storage = StorageProvider.from(arguments, CredentialProvider.from(arguments).get()).get();
    cleanupBucket(inputMode, arguments.outputBucket(), storage);
    PipelineState state = victim.start(arguments);
    assertThat(state.status()).isEqualTo(expectedStatus);
    File expectedFilesResource = new File(Resources.testResource(fixtureDir + "expected_output_files"));
    List<String> expectedFiles = FileUtils.readLines(expectedFilesResource, FILE_ENCODING);
    final String outputDir = setName + "-" + randomRunId;
    List<String> actualFiles = listOutput(outputDir, arguments.outputBucket(), storage);
    assertThat(actualFiles).containsOnlyElementsOf(expectedFiles);
    cleanupBucket(outputDir, arguments.outputBucket(), storage);
}
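The smoke test relies on a noDots helper to turn the input mode plus version into identifiers that are safe to use as set and run names. Its behaviour can only be inferred from the call sites here; one plausible implementation is sketched below, and the example value is hypothetical.

// Plausible sketch of the noDots helper the smoke test relies on (the real pipeline5
// helper may differ): strip dots and lower-case so set/run ids are safe in bucket paths.
public class NoDotsSketch {

    static String noDots(final String raw) {
        return raw.replace(".", "").toLowerCase();
    }

    public static void main(String[] args) {
        // Hypothetical example: "colo829-5.31" becomes "colo829-531".
        System.out.println(noDots("colo829-5.31"));
    }
}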
Use of com.hartwig.pipeline.PipelineState in project pipeline5 by hartwigmedical.
The class StagedOutputPublisher, method publish.
public void publish(final PipelineState state, final SomaticRunMetadata metadata) {
    if (state.status() != PipelineStatus.FAILED && run.isPresent()) {
        List<AddDatatype> addDatatypes =
                state.stageOutputs().stream().map(StageOutput::datatypes).flatMap(List::stream).collect(Collectors.toList());
        SampleSet set = setResolver.resolve(metadata.set(), useOnlyDBSets);
        Optional<String> tumorSampleName = metadata.maybeTumor().map(SingleSampleRunMetadata::sampleName);
        Optional<String> refSampleName = metadata.maybeReference().map(SingleSampleRunMetadata::sampleName);
        ImmutableAnalysis.Builder alignedReadsAnalysis = eventBuilder(Type.ALIGNMENT);
        ImmutableAnalysis.Builder somaticAnalysis = eventBuilder(Type.SOMATIC);
        ImmutableAnalysis.Builder germlineAnalysis = eventBuilder(Type.GERMLINE);
        OutputIterator.from(blob -> {
            Optional<AddDatatype> dataType = addDatatypes.stream().filter(d -> blob.getName().endsWith(d.path())).findFirst();
            Blob blobWithMd5 = sourceBucket.get(blob.getName());
            if (isSecondary(blobWithMd5)) {
                alignedReadsAnalysis.addOutput(createBlob(tumorSampleName, refSampleName, dataType, blobWithMd5));
            } else {
                if (isGermline(blobWithMd5)) {
                    germlineAnalysis.addOutput(createBlob(tumorSampleName, refSampleName, dataType, blobWithMd5));
                } else if (notSecondary(blobWithMd5)) {
                    somaticAnalysis.addOutput(createBlob(tumorSampleName, refSampleName, dataType, blobWithMd5));
                }
            }
        }, sourceBucket).iterate(metadata);
        publish(PipelineComplete.builder()
                .pipeline(ImmutablePipeline.builder()
                        .sample(tumorSampleName.orElseGet(() -> refSampleName.orElseThrow()))
                        .bucket(sourceBucket.getName())
                        .runId(run.get().getId())
                        .setId(set.getId())
                        .context(context)
                        .addAnalyses(alignedReadsAnalysis.build(), somaticAnalysis.build(), germlineAnalysis.build())
                        .version(Versions.pipelineMajorMinorVersion())
                        .build())
                .build());
    }
}
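The OutputIterator callback above attributes each bucket blob to a datatype by checking whether the blob name ends with the datatype's relative path. The standalone snippet below illustrates just that suffix match with hypothetical paths; it does not use the real AddDatatype or Blob types.

import java.util.List;
import java.util.Optional;

// Standalone illustration of the suffix matching used in publish() above, with
// hypothetical datatype paths and blob name; not the real pipeline5 types.
public class DatatypeMatchSketch {

    public static void main(String[] args) {
        List<String> datatypePaths = List.of(
                "reference/bam_metrics/reference.wgsmetrics",
                "purple/tumor.purple.purity.tsv");
        String blobName = "run-1/purple/tumor.purple.purity.tsv";
        Optional<String> match = datatypePaths.stream().filter(blobName::endsWith).findFirst();
        System.out.println(match.orElse("no datatype match"));   // prints the purple path
    }
}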
Use of com.hartwig.pipeline.PipelineState in project pipeline5 by hartwigmedical.
The class ResearchMetadataApiTest, method pipelineCompleteWithFile.
@NotNull
public ArgumentCaptor<PubsubMessage> pipelineCompleteWithFile(final String s, final StageOutput stageOutput) {
    PipelineState state = new PipelineState();
    state.add(stageOutput);
    ArgumentCaptor<PubsubMessage> pubsubMessageArgumentCaptor = ArgumentCaptor.forClass(PubsubMessage.class);
    SomaticRunMetadata metadata = TestInputs.defaultSomaticRunMetadata();
    when(setResolver.resolve(metadata.set(), true)).thenReturn(new SampleSet().id(SET_ID));
    Blob outputBlob = mock(Blob.class);
    when(outputBlob.getBucket()).thenReturn("bucket");
    when(outputBlob.getName()).thenReturn(s);
    when(outputBlob.getSize()).thenReturn(1L);
    when(outputBlob.getMd5()).thenReturn("md5");
    when(bucket.get(s)).thenReturn(outputBlob);
    Page<Blob> page = TestBlobs.pageOf(outputBlob);
    when(bucket.list(Storage.BlobListOption.prefix("set/"))).thenReturn(page);
    //noinspection unchecked
    when(publisher.publish(pubsubMessageArgumentCaptor.capture())).thenReturn(mock(ApiFuture.class));
    victim.complete(state, metadata);
    return pubsubMessageArgumentCaptor;
}
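A typical caller would then assert against the captured message. The sketch below is a hypothetical example of such a test; the blob name, the TestOutput stage output, the SUCCESS status constant and the assumption that the serialized event contains the blob name are all illustrative, not code taken from ResearchMetadataApiTest.

// Hypothetical caller of the helper above; names and the payload assertion are assumptions.
@Test
public void publishesBlobNameInPipelineCompleteEvent() {
    ArgumentCaptor<PubsubMessage> captor =
            pipelineCompleteWithFile("set/tumor/aligner/tumor.bam", TestOutput.builder().status(PipelineStatus.SUCCESS).build());
    assertThat(captor.getValue().getData().toStringUtf8()).contains("tumor.bam");
}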
Use of com.hartwig.pipeline.PipelineState in project pipeline5 by hartwigmedical.
The class ResearchMetadataApiTest, method doesNothingOnFailedPipeline.
@Test
public void doesNothingOnFailedPipeline() {
    PipelineState state = new PipelineState();
    state.add(TestOutput.builder().status(PipelineStatus.FAILED).build());
    victim.complete(state, TestInputs.defaultSomaticRunMetadata());
    verify(publisher, never()).publish(any());
}
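For contrast with the failed-pipeline case above, a hypothetical companion test is sketched below; it reuses the pipelineCompleteWithFile helper and assumes a PipelineStatus.SUCCESS constant and an illustrative blob name.

// Hypothetical companion test: a non-failed state should reach the publisher exactly once,
// unlike the failed state above. The blob name and SUCCESS constant are assumptions.
@Test
public void publishesOnSuccessfulPipeline() {
    pipelineCompleteWithFile("set/reference/aligner/reference.bam", TestOutput.builder().status(PipelineStatus.SUCCESS).build());
    verify(publisher, times(1)).publish(any(PubsubMessage.class));
}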