Use of com.hartwig.pipeline.reruns.ApiPersistedDataset in the pipeline5 project by hartwigmedical: the start method of the PipelineMain class.
public PipelineState start(final Arguments arguments) {
    LOGGER.info("Arguments are [{}]", arguments);
    Versions.printAll();
    try {
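        // Obtain Google credentials, cloud storage and the Pub/Sub publishers for turquoise and pipeline-complete events.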
        GoogleCredentials credentials = CredentialProvider.from(arguments).get();
        Storage storage = StorageProvider.from(arguments, credentials).get();
        Publisher turquoisePublisher = PublisherProvider.from(arguments, credentials).get("turquoise.events");
        Publisher pipelinePublisher = PublisherProvider.from(arguments, credentials).get(PipelineComplete.TOPIC);
        SomaticMetadataApi somaticMetadataApi = SomaticMetadataApiProvider.from(arguments, storage, pipelinePublisher).get();
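        // Event listeners for the reference and tumor single-sample pipelines, plus the run metadata and the resolved input mode.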
        SingleSampleEventListener referenceEventListener = new SingleSampleEventListener();
        SingleSampleEventListener tumorEventListener = new SingleSampleEventListener();
        SomaticRunMetadata somaticRunMetadata = somaticMetadataApi.get();
        InputMode mode = new ModeResolver().apply(somaticRunMetadata);
        LOGGER.info("Starting pipeline in [{}] mode", mode);
        String ini = somaticRunMetadata.isSingleSample() ? "single_sample" : arguments.shallow() ? "shallow" : "somatic";
        PipelineProperties eventSubjects = PipelineProperties.builder()
                .sample(somaticRunMetadata.maybeTumor()
                        .map(SingleSampleRunMetadata::sampleName)
                        .orElseGet(() -> somaticRunMetadata.reference().sampleName()))
                .runId(arguments.sbpApiRunId())
                .set(somaticRunMetadata.set())
                .referenceBarcode(somaticRunMetadata.maybeReference().map(SingleSampleRunMetadata::barcode))
                .tumorBarcode(somaticRunMetadata.maybeTumor().map(SingleSampleRunMetadata::barcode))
                .type(ini)
                .build();
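        // Register the run as started with the metadata API and publish a started event to turquoise (if enabled).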
        somaticMetadataApi.start();
        startedEvent(eventSubjects, turquoisePublisher, arguments.publishToTurquoise());
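        // Single-capacity queues hand metrics, flagstat and germline caller outputs from the single-sample pipelines to the somatic pipeline.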
        BlockingQueue<BamMetricsOutput> referenceBamMetricsOutputQueue = new ArrayBlockingQueue<>(1);
        BlockingQueue<BamMetricsOutput> tumorBamMetricsOutputQueue = new ArrayBlockingQueue<>(1);
        BlockingQueue<FlagstatOutput> referenceFlagstatOutputQueue = new ArrayBlockingQueue<>(1);
        BlockingQueue<FlagstatOutput> tumorFlagstatOutputQueue = new ArrayBlockingQueue<>(1);
        BlockingQueue<GermlineCallerOutput> germlineCallerOutputQueue = new ArrayBlockingQueue<>(1);
        StartingPoint startingPoint = new StartingPoint(arguments);
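        // ApiPersistedDataset is only constructed when a biopsy is supplied, in which case persisted outputs are looked up via the SBP REST API; otherwise a NoopPersistedDataset is used.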
        PersistedDataset persistedDataset = arguments.biopsy()
                .<PersistedDataset>map(b -> new ApiPersistedDataset(SbpRestApi.newInstance(arguments.sbpApiUrl()),
                        ObjectMappers.get(),
                        b,
                        arguments.project()))
                .orElse(new NoopPersistedDataset());
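        // Compose the reference, tumor and somatic pipelines into a FullPipeline and run it on a cached thread pool.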
        PipelineState state = new FullPipeline(
                singleSamplePipeline(arguments, credentials, storage, referenceEventListener, somaticRunMetadata,
                        referenceBamMetricsOutputQueue, germlineCallerOutputQueue, referenceFlagstatOutputQueue,
                        startingPoint, persistedDataset, mode),
                singleSamplePipeline(arguments, credentials, storage, tumorEventListener, somaticRunMetadata,
                        tumorBamMetricsOutputQueue, germlineCallerOutputQueue, tumorFlagstatOutputQueue,
                        startingPoint, persistedDataset, mode),
                somaticPipeline(arguments, credentials, storage, somaticRunMetadata, referenceBamMetricsOutputQueue,
                        tumorBamMetricsOutputQueue, referenceFlagstatOutputQueue, tumorFlagstatOutputQueue,
                        startingPoint, persistedDataset, mode),
                Executors.newCachedThreadPool(),
                referenceEventListener,
                tumorEventListener,
                somaticMetadataApi,
                CleanupProvider.from(arguments, storage).get()).run();
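        // Publish the completion event, summarise the logs of any failed VM stages and return the final state.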
        completedEvent(eventSubjects, turquoisePublisher, state.status().toString(), arguments.publishToTurquoise());
        VmExecutionLogSummary.ofFailedStages(storage, state);
        return state;
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}
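The ApiPersistedDataset usage itself is the Optional-based fallback in the middle of the method: the class is only instantiated when arguments.biopsy() is present, otherwise the run falls back to a NoopPersistedDataset. A minimal sketch of that selection in isolation, written as a hypothetical helper (the resolvePersistedDataset name is illustrative and imports are omitted; the constructor and accessor calls are taken from the snippet above):

private static PersistedDataset resolvePersistedDataset(final Arguments arguments) {
    // With a biopsy configured, previously persisted outputs are resolved through the SBP REST API ...
    return arguments.biopsy()
            .<PersistedDataset>map(biopsy -> new ApiPersistedDataset(SbpRestApi.newInstance(arguments.sbpApiUrl()),
                    ObjectMappers.get(),
                    biopsy,
                    arguments.project()))
            // ... otherwise no persisted outputs are reported (NoopPersistedDataset).
            .orElse(new NoopPersistedDataset());
}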