Use of org.broadinstitute.hellbender.utils.pileup.ReadPileup in the gatk-protected project by the Broad Institute.
Example from the class GATKProtectedVariantContextUtilsUnitTest, method testGetPileup.
@Test
public void testGetPileup() {
    final SAMFileHeader header = ArtificialReadUtils.createArtificialSamHeader(1, 1, 1000);
    final Locatable loc = new SimpleInterval("chr1", 10, 10);
    final int readLength = 3;

    // Starts at 1 with cigar 3M, so it ends at 3 and does not reach {@code loc}.
    final GATKRead nonOverlapping = buildArtificialRead(header, "read1", 1, (byte) 'A', (byte) 30, "3M", readLength);
    // Covers {@code loc} and contributes a Q30 'A'.
    final GATKRead overlappingA = buildArtificialRead(header, "read2", 10, (byte) 'A', (byte) 30, "3M", readLength);
    // The 1-base deletion shifts its span so it still covers {@code loc}, contributing a Q20 'C'.
    final GATKRead overlappingC = buildArtificialRead(header, "read3", 7, (byte) 'C', (byte) 20, "1M1D2M", readLength);
    // The 5-base deletion places {@code loc} inside the deleted region, so it must be excluded.
    final GATKRead deletionSkipped = buildArtificialRead(header, "read4", 7, (byte) 'C', (byte) 20, "1M5D2M", readLength);

    final ReadPileup pileup = GATKProtectedVariantContextUtils.getPileup(loc,
            Arrays.asList(nonOverlapping, overlappingA, overlappingC, deletionSkipped));

    // Only read2 ('A') and read3 ('C') overlap the locus; counts are in A,C,G,T order.
    final int[] counts = pileup.getBaseCounts();
    Assert.assertEquals(counts, new int[] { 1, 1, 0, 0 });
}

/** Creates an artificial read with uniform bases and qualities and the given cigar. */
private static GATKRead buildArtificialRead(final SAMFileHeader header, final String name, final int start,
                                            final byte base, final byte qual, final String cigar, final int length) {
    final GATKRead read = ArtificialReadUtils.createArtificialRead(header, name, 0, start, length);
    read.setBases(Utils.dupBytes(base, length));
    read.setBaseQualities(Utils.dupBytes(qual, length));
    read.setCigar(cigar);
    return read;
}
Use of org.broadinstitute.hellbender.utils.pileup.ReadPileup in the gatk project by the Broad Institute.
Example from the class CheckPileup, method apply.
@Override
public void apply(final AlignmentContext context, final ReferenceContext ref, final FeatureContext featureContext) {
    // GATK's own pileup at this locus; optionally merge overlapping mate pairs first.
    final ReadPileup pileup = context.getBasePileup();
    if (!ignoreOverlaps) {
        pileup.fixOverlaps();
    }
    // The samtools mpileup record to compare against, if one exists at this position.
    final SAMPileupFeature truePileup = getTruePileup(featureContext);
    if (truePileup != null) {
        final String diff = pileupDiff(pileup, truePileup);
        if (diff != null) {
            out.printf("%s vs. %s%n", pileup.getPileupString((char) ref.getBase()), truePileup.getPileupString());
            if (!continueAfterAnError) {
                throw new UserException.BadInput(String.format("The input pileup doesn't match the GATK's internal pileup: %s", diff));
            }
        }
    } else {
        out.printf("No truth pileup data available at %s%n", pileup.getPileupString((char) ref.getBase()));
        if (!continueAfterAnError) {
            throw new UserException.BadInput(String.format("No pileup data available at %s given GATK's output of %s -- this walker requires samtools mpileup data over all bases", context.getLocation(), new String(pileup.getBases())));
        }
    }
    // Running totals reported at traversal end.
    nLoci++;
    nBases += pileup.size();
}
Use of org.broadinstitute.hellbender.utils.pileup.ReadPileup in the gatk project by the Broad Institute.
Example from the class ASEReadCounter, method filterPileup.
/**
 * Applies the fragment-counting policy to a pileup and drops deletion elements.
 * COUNT_READS keeps every read; the two fragment modes collapse overlapping mates,
 * differing in whether the mates must agree on the base.
 */
private ReadPileup filterPileup(final ReadPileup originalPileup, final CountPileupType countType) {
    final SAMFileHeader readsHeader = getHeaderForReads();
    final ReadPileup fragmentFiltered;
    if (countType == CountPileupType.COUNT_READS) {
        // No fragment collapsing: every read element counts on its own.
        fragmentFiltered = originalPileup;
    } else if (countType == CountPileupType.COUNT_FRAGMENTS_REQUIRE_SAME_BASE) {
        // Collapse overlapping mates, keeping a fragment only when both mates show the same base.
        fragmentFiltered = originalPileup.getOverlappingFragmentFilteredPileup(true, ReadPileup.baseQualTieBreaker, readsHeader);
    } else if (countType == CountPileupType.COUNT_FRAGMENTS) {
        // Collapse overlapping mates without requiring base agreement.
        fragmentFiltered = originalPileup.getOverlappingFragmentFilteredPileup(false, ReadPileup.baseQualTieBreaker, readsHeader);
    } else {
        throw new UserException("Must use valid CountPileupType");
    }
    // Deletion elements never contribute to allele-specific counts.
    return fragmentFiltered.makeFilteredPileup(element -> !element.isDeletion());
}
Use of org.broadinstitute.hellbender.utils.pileup.ReadPileup in the gatk project by the Broad Institute.
Example from the class ExampleLocusWalker, method apply.
@Override
public void apply(AlignmentContext alignmentContext, ReferenceContext referenceContext, FeatureContext featureContext) {
    // Pileup of reads covering the current locus.
    final ReadPileup basePileup = alignmentContext.getBasePileup();
    // Report the locus position and read depth.
    outputStream.printf("Current locus %s:%d (coverage=%s)\n", alignmentContext.getContig(), alignmentContext.getPosition(), basePileup.size());
    // Reference bases are only available when a reference was supplied on the command line.
    if (referenceContext.hasBackingDataSource()) {
        outputStream.println("\tReference base(s): " + new String(referenceContext.getBases()));
    }
    // Likewise, variants are only available when a variant source was supplied.
    if (featureContext.hasBackingDataSource()) {
        final List<VariantContext> overlappingVariants = featureContext.getValues(variants);
        if (!overlappingVariants.isEmpty()) {
            outputStream.println("\tOverlapping variant(s):");
            for (final VariantContext vc : overlappingVariants) {
                outputStream.printf("\t\t%s:%d-%d, Ref:%s, Alt(s):%s\n", vc.getContig(), vc.getStart(), vc.getEnd(), vc.getReference(), vc.getAlternateAlleles());
            }
        }
    }
    // Blank line between loci for readability.
    outputStream.println();
}
Use of org.broadinstitute.hellbender.utils.pileup.ReadPileup in the gatk project by the Broad Institute.
Example from the class GenotypingEngine, method emptyCallContext.
/**
 * Produces an empty variant-call context to output when there is not enough data provided to call anything.
 *
 * @param features feature context
 * @param ref the reference context.
 * @param rawContext the read alignment at that location.
 * @param header the SAM header for the reads (currently unused here; kept for interface compatibility).
 * @return may be null if not enough information is provided to make even an empty call, for example when
 * we have limited context (i.e. any of the tracker, reference or alignment is {@code null}).
 */
protected final VariantCallContext emptyCallContext(final FeatureContext features, final ReferenceContext ref, final AlignmentContext rawContext, final SAMFileHeader header) {
    // Without full context, or when the caller does not force site emission, there is nothing to emit.
    if (features == null || ref == null || rawContext == null || !forceSiteEmission()) {
        return null;
    }
    VariantContext vc;
    if (configuration.genotypingOutputMode == GenotypingOutputMode.GENOTYPE_GIVEN_ALLELES) {
        // GGA mode: the emitted site must come from the user-supplied alleles track.
        final VariantContext ggaVc = GenotypingGivenAllelesUtils.composeGivenAllelesVariantContextFromRod(features, rawContext.getLocation(), false, logger, configuration.alleles);
        if (ggaVc == null) {
            return null;
        }
        vc = new VariantContextBuilder(callSourceString(), ref.getInterval().getContig(), ggaVc.getStart(), ggaVc.getEnd(), ggaVc.getAlleles()).make();
    } else {
        // deal with bad/non-standard reference bases
        if (!Allele.acceptableAlleleBases(new byte[] { ref.getBase() })) {
            return null;
        }
        // Single reference allele at this position; LinkedHashSet preserves allele order.
        final Set<Allele> alleles = new LinkedHashSet<>(Collections.singleton(Allele.create(ref.getBase(), true)));
        vc = new VariantContextBuilder(callSourceString(), ref.getInterval().getContig(), ref.getInterval().getStart(), ref.getInterval().getStart(), alleles).make();
    }
    if (vc != null && annotationEngine != null) {
        // Note: we want to use the *unfiltered* and *unBAQed* context for the annotations.
        // A raw pileup (rawContext.getBasePileup()) was previously fetched here but never
        // passed to annotateContext -- dead code, removed. NOTE(review): confirm the
        // annotation engine does not need the pileup through another channel.
        vc = annotationEngine.annotateContext(vc, features, ref, null, a -> true);
    }
    return new VariantCallContext(vc, false);
}
Aggregations