use of org.broadinstitute.hellbender.utils.pileup.PileupElement in project gatk by broadinstitute.
the class LocusIteratorByStateUnitTest method testLIBS.
@Test(enabled = true, dataProvider = "MyLIBSTest")
public void testLIBS(final LIBSTest params) {
    // create the iterator by state with the fake reads and fake records
    final GATKRead read = params.makeRead();
    final LocusIteratorByState li;
    li = makeLIBS(Arrays.asList(read), null, false, header);
    final LIBS_position tester = new LIBS_position(read);
    int bpVisited = 0;
    int lastOffset = 0;
    while (li.hasNext()) {
        bpVisited++;
        final AlignmentContext alignmentContext = li.next();
        final ReadPileup p = alignmentContext.getBasePileup();
        Assert.assertEquals(p.size(), 1);
        final PileupElement pe = p.iterator().next();
        Assert.assertEquals(p.getNumberOfElements(el -> el.isDeletion()), pe.isDeletion() ? 1 : 0, "wrong number of deletions in the pileup");
        Assert.assertEquals(p.getNumberOfElements(el -> el.getRead().getMappingQuality() == 0), pe.getRead().getMappingQuality() == 0 ? 1 : 0, "wrong number of mapq 0 reads in the pileup");
        tester.stepForwardOnGenome();
        if (!hasNeighboringPaddedOps(params.getElements(), pe.getCurrentCigarOffset())) {
            Assert.assertEquals(pe.isBeforeDeletionStart(), tester.isBeforeDeletionStart, "before deletion start failure");
            Assert.assertEquals(pe.isAfterDeletionEnd(), tester.isAfterDeletionEnd, "after deletion end failure");
        }
        Assert.assertEquals(pe.isBeforeInsertion(), tester.isBeforeInsertion, "before insertion failure");
        Assert.assertEquals(pe.isAfterInsertion(), tester.isAfterInsertion, "after insertion failure");
        Assert.assertEquals(pe.isNextToSoftClip(), tester.isNextToSoftClip, "next to soft clip failure");
        Assert.assertTrue(pe.getOffset() >= lastOffset, "Somehow read offsets are decreasing: lastOffset " + lastOffset + " current " + pe.getOffset());
        Assert.assertEquals(pe.getOffset(), tester.getCurrentReadOffset(), "Read offsets are wrong at " + bpVisited);
        Assert.assertEquals(pe.getCurrentCigarElement(), read.getCigar().getCigarElement(tester.currentOperatorIndex), "CigarElement index failure");
        Assert.assertEquals(pe.getOffsetInCurrentCigar(), tester.getCurrentPositionOnOperatorBase0(), "CigarElement index failure");
        Assert.assertEquals(read.getCigar().getCigarElement(pe.getCurrentCigarOffset()), pe.getCurrentCigarElement(), "Current cigar element isn't what we'd get from the read itself");
        Assert.assertTrue(pe.getOffsetInCurrentCigar() >= 0, "Offset into current cigar too small");
        Assert.assertTrue(pe.getOffsetInCurrentCigar() < pe.getCurrentCigarElement().getLength(), "Offset into current cigar too big");
        Assert.assertEquals(pe.getOffset(), tester.getCurrentReadOffset(), "Read offset failure");
        lastOffset = pe.getOffset();
    }
    final int expectedBpToVisit = read.getEnd() - read.getStart() + 1;
    Assert.assertEquals(bpVisited, expectedBpToVisit, "Didn't visit the expected number of bp");
}
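For reference, a minimal sketch of the PileupElement context queries this test exercises, collected into one hypothetical helper (the ReadPileup would come from AlignmentContext.getBasePileup() inside a LocusIteratorByState loop, exactly as above; the class and method names here are illustrative, not part of GATK):
import org.broadinstitute.hellbender.utils.pileup.PileupElement;
import org.broadinstitute.hellbender.utils.pileup.ReadPileup;

// Hypothetical helper, not part of the test: prints the per-element state
// that testLIBS asserts against LIBS_position.
final class PileupElementInspector {
    static void describe(final ReadPileup pileup) {
        for (final PileupElement pe : pileup) {
            // which base of which read, and where it sits within the read's cigar
            System.out.printf("read=%s offset=%d cigarElement=%s offsetInCigarElement=%d%n",
                    pe.getRead().getName(), pe.getOffset(),
                    pe.getCurrentCigarElement(), pe.getOffsetInCurrentCigar());
            // indel/soft-clip context flags checked by the test
            System.out.printf("  deletion=%b beforeDel=%b afterDel=%b beforeIns=%b afterIns=%b nextToSoftClip=%b%n",
                    pe.isDeletion(), pe.isBeforeDeletionStart(), pe.isAfterDeletionEnd(),
                    pe.isBeforeInsertion(), pe.isAfterInsertion(), pe.isNextToSoftClip());
        }
    }
}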
use of org.broadinstitute.hellbender.utils.pileup.PileupElement in project gatk by broadinstitute.
the class LocusIteratorByStateUnitTest method AssertWellOrderedPileup.
private void AssertWellOrderedPileup(final ReadPileup pileup) {
    if (!pileup.isEmpty()) {
        int leftMostPos = -1;
        for (final PileupElement pe : pileup) {
            Assert.assertTrue(pileup.getLocation().getContig().equals(pe.getRead().getContig()), "ReadPileup contains an element " + pe + " that's on a different contig than the pileup itself");
            Assert.assertTrue(pe.getRead().getStart() >= leftMostPos, "ReadPileup contains an element " + pe + " whose read's alignment start " + pe.getRead().getStart() + " occurs before the leftmost position we've seen previously " + leftMostPos);
            // remember this element's start so the next element cannot begin before it
            leftMostPos = pe.getRead().getStart();
        }
    }
}
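A hedged usage sketch (the wrapper method name below is hypothetical): the ordering check could be run over every pileup produced by a LocusIteratorByState built with makeLIBS, mirroring the loop in testLIBS above:
// Illustrative only: apply the ordering check to each pileup emitted by a LIBS traversal.
private void assertAllPileupsWellOrdered(final LocusIteratorByState li) {
    while (li.hasNext()) {
        final ReadPileup pileup = li.next().getBasePileup();
        AssertWellOrderedPileup(pileup);  // elements must share the pileup's contig and be ordered by alignment start
    }
}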
use of org.broadinstitute.hellbender.utils.pileup.PileupElement in project gatk by broadinstitute.
the class LocusIteratorByStateUnitTest method testWholeIndelReadRepresentedTest.
/**
* Test to make sure that reads supporting only an indel (example cigar string: 76I) are represented properly
*/
@Test
public void testWholeIndelReadRepresentedTest() {
    final int firstLocus = 44367788, secondLocus = firstLocus + 1;
    final GATKRead read1 = ArtificialReadUtils.createArtificialRead(header, "read1", 0, secondLocus, 1);
    read1.setBases(Utils.dupBytes((byte) 'A', 1));
    read1.setBaseQualities(Utils.dupBytes((byte) '@', 1));
    read1.setCigar("1I");
    List<GATKRead> reads = Arrays.asList(read1);
    // create the iterator by state with the fake reads and fake records
    LocusIteratorByState li;
    li = makeLIBS(reads, null, false, header);
    while (li.hasNext()) {
        final AlignmentContext alignmentContext = li.next();
        final ReadPileup p = alignmentContext.getBasePileup();
        Assert.assertTrue(p.size() == 1);
        PileupElement pe = p.iterator().next();
        Assert.assertTrue(pe.isBeforeInsertion());
        Assert.assertFalse(pe.isAfterInsertion());
        Assert.assertEquals(pe.getBasesOfImmediatelyFollowingInsertion(), "A");
    }
    final GATKRead read2 = ArtificialReadUtils.createArtificialRead(header, "read2", 0, secondLocus, 10);
    read2.setBases(Utils.dupBytes((byte) 'A', 10));
    read2.setBaseQualities(Utils.dupBytes((byte) '@', 10));
    read2.setCigar("10I");
    reads = Arrays.asList(read2);
    // create the iterator by state with the fake reads and fake records
    li = makeLIBS(reads, null, false, header);
    while (li.hasNext()) {
        final AlignmentContext alignmentContext = li.next();
        final ReadPileup p = alignmentContext.getBasePileup();
        Assert.assertTrue(p.size() == 1);
        PileupElement pe = p.iterator().next();
        Assert.assertTrue(pe.isBeforeInsertion());
        Assert.assertFalse(pe.isAfterInsertion());
        Assert.assertEquals(pe.getBasesOfImmediatelyFollowingInsertion(), "AAAAAAAAAA");
    }
}
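The same pattern generalizes to other insertion-only cigars. A hedged sketch, reusing secondLocus and the helpers from the test above; the read name, length, and expected bases here are illustrative extrapolations from the 1I and 10I cases, not part of the original test:
// Illustrative extension of the test above: a 3-base, insertion-only read ("3I").
final GATKRead read3 = ArtificialReadUtils.createArtificialRead(header, "read3", 0, secondLocus, 3);
read3.setBases(Utils.dupBytes((byte) 'A', 3));
read3.setBaseQualities(Utils.dupBytes((byte) '@', 3));
read3.setCigar("3I");
li = makeLIBS(Arrays.asList(read3), null, false, header);
while (li.hasNext()) {
    final PileupElement pe = li.next().getBasePileup().iterator().next();
    Assert.assertTrue(pe.isBeforeInsertion());
    Assert.assertEquals(pe.getBasesOfImmediatelyFollowingInsertion(), "AAA");
}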
use of org.broadinstitute.hellbender.utils.pileup.PileupElement in project gatk by broadinstitute.
the class ReferenceConfidenceModel method calcNIndelInformativeReads.
/**
* Calculate the number of indel informative reads at pileup
*
* @param pileup a pileup
* @param pileupOffsetIntoRef the position of the pileup in the reference
* @param ref the ref bases
* @param maxIndelSize maximum indel size to consider in the informativeness calculation
* @return an integer >= 0
*/
@VisibleForTesting
int calcNIndelInformativeReads(final ReadPileup pileup, final int pileupOffsetIntoRef, final byte[] ref, final int maxIndelSize) {
    int nInformative = 0;
    for (final PileupElement p : pileup) {
        final GATKRead read = p.getRead();
        final int offset = p.getOffset();
        // a base that is deleted, or that sits immediately before an indel, doesn't count as evidence
        if (p.isBeforeDeletionStart() || p.isBeforeInsertion() || p.isDeletion()) {
            continue;
        }
        // todo -- this code really should handle CIGARs directly instead of relying on the above tests
        if (isReadInformativeAboutIndelsOfSize(read, offset, ref, pileupOffsetIntoRef, maxIndelSize)) {
            nInformative++;
            if (nInformative > MAX_N_INDEL_INFORMATIVE_READS) {
                return MAX_N_INDEL_INFORMATIVE_READS;
            }
        }
    }
    return nInformative;
}
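For illustration, a simplified, hypothetical counter that mirrors only the pileup-level filter above; the real method additionally applies the private isReadInformativeAboutIndelsOfSize read-level check and caps the result at MAX_N_INDEL_INFORMATIVE_READS:
import org.broadinstitute.hellbender.utils.pileup.PileupElement;
import org.broadinstitute.hellbender.utils.pileup.ReadPileup;

// Hypothetical helper, not part of GATK: counts pileup elements that are even
// eligible as indel evidence, i.e. not deleted bases and not immediately
// adjacent to an indel, using the same exclusion as calcNIndelInformativeReads.
final class IndelEvidenceCounter {
    static int countEligibleElements(final ReadPileup pileup) {
        int eligible = 0;
        for (final PileupElement p : pileup) {
            if (p.isBeforeDeletionStart() || p.isBeforeInsertion() || p.isDeletion()) {
                continue;  // excluded from consideration, as in the real method
            }
            eligible++;
        }
        return eligible;
    }
}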
use of org.broadinstitute.hellbender.utils.pileup.PileupElement in project gatk by broadinstitute.
the class ASEReadCounter method calculateLineForSite.
private String calculateLineForSite(final ReadPileup pileup, final String siteID, final char refAllele, final char altAllele) {
    int rawDepth = 0, lowBaseQDepth = 0, lowMAPQDepth = 0, refCount = 0, altCount = 0, totalNonFilteredCount = 0, otherBasesCount = 0, improperPairsCount = 0;
    for (final PileupElement base : pileup) {
        rawDepth++;
        if (base.getRead().isPaired() && (base.getRead().mateIsUnmapped() || !base.getRead().isProperlyPaired())) {
            improperPairsCount++;
            continue;
        }
        if (base.getMappingQual() < minMappingQuality) {
            lowMAPQDepth++;
            continue;
        }
        if (base.getQual() < minBaseQuality) {
            lowBaseQDepth++;
            continue;
        }
        if (base.getBase() == refAllele) {
            refCount++;
        } else if (base.getBase() == altAllele) {
            altCount++;
        } else {
            otherBasesCount++;
            continue;
        }
        totalNonFilteredCount++;
    }
    if (totalNonFilteredCount < minDepthOfNonFilteredBases) {
        return null;
    }
    final StringBuilder line = new StringBuilder();
    line.append(pileup.getLocation().getContig()).append(separator);
    line.append(pileup.getLocation().getStart()).append(separator);
    line.append(siteID).append(separator);
    line.append(refAllele).append(separator);
    line.append(altAllele).append(separator);
    line.append(refCount).append(separator);
    line.append(altCount).append(separator);
    line.append(totalNonFilteredCount).append(separator);
    line.append(lowMAPQDepth).append(separator);
    line.append(lowBaseQDepth).append(separator);
    line.append(rawDepth).append(separator);
    line.append(otherBasesCount).append(separator);
    line.append(improperPairsCount);
    return line.toString();
}
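For reference, descriptive column labels matching the append order above; these names are illustrative only (the tool's actual header line is produced elsewhere, and the real delimiter is the configurable separator field rather than the tab assumed in this sketch):
// Illustrative labels, in the same order the fields are appended in calculateLineForSite.
final String[] columns = {
        "contig", "position", "variantID", "refAllele", "altAllele",
        "refCount", "altCount", "totalCount",
        "lowMAPQDepth", "lowBaseQDepth", "rawDepth",
        "otherBases", "improperPairs"
};
final String headerLine = String.join("\t", columns);  // assumes a tab delimiter for this sketch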