Use of org.broadinstitute.hellbender.utils.pileup.PileupElement in project gatk-protected by broadinstitute:
the class ReferenceConfidenceModel, method calcNIndelInformativeReads.
/**
 * Calculate the number of indel informative reads at pileup
 *
 * @param pileup a pileup
 * @param pileupOffsetIntoRef the position of the pileup in the reference
 * @param ref the ref bases
 * @param maxIndelSize maximum indel size to consider in the informativeness calculation
 * @return an integer >= 0
 */
@VisibleForTesting
int calcNIndelInformativeReads(final ReadPileup pileup, final int pileupOffsetIntoRef, final byte[] ref, final int maxIndelSize) {
    int nInformative = 0;
    for (final PileupElement p : pileup) {
        final GATKRead read = p.getRead();
        final int offset = p.getOffset();

        // elements at deletions or immediately before an indel don't count as evidence
        if (p.isBeforeDeletionStart() || p.isBeforeInsertion() || p.isDeletion()) {
            continue;
        }

        // todo -- this code really should handle CIGARs directly instead of relying on the above tests
        if (isReadInformativeAboutIndelsOfSize(read, offset, ref, pileupOffsetIntoRef, maxIndelSize)) {
            nInformative++;
            if (nInformative > MAX_N_INDEL_INFORMATIVE_READS) {
                return MAX_N_INDEL_INFORMATIVE_READS;
            }
        }
    }
    return nInformative;
}
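The loop above stops counting once MAX_N_INDEL_INFORMATIVE_READS is exceeded, which bounds the per-site cost at deep pileups. Below is a minimal standalone sketch of that capped-counting pattern, assuming a hypothetical isInformative predicate and cap value; it is not the GATK implementation, only the control flow it uses.

import java.util.List;
import java.util.function.Predicate;

// Hypothetical helper illustrating the capped-counting pattern used above:
// count items that pass a test, but stop scanning once the cap is reached.
final class CappedCounter {
    static <T> int countUpTo(final List<T> items, final Predicate<T> isInformative, final int cap) {
        int n = 0;
        for (final T item : items) {
            if (isInformative.test(item)) {
                n++;
                if (n > cap) {
                    return cap;   // same early exit as calcNIndelInformativeReads
                }
            }
        }
        return n;
    }

    public static void main(String[] args) {
        // toy usage: count even numbers, capped at 3
        System.out.println(countUpTo(List.of(2, 4, 5, 6, 8, 10), x -> x % 2 == 0, 3)); // prints 3
    }
}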
Use of org.broadinstitute.hellbender.utils.pileup.PileupElement in project gatk-protected by broadinstitute:
the class ReferenceConfidenceModel, method calcGenotypeLikelihoodsOfRefVsAny.
/**
 * Calculate the genotype likelihoods for the sample in pileup for being hom-ref contrasted with being ref vs. alt
 *
 * @param ploidy target sample ploidy.
 * @param pileup the read backed pileup containing the data we want to evaluate
 * @param refBase the reference base at this pileup position
 * @param minBaseQual the min base quality for a read in the pileup at the pileup position to be included in the calculation
 * @param hqSoftClips running average data structure (can be null) to collect information about the number of high quality soft clips
 * @return a RefVsAnyResult genotype call.
 */
public RefVsAnyResult calcGenotypeLikelihoodsOfRefVsAny(final int ploidy, final ReadPileup pileup, final byte refBase, final byte minBaseQual, final MathUtils.RunningAverage hqSoftClips) {
    final int likelihoodCount = ploidy + 1;
    final double log10Ploidy = MathUtils.log10(ploidy);
    final RefVsAnyResult result = new RefVsAnyResult(likelihoodCount);
    int readCount = 0;
    for (final PileupElement p : pileup) {
        final byte qual = p.isDeletion() ? REF_MODEL_DELETION_QUAL : p.getQual();
        if (!p.isDeletion() && qual <= minBaseQual) {
            continue;
        }
        readCount++;
        applyPileupElementRefVsNonRefLikelihoodAndCount(refBase, likelihoodCount, log10Ploidy, result, p, qual, hqSoftClips);
    }
    final double denominator = readCount * log10Ploidy;
    for (int i = 0; i < likelihoodCount; i++) {
        result.addGenotypeLikelihood(i, -denominator);
    }
    return result;
}
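The final loop subtracts readCount * log10(ploidy) from every genotype likelihood: each read's contribution is a mixture over the ploidy allele copies, so the 1/ploidy factors accumulate to ploidy^-readCount, and that constant is removed once in log10 space. The per-read accumulation itself happens in applyPileupElementRefVsNonRefLikelihoodAndCount, which is not shown here; the sketch below is a simplified ref-vs-any likelihood calculation under that mixture model, using only the standard qual-to-error-probability conversion, and is not the GATK code.

// Simplified sketch (not the GATK implementation) of folding per-read base qualities
// into ploidy+1 ref-vs-any genotype likelihoods in log10 space.
// Assumes a boolean per read: does it match the reference base?
final class RefVsAnySketch {
    static double[] log10Likelihoods(final boolean[] matchesRef, final byte[] quals, final int ploidy) {
        final int likelihoodCount = ploidy + 1;            // genotypes with 0..ploidy non-ref alleles
        final double[] gl = new double[likelihoodCount];
        for (int r = 0; r < matchesRef.length; r++) {
            final double errorProb = Math.pow(10.0, -quals[r] / 10.0);
            final double pIfRefCopy    = matchesRef[r] ? 1.0 - errorProb : errorProb;
            final double pIfNonRefCopy = matchesRef[r] ? errorProb : 1.0 - errorProb;
            for (int alt = 0; alt < likelihoodCount; alt++) {
                // mixture over the ploidy allele copies for a genotype with 'alt' non-ref copies
                gl[alt] += Math.log10((ploidy - alt) * pIfRefCopy + alt * pIfNonRefCopy);
            }
        }
        // the same normalization as the method above: subtract readCount * log10(ploidy)
        final double denominator = matchesRef.length * Math.log10(ploidy);
        for (int alt = 0; alt < likelihoodCount; alt++) {
            gl[alt] -= denominator;
        }
        return gl;
    }

    public static void main(String[] args) {
        // toy example: diploid, three reads matching ref at Q30 and one non-ref read at Q20
        final boolean[] matches = {true, true, true, false};
        final byte[] quals = {30, 30, 30, 20};
        System.out.println(java.util.Arrays.toString(log10Likelihoods(matches, quals, 2)));
    }
}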
Use of org.broadinstitute.hellbender.utils.pileup.PileupElement in project gatk by broadinstitute:
the class Pileup, method createVerboseOutput.
/**
 * Collect information for the overlapping reads, delimited by {@link #VERBOSE_DELIMITER}
 *
 * @param pileup the pileup to format
 * @return formatted string with read information for the pileup
 */
@VisibleForTesting
static String createVerboseOutput(final ReadPileup pileup) {
    final StringBuilder sb = new StringBuilder();
    boolean isFirst = true;
    sb.append(pileup.getNumberOfElements(PileupElement::isDeletion));
    sb.append(" ");
    for (final PileupElement p : pileup) {
        if (isFirst) {
            isFirst = false;
        } else {
            sb.append(",");
        }
        sb.append(p.getRead().getName());
        sb.append(VERBOSE_DELIMITER);
        sb.append(p.getOffset());
        sb.append(VERBOSE_DELIMITER);
        sb.append(p.getRead().getLength());
        sb.append(VERBOSE_DELIMITER);
        sb.append(p.getRead().getMappingQuality());
    }
    return sb.toString();
}
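The method joins the per-read fields with VERBOSE_DELIMITER and separates reads with commas via an isFirst flag. The same join pattern can be written with java.util.StringJoiner; the sketch below is a hypothetical standalone version in which ReadInfo is an invented record and "@" merely stands in for VERBOSE_DELIMITER, whose actual value is not shown above.

import java.util.List;
import java.util.StringJoiner;

// Sketch of the same "join fields per item, then join items with commas" pattern
// using StringJoiner; "@" stands in for VERBOSE_DELIMITER.
final class VerboseJoinSketch {
    record ReadInfo(String name, int offset, int length, int mappingQuality) {}

    static String format(final List<ReadInfo> reads) {
        final StringJoiner perRead = new StringJoiner(",");
        for (final ReadInfo r : reads) {
            perRead.add(r.name() + "@" + r.offset() + "@" + r.length() + "@" + r.mappingQuality());
        }
        return perRead.toString();
    }

    public static void main(String[] args) {
        System.out.println(format(List.of(new ReadInfo("read1", 5, 100, 60),
                                          new ReadInfo("read2", 7, 100, 37))));
        // prints: read1@5@100@60,read2@7@100@37
    }
}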
Use of org.broadinstitute.hellbender.utils.pileup.PileupElement in project gatk by broadinstitute:
the class LocusIteratorByState, method lazyLoadNextAlignmentContext.
/**
 * Creates the next alignment context from the given state. Note that this is implemented as a
 * lazy load method. nextAlignmentContext MUST BE null in order for this method to advance to the
 * next entry.
 */
private void lazyLoadNextAlignmentContext() {
    while (nextAlignmentContext == null && readStates.hasNext()) {
        readStates.collectPendingReads();
        final Locatable location = getLocation();
        final Map<String, ReadPileup> fullPileupPerSample = new LinkedHashMap<>();
        for (final Map.Entry<String, PerSampleReadStateManager> sampleStatePair : readStates) {
            final String sample = sampleStatePair.getKey();
            final PerSampleReadStateManager readState = sampleStatePair.getValue();
            final Iterator<AlignmentStateMachine> iterator = readState.iterator();
            final List<PileupElement> pile = new ArrayList<>(readState.size());
            while (iterator.hasNext()) {
                // state object with the read/offset information
                final AlignmentStateMachine state = iterator.next();
                final GATKRead read = state.getRead();
                final CigarOperator op = state.getCigarOperator();
                if (!includeReadsWithNsAtLoci && op == CigarOperator.N) {
                    continue;
                }
                if (!dontIncludeReadInPileup(read, location.getStart())) {
                    if (!includeReadsWithDeletionAtLoci && op == CigarOperator.D) {
                        continue;
                    }
                    pile.add(state.makePileupElement());
                }
            }
            if (!pile.isEmpty()) {
                // if this pileup added at least one base, add it to the full pileup
                fullPileupPerSample.put(sample, new ReadPileup(location, pile));
            }
        }
        // critical - must be called after we get the current state offsets and location
        readStates.updateReadStates();
        if (!fullPileupPerSample.isEmpty()) {
            // if we got reads with non-D/N over the current position, we are done
            nextAlignmentContext = new AlignmentContext(location, new ReadPileup(location, fullPileupPerSample));
        }
    }
}
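nextAlignmentContext is filled only when it is null and consumed elsewhere, which is the classic lazy-load pattern for iterators whose elements are expensive to assemble. The sketch below shows that pattern in isolation with a generic LazyIterator, an invented class that is not part of GATK; the Supplier plays the role of lazyLoadNextAlignmentContext.

import java.util.Iterator;
import java.util.NoSuchElementException;
import java.util.function.Supplier;

// Generic sketch of the lazy-load iterator pattern used by lazyLoadNextAlignmentContext:
// the next element is computed only on demand and cached until consumed.
final class LazyIterator<T> implements Iterator<T> {
    private final Supplier<T> loader;  // returns null when exhausted
    private T next;                    // cached element; null means "not loaded yet"

    LazyIterator(final Supplier<T> loader) {
        this.loader = loader;
    }

    @Override
    public boolean hasNext() {
        if (next == null) {
            next = loader.get();       // lazy load, analogous to lazyLoadNextAlignmentContext()
        }
        return next != null;
    }

    @Override
    public T next() {
        if (!hasNext()) {
            throw new NoSuchElementException();
        }
        final T result = next;
        next = null;                   // force a fresh load on the following call
        return result;
    }

    public static void main(String[] args) {
        final int[] i = {0};
        final LazyIterator<String> it = new LazyIterator<>(() -> i[0] < 3 ? "ctx" + i[0]++ : null);
        it.forEachRemaining(System.out::println); // prints ctx0, ctx1, ctx2
    }
}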