Usage example of org.broadinstitute.hellbender.utils.read.GATKRead in the project gatk by broadinstitute.
Example: class HaplotypeCallerEngine, method removeReadsFromAllSamplesExcept.
/**
 * Drops every read in the active region whose sample is not {@code targetSample}.
 *
 * @param targetSample the sample whose reads are kept
 * @param activeRegion the region whose read list is pruned in place
 */
private void removeReadsFromAllSamplesExcept(final String targetSample, final AssemblyRegion activeRegion) {
    // Gather the reads to discard first (in encounter order), then remove them
    // in a single batch rather than mutating the region while iterating it.
    final Set<GATKRead> nonTargetReads = new LinkedHashSet<>();
    for (final GATKRead read : activeRegion.getReads()) {
        final String sample = ReadUtils.getSampleName(read, readsHeader);
        if (sample.equals(targetSample)) {
            continue;
        }
        nonTargetReads.add(read);
    }
    activeRegion.removeAll(nonTargetReads);
}
Usage example of org.broadinstitute.hellbender.utils.read.GATKRead in the project gatk by broadinstitute.
Example: class OxoGReadCounts, method annotate.
@Override
public void annotate(final ReferenceContext refContext, final VariantContext vc, final Genotype g, final GenotypeBuilder gb, final ReadLikelihoods<Allele> likelihoods) {
    Utils.nonNull(gb, "gb is null");
    Utils.nonNull(vc, "vc is null");
    // Only annotate called genotypes at SNP sites for which we have likelihoods.
    if (g == null || !g.isCalled() || likelihoods == null || !vc.isSNP()) {
        return;
    }
    final Allele ref = vc.getReference();
    final Allele alt = vc.getAlternateAllele(0);
    // Strand-orientation tallies of informative paired reads supporting ref/alt.
    // F2R1: reverse-strand first-of-pair or forward-strand second-of-pair; F1R2 otherwise.
    int alt_F1R2 = 0;
    int alt_F2R1 = 0;
    int ref_F1R2 = 0;
    int ref_F2R1 = 0;
    for (final ReadLikelihoods<Allele>.BestAllele bestAllele : likelihoods.bestAlleles(g.getSampleName())) {
        final GATKRead read = bestAllele.read;
        if (bestAllele.isInformative() && isUsableRead(read) && read.isPaired()) {
            final Allele allele = bestAllele.allele;
            if (allele.equals(ref, true)) {
                if (read.isReverseStrand() == read.isFirstOfPair()) {
                    ref_F2R1++;
                } else {
                    ref_F1R2++;
                }
            } else if (allele.equals(alt, true)) {
                if (read.isReverseStrand() == read.isFirstOfPair()) {
                    alt_F2R1++;
                } else {
                    alt_F1R2++;
                }
            }
        }
    }
    // The oxoG artifact mode depends on the reference base: C/A references implicate
    // F2R1 alt reads, G/T references implicate F1R2 alt reads.
    final double numerator;
    if (ref.equals(REF_C) || ref.equals(REF_A)) {
        numerator = alt_F2R1;
    } else {
        numerator = alt_F1R2;
    }
    final double denominator = alt_F1R2 + alt_F2R1;
    // Guard against division by zero: with no alt-supporting reads the fraction would
    // otherwise be NaN and leak into the VCF annotation. Report 0.0 in that case.
    final double fraction = denominator == 0 ? 0.0 : numerator / denominator;
    gb.attribute(GATKVCFConstants.OXOG_ALT_F1R2_KEY, alt_F1R2);
    gb.attribute(GATKVCFConstants.OXOG_ALT_F2R1_KEY, alt_F2R1);
    gb.attribute(GATKVCFConstants.OXOG_REF_F1R2_KEY, ref_F1R2);
    gb.attribute(GATKVCFConstants.OXOG_REF_F2R1_KEY, ref_F2R1);
    gb.attribute(GATKVCFConstants.OXOG_FRACTION_KEY, fraction);
}
Usage example of org.broadinstitute.hellbender.utils.read.GATKRead in the project gatk by broadinstitute.
Example: class PairHMM, method computeLog10Likelihoods.
/**
 * Given a list of reads and haplotypes, for every read compute the total probability of said read arising from
 * each haplotype given base substitution, insertion, and deletion probabilities.
 *
 * @param processedReads reads to analyze instead of the ones present in the destination read-likelihoods.
 * @param logLikelihoods where to store the log likelihoods where position [a][r] is reserved for the log likelihood of {@code reads[r]}
 *          conditional to {@code alleles[a]}.
 * @param gcp penalty for gap continuations base array map for processed reads.
 */
public void computeLog10Likelihoods(final LikelihoodMatrix<Haplotype> logLikelihoods, final List<GATKRead> processedReads, final Map<GATKRead, byte[]> gcp) {
    if (processedReads.isEmpty()) {
        return;
    }
    if (doProfiling) {
        startTime = System.nanoTime();
    }
    // (Re)initialize the pairHMM matrices only when the incoming reads or haplotypes
    // exceed the currently allocated capacity.
    final int maxReadLen = findMaxReadLength(processedReads);
    final int maxHapLen = findMaxAlleleLength(logLikelihoods.alleles());
    if (!initialized || maxReadLen > maxReadLength || maxHapLen > maxHaplotypeLength) {
        initialize(maxReadLen, maxHapLen);
    }
    final List<Haplotype> haplotypes = logLikelihoods.alleles();
    final int numHaplotypes = haplotypes.size();
    // Flat result buffer: one likelihood per (read, haplotype) pair, read-major order.
    mLogLikelihoodArray = new double[processedReads.size() * numHaplotypes];
    int flatIndex = 0;
    int readIndex = 0;
    for (final GATKRead read : processedReads) {
        final byte[] readBases = read.getBases();
        final byte[] readQuals = read.getBaseQualities();
        final byte[] readInsQuals = ReadUtils.getBaseInsertionQualities(read);
        final byte[] readDelQuals = ReadUtils.getBaseDeletionQualities(read);
        final byte[] overallGCP = gcp.get(read);
        // Peek at the next haplotype in the list (necessary to get nextHaplotypeBases,
        // which is required for caching in the array implementation).
        // NOTE(review): this flag is hoisted as a constant `true`; the surrounding comment
        // suggests it was meant to be true only for the first haplotype of each read —
        // confirm against the caching contract of computeReadLikelihoodGivenHaplotypeLog10.
        final boolean isFirstHaplotype = true;
        for (int hap = 0; hap < numHaplotypes; hap++) {
            final byte[] haplotypeBases = haplotypes.get(hap).getBases();
            final byte[] nextHaplotypeBases = (hap + 1 < numHaplotypes) ? haplotypes.get(hap + 1).getBases() : null;
            final double logLik = computeReadLikelihoodGivenHaplotypeLog10(haplotypeBases, readBases, readQuals, readInsQuals, readDelQuals, overallGCP, isFirstHaplotype, nextHaplotypeBases);
            logLikelihoods.set(hap, readIndex, logLik);
            mLogLikelihoodArray[flatIndex++] = logLik;
        }
        readIndex++;
    }
    if (doProfiling) {
        threadLocalPairHMMComputeTimeDiff = (System.nanoTime() - startTime);
        pairHMMComputeTime += threadLocalPairHMMComputeTimeDiff;
    }
}
Usage example of org.broadinstitute.hellbender.utils.read.GATKRead in the project gatk by broadinstitute.
Example: class VectorLoglessPairHMM, method computeLog10Likelihoods.
/**
 * {@inheritDoc}
 */
@Override
public void computeLog10Likelihoods(final LikelihoodMatrix<Haplotype> logLikelihoods, final List<GATKRead> processedReads, final Map<GATKRead, byte[]> gcp) {
    if (processedReads.isEmpty()) {
        return;
    }
    if (doProfiling) {
        startTime = System.nanoTime();
    }
    final int numReads = processedReads.size();
    final int numHaplotypes = logLikelihoods.numberOfAlleles();
    // Marshal per-read byte arrays into the flat holder layout the native library expects.
    final ReadDataHolder[] readDataArray = new ReadDataHolder[numReads];
    int holderIdx = 0;
    for (final GATKRead read : processedReads) {
        final ReadDataHolder holder = new ReadDataHolder();
        holder.readBases = read.getBases();
        holder.readQuals = read.getBaseQualities();
        holder.insertionGOP = ReadUtils.getBaseInsertionQualities(read);
        holder.deletionGOP = ReadUtils.getBaseDeletionQualities(read);
        holder.overallGCP = gcp.get(read);
        readDataArray[holderIdx++] = holder;
    }
    // Flat output buffer: one likelihood per (read, haplotype) pair, read-major order.
    mLogLikelihoodArray = new double[numReads * numHaplotypes];
    if (doProfiling) {
        threadLocalSetupTimeDiff = (System.nanoTime() - startTime);
    }
    // Single batched native call computes every read x haplotype likelihood.
    pairHmm.computeLikelihoods(readDataArray, mHaplotypeDataArray, mLogLikelihoodArray);
    // Scatter the results back into the matrix. The native output is ordered by the
    // haplotype-list index, which may differ from the matrix's allele order, so map
    // each haplotype to its list index before reading its likelihood.
    for (int r = 0; r < numReads; r++) {
        final int rowOffset = r * numHaplotypes;
        int matrixHapIdx = 0;
        for (final Haplotype haplotype : logLikelihoods.alleles()) {
            final int listIdx = haplotypeToHaplotypeListIdxMap.get(haplotype);
            logLikelihoods.set(matrixHapIdx, r, mLogLikelihoodArray[rowOffset + listIdx]);
            matrixHapIdx++;
        }
    }
    if (doProfiling) {
        threadLocalPairHMMComputeTimeDiff = (System.nanoTime() - startTime);
        pairHMMComputeTime += threadLocalPairHMMComputeTimeDiff;
        pairHMMSetupTime += threadLocalSetupTimeDiff;
    }
}
Usage example of org.broadinstitute.hellbender.utils.read.GATKRead in the project gatk by broadinstitute.
Example: class ReadFilteringIterator, method next.
/**
 * Returns the buffered read and advances the one-read look-ahead.
 *
 * @return the next read passing the filter
 * @throws NoSuchElementException if the underlying iterator is exhausted
 */
@Override
public GATKRead next() {
    if (hasNext()) {
        // Hand back the buffered read, then refill the look-ahead slot.
        final GATKRead current = nextRead;
        nextRead = loadNextRead();
        return current;
    }
    throw new NoSuchElementException("Iterator exhausted");
}
Aggregations