Use of org.broadinstitute.hellbender.utils.haplotype.Haplotype in project gatk by broadinstitute.
The class VectorLoglessPairHMM, method initialize.
/**
 * {@inheritDoc}
 */
@Override
public void initialize(final List<Haplotype> haplotypes, final Map<String, List<GATKRead>> perSampleReadList,
                       final int readMaxLength, final int haplotypeMaxLength) {
    // do not need to call super.initialize()
    final int numHaplotypes = haplotypes.size();
    mHaplotypeDataArray = new HaplotypeDataHolder[numHaplotypes];
    int idx = 0;
    haplotypeToHaplotypeListIdxMap.clear();
    for (final Haplotype currHaplotype : haplotypes) {
        mHaplotypeDataArray[idx] = new HaplotypeDataHolder();
        mHaplotypeDataArray[idx].haplotypeBases = currHaplotype.getBases();
        haplotypeToHaplotypeListIdxMap.put(currHaplotype, idx);
        ++idx;
    }
}
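The method keeps mHaplotypeDataArray and haplotypeToHaplotypeListIdxMap in lockstep, so a Haplotype can later be translated back to its slot in the array. Below is a minimal standalone sketch of that list-to-index pattern; it is an illustration only, not GATK code, and uses plain java.util types.

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

final class HaplotypeIndexSketch {
    // Maps each element to its position in the list, mirroring how
    // haplotypeToHaplotypeListIdxMap stays in sync with mHaplotypeDataArray.
    static <T> Map<T, Integer> indexByPosition(final List<T> items) {
        final Map<T, Integer> indexMap = new HashMap<>();
        for (int i = 0; i < items.size(); i++) {
            indexMap.put(items.get(i), i);
        }
        return indexMap;
    }

    public static void main(String[] args) {
        final Map<String, Integer> idx = indexByPosition(Arrays.asList("hapA", "hapB"));
        System.out.println(idx.get("hapB")); // 1
    }
}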
Use of org.broadinstitute.hellbender.utils.haplotype.Haplotype in project gatk by broadinstitute.
The class PairHMMLikelihoodCalculationEngineUnitTest, method testComputeLikelihoods.
@Test
public void testComputeLikelihoods() {
    final LikelihoodEngineArgumentCollection LEAC = new LikelihoodEngineArgumentCollection();
    PairHMMLikelihoodCalculationEngine.writeLikelihoodsToFile = true;
    final ReadLikelihoodCalculationEngine lce = new PairHMMLikelihoodCalculationEngine(
            (byte) SAMUtils.MAX_PHRED_SCORE,
            new PairHMMNativeArguments(),
            PairHMM.Implementation.LOGLESS_CACHING,
            MathUtils.logToLog10(QualityUtils.qualToErrorProbLog10(LEAC.phredScaledGlobalReadMismappingRate)),
            PairHMMLikelihoodCalculationEngine.PCRErrorModel.CONSERVATIVE);
    final Map<String, List<GATKRead>> perSampleReadList = new HashMap<>();
    final int n = 10;
    final GATKRead read1 = ArtificialReadUtils.createArtificialRead(TextCigarCodec.decode(n + "M"));
    read1.setMappingQuality(60);
    final String sample1 = "sample1";
    perSampleReadList.put(sample1, Arrays.asList(read1));
    final SampleList samples = new IndexedSampleList(sample1);
    final AssemblyResultSet assemblyResultSet = new AssemblyResultSet();
    final byte[] bases = Strings.repeat("A", n + 1).getBytes();
    final Haplotype hap1 = new Haplotype(bases, true);
    hap1.setGenomeLocation(read1);
    assemblyResultSet.add(hap1);
    // copy the bases so hap1's array is not mutated, then change one base so hap2 differs from hap1
    final byte[] basesModified = bases.clone();
    basesModified[5] = 'C';
    final Haplotype hap2 = new Haplotype(basesModified, false);
    // use the same location as hap1
    hap2.setGenomeLocation(read1);
    assemblyResultSet.add(hap2);
    final ReadLikelihoods<Haplotype> likes = lce.computeReadLikelihoods(assemblyResultSet, samples, perSampleReadList);
    final LikelihoodMatrix<Haplotype> mtx = likes.sampleMatrix(0);
    Assert.assertEquals(mtx.numberOfAlleles(), 2);
    Assert.assertEquals(mtx.numberOfReads(), 1);
    final double v1 = mtx.get(0, 0);
    final double v2 = mtx.get(1, 0);
    Assert.assertTrue(v1 > v2, "matching haplotype should have a higher likelihood");
    lce.close();
    new File(PairHMMLikelihoodCalculationEngine.LIKELIHOODS_FILENAME).delete();
}
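The matrix returned by likes.sampleMatrix(0) is indexed as (allele, read): mtx.get(0, 0) is the log10 likelihood of read 0 under haplotype 0, and mtx.get(1, 0) the likelihood of the same read under haplotype 1, which is why the matching haplotype is expected to score higher. Below is a minimal standalone sketch of that indexing convention; it is an illustration only, not GATK's LikelihoodMatrix implementation.

// Minimal sketch of an alleles-by-reads matrix of log10 likelihoods.
final class TinyLikelihoodMatrix {
    private final double[][] values; // [alleleIndex][readIndex]

    TinyLikelihoodMatrix(final int numberOfAlleles, final int numberOfReads) {
        values = new double[numberOfAlleles][numberOfReads];
    }

    void set(final int alleleIndex, final int readIndex, final double log10Likelihood) {
        values[alleleIndex][readIndex] = log10Likelihood;
    }

    double get(final int alleleIndex, final int readIndex) {
        return values[alleleIndex][readIndex];
    }
}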
Use of org.broadinstitute.hellbender.utils.haplotype.Haplotype in project gatk by broadinstitute.
The class RandomLikelihoodCalculationEngineUnitTest, method testComputeLikelihoods.
@Test
public void testComputeLikelihoods() {
    final ReadLikelihoodCalculationEngine lce = new RandomLikelihoodCalculationEngine();
    final Map<String, List<GATKRead>> perSampleReadList = new HashMap<>();
    final int n = 10;
    final GATKRead read1 = ArtificialReadUtils.createArtificialRead(TextCigarCodec.decode(n + "M"));
    read1.setMappingQuality(60);
    final String sample1 = "sample1";
    perSampleReadList.put(sample1, Arrays.asList(read1));
    final SampleList samples = new IndexedSampleList(sample1);
    final AssemblyResultSet assemblyResultSet = new AssemblyResultSet();
    final byte[] bases = Strings.repeat("A", n + 1).getBytes();
    final Haplotype hap1 = new Haplotype(bases, true);
    hap1.setGenomeLocation(read1);
    assemblyResultSet.add(hap1);
    // copy the bases so hap1's array is not mutated, then change one base so hap2 differs from hap1
    final byte[] basesModified = bases.clone();
    basesModified[5] = 'C';
    final Haplotype hap2 = new Haplotype(basesModified, false);
    // use the same location as hap1
    hap2.setGenomeLocation(read1);
    assemblyResultSet.add(hap2);
    final ReadLikelihoods<Haplotype> likes = lce.computeReadLikelihoods(assemblyResultSet, samples, perSampleReadList);
    final LikelihoodMatrix<Haplotype> mtx = likes.sampleMatrix(0);
    Assert.assertEquals(mtx.numberOfAlleles(), 2);
    Assert.assertEquals(mtx.numberOfReads(), 1);
    final double v1 = mtx.get(0, 0);
    final double v2 = mtx.get(1, 0);
    Assert.assertTrue(v1 < 0);
    Assert.assertTrue(v2 < 0);
    lce.close();
}
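Because the likelihoods here are random rather than computed from the reads, the only property the test can assert is that the values look like log10 likelihoods, i.e. they are negative: a likelihood is a probability in (0, 1], so its log10 is at most 0. A small sketch of that relationship follows; it is an illustration only, not the RandomLikelihoodCalculationEngine source.

import java.util.Random;

final class RandomLog10LikelihoodSketch {
    public static void main(String[] args) {
        final Random rng = new Random(42);
        // nextDouble() is in [0, 1), so 1 - nextDouble() is a probability in (0, 1].
        final double p = 1.0 - rng.nextDouble();
        final double log10Likelihood = Math.log10(p);
        System.out.println(log10Likelihood <= 0.0); // true
    }
}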
Use of org.broadinstitute.hellbender.utils.haplotype.Haplotype in project gatk by broadinstitute.
The class ReferenceConfidenceModelUnitTest, method testRefConfidenceBasic.
@Test(dataProvider = "RefConfidenceData")
public void testRefConfidenceBasic(final int nReads, final int extension) {
    final RefConfData data = new RefConfData("ACGTAACCGGTT", extension);
    final List<Haplotype> haplotypes = Arrays.asList(data.getRefHap());
    final List<VariantContext> calls = Collections.emptyList();
    for (int i = 0; i < nReads; i++) {
        data.getActiveRegion().add(data.makeRead(0, data.getRefLength()));
    }
    final ReadLikelihoods<Haplotype> likelihoods = createDummyStratifiedReadMap(data.getRefHap(), samples, data.getActiveRegion());
    final PloidyModel ploidyModel = new HomogeneousPloidyModel(samples, 2);
    final IndependentSampleGenotypesModel genotypingModel = new IndependentSampleGenotypesModel();
    final List<Integer> expectedDPs = Collections.nCopies(data.getActiveRegion().getSpan().size(), nReads);
    final List<VariantContext> contexts = model.calculateRefConfidence(data.getRefHap(), haplotypes, data.getPaddedRefLoc(), data.getActiveRegion(), likelihoods, ploidyModel, calls);
    checkReferenceModelResult(data, contexts, expectedDPs, calls);
}
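Each simulated read covers the full reference length, so every position of the active region's span is covered by all nReads reads and the expected depth (DP) is constant across the span; that is why expectedDPs is built with Collections.nCopies. A small sketch of that computation, as an illustration only:

import java.util.Collections;
import java.util.List;

final class ExpectedDepthSketch {
    // When every read spans the whole region, per-position depth equals the read count.
    static List<Integer> expectedDepths(final int spanSize, final int nReads) {
        return Collections.nCopies(spanSize, nReads);
    }

    public static void main(String[] args) {
        System.out.println(expectedDepths(5, 3)); // [3, 3, 3, 3, 3]
    }
}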
Use of org.broadinstitute.hellbender.utils.haplotype.Haplotype in project gatk by broadinstitute.
The class ReadThreadingAssemblerUnitTest, method assemble.
private List<Haplotype> assemble(final ReadThreadingAssembler assembler, final byte[] refBases, final SimpleInterval loc, final List<GATKRead> reads) {
    final Haplotype refHaplotype = new Haplotype(refBases, true);
    final Cigar c = new Cigar();
    c.add(new CigarElement(refHaplotype.getBases().length, CigarOperator.M));
    refHaplotype.setCigar(c);
    final AssemblyRegion activeRegion = new AssemblyRegion(loc, null, true, 0, header);
    activeRegion.addAll(reads);
    // logger.warn("Assembling " + activeRegion + " with " + engine);
    final AssemblyResultSet assemblyResultSet = assembler.runLocalAssembly(activeRegion, refHaplotype, refBases, loc, Collections.<VariantContext>emptyList(), null, header);
    return assemblyResultSet.getHaplotypeList();
}
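Before assembly, the reference haplotype is given a single full-length match CigarElement whose length equals the number of haplotype bases. A minimal standalone sketch of that pattern, using the same htsjdk cigar classes that appear in the snippet above (illustration only, not the test class itself):

import htsjdk.samtools.Cigar;
import htsjdk.samtools.CigarElement;
import htsjdk.samtools.CigarOperator;

final class RefHaplotypeCigarSketch {
    // Builds the single "<length>M" cigar used for a reference haplotype.
    static Cigar fullLengthMatchCigar(final byte[] refBases) {
        final Cigar cigar = new Cigar();
        cigar.add(new CigarElement(refBases.length, CigarOperator.M));
        return cigar;
    }

    public static void main(String[] args) {
        System.out.println(fullLengthMatchCigar("ACGTAACCGGTT".getBytes())); // 12M
    }
}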