Use of org.broadinstitute.hellbender.utils.IndexRange in project gatk by broadinstitute.

From the class HashedListTargetCollectionUnitTest, method testTargetFullRangeObject:
@Test(dependsOnMethods = { "testCorrectInitialization", "testCorrectRangeObjectInitialization" })
public void testTargetFullRangeObject() {
    final IndexRange range = new IndexRange(0, targetDB.targetCount());
    final List<SimpleInterval> targets = targetDB.targets(range);
    Assert.assertNotNull(targets);
    Assert.assertEquals(targets.size(), nonOverlappingTargetIntervals.size());
    for (int i = 0; i < targets.size(); i++) {
        Assert.assertEquals(targets.get(i), nonOverlappingTargetIntervals.get(i));
    }
}
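Both snippets on this page revolve around IndexRange, which represents a half-open range of integer indices [from, to), as the full-range constructions above and below suggest. For orientation, a minimal standalone sketch: the constructor and sum mirror the snippets on this page, while forEach is assumed from the same class's functional-style API, and the wrapper class and lambda bodies are illustrative only.

import org.broadinstitute.hellbender.utils.IndexRange;

public class IndexRangeSketch {
    public static void main(String[] args) {
        // Half-open range [0, 5): indices 0, 1, 2, 3, 4.
        final IndexRange range = new IndexRange(0, 5);

        // Apply an action to each index in the range.
        range.forEach(i -> System.out.println("index " + i));

        // Sum a per-index quantity over the range, as the testEvidence
        // snippet below does over the columns of a likelihood matrix.
        final double total = range.sum(i -> i * 0.5);
        System.out.println("total = " + total); // 0.0 + 0.5 + 1.0 + 1.5 + 2.0 = 5.0
    }
}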
Use of org.broadinstitute.hellbender.utils.IndexRange in project gatk by broadinstitute.

From the class SomaticLikelihoodsEngineUnitTest, method testEvidence:
@Test
public void testEvidence() {
    // One exact limit for the evidence is when the likelihoods of each read are so peaked
    // (i.e. the most likely allele of each read is much likelier than all other alleles)
    // that the sum over latent read-to-allele assignments (that is, over the indicator z
    // in the notes) is dominated by the max-likelihood allele configuration, and thus the
    // evidence reduces to exactly integrating out the Dirichlet allele fractions.
    final double[] prior = new double[] { 1, 2 };
    final RealMatrix log10Likelihoods = new Array2DRowRealMatrix(2, 4);
    log10Likelihoods.setRow(0, new double[] { 0.1, 4.0, 3.0, -10 });
    log10Likelihoods.setRow(1, new double[] { -12, -9, -5.0, 0.5 });
    final double calculatedLog10Evidence = SomaticLikelihoodsEngine.log10Evidence(log10Likelihoods, prior);
    final double[] maxLikelihoodCounts = new double[] { 3, 1 };
    final double expectedLog10Evidence = SomaticLikelihoodsEngine.log10DirichletNormalization(prior)
            - SomaticLikelihoodsEngine.log10DirichletNormalization(MathArrays.ebeAdd(prior, maxLikelihoodCounts))
            + new IndexRange(0, log10Likelihoods.getColumnDimension())
                    .sum(read -> log10Likelihoods.getColumnVector(read).getMaxValue());
    Assert.assertEquals(calculatedLog10Evidence, expectedLog10Evidence, 1e-5);
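    // In symbols (notation mine, not from the test): writing C(alpha) for the Dirichlet
    // normalization constant that log10DirichletNormalization presumably computes,
    // C(alpha) = Gamma(sum_a alpha_a) / prod_a Gamma(alpha_a), the identity checked
    // above is
    //     log10 Z  ~=  log10 C(prior) - log10 C(prior + n*) + sum_r max_a log10 L(r, a),
    // where n* counts the reads assigned to their max-likelihood allele; here n* = (3, 1),
    // since reads 0, 1, and 2 favor the first allele and read 3 favors the second.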
    // When there is just one read we can calculate the evidence exactly.
    final double[] prior2 = new double[] { 1, 2 };
    final RealMatrix log10Likelihoods2 = new Array2DRowRealMatrix(2, 1);
    log10Likelihoods2.setRow(0, new double[] { 0.1 });
    log10Likelihoods2.setRow(1, new double[] { 0.5 });
    final double calculatedLog10Evidence2 = SomaticLikelihoodsEngine.log10Evidence(log10Likelihoods2, prior2);
    final double[] delta0 = new double[] { 1, 0 };
    final double[] delta1 = new double[] { 0, 1 };
    final double expectedLog10Evidence2 = MathUtils.log10SumLog10(
            log10Likelihoods2.getEntry(0, 0)
                    + SomaticLikelihoodsEngine.log10DirichletNormalization(prior2)
                    - SomaticLikelihoodsEngine.log10DirichletNormalization(MathArrays.ebeAdd(prior2, delta0)),
            log10Likelihoods2.getEntry(1, 0)
                    + SomaticLikelihoodsEngine.log10DirichletNormalization(prior2)
                    - SomaticLikelihoodsEngine.log10DirichletNormalization(MathArrays.ebeAdd(prior2, delta1)));
    Assert.assertEquals(calculatedLog10Evidence2, expectedLog10Evidence2, 0.05);
}
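In the single-read check, the normalization ratio collapses to the prior mean allele fraction, C(alpha) / C(alpha + delta_a) = alpha_a / sum_b alpha_b, so the evidence is just the prior-weighted average of the read's likelihoods. For prior (1, 2) and log10-likelihoods (0.1, 0.5) this gives (a worked check of the assertion above, not part of the test itself):

\[
Z = \sum_a \ell_a \, \frac{\alpha_a}{\sum_b \alpha_b}
  = 10^{0.1} \cdot \tfrac{1}{3} + 10^{0.5} \cdot \tfrac{2}{3}
  \approx 2.528,
\qquad \log_{10} Z \approx 0.4027 .
\]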