Use of org.broadinstitute.hellbender.tools.exome.ACNVModeledSegment in the gatk project by broadinstitute.
From the class AllelicSplitCallerModelStateUnitTest, method testSerializationRoundTrip:
@Test
public void testSerializationRoundTrip() {
    // A minimal one-segment input is enough to exercise Kryo round-tripping of the model state.
    final ACNVModeledSegment segment = new ACNVModeledSegment(
            new SimpleInterval("1", 1000, 1500),
            new PosteriorSummary(-4000, -4001, -4002),
            new PosteriorSummary(-4000, -4001, -4002));
    final List<ACNVModeledSegment> segments = new ArrayList<>();
    segments.add(segment);

    final AllelicBalanceCallerModelState initialState =
            AllelicBalanceCallerModelState.createInitialCNLOHCallerModelState(
                    0.2, segments, HomoSapiensConstants.DEFAULT_PLOIDY, CNLOHCaller.NUM_RHOS);

    // The round trip must not throw; that is the whole assertion of this test.
    SparkTestUtils.roundTripInKryo(initialState, AllelicBalanceCallerModelState.class,
            SparkContextFactory.getTestSparkContext().getConf());
}
Use of org.broadinstitute.hellbender.tools.exome.ACNVModeledSegment in the gatk project by broadinstitute.
From the class AllelicSplitCallerUnitTest, method testMakeCalls:
@Test
public void testMakeCalls() {
    // Smoke test: calling should complete without error and yield fully-populated results.
    final JavaSparkContext sparkContext = SparkContextFactory.getTestSparkContext();
    final CNLOHCaller caller = new CNLOHCaller();
    final List<ACNVModeledSegment> modeledSegments = SegmentUtils.readACNVModeledSegmentFile(ACNV_SEG_FILE);
    SparkTestUtils.roundTripInKryo(modeledSegments.get(0), ACNVModeledSegment.class, sparkContext.getConf());

    // The caller itself must be Kryo-serializable before any calls are made.
    SparkTestUtils.roundTripInKryo(caller, CNLOHCaller.class, sparkContext.getConf());

    final List<AllelicCalls> calls = caller.makeCalls(modeledSegments, 2, sparkContext);
    Assert.assertNotNull(calls);
    Assert.assertTrue(calls.size() > 0);
    // Every call must carry all three components.
    for (final AllelicCalls call : calls) {
        Assert.assertNotNull(call.getBalancedCall());
        Assert.assertNotNull(call.getCnlohCall());
        Assert.assertNotNull(call.getAcnvSegment());
    }

    // ...and the caller must remain serializable after making calls.
    SparkTestUtils.roundTripInKryo(caller, CNLOHCaller.class, sparkContext.getConf());
    SparkTestUtils.roundTripInKryo(calls.get(0), AllelicCalls.class, sparkContext.getConf());
}
Use of org.broadinstitute.hellbender.tools.exome.ACNVModeledSegment in the gatk-protected project by broadinstitute.
From the class AllelicSplitCallerModelStateUnitTest, method testBasicInit:
@Test
public void testBasicInit() {
    // Build a single dummy segment and derive the initial model state from it.
    final ACNVModeledSegment segment = new ACNVModeledSegment(
            new SimpleInterval("1", 1000, 1500),
            new PosteriorSummary(-4000, -4001, -4002),
            new PosteriorSummary(-4000, -4001, -4002));
    final List<ACNVModeledSegment> segments = new ArrayList<>();
    segments.add(segment);

    final AllelicBalanceCallerModelState state =
            AllelicBalanceCallerModelState.createInitialCNLOHCallerModelState(
                    0.2, segments, HomoSapiensConstants.DEFAULT_PLOIDY, CNLOHCaller.NUM_RHOS);

    // The state and its arrays must be populated.
    Assert.assertNotNull(state);
    Assert.assertNotNull(state.getEffectivePis());
    Assert.assertTrue(state.getEffectivePis().length > 0);
    Assert.assertTrue(state.getmVals().length > 0);
    Assert.assertTrue(state.getnVals().length > 0);
    // The effective pis are a probability distribution, so they must sum to 1.
    Assert.assertEquals(MathUtils.sum(state.getEffectivePis()), 1.0, 1e-10);
}
Use of org.broadinstitute.hellbender.tools.exome.ACNVModeledSegment in the gatk-protected project by broadinstitute.
From the class AllelicSplitCallerUnitTest, method testMakeCalls:
@Test
public void testMakeCalls() {
// This mostly just checks that the calling does not crash and does produce results.
final CNLOHCaller cnlohCaller = new CNLOHCaller();
final JavaSparkContext ctx = SparkContextFactory.getTestSparkContext();
final List<ACNVModeledSegment> segs = SegmentUtils.readACNVModeledSegmentFile(ACNV_SEG_FILE);
// Sanity check: a single input segment must survive a Kryo round trip on the test Spark conf.
SparkTestUtils.roundTripInKryo(segs.get(0), ACNVModeledSegment.class, ctx.getConf());
// Make sure the CNLOH Caller is serializable before making calls.
SparkTestUtils.roundTripInKryo(cnlohCaller, CNLOHCaller.class, ctx.getConf());
// The second argument (2) is passed straight to makeCalls; presumably a ploidy or
// iteration count -- TODO confirm against the CNLOHCaller.makeCalls signature.
final List<AllelicCalls> calls = cnlohCaller.makeCalls(segs, 2, ctx);
Assert.assertNotNull(calls);
Assert.assertTrue(calls.size() > 0);
// Every produced call must carry all three components: balanced call, CNLoH call, and segment.
Assert.assertTrue(calls.stream().allMatch(c -> c.getBalancedCall() != null));
Assert.assertTrue(calls.stream().allMatch(c -> c.getCnlohCall() != null));
Assert.assertTrue(calls.stream().allMatch(c -> c.getAcnvSegment() != null));
// Make sure the CNLOH Caller is serializable after making calls.
SparkTestUtils.roundTripInKryo(cnlohCaller, CNLOHCaller.class, ctx.getConf());
SparkTestUtils.roundTripInKryo(calls.get(0), AllelicCalls.class, ctx.getConf());
}
Use of org.broadinstitute.hellbender.tools.exome.ACNVModeledSegment in the gatk project by broadinstitute.
From the class CNLOHCaller, method calcNewRhos:
/**
 * Re-estimates each rho by running an independent univariate optimization of the
 * E-step objective, distributing the per-rho optimizations across Spark.
 *
 * @param segments              the modeled segments the objective is evaluated over
 * @param responsibilitiesBySeg per-segment responsibilities matrix; each rho's objective
 *                              indexes into it via its own position {@code i}
 * @param lambda                regularization/weight parameter forwarded to the objective
 * @param rhos                  current rho values; also used as the optimizer start points
 * @param mVals                 candidate m values forwarded to the objective
 * @param nVals                 candidate n values forwarded to the objective
 * @param ctx                   Spark context used to parallelize the per-rho optimizations
 * @return the optimized rho values, in the same order as the input {@code rhos}
 */
private double[] calcNewRhos(final List<ACNVModeledSegment> segments, final List<double[][][]> responsibilitiesBySeg, final double lambda, final double[] rhos, final int[] mVals, final int[] nVals, final JavaSparkContext ctx) {
// Since, we pass in the entire responsibilities matrix, we need the correct index for each rho. That, and the
// fact that this is a univariate objective function, means we need to create an instance for each rho. And
// then we blast across Spark.
// NOTE(review): an anonymous Function class (rather than a lambda) looks deliberate --
// presumably so the objective serializes cleanly when shipped to Spark executors.
// Confirm before converting this to a lambda.
final List<Pair<? extends Function<Double, Double>, SearchInterval>> objectives = IntStream.range(0, rhos.length).mapToObj(i -> new Pair<>(new Function<Double, Double>() {
@Override
public Double apply(Double rho) {
return calculateESmnObjective(rho, segments, responsibilitiesBySeg, mVals, nVals, lambda, i);
}
// Search each rho over [0, 1], starting from its current value.
}, new SearchInterval(0.0, 1.0, rhos[i]))).collect(Collectors.toList());
final JavaRDD<Pair<? extends Function<Double, Double>, SearchInterval>> objectivesRDD = ctx.parallelize(objectives);
// Optimize every objective on the cluster, then collect the scalar results back to the driver.
final List<Double> resultsAsDouble = objectivesRDD.map(objective -> optimizeIt(objective.getFirst(), objective.getSecond())).collect();
return resultsAsDouble.stream().mapToDouble(Double::doubleValue).toArray();
}
Aggregations