Use of org.broadinstitute.hellbender.tools.pon.allelic.AllelicPanelOfNormals in the broadinstitute/gatk project.
From the class PerformJointSegmentation, method doWork:
@Override
public Object doWork() {
    // Fail fast on nonsensical HMM configuration before doing any I/O.
    ParamUtils.isPositive(initialNumAFStates, "Must have at least one allele-fraction state.");
    ParamUtils.isPositive(initialNumCRStates, "Must have at least one copy-ratio state.");
    // The allelic PoN is optional; fall back to the no-op empty PoN when no file was given.
    final AllelicPanelOfNormals allelicPoN = allelicPoNFile != null ? AllelicPanelOfNormals.read(allelicPoNFile) : AllelicPanelOfNormals.EMPTY_PON;
    final AllelicCountCollection acc = new AllelicCountCollection(snpCountsFile);
    final ReadCountCollection rcc;
    try {
        rcc = ReadCountCollectionUtils.parse(new File(coverageFile));
    } catch (final IOException ex) {
        // Surface the offending path and the underlying cause; the previous message
        // ("could not read input file") discarded both, making failures undiagnosable.
        // NOTE(review): if UserException.BadInput has a (String, Throwable) constructor
        // in this GATK version, prefer passing ex as the cause — confirm against UserException.
        throw new UserException.BadInput("could not read input file " + coverageFile + ": " + ex.getMessage());
    }
    // Jointly segment copy ratio and allele fraction with a single HMM over both data types.
    final JointAFCRSegmenter jointSegmenter = JointAFCRSegmenter.createJointSegmenter(initialNumCRStates, rcc, initialNumAFStates, acc, allelicPoN);
    final List<Pair<SimpleInterval, AFCRHiddenState>> segmentation = jointSegmenter.findSegments();
    // Wrap each (interval, hidden state) pair as an ACNV segment with point-mass posteriors.
    final List<ACNVModeledSegment> segments = segmentation.stream().map(pair -> new ACNVModeledSegment(pair.getLeft(), errorlessPosterior(pair.getRight().getLog2CopyRatio()), errorlessPosterior(pair.getRight().getMinorAlleleFraction()))).collect(Collectors.toList());
    //TODO: make more reasonable output for ACNV 2.0
    SegmentUtils.writeACNVModeledSegmentFile(outputSegmentsFile, segments, new Genome(rcc, acc.getCounts()));
    return "SUCCESS";
}
Use of org.broadinstitute.hellbender.tools.pon.allelic.AllelicPanelOfNormals in the broadinstitute/gatk-protected project.
From the class CreateAllelicPanelOfNormals, method doWork:
@Override
protected Object doWork() {
    validateArguments();
    // The native HDF5 library must be loadable before any PoN I/O can happen.
    // Passing null means the default temp dir is used for library extraction.
    final boolean hdf5Available = new HDF5Library().load(null);
    if (!hdf5Available) {
        throw new UserException.HardwareFeatureException("Cannot load the required HDF5 library. " + "HDF5 is currently supported on x86-64 architecture and Linux or OSX systems.");
    }
    logger.info("Starting allelic panel of normals creation...");
    // Build the PoN from the input normals, keeping only sites above the frequency threshold.
    final AllelicPanelOfNormals pon = new AllelicPanelOfNormalsCreator(inputFiles).create(siteFrequencyThreshold);
    logger.info("Allelic panel of normals created.");
    logger.info("Writing allelic panel of normals to output HDF5 file...");
    pon.write(outputFile, HDF5File.OpenMode.CREATE);
    logger.info("Allelic panel of normals written to " + outputFile + ".");
    // Optionally emit a human-readable TSV copy alongside the HDF5 output.
    if (outputTSVFile != null) {
        pon.write(outputTSVFile);
        logger.info("Allelic panel of normals written as tab-separated values to " + outputTSVFile + ".");
    }
    return "SUCCESS";
}
Use of org.broadinstitute.hellbender.tools.pon.allelic.AllelicPanelOfNormals in the broadinstitute/gatk project.
From the class AlleleFractionModellerUnitTest, method testMCMCWithAllelicPoN:
/**
 * Tests MCMC inference on simulated data in the presence of an allelic PoN. These MCMC tests
 * predate the allelic PoN, so the simulated hets are not generated from a PoN; instead we load
 * a fixed simulated PoN from a file and verify that its MLE hyperparameters are "sampled"
 * correctly by simply taking the MLE PoN values. The PoN does not cover the simulated sites,
 * so it performs no reference-bias correction on the simulated data here.
 * That correction behavior is exercised separately on fixed data loaded from files in
 * {@link AlleleFractionModellerUnitTest#testBiasCorrection}.
 */
@Test
public void testMCMCWithAllelicPoN() {
    // Parameters used to simulate the data.
    final double simulatedMeanBias = 1.2;
    final double simulatedBiasVariance = 0.04;
    // MLE hyperparameters of the fixed PoN (generated with alpha = 65, beta = 60).
    final double ponMeanBias = 1.083;
    final double ponBiasVariance = 0.0181;
    final AllelicPanelOfNormals pon = new AllelicPanelOfNormals(new AllelicCountCollection(ALLELIC_PON_NORMAL_COUNTS_FILE));
    testMCMC(simulatedMeanBias, simulatedBiasVariance, ponMeanBias, ponBiasVariance, pon);
}
Use of org.broadinstitute.hellbender.tools.pon.allelic.AllelicPanelOfNormals in the broadinstitute/gatk project.
From the class AlleleFractionModellerUnitTest, method dataBiasCorrection:
@DataProvider(name = "biasCorrection")
public Object[][] dataBiasCorrection() {
    LoggingUtils.setLoggingLevel(Log.LogLevel.INFO);
    // Fixed sample data: a normal sample, one containing artifactual SNPs, and one with a real event.
    final AllelicCountCollection normalSample = new AllelicCountCollection(SAMPLE_NORMAL_FILE);
    final AllelicCountCollection badSnpSample = new AllelicCountCollection(SAMPLE_WITH_BAD_SNPS_FILE);
    final AllelicCountCollection eventSample = new AllelicCountCollection(SAMPLE_WITH_EVENT_FILE);
    // Two PoNs: one built from normals only, one that also covers the artifactual SNPs.
    final AllelicPanelOfNormals normalPoN = new AllelicPanelOfNormals(new AllelicCountCollection(ALLELIC_PON_NORMAL_COUNTS_FILE));
    final AllelicPanelOfNormals badSnpPoN = new AllelicPanelOfNormals(new AllelicCountCollection(ALLELIC_PON_WITH_BAD_SNPS_COUNTS_FILE));
    // Expected minor-allele fraction in the middle segment for each scenario.
    final double expectedNormal = 0.5;
    final double expectedBadSnpsWithNormalPoN = 0.4;
    final double expectedEvent = 0.33;
    return new Object[][] {
            { normalSample, normalPoN, expectedNormal },
            { badSnpSample, normalPoN, expectedBadSnpsWithNormalPoN },
            { eventSample, normalPoN, expectedEvent },
            // A PoN that covers the bad SNPs restores the normal expectation.
            { badSnpSample, badSnpPoN, expectedNormal }
    };
}
Use of org.broadinstitute.hellbender.tools.pon.allelic.AllelicPanelOfNormals in the broadinstitute/gatk-protected project.
From the class PerformJointSegmentation, method doWork:
@Override
public Object doWork() {
    // Fail fast on nonsensical HMM configuration before doing any I/O.
    ParamUtils.isPositive(initialNumAFStates, "Must have at least one allele-fraction state.");
    ParamUtils.isPositive(initialNumCRStates, "Must have at least one copy-ratio state.");
    // The allelic PoN is optional; fall back to the no-op empty PoN when no file was given.
    final AllelicPanelOfNormals allelicPoN = allelicPoNFile != null ? AllelicPanelOfNormals.read(allelicPoNFile) : AllelicPanelOfNormals.EMPTY_PON;
    final AllelicCountCollection acc = new AllelicCountCollection(snpCountsFile);
    final ReadCountCollection rcc;
    try {
        rcc = ReadCountCollectionUtils.parse(new File(coverageFile));
    } catch (final IOException ex) {
        // Surface the offending path and the underlying cause; the previous message
        // ("could not read input file") discarded both, making failures undiagnosable.
        // NOTE(review): if UserException.BadInput has a (String, Throwable) constructor
        // in this GATK version, prefer passing ex as the cause — confirm against UserException.
        throw new UserException.BadInput("could not read input file " + coverageFile + ": " + ex.getMessage());
    }
    // Jointly segment copy ratio and allele fraction with a single HMM over both data types.
    final JointAFCRSegmenter jointSegmenter = JointAFCRSegmenter.createJointSegmenter(initialNumCRStates, rcc, initialNumAFStates, acc, allelicPoN);
    final List<Pair<SimpleInterval, AFCRHiddenState>> segmentation = jointSegmenter.findSegments();
    // Wrap each (interval, hidden state) pair as an ACNV segment with point-mass posteriors.
    final List<ACNVModeledSegment> segments = segmentation.stream().map(pair -> new ACNVModeledSegment(pair.getLeft(), errorlessPosterior(pair.getRight().getLog2CopyRatio()), errorlessPosterior(pair.getRight().getMinorAlleleFraction()))).collect(Collectors.toList());
    //TODO: make more reasonable output for ACNV 2.0
    SegmentUtils.writeACNVModeledSegmentFile(outputSegmentsFile, segments, new Genome(rcc, acc.getCounts()));
    return "SUCCESS";
}
Aggregations