Use of org.broadinstitute.hellbender.utils.pileup.ReadPileup in project gatk by broadinstitute.
From the class PileupSpark, method pileupFunction:
private static Function<LocusWalkerContext, String> pileupFunction(List<FeatureInput<Feature>> metadata, boolean outputInsertLength, boolean showVerbose) {
    return (Function<LocusWalkerContext, String>) context -> {
        AlignmentContext alignmentContext = context.getAlignmentContext();
        ReferenceContext referenceContext = context.getReferenceContext();
        FeatureContext featureContext = context.getFeatureContext();
        // Summarize any features (e.g. dbSNP records) overlapping this locus.
        final String features = getFeaturesString(featureContext, metadata);
        final ReadPileup basePileup = alignmentContext.getBasePileup();
        final StringBuilder s = new StringBuilder();
        // Standard pileup line; fall back to 'N' when no reference data source is available.
        s.append(String.format("%s %s",
                basePileup.getPileupString(referenceContext.hasBackingDataSource() ? (char) referenceContext.getBase() : 'N'),
                features));
        if (outputInsertLength) {
            s.append(" ").append(insertLengthOutput(basePileup));
        }
        if (showVerbose) {
            s.append(" ").append(createVerboseOutput(basePileup));
        }
        s.append("\n");
        return s.toString();
    };
}
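The lambda is returned as an org.apache.spark.api.java.function.Function, so the enclosing Spark tool can map it over its per-locus RDD. A minimal sketch of that wiring, assuming a JavaRDD<LocusWalkerContext> named rdd and an output path supplied by the tool (these names are illustrative, not taken from the snippet above):

// Illustrative only: 'rdd', 'metadata', and 'outputPath' are assumed to come from the enclosing tool.
private static void writePileupLines(final JavaRDD<LocusWalkerContext> rdd,
                                     final List<FeatureInput<Feature>> metadata,
                                     final String outputPath) {
    // Each LocusWalkerContext becomes one formatted pileup line.
    final JavaRDD<String> lines = rdd.map(pileupFunction(metadata, false, false));
    lines.saveAsTextFile(outputPath);
}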
Use of org.broadinstitute.hellbender.utils.pileup.ReadPileup in project gatk by broadinstitute.
From the class FragmentCollectionUnitTest, method createFromMultiSamplePileup:
@Test
public void createFromMultiSamplePileup() throws Exception {
    final SAMFileHeader header = ArtificialReadUtils.createArtificialSamHeader();
    final GATKRead read1 = ArtificialReadUtils.createArtificialRead(header, "10M");
    final GATKRead read2 = ArtificialReadUtils.createArtificialRead(header, "10M");
    // Position the mates so that both overlap 22:208.
    read1.setPosition(new SimpleInterval("22", 200, 210));
    read2.setPosition(new SimpleInterval("22", 208, 218));
    read1.setMatePosition(read2);
    read2.setMatePosition(read1);
    final Locatable loc = new SimpleInterval("22", 208, 208);
    // Stratify the reads by sample, then merge them into a single multi-sample pileup.
    final Map<String, ReadPileup> stratified = new LinkedHashMap<>();
    stratified.put("sample1", new ReadPileup(loc, Arrays.asList(read2), 0));
    stratified.put("sample2", new ReadPileup(loc, Arrays.asList(read1), 9));
    final ReadPileup combined = new ReadPileup(loc, stratified);
    final FragmentCollection<PileupElement> elements = FragmentCollection.create(combined);
    // Both mates contribute elements at the locus, so the fragment is an overlapping pair, not a singleton.
    Assert.assertTrue(elements.getSingletonReads().isEmpty());
    Assert.assertEquals(elements.getOverlappingPairs().size(), 1);
}
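For contrast, a minimal hypothetical sketch of the non-overlapping case, built with the same artificial-read helpers: when only one mate of the pair contributes an element at the locus, FragmentCollection should report a singleton rather than an overlapping pair.

// Hypothetical: only read2 covers 22:208; its mate maps elsewhere and is absent from the pileup.
final SAMFileHeader header = ArtificialReadUtils.createArtificialSamHeader();
final GATKRead read1 = ArtificialReadUtils.createArtificialRead(header, "10M");
final GATKRead read2 = ArtificialReadUtils.createArtificialRead(header, "10M");
read1.setPosition(new SimpleInterval("22", 100, 110));
read2.setPosition(new SimpleInterval("22", 208, 218));
read1.setMatePosition(read2);
read2.setMatePosition(read1);
final Locatable loc = new SimpleInterval("22", 208, 208);
final ReadPileup singleMate = new ReadPileup(loc, Arrays.asList(read2), 0);
final FragmentCollection<PileupElement> fragments = FragmentCollection.create(singleMate);
// Expected under this assumption: one singleton fragment, no overlapping pairs.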
Use of org.broadinstitute.hellbender.utils.pileup.ReadPileup in project gatk-protected by broadinstitute.
From the class CalculateMixingFractions, method apply:
@Override
public void apply(final VariantContext vc, final ReadsContext readsContext, final ReferenceContext refContext, final FeatureContext fc) {
    if (!isBiallelicSingletonHetSnp(vc)) {
        return;
    }
    // The "variant sample" is the single sample with a het genotype at this site.
    final Optional<String> variantSample = StreamSupport.stream(vc.getGenotypes().spliterator(), false)
            .filter(Genotype::isHet)
            .map(Genotype::getSampleName)
            .findFirst();
    if (!variantSample.isPresent()) {
        return;
    }
    final List<GATKRead> reads = new ArrayList<>();
    final List<Integer> offsets = new ArrayList<>();
    for (final GATKRead read : readsContext) {
        if (read.failsVendorQualityCheck()) {
            continue;
        }
        // Step along the read's alignment until it reaches (or passes) the variant start.
        final AlignmentStateMachine asm = new AlignmentStateMachine(read);
        while (asm.stepForwardOnGenome() != null && asm.getGenomePosition() < vc.getStart()) {
            // intentionally empty: stepForwardOnGenome() advances the state machine
        }
        // Keep only reads that land exactly on the variant position, recording the read offset there.
        if (asm.getGenomePosition() == vc.getStart()) {
            reads.add(read);
            offsets.add(asm.getReadOffset());
        }
    }
    // Count reads supporting the alt allele versus all reads in the pileup at this site.
    final ReadPileup pileup = new ReadPileup(vc, reads, offsets);
    final byte altBase = vc.getAlternateAllele(0).getBases()[0];
    final long altCount = StreamSupport.stream(pileup.spliterator(), false)
            .filter(pe -> pe.getBase() == altBase)
            .count();
    final long totalCount = pileup.size();
    sampleCounts.get(variantSample.get()).addCounts(altCount, totalCount);
}
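The counting step at the end can be factored into a small helper over the pileup; a hypothetical sketch using only the ReadPileup calls that appear above (spliterator() and size()):

// Hypothetical helper: fraction of pileup elements whose base equals the given alt base.
private static double altFraction(final ReadPileup pileup, final byte altBase) {
    final long altCount = StreamSupport.stream(pileup.spliterator(), false)
            .filter(pe -> pe.getBase() == altBase)
            .count();
    return pileup.size() == 0 ? 0.0 : (double) altCount / pileup.size();
}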
Use of org.broadinstitute.hellbender.utils.pileup.ReadPileup in project gatk-protected by broadinstitute.
From the class HaplotypeCallerGenotypingEngineUnitTest, method testAddMiscellaneousAllele:
@Test(dataProvider = "AddMiscellaneousDataProvider", enabled = false)
public void testAddMiscellaneousAllele(final String readBases, final int readOffset, final String ref, final int refOffset,
                                       final String referenceAllele, final String[] alternatives,
                                       final double[] likelihoods, final double[] expected) {
    final byte baseQual = (byte) 30;
    final byte[] baseQuals = Utils.dupBytes(baseQual, readBases.length());
    final GATKRead read = ArtificialReadUtils.createArtificialRead(readBases.getBytes(), baseQuals, readBases.length() + "M");
    final Locatable loc = new SimpleInterval("20", refOffset, refOffset);
    // Single-read pileup at the reference offset under test.
    final ReadPileup pileup = new ReadPileup(loc, Collections.singletonList(read), readOffset);
    final VariantContextBuilder vcb = new VariantContextBuilder();
    final GenotypeBuilder gb = new GenotypeBuilder();
    final List<String> alleleStrings = new ArrayList<>(1 + alternatives.length);
    alleleStrings.add(referenceAllele);
    alleleStrings.addAll(Arrays.asList(alternatives));
    gb.AD(new int[] { 1 });
    gb.DP(1);
    gb.PL(likelihoods);
    vcb.alleles(alleleStrings);
    vcb.loc("20", refOffset, refOffset + referenceAllele.length() - 1);
    vcb.genotypes(gb.make());
    final VariantContext vc = vcb.make();
    // GenotypingEngine.addMiscellaneousAllele(vc, pileup, ref.getBytes(), 0);
    // The call above is commented out and the test is disabled, so updatedVc is a null placeholder;
    // the assertions below would NPE if the test were re-enabled without restoring that call.
    final VariantContext updatedVc = null;
    final GenotypeLikelihoods updatedLikelihoods = updatedVc.getGenotype(0).getLikelihoods();
    Assert.assertEquals(updatedLikelihoods.getAsVector().length, expected.length);
    final double[] updatedLikelihoodsArray = updatedVc.getGenotype(0).getLikelihoods().getAsVector();
    for (int i = 0; i < updatedLikelihoodsArray.length; i++) {
        Assert.assertEquals(updatedLikelihoodsArray[i], expected[i], 0.0001);
    }
    // The updated variant context is expected to carry the symbolic <NON_REF> allele.
    Allele altAllele = null;
    for (final Allele allele : updatedVc.getAlleles()) {
        if (allele.isSymbolic() && allele.getBaseString().equals(GATKVCFConstants.NON_REF_SYMBOLIC_ALLELE_NAME)) {
            altAllele = allele;
        }
    }
    Assert.assertNotNull(altAllele);
}
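Setting aside the disabled assertions, the pileup construction at the top of the test is the part most directly relevant to ReadPileup; a minimal hypothetical sketch of that constructor in isolation, mirroring the test's setup:

// Hypothetical standalone construction of a one-read pileup, mirroring the test above.
final byte[] bases = "ACGTACGTAC".getBytes();
final byte[] quals = Utils.dupBytes((byte) 30, bases.length);
final GATKRead read = ArtificialReadUtils.createArtificialRead(bases, quals, bases.length + "M");
final Locatable loc = new SimpleInterval("20", 100, 100);
// The constructor takes the reads plus a single within-read offset (here 5), as in the test above.
final ReadPileup pileup = new ReadPileup(loc, Collections.singletonList(read), 5);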
Use of org.broadinstitute.hellbender.utils.pileup.ReadPileup in project gatk by broadinstitute.
From the class PileupUnitTest, method testCreateVerboseOutput:
@Test
public void testCreateVerboseOutput() throws Exception {
    // the header
    final SAMFileHeader header = ArtificialReadUtils.createArtificialSamHeader();
    final SimpleInterval loc = new SimpleInterval("1:2");
    // create one read/pileup element sitting on a deletion, with the following verbose string
    final String read1String = "read1@1@10@20";
    final GATKRead read1 = ArtificialReadUtils.createArtificialRead(header, "read1", 0, 1, 10);
    read1.setMappingQuality(20);
    read1.setCigar("1M1D9M");
    // element index 1 of "1M1D9M" is the 1D element, so pe1 represents a deletion at this locus
    final PileupElement pe1 = new PileupElement(read1, 1, read1.getCigar().getCigarElement(1), 1, 0);
    // create a second element without a deletion, with the following verbose string
    final String read2String = "read2@1@50@10";
    final GATKRead read2 = ArtificialReadUtils.createArtificialRead(header, "read2", 0, 2, 50);
    read2.setMappingQuality(10);
    read2.setCigar("50M");
    final PileupElement pe2 = PileupElement.createPileupForReadAndOffset(read2, 1);
    // generate the pileup
    final ReadPileup pileup = new ReadPileup(loc, Arrays.asList(pe1, pe2));
    // test one read at a time: the leading number is the count of deletions in the pileup
    Assert.assertEquals(Pileup.createVerboseOutput(pileup.makeFilteredPileup(p -> p.getRead().getName().equals("read1"))), "1 " + read1String);
    Assert.assertEquals(Pileup.createVerboseOutput(pileup.makeFilteredPileup(p -> p.getRead().getName().equals("read2"))), "0 " + read2String);
    // test two reads
    Assert.assertEquals(Pileup.createVerboseOutput(pileup), "1 " + read1String + "," + read2String);
    // test an empty pileup
    Assert.assertEquals(Pileup.createVerboseOutput(new ReadPileup(loc)), "0 ");
}
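makeFilteredPileup accepts an arbitrary predicate over pileup elements, so the same pileup can be subset by criteria other than read name; a short hypothetical sketch reusing the pileup built above:

// Hypothetical filters over the pileup from the test: by mapping quality and by read name.
final ReadPileup highMq = pileup.makeFilteredPileup(p -> p.getRead().getMappingQuality() >= 20);
final ReadPileup read2Only = pileup.makeFilteredPileup(p -> p.getRead().getName().equals("read2"));
// Filtered pileups render the same way as the full one.
final String verboseHighMq = Pileup.createVerboseOutput(highMq);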