Use of org.broadinstitute.hellbender.utils.SimpleInterval in project gatk-protected by broadinstitute.
The class AbstractConcordanceWalker, method traverse:
@Override
public final void traverse() {
    // Process each variant in the input stream.
    StreamSupport.stream(getSpliteratorForDrivingVariants(), false).forEach(truthVersusEval -> {
        final SimpleInterval variantInterval = new SimpleInterval(truthVersusEval);
        apply(truthVersusEval, new ReadsContext(reads, variantInterval), new ReferenceContext(reference, variantInterval));
        progressMeter.update(variantInterval);
    });
}
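The traverse() method wraps each driving variant's location in a SimpleInterval before handing it to apply(). For reference, here is a minimal, self-contained sketch of the SimpleInterval calls the snippet relies on (the Locatable copy constructor and basic containment/overlap queries); the class name and coordinates below are illustrative only, not part of the walker.

import htsjdk.samtools.util.Locatable;
import org.broadinstitute.hellbender.utils.SimpleInterval;

public class SimpleIntervalSketch {
    public static void main(String[] args) {
        // SimpleInterval is a 1-based, closed interval on a single contig.
        final SimpleInterval interval = new SimpleInterval("1", 100, 200);

        // Copy-construct from any Locatable, as traverse() does with truthVersusEval.
        final Locatable site = new SimpleInterval("1", 150, 150);
        final SimpleInterval copy = new SimpleInterval(site);

        System.out.println(interval.getContig() + ":" + interval.getStart() + "-" + interval.getEnd());
        System.out.println(interval.contains(site));   // true: position 150 lies within 100-200
        System.out.println(interval.overlaps(copy));   // true: the intervals share at least one base
    }
}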
Use of org.broadinstitute.hellbender.utils.SimpleInterval in project gatk-protected by broadinstitute.
The class AnnotateVcfWithBamDepth, method apply:
@Override
public void apply(final VariantContext vc, final ReadsContext readsContext, final ReferenceContext refContext, final FeatureContext fc) {
    final MutableInt depth = new MutableInt(0);
    for (final GATKRead read : readsContext) {
        // Count only mapped, non-duplicate reads that pass vendor quality checks
        // and whose aligned span (as a SimpleInterval) contains the variant site.
        if (!read.failsVendorQualityCheck() && !read.isDuplicate() && !read.isUnmapped()
                && read.getEnd() > read.getStart() && new SimpleInterval(read).contains(vc)) {
            depth.increment();
        }
    }
    vcfWriter.add(new VariantContextBuilder(vc).attribute(POOLED_BAM_DEPTH_ANNOTATION_NAME, depth.intValue()).make());
}
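The depth filter above keys on whether a read's aligned span, viewed as a SimpleInterval, contains the variant position. A minimal sketch of that containment pattern, using plain SimpleIntervals in place of reads (the class and method names here are illustrative, not part of the tool):

import java.util.Arrays;
import java.util.List;
import htsjdk.samtools.util.Locatable;
import org.broadinstitute.hellbender.utils.SimpleInterval;

public class ContainmentCountSketch {
    // Count how many spans fully contain a single-base site,
    // mirroring the new SimpleInterval(read).contains(vc) check above.
    static long countContaining(final List<? extends Locatable> spans, final Locatable site) {
        return spans.stream()
                .map(SimpleInterval::new)
                .filter(span -> span.contains(site))
                .count();
    }

    public static void main(String[] args) {
        final Locatable site = new SimpleInterval("1", 150, 150);
        final List<SimpleInterval> spans = Arrays.asList(
                new SimpleInterval("1", 100, 200),   // contains the site
                new SimpleInterval("1", 160, 260),   // starts after the site
                new SimpleInterval("2", 100, 200));  // wrong contig
        System.out.println(countContaining(spans, site));  // prints 1
    }
}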
Use of org.broadinstitute.hellbender.utils.SimpleInterval in project gatk-protected by broadinstitute.
The class CollectAllelicCountsIntegrationTest, method testData:
@DataProvider(name = "testData")
public Object[][] testData() throws IOException {
//counts from IGV with minMQ = 30 and minBQ = 20
final AllelicCountCollection normalCountsExpected = new AllelicCountCollection();
normalCountsExpected.add(new AllelicCount(new SimpleInterval("1", 10736, 10736), 0, 0));
normalCountsExpected.add(new AllelicCount(new SimpleInterval("1", 11522, 11522), 7, 4));
normalCountsExpected.add(new AllelicCount(new SimpleInterval("1", 12098, 12098), 8, 6));
normalCountsExpected.add(new AllelicCount(new SimpleInterval("1", 12444, 12444), 0, 18));
normalCountsExpected.add(new AllelicCount(new SimpleInterval("1", 13059, 13059), 0, 8));
normalCountsExpected.add(new AllelicCount(new SimpleInterval("1", 14630, 14630), 9, 8));
normalCountsExpected.add(new AllelicCount(new SimpleInterval("1", 15204, 15204), 4, 4));
normalCountsExpected.add(new AllelicCount(new SimpleInterval("2", 14689, 14689), 6, 9));
normalCountsExpected.add(new AllelicCount(new SimpleInterval("2", 14982, 14982), 6, 5));
normalCountsExpected.add(new AllelicCount(new SimpleInterval("2", 15110, 15110), 6, 0));
normalCountsExpected.add(new AllelicCount(new SimpleInterval("2", 15629, 15629), 5, 3));
final AllelicCountCollection tumorCountsExpected = new AllelicCountCollection();
tumorCountsExpected.add(new AllelicCount(new SimpleInterval("1", 10736, 10736), 0, 0));
tumorCountsExpected.add(new AllelicCount(new SimpleInterval("1", 11522, 11522), 7, 4));
tumorCountsExpected.add(new AllelicCount(new SimpleInterval("1", 12098, 12098), 8, 6));
tumorCountsExpected.add(new AllelicCount(new SimpleInterval("1", 12444, 12444), 0, 17));
tumorCountsExpected.add(new AllelicCount(new SimpleInterval("1", 13059, 13059), 0, 8));
tumorCountsExpected.add(new AllelicCount(new SimpleInterval("1", 14630, 14630), 9, 8));
tumorCountsExpected.add(new AllelicCount(new SimpleInterval("1", 15204, 15204), 4, 3));
tumorCountsExpected.add(new AllelicCount(new SimpleInterval("2", 14689, 14689), 6, 9));
tumorCountsExpected.add(new AllelicCount(new SimpleInterval("2", 14982, 14982), 6, 5));
tumorCountsExpected.add(new AllelicCount(new SimpleInterval("2", 15110, 15110), 6, 0));
tumorCountsExpected.add(new AllelicCount(new SimpleInterval("2", 15629, 15629), 5, 3));
return new Object[][] { { NORMAL_BAM_FILE, normalCountsExpected }, { TUMOR_BAM_FILE, tumorCountsExpected } };
}
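Every expected AllelicCount above sits at a single base, expressed as a SimpleInterval whose start equals its end. A small sketch of that construction (positions copied from the data above; the class name is illustrative, and size() is assumed to return the interval length, end - start + 1):

import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.broadinstitute.hellbender.utils.SimpleInterval;

public class SnpSiteIntervalsSketch {
    public static void main(String[] args) {
        // Single-base sites: start == end in 1-based, closed coordinates.
        final List<SimpleInterval> sites = Stream.of(10736, 11522, 12098, 12444)
                .map(pos -> new SimpleInterval("1", pos, pos))
                .collect(Collectors.toList());
        for (final SimpleInterval site : sites) {
            System.out.println(site.getContig() + ":" + site.getStart() + " size=" + site.size());
        }
    }
}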
Use of org.broadinstitute.hellbender.utils.SimpleInterval in project gatk-protected by broadinstitute.
The class HaplotypeCallerGenotypingEngineUnitTest, method testAddMiscellaneousAllele:
@Test(dataProvider = "AddMiscellaneousDataProvider", enabled = false)
public void testAddMiscellaneousAllele(final String readBases, final int readOffset, final String ref, final int refOffset,
                                       final String referenceAllele, final String[] alternatives, final double[] likelihoods, final double[] expected) {
    final byte baseQual = (byte) 30;
    final byte[] baseQuals = Utils.dupBytes(baseQual, readBases.length());
    final GATKRead read = ArtificialReadUtils.createArtificialRead(readBases.getBytes(), baseQuals, readBases.length() + "M");
    final Locatable loc = new SimpleInterval("20", refOffset, refOffset);
    final ReadPileup pileup = new ReadPileup(loc, Collections.singletonList(read), readOffset);
    final VariantContextBuilder vcb = new VariantContextBuilder();
    final GenotypeBuilder gb = new GenotypeBuilder();
    final List<String> alleleStrings = new ArrayList<>(1 + alternatives.length);
    alleleStrings.add(referenceAllele);
    alleleStrings.addAll(Arrays.asList(alternatives));
    gb.AD(new int[] { 1 });
    gb.DP(1);
    gb.PL(likelihoods);
    vcb.alleles(alleleStrings);
    vcb.loc("20", refOffset, refOffset + referenceAllele.length() - 1);
    vcb.genotypes(gb.make());
    final VariantContext vc = vcb.make();
    // The call under test is commented out, which is why this test is disabled
    // (enabled = false above) and updatedVc is left as a null placeholder.
    // GenotypingEngine.addMiscellaneousAllele(vc, pileup, ref.getBytes(), 0);
    final VariantContext updatedVc = null;
    final GenotypeLikelihoods updatedLikelihoods = updatedVc.getGenotype(0).getLikelihoods();
    Assert.assertEquals(updatedLikelihoods.getAsVector().length, expected.length);
    final double[] updatedLikelihoodsArray = updatedVc.getGenotype(0).getLikelihoods().getAsVector();
    for (int i = 0; i < updatedLikelihoodsArray.length; i++) {
        Assert.assertEquals(updatedLikelihoodsArray[i], expected[i], 0.0001);
    }
    Allele altAllele = null;
    for (final Allele allele : updatedVc.getAlleles()) {
        if (allele.isSymbolic() && allele.getBaseString().equals(GATKVCFConstants.NON_REF_SYMBOLIC_ALLELE_NAME)) {
            altAllele = allele;
        }
    }
    Assert.assertNotNull(altAllele);
}
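In this test, SimpleInterval serves purely as the Locatable position handed to the ReadPileup constructor and mirrored by VariantContextBuilder.loc(). A minimal sketch of that usage (the helper method and class name are illustrative, not part of the engine):

import htsjdk.samtools.util.Locatable;
import org.broadinstitute.hellbender.utils.SimpleInterval;

public class LocatableSiteSketch {
    // Any API that accepts a Locatable, such as the ReadPileup constructor above,
    // can take a SimpleInterval directly, since SimpleInterval implements Locatable.
    static String describe(final Locatable loc) {
        return loc.getContig() + ":" + loc.getStart() + "-" + loc.getEnd();
    }

    public static void main(String[] args) {
        final int refOffset = 10;  // stand-in for the test's refOffset parameter
        final Locatable loc = new SimpleInterval("20", refOffset, refOffset);
        System.out.println(describe(loc));  // prints 20:10-10
    }
}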
Use of org.broadinstitute.hellbender.utils.SimpleInterval in project gatk-protected by broadinstitute.
The class HDF5PCACoveragePoNCreationUtilsUnitTest, method simpleEigensampleData:
@DataProvider(name = "singleEigensample")
public Object[][] simpleEigensampleData() {
final List<Object[]> result = new ArrayList<>();
final int NUM_TARGETS = 10;
final int NUM_SAMPLES = 5;
final List<Target> targets = IntStream.range(0, NUM_TARGETS).boxed().map(i -> new Target("target_" + i, new SimpleInterval("1", 100 * i + 1, 100 * i + 5))).collect(Collectors.toList());
final List<String> columnNames = IntStream.range(0, NUM_SAMPLES).boxed().map(i -> "sample_" + i).collect(Collectors.toList());
double[][] countsArray = new double[NUM_TARGETS][NUM_SAMPLES];
final RealMatrix counts = new Array2DRowRealMatrix(countsArray);
// All row data is the same (0,1,2,3,4...)
final double[] rowData = IntStream.range(0, NUM_SAMPLES).boxed().mapToDouble(i -> i).toArray();
for (int i = 0; i < NUM_TARGETS; i++) {
counts.setRow(i, rowData);
}
new ReadCountCollection(targets, columnNames, counts);
result.add(new Object[] { new ReadCountCollection(targets, columnNames, counts) });
return result.toArray(new Object[result.size()][]);
}
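The data provider tiles ten 5-bp target intervals, spaced 100 bp apart, on contig "1". The same tiling in isolation, as a sketch (the class name is illustrative):

import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import org.broadinstitute.hellbender.utils.SimpleInterval;

public class TargetTilingSketch {
    public static void main(String[] args) {
        final int numTargets = 10;
        // Non-overlapping 5-bp intervals spaced 100 bp apart: 1:1-5, 1:101-105, ...
        final List<SimpleInterval> targetIntervals = IntStream.range(0, numTargets)
                .mapToObj(i -> new SimpleInterval("1", 100 * i + 1, 100 * i + 5))
                .collect(Collectors.toList());
        for (final SimpleInterval interval : targetIntervals) {
            System.out.println(interval.getContig() + ":" + interval.getStart() + "-" + interval.getEnd());
        }
    }
}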