Example usage of htsjdk.samtools.SamReader in the broadinstitute/gatk project, taken from the multilevelCollectorTest method of the MultiLevelCollectorTest class:
@Test(dataProvider = "variedAccumulationLevels")
public void multilevelCollectorTest(final Set<MetricAccumulationLevel> accumulationLevels) {
    // Verifies that a RecordCountMultiLevelCollector tallies reads correctly at each
    // requested accumulation level, and that the per-unit tallies sum to the total
    // number of records processed.
    final SamReader in = SamReaderFactory.makeDefault().open(TESTFILE);
    try {
        final RecordCountMultiLevelCollector collector =
                new RecordCountMultiLevelCollector(accumulationLevels, in.getFileHeader().getReadGroups());
        // Feed every record to the collector; no per-record reference info is needed (null).
        for (final SAMRecord rec : in) {
            collector.acceptRecord(rec, null);
        }
        collector.finish();

        int totalProcessed = 0;
        int totalMetrics = 0;
        for (final MetricAccumulationLevel level : accumulationLevels) {
            // Expected per-unit read counts for this accumulation level.
            final Map<String, Integer> keyToMetrics = accumulationLevelToPerUnitReads.get(level);
            for (final Map.Entry<String, Integer> entry : keyToMetrics.entrySet()) {
                final TotalNumberMetric metric = collector.getUnitsToMetrics().get(entry.getKey());
                Assert.assertEquals(entry.getValue(), metric.TALLY);
                Assert.assertTrue(metric.FINISHED);
                totalProcessed += metric.TALLY;
                totalMetrics += 1;
            }
        }
        // Every metric unit must be covered, and the tallies must account for all reads.
        Assert.assertEquals(collector.getUnitsToMetrics().size(), totalMetrics);
        Assert.assertEquals(totalProcessed, collector.getNumProcessed());
    } finally {
        // Close in a finally block so a failed assertion or exception cannot leak the
        // reader (the original only closed on the success path). CloserUtil swallows
        // IOException, so the test method signature is unchanged.
        CloserUtil.close(in);
    }
}
Example usage of htsjdk.samtools.SamReader in the broadinstitute/gatk project, taken from the multilevelCollectorTest method of the MultiLevelReducibleCollectorUnitTest class:
@Test(dataProvider = "variedAccumulationLevels")
public void multilevelCollectorTest(final Set<MetricAccumulationLevel> accumulationLevels) {
    // Verifies that two RecordCountMultiLevelCollectors, each fed half of the reads,
    // can be combined and produce the same totals as a single collector would.
    final SamReader in = SamReaderFactory.makeDefault().open(TESTFILE);
    try {
        final RecordCountMultiLevelCollector collector1 =
                new RecordCountMultiLevelCollector(accumulationLevels, in.getFileHeader().getReadGroups());
        final RecordCountMultiLevelCollector collector2 =
                new RecordCountMultiLevelCollector(accumulationLevels, in.getFileHeader().getReadGroups());
        // Distribute the reads across the two collectors (even-numbered reads to
        // collector1, odd-numbered to collector2).
        int count = 1;
        for (final SAMRecord rec : in) {
            if (count % 2 == 0) {
                collector1.acceptRecord(rec, null);
            } else {
                collector2.acceptRecord(rec, null);
            }
            count++;
        }
        collector1.finish();
        collector2.finish();
        // Combine the results into collector1; all assertions below are against the
        // merged state.
        collector1.combine(collector2);

        int totalProcessed = 0;
        int totalMetrics = 0;
        for (final MetricAccumulationLevel level : accumulationLevels) {
            // Expected per-unit read counts for this accumulation level.
            final Map<String, Integer> keyToMetrics = accumulationLevelToPerUnitReads.get(level);
            for (final Map.Entry<String, Integer> entry : keyToMetrics.entrySet()) {
                final TotalNumberMetric metric = collector1.getUnitsToMetrics().get(entry.getKey());
                Assert.assertEquals(entry.getValue(), metric.TALLY);
                Assert.assertTrue(metric.FINISHED);
                totalProcessed += metric.TALLY;
                totalMetrics += 1;
            }
        }
        // Every metric unit must be covered, and the merged tallies must account for
        // all reads.
        Assert.assertEquals(collector1.getUnitsToMetrics().size(), totalMetrics);
        Assert.assertEquals(totalProcessed, collector1.getNumProcessed());
    } finally {
        // Close in a finally block so a failed assertion or exception cannot leak the
        // reader (the original only closed on the success path). CloserUtil swallows
        // IOException, so the test method signature is unchanged.
        CloserUtil.close(in);
    }
}
Example usage of htsjdk.samtools.SamReader in the broadinstitute/gatk-protected project, taken from the initHeaders method of the GetBayesianHetCoverageIntegrationTest class:
@BeforeClass
public void initHeaders() throws IOException {
    // Cache the normal and tumor BAM headers once, before any test in this class runs.
    // Each reader is opened in its own try-with-resources block so it is closed as
    // soon as its header has been read.
    try (final SamReader reader = SamReaderFactory.makeDefault().open(NORMAL_BAM_FILE)) {
        normalHeader = reader.getFileHeader();
    }
    try (final SamReader reader = SamReaderFactory.makeDefault().open(TUMOR_BAM_FILE)) {
        tumorHeader = reader.getFileHeader();
    }
}
Example usage of htsjdk.samtools.SamReader in the broadinstitute/gatk project, taken from the getReadCounts method of the FilterReadsIntegrationTest class:
/**
 * Counts the records in a SAM/BAM/CRAM result file.
 *
 * @param resultFileName    path to the alignment file to count; must be readable
 * @param referenceFileName name of a reference file under TEST_DATA_DIR (required for
 *                          CRAM input), or null when no reference is needed
 * @return the number of records in the file
 */
private int getReadCounts(final String resultFileName, final String referenceFileName) {
    final File path = new File(resultFileName);
    IOUtil.assertFileIsReadable(path);
    final File refFile = null == referenceFileName ? null : new File(TEST_DATA_DIR, referenceFileName);
    final SamReader in = SamReaderFactory.makeDefault().referenceSequence(refFile).open(path);
    try {
        int count = 0;
        // The loop variable is unused; only the number of iterations matters.
        for (@SuppressWarnings("unused") final SAMRecord rec : in) {
            count++;
        }
        return count;
    } finally {
        // Close in a finally block so an exception while iterating cannot leak the
        // reader (the original only closed on the success path).
        CloserUtil.close(in);
    }
}
Aggregations