use of htsjdk.samtools.metrics.MetricsFile in project gatk by broadinstitute.
the class CollectAlignmentSummaryMetricsTest method testMultipleLevelsOfMetrics.
@Test
public void testMultipleLevelsOfMetrics() throws IOException {
    final File input = new File(TEST_DATA_DIR, "summary_alignment_stats_test_multiple.sam");
    final File outfile = BaseTest.createTempFile("alignmentMetrics", ".txt");
    final String[] args = new String[] {
            "--input", input.getAbsolutePath(),
            "--output", outfile.getAbsolutePath(),
            "--METRIC_ACCUMULATION_LEVEL", "ALL_READS",
            "--METRIC_ACCUMULATION_LEVEL", "SAMPLE",
            "--METRIC_ACCUMULATION_LEVEL", "LIBRARY",
            "--METRIC_ACCUMULATION_LEVEL", "READ_GROUP"
    };
    runCommandLine(args);
    final MetricsFile<AlignmentSummaryMetrics, Comparable<?>> output = new MetricsFile<>();
    output.read(new FileReader(outfile));
    for (final AlignmentSummaryMetrics metrics : output.getMetrics()) {
        Assert.assertEquals(metrics.MEAN_READ_LENGTH, 101.0);
        if (metrics.SAMPLE == null) {
            switch (metrics.CATEGORY) {
                case FIRST_OF_PAIR:
                    Assert.assertEquals(metrics.TOTAL_READS, 9);
                    Assert.assertEquals(metrics.PF_READS, 7);
                    Assert.assertEquals(metrics.PF_NOISE_READS, 1);
                    Assert.assertEquals(metrics.PF_HQ_ALIGNED_READS, 0);
                    Assert.assertEquals(metrics.PF_HQ_ALIGNED_Q20_BASES, 0);
                    Assert.assertEquals(metrics.PF_HQ_MEDIAN_MISMATCHES, 0.0);
                    Assert.assertEquals(metrics.PF_ALIGNED_BASES, 0);
                    Assert.assertEquals(metrics.PF_MISMATCH_RATE, 0.0);
                    Assert.assertEquals(metrics.BAD_CYCLES, 19);
                    break;
                case SECOND_OF_PAIR:
                    Assert.assertEquals(metrics.TOTAL_READS, 9);
                    Assert.assertEquals(metrics.PF_READS, 9);
                    Assert.assertEquals(metrics.PF_NOISE_READS, 1);
                    Assert.assertEquals(metrics.PF_HQ_ALIGNED_READS, 0);
                    Assert.assertEquals(metrics.PF_HQ_ALIGNED_Q20_BASES, 0);
                    Assert.assertEquals(metrics.PF_HQ_MEDIAN_MISMATCHES, 0.0);
                    Assert.assertEquals(metrics.PF_ALIGNED_BASES, 0);
                    Assert.assertEquals(metrics.PF_MISMATCH_RATE, 0.0);
                    Assert.assertEquals(metrics.BAD_CYCLES, 3);
                    break;
                case PAIR:
                    Assert.assertEquals(metrics.TOTAL_READS, 18);
                    Assert.assertEquals(metrics.PF_READS, 16);
                    Assert.assertEquals(metrics.PF_NOISE_READS, 2);
                    Assert.assertEquals(metrics.PF_HQ_ALIGNED_READS, 0);
                    Assert.assertEquals(metrics.PF_HQ_ALIGNED_Q20_BASES, 0);
                    Assert.assertEquals(metrics.PF_HQ_MEDIAN_MISMATCHES, 0.0);
                    Assert.assertEquals(metrics.PF_ALIGNED_BASES, 0);
                    Assert.assertEquals(metrics.PF_MISMATCH_RATE, 0.0);
                    Assert.assertEquals(metrics.BAD_CYCLES, 22);
                    break;
                case UNPAIRED:
                default:
                    Assert.fail("Data does not contain this category: " + metrics.CATEGORY);
            }
        } else if (metrics.SAMPLE.equals("Ma")) {
            // every level should be identical
            switch (metrics.CATEGORY) {
                case FIRST_OF_PAIR:
                    Assert.assertEquals(metrics.TOTAL_READS, 5);
                    Assert.assertEquals(metrics.PF_READS, 3);
                    Assert.assertEquals(metrics.PF_NOISE_READS, 1);
                    Assert.assertEquals(metrics.PF_HQ_ALIGNED_READS, 0);
                    Assert.assertEquals(metrics.PF_HQ_ALIGNED_Q20_BASES, 0);
                    Assert.assertEquals(metrics.PF_HQ_MEDIAN_MISMATCHES, 0.0);
                    Assert.assertEquals(metrics.PF_ALIGNED_BASES, 0);
                    Assert.assertEquals(metrics.PF_MISMATCH_RATE, 0.0);
                    Assert.assertEquals(metrics.BAD_CYCLES, 24);
                    break;
                case SECOND_OF_PAIR:
                    Assert.assertEquals(metrics.TOTAL_READS, 5);
                    Assert.assertEquals(metrics.PF_READS, 5);
                    Assert.assertEquals(metrics.PF_NOISE_READS, 0);
                    Assert.assertEquals(metrics.PF_HQ_ALIGNED_READS, 0);
                    Assert.assertEquals(metrics.PF_HQ_ALIGNED_Q20_BASES, 0);
                    Assert.assertEquals(metrics.PF_HQ_MEDIAN_MISMATCHES, 0.0);
                    Assert.assertEquals(metrics.PF_ALIGNED_BASES, 0);
                    Assert.assertEquals(metrics.PF_MISMATCH_RATE, 0.0);
                    Assert.assertEquals(metrics.BAD_CYCLES, 3);
                    break;
                case PAIR:
                    Assert.assertEquals(metrics.TOTAL_READS, 10);
                    Assert.assertEquals(metrics.PF_READS, 8);
                    Assert.assertEquals(metrics.PF_NOISE_READS, 1);
                    Assert.assertEquals(metrics.PF_HQ_ALIGNED_READS, 0);
                    Assert.assertEquals(metrics.PF_HQ_ALIGNED_Q20_BASES, 0);
                    Assert.assertEquals(metrics.PF_HQ_MEDIAN_MISMATCHES, 0.0);
                    Assert.assertEquals(metrics.PF_ALIGNED_BASES, 0);
                    Assert.assertEquals(metrics.PF_MISMATCH_RATE, 0.0);
                    Assert.assertEquals(metrics.BAD_CYCLES, 27);
                    break;
                case UNPAIRED:
                default:
                    Assert.fail("Data does not contain this category: " + metrics.CATEGORY);
            }
        } else if (metrics.SAMPLE.equals("Pa")) {
            // Two libraries and three read groups for this sample
            if (metrics.LIBRARY == null) {
                switch (metrics.CATEGORY) {
                    case FIRST_OF_PAIR:
                        Assert.assertEquals(metrics.TOTAL_READS, 4);
                        Assert.assertEquals(metrics.PF_READS, 4);
                        Assert.assertEquals(metrics.PF_NOISE_READS, 0);
                        Assert.assertEquals(metrics.PF_HQ_ALIGNED_READS, 0);
                        Assert.assertEquals(metrics.PF_HQ_ALIGNED_Q20_BASES, 0);
                        Assert.assertEquals(metrics.PF_HQ_MEDIAN_MISMATCHES, 0.0);
                        Assert.assertEquals(metrics.PF_ALIGNED_BASES, 0);
                        Assert.assertEquals(metrics.PF_MISMATCH_RATE, 0.0);
                        Assert.assertEquals(metrics.BAD_CYCLES, 19);
                        break;
                    case SECOND_OF_PAIR:
                        Assert.assertEquals(metrics.TOTAL_READS, 4);
                        Assert.assertEquals(metrics.PF_READS, 4);
                        Assert.assertEquals(metrics.PF_NOISE_READS, 1);
                        Assert.assertEquals(metrics.PF_HQ_ALIGNED_READS, 0);
                        Assert.assertEquals(metrics.PF_HQ_ALIGNED_Q20_BASES, 0);
                        Assert.assertEquals(metrics.PF_HQ_MEDIAN_MISMATCHES, 0.0);
                        Assert.assertEquals(metrics.PF_ALIGNED_BASES, 0);
                        Assert.assertEquals(metrics.PF_MISMATCH_RATE, 0.0);
                        Assert.assertEquals(metrics.BAD_CYCLES, 3);
                        break;
                    case PAIR:
                        Assert.assertEquals(metrics.TOTAL_READS, 8);
                        Assert.assertEquals(metrics.PF_READS, 8);
                        Assert.assertEquals(metrics.PF_NOISE_READS, 1);
                        Assert.assertEquals(metrics.PF_HQ_ALIGNED_READS, 0);
                        Assert.assertEquals(metrics.PF_HQ_ALIGNED_Q20_BASES, 0);
                        Assert.assertEquals(metrics.PF_HQ_MEDIAN_MISMATCHES, 0.0);
                        Assert.assertEquals(metrics.PF_ALIGNED_BASES, 0);
                        Assert.assertEquals(metrics.PF_MISMATCH_RATE, 0.0);
                        Assert.assertEquals(metrics.BAD_CYCLES, 22);
                        break;
                    case UNPAIRED:
                    default:
                        Assert.fail("Data does not contain this category: " + metrics.CATEGORY);
                }
            } else if (metrics.LIBRARY.equals("lib1")) {
                // Only one read group in this library so library and RG metrics should be identical
                switch (metrics.CATEGORY) {
                    case FIRST_OF_PAIR:
                        Assert.assertEquals(metrics.TOTAL_READS, 2);
                        Assert.assertEquals(metrics.PF_READS, 2);
                        Assert.assertEquals(metrics.PF_NOISE_READS, 0);
                        Assert.assertEquals(metrics.BAD_CYCLES, 19);
                        break;
                    case SECOND_OF_PAIR:
                        Assert.assertEquals(metrics.TOTAL_READS, 2);
                        Assert.assertEquals(metrics.PF_READS, 2);
                        Assert.assertEquals(metrics.PF_NOISE_READS, 1);
                        Assert.assertEquals(metrics.BAD_CYCLES, 3);
                        break;
                    case PAIR:
                        Assert.assertEquals(metrics.TOTAL_READS, 4);
                        Assert.assertEquals(metrics.PF_READS, 4);
                        Assert.assertEquals(metrics.PF_NOISE_READS, 1);
                        Assert.assertEquals(metrics.BAD_CYCLES, 22);
                        break;
                    case UNPAIRED:
                    default:
                        Assert.fail("Data does not contain this category: " + metrics.CATEGORY);
                }
            } else if (metrics.LIBRARY.equals("lib2")) {
                if (metrics.READ_GROUP == null) {
                    switch (metrics.CATEGORY) {
                        case FIRST_OF_PAIR:
                            Assert.assertEquals(metrics.TOTAL_READS, 2);
                            Assert.assertEquals(metrics.PF_READS, 2);
                            Assert.assertEquals(metrics.PF_NOISE_READS, 0);
                            Assert.assertEquals(metrics.BAD_CYCLES, 19);
                            break;
                        case SECOND_OF_PAIR:
                            Assert.assertEquals(metrics.TOTAL_READS, 2);
                            Assert.assertEquals(metrics.PF_READS, 2);
                            Assert.assertEquals(metrics.PF_NOISE_READS, 0);
                            Assert.assertEquals(metrics.BAD_CYCLES, 3);
                            break;
                        case PAIR:
                            Assert.assertEquals(metrics.TOTAL_READS, 4);
                            Assert.assertEquals(metrics.PF_READS, 4);
                            Assert.assertEquals(metrics.PF_NOISE_READS, 0);
                            Assert.assertEquals(metrics.BAD_CYCLES, 22);
                            break;
                        case UNPAIRED:
                        default:
                            Assert.fail("Data does not contain this category: " + metrics.CATEGORY);
                    }
                } else if (metrics.READ_GROUP.equals("i")) {
                    switch (metrics.CATEGORY) {
                        case FIRST_OF_PAIR:
                            Assert.assertEquals(metrics.TOTAL_READS, 1);
                            Assert.assertEquals(metrics.PF_READS, 1);
                            Assert.assertEquals(metrics.PF_NOISE_READS, 0);
                            Assert.assertEquals(metrics.BAD_CYCLES, 19);
                            break;
                        case SECOND_OF_PAIR:
                            Assert.assertEquals(metrics.TOTAL_READS, 1);
                            Assert.assertEquals(metrics.PF_READS, 1);
                            Assert.assertEquals(metrics.PF_NOISE_READS, 0);
                            Assert.assertEquals(metrics.BAD_CYCLES, 3);
                            break;
                        case PAIR:
                            Assert.assertEquals(metrics.TOTAL_READS, 2);
                            Assert.assertEquals(metrics.PF_READS, 2);
                            Assert.assertEquals(metrics.PF_NOISE_READS, 0);
                            Assert.assertEquals(metrics.BAD_CYCLES, 22);
                            break;
                        case UNPAIRED:
                        default:
                            Assert.fail("Data does not contain this category: " + metrics.CATEGORY);
                    }
                } else if (metrics.READ_GROUP.equals("i2")) {
                    switch (metrics.CATEGORY) {
                        case FIRST_OF_PAIR:
                            Assert.assertEquals(metrics.TOTAL_READS, 1);
                            Assert.assertEquals(metrics.PF_READS, 1);
                            Assert.assertEquals(metrics.PF_NOISE_READS, 0);
                            Assert.assertEquals(metrics.BAD_CYCLES, 27);
                            break;
                        case SECOND_OF_PAIR:
                            Assert.assertEquals(metrics.TOTAL_READS, 1);
                            Assert.assertEquals(metrics.PF_READS, 1);
                            Assert.assertEquals(metrics.PF_NOISE_READS, 0);
                            Assert.assertEquals(metrics.BAD_CYCLES, 3);
                            break;
                        case PAIR:
                            Assert.assertEquals(metrics.TOTAL_READS, 2);
                            Assert.assertEquals(metrics.PF_READS, 2);
                            Assert.assertEquals(metrics.PF_NOISE_READS, 0);
                            Assert.assertEquals(metrics.BAD_CYCLES, 30);
                            break;
                        case UNPAIRED:
                        default:
                            Assert.fail("Data does not contain this category: " + metrics.CATEGORY);
                    }
                } else {
                    Assert.fail("Data does not contain this read group: " + metrics.READ_GROUP);
                }
            } else {
                Assert.fail("Data does not contain this library: " + metrics.LIBRARY);
            }
        } else {
            Assert.fail("Data does not contain this sample: " + metrics.SAMPLE);
        }
    }
}
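The read-back pattern the test relies on is the standard htsjdk one: create an empty MetricsFile, read(...) it from a Reader, then iterate getMetrics(). A minimal sketch of just that pattern, assuming the same imports as the test above; metricsPath is a placeholder, not a file from the GATK test data:

/** Reads an alignment-summary metrics file and prints one line per accumulation-level row. */
static void dumpAlignmentSummaryMetrics(final File metricsPath) throws IOException {
    final MetricsFile<AlignmentSummaryMetrics, Comparable<?>> metricsFile = new MetricsFile<>();
    try (final FileReader reader = new FileReader(metricsPath)) {
        metricsFile.read(reader);
    }
    // Each requested accumulation level (ALL_READS, SAMPLE, LIBRARY, READ_GROUP) contributes its own rows;
    // a row's level can be recovered from which of the SAMPLE / LIBRARY / READ_GROUP fields are null.
    for (final AlignmentSummaryMetrics m : metricsFile.getMetrics()) {
        System.out.printf("category=%s sample=%s library=%s readGroup=%s totalReads=%d%n",
                m.CATEGORY, m.SAMPLE, m.LIBRARY, m.READ_GROUP, m.TOTAL_READS);
    }
}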
use of htsjdk.samtools.metrics.MetricsFile in project gatk by broadinstitute.
the class ExampleMultiMetricsCollector method saveMetrics.
/**
 * Finish the metrics collection and save any results to the metrics file.
 *
 * @param metricsFile a metricsFile where the collected metrics should be stored. May not be null.
 * @param authHolder Authentication info for this context.
 */
public void saveMetrics(final MetricsFile<ExampleMultiMetrics, Integer> metricsFile, final AuthHolder authHolder) {
    Utils.nonNull(metricsFile);
    finish();
    addAllLevelsToFile(metricsFile);
    if (null != authHolder) {
        MetricsUtils.saveMetrics(metricsFile, inputArgs.output, authHolder);
    } else {
        metricsFile.write(new File(inputArgs.output));
    }
}
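A hypothetical call site, sketched only to show the contract (the collector variable and how it was populated are assumptions, not taken from the GATK source): feed the collector elsewhere, then hand it a fresh MetricsFile. Passing a null AuthHolder routes the write through the plain local-file branch above.

// Sketch: `collector` is an ExampleMultiMetricsCollector that has already been fed reads elsewhere.
final MetricsFile<ExampleMultiMetrics, Integer> exampleMetricsFile = new MetricsFile<>();
collector.saveMetrics(exampleMetricsFile, null);   // null AuthHolder -> local write via metricsFile.write(...)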
use of htsjdk.samtools.metrics.MetricsFile in project gatk by broadinstitute.
the class MetricsUtilsTest method testSaveMetrics.
@Test(dataProvider = "metricsPaths", groups = "cloud")
public void testSaveMetrics(String destinationPrefix) throws IOException {
    final String outputPath = BucketUtils.getTempFilePath(destinationPrefix, ".txt");
    TestMetric testMetric = new TestMetric();
    testMetric.value1 = 10;
    testMetric.value2 = 5;
    final MetricsFile<TestMetric, ?> metrics = new MetricsFile<>();
    metrics.addMetric(testMetric);
    MetricsUtils.saveMetrics(metrics, outputPath, getAuthentication());
    Assert.assertTrue(BucketUtils.fileExists(outputPath));
    File localCopy = copyFileToLocalTmpFile(outputPath);
    final File expectedMetrics = createTempFile("expectedMetrics", ".txt");
    metrics.write(expectedMetrics);
    Assert.assertTrue(MetricsFile.areMetricsEqual(localCopy, expectedMetrics));
}
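The TestMetric bean itself is not shown in this snippet; beans used with MetricsFile are typically small classes extending htsjdk.samtools.metrics.MetricBase whose public fields become the metric columns. A plausible shape for it (the actual class in the GATK test may differ):

/** Minimal metric bean: MetricsFile writes each public field as a column in the metrics table. */
public static final class TestMetric extends MetricBase {
    public int value1;
    public int value2;
}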
use of htsjdk.samtools.metrics.MetricsFile in project gatk by broadinstitute.
the class GenotypeConcordance method doWork (which drives a PairedVariantSubContextIterator).
// TODO: add optimization if the samples are in the same file
// TODO: add option for auto-detect pairs based on same sample name
// TODO: allow multiple sample-pairs in one pass
@Override
protected Object doWork() {
    IOUtil.assertFileIsReadable(TRUTH_VCF);
    IOUtil.assertFileIsReadable(CALL_VCF);
    final File summaryMetricsFile = new File(OUTPUT + SUMMARY_METRICS_FILE_EXTENSION);
    final File detailedMetricsFile = new File(OUTPUT + DETAILED_METRICS_FILE_EXTENSION);
    final File contingencyMetricsFile = new File(OUTPUT + CONTINGENCY_METRICS_FILE_EXTENSION);
    IOUtil.assertFileIsWritable(summaryMetricsFile);
    IOUtil.assertFileIsWritable(detailedMetricsFile);
    IOUtil.assertFileIsWritable(contingencyMetricsFile);
    final boolean usingIntervals = this.INTERVALS != null && !this.INTERVALS.isEmpty();
    IntervalList intervals = null;
    SAMSequenceDictionary intervalsSamSequenceDictionary = null;
    if (usingIntervals) {
        logger.info("Loading up region lists.");
        long genomeBaseCount = 0;
        for (final File f : INTERVALS) {
            IOUtil.assertFileIsReadable(f);
            final IntervalList tmpIntervalList = IntervalList.fromFile(f);
            if (genomeBaseCount == 0) {
                // Don't count the reference length more than once.
                intervalsSamSequenceDictionary = tmpIntervalList.getHeader().getSequenceDictionary();
                genomeBaseCount = intervalsSamSequenceDictionary.getReferenceLength();
            }
            if (intervals == null)
                intervals = tmpIntervalList;
            else if (INTERSECT_INTERVALS)
                intervals = IntervalList.intersection(intervals, tmpIntervalList);
            else
                intervals = IntervalList.union(intervals, tmpIntervalList);
        }
        if (intervals != null) {
            intervals = intervals.uniqued();
        }
        logger.info("Finished loading up region lists.");
    }
    final VCFFileReader truthReader = new VCFFileReader(TRUTH_VCF, USE_VCF_INDEX);
    final VCFFileReader callReader = new VCFFileReader(CALL_VCF, USE_VCF_INDEX);
    // Check that the samples actually exist in the files!
    if (!truthReader.getFileHeader().getGenotypeSamples().contains(TRUTH_SAMPLE)) {
        throw new UserException("File " + TRUTH_VCF.getAbsolutePath() + " does not contain genotypes for sample " + TRUTH_SAMPLE);
    }
    if (!callReader.getFileHeader().getGenotypeSamples().contains(CALL_SAMPLE)) {
        throw new UserException("File " + CALL_VCF.getAbsolutePath() + " does not contain genotypes for sample " + CALL_SAMPLE);
    }
    // Verify that both VCFs have the same Sequence Dictionary
    SequenceUtil.assertSequenceDictionariesEqual(truthReader.getFileHeader().getSequenceDictionary(), callReader.getFileHeader().getSequenceDictionary());
    if (usingIntervals) {
        // If using intervals, verify that the sequence dictionaries agree with those of the VCFs
        SequenceUtil.assertSequenceDictionariesEqual(intervalsSamSequenceDictionary, truthReader.getFileHeader().getSequenceDictionary());
    }
    // Build the pair of iterators over the regions of interest
    final Iterator<VariantContext> truthIterator, callIterator;
    if (usingIntervals) {
        truthIterator = new ByIntervalListVariantContextIterator(truthReader, intervals);
        callIterator = new ByIntervalListVariantContextIterator(callReader, intervals);
    } else {
        truthIterator = truthReader.iterator();
        callIterator = callReader.iterator();
    }
    // Now do the iteration and count things up
    final PairedVariantSubContextIterator pairedIterator = new PairedVariantSubContextIterator(
            truthIterator, TRUTH_SAMPLE, callIterator, CALL_SAMPLE, truthReader.getFileHeader().getSequenceDictionary());
    snpCounter = new GenotypeConcordanceCounts();
    indelCounter = new GenotypeConcordanceCounts();
    // A map to keep track of the count of Truth/Call States which we could not successfully classify
    final Map<String, Integer> unClassifiedStatesMap = new HashMap<>();
    logger.info("Starting iteration over variants.");
    while (pairedIterator.hasNext()) {
        final VcTuple tuple = pairedIterator.next();
        final VariantContext.Type truthVariantContextType = tuple.truthVariantContext != null ? tuple.truthVariantContext.getType() : NO_VARIATION;
        final VariantContext.Type callVariantContextType = tuple.callVariantContext != null ? tuple.callVariantContext.getType() : NO_VARIATION;
        // A flag to keep track of whether we have been able to successfully classify the Truth/Call States.
        // Unclassified include MIXED/MNP/Symbolic...
        boolean stateClassified = false;
        final TruthAndCallStates truthAndCallStates = determineState(tuple.truthVariantContext, TRUTH_SAMPLE, tuple.callVariantContext, CALL_SAMPLE, MIN_GQ, MIN_DP);
        if (truthVariantContextType == SNP) {
            if ((callVariantContextType == SNP) || (callVariantContextType == MIXED) || (callVariantContextType == NO_VARIATION)) {
                // Note. If truth is SNP and call is MIXED, the event will be logged in the snpCounter, with column = MIXED
                snpCounter.increment(truthAndCallStates);
                stateClassified = true;
            }
        } else if (truthVariantContextType == INDEL) {
            // Note. If truth is INDEL and call is MIXED, the event will be logged in the indelCounter, with column = MIXED
            if ((callVariantContextType == INDEL) || (callVariantContextType == MIXED) || (callVariantContextType == NO_VARIATION)) {
                indelCounter.increment(truthAndCallStates);
                stateClassified = true;
            }
        } else if (truthVariantContextType == MIXED) {
            // Note. If truth is MIXED and call is SNP, the event will be logged in the snpCounter, with row = MIXED
            if (callVariantContextType == SNP) {
                snpCounter.increment(truthAndCallStates);
                stateClassified = true;
            } else if (callVariantContextType == INDEL) {
                // Note. If truth is MIXED and call is INDEL, the event will be logged in the indelCounter, with row = MIXED
                indelCounter.increment(truthAndCallStates);
                stateClassified = true;
            }
        } else if (truthVariantContextType == NO_VARIATION) {
            if (callVariantContextType == SNP) {
                snpCounter.increment(truthAndCallStates);
                stateClassified = true;
            } else if (callVariantContextType == INDEL) {
                indelCounter.increment(truthAndCallStates);
                stateClassified = true;
            }
        }
        if (!stateClassified) {
            final String condition = truthVariantContextType + " " + callVariantContextType;
            Integer count = unClassifiedStatesMap.get(condition);
            if (count == null)
                count = 0;
            unClassifiedStatesMap.put(condition, ++count);
        }
        final VariantContext variantContextForLogging = tuple.truthVariantContext != null ? tuple.truthVariantContext : tuple.callVariantContext;
        progress.record(variantContextForLogging.getContig(), variantContextForLogging.getStart());
    }
    // Calculate and store the summary-level metrics
    final MetricsFile<GenotypeConcordanceSummaryMetrics, ?> genotypeConcordanceSummaryMetricsFile = getMetricsFile();
    GenotypeConcordanceSummaryMetrics summaryMetrics = new GenotypeConcordanceSummaryMetrics(SNP, snpCounter, TRUTH_SAMPLE, CALL_SAMPLE);
    genotypeConcordanceSummaryMetricsFile.addMetric(summaryMetrics);
    summaryMetrics = new GenotypeConcordanceSummaryMetrics(INDEL, indelCounter, TRUTH_SAMPLE, CALL_SAMPLE);
    genotypeConcordanceSummaryMetricsFile.addMetric(summaryMetrics);
    genotypeConcordanceSummaryMetricsFile.write(summaryMetricsFile);
    // Calculate and store the detailed metrics for both SNP and indels
    final MetricsFile<GenotypeConcordanceDetailMetrics, ?> genotypeConcordanceDetailMetrics = getMetricsFile();
    outputDetailMetricsFile(SNP, genotypeConcordanceDetailMetrics, snpCounter, TRUTH_SAMPLE, CALL_SAMPLE);
    outputDetailMetricsFile(INDEL, genotypeConcordanceDetailMetrics, indelCounter, TRUTH_SAMPLE, CALL_SAMPLE);
    genotypeConcordanceDetailMetrics.write(detailedMetricsFile);
    // Calculate and store the contingency metrics
    final MetricsFile<GenotypeConcordanceContingencyMetrics, ?> genotypeConcordanceContingencyMetricsFile = getMetricsFile();
    GenotypeConcordanceContingencyMetrics contingencyMetrics = new GenotypeConcordanceContingencyMetrics(SNP, snpCounter, TRUTH_SAMPLE, CALL_SAMPLE);
    genotypeConcordanceContingencyMetricsFile.addMetric(contingencyMetrics);
    contingencyMetrics = new GenotypeConcordanceContingencyMetrics(INDEL, indelCounter, TRUTH_SAMPLE, CALL_SAMPLE);
    genotypeConcordanceContingencyMetricsFile.addMetric(contingencyMetrics);
    genotypeConcordanceContingencyMetricsFile.write(contingencyMetricsFile);
    for (final String condition : unClassifiedStatesMap.keySet()) {
        logger.info("Uncovered truth/call Variant Context Type Counts: " + condition + " " + unClassifiedStatesMap.get(condition));
    }
    return null;
}
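One side note on the unclassified-state bookkeeping above: the get-then-put sequence predates Map.merge; on Java 8+ the same counting can be written in one line (a drop-in alternative, not what the GATK source does):

// Equivalent to: look up the current count (treating absent as 0), add one, and store it back.
unClassifiedStatesMap.merge(condition, 1, Integer::sum);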
use of htsjdk.samtools.metrics.MetricsFile in project gatk by broadinstitute.
the class PreAdapterOrientationScorerUnitTest method testBasicScoring.
/**
 * Note that (due to raw data), this test includes collapsing over libraries (not just the contexts).
 *
 * @throws IOException
 */
@Test
public void testBasicScoring() throws IOException {
    final MetricsFile<SequencingArtifactMetrics.PreAdapterDetailMetrics, Comparable<?>> mf = new MetricsFile<>();
    mf.read(new FileReader(testPreAdapterDetailsMetrics));
    final Map<Transition, Double> scoreMap = PreAdapterOrientationScorer.scoreOrientationBiasMetricsOverContext(mf.getMetrics());
    Assert.assertNotNull(scoreMap);
    Assert.assertEquals(scoreMap.keySet().size(), 12);
    // Ground truth values painstakingly derived manually
    Assert.assertEquals(scoreMap.get(Transition.transitionOf('A', 'C')), 100.0, 1e-6);
    Assert.assertEquals(scoreMap.get(Transition.transitionOf('A', 'G')), 50.5788416297570, 1e-6);
    Assert.assertEquals(scoreMap.get(Transition.transitionOf('A', 'T')), 100.0, 1e-6);
    Assert.assertEquals(scoreMap.get(Transition.transitionOf('C', 'A')), 100.0, 1e-6);
    Assert.assertEquals(scoreMap.get(Transition.transitionOf('C', 'G')), 100.0, 1e-6);
    Assert.assertEquals(scoreMap.get(Transition.transitionOf('C', 'T')), 58.0641821538479, 1e-6);
}
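The same scoring call can be applied outside the test to any pre-adapter detail metrics file produced by CollectSequencingArtifactMetrics. A rough sketch, assuming the same imports as the test above, with the input file as a placeholder; treating the lowest score as the most artifact-prone transition is an interpretation of the scores, not something asserted by this test:

/** Scores a pre-adapter detail metrics file and reports the transition with the lowest score. */
static void reportLowestScoringTransition(final File preAdapterDetailMetricsFile) throws IOException {
    final MetricsFile<SequencingArtifactMetrics.PreAdapterDetailMetrics, Comparable<?>> mf = new MetricsFile<>();
    mf.read(new FileReader(preAdapterDetailMetricsFile));
    final Map<Transition, Double> scoreMap = PreAdapterOrientationScorer.scoreOrientationBiasMetricsOverContext(mf.getMetrics());
    // One score per transition, collapsed over libraries and contexts as in the test above.
    scoreMap.entrySet().stream()
            .min(Map.Entry.comparingByValue())
            .ifPresent(entry -> System.out.println("Lowest-scoring transition: " + entry.getKey() + " = " + entry.getValue()));
}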