Example usage of org.broadinstitute.hellbender.utils.text.parsers.BasicInputParser in the project gatk by broadinstitute.
The method testLikelihoodsFromHaplotypes from the class VectorPairHMMUnitTest.
@Test(dataProvider = "JustHMMProvider")
public void testLikelihoodsFromHaplotypes(final PairHMM hmm, Boolean loaded) {
    // Skip rather than fail when the native (AVX) PairHMM library could not be
    // loaded on this platform — the data provider reports load status per HMM.
    if (!loaded.booleanValue()) {
        throw new SkipException("AVX PairHMM is not supported on this system or the library is not available");
    }
    BasicInputParser parser = null;
    try {
        parser = new BasicInputParser(true, new FileInputStream(pairHMMTestData));
    } catch (FileNotFoundException e) {
        // Assert.fail throws, so parser cannot be null past this point.
        Assert.fail("PairHMM test data not found : " + pairHMMTestData);
    }
    try {
        // Each record: haplotype bases, read bases, base quals, insertion quals,
        // deletion quals, gap-continuation penalties, expected log10 likelihood.
        while (parser.hasNext()) {
            String[] tokens = parser.next();

            final Haplotype hap = new Haplotype(tokens[0].getBytes(), true);
            final byte[] bases = tokens[1].getBytes();
            // Base qualities are floored at 6; the other quality tracks use the
            // normalize() default.
            final byte[] baseQuals = normalize(tokens[2].getBytes(), 6);
            final byte[] insertionQuals = normalize(tokens[3].getBytes());
            final byte[] deletionQuals = normalize(tokens[4].getBytes());
            final byte[] gcp = normalize(tokens[5].getBytes());
            final double expectedResult = Double.parseDouble(tokens[6]);

            // Build an artificial read fully matching the reference (all-M cigar)
            // and attach the per-base indel quality tracks the HMM consumes.
            final int readLength = bases.length;
            final GATKRead read = ArtificialReadUtils.createArtificialRead(bases, baseQuals, readLength + "M");
            ReadUtils.setInsertionBaseQualities(read, insertionQuals);
            ReadUtils.setDeletionBaseQualities(read, deletionQuals);
            final Map<GATKRead, byte[]> gpcs = new LinkedHashMap<>(readLength);
            gpcs.put(read, gcp);

            hmm.initialize(Arrays.asList(hap), null, 0, 0);
            hmm.computeLog10Likelihoods(matrix(Arrays.asList(hap)), Arrays.asList(read), gpcs);

            // Single read vs. single haplotype: the lone likelihood must match
            // the recorded expected value to within 1e-5.
            final double[] la = hmm.getLogLikelihoodArray();
            Assert.assertEquals(la[0], expectedResult, 1e-5, "Likelihood not in expected range.");
        }
    } finally {
        // Close the parser (and its underlying FileInputStream) as well as the
        // HMM's native resources, even if an assertion fails mid-loop. The
        // original leaked the parser by closing only the HMM.
        parser.close();
        hmm.close();
    }
}
Aggregations