Use of org.broadinstitute.hellbender.utils.Nucleotide in project gatk by broadinstitute.
In the class BayesianHetPulldownCalculatorUnitTest, the method loadPileup:
/**
 * Load the fake pileup from file.
 *
 * @throws IOException if the fake pileup file can not be read
 */
@BeforeClass
public void loadPileup() throws IOException {
    /* load the fake pileup and likelihoods from disk */
    final StringMapper<String> stringStripper = s -> s.substring(s.indexOf(':') + 1).replaceAll("\\s+", "");
    final StringMapper<Double> parseToDouble = s -> Double.parseDouble(stringStripper.of(s));
    final StringMapper<ArrayList<Double>> parseToDoubleArray = s -> {
        final String[] tokenizedLine = stringStripper.of(s).split(",");
        final ArrayList<Double> errorList = new ArrayList<>();
        if (tokenizedLine.length >= 1 && !tokenizedLine[0].isEmpty()) {
            errorList.addAll(Arrays.stream(tokenizedLine).map(Double::parseDouble).collect(Collectors.toList()));
        }
        return errorList;
    };
    /* try-with-resources ensures the Scanner (and the underlying stream) is closed */
    try (final Scanner reader = new Scanner(new FileInputStream(FAKE_PILEUP_FILE))) {
        while (reader.hasNextLine()) {
            final Map<Nucleotide, List<BayesianHetPulldownCalculator.BaseQuality>> baseQualities = new HashMap<>();
            for (final Nucleotide base : new Nucleotide[] { Nucleotide.A, Nucleotide.C, Nucleotide.T, Nucleotide.G }) {
                /* load the read error list for this base */
                final ArrayList<Double> readErrorList = parseToDoubleArray.of(reader.nextLine());
                /* set the mapping error probability to 1e-6 for every read */
                final List<Double> mappingErrorList = IntStream.range(0, readErrorList.size())
                        .mapToDouble(i -> 1e-6).boxed().collect(Collectors.toList());
                /* construct the BaseQuality list */
                final List<BayesianHetPulldownCalculator.BaseQuality> baseQualityList = IntStream.range(0, readErrorList.size())
                        .mapToObj(i -> new BayesianHetPulldownCalculator.BaseQuality(readErrorList.get(i), mappingErrorList.get(i)))
                        .collect(Collectors.toList());
                baseQualities.put(base, baseQualityList);
            }
            fakePileupBaseQualities.add(baseQualities);
            fakePileupHetLogLikelihoodArray.add(parseToDouble.of(reader.nextLine()));
            fakePileupHomLogLikelihoodArray.add(parseToDouble.of(reader.nextLine()));
        }
    }
    numPileupEntries = fakePileupBaseQualities.size();
}
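The StringMapper type used above is a small functional interface defined in the test class itself. A minimal sketch of its assumed shape, inferred from the of(...) call sites (the example line in the comment is illustrative, not taken from the actual test resource file):

/* minimal sketch of the single-method interface assumed by the parsers above;
 * inferred from calls such as stringStripper.of(s); the real definition lives
 * in BayesianHetPulldownCalculatorUnitTest */
@FunctionalInterface
interface StringMapper<T> {
    /* e.g. parseToDoubleArray.of("readErrors: 0.01, 0.02, 0.03") would yield [0.01, 0.02, 0.03] */
    T of(String input);
}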
Use of org.broadinstitute.hellbender.utils.Nucleotide in project gatk by broadinstitute.
In the class BayesianHetPulldownCalculatorUnitTest, the method inputTestGetHomLogLikelihood:
@DataProvider(name = "inputTestGetHomLogLikelihood")
public Object[][] inputTestGetHomLogLikelihood() {
    final Nucleotide alleleRef = Nucleotide.A;
    final Nucleotide alleleAlt = Nucleotide.T;
    final double homRefPrior = 0.5;
    final Object[][] out = new Object[numPileupEntries][];
    for (int i = 0; i < numPileupEntries; i++) {
        out[i] = new Object[] { fakePileupBaseQualities.get(i), alleleRef, alleleAlt, homRefPrior,
                fakePileupHomLogLikelihoodArray.get(i) };
    }
    return out;
}
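A hypothetical TestNG consumer of this provider, showing how each row maps to test arguments. It assumes BayesianHetPulldownCalculator exposes a static getHomLogLikelihood(baseQualities, alleleRef, alleleAlt, homRefPrior), that org.testng.Assert is imported, and an illustrative tolerance:

/* sketch only: method under test and tolerance are assumptions for illustration */
@Test(dataProvider = "inputTestGetHomLogLikelihood")
public void testGetHomLogLikelihood(final Map<Nucleotide, List<BayesianHetPulldownCalculator.BaseQuality>> baseQualities,
                                    final Nucleotide alleleRef, final Nucleotide alleleAlt,
                                    final double homRefPrior, final double expectedHomLogLikelihood) {
    Assert.assertEquals(BayesianHetPulldownCalculator.getHomLogLikelihood(baseQualities, alleleRef, alleleAlt, homRefPrior),
            expectedHomLogLikelihood, 1e-3);
}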
Use of org.broadinstitute.hellbender.utils.Nucleotide in project gatk-protected by broadinstitute.
In the class BayesianHetPulldownCalculatorUnitTest, the method inputTestGetHetLogLikelihood:
@DataProvider(name = "inputTestGetHetLogLikelihood")
public Object[][] inputTestGetHetLogLikelihood() {
    final Nucleotide alleleRef = Nucleotide.A;
    final Nucleotide alleleAlt = Nucleotide.T;
    final Object[][] out = new Object[numPileupEntries][];
    for (int i = 0; i < numPileupEntries; i++) {
        out[i] = new Object[] { fakePileupBaseQualities.get(i), alleleRef, alleleAlt,
                fakePileupHetLogLikelihoodArray.get(i) };
    }
    return out;
}
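The matching het-likelihood consumer would follow the same pattern; a sketch assuming a static getHetLogLikelihood(baseQualities, alleleRef, alleleAlt) and an illustrative tolerance:

/* sketch only: method under test and tolerance are assumptions for illustration */
@Test(dataProvider = "inputTestGetHetLogLikelihood")
public void testGetHetLogLikelihood(final Map<Nucleotide, List<BayesianHetPulldownCalculator.BaseQuality>> baseQualities,
                                    final Nucleotide alleleRef, final Nucleotide alleleAlt,
                                    final double expectedHetLogLikelihood) {
    Assert.assertEquals(BayesianHetPulldownCalculator.getHetLogLikelihood(baseQualities, alleleRef, alleleAlt),
            expectedHetLogLikelihood, 1e-3);
}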
Use of org.broadinstitute.hellbender.utils.Nucleotide in project gatk-protected by broadinstitute.
In the class AllelicCountReader, the method createAllelicCountWithVerbosity:
static AllelicCount createAllelicCountWithVerbosity(final DataLine dataLine, final AllelicCountTableColumn.AllelicCountTableVerbosity verbosity) {
    /* mandatory (basic) fields */
    final int position = dataLine.getInt(AllelicCountTableColumn.POSITION);
    final SimpleInterval interval = new SimpleInterval(dataLine.get(AllelicCountTableColumn.CONTIG), position, position);
    final int refReadCount = dataLine.getInt(AllelicCountTableColumn.REF_COUNT);
    final int altReadCount = dataLine.getInt(AllelicCountTableColumn.ALT_COUNT);
    if (verbosity == AllelicCountTableColumn.AllelicCountTableVerbosity.BASIC) {
        return new AllelicCount(interval, refReadCount, altReadCount);
    } else {
        final Nucleotide refNucleotide = Nucleotide.valueOf(dataLine.get(AllelicCountTableColumn.REF_NUCLEOTIDE.name()).getBytes()[0]);
        final Nucleotide altNucleotide = Nucleotide.valueOf(dataLine.get(AllelicCountTableColumn.ALT_NUCLEOTIDE.name()).getBytes()[0]);
        final int readDepth = dataLine.getInt(AllelicCountTableColumn.READ_DEPTH.name());
        if (verbosity == AllelicCountTableColumn.AllelicCountTableVerbosity.INTERMEDIATE) {
            return new AllelicCount(interval, refReadCount, altReadCount, refNucleotide, altNucleotide, readDepth);
        } else {
            /* verbosity == AllelicCountTableVerbosity.FULL */
            final double hetLogOdds = dataLine.getDouble(AllelicCountTableColumn.HET_LOG_ODDS.name());
            return new AllelicCount(interval, refReadCount, altReadCount, refNucleotide, altNucleotide, readDepth, hetLogOdds);
        }
    }
}
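For reference, the columns each verbosity level consumes can be read directly off the branches above; the call at the end is a hypothetical usage sketch in which dataLine is assumed to hold one already-parsed table row:

/* columns consumed per verbosity, as implied by the reader above:
 *   BASIC:        CONTIG, POSITION, REF_COUNT, ALT_COUNT
 *   INTERMEDIATE: BASIC + REF_NUCLEOTIDE, ALT_NUCLEOTIDE, READ_DEPTH
 *   FULL:         INTERMEDIATE + HET_LOG_ODDS
 * hypothetical usage; `dataLine` is an assumption for illustration */
final AllelicCount fullCount = AllelicCountReader.createAllelicCountWithVerbosity(
        dataLine, AllelicCountTableColumn.AllelicCountTableVerbosity.FULL);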