Use of org.broadinstitute.hellbender.exceptions.UserException in project gatk by broadinstitute.
The class ReadCountCollectionUtils, method writeReadCountsFromSimpleInterval.
/**
 * Writes a read-counts file of targets with coverage, using dummy target names.
 * @param outWriter Writer used to write the targets with coverage. Never {@code null}
 * @param outName Name of the output (used in error messages). Never {@code null}
 * @param sampleName Name of the sample being written. Never {@code null}
 * @param byKeySorted Map of simple intervals to their copy ratio. Never {@code null}
 * @param comments Comments to add to the header of the coverage file. Never {@code null}
 */
public static <N extends Number> void writeReadCountsFromSimpleInterval(final Writer outWriter, final String outName, final String sampleName, final SortedMap<SimpleInterval, N> byKeySorted, final String[] comments) {
    Utils.nonNull(outWriter, "Output writer cannot be null.");
    Utils.nonNull(outName, "Output name cannot be null.");
    Utils.nonNull(sampleName, "Sample name cannot be null.");
    Utils.nonNull(byKeySorted, "Targets cannot be null.");
    Utils.nonNull(comments, "Comments cannot be null.");
    final boolean areTargetIntervalsAllPopulated = byKeySorted.keySet().stream().allMatch(Objects::nonNull);
    if (!areTargetIntervalsAllPopulated) {
        throw new UserException("Cannot write target coverage file with any null intervals.");
    }
    try (final TableWriter<ReadCountRecord> writer = writerWithIntervals(outWriter, Collections.singletonList(sampleName))) {
        for (final String comment : comments) {
            writer.writeComment(comment);
        }
        // Iterate over entries to avoid a second map lookup per key; the keys are already SimpleIntervals.
        for (final Map.Entry<SimpleInterval, N> entry : byKeySorted.entrySet()) {
            final SimpleInterval interval = entry.getKey();
            final double coverage = entry.getValue().doubleValue();
            writer.writeRecord(new ReadCountRecord.SingleSampleRecord(new Target(interval), coverage));
        }
    } catch (final IOException e) {
        throw new UserException.CouldNotCreateOutputFile(outName, e);
    }
}
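For context, a caller might look like the following minimal sketch. The file name, sample name, and comment text are assumptions made up for the example, not part of the original source; the comparator simply orders intervals by contig and start so the map satisfies the SortedMap contract.
// Hypothetical usage sketch (file name, sample name, and comments are made up for illustration);
// assumes the enclosing method declares throws IOException.
final SortedMap<SimpleInterval, Double> counts =
        new TreeMap<>(Comparator.comparing(SimpleInterval::getContig).thenComparingInt(SimpleInterval::getStart));
counts.put(new SimpleInterval("1", 100, 200), 2.5);
counts.put(new SimpleInterval("1", 300, 400), 3.0);
try (final Writer writer = new FileWriter("sample1.counts.tsv")) {
    ReadCountCollectionUtils.writeReadCountsFromSimpleInterval(
            writer, "sample1.counts.tsv", "sample1", counts, new String[]{"generated by example"});
}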
Use of org.broadinstitute.hellbender.exceptions.UserException in project gatk by broadinstitute.
The class SelectVariants, method apply.
@Override
public void apply(VariantContext vc, ReadsContext readsContext, ReferenceContext ref, FeatureContext featureContext) {
    if (fullyDecode) {
        vc = vc.fullyDecode(getHeaderForVariants(), lenientVCFProcessing);
    }
    if (mendelianViolations && invertLogic((mv.countFamilyViolations(sampleDB, samples, vc) == 0), invertMendelianViolations)) {
        return;
    }
    if (discordanceOnly && !isDiscordant(vc, featureContext.getValues(discordanceTrack))) {
        return;
    }
    if (concordanceOnly && !isConcordant(vc, featureContext.getValues(concordanceTrack))) {
        return;
    }
    if (alleleRestriction.equals(NumberAlleleRestriction.BIALLELIC) && !vc.isBiallelic()) {
        return;
    }
    if (alleleRestriction.equals(NumberAlleleRestriction.MULTIALLELIC) && vc.isBiallelic()) {
        return;
    }
    if (containsIndelLargerOrSmallerThan(vc, maxIndelSize, minIndelSize)) {
        return;
    }
    if (considerFilteredGenotypes()) {
        final int numFilteredSamples = numFilteredGenotypes(vc);
        // Cast to double to avoid integer division truncating the fraction to zero.
        final double fractionFilteredGenotypes = samples.isEmpty() ? 0.0 : (double) numFilteredSamples / samples.size();
        if (numFilteredSamples > maxFilteredGenotypes || numFilteredSamples < minFilteredGenotypes ||
                fractionFilteredGenotypes > maxFractionFilteredGenotypes || fractionFilteredGenotypes < minFractionFilteredGenotypes) {
            return;
        }
    }
    if (considerNoCallGenotypes()) {
        final int numNoCallSamples = numNoCallGenotypes(vc);
        final double fractionNoCallGenotypes = samples.isEmpty() ? 0.0 : ((double) numNoCallSamples) / samples.size();
        if (numNoCallSamples > maxNOCALLnumber || fractionNoCallGenotypes > maxNOCALLfraction) {
            return;
        }
    }
    // Initialize the cache of PL index to a list of alleles for each ploidy.
    initalizeAlleleAnyploidIndicesCache(vc);
    final VariantContext sub = subsetRecord(vc, preserveAlleles, removeUnusedAlternates);
    final VariantContextBuilder builder = new VariantContextBuilder(vc);
    if (setFilteredGenotypesToNocall) {
        GATKVariantContextUtils.setFilteredGenotypeToNocall(builder, sub, setFilteredGenotypesToNocall, this::getGenotypeFilters);
    }
    final VariantContext filteredGenotypeToNocall = setFilteredGenotypesToNocall ? builder.make() : sub;
    // Either we are not excluding non-variants or the subsetted variant is still polymorphic,
    // AND either we are including filtered loci or the subsetted variant is not filtered.
    if ((!XLnonVariants || filteredGenotypeToNocall.isPolymorphicInSamples()) && (!XLfiltered || !filteredGenotypeToNocall.isFiltered())) {
        // Write the subsetted variant only if it matches all of the JEXL expressions.
        boolean failedJexlMatch = false;
        try {
            for (final VariantContextUtils.JexlVCMatchExp jexl : jexls) {
                if (invertLogic(!VariantContextUtils.match(filteredGenotypeToNocall, jexl), invertSelect)) {
                    failedJexlMatch = true;
                    break;
                }
            }
        } catch (final IllegalArgumentException e) {
            // A malformed JEXL expression was detected.
            throw new UserException(e.getMessage() + "\nSee https://www.broadinstitute.org/gatk/guide/article?id=1255 for documentation on using JEXL in GATK", e);
        }
        if (!failedJexlMatch && (!selectRandomFraction || Utils.getRandomGenerator().nextDouble() < fractionRandom)) {
            vcfWriter.add(filteredGenotypeToNocall);
        }
    }
}
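The invertLogic helper is not part of this excerpt. Judging from its call sites above, a plausible implementation is the small sketch below; the name matches the calls, but the body is an assumption:
// Plausible sketch of the invertLogic helper (not shown in this excerpt):
// returns the test result unchanged, or its negation when inverted selection was requested.
private static boolean invertLogic(final boolean logic, final boolean invert) {
    return invert ? !logic : logic;
}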
Use of org.broadinstitute.hellbender.exceptions.UserException in project gatk by broadinstitute.
The class CountFalsePositives, method onTraversalSuccess.
@Override
public Object onTraversalSuccess() {
    final List<SimpleInterval> intervals = intervalArgumentCollection.getIntervals(getReferenceDictionary());
    // Sum as longs so the total territory cannot overflow int before being assigned.
    final long targetTerritory = intervals.stream().mapToLong(SimpleInterval::size).sum();
    try (final FalsePositiveTableWriter writer = new FalsePositiveTableWriter(outputFile)) {
        final FalsePositiveRecord falsePositiveRecord = new FalsePositiveRecord(id, snpFalsePositiveCount, indelFalsePositiveCount, targetTerritory);
        writer.writeRecord(falsePositiveRecord);
    } catch (final IOException e) {
        throw new UserException(String.format("Encountered an IO exception while writing to %s", outputFile), e);
    }
    return "SUCCESS";
}
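As a quick sanity check of the territory arithmetic, the same stream computation can be run on toy intervals (the intervals below are made up for the example):
// Toy illustration of the territory sum above; SimpleInterval.size() is end - start + 1.
final List<SimpleInterval> toy = Arrays.asList(
        new SimpleInterval("1", 1, 100),     // size 100
        new SimpleInterval("2", 51, 150));   // size 100
final long territory = toy.stream().mapToLong(SimpleInterval::size).sum();  // 200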
Use of org.broadinstitute.hellbender.exceptions.UserException in project gatk by broadinstitute.
The class ApplyVQSR, method doAlleleSpecificFiltering.
/**
 * Calculates the allele-specific filter status of vc.
 * @param vc the site whose alternate alleles are to be filtered
 * @param recals the recalibration records overlapping vc
 * @param builder is modified by adding attributes
 * @return a String with the filter status for this site
 */
private String doAlleleSpecificFiltering(final VariantContext vc, final List<VariantContext> recals, final VariantContextBuilder builder) {
    double bestLod = VariantRecalibratorEngine.MIN_ACCEPTABLE_LOD_SCORE;
    final List<String> culpritStrings = new ArrayList<>();
    final List<String> lodStrings = new ArrayList<>();
    final List<String> AS_filterStrings = new ArrayList<>();
    String[] prevCulpritList = null;
    String[] prevLodList = null;
    String[] prevASfiltersList = null;
    // Get VQSR annotations from a previous run of ApplyRecalibration, if applicable.
    if (foundINDELTranches || foundSNPTranches) {
        final String prevCulprits = vc.getAttributeAsString(GATKVCFConstants.AS_CULPRIT_KEY, "");
        prevCulpritList = prevCulprits.isEmpty() ? new String[0] : prevCulprits.split(listPrintSeparator);
        final String prevLodString = vc.getAttributeAsString(GATKVCFConstants.AS_VQS_LOD_KEY, "");
        prevLodList = prevLodString.isEmpty() ? new String[0] : prevLodString.split(listPrintSeparator);
        final String prevASfilters = vc.getAttributeAsString(GATKVCFConstants.AS_FILTER_STATUS_KEY, "");
        prevASfiltersList = prevASfilters.isEmpty() ? new String[0] : prevASfilters.split(listPrintSeparator);
    }
    // For each alternate allele in the current VariantContext...
    for (int altIndex = 0; altIndex < vc.getNAlleles() - 1; altIndex++) {
        final Allele allele = vc.getAlternateAllele(altIndex);
        // If the current allele is not part of this recalibration mode, carry over its previous annotations and go to the next allele.
        if (!VariantDataManager.checkVariationClass(vc, allele, MODE)) {
            updateAnnotationsWithoutRecalibrating(altIndex, prevCulpritList, prevLodList, prevASfiltersList, culpritStrings, lodStrings, AS_filterStrings);
            continue;
        }
        // The current allele does need recalibration applied.
        // Initialize allele-specific VQSR annotation data with the placeholder values used for a spanning deletion.
        String alleleLodString = emptyFloatValue;
        String alleleFilterString = emptyStringValue;
        String alleleCulpritString = emptyStringValue;
        // If it's not a spanning deletion, replace those placeholders with the real values.
        if (!GATKVCFConstants.isSpanningDeletion(allele)) {
            final VariantContext recalDatum = getMatchingRecalVC(vc, recals, allele);
            if (recalDatum == null) {
                throw new UserException("Encountered input allele which isn't found in the input recal file. Please make sure VariantRecalibrator and ApplyRecalibration were run on the same set of input variants with flag -AS. First seen at: " + vc);
            }
            // Compare VQSLODs for all alleles in the current mode for filtering later.
            final double lod = recalDatum.getAttributeAsDouble(GATKVCFConstants.VQS_LOD_KEY, VariantRecalibratorEngine.MIN_ACCEPTABLE_LOD_SCORE);
            if (lod > bestLod) {
                bestLod = lod;
            }
            alleleLodString = String.format("%.4f", lod);
            alleleFilterString = generateFilterString(lod);
            alleleCulpritString = recalDatum.getAttributeAsString(GATKVCFConstants.CULPRIT_KEY, ".");
            // recalDatum is known to be non-null here (a null would have thrown above), so no extra null check is needed.
            if (recalDatum.hasAttribute(GATKVCFConstants.POSITIVE_LABEL_KEY)) {
                builder.attribute(GATKVCFConstants.POSITIVE_LABEL_KEY, true);
            }
            if (recalDatum.hasAttribute(GATKVCFConstants.NEGATIVE_LABEL_KEY)) {
                builder.attribute(GATKVCFConstants.NEGATIVE_LABEL_KEY, true);
            }
        }
        // Append the per-allele VQSR annotations.
        lodStrings.add(alleleLodString);
        AS_filterStrings.add(alleleFilterString);
        culpritStrings.add(alleleCulpritString);
    }
    // Annotate the new record with its VQSLOD, AS_FilterStatus, and the worst-performing annotation.
    if (!AS_filterStrings.isEmpty()) {
        builder.attribute(GATKVCFConstants.AS_FILTER_STATUS_KEY, AnnotationUtils.encodeStringList(AS_filterStrings));
    }
    if (!lodStrings.isEmpty()) {
        builder.attribute(GATKVCFConstants.AS_VQS_LOD_KEY, AnnotationUtils.encodeStringList(lodStrings));
    }
    if (!culpritStrings.isEmpty()) {
        builder.attribute(GATKVCFConstants.AS_CULPRIT_KEY, AnnotationUtils.encodeStringList(culpritStrings));
    }
    return generateFilterStringFromAlleles(vc, bestLod);
}
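To make the per-allele bookkeeping concrete, the sketch below shows the encode/split round-trip these lists go through. The pipe separator and the plain String.join stand in for listPrintSeparator and AnnotationUtils.encodeStringList, whose exact formatting is not shown in this excerpt, so treat both as assumptions:
// Hedged sketch of the per-allele annotation round-trip (separator value assumed):
final List<String> lods = Arrays.asList("10.2451", "-3.0076");   // one entry per alt allele
final String encoded = String.join("|", lods);                   // e.g. AS_VQSLOD=10.2451|-3.0076
final String[] decoded = encoded.isEmpty() ? new String[0] : encoded.split("\\|");  // mirrors the prev* parsing above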
Use of org.broadinstitute.hellbender.exceptions.UserException in project gatk by broadinstitute.
The class ASEReadCounter, method filterPileup.
private ReadPileup filterPileup(final ReadPileup originalPileup, final CountPileupType countType) {
    final SAMFileHeader header = getHeaderForReads();
    final ReadPileup pileupWithDeletions;
    switch (countType) {
        case COUNT_FRAGMENTS_REQUIRE_SAME_BASE:
            // Keep one read per overlapping fragment, requiring mates to agree on the base.
            pileupWithDeletions = originalPileup.getOverlappingFragmentFilteredPileup(true, ReadPileup.baseQualTieBreaker, header);
            break;
        case COUNT_READS:
            // Count every read independently; no fragment-level de-duplication.
            pileupWithDeletions = originalPileup;
            break;
        case COUNT_FRAGMENTS:
            // Keep one read per overlapping fragment, without requiring base agreement.
            pileupWithDeletions = originalPileup.getOverlappingFragmentFilteredPileup(false, ReadPileup.baseQualTieBreaker, header);
            break;
        default:
            throw new UserException("Must use valid CountPileupType");
    }
    // Finally, drop deletion elements so only base-bearing pileup elements remain.
    return pileupWithDeletions.makeFilteredPileup(p -> !p.isDeletion());
}
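The CountPileupType enum is not included in this excerpt; based on the constants the switch above handles, a minimal declaration would be:
// Minimal sketch of the CountPileupType enum implied by the switch above (declaration not shown in this excerpt):
public enum CountPileupType {
    COUNT_READS,
    COUNT_FRAGMENTS,
    COUNT_FRAGMENTS_REQUIRE_SAME_BASE
}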