use of org.broadinstitute.hellbender.exceptions.UserException in project gatk by broadinstitute.
the class IOUtils method getPath.
/**
 * Converts the given URI to a {@link Path} object. If the filesystem cannot be found in the usual way, then attempt
 * to load the filesystem provider using the thread context classloader. This is needed when the filesystem
 * provider is loaded using a URL classloader (e.g. in spark-submit).
 *
 * @param uriString the URI to convert
 * @return the resulting {@code Path}
 * @throws UserException if an I/O error occurs when creating the file system
 */
public static Path getPath(String uriString) {
    Utils.nonNull(uriString);
    URI uri = URI.create(uriString);
    try {
        // special case GCS, in case the filesystem provider wasn't installed properly but is available.
        if (CloudStorageFileSystem.URI_SCHEME.equals(uri.getScheme())) {
            return BucketUtils.getPathOnGcs(uriString);
        }
        return uri.getScheme() == null ? Paths.get(uriString) : Paths.get(uri);
    } catch (FileSystemNotFoundException e) {
        try {
            ClassLoader cl = Thread.currentThread().getContextClassLoader();
            if (cl == null) {
                throw e;
            }
            return FileSystems.newFileSystem(uri, new HashMap<>(), cl).provider().getPath(uri);
        } catch (IOException io) {
            throw new UserException(uriString + " is not a supported path", io);
        }
    }
}
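For orientation, a minimal usage sketch (not part of the original source; the paths below are made-up examples):

    // Hypothetical inputs: getPath handles local paths and URIs uniformly.
    final Path localBam = IOUtils.getPath("/data/sample.bam");          // no scheme: falls through to Paths.get(uriString)
    final Path cloudBam = IOUtils.getPath("gs://my-bucket/sample.bam"); // GCS scheme: BucketUtils.getPathOnGcs
    // Any other registered NIO scheme resolves via Paths.get(uri); the
    // context-classloader fallback only kicks in when the provider was loaded
    // by a URL classloader (e.g. under spark-submit).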
use of org.broadinstitute.hellbender.exceptions.UserException in project gatk-protected by broadinstitute.
the class BayesianHetPulldownCalculator method getTumorHetPulldownFromNormalPulldown.
/**
 * Calculates the Het pulldown from a tumor file, given the tumor BAM file and the pulldown from a matched
 * normal BAM file.
 *
 * Note: this method does not perform any statistical inference. The Het SNP sites are directly carried over
 * from the matched normal pulldown. Here, we only collect statistics (ref count, alt count, read depth) and
 * save to a pulldown. The verbosity level of the pulldown is INTERMEDIATE (see {@link AllelicCountTableColumn}).
 *
 * @param tumorBamFile the tumor BAM file
 * @param normalHetPulldown the matched normal Het pulldown
 * @return tumor Het pulldown
 */
public Pulldown getTumorHetPulldownFromNormalPulldown(final File tumorBamFile, final Pulldown normalHetPulldown) {
    try (final SamReader bamReader = SamReaderFactory.makeDefault()
            .validationStringency(validationStringency)
            .referenceSequence(refFile)
            .open(tumorBamFile)) {
        if (bamReader.getFileHeader().getSortOrder() != SAMFileHeader.SortOrder.coordinate) {
            throw new UserException.BadInput("BAM file " + tumorBamFile.toString() + " must be coordinate sorted.");
        }
        final Pulldown tumorHetPulldown = new Pulldown(bamReader.getFileHeader());
        final SamLocusIterator locusIterator = getSamLocusIteratorWithDefaultFilters(bamReader);
        /* get a map of SimpleIntervals in the pulldown to their index */
        final Map<SimpleInterval, Integer> normalPulldownIndexMap = normalHetPulldown.getSimpleIntervalToIndexMap();
        final int totalNumberOfSNPs = snpIntervals.size();
        logger.info("Examining " + totalNumberOfSNPs + " sites in total...");
        int locusCount = 0;
        for (final SamLocusIterator.LocusInfo locus : locusIterator) {
            if (locusCount % NUMBER_OF_SITES_PER_LOGGED_STATUS_UPDATE == 0) {
                logger.info("Examined " + locusCount + " covered sites.");
            }
            locusCount++;
            final int totalReadCount = locus.getRecordAndOffsets().size();
            if (totalReadCount <= readDepthThreshold) {
                continue;
            }
            /* find the AllelicCount from the normal pulldown */
            int indexInNormalPulldown;
            try {
                indexInNormalPulldown = normalPulldownIndexMap.get(
                        new SimpleInterval(locus.getSequenceName(), locus.getPosition(), locus.getPosition()));
            } catch (NullPointerException e) {
                // The NPE comes from unboxing the null that get() returns when the site is missing.
                throw new GATKException.ShouldNeverReachHereException("Cannot find the required AllelicCount object in the normal pulldown. Stopping.");
            }
            /* just count the alt and ref nucleotides and add to the tumor pulldown */
            final Nucleotide.Counter baseCounts = getPileupBaseCounts(locus);
            final AllelicCount normalCount = normalHetPulldown.getCounts().get(indexInNormalPulldown);
            tumorHetPulldown.add(new AllelicCount(
                    new SimpleInterval(locus.getSequenceName(), locus.getPosition(), locus.getPosition()),
                    (int) baseCounts.get(normalCount.getRefNucleotide()),
                    (int) baseCounts.get(normalCount.getAltNucleotide()),
                    normalCount.getRefNucleotide(),
                    normalCount.getAltNucleotide(),
                    totalReadCount));
        }
        logger.info(locusCount + " covered sites out of " + totalNumberOfSNPs + " total sites were examined.");
        return tumorHetPulldown;
    } catch (final IOException | SAMFormatException e) {
        throw new UserException(e.getMessage());
    }
}
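A hypothetical call sequence may clarify the matched-normal flow; the calculator instance, the normal-side pulldown method, and the stringency argument below are assumptions, not shown on this page:

    // Hypothetical driver: obtain the Het pulldown from the matched normal
    // first, then carry its sites over to the tumor BAM.
    final Pulldown normalHetPulldown = calculator.getHetPulldown(normalBamFile, hetCallingStringency); // assumed companion API
    final Pulldown tumorHetPulldown =
            calculator.getTumorHetPulldownFromNormalPulldown(tumorBamFile, normalHetPulldown);
    // Both BAMs must be coordinate sorted, or UserException.BadInput is thrown.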
use of org.broadinstitute.hellbender.exceptions.UserException in project gatk by broadinstitute.
the class ReadCountCollectionUtils method writeReadCountsFromSimpleInterval.
/**
 * Writes a read counts file of targets with coverage to a file, using dummy target names.
 *
 * @param outWriter Writer to write targets with coverage. Never {@code null}
 * @param outName Name of the output writer (used in error messages). Never {@code null}
 * @param sampleName Name of the sample being written. Never {@code null}
 * @param byKeySorted Map of simple-intervals to their copy-ratio. Never {@code null}
 * @param comments Comments to add to the header of the coverage file. Never {@code null}
 */
public static <N extends Number> void writeReadCountsFromSimpleInterval(final Writer outWriter, final String outName, final String sampleName, final SortedMap<SimpleInterval, N> byKeySorted, final String[] comments) {
    Utils.nonNull(outWriter, "Output writer cannot be null.");
    Utils.nonNull(sampleName, "Sample name cannot be null.");
    Utils.nonNull(byKeySorted, "Targets cannot be null.");
    Utils.nonNull(comments, "Comments cannot be null.");
    final boolean areTargetIntervalsAllPopulated = byKeySorted.keySet().stream().allMatch(Objects::nonNull);
    if (!areTargetIntervalsAllPopulated) {
        throw new UserException("Cannot write target coverage file with any null intervals.");
    }
    try (final TableWriter<ReadCountRecord> writer = writerWithIntervals(outWriter, Collections.singletonList(sampleName))) {
        for (final String comment : comments) {
            writer.writeComment(comment);
        }
        for (final Map.Entry<SimpleInterval, N> entry : byKeySorted.entrySet()) {
            final double coverage = entry.getValue().doubleValue();
            writer.writeRecord(new ReadCountRecord.SingleSampleRecord(new Target(entry.getKey()), coverage));
        }
    } catch (final IOException e) {
        throw new UserException.CouldNotCreateOutputFile(outName, e);
    }
}
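A hedged usage sketch (the sample name, file name, and intervals are invented for illustration):

    // Hypothetical usage: copy-ratio values keyed by sorted intervals, written
    // as a single-sample read-counts table.
    final SortedMap<SimpleInterval, Double> copyRatios =
            new TreeMap<>(Comparator.comparing(SimpleInterval::getContig)
                    .thenComparingInt(SimpleInterval::getStart));
    copyRatios.put(new SimpleInterval("1", 1000, 2000), 1.03);
    copyRatios.put(new SimpleInterval("1", 3000, 4000), 0.51);
    try (final Writer writer = new FileWriter("sample1.coverage.tsv")) {
        ReadCountCollectionUtils.writeReadCountsFromSimpleInterval(
                writer, "sample1.coverage.tsv", "sample1", copyRatios, new String[0]);
    }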
use of org.broadinstitute.hellbender.exceptions.UserException in project gatk by broadinstitute.
the class SelectVariants method apply.
@Override
public void apply(VariantContext vc, ReadsContext readsContext, ReferenceContext ref, FeatureContext featureContext) {
    if (fullyDecode) {
        vc = vc.fullyDecode(getHeaderForVariants(), lenientVCFProcessing);
    }
    if (mendelianViolations && invertLogic((mv.countFamilyViolations(sampleDB, samples, vc) == 0), invertMendelianViolations)) {
        return;
    }
    if (discordanceOnly && !isDiscordant(vc, featureContext.getValues(discordanceTrack))) {
        return;
    }
    if (concordanceOnly && !isConcordant(vc, featureContext.getValues(concordanceTrack))) {
        return;
    }
    if (alleleRestriction.equals(NumberAlleleRestriction.BIALLELIC) && !vc.isBiallelic()) {
        return;
    }
    if (alleleRestriction.equals(NumberAlleleRestriction.MULTIALLELIC) && vc.isBiallelic()) {
        return;
    }
    if (containsIndelLargerOrSmallerThan(vc, maxIndelSize, minIndelSize)) {
        return;
    }
    if (considerFilteredGenotypes()) {
        final int numFilteredSamples = numFilteredGenotypes(vc);
        // Cast to double to avoid integer division (mirrors the no-call fraction below).
        final double fractionFilteredGenotypes = samples.isEmpty() ? 0.0 : ((double) numFilteredSamples) / samples.size();
        if (numFilteredSamples > maxFilteredGenotypes || numFilteredSamples < minFilteredGenotypes || fractionFilteredGenotypes > maxFractionFilteredGenotypes || fractionFilteredGenotypes < minFractionFilteredGenotypes) {
            return;
        }
    }
    if (considerNoCallGenotypes()) {
        final int numNoCallSamples = numNoCallGenotypes(vc);
        final double fractionNoCallGenotypes = samples.isEmpty() ? 0.0 : ((double) numNoCallSamples) / samples.size();
        if (numNoCallSamples > maxNOCALLnumber || fractionNoCallGenotypes > maxNOCALLfraction) {
            return;
        }
    }
    // Initialize the cache of PL index to a list of alleles for each ploidy.
    initalizeAlleleAnyploidIndicesCache(vc);
    final VariantContext sub = subsetRecord(vc, preserveAlleles, removeUnusedAlternates);
    final VariantContextBuilder builder = new VariantContextBuilder(vc);
    if (setFilteredGenotypesToNocall) {
        GATKVariantContextUtils.setFilteredGenotypeToNocall(builder, sub, setFilteredGenotypesToNocall, this::getGenotypeFilters);
    }
    final VariantContext filteredGenotypeToNocall = setFilteredGenotypesToNocall ? builder.make() : sub;
    // Either we are not excluding non-variants, or the subsetted variant is polymorphic; AND either we are
    // including filtered loci, or the subsetted variant is not filtered.
    if ((!XLnonVariants || filteredGenotypeToNocall.isPolymorphicInSamples()) && (!XLfiltered || !filteredGenotypeToNocall.isFiltered())) {
        // Write the subsetted variant if it matches all of the expressions
        boolean failedJexlMatch = false;
        try {
            for (final VariantContextUtils.JexlVCMatchExp jexl : jexls) {
                if (invertLogic(!VariantContextUtils.match(filteredGenotypeToNocall, jexl), invertSelect)) {
                    failedJexlMatch = true;
                    break;
                }
            }
        } catch (IllegalArgumentException e) {
            // The IAE thrown by htsjdk already includes an informative message ("Invalid JEXL expression detected...").
            throw new UserException(e.getMessage() + "\nSee https://www.broadinstitute.org/gatk/guide/article?id=1255 for documentation on using JEXL in GATK", e);
        }
        if (!failedJexlMatch && (!selectRandomFraction || Utils.getRandomGenerator().nextDouble() < fractionRandom)) {
            vcfWriter.add(filteredGenotypeToNocall);
        }
    }
}
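Since invertLogic(...) gates several of the checks above, its contract is worth spelling out; the actual helper is not shown on this page, but a one-line sketch consistent with the call sites would be:

    // Assumed semantics, inferred from the call sites above: an --invert* flag
    // simply flips the result of the corresponding test.
    private static boolean invertLogic(final boolean logic, final boolean invert) {
        return invert ? !logic : logic;
    }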
use of org.broadinstitute.hellbender.exceptions.UserException in project gatk by broadinstitute.
the class CountFalsePositives method onTraversalSuccess.
@Override
public Object onTraversalSuccess() {
    final List<SimpleInterval> intervals = intervalArgumentCollection.getIntervals(getReferenceDictionary());
    // Sum as long: genome-scale interval lists can exceed Integer.MAX_VALUE total bases.
    final long targetTerritory = intervals.stream().mapToLong(SimpleInterval::size).sum();
    try (final FalsePositiveTableWriter writer = new FalsePositiveTableWriter(outputFile)) {
        final FalsePositiveRecord falsePositiveRecord = new FalsePositiveRecord(id, snpFalsePositiveCount, indelFalsePositiveCount, targetTerritory);
        writer.writeRecord(falsePositiveRecord);
    } catch (final IOException e) {
        throw new UserException(String.format("Encountered an IO exception while opening or writing %s", outputFile), e);
    }
    return "SUCCESS";
}
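One detail worth illustrating is why the territory sum is taken as a long rather than an int. A small self-contained check (the contig lengths are GRCh37 values, used here only for illustration):

    // Two GRCh37 contigs already total ~0.5 Gbp; a whole genome (~3.1 Gbp)
    // exceeds Integer.MAX_VALUE (~2.147e9), so the sizes are summed as long.
    final List<SimpleInterval> intervals = Arrays.asList(
            new SimpleInterval("1", 1, 249250621),
            new SimpleInterval("2", 1, 243199373));
    final long targetTerritory = intervals.stream().mapToLong(SimpleInterval::size).sum();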