Use of org.broadinstitute.hellbender.exceptions.UserException in the gatk project by broadinstitute:
the Main class, extractCommandLineProgram method.
/**
 * Returns the command line program whose simple class name matches {@code args[0]}.
 *
 * <p>Prints the usage and returns {@code null} when no arguments or a help flag
 * ({@code -h}/{@code --help}) is given; throws a {@link UserException} when the
 * named command is unknown.
 *
 * @param args            raw command-line arguments; {@code args[0]} selects the tool
 * @param packageList     packages scanned for {@link CommandLineProgram} subclasses
 * @param classList       additional tool classes included beside the scanned ones
 * @param commandLineName name of the top-level command, used in usage output
 * @return the instantiated tool, or {@code null} when only the usage was printed
 */
private static CommandLineProgram extractCommandLineProgram(final String[] args, final List<String> packageList, final List<Class<? extends CommandLineProgram>> classList, final String commandLineName) {
    // Gather every CommandLineProgram on the listed packages plus the explicit extras.
    final ClassFinder classFinder = new ClassFinder();
    for (final String pkg : packageList) {
        classFinder.find(pkg, CommandLineProgram.class);
    }
    final Set<Class<?>> toCheck = classFinder.getClasses();
    toCheck.addAll(classList);

    String missingAnnotationClasses = "";
    final Map<String, Class<?>> simpleNameToClass = new LinkedHashMap<>();
    for (final Class<?> clazz : toCheck) {
        // No interfaces, synthetic, primitive, local, or abstract classes.
        if (ClassUtils.canMakeInstances(clazz)) {
            final CommandLineProgramProperties property = getProgramProperty(clazz);
            if (null == property) {
                // Accumulate names so ALL offenders are reported in a single error below.
                if (!missingAnnotationClasses.isEmpty()) {
                    missingAnnotationClasses += ", ";
                }
                missingAnnotationClasses += clazz.getSimpleName();
            } else if (!property.omitFromCommandLine()) {
                // Tools are addressed by simple name, so a collision would make lookup ambiguous.
                if (simpleNameToClass.containsKey(clazz.getSimpleName())) {
                    throw new RuntimeException("Simple class name collision: " + clazz.getSimpleName());
                }
                simpleNameToClass.put(clazz.getSimpleName(), clazz);
            }
        }
    }
    if (!missingAnnotationClasses.isEmpty()) {
        throw new RuntimeException("The following classes are missing the required CommandLineProgramProperties annotation: " + missingAnnotationClasses);
    }

    final Set<Class<?>> classes = new LinkedHashSet<>(simpleNameToClass.values());
    // No tool requested, or explicit help: print the usage and return nothing.
    if (args.length < 1 || args[0].equals("-h") || args[0].equals("--help")) {
        printUsage(classes, commandLineName);
        return null;
    }
    final Class<?> clazz = simpleNameToClass.get(args[0]);
    if (clazz != null) {
        try {
            // getDeclaredConstructor().newInstance() replaces the deprecated Class.newInstance(),
            // which silently propagated any checked exception thrown by the constructor.
            return (CommandLineProgram) clazz.getDeclaredConstructor().newInstance();
        } catch (final ReflectiveOperationException e) {
            throw new RuntimeException(e);
        }
    }
    printUsage(classes, commandLineName);
    throw new UserException(getUnknownCommandMessage(classes, args[0]));
}
Use of org.broadinstitute.hellbender.exceptions.UserException in the gatk project by broadinstitute:
the CreateHadoopBamSplittingIndex class, createBaiAndSplittingIndex method.
/**
 * Writes both a splitting index and a standard BAI index for the given coordinate-sorted BAM.
 *
 * @param inputBam                 BAM file to index; must really be a BAM and be coordinate-sorted
 * @param index                    output file for the splitting index (the BAI path is derived from it)
 * @param granularity              record interval between splitting-index entries
 * @param readValidationStringency stringency applied while reading the input records
 */
private static void createBaiAndSplittingIndex(final File inputBam, final File index, final int granularity, final ValidationStringency readValidationStringency) {
    assertIsBam(inputBam);
    // Source records must be retained so the splitting indexer can see raw record boundaries.
    final SamReaderFactory readerFactory = SamReaderFactory.makeDefault()
            .validationStringency(readValidationStringency)
            .setOption(SamReaderFactory.Option.INCLUDE_SOURCE_IN_RECORDS, true);
    try (SamReader bamReader = readerFactory.open(inputBam);
            BufferedOutputStream splittingIndexOut = new BufferedOutputStream(new FileOutputStream(index))) {
        final SAMFileHeader header = bamReader.getFileHeader();
        assertBamIsCoordinateSorted(header);
        final SplittingBAMIndexer splittingIndexer = new SplittingBAMIndexer(splittingIndexOut, granularity);
        // The BAI lives next to the splitting index, with the conventional .bai suffix.
        final BAMIndexer baiIndexer = new BAMIndexer(IOUtils.replaceExtension(index, BAMIndex.BAMIndexSuffix), header);
        // Single pass over the BAM feeds both indexers.
        for (final SAMRecord record : bamReader) {
            splittingIndexer.processAlignment(record);
            baiIndexer.processAlignment(record);
        }
        splittingIndexer.finish(inputBam.length());
        baiIndexer.finish();
    } catch (final IOException e) {
        throw new UserException("Couldn't create splitting index", e);
    }
}
Use of org.broadinstitute.hellbender.exceptions.UserException in the gatk-protected project by broadinstitute:
the ConvertACNVResults class, runPipeline method.
/**
 * Runs the CNLoH/balanced-segment calling pipeline: reads ACNV modeled segments, makes
 * calls via {@link CNLOHCaller}, and writes several output formats (updated ACNV, GATK CNV,
 * Allelic CapSeg, and TITAN-compatible files) into {@code outputDir}.
 */
@Override
protected void runPipeline(final JavaSparkContext ctx) {
    // Remember the caller's log level so it can be restored even if the pipeline throws.
    final String originalLogLevel = (ctx.getLocalProperty("logLevel") != null) ? ctx.getLocalProperty("logLevel") : "INFO";
    ctx.setLogLevel("WARN");
    try {
        final CNLOHCaller cnlohCaller = new CNLOHCaller();
        cnlohCaller.setRhoThreshold(rhoThreshold);
        final List<ACNVModeledSegment> segments = SegmentUtils.readACNVModeledSegmentFile(new File(segmentsFile));
        final String sampleName = determineSampleName(new File(segmentsFile));
        // Create the output directory up front so every writer below can assume it exists.
        try {
            FileUtils.forceMkdir(outputDir);
        } catch (final IOException ioe) {
            throw new UserException("Cannot create " + outputDir + ". Does a file (not directory) exist with the same name? Do you have access to create?", ioe);
        }
        final Genome genome = new Genome(targetCoveragesFile, snpCountsFile);
        // Make the calls
        logger.info("Making the balanced-segment (and CNLoH) calls...");
        final List<AllelicCalls> calls = cnlohCaller.makeCalls(segments, numIterations, ctx);
        // Write updated ACNV file with calls
        logger.info("Writing updated ACNV file with calls ...");
        final File finalACNVModeledSegmentsFile = new File(outputDir, getSegmentsBaseFilename() + "." + BALANCED_SEG_FILE_TAG + ".seg");
        SegmentUtils.writeCnLoHACNVModeledSegmentFile(finalACNVModeledSegmentsFile, calls, genome);
        // write file for GATK CNV formatted seg file
        logger.info("Writing file with same output format as GATK CNV...");
        final File finalModeledSegmentsFileAsGatkCNV = new File(outputDir, getSegmentsBaseFilename() + "." + GATK_SEG_FILE_TAG + ".seg");
        SegmentUtils.writeModeledSegmentFile(finalModeledSegmentsFileAsGatkCNV, ACNVModeledSegmentConversionUtils.convertACNVModeledSegmentsToModeledSegments(segments, genome), sampleName);
        // write file for ACS-compatible output to help Broad CGA
        logger.info("Writing file with same output format as Broad CGA Allelic CapSeg ...");
        final File finalACSModeledSegmentsFile = new File(outputDir, getSegmentsBaseFilename() + "." + CGA_ACS_SEG_FILE_TAG + ".seg");
        ACSModeledSegmentUtils.writeACNVModeledSegmentFileAsAllelicCapSegFile(finalACSModeledSegmentsFile, calls, genome);
        // write files for TITAN-compatible output to help Broad CGA
        logger.info("Writing het file with input format for TITAN ...");
        final File finalTitanHetFile = new File(outputDir, getSegmentsBaseFilename() + "." + TITAN_HET_FILE_TAG + ".tsv");
        TitanFileConverter.convertHetPulldownToTitanHetFile(snpCountsFile, finalTitanHetFile);
        logger.info("Writing tangent-normalized target CR estimates with input format for TITAN ...");
        final File finalTitanTNFile = new File(outputDir, getSegmentsBaseFilename() + "." + TITAN_TN_FILE_TAG + ".tsv");
        TitanFileConverter.convertCRToTitanCovFile(targetCoveragesFile, finalTitanTNFile);
        logger.info("SUCCESS: CNLoH and splits called for sample " + sampleName + ".");
    } finally {
        // Original code only restored the level on success, leaving it at WARN after a failure.
        ctx.setLogLevel(originalLogLevel);
    }
}
Use of org.broadinstitute.hellbender.exceptions.UserException in the gatk-protected project by broadinstitute:
the FilterByOrientationBias class, onTraversalStart method.
/**
 * Reads the Picard pre-adapter detail metrics, derives the per-transition pre-adapter
 * scores used for orientation-bias filtering, and sets up the output VCF writer.
 */
@Override
public void onTraversalStart() {
    // Gets around issue 2274 in gatk public: an empty argument list falls back to the default mode.
    if (transitions.isEmpty()) {
        transitions.add(DEFAULT_ARTIFACT_MODE);
    }
    // Sort the input artifacts argument
    transitions.sort(null);
    final MetricsFile<SequencingArtifactMetrics.PreAdapterDetailMetrics, Comparable<?>> mf = new MetricsFile<>();
    try {
        mf.read(new FileReader(preAdapterMetricsFile));
    } catch (final FileNotFoundException fnfe) {
        // Preserve the cause so the stack trace shows where the file lookup failed.
        throw new UserException("Could not find file: " + preAdapterMetricsFile.getAbsolutePath(), fnfe);
    }
    // Collect all of the transitions that were specified in the parameters.
    relevantTransitions.addAll(transitions.stream().map(s -> convertParameterToTransition(s)).collect(Collectors.toSet()));
    // Get the PreAdapterQ score from the picard metrics tool, which gives an indication of how badly infested the bam file is.
    transitionToPreAdapterScoreMap = PreAdapterOrientationScorer.scoreOrientationBiasMetricsOverContext(mf.getMetrics());
    logger.info("preAdapter scores:");
    // Iterate entries directly instead of re-looking up each key from the key set.
    transitionToPreAdapterScoreMap.forEach((k, v) -> logger.info(k + ": " + v));
    setupVCFWriter();
}
Use of org.broadinstitute.hellbender.exceptions.UserException in the gatk project by broadinstitute:
the GATKSparkTool class, initializeIntervals method.
/**
 * Parses any user-specified intervals against the best available sequence dictionary
 * (see {@link #getBestAvailableSequenceDictionary}), then lets subclasses transform the
 * result via {@link #editIntervals}. Does nothing beyond the edit hook when no intervals
 * were specified.
 */
private void initializeIntervals() {
    if (intervalArgumentCollection.intervalsSpecified()) {
        final SAMSequenceDictionary sequenceDictionary = getBestAvailableSequenceDictionary();
        if (sequenceDictionary == null) {
            // Intervals cannot be parsed or validated without some dictionary to resolve contigs.
            throw new UserException("We require at least one input source that has a sequence dictionary (reference or reads) when intervals are specified");
        }
        intervals = intervalArgumentCollection.getIntervals(sequenceDictionary);
    }
    // The edit hook runs unconditionally, even when no intervals were specified.
    intervals = editIntervals(intervals);
}
Aggregations