Use of com.github.lindenb.jvarkit.util.bio.bed.BedLine in project jvarkit by lindenb.
The class BamStats05, method readBedFile.
private Map<String, List<Interval>> readBedFile(final File bedFile) throws IOException {
final Map<String, List<Interval>> gene2interval = new TreeMap<String, List<Interval>>();
BufferedReader bedIn = null;
try {
bedIn = IOUtils.openFileForBufferedReading(bedFile);
final BedLineCodec codec = new BedLineCodec();
String line = null;
while ((line = bedIn.readLine()) != null) {
if (line.isEmpty() || line.startsWith("#"))
continue;
final BedLine bedLine = codec.decode(line);
if (bedLine == null)
continue;
if (bedLine.getColumnCount() < 4) {
throw new IOException("bad bed line in " + line + " " + bedFile);
}
final String chrom = bedLine.getContig();
final int chromStart1 = bedLine.getStart();
final int chromEnd1 = bedLine.getEnd();
final String gene = bedLine.get(3);
if (gene.isEmpty())
throw new IOException("bad bed gene in " + line + " " + bedFile);
List<Interval> intervals = gene2interval.get(gene);
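// all intervals of a given gene must lie on the same chromosome and must not overlap each other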
if (intervals == null) {
intervals = new ArrayList<>();
gene2interval.put(gene, intervals);
} else if (!intervals.get(0).getContig().equals(chrom)) {
throw new IOException("more than one chromosome for gene:" + gene);
} else {
for (final Interval interval : intervals) {
if (interval.getEnd() < chromStart1)
continue;
if (interval.getStart() > chromEnd1)
continue;
throw new IOException("overlapping region: " + line + " and " + interval);
}
}
intervals.add(new Interval(chrom, chromStart1, chromEnd1));
}
bedIn.close();
return gene2interval;
} finally {
CloserUtil.close(bedIn);
}
}
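Taken outside of BamStats05, the same BedLineCodec/Interval pattern boils down to the minimal sketch below; the BED line and gene name are invented for illustration, and only the classes already used above are assumed.
final BedLineCodec codec = new BedLineCodec();
final BedLine bed = codec.decode("chr1\t100\t200\tMYGENE");
// a valid 4-column data line decodes to a BedLine; headers and malformed lines decode to null
if (bed != null && bed.getColumnCount() >= 4) {
// BedLine.getStart() is 1-based, as the "-1" conversions elsewhere on this page show
final Interval interval = new Interval(bed.getContig(), bed.getStart(), bed.getEnd());
System.out.println(bed.get(3) + " spans " + interval.getContig() + ":" + interval.getStart() + "-" + interval.getEnd());
}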
Use of com.github.lindenb.jvarkit.util.bio.bed.BedLine in project jvarkit by lindenb.
The class Biostar105754, method run.
private void run(final BufferedReader r) throws IOException {
final BedLineCodec codec = new BedLineCodec();
String line;
while ((line = r.readLine()) != null && !this.out.checkError()) {
final BedLine bedLine = codec.decode(line);
if (bedLine == null) {
continue;
}
final String chrom = bedLine.getContig();
int chromStart0 = bedLine.getStart() - 1;
int chromEnd0 = bedLine.getEnd();
if (chrom.isEmpty() || chromStart0 < 0L || chromEnd0 < chromStart0) {
System.err.println("Bad BED line: " + line);
continue;
}
// extends bed area until something was found
int chromStart = chromStart0;
int chromEnd = chromEnd0;
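// widen the query window by EXTEND_SHIFT on each side until a bigWig item overlaps it, or the window already spans [0, MAX_CHROM_END]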
for (; ; ) {
BigWigIterator iter = this.bbFileReader.getBigWigIterator(chrom, chromStart, chrom, chromEnd, false);
if (iter != null) {
WigItem best = null;
while (iter.hasNext()) {
WigItem wigItem = iter.next();
if (best == null || distance(chromStart, chromEnd, best.getStartBase(), best.getEndBase()) > distance(chromStart, chromEnd, wigItem.getStartBase(), wigItem.getEndBase())) {
best = wigItem;
}
}
if (best != null) {
this.out.print(best.getChromosome());
this.out.print("\t");
this.out.print(best.getStartBase());
this.out.print("\t");
this.out.print(best.getEndBase());
this.out.print("\t");
this.out.print(best.getWigValue());
this.out.print("\t");
this.out.print(line);
this.out.println();
break;
}
}
// extend bed area
long start2 = chromStart - EXTEND_SHIFT;
long end2 = chromEnd + EXTEND_SHIFT;
if (start2 < 0)
start2 = 0;
if (end2 > MAX_CHROM_END)
end2 = MAX_CHROM_END;
// too wide, break loop
if (start2 == 0 && end2 == MAX_CHROM_END) {
LOG.warn("no data found for\t" + line);
break;
}
chromStart = (int) start2;
chromEnd = (int) end2;
}
}
}
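The distance(...) helper invoked in the loop above is not part of this excerpt. The sketch below is one plausible, clearly hypothetical definition (distance between interval midpoints) that would be consistent with how the loop keeps the closest WigItem; the real Biostar105754 implementation may differ.
// hypothetical helper: distance between the centers of two 0-based half-open intervals
private static long distance(final int start1, final int end1, final int start2, final int end2) {
final long mid1 = (start1 + (long) end1) / 2L;
final long mid2 = (start2 + (long) end2) / 2L;
return Math.abs(mid1 - mid2);
}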
Use of com.github.lindenb.jvarkit.util.bio.bed.BedLine in project jvarkit by lindenb.
The class VcfLoopOverGenes, method doWork.
@SuppressWarnings("resource")
@Override
public int doWork(final List<String> args) {
PrintWriter pw = null;
VCFFileReader vcfFileReader = null;
CloseableIterator<VariantContext> iter = null;
CloseableIterator<GeneLoc> iter2 = null;
BufferedReader br = null;
ArchiveFactory archive = null;
try {
final File vcf = new File(oneAndOnlyOneFile(args));
vcfFileReader = new VCFFileReader(vcf, (this.geneFile != null || !StringUtil.isBlank(this.regionStr)));
this.dictionary = vcfFileReader.getFileHeader().getSequenceDictionary();
if (this.dictionary == null) {
throw new JvarkitException.VcfDictionaryMissing(vcf);
}
final VcfTools tools = new VcfTools(vcfFileReader.getFileHeader());
if (!this.prefix.isEmpty() && !this.prefix.endsWith(".")) {
this.prefix += ".";
}
if (this.geneFile == null) {
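// first mode: no gene BED was given, so scan the VCF, collect one GeneLoc per (variant, gene or window) pair, and emit a BED-like table of gene loci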
final SortingCollection<GeneLoc> sortingCollection = SortingCollection.newInstance(GeneLoc.class, new GeneLocCodec(), (A, B) -> A.compareTo(B), this.writingSortingCollection.getMaxRecordsInRam(), this.writingSortingCollection.getTmpPaths());
sortingCollection.setDestructiveIteration(true);
if (StringUtil.isBlank(this.regionStr)) {
iter = vcfFileReader.iterator();
} else {
final IntervalParser parser = new IntervalParser(this.dictionary);
parser.setContigNameIsWholeContig(true);
final Interval interval = parser.parse(this.regionStr);
if (interval == null) {
LOG.error("Cannot parse interval " + this.regionStr);
return -1;
}
iter = vcfFileReader.query(interval.getContig(), interval.getStart(), interval.getEnd());
}
final SAMSequenceDictionaryProgress progress = new SAMSequenceDictionaryProgress(vcfFileReader.getFileHeader()).logger(LOG);
if (this.splitMethod.equals(SplitMethod.Annotations)) {
while (iter.hasNext()) {
final VariantContext ctx = progress.watch(iter.next());
for (final AnnPredictionParser.AnnPrediction pred : tools.getAnnPredictionParser().getPredictions(ctx)) {
if (this.snpEffNoIntergenic && pred.isIntergenicRegion()) {
continue;
}
if (!StringUtil.isBlank(pred.getGeneName())) {
sortingCollection.add(create(ctx, pred.getGeneName(), SourceType.ANN_GeneName));
}
if (!StringUtil.isBlank(pred.getGeneId())) {
sortingCollection.add(create(ctx, pred.getGeneId(), SourceType.ANN_GeneID));
}
if (!StringUtil.isBlank(pred.getFeatureId())) {
sortingCollection.add(create(ctx, pred.getFeatureId(), SourceType.ANN_FeatureID));
}
}
for (final VepPredictionParser.VepPrediction pred : tools.getVepPredictionParser().getPredictions(ctx)) {
if (!StringUtil.isBlank(pred.getGene())) {
sortingCollection.add(create(ctx, pred.getGene(), SourceType.VEP_Gene));
}
if (!StringUtil.isBlank(pred.getFeature())) {
sortingCollection.add(create(ctx, pred.getFeature(), SourceType.VEP_Feature));
}
if (!StringUtil.isBlank(pred.getSymbol())) {
sortingCollection.add(create(ctx, pred.getSymbol(), SourceType.VEP_Symbol));
}
if (!StringUtil.isBlank(pred.getHgncId())) {
sortingCollection.add(create(ctx, pred.getHgncId(), SourceType.VEP_HgncId));
}
}
}
} else /* split VCF per sliding window of variants */ if (this.splitMethod.equals(SplitMethod.VariantSlidingWindow)) {
if (this.variantsWinCount < 1) {
LOG.error("Bad value for variantsWinCount");
return -1;
}
if (this.variantsWinShift < 1 || this.variantsWinShift > this.variantsWinCount) {
LOG.error("Bad value for variantsWinShift");
return -1;
}
final List<VariantContext> buffer = new ArrayList<>(this.variantsWinCount);
/**
* routine to dump buffer into sorting collection
*/
final Runnable dumpBuffer = () -> {
if (buffer.isEmpty())
return;
final String contig = buffer.get(0).getContig();
final int chromStart = buffer.stream().mapToInt(CTX -> CTX.getStart()).min().getAsInt();
// use last of start too
final int chromEnd0 = buffer.stream().mapToInt(CTX -> CTX.getStart()).max().getAsInt();
// final int chromEnd1 = buffer.stream().mapToInt(CTX->CTX.getEnd()).max().getAsInt();
final String identifier = contig + "_" + String.format(NUM_FORMAT, chromStart) + "_" + String.format(NUM_FORMAT, chromEnd0);
for (final VariantContext ctx : buffer) {
sortingCollection.add(create(ctx, identifier, SourceType.SlidingVariants));
}
};
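// fill the buffer with lightweight copies of the variants; dump it every time it reaches variantsWinCount, then drop the first variantsWinShift entries to slide the window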
while (iter.hasNext()) {
VariantContext ctx = progress.watch(iter.next());
/* reduce the memory footprint for this context */
ctx = new VariantContextBuilder(ctx).genotypes(Collections.emptyList()).unfiltered().rmAttributes(new ArrayList<>(ctx.getAttributes().keySet())).make();
if (!buffer.isEmpty() && !buffer.get(0).getContig().equals(ctx.getContig())) {
dumpBuffer.run();
buffer.clear();
}
buffer.add(ctx);
if (buffer.size() >= this.variantsWinCount) {
dumpBuffer.run();
final int fromIndex = Math.min(this.variantsWinShift, buffer.size());
buffer.subList(0, fromIndex).clear();
}
}
dumpBuffer.run();
buffer.clear();
} else if (this.splitMethod.equals(SplitMethod.ContigSlidingWindow)) {
if (this.contigWinLength < 1) {
LOG.error("Bad value for contigWinCount");
return -1;
}
if (this.contigWinShift < 1 || this.contigWinShift > this.contigWinLength) {
LOG.error("Bad value for contigWinShift");
return -1;
}
while (iter.hasNext()) {
VariantContext ctx = progress.watch(iter.next());
/* reduce the memory footprint for this context */
ctx = new VariantContextBuilder(ctx).genotypes(Collections.emptyList()).unfiltered().rmAttributes(new ArrayList<>(ctx.getAttributes().keySet())).make();
int start = 0;
while (start <= ctx.getStart()) {
if (start + this.contigWinLength >= ctx.getStart()) {
final int chromStart = start;
final int chromEnd0 = start + this.contigWinLength;
final String identifier = ctx.getContig() + "_" + String.format(NUM_FORMAT, chromStart) + "_" + String.format(NUM_FORMAT, chromEnd0);
sortingCollection.add(create(ctx, identifier, SourceType.SlidingContig));
}
start += this.contigWinShift;
}
}
} else {
throw new IllegalStateException("No such method: " + this.splitMethod);
}
sortingCollection.doneAdding();
progress.finish();
iter.close();
iter = null;
pw = super.openFileOrStdoutAsPrintWriter(this.outputFile);
iter2 = sortingCollection.iterator();
final EqualRangeIterator<GeneLoc> eqiter = new EqualRangeIterator<>(iter2, this.compareGeneName);
int geneIdentifierId = 0;
while (eqiter.hasNext()) {
final List<GeneLoc> gene = eqiter.next();
pw.print(gene.get(0).contig);
pw.print('\t');
// -1 for BED
pw.print(gene.stream().mapToInt(G -> G.start).min().getAsInt() - 1);
pw.print('\t');
pw.print(gene.stream().mapToInt(G -> G.end).max().getAsInt());
pw.print('\t');
pw.print(this.prefix + String.format("%09d", ++geneIdentifierId));
pw.print('\t');
pw.print(gene.get(0).geneName);
pw.print('\t');
pw.print(gene.get(0).sourceType);
pw.print('\t');
pw.print(gene.size());
pw.println();
}
pw.flush();
pw.close();
pw = null;
eqiter.close();
iter2.close();
iter2 = null;
sortingCollection.cleanup();
} else {
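// second mode: a gene BED (typically produced by the first mode) is provided, so split the VCF into one VCF per BED record, optionally running a command on each file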
if (this.nJobs < 1) {
this.nJobs = Math.max(1, Runtime.getRuntime().availableProcessors());
LOG.info("setting njobs to " + this.nJobs);
}
final ExecutorService executorService;
final List<Future<Integer>> futureResults;
if (this.nJobs > 1) {
executorService = new ThreadPoolExecutor(this.nJobs, this.nJobs, 0L, TimeUnit.MILLISECONDS, new LinkedBlockingQueue<Runnable>());
futureResults = new ArrayList<>();
} else {
executorService = null;
futureResults = Collections.emptyList();
}
if (this.outputFile == null) {
LOG.error("When scanning a VCF with " + this.geneFile + ", the output file must be defined");
return -1;
}
if (!this.exec.isEmpty()) {
if (this.outputFile.getName().endsWith(".zip")) {
LOG.error("Cannot execute " + this.exec + " when saving to a zip.");
return -1;
}
}
archive = ArchiveFactory.open(this.outputFile);
PrintWriter manifest = this.deleteAfterCommand && !this.exec.isEmpty() ? // all files will be deleted, no manifest needed
new PrintWriter(new NullOuputStream()) : archive.openWriter(this.prefix + "manifest.txt");
br = IOUtils.openFileForBufferedReading(this.geneFile);
final BedLineCodec bedCodec = new BedLineCodec();
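// loop over the BED records; between records, poll the background jobs so a failed or cancelled task is detected early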
for (; ; ) {
if (!futureResults.isEmpty()) {
int i = 0;
while (i < futureResults.size()) {
final Future<Integer> r = futureResults.get(i);
if (r.isCancelled()) {
LOG.error("Task was canceled. Break.");
return -1;
} else if (r.isDone()) {
futureResults.remove(i);
int rez = r.get();
if (rez != 0) {
LOG.error("Task Failed (" + rez + "). Break");
}
} else {
i++;
}
}
}
final String line = br.readLine();
if (line == null)
break;
if (line.startsWith("#") || line.isEmpty())
continue;
final BedLine bedLine = bedCodec.decode(line);
if (bedLine == null)
continue;
// ID
final String geneIdentifier = bedLine.get(3);
// name
final String geneName = bedLine.get(4);
final SourceType sourceType = SourceType.valueOf(bedLine.get(5));
final String filename = geneIdentifier;
final String outputVcfName = (filename.startsWith(this.prefix) ? "" : this.prefix) + filename + ".vcf" + (this.compress ? ".gz" : "");
LOG.info(bedLine.getContig() + ":" + bedLine.getStart() + "-" + bedLine.getEnd() + " length :" + (bedLine.getEnd() - bedLine.getStart()));
if (bedLine.getEnd() - bedLine.getStart() > 1E6) {
LOG.warn("That's a large region ! " + bedLine);
}
OutputStream vcfOutputStream = null;
VariantContextWriter vw = null;
int countVariants = 0;
final SAMSequenceDictionaryProgress progress = new SAMSequenceDictionaryProgress(vcfFileReader.getFileHeader()).logger(LOG).prefix(geneName + " " + bedLine.getContig() + ":" + bedLine.getStart() + "-" + bedLine.getEnd());
iter = vcfFileReader.query(bedLine.getContig(), bedLine.getStart(), bedLine.getEnd());
while (iter.hasNext()) {
VariantContext ctx = progress.watch(iter.next());
switch(sourceType) {
case SlidingVariants:
{
// nothing
break;
}
case SlidingContig:
{
// nothing
break;
}
case ANN_GeneName:
case ANN_FeatureID:
case ANN_GeneID:
{
final List<String> preds = new ArrayList<>();
for (final AnnPredictionParser.AnnPrediction pred : tools.getAnnPredictionParser().getPredictions(ctx)) {
final String predictionIdentifier;
switch(sourceType) {
case ANN_GeneName:
predictionIdentifier = pred.getGeneName();
break;
case ANN_FeatureID:
predictionIdentifier = pred.getFeatureId();
break;
case ANN_GeneID:
predictionIdentifier = pred.getGeneId();
break;
default:
throw new IllegalStateException(bedLine.toString());
}
if (StringUtil.isBlank(predictionIdentifier))
continue;
if (!geneName.equals(predictionIdentifier))
continue;
preds.add(pred.getOriginalAttributeAsString());
}
if (preds.isEmpty()) {
ctx = null;
} else {
ctx = new VariantContextBuilder(ctx).rmAttribute(tools.getAnnPredictionParser().getTag()).attribute(tools.getAnnPredictionParser().getTag(), preds).make();
}
break;
}
case VEP_Gene:
case VEP_Feature:
case VEP_Symbol:
case VEP_HgncId:
{
final List<String> preds = new ArrayList<>();
for (final VepPredictionParser.VepPrediction pred : tools.getVepPredictions(ctx)) {
final String predictionIdentifier;
switch(sourceType) {
case VEP_Gene:
predictionIdentifier = pred.getGene();
break;
case VEP_Feature:
predictionIdentifier = pred.getFeature();
break;
case VEP_Symbol:
predictionIdentifier = pred.getSymbol();
break;
case VEP_HgncId:
predictionIdentifier = pred.getHgncId();
break;
default:
throw new IllegalStateException(bedLine.toString());
}
if (StringUtil.isBlank(predictionIdentifier))
continue;
if (!geneName.equals(predictionIdentifier))
continue;
preds.add(pred.getOriginalAttributeAsString());
}
if (preds.isEmpty()) {
ctx = null;
} else {
ctx = new VariantContextBuilder(ctx).rmAttribute(tools.getVepPredictionParser().getTag()).attribute(tools.getVepPredictionParser().getTag(), preds).make();
}
break;
}
default:
throw new IllegalStateException(bedLine.toString());
}
if (ctx == null)
continue;
if (vcfOutputStream == null) {
LOG.info(filename);
manifest.println(outputVcfName);
final VCFHeader header = new VCFHeader(vcfFileReader.getFileHeader());
header.addMetaDataLine(new VCFHeaderLine(VCF_HEADER_SPLITKEY, filename));
vcfOutputStream = archive.openOuputStream(outputVcfName);
vw = VCFUtils.createVariantContextWriterToOutputStream(vcfOutputStream);
vw.writeHeader(header);
}
countVariants++;
vw.add(ctx);
if (countVariants % 1000 == 0) {
LOG.info("Loading : " + geneIdentifier + " N=" + countVariants);
}
}
progress.finish();
LOG.info(geneIdentifier + " N=" + countVariants);
if (vcfOutputStream != null) {
vw.close();
vcfOutputStream.flush();
vcfOutputStream.close();
vw = null;
if (!this.exec.isEmpty()) {
final Callable<Integer> callable = () -> {
final File vcfOutFile = new File(this.outputFile, outputVcfName);
IOUtil.assertFileIsReadable(vcfOutFile);
final String vcfPath = vcfOutFile.getPath();
final StringTokenizer st = new StringTokenizer(this.exec);
final List<String> command = new ArrayList<>(1 + st.countTokens());
while (st.hasMoreTokens()) {
String token = st.nextToken().replaceAll("__PREFIX__", this.prefix).replaceAll("__CONTIG__", bedLine.getContig()).replaceAll("__CHROM__", bedLine.getContig()).replaceAll("__ID__", geneIdentifier).replaceAll("__NAME__", geneName).replaceAll("__START__", String.valueOf(bedLine.getStart())).replaceAll("__END__", String.valueOf(bedLine.getEnd())).replaceAll("__SOURCE__", sourceType.name()).replaceAll("__VCF__", vcfPath);
command.add(token);
}
LOG.info(command.stream().map(S -> "'" + S + "'").collect(Collectors.joining(" ")));
final ProcessBuilder pb = new ProcessBuilder(command);
pb.redirectErrorStream(true);
final Process p = pb.start();
final Thread stdoutThread = new Thread(() -> {
try {
InputStream in = p.getInputStream();
IOUtils.copyTo(in, stdout());
} catch (Exception err) {
LOG.error(err);
}
});
stdoutThread.start();
int exitValue = p.waitFor();
if (exitValue != 0) {
LOG.error("Command failed (" + exitValue + "):" + String.join(" ", command));
return -1;
} else {
if (deleteAfterCommand) {
if (!vcfOutFile.delete()) {
LOG.warn("Cannot delete " + vcfOutFile);
}
}
return 0;
}
};
if (executorService != null) {
final Future<Integer> rez = executorService.submit(callable);
futureResults.add(rez);
} else {
final int ret = callable.call();
if (ret != 0) {
LOG.error("Error with process (" + ret + ")");
return ret;
}
}
}
} else {
manifest.println("#" + filename);
LOG.warn("No Variant Found for " + line);
}
iter.close();
}
if (executorService != null) {
LOG.info("shutdown");
executorService.shutdown();
executorService.awaitTermination(365, TimeUnit.DAYS);
}
br.close();
br = null;
manifest.close();
archive.close();
archive = null;
LOG.info("Done");
}
vcfFileReader.close();
vcfFileReader = null;
return 0;
} catch (Exception e) {
LOG.error(e);
return -1;
} finally {
{
CloserUtil.close(iter2);
CloserUtil.close(iter);
CloserUtil.close(pw);
CloserUtil.close(vcfFileReader);
CloserUtil.close(br);
CloserUtil.close(archive);
}
}
}
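The __TOKEN__ substitution applied to the --exec template can be illustrated in isolation. In the sketch below the template and file names are invented, and only the java.util classes already used above are assumed.
// minimal sketch of the placeholder substitution performed on each word of the --exec template
final String exec = "wc -l __VCF__";   // hypothetical template
final StringTokenizer st = new StringTokenizer(exec);
final List<String> command = new ArrayList<>();
while (st.hasMoreTokens()) {
command.add(st.nextToken().replaceAll("__VCF__", "/tmp/out/GENE_000000001.vcf.gz").replaceAll("__ID__", "GENE_000000001"));
}
// the resulting list is then handed to a ProcessBuilder, as in doWork above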
Use of com.github.lindenb.jvarkit.util.bio.bed.BedLine in project jvarkit by lindenb.
The class BamStats04, method doWork.
@Override
public int doWork(final List<String> args) {
if (this.bedFile == null || !this.bedFile.exists()) {
LOG.error("undefined option -B (bed file)");
return -1;
}
if (args.isEmpty()) {
LOG.error("Bam files missing");
return -1;
}
if (this.minCoverages.isEmpty()) {
this.minCoverages.add(0);
}
final String NO_PARTITION = "N/A";
BufferedReader bedIn = null;
final List<SamReader> samReaders = new ArrayList<>(args.size());
PrintWriter pw = null;
ReferenceGenome referenceGenome = null;
ReferenceContig referenceContig = null;
try {
final BedLineCodec codec = new BedLineCodec();
final Set<String> all_partitions = new TreeSet<>();
bedIn = IOUtils.openFileForBufferedReading(this.bedFile);
SAMSequenceDictionary dict = null;
for (final String filename : IOUtils.unrollFiles(args)) {
LOG.info(filename);
final SamReader samReader = super.openSamReader(filename);
if (!samReader.hasIndex()) {
LOG.error(filename + " is not indexed");
samReader.close();
return -1;
}
final SAMFileHeader samFileheader = samReader.getFileHeader();
if (samFileheader == null) {
LOG.error("SAM file is missing a header " + filename);
return -1;
}
final List<SAMReadGroupRecord> readGroups = samFileheader.getReadGroups();
if (readGroups == null || readGroups.isEmpty()) {
LOG.warn("No Read group (RG) in the header of " + filename);
all_partitions.add(NO_PARTITION);
} else {
for (final SAMReadGroupRecord rg : readGroups) {
all_partitions.add(this.partition.apply(rg, NO_PARTITION));
}
}
final SAMSequenceDictionary d = samFileheader.getSequenceDictionary();
if (d == null) {
samReader.close();
LOG.error(JvarkitException.BamDictionaryMissing.getMessage(filename));
return -1;
}
samReaders.add(samReader);
if (dict == null) {
dict = d;
} else if (!SequenceUtil.areSequenceDictionariesEqual(d, dict)) {
LOG.error(JvarkitException.DictionariesAreNotTheSame.getMessage(d, dict));
return -1;
}
}
if (samReaders.isEmpty()) {
LOG.error("No Bam defined");
return -1;
}
if (!StringUtil.isBlank(this.faidxUri)) {
referenceGenome = new ReferenceGenomeFactory().open(this.faidxUri);
}
pw = super.openFileOrStdoutAsPrintWriter(this.outputFile);
pw.print("#chrom\tstart\tend\tlength\t" + this.partition.name() + (referenceGenome == null ? "" : "\tgc_percent"));
pw.print("\tmincov\tmaxcov");
for (final int MIN_COVERAGE : this.minCoverages) {
pw.print("\tmeancov_" + MIN_COVERAGE + "\tmediancov_" + MIN_COVERAGE + "\tnocoveragebp_" + MIN_COVERAGE + "\tpercentcovered_" + MIN_COVERAGE);
}
pw.println();
String line = null;
while ((line = bedIn.readLine()) != null) {
if (line.isEmpty() || line.startsWith("#"))
continue;
final BedLine bedLine = codec.decode(line);
if (bedLine == null)
continue;
if (dict.getSequence(bedLine.getContig()) == null) {
LOG.error("Unknown contig in " + line);
return -1;
}
if (bedLine.getStart() > bedLine.getEnd()) {
LOG.info("ignoring " + bedLine);
continue;
}
if (referenceGenome != null && (referenceContig == null || !referenceContig.hasName(bedLine.getContig()))) {
referenceContig = referenceGenome.getContig(bedLine.getContig());
}
final Map<String, IntervalStat> sample2stats = new HashMap<>(all_partitions.size());
for (final String rgId : all_partitions) {
sample2stats.put(rgId, new IntervalStat(bedLine));
}
for (final SamReader samReader : samReaders) {
/**
* start - 1-based, inclusive start of interval of interest. Zero implies start of the reference sequence.
* end - 1-based, inclusive end of interval of interest. Zero implies end of the reference sequence.
*/
final SAMRecordIterator r = samReader.queryOverlapping(bedLine.getContig(), bedLine.getStart(), bedLine.getEnd());
while (r.hasNext()) {
final SAMRecord rec = r.next();
if (rec.getReadUnmappedFlag())
continue;
if (this.filter.filterOut(rec))
continue;
if (!rec.getReferenceName().equals(bedLine.getContig()))
continue;
final String partition;
final SAMReadGroupRecord group = rec.getReadGroup();
if (group == null) {
partition = NO_PARTITION;
} else {
final String name = this.partition.apply(group);
partition = (StringUtil.isBlank(name) ? NO_PARTITION : name);
}
IntervalStat stat = sample2stats.get(partition);
if (stat == null) {
stat = new IntervalStat(bedLine);
sample2stats.put(partition, stat);
}
stat.visit(rec);
}
r.close();
}
// end of loop over sam Readers
final OptionalInt gcPercentInt = (referenceContig == null ? OptionalInt.empty() : referenceContig.getGCPercent(bedLine.getStart() - 1, bedLine.getEnd()).getGCPercentAsInteger());
for (final String partitionName : sample2stats.keySet()) {
final IntervalStat stat = sample2stats.get(partitionName);
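// sort the per-base depths so counts[0] and counts[counts.length - 1] give the minimum and maximum coverage printed below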
Arrays.sort(stat.counts);
pw.print(bedLine.getContig() + "\t" + (bedLine.getStart() - 1) + "\t" + (bedLine.getEnd()) + "\t" + stat.counts.length + "\t" + partitionName);
if (referenceGenome != null) {
pw.print("\t");
if (gcPercentInt.isPresent())
pw.print(gcPercentInt.getAsInt());
}
pw.print("\t" + stat.counts[0] + "\t" + stat.counts[stat.counts.length - 1]);
for (final int MIN_COVERAGE : this.minCoverages) {
/**
* map depth to 0 if depth <= MIN_COVERAGE
*/
final IntUnaryOperator depthAdjuster = (D) -> (D <= MIN_COVERAGE ? 0 : D);
final int count_no_coverage = (int) Arrays.stream(stat.counts).filter(D -> depthAdjuster.applyAsInt(D) <= 0).count();
final double mean = Percentile.average().evaluate(Arrays.stream(stat.counts).map(depthAdjuster));
final double median_depth = Percentile.median().evaluate(Arrays.stream(stat.counts).map(depthAdjuster));
pw.print("\t" + mean + "\t" + median_depth + "\t" + count_no_coverage + "\t" + (int) (((stat.counts.length - count_no_coverage) / (double) stat.counts.length) * 100.0));
}
pw.println();
}
}
pw.flush();
pw.close();
pw = null;
LOG.info("done");
return RETURN_OK;
} catch (final Exception err) {
LOG.error(err);
return -1;
} finally {
CloserUtil.close(referenceGenome);
CloserUtil.close(pw);
CloserUtil.close(bedIn);
CloserUtil.close(samReaders);
}
}
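For reference, the per-interval coverage summary can be reproduced with plain java.util.stream calls; the sketch below only illustrates the same arithmetic on made-up depths and does not use the jvarkit Percentile API.
// illustrative re-computation of mean, median, no-coverage count and percent covered for one interval
final int[] depths = {0, 3, 7, 12, 0, 5};   // hypothetical per-base coverage of one BED interval
final int minCov = 4;                       // one of the -mincov thresholds
final int[] adjusted = Arrays.stream(depths).map(d -> d <= minCov ? 0 : d).toArray();
final long noCoverage = Arrays.stream(adjusted).filter(d -> d <= 0).count();
final double mean = Arrays.stream(adjusted).average().orElse(0.0);
final int[] sorted = Arrays.stream(adjusted).sorted().toArray();
final double median = sorted.length % 2 == 1 ? sorted[sorted.length / 2] : (sorted[sorted.length / 2 - 1] + sorted[sorted.length / 2]) / 2.0;
final int percentCovered = (int) (((depths.length - noCoverage) / (double) depths.length) * 100.0);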