Use of com.github.lindenb.jvarkit.bed.BedLineReader in project jvarkit by lindenb.
The class Biostar336589, method doWork:
@Override
public int doWork(final List<String> args) {
if (this.histogram_mode && this.input_is_bedpe) {
LOG.error("cannot use both histogram and interact format");
return -1;
}
if (this.faidx == null) {
LOG.error("undefined REF");
return -1;
}
if (this.min_internal_radius < 10) {
this.min_internal_radius = 10;
}
if (this.feature_height < 2) {
this.feature_height = 2;
}
if (this.arrow_size <= 0) {
this.arrow_size = 0.01;
}
if (this.contig_height <= 0) {
this.contig_height = 2.0;
}
try {
this.scoreColorStart = this.colorUtils.parse(this.colorScoreStartStr);
if (this.scoreColorStart == null)
this.scoreColorStart = Color.WHITE;
} catch (final Exception err) {
this.scoreColorStart = Color.WHITE;
}
try {
this.scoreColorEnd = this.colorUtils.parse(this.colorScoreEndStr);
if (this.scoreColorEnd == null)
this.scoreColorEnd = Color.BLACK;
} catch (final Exception err) {
this.scoreColorEnd = Color.BLACK;
}
int maxScore = 0;
try {
this.dict = SequenceDictionaryUtils.extractRequired(this.faidx);
if (this.skip_chromosome_size > 0) {
this.dict = new SAMSequenceDictionary(this.dict.getSequences().stream().filter(S -> S.getSequenceLength() > this.skip_chromosome_size).collect(Collectors.toList()));
}
if (this.dict.isEmpty()) {
throw new JvarkitException.DictionaryMissing(this.faidx.toString());
}
this.reference_length = this.dict.getReferenceLength();
this.tid_to_start = new long[this.dict.size()];
Arrays.fill(this.tid_to_start, 0L);
long n = 0;
for (int i = 0; i < dict.size(); i++) {
this.tid_to_start[i] = n;
n += dict.getSequence(i).getSequenceLength();
}
final List<Track> tracks = new ArrayList<>(1 + args.size());
final Set<String> skipped_contigs = new HashSet<>();
final ContigNameConverter converter = ContigNameConverter.fromOneDictionary(this.dict);
for (final String filename : args.isEmpty() ? Collections.singletonList((String) null) : args) {
final Track currentTrack = new Track(tracks.size());
if (!StringUtil.isBlank(filename))
currentTrack.name = filename;
tracks.add(currentTrack);
// when args is empty, filename is null and stands for stdin; Paths.get(null) would throw a NullPointerException
try (BedLineReader br = filename == null ? new BedLineReader(IOUtils.openStdinForBufferedReader(), "stdin") : new BedLineReader(Paths.get(filename))) {
while (br.hasNext()) {
final BedLine bedLine = br.next();
final String newCtg = converter.apply(bedLine.getContig());
if (StringUtil.isBlank(newCtg)) {
if (skipped_contigs.add(bedLine.getContig())) {
LOG.warn("unknown contig " + bedLine.getContig() + ". Skipping.");
}
continue;
}
final SAMSequenceRecord ssr = this.dict.getSequence(newCtg);
if (ssr == null)
continue;
if (bedLine.getStart() > ssr.getSequenceLength())
continue;
if (bedLine.getEnd() < 1)
continue;
final Arc arc;
if (!this.input_is_bedpe) {
arc = new Arc();
arc.tid = ssr.getSequenceIndex();
arc.start = Math.max(bedLine.getStart(), 0);
arc.end = Math.min(bedLine.getEnd(), ssr.getSequenceLength());
arc.strand = toStrand(bedLine.getOrDefault(5, "."));
} else {
final ArcInterval arci = new ArcInterval();
arc = arci;
for (int side = 0; side < 2; ++side) {
// UCSC interact format: the source block starts at column index 8, the target block at index 13
final int shift = (side == 0 ? 8 : 13);
final String ctg2 = bedLine.getOrDefault(shift + 0, null);
if (StringUtil.isBlank(ctg2)) {
if (skipped_contigs.add(ctg2)) {
LOG.warn("unknown contig " + (side + 1) + "/2 " + bedLine + ". Skipping.");
}
continue;
}
final SAMSequenceRecord ssr2 = this.dict.getSequence(ctg2);
if (ssr2 == null)
continue;
int tid2 = ssr2.getSequenceIndex();
int start2 = -1;
int end2 = -1;
byte strand2 = -1;
try {
start2 = Math.max(0, Integer.parseInt(bedLine.getOrDefault(shift + 1, "")));
end2 = Math.min(ssr2.getSequenceLength(), Integer.parseInt(bedLine.getOrDefault(shift + 2, "")));
strand2 = toStrand(bedLine.getOrDefault(shift + 4, "."));
} catch (final NumberFormatException err2) {
tid2 = -1;
start2 = -1;
end2 = -1;
}
if (side == 0) {
arci.tid = tid2;
arci.start = start2;
arci.end = end2;
arci.strand = strand2;
} else {
arci.targetTid = tid2;
arci.targetStart = start2;
arci.targetEnd = end2;
arci.targetStrand = strand2;
}
}
if (arci.tid < 0 || arci.targetTid < 0 || arci.end < arci.start || arci.targetEnd < arci.targetStart) {
LOG.warn("bad interval bed record " + bedLine + ". Skipping.");
continue;
}
}
arc.name = bedLine.getOrDefault(3, "");
final String scoreStr = bedLine.getOrDefault(4, "0");
if (StringUtil.isBlank(scoreStr) || scoreStr.equals(".")) {
if (!this.input_is_bedpe && this.histogram_mode) {
LOG.warn("no score defined for " + bedLine.join() + " in histogram mode. skipping.");
continue;
}
} else {
try {
arc.score = Math.min(1000, Math.max(0, Integer.parseInt(scoreStr)));
maxScore = Math.max(maxScore, arc.score);
} catch (final NumberFormatException err) {
LOG.warn("bad score for " + bedLine.join());
if (this.histogram_mode) {
LOG.warn("skipping.");
continue;
}
arc.score = 0;
}
}
// color
arc.cssStyle = toCss(bedLine.getOrDefault(this.input_is_bedpe ? 7 : 8, ""));
final TrackContig currTrackContig = currentTrack.get(arc.tid);
// only one row for histograms
if (this.histogram_mode) {
if (currTrackContig.rows.isEmpty()) {
currTrackContig.rows.add(new LinkedList<>());
}
currTrackContig.rows.get(0).add(arc);
} else {
int y = 0;
for (y = 0; y < currTrackContig.rows.size(); ++y) {
final List<Arc> row = currTrackContig.rows.get(y);
if (row.stream().noneMatch(A -> A.withinDistanceOf(arc, this.histogram_mode ? 0 : this.min_distance_bp))) {
// add in front, should be faster if data are sorted
row.add(0, arc);
break;
}
}
if (y == currTrackContig.rows.size()) {
final List<Arc> row = new LinkedList<>();
currTrackContig.rows.add(row);
row.add(arc);
}
}
}
}
}
LOG.info("number of arcs : " + tracks.stream().mapToInt(T -> T.maxRows()).sum());
try (PrintWriter out = super.openPathOrStdoutAsPrintWriter(this.outputFile)) {
final XMLOutputFactory xof = XMLOutputFactory.newInstance();
final XMLStreamWriter w = xof.createXMLStreamWriter(out);
w.writeStartDocument("UTF-8", "1.0");
w.writeStartElement("svg");
if (this.linear_width <= 0) {
writeCircular(w, tracks, maxScore);
} else {
writeLinear(w, tracks, maxScore);
}
// svg
w.writeEndElement();
w.writeEndDocument();
w.flush();
w.close();
out.flush();
}
return 0;
} catch (final Exception err) {
LOG.error(err);
return -1;
} finally {
}
}
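Stripped of the SVG and track bookkeeping, the BedLineReader usage above reduces to a read-remap-clamp loop: open a BED path, convert each record's contig name through the reference dictionary, and clamp coordinates to the contig bounds. Below is a minimal standalone sketch of that loop, not jvarkit code: the input file names are placeholders and the import paths marked "assumed" may differ between jvarkit versions.
// Sketch only: read a BED file, remap contigs to a reference dictionary, clamp coordinates.
import java.nio.file.Paths;
import htsjdk.samtools.SAMSequenceDictionary;
import htsjdk.samtools.SAMSequenceRecord;
import com.github.lindenb.jvarkit.bed.BedLineReader;                  // package per the title above
import com.github.lindenb.jvarkit.util.bio.bed.BedLine;               // assumed path
import com.github.lindenb.jvarkit.util.bio.SequenceDictionaryUtils;   // assumed path
import com.github.lindenb.jvarkit.util.bio.fasta.ContigNameConverter; // assumed path

public class BedRemapSketch {
    public static void main(final String[] args) throws Exception {
        final SAMSequenceDictionary dict = SequenceDictionaryUtils.extractRequired(Paths.get("ref.fa")); // placeholder reference
        final ContigNameConverter converter = ContigNameConverter.fromOneDictionary(dict);
        try (BedLineReader br = new BedLineReader(Paths.get("in.bed"))) {            // placeholder BED
            while (br.hasNext()) {
                final BedLine bed = br.next();
                final String ctg = converter.apply(bed.getContig());  // e.g. "chr1" -> "1"; blank if unknown
                if (ctg == null || ctg.isEmpty()) continue;           // contig absent from the dictionary
                final SAMSequenceRecord ssr = dict.getSequence(ctg);
                final int start = Math.max(bed.getStart(), 1);                    // BedLine coordinates are 1-based
                final int end = Math.min(bed.getEnd(), ssr.getSequenceLength());  // clamp to the contig length
                if (start > end) continue;                                        // record falls off the contig
                System.out.println(ctg + "\t" + (start - 1) + "\t" + end);        // back to 0-based BED
            }
        }
    }
}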
Use of com.github.lindenb.jvarkit.bed.BedLineReader in project jvarkit by lindenb.
The class Biostar105754, method doWork:
@Override
public int doWork(final List<String> args) {
if (this.bigWigFile == null) {
LOG.error("Big wig file undefined option");
return -1;
}
try {
LOG.info("Opening " + this.bigWigFile);
this.bbFileReader = new BBFileReader(this.bigWigFile);
if (!this.bbFileReader.isBigWigFile()) {
LOG.error("File " + this.bigWigFile + " is not a bigwig file");
return -1;
}
try (PrintWriter out = super.openPathOrStdoutAsPrintWriter(outputFile)) {
if (args.isEmpty()) {
try (BedLineReader r = new BedLineReader(IOUtils.openStdinForBufferedReader(), "stdin")) {
run(r, out);
}
} else {
for (final String filename : args) {
try (BedLineReader r = new BedLineReader(IOUtils.openURIForBufferedReading(filename), filename)) {
run(r, out);
}
}
}
out.flush();
}
return 0;
} catch (final Throwable err) {
LOG.error(err);
return -1;
} finally {
CloserUtil.close(bbFileReader);
bbFileReader = null;
}
}
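The detail worth keeping from this snippet is the two-argument BedLineReader constructor: it wraps any BufferedReader together with a display name used in error messages, so stdin and file/URI inputs share one code path. A hedged sketch of that dispatch follows; the IOUtils import path is an assumption.
// Sketch only: route stdin or file/URI arguments into the same BedLineReader consumer.
import java.util.List;
import com.github.lindenb.jvarkit.bed.BedLineReader;
import com.github.lindenb.jvarkit.io.IOUtils; // assumed path

public final class BedInputSketch {
    static void dump(final BedLineReader r) {
        // stream() on BedLineReader is shown in the GcPercentAndDepth snippet below
        r.stream().forEach(b -> System.out.println(b.getContig() + ":" + b.getStart() + "-" + b.getEnd()));
    }

    public static void run(final List<String> args) throws Exception {
        if (args.isEmpty()) {
            try (BedLineReader r = new BedLineReader(IOUtils.openStdinForBufferedReader(), "stdin")) {
                dump(r);
            }
        } else {
            for (final String f : args) {
                try (BedLineReader r = new BedLineReader(IOUtils.openURIForBufferedReading(f), f)) {
                    dump(r);
                }
            }
        }
    }
}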
Use of com.github.lindenb.jvarkit.bed.BedLineReader in project jvarkit by lindenb.
The class Biostar78285, method doWork:
@Override
public int doWork(final List<String> args) {
if (this.gc_percent_window < 1) {
LOG.error("Bad GC% window size:" + this.gc_percent_window);
return -1;
}
final List<File> bamFiles = IOUtil.unrollFiles(args.stream().map(F -> new File(F)).collect(Collectors.toCollection(HashSet::new)), ".bam");
SAMSequenceDictionary dict = null;
final List<SamReader> samReaders = new ArrayList<>();
final List<CloseableIterator<SAMRecord>> samIterators = new ArrayList<>();
final TreeSet<String> samples = new TreeSet<>();
final String DEFAULT_PARTITION = "UNDEFINED_PARTITION";
ReferenceSequenceFile indexedFastaSequenceFile = null;
VariantContextWriter out = null;
try {
final SamReaderFactory samReaderFactory = SamReaderFactory.makeDefault().validationStringency(ValidationStringency.LENIENT);
for (final File bamFile : bamFiles) {
LOG.info("Opening " + bamFile);
final SamReader samReader = samReaderFactory.open(bamFile);
samReaders.add(samReader);
final SAMFileHeader header = samReader.getFileHeader();
if (header == null) {
LOG.error("No header in " + bamFile);
return -1;
}
JvarkitException.BamBadSortOrder.verify(SortOrder.coordinate, header);
samples.addAll(header.getReadGroups().stream().map(RG -> this.partition.apply(RG, DEFAULT_PARTITION)).collect(Collectors.toSet()));
final SAMSequenceDictionary currDict = header.getSequenceDictionary();
if (currDict == null) {
LOG.error("SamFile doesn't contain a SAMSequenceDictionary : " + bamFile);
return -1;
}
if (dict == null) {
dict = currDict;
} else if (!SequenceUtil.areSequenceDictionariesEqual(dict, currDict)) {
LOG.error(JvarkitException.DictionariesAreNotTheSame.getMessage(dict, currDict));
return -1;
}
}
if (samReaders.isEmpty()) {
LOG.error("no bam");
return -1;
}
if (dict == null) {
LOG.error("no dictionary");
return -1;
}
final QueryInterval[] intervals;
if (this.captureBed != null) {
LOG.info("Opening " + this.captureBed);
ContigNameConverter.setDefaultAliases(dict);
final List<QueryInterval> L = new ArrayList<>();
try (BedLineReader li = new BedLineReader(this.captureBed)) {
while (li.hasNext()) {
final BedLine bed = li.next();
if (bed == null)
continue;
final QueryInterval q = bed.toQueryInterval(dict);
L.add(q);
}
}
intervals = QueryInterval.optimizeIntervals(L.toArray(new QueryInterval[L.size()]));
} else {
intervals = null;
}
for (final SamReader samReader : samReaders) {
LOG.info("querying " + samReader.getResourceDescription());
final CloseableIterator<SAMRecord> iter;
if (intervals == null) {
iter = samReader.iterator();
} else {
iter = samReader.queryOverlapping(intervals);
}
samIterators.add(new FilterIterator<SAMRecord>(iter, R -> !R.getReadUnmappedFlag() && !filter.filterOut(R)));
}
if (this.refFile != null) {
indexedFastaSequenceFile = ReferenceSequenceFileFactory.getReferenceSequenceFile(this.refFile);
final SAMSequenceDictionary refdict = SequenceDictionaryUtils.extractRequired(indexedFastaSequenceFile);
ContigNameConverter.setDefaultAliases(refdict);
if (!SequenceUtil.areSequenceDictionariesEqual(dict, refdict)) {
LOG.error(JvarkitException.DictionariesAreNotTheSame.getMessage(dict, refdict));
return -1;
}
}
out = openVariantContextWriter(this.outputFile);
final Set<VCFHeaderLine> metaData = new HashSet<>();
VCFStandardHeaderLines.addStandardFormatLines(metaData, true, VCFConstants.DEPTH_KEY, VCFConstants.GENOTYPE_KEY);
VCFStandardHeaderLines.addStandardInfoLines(metaData, true, VCFConstants.DEPTH_KEY);
metaData.add(new VCFFormatHeaderLine("DF", 1, VCFHeaderLineType.Integer, "Number of Reads on plus strand"));
metaData.add(new VCFFormatHeaderLine("DR", 1, VCFHeaderLineType.Integer, "Number of Reads on minus strand"));
metaData.add(new VCFInfoHeaderLine("AVG_DP", 1, VCFHeaderLineType.Float, "Mean depth"));
metaData.add(new VCFInfoHeaderLine("MEDIAN_DP", 1, VCFHeaderLineType.Float, "Median depth"));
metaData.add(new VCFInfoHeaderLine("MIN_DP", 1, VCFHeaderLineType.Integer, "Min depth"));
metaData.add(new VCFInfoHeaderLine("MAX_DP", 1, VCFHeaderLineType.Integer, "Max depth"));
metaData.add(new VCFHeaderLine(Biostar78285.class.getSimpleName() + ".SamFilter", this.filter.toString()));
for (final Integer treshold : this.minDepthTresholds) {
metaData.add(new VCFFilterHeaderLine("DP_LT_" + treshold, "All genotypes have DP< " + treshold));
metaData.add(new VCFInfoHeaderLine("NUM_DP_LT_" + treshold, 1, VCFHeaderLineType.Integer, "Number of genotypes having DP< " + treshold));
metaData.add(new VCFInfoHeaderLine("FRACT_DP_LT_" + treshold, 1, VCFHeaderLineType.Float, "Fraction of genotypes having DP< " + treshold));
}
if (indexedFastaSequenceFile != null) {
metaData.add(new VCFInfoHeaderLine("GC_PERCENT", 1, VCFHeaderLineType.Integer, "GC% window_size:" + this.gc_percent_window));
}
final List<Allele> refAlleles = Collections.singletonList(Allele.create("N", true));
final List<Allele> NO_CALLS = Arrays.asList(Allele.NO_CALL, Allele.NO_CALL);
final VCFHeader vcfHeader = new VCFHeader(metaData, samples);
vcfHeader.setSequenceDictionary(dict);
out.writeHeader(vcfHeader);
final SAMRecordCoordinateComparator samRecordCoordinateComparator = new SAMRecordCoordinateComparator();
final PeekableIterator<SAMRecord> peekIter = new PeekableIterator<>(new MergingIterator<>((R1, R2) -> samRecordCoordinateComparator.fileOrderCompare(R1, R2), samIterators));
final SAMSequenceDictionaryProgress progress = new SAMSequenceDictionaryProgress(dict);
for (final SAMSequenceRecord ssr : dict.getSequences()) {
final IntervalTree<Boolean> capturePos;
if (intervals != null) {
if (!Arrays.stream(intervals).anyMatch(I -> I.referenceIndex == ssr.getSequenceIndex())) {
continue;
}
capturePos = new IntervalTree<>();
Arrays.stream(intervals).filter(I -> I.referenceIndex == ssr.getSequenceIndex()).forEach(I -> capturePos.put(I.start, I.end, true));
} else {
capturePos = null;
}
final GenomicSequence genomicSequence;
if (indexedFastaSequenceFile != null && indexedFastaSequenceFile.getSequenceDictionary().getSequence(ssr.getSequenceName()) != null) {
genomicSequence = new GenomicSequence(indexedFastaSequenceFile, ssr.getSequenceName());
} else {
genomicSequence = null;
}
final List<SAMRecord> buffer = new ArrayList<>();
for (int ssr_pos = 1; ssr_pos <= ssr.getSequenceLength(); ++ssr_pos) {
if (capturePos != null && !capturePos.overlappers(ssr_pos, ssr_pos).hasNext())
continue;
progress.watch(ssr.getSequenceName(), ssr_pos);
while (peekIter.hasNext()) {
final SAMRecord rec = peekIter.peek();
if (rec.getReadUnmappedFlag()) {
// consume
peekIter.next();
continue;
}
if (this.filter.filterOut(rec)) {
// consume
peekIter.next();
continue;
}
if (rec.getReferenceIndex() < ssr.getSequenceIndex()) {
throw new IllegalStateException("should not happen");
}
if (rec.getReferenceIndex() > ssr.getSequenceIndex()) {
break;
}
if (rec.getAlignmentEnd() < ssr_pos) {
throw new IllegalStateException("should not happen");
}
if (rec.getAlignmentStart() > ssr_pos) {
break;
}
buffer.add(peekIter.next());
}
int x = 0;
while (x < buffer.size()) {
final SAMRecord R = buffer.get(x);
if (R.getReferenceIndex() != ssr.getSequenceIndex() || R.getAlignmentEnd() < ssr_pos) {
buffer.remove(x);
} else {
x++;
}
}
final Map<String, PosInfo> count = samples.stream().map(S -> new PosInfo(S)).collect(Collectors.toMap(P -> P.sample, Function.identity()));
for (final SAMRecord rec : buffer) {
if (rec.getReferenceIndex() != ssr.getSequenceIndex())
throw new IllegalStateException("should not happen");
if (rec.getAlignmentEnd() < ssr_pos)
continue;
if (rec.getAlignmentStart() > ssr_pos)
continue;
final Cigar cigar = rec.getCigar();
if (cigar == null)
continue;
int refpos = rec.getAlignmentStart();
final String sample = this.partition.getPartion(rec, DEFAULT_PARTITION);
for (final CigarElement ce : cigar.getCigarElements()) {
if (refpos > ssr_pos)
break;
final CigarOperator op = ce.getOperator();
if (op.consumesReferenceBases()) {
if (op.consumesReadBases()) {
if (refpos <= ssr_pos && ssr_pos <= refpos + ce.getLength()) {
final PosInfo posInfo = count.get(sample);
if (posInfo != null) {
posInfo.dp++;
if (rec.getReadNegativeStrandFlag()) {
posInfo.negative_strand++;
}
}
break;
}
}
refpos += ce.getLength();
}
}
}
final VariantContextBuilder vcb = new VariantContextBuilder();
final Set<String> filters = new HashSet<>();
vcb.chr(ssr.getSequenceName());
vcb.start(ssr_pos);
vcb.stop(ssr_pos);
if (genomicSequence == null) {
vcb.alleles(refAlleles);
} else {
vcb.alleles(Collections.singletonList(Allele.create((byte) genomicSequence.charAt(ssr_pos - 1), true)));
final GenomicSequence.GCPercent gcp = genomicSequence.getGCPercent(Math.max((ssr_pos - 1) - this.gc_percent_window, 0), Math.min(ssr_pos + this.gc_percent_window, ssr.getSequenceLength()));
if (!gcp.isEmpty()) {
vcb.attribute("GC_PERCENT", gcp.getGCPercentAsInteger());
}
}
vcb.attribute(VCFConstants.DEPTH_KEY, count.values().stream().mapToInt(S -> S.dp).sum());
vcb.genotypes(count.values().stream().map(C -> new GenotypeBuilder(C.sample, NO_CALLS).DP(C.dp).attribute("DR", C.negative_strand).attribute("DF", C.dp - C.negative_strand).make()).collect(Collectors.toList()));
for (final Integer treshold : this.minDepthTresholds) {
final int count_lt = (int) count.values().stream().filter(S -> S.dp < treshold).count();
if (count_lt == samples.size()) {
filters.add("DP_LT_" + treshold);
}
vcb.attribute("NUM_DP_LT_" + treshold, count_lt);
if (!samples.isEmpty()) {
vcb.attribute("FRACT_DP_LT_" + treshold, count_lt / (float) samples.size());
}
}
if (!samples.isEmpty()) {
final int[] array = count.values().stream().mapToInt(S -> S.dp).toArray();
vcb.attribute("AVG_DP", Percentile.average().evaluate(array).getAsDouble());
vcb.attribute("MEDIAN_DP", Percentile.median().evaluate(array).getAsDouble());
vcb.attribute("MIN_DP", (int) Percentile.min().evaluate(array).getAsDouble());
vcb.attribute("MAX_DP", (int) Percentile.max().evaluate(array).getAsDouble());
}
if (filters.isEmpty()) {
vcb.passFilters();
} else {
vcb.filters(filters);
}
out.add(vcb.make());
}
}
progress.finish();
peekIter.close();
out.close();
out = null;
return 0;
} catch (final Exception err) {
LOG.error(err);
return -1;
} finally {
CloserUtil.close(out);
CloserUtil.close(samIterators);
CloserUtil.close(samReaders);
CloserUtil.close(indexedFastaSequenceFile);
}
}
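The BED-specific part of this long method is small: each BedLine becomes an htsjdk QueryInterval via toQueryInterval(dict), and QueryInterval.optimizeIntervals sorts and merges the list before the multi-BAM overlap query. A minimal sketch of just that step, under the same caveat about jvarkit import paths:
// Sketch only: load a capture BED into an optimized QueryInterval[] for SamReader.queryOverlapping().
import java.io.IOException;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.List;
import htsjdk.samtools.QueryInterval;
import htsjdk.samtools.SAMSequenceDictionary;
import com.github.lindenb.jvarkit.bed.BedLineReader;

public final class CaptureIntervalsSketch {
    public static QueryInterval[] load(final Path bedPath, final SAMSequenceDictionary dict) throws IOException {
        final List<QueryInterval> list = new ArrayList<>();
        try (BedLineReader li = new BedLineReader(bedPath)) {
            while (li.hasNext()) {
                // toQueryInterval resolves the record's contig against 'dict', as in the loop above
                list.add(li.next().toQueryInterval(dict));
            }
        }
        // sort, then merge overlapping/adjacent intervals so the BAM query visits each region once
        return QueryInterval.optimizeIntervals(list.toArray(new QueryInterval[0]));
    }
}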
Use of com.github.lindenb.jvarkit.bed.BedLineReader in project jvarkit by lindenb.
The class FaidxSplitter, method doWork:
@Override
public int doWork(final List<String> args) {
try {
if (this.faidx == null) {
LOG.error("reference undefined");
return -1;
}
if (window_size <= 0 || overlap_size >= window_size || this.small_size < 0) {
LOG.error("bad parameters");
return -1;
}
final SAMSequenceDictionary dict = SequenceDictionaryUtils.extractRequired(this.faidx);
final ContigNameConverter contigNameConverter = ContigNameConverter.fromOneDictionary(dict);
final Pattern excludePat = Pattern.compile(this.excludeChromStr);
final IntervalTreeMap<Interval> intervals1 = new IntervalTreeMap<>();
dict.getSequences().stream().filter(R -> !excludePat.matcher(R.getSequenceName()).matches()).map(SSR -> new Interval(SSR)).forEach(I -> intervals1.put(I, I));
if (intervals1.isEmpty()) {
LOG.error("No sequence in dict after filtering.");
return -1;
}
final IntervalTreeMap<Interval> geneMap = new IntervalTreeMap<>();
final Function<Interval, Interval> findGene = I -> {
final Collection<Interval> coll = geneMap.getOverlapping(I);
if (coll.isEmpty())
return null;
final int min = coll.stream().mapToInt(G -> G.getStart()).min().getAsInt();
final int max = coll.stream().mapToInt(G -> G.getEnd()).max().getAsInt();
return new Interval(I.getContig(), min, max + this.overlap_size + 1);
};
if (this.geneSource != null) {
LOG.info("loading genes " + this.geneSource);
final Set<String> notFound = new HashSet<>();
try (final BedLineReader br = new BedLineReader(IOUtils.openURIForBufferedReading(this.geneSource), this.geneSource)) {
while (br.hasNext()) {
final BedLine gene = br.next();
if (gene == null)
continue;
final String contig2 = contigNameConverter.apply(gene.getContig());
if (StringUtil.isBlank(contig2)) {
if (notFound.add(gene.getContig()))
LOG.warn("gene contig not found :" + gene.getContig());
continue;
}
if (excludePat.matcher(contig2).matches())
continue;
final Interval g = new Interval(contig2, gene.getStart(), gene.getEnd());
if (geneMap.getOverlapping(g).stream().anyMatch(X -> X.contains(g)))
continue;
geneMap.put(g, g);
}
}
}
if (this.gapSource != null) {
LOG.info("loading gaps " + this.gapSource);
final Set<String> notFound = new HashSet<>();
try (final BedLineReader br = new BedLineReader(IOUtils.openURIForBufferedReading(this.gapSource), this.gapSource)) {
while (br.hasNext()) {
final BedLine gap0 = br.next();
final String contig2 = contigNameConverter.apply(gap0.getContig());
if (StringUtil.isBlank(contig2)) {
if (notFound.add(gap0.getContig()))
LOG.warn("gap contig not found :" + gap0.getContig());
continue;
}
if (excludePat.matcher(contig2).matches())
continue;
final Interval gap = new Interval(contig2, gap0.getStart(), gap0.getEnd());
final Collection<Interval> genes = geneMap.getOverlapping(gap);
for (final Interval gene_interval : genes) {
if (!gap.overlaps(gene_interval))
throw new IllegalStateException();
LOG.warn("gene " + gene_interval + " overlap gap " + gap + " " + this.split(gene_interval, gap));
geneMap.remove(gene_interval);
this.split(gene_interval, gap).forEach(N -> geneMap.put(N, N));
}
if (geneMap.containsOverlapping(gap))
throw new IllegalStateException();
final Collection<Interval> list = intervals1.getOverlapping(gap);
for (final Interval i : list) {
if (!gap.overlaps(i))
throw new IllegalStateException();
intervals1.remove(i);
this.split(i, gap).forEach(N -> intervals1.put(N, N));
}
}
}
}
final Comparator<String> contigCmp = new ContigDictComparator(dict);
final List<Interval> intervalsList = new ArrayList<>(intervals1.values());
intervalsList.sort((A, B) -> {
final int i = contigCmp.compare(A.getContig(), B.getContig());
if (i != 0)
return i;
return A.getStart() - B.getStart();
});
/* start writing output */
try (PrintWriter pw = super.openPathOrStdoutAsPrintWriter(this.outputFile)) {
for (final Interval interval : intervalsList) {
if (interval.length() <= this.window_size) {
echo(pw, interval);
continue;
}
int start = interval.getStart();
while (start < interval.getEnd()) {
int chromEnd = Math.min(interval.getEnd(), start + this.window_size);
// extend interval while there is a gene that shouldn't be broken
for (; ; ) {
final Interval record = new Interval(interval.getContig(), start, chromEnd);
final Interval gene = findGene.apply(record);
// no overlapping gene, or the gene already fits inside the record
if (gene == null || record.contains(gene))
break;
if (gene.getStart() < record.getStart())
throw new IllegalStateException("gene start " + gene.getStart() + " < " + start + " " + "\ngene:\t" + gene + "\ninterval\t" + interval + "\nrecord\t" + record);
chromEnd = Math.min(interval.getEnd(), gene.getEnd());
if (chromEnd >= interval.getEnd())
break;
}
if (interval.getEnd() - chromEnd <= this.small_size) {
chromEnd = interval.getEnd();
}
echo(pw, new Interval(interval.getContig(), start, chromEnd));
if (chromEnd >= interval.getEnd())
break;
start = chromEnd - this.overlap_size + 1;
}
}
pw.flush();
}
LOG.info("Done N=" + this.count_echoed);
return 0;
} catch (final Throwable err) {
LOG.error(err);
return -1;
} finally {
}
}
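The recurring pattern in FaidxSplitter is BED-into-IntervalTreeMap: remap each contig, drop records already contained in a stored interval, and index the rest for fast overlap queries. Here is a condensed sketch of the gene-loading block; as before, the jvarkit import paths other than BedLineReader are assumptions, and it uses var (Java 10+).
// Sketch only: load a BED source into an IntervalTreeMap, skipping unknown contigs
// and intervals already contained in a previously stored interval.
import java.io.IOException;
import htsjdk.samtools.util.Interval;
import htsjdk.samtools.util.IntervalTreeMap;
import com.github.lindenb.jvarkit.bed.BedLineReader;
import com.github.lindenb.jvarkit.io.IOUtils;                         // assumed path
import com.github.lindenb.jvarkit.util.bio.fasta.ContigNameConverter; // assumed path

public final class BedToIntervalTreeSketch {
    public static IntervalTreeMap<Interval> load(final String uri, final ContigNameConverter conv) throws IOException {
        final IntervalTreeMap<Interval> map = new IntervalTreeMap<>();
        try (BedLineReader br = new BedLineReader(IOUtils.openURIForBufferedReading(uri), uri)) {
            while (br.hasNext()) {
                final var bed = br.next(); // a BedLine
                final String ctg = conv.apply(bed.getContig());
                if (ctg == null || ctg.isEmpty()) continue; // contig not in the dictionary
                final Interval g = new Interval(ctg, bed.getStart(), bed.getEnd());
                // keep only maximal intervals: skip g if a stored interval already contains it
                if (map.getOverlapping(g).stream().anyMatch(x -> x.contains(g))) continue;
                map.put(g, g);
            }
        }
        return map;
    }
}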
Use of com.github.lindenb.jvarkit.bed.BedLineReader in project jvarkit by lindenb.
The class GcPercentAndDepth, method doWork:
@Override
public int doWork(final List<String> args) {
if (this.windowSize <= 0) {
LOG.error("Bad window size.");
return -1;
}
if (this.windowStep <= 0) {
LOG.error("Bad window step.");
return -1;
}
if (this.refFile == null) {
LOG.error("Undefined REF File");
return -1;
}
if (args.isEmpty()) {
LOG.error("Illegal Number of arguments.");
return -1;
}
ReferenceSequenceFile indexedFastaSequenceFile = null;
List<SamReader> readers = new ArrayList<SamReader>();
PrintWriter out = null;
try {
LOG.info("Loading " + this.refFile);
indexedFastaSequenceFile = ReferenceSequenceFileFactory.getReferenceSequenceFile(this.refFile);
this.samSequenceDictionary = SequenceDictionaryUtils.extractRequired(indexedFastaSequenceFile);
if (this.samSequenceDictionary == null) {
LOG.error("Cannot get sequence dictionary for " + this.refFile);
return -1;
}
out = super.openPathOrStdoutAsPrintWriter(outPutFile);
Set<String> all_samples = new TreeSet<String>();
/* create input, collect sample names */
for (int optind = 0; optind < args.size(); ++optind) {
LOG.info("Opening " + args.get(optind));
final SamReader samFileReaderScan = super.openSamReader(args.get(optind));
readers.add(samFileReaderScan);
final SAMFileHeader header = samFileReaderScan.getFileHeader();
if (!SequenceUtil.areSequenceDictionariesEqual(this.samSequenceDictionary, header.getSequenceDictionary())) {
LOG.error(JvarkitException.DictionariesAreNotTheSame.getMessage(this.samSequenceDictionary, header.getSequenceDictionary()));
return -1;
}
for (final SAMReadGroupRecord g : header.getReadGroups()) {
final String sample = this.partition.apply(g);
if (StringUtil.isBlank(sample)) {
LOG.warning("Read group " + g.getId() + " has no sample in merged dictionary");
continue;
}
all_samples.add(sample);
}
}
LOG.info("N " + this.partition.name() + "=" + all_samples.size());
/* print header */
out.print("#");
if (!this.hide_genomic_index) {
out.print("id");
out.print("\t");
}
out.print("chrom");
out.print("\t");
out.print("start");
out.print("\t");
out.print("end");
out.print("\t");
out.print("GCPercent");
for (final String sample : all_samples) {
out.print("\t");
out.print(sample);
}
out.println();
final List<RegionCaptured> regionsCaptured = new ArrayList<RegionCaptured>();
if (bedFile != null) {
LOG.info("Reading BED:" + bedFile);
try (BedLineReader r = new BedLineReader(bedFile)) {
r.stream().filter(B -> B != null).forEach(B -> {
final SAMSequenceRecord ssr = this.samSequenceDictionary.getSequence(B.getContig());
if (ssr == null) {
LOG.warning("Cannot resolve " + B.getContig());
return;
}
final RegionCaptured roi = new RegionCaptured(ssr, B.getStart() - 1, B.getEnd());
regionsCaptured.add(roi);
});
}
LOG.info("end Reading BED:" + bedFile);
Collections.sort(regionsCaptured);
} else {
LOG.info("No capture, peeking everything");
for (final SAMSequenceRecord ssr : this.samSequenceDictionary.getSequences()) {
final RegionCaptured roi = new RegionCaptured(ssr, 0, ssr.getSequenceLength());
regionsCaptured.add(roi);
}
}
final SAMSequenceDictionaryProgress progress = new SAMSequenceDictionaryProgress(this.samSequenceDictionary).logger(LOG);
GenomicSequence genomicSequence = null;
for (final RegionCaptured roi : regionsCaptured) {
if (genomicSequence == null || !genomicSequence.getChrom().equals(roi.getContig())) {
genomicSequence = new GenomicSequence(indexedFastaSequenceFile, roi.getContig());
}
Map<String, int[]> sample2depth = new HashMap<String, int[]>();
Map<String, Double> sample2meanDepth = new HashMap<String, Double>();
for (final String sample : all_samples) {
int[] depth = new int[roi.length()];
Arrays.fill(depth, 0);
sample2depth.put(sample, depth);
}
List<CloseableIterator<SAMRecord>> iterators = new ArrayList<CloseableIterator<SAMRecord>>();
for (final SamReader r : readers) {
iterators.add(r.query(roi.getContig(), roi.getStart(), roi.getEnd(), false));
}
final MergingIterator<SAMRecord> merginIter = new MergingIterator<>(new SAMRecordCoordinateComparator(), iterators);
while (merginIter.hasNext()) {
final SAMRecord rec = merginIter.next();
if (rec.getReadUnmappedFlag())
continue;
if (this.filter.filterOut(rec))
continue;
final String sample = this.partition.getPartion(rec, null);
if (sample == null)
continue;
final int[] depth = sample2depth.get(sample);
if (depth == null)
continue;
final Cigar cigar = rec.getCigar();
if (cigar == null)
continue;
int refpos1 = rec.getAlignmentStart();
for (final CigarElement ce : cigar.getCigarElements()) {
final CigarOperator op = ce.getOperator();
if (!op.consumesReferenceBases())
continue;
if (op.consumesReadBases()) {
for (int i = 0; i < ce.getLength(); ++i) {
if (refpos1 + i < roi.getStart())
continue;
if (refpos1 + i > roi.getEnd())
break;
depth[refpos1 + i - roi.getStart()]++;
}
}
refpos1 += ce.getLength();
}
}
merginIter.close();
for (final RegionCaptured.SlidingWindow win : roi) {
double total = 0f;
int countN = 0;
for (int pos1 = win.getStart(); pos1 <= win.getEnd(); ++pos1) {
switch(genomicSequence.charAt(pos1 - 1)) {
case 'c':
case 'C':
case 'g':
case 'G':
case 's':
case 'S':
{
total++;
break;
}
case 'n':
case 'N':
countN++;
break;
default:
break;
}
}
if (skip_if_contains_N && countN > 0)
continue;
double GCPercent = total / (double) win.length();
int max_depth_for_win = 0;
sample2meanDepth.clear();
for (final String sample : all_samples) {
int[] depth = sample2depth.get(sample);
double sum = 0;
for (int pos = win.getStart(); pos < win.getEnd() && (pos - roi.getStart()) < depth.length; ++pos) {
sum += depth[pos - roi.getStart()];
}
double mean = (sum / (double) depth.length);
max_depth_for_win = Math.max(max_depth_for_win, (int) mean);
sample2meanDepth.put(sample, mean);
}
if (max_depth_for_win < this.min_depth)
continue;
if (!this.hide_genomic_index) {
out.print(win.getGenomicIndex());
out.print("\t");
}
out.print(win.getContig());
out.print("\t");
out.print(win.getStart() - 1);
out.print("\t");
out.print(win.getEnd());
out.print("\t");
out.printf("%.2f", GCPercent);
for (String sample : all_samples) {
out.print("\t");
out.printf("%.2f", (double) sample2meanDepth.get(sample));
}
out.println();
}
}
progress.finish();
out.flush();
return 0;
} catch (Exception err) {
LOG.error(err);
return -1;
} finally {
for (SamReader r : readers) CloserUtil.close(r);
CloserUtil.close(indexedFastaSequenceFile);
CloserUtil.close(out);
}
}
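Two pieces of this method generalize: the stream-style BedLineReader consumption (r.stream().filter(...).forEach(...)) and the per-window GC count, where C, G and the IUPAC ambiguity code S all count toward GC while any N can disqualify the window. A compact standalone version of that inner GC loop follows; the negative-return convention is my own shorthand, not jvarkit's.
// Sketch only: GC fraction of seq[start,end), mirroring the switch statement above.
public final class GcPercentSketch {
    public static double gcFraction(final CharSequence seq, final int start, final int end) {
        int gc = 0;
        for (int i = start; i < end; ++i) {
            switch (Character.toUpperCase(seq.charAt(i))) {
                case 'C': case 'G': case 'S': gc++; break; // S = IUPAC "strong" (C or G)
                case 'N': return -1.0;                     // signal "window contains N" to the caller
                default: break;
            }
        }
        return gc / (double) (end - start);
    }
}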