Search in sources :

Example 1 with IntervalListProvider

use of com.github.lindenb.jvarkit.samtools.util.IntervalListProvider in project jvarkit by lindenb.

Source: method doWork of the class ValidateCnv.

/**
 * Validates candidate CNV (copy-number variant) intervals against the read depth
 * observed in a set of BAM/CRAM files, genotypes each sample as REF / DEL / DUP
 * from normalized coverage, and writes the result as a VCF.
 *
 * Input intervals come either from stdin (BED) or from the single file argument
 * (interpreted by IntervalListProvider; VCF records that are SNPs, INV or BND
 * are filtered out). One genotype is emitted per registered BAM sample.
 *
 * @param args zero or one input source; empty means read BED from stdin
 * @return 0 on success, -1 on error
 */
@Override
public int doWork(final List<String> args) {
    if (this.extendFactor <= 0) {
        LOG.error("bad extend factor " + this.extendFactor);
        return -1;
    }
    // treshold must lie in [0, 0.25) so the CN bands (0, 0.5, 1, 1.5, 2) below cannot overlap.
    // NOTE(review): the message text does not read as the condition it reports; the actual
    // requirement is 0 <= treshold < 0.25.
    if (this.treshold < 0 || this.treshold >= 0.25) {
        LOG.error("Bad treshold 0 < " + this.treshold + " >=0.25 ");
        return -1;
    }
    // sample name -> open BAM reader wrapper; closed in the finally block.
    final Map<String, BamInfo> sample2bam = new HashMap<>();
    VariantContextWriter out = null;
    Iterator<? extends Locatable> iterIn = null;
    try {
        // NOTE(review): field name 'rererencePath' looks like a typo of 'referencePath'
        // (declared outside this view; cannot be renamed here).
        final SAMSequenceDictionary dict = SequenceDictionaryUtils.extractRequired(this.rererencePath);
        final CRAMReferenceSource cramReferenceSource = new ReferenceSource(this.rererencePath);
        final List<Path> bamPaths = IOUtils.unrollPaths(this.bamFiles);
        final String input = oneFileOrNull(args);
        if (input == null) {
            // no argument: read candidate intervals as BED from stdin
            iterIn = IntervalListProvider.empty().dictionary(dict).skipUnknownContigs().fromInputStream(stdin(), "bed").iterator();
        } else {
            // file argument: keep only structural variants (reject SNPs and INV/BND records)
            final IntervalListProvider ilp = IntervalListProvider.from(input).setVariantPredicate(CTX -> {
                if (CTX.isSNP())
                    return false;
                final String svType = CTX.getAttributeAsString(VCFConstants.SVTYPE, "");
                if (svType != null && (svType.equals("INV") || svType.equals("BND")))
                    return false;
                return true;
            }).dictionary(dict).skipUnknownContigs();
            iterIn = ilp.stream().iterator();
        }
        /* register each bam */
        for (final Path p2 : bamPaths) {
            final BamInfo bi = new BamInfo(p2, cramReferenceSource);
            if (sample2bam.containsKey(bi.sampleName)) {
                LOG.error("sample " + bi.sampleName + " specified twice.");
                bi.close();
                return -1;
            }
            sample2bam.put(bi.sampleName, bi);
        }
        if (sample2bam.isEmpty()) {
            LOG.error("no bam was defined");
            return -1;
        }
        // ---- build VCF header meta-data -------------------------------------------------
        final Set<VCFHeaderLine> metadata = new HashSet<>();
        final VCFInfoHeaderLine infoSVSamples = new VCFInfoHeaderLine("N_SAMPLES", 1, VCFHeaderLineType.Integer, "Number of Samples that could carry a SV");
        metadata.add(infoSVSamples);
        final VCFInfoHeaderLine infoSvLen = new VCFInfoHeaderLine("SVLEN", 1, VCFHeaderLineType.Integer, "SV length");
        metadata.add(infoSvLen);
        final BiFunction<String, String, VCFFormatHeaderLine> makeFmt = (TAG, DESC) -> new VCFFormatHeaderLine(TAG, 1, VCFHeaderLineType.Integer, DESC);
        final VCFFormatHeaderLine formatCN = new VCFFormatHeaderLine("CN", 1, VCFHeaderLineType.Float, "normalized copy-number. Treshold was " + this.treshold);
        metadata.add(formatCN);
        final VCFFormatHeaderLine nReadsSupportingSv = makeFmt.apply("RSD", "number of split reads supporting SV.");
        metadata.add(nReadsSupportingSv);
        final VCFFilterHeaderLine filterAllDel = new VCFFilterHeaderLine("ALL_DEL", "number of samples greater than 1 and all are deletions");
        metadata.add(filterAllDel);
        final VCFFilterHeaderLine filterAllDup = new VCFFilterHeaderLine("ALL_DUP", "number of samples  greater than  1 and all are duplication");
        metadata.add(filterAllDup);
        final VCFFilterHeaderLine filterNoSV = new VCFFilterHeaderLine("NO_SV", "There is no DUP or DEL in this variant");
        metadata.add(filterNoSV);
        final VCFFilterHeaderLine filterHomDel = new VCFFilterHeaderLine("HOM_DEL", "There is one Homozygous deletion.");
        metadata.add(filterHomDel);
        final VCFFilterHeaderLine filterHomDup = new VCFFilterHeaderLine("HOM_DUP", "There is one Homozygous duplication.");
        metadata.add(filterHomDup);
        VCFStandardHeaderLines.addStandardFormatLines(metadata, true, VCFConstants.DEPTH_KEY, VCFConstants.GENOTYPE_KEY, VCFConstants.GENOTYPE_FILTER_KEY, VCFConstants.GENOTYPE_QUALITY_KEY);
        VCFStandardHeaderLines.addStandardInfoLines(metadata, true, VCFConstants.DEPTH_KEY, VCFConstants.END_KEY, VCFConstants.ALLELE_COUNT_KEY, VCFConstants.ALLELE_FREQUENCY_KEY, VCFConstants.ALLELE_NUMBER_KEY);
        final VCFHeader header = new VCFHeader(metadata, sample2bam.keySet());
        // defensive: dict is non-null here (extractRequired would have thrown otherwise)
        if (dict != null)
            header.setSequenceDictionary(dict);
        JVarkitVersion.getInstance().addMetaData(this, header);
        // NOTE(review): this progress watcher is built and closed but progress.apply() is
        // never invoked on the processed items, so no progress is actually logged — confirm intent.
        final ProgressFactory.Watcher<VariantContext> progress = ProgressFactory.newInstance().dictionary(dict).logger(LOG).build();
        out = this.writingVariantsDelegate.dictionary(dict).open(this.outputFile);
        out.writeHeader(header);
        final Allele DUP_ALLELE = Allele.create("<DUP>", false);
        final Allele DEL_ALLELE = Allele.create("<DEL>", false);
        final Allele REF_ALLELE = Allele.create("N", true);
        // ---- main loop: one candidate interval -> one VCF record ------------------------
        while (iterIn.hasNext()) {
            final Locatable ctx = iterIn.next();
            if (ctx == null)
                continue;
            final SAMSequenceRecord ssr = dict.getSequence(ctx.getContig());
            // skip unknown contigs and intervals starting at/after the contig end
            if (ssr == null || ctx.getStart() >= ssr.getSequenceLength())
                continue;
            final int svLen = ctx.getLengthOnReference();
            if (svLen < this.min_abs_sv_size)
                continue;
            if (svLen > this.max_abs_sv_size)
                continue;
            int n_samples_with_cnv = 0;
            final SimplePosition breakPointLeft = new SimplePosition(ctx.getContig(), ctx.getStart());
            final SimplePosition breakPointRight = new SimplePosition(ctx.getContig(), ctx.getEnd());
            // widen the window by extendFactor * svLen on each side (clamped to the contig);
            // the flanks are used below as the coverage baseline.
            final int extend = 1 + (int) (svLen * this.extendFactor);
            final int leftPos = Math.max(1, breakPointLeft.getPosition() - extend);
            // indexes of the SV interval inside the raw_coverage array (0 = leftPos)
            final int array_mid_start = breakPointLeft.getPosition() - leftPos;
            final int array_mid_end = breakPointRight.getPosition() - leftPos;
            final int rightPos = Math.min(breakPointRight.getPosition() + extend, ssr.getSequenceLength());
            final VariantContextBuilder vcb = new VariantContextBuilder();
            vcb.chr(ctx.getContig());
            vcb.start(ctx.getStart());
            vcb.stop(ctx.getEnd());
            vcb.attribute(VCFConstants.END_KEY, ctx.getEnd());
            final Set<Allele> alleles = new HashSet<>();
            alleles.add(REF_ALLELE);
            int count_dup = 0;
            int count_del = 0;
            // AN accumulator: 2 per called (non no-call) genotype
            int an = 0;
            final Counter<Allele> countAlleles = new Counter<>();
            final List<Genotype> genotypes = new ArrayList<>(sample2bam.size());
            // worst (lowest) genotype quality across samples; used for the site QUAL below
            Double badestGQ = null;
            // per-base depth over [leftPos..rightPos], reused (refilled) for each sample
            final double[] raw_coverage = new double[CoordMath.getLength(leftPos, rightPos)];
            for (final String sampleName : sample2bam.keySet()) {
                final BamInfo bi = sample2bam.get(sampleName);
                Arrays.fill(raw_coverage, 0.0);
                int n_reads_supporting_sv = 0;
                try (CloseableIterator<SAMRecord> iter2 = bi.samReader.queryOverlapping(ctx.getContig(), leftPos, rightPos)) {
                    while (iter2.hasNext()) {
                        final SAMRecord rec = iter2.next();
                        if (!SAMRecordDefaultFilter.accept(rec, this.min_mapq))
                            continue;
                        final Cigar cigar = rec.getCigar();
                        if (cigar == null || cigar.isEmpty())
                            continue;
                        // any clip supporting deletion ?
                        boolean read_supports_cnv = false;
                        final int breakpoint_distance = 10;
                        // any clip on left ?
                        if (cigar.isLeftClipped() && rec.getUnclippedStart() < rec.getAlignmentStart() && new SimpleInterval(ctx.getContig(), rec.getUnclippedStart(), rec.getAlignmentStart()).withinDistanceOf(breakPointLeft, breakpoint_distance)) {
                            read_supports_cnv = true;
                        }
                        // any clip on right ?
                        if (!read_supports_cnv && cigar.isRightClipped() && rec.getAlignmentEnd() < rec.getUnclippedEnd() && new SimpleInterval(ctx.getContig(), rec.getAlignmentEnd(), rec.getUnclippedEnd()).withinDistanceOf(breakPointRight, breakpoint_distance)) {
                            read_supports_cnv = true;
                        }
                        if (read_supports_cnv) {
                            n_reads_supporting_sv++;
                        }
                        // accumulate per-base coverage: walk the cigar, counting only operators
                        // that consume both reference and read bases (M/X/=)
                        int ref = rec.getStart();
                        for (final CigarElement ce : cigar) {
                            final CigarOperator op = ce.getOperator();
                            if (op.consumesReferenceBases()) {
                                if (op.consumesReadBases()) {
                                    for (int x = 0; x < ce.getLength() && ref + x - leftPos < raw_coverage.length; ++x) {
                                        final int p = ref + x - leftPos;
                                        if (p < 0 || p >= raw_coverage.length)
                                            continue;
                                        raw_coverage[p]++;
                                    }
                                }
                                ref += ce.getLength();
                            }
                        }
                    }
                // end while iter record
                }
                // end try query for iterator
                // test for great difference between DP left and DP right
                // (arguments presumably are (array, offset, length) — matches both calls)
                final OptionalDouble medianDepthLeft = Percentile.median().evaluate(raw_coverage, 0, array_mid_start);
                final OptionalDouble medianDepthRight = Percentile.median().evaluate(raw_coverage, array_mid_end, raw_coverage.length - array_mid_end);
                // any is just too low
                if (!medianDepthLeft.isPresent() || medianDepthLeft.getAsDouble() < this.min_depth || !medianDepthRight.isPresent() || medianDepthRight.getAsDouble() < this.min_depth) {
                    final Genotype gt2 = new GenotypeBuilder(sampleName, Arrays.asList(Allele.NO_CALL, Allele.NO_CALL)).filter("LowDp").make();
                    genotypes.add(gt2);
                    continue;
                }
                final double difference_factor = 2.0;
                // even if a value is divided , it remains greater than the other size
                if (medianDepthLeft.getAsDouble() / difference_factor > medianDepthRight.getAsDouble() || medianDepthRight.getAsDouble() / difference_factor > medianDepthLeft.getAsDouble()) {
                    final Genotype gt2 = new GenotypeBuilder(sampleName, Arrays.asList(Allele.NO_CALL, Allele.NO_CALL)).filter("DiffLR").make();
                    genotypes.add(gt2);
                    continue;
                }
                // run median to smooth spline
                final double[] smoothed_cov = new RunMedian(RunMedian.getTurlachSize(raw_coverage.length)).apply(raw_coverage);
                // baseline = median raw depth over the two flanks (outside the SV interval)
                final double[] bounds_cov = IntStream.concat(IntStream.range(0, array_mid_start), IntStream.range(array_mid_end, smoothed_cov.length)).mapToDouble(IDX -> raw_coverage[IDX]).toArray();
                final OptionalDouble optMedianBound = Percentile.median().evaluate(bounds_cov);
                if (!optMedianBound.isPresent() || optMedianBound.getAsDouble() == 0) {
                    final Genotype gt2 = new GenotypeBuilder(sampleName, Arrays.asList(Allele.NO_CALL, Allele.NO_CALL)).filter("MedZero").make();
                    genotypes.add(gt2);
                    continue;
                }
                final double medianBound = optMedianBound.getAsDouble();
                // divide coverage per medianBound
                final double[] normalized_mid_coverage = new double[array_mid_end - array_mid_start];
                for (int i = 0; i < normalized_mid_coverage.length; ++i) {
                    normalized_mid_coverage[i] = smoothed_cov[array_mid_start + i] / medianBound;
                }
                final double normDepth = Percentile.median().evaluate(normalized_mid_coverage).getAsDouble();
                // classify normDepth against the diploid CN bands 0, 0.5, 1, 1.5, 2
                // (bands are disjoint because treshold < 0.25, so the else-if order is safe)
                final boolean is_sv;
                final boolean is_hom_deletion = Math.abs(normDepth - 0.0) <= this.treshold;
                final boolean is_het_deletion = Math.abs(normDepth - 0.5) <= this.treshold || (!is_hom_deletion && normDepth <= 0.5);
                final boolean is_hom_dup = Math.abs(normDepth - 2.0) <= this.treshold || normDepth > 2.0;
                final boolean is_het_dup = Math.abs(normDepth - 1.5) <= this.treshold || (!is_hom_dup && normDepth >= 1.5);
                final boolean is_ref = Math.abs(normDepth - 1.0) <= this.treshold;
                final double theoritical_depth;
                final GenotypeBuilder gb;
                if (is_ref) {
                    gb = new GenotypeBuilder(sampleName, Arrays.asList(REF_ALLELE, REF_ALLELE));
                    is_sv = false;
                    theoritical_depth = 1.0;
                    an += 2;
                } else if (is_het_deletion) {
                    gb = new GenotypeBuilder(sampleName, Arrays.asList(REF_ALLELE, DEL_ALLELE));
                    alleles.add(DEL_ALLELE);
                    is_sv = true;
                    theoritical_depth = 0.5;
                    count_del++;
                    an += 2;
                    countAlleles.incr(DEL_ALLELE);
                } else if (is_hom_deletion) {
                    gb = new GenotypeBuilder(sampleName, Arrays.asList(DEL_ALLELE, DEL_ALLELE));
                    alleles.add(DEL_ALLELE);
                    vcb.filter(filterHomDel.getID());
                    is_sv = true;
                    theoritical_depth = 0.0;
                    count_del++;
                    an += 2;
                    countAlleles.incr(DEL_ALLELE, 2);
                } else if (is_het_dup) {
                    gb = new GenotypeBuilder(sampleName, Arrays.asList(REF_ALLELE, DUP_ALLELE));
                    alleles.add(DUP_ALLELE);
                    is_sv = true;
                    theoritical_depth = 1.5;
                    count_dup++;
                    an += 2;
                    countAlleles.incr(DUP_ALLELE);
                } else if (is_hom_dup) {
                    gb = new GenotypeBuilder(sampleName, Arrays.asList(DUP_ALLELE, DUP_ALLELE));
                    alleles.add(DUP_ALLELE);
                    vcb.filter(filterHomDup.getID());
                    is_sv = true;
                    theoritical_depth = 2.0;
                    count_dup++;
                    an += 2;
                    countAlleles.incr(DUP_ALLELE, 2);
                } else {
                    // normDepth falls between bands: no-call (note: AN is not incremented here)
                    gb = new GenotypeBuilder(sampleName, Arrays.asList(Allele.NO_CALL, Allele.NO_CALL)).filter("Ambigous");
                    is_sv = false;
                    theoritical_depth = 1.0;
                }
                if (is_sv) {
                    n_samples_with_cnv++;
                }
                // GQ: distance from the expected depth, capped at 0.5, mapped quadratically
                // onto [0..99] (0.0 distance -> GQ 99, >=0.5 distance -> GQ 0)
                double gq = Math.abs(theoritical_depth - normDepth);
                gq = Math.min(0.5, gq);
                gq = gq * gq;
                gq = gq / 0.25;
                gq = 99 * (1.0 - gq);
                gb.GQ((int) gq);
                if (badestGQ == null || badestGQ.compareTo(gq) > 0) {
                    badestGQ = gq;
                }
                gb.attribute(formatCN.getID(), normDepth);
                gb.attribute(nReadsSupportingSv.getID(), n_reads_supporting_sv);
                genotypes.add(gb.make());
            }
            vcb.attribute(VCFConstants.ALLELE_NUMBER_KEY, an);
            final List<Allele> orderedAlleles = new ArrayList<>(alleles);
            Collections.sort(orderedAlleles);
            // AC/AF for each ALT allele (index 0 is the REF allele after sorting)
            if (orderedAlleles.size() > 1) {
                final List<Integer> acL = new ArrayList<>();
                final List<Double> afL = new ArrayList<>();
                for (int i = 1; i < orderedAlleles.size(); i++) {
                    final Allele a = orderedAlleles.get(i);
                    final int c = (int) countAlleles.count(a);
                    acL.add(c);
                    if (an > 0)
                        afL.add(c / (double) an);
                }
                vcb.attribute(VCFConstants.ALLELE_COUNT_KEY, acL);
                if (an > 0)
                    vcb.attribute(VCFConstants.ALLELE_FREQUENCY_KEY, afL);
            }
            // if(alleles.size()<=1) continue;
            vcb.alleles(orderedAlleles);
            vcb.noID();
            vcb.genotypes(genotypes);
            vcb.attribute(infoSVSamples.getID(), n_samples_with_cnv);
            vcb.attribute(infoSvLen.getID(), svLen);
            if (count_dup == sample2bam.size() && sample2bam.size() != 1) {
                vcb.filter(filterAllDup.getID());
            }
            if (count_del == sample2bam.size() && sample2bam.size() != 1) {
                vcb.filter(filterAllDel.getID());
            }
            if (n_samples_with_cnv == 0) {
                vcb.filter(filterNoSV.getID());
            }
            // site QUAL derived from the worst genotype quality (phred-like conversion)
            if (badestGQ != null) {
                vcb.log10PError(badestGQ / -10.0);
            }
            out.add(vcb.make());
        }
        progress.close();
        out.close();
        return 0;
    } catch (final Throwable err) {
        LOG.error(err);
        return -1;
    } finally {
        // CloserUtil tolerates nulls and already-closed resources
        CloserUtil.close(iterIn);
        CloserUtil.close(out);
        sample2bam.values().forEach(F -> CloserUtil.close(F));
    }
}
Also used : WritingVariantsDelegate(com.github.lindenb.jvarkit.variant.variantcontext.writer.WritingVariantsDelegate) Allele(htsjdk.variant.variantcontext.Allele) Arrays(java.util.Arrays) Program(com.github.lindenb.jvarkit.util.jcommander.Program) VCFStandardHeaderLines(htsjdk.variant.vcf.VCFStandardHeaderLines) VCFHeader(htsjdk.variant.vcf.VCFHeader) BiFunction(java.util.function.BiFunction) CigarElement(htsjdk.samtools.CigarElement) CigarOperator(htsjdk.samtools.CigarOperator) IntervalListProvider(com.github.lindenb.jvarkit.samtools.util.IntervalListProvider) CRAMReferenceSource(htsjdk.samtools.cram.ref.CRAMReferenceSource) DistanceParser(com.github.lindenb.jvarkit.util.bio.DistanceParser) StringUtil(htsjdk.samtools.util.StringUtil) Map(java.util.Map) Path(java.nio.file.Path) CloserUtil(htsjdk.samtools.util.CloserUtil) SimpleInterval(com.github.lindenb.jvarkit.samtools.util.SimpleInterval) SequenceDictionaryUtils(com.github.lindenb.jvarkit.util.bio.SequenceDictionaryUtils) GenotypeBuilder(htsjdk.variant.variantcontext.GenotypeBuilder) Logger(com.github.lindenb.jvarkit.util.log.Logger) Set(java.util.Set) Percentile(com.github.lindenb.jvarkit.math.stats.Percentile) SAMRecord(htsjdk.samtools.SAMRecord) SimplePosition(com.github.lindenb.jvarkit.samtools.util.SimplePosition) List(java.util.List) VariantContextWriter(htsjdk.variant.variantcontext.writer.VariantContextWriter) CoordMath(htsjdk.samtools.util.CoordMath) VCFInfoHeaderLine(htsjdk.variant.vcf.VCFInfoHeaderLine) VariantContext(htsjdk.variant.variantcontext.VariantContext) SamReaderFactory(htsjdk.samtools.SamReaderFactory) VariantContextBuilder(htsjdk.variant.variantcontext.VariantContextBuilder) IntStream(java.util.stream.IntStream) Genotype(htsjdk.variant.variantcontext.Genotype) VCFHeaderLine(htsjdk.variant.vcf.VCFHeaderLine) Cigar(htsjdk.samtools.Cigar) CloseableIterator(htsjdk.samtools.util.CloseableIterator) Parameter(com.beust.jcommander.Parameter) OptionalDouble(java.util.OptionalDouble) 
HashMap(java.util.HashMap) ValidationStringency(htsjdk.samtools.ValidationStringency) ParametersDelegate(com.beust.jcommander.ParametersDelegate) ArrayList(java.util.ArrayList) HashSet(java.util.HashSet) ReferenceSource(htsjdk.samtools.cram.ref.ReferenceSource) NoSplitter(com.github.lindenb.jvarkit.util.jcommander.NoSplitter) SAMRecordDefaultFilter(com.github.lindenb.jvarkit.samtools.SAMRecordDefaultFilter) IOUtils(com.github.lindenb.jvarkit.io.IOUtils) Launcher(com.github.lindenb.jvarkit.util.jcommander.Launcher) VCFConstants(htsjdk.variant.vcf.VCFConstants) Counter(com.github.lindenb.jvarkit.util.Counter) Locatable(htsjdk.samtools.util.Locatable) VCFFilterHeaderLine(htsjdk.variant.vcf.VCFFilterHeaderLine) VCFHeaderLineType(htsjdk.variant.vcf.VCFHeaderLineType) Iterator(java.util.Iterator) SAMSequenceDictionary(htsjdk.samtools.SAMSequenceDictionary) ProgressFactory(com.github.lindenb.jvarkit.util.log.ProgressFactory) IOException(java.io.IOException) JVarkitVersion(com.github.lindenb.jvarkit.util.JVarkitVersion) SamReader(htsjdk.samtools.SamReader) FractionConverter(com.github.lindenb.jvarkit.jcommander.converter.FractionConverter) Closeable(java.io.Closeable) VCFFormatHeaderLine(htsjdk.variant.vcf.VCFFormatHeaderLine) RunMedian(com.github.lindenb.jvarkit.math.RunMedian) SAMSequenceRecord(htsjdk.samtools.SAMSequenceRecord) Collections(java.util.Collections) VCFHeaderLine(htsjdk.variant.vcf.VCFHeaderLine) HashMap(java.util.HashMap) ProgressFactory(com.github.lindenb.jvarkit.util.log.ProgressFactory) ArrayList(java.util.ArrayList) SAMSequenceRecord(htsjdk.samtools.SAMSequenceRecord) CRAMReferenceSource(htsjdk.samtools.cram.ref.CRAMReferenceSource) ReferenceSource(htsjdk.samtools.cram.ref.ReferenceSource) Counter(com.github.lindenb.jvarkit.util.Counter) CRAMReferenceSource(htsjdk.samtools.cram.ref.CRAMReferenceSource) SimpleInterval(com.github.lindenb.jvarkit.samtools.util.SimpleInterval) HashSet(java.util.HashSet) Genotype(htsjdk.variant.variantcontext.Genotype) 
CigarOperator(htsjdk.samtools.CigarOperator) GenotypeBuilder(htsjdk.variant.variantcontext.GenotypeBuilder) OptionalDouble(java.util.OptionalDouble) CigarElement(htsjdk.samtools.CigarElement) RunMedian(com.github.lindenb.jvarkit.math.RunMedian) SAMRecord(htsjdk.samtools.SAMRecord) Locatable(htsjdk.samtools.util.Locatable) VariantContext(htsjdk.variant.variantcontext.VariantContext) SAMSequenceDictionary(htsjdk.samtools.SAMSequenceDictionary) IntervalListProvider(com.github.lindenb.jvarkit.samtools.util.IntervalListProvider) VariantContextWriter(htsjdk.variant.variantcontext.writer.VariantContextWriter) VCFFilterHeaderLine(htsjdk.variant.vcf.VCFFilterHeaderLine) SimplePosition(com.github.lindenb.jvarkit.samtools.util.SimplePosition) VCFHeader(htsjdk.variant.vcf.VCFHeader) VCFFormatHeaderLine(htsjdk.variant.vcf.VCFFormatHeaderLine) Path(java.nio.file.Path) VCFInfoHeaderLine(htsjdk.variant.vcf.VCFInfoHeaderLine) OptionalDouble(java.util.OptionalDouble) Allele(htsjdk.variant.variantcontext.Allele) Cigar(htsjdk.samtools.Cigar) VariantContextBuilder(htsjdk.variant.variantcontext.VariantContextBuilder)

Example 2 with IntervalListProvider

use of com.github.lindenb.jvarkit.samtools.util.IntervalListProvider in project jvarkit by lindenb.

Source: method doWork of the class BedMergeCnv.

/**
 * Merges CNV/BED intervals into clusters and writes them as a 6-column BED
 * (contig, 0-based start, end, number of merged intervals, overlap fraction
 * with the previous cluster, label).
 *
 * @param args input paths; empty means read BED from stdin
 * @return 0 on success, -1 on error
 */
@Override
public int doWork(final List<String> args) {
    if (this.fraction <= 0) {
        LOG.error("Bad fraction : " + this.fraction);
        return -1;
    }
    PrintWriter writer = null;
    try {
        // load intervals: stdin as BED, otherwise each argument via IntervalListProvider
        if (args.isEmpty()) {
            parseBed(new LineIterator(new InputStreamReader(stdin())));
        } else {
            for (final String sourceName : args) {
                final IntervalListProvider provider = IntervalListProvider.from(sourceName);
                parseLocatables(provider.stream().iterator());
            }
        }
        writer = super.openPathOrStdoutAsPrintWriter(this.outputFile);
        // sort clusters by contig, then start, then end
        final List<Cluster> sortedClusters = new ArrayList<>(this.clusters.values());
        sortedClusters.sort((left, right) -> {
            final int byContig = left.getContig().compareTo(right.getContig());
            if (byContig != 0)
                return byContig;
            final int byStart = Integer.compare(left.getStart(), right.getStart());
            return byStart != 0 ? byStart : Integer.compare(left.getEnd(), right.getEnd());
        });
        Cluster previous = null;
        for (final Cluster cluster : sortedClusters) {
            writer.print(cluster.getContig());
            writer.print("\t");
            // BED is 0-based half-open: shift the 1-based start down by one
            writer.print(cluster.getStart() - 1);
            writer.print("\t");
            writer.print(cluster.getEnd());
            writer.print("\t");
            writer.print(cluster.intervals.size());
            writer.print("\t");
            // overlap with the previous cluster on the same contig, else "0.0"
            writer.print(previous != null && previous.contigsMatch(cluster) ? String.format("%.2f", this.fractionOverlap(previous, cluster)) : "0.0");
            writer.print("\t");
            writer.print(cluster.getLabel());
            writer.println();
            previous = cluster;
        }
        writer.flush();
        writer.close();
        return 0;
    } catch (final Throwable err) {
        LOG.error(err);
        return -1;
    } finally {
        CloserUtil.close(writer);
    }
}
Also used : InputStreamReader(java.io.InputStreamReader) IntervalListProvider(com.github.lindenb.jvarkit.samtools.util.IntervalListProvider) ArrayList(java.util.ArrayList) LineIterator(com.github.lindenb.jvarkit.util.iterator.LineIterator) PrintWriter(java.io.PrintWriter)

Example 3 with IntervalListProvider

use of com.github.lindenb.jvarkit.samtools.util.IntervalListProvider in project jvarkit by lindenb.

Source: method doWork of the class ScanStructuralVariants.

/**
 * Scans structural variants across a set of "case" VCFs, clusters matching SVs
 * (via recursive comparison against the other cases and the "control" VCFs)
 * and writes the merged calls to a new VCF.
 *
 * @param args paths to the case VCF files (a *.list file is unrolled)
 * @return 0 on success, -1 on error
 */
@Override
public int doWork(final List<String> args) {
    final List<VCFReader> casesFiles = new ArrayList<>();
    if (this.svComparator.getBndDistance() < 0) {
        LOG.error("bad max_distance :" + this.svComparator.getBndDistance());
        return -1;
    }
    VariantContextWriter out = null;
    try {
        final List<Path> casesPaths = (IOUtils.unrollPaths(args));
        if (casesPaths.isEmpty()) {
            LOG.error("cases list is empty");
            return -1;
        }
        // a single case cannot be clustered against siblings: force printing every variant
        if (!print_all_ctx && casesPaths.size() == 1) {
            LOG.warning("One case: switching to --all");
            print_all_ctx = true;
        }
        // a single control path ending with ".list" is a file-of-files: expand it
        if (this.controlsPath.size() == 1 && this.controlsPath.get(0).toString().endsWith(".list")) {
            this.controlsPath = Files.lines(this.controlsPath.get(0)).filter(L -> !(L.startsWith("#") || StringUtils.isBlank(L))).map(L -> Paths.get(L)).collect(Collectors.toList());
        }
        SAMSequenceDictionary dict = null;
        final Set<VCFHeaderLine> metadata = new HashSet<>();
        // pass 0 = cases (readers kept open), pass 1 = controls (opened only to
        // validate that all inputs share the same sequence dictionary, then closed)
        for (int side = 0; side < 2; side++) {
            for (final Path input : (side == 0 ? casesPaths : this.controlsPath)) {
                final VCFReader vcfInput = VCFReaderFactory.makeDefault().open(input);
                final VCFHeader header = vcfInput.getHeader();
                if (side == 0) {
                    casesFiles.add(vcfInput);
                } else {
                    vcfInput.close();
                }
                final SAMSequenceDictionary dict2 = SequenceDictionaryUtils.extractRequired(header);
                if (dict == null) {
                    dict = dict2;
                } else if (!SequenceUtil.areSequenceDictionariesEqual(dict, dict2)) {
                    LOG.error(JvarkitException.DictionariesAreNotTheSame.getMessage(dict2, dict));
                    return -1;
                }
            }
        }
        // optional region restriction: variants must overlap one of these intervals
        final IntervalTreeMap<Boolean> intervalTreeMap;
        if (intervalListProvider != null) {
            intervalTreeMap = new IntervalTreeMap<>();
            intervalListProvider.dictionary(dict).stream().forEach(R -> intervalTreeMap.put(new Interval(R), true));
        } else {
            intervalTreeMap = null;
        }
        // ---- build output VCF header ---------------------------------------------------
        casesFiles.stream().flatMap(F -> F.getHeader().getMetaDataInInputOrder().stream()).forEach(H -> metadata.add(H));
        VCFStandardHeaderLines.addStandardFormatLines(metadata, true, VCFConstants.GENOTYPE_KEY);
        VCFStandardHeaderLines.addStandardInfoLines(metadata, true, VCFConstants.END_KEY);
        metadata.add(new VCFInfoHeaderLine("SAMPLES", VCFHeaderLineCount.UNBOUNDED, VCFHeaderLineType.String, "Samples carrying the SV"));
        metadata.add(new VCFInfoHeaderLine("NSAMPLES", 1, VCFHeaderLineType.Integer, "Number of Samples carrying the SV"));
        metadata.add(new VCFInfoHeaderLine("SVLEN", 1, VCFHeaderLineType.Integer, "SV length"));
        metadata.add(new VCFInfoHeaderLine("CIPOS", 2, VCFHeaderLineType.Integer, "Confidence interval around POS for imprecise variants"));
        metadata.add(new VCFInfoHeaderLine("CIEND", 2, VCFHeaderLineType.Integer, "Confidence interval around END for imprecise variants"));
        metadata.add(new VCFInfoHeaderLine("IMPRECISE", 0, VCFHeaderLineType.Flag, "Imprecise structural variation"));
        metadata.add(new VCFInfoHeaderLine(ATT_FILENAME, 1, VCFHeaderLineType.String, "Source of variant"));
        metadata.add(new VCFInfoHeaderLine(ATT_CLUSTER, 1, VCFHeaderLineType.String, "Variant cluster"));
        /*metadata.add(new VCFFormatHeaderLine(
					"OV",1,
					VCFHeaderLineType.Integer,
					"Number calls (with different sample) overlapping this genotype"
					));*/
        metadata.add(new VCFInfoHeaderLine(VCFConstants.SVTYPE, 1, VCFHeaderLineType.String, "SV type"));
        metadata.add(new VCFFilterHeaderLine(ATT_CONTROL, "Variant is found in controls (max MAF=" + this.max_maf + ")"));
        final VCFHeader header = new VCFHeader(metadata);
        header.setSequenceDictionary(dict);
        JVarkitVersion.getInstance().addMetaData(this, header);
        // controls are re-opened lazily through ShadowedVcfReader wrappers;
        // readers past max_control_large_flag get the "large" flag
        final List<ShadowedVcfReader> controlShadowReaders = new ArrayList<>(this.controlsPath.size());
        for (int i = 0; i < this.controlsPath.size(); i++) {
            boolean large_flag = this.max_control_large_flag < 0 || i >= this.max_control_large_flag;
            controlShadowReaders.add(new ShadowedVcfReader(this.controlsPath.get(i), large_flag));
        }
        out = super.openVariantContextWriter(this.outputFile);
        out.writeHeader(header);
        // drive the scan from the FIRST case file; the recursive() helper (defined
        // elsewhere) matches each candidate against the other cases and the controls
        final CloseableIterator<VariantContext> iter = casesFiles.get(0).iterator();
        final ProgressFactory.Watcher<VariantContext> progress = ProgressFactory.newInstance().dictionary(dict).logger(LOG).build();
        final Decoy decoy = Decoy.getDefaultInstance();
        VariantContext prevCtx = null;
        while (iter.hasNext()) {
            final VariantContext ctx = progress.apply(iter.next());
            // skip variants on decoy contigs (either side of a breakend)
            if (decoy.isDecoy(ctx.getContig()))
                continue;
            if (Breakend.parse(ctx).stream().anyMatch(B -> decoy.isDecoy(B.getContig())))
                continue;
            if (intervalTreeMap != null && !intervalTreeMap.containsOverlapping(ctx))
                continue;
            // in manta, I see the same variant multiple times in the same vcf
            if (prevCtx != null && ctx.getContig().equals(prevCtx.getContig()) && ctx.getStart() == prevCtx.getStart() && ctx.getEnd() == prevCtx.getEnd())
                continue;
            prevCtx = ctx;
            final List<VariantContext> candidate = new ArrayList<>(casesFiles.size());
            candidate.add(ctx);
            recursive(ctx, candidate, casesFiles, controlShadowReaders, out);
        }
        // NOTE(review): iter/progress are only closed on this happy path; an exception
        // above leaves them to the GC (only 'out' is covered by the finally block).
        iter.close();
        progress.close();
        out.close();
        // null-out so the finally block does not double-close the writer
        out = null;
        casesFiles.stream().forEach(F -> {
            try {
                F.close();
            } catch (Exception err) {
            }
        });
        controlShadowReaders.stream().forEach(F -> F.realClose());
        return 0;
    } catch (final Throwable err) {
        LOG.error(err);
        return -1;
    } finally {
        CloserUtil.close(out);
    }
}
Also used : VCFHeaderLine(htsjdk.variant.vcf.VCFHeaderLine) CloseableIterator(htsjdk.samtools.util.CloseableIterator) Allele(htsjdk.variant.variantcontext.Allele) Arrays(java.util.Arrays) SequenceUtil(htsjdk.samtools.util.SequenceUtil) Program(com.github.lindenb.jvarkit.util.jcommander.Program) Parameter(com.beust.jcommander.Parameter) VCFStandardHeaderLines(htsjdk.variant.vcf.VCFStandardHeaderLines) VCFHeader(htsjdk.variant.vcf.VCFHeader) IntervalListProvider(com.github.lindenb.jvarkit.samtools.util.IntervalListProvider) htsjdk.samtools.util(htsjdk.samtools.util) Function(java.util.function.Function) ParametersDelegate(com.beust.jcommander.ParametersDelegate) ArrayList(java.util.ArrayList) HashSet(java.util.HashSet) Decoy(com.github.lindenb.jvarkit.samtools.Decoy) IOUtils(com.github.lindenb.jvarkit.io.IOUtils) Breakend(com.github.lindenb.jvarkit.variant.variantcontext.Breakend) Launcher(com.github.lindenb.jvarkit.util.jcommander.Launcher) VCFReaderFactory(com.github.lindenb.jvarkit.variant.vcf.VCFReaderFactory) Path(java.nio.file.Path) CloserUtil(htsjdk.samtools.util.CloserUtil) VCFConstants(htsjdk.variant.vcf.VCFConstants) SequenceDictionaryUtils(com.github.lindenb.jvarkit.util.bio.SequenceDictionaryUtils) VCFFilterHeaderLine(htsjdk.variant.vcf.VCFFilterHeaderLine) VCFHeaderLineType(htsjdk.variant.vcf.VCFHeaderLineType) Files(java.nio.file.Files) Logger(com.github.lindenb.jvarkit.util.log.Logger) SAMSequenceDictionary(htsjdk.samtools.SAMSequenceDictionary) ProgressFactory(com.github.lindenb.jvarkit.util.log.ProgressFactory) VCFReader(htsjdk.variant.vcf.VCFReader) Set(java.util.Set) JVarkitVersion(com.github.lindenb.jvarkit.util.JVarkitVersion) StructuralVariantComparator(com.github.lindenb.jvarkit.variant.sv.StructuralVariantComparator) Collectors(java.util.stream.Collectors) JvarkitException(com.github.lindenb.jvarkit.lang.JvarkitException) File(java.io.File) List(java.util.List) Paths(java.nio.file.Paths) StringUtils(com.github.lindenb.jvarkit.lang.StringUtils) 
VariantContextWriter(htsjdk.variant.variantcontext.writer.VariantContextWriter) VCFInfoHeaderLine(htsjdk.variant.vcf.VCFInfoHeaderLine) VariantContext(htsjdk.variant.variantcontext.VariantContext) VCFHeaderLineCount(htsjdk.variant.vcf.VCFHeaderLineCount) VariantContextBuilder(htsjdk.variant.variantcontext.VariantContextBuilder) VCFHeaderLine(htsjdk.variant.vcf.VCFHeaderLine) ProgressFactory(com.github.lindenb.jvarkit.util.log.ProgressFactory) ArrayList(java.util.ArrayList) VariantContext(htsjdk.variant.variantcontext.VariantContext) SAMSequenceDictionary(htsjdk.samtools.SAMSequenceDictionary) VCFReader(htsjdk.variant.vcf.VCFReader) VariantContextWriter(htsjdk.variant.variantcontext.writer.VariantContextWriter) VCFFilterHeaderLine(htsjdk.variant.vcf.VCFFilterHeaderLine) VCFHeader(htsjdk.variant.vcf.VCFHeader) HashSet(java.util.HashSet) Path(java.nio.file.Path) Decoy(com.github.lindenb.jvarkit.samtools.Decoy) VCFInfoHeaderLine(htsjdk.variant.vcf.VCFInfoHeaderLine) JvarkitException(com.github.lindenb.jvarkit.lang.JvarkitException)

Aggregations

IntervalListProvider (com.github.lindenb.jvarkit.samtools.util.IntervalListProvider)3 Parameter (com.beust.jcommander.Parameter)2 ParametersDelegate (com.beust.jcommander.ParametersDelegate)2 IOUtils (com.github.lindenb.jvarkit.io.IOUtils)2 JVarkitVersion (com.github.lindenb.jvarkit.util.JVarkitVersion)2 SequenceDictionaryUtils (com.github.lindenb.jvarkit.util.bio.SequenceDictionaryUtils)2 Launcher (com.github.lindenb.jvarkit.util.jcommander.Launcher)2 Program (com.github.lindenb.jvarkit.util.jcommander.Program)2 Logger (com.github.lindenb.jvarkit.util.log.Logger)2 ProgressFactory (com.github.lindenb.jvarkit.util.log.ProgressFactory)2 SAMSequenceDictionary (htsjdk.samtools.SAMSequenceDictionary)2 CloseableIterator (htsjdk.samtools.util.CloseableIterator)2 CloserUtil (htsjdk.samtools.util.CloserUtil)2 Allele (htsjdk.variant.variantcontext.Allele)2 VariantContext (htsjdk.variant.variantcontext.VariantContext)2 ArrayList (java.util.ArrayList)2 FractionConverter (com.github.lindenb.jvarkit.jcommander.converter.FractionConverter)1 JvarkitException (com.github.lindenb.jvarkit.lang.JvarkitException)1 StringUtils (com.github.lindenb.jvarkit.lang.StringUtils)1 RunMedian (com.github.lindenb.jvarkit.math.RunMedian)1