
Example 11 with Transcript

Use of com.github.lindenb.jvarkit.util.bio.structure.Transcript in project jvarkit by lindenb.

The class VcfGtfSplitter, method doWork.

@Override
public int doWork(final List<String> args) {
    ArchiveFactory archiveFactory = null;
    PrintWriter manifest = null;
    VCFReader vcfFileReader = null;
    try {
        this.attCleaner = AttributeCleaner.compile(this.xannotatePattern);
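        // translate the user-supplied feature list (e.g. "cds,intron,utr") into the boolean use_* flags tested below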
        for (final String s : featuresString.split("[;, ]")) {
            if (StringUtils.isBlank(s))
                continue;
            if (s.equals("cds")) {
                use_cds = true;
            } else if (s.equals("intron")) {
                use_intron = true;
            } else if (s.equals("exon")) {
                use_exon = true;
            } else if (s.equals("stop")) {
                use_stop = true;
            } else if (s.equals("start")) {
                use_start = true;
            } else if (s.equals("transcript")) {
                use_exon = true;
                use_intron = true;
            } else if (s.equals("utr5")) {
                use_utr5 = true;
            } else if (s.equals("utr3")) {
                use_utr3 = true;
            } else if (s.equals("utr")) {
                use_utr3 = true;
                use_utr5 = true;
            } else if (s.equals("upstream")) {
                use_upstream = true;
            } else if (s.equals("downstream")) {
                use_downstream = true;
            } else if (s.equals("splice")) {
                use_splice = true;
            } else if (s.equals("cds_utr5")) {
                use_cds_utr5 = true;
            } else if (s.equals("cds_utr3")) {
                use_cds_utr3 = true;
            } else if (s.equals("cds_utr")) {
                use_cds_utr3 = true;
                use_cds_utr5 = true;
            } else {
                LOG.error("unknown code " + s + " in " + this.featuresString);
                return -1;
            }
        }
        final Path tmpVcf = Files.createTempFile("tmp.", (use_bcf ? FileExtensions.BCF : FileExtensions.COMPRESSED_VCF));
        String input = oneAndOnlyOneFile(args);
        vcfFileReader = VCFReaderFactory.makeDefault().open(Paths.get(input), true);
        final VCFHeader header1 = vcfFileReader.getHeader();
        final SAMSequenceDictionary dict = header1.getSequenceDictionary();
        if (dict == null && this.use_bcf) {
            throw new JvarkitException.VcfDictionaryMissing(input);
        }
        if (dict != null && !limitToContigs.isEmpty()) {
            final ContigNameConverter ctgNameConverter = ContigNameConverter.fromOneDictionary(dict);
            final Set<String> set2 = new HashSet<>(this.limitToContigs.size());
            for (final String ctg : this.limitToContigs) {
                final String ctg2 = ctgNameConverter.apply(ctg);
                if (StringUtils.isBlank(ctg2)) {
                    LOG.error(JvarkitException.ContigNotFoundInDictionary.getMessage(ctg, dict));
                    return -1;
                }
                set2.add(ctg2);
            }
            this.limitToContigs = set2;
        }
        final List<Gene> all_genes;
        try (GtfReader gtfReader = new GtfReader(this.gtfPath)) {
            final Comparator<Gene> cmp;
            if (dict != null) {
                gtfReader.setContigNameConverter(ContigNameConverter.fromOneDictionary(dict));
                cmp = new ContigDictComparator(dict).createLocatableComparator();
            } else {
                cmp = (A, B) -> {
                    final int i = A.getContig().compareTo(B.getContig());
                    if (i != 0)
                        return i;
                    return Integer.compare(A.getStart(), B.getStart());
                };
            }
            all_genes = gtfReader.getAllGenes().stream().filter(G -> {
                if (this.protein_coding_only && !"protein_coding".equals(G.getGeneBiotype()))
                    return false;
                if (this.limitToContigs.isEmpty())
                    return true;
                return this.limitToContigs.contains(G.getContig());
            }).sorted(cmp).collect(Collectors.toList());
        }
        archiveFactory = ArchiveFactory.open(this.outputFile);
        archiveFactory.setCompressionLevel(0);
        manifest = new PrintWriter(this.manifestFile == null ? new NullOuputStream() : IOUtils.openPathForWriting(manifestFile));
        manifest.println("#chrom\tstart\tend\tGene-Id\tGene-Name\tGene-Biotype\tTranscript-Id\tpath\tCount_Variants");
        if (this.split_by_transcript) {
            final Iterator<Transcript> triter = all_genes.stream().flatMap(G -> G.getTranscripts().stream()).iterator();
            while (triter.hasNext()) {
                final Transcript tr = triter.next();
                final AbstractSplitter splitter = new TranscriptSplitter(tr);
                this.split(splitter, vcfFileReader, header1, dict, archiveFactory, tmpVcf, manifest);
            }
        } else {
            for (Gene gene : all_genes) {
                final AbstractSplitter splitter = new GeneSplitter(gene);
                this.split(splitter, vcfFileReader, header1, dict, archiveFactory, tmpVcf, manifest);
            }
        }
        vcfFileReader.close();
        vcfFileReader = null;
        manifest.flush();
        manifest.close();
        manifest = null;
        archiveFactory.close();
        Files.deleteIfExists(tmpVcf);
        return RETURN_OK;
    } catch (final Exception err) {
        LOG.error(err);
        return -1;
    } finally {
        CloserUtil.close(vcfFileReader);
        CloserUtil.close(archiveFactory);
        CloserUtil.close(manifest);
    }
}
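
For reference, the transcript model above comes from jvarkit's GtfReader, which yields Gene objects that in turn expose their Transcript children. Below is a minimal sketch of that loading pattern on its own; it is an illustration, not the tool itself. It assumes the GtfReader constructor accepts a java.nio.file.Path as in the snippet above, the class name ListTranscripts and the file name input.gtf are placeholders, and only calls that appear in the example (getAllGenes, getGeneBiotype, getTranscripts, getContig, getStart, getEnd, getId) are used.

import java.nio.file.Paths;
import com.github.lindenb.jvarkit.util.bio.structure.Gene;
import com.github.lindenb.jvarkit.util.bio.structure.GtfReader;
import com.github.lindenb.jvarkit.util.bio.structure.Transcript;

public class ListTranscripts {
    public static void main(final String[] args) throws Exception {
        // "input.gtf" is a placeholder path for this sketch
        try (GtfReader gtfReader = new GtfReader(Paths.get("input.gtf"))) {
            for (final Gene gene : gtfReader.getAllGenes()) {
                // keep protein-coding genes only, mirroring the protein_coding_only filter above
                if (!"protein_coding".equals(gene.getGeneBiotype()))
                    continue;
                for (final Transcript tr : gene.getTranscripts()) {
                    System.out.println(tr.getId() + "\t" + gene.getContig() + ":" + tr.getStart() + "-" + tr.getEnd());
                }
            }
        }
    }
}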

Example 12 with Transcript

Use of com.github.lindenb.jvarkit.util.bio.structure.Transcript in project jvarkit by lindenb.

The class VcfStrechToSvg, method run.

private void run(final ArchiveFactory archive, final BedLine bed, final VCFHeader header, final VCFReader in, final PrintWriter manifest) {
    LOG.info("processing " + bed);
    final Set<String> limitSamples;
    if (StringUtils.isBlank(this.sampleStr)) {
        limitSamples = new TreeSet<>(header.getSampleNamesInOrder());
    } else if (this.sampleStr.startsWith("^")) {
        limitSamples = new TreeSet<>(header.getSampleNamesInOrder());
        limitSamples.removeAll(Arrays.stream(this.sampleStr.substring(1).split("[, ]+")).filter(S -> !StringUtils.isBlank(S)).collect(Collectors.toSet()));
    } else {
        limitSamples = Arrays.stream(this.sampleStr.split("[, ]+")).filter(S -> !StringUtils.isBlank(S)).collect(Collectors.toCollection(TreeSet::new));
    }
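    // limitSamples now holds the samples to plot: all header samples when sampleStr is blank, all but the listed names when it starts with "^", otherwise only the listed names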
    final SAMSequenceDictionary dict = header.getSequenceDictionary();
    try (CloseableIterator<VariantContext> iter = in.query(bed)) {
        final List<VariantSet> L = iter.stream().filter(V -> acceptVariant(V)).map(V -> new VariantSet(V)).collect(Collectors.toCollection(ArrayList::new));
        if (L.isEmpty()) {
            LOG.warn("No valid variant found for \"" + bed + "\"");
            return;
        }
        int i = 0;
        while (i + 1 < L.size()) {
            if (L.get(i).withinDistanceOf(L.get(i + 1), this.withinDistance)) {
                L.get(i).variants.addAll(L.get(i + 1).variants);
                L.remove(i + 1);
            } else {
                i++;
            }
        }
        final int margin_left = 50;
        final int margin_right = 10;
        final double drawingAreaWidth = image_width_pixel - (margin_left + margin_right);
        final int intervalLength = L.stream().mapToInt(V -> V.getLengthOnReference()).sum();
        double x = 0;
        for (i = 0; i < L.size(); i++) {
            L.get(i).x = x;
            x += (L.get(i).getLengthOnReference() / (double) intervalLength) * drawingAreaWidth;
        }
        for (i = 0; i < L.size(); i++) {
            L.get(i).width = (i + 1 < L.size() ? L.get(i + 1).x : drawingAreaWidth) - L.get(i).x;
        }
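        // at this point every merged VariantSet has an x offset and a width proportional to its length on the reference within the drawing area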
        try {
            final DocumentBuilderFactory db = DocumentBuilderFactory.newInstance();
            final DocumentBuilder dom = db.newDocumentBuilder();
            this.document = dom.newDocument();
            final Element svgRoot = element("svg");
            this.document.appendChild(svgRoot);
            final String mainTitleStr = SequenceDictionaryUtils.getBuildName(dict).orElse("") + " " + new SimpleInterval(bed).toNiceString() + " length:" + StringUtils.niceInt(bed.getLengthOnReference());
            /* SVG title */
            {
                final Element title = element("title");
                svgRoot.appendChild(title);
                title.appendChild(text(mainTitleStr));
            }
            /* SVG style */
            {
                final String gtopacity = this.dynamicParams.getOrDefault("gt.opacity", "0.7");
                final Element style = element("style");
                svgRoot.appendChild(style);
                style.appendChild(text(".maintitle {text-anchor:middle;fill:blue} " + ".vc {stroke-width:0.5px;} " + ".transcript {stroke:black;stroke-width:1px;opacity:1;}" + ".exon {stroke:black;stroke-width:0.5px;fill:blue;opacity:1;}" + ".sample {fill:blue;font-size:7px;} " + ".samplelabel {stroke:gray;stroke-width:0.5px;font-size:" + this.dynamicParams.getOrDefault("sample.fontsize", "7") + "px;}\n" + ".coverage { fill:gray; stroke:yellow;opacity:0.2;} " + ".frame { fill:none; stroke: darkgray;} " + ".area0 {fill:white;}\n" + ".area1 {fill:floralwhite;}\n" + "circle.HOM_REF {fill:green;opacity:" + gtopacity + ";stroke-width:0.5px;}\n" + "circle.HET {fill:blue;opacity:" + gtopacity + ";stroke-width:0.5px;}\n" + "circle.HOM_VAR {fill:red;opacity:" + gtopacity + ";stroke-width:0.5px;}\n" + "a {cursor: pointer;}\n"));
            }
            /* desc */
            {
                final Element descr = element("desc");
                svgRoot.appendChild(descr);
                descr.appendChild(text("Author: Pierre Lindenbaum\n" + JVarkitVersion.getInstance().getCompilationDate() + "\n" + JVarkitVersion.getInstance().getGitHash()));
            }
            // main title
            {
                final Element gtitle = element("text", mainTitleStr);
                gtitle.setAttribute("class", "maintitle");
                gtitle.setAttribute("x", format(this.image_width_pixel / 2.0));
                gtitle.setAttribute("y", "15");
                svgRoot.appendChild(wrapLoc(gtitle, bed));
            }
            int margin_top = 50;
            double y = margin_top;
            final double min_circle_radius = Double.parseDouble(this.dynamicParams.getOrDefault("gt.r1", "1"));
            final double max_circle_radius = Double.parseDouble(this.dynamicParams.getOrDefault("gt.r2", "7"));
            final Element main_g = element("g");
            svgRoot.appendChild(main_g);
            /**
             * plot genes
             */
            if (!this.all_genes.isEmpty()) {
                final double transcript_height = 5;
                final double exon_height = (transcript_height - 1);
                final double save_y = y;
                final Element g_genes = element("g");
                g_genes.setAttribute("transform", "translate(" + margin_left + ",0)");
                main_g.appendChild(g_genes);
                /* loop over each vset */
                for (i = 0; i < L.size(); i++) {
                    final VariantSet vset = L.get(i);
                    // find transcript in this vset
                    final List<Transcript> transcripts = this.all_genes.getOverlapping(vset).stream().flatMap(G -> G.getTranscripts().stream()).filter(T -> T.overlaps(vset)).collect(Collectors.toList());
                    if (transcripts.isEmpty())
                        continue;
                    final Element g_vset = element("g");
                    g_vset.setAttribute("transform", "translate(" + format(vset.x) + ",0)");
                    g_genes.appendChild(g_vset);
                    // y in this vset
                    double y2 = save_y;
                    /* convert base to pixel */
                    final ToDoubleFunction<Integer> base2pixel = vset.createBaseToPixelFunction();
                    /* loop over transcripts */
                    for (final Transcript tr : transcripts) {
                        final Element g_tr = element("g");
                        g_tr.setAttribute("transform", "translate(0," + format(y2) + ")");
                        g_vset.appendChild(g_tr);
                        final Element line = element("line");
                        line.setAttribute("class", "transcript");
                        line.setAttribute("x1", format(Math.max(0, base2pixel.applyAsDouble(tr.getStart()))));
                        line.setAttribute("y1", format(transcript_height / 2.0));
                        line.setAttribute("x2", format(Math.min(vset.width, base2pixel.applyAsDouble(tr.getEnd()))));
                        line.setAttribute("y2", format(transcript_height / 2.0));
                        line.appendChild(element("title", tr.getId()));
                        g_tr.appendChild(wrapLoc(line, tr));
                        /* loop over exons */
                        for (final Exon exon : tr.getExons()) {
                            if (!exon.overlaps(vset))
                                continue;
                            final Element exRect = element("rect");
                            exRect.setAttribute("class", "exon");
                            final double x_start = Math.max(0, base2pixel.applyAsDouble(exon.getStart()));
                            exRect.setAttribute("x", format(x_start));
                            exRect.setAttribute("y", format(transcript_height / 2.0 - exon_height / 2.0));
                            final double x_end = Math.min(vset.width, base2pixel.applyAsDouble(exon.getEnd()));
                            exRect.setAttribute("width", format(x_end - x_start));
                            exRect.setAttribute("height", format(exon_height));
                            exRect.appendChild(element("title", exon.getName()));
                            g_tr.appendChild(wrapLoc(exRect, exon));
                        }
                        y2 += transcript_height + 0.5;
                    }
                    y = Math.max(y, y2);
                }
                y++;
            }
            final double sample_height = Double.parseDouble(this.dynamicParams.getOrDefault("sample.height", "25"));
            final double sample_height2 = sample_height - (max_circle_radius * 2.0);
            int space_between_samples = 2;
            int got_n_samples = 0;
            for (final String sn : header.getSampleNamesInOrder()) {
                if (!limitSamples.contains(sn))
                    continue;
                boolean got_this_sample = false;
                final Element g_sample = element("g");
                g_sample.setAttribute("transform", "translate(" + margin_left + "," + format(y) + ")");
                /* get coverage */
                final int maxCoverage;
                if (this.sample2bam.containsKey(sn)) {
                    final CoverageFactory coverageFactory = new CoverageFactory();
                    try (SamReader sr = this.openSamReader(this.sample2bam.get(sn))) {
                        /* loop over each variant set */
                        for (final VariantSet vset : L) {
                            vset.coverage = coverageFactory.getSimpleCoverage(sr, vset, sn);
                        }
                    }
                    maxCoverage = L.stream().flatMapToInt(V -> V.coverage.stream()).max().orElse(0);
                } else {
                    maxCoverage = 0;
                    for (final VariantSet vset : L) {
                        vset.coverage = null;
                    }
                }
                /* loop over each variant set */
                for (i = 0; i < L.size(); i++) {
                    final VariantSet vset = L.get(i);
                    final Element g_vset = element("g");
                    g_vset.setAttribute("transform", "translate(" + format(vset.x) + ",0)");
                    g_sample.appendChild(g_vset);
                    /* convert base to pixel */
                    final ToDoubleFunction<Integer> base2pixel = vset.createBaseToPixelFunction();
                    // plot set length
                    final Element rect = element("rect");
                    rect.setAttribute("class", "area" + (i % 2));
                    rect.setAttribute("x", "0");
                    rect.setAttribute("y", "0");
                    rect.setAttribute("width", format(vset.width));
                    rect.setAttribute("height", format(sample_height));
                    if (!remove_tooltip)
                        rect.appendChild(element("title", vset.toString()));
                    g_vset.appendChild(rect);
                    // plot coverage
                    if (maxCoverage > 0 && this.sample2bam.containsKey(sn)) {
                        final double[] scaled = vset.coverage.scaleAverage((int) vset.width);
                        final StringBuilder sb = new StringBuilder();
                        sb.append("0," + sample_height);
                        for (int t = 0; t < scaled.length; t++) {
                            if (t > 1 && t + 1 < scaled.length && format(scaled[t - 1]).equals(format(scaled[t + 1])) && format(scaled[t - 1]).equals(format(scaled[t])))
                                continue;
                            sb.append(" ").append(t).append(",");
                            sb.append(format(sample_height * (1.0 - scaled[t] / maxCoverage)));
                        }
                        sb.append(" " + format(vset.width) + "," + sample_height);
                        final Element polyline = element("polyline");
                        polyline.setAttribute("class", "coverage");
                        polyline.setAttribute("points", sb.toString());
                        g_vset.appendChild(polyline);
                        vset.coverage = null;
                    }
                    // plot vertical line if colorTag defined
                    if (!StringUtils.isBlank(this.colorTag)) {
                        for (final VariantContext vc : vset.variants) {
                            if (!vc.hasAttribute(this.colorTag))
                                continue;
                            final String cssColor = vc.getAttributeAsString(this.colorTag, "");
                            if (StringUtils.isBlank(cssColor))
                                continue;
                            final double x0 = base2pixel.applyAsDouble(vc.getStart());
                            final Element line = element("line");
                            line.setAttribute("class", "vc");
                            line.setAttribute("style", "stroke:" + cssColor);
                            line.setAttribute("x1", format(x0));
                            line.setAttribute("y1", "0");
                            line.setAttribute("x2", format(x0));
                            line.setAttribute("y2", format(sample_height));
                            g_vset.appendChild(line);
                        }
                    }
                    // print all variants in this vcfset for this sample
                    for (final VariantContext vc : vset.variants) {
                        final Genotype gt = vc.getGenotype(sn);
                        if (gt.isNoCall())
                            continue;
                        if (hide_hom_ref && gt.isHomRef())
                            continue;
                        if (gt.hasGQ() && gt.getGQ() < this.minGQ)
                            continue;
                        final OptionalDouble alt_ratio = getAltRatio(gt);
                        if (!alt_ratio.isPresent())
                            continue;
                        final OptionalDouble af = getAF(vc);
                        final double circle_radius = min_circle_radius + (max_circle_radius - min_circle_radius) * (1.0 - af.orElse(1.0));
                        // HOMREF=0;  HET =0.5; HOMVAR = 1;
                        final double gtx = base2pixel.applyAsDouble(vc.getStart());
                        final double gty = sample_height - (sample_height2 * alt_ratio.getAsDouble() + (sample_height - sample_height2) / 2.0);
                        final Element circle = element("circle");
                        circle.setAttribute("class", gt.getType().name());
                        circle.setAttribute("cx", format(gtx));
                        circle.setAttribute("cy", format(gty));
                        circle.setAttribute("r", format(circle_radius));
                        if (!remove_tooltip)
                            circle.appendChild(element("title", vc.getStart() + " " + (vc.hasID() ? vc.getID() : "") + " " + vc.getAlleles().stream().map(A -> A.getDisplayString()).collect(Collectors.joining("/")) + " " + gt.getType().name() + " AF=" + format(af.orElse(-1))));
                        g_vset.appendChild(wrapLoc(circle, vc));
                        got_this_sample = true;
                    }
                }
                final Element frame_sample = element("rect");
                frame_sample.setAttribute("class", "frame");
                frame_sample.setAttribute("x", "0");
                frame_sample.setAttribute("y", "0");
                frame_sample.setAttribute("width", format(drawingAreaWidth));
                frame_sample.setAttribute("height", format(sample_height));
                g_sample.appendChild(frame_sample);
                final Element label = element("text", sn + (maxCoverage == 0 ? "" : " Max Cov. " + maxCoverage));
                label.setAttribute("class", "samplelabel");
                label.setAttribute("x", "0");
                label.setAttribute("y", "0");
                // label.setAttribute("transform", "translate("+format(-10)+","+0+") rotate(90) ");
                label.setAttribute("transform", "translate(12,12)");
                if (!remove_tooltip)
                    label.appendChild(element("title", sn));
                g_sample.appendChild(label);
                if (got_this_sample) {
                    got_n_samples++;
                    main_g.appendChild(g_sample);
                    y += sample_height + space_between_samples;
                } else {
                    LOG.warn("no valid data for sample " + sn + " in " + bed);
                }
            }
            // remove extra sample space
            y -= space_between_samples;
            svgRoot.setAttribute("width", format(this.image_width_pixel + 1));
            svgRoot.setAttribute("height", format(y + 1));
            if (got_n_samples == 0) {
                LOG.info("no sample/genotype found for " + bed);
                return;
            }
            // save
            final Transformer tr = TransformerFactory.newInstance().newTransformer();
            final String filename = bed.getContig() + "_" + bed.getStart() + "_" + bed.getEnd() + ".svg" + (this.compressed_svg ? ".gz" : "");
            LOG.info("writing " + filename);
            if (this.compressed_svg) {
                try (final OutputStream pw = archive.openOuputStream(filename)) {
                    try (GZIPOutputStream gzout = new GZIPOutputStream(pw)) {
                        tr.transform(new DOMSource(this.document), new StreamResult(gzout));
                        gzout.finish();
                        gzout.flush();
                    }
                    pw.flush();
                }
            } else {
                try (final PrintWriter pw = archive.openWriter(filename)) {
                    tr.transform(new DOMSource(this.document), new StreamResult(pw));
                    pw.flush();
                }
            }
            manifest.print(bed.getContig());
            manifest.print("\t");
            manifest.print(bed.getStart() - 1);
            manifest.print("\t");
            manifest.print(bed.getEnd());
            manifest.print("\t");
            manifest.print(filename);
            manifest.println();
        } catch (final Throwable err) {
            throw new RuntimeException(err);
        } finally {
            this.document = null;
        }
    }
}
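
Note that the SVG above is assembled with the standard JAXP DOM API (DocumentBuilderFactory, Element, Transformer) rather than string templates: each element(...) helper presumably wraps the document's createElement call, and the final Transformer serializes the DOM to the .svg file. Here is a minimal self-contained sketch of that pattern using only the JDK; the class name MinimalSvg, the attribute values and the console output are illustrative, and the real code additionally applies the SVG namespace via its SVG helper class.

import java.io.StringWriter;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
import org.w3c.dom.Document;
import org.w3c.dom.Element;

public class MinimalSvg {
    public static void main(final String[] args) throws Exception {
        final Document document = DocumentBuilderFactory.newInstance().newDocumentBuilder().newDocument();
        final Element svgRoot = document.createElement("svg");
        svgRoot.setAttribute("width", "100");
        svgRoot.setAttribute("height", "20");
        document.appendChild(svgRoot);
        // one exon-like rectangle, as g_tr.appendChild(exRect) does above
        final Element exRect = document.createElement("rect");
        exRect.setAttribute("class", "exon");
        exRect.setAttribute("x", "10");
        exRect.setAttribute("y", "5");
        exRect.setAttribute("width", "40");
        exRect.setAttribute("height", "10");
        svgRoot.appendChild(exRect);
        // serialize the DOM, as tr.transform(new DOMSource(...), new StreamResult(...)) does above
        final Transformer tr = TransformerFactory.newInstance().newTransformer();
        final StringWriter out = new StringWriter();
        tr.transform(new DOMSource(document), new StreamResult(out));
        System.out.println(out);
    }
}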

Example 13 with Transcript

Use of com.github.lindenb.jvarkit.util.bio.structure.Transcript in project jvarkit by lindenb.

The class VCFCombineTwoSnvs, method doVcfToVcf.
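
The heart of the method shown below is the codon-combination test around combinedCodon: two SNVs that are each synonymous on their own can yield a different amino acid when they land in the same codon. Here is a simplified, self-contained sketch of that step, using a four-entry excerpt of the standard genetic code in place of jvarkit's PeptideSequence; the class name CombinedCodonDemo, the chosen codon and the mutated positions are illustrative.

import java.util.Map;

public class CombinedCodonDemo {
    public static void main(final String[] args) {
        // tiny excerpt of the standard genetic code; the real tool translates with PeptideSequence.of(...)
        final Map<String, String> codon2aa = Map.of(
                "CGA", "R", "AGA", "R", "CGT", "R", "AGT", "S");
        final String wildCodon = "CGA";
        // variant 1 mutates codon position 0 (C->A), variant 2 mutates position 2 (A->T)
        final StringBuilder combinedCodon = new StringBuilder(wildCodon);
        combinedCodon.setCharAt(0, 'A');
        combinedCodon.setCharAt(2, 'T');
        final String pwild = codon2aa.get(wildCodon);                        // R
        final String p1 = codon2aa.get("AGA");                               // R : synonymous on its own
        final String p2 = codon2aa.get("CGT");                               // R : synonymous on its own
        final String pCombined = codon2aa.get(combinedCodon.toString());     // S : AGT
        if (!pCombined.equals(pwild) && p1.equals(pwild) && p2.equals(pwild)) {
            // same test as in doVcfToVcf: both SNVs are silent alone, their combination is not
            System.out.println("combined_is_nonsynonymous: " + wildCodon + " -> " + combinedCodon + " (" + pwild + " -> " + pCombined + ")");
        }
    }
}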

@Override
protected int doVcfToVcf(final String inputName, File saveAs) {
    BufferedReader bufferedReader = null;
    htsjdk.variant.variantcontext.writer.VariantContextWriter w = null;
    SortingCollection<CombinedMutation> mutations = null;
    CloseableIterator<Variant> varIter = null;
    CloseableIterator<CombinedMutation> mutIter = null;
    final Map<String, SamReader> sample2samReader = new HashMap<>();
    PrintWriter bedPeReport = null;
    try {
        bufferedReader = inputName == null ? IOUtils.openStreamForBufferedReader(stdin()) : IOUtils.openURIForBufferedReading(inputName);
        final VCFUtils.CodecAndHeader cah = VCFUtils.parseHeader(bufferedReader);
        /* get VCF header */
        final VCFHeader header = cah.header;
        final List<String> sampleList = header.getSampleNamesInOrder();
        this.indexedFastaSequenceFile = ReferenceSequenceFileFactory.getReferenceSequenceFile(this.referencePath);
        final SAMSequenceDictionary dict = SequenceDictionaryUtils.extractRequired(this.indexedFastaSequenceFile);
        this.rnaSequenceFactory.setContigToGenomicSequence(C -> getGenomicSequenceForContig(C));
        if (this.bamIn != null) {
            final Set<String> sampleSet = new HashSet<>(sampleList);
            /**
             * unroll and open bam file
             */
            for (final Path bamFile : IOUtils.unrollPaths(Collections.singletonList(this.bamIn.toString()))) {
                LOG.info("opening BAM :" + this.bamIn);
                final SamReader samReader = SamReaderFactory.makeDefault().referenceSequence(this.referencePath).validationStringency(ValidationStringency.LENIENT).open(this.bamIn);
                if (!samReader.hasIndex()) {
                    samReader.close();
                    throw new IOException("Sam file is NOT indexed: " + bamFile);
                }
                final SAMFileHeader samHeader = samReader.getFileHeader();
                if (samHeader.getSequenceDictionary() == null || !SequenceUtil.areSequenceDictionariesEqual(dict, samReader.getFileHeader().getSequenceDictionary())) {
                    samReader.close();
                    throw new JvarkitException.DictionariesAreNotTheSame(dict, samReader.getFileHeader().getSequenceDictionary());
                }
                /* get sample name */
                String sampleName = null;
                for (final SAMReadGroupRecord rg : samHeader.getReadGroups()) {
                    if (rg.getSample() == null)
                        continue;
                    if (sampleName != null && !sampleName.equals(rg.getSample())) {
                        samReader.close();
                        throw new IOException(bamFile + " Contains two samples " + sampleName + " " + rg.getSample());
                    }
                    sampleName = rg.getSample();
                }
                if (sampleName == null) {
                    samReader.close();
                    LOG.warn("no sample in " + bamFile);
                    continue;
                }
                if (!sampleSet.contains(sampleName)) {
                    samReader.close();
                    LOG.warn("no sample " + sampleName + " in vcf. Ignoring " + bamFile);
                    continue;
                }
                sample2samReader.put(sampleName, samReader);
            }
        }
        loadTranscripts();
        this.variants = SortingCollection.newInstance(Variant.class, new VariantCodec(), new VariantComparatorTwo(dict), this.writingSortingCollection.getMaxRecordsInRam(), this.writingSortingCollection.getTmpPaths());
        this.variants.setDestructiveIteration(true);
        ProgressFactory.Watcher<VariantContext> progress1 = ProgressFactory.newInstance().dictionary(header).logger(LOG).build();
        String vcfLine = null;
        while ((vcfLine = bufferedReader.readLine()) != null) {
            final VariantContext ctx = progress1.apply(cah.codec.decode(vcfLine));
            /* discard non SNV variant */
            if (!ctx.isVariant() || ctx.isIndel()) {
                continue;
            }
            /* find the overlapping genes : extend the interval of the variant to include the stop codon */
            final Collection<Transcript> genes = this.knownGenes.getOverlapping(new Interval(ctx.getContig(), Math.max(1, ctx.getStart() - 3), ctx.getEnd() + 3)).stream().flatMap(L -> L.stream()).collect(Collectors.toList());
            final List<Allele> alternateAlleles = ctx.getAlternateAlleles();
            /* loop over overlapping genes */
            for (final Transcript kg : genes) {
                /* loop over available alleles */
                for (int allele_idx = 0; allele_idx < alternateAlleles.size(); ++allele_idx) {
                    final Allele alt = alternateAlleles.get(allele_idx);
                    challenge(ctx, alt, kg, vcfLine);
                }
            }
        }
        progress1.close();
        this.variants.doneAdding();
        bedPeReport = this.bedPePath == null ? new PrintWriter(new NullOuputStream()) : IOUtils.openPathForPrintWriter(this.bedPePath);
        mutations = SortingCollection.newInstance(CombinedMutation.class, new MutationCodec(), new MutationComparatorTwo(dict), this.writingSortingCollection.getMaxRecordsInRam(), this.writingSortingCollection.getTmpPaths());
        mutations.setDestructiveIteration(true);
        final VCFFilterHeaderLine vcfFilterHeaderLine = new VCFFilterHeaderLine("TwoHaplotypes", "(number of reads carrying both mutation) < (reads carrying variant 1 + reads carrying variant 2) ");
        varIter = this.variants.iterator();
        @SuppressWarnings("resource") EqualRangeIterator<Variant> eqVarIter = new EqualRangeIterator<>(varIter, new VariantComparatorOne(dict));
        ProgressFactory.Watcher<Variant> progress2 = ProgressFactory.newInstance().dictionary(header).logger(LOG).build();
        while (eqVarIter.hasNext()) {
            final List<Variant> buffer = eqVarIter.next();
            if (buffer.size() < 2)
                continue;
            for (int i = 0; i + 1 < buffer.size(); ++i) {
                final Variant v1 = buffer.get(i);
                for (int j = i + 1; j < buffer.size(); ++j) {
                    final Variant v2 = buffer.get(j);
                    if (v1.codonStart() != v2.codonStart())
                        continue;
                    if (v1.positionInCodon() == v2.positionInCodon())
                        continue;
                    if (!v1.wildCodon.equals(v2.wildCodon)) {
                        throw new IllegalStateException();
                    }
                    // no sample share the two variants
                    final Set<Integer> sharedSamplesIdx = v1.getSharedSampleIndexes(v2);
                    if (sharedSamplesIdx.isEmpty() && !sampleList.isEmpty())
                        continue;
                    final StringBuilder combinedCodon = new StringBuilder(v1.wildCodon);
                    combinedCodon.setCharAt(v1.positionInCodon(), v1.mutCodon.charAt(v1.positionInCodon()));
                    combinedCodon.setCharAt(v2.positionInCodon(), v2.mutCodon.charAt(v2.positionInCodon()));
                    final String pwild = PeptideSequence.of(v1.wildCodon).toString();
                    final String p1 = PeptideSequence.of(v1.mutCodon).toString();
                    final String p2 = PeptideSequence.of(v2.mutCodon).toString();
                    final String pCombined = PeptideSequence.of(combinedCodon).toString();
                    final String combinedSO;
                    final String combinedType;
                    /* both AA are synonymous, while combined is not */
                    if (!pCombined.equals(pwild) && p1.equals(pwild) && p2.equals(pwild)) {
                        combinedType = "combined_is_nonsynonymous";
                        if (pCombined.equals("*")) {
                            /* http://www.sequenceontology.org/browser/current_svn/term/SO:0001587 */
                            combinedSO = "stop_gained";
                        } else if (pwild.equals("*")) {
                            /* http://www.sequenceontology.org/browser/current_svn/term/SO:0002012 */
                            combinedSO = "stop_lost";
                        } else {
                            /* http://www.sequenceontology.org/miso/current_svn/term/SO:0001992 */
                            combinedSO = "nonsynonymous_variant";
                        }
                    } else if (!pCombined.equals(p1) && !pCombined.equals(p2) && !pCombined.equals(pwild)) {
                        combinedType = "combined_is_new";
                        if (pCombined.equals("*")) {
                            /* http://www.sequenceontology.org/browser/current_svn/term/SO:0001587 */
                            combinedSO = "stop_gained";
                        } else {
                            /* http://www.sequenceontology.org/miso/current_svn/term/SO:0001992 */
                            combinedSO = "nonsynonymous_variant";
                        }
                    } else {
                        combinedType = null;
                        combinedSO = null;
                    }
                    /**
                     * ok, there is something interesting here ,
                     * create two new Mutations carrying the
                     * two variants
                     */
                    if (combinedSO != null) {
                        /**
                         * grantham score is max found combined vs (p1/p2/wild)
                         */
                        int grantham_score = GranthamScore.score(pCombined.charAt(0), pwild.charAt(0));
                        grantham_score = Math.max(grantham_score, GranthamScore.score(pCombined.charAt(0), p1.charAt(0)));
                        grantham_score = Math.max(grantham_score, GranthamScore.score(pCombined.charAt(0), p2.charAt(0)));
                        /**
                         * info that will be displayed in the vcf
                         */
                        final Map<String, Object> info1 = v1.getInfo(v2);
                        final Map<String, Object> info2 = v2.getInfo(v1);
                        // filter for this combined: default it fails the filter
                        String filter = vcfFilterHeaderLine.getID();
                        final Map<String, Object> combinedMap = new LinkedHashMap<>();
                        combinedMap.put("CombinedCodon", combinedCodon);
                        combinedMap.put("CombinedAA", pCombined);
                        combinedMap.put("CombinedSO", combinedSO);
                        combinedMap.put("CombinedType", combinedType);
                        combinedMap.put("GranthamScore", grantham_score);
                        info1.putAll(combinedMap);
                        info2.putAll(combinedMap);
                        final Map<String, CoverageInfo> sample2coverageInfo = new HashMap<>(sample2samReader.size());
                        final int chromStart = Math.min(v1.genomicPosition1, v2.genomicPosition1);
                        final int chromEnd = Math.max(v1.genomicPosition1, v2.genomicPosition1);
                        /* get phasing info for each sample*/
                        for (final String sampleName : sample2samReader.keySet()) {
                            final SamReader samReader = sample2samReader.get(sampleName);
                            final CoverageInfo covInfo = new CoverageInfo();
                            sample2coverageInfo.put(sampleName, covInfo);
                            SAMRecordIterator iter = null;
                            try {
                                iter = samReader.query(v1.contig, chromStart, chromEnd, false);
                                while (iter.hasNext()) {
                                    final SAMRecord rec = iter.next();
                                    if (rec.getReadUnmappedFlag())
                                        continue;
                                    if (rec.isSecondaryOrSupplementary())
                                        continue;
                                    if (rec.getDuplicateReadFlag())
                                        continue;
                                    if (rec.getReadFailsVendorQualityCheckFlag())
                                        continue;
                                    // get DEPTh for variant 1
                                    if (rec.getAlignmentStart() <= v1.genomicPosition1 && v1.genomicPosition1 <= rec.getAlignmentEnd()) {
                                        covInfo.depth1++;
                                    }
                                    // get DEPTh for variant 2
                                    if (rec.getAlignmentStart() <= v2.genomicPosition1 && v2.genomicPosition1 <= rec.getAlignmentEnd()) {
                                        covInfo.depth2++;
                                    }
                                    if (rec.getAlignmentEnd() < chromEnd)
                                        continue;
                                    if (rec.getAlignmentStart() > chromStart)
                                        continue;
                                    final Cigar cigar = rec.getCigar();
                                    if (cigar == null)
                                        continue;
                                    final byte[] bases = rec.getReadBases();
                                    if (bases == null)
                                        continue;
                                    int refpos1 = rec.getAlignmentStart();
                                    int readpos = 0;
                                    boolean found_variant1_on_this_read = false;
                                    boolean found_variant2_on_this_read = false;
                                    /**
                                     * loop over cigar
                                     */
                                    for (final CigarElement ce : cigar.getCigarElements()) {
                                        final CigarOperator op = ce.getOperator();
                                        switch(op) {
                                            case P:
                                                continue;
                                            case S:
                                            case I:
                                                readpos += ce.getLength();
                                                break;
                                            case D:
                                            case N:
                                                refpos1 += ce.getLength();
                                                break;
                                            case H:
                                                continue;
                                            case EQ:
                                            case M:
                                            case X:
                                                for (int x = 0; x < ce.getLength(); ++x) {
                                                    if (refpos1 == v1.genomicPosition1 && same(bases[readpos], v1.altAllele)) {
                                                        found_variant1_on_this_read = true;
                                                    } else if (refpos1 == v2.genomicPosition1 && same(bases[readpos], v2.altAllele)) {
                                                        found_variant2_on_this_read = true;
                                                    }
                                                    refpos1++;
                                                    readpos++;
                                                }
                                                break;
                                            default:
                                                throw new IllegalStateException(op.name());
                                        }
                                        /* skip remaining bases after last variant */
                                        if (refpos1 > chromEnd)
                                            break;
                                    }
                                    /* sum-up what we found */
                                    if (found_variant1_on_this_read && found_variant2_on_this_read) {
                                        covInfo.count_reads_having_both_variants++;
                                    } else if (!found_variant1_on_this_read && !found_variant2_on_this_read) {
                                        covInfo.count_reads_having_no_variants++;
                                    } else if (found_variant1_on_this_read) {
                                        covInfo.count_reads_having_variant1++;
                                    } else if (found_variant2_on_this_read) {
                                        covInfo.count_reads_having_variant2++;
                                    }
                                }
                            /* end of loop over reads */
                            } finally {
                                iter.close();
                                iter = null;
                            }
                            info1.put("N_READS_BOTH_VARIANTS_" + sampleName, covInfo.count_reads_having_both_variants);
                            info2.put("N_READS_BOTH_VARIANTS_" + sampleName, covInfo.count_reads_having_both_variants);
                            info1.put("N_READS_NO_VARIANTS_" + sampleName, covInfo.count_reads_having_no_variants);
                            info2.put("N_READS_NO_VARIANTS_" + sampleName, covInfo.count_reads_having_no_variants);
                            info1.put("N_READS_TOTAL_" + sampleName, covInfo.count_reads_having_both_variants + covInfo.count_reads_having_no_variants + covInfo.count_reads_having_variant1 + covInfo.count_reads_having_variant2);
                            info2.put("N_READS_TOTAL_" + sampleName, covInfo.count_reads_having_both_variants + covInfo.count_reads_having_no_variants + covInfo.count_reads_having_variant1 + covInfo.count_reads_having_variant2);
                            // count for variant 1
                            info1.put("N_READS_ONLY_1_" + sampleName, covInfo.count_reads_having_variant1);
                            info1.put("N_READS_ONLY_2_" + sampleName, covInfo.count_reads_having_variant2);
                            info1.put("DEPTH_1_" + sampleName, covInfo.depth1);
                            // inverse previous count
                            info2.put("N_READS_ONLY_1_" + sampleName, covInfo.count_reads_having_variant2);
                            info2.put("N_READS_ONLY_2_" + sampleName, covInfo.count_reads_having_variant1);
                            info2.put("DEPTH_2_" + sampleName, covInfo.depth2);
                            /* number of reads with both variant is greater than
								 * reads carrying only one variant: reset the filter 
								 */
                            if (2 * covInfo.count_reads_having_both_variants > (covInfo.count_reads_having_variant1 + covInfo.count_reads_having_variant2)) {
                                /* reset filter */
                                filter = VCFConstants.UNFILTERED;
                                info1.put("FILTER_1_" + sampleName, ".");
                                info2.put("FILTER_2_" + sampleName, ".");
                            } else {
                                info1.put("FILTER_1_" + sampleName, vcfFilterHeaderLine.getID());
                                info2.put("FILTER_2_" + sampleName, vcfFilterHeaderLine.getID());
                            }
                        }
                        /* end of loop over bams */
                        final CombinedMutation m1 = new CombinedMutation();
                        m1.contig = v1.contig;
                        m1.genomicPosition1 = v1.genomicPosition1;
                        m1.id = v1.id;
                        m1.refAllele = v1.refAllele;
                        m1.altAllele = v1.altAllele;
                        m1.vcfLine = v1.vcfLine;
                        m1.info = mapToString(info1);
                        m1.filter = filter;
                        m1.grantham_score = grantham_score;
                        m1.sampleIndexes.addAll(sharedSamplesIdx);
                        m1.sorting_id = ID_GENERATOR++;
                        mutations.add(m1);
                        final CombinedMutation m2 = new CombinedMutation();
                        m2.contig = v2.contig;
                        m2.genomicPosition1 = v2.genomicPosition1;
                        m2.id = v2.id;
                        m2.refAllele = v2.refAllele;
                        m2.altAllele = v2.altAllele;
                        m2.vcfLine = v2.vcfLine;
                        m2.info = mapToString(info2);
                        m2.filter = filter;
                        m2.grantham_score = grantham_score;
                        m2.sampleIndexes.addAll(sharedSamplesIdx);
                        m2.sorting_id = ID_GENERATOR++;
                        mutations.add(m2);
                        bedPeReport.print(m1.contig);
                        bedPeReport.print('\t');
                        bedPeReport.print(m1.genomicPosition1 - 1);
                        bedPeReport.print('\t');
                        bedPeReport.print(m1.genomicPosition1);
                        bedPeReport.print('\t');
                        bedPeReport.print(m2.contig);
                        bedPeReport.print('\t');
                        bedPeReport.print(m2.genomicPosition1 - 1);
                        bedPeReport.print('\t');
                        bedPeReport.print(m2.genomicPosition1);
                        bedPeReport.print('\t');
                        // name
                        bedPeReport.print(v1.transcriptId);
                        bedPeReport.print('\t');
                        // score
                        bedPeReport.print(grantham_score == GranthamScore.getDefaultScore() ? 0 : (int) ((grantham_score / 255.0) * 1000.0));
                        bedPeReport.print('\t');
                        final Transcript kg = this.knownGenes.getOverlapping(new Interval(v1.getContig(), v1.genomicPosition1 - 1, v1.genomicPosition1 + 1)).stream().flatMap(L -> L.stream()).filter(P -> P.getContig().equals(v1.contig) && P.getId().equals(v1.transcriptId)).findFirst().orElseThrow(IllegalStateException::new);
                        // strand1
                        bedPeReport.print(kg.isNegativeStrand() ? "-" : "+");
                        bedPeReport.print('\t');
                        // strand2
                        bedPeReport.print(kg.isNegativeStrand() ? "-" : "+");
                        bedPeReport.print('\t');
                        if (sharedSamplesIdx.isEmpty()) {
                            bedPeReport.print('.');
                        } else {
                            bedPeReport.print(sharedSamplesIdx.stream().map(I -> sampleList.get(I.intValue())).collect(Collectors.joining(";")));
                        }
                        bedPeReport.print('\t');
                        bedPeReport.print(combinedSO);
                        bedPeReport.print('\t');
                        bedPeReport.print(String.join(":", pwild, p1, p2, pCombined));
                        bedPeReport.println();
                    }
                }
            }
        }
        progress2.close();
        mutations.doneAdding();
        eqVarIter.close();
        eqVarIter = null;
        varIter.close();
        varIter = null;
        variants.cleanup();
        variants = null;
        bedPeReport.flush();
        bedPeReport.close();
        bedPeReport = null;
        final VCFHeader header2 = new VCFHeader(header);
        header2.addMetaDataLine(new VCFHeaderLine(getProgramName() + "AboutQUAL", "QUAL is filled with Grantham Score  http://www.ncbi.nlm.nih.gov/pubmed/4843792"));
        final StringBuilder infoDesc = new StringBuilder("Variant affected by two distinct mutation. Format is defined in the INFO column. ");
        final VCFInfoHeaderLine CodonVariantHeader = new VCFInfoHeaderLine("CodonVariant", VCFHeaderLineCount.UNBOUNDED, VCFHeaderLineType.String, infoDesc.toString());
        header2.addMetaDataLine(CodonVariantHeader);
        final VCFInfoHeaderLine CodonSampleHeader = new VCFInfoHeaderLine("Samples", VCFHeaderLineCount.UNBOUNDED, VCFHeaderLineType.String, "Samples that could be affected");
        header2.addMetaDataLine(CodonSampleHeader);
        JVarkitVersion.getInstance().addMetaData(this, header2);
        if (!sample2samReader.isEmpty()) {
            header2.addMetaDataLine(vcfFilterHeaderLine);
        }
        w = this.writingVariantsDelegate.dictionary(dict).open(IOUtil.toPath(saveAs));
        w.writeHeader(header2);
        ProgressFactory.Watcher<CombinedMutation> progress3 = ProgressFactory.newInstance().dictionary(header).logger(LOG).build();
        mutIter = mutations.iterator();
        EqualRangeIterator<CombinedMutation> eqRangeMutIter = new EqualRangeIterator<>(mutIter, new MutationComparatorOne(dict));
        while (eqRangeMutIter.hasNext()) {
            final List<CombinedMutation> mBuffer = eqRangeMutIter.next();
            if (mBuffer.isEmpty())
                break;
            progress3.apply(mBuffer.get(0));
            // default Grantham score used to fill QUAL (-1 means unset)
            int grantham_score = -1;
            // default filter is the FAIL filter; cleared below if at least one combination passes
            String filter = vcfFilterHeaderLine.getID();
            final CombinedMutation first = mBuffer.get(0);
            final Set<String> info = new HashSet<>();
            final VariantContext ctx = cah.codec.decode(first.vcfLine);
            final VariantContextBuilder vcb = new VariantContextBuilder(ctx);
            vcb.chr(first.contig);
            vcb.start(first.genomicPosition1);
            vcb.stop(first.genomicPosition1 + first.refAllele.length() - 1);
            if (!first.id.equals(VCFConstants.EMPTY_ID_FIELD))
                vcb.id(first.id);
            for (final CombinedMutation m : mBuffer) {
                info.add(m.info);
                grantham_score = Math.max(grantham_score, m.grantham_score);
                if (VCFConstants.UNFILTERED.equals(m.filter)) {
                    // at least one SNP is OK on this line
                    filter = null;
                }
            }
            if (!sampleList.isEmpty()) {
                vcb.attribute(CodonSampleHeader.getID(), new ArrayList<>(mBuffer.stream().flatMap(S -> S.sampleIndexes.stream()).map(IDX -> sampleList.get(IDX)).collect(Collectors.toSet())));
            }
            vcb.unfiltered();
            if (filter != null && !sample2samReader.isEmpty()) {
                vcb.filter(filter);
            } else {
                vcb.passFilters();
            }
            vcb.attribute(CodonVariantHeader.getID(), new ArrayList<String>(info));
            if (grantham_score > 0) {
                vcb.log10PError(grantham_score / -10.0);
            } else {
                vcb.log10PError(VariantContext.NO_LOG10_PERROR);
            }
            w.add(vcb.make());
        }
        progress3.close();
        eqRangeMutIter.close();
        mutIter.close();
        mutations.cleanup();
        mutations = null;
        return RETURN_OK;
    } catch (final Throwable err) {
        LOG.error(err);
        return -1;
    } finally {
        CloserUtil.close(this.indexedFastaSequenceFile);
        CloserUtil.close(mutIter);
        CloserUtil.close(varIter);
        CloserUtil.close(bedPeReport);
        if (this.variants != null)
            this.variants.cleanup();
        if (mutations != null)
            mutations.cleanup();
        this.variants = null;
        for (SamReader r : sample2samReader.values()) CloserUtil.close(r);
        CloserUtil.close(w);
        CloserUtil.close(bufferedReader);
    }
}
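
Note on the QUAL encoding above: log10PError is set to grantham_score / -10.0, and htsjdk reports QUAL as -10 * log10PError, so the written QUAL equals the maximum Grantham score of the combined mutations. A minimal, self-contained sketch of that conversion (hypothetical demo class, not part of jvarkit):

public class GranthamQualDemo {
    public static void main(final String[] args) {
        // hypothetical example value: Grantham distance for a Cys->Trp substitution
        final int granthamScore = 215;
        // encoding used above: store the score as a phred-like error probability
        final double log10PError = granthamScore / -10.0;
        // htsjdk's VariantContext.getPhredScaledQual() is -10 * log10PError,
        // so the QUAL column prints the Grantham score back out
        final double qual = -10.0 * log10PError;
        System.out.printf("log10PError=%.1f QUAL=%.1f%n", log10PError, qual);
    }
}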
Also used : WritingVariantsDelegate(com.github.lindenb.jvarkit.variant.variantcontext.writer.WritingVariantsDelegate) Allele(htsjdk.variant.variantcontext.Allele) Program(com.github.lindenb.jvarkit.util.jcommander.Program) IOUtil(htsjdk.samtools.util.IOUtil) Transcript(com.github.lindenb.jvarkit.util.bio.structure.Transcript) VCFHeader(htsjdk.variant.vcf.VCFHeader) CigarElement(htsjdk.samtools.CigarElement) CigarOperator(htsjdk.samtools.CigarOperator) GenomicSequence(com.github.lindenb.jvarkit.util.picard.GenomicSequence) SAMFileHeader(htsjdk.samtools.SAMFileHeader) ReferenceSequenceFile(htsjdk.samtools.reference.ReferenceSequenceFile) DataOutputStream(java.io.DataOutputStream) AbstractDataCodec(com.github.lindenb.jvarkit.util.picard.AbstractDataCodec) Map(java.util.Map) Path(java.nio.file.Path) CloserUtil(htsjdk.samtools.util.CloserUtil) PrintWriter(java.io.PrintWriter) SequenceDictionaryUtils(com.github.lindenb.jvarkit.util.bio.SequenceDictionaryUtils) GranthamScore(com.github.lindenb.jvarkit.util.bio.GranthamScore) IntervalTreeMap(htsjdk.samtools.util.IntervalTreeMap) SAMRecordIterator(htsjdk.samtools.SAMRecordIterator) Collection(java.util.Collection) Logger(com.github.lindenb.jvarkit.util.log.Logger) Set(java.util.Set) Collectors(java.util.stream.Collectors) JvarkitException(com.github.lindenb.jvarkit.lang.JvarkitException) SAMRecord(htsjdk.samtools.SAMRecord) ReferenceSequenceFileFactory(htsjdk.samtools.reference.ReferenceSequenceFileFactory) List(java.util.List) SAMReadGroupRecord(htsjdk.samtools.SAMReadGroupRecord) VariantContextWriter(htsjdk.variant.variantcontext.writer.VariantContextWriter) VCFInfoHeaderLine(htsjdk.variant.vcf.VCFInfoHeaderLine) VariantContext(htsjdk.variant.variantcontext.VariantContext) VCFHeaderLineCount(htsjdk.variant.vcf.VCFHeaderLineCount) SamReaderFactory(htsjdk.samtools.SamReaderFactory) VariantContextBuilder(htsjdk.variant.variantcontext.VariantContextBuilder) Genotype(htsjdk.variant.variantcontext.Genotype) VCFHeaderLine(htsjdk.variant.vcf.VCFHeaderLine) DataInputStream(java.io.DataInputStream) VCFUtils(com.github.lindenb.jvarkit.util.vcf.VCFUtils) Cigar(htsjdk.samtools.Cigar) CloseableIterator(htsjdk.samtools.util.CloseableIterator) PeptideSequence(com.github.lindenb.jvarkit.util.bio.structure.PeptideSequence) SequenceUtil(htsjdk.samtools.util.SequenceUtil) ContigNameConverter(com.github.lindenb.jvarkit.util.bio.fasta.ContigNameConverter) VCFIterator(htsjdk.variant.vcf.VCFIterator) Parameter(com.beust.jcommander.Parameter) NullOuputStream(com.github.lindenb.jvarkit.io.NullOuputStream) AcidNucleics(com.github.lindenb.jvarkit.util.bio.AcidNucleics) HashMap(java.util.HashMap) OptionalInt(java.util.OptionalInt) ValidationStringency(htsjdk.samtools.ValidationStringency) TreeSet(java.util.TreeSet) ParametersDelegate(com.beust.jcommander.ParametersDelegate) RNASequenceFactory(com.github.lindenb.jvarkit.util.bio.structure.RNASequenceFactory) ArrayList(java.util.ArrayList) HashSet(java.util.HashSet) LinkedHashMap(java.util.LinkedHashMap) ContigDictComparator(com.github.lindenb.jvarkit.util.samtools.ContigDictComparator) Interval(htsjdk.samtools.util.Interval) DelegateCharSequence(com.github.lindenb.jvarkit.lang.DelegateCharSequence) IOUtils(com.github.lindenb.jvarkit.io.IOUtils) Launcher(com.github.lindenb.jvarkit.util.jcommander.Launcher) WeakHashMap(java.util.WeakHashMap) VCFConstants(htsjdk.variant.vcf.VCFConstants) Locatable(htsjdk.samtools.util.Locatable) SortingCollection(htsjdk.samtools.util.SortingCollection) 
VCFFilterHeaderLine(htsjdk.variant.vcf.VCFFilterHeaderLine) VCFHeaderLineType(htsjdk.variant.vcf.VCFHeaderLineType) RNASequence(com.github.lindenb.jvarkit.util.bio.structure.RNASequence) SAMSequenceDictionary(htsjdk.samtools.SAMSequenceDictionary) ProgressFactory(com.github.lindenb.jvarkit.util.log.ProgressFactory) IOException(java.io.IOException) JVarkitVersion(com.github.lindenb.jvarkit.util.JVarkitVersion) SamReader(htsjdk.samtools.SamReader) File(java.io.File) GtfReader(com.github.lindenb.jvarkit.util.bio.structure.GtfReader) EqualRangeIterator(com.github.lindenb.jvarkit.util.iterator.EqualRangeIterator) BufferedReader(java.io.BufferedReader) Comparator(java.util.Comparator) Collections(java.util.Collections) VCFHeaderLine(htsjdk.variant.vcf.VCFHeaderLine) SAMRecordIterator(htsjdk.samtools.SAMRecordIterator) HashMap(java.util.HashMap) LinkedHashMap(java.util.LinkedHashMap) WeakHashMap(java.util.WeakHashMap) ProgressFactory(com.github.lindenb.jvarkit.util.log.ProgressFactory) EqualRangeIterator(com.github.lindenb.jvarkit.util.iterator.EqualRangeIterator) LinkedHashMap(java.util.LinkedHashMap) HashSet(java.util.HashSet) CigarOperator(htsjdk.samtools.CigarOperator) CigarElement(htsjdk.samtools.CigarElement) VariantContextWriter(htsjdk.variant.variantcontext.writer.VariantContextWriter) SAMRecord(htsjdk.samtools.SAMRecord) SAMFileHeader(htsjdk.samtools.SAMFileHeader) Interval(htsjdk.samtools.util.Interval) VCFUtils(com.github.lindenb.jvarkit.util.vcf.VCFUtils) SAMReadGroupRecord(htsjdk.samtools.SAMReadGroupRecord) VariantContext(htsjdk.variant.variantcontext.VariantContext) SAMSequenceDictionary(htsjdk.samtools.SAMSequenceDictionary) SamReader(htsjdk.samtools.SamReader) NullOuputStream(com.github.lindenb.jvarkit.io.NullOuputStream) VCFFilterHeaderLine(htsjdk.variant.vcf.VCFFilterHeaderLine) VCFHeader(htsjdk.variant.vcf.VCFHeader) PrintWriter(java.io.PrintWriter) Path(java.nio.file.Path) Transcript(com.github.lindenb.jvarkit.util.bio.structure.Transcript) IOException(java.io.IOException) VCFInfoHeaderLine(htsjdk.variant.vcf.VCFInfoHeaderLine) Allele(htsjdk.variant.variantcontext.Allele) Cigar(htsjdk.samtools.Cigar) VariantContextBuilder(htsjdk.variant.variantcontext.VariantContextBuilder) BufferedReader(java.io.BufferedReader)

Example 14 with Transcript

use of com.github.lindenb.jvarkit.util.bio.structure.Transcript in project jvarkit by lindenb.

the class VCFCombineTwoSnvs method loadTranscripts.

/**
 * load transcripts from the GTF into the knownGenes interval map
 */
private void loadTranscripts() throws IOException {
    GtfReader gtfReader = null;
    try {
        final SAMSequenceDictionary dict = SequenceDictionaryUtils.extractRequired(this.indexedFastaSequenceFile);
        final ContigNameConverter ctgNameConverter = ContigNameConverter.fromOneDictionary(dict);
        LOG.info("loading genes from " + this.gtfPath);
        gtfReader = new GtfReader(this.gtfPath);
        gtfReader.setContigNameConverter(ctgNameConverter);
        gtfReader.getAllGenes().stream().flatMap(G -> G.getTranscripts().stream()).filter(T -> T.hasStrand() && T.hasCDS()).forEach(T -> {
            // use a 1-based interval keyed on the transcript
            final Interval interval = new Interval(T);
            List<Transcript> lkg = this.knownGenes.get(interval);
            if (lkg == null) {
                lkg = new ArrayList<>(2);
                this.knownGenes.put(interval, lkg);
            }
            lkg.add(T);
        });
    } finally {
        CloserUtil.close(gtfReader);
    }
}
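
The knownGenes field filled here is an IntervalTreeMap<List<Transcript>> keyed on the transcript's 1-based interval; the doWork() code above queries it with getOverlapping() and flattens the lists. A small sketch of that lookup pattern (hypothetical helper, assuming the map was populated by loadTranscripts()):

import java.util.List;
import java.util.stream.Collectors;
import htsjdk.samtools.util.Interval;
import htsjdk.samtools.util.IntervalTreeMap;
import com.github.lindenb.jvarkit.util.bio.structure.Transcript;

public class TranscriptLookupSketch {
    /** return all transcripts whose interval overlaps the given 1-based position */
    static List<Transcript> transcriptsAt(final IntervalTreeMap<List<Transcript>> knownGenes, final String contig, final int pos1) {
        return knownGenes.getOverlapping(new Interval(contig, pos1, pos1)) // Collection<List<Transcript>>
                .stream()
                .flatMap(List::stream)
                .collect(Collectors.toList());
    }
}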
Also used : Transcript(com.github.lindenb.jvarkit.util.bio.structure.Transcript) GtfReader(com.github.lindenb.jvarkit.util.bio.structure.GtfReader) SAMSequenceDictionary(htsjdk.samtools.SAMSequenceDictionary) ContigNameConverter(com.github.lindenb.jvarkit.util.bio.fasta.ContigNameConverter) Interval(htsjdk.samtools.util.Interval)

Example 15 with Transcript

use of com.github.lindenb.jvarkit.util.bio.structure.Transcript in project jvarkit by lindenb.

the class VcfScanUpstreamOrf method doVcfToVcf.

@Override
protected int doVcfToVcf(final String inputName, final VCFIterator iter, final VariantContextWriter out) {
    try {
        /**
         * build vcf header
         */
        final VCFHeader header = iter.getHeader();
        header.addMetaDataLine(this.infoAddATG);
        header.addMetaDataLine(this.infoAddSTOP);
        header.addMetaDataLine(this.infoDelATG);
        header.addMetaDataLine(this.infoDelSTOP);
        header.addMetaDataLine(this.infoKozakAlteration);
        JVarkitVersion.getInstance().addMetaData(this, header);
        out.writeHeader(header);
        while (iter.hasNext()) {
            final VariantContext ctx = iter.next();
            if (!ctx.isVariant()) {
                if (!this.print_uorf_only)
                    out.add(ctx);
                continue;
            }
            if (!ctx.isSNP()) {
                if (!this.print_uorf_only)
                    out.add(ctx);
                continue;
            }
            final String refContig = this.refCtgNameConverter.apply(ctx.getContig());
            if (StringUtils.isBlank(refContig)) {
                if (!this.print_uorf_only)
                    out.add(ctx);
                continue;
            }
            /* new reference sequence */
            if (this.genomicSequence == null || !this.genomicSequence.getChrom().equals(refContig)) {
                this.genomicSequence = new GenomicSequence(this.indexedFastaSequenceFile, refContig);
            }
            final Interval interval = new Interval(refContig, ctx.getStart(), ctx.getEnd());
            final List<Transcript> kgGenes = this.transcriptMap.getOverlapping(interval).stream().flatMap(C -> C.stream()).collect(Collectors.toList());
            if (kgGenes.isEmpty()) {
                if (!this.print_uorf_only)
                    out.add(ctx);
                continue;
            }
            final List<UpstreamORF> uorfs = kgGenes.stream().map(KG -> new TranscriptRNA(KG)).filter(KG -> KG.containsGenomicPos0(ctx.getStart() - 1)).map(KG -> new UpstreamORF(KG)).filter(KG -> KG.containsGenomicPos0(ctx.getStart() - 1)).sorted((A, B) -> Integer.compare(A.getChromStart(), B.getChromStart())).collect(Collectors.toList());
            if (uorfs.isEmpty()) {
                if (!this.print_uorf_only)
                    out.add(ctx);
                continue;
            }
            final List<MutatedUTR> mutatedList = new ArrayList<>();
            for (final UpstreamORF uorf : uorfs) {
                for (int alt_idx = 1 /* 0 == REF */; alt_idx < ctx.getAlleles().size(); ++alt_idx) {
                    final Allele alt_allele = ctx.getAlleles().get(alt_idx);
                    if (alt_allele.isSymbolic() || !alt_allele.isCalled() || alt_allele.length() != 1)
                        continue;
                    if (!AcidNucleics.isATGC(alt_allele.getDisplayBases()[0]))
                        continue;
                    final MutatedUTR mutated = new MutatedUTR(uorf, ctx, alt_idx);
                    mutated.invoke();
                    mutatedList.add(mutated);
                }
            }
            if (mutatedList.isEmpty() || mutatedList.stream().noneMatch(M -> M.isChanging())) {
                if (!this.print_uorf_only)
                    out.add(ctx);
                continue;
            }
            final VariantContextBuilder vcb = new VariantContextBuilder(ctx);
            List<String> ann = new ArrayList<>(mutatedList.stream().flatMap(M -> M.remove_atg_set.stream()).collect(Collectors.toCollection(LinkedHashSet::new)));
            if (!ann.isEmpty()) {
                vcb.attribute(this.infoDelATG.getID(), ann);
            }
            ann = new ArrayList<>(mutatedList.stream().flatMap(M -> M.remove_stop_set.stream()).collect(Collectors.toCollection(LinkedHashSet::new)));
            if (!ann.isEmpty()) {
                vcb.attribute(this.infoDelSTOP.getID(), ann);
            }
            ann = new ArrayList<>(mutatedList.stream().flatMap(M -> M.denovo_atg_set.stream()).collect(Collectors.toCollection(LinkedHashSet::new)));
            if (!ann.isEmpty()) {
                vcb.attribute(this.infoAddATG.getID(), ann);
            }
            ann = new ArrayList<>(mutatedList.stream().flatMap(M -> M.denovo_stop_set.stream()).collect(Collectors.toCollection(LinkedHashSet::new)));
            if (!ann.isEmpty()) {
                vcb.attribute(this.infoAddSTOP.getID(), ann);
            }
            ann = new ArrayList<>(mutatedList.stream().flatMap(M -> M.kozak_alterations_set.stream()).collect(Collectors.toCollection(LinkedHashSet::new)));
            if (!ann.isEmpty()) {
                vcb.attribute(this.infoKozakAlteration.getID(), ann);
            }
            out.add(vcb.make());
        }
        return 0;
    } catch (final Exception err) {
        LOG.error(err);
        return -1;
    } finally {
    }
}
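
The attribute-setting block above repeats the same idiom five times: de-duplicate the collected annotation strings with a LinkedHashSet (keeping insertion order) and only write the INFO attribute when something was found. A hedged sketch of that idiom factored into a helper (hypothetical method, not part of jvarkit):

import java.util.ArrayList;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import htsjdk.variant.variantcontext.VariantContextBuilder;

public class InfoAttributeSketch {
    /** set 'infoId' on the builder only if 'values' yields at least one distinct string */
    static void putIfAny(final VariantContextBuilder vcb, final String infoId, final Stream<String> values) {
        final List<String> ann = new ArrayList<>(values.collect(Collectors.toCollection(LinkedHashSet::new)));
        if (!ann.isEmpty()) {
            vcb.attribute(infoId, ann);
        }
    }
}

With such a helper, each block above would reduce to a single call, e.g. putIfAny(vcb, this.infoDelATG.getID(), mutatedList.stream().flatMap(M -> M.remove_atg_set.stream())).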
Also used : Allele(htsjdk.variant.variantcontext.Allele) Arrays(java.util.Arrays) Program(com.github.lindenb.jvarkit.util.jcommander.Program) Transcript(com.github.lindenb.jvarkit.util.bio.structure.Transcript) VCFHeader(htsjdk.variant.vcf.VCFHeader) AbstractCharSequence(com.github.lindenb.jvarkit.lang.AbstractCharSequence) GenomicSequence(com.github.lindenb.jvarkit.util.picard.GenomicSequence) ReferenceSequenceFile(htsjdk.samtools.reference.ReferenceSequenceFile) Map(java.util.Map) Path(java.nio.file.Path) CloserUtil(htsjdk.samtools.util.CloserUtil) PrintWriter(java.io.PrintWriter) SequenceDictionaryUtils(com.github.lindenb.jvarkit.util.bio.SequenceDictionaryUtils) IntervalTreeMap(htsjdk.samtools.util.IntervalTreeMap) Logger(com.github.lindenb.jvarkit.util.log.Logger) Set(java.util.Set) Collectors(java.util.stream.Collectors) ReferenceSequenceFileFactory(htsjdk.samtools.reference.ReferenceSequenceFileFactory) List(java.util.List) KozakSequence(com.github.lindenb.jvarkit.util.bio.KozakSequence) StringUtils(com.github.lindenb.jvarkit.lang.StringUtils) VariantContextWriter(htsjdk.variant.variantcontext.writer.VariantContextWriter) VCFInfoHeaderLine(htsjdk.variant.vcf.VCFInfoHeaderLine) VariantContext(htsjdk.variant.variantcontext.VariantContext) VCFHeaderLineCount(htsjdk.variant.vcf.VCFHeaderLineCount) VariantContextBuilder(htsjdk.variant.variantcontext.VariantContextBuilder) ContigNameConverter(com.github.lindenb.jvarkit.util.bio.fasta.ContigNameConverter) VCFIterator(htsjdk.variant.vcf.VCFIterator) Parameter(com.beust.jcommander.Parameter) Exon(com.github.lindenb.jvarkit.util.bio.structure.Exon) AcidNucleics(com.github.lindenb.jvarkit.util.bio.AcidNucleics) ArrayList(java.util.ArrayList) LinkedHashMap(java.util.LinkedHashMap) ContigDictComparator(com.github.lindenb.jvarkit.util.samtools.ContigDictComparator) Interval(htsjdk.samtools.util.Interval) GeneticCode(com.github.lindenb.jvarkit.util.bio.GeneticCode) LinkedHashSet(java.util.LinkedHashSet) Locatable(htsjdk.samtools.util.Locatable) VCFHeaderLineType(htsjdk.variant.vcf.VCFHeaderLineType) UTR(com.github.lindenb.jvarkit.util.bio.structure.UTR) SAMSequenceDictionary(htsjdk.samtools.SAMSequenceDictionary) ProgressFactory(com.github.lindenb.jvarkit.util.log.ProgressFactory) JVarkitVersion(com.github.lindenb.jvarkit.util.JVarkitVersion) GtfReader(com.github.lindenb.jvarkit.util.bio.structure.GtfReader) OnePassVcfLauncher(com.github.lindenb.jvarkit.jcommander.OnePassVcfLauncher) Paranoid(com.github.lindenb.jvarkit.lang.Paranoid) Comparator(java.util.Comparator) Algorithms(com.github.lindenb.jvarkit.util.Algorithms) ArchiveFactory(com.github.lindenb.jvarkit.io.ArchiveFactory) LinkedHashSet(java.util.LinkedHashSet) Transcript(com.github.lindenb.jvarkit.util.bio.structure.Transcript) GenomicSequence(com.github.lindenb.jvarkit.util.picard.GenomicSequence) ArrayList(java.util.ArrayList) VariantContext(htsjdk.variant.variantcontext.VariantContext) Allele(htsjdk.variant.variantcontext.Allele) VariantContextBuilder(htsjdk.variant.variantcontext.VariantContextBuilder) VCFHeader(htsjdk.variant.vcf.VCFHeader) Interval(htsjdk.samtools.util.Interval)

Aggregations

Transcript (com.github.lindenb.jvarkit.util.bio.structure.Transcript): 21
Parameter (com.beust.jcommander.Parameter): 18
ContigNameConverter (com.github.lindenb.jvarkit.util.bio.fasta.ContigNameConverter): 18
GtfReader (com.github.lindenb.jvarkit.util.bio.structure.GtfReader): 18
Program (com.github.lindenb.jvarkit.util.jcommander.Program): 18
Logger (com.github.lindenb.jvarkit.util.log.Logger): 18
Path (java.nio.file.Path): 18
List (java.util.List): 18
Collectors (java.util.stream.Collectors): 17
SAMSequenceDictionary (htsjdk.samtools.SAMSequenceDictionary): 16
CloserUtil (htsjdk.samtools.util.CloserUtil): 16
VariantContext (htsjdk.variant.variantcontext.VariantContext): 15
Set (java.util.Set): 14
SequenceDictionaryUtils (com.github.lindenb.jvarkit.util.bio.SequenceDictionaryUtils): 13
Interval (htsjdk.samtools.util.Interval): 13
VCFHeader (htsjdk.variant.vcf.VCFHeader): 13
ArrayList (java.util.ArrayList): 13
StringUtils (com.github.lindenb.jvarkit.lang.StringUtils): 12
Launcher (com.github.lindenb.jvarkit.util.jcommander.Launcher): 12
IntervalTreeMap (htsjdk.samtools.util.IntervalTreeMap): 12