
Example 21 with VCFFilterHeaderLine

use of htsjdk.variant.vcf.VCFFilterHeaderLine in project jvarkit by lindenb.

From the class VcfStage, method buildFilterHeaderTab.

/**
 * build a table describing the FILTER column
 */
private Tab buildFilterHeaderTab(final VCFHeader header) {
    final TableView<VCFFilterHeaderLine> table = new TableView<>(FXCollections.observableArrayList(header.getFilterLines()));
    table.getColumns().add(makeColumn("ID", F -> F.getID()));
    table.getColumns().add(makeColumn("Description", F -> F.getDescription()));
    final Tab tab = new Tab("FILTER", table);
    tab.setClosable(false);
    table.setPlaceholder(new Label("No FILTER defined."));
    return tab;
}
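
The makeColumn(...) helper used above is internal to jvarkit and its body is not shown on this page. A minimal sketch of what such a factory might look like, assuming a plain JavaFX TableColumn whose cell value is produced by a Function and wrapped in a ReadOnlyObjectWrapper (both types appear in the example's imports); the class name TableColumns and the method signature are hypothetical:

import java.util.function.Function;
import javafx.beans.property.ReadOnlyObjectWrapper;
import javafx.scene.control.TableColumn;

public class TableColumns {
    // hypothetical stand-in for the jvarkit-internal makeColumn helper
    public static <ROW, CELL> TableColumn<ROW, CELL> makeColumn(final String title, final Function<ROW, CELL> extractor) {
        final TableColumn<ROW, CELL> col = new TableColumn<>(title);
        // read-only cell values: extract from the row object and wrap as an ObservableValue
        col.setCellValueFactory(features -> new ReadOnlyObjectWrapper<CELL>(extractor.apply(features.getValue())));
        return col;
    }
}

With such a helper, table.getColumns().add(makeColumn("ID", F -> F.getID())) builds a read-only "ID" column over the VCFFilterHeaderLine rows.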

Example 22 with VCFFilterHeaderLine

use of htsjdk.variant.vcf.VCFFilterHeaderLine in project jvarkit by lindenb.

From the class VCFReplaceTag, method doVcfToVcf.

@Override
protected int doVcfToVcf(final String inputName, final VcfIterator r, final VariantContextWriter w) {
    final VCFHeader header = r.getHeader();
    final HashSet<VCFHeaderLine> copyMeta = new HashSet<>(header.getMetaDataInInputOrder());
    for (final String key : this.transformMap.keySet()) {
        switch(this.replaceTypeNo) {
            case // INFO
            0:
                {
                    final VCFInfoHeaderLine info = header.getInfoHeaderLine(key);
                    if (info != null) {
                        copyMeta.remove(info);
                        copyMeta.add(VCFUtils.renameVCFInfoHeaderLine(info, this.transformMap.get(key)));
                    }
                    break;
                }
            case // FORMAT
            1:
                {
                    final VCFFormatHeaderLine fmt = header.getFormatHeaderLine(key);
                    if (fmt != null) {
                        copyMeta.remove(fmt);
                        copyMeta.add(VCFUtils.renameVCFFormatHeaderLine(fmt, this.transformMap.get(key)));
                    }
                    break;
                }
            case // FILTER
            2:
                {
                    final VCFFilterHeaderLine filter = header.getFilterHeaderLine(key);
                    if (filter != null) {
                        copyMeta.remove(filter);
                        copyMeta.add(VCFUtils.renameVCFFilterHeaderLine(filter, this.transformMap.get(key)));
                    }
                    break;
                }
            default:
                throw new IllegalStateException("" + this.replaceTypeNo);
        }
    }
    final VCFHeader h2 = new VCFHeader(copyMeta, header.getSampleNamesInOrder());
    addMetaData(h2);
    final SAMSequenceDictionaryProgress progress = new SAMSequenceDictionaryProgress(h2);
    w.writeHeader(h2);
    while (r.hasNext()) {
        VariantContext ctx = progress.watch(r.next());
        VariantContextBuilder b = new VariantContextBuilder(ctx);
        switch(this.replaceTypeNo) {
            case // INFO
            0:
                {
                    for (String key : this.transformMap.keySet()) {
                        Object o = ctx.getAttribute(key);
                        if (o != null) {
                            b.rmAttribute(key);
                            b.attribute(this.transformMap.get(key), o);
                        }
                    }
                    break;
                }
            case // FORMAT
            1:
                {
                    List<Genotype> newgenotypes = new ArrayList<>(ctx.getNSamples());
                    for (int i = 0; i < ctx.getNSamples(); ++i) {
                        Genotype g = ctx.getGenotype(i);
                        Map<String, Object> atts = g.getExtendedAttributes();
                        GenotypeBuilder gb = new GenotypeBuilder(g);
                        for (String key : this.transformMap.keySet()) {
                            Object o = atts.get(key);
                            if (o != null) {
                                atts.remove(key);
                                atts.put(this.transformMap.get(key), o);
                            }
                        }
                        gb.attributes(atts);
                        newgenotypes.add(gb.make());
                    }
                    b.genotypes(newgenotypes);
                    break;
                }
            case // FILTER
            2:
                {
                    Set<String> filters = new HashSet<>(ctx.getFilters());
                    for (String key : this.transformMap.keySet()) {
                        if (filters.contains(key)) {
                            filters.remove(key);
                            filters.add(this.transformMap.get(key));
                        }
                    }
                    b.filters(filters);
                    break;
                }
            default:
                throw new IllegalStateException("" + this.replaceTypeNo);
        }
        w.add(b.make());
        if (w.checkError())
            break;
    }
    progress.finish();
    LOG.info("done");
    return 0;
}
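
The VCFUtils.rename*HeaderLine(...) helpers above are jvarkit utilities whose bodies are not part of this example. A minimal sketch of what the FILTER variant could look like, assuming it simply rebuilds the header line under a new ID while keeping its description; the class name and the main method below are illustrative only:

import htsjdk.variant.vcf.VCFFilterHeaderLine;

public class RenameFilterLineSketch {
    /** hypothetical equivalent of VCFUtils.renameVCFFilterHeaderLine: copy the line under a new ID. */
    public static VCFFilterHeaderLine renameVCFFilterHeaderLine(final VCFFilterHeaderLine src, final String newId) {
        return new VCFFilterHeaderLine(newId, src.getDescription());
    }

    public static void main(final String[] args) {
        final VCFFilterHeaderLine original = new VCFFilterHeaderLine("LowQual", "Low quality site");
        final VCFFilterHeaderLine renamed = renameVCFFilterHeaderLine(original, "LOW_QUAL");
        // prints: LOW_QUAL : Low quality site
        System.out.println(renamed.getID() + " : " + renamed.getDescription());
    }
}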

Example 23 with VCFFilterHeaderLine

use of htsjdk.variant.vcf.VCFFilterHeaderLine in project jvarkit by lindenb.

From the class XContaminations, method doWork.

@Override
public int doWork(final List<String> args) {
    long last_save_ms = System.currentTimeMillis();
    if (this.output_as_vcf && !this.use_only_sample_name) {
        LOG.error("cannot write vcf if --sample is not set");
        return -1;
    }
    if (args.size() < 2) {
        LOG.error("Illegal Number of args");
        return -1;
    }
    final Set<File> bamFiles = IOUtils.unrollFiles(args.subList(1, args.size())).stream().map(S -> new File(S)).collect(Collectors.toSet());
    if (bamFiles.isEmpty()) {
        LOG.error("Undefined BAM file(s)");
        return -1;
    }
    SAMRecordIterator iter = null;
    VcfIterator in = null;
    Map<String, SamReader> sample2samReader = new HashMap<>();
    VariantContextWriter vcfw = null;
    try {
        final SamReaderFactory srf = super.createSamReaderFactory();
        if (args.get(0).equals("-")) {
            in = super.openVcfIterator(null);
        } else {
            in = super.openVcfIterator(args.get(0));
        }
        VCFHeader vcfHeader = in.getHeader();
        final SAMSequenceDictionary dict1 = vcfHeader.getSequenceDictionary();
        if (dict1 == null) {
            LOG.error(JvarkitException.VcfDictionaryMissing.getMessage(args.get(0)));
            return -1;
        }
        final Set<String> sampleNames = new HashSet<>(vcfHeader.getSampleNamesInOrder());
        if (sampleNames.isEmpty()) {
            LOG.error("VCF contains no sample");
            return -1;
        }
        for (final File bamFile : bamFiles) {
            LOG.info("Opening " + bamFile);
            final SamReader samReader = srf.open(bamFile);
            final SAMFileHeader samHeader = samReader.getFileHeader();
            final SAMSequenceDictionary dict2 = samHeader.getSequenceDictionary();
            if (dict2 == null) {
                samReader.close();
                LOG.error(JvarkitException.BamDictionaryMissing.getMessage(bamFile.getPath()));
                return -1;
            }
            if (!SequenceUtil.areSequenceDictionariesEqual(dict1, dict2)) {
                samReader.close();
                LOG.error(JvarkitException.DictionariesAreNotTheSame.getMessage(dict1, dict2));
                return -1;
            }
            if (!samReader.hasIndex()) {
                samReader.close();
                LOG.error("sam is not indexed : " + bamFile);
                return -1;
            }
            String sampleName = null;
            for (final SAMReadGroupRecord rgr : samHeader.getReadGroups()) {
                final String s = rgr.getSample();
                if (StringUtil.isBlank(s))
                    continue;
                if (sampleName == null) {
                    sampleName = s;
                } else if (!sampleName.equals(s)) {
                    samReader.close();
                    LOG.error("Cannot handle more than one sample/bam  " + bamFile + " " + sampleName);
                    return -1;
                }
            }
            if (sampleName == null) {
                samReader.close();
                LOG.error("No sample in " + bamFile);
                // skip this bam
                continue;
            }
            if (!sampleNames.contains(sampleName)) {
                samReader.close();
                LOG.error("Not in VCF header: sample " + sampleName + " " + bamFile);
                // skip this bam
                continue;
            }
            if (sample2samReader.containsKey(sampleName)) {
                samReader.close();
                LOG.error("Cannot handle more than one bam/sample: " + bamFile + " " + sampleName);
                return -1;
            }
            sample2samReader.put(sampleName, samReader);
        }
        if (sample2samReader.size() < 2) {
            LOG.error("Not engough BAM/samples. Expected at least two valid BAMs");
            return -1;
        }
        sampleNames.retainAll(sample2samReader.keySet());
        /* create a VCF writer if VCF output was asked */
        final List<SamplePair> sampleListForVcf;
        if (this.output_as_vcf) {
            vcfw = super.openVariantContextWriter(outputFile);
            final Set<VCFHeaderLine> metaData = new HashSet<>();
            metaData.add(new VCFFormatHeaderLine("S1S1", 1, VCFHeaderLineType.Integer, "reads sample 1 supporting sample 1"));
            metaData.add(new VCFFormatHeaderLine("S1S2", 1, VCFHeaderLineType.Integer, "reads sample 1 supporting sample 2"));
            metaData.add(new VCFFormatHeaderLine("S1SO", 1, VCFHeaderLineType.Integer, "reads sample 1 supporting others"));
            metaData.add(new VCFFormatHeaderLine("S2S1", 1, VCFHeaderLineType.Integer, "reads sample 2 supporting sample 1"));
            metaData.add(new VCFFormatHeaderLine("S2S2", 1, VCFHeaderLineType.Integer, "reads sample 2 supporting sample 2"));
            metaData.add(new VCFFormatHeaderLine("S2SO", 1, VCFHeaderLineType.Integer, "reads sample 2 supporting others"));
            metaData.add(new VCFFormatHeaderLine("FR", 1, VCFHeaderLineType.Float, "Fraction. '-1' for unavailable."));
            metaData.add(new VCFFormatHeaderLine("S1A", 1, VCFHeaderLineType.Character, "sample 1 allele"));
            metaData.add(new VCFFormatHeaderLine("S2A", 1, VCFHeaderLineType.Character, "sample 2 allele"));
            metaData.add(new VCFFilterHeaderLine("XCONTAMINATION", "Fraction test is > " + fraction_treshold));
            metaData.add(new VCFFilterHeaderLine("BADSAMPLES", "At least one pair of genotype fails the 'LE' test"));
            metaData.add(new VCFInfoHeaderLine("LE", 1, VCFHeaderLineType.Integer, "number of pair of genotypes having (S1S1<=S1S2 or S2S2<=S2S1)."));
            metaData.add(new VCFInfoHeaderLine("BADSAMPLES", VCFHeaderLineCount.UNBOUNDED, VCFHeaderLineType.String, "Samples founds failing the 'LE' test"));
            sampleListForVcf = new ArrayList<>();
            final List<String> sampleList = new ArrayList<>(sampleNames);
            for (int x = 0; x + 1 < sampleList.size(); ++x) {
                for (int y = x + 1; y < sampleList.size(); ++y) {
                    sampleListForVcf.add(new SamplePair(new SimpleSampleIdenfifier(sampleList.get(x)), new SimpleSampleIdenfifier(sampleList.get(y))));
                }
            }
            final VCFHeader header2 = new VCFHeader(metaData, sampleListForVcf.stream().map(V -> V.getLabel()).sorted().collect(Collectors.toList()));
            header2.setSequenceDictionary(dict1);
            vcfw.writeHeader(header2);
        } else {
            vcfw = null;
            sampleListForVcf = null;
        }
        final Map<SamplePair, SampleAlleles> contaminationTable = new HashMap<>();
        final SAMSequenceDictionaryProgress progress = new SAMSequenceDictionaryProgress(dict1).logger(LOG);
        while (in.hasNext()) {
            final VariantContext ctx = progress.watch(in.next());
            if (!ctx.isSNP() || ctx.isFiltered() || !ctx.isBiallelic() || ctx.isSymbolic() || !this.variantFilter.test(ctx)) {
                continue;
            }
            int count_homref = 0;
            int count_homvar = 0;
            int count_het = 0;
            final Map<String, Genotype> sample2gt = new HashMap<>();
            for (int gidx = 0; gidx < ctx.getNSamples(); ++gidx) {
                final Genotype G = ctx.getGenotype(gidx);
                if (!G.isCalled())
                    continue;
                if (G.isHet()) {
                    // counted here because in use_singleton mode we must be sure there is no het genotype
                    count_het++;
                    if (this.use_singleton && count_het > 0)
                        break;
                } else if (G.isHomVar()) {
                    // here because in use_singleton we must be sure that there is only one hom_var
                    count_homvar++;
                    if (this.use_singleton && count_homvar > 1)
                        break;
                }
                if (G.isFiltered())
                    continue;
                if (!sample2samReader.containsKey(G.getSampleName()))
                    continue;
                if (!sampleNames.contains(G.getSampleName()))
                    continue;
                if (!this.genotypeFilter.test(ctx, G))
                    continue;
                sample2gt.put(G.getSampleName(), G);
            }
            if (this.use_singleton && count_het > 0)
                continue;
            if (this.use_singleton && count_homvar > 1)
                continue;
            if (sample2gt.size() < 2)
                continue;
            // reset and recount
            count_homref = 0;
            count_homvar = 0;
            count_het = 0;
            for (final String sampleName : sample2gt.keySet()) {
                final Genotype G = ctx.getGenotype(sampleName);
                switch(G.getType()) {
                    case HOM_REF:
                        count_homref++;
                        break;
                    case HOM_VAR:
                        count_homvar++;
                        break;
                    case HET:
                        count_het++;
                        break;
                    default:
                        break;
                }
            }
            // singleton check
            if (this.use_singleton && (count_het > 0 || count_homvar != 1)) {
                continue;
            }
            // at least one HOM_REF and one HOM_VAR
            if (count_homref == 0)
                continue;
            if (count_homvar == 0)
                continue;
            final Map<SampleIdentifier, Counter<Character>> sample_identifier_2allelesCount = new HashMap<>();
            /* scan Reads for those Genotype/Samples */
            for (final String sampleName : sample2gt.keySet()) {
                if (!sample2samReader.containsKey(sampleName))
                    continue;
                // no BAM reader is associated with this sample name
                final SamReader samReader = sample2samReader.get(sampleName);
                if (samReader == null)
                    continue;
                final Genotype genotype = sample2gt.get(sampleName);
                if (genotype == null)
                    continue;
                iter = samReader.query(ctx.getContig(), ctx.getStart(), ctx.getEnd(), false);
                while (iter.hasNext()) {
                    final SAMRecord record = iter.next();
                    if (record.getEnd() < ctx.getStart())
                        continue;
                    if (ctx.getEnd() < record.getStart())
                        continue;
                    if (record.getReadUnmappedFlag())
                        continue;
                    if (this.filter.filterOut(record))
                        continue;
                    final SAMReadGroupRecord srgr = record.getReadGroup();
                    // not current sample
                    if (srgr == null)
                        continue;
                    if (!sampleName.equals(srgr.getSample()))
                        continue;
                    final Cigar cigar = record.getCigar();
                    if (cigar == null || cigar.isEmpty())
                        continue;
                    byte[] readSeq = record.getReadBases();
                    if (readSeq == null || readSeq.length == 0)
                        continue;
                    int readPos = record.getReadPositionAtReferencePosition(ctx.getStart());
                    if (readPos < 1)
                        continue;
                    readPos--;
                    if (readPos >= readSeq.length)
                        continue;
                    final char base = Character.toUpperCase((char) readSeq[readPos]);
                    if (base == 'N')
                        continue;
                    final SampleIdentifier sampleIdentifier;
                    if (this.use_only_sample_name) {
                        sampleIdentifier = new SimpleSampleIdenfifier(sampleName);
                    } else {
                        final ShortReadName readName = ShortReadName.parse(record);
                        if (!readName.isValid()) {
                            LOG.info("No a valid read name " + record.getReadName());
                            continue;
                        }
                        sampleIdentifier = new SequencerFlowCellRunLaneSample(readName, sampleName);
                    }
                    Counter<Character> sampleAlleles = sample_identifier_2allelesCount.get(sampleIdentifier);
                    if (sampleAlleles == null) {
                        sampleAlleles = new Counter<Character>();
                        sample_identifier_2allelesCount.put(sampleIdentifier, sampleAlleles);
                    }
                    sampleAlleles.incr(base);
                }
                iter.close();
                iter = null;
            }
            /* end scan reads for this sample */
            /* sum-up data for this SNP */
            final VariantContextBuilder vcb;
            final List<Genotype> genotypeList;
            if (this.output_as_vcf) {
                vcb = new VariantContextBuilder(args.get(0), ctx.getContig(), ctx.getStart(), ctx.getEnd(), ctx.getAlleles());
                if (ctx.hasID())
                    vcb.id(ctx.getID());
                genotypeList = new ArrayList<>();
            } else {
                vcb = null;
                genotypeList = null;
            }
            for (final String sample1 : sample2gt.keySet()) {
                final Genotype g1 = sample2gt.get(sample1);
                final char a1 = g1.getAllele(0).getBaseString().charAt(0);
                for (final String sample2 : sample2gt.keySet()) {
                    if (sample1.compareTo(sample2) >= 0)
                        continue;
                    final Genotype g2 = sample2gt.get(sample2);
                    if (g2.sameGenotype(g1))
                        continue;
                    final char a2 = g2.getAllele(0).getBaseString().charAt(0);
                    for (final SampleIdentifier sfcr1 : sample_identifier_2allelesCount.keySet()) {
                        if (!sfcr1.getSampleName().equals(sample1))
                            continue;
                        final Counter<Character> counter1 = sample_identifier_2allelesCount.get(sfcr1);
                        if (counter1 == null)
                            continue;
                        for (final SampleIdentifier sfcr2 : sample_identifier_2allelesCount.keySet()) {
                            if (!sfcr2.getSampleName().equals(sample2))
                                continue;
                            final SamplePair samplePair = new SamplePair(sfcr1, sfcr2);
                            final Counter<Character> counter2 = sample_identifier_2allelesCount.get(sfcr2);
                            if (counter2 == null)
                                continue;
                            SampleAlleles sampleAlleles = contaminationTable.get(samplePair);
                            if (sampleAlleles == null) {
                                sampleAlleles = new SampleAlleles();
                                contaminationTable.put(samplePair, sampleAlleles);
                                if (!this.output_as_vcf && contaminationTable.size() % 10000 == 0)
                                    LOG.info("n(pairs)=" + contaminationTable.size());
                            }
                            sampleAlleles.number_of_comparaisons++;
                            for (final Character allele : counter1.keySet()) {
                                final long n = counter1.count(allele);
                                if (allele.equals(a1)) {
                                    sampleAlleles.reads_sample1_supporting_sample1 += n;
                                } else if (allele.equals(a2)) {
                                    sampleAlleles.reads_sample1_supporting_sample2 += n;
                                } else {
                                    sampleAlleles.reads_sample1_supporting_other += n;
                                }
                            }
                            for (final Character allele : counter2.keySet()) {
                                final long n = counter2.count(allele);
                                if (allele.equals(a2)) {
                                    sampleAlleles.reads_sample2_supporting_sample2 += n;
                                } else if (allele.equals(a1)) {
                                    sampleAlleles.reads_sample2_supporting_sample1 += n;
                                } else {
                                    sampleAlleles.reads_sample2_supporting_other += n;
                                }
                            }
                        }
                    }
                }
            }
            if (this.output_as_vcf) {
                final Set<String> bad_samples = new TreeSet<>();
                boolean fraction_flag = false;
                int num_lt = 0;
                for (final SamplePair samplepair : sampleListForVcf) {
                    final GenotypeBuilder gb = new GenotypeBuilder(samplepair.getLabel());
                    final SampleAlleles sampleAlleles = contaminationTable.get(samplepair);
                    if (sampleAlleles != null) {
                        gb.attribute("S1S1", sampleAlleles.reads_sample1_supporting_sample1);
                        gb.attribute("S1S2", sampleAlleles.reads_sample1_supporting_sample2);
                        gb.attribute("S1SO", sampleAlleles.reads_sample1_supporting_other);
                        gb.attribute("S2S1", sampleAlleles.reads_sample2_supporting_sample1);
                        gb.attribute("S2S2", sampleAlleles.reads_sample2_supporting_sample2);
                        gb.attribute("S2SO", sampleAlleles.reads_sample2_supporting_other);
                        gb.attribute("S1A", sample2gt.get(samplepair.sample1.getSampleName()).getAllele(0).getDisplayString().charAt(0));
                        gb.attribute("S2A", sample2gt.get(samplepair.sample2.getSampleName()).getAllele(0).getDisplayString().charAt(0));
                        final double fraction = sampleAlleles.getFraction();
                        gb.attribute("FR", fraction);
                        if (!this.passFractionTreshold.test(fraction)) {
                            fraction_flag = true;
                        }
                        boolean bad_lt_flag = false;
                        if (sampleAlleles.reads_sample1_supporting_sample1 <= this.fail_factor * sampleAlleles.reads_sample1_supporting_sample2) {
                            bad_samples.add(samplepair.sample1.getSampleName());
                            bad_lt_flag = true;
                        }
                        if (sampleAlleles.reads_sample2_supporting_sample2 <= this.fail_factor * sampleAlleles.reads_sample2_supporting_sample1) {
                            bad_samples.add(samplepair.sample2.getSampleName());
                            bad_lt_flag = true;
                        }
                        if (bad_lt_flag) {
                            num_lt++;
                        }
                    } else {
                        gb.attribute("S1S1", -1);
                        gb.attribute("S1S2", -1);
                        gb.attribute("S1SO", -1);
                        gb.attribute("S2S1", -1);
                        gb.attribute("S2S2", -1);
                        gb.attribute("S2SO", -1);
                        gb.attribute("S1A", '.');
                        gb.attribute("S2A", '.');
                        gb.attribute("FR", -1f);
                    }
                    genotypeList.add(gb.make());
                }
                if (!bad_samples.isEmpty()) {
                    vcb.attribute("BADSAMPLES", new ArrayList<>(bad_samples));
                }
                vcb.attribute("LE", num_lt);
                if (fraction_flag || !bad_samples.isEmpty()) {
                    if (fraction_flag)
                        vcb.filter("XCONTAMINATION");
                    if (!bad_samples.isEmpty())
                        vcb.filter("BADSAMPLES");
                } else {
                    vcb.passFilters();
                }
                vcb.genotypes(genotypeList);
                vcfw.add(vcb.make());
                contaminationTable.clear();
            } else {
                final long now = System.currentTimeMillis();
                // save periodically once the configured interval has elapsed since the last save
                if (this.outputFile != null && this.save_every_sec > -1L && last_save_ms + (this.save_every_sec * 1000L) <= now) {
                    saveToFile(contaminationTable);
                    last_save_ms = now;
                }
            }
        }
        progress.finish();
        if (this.output_as_vcf) {
            vcfw.close();
            vcfw = null;
        } else {
            saveToFile(contaminationTable);
        }
        return 0;
    } catch (final Exception e) {
        LOG.error(e);
        return -1;
    } finally {
        CloserUtil.close(vcfw);
        CloserUtil.close(in);
        CloserUtil.close(iter);
        for (SamReader samReader : sample2samReader.values()) CloserUtil.close(samReader);
        sample2samReader.clear();
    }
}
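
Distilled from the example above, a minimal sketch of the FILTER pattern it relies on: declare the VCFFilterHeaderLine in the header metadata, then set or clear the filter on each variant through VariantContextBuilder. The wrapper class and the boolean flag are illustrative scaffolding; only the "XCONTAMINATION" name follows the example:

import java.util.HashSet;
import java.util.Set;
import htsjdk.variant.variantcontext.VariantContext;
import htsjdk.variant.variantcontext.VariantContextBuilder;
import htsjdk.variant.vcf.VCFFilterHeaderLine;
import htsjdk.variant.vcf.VCFHeader;
import htsjdk.variant.vcf.VCFHeaderLine;

public class FilterPatternSketch {
    // a FILTER must be declared in the header before variant records reference it
    public static VCFHeader buildHeader() {
        final Set<VCFHeaderLine> metaData = new HashSet<>();
        metaData.add(new VCFFilterHeaderLine("XCONTAMINATION", "Fraction test is above the threshold"));
        return new VCFHeader(metaData);
    }

    public static VariantContext flag(final VariantContext ctx, final boolean contaminated) {
        final VariantContextBuilder vcb = new VariantContextBuilder(ctx);
        if (contaminated) {
            vcb.filter("XCONTAMINATION"); // mark the variant with the declared FILTER id
        } else {
            vcb.passFilters(); // explicit PASS
        }
        return vcb.make();
    }
}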

Example 24 with VCFFilterHeaderLine

use of htsjdk.variant.vcf.VCFFilterHeaderLine in project jvarkit by lindenb.

From the class Biostar78285, method doWork.

@Override
public int doWork(final List<String> args) {
    if (this.gc_percent_window < 1) {
        LOG.error("Bad GC% window size:" + this.gc_percent_window);
        return -1;
    }
    final List<File> bamFiles = IOUtil.unrollFiles(args.stream().map(F -> new File(F)).collect(Collectors.toCollection(HashSet::new)), ".bam");
    SAMSequenceDictionary dict = null;
    final List<SamReader> samReaders = new ArrayList<>();
    final List<CloseableIterator<SAMRecord>> samIterators = new ArrayList<>();
    final TreeSet<String> samples = new TreeSet<>();
    final String DEFAULT_PARTITION = "UNDEFINED_PARTITION";
    IndexedFastaSequenceFile indexedFastaSequenceFile = null;
    VariantContextWriter out = null;
    try {
        final SamReaderFactory samReaderFactory = SamReaderFactory.makeDefault().validationStringency(ValidationStringency.LENIENT);
        for (final File bamFile : bamFiles) {
            LOG.info("Opening " + bamFile);
            final SamReader samReader = samReaderFactory.open(bamFile);
            samReaders.add(samReader);
            final SAMFileHeader header = samReader.getFileHeader();
            if (header == null) {
                LOG.error("No header in " + bamFile);
                return -1;
            }
            if (header.getSortOrder() != SortOrder.coordinate) {
                LOG.error("Sam file " + bamFile + " is not sorted on coordinate :" + header.getSortOrder());
                return -1;
            }
            samples.addAll(header.getReadGroups().stream().map(RG -> this.partition.apply(RG, DEFAULT_PARTITION)).collect(Collectors.toSet()));
            final SAMSequenceDictionary currDict = header.getSequenceDictionary();
            if (currDict == null) {
                LOG.error("SamFile doesn't contain a SAMSequenceDictionary : " + bamFile);
                return -1;
            }
            if (dict == null) {
                dict = currDict;
            } else if (!SequenceUtil.areSequenceDictionariesEqual(dict, currDict)) {
                LOG.error(JvarkitException.DictionariesAreNotTheSame.getMessage(dict, currDict));
                return -1;
            }
        }
        if (samReaders.isEmpty()) {
            LOG.error("no bam");
            return -1;
        }
        if (dict == null) {
            LOG.error("no dictionary");
            return -1;
        }
        final QueryInterval[] intervals;
        if (this.captureBed != null) {
            LOG.info("Opening " + this.captureBed);
            ContigNameConverter.setDefaultAliases(dict);
            final List<QueryInterval> L = new ArrayList<>();
            final BedLineCodec codec = new BedLineCodec();
            final LineIterator li = IOUtils.openFileForLineIterator(this.captureBed);
            while (li.hasNext()) {
                final BedLine bed = codec.decode(li.next());
                if (bed == null)
                    continue;
                final QueryInterval q = bed.toQueryInterval(dict);
                L.add(q);
            }
            CloserUtil.close(li);
            intervals = QueryInterval.optimizeIntervals(L.toArray(new QueryInterval[L.size()]));
        } else {
            intervals = null;
        }
        for (final SamReader samReader : samReaders) {
            LOG.info("querying " + samReader.getResourceDescription());
            final CloseableIterator<SAMRecord> iter;
            if (intervals == null) {
                iter = samReader.iterator();
            } else {
                iter = samReader.queryOverlapping(intervals);
            }
            samIterators.add(new FilterIterator<SAMRecord>(iter, R -> !R.getReadUnmappedFlag() && !filter.filterOut(R)));
        }
        if (this.refFile != null) {
            LOG.info("opening " + refFile);
            indexedFastaSequenceFile = new IndexedFastaSequenceFile(this.refFile);
            final SAMSequenceDictionary refdict = indexedFastaSequenceFile.getSequenceDictionary();
            if (refdict == null) {
                throw new JvarkitException.FastaDictionaryMissing(this.refFile);
            }
            ContigNameConverter.setDefaultAliases(refdict);
            if (!SequenceUtil.areSequenceDictionariesEqual(dict, refdict)) {
                LOG.error(JvarkitException.DictionariesAreNotTheSame.getMessage(dict, refdict));
                return -1;
            }
        }
        out = openVariantContextWriter(this.outputFile);
        final Set<VCFHeaderLine> metaData = new HashSet<>();
        VCFStandardHeaderLines.addStandardFormatLines(metaData, true, VCFConstants.DEPTH_KEY, VCFConstants.GENOTYPE_KEY);
        VCFStandardHeaderLines.addStandardInfoLines(metaData, true, VCFConstants.DEPTH_KEY);
        metaData.add(new VCFFormatHeaderLine("DF", 1, VCFHeaderLineType.Integer, "Number of Reads on plus strand"));
        metaData.add(new VCFFormatHeaderLine("DR", 1, VCFHeaderLineType.Integer, "Number of Reads on minus strand"));
        metaData.add(new VCFInfoHeaderLine("AVG_DP", 1, VCFHeaderLineType.Float, "Mean depth"));
        metaData.add(new VCFInfoHeaderLine("MEDIAN_DP", 1, VCFHeaderLineType.Float, "Median depth"));
        metaData.add(new VCFInfoHeaderLine("MIN_DP", 1, VCFHeaderLineType.Integer, "Min depth"));
        metaData.add(new VCFInfoHeaderLine("MAX_DP", 1, VCFHeaderLineType.Integer, "Max depth"));
        metaData.add(new VCFHeaderLine(Biostar78285.class.getSimpleName() + ".SamFilter", this.filter.toString()));
        for (final Integer treshold : this.minDepthTresholds) {
            metaData.add(new VCFFilterHeaderLine("DP_LT_" + treshold, "All  genotypes have DP< " + treshold));
            metaData.add(new VCFInfoHeaderLine("NUM_DP_LT_" + treshold, 1, VCFHeaderLineType.Integer, "Number of genotypes having DP< " + treshold));
            metaData.add(new VCFInfoHeaderLine("FRACT_DP_LT_" + treshold, 1, VCFHeaderLineType.Float, "Fraction of genotypes having DP< " + treshold));
        }
        if (indexedFastaSequenceFile != null) {
            metaData.add(new VCFInfoHeaderLine("GC_PERCENT", 1, VCFHeaderLineType.Integer, "GC% window_size:" + this.gc_percent_window));
        }
        final List<Allele> refAlleles = Collections.singletonList(Allele.create("N", true));
        final List<Allele> NO_CALLS = Arrays.asList(Allele.NO_CALL, Allele.NO_CALL);
        final VCFHeader vcfHeader = new VCFHeader(metaData, samples);
        vcfHeader.setSequenceDictionary(dict);
        out.writeHeader(vcfHeader);
        final SAMRecordCoordinateComparator samRecordCoordinateComparator = new SAMRecordCoordinateComparator();
        final PeekableIterator<SAMRecord> peekIter = new PeekableIterator<>(new MergingIterator<>((R1, R2) -> samRecordCoordinateComparator.fileOrderCompare(R1, R2), samIterators));
        final SAMSequenceDictionaryProgress progress = new SAMSequenceDictionaryProgress(dict);
        for (final SAMSequenceRecord ssr : dict.getSequences()) {
            final IntervalTree<Boolean> capturePos;
            if (intervals != null) {
                if (!Arrays.stream(intervals).anyMatch(I -> I.referenceIndex == ssr.getSequenceIndex())) {
                    continue;
                }
                capturePos = new IntervalTree<>();
                Arrays.stream(intervals).filter(I -> I.referenceIndex == ssr.getSequenceIndex()).forEach(I -> capturePos.put(I.start, I.end, true));
            } else {
                capturePos = null;
            }
            final GenomicSequence genomicSequence;
            if (indexedFastaSequenceFile != null && indexedFastaSequenceFile.getSequenceDictionary().getSequence(ssr.getSequenceName()) != null) {
                genomicSequence = new GenomicSequence(indexedFastaSequenceFile, ssr.getSequenceName());
            } else {
                genomicSequence = null;
            }
            final List<SAMRecord> buffer = new ArrayList<>();
            for (int ssr_pos = 1; ssr_pos <= ssr.getSequenceLength(); ++ssr_pos) {
                if (capturePos != null && !capturePos.overlappers(ssr_pos, ssr_pos).hasNext())
                    continue;
                progress.watch(ssr.getSequenceName(), ssr_pos);
                while (peekIter.hasNext()) {
                    final SAMRecord rec = peekIter.peek();
                    if (rec.getReadUnmappedFlag()) {
                        // consume
                        peekIter.next();
                        continue;
                    }
                    if (this.filter.filterOut(rec)) {
                        // consume
                        peekIter.next();
                        continue;
                    }
                    if (rec.getReferenceIndex() < ssr.getSequenceIndex()) {
                        throw new IllegalStateException("should not happen");
                    }
                    if (rec.getReferenceIndex() > ssr.getSequenceIndex()) {
                        break;
                    }
                    if (rec.getAlignmentEnd() < ssr_pos) {
                        throw new IllegalStateException("should not happen");
                    }
                    if (rec.getAlignmentStart() > ssr_pos) {
                        break;
                    }
                    buffer.add(peekIter.next());
                }
                int x = 0;
                while (x < buffer.size()) {
                    final SAMRecord R = buffer.get(x);
                    if (R.getReferenceIndex() != ssr.getSequenceIndex() || R.getAlignmentEnd() < ssr_pos) {
                        buffer.remove(x);
                    } else {
                        x++;
                    }
                }
                final Map<String, PosInfo> count = samples.stream().map(S -> new PosInfo(S)).collect(Collectors.toMap(P -> P.sample, Function.identity()));
                for (final SAMRecord rec : buffer) {
                    if (rec.getReferenceIndex() != ssr.getSequenceIndex())
                        throw new IllegalStateException("should not happen");
                    if (rec.getAlignmentEnd() < ssr_pos)
                        continue;
                    if (rec.getAlignmentStart() > ssr_pos)
                        continue;
                    final Cigar cigar = rec.getCigar();
                    if (cigar == null)
                        continue;
                    int refpos = rec.getAlignmentStart();
                    final String sample = this.partition.getPartion(rec, DEFAULT_PARTITION);
                    for (final CigarElement ce : cigar.getCigarElements()) {
                        if (refpos > ssr_pos)
                            break;
                        final CigarOperator op = ce.getOperator();
                        if (op.consumesReferenceBases()) {
                            if (op.consumesReadBases()) {
                                if (refpos <= ssr_pos && ssr_pos <= refpos + ce.getLength()) {
                                    final PosInfo posInfo = count.get(sample);
                                    if (posInfo != null) {
                                        posInfo.dp++;
                                        if (rec.getReadNegativeStrandFlag()) {
                                            posInfo.negative_strand++;
                                        }
                                    }
                                    break;
                                }
                            }
                            refpos += ce.getLength();
                        }
                    }
                }
                final VariantContextBuilder vcb = new VariantContextBuilder();
                final Set<String> filters = new HashSet<>();
                vcb.chr(ssr.getSequenceName());
                vcb.start(ssr_pos);
                vcb.stop(ssr_pos);
                if (genomicSequence == null) {
                    vcb.alleles(refAlleles);
                } else {
                    vcb.alleles(Collections.singletonList(Allele.create((byte) genomicSequence.charAt(ssr_pos - 1), true)));
                    final GenomicSequence.GCPercent gcp = genomicSequence.getGCPercent(Math.max((ssr_pos - 1) - this.gc_percent_window, 0), Math.min(ssr_pos + this.gc_percent_window, ssr.getSequenceLength()));
                    if (!gcp.isEmpty()) {
                        vcb.attribute("GC_PERCENT", gcp.getGCPercentAsInteger());
                    }
                }
                vcb.attribute(VCFConstants.DEPTH_KEY, (int) count.values().stream().mapToInt(S -> S.dp).sum());
                vcb.genotypes(count.values().stream().map(C -> new GenotypeBuilder(C.sample, NO_CALLS).DP((int) C.dp).attribute("DR", C.negative_strand).attribute("DF", C.dp - C.negative_strand).make()).collect(Collectors.toList()));
                for (final Integer treshold : this.minDepthTresholds) {
                    final int count_lt = (int) count.values().stream().filter(S -> S.dp < treshold).count();
                    if (count_lt == samples.size()) {
                        filters.add("DP_LT_" + treshold);
                    }
                    vcb.attribute("NUM_DP_LT_" + treshold, count_lt);
                    if (!samples.isEmpty()) {
                        vcb.attribute("FRACT_DP_LT_" + treshold, count_lt / (float) samples.size());
                    }
                }
                if (!samples.isEmpty()) {
                    final int[] array = count.values().stream().mapToInt(S -> S.dp).toArray();
                    vcb.attribute("AVG_DP", Percentile.average().evaluate(array));
                    vcb.attribute("MEDIAN_DP", Percentile.median().evaluate(array));
                    vcb.attribute("MIN_DP", (int) Percentile.min().evaluate(array));
                    vcb.attribute("MAX_DP", (int) Percentile.max().evaluate(array));
                }
                if (filters.isEmpty()) {
                    vcb.passFilters();
                } else {
                    vcb.filters(filters);
                }
                out.add(vcb.make());
            }
        }
        progress.finish();
        peekIter.close();
        out.close();
        out = null;
        return 0;
    } catch (final Exception err) {
        LOG.error(err);
        return -1;
    } finally {
        CloserUtil.close(out);
        CloserUtil.close(samIterators);
        CloserUtil.close(samReaders);
        CloserUtil.close(indexedFastaSequenceFile);
    }
}
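
The per-threshold header lines built in the loop above can be isolated into a small helper; a sketch using the same IDs and types as the example (the class and method names here are illustrative, not part of Biostar78285):

import java.util.HashSet;
import java.util.List;
import java.util.Set;
import htsjdk.variant.vcf.VCFFilterHeaderLine;
import htsjdk.variant.vcf.VCFHeaderLine;
import htsjdk.variant.vcf.VCFHeaderLineType;
import htsjdk.variant.vcf.VCFInfoHeaderLine;

public class DepthThresholdLines {
    /** one FILTER and two INFO header lines per minimum-depth threshold, as in the example above. */
    public static Set<VCFHeaderLine> build(final List<Integer> minDepthThresholds) {
        final Set<VCFHeaderLine> metaData = new HashSet<>();
        for (final Integer threshold : minDepthThresholds) {
            metaData.add(new VCFFilterHeaderLine("DP_LT_" + threshold, "All genotypes have DP < " + threshold));
            metaData.add(new VCFInfoHeaderLine("NUM_DP_LT_" + threshold, 1, VCFHeaderLineType.Integer, "Number of genotypes having DP < " + threshold));
            metaData.add(new VCFInfoHeaderLine("FRACT_DP_LT_" + threshold, 1, VCFHeaderLineType.Float, "Fraction of genotypes having DP < " + threshold));
        }
        return metaData;
    }
}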

Example 25 with VCFFilterHeaderLine

use of htsjdk.variant.vcf.VCFFilterHeaderLine in project jvarkit by lindenb.

From the class RDFVcfWriter, method writeHeader.

public void writeHeader(VCFHeader header, URI source) {
    if (this.header != null)
        throw new RuntimeException("Header was already written");
    this.header = header;
    this.source = source;
    if (this.source == null)
        this.source = URI.create("urn:source/id" + (++id_generator));
    try {
        writeStartDocument();
        this.w.writeStartElement(PFX, "Source", NS);
        this.w.writeAttribute("rdf", RDF, "about", this.source.toString());
        this.w.writeStartElement("dc", "title", DC);
        this.w.writeCharacters(this.source.toString());
        // dc:title
        this.w.writeEndElement();
        this.w.writeEndElement();
        SAMSequenceDictionary dict = header.getSequenceDictionary();
        if (dict != null) {
            for (SAMSequenceRecord ssr : dict.getSequences()) {
                this.w.writeStartElement(PFX, "Chromosome", NS);
                this.w.writeAttribute("rdf", RDF, "about", "urn:chromosome/" + ssr.getSequenceName());
                this.w.writeStartElement("dc", "title", DC);
                this.w.writeCharacters(ssr.getSequenceName());
                // dc:title
                this.w.writeEndElement();
                this.w.writeStartElement(PFX, "length", NS);
                datatype("int");
                this.w.writeCharacters(String.valueOf(ssr.getSequenceLength()));
                // length
                this.w.writeEndElement();
                this.w.writeStartElement(PFX, "index", NS);
                datatype("int");
                this.w.writeCharacters(String.valueOf(ssr.getSequenceIndex()));
                // index
                this.w.writeEndElement();
                // Chromosome
                this.w.writeEndElement();
            }
        }
        key2infoHandler.put(SnpEffPredictionParser.getDefaultTag(), new SnpEffHandler());
        key2infoHandler.put(VepPredictionParser.getDefaultTag(), new VepHandler());
        key2infoHandler.put(VCFPredictions.TAG, new MyPredictionHandler());
        for (VCFInfoHeaderLine h : header.getInfoHeaderLines()) {
            RDFVcfInfoHandler handler = key2infoHandler.get(h.getID());
            if (handler == null) {
                LOG.info("creating default handler for INFO:" + h.getID());
                handler = createDefaultRdfVcfInfoHandlerFor(h);
                key2infoHandler.put(handler.getKey(), handler);
            }
            handler.init(h);
        }
        for (VCFFilterHeaderLine h : header.getFilterLines()) {
            this.w.writeStartElement(PFX, "Filter", NS);
            this.w.writeAttribute("rdf", RDF, "about", "urn:filter/" + h.getKey());
            this.w.writeStartElement("dc", "title", DC);
            this.w.writeCharacters(h.getKey());
            // dc:title
            this.w.writeEndElement();
            this.w.writeStartElement("dc", "description", DC);
            this.w.writeCharacters(h.getValue());
            // dc:description
            this.w.writeEndElement();
            // Filter
            this.w.writeEndElement();
        }
        // Sample
        for (String sample : header.getSampleNamesInOrder()) {
            this.w.writeStartElement(PFX, "Sample", NS);
            this.w.writeAttribute("rdf", RDF, "about", "urn:sample/" + sample);
            this.w.writeStartElement("dc", "title", DC);
            this.w.writeCharacters(sample);
            // dc:title
            this.w.writeEndElement();
            // Sample
            this.w.writeEndElement();
        }
    } catch (Exception e) {
        throw new RuntimeException("close failed", e);
    }
}
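
For comparison with the RDF serialization above, a minimal sketch that dumps the declared FILTER lines of a header as plain text, using the getFilterLines()/getID()/getDescription() accessors already seen in Example 21; the class name is illustrative:

import htsjdk.variant.vcf.VCFFilterHeaderLine;
import htsjdk.variant.vcf.VCFHeader;

public class DumpFilterLines {
    public static void dump(final VCFHeader header) {
        for (final VCFFilterHeaderLine line : header.getFilterLines()) {
            // one line per declared FILTER: id<TAB>description
            System.out.println(line.getID() + "\t" + line.getDescription());
        }
    }
}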

Aggregations

VCFFilterHeaderLine (htsjdk.variant.vcf.VCFFilterHeaderLine): 25
VCFHeader (htsjdk.variant.vcf.VCFHeader): 23
VariantContext (htsjdk.variant.variantcontext.VariantContext): 17
VariantContextBuilder (htsjdk.variant.variantcontext.VariantContextBuilder): 16
VariantContextWriter (htsjdk.variant.variantcontext.writer.VariantContextWriter): 15
ArrayList (java.util.ArrayList): 15
VCFInfoHeaderLine (htsjdk.variant.vcf.VCFInfoHeaderLine): 14
VCFHeaderLine (htsjdk.variant.vcf.VCFHeaderLine): 13
File (java.io.File): 13
SAMSequenceDictionaryProgress (com.github.lindenb.jvarkit.util.picard.SAMSequenceDictionaryProgress): 12
HashSet (java.util.HashSet): 11
SAMSequenceDictionary (htsjdk.samtools.SAMSequenceDictionary): 10
Allele (htsjdk.variant.variantcontext.Allele): 10
VCFFormatHeaderLine (htsjdk.variant.vcf.VCFFormatHeaderLine): 9
List (java.util.List): 9
Genotype (htsjdk.variant.variantcontext.Genotype): 8
GenotypeBuilder (htsjdk.variant.variantcontext.GenotypeBuilder): 8
Set (java.util.Set): 8
Collectors (java.util.stream.Collectors): 8
Parameter (com.beust.jcommander.Parameter): 7