
Example 61 with CloseableIterator

use of htsjdk.samtools.util.CloseableIterator in project jvarkit by lindenb.

the class ImpactOfDuplicates method doWork.

@Override
public int doWork(final List<String> args) {
    CloseableIterator<Duplicate> dupIter = null;
    final List<File> INPUT = args.stream().map(S -> new File(S)).collect(Collectors.toList());
    try {
        this.duplicates = SortingCollection.newInstance(Duplicate.class, new DuplicateCodec(), new Comparator<Duplicate>() {

            @Override
            public int compare(Duplicate o1, Duplicate o2) {
                return o1.compareTo(o2);
            }
        }, this.sortingCollectionArgs.getMaxRecordsInRam(), this.sortingCollectionArgs.getTmpPaths());
        for (this.bamIndex = 0; this.bamIndex < INPUT.size(); this.bamIndex++) {
            int prev_tid = -1;
            int prev_pos = -1;
            long nLines = 0L;
            File inFile = INPUT.get(this.bamIndex);
            LOG.info("Processing " + inFile);
            IOUtil.assertFileIsReadable(inFile);
            SamReader samReader = null;
            CloseableIterator<SAMRecord> iter = null;
            try {
                samReader = SamReaderFactory.make().validationStringency(ValidationStringency.LENIENT).open(inFile);
                final SAMFileHeader header = samReader.getFileHeader();
                this.samFileDicts.add(header.getSequenceDictionary());
                if (BEDFILE == null) {
                    iter = samReader.iterator();
                } else {
                    IntervalList intervalList = new IntervalList(header);
                    BufferedReader in = new BufferedReader(new FileReader(BEDFILE));
                    String line = null;
                    while ((line = in.readLine()) != null) {
                        if (line.isEmpty() || line.startsWith("#"))
                            continue;
                        String[] tokens = line.split("[\t]");
                        Interval interval = new Interval(tokens[0], 1 + Integer.parseInt(tokens[1]), Integer.parseInt(tokens[2]));
                        intervalList.add(interval);
                    }
                    in.close();
                    intervalList = intervalList.sorted();
                    List<Interval> uniqueIntervals = IntervalList.getUniqueIntervals(intervalList, false);
                    SamRecordIntervalIteratorFactory sriif = new SamRecordIntervalIteratorFactory();
                    iter = sriif.makeSamRecordIntervalIterator(samReader, uniqueIntervals, false);
                }
                while (iter.hasNext()) {
                    SAMRecord rec = iter.next();
                    if (rec.getReadUnmappedFlag())
                        continue;
                    if (!rec.getReadPairedFlag())
                        continue;
                    // getReferenceIndex() returns a boxed Integer: compare with equals(), not ==/!=
                    if (!rec.getReferenceIndex().equals(rec.getMateReferenceIndex()))
                        continue;
                    if (!rec.getProperPairFlag())
                        continue;
                    if (!rec.getFirstOfPairFlag())
                        continue;
                    if (prev_tid != -1) {
                        if (prev_tid > rec.getReferenceIndex()) {
                            throw new IOException("Bad sort order from " + rec);
                        } else if (prev_tid == rec.getReferenceIndex() && prev_pos > rec.getAlignmentStart()) {
                            throw new IOException("Bad sort order from " + rec);
                        } else {
                            prev_pos = rec.getAlignmentStart();
                        }
                    } else {
                        prev_tid = rec.getReferenceIndex();
                        prev_pos = -1;
                    }
                    if ((++nLines) % 1000000 == 0) {
                        LOG.info("In " + inFile + " N=" + nLines);
                    }
                    Duplicate dup = new Duplicate();
                    dup.bamIndex = this.bamIndex;
                    dup.pos = Math.min(rec.getAlignmentStart(), rec.getMateAlignmentStart());
                    dup.tid = rec.getReferenceIndex();
                    dup.size = Math.abs(rec.getInferredInsertSize());
                    this.duplicates.add(dup);
                }
            } finally {
                if (iter != null)
                    iter.close();
                if (samReader != null)
                    samReader.close();
            }
            LOG.info("done " + inFile);
        }
        /**
         * loop done, now scan the duplicates
         */
        LOG.info("doneAdding");
        this.duplicates.doneAdding();
        this.out = super.openFileOrStdoutAsPrintStream(outputFile);
        out.print("#INTERVAL\tMAX\tMEAN");
        for (int i = 0; i < INPUT.size(); ++i) {
            out.print('\t');
            out.print(INPUT.get(i));
        }
        out.println();
        dupIter = this.duplicates.iterator();
        while (dupIter.hasNext()) {
            Duplicate dup = dupIter.next();
            if (this.duplicatesBuffer.isEmpty() || dup.compareChromPosSize(this.duplicatesBuffer.get(0)) == 0) {
                this.duplicatesBuffer.add(dup);
            } else {
                dumpDuplicatesBuffer(INPUT);
                this.duplicatesBuffer.add(dup);
            }
        }
        dumpDuplicatesBuffer(INPUT);
        LOG.info("end iterator");
        out.flush();
        out.close();
    } catch (Exception e) {
        LOG.error(e);
        return -1;
    } finally {
        if (dupIter != null)
            dupIter.close();
        LOG.info("cleaning duplicates");
        this.duplicates.cleanup();
    }
    return 0;
}
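
The method above is built around one idiom: records are pushed into a SortingCollection, doneAdding() seals it, and the sorted stream is read back through the CloseableIterator returned by iterator(), with cleanup() removing the spill files. Below is a minimal, self-contained sketch of that idiom, not jvarkit code: the String payload, the StringCodec class, and the 50,000-record limit are illustrative assumptions.

import htsjdk.samtools.util.CloseableIterator;
import htsjdk.samtools.util.SortingCollection;

import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.EOFException;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Comparator;

public class SortingCollectionSketch {

    /** Tiny disk codec for String records; SortingCollection uses it when spilling to temp files. */
    static class StringCodec implements SortingCollection.Codec<String> {
        private DataOutputStream dos;
        private DataInputStream dis;

        @Override
        public void setOutputStream(final OutputStream os) { this.dos = new DataOutputStream(os); }

        @Override
        public void setInputStream(final InputStream is) { this.dis = new DataInputStream(is); }

        @Override
        public void encode(final String s) {
            try { this.dos.writeUTF(s); } catch (final IOException e) { throw new RuntimeException(e); }
        }

        @Override
        public String decode() {
            try { return this.dis.readUTF(); }
            catch (final EOFException eof) { return null; } // null signals end of the spill file
            catch (final IOException e) { throw new RuntimeException(e); }
        }

        @Override
        public StringCodec clone() { return new StringCodec(); }
    }

    public static void main(final String[] args) {
        final SortingCollection<String> sorter = SortingCollection.newInstance(
                String.class,
                new StringCodec(),
                Comparator.<String>naturalOrder(),
                // max records kept in RAM before spilling to disk (illustrative value)
                50_000,
                new File(System.getProperty("java.io.tmpdir")));
        try {
            for (final String s : new String[] { "chr2", "chr1", "chr10" }) {
                sorter.add(s);
            }
            sorter.doneAdding(); // no add() allowed after this point
            // iterator() returns a CloseableIterator over the sorted records
            try (CloseableIterator<String> iter = sorter.iterator()) {
                while (iter.hasNext()) {
                    System.out.println(iter.next());
                }
            }
        } finally {
            sorter.cleanup(); // delete temporary spill files
        }
    }
}

Because CloseableIterator extends java.io.Closeable, try-with-resources can replace the explicit dupIter.close() performed in the finally block above.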

Example 62 with CloseableIterator

use of htsjdk.samtools.util.CloseableIterator in project jvarkit by lindenb.

the class LumpySort method doWork.

@Override
public int doWork(final List<String> args) {
    VariantContextWriter vcw = null;
    LineIterator vcfIn = null;
    Environment environment = null;
    Database variantsDb1 = null;
    final List<File> inputs = IOUtil.unrollFiles(args.stream().map(S -> new File(S)).collect(Collectors.toList()), ".vcf", ".vcf.gz");
    if (inputs.isEmpty()) {
        LOG.error("empty vcf list");
        return -1;
    }
    try {
        IOUtil.assertDirectoryIsWritable(this.bdbHomeDir);
        final Set<VCFHeaderLine> metaData = new HashSet<>();
        final Set<String> sampleNames = new TreeSet<>();
        final IntervalTreeMap<Boolean> intervalTreeMapBed;
        if (this.bedFile != null) {
            intervalTreeMapBed = new IntervalTreeMap<>();
            final BedLineCodec bedLineCodec = new BedLineCodec();
            final BufferedReader br = IOUtils.openFileForBufferedReading(this.bedFile);
            br.lines().map(L -> bedLineCodec.decode(L)).filter(L -> L != null).forEach(B -> intervalTreeMapBed.put(B.toInterval(), true));
            br.close();
        } else {
            intervalTreeMapBed = null;
        }
        for (int idx = 0; idx < inputs.size(); ++idx) {
            final File vcfFile = inputs.get(idx);
            LOG.info("Read header " + (idx + 1) + "/" + inputs.size());
            final VCFReader r = VCFReaderFactory.makeDefault().open(vcfFile.toPath(), false);
            final VCFHeader header = r.getHeader();
            if (!LumpyConstants.isLumpyHeader(header)) {
                LOG.error("doesn't look like a Lumpy-SV vcf header " + vcfFile);
                r.close();
                return -1;
            }
            if (!header.hasGenotypingData()) {
                LOG.error("No sample in " + vcfFile);
                r.close();
                return -1;
            }
            for (final String sampleName : header.getSampleNamesInOrder()) {
                if (sampleNames.contains(sampleName)) {
                    LOG.error("Sample found twice " + sampleName + " in " + vcfFile);
                    r.close();
                    return -1;
                }
                sampleNames.add(sampleName);
            }
            metaData.addAll(header.getMetaDataInInputOrder().stream().filter(H -> !H.getKey().equals("fileDate")).collect(Collectors.toSet()));
            r.close();
        }
        final VCFInfoHeaderLine nSampleInfoHeaderLine = new VCFInfoHeaderLine("NSAMPLES", 1, VCFHeaderLineType.Integer, "Number of affected samples.");
        metaData.add(nSampleInfoHeaderLine);
        final VCFFormatHeaderLine chromStartFormatHeaderLine = new VCFFormatHeaderLine("CB", 1, VCFHeaderLineType.Integer, "Original Variant POS");
        metaData.add(chromStartFormatHeaderLine);
        final VCFFormatHeaderLine chromEndFormatHeaderLine = new VCFFormatHeaderLine("CE", 1, VCFHeaderLineType.Integer, "Original Variant END");
        metaData.add(chromEndFormatHeaderLine);
        final VCFHeader outHeader = new VCFHeader(metaData, sampleNames);
        final VCFHeaderVersion[] versions = VCFHeaderVersion.values();
        this.vcfEncoder = new VCFEncoder(outHeader, false, true);
        this.vcfCodec.setVCFHeader(outHeader, versions[versions.length - 1]);
        /* open BDB env */
        final Transaction txn = null;
        environment = new Environment(this.bdbHomeDir, new EnvironmentConfig().setAllowCreate(true).setReadOnly(false));
        variantsDb1 = environment.openDatabase(txn, "variants1", new DatabaseConfig().setBtreeComparator(KeySorterComparator.class).setAllowCreate(true).setReadOnly(false).setTemporary(true));
        long total_variants = 0L;
        final LumpyVarBinding lumpVarBinding = new LumpyVarBinding();
        final KeySorterBinding keySorterBinding = new KeySorterBinding();
        for (int idx = 0; idx < inputs.size(); ++idx) {
            final long millisecstart = System.currentTimeMillis();
            final File vcfFile = inputs.get(idx);
            int nVariant = 0;
            final VCFReader r = VCFReaderFactory.makeDefault().open(vcfFile.toPath(), false);
            final List<Genotype> missing = new ArrayList<>(sampleNames.size());
            for (final String sn : sampleNames) {
                if (r.getHeader().getSampleNamesInOrder().contains(sn))
                    continue;
                missing.add(GenotypeBuilder.createMissing(sn, 2));
            }
            final CloseableIterator<VariantContext> iter = r.iterator();
            while (iter.hasNext()) {
                VariantContext ctx = iter.next();
                if (!this.keep_secondary) {
                    if (ctx.hasAttribute("SECONDARY"))
                        continue;
                }
                if (!this.variantFilter.test(ctx))
                    continue;
                if (intervalTreeMapBed != null && !intervalTreeMapBed.containsOverlapping(ctx))
                    continue;
                final List<Genotype> gtList = new ArrayList<>(ctx.getGenotypes());
                for (int gi = 0; gi < gtList.size(); gi++) {
                    Genotype g = gtList.get(gi);
                    final GenotypeBuilder gb;
                    if (this.do_genotype && isAvailableGenotype(g)) {
                        gb = new GenotypeBuilder(g.getSampleName(), ctx.getAlternateAlleles());
                        gb.attributes(g.getExtendedAttributes());
                    } else {
                        gb = new GenotypeBuilder(g);
                    }
                    gb.attribute(chromStartFormatHeaderLine.getID(), ctx.getStart());
                    gb.attribute(chromEndFormatHeaderLine.getID(), ctx.getEnd());
                    gtList.set(gi, gb.make());
                }
                gtList.addAll(missing);
                ctx = new VariantContextBuilder(ctx).genotypes(gtList).rmAttribute("PRPOS").make();
                final LumpyVar lvar = new LumpyVar(ctx, total_variants);
                final DatabaseEntry key = new DatabaseEntry();
                final DatabaseEntry data = new DatabaseEntry();
                lumpVarBinding.objectToEntry(lvar, data);
                keySorterBinding.objectToEntry(lvar.getSortKey(), key);
                if (variantsDb1.put(txn, key, data) != OperationStatus.SUCCESS) {
                    r.close();
                    LOG.error("insertion failed");
                    return -1;
                }
                nVariant++;
                total_variants++;
            }
            iter.close();
            r.close();
            LOG.info("Read  " + (idx + 1) + "/" + inputs.size() + " variants of " + vcfFile + " N=" + nVariant + " Total:" + total_variants + " That took: " + Duration.ofMillis(System.currentTimeMillis() - millisecstart));
            System.gc();
        }
        if (intervalTreeMapBed != null)
            intervalTreeMapBed.clear();
        System.gc();
        LOG.info("Writing output");
        final List<Allele> ALLELES_NO_CALLS = this.do_genotype ? Collections.singletonList(Allele.NO_CALL) : Arrays.asList(Allele.NO_CALL, Allele.NO_CALL);
        final Cursor cursor = variantsDb1.openCursor(txn, null);
        vcw = this.writingVariantsDelegate.open(this.outputFile);
        vcw.writeHeader(outHeader);
        for (; ; ) {
            final DatabaseEntry key = new DatabaseEntry();
            final DatabaseEntry data = new DatabaseEntry();
            OperationStatus status = cursor.getNext(key, data, LockMode.DEFAULT);
            if (!status.equals(OperationStatus.SUCCESS))
                break;
            final LumpyVar first = lumpVarBinding.entryToObject(data);
            if (this.do_not_merge_ctx) {
                vcw.add(first.ctx);
                continue;
            }
            final KeySorter keySorter1 = keySorterBinding.entryToObject(key);
            final List<LumpyVar> buffer = new ArrayList<>();
            buffer.add(first);
            final DatabaseEntry key2 = new DatabaseEntry();
            final DatabaseEntry data2 = new DatabaseEntry();
            final Cursor cursor2 = cursor.dup(true);
            for (; ; ) {
                status = cursor2.getNext(key2, data2, LockMode.DEFAULT);
                if (!status.equals(OperationStatus.SUCCESS))
                    break;
                final KeySorter keySorter2 = keySorterBinding.entryToObject(key2);
                if (keySorter1.compare1(keySorter2) != 0) {
                    break;
                }
                final LumpyVar lv = lumpVarBinding.entryToObject(data2);
                if (lv.ctx.getStart() > first.ctx.getEnd()) {
                    break;
                }
                if (first.canMerge(lv)) {
                    buffer.add(lv);
                    cursor2.delete();
                }
            }
            cursor2.close();
            // delete 'first'
            cursor.delete();
            final int variantStartA = buffer.stream().mapToInt(V -> V.ctx.getStart()).min().getAsInt();
            final int variantStartB = (int) buffer.stream().mapToInt(V -> V.ctx.getStart()).average().getAsDouble();
            final int variantStartC = buffer.stream().mapToInt(V -> V.ctx.getStart()).max().getAsInt();
            final int variantEndA = buffer.stream().mapToInt(V -> V.ctx.getEnd()).min().getAsInt();
            final int variantEndB = (int) buffer.stream().mapToInt(V -> V.ctx.getEnd()).average().getAsDouble();
            final int variantEndC = buffer.stream().mapToInt(V -> V.ctx.getEnd()).max().getAsInt();
            final VariantContextBuilder vcb = new VariantContextBuilder("lumpymerge", first.ctx.getContig(), variantStartB, variantEndB, first.ctx.getAlleles());
            vcb.attribute("END", variantEndB);
            vcb.attribute("SVTYPE", first.ctx.getAttribute("SVTYPE"));
            vcb.attribute("SVLEN", (int) Percentile.median().evaluate(buffer.stream().mapToInt(V -> V.ctx.getEnd() - V.ctx.getStart())).getAsDouble());
            vcb.attribute("CIPOS", Arrays.asList(variantStartB - variantStartA, variantStartC - variantStartB));
            vcb.attribute("CIEND", Arrays.asList(variantEndB - variantEndA, variantEndC - variantEndB));
            vcb.attribute("SU", buffer.stream().flatMap(V -> V.ctx.getGenotypes().stream()).mapToInt(G -> G.getAttributeAsInt("SU", 0)).sum());
            vcb.attribute("SR", buffer.stream().flatMap(V -> V.ctx.getGenotypes().stream()).mapToInt(G -> G.getAttributeAsInt("SR", 0)).sum());
            vcb.attribute("PE", buffer.stream().flatMap(V -> V.ctx.getGenotypes().stream()).mapToInt(G -> G.getAttributeAsInt("PE", 0)).sum());
            final Map<String, Genotype> sample2genotype = new HashMap<>(sampleNames.size());
            buffer.stream().flatMap(V -> V.ctx.getGenotypes().stream()).filter(G -> isAvailableGenotype(G)).forEach(G -> {
                sample2genotype.put(G.getSampleName(), G);
            });
            vcb.attribute(nSampleInfoHeaderLine.getID(), sample2genotype.size());
            for (final String sn : sampleNames) {
                if (!sample2genotype.containsKey(sn)) {
                    sample2genotype.put(sn, new GenotypeBuilder(sn, ALLELES_NO_CALLS).attribute("SU", 0).attribute("SR", 0).attribute("PE", 0).make());
                }
            }
            vcb.genotypes(sample2genotype.values());
            vcw.add(vcb.make());
        }
        cursor.close();
        vcw.close();
        vcw = null;
        variantsDb1.close();
        variantsDb1 = null;
        environment.close();
        environment = null;
        return 0;
    } catch (final Exception err) {
        LOG.error(err);
        return -1;
    } finally {
        CloserUtil.close(vcfIn);
        CloserUtil.close(vcw);
        CloserUtil.close(variantsDb1);
        CloserUtil.close(environment);
    }
}
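
Stripped of the Berkeley DB bookkeeping, the per-file loop above is the standard htsjdk pattern: open a VCF reader, pull a CloseableIterator<VariantContext>, filter records, and close both iterator and reader. A minimal sketch using htsjdk's VCFFileReader directly (the path input.vcf.gz is illustrative; the SECONDARY filter mirrors the keep_secondary test above):

import htsjdk.samtools.util.CloseableIterator;
import htsjdk.variant.variantcontext.VariantContext;
import htsjdk.variant.vcf.VCFFileReader;

import java.io.File;

public class VcfIterationSketch {
    public static void main(final String[] args) {
        final File vcfFile = new File("input.vcf.gz"); // illustrative path
        // Both the reader and its CloseableIterator are AutoCloseable, so
        // try-with-resources replaces the manual iter.close()/r.close() pairs above.
        try (VCFFileReader reader = new VCFFileReader(vcfFile, false);
             CloseableIterator<VariantContext> iter = reader.iterator()) {
            long n = 0L;
            while (iter.hasNext()) {
                final VariantContext ctx = iter.next();
                if (ctx.hasAttribute("SECONDARY")) continue; // skip secondary breakend records
                n++;
            }
            System.out.println("kept " + n + " variants from " + vcfFile);
        }
    }
}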

Example 63 with CloseableIterator

use of htsjdk.samtools.util.CloseableIterator in project jvarkit by lindenb.

the class IjgvdToVcf method doWork.

@Override
public int doWork(List<String> args) {
    try {
        final SAMSequenceDictionary dict = SequenceDictionaryUtils.extractRequired(this.fai);
        this.ctgNameConverter = ContigNameConverter.fromOneDictionary(dict);
        List<Path> zipPaths = IOUtils.unrollPaths(args);
        List<CloseableIterator<VariantContext>> iterators = new ArrayList<>(zipPaths.size() * 2);
        for (final Path zipPath : zipPaths) {
            if (zipPath.getFileName().toString().endsWith(".tsv")) {
                String fname = zipPath.getFileName().toString();
                if (fname.endsWith("filtered.tsv") && skip_filtered) {
                    continue;
                }
                if (fname.endsWith("_multiallelic.tsv") && skip_multiallelic) {
                    continue;
                }
                final InputStream in = Files.newInputStream(zipPath);
                iterators.add(new ZipIterator(fname, in));
                continue;
            }
            for (int i = 0; i < 2; i++) {
                final ZipInputStream zin = new ZipInputStream(Files.newInputStream(zipPath));
                ZipEntry entry = null;
                ZipIterator zipIter = null;
                while ((entry = zin.getNextEntry()) != null) {
                    if (entry.getName().endsWith("filtered.tsv") && skip_filtered) {
                        zin.closeEntry();
                        continue;
                    }
                    if (entry.getName().endsWith("_multiallelic.tsv") && skip_multiallelic) {
                        zin.closeEntry();
                        continue;
                    }
                    if (i == 0 && !(entry.getName().endsWith("passed.tsv") || entry.getName().endsWith("multiallelic.tsv"))) {
                        zin.closeEntry();
                        continue;
                    }
                    if (i == 1 && !entry.getName().endsWith("filtered.tsv")) {
                        zin.closeEntry();
                        continue;
                    }
                    zipIter = new ZipIterator(entry.getName(), zin);
                    break;
                }
                if (zipIter == null) {
                    zin.close();
                } else {
                    iterators.add(zipIter);
                }
            }
        }
        final ContigDictComparator contigDictComparator = new ContigDictComparator(dict);
        final Comparator<VariantContext> comparator = (A, B) -> {
            int i = contigDictComparator.compare(A.getContig(), B.getContig());
            if (i != 0)
                return i;
            i = Integer.compare(A.getStart(), B.getStart());
            if (i != 0)
                return i;
            return A.getReference().compareTo(B.getReference());
        };
        final MergingIterator<VariantContext> iter = new MergingIterator<>(comparator, iterators);
        final VariantContextWriter vcw = writingVariantsDelegate.dictionary(dict).open(out);
        final Set<VCFHeaderLine> metaData = new HashSet<>();
        VCFStandardHeaderLines.addStandardInfoLines(metaData, true, VCFConstants.ALLELE_COUNT_KEY, VCFConstants.ALLELE_FREQUENCY_KEY, VCFConstants.ALLELE_NUMBER_KEY);
        metaData.add(new VCFInfoHeaderLine(REF_ALLELE_FREQ, 1, VCFHeaderLineType.Float, "Ref Allele Freq."));
        metaData.add(new VCFInfoHeaderLine(ALT_ALLELE_FREQ, 1, VCFHeaderLineType.Float, "Alt Allele Freq."));
        metaData.add(new VCFInfoHeaderLine(REF_ALLELE_COUNT, 1, VCFHeaderLineType.Integer, "Ref Allele count"));
        metaData.add(new VCFInfoHeaderLine(ALT_ALLELE_COUNT, 1, VCFHeaderLineType.Integer, "Alt Allele count."));
        metaData.add(new VCFInfoHeaderLine(TOTAL_ALLELES_COUNT, 1, VCFHeaderLineType.Integer, "Total allele count"));
        metaData.add(new VCFInfoHeaderLine(N_SAMPLES, 1, VCFHeaderLineType.Integer, "N samples"));
        metaData.add(new VCFFilterHeaderLine(MULTIALLELIC, "multiallelic"));
        metaData.add(new VCFFilterHeaderLine(FILTER1, "filtered in input"));
        VCFHeader header = new VCFHeader(metaData);
        header.setSequenceDictionary(dict);
        JVarkitVersion.getInstance().addMetaData(this, header);
        vcw.writeHeader(header);
        ProgressFactory.Watcher<VariantContext> progress = ProgressFactory.newInstance().dictionary(dict).logger(LOG).build();
        while (iter.hasNext()) {
            final VariantContext ctx = progress.apply(iter.next());
            vcw.add(ctx);
        }
        vcw.close();
        iter.close();
        progress.close();
        return 0;
    } catch (Exception e) {
        LOG.error(e);
        return -1;
    }
}
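
The key move in this example is handing a list of already-sorted CloseableIterator<VariantContext> sources, plus a comparator, to htsjdk's MergingIterator, which then yields a single globally ordered stream. A minimal sketch of that k-way merge; the inputs a.vcf.gz and b.vcf.gz are illustrative names and are assumed to be sorted consistently with the comparator, and the real tool orders contigs by the reference dictionary rather than lexicographically:

import htsjdk.samtools.util.CloseableIterator;
import htsjdk.samtools.util.MergingIterator;
import htsjdk.variant.variantcontext.VariantContext;
import htsjdk.variant.vcf.VCFFileReader;

import java.io.File;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;

public class MergeVcfSketch {
    public static void main(final String[] args) {
        final List<VCFFileReader> readers = new ArrayList<>();
        final List<CloseableIterator<VariantContext>> iterators = new ArrayList<>();
        for (final String path : new String[] { "a.vcf.gz", "b.vcf.gz" }) { // illustrative paths
            final VCFFileReader r = new VCFFileReader(new File(path), false);
            readers.add(r);
            iterators.add(r.iterator());
        }
        // Simplified ordering: contig lexicographically, then start position.
        final Comparator<VariantContext> cmp = Comparator
                .comparing(VariantContext::getContig)
                .thenComparingInt(VariantContext::getStart);
        try (MergingIterator<VariantContext> merged = new MergingIterator<>(cmp, iterators)) {
            while (merged.hasNext()) {
                final VariantContext ctx = merged.next();
                System.out.println(ctx.getContig() + ":" + ctx.getStart());
            }
        } finally {
            // closing the readers also releases the underlying file handles
            readers.forEach(VCFFileReader::close);
        }
    }
}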

Example 64 with CloseableIterator

use of htsjdk.samtools.util.CloseableIterator in project jvarkit by lindenb.

the class GcPercentAndDepth method doWork.

@Override
public int doWork(final List<String> args) {
    if (this.windowSize <= 0) {
        LOG.error("Bad window size.");
        return -1;
    }
    if (this.windowStep <= 0) {
        LOG.error("Bad window step.");
        return -1;
    }
    if (this.refFile == null) {
        LOG.error("Undefined REF File");
        return -1;
    }
    if (args.isEmpty()) {
        LOG.error("Illegal Number of arguments.");
        return -1;
    }
    ReferenceSequenceFile indexedFastaSequenceFile = null;
    List<SamReader> readers = new ArrayList<SamReader>();
    PrintWriter out = null;
    try {
        LOG.info("Loading " + this.refFile);
        indexedFastaSequenceFile = ReferenceSequenceFileFactory.getReferenceSequenceFile(this.refFile);
        this.samSequenceDictionary = SequenceDictionaryUtils.extractRequired(indexedFastaSequenceFile);
        if (this.samSequenceDictionary == null) {
            LOG.error("Cannot get sequence dictionary for " + this.refFile);
            return -1;
        }
        out = super.openPathOrStdoutAsPrintWriter(outPutFile);
        Set<String> all_samples = new TreeSet<String>();
        /* create input, collect sample names */
        for (int optind = 0; optind < args.size(); ++optind) {
            LOG.info("Opening " + args.get(optind));
            final SamReader samFileReaderScan = super.openSamReader(args.get(optind));
            readers.add(samFileReaderScan);
            final SAMFileHeader header = samFileReaderScan.getFileHeader();
            if (!SequenceUtil.areSequenceDictionariesEqual(this.samSequenceDictionary, header.getSequenceDictionary())) {
                LOG.error(JvarkitException.DictionariesAreNotTheSame.getMessage(this.samSequenceDictionary, header.getSequenceDictionary()));
                return -1;
            }
            for (final SAMReadGroupRecord g : header.getReadGroups()) {
                final String sample = this.partition.apply(g);
                if (StringUtil.isBlank(sample)) {
                    LOG.warning("Read group " + g.getId() + " has no sample in merged dictionary");
                    continue;
                }
                all_samples.add(sample);
            }
        }
        LOG.info("N " + this.partition.name() + "=" + all_samples.size());
        /* print header */
        out.print("#");
        if (!this.hide_genomic_index) {
            out.print("id");
            out.print("\t");
        }
        out.print("chrom");
        out.print("\t");
        out.print("start");
        out.print("\t");
        out.print("end");
        out.print("\t");
        out.print("GCPercent");
        for (final String sample : all_samples) {
            out.print("\t");
            out.print(sample);
        }
        out.println();
        final List<RegionCaptured> regionsCaptured = new ArrayList<RegionCaptured>();
        if (bedFile != null) {
            LOG.info("Reading BED:" + bedFile);
            try (BedLineReader r = new BedLineReader(bedFile)) {
                r.stream().filter(B -> B != null).forEach(B -> {
                    final SAMSequenceRecord ssr = this.samSequenceDictionary.getSequence(B.getContig());
                    if (ssr == null) {
                        LOG.warning("Cannot resolve " + B.getContig());
                        return;
                    }
                    final RegionCaptured roi = new RegionCaptured(ssr, B.getStart() - 1, B.getEnd());
                    regionsCaptured.add(roi);
                });
            }
            LOG.info("end Reading BED:" + bedFile);
            Collections.sort(regionsCaptured);
        } else {
            LOG.info("No capture, peeking everything");
            for (final SAMSequenceRecord ssr : this.samSequenceDictionary.getSequences()) {
                final RegionCaptured roi = new RegionCaptured(ssr, 0, ssr.getSequenceLength());
                regionsCaptured.add(roi);
            }
        }
        final SAMSequenceDictionaryProgress progress = new SAMSequenceDictionaryProgress(this.samSequenceDictionary).logger(LOG);
        GenomicSequence genomicSequence = null;
        for (final RegionCaptured roi : regionsCaptured) {
            if (genomicSequence == null || !genomicSequence.getChrom().equals(roi.getContig())) {
                genomicSequence = new GenomicSequence(indexedFastaSequenceFile, roi.getContig());
            }
            Map<String, int[]> sample2depth = new HashMap<String, int[]>();
            Map<String, Double> sample2meanDepth = new HashMap<String, Double>();
            for (final String sample : all_samples) {
                int[] depth = new int[roi.length()];
                Arrays.fill(depth, 0);
                sample2depth.put(sample, depth);
            }
            List<CloseableIterator<SAMRecord>> iterators = new ArrayList<CloseableIterator<SAMRecord>>();
            for (final SamReader r : readers) {
                iterators.add(r.query(roi.getContig(), roi.getStart(), roi.getEnd(), false));
            }
            final MergingIterator<SAMRecord> merginIter = new MergingIterator<>(new SAMRecordCoordinateComparator(), iterators);
            while (merginIter.hasNext()) {
                final SAMRecord rec = merginIter.next();
                if (rec.getReadUnmappedFlag())
                    continue;
                if (this.filter.filterOut(rec))
                    continue;
                final String sample = this.partition.getPartion(rec, null);
                if (sample == null)
                    continue;
                final int[] depth = sample2depth.get(sample);
                if (depth == null)
                    continue;
                final Cigar cigar = rec.getCigar();
                if (cigar == null)
                    continue;
                int refpos1 = rec.getAlignmentStart();
                for (final CigarElement ce : cigar.getCigarElements()) {
                    final CigarOperator op = ce.getOperator();
                    if (!op.consumesReferenceBases())
                        continue;
                    if (op.consumesReadBases()) {
                        for (int i = 0; i < ce.getLength(); ++i) {
                            if (refpos1 + i < roi.getStart())
                                continue;
                            if (refpos1 + i > roi.getEnd())
                                break;
                            depth[refpos1 + i - roi.getStart()]++;
                        }
                    }
                    refpos1 += ce.getLength();
                }
            }
            merginIter.close();
            for (final RegionCaptured.SlidingWindow win : roi) {
                double total = 0f;
                int countN = 0;
                for (int pos1 = win.getStart(); pos1 <= win.getEnd(); ++pos1) {
                    switch(genomicSequence.charAt(pos1 - 1)) {
                        case 'c':
                        case 'C':
                        case 'g':
                        case 'G':
                        case 's':
                        case 'S':
                            {
                                total++;
                                break;
                            }
                        case 'n':
                        case 'N':
                            countN++;
                            break;
                        default:
                            break;
                    }
                }
                if (skip_if_contains_N && countN > 0)
                    continue;
                double GCPercent = total / (double) win.length();
                int max_depth_for_win = 0;
                sample2meanDepth.clear();
                for (final String sample : all_samples) {
                    int[] depth = sample2depth.get(sample);
                    double sum = 0;
                    for (int pos = win.getStart(); pos < win.getEnd() && (pos - roi.getStart()) < depth.length; ++pos) {
                        sum += depth[pos - roi.getStart()];
                    }
                    double mean = (sum / (double) depth.length);
                    max_depth_for_win = Math.max(max_depth_for_win, (int) mean);
                    sample2meanDepth.put(sample, mean);
                }
                if (max_depth_for_win < this.min_depth)
                    continue;
                if (!this.hide_genomic_index) {
                    out.print(win.getGenomicIndex());
                    out.print("\t");
                }
                out.print(win.getContig());
                out.print("\t");
                out.print(win.getStart() - 1);
                out.print("\t");
                out.print(win.getEnd());
                out.print("\t");
                out.printf("%.2f", GCPercent);
                for (String sample : all_samples) {
                    out.print("\t");
                    out.printf("%.2f", (double) sample2meanDepth.get(sample));
                }
                out.println();
            }
        }
        progress.finish();
        out.flush();
        return 0;
    } catch (Exception err) {
        LOG.error(err);
        return -1;
    } finally {
        for (SamReader r : readers) CloserUtil.close(r);
        CloserUtil.close(indexedFastaSequenceFile);
        CloserUtil.close(out);
    }
}
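
The depth computation above relies on SamReader.query(contig, start, end, contained), which uses the BAM index to return a CloseableIterator<SAMRecord> restricted to one region. A minimal sketch of that region query, assuming sample.bam (illustrative name) is coordinate-sorted and indexed:

import htsjdk.samtools.SAMRecord;
import htsjdk.samtools.SamReader;
import htsjdk.samtools.SamReaderFactory;
import htsjdk.samtools.ValidationStringency;
import htsjdk.samtools.util.CloseableIterator;

import java.io.File;
import java.io.IOException;

public class RegionQuerySketch {
    public static void main(final String[] args) throws IOException {
        final File bam = new File("sample.bam"); // illustrative: must be sorted and indexed
        try (SamReader reader = SamReaderFactory.makeDefault()
                .validationStringency(ValidationStringency.LENIENT)
                .open(bam);
             // query(...) returns a CloseableIterator<SAMRecord> limited to the interval
             CloseableIterator<SAMRecord> iter = reader.query("chr1", 1_000_000, 1_001_000, false)) {
            long n = 0L;
            while (iter.hasNext()) {
                final SAMRecord rec = iter.next();
                if (rec.getReadUnmappedFlag()) continue; // same unmapped filter as above
                n++;
            }
            System.out.println("overlapping reads: " + n);
        }
    }
}

The contained=false argument asks for any read overlapping the interval, matching the r.query(roi.getContig(), roi.getStart(), roi.getEnd(), false) calls above.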

Example 65 with CloseableIterator

use of htsjdk.samtools.util.CloseableIterator in project jvarkit by lindenb.

the class SamReadLengthDistribution method scan.

private void scan(final SamReader in, Path pathName) throws IOException {
    final String defName = (pathName == null ? "STDIN" : pathName.toString()) + "#" + this.partition.name();
    in.getFileHeader().getReadGroups().stream()
        .map(RG -> this.partition.apply(RG))
        .map(S -> StringUtils.isBlank(S) ? defName : S)
        .filter(S -> !this.sample2discreteMedian.containsKey(S))
        .forEach(S -> this.sample2discreteMedian.put(S, new DiscreteMedian<Integer>()));
    final CloseableIterator<SAMRecord> iter = openSamIterator(in);
    while (iter.hasNext()) {
        final SAMRecord rec = iter.next();
        if (rec.getReadFailsVendorQualityCheckFlag())
            continue;
        if (rec.getDuplicateReadFlag())
            continue;
        if (rec.isSecondaryOrSupplementary())
            continue;
        if (!rec.getReadUnmappedFlag() && rec.getMappingQuality() < this.mapq)
            continue;
        final String sampleName = this.partition.getPartion(rec, defName);
        DiscreteMedian<Integer> counter = this.sample2discreteMedian.get(sampleName);
        if (counter == null) {
            counter = new DiscreteMedian<>();
            this.sample2discreteMedian.put(sampleName, counter);
        }
        final int len;
        switch(this.method) {
            case SEQ_LENGTH:
                len = rec.getReadLength();
                break;
            case CIGAR_REF_LENGTH:
                {
                    if (rec.getReadUnmappedFlag())
                        continue;
                    final Cigar c = rec.getCigar();
                    if (c == null || c.isEmpty())
                        continue;
                    len = c.getReferenceLength();
                    break;
                }
            case CIGAR_PADDED_REF_LENGTH:
                {
                    if (rec.getReadUnmappedFlag())
                        continue;
                    final Cigar c = rec.getCigar();
                    if (c == null || c.isEmpty())
                        continue;
                    len = c.getPaddedReferenceLength();
                    break;
                }
            case INSERT_LENGTH:
                {
                    if (rec.getReadUnmappedFlag())
                        continue;
                    if (!rec.getReadPairedFlag())
                        continue;
                    if (rec.getMateUnmappedFlag())
                        continue;
                    if (!rec.getContig().equals(rec.getMateReferenceName()))
                        continue;
                    // ignore 2nd
                    if (!rec.getFirstOfPairFlag())
                        continue;
                    len = Math.abs(rec.getInferredInsertSize());
                    break;
                }
            default:
                throw new IllegalStateException("unsupported method " + this.method);
        }
        counter.add(len);
    }
    iter.close();
}
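
The scan method is a plain forward pass over a CloseableIterator<SAMRecord>, applying the vendor-quality, duplicate, and secondary/supplementary filters before recording a length. A minimal sketch of the same loop that collects the SEQ_LENGTH histogram into a TreeMap instead of jvarkit's DiscreteMedian (sample.bam is an illustrative path):

import htsjdk.samtools.SAMRecord;
import htsjdk.samtools.SamReader;
import htsjdk.samtools.SamReaderFactory;
import htsjdk.samtools.util.CloseableIterator;

import java.io.File;
import java.io.IOException;
import java.util.TreeMap;

public class ReadLengthSketch {
    public static void main(final String[] args) throws IOException {
        final TreeMap<Integer, Long> lengthCounts = new TreeMap<>(); // stand-in for DiscreteMedian
        final File bam = new File("sample.bam"); // illustrative path
        try (SamReader reader = SamReaderFactory.makeDefault().open(bam);
             CloseableIterator<SAMRecord> iter = reader.iterator()) {
            while (iter.hasNext()) {
                final SAMRecord rec = iter.next();
                if (rec.getReadFailsVendorQualityCheckFlag()) continue;
                if (rec.getDuplicateReadFlag()) continue;
                if (rec.isSecondaryOrSupplementary()) continue;
                lengthCounts.merge(rec.getReadLength(), 1L, Long::sum); // SEQ_LENGTH method only
            }
        }
        lengthCounts.forEach((len, n) -> System.out.println(len + "\t" + n));
    }
}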
