
Example 91 with SAMRecordIterator

Use of htsjdk.samtools.SAMRecordIterator in project jvarkit by lindenb.

From the class SamScanSplitReads, method scanFile.

private void scanFile(SamReader r) {
    final SAMSequenceDictionaryProgress progress = new SAMSequenceDictionaryProgress(r.getFileHeader());
    final SAMRecordIterator iter = r.iterator();
    while (iter.hasNext()) {
        analyseSamRecord(progress.watch(iter.next()));
    }
    progress.finish();
    iter.close();
}
Also used : SAMSequenceDictionaryProgress(com.github.lindenb.jvarkit.util.picard.SAMSequenceDictionaryProgress) SAMRecordIterator(htsjdk.samtools.SAMRecordIterator)
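
For comparison, the same whole-file scan can be written with try-with-resources, since SamReader and SAMRecordIterator are both closeable. This is a minimal, self-contained sketch (the mapped-read counter stands in for the analyseSamRecord() call above; htsjdk is assumed to be on the classpath):

import java.io.File;
import htsjdk.samtools.SAMRecord;
import htsjdk.samtools.SAMRecordIterator;
import htsjdk.samtools.SamReader;
import htsjdk.samtools.SamReaderFactory;

public class ScanBamExample {
    public static void main(final String[] args) throws Exception {
        final File bamFile = new File(args[0]); // path to a SAM/BAM/CRAM file
        long mapped = 0L;
        // SamReader and SAMRecordIterator both implement Closeable, so
        // try-with-resources replaces the explicit iter.close() above.
        try (SamReader reader = SamReaderFactory.makeDefault().open(bamFile);
             SAMRecordIterator iter = reader.iterator()) {
            while (iter.hasNext()) {
                final SAMRecord rec = iter.next();
                // per-record work would go here (analyseSamRecord in the method above)
                if (!rec.getReadUnmappedFlag()) mapped++;
            }
        }
        System.out.println("mapped records: " + mapped);
    }
}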

Example 92 with SAMRecordIterator

Use of htsjdk.samtools.SAMRecordIterator in project jvarkit by lindenb.

From the class SamShortInvertion, method doWork.

@Override
public int doWork(List<String> input) {
    SamReader r = null;
    PrintStream out = null;
    // SAMFileWriter w=null;
    try {
        final List<SamReaderList> samReaders = new ArrayList<>();
        final Set<String> args = IOUtils.unrollFiles(input);
        if (args.isEmpty()) {
            LOG.error("No input file");
            return -1;
        }
        SAMSequenceDictionary dict = null;
        for (final String bam : args) {
            final SamReaderList samReader = new SamReaderList(new File(bam), 2);
            for (int i = 0; i < samReaders.size(); ++i) {
                if (samReaders.get(i).sample.equals(samReader.sample)) {
                    samReader.close();
                    LOG.error("Sample defined in two bams " + samReader.sample);
                    return -1;
                }
                // NOTE: because this dictionary check sits inside the loop over the
                // previously added readers, the first BAM's dictionary is never compared.
                if (dict == null) {
                    dict = samReader.dict;
                } else if (!SequenceUtil.areSequenceDictionariesEqual(dict, samReader.dict)) {
                    samReader.close();
                    LOG.error("bam contains two sequence dict.");
                    return -1;
                }
            }
            samReaders.add(samReader);
        }
        final Predicate<SAMRecord> samRecordFilter = new Predicate<SAMRecord>() {

            @Override
            public boolean test(SAMRecord rec) {
                if (rec.getReadUnmappedFlag())
                    return false;
                if (rec.isSecondaryOrSupplementary())
                    return false;
                if (rec.getDuplicateReadFlag())
                    return false;
                if (rec.getReadFailsVendorQualityCheckFlag())
                    return false;
                return true;
            }
        };
        for (SamReaderList samReader : samReaders) {
            SAMRecordIterator iter = samReader.get(0).iterator();
            while (iter.hasNext()) {
                final SAMRecord rec = iter.next();
                if (!samRecordFilter.test(rec))
                    continue;
                boolean skip = true;
                if (rec.getCigar() != null && rec.getCigar().isClipped()) {
                    skip = false;
                }
                if (skip)
                    continue;
                if (rec.getReadPairedFlag()) {
                    if (!rec.getMateUnmappedFlag()) {
                        SAMRecordIterator iter2 = samReader.get(1).query(rec.getMateReferenceName(), rec.getMateAlignmentStart(), rec.getMateAlignmentStart(), false);
                        while (iter2.hasNext()) {
                            final SAMRecord rec2 = iter2.next();
                            if (!samRecordFilter.test(rec2))
                                continue;
                            if (!rec2.getReadName().equals(rec.getReadName()))
                                continue;
                            if (rec2.getFirstOfPairFlag() == rec.getFirstOfPairFlag())
                                continue;
                            if (rec2.getSecondOfPairFlag() == rec.getSecondOfPairFlag())
                                continue;
                            challenge(rec, rec2);
                            break;
                        }
                        iter2.close();
                    } else {
                        challenge(rec, null);
                    }
                }
            }
            iter.close();
        }
        r = null;
        out = openFileOrStdoutAsPrintStream(outputFile);
        // NOTE: 'r' is never assigned an open reader before this point, so the next
        // call would throw a NullPointerException as written; the header presumably
        // should come from one of the readers in 'samReaders'.
        final SAMFileHeader header = r.getFileHeader();
        OtherCanonicalAlignFactory xpalignFactory = new OtherCanonicalAlignFactory(header);
        int prev_tid = -1;
        short[] coverage = null;
        short max_coverage = 0;
        // w=swf.make(header, System.out);
        final SAMSequenceDictionaryProgress progress = new SAMSequenceDictionaryProgress(header.getSequenceDictionary());
        final SAMRecordIterator it = r.iterator();
        for (; ; ) {
            SAMRecord rec = null;
            if (it.hasNext()) {
                rec = progress.watch(it.next());
                if (rec.getReadUnmappedFlag())
                    continue;
                if (rec.isSecondaryOrSupplementary())
                    continue;
                if (rec.getDuplicateReadFlag())
                    continue;
                if (rec.getReadFailsVendorQualityCheckFlag())
                    continue;
            }
            if (rec == null || prev_tid == -1 || (prev_tid != -1 && prev_tid != rec.getReferenceIndex())) {
                if (coverage != null) {
                    while (max_coverage >= Math.max(1, this.min_coverage)) {
                        LOG.info("Scanning " + header.getSequence(prev_tid).getSequenceName() + " for cov:" + max_coverage);
                        int chromStart0 = 0;
                        while (chromStart0 < coverage.length) {
                            if (coverage[chromStart0] == max_coverage) {
                                // reset that pos
                                coverage[chromStart0] = 0;
                                int chromEnd0 = chromStart0 + 1;
                                while (chromEnd0 < coverage.length && coverage[chromEnd0] == max_coverage) {
                                    // reset that pos
                                    coverage[chromEnd0] = 0;
                                    ++chromEnd0;
                                }
                                out.print(header.getSequence(prev_tid).getSequenceName());
                                out.print('\t');
                                out.print(chromStart0);
                                out.print('\t');
                                out.print(chromEnd0);
                                out.print('\t');
                                out.print(max_coverage);
                                out.println();
                                // reset 3'
                                for (int x = chromEnd0; x < coverage.length && coverage[x] > 0; ++x) {
                                    coverage[x] = 0;
                                }
                                // reset 5'
                                for (int x = chromStart0 - 1; x >= 0 && coverage[x] > 0; --x) {
                                    coverage[x] = 0;
                                }
                                chromStart0 = chromEnd0;
                            } else {
                                ++chromStart0;
                            }
                        }
                        max_coverage--;
                    }
                    coverage = null;
                }
                if (rec == null)
                    break;
                prev_tid = rec.getReferenceIndex();
                LOG.info("Alloc sizeof(short)*" + header.getSequence(prev_tid).getSequenceLength());
                coverage = new short[header.getSequence(prev_tid).getSequenceLength()];
                Arrays.fill(coverage, (short) 0);
                max_coverage = 0;
            }
            List<OtherCanonicalAlign> saList = xpalignFactory.getXPAligns(rec);
            if (saList.isEmpty())
                continue;
            for (OtherCanonicalAlign xp : saList) {
                if (!xp.getReferenceName().equals(rec.getReferenceName()))
                    continue;
                if (!rec.getReadNegativeStrandFlag()) {
                    // read is plus
                    if (!xp.getReadNegativeStrandFlag()) {
                        // ignore both same strand
                        continue;
                    }
                } else {
                    // read.strand=='-'
                    if (xp.getReadNegativeStrandFlag()) {
                        // ignore both same strand
                        continue;
                    }
                }
                if (Math.abs(rec.getUnclippedStart() - xp.getAlignmentStart()) > max_size_inversion) {
                    // info(xp+" vs pos "+rec.getUnclippedStart());
                    continue;
                }
                int chromStart = Math.min(rec.getUnclippedStart(), xp.getAlignmentStart()) - 1;
                int chromEnd = Math.max(rec.getUnclippedEnd(), xp.getAlignmentStart()) - 1;
                for (int x = chromStart; x <= chromEnd && x < coverage.length; ++x) {
                    if (coverage[x] < Short.MAX_VALUE)
                        coverage[x]++;
                    if (max_coverage < coverage[x]) {
                        LOG.info("Max coverage " + max_coverage);
                        max_coverage = coverage[x];
                    }
                }
            }
        }
        it.close();
        r.close();
        r = null;
        progress.finish();
        return RETURN_OK;
    } catch (Exception err) {
        LOG.error(err);
        return -1;
    } finally {
        CloserUtil.close(r);
        CloserUtil.close(out);
    }
}
Also used : PrintStream(java.io.PrintStream) SAMRecordIterator(htsjdk.samtools.SAMRecordIterator) SAMSequenceDictionaryProgress(com.github.lindenb.jvarkit.util.picard.SAMSequenceDictionaryProgress) OtherCanonicalAlign(com.github.lindenb.jvarkit.util.picard.OtherCanonicalAlign) ArrayList(java.util.ArrayList) SAMSequenceDictionary(htsjdk.samtools.SAMSequenceDictionary) OtherCanonicalAlignFactory(com.github.lindenb.jvarkit.util.picard.OtherCanonicalAlignFactory) RuntimeIOException(htsjdk.samtools.util.RuntimeIOException) IOException(java.io.IOException) Predicate(java.util.function.Predicate) SamReader(htsjdk.samtools.SamReader) SAMRecord(htsjdk.samtools.SAMRecord) SAMFileHeader(htsjdk.samtools.SAMFileHeader) File(java.io.File)
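
A side note on the mate lookup above: it queries a second reader at the mate position and then matches the read name and pair flags by hand. Recent htsjdk versions expose SamReader.queryMate() for the same job. The sketch below uses it, still with two reader handles on the same indexed BAM, because htsjdk does not allow starting a query while an iteration is open on the same reader (which is also why the tool keeps a SamReaderList of two readers per sample). The challenge() bookkeeping of the original is replaced here by a simple counter:

import java.io.File;
import htsjdk.samtools.SAMRecord;
import htsjdk.samtools.SAMRecordIterator;
import htsjdk.samtools.SamReader;
import htsjdk.samtools.SamReaderFactory;

public class MateLookupExample {
    public static void main(final String[] args) throws Exception {
        final File bamFile = new File(args[0]); // indexed BAM: queryMate() needs random access
        long pairsFound = 0L;
        try (SamReader scanReader = SamReaderFactory.makeDefault().open(bamFile);
             // second handle on the same file, used only for the mate queries
             SamReader mateReader = SamReaderFactory.makeDefault().open(bamFile);
             SAMRecordIterator iter = scanReader.iterator()) {
            while (iter.hasNext()) {
                final SAMRecord rec = iter.next();
                if (rec.getReadUnmappedFlag() || rec.isSecondaryOrSupplementary()) continue;
                if (!rec.getReadPairedFlag() || rec.getMateUnmappedFlag()) continue;
                if (rec.getCigar() == null || !rec.getCigar().isClipped()) continue;
                // queryMate() performs the position query plus the read-name/pair-flag
                // matching that the hand-written loop above does explicitly
                final SAMRecord mate = mateReader.queryMate(rec);
                if (mate != null) pairsFound++;
            }
        }
        System.out.println("clipped reads with a located mate: " + pairsFound);
    }
}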

Example 93 with SAMRecordIterator

Use of htsjdk.samtools.SAMRecordIterator in project jvarkit by lindenb.

From the class XContaminations, method doWork.

@Override
public int doWork(final List<String> args) {
    long last_save_ms = System.currentTimeMillis();
    if (this.output_as_vcf && !this.use_only_sample_name) {
        LOG.error("cannot write vcf if --sample is not set");
        return -1;
    }
    if (args.size() < 2) {
        LOG.error("Illegal Number of args");
        return -1;
    }
    final Set<File> bamFiles = IOUtils.unrollFiles(args.subList(1, args.size())).stream().map(S -> new File(S)).collect(Collectors.toSet());
    if (bamFiles.isEmpty()) {
        LOG.error("Undefined BAM file(s)");
        return -1;
    }
    SAMRecordIterator iter = null;
    VcfIterator in = null;
    Map<String, SamReader> sample2samReader = new HashMap<>();
    VariantContextWriter vcfw = null;
    try {
        final SamReaderFactory srf = super.createSamReaderFactory();
        if (args.get(0).equals("-")) {
            in = super.openVcfIterator(null);
        } else {
            in = super.openVcfIterator(args.get(0));
        }
        VCFHeader vcfHeader = in.getHeader();
        final SAMSequenceDictionary dict1 = vcfHeader.getSequenceDictionary();
        if (dict1 == null) {
            LOG.error(JvarkitException.VcfDictionaryMissing.getMessage(args.get(0)));
            return -1;
        }
        final Set<String> sampleNames = new HashSet<>(vcfHeader.getSampleNamesInOrder());
        if (sampleNames.isEmpty()) {
            LOG.error("VCF contains no sample");
            return -1;
        }
        for (final File bamFile : bamFiles) {
            LOG.info("Opening " + bamFile);
            final SamReader samReader = srf.open(bamFile);
            final SAMFileHeader samHeader = samReader.getFileHeader();
            final SAMSequenceDictionary dict2 = samHeader.getSequenceDictionary();
            if (dict2 == null) {
                samReader.close();
                LOG.error(JvarkitException.BamDictionaryMissing.getMessage(bamFile.getPath()));
                return -1;
            }
            if (!SequenceUtil.areSequenceDictionariesEqual(dict1, dict2)) {
                samReader.close();
                LOG.error(JvarkitException.DictionariesAreNotTheSame.getMessage(dict1, dict2));
                return -1;
            }
            if (!samReader.hasIndex()) {
                samReader.close();
                LOG.error("sam is not indexed : " + bamFile);
                return -1;
            }
            String sampleName = null;
            for (final SAMReadGroupRecord rgr : samHeader.getReadGroups()) {
                final String s = rgr.getSample();
                if (StringUtil.isBlank(s))
                    continue;
                if (sampleName == null) {
                    sampleName = s;
                } else if (!sampleName.equals(s)) {
                    samReader.close();
                    LOG.error("Cannot handle more than one sample/bam  " + bamFile + " " + sampleName);
                    return -1;
                }
            }
            if (sampleName == null) {
                samReader.close();
                LOG.error("No sample in " + bamFile);
                // skip this bam
                continue;
            }
            if (!sampleNames.contains(sampleName)) {
                samReader.close();
                LOG.error("Not in VCF header: sample " + sampleName + " " + bamFile);
                // skip this bam
                continue;
            }
            if (sample2samReader.containsKey(sampleName)) {
                samReader.close();
                LOG.error("Cannot handle more than one bam/sample: " + bamFile + " " + sampleName);
                return -1;
            }
            sample2samReader.put(sampleName, samReader);
        }
        if (sample2samReader.size() < 2) {
            LOG.error("Not engough BAM/samples. Expected at least two valid BAMs");
            return -1;
        }
        sampleNames.retainAll(sample2samReader.keySet());
        /* create a VCF is VCF output asked */
        final List<SamplePair> sampleListForVcf;
        if (this.output_as_vcf) {
            vcfw = super.openVariantContextWriter(outputFile);
            final Set<VCFHeaderLine> metaData = new HashSet<>();
            metaData.add(new VCFFormatHeaderLine("S1S1", 1, VCFHeaderLineType.Integer, "reads sample 1 supporting sample 1"));
            metaData.add(new VCFFormatHeaderLine("S1S2", 1, VCFHeaderLineType.Integer, "reads sample 1 supporting sample 2"));
            metaData.add(new VCFFormatHeaderLine("S1SO", 1, VCFHeaderLineType.Integer, "reads sample 1 supporting others"));
            metaData.add(new VCFFormatHeaderLine("S2S1", 1, VCFHeaderLineType.Integer, "reads sample 2 supporting sample 1"));
            metaData.add(new VCFFormatHeaderLine("S2S2", 1, VCFHeaderLineType.Integer, "reads sample 2 supporting sample 2"));
            metaData.add(new VCFFormatHeaderLine("S2SO", 1, VCFHeaderLineType.Integer, "reads sample 2 supporting others"));
            metaData.add(new VCFFormatHeaderLine("FR", 1, VCFHeaderLineType.Float, "Fraction. '-1' for unavailable."));
            metaData.add(new VCFFormatHeaderLine("S1A", 1, VCFHeaderLineType.Character, "sample 1 allele"));
            metaData.add(new VCFFormatHeaderLine("S2A", 1, VCFHeaderLineType.Character, "sample 2 allele"));
            metaData.add(new VCFFilterHeaderLine("XCONTAMINATION", "Fraction test is > " + fraction_treshold));
            metaData.add(new VCFFilterHeaderLine("BADSAMPLES", "At least one pair of genotype fails the 'LE' test"));
            metaData.add(new VCFInfoHeaderLine("LE", 1, VCFHeaderLineType.Integer, "number of pair of genotypes having (S1S1<=S1S2 or S2S2<=S2S1)."));
            metaData.add(new VCFInfoHeaderLine("BADSAMPLES", VCFHeaderLineCount.UNBOUNDED, VCFHeaderLineType.String, "Samples founds failing the 'LE' test"));
            sampleListForVcf = new ArrayList<>();
            final List<String> sampleList = new ArrayList<>(sampleNames);
            for (int x = 0; x + 1 < sampleList.size(); ++x) {
                for (int y = x + 1; y < sampleList.size(); ++y) {
                    sampleListForVcf.add(new SamplePair(new SimpleSampleIdenfifier(sampleList.get(x)), new SimpleSampleIdenfifier(sampleList.get(y))));
                }
            }
            final VCFHeader header2 = new VCFHeader(metaData, sampleListForVcf.stream().map(V -> V.getLabel()).sorted().collect(Collectors.toList()));
            header2.setSequenceDictionary(dict1);
            vcfw.writeHeader(header2);
        } else {
            vcfw = null;
            sampleListForVcf = null;
        }
        final Map<SamplePair, SampleAlleles> contaminationTable = new HashMap<>();
        final SAMSequenceDictionaryProgress progress = new SAMSequenceDictionaryProgress(dict1).logger(LOG);
        while (in.hasNext()) {
            final VariantContext ctx = progress.watch(in.next());
            if (!ctx.isSNP() || ctx.isFiltered() || !ctx.isBiallelic() || ctx.isSymbolic() || !this.variantFilter.test(ctx)) {
                continue;
            }
            int count_homref = 0;
            int count_homvar = 0;
            int count_het = 0;
            final Map<String, Genotype> sample2gt = new HashMap<>();
            for (int gidx = 0; gidx < ctx.getNSamples(); ++gidx) {
                final Genotype G = ctx.getGenotype(gidx);
                if (!G.isCalled())
                    continue;
                if (G.isHet()) {
                    // here because in use_singleton we must be sure that there is only one hom_var
                    count_het++;
                    if (this.use_singleton && count_het > 0)
                        break;
                } else if (G.isHomVar()) {
                    // here because in use_singleton we must be sure that there is only one hom_var
                    count_homvar++;
                    if (this.use_singleton && count_homvar > 1)
                        break;
                }
                if (G.isFiltered())
                    continue;
                if (!sample2samReader.containsKey(G.getSampleName()))
                    continue;
                if (!sampleNames.contains(G.getSampleName()))
                    continue;
                if (!this.genotypeFilter.test(ctx, G))
                    continue;
                sample2gt.put(G.getSampleName(), G);
            }
            if (this.use_singleton && count_het > 0)
                continue;
            if (this.use_singleton && count_homvar > 1)
                continue;
            if (sample2gt.size() < 2)
                continue;
            // reset and recount
            count_homref = 0;
            count_homvar = 0;
            count_het = 0;
            for (final String sampleName : sample2gt.keySet()) {
                final Genotype G = ctx.getGenotype(sampleName);
                switch(G.getType()) {
                    case HOM_REF:
                        count_homref++;
                        break;
                    case HOM_VAR:
                        count_homvar++;
                        break;
                    case HET:
                        count_het++;
                        break;
                    default:
                        break;
                }
            }
            // singleton check
            if (this.use_singleton && (count_het > 0 || count_homvar != 1)) {
                continue;
            }
            // at least one HOM_REF and one HOM_VAR
            if (count_homref == 0)
                continue;
            if (count_homvar == 0)
                continue;
            final Map<SampleIdentifier, Counter<Character>> sample_identifier_2allelesCount = new HashMap<>();
            /* scan Reads for those Genotype/Samples */
            for (final String sampleName : sample2gt.keySet()) {
                if (!sample2samReader.containsKey(sampleName))
                    continue;
                // sample name is not in vcf header
                final SamReader samReader = sample2samReader.get(sampleName);
                if (samReader == null)
                    continue;
                final Genotype genotype = sample2gt.get(sampleName);
                if (genotype == null)
                    continue;
                iter = samReader.query(ctx.getContig(), ctx.getStart(), ctx.getEnd(), false);
                while (iter.hasNext()) {
                    final SAMRecord record = iter.next();
                    if (record.getEnd() < ctx.getStart())
                        continue;
                    if (ctx.getEnd() < record.getStart())
                        continue;
                    if (record.getReadUnmappedFlag())
                        continue;
                    if (this.filter.filterOut(record))
                        continue;
                    final SAMReadGroupRecord srgr = record.getReadGroup();
                    // not current sample
                    if (srgr == null)
                        continue;
                    if (!sampleName.equals(srgr.getSample()))
                        continue;
                    final Cigar cigar = record.getCigar();
                    if (cigar == null || cigar.isEmpty())
                        continue;
                    byte[] readSeq = record.getReadBases();
                    if (readSeq == null || readSeq.length == 0)
                        continue;
                    int readPos = record.getReadPositionAtReferencePosition(ctx.getStart());
                    if (readPos < 1)
                        continue;
                    readPos--;
                    if (readPos >= readSeq.length)
                        continue;
                    final char base = Character.toUpperCase((char) readSeq[readPos]);
                    if (base == 'N')
                        continue;
                    final SampleIdentifier sampleIdentifier;
                    if (this.use_only_sample_name) {
                        sampleIdentifier = new SimpleSampleIdenfifier(sampleName);
                    } else {
                        final ShortReadName readName = ShortReadName.parse(record);
                        if (!readName.isValid()) {
                            LOG.info("No a valid read name " + record.getReadName());
                            continue;
                        }
                        sampleIdentifier = new SequencerFlowCellRunLaneSample(readName, sampleName);
                    }
                    Counter<Character> sampleAlleles = sample_identifier_2allelesCount.get(sampleIdentifier);
                    if (sampleAlleles == null) {
                        sampleAlleles = new Counter<Character>();
                        sample_identifier_2allelesCount.put(sampleIdentifier, sampleAlleles);
                    }
                    sampleAlleles.incr(base);
                }
                iter.close();
                iter = null;
            }
            /* end scan reads for this sample */
            /* sum-up data for this SNP */
            final VariantContextBuilder vcb;
            final List<Genotype> genotypeList;
            if (this.output_as_vcf) {
                vcb = new VariantContextBuilder(args.get(0), ctx.getContig(), ctx.getStart(), ctx.getEnd(), ctx.getAlleles());
                if (ctx.hasID())
                    vcb.id(ctx.getID());
                genotypeList = new ArrayList<>();
            } else {
                vcb = null;
                genotypeList = null;
            }
            for (final String sample1 : sample2gt.keySet()) {
                final Genotype g1 = sample2gt.get(sample1);
                final char a1 = g1.getAllele(0).getBaseString().charAt(0);
                for (final String sample2 : sample2gt.keySet()) {
                    if (sample1.compareTo(sample2) >= 0)
                        continue;
                    final Genotype g2 = sample2gt.get(sample2);
                    if (g2.sameGenotype(g1))
                        continue;
                    final char a2 = g2.getAllele(0).getBaseString().charAt(0);
                    for (final SampleIdentifier sfcr1 : sample_identifier_2allelesCount.keySet()) {
                        if (!sfcr1.getSampleName().equals(sample1))
                            continue;
                        final Counter<Character> counter1 = sample_identifier_2allelesCount.get(sfcr1);
                        if (counter1 == null)
                            continue;
                        for (final SampleIdentifier sfcr2 : sample_identifier_2allelesCount.keySet()) {
                            if (!sfcr2.getSampleName().equals(sample2))
                                continue;
                            final SamplePair samplePair = new SamplePair(sfcr1, sfcr2);
                            final Counter<Character> counter2 = sample_identifier_2allelesCount.get(sfcr2);
                            if (counter2 == null)
                                continue;
                            SampleAlleles sampleAlleles = contaminationTable.get(samplePair);
                            if (sampleAlleles == null) {
                                sampleAlleles = new SampleAlleles();
                                contaminationTable.put(samplePair, sampleAlleles);
                                if (!this.output_as_vcf && contaminationTable.size() % 10000 == 0)
                                    LOG.info("n(pairs)=" + contaminationTable.size());
                            }
                            sampleAlleles.number_of_comparaisons++;
                            for (final Character allele : counter1.keySet()) {
                                final long n = counter1.count(allele);
                                if (allele.equals(a1)) {
                                    sampleAlleles.reads_sample1_supporting_sample1 += n;
                                } else if (allele.equals(a2)) {
                                    sampleAlleles.reads_sample1_supporting_sample2 += n;
                                } else {
                                    sampleAlleles.reads_sample1_supporting_other += n;
                                }
                            }
                            for (final Character allele : counter2.keySet()) {
                                final long n = counter2.count(allele);
                                if (allele.equals(a2)) {
                                    sampleAlleles.reads_sample2_supporting_sample2 += n;
                                } else if (allele.equals(a1)) {
                                    sampleAlleles.reads_sample2_supporting_sample1 += n;
                                } else {
                                    sampleAlleles.reads_sample2_supporting_other += n;
                                }
                            }
                        }
                    }
                }
            }
            if (this.output_as_vcf) {
                final Set<String> bad_samples = new TreeSet<>();
                boolean fraction_flag = false;
                int num_lt = 0;
                for (final SamplePair samplepair : sampleListForVcf) {
                    final GenotypeBuilder gb = new GenotypeBuilder(samplepair.getLabel());
                    final SampleAlleles sampleAlleles = contaminationTable.get(samplepair);
                    if (sampleAlleles != null) {
                        gb.attribute("S1S1", sampleAlleles.reads_sample1_supporting_sample1);
                        gb.attribute("S1S2", sampleAlleles.reads_sample1_supporting_sample2);
                        gb.attribute("S1SO", sampleAlleles.reads_sample1_supporting_other);
                        gb.attribute("S2S1", sampleAlleles.reads_sample2_supporting_sample1);
                        gb.attribute("S2S2", sampleAlleles.reads_sample2_supporting_sample2);
                        gb.attribute("S2SO", sampleAlleles.reads_sample2_supporting_other);
                        gb.attribute("S1A", sample2gt.get(samplepair.sample1.getSampleName()).getAllele(0).getDisplayString().charAt(0));
                        gb.attribute("S2A", sample2gt.get(samplepair.sample2.getSampleName()).getAllele(0).getDisplayString().charAt(0));
                        final double fraction = sampleAlleles.getFraction();
                        gb.attribute("FR", fraction);
                        if (!this.passFractionTreshold.test(fraction)) {
                            fraction_flag = true;
                        }
                        boolean bad_lt_flag = false;
                        if (sampleAlleles.reads_sample1_supporting_sample1 <= this.fail_factor * sampleAlleles.reads_sample1_supporting_sample2) {
                            bad_samples.add(samplepair.sample1.getSampleName());
                            bad_lt_flag = true;
                        }
                        if (sampleAlleles.reads_sample2_supporting_sample2 <= this.fail_factor * sampleAlleles.reads_sample2_supporting_sample1) {
                            bad_samples.add(samplepair.sample2.getSampleName());
                            bad_lt_flag = true;
                        }
                        if (bad_lt_flag) {
                            num_lt++;
                        }
                    } else {
                        gb.attribute("S1S1", -1);
                        gb.attribute("S1S2", -1);
                        gb.attribute("S1SO", -1);
                        gb.attribute("S2S1", -1);
                        gb.attribute("S2S2", -1);
                        gb.attribute("S2SO", -1);
                        gb.attribute("S1A", '.');
                        gb.attribute("S2A", '.');
                        gb.attribute("FR", -1f);
                    }
                    genotypeList.add(gb.make());
                }
                if (!bad_samples.isEmpty()) {
                    vcb.attribute("BADSAMPLES", new ArrayList<>(bad_samples));
                }
                vcb.attribute("LE", num_lt);
                if (fraction_flag || !bad_samples.isEmpty()) {
                    if (fraction_flag)
                        vcb.filter("XCONTAMINATION");
                    if (!bad_samples.isEmpty())
                        vcb.filter("BADSAMPLES");
                } else {
                    vcb.passFilters();
                }
                vcb.genotypes(genotypeList);
                vcfw.add(vcb.make());
                contaminationTable.clear();
            } else {
                final long now = System.currentTimeMillis();
                if (this.outputFile != null && this.save_every_sec > -1L && last_save_ms + (this.save_every_sec * 1000L) > now) {
                    saveToFile(contaminationTable);
                    last_save_ms = now;
                }
            }
        }
        progress.finish();
        if (this.output_as_vcf) {
            vcfw.close();
            vcfw = null;
        } else {
            saveToFile(contaminationTable);
        }
        return 0;
    } catch (final Exception e) {
        LOG.error(e);
        return -1;
    } finally {
        CloserUtil.close(vcfw);
        CloserUtil.close(in);
        CloserUtil.close(iter);
        for (SamReader samReader : sample2samReader.values()) CloserUtil.close(samReader);
        sample2samReader.clear();
    }
}
Also used : JexlVariantPredicate(com.github.lindenb.jvarkit.util.vcf.JexlVariantPredicate) Program(com.github.lindenb.jvarkit.util.jcommander.Program) VCFHeader(htsjdk.variant.vcf.VCFHeader) SAMSequenceDictionaryProgress(com.github.lindenb.jvarkit.util.picard.SAMSequenceDictionaryProgress) SAMFileHeader(htsjdk.samtools.SAMFileHeader) StringUtil(htsjdk.samtools.util.StringUtil) FisherExactTest(com.github.lindenb.jvarkit.math.stats.FisherExactTest) Map(java.util.Map) CloserUtil(htsjdk.samtools.util.CloserUtil) PrintWriter(java.io.PrintWriter) GenotypeBuilder(htsjdk.variant.variantcontext.GenotypeBuilder) SAMRecordIterator(htsjdk.samtools.SAMRecordIterator) Predicate(java.util.function.Predicate) Logger(com.github.lindenb.jvarkit.util.log.Logger) Set(java.util.Set) Collectors(java.util.stream.Collectors) JvarkitException(com.github.lindenb.jvarkit.lang.JvarkitException) SAMRecord(htsjdk.samtools.SAMRecord) List(java.util.List) SAMReadGroupRecord(htsjdk.samtools.SAMReadGroupRecord) VariantContextWriter(htsjdk.variant.variantcontext.writer.VariantContextWriter) VCFInfoHeaderLine(htsjdk.variant.vcf.VCFInfoHeaderLine) VariantContext(htsjdk.variant.variantcontext.VariantContext) VCFHeaderLineCount(htsjdk.variant.vcf.VCFHeaderLineCount) SamReaderFactory(htsjdk.samtools.SamReaderFactory) VariantContextBuilder(htsjdk.variant.variantcontext.VariantContextBuilder) Genotype(htsjdk.variant.variantcontext.Genotype) VCFHeaderLine(htsjdk.variant.vcf.VCFHeaderLine) Cigar(htsjdk.samtools.Cigar) SequenceUtil(htsjdk.samtools.util.SequenceUtil) Parameter(com.beust.jcommander.Parameter) HashMap(java.util.HashMap) TreeSet(java.util.TreeSet) ArrayList(java.util.ArrayList) HashSet(java.util.HashSet) BiPredicate(java.util.function.BiPredicate) IOUtils(com.github.lindenb.jvarkit.io.IOUtils) Launcher(com.github.lindenb.jvarkit.util.jcommander.Launcher) ShortReadName(com.github.lindenb.jvarkit.util.illumina.ShortReadName) VCFFilterHeaderLine(htsjdk.variant.vcf.VCFFilterHeaderLine) VCFHeaderLineType(htsjdk.variant.vcf.VCFHeaderLineType) Counter(com.github.lindenb.jvarkit.util.Counter) JexlGenotypePredicate(com.github.lindenb.jvarkit.util.vcf.JexlGenotypePredicate) SAMSequenceDictionary(htsjdk.samtools.SAMSequenceDictionary) VcfIterator(com.github.lindenb.jvarkit.util.vcf.VcfIterator) IOException(java.io.IOException) SamReader(htsjdk.samtools.SamReader) File(java.io.File) SamRecordFilter(htsjdk.samtools.filter.SamRecordFilter) SamRecordJEXLFilter(com.github.lindenb.jvarkit.util.samtools.SamRecordJEXLFilter) VCFFormatHeaderLine(htsjdk.variant.vcf.VCFFormatHeaderLine) DoublePredicate(java.util.function.DoublePredicate)
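
The per-read allele counting at the heart of this method (query the BAM around the SNP, map the reference position back into the read with getReadPositionAtReferencePosition(), then tally the base) can be isolated into a small helper. A minimal sketch, assuming an indexed BAM and using only the htsjdk calls already shown above:

import java.io.File;
import java.util.HashMap;
import java.util.Map;
import htsjdk.samtools.SAMRecord;
import htsjdk.samtools.SAMRecordIterator;
import htsjdk.samtools.SamReader;
import htsjdk.samtools.SamReaderFactory;

public class AlleleCountExample {
    /** Count the bases observed at a 1-based reference position. */
    static Map<Character, Integer> countBasesAt(final SamReader reader, final String contig, final int pos1) {
        final Map<Character, Integer> counts = new HashMap<>();
        try (SAMRecordIterator iter = reader.query(contig, pos1, pos1, false)) {
            while (iter.hasNext()) {
                final SAMRecord rec = iter.next();
                if (rec.getReadUnmappedFlag() || rec.isSecondaryOrSupplementary() || rec.getDuplicateReadFlag()) continue;
                final byte[] bases = rec.getReadBases();
                if (bases == null || bases.length == 0) continue;
                // 1-based position in the read of the base aligned to pos1; 0 means a deletion or clip at that site
                final int readPos1 = rec.getReadPositionAtReferencePosition(pos1);
                if (readPos1 < 1 || readPos1 > bases.length) continue;
                final char base = Character.toUpperCase((char) bases[readPos1 - 1]);
                if (base == 'N') continue;
                counts.merge(base, 1, Integer::sum);
            }
        }
        return counts;
    }

    public static void main(final String[] args) throws Exception {
        // args: <indexed bam> <contig> <1-based position>
        try (SamReader reader = SamReaderFactory.makeDefault().open(new File(args[0]))) {
            System.out.println(countBasesAt(reader, args[1], Integer.parseInt(args[2])));
        }
    }
}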

Example 94 with SAMRecordIterator

Use of htsjdk.samtools.SAMRecordIterator in project jvarkit by lindenb.

From the class Biostar170742, method doWork.

@Override
public int doWork(final List<String> args) {
    if (this.faidx == null) {
        LOG.error("Reference sequence was not defined");
        return -1;
    }
    PrintStream out = null;
    SamReader sfr = null;
    SAMRecordIterator iter = null;
    GenomicSequence genomicSequence = null;
    IndexedFastaSequenceFile indexedFastaSequenceFile = null;
    try {
        indexedFastaSequenceFile = new IndexedFastaSequenceFile(this.faidx);
        long align_id = 0;
        sfr = openSamReader(oneFileOrNull(args));
        out = super.openFileOrStdoutAsPrintStream(this.outputFile);
        final StringBuilder refseq = new StringBuilder();
        final StringBuilder readseq = new StringBuilder();
        final SAMSequenceDictionaryProgress progress = new SAMSequenceDictionaryProgress(sfr.getFileHeader());
        iter = sfr.iterator();
        while (iter.hasNext()) {
            final SAMRecord rec = progress.watch(iter.next());
            if (rec.getReadUnmappedFlag())
                continue;
            final Cigar cigar = rec.getCigar();
            if (cigar == null)
                continue;
            final byte[] readbases = rec.getReadBases();
            if (readbases == null)
                continue;
            if (genomicSequence == null || !rec.getReferenceName().equals(genomicSequence.getChrom())) {
                genomicSequence = new GenomicSequence(indexedFastaSequenceFile, rec.getReferenceName());
            }
            int refpos1 = rec.getAlignmentStart();
            int readpos = 0;
            refseq.setLength(0);
            readseq.setLength(0);
            for (final CigarElement ce : cigar.getCigarElements()) {
                final CigarOperator op = ce.getOperator();
                if (op.equals(CigarOperator.S)) {
                    readpos += ce.getLength();
                    continue;
                }
                if (op.equals(CigarOperator.H)) {
                    continue;
                }
                for (int i = 0; i < ce.getLength(); ++i) {
                    if (op.consumesReferenceBases() && op.consumesReadBases()) {
                        refseq.append(genomicSequence.charAt(refpos1 - 1));
                        readseq.append((char) readbases[readpos]);
                        readpos++;
                        refpos1++;
                    } else if (op.consumesReferenceBases()) {
                        refseq.append(genomicSequence.charAt(refpos1 - 1));
                        readseq.append('-');
                        refpos1++;
                    } else if (op.consumesReadBases()) {
                        refseq.append('-');
                        readseq.append((char) readbases[readpos]);
                        readpos++;
                    }
                }
            }
            out.print(align_id);
            out.print(' ');
            out.print(rec.getReferenceName());
            out.print(' ');
            out.print(rec.getAlignmentStart());
            out.print(' ');
            out.print(rec.getAlignmentEnd());
            out.print(' ');
            out.print(rec.getReadName());
            if (rec.getReadPairedFlag()) {
                if (rec.getFirstOfPairFlag()) {
                    out.print("/1");
                } else if (rec.getSecondOfPairFlag()) {
                    out.print("/2");
                }
            }
            out.print(' ');
            out.print(1 + rec.getAlignmentStart() - rec.getUnclippedStart());
            out.print(' ');
            out.print(rec.getReadLength() - (rec.getUnclippedEnd() - rec.getAlignmentEnd()));
            out.print(' ');
            out.print(rec.getReadNegativeStrandFlag() ? "-" : "+");
            out.print(' ');
            out.print(rec.getMappingQuality());
            out.println();
            out.println(refseq);
            out.println(readseq);
            out.println();
            ++align_id;
        }
        progress.finish();
        iter.close();
        out.flush();
        LOG.info("done");
        return RETURN_OK;
    } catch (Exception err) {
        LOG.error(err);
        return -1;
    } finally {
        CloserUtil.close(out);
        CloserUtil.close(iter);
        CloserUtil.close(sfr);
        CloserUtil.close(indexedFastaSequenceFile);
    }
}
Also used : PrintStream(java.io.PrintStream) SAMRecordIterator(htsjdk.samtools.SAMRecordIterator) SAMSequenceDictionaryProgress(com.github.lindenb.jvarkit.util.picard.SAMSequenceDictionaryProgress) GenomicSequence(com.github.lindenb.jvarkit.util.picard.GenomicSequence) CigarOperator(htsjdk.samtools.CigarOperator) CigarElement(htsjdk.samtools.CigarElement) IndexedFastaSequenceFile(htsjdk.samtools.reference.IndexedFastaSequenceFile) SamReader(htsjdk.samtools.SamReader) Cigar(htsjdk.samtools.Cigar) SAMRecord(htsjdk.samtools.SAMRecord)
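
The CIGAR walk above relies on CigarOperator.consumesReferenceBases() and consumesReadBases() to keep the reference and read columns in step. The helper below applies the same walk to build just the gapped read string, with '-' wherever the reference consumes a base the read does not; it is a sketch of the technique rather than part of the tool, and it skips the FASTA lookups so no reference file is needed:

import htsjdk.samtools.Cigar;
import htsjdk.samtools.CigarElement;
import htsjdk.samtools.CigarOperator;
import htsjdk.samtools.SAMRecord;

public class GappedReadExample {
    /**
     * Read sequence as it aligns to the reference: soft clips dropped,
     * hard clips ignored, '-' for deletions/skips, insertions kept.
     * Same walk as the loop above, minus the reference lookups.
     */
    static String gappedRead(final SAMRecord rec) {
        final Cigar cigar = rec.getCigar();
        final byte[] bases = rec.getReadBases();
        if (cigar == null || bases == null || bases.length == 0) return "";
        final StringBuilder sb = new StringBuilder(bases.length);
        int readpos = 0;
        for (final CigarElement ce : cigar.getCigarElements()) {
            final CigarOperator op = ce.getOperator();
            if (op.equals(CigarOperator.S)) { readpos += ce.getLength(); continue; } // skip soft-clipped bases
            if (op.equals(CigarOperator.H)) continue; // hard clip: nothing stored in the read
            for (int i = 0; i < ce.getLength(); ++i) {
                if (op.consumesReadBases()) {
                    sb.append((char) bases[readpos++]);
                } else if (op.consumesReferenceBases()) {
                    sb.append('-'); // deletion (D) or skip (N) relative to the reference
                }
            }
        }
        return sb.toString();
    }
}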

Example 95 with SAMRecordIterator

Use of htsjdk.samtools.SAMRecordIterator in project jvarkit by lindenb.

From the class Biostar173114, method doWork.

@Override
public int doWork(final List<String> args) {
    if (keepQualities)
        keepSequence = true;
    SamReader sfr = null;
    SAMFileWriter sfw = null;
    SAMRecordIterator iter = null;
    try {
        sfr = super.openSamReader(oneFileOrNull(args));
        sfw = this.writingBamArgs.openSAMFileWriter(this.outputFile, sfr.getFileHeader(), true);
        iter = sfr.iterator();
        final SAMSequenceDictionaryProgress progress = new SAMSequenceDictionaryProgress(sfr.getFileHeader()).logger(LOG);
        long nReads = 0;
        while (iter.hasNext()) {
            final SAMRecord record = progress.watch(iter.next());
            if (!this.keepAttributes) {
                final SAMReadGroupRecord g = record.getReadGroup();
                record.clearAttributes();
                if (g != null && this.keepReadGroup) {
                    record.setAttribute("RG", g.getId());
                }
            }
            record.setReadName(this.keepName ? record.getReadName() : "R" + Long.toHexString(nReads++));
            if (!this.keepMate && record.getReadPairedFlag()) {
                record.setReadPairedFlag(false);
                record.setMateReferenceIndex(SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX);
                record.setMateAlignmentStart(SAMRecord.NO_ALIGNMENT_START);
                record.setMateUnmappedFlag(false);
                record.setMateNegativeStrandFlag(false);
                record.setInferredInsertSize(0);
                record.setProperPairFlag(false);
            }
            if (!this.keepCigar && !record.getReadUnmappedFlag() && record.getCigar() != null) {
                record.setCigar(new Cigar(record.getCigar().getCigarElements().stream().filter(C -> !C.getOperator().equals(CigarOperator.H)).collect(Collectors.toList())));
            }
            if (!this.keepSequence) {
                record.setReadBases(SAMRecord.NULL_SEQUENCE);
            }
            if (!this.keepQualities) {
                record.setBaseQualities(SAMRecord.NULL_QUALS);
            }
            sfw.addAlignment(record);
        }
        progress.finish();
        sfw.close();
        sfw = null;
        LOG.info("done");
        return RETURN_OK;
    } catch (final Exception err) {
        LOG.error(err);
        return -1;
    } finally {
        CloserUtil.close(iter);
        CloserUtil.close(sfr);
        CloserUtil.close(sfw);
    }
}
Also used : Cigar(htsjdk.samtools.Cigar) Program(com.github.lindenb.jvarkit.util.jcommander.Program) Parameter(com.beust.jcommander.Parameter) SAMRecordIterator(htsjdk.samtools.SAMRecordIterator) Logger(com.github.lindenb.jvarkit.util.log.Logger) SAMSequenceDictionaryProgress(com.github.lindenb.jvarkit.util.picard.SAMSequenceDictionaryProgress) CigarOperator(htsjdk.samtools.CigarOperator) SAMFileWriter(htsjdk.samtools.SAMFileWriter) Term(com.github.lindenb.semontology.Term) SamReader(htsjdk.samtools.SamReader) Collectors(java.util.stream.Collectors) File(java.io.File) ParametersDelegate(com.beust.jcommander.ParametersDelegate) SAMRecord(htsjdk.samtools.SAMRecord) List(java.util.List) SAMReadGroupRecord(htsjdk.samtools.SAMReadGroupRecord) Launcher(com.github.lindenb.jvarkit.util.jcommander.Launcher) CloserUtil(htsjdk.samtools.util.CloserUtil)
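
Stripped of the option handling, this tool is a plain read-copy loop: open a SamReader, make a SAMFileWriter from the same header, rewrite each record, close both. A minimal sketch of that skeleton, keeping only the clearAttributes() step (read-group restoration and the other keep* flags are omitted) and assuming a recent htsjdk where reader, writer and iterator are all closeable:

import java.io.File;
import htsjdk.samtools.SAMFileHeader;
import htsjdk.samtools.SAMFileWriter;
import htsjdk.samtools.SAMFileWriterFactory;
import htsjdk.samtools.SAMRecord;
import htsjdk.samtools.SAMRecordIterator;
import htsjdk.samtools.SamReader;
import htsjdk.samtools.SamReaderFactory;

public class StripAttributesExample {
    public static void main(final String[] args) throws Exception {
        final File input = new File(args[0]);
        final File output = new File(args[1]); // .sam or .bam extension decides the output format
        try (SamReader reader = SamReaderFactory.makeDefault().open(input)) {
            final SAMFileHeader header = reader.getFileHeader();
            // presorted=true: records are written in the order the reader delivers them
            try (SAMFileWriter writer = new SAMFileWriterFactory().makeSAMOrBAMWriter(header, true, output);
                 SAMRecordIterator iter = reader.iterator()) {
                while (iter.hasNext()) {
                    final SAMRecord rec = iter.next();
                    rec.clearAttributes(); // drop all optional tags, as the tool above does
                    writer.addAlignment(rec);
                }
            }
        }
    }
}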

Aggregations

SAMRecordIterator (htsjdk.samtools.SAMRecordIterator) 107
SAMRecord (htsjdk.samtools.SAMRecord) 92
SamReader (htsjdk.samtools.SamReader) 83
SAMFileHeader (htsjdk.samtools.SAMFileHeader) 49
SAMSequenceDictionaryProgress (com.github.lindenb.jvarkit.util.picard.SAMSequenceDictionaryProgress) 47
File (java.io.File) 47
SAMFileWriter (htsjdk.samtools.SAMFileWriter) 45
IOException (java.io.IOException) 41
ArrayList (java.util.ArrayList) 34
CigarElement (htsjdk.samtools.CigarElement) 30
Cigar (htsjdk.samtools.Cigar) 26
SAMSequenceDictionary (htsjdk.samtools.SAMSequenceDictionary) 24
SamReaderFactory (htsjdk.samtools.SamReaderFactory) 21
SAMReadGroupRecord (htsjdk.samtools.SAMReadGroupRecord) 18
CigarOperator (htsjdk.samtools.CigarOperator) 16
Interval (htsjdk.samtools.util.Interval) 16
PrintWriter (java.io.PrintWriter) 15
HashMap (java.util.HashMap) 15
SAMFileWriterFactory (htsjdk.samtools.SAMFileWriterFactory) 14
SAMSequenceRecord (htsjdk.samtools.SAMSequenceRecord) 14