use of com.github.lindenb.jvarkit.util.bio.fasta.ReferenceContig in project jvarkit by lindenb.
the class MiniCaller method doWork.
@Override
public int doWork(final List<String> args) {
ConcatSam.ConcatSamIterator iter = null;
try {
if (this.fastaFile == null) {
LOG.error("no REF");
return -1;
}
/* load faid */
final ReferenceGenomeFactory referenceGenomeFactory = new ReferenceGenomeFactory();
this.referenceGenome = referenceGenomeFactory.openFastaFile(this.fastaFile);
this.dictionary = this.referenceGenome.getDictionary();
if (this.dictionary == null) {
LOG.error(JvarkitException.FastaDictionaryMissing.getMessage(this.fastaFile.getPath()));
// missing dictionary: stop here rather than failing later on a null dictionary
return -1;
}
/* create sam record iterator */
iter = new ConcatSam.Factory().addInterval(this.rgnStr).setEnableUnrollList(true).open(args);
final SAMFileHeader samFileheader = iter.getFileHeader();
final SAMSequenceDictionary dict = samFileheader.getSequenceDictionary();
if (dict == null) {
LOG.error(JvarkitException.BamDictionaryMissing.getMessage(String.join(", ", args)));
return -1;
}
if (!SequenceUtil.areSequenceDictionariesEqual(dict, this.dictionary)) {
LOG.error(JvarkitException.DictionariesAreNotTheSame.getMessage(dict, this.dictionary));
return -1;
}
final List<SAMReadGroupRecord> groups = samFileheader.getReadGroups();
if (groups == null || groups.isEmpty()) {
LOG.error("No group defined in input");
return -1;
}
final Set<String> sampleSet = groups.stream().map(srgr -> this.samRecordPartition.apply(srgr, samRecordPartition.name())).collect(Collectors.toSet());
/* create VCF metadata */
final Set<VCFHeaderLine> metaData = new HashSet<VCFHeaderLine>();
metaData.add(VCFStandardHeaderLines.getFormatLine(VCFConstants.GENOTYPE_KEY));
metaData.add(VCFStandardHeaderLines.getFormatLine(VCFConstants.DEPTH_KEY));
metaData.add(VCFStandardHeaderLines.getInfoLine(VCFConstants.DEPTH_KEY));
metaData.add(VCFStandardHeaderLines.getInfoLine(VCFConstants.ALLELE_COUNT_KEY));
metaData.add(VCFStandardHeaderLines.getInfoLine(VCFConstants.ALLELE_NUMBER_KEY));
metaData.add(VCFStandardHeaderLines.getInfoLine(VCFConstants.ALLELE_FREQUENCY_KEY));
metaData.add(new VCFFormatHeaderLine("DPG", // one value of each genotype
VCFHeaderLineCount.G, VCFHeaderLineType.Integer, "Depth for each allele"));
metaData.add(new VCFFormatHeaderLine("DP4", 4, VCFHeaderLineType.Integer, "Depth ReforAlt|Strand : RF,RR,AF,AR"));
metaData.add(new VCFInfoHeaderLine("INDEL", 1, VCFHeaderLineType.Flag, "Variant is indel"));
// addMetaData(metaData);
final VCFHeader vcfHeader = new VCFHeader(metaData, sampleSet);
vcfHeader.setSequenceDictionary(this.dictionary);
/* create variant context */
this.variantContextWriter = super.openVariantContextWriter(outputFile);
this.variantContextWriter.writeHeader(vcfHeader);
ReferenceContig genomicSeq = null;
SAMSequenceDictionaryProgress progress = new SAMSequenceDictionaryProgress(this.dictionary);
for (; ; ) {
SAMRecord rec = null;
if (iter.hasNext()) {
rec = progress.watch(iter.next());
if (rec.getReadUnmappedFlag())
continue;
if (this.readFilter.filterOut(rec))
continue;
/* flush buffer if needed */
while (!this.buffer.isEmpty() && (this.buffer.get(0).tid < rec.getReferenceIndex() || (this.buffer.get(0).tid == rec.getReferenceIndex() && (this.buffer.get(0).getEnd()) < rec.getAlignmentStart()))) {
this.buffer.remove(0).print();
}
/* get genomic sequence at this position */
if (genomicSeq == null || !genomicSeq.getContig().equals(rec.getContig())) {
genomicSeq = this.referenceGenome.getContig(rec.getContig());
}
final Cigar cigar = rec.getCigar();
if (cigar == null)
continue;
int readPos = 0;
// 0 based-reference
int refPos0 = rec.getAlignmentStart() - 1;
final byte[] bases = rec.getReadBases();
final byte[] quals = rec.getBaseQualities();
final String sampleName = this.samRecordPartition.getPartion(rec, samRecordPartition.name());
for (final CigarElement ce : cigar.getCigarElements()) {
final CigarOperator op = ce.getOperator();
switch(op) {
case P:
break;
case H:
break;
case S:
readPos += ce.getLength();
break;
// go
case N:
case D:
{
if (refPos0 > 0) { // we need the base before the deletion
char refBase = genomicSeq.charAt(refPos0 - 1);
/* we use base before deletion */
final StringBuilder sb = new StringBuilder(ce.getLength());
sb.append(refBase);
for (int i = 0; i < ce.getLength(); ++i) {
sb.append(genomicSeq.charAt(refPos0 + i));
}
// we use the base *before* the deletion
findContext(rec.getReferenceIndex(), refPos0 - 1, Allele.create(sb.toString(), true)).getSample(sampleName).getAllele(Allele.create(String.valueOf(refBase), false)).incr(rec.getReadNegativeStrandFlag());
}
refPos0 += ce.getLength();
break;
}
case I:
{
if (refPos0 > 0) {
// float qual=0;
char refBase = Character.toUpperCase(genomicSeq.charAt(refPos0 - 1));
final StringBuilder sb = new StringBuilder(1 + ce.getLength());
sb.append(refBase);
for (int i = 0; i < ce.getLength(); ++i) {
sb.append((char) bases[readPos + i]);
// qual+=(readPos + i < quals.length?quals[ readPos + i]:0);
}
// we use the base *before* the insertion
findContext(rec.getReferenceIndex(), refPos0 - 1, Allele.create(String.valueOf(refBase), true)).getSample(sampleName).getAllele(Allele.create(sb.toString().toUpperCase(), false)).incr(rec.getReadNegativeStrandFlag());
}
readPos += ce.getLength();
break;
}
case EQ:
case M:
case X:
{
for (int i = 0; i < ce.getLength(); ++i) {
findContext(rec.getReferenceIndex(), refPos0 + i, Allele.create(String.valueOf(genomicSeq.charAt(refPos0 + i)), true)).getSample(sampleName).getAllele(Allele.create(String.valueOf((char) bases[readPos + i]), false)).incr(rec.getReadNegativeStrandFlag());
}
readPos += ce.getLength();
refPos0 += ce.getLength();
break;
}
default:
throw new IllegalStateException("Case statement didn't deal with cigar op: " + op);
}
}
} else {
break;
}
}
while (!buffer.isEmpty()) buffer.remove(0).print();
progress.finish();
iter.close();
iter = null;
this.variantContextWriter.close();
this.variantContextWriter = null;
return RETURN_OK;
} catch (Exception e) {
LOG.error(e);
return -1;
} finally {
CloserUtil.close(iter);
CloserUtil.close(this.referenceGenome);
CloserUtil.close(this.variantContextWriter);
}
}
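A minimal, self-contained sketch of the reference access pattern used in doWork() above: open an indexed FASTA with ReferenceGenomeFactory, fetch a ReferenceContig once per chromosome, and read individual bases with the 0-based charAt(). The FASTA path and contig name are hypothetical, and ReferenceGenome/ReferenceGenomeFactory are assumed to live in the same package as ReferenceContig.

import java.io.File;

import com.github.lindenb.jvarkit.util.bio.fasta.ReferenceContig;
// assumption: ReferenceGenome and ReferenceGenomeFactory share ReferenceContig's package
import com.github.lindenb.jvarkit.util.bio.fasta.ReferenceGenome;
import com.github.lindenb.jvarkit.util.bio.fasta.ReferenceGenomeFactory;

public class MiniCallerRefSketch {
    public static void main(final String[] args) throws Exception {
        // hypothetical FASTA path; an indexed reference is expected, since doWork() checks getDictionary()
        final File fastaFile = new File("/path/to/ref.fasta");
        final ReferenceGenome referenceGenome = new ReferenceGenomeFactory().openFastaFile(fastaFile);
        try {
            // fetch the contig once and reuse it while records stay on the same chromosome, as in doWork()
            final ReferenceContig contig = referenceGenome.getContig("chr1");
            // charAt() is 0-based, matching the refPos0 arithmetic above
            final char base = contig.charAt(0);
            System.out.println(contig.getContig() + ":1 = " + base);
        } finally {
            referenceGenome.close();
        }
    }
}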
use of com.github.lindenb.jvarkit.util.bio.fasta.ReferenceContig in project jvarkit by lindenb.
the class VCFPredictions method doVcfToVcf.
@Override
protected int doVcfToVcf(final String inputName, final VcfIterator r, VariantContextWriter w) {
ReferenceContig genomicSequence = null;
try {
LOG.info("opening REF:" + this.referenceGenomeSource);
this.referenceGenome = new ReferenceGenomeFactory().open(this.referenceGenomeSource);
loadKnownGenesFromUri();
final VCFHeader header = (VCFHeader) r.getHeader();
final ContigNameConverter contigNameConverter = ContigNameConverter.fromOneDictionary(this.referenceGenome.getDictionary());
contigNameConverter.setOnNotFound(OnNotFound.SKIP);
final VCFHeader h2 = new VCFHeader(header);
addMetaData(h2);
switch(this.outputSyntax) {
case Vep:
{
h2.addMetaDataLine(new VCFInfoHeaderLine("CSQ", VCFHeaderLineCount.UNBOUNDED, VCFHeaderLineType.String, "Consequence type as predicted by VEP" + ". Format: Allele|Feature|Feature_type|Consequence|CDS_position|Protein_position|Amino_acids|Codons"));
break;
}
case SnpEff:
{
h2.addMetaDataLine(new VCFInfoHeaderLine("ANN", VCFHeaderLineCount.UNBOUNDED, VCFHeaderLineType.String, "Functional annotations: 'Allele | Annotation | Annotation_Impact | Gene_Name | Gene_ID | Feature_Type | Feature_ID | Transcript_BioType | Rank | HGVS.c | HGVS.p | cDNA.pos / cDNA.length | CDS.pos / CDS.length | AA.pos / AA.length | Distance | ERRORS / WARNINGS / INFO'"));
break;
}
default:
{
final StringBuilder format = new StringBuilder();
for (FORMAT1 f : FORMAT1.values()) {
if (format.length() > 0)
format.append("|");
format.append(f.name());
}
h2.addMetaDataLine(new VCFInfoHeaderLine(TAG, VCFHeaderLineCount.UNBOUNDED, VCFHeaderLineType.String, "Prediction from " + getClass().getSimpleName() + ". Format: " + format));
break;
}
}
w.writeHeader(h2);
final SequenceOntologyTree soTree = SequenceOntologyTree.getInstance();
final SequenceOntologyTree.Term so_intron = soTree.getTermByAcn("SO:0001627");
final SequenceOntologyTree.Term so_exon = soTree.getTermByAcn("SO:0001791");
final SequenceOntologyTree.Term so_splice_donor = soTree.getTermByAcn("SO:0001575");
final SequenceOntologyTree.Term so_splice_acceptor = soTree.getTermByAcn("SO:0001574");
final SequenceOntologyTree.Term so_5_prime_UTR_variant = soTree.getTermByAcn("SO:0001623");
final SequenceOntologyTree.Term so_3_prime_UTR_variant = soTree.getTermByAcn("SO:0001624");
final SequenceOntologyTree.Term so_splicing_variant = soTree.getTermByAcn("SO:0001568");
final SequenceOntologyTree.Term so_stop_lost = soTree.getTermByAcn("SO:0001578");
final SequenceOntologyTree.Term so_stop_gained = soTree.getTermByAcn("SO:0001587");
final SequenceOntologyTree.Term so_coding_synonymous = soTree.getTermByAcn("SO:0001819");
final SequenceOntologyTree.Term so_coding_non_synonymous = soTree.getTermByAcn("SO:0001583");
final SequenceOntologyTree.Term so_intergenic = soTree.getTermByAcn("SO:0001628");
final SequenceOntologyTree.Term so_nc_transcript_variant = soTree.getTermByAcn("SO:0001619");
final SequenceOntologyTree.Term so_non_coding_exon_variant = soTree.getTermByAcn("SO:0001792");
final SequenceOntologyTree.Term _2KB_upstream_variant = soTree.getTermByAcn("SO:0001636");
final SequenceOntologyTree.Term _5KB_upstream_variant = soTree.getTermByAcn("SO:0001635");
final SequenceOntologyTree.Term _5KB_downstream_variant = soTree.getTermByAcn("SO:0001633");
final SequenceOntologyTree.Term _500bp_downstream_variant = soTree.getTermByAcn("SO:0001634");
final SAMSequenceDictionaryProgress progress = new SAMSequenceDictionaryProgress(header);
while (r.hasNext()) {
final VariantContext ctx = progress.watch(r.next());
final String normalizedContig = contigNameConverter.apply(ctx.getContig());
final List<KnownGene> genes = new ArrayList<>();
if (!StringUtil.isBlank(normalizedContig)) {
// Interval coordinates are 1-based
for (final List<KnownGene> l2 : this.knownGenes.getOverlapping(new Interval(normalizedContig, ctx.getStart(), ctx.getEnd()))) {
genes.addAll(l2);
}
}
final List<Annotation> ctx_annotations = new ArrayList<Annotation>();
if (genes == null || genes.isEmpty()) {
// intergenic
Annotation a = new Annotation();
a.seqont.add(so_intergenic);
ctx_annotations.add(a);
} else {
if (genomicSequence == null || !genomicSequence.hasName(normalizedContig)) {
LOG.info("getting genomic Sequence for " + normalizedContig);
genomicSequence = this.referenceGenome.getContig(normalizedContig);
if (genomicSequence == null)
throw new JvarkitException.ContigNotFoundInDictionary(normalizedContig, this.referenceGenome.getDictionary());
}
for (final KnownGene gene : genes) {
final GeneticCode geneticCode = GeneticCode.getStandard();
for (final Allele alt2 : ctx.getAlternateAlleles()) {
if (alt2.isNoCall())
continue;
if (alt2.isSymbolic()) {
LOG.warn("symbolic allele are not handled... " + alt2.getDisplayString());
continue;
}
if (alt2.isReference())
continue;
final Annotation annotations = new Annotation();
annotations.kg = gene;
annotations.alt2 = alt2;
if (gene.isNonCoding()) {
annotations.seqont.add(so_nc_transcript_variant);
continue;
}
ctx_annotations.add(annotations);
StringBuilder wildRNA = null;
ProteinCharSequence wildProt = null;
ProteinCharSequence mutProt = null;
MutedSequence mutRNA = null;
int position_in_cds = -1;
final int position = ctx.getStart() - 1;
if (!String.valueOf(genomicSequence.charAt(position)).equalsIgnoreCase(ctx.getReference().getBaseString())) {
if (isSimpleBase(ctx.getReference())) {
LOG.warn("Warning REF!=GENOMIC SEQ!!! at " + position + "/" + ctx.getReference());
}
continue;
}
if (gene.isPositiveStrand()) {
if (position < gene.getTxStart() - 2000) {
annotations.seqont.add(_5KB_upstream_variant);
} else if (position < gene.getTxStart()) {
annotations.seqont.add(_2KB_upstream_variant);
} else if (position >= gene.getTxEnd() + 500) {
annotations.seqont.add(_5KB_downstream_variant);
} else if (position >= gene.getTxEnd()) {
annotations.seqont.add(_500bp_downstream_variant);
} else if (position < gene.getCdsStart()) {
// UTR5
annotations.seqont.add(so_5_prime_UTR_variant);
} else if (gene.getCdsEnd() <= position) {
annotations.seqont.add(so_3_prime_UTR_variant);
} else {
int exon_index = 0;
while (exon_index < gene.getExonCount()) {
final KnownGene.Exon exon = gene.getExon(exon_index);
for (int i = exon.getStart(); i < exon.getEnd(); ++i) {
if (i == position) {
annotations.exon_name = exon.getName();
if (exon.isNonCoding()) {
annotations.seqont.add(so_non_coding_exon_variant);
}
}
if (i < gene.getTxStart())
continue;
if (i < gene.getCdsStart())
continue;
if (i >= gene.getCdsEnd())
break;
if (wildRNA == null) {
wildRNA = new StringBuilder();
mutRNA = new MutedSequence(wildRNA);
}
if (i == position) {
annotations.seqont.add(so_exon);
annotations.exon_name = exon.getName();
position_in_cds = wildRNA.length();
annotations.position_cds = position_in_cds;
// in splicing ?
if (exon.isSplicing(position)) {
if (exon.isSplicingAcceptor(position)) {
// SPLICING_ACCEPTOR
annotations.seqont.add(so_splice_acceptor);
} else if (exon.isSplicingDonor(position)) {
// SPLICING_DONOR
annotations.seqont.add(so_splice_donor);
} else // ??
{
annotations.seqont.add(so_splicing_variant);
}
}
}
wildRNA.append(genomicSequence.charAt(i));
if (i == position && isSimpleBase(alt2) && isSimpleBase(ctx.getReference())) {
mutRNA.put(position_in_cds, alt2.getBaseString().charAt(0));
}
if (wildRNA.length() % 3 == 0 && wildRNA.length() > 0 && wildProt == null) {
wildProt = new ProteinCharSequence(geneticCode, wildRNA);
mutProt = new ProteinCharSequence(geneticCode, mutRNA);
}
}
final KnownGene.Intron intron = exon.getNextIntron();
if (intron != null && intron.contains(position)) {
annotations.intron_name = intron.getName();
annotations.seqont.add(so_intron);
if (intron.isSplicing(position)) {
if (intron.isSplicingAcceptor(position)) {
annotations.seqont.add(so_splice_acceptor);
} else if (intron.isSplicingDonor(position)) {
annotations.seqont.add(so_splice_donor);
} else // ???
{
annotations.seqont.add(so_splicing_variant);
}
}
}
++exon_index;
}
}
} else // reverse orientation
{
if (position >= gene.getTxEnd() + 2000) {
annotations.seqont.add(_5KB_upstream_variant);
} else if (position >= gene.getTxEnd()) {
annotations.seqont.add(_2KB_upstream_variant);
} else if (position < gene.getTxStart() - 500) {
annotations.seqont.add(_5KB_downstream_variant);
} else if (position < gene.getTxStart()) {
annotations.seqont.add(_500bp_downstream_variant);
} else if (position < gene.getCdsStart()) {
annotations.seqont.add(so_3_prime_UTR_variant);
} else if (gene.getCdsEnd() <= position) {
annotations.seqont.add(so_5_prime_UTR_variant);
} else {
int exon_index = gene.getExonCount() - 1;
while (exon_index >= 0) {
final KnownGene.Exon exon = gene.getExon(exon_index);
for (int i = exon.getEnd() - 1; i >= exon.getStart(); --i) {
if (i == position) {
annotations.exon_name = exon.getName();
if (exon.isNonCoding()) {
annotations.seqont.add(so_non_coding_exon_variant);
}
}
if (i >= gene.getCdsEnd())
continue;
if (i < gene.getCdsStart())
break;
if (wildRNA == null) {
wildRNA = new StringBuilder();
mutRNA = new MutedSequence(wildRNA);
}
if (i == position) {
annotations.seqont.add(so_exon);
position_in_cds = wildRNA.length();
annotations.position_cds = position_in_cds;
// in splicing ?
if (exon.isSplicing(position)) {
if (exon.isSplicingAcceptor(position)) {
annotations.seqont.add(so_splice_acceptor);
} else if (exon.isSplicingDonor(position)) {
annotations.seqont.add(so_splice_donor);
} else // ?
{
annotations.seqont.add(so_splicing_variant);
}
}
if (isSimpleBase(alt2) && isSimpleBase(ctx.getReference())) {
mutRNA.put(position_in_cds, AcidNucleics.complement(alt2.getBaseString().charAt(0)));
}
}
wildRNA.append(AcidNucleics.complement(genomicSequence.charAt(i)));
if (wildRNA.length() % 3 == 0 && wildRNA.length() > 0 && wildProt == null) {
wildProt = new ProteinCharSequence(geneticCode, wildRNA);
mutProt = new ProteinCharSequence(geneticCode, mutRNA);
}
}
final KnownGene.Intron intron = exon.getPrevIntron();
if (intron != null && intron.contains(position)) {
annotations.intron_name = intron.getName();
annotations.seqont.add(so_intron);
if (intron.isSplicing(position)) {
if (intron.isSplicingAcceptor(position)) {
annotations.seqont.add(so_splice_acceptor);
} else if (intron.isSplicingDonor(position)) {
annotations.seqont.add(so_splice_donor);
} else // ?
{
annotations.seqont.add(so_splicing_variant);
}
}
}
--exon_index;
}
}
}
if (isSimpleBase(alt2) && isSimpleBase(ctx.getReference()) && wildProt != null && mutProt != null && position_in_cds >= 0) {
final int pos_aa = position_in_cds / 3;
final int mod = position_in_cds % 3;
annotations.wildCodon = ("" + wildRNA.charAt(position_in_cds - mod + 0) + wildRNA.charAt(position_in_cds - mod + 1) + wildRNA.charAt(position_in_cds - mod + 2));
annotations.mutCodon = ("" + mutRNA.charAt(position_in_cds - mod + 0) + mutRNA.charAt(position_in_cds - mod + 1) + mutRNA.charAt(position_in_cds - mod + 2));
annotations.position_protein = (pos_aa + 1);
annotations.wildAA = String.valueOf(wildProt.charAt(pos_aa));
annotations.mutAA = (String.valueOf(mutProt.charAt(pos_aa)));
annotations.seqont.remove(so_exon);
if (isStop(wildProt.charAt(pos_aa)) && !isStop(mutProt.charAt(pos_aa))) {
annotations.seqont.add(so_stop_lost);
} else if (!isStop(wildProt.charAt(pos_aa)) && isStop(mutProt.charAt(pos_aa))) {
annotations.seqont.add(so_stop_gained);
} else if (wildProt.charAt(pos_aa) == mutProt.charAt(pos_aa)) {
annotations.seqont.add(so_coding_synonymous);
} else {
annotations.seqont.add(so_coding_non_synonymous);
}
}
}
}
}
final Set<String> info = new HashSet<String>(ctx_annotations.size());
for (final Annotation a : ctx_annotations) {
info.add(a.toString());
}
final VariantContextBuilder vb = new VariantContextBuilder(ctx);
final String thetag;
switch(this.outputSyntax) {
case Vep:
thetag = "CSQ";
break;
case SnpEff:
thetag = "ANN";
break;
default:
thetag = TAG;
break;
}
vb.attribute(thetag, info.toArray());
w.add(vb.make());
}
return RETURN_OK;
} catch (Exception err) {
LOG.error(err);
return -1;
} finally {
CloserUtil.close(this.referenceGenome);
}
}
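The method above refuses to annotate a variant whose declared REF base does not match the genome (the "REF!=GENOMIC SEQ" warning). Below is a minimal sketch of that sanity check in isolation; the FASTA path, contig, position and REF allele are hypothetical, and ReferenceGenome/ReferenceGenomeFactory are assumed to share ReferenceContig's package.

import com.github.lindenb.jvarkit.util.bio.fasta.ReferenceContig;
// assumption: ReferenceGenome and ReferenceGenomeFactory share ReferenceContig's package
import com.github.lindenb.jvarkit.util.bio.fasta.ReferenceGenome;
import com.github.lindenb.jvarkit.util.bio.fasta.ReferenceGenomeFactory;

public class RefAlleleCheckSketch {
    public static void main(final String[] args) throws Exception {
        final ReferenceGenome genome = new ReferenceGenomeFactory().openFastaFile(new java.io.File("/path/to/ref.fasta"));
        try {
            final ReferenceContig contig = genome.getContig("chr1");
            final int vcfStart = 1_000; // 1-based VCF position (hypothetical)
            final String refAllele = "A"; // hypothetical REF allele
            // same check as doVcfToVcf(): compare the declared REF base with the genomic base at (start - 1)
            final char genomic = contig.charAt(vcfStart - 1);
            if (!String.valueOf(genomic).equalsIgnoreCase(refAllele)) {
                System.err.println("REF!=GENOMIC SEQ at " + (vcfStart - 1) + "/" + refAllele);
            }
        } finally {
            genome.close();
        }
    }
}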
use of com.github.lindenb.jvarkit.util.bio.fasta.ReferenceContig in project jvarkit by lindenb.
the class GcPercentAndDepth method doWork.
@Override
public int doWork(final List<String> args) {
if (this.windowSize <= 0) {
LOG.error("Bad window size.");
return -1;
}
if (this.windowStep <= 0) {
LOG.error("Bad window step.");
return -1;
}
if (this.refFile == null) {
LOG.error("Undefined REF File");
return -1;
}
if (args.isEmpty()) {
LOG.error("Illegal Number of arguments.");
return -1;
}
ReferenceGenome indexedFastaSequenceFile = null;
List<SamReader> readers = new ArrayList<SamReader>();
PrintWriter out = null;
try {
LOG.info("Loading " + this.refFile);
indexedFastaSequenceFile = new ReferenceGenomeFactory().openFastaFile(this.refFile);
this.samSequenceDictionary = indexedFastaSequenceFile.getDictionary();
if (this.samSequenceDictionary == null) {
LOG.error("Cannot get sequence dictionary for " + this.refFile);
return -1;
}
out = super.openFileOrStdoutAsPrintWriter(outPutFile);
Set<String> all_samples = new TreeSet<String>();
/* create input, collect sample names */
for (int optind = 0; optind < args.size(); ++optind) {
LOG.info("Opening " + args.get(optind));
final SamReader samFileReaderScan = super.openSamReader(args.get(optind));
readers.add(samFileReaderScan);
final SAMFileHeader header = samFileReaderScan.getFileHeader();
if (!SequenceUtil.areSequenceDictionariesEqual(this.samSequenceDictionary, header.getSequenceDictionary())) {
LOG.error(JvarkitException.DictionariesAreNotTheSame.getMessage(this.samSequenceDictionary, header.getSequenceDictionary()));
return -1;
}
for (final SAMReadGroupRecord g : header.getReadGroups()) {
final String sample = this.partition.apply(g);
if (StringUtil.isBlank(sample)) {
LOG.warning("Read group " + g.getId() + " has no sample in merged dictionary");
continue;
}
all_samples.add(sample);
}
}
LOG.info("N " + this.partition.name() + "=" + all_samples.size());
/* print header */
out.print("#");
if (!this.hide_genomic_index) {
out.print("id");
out.print("\t");
}
out.print("chrom");
out.print("\t");
out.print("start");
out.print("\t");
out.print("end");
out.print("\t");
out.print("GCPercent");
for (final String sample : all_samples) {
out.print("\t");
out.print(sample);
}
out.println();
final List<RegionCaptured> regionsCaptured = new ArrayList<RegionCaptured>();
if (bedFile != null) {
LOG.info("Reading BED:" + bedFile);
final BedLineCodec bedLineCodec = new BedLineCodec();
BufferedReader r = IOUtils.openFileForBufferedReading(bedFile);
r.lines().filter(L -> !L.startsWith("#")).filter(L -> !StringUtil.isBlank(L)).map(L -> bedLineCodec.decode(L)).filter(B -> B != null).forEach(B -> {
final SAMSequenceRecord ssr = this.samSequenceDictionary.getSequence(B.getContig());
if (ssr == null) {
LOG.warning("Cannot resolve " + B.getContig());
return;
}
final RegionCaptured roi = new RegionCaptured(ssr, B.getStart() - 1, B.getEnd());
regionsCaptured.add(roi);
});
CloserUtil.close(r);
LOG.info("end Reading BED:" + bedFile);
Collections.sort(regionsCaptured);
} else {
LOG.info("No capture, peeking everything");
for (final SAMSequenceRecord ssr : this.samSequenceDictionary.getSequences()) {
final RegionCaptured roi = new RegionCaptured(ssr, 0, ssr.getSequenceLength());
regionsCaptured.add(roi);
}
}
final SAMSequenceDictionaryProgress progress = new SAMSequenceDictionaryProgress(this.samSequenceDictionary).logger(LOG);
ReferenceContig genomicSequence = null;
for (final RegionCaptured roi : regionsCaptured) {
if (genomicSequence == null || !genomicSequence.hasName(roi.getContig())) {
genomicSequence = indexedFastaSequenceFile.getContig(roi.getContig());
if (genomicSequence == null) {
LOG.error(JvarkitException.ContigNotFoundInDictionary.getMessage(roi.getContig(), this.samSequenceDictionary));
return -1;
}
}
Map<String, int[]> sample2depth = new HashMap<String, int[]>();
Map<String, Double> sample2meanDepth = new HashMap<String, Double>();
for (final String sample : all_samples) {
int[] depth = new int[roi.length()];
Arrays.fill(depth, 0);
sample2depth.put(sample, depth);
}
List<CloseableIterator<SAMRecord>> iterators = new ArrayList<CloseableIterator<SAMRecord>>();
for (final SamReader r : readers) {
iterators.add(r.query(roi.getContig(), roi.getStart(), roi.getEnd(), false));
}
final MergingIterator<SAMRecord> merginIter = new MergingIterator<>(new SAMRecordCoordinateComparator(), iterators);
while (merginIter.hasNext()) {
final SAMRecord rec = merginIter.next();
if (rec.getReadUnmappedFlag())
continue;
if (this.filter.filterOut(rec))
continue;
final String sample = this.partition.getPartion(rec, null);
if (sample == null)
continue;
final int[] depth = sample2depth.get(sample);
if (depth == null)
continue;
final Cigar cigar = rec.getCigar();
if (cigar == null)
continue;
int refpos1 = rec.getAlignmentStart();
for (final CigarElement ce : cigar.getCigarElements()) {
final CigarOperator op = ce.getOperator();
if (!op.consumesReferenceBases())
continue;
if (op.consumesReadBases()) {
for (int i = 0; i < ce.getLength(); ++i) {
if (refpos1 + i < roi.getStart())
continue;
if (refpos1 + i > roi.getEnd())
break;
depth[refpos1 + i - roi.getStart()]++;
}
}
refpos1 += ce.getLength();
}
}
merginIter.close();
for (final RegionCaptured.SlidingWindow win : roi) {
double total = 0f;
int countN = 0;
for (int pos1 = win.getStart(); pos1 <= win.getEnd(); ++pos1) {
switch(genomicSequence.charAt(pos1 - 1)) {
case 'c':
case 'C':
case 'g':
case 'G':
case 's':
case 'S':
{
total++;
break;
}
case 'n':
case 'N':
countN++;
break;
default:
break;
}
}
if (skip_if_contains_N && countN > 0)
continue;
double GCPercent = total / (double) win.length();
int max_depth_for_win = 0;
sample2meanDepth.clear();
for (final String sample : all_samples) {
int[] depth = sample2depth.get(sample);
double sum = 0;
for (int pos = win.getStart(); pos < win.getEnd() && (pos - roi.getStart()) < depth.length; ++pos) {
sum += depth[pos - roi.getStart()];
}
double mean = (sum / (double) depth.length);
max_depth_for_win = Math.max(max_depth_for_win, (int) mean);
sample2meanDepth.put(sample, mean);
}
if (max_depth_for_win < this.min_depth)
continue;
if (!this.hide_genomic_index) {
out.print(win.getGenomicIndex());
out.print("\t");
}
out.print(win.getContig());
out.print("\t");
out.print(win.getStart() - 1);
out.print("\t");
out.print(win.getEnd());
out.print("\t");
out.printf("%.2f", GCPercent);
for (String sample : all_samples) {
out.print("\t");
out.printf("%.2f", (double) sample2meanDepth.get(sample));
}
out.println();
}
}
progress.finish();
out.flush();
return 0;
} catch (Exception err) {
LOG.error(err);
return -1;
} finally {
for (SamReader r : readers) CloserUtil.close(r);
CloserUtil.close(indexedFastaSequenceFile);
CloserUtil.close(out);
}
}
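A minimal sketch of the per-window GC% computation from doWork() above: count C/G/S bases, reject windows containing N, and walk the ReferenceContig with the 0-based charAt(). The FASTA path, contig and window bounds are hypothetical; ReferenceGenome/ReferenceGenomeFactory are assumed to share ReferenceContig's package.

import com.github.lindenb.jvarkit.util.bio.fasta.ReferenceContig;
// assumption: ReferenceGenome and ReferenceGenomeFactory share ReferenceContig's package
import com.github.lindenb.jvarkit.util.bio.fasta.ReferenceGenome;
import com.github.lindenb.jvarkit.util.bio.fasta.ReferenceGenomeFactory;

public class GcWindowSketch {
    public static void main(final String[] args) throws Exception {
        final ReferenceGenome genome = new ReferenceGenomeFactory().openFastaFile(new java.io.File("/path/to/ref.fasta"));
        try {
            final ReferenceContig contig = genome.getContig("chr1");
            final int winStart1 = 10_001, winEnd1 = 10_100; // 1-based, inclusive window (hypothetical)
            double gc = 0;
            int countN = 0;
            for (int pos1 = winStart1; pos1 <= winEnd1; ++pos1) {
                switch (Character.toUpperCase(contig.charAt(pos1 - 1))) { // charAt() is 0-based
                    case 'C': case 'G': case 'S': gc++; break;
                    case 'N': countN++; break;
                    default: break;
                }
            }
            if (countN == 0) { // same policy as skip_if_contains_N above
                System.out.printf("%s:%d-%d GC%%=%.2f%n", contig.getContig(), winStart1, winEnd1, gc / (winEnd1 - winStart1 + 1));
            }
        } finally {
            genome.close();
        }
    }
}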
use of com.github.lindenb.jvarkit.util.bio.fasta.ReferenceContig in project jvarkit by lindenb.
the class TestNg01 method testRefGenomeFactoryForDAS.
@Test
public void testRefGenomeFactoryForDAS() throws IOException {
final ReferenceGenomeFactory rgf = new ReferenceGenomeFactory();
rgf.setBufferSize(10);
final ReferenceGenome ref = rgf.openDAS(new URL("http://genome.cse.ucsc.edu/cgi-bin/das/hg19"));
Assert.assertTrue(ref.size() > 23);
ReferenceContig contig = ref.getContig("1");
Assert.assertNotNull(contig);
Assert.assertEquals(contig.getContig(), "1");
Assert.assertEquals(contig.length(), 249250621);
contig = ref.getContig("M");
Assert.assertNotNull(contig);
Assert.assertEquals(contig.getContig(), "M");
Assert.assertEquals(contig.length(), 16571);
String dna = "gatcacaggtctatcacc";
for (int i = 0; i < dna.length(); ++i) {
Assert.assertEquals(dna.charAt(i), contig.charAt(i));
}
dna = "cttaaataagacatcacgatg";
for (int i = 0; i < dna.length(); ++i) {
Assert.assertEquals(dna.charAt(i), contig.charAt(contig.length() - dna.length() + i));
}
ref.close();
}
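The same API works identically outside a test: a short sketch that opens the UCSC DAS server as a ReferenceGenome and reads a few mitochondrial bases, mirroring the assertions above. The buffer size and the number of bases printed are illustrative only.

import java.net.URL;

import com.github.lindenb.jvarkit.util.bio.fasta.ReferenceContig;
// assumption: ReferenceGenome and ReferenceGenomeFactory share ReferenceContig's package
import com.github.lindenb.jvarkit.util.bio.fasta.ReferenceGenome;
import com.github.lindenb.jvarkit.util.bio.fasta.ReferenceGenomeFactory;

public class DasReferenceSketch {
    public static void main(final String[] args) throws Exception {
        final ReferenceGenomeFactory rgf = new ReferenceGenomeFactory();
        rgf.setBufferSize(10); // small buffer, as in the test; illustrative only
        final ReferenceGenome ref = rgf.openDAS(new URL("http://genome.cse.ucsc.edu/cgi-bin/das/hg19"));
        try {
            final ReferenceContig chrM = ref.getContig("M");
            // print the first 18 bases of the mitochondrial contig (charAt is 0-based)
            final StringBuilder sb = new StringBuilder();
            for (int i = 0; i < 18; ++i) sb.append(chrM.charAt(i));
            System.out.println(chrM.getContig() + " starts with " + sb + " (length=" + chrM.length() + ")");
        } finally {
            ref.close();
        }
    }
}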
use of com.github.lindenb.jvarkit.util.bio.fasta.ReferenceContig in project jvarkit by lindenb.
the class BamStats04 method doWork.
@Override
public int doWork(final List<String> args) {
if (this.bedFile == null || !this.bedFile.exists()) {
LOG.error("undefined option -B (bed file)");
return -1;
}
if (args.isEmpty()) {
LOG.error("Bam files missing");
return -1;
}
if (this.minCoverages.isEmpty()) {
this.minCoverages.add(0);
}
final String NO_PARTITION = "N/A";
BufferedReader bedIn = null;
final List<SamReader> samReaders = new ArrayList<>(args.size());
PrintWriter pw = null;
ReferenceGenome referenceGenome = null;
ReferenceContig referenceContig = null;
try {
final BedLineCodec codec = new BedLineCodec();
final Set<String> all_partitions = new TreeSet<>();
bedIn = IOUtils.openFileForBufferedReading(this.bedFile);
SAMSequenceDictionary dict = null;
for (final String filename : IOUtils.unrollFiles(args)) {
LOG.info(filename);
final SamReader samReader = super.openSamReader(filename);
if (!samReader.hasIndex()) {
LOG.error(filename + " is not indexed");
samReader.close();
return -1;
}
final SAMFileHeader samFileheader = samReader.getFileHeader();
if (samFileheader == null) {
LOG.error("SAM file is missing a header " + filename);
return -1;
}
final List<SAMReadGroupRecord> readGroups = samFileheader.getReadGroups();
if (readGroups == null || readGroups.isEmpty()) {
LOG.warn("No Read group (RG) in the header of " + filename);
all_partitions.add(NO_PARTITION);
} else {
for (final SAMReadGroupRecord rg : readGroups) {
all_partitions.add(this.partition.apply(rg, NO_PARTITION));
}
}
final SAMSequenceDictionary d = samFileheader.getSequenceDictionary();
if (d == null) {
samReader.close();
LOG.error(JvarkitException.BamDictionaryMissing.getMessage(filename));
return -1;
}
samReaders.add(samReader);
if (dict == null) {
dict = d;
} else if (!SequenceUtil.areSequenceDictionariesEqual(d, dict)) { // dictionaries must match across all inputs
LOG.error(JvarkitException.DictionariesAreNotTheSame.getMessage(d, dict));
return -1;
}
}
if (samReaders.isEmpty()) {
LOG.error("No Bam defined");
return -1;
}
if (!StringUtil.isBlank(this.faidxUri)) {
referenceGenome = new ReferenceGenomeFactory().open(this.faidxUri);
}
pw = super.openFileOrStdoutAsPrintWriter(this.outputFile);
pw.print("#chrom\tstart\tend\tlength\t" + this.partition.name() + (referenceGenome == null ? "" : "\tgc_percent"));
pw.print("\tmincov\tmaxcov");
for (final int MIN_COVERAGE : this.minCoverages) {
pw.print("\tmeancov_" + MIN_COVERAGE + "\tmediancov_" + MIN_COVERAGE + "\tnocoveragebp_" + MIN_COVERAGE + "\tpercentcovered_" + MIN_COVERAGE);
}
pw.println();
String line = null;
while ((line = bedIn.readLine()) != null) {
if (line.isEmpty() || line.startsWith("#"))
continue;
final BedLine bedLine = codec.decode(line);
if (bedLine == null)
continue;
if (dict.getSequence(bedLine.getContig()) == null) {
LOG.error("Unknown contig in " + line);
return -1;
}
if (bedLine.getStart() > bedLine.getEnd()) {
LOG.info("ignoring " + bedLine);
continue;
}
if (referenceGenome != null && (referenceContig == null || !referenceContig.hasName(bedLine.getContig()))) {
referenceContig = referenceGenome.getContig(bedLine.getContig());
}
final Map<String, IntervalStat> sample2stats = new HashMap<>(all_partitions.size());
for (final String rgId : all_partitions) {
sample2stats.put(rgId, new IntervalStat(bedLine));
}
for (final SamReader samReader : samReaders) {
/**
* start - 1-based, inclusive start of interval of interest. Zero implies start of the reference sequence.
* end - 1-based, inclusive end of interval of interest. Zero implies end of the reference sequence.
*/
final SAMRecordIterator r = samReader.queryOverlapping(bedLine.getContig(), bedLine.getStart(), bedLine.getEnd());
while (r.hasNext()) {
final SAMRecord rec = r.next();
if (rec.getReadUnmappedFlag())
continue;
if (this.filter.filterOut(rec))
continue;
if (!rec.getReferenceName().equals(bedLine.getContig()))
continue;
final String partition;
final SAMReadGroupRecord group = rec.getReadGroup();
if (group == null) {
partition = NO_PARTITION;
} else {
final String name = this.partition.apply(group);
partition = (StringUtil.isBlank(name) ? NO_PARTITION : name);
}
IntervalStat stat = sample2stats.get(partition);
if (stat == null) {
stat = new IntervalStat(bedLine);
sample2stats.put(partition, stat);
}
stat.visit(rec);
}
r.close();
}
// end of loop over sam Readers
final OptionalInt gcPercentInt = (referenceContig == null ? OptionalInt.empty() : referenceContig.getGCPercent(bedLine.getStart() - 1, bedLine.getEnd()).getGCPercentAsInteger());
for (final String partitionName : sample2stats.keySet()) {
final IntervalStat stat = sample2stats.get(partitionName);
Arrays.sort(stat.counts);
pw.print(bedLine.getContig() + "\t" + (bedLine.getStart() - 1) + "\t" + (bedLine.getEnd()) + "\t" + stat.counts.length + "\t" + partitionName);
if (referenceGenome != null) {
pw.print("\t");
if (gcPercentInt.isPresent())
pw.print(gcPercentInt.getAsInt());
}
pw.print("\t" + stat.counts[0] + "\t" + stat.counts[stat.counts.length - 1]);
for (final int MIN_COVERAGE : this.minCoverages) {
/**
* map depth to 0 if depth <= MIN_COVERAGE
*/
final IntUnaryOperator depthAdjuster = (D) -> (D <= MIN_COVERAGE ? 0 : D);
final int count_no_coverage = (int) Arrays.stream(stat.counts).filter(D -> depthAdjuster.applyAsInt(D) <= 0).count();
final double mean = Percentile.average().evaluate(Arrays.stream(stat.counts).map(depthAdjuster));
final double median_depth = Percentile.median().evaluate(Arrays.stream(stat.counts).map(depthAdjuster));
pw.print("\t" + mean + "\t" + median_depth + "\t" + count_no_coverage + "\t" + (int) (((stat.counts.length - count_no_coverage) / (double) stat.counts.length) * 100.0));
}
pw.println();
}
}
pw.flush();
pw.close();
pw = null;
LOG.info("done");
return RETURN_OK;
} catch (final Exception err) {
LOG.error(err);
return -1;
} finally {
CloserUtil.close(referenceGenome);
CloserUtil.close(pw);
CloserUtil.close(bedIn);
CloserUtil.close(samReaders);
}
}
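A minimal sketch of the per-interval GC% lookup used above for the gc_percent column: getGCPercent() takes the 0-based BED start and the interval end, and getGCPercentAsInteger() yields an OptionalInt. The reference URI and interval are hypothetical; ReferenceGenome/ReferenceGenomeFactory are assumed to share ReferenceContig's package.

import java.util.OptionalInt;

import com.github.lindenb.jvarkit.util.bio.fasta.ReferenceContig;
// assumption: ReferenceGenome and ReferenceGenomeFactory share ReferenceContig's package
import com.github.lindenb.jvarkit.util.bio.fasta.ReferenceGenome;
import com.github.lindenb.jvarkit.util.bio.fasta.ReferenceGenomeFactory;

public class BedGcSketch {
    public static void main(final String[] args) throws Exception {
        final ReferenceGenome genome = new ReferenceGenomeFactory().open("/path/to/ref.fasta");
        try {
            final ReferenceContig contig = genome.getContig("chr1");
            final int bedStart0 = 10_000, bedEnd = 10_500; // half-open BED interval (hypothetical)
            // same call as doWork(): 0-based start, interval end
            final OptionalInt gc = contig.getGCPercent(bedStart0, bedEnd).getGCPercentAsInteger();
            System.out.println("gc_percent=" + (gc.isPresent() ? gc.getAsInt() : "."));
        } finally {
            genome.close();
        }
    }
}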