Use of htsjdk.samtools.SAMReadGroupRecord in project jvarkit by lindenb.
The class XContaminations, method doWork.
@Override
public int doWork(final List<String> args) {
long last_save_ms = System.currentTimeMillis();
if (this.output_as_vcf && !this.use_only_sample_name) {
LOG.error("cannot write vcf if --sample is not set");
return -1;
}
if (args.size() < 2) {
LOG.error("Illegal Number of args");
return -1;
}
final Set<File> bamFiles = IOUtils.unrollFiles(args.subList(1, args.size())).stream().map(S -> new File(S)).collect(Collectors.toSet());
if (bamFiles.isEmpty()) {
LOG.error("Undefined BAM file(s)");
return -1;
}
SAMRecordIterator iter = null;
VcfIterator in = null;
Map<String, SamReader> sample2samReader = new HashMap<>();
VariantContextWriter vcfw = null;
try {
final SamReaderFactory srf = super.createSamReaderFactory();
if (args.get(0).equals("-")) {
in = super.openVcfIterator(null);
} else {
in = super.openVcfIterator(args.get(0));
}
VCFHeader vcfHeader = in.getHeader();
final SAMSequenceDictionary dict1 = vcfHeader.getSequenceDictionary();
if (dict1 == null) {
LOG.error(JvarkitException.VcfDictionaryMissing.getMessage(args.get(0)));
return -1;
}
final Set<String> sampleNames = new HashSet<>(vcfHeader.getSampleNamesInOrder());
if (sampleNames.isEmpty()) {
LOG.error("VCF contains no sample");
return -1;
}
for (final File bamFile : bamFiles) {
LOG.info("Opening " + bamFile);
final SamReader samReader = srf.open(bamFile);
final SAMFileHeader samHeader = samReader.getFileHeader();
final SAMSequenceDictionary dict2 = samHeader.getSequenceDictionary();
if (dict2 == null) {
samReader.close();
LOG.error(JvarkitException.BamDictionaryMissing.getMessage(bamFile.getPath()));
return -1;
}
if (!SequenceUtil.areSequenceDictionariesEqual(dict1, dict2)) {
samReader.close();
LOG.error(JvarkitException.DictionariesAreNotTheSame.getMessage(dict1, dict2));
return -1;
}
if (!samReader.hasIndex()) {
samReader.close();
LOG.error("sam is not indexed : " + bamFile);
return -1;
}
String sampleName = null;
for (final SAMReadGroupRecord rgr : samHeader.getReadGroups()) {
final String s = rgr.getSample();
if (StringUtil.isBlank(s))
continue;
if (sampleName == null) {
sampleName = s;
} else if (!sampleName.equals(s)) {
samReader.close();
LOG.error("Cannot handle more than one sample/bam " + bamFile + " " + sampleName);
return -1;
}
}
if (sampleName == null) {
samReader.close();
LOG.error("No sample in " + bamFile);
// skip this bam
continue;
}
if (!sampleNames.contains(sampleName)) {
samReader.close();
LOG.error("Not in VCF header: sample " + sampleName + " " + bamFile);
// skip this bam
continue;
}
if (sample2samReader.containsKey(sampleName)) {
samReader.close();
LOG.error("Cannot handle more than one bam/sample: " + bamFile + " " + sampleName);
return -1;
}
sample2samReader.put(sampleName, samReader);
}
if (sample2samReader.size() < 2) {
LOG.error("Not engough BAM/samples. Expected at least two valid BAMs");
return -1;
}
sampleNames.retainAll(sample2samReader.keySet());
/* create a VCF writer if VCF output was asked */
final List<SamplePair> sampleListForVcf;
if (this.output_as_vcf) {
vcfw = super.openVariantContextWriter(outputFile);
final Set<VCFHeaderLine> metaData = new HashSet<>();
metaData.add(new VCFFormatHeaderLine("S1S1", 1, VCFHeaderLineType.Integer, "reads sample 1 supporting sample 1"));
metaData.add(new VCFFormatHeaderLine("S1S2", 1, VCFHeaderLineType.Integer, "reads sample 1 supporting sample 2"));
metaData.add(new VCFFormatHeaderLine("S1SO", 1, VCFHeaderLineType.Integer, "reads sample 1 supporting others"));
metaData.add(new VCFFormatHeaderLine("S2S1", 1, VCFHeaderLineType.Integer, "reads sample 2 supporting sample 1"));
metaData.add(new VCFFormatHeaderLine("S2S2", 1, VCFHeaderLineType.Integer, "reads sample 2 supporting sample 2"));
metaData.add(new VCFFormatHeaderLine("S2SO", 1, VCFHeaderLineType.Integer, "reads sample 2 supporting others"));
metaData.add(new VCFFormatHeaderLine("FR", 1, VCFHeaderLineType.Float, "Fraction. '-1' for unavailable."));
metaData.add(new VCFFormatHeaderLine("S1A", 1, VCFHeaderLineType.Character, "sample 1 allele"));
metaData.add(new VCFFormatHeaderLine("S2A", 1, VCFHeaderLineType.Character, "sample 2 allele"));
metaData.add(new VCFFilterHeaderLine("XCONTAMINATION", "Fraction test is > " + fraction_treshold));
metaData.add(new VCFFilterHeaderLine("BADSAMPLES", "At least one pair of genotype fails the 'LE' test"));
metaData.add(new VCFInfoHeaderLine("LE", 1, VCFHeaderLineType.Integer, "number of pair of genotypes having (S1S1<=S1S2 or S2S2<=S2S1)."));
metaData.add(new VCFInfoHeaderLine("BADSAMPLES", VCFHeaderLineCount.UNBOUNDED, VCFHeaderLineType.String, "Samples founds failing the 'LE' test"));
sampleListForVcf = new ArrayList<>();
final List<String> sampleList = new ArrayList<>(sampleNames);
for (int x = 0; x + 1 < sampleList.size(); ++x) {
for (int y = x + 1; y < sampleList.size(); ++y) {
sampleListForVcf.add(new SamplePair(new SimpleSampleIdenfifier(sampleList.get(x)), new SimpleSampleIdenfifier(sampleList.get(y))));
}
}
final VCFHeader header2 = new VCFHeader(metaData, sampleListForVcf.stream().map(V -> V.getLabel()).sorted().collect(Collectors.toList()));
header2.setSequenceDictionary(dict1);
vcfw.writeHeader(header2);
} else {
vcfw = null;
sampleListForVcf = null;
}
final Map<SamplePair, SampleAlleles> contaminationTable = new HashMap<>();
final SAMSequenceDictionaryProgress progress = new SAMSequenceDictionaryProgress(dict1).logger(LOG);
while (in.hasNext()) {
final VariantContext ctx = progress.watch(in.next());
if (!ctx.isSNP() || ctx.isFiltered() || !ctx.isBiallelic() || ctx.isSymbolic() || !this.variantFilter.test(ctx)) {
continue;
}
int count_homref = 0;
int count_homvar = 0;
int count_het = 0;
final Map<String, Genotype> sample2gt = new HashMap<>();
for (int gidx = 0; gidx < ctx.getNSamples(); ++gidx) {
final Genotype G = ctx.getGenotype(gidx);
if (!G.isCalled())
continue;
if (G.isHet()) {
// counted here, before the filters, because in use_singleton mode no HET genotype is allowed
count_het++;
if (this.use_singleton && count_het > 0)
break;
} else if (G.isHomVar()) {
// here because in use_singleton we must be sure that there is only one hom_var
count_homvar++;
if (this.use_singleton && count_homvar > 1)
break;
}
if (G.isFiltered())
continue;
if (!sample2samReader.containsKey(G.getSampleName()))
continue;
if (!sampleNames.contains(G.getSampleName()))
continue;
if (!this.genotypeFilter.test(ctx, G))
continue;
sample2gt.put(G.getSampleName(), G);
}
if (this.use_singleton && count_het > 0)
continue;
if (this.use_singleton && count_homvar > 1)
continue;
if (sample2gt.size() < 2)
continue;
// reset and recount
count_homref = 0;
count_homvar = 0;
count_het = 0;
for (final String sampleName : sample2gt.keySet()) {
final Genotype G = ctx.getGenotype(sampleName);
switch(G.getType()) {
case HOM_REF:
count_homref++;
break;
case HOM_VAR:
count_homvar++;
break;
case HET:
count_het++;
break;
default:
break;
}
}
// singleton check
if (this.use_singleton && (count_het > 0 || count_homvar != 1)) {
continue;
}
// at least one HOM_REF and one HOM_VAR
if (count_homref == 0)
continue;
if (count_homvar == 0)
continue;
final Map<SampleIdentifier, Counter<Character>> sample_identifier_2allelesCount = new HashMap<>();
/* scan Reads for those Genotype/Samples */
for (final String sampleName : sample2gt.keySet()) {
if (!sample2samReader.containsKey(sampleName))
continue;
// sample name is not in vcf header
final SamReader samReader = sample2samReader.get(sampleName);
if (samReader == null)
continue;
final Genotype genotype = sample2gt.get(sampleName);
if (genotype == null)
continue;
iter = samReader.query(ctx.getContig(), ctx.getStart(), ctx.getEnd(), false);
while (iter.hasNext()) {
final SAMRecord record = iter.next();
if (record.getEnd() < ctx.getStart())
continue;
if (ctx.getEnd() < record.getStart())
continue;
if (record.getReadUnmappedFlag())
continue;
if (this.filter.filterOut(record))
continue;
final SAMReadGroupRecord srgr = record.getReadGroup();
// not current sample
if (srgr == null)
continue;
if (!sampleName.equals(srgr.getSample()))
continue;
final Cigar cigar = record.getCigar();
if (cigar == null || cigar.isEmpty())
continue;
byte[] readSeq = record.getReadBases();
if (readSeq == null || readSeq.length == 0)
continue;
int readPos = record.getReadPositionAtReferencePosition(ctx.getStart());
if (readPos < 1)
continue;
readPos--;
if (readPos >= readSeq.length)
continue;
final char base = Character.toUpperCase((char) readSeq[readPos]);
if (base == 'N')
continue;
final SampleIdentifier sampleIdentifier;
if (this.use_only_sample_name) {
sampleIdentifier = new SimpleSampleIdenfifier(sampleName);
} else {
final ShortReadName readName = ShortReadName.parse(record);
if (!readName.isValid()) {
LOG.info("No a valid read name " + record.getReadName());
continue;
}
sampleIdentifier = new SequencerFlowCellRunLaneSample(readName, sampleName);
}
Counter<Character> sampleAlleles = sample_identifier_2allelesCount.get(sampleIdentifier);
if (sampleAlleles == null) {
sampleAlleles = new Counter<Character>();
sample_identifier_2allelesCount.put(sampleIdentifier, sampleAlleles);
}
sampleAlleles.incr(base);
}
iter.close();
iter = null;
}
/* end scan reads for this sample */
/* sum-up data for this SNP */
final VariantContextBuilder vcb;
final List<Genotype> genotypeList;
if (this.output_as_vcf) {
vcb = new VariantContextBuilder(args.get(0), ctx.getContig(), ctx.getStart(), ctx.getEnd(), ctx.getAlleles());
if (ctx.hasID())
vcb.id(ctx.getID());
genotypeList = new ArrayList<>();
} else {
vcb = null;
genotypeList = null;
}
for (final String sample1 : sample2gt.keySet()) {
final Genotype g1 = sample2gt.get(sample1);
final char a1 = g1.getAllele(0).getBaseString().charAt(0);
for (final String sample2 : sample2gt.keySet()) {
if (sample1.compareTo(sample2) >= 0)
continue;
final Genotype g2 = sample2gt.get(sample2);
if (g2.sameGenotype(g1))
continue;
final char a2 = g2.getAllele(0).getBaseString().charAt(0);
for (final SampleIdentifier sfcr1 : sample_identifier_2allelesCount.keySet()) {
if (!sfcr1.getSampleName().equals(sample1))
continue;
final Counter<Character> counter1 = sample_identifier_2allelesCount.get(sfcr1);
if (counter1 == null)
continue;
for (final SampleIdentifier sfcr2 : sample_identifier_2allelesCount.keySet()) {
if (!sfcr2.getSampleName().equals(sample2))
continue;
final SamplePair samplePair = new SamplePair(sfcr1, sfcr2);
final Counter<Character> counter2 = sample_identifier_2allelesCount.get(sfcr2);
if (counter2 == null)
continue;
SampleAlleles sampleAlleles = contaminationTable.get(samplePair);
if (sampleAlleles == null) {
sampleAlleles = new SampleAlleles();
contaminationTable.put(samplePair, sampleAlleles);
if (!this.output_as_vcf && contaminationTable.size() % 10000 == 0)
LOG.info("n(pairs)=" + contaminationTable.size());
}
sampleAlleles.number_of_comparaisons++;
for (final Character allele : counter1.keySet()) {
final long n = counter1.count(allele);
if (allele.equals(a1)) {
sampleAlleles.reads_sample1_supporting_sample1 += n;
} else if (allele.equals(a2)) {
sampleAlleles.reads_sample1_supporting_sample2 += n;
} else {
sampleAlleles.reads_sample1_supporting_other += n;
}
}
for (final Character allele : counter2.keySet()) {
final long n = counter2.count(allele);
if (allele.equals(a2)) {
sampleAlleles.reads_sample2_supporting_sample2 += n;
} else if (allele.equals(a1)) {
sampleAlleles.reads_sample2_supporting_sample1 += n;
} else {
sampleAlleles.reads_sample2_supporting_other += n;
}
}
}
}
}
}
if (this.output_as_vcf) {
final Set<String> bad_samples = new TreeSet<>();
boolean fraction_flag = false;
int num_lt = 0;
for (final SamplePair samplepair : sampleListForVcf) {
final GenotypeBuilder gb = new GenotypeBuilder(samplepair.getLabel());
final SampleAlleles sampleAlleles = contaminationTable.get(samplepair);
if (sampleAlleles != null) {
gb.attribute("S1S1", sampleAlleles.reads_sample1_supporting_sample1);
gb.attribute("S1S2", sampleAlleles.reads_sample1_supporting_sample2);
gb.attribute("S1SO", sampleAlleles.reads_sample1_supporting_other);
gb.attribute("S2S1", sampleAlleles.reads_sample2_supporting_sample1);
gb.attribute("S2S2", sampleAlleles.reads_sample2_supporting_sample2);
gb.attribute("S2SO", sampleAlleles.reads_sample2_supporting_other);
gb.attribute("S1A", sample2gt.get(samplepair.sample1.getSampleName()).getAllele(0).getDisplayString().charAt(0));
gb.attribute("S2A", sample2gt.get(samplepair.sample2.getSampleName()).getAllele(0).getDisplayString().charAt(0));
final double fraction = sampleAlleles.getFraction();
gb.attribute("FR", fraction);
if (!this.passFractionTreshold.test(fraction)) {
fraction_flag = true;
}
boolean bad_lt_flag = false;
if (sampleAlleles.reads_sample1_supporting_sample1 <= this.fail_factor * sampleAlleles.reads_sample1_supporting_sample2) {
bad_samples.add(samplepair.sample1.getSampleName());
bad_lt_flag = true;
}
if (sampleAlleles.reads_sample2_supporting_sample2 <= this.fail_factor * sampleAlleles.reads_sample2_supporting_sample1) {
bad_samples.add(samplepair.sample2.getSampleName());
bad_lt_flag = true;
}
if (bad_lt_flag) {
num_lt++;
}
} else {
gb.attribute("S1S1", -1);
gb.attribute("S1S2", -1);
gb.attribute("S1SO", -1);
gb.attribute("S2S1", -1);
gb.attribute("S2S2", -1);
gb.attribute("S2SO", -1);
gb.attribute("S1A", '.');
gb.attribute("S2A", '.');
gb.attribute("FR", -1f);
}
genotypeList.add(gb.make());
}
if (!bad_samples.isEmpty()) {
vcb.attribute("BADSAMPLES", new ArrayList<>(bad_samples));
}
vcb.attribute("LE", num_lt);
if (fraction_flag || !bad_samples.isEmpty()) {
if (fraction_flag)
vcb.filter("XCONTAMINATION");
if (!bad_samples.isEmpty())
vcb.filter("BADSAMPLES");
} else {
vcb.passFilters();
}
vcb.genotypes(genotypeList);
vcfw.add(vcb.make());
contaminationTable.clear();
} else {
final long now = System.currentTimeMillis();
if (this.outputFile != null && this.save_every_sec > -1L && last_save_ms + (this.save_every_sec * 1000L) <= now) {
saveToFile(contaminationTable);
last_save_ms = now;
}
}
}
progress.finish();
if (this.output_as_vcf) {
vcfw.close();
vcfw = null;
} else {
saveToFile(contaminationTable);
}
return 0;
} catch (final Exception e) {
LOG.error(e);
return -1;
} finally {
CloserUtil.close(vcfw);
CloserUtil.close(in);
CloserUtil.close(iter);
for (SamReader samReader : sample2samReader.values()) CloserUtil.close(samReader);
sample2samReader.clear();
}
}
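The SAMReadGroupRecord usage that matters above is the per-BAM sample resolution: every @RG line of a BAM must carry the same non-blank SM value, and that value becomes the key of sample2samReader. Below is a minimal, self-contained sketch of that pattern, assuming only the htsjdk API already used in the method; the helper name resolveSingleSample and the command-line argument are illustrative, not part of jvarkit.

import htsjdk.samtools.SAMReadGroupRecord;
import htsjdk.samtools.SamReader;
import htsjdk.samtools.SamReaderFactory;
import htsjdk.samtools.util.StringUtil;

import java.io.File;
import java.io.IOException;

public class ReadGroupSampleResolver {

    /** Returns the unique SM value shared by all read groups, or null if absent or ambiguous. */
    static String resolveSingleSample(final SamReader reader) {
        String sampleName = null;
        for (final SAMReadGroupRecord rgr : reader.getFileHeader().getReadGroups()) {
            final String s = rgr.getSample();
            if (StringUtil.isBlank(s)) continue;            // ignore @RG lines without SM
            if (sampleName == null) sampleName = s;         // first sample seen
            else if (!sampleName.equals(s)) return null;    // more than one sample in this BAM
        }
        return sampleName;
    }

    public static void main(final String[] args) throws IOException {
        // args[0] is a hypothetical indexed BAM path, for illustration only
        try (SamReader reader = SamReaderFactory.makeDefault().open(new File(args[0]))) {
            System.out.println("sample=" + resolveSingleSample(reader));
        }
    }
}

XContaminations additionally rejects a BAM when the resolved sample is blank, missing from the VCF header, or already bound to another reader.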
Use of htsjdk.samtools.SAMReadGroupRecord in project jvarkit by lindenb.
The class Biostar173114, method doWork.
@Override
public int doWork(final List<String> args) {
if (keepQualities)
keepSequence = true;
SamReader sfr = null;
SAMFileWriter sfw = null;
SAMRecordIterator iter = null;
try {
sfr = super.openSamReader(oneFileOrNull(args));
sfw = this.writingBamArgs.openSAMFileWriter(this.outputFile, sfr.getFileHeader(), true);
iter = sfr.iterator();
final SAMSequenceDictionaryProgress progress = new SAMSequenceDictionaryProgress(sfr.getFileHeader()).logger(LOG);
long nReads = 0;
while (iter.hasNext()) {
final SAMRecord record = progress.watch(iter.next());
if (!this.keepAttributes) {
final SAMReadGroupRecord g = record.getReadGroup();
record.clearAttributes();
if (g != null && this.keepReadGroup) {
record.setAttribute("RG", g.getId());
}
}
record.setReadName(this.keepName ? record.getReadName() : "R" + Long.toHexString(nReads++));
if (!this.keepMate && record.getReadPairedFlag()) {
record.setReadPairedFlag(false);
record.setMateReferenceIndex(SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX);
record.setMateAlignmentStart(SAMRecord.NO_ALIGNMENT_START);
record.setMateUnmappedFlag(false);
record.setMateNegativeStrandFlag(false);
record.setInferredInsertSize(0);
record.setProperPairFlag(false);
}
if (!this.keepCigar && !record.getReadUnmappedFlag() && record.getCigar() != null) {
record.setCigar(new Cigar(record.getCigar().getCigarElements().stream().filter(C -> !C.getOperator().equals(CigarOperator.H)).collect(Collectors.toList())));
}
if (!this.keepSequence) {
record.setReadBases(SAMRecord.NULL_SEQUENCE);
}
if (!this.keepQualities) {
record.setBaseQualities(SAMRecord.NULL_QUALS);
}
sfw.addAlignment(record);
}
progress.finish();
sfw.close();
sfw = null;
LOG.info("done");
return RETURN_OK;
} catch (final Exception err) {
LOG.error(err);
return -1;
} finally {
CloserUtil.close(iter);
CloserUtil.close(sfr);
CloserUtil.close(sfw);
}
}
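The read-group-specific step above is keeping the RG tag alive across clearAttributes(): the group is resolved through record.getReadGroup() before the wipe, then re-attached by its ID. Here is a minimal sketch of just that step, assuming a SAMRecord obtained from any htsjdk iterator; the method name stripAttributesKeepReadGroup is illustrative.

import htsjdk.samtools.SAMReadGroupRecord;
import htsjdk.samtools.SAMRecord;

public final class KeepReadGroupOnly {

    /** Removes every optional attribute but restores the RG tag if the record had one. */
    static void stripAttributesKeepReadGroup(final SAMRecord record) {
        // resolve the read group before clearing: getReadGroup() relies on the RG attribute
        final SAMReadGroupRecord g = record.getReadGroup();
        record.clearAttributes();
        if (g != null) {
            record.setAttribute("RG", g.getId());
        }
    }
}

Resolving the group before the wipe matters: once clearAttributes() has run, getReadGroup() would return null.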
Use of htsjdk.samtools.SAMReadGroupRecord in project jvarkit by lindenb.
The class Biostar214299, method doWork.
@Override
public int doWork(final List<String> args) {
if (this.positionFile == null) {
LOG.error("position File is not defined.");
return -1;
}
final String UNAFFECTED_SAMPLE = "UNAFFECTED";
final String AMBIGOUS_SAMPLE = "AMBIGOUS";
final String UNMAPPED = "UNMAPPED";
SamReader sfr = null;
SAMFileWriter sfw = null;
final IntervalTreeMap<Position> positionsTreeMap = new IntervalTreeMap<>();
final Set<String> samples = new HashSet<>();
try {
sfr = openSamReader(oneFileOrNull(args));
final SAMFileHeader header = sfr.getFileHeader();
final SAMSequenceDictionary dict = header.getSequenceDictionary();
if (dict == null) {
LOG.error("Dictionary missing in input sam");
return -1;
}
try (BufferedReader br = IOUtils.openFileForBufferedReading(this.positionFile)) {
String line;
while ((line = br.readLine()) != null) {
if (line.trim().isEmpty() || line.startsWith("#"))
continue;
final String[] tokens = line.split("[\t]");
if (tokens.length < 4) {
LOG.error("Not enough columns in " + line);
return -1;
}
final String contig = tokens[0];
if (dict.getSequence(contig) == null) {
LOG.error("No such contig in input's sam dictionary: " + contig);
return -1;
}
final int refpos = Integer.parseInt(tokens[1]);
final Interval interval = new Interval(contig, refpos, refpos);
Position position = positionsTreeMap.get(interval);
if (position == null) {
position = new Position();
// position.contig = contig;
position.refpos = refpos;
positionsTreeMap.put(interval, position);
}
final String bases = tokens[2].toUpperCase();
if (bases.length() != 1 || !bases.matches("[ATGC]")) {
LOG.error("in " + line + " bases should be one letter an ATGC");
return -1;
}
if (position.base2sample.containsKey(bases.charAt(0))) {
LOG.error("in " + line + " bases already defined for this position");
return -1;
}
final String sampleName = tokens[3].trim();
if (sampleName.isEmpty()) {
LOG.error("sample name cannot be empty");
return -1;
}
samples.add(sampleName);
position.base2sample.put(bases.charAt(0), sampleName);
}
} catch (final IOException err) {
LOG.error(err);
return -1;
}
if (samples.contains(UNAFFECTED_SAMPLE)) {
LOG.error("Sample cannot be named " + UNAFFECTED_SAMPLE);
return -1;
}
if (samples.contains(AMBIGOUS_SAMPLE)) {
LOG.error("Sample cannot be named " + AMBIGOUS_SAMPLE);
return -1;
}
if (samples.contains(UNMAPPED)) {
LOG.error("Sample cannot be named " + UNMAPPED);
return -1;
}
samples.add(UNAFFECTED_SAMPLE);
samples.add(AMBIGOUS_SAMPLE);
samples.add(UNMAPPED);
final SAMFileHeader newHeader = new SAMFileHeader();
newHeader.setSortOrder(header.getSortOrder());
newHeader.setSequenceDictionary(dict);
newHeader.addComment("generated with " + getProgramName() + " " + getVersion() + " Pierre Lindenbaum : " + getProgramCommandLine());
/* create groups */
for (final String sample : samples) {
final SAMReadGroupRecord rg = new SAMReadGroupRecord(sample);
rg.setSample(sample);
rg.setLibrary(sample);
newHeader.addReadGroup(rg);
}
sfw = this.writingBamArgs.openSAMFileWriter(this.outputFile, newHeader, true);
final SAMSequenceDictionaryProgress progress = new SAMSequenceDictionaryProgress(header).logger(LOG);
final SAMRecordIterator iter = sfr.iterator();
while (iter.hasNext()) {
final SAMRecord rec = progress.watch(iter.next());
rec.setAttribute("RG", null);
if (rec.getReadUnmappedFlag()) {
rec.setAttribute("RG", UNMAPPED);
sfw.addAlignment(rec);
continue;
}
final Cigar cigar = rec.getCigar();
final Collection<Position> snps = positionsTreeMap.getContained(new Interval(rec.getContig(), rec.getUnclippedStart(), rec.getUnclippedEnd()));
if (snps == null || snps.isEmpty()) {
rec.setAttribute("RG", UNAFFECTED_SAMPLE);
sfw.addAlignment(rec);
continue;
}
final Map<Integer, Position> index2pos = snps.stream().collect(Collectors.toMap(P -> P.refpos, P -> P));
final Set<String> selectedSamples = new HashSet<>();
final byte[] bases = rec.getReadBases();
if (bases == null || bases.equals(SAMRecord.NULL_SEQUENCE)) {
LOG.error("Bases missing in read " + rec);
return -1;
}
int refPos1 = rec.getUnclippedStart();
int readPos0 = 0;
for (final CigarElement ce : cigar.getCigarElements()) {
final CigarOperator op = ce.getOperator();
final boolean consummeReadBaseOrSoftClip = op.consumesReadBases() || op.equals(CigarOperator.S);
if (op.consumesReferenceBases() && consummeReadBaseOrSoftClip) {
for (int i = 0; i < ce.getLength(); ++i) {
final int nowRefPos1 = (refPos1 + i);
final int nowReadPos0 = (readPos0 + i);
final Position position = index2pos.get(nowRefPos1);
if (position == null)
continue;
if (nowReadPos0 >= bases.length)
continue;
final char base = (char) Character.toUpperCase(bases[nowReadPos0]);
final String sample = position.base2sample.get(base);
if (sample == null)
continue;
selectedSamples.add(sample);
index2pos.remove(nowRefPos1);
if (index2pos.isEmpty())
break;
}
}
if (op.consumesReferenceBases())
refPos1 += ce.getLength();
if (consummeReadBaseOrSoftClip || op.equals(CigarOperator.H)) {
readPos0 += ce.getLength();
}
}
if (selectedSamples.isEmpty()) {
rec.setAttribute("RG", UNAFFECTED_SAMPLE);
} else if (selectedSamples.size() == 1) {
rec.setAttribute("RG", selectedSamples.iterator().next());
} else {
rec.setAttribute("RG", AMBIGOUS_SAMPLE);
}
sfw.addAlignment(rec);
}
progress.finish();
LOG.info("done");
return RETURN_OK;
} catch (final Exception err) {
LOG.error(err);
return -1;
} finally {
CloserUtil.close(sfr);
CloserUtil.close(sfw);
}
}
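Here SAMReadGroupRecord is used in the other direction: a new read group is created for each sample, plus the UNAFFECTED, AMBIGOUS and UNMAPPED buckets, and registered on the output header so that every read can later be routed to a group through its RG tag. Below is a minimal sketch of that header-building step, assuming only the htsjdk API used above; the sample names in main are made up for the example.

import htsjdk.samtools.SAMFileHeader;
import htsjdk.samtools.SAMReadGroupRecord;

import java.util.Arrays;
import java.util.List;

public final class PerSampleReadGroups {

    static SAMFileHeader buildHeaderWithReadGroups(final List<String> samples) {
        final SAMFileHeader newHeader = new SAMFileHeader();
        for (final String sample : samples) {
            // the read-group ID, sample and library are all set to the sample name,
            // mirroring the loop in Biostar214299.doWork
            final SAMReadGroupRecord rg = new SAMReadGroupRecord(sample);
            rg.setSample(sample);
            rg.setLibrary(sample);
            newHeader.addReadGroup(rg);
        }
        return newHeader;
    }

    public static void main(final String[] args) {
        final SAMFileHeader h = buildHeaderWithReadGroups(Arrays.asList("S1", "S2", "UNMAPPED"));
        h.getReadGroups().forEach(rg -> System.out.println(rg.getId() + " SM=" + rg.getSample()));
    }
}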
Use of htsjdk.samtools.SAMReadGroupRecord in project jvarkit by lindenb.
The class Biostar78400Test, method test01.
@Test
public void test01() throws IOException {
final String flowcell = "HS20001259127";
final String lane = "1";
final File in = createTmpFile(".bam");
SAMFileHeader header = new SAMFileHeader();
header.setSortOrder(SortOrder.unsorted);
SAMFileWriter sfw = new SAMFileWriterFactory().makeBAMWriter(header, true, in);
DefaultSAMRecordFactory recfactory = new DefaultSAMRecordFactory();
SAMRecord rec = recfactory.createSAMRecord(header);
rec.setReadName(flowcell + ":" + lane + ":1210:15640:52255");
rec.setReadString("GAATTC");
rec.setBaseQualityString("222222");
SAMUtils.makeReadUnmapped(rec);
sfw.addAlignment(rec);
sfw.close();
assertIsValidBam(in);
final File xml = createTmpFile(".xml");
PrintWriter pw = new PrintWriter(xml);
pw.println("<?xml version=\"1.0\"?><read-groups>" + "<flowcell name=\"" + flowcell + "\"><lane index=\"" + lane + "\">" + "<group ID=\"X1\"><library>L1</library><platform>P1</platform>" + "<sample>S1</sample><platformunit>PU1</platformunit>" + "<center>C1</center><description>blabla</description></group>" + "</lane></flowcell><flowcell name=\"HS20001259128\">" + "<lane index=\"2\"><group ID=\"x2\"><library>L2</library>" + "<platform>P2</platform><sample>S2</sample><platformunit>PU1</platformunit>" + "<center>C1</center><description>blabla</description></group></lane>" + "</flowcell></read-groups>");
pw.flush();
pw.close();
assertIsXml(xml);
final File out = createTmpFile(".bam");
Assert.assertEquals(new Biostar78400().instanceMain(newCmd().add("-o").add(out).add("-x").add(xml).add(in).make()), 0);
SamReader r = SamReaderFactory.makeDefault().validationStringency(ValidationStringency.LENIENT).open(out);
Assert.assertTrue(r.getFileHeader() != null);
Assert.assertTrue(r.getFileHeader().getReadGroups() != null);
Assert.assertFalse(r.getFileHeader().getReadGroups().isEmpty());
SAMRecordIterator iter = r.iterator();
Assert.assertTrue(iter.hasNext());
rec = iter.next();
SAMReadGroupRecord rg = rec.getReadGroup();
Assert.assertNotNull(rg);
Assert.assertEquals(rg.getId(), "X1");
Assert.assertEquals(rg.getSample(), "S1");
Assert.assertFalse(iter.hasNext());
iter.close();
r.close();
}
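The assertions at the end show the usual read-back check: SAMRecord.getReadGroup() resolves the record's RG tag against the file header and returns the matching SAMReadGroupRecord, whose ID and SM can then be compared with what was written. Here is a minimal sketch of that verification outside the test framework; the BAM path taken from args[0] is illustrative.

import htsjdk.samtools.SAMReadGroupRecord;
import htsjdk.samtools.SAMRecord;
import htsjdk.samtools.SamReader;
import htsjdk.samtools.SamReaderFactory;
import htsjdk.samtools.ValidationStringency;

import java.io.File;
import java.io.IOException;

public final class CheckReadGroupAssignment {
    public static void main(final String[] args) throws IOException {
        // args[0] is a hypothetical BAM produced by a tool such as Biostar78400
        try (SamReader r = SamReaderFactory.makeDefault()
                .validationStringency(ValidationStringency.LENIENT)
                .open(new File(args[0]))) {
            for (final SAMRecord rec : r) {
                final SAMReadGroupRecord rg = rec.getReadGroup(); // null if the record has no RG tag
                System.out.println(rec.getReadName() + " -> "
                        + (rg == null ? "no read group" : rg.getId() + "/" + rg.getSample()));
            }
        }
    }
}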
Use of htsjdk.samtools.SAMReadGroupRecord in project jvarkit by lindenb.
The class BlastToSam, method convertIterationToSequenceIteration.
private SequenceIteration convertIterationToSequenceIteration(final List<Iteration> stack, final SAMFileHeader header) throws XMLStreamException, JAXBException {
final SequenceIteration sequenceIteration = new SequenceIteration();
if (stack.isEmpty())
return sequenceIteration;
final SAMReadGroupRecord rg1 = header.getReadGroup("g1");
// sequenceIteration.iteration=iter1;
final SAMRecordFactory samRecordFactory = new DefaultSAMRecordFactory();
final StringBuilder readContent = new StringBuilder();
final int iterLength = Integer.parseInt(stack.get(0).getIterationQueryLen());
for (final Iteration iter1 : stack) {
for (final Hit hit : iter1.getIterationHits().getHit()) {
for (final Hsp hsp : hit.getHitHsps().getHsp()) {
for (final BlastHspAlignment.Align a : new BlastHspAlignment(hsp)) {
char c = a.getQueryChar();
if (!Character.isLetter(c))
continue;
final int queryIndex0 = a.getQueryIndex1() - 1;
while (readContent.length() <= queryIndex0) readContent.append('N');
if (readContent.charAt(queryIndex0) == 'N') {
readContent.setCharAt(queryIndex0, c);
} else if (readContent.charAt(queryIndex0) != c) {
throw new IllegalStateException("Expected character '" + readContent.charAt(queryIndex0) + "' but got '" + c + "' at " + queryIndex0 + "\n" + hsp.getHspQseq() + "\n" + hsp.getHspMidline() + "\n" + hsp.getHspHseq() + "\n" + readContent + "\n");
}
}
}
}
}
for (Iteration iter1 : stack) {
for (Hit hit : iter1.getIterationHits().getHit()) {
for (Hsp hsp : hit.getHitHsps().getHsp()) {
SAMRecord rec = samRecordFactory.createSAMRecord(header);
rec.setReadUnmappedFlag(false);
rec.setReadName(iter1.getIterationQueryDef());
if (hit.getHitAccession() != null && !hit.getHitAccession().trim().isEmpty() && this.dictionary.getSequence(hit.getHitAccession()) != null) {
rec.setReferenceName(hit.getHitAccession());
} else {
rec.setReferenceName(hit.getHitDef());
}
final SAMSequenceRecord ssr = this.dictionary.getSequence(hit.getHitDef());
if (ssr == null) {
LOG.warn("Hit is not in SAMDictionary " + hit.getHitDef());
rec.setReferenceIndex(-1);
} else {
rec.setReferenceIndex(ssr.getSequenceIndex());
}
final BlastHspAlignment blastHspAlignment = new BlastHspAlignment(hsp);
rec.setReadNegativeStrandFlag(blastHspAlignment.isPlusMinus());
final List<CigarOperator> cigarL = new ArrayList<CigarOperator>();
for (BlastHspAlignment.Align a : blastHspAlignment) {
// System.err.println("##"+a);
if (a.getMidChar() == '|') {
cigarL.add(CigarOperator.EQ);
} else if (a.getMidChar() == ':') {
cigarL.add(CigarOperator.M);
} else if (a.getHitChar() == '-') {
cigarL.add(CigarOperator.I);
} else if (a.getQueryChar() == '-') {
cigarL.add(CigarOperator.D);
} else {
cigarL.add(CigarOperator.X);
}
}
if (cigarL.size() != hsp.getHspMidline().length()) {
throw new IllegalStateException("Boumm");
}
Cigar cigarE = new Cigar();
if (blastHspAlignment.getQueryFrom1() > 1) {
cigarE.add(new CigarElement(blastHspAlignment.getQueryFrom1() - 1, CigarOperator.S));
}
int x = 0;
while (x < cigarL.size()) {
int y = x + 1;
while (y < cigarL.size() && cigarL.get(x) == cigarL.get(y)) {
++y;
}
cigarE.add(new CigarElement(y - x, cigarL.get(x)));
x = y;
}
/* soft clip */
if (blastHspAlignment.getQueryTo1() < readContent.length()) {
cigarE.add(new CigarElement((readContent.length() - blastHspAlignment.getQueryTo1()), CigarOperator.S));
}
/* hard clip */
if (readContent.length() < iterLength) {
cigarE.add(new CigarElement((iterLength - readContent.length()), CigarOperator.H));
}
rec.setCigar(cigarE);
rec.setMappingQuality(40);
rec.setAlignmentStart(Math.min(blastHspAlignment.getHitFrom1(), blastHspAlignment.getHitTo1()));
rec.setAttribute("BB", Float.parseFloat(hsp.getHspBitScore()));
rec.setAttribute("BE", Float.parseFloat(hsp.getHspEvalue()));
rec.setAttribute("BS", Float.parseFloat(hsp.getHspScore()));
rec.setAttribute("NM", Integer.parseInt(hsp.getHspGaps()));
rec.setAttribute("RG", rg1.getId());
// setAlignmentEnd not supported in SAM API
// rec.setAlignmentEnd(Math.max(blastHspAlignment.getHitFrom1(),blastHspAlignment.getHitTo1()));
sequenceIteration.records.add(rec);
}
}
}
if (readContent.length() == 0) {
readContent.append('N');
}
byte[] readBases = readContent.toString().getBytes();
char[] readQuals = new char[readBases.length];
for (int i = 0; i < readBases.length; ++i) {
readQuals[i] = (readBases[i] == 'N' ? '#' : 'J');
}
if (sequenceIteration.records.isEmpty()) {
SAMRecord rec = samRecordFactory.createSAMRecord(header);
rec.setReadName(stack.get(0).getIterationQueryDef());
rec.setReadUnmappedFlag(true);
rec.setAttribute("RG", rg1.getId());
sequenceIteration.records.add(rec);
}
for (SAMRecord rec : sequenceIteration.records) {
rec.setReadString(new String(readBases));
rec.setReadBases(readBases);
rec.setBaseQualityString(new String(readQuals, 0, readQuals.length));
rec.setBaseQualities(htsjdk.samtools.SAMUtils.fastqToPhred(new String(readQuals, 0, readQuals.length)));
}
return sequenceIteration;
}
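In BlastToSam the read group is not created in this method: it is fetched from the header by ID (header.getReadGroup("g1")) and its ID is stamped on every emitted record through the RG attribute, both on aligned records and on the unmapped fallback. Below is a minimal sketch of that lookup-and-tag pattern, assuming the htsjdk classes used above; the record construction in the sketch is illustrative, not the tool's actual wiring.

import htsjdk.samtools.DefaultSAMRecordFactory;
import htsjdk.samtools.SAMFileHeader;
import htsjdk.samtools.SAMReadGroupRecord;
import htsjdk.samtools.SAMRecord;

public final class TagWithExistingReadGroup {

    /** Builds an unmapped record and links it to the header's "g1" read group. */
    static SAMRecord newUnmappedRecord(final SAMFileHeader header, final String readName) {
        final SAMReadGroupRecord rg1 = header.getReadGroup("g1"); // previously registered on the header
        final SAMRecord rec = new DefaultSAMRecordFactory().createSAMRecord(header);
        rec.setReadName(readName);
        rec.setReadUnmappedFlag(true);
        rec.setAttribute("RG", rg1.getId()); // ties the record to the header's @RG line
        return rec;
    }

    public static void main(final String[] args) {
        final SAMFileHeader header = new SAMFileHeader();
        final SAMReadGroupRecord rg = new SAMReadGroupRecord("g1");
        rg.setSample("blast-query"); // illustrative sample name
        header.addReadGroup(rg);
        final SAMRecord rec = newUnmappedRecord(header, "query1");
        System.out.println(rec.getReadName() + " RG=" + rec.getAttribute("RG"));
    }
}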