Use of com.github.lindenb.jvarkit.util.picard.SAMSequenceDictionaryProgress in project jvarkit by lindenb.
The class BamToSql, method doWork:
@Override
public int doWork(List<String> args) {
if (this.faidxFile == null) {
LOG.error("ref sequence faidx not defined");
return -1;
}
SAMRecordIterator iter = null;
SamReader sfr = null;
PrintWriter out = null;
GenomicSequence genomicSequence = null;
IndexedFastaSequenceFile indexedFastaSequenceFile = null;
args = new ArrayList<String>(IOUtils.unrollFiles(args));
try {
out = super.openFileOrStdoutAsPrintWriter(this.outputFile);
indexedFastaSequenceFile = new IndexedFastaSequenceFile(this.faidxFile);
out.println("CREATE TABLE IF NOT EXISTS SamFile");
out.println("(");
out.println("id INTEGER PRIMARY KEY,");
out.println("filename TEXT");
out.println(");");
out.println("CREATE TABLE IF NOT EXISTS Dictionary");
out.println("(");
out.println("id INTEGER PRIMARY KEY,");
out.println("name TEXT NOT NULL,");
out.println("length INT NOT NULL,");
out.println("tid INT NOT NULL,");
out.println("samfile_id INT NOT NULL,");
out.println("FOREIGN KEY(samfile_id) REFERENCES SamFile(id)");
out.println(");");
out.println("CREATE TABLE IF NOT EXISTS ReadGroup");
out.println("(");
out.println("id INTEGER PRIMARY KEY,");
out.println("groupId TEXT NOT NULL,");
out.println("sample TEXT NOT NULL,");
out.println("samfile_id INT NOT NULL,");
out.println("FOREIGN KEY(samfile_id) REFERENCES SamFile(id)");
out.println(");");
out.println("CREATE TABLE IF NOT EXISTS Read");
out.println("(");
out.println("id INTEGER PRIMARY KEY,");
out.println("name TEXT NOT NULL,");
out.println("flag INTEGER NOT NULL,");
if (this.printflag) {
for (final SAMFlag flg : SAMFlag.values()) {
out.println(flg.name() + " INTEGER NOT NULL,");
}
}
out.println("rname TEXT,");
out.println("pos INTEGER,");
out.println("mapq INTEGER NOT NULL,");
out.println("cigar TEXT,");
out.println("rnext TEXT,");
out.println("pnext INTEGER,");
out.println("tlen INTEGER,");
out.println("sequence TEXT NOT NULL,");
out.println("qualities TEXT NOT NULL,");
out.println("samfile_id INT NOT NULL,");
out.println("group_id INT,");
out.println("FOREIGN KEY(samfile_id) REFERENCES SamFile(id),");
out.println("FOREIGN KEY(group_id) REFERENCES ReadGroup(id)");
out.println(");");
out.println("CREATE TABLE IF NOT EXISTS Cigar");
out.println("(");
out.println("id INTEGER PRIMARY KEY,");
out.println("read_pos INT ,");
out.println("read_base TEXT,");
out.println("read_qual INT ,");
out.println("ref_pos INT ,");
out.println("ref_base TEXT,");
out.println("operator TEXT NOT NULL,");
out.println("read_id INT NOT NULL,");
out.println("FOREIGN KEY(read_id) REFERENCES Read(id)");
out.println(");");
out.println("begin transaction;");
int samIndex = 0;
do {
final String inputName;
if (samIndex == 0 && args.isEmpty()) {
sfr = openSamReader(null);
inputName = "<stdin>";
} else {
inputName = args.get(samIndex);
sfr = openSamReader(inputName);
}
final SAMFileHeader header1 = sfr.getFileHeader();
if (header1 == null) {
throw new JvarkitException.FileFormatError("File header missing");
}
final SAMSequenceDictionary dict = header1.getSequenceDictionary();
if (dict == null) {
throw new JvarkitException.DictionaryMissing("No Dictionary in input");
}
final IntervalParser intervalParser = new IntervalParser(dict);
final Interval userInterval;
iter = null;
if (this.regionStr == null || this.regionStr.isEmpty()) {
LOG.warn("You're currently scanning the whole BAM ???!!!");
iter = sfr.iterator();
userInterval = null;
} else {
userInterval = intervalParser.parse(this.regionStr);
if (userInterval == null) {
throw new JvarkitException.UserError("cannot parse interval " + this.regionStr);
}
iter = sfr.query(userInterval.getContig(), userInterval.getStart(), userInterval.getEnd(), false);
}
out.println(String.join(" ", "insert into SamFile(filename) values(", quote(inputName), ");"));
for (int i = 0; i < dict.size(); ++i) {
final SAMSequenceRecord ssr = dict.getSequence(i);
out.println("insert into Dictionary(name,length,tid,samfile_id) select " + quote(inputName) + "," + ssr.getSequenceLength() + "," + i + ",max(id) from SamFile;");
}
for (final SAMReadGroupRecord g : header1.getReadGroups()) {
out.println("insert into ReadGroup(groupId,sample,samfile_id) select " + quote(g.getId()) + "," + quote(g.getSample()) + "," + "max(id) from SamFile;");
}
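// progress logger: watch() passes each record through while logging contig/position and throughput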
final SAMSequenceDictionaryProgress progress = new SAMSequenceDictionaryProgress(header1);
while (iter.hasNext()) {
final SAMRecord rec = progress.watch(iter.next());
final StringBuilder sql = new StringBuilder();
sql.append("insert into Read(" + "name,flag,");
if (this.printflag) {
for (final SAMFlag flg : SAMFlag.values()) {
sql.append(flg.name()).append(",");
}
}
sql.append("rname,pos,mapq,cigar,rnext,pnext,tlen,sequence,qualities,group_id,samfile_id) select ");
sql.append(quote(rec.getReadName())).append(",");
sql.append(rec.getFlags()).append(",");
if (this.printflag) {
for (final SAMFlag flg : SAMFlag.values()) {
sql.append(flg.isSet(rec.getFlags()) ? 1 : 0);
sql.append(",");
}
}
if (rec.getReferenceName() == null || rec.getReferenceName().equals(SAMRecord.NO_ALIGNMENT_REFERENCE_NAME)) {
sql.append("NULL,NULL");
} else {
sql.append(quote(rec.getReferenceName()));
sql.append(",");
sql.append(rec.getAlignmentStart());
}
sql.append(",");
sql.append(rec.getMappingQuality());
sql.append(",");
// cigar
if (rec.getCigarString() == null || rec.getCigarString().equals(SAMRecord.NO_ALIGNMENT_CIGAR)) {
sql.append("NULL");
} else {
sql.append(quote(rec.getCigarString()));
}
sql.append(",");
// rnext
if (rec.getMateReferenceName() == null || rec.getMateReferenceName().equals(SAMRecord.NO_ALIGNMENT_REFERENCE_NAME)) {
sql.append("NULL,NULL");
} else {
sql.append(quote(rec.getMateReferenceName()));
sql.append(",");
sql.append(rec.getMateAlignmentStart());
}
sql.append(",");
// tlen
sql.append(rec.getInferredInsertSize());
sql.append(",");
// sequence
sql.append(quote(rec.getReadString()));
sql.append(",");
// qualities
sql.append(quote(rec.getBaseQualityString()));
sql.append(",");
if (rec.getReadGroup() == null) {
sql.append("NULL");
} else {
sql.append("G.id");
}
sql.append(",F.id FROM SamFile as F");
if (rec.getReadGroup() != null) {
sql.append(" , ReadGroup as G where G.groupId=").append(quote(rec.getReadGroup().getId())).append(" and F.id = G.samfile_id ");
}
sql.append(" ORDER BY F.id DESC LIMIT 1;");
out.println(sql.toString());
if (this.printcigar && !rec.getReadUnmappedFlag() && rec.getCigar() != null) {
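// cache the reference contig in memory; reload only when the record jumps to another chromosome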
if (genomicSequence == null || !genomicSequence.getChrom().equals(rec.getReferenceName())) {
genomicSequence = new GenomicSequence(indexedFastaSequenceFile, rec.getReferenceName());
}
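// start at the unclipped position so leading soft/hard clips line up with reference coordinates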
int ref = rec.getUnclippedStart();
final byte[] bases = rec.getReadBases();
final byte[] quals = rec.getBaseQualities();
int read = 0;
for (final CigarElement ce : rec.getCigar()) {
final CigarOperator op = ce.getOperator();
if (op.equals(CigarOperator.P))
continue;
for (int i = 0; i < ce.getLength(); ++i) {
sql.setLength(0);
boolean in_user_interval = true;
sql.append("insert into Cigar(operator,read_pos,read_base,read_qual,ref_pos,ref_base,read_id) ");
sql.append("select '");
sql.append(op.name());
sql.append("',");
if (userInterval != null && !(rec.getReferenceName().equals(userInterval.getContig()) && ref >= userInterval.getStart() && ref <= userInterval.getEnd())) {
in_user_interval = false;
}
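// out-of-interval bases are still walked (the read/ref offsets must advance) but their rows are suppressed below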
switch(op) {
case I:
{
sql.append(read);
sql.append(",");
sql.append("'" + (char) bases[read] + "',");
sql.append("" + quals[read] + "");
sql.append(",");
sql.append("NULL,NULL");
read++;
break;
}
case D:
case N:
case H: // yes, H (hard clip)
{
sql.append("NULL,NULL,NULL,");
sql.append(ref);
sql.append(",'");
sql.append((ref < 1 || ref - 1 >= genomicSequence.length()) ? '*' : genomicSequence.charAt(ref - 1));
sql.append("'");
ref++;
break;
}
case M:
case X:
case EQ:
case S: // yes, S (soft clip)
{
sql.append(read);
sql.append(",");
sql.append("'" + (char) bases[read] + "',");
sql.append("" + quals[read] + "");
sql.append(",");
sql.append(ref);
sql.append(",'");
sql.append((ref < 1 || ref - 1 >= genomicSequence.length()) ? '*' : genomicSequence.charAt(ref - 1));
sql.append("'");
ref++;
read++;
break;
}
default:
throw new IllegalStateException();
}
sql.append(", id from Read ORDER BY id DESC LIMIT 1;");
if (in_user_interval)
out.println(sql.toString());
}
}
}
}
iter.close();
iter = null;
sfr.close();
sfr = null;
progress.finish();
samIndex++;
} while (samIndex < args.size());
out.println("COMMIT;");
out.flush();
out.close();
LOG.info("done");
return 0;
} catch (Exception err) {
LOG.error(err);
return -1;
} finally {
CloserUtil.close(iter);
CloserUtil.close(sfr);
CloserUtil.close(out);
CloserUtil.close(indexedFastaSequenceFile);
}
}
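All five snippets on this page share one idiom: build a SAMSequenceDictionaryProgress from the input header, pass every record through watch(), and call finish() when the stream is exhausted. A minimal sketch of that pattern in isolation, assuming an open htsjdk SamReader named reader and a jvarkit Logger named LOG (both assumptions, not part of the snippet above):
import com.github.lindenb.jvarkit.util.picard.SAMSequenceDictionaryProgress;
import htsjdk.samtools.SAMRecord;
import htsjdk.samtools.SAMRecordIterator;
import htsjdk.samtools.SamReader;
void scan(final SamReader reader) {
final SAMSequenceDictionaryProgress progress = new SAMSequenceDictionaryProgress(reader.getFileHeader()).logger(LOG);
try (SAMRecordIterator iter = reader.iterator()) {
while (iter.hasNext()) {
final SAMRecord rec = progress.watch(iter.next()); // logs contig/position throughput as a side effect
// ... process rec ...
}
}
progress.finish(); // emits the final record count
}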
Use of com.github.lindenb.jvarkit.util.picard.SAMSequenceDictionaryProgress in project jvarkit by lindenb.
The class SamAddPI, method doWork:
@Override
public int doWork(final List<String> args) {
final Map<String, List<Integer>> rg2insertsize = new HashMap<>();
SamReader sfr = null;
SamReader sfrTmp = null;
SAMFileWriter sfw = null;
File tmpBam = null;
SAMFileWriter tmpBamWriter = null;
SAMFileWriter outWriter = null;
CloseableIterator<SAMRecord> iter = null;
CloseableIterator<SAMRecord> iterTmp = null;
try {
sfr = openSamReader(oneFileOrNull(args));
SAMFileHeader header = sfr.getFileHeader();
for (final SAMReadGroupRecord rg : header.getReadGroups()) {
if (!overwrite_existing && rg.getPredictedMedianInsertSize() != null) {
continue;
}
rg2insertsize.put(rg.getId(), new ArrayList<>((int) (num_read_to_test < 1L ? 10000 : num_read_to_test)));
}
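// two-pass strategy: spool the first reads to a temporary BAM while collecting insert sizes, then re-emit them under the patched header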
tmpBam = File.createTempFile("__addpi", ".bam");
tmpBamWriter = this.writingBamArgs.openSAMFileWriter(tmpBam, header, true);
iter = sfr.iterator();
int n_processed = 0;
final SAMSequenceDictionaryProgress progress = new SAMSequenceDictionaryProgress(header).logger(LOG);
while (iter.hasNext() && (this.num_read_to_test < 0 || n_processed < this.num_read_to_test)) {
final SAMRecord rec = progress.watch(iter.next());
tmpBamWriter.addAlignment(rec);
final SAMReadGroupRecord rg = rec.getReadGroup();
final List<Integer> insertlist = rg == null ? null : rg2insertsize.get(rg.getId());
if (insertlist == null)
continue;
if (rec.getReadUnmappedFlag())
continue;
if (!rec.getReadPairedFlag())
continue;
if (!rec.getFirstOfPairFlag())
continue;
if (rec.getMateUnmappedFlag())
continue;
if (this.samRecordFilter.filterOut(rec))
continue;
final int len = rec.getInferredInsertSize();
if (len == 0)
continue;
insertlist.add(Math.abs(len));
++n_processed;
}
tmpBamWriter.close();
tmpBamWriter = null;
// reopen tmp file
sfrTmp = super.createSamReaderFactory().open(tmpBam);
iterTmp = sfrTmp.iterator();
// update dMedianInsertSize
for (final SAMReadGroupRecord rg : header.getReadGroups()) {
final List<Integer> insertlist = rg2insertsize.get(rg.getId());
if (insertlist == null || insertlist.isEmpty())
continue;
rg.setPredictedMedianInsertSize((int) Percentile.median().evaluate(insertlist.stream().mapToDouble(I -> I.doubleValue())));
}
header.addComment("Processed with " + getClass().getSimpleName() + " " + getProgramCommandLine());
outWriter = this.writingBamArgs.openSAMFileWriter(this.outputFile, header, true);
while (iterTmp.hasNext()) {
outWriter.addAlignment(iterTmp.next());
}
iterTmp.close();
iterTmp = null;
sfrTmp.close();
sfrTmp = null;
tmpBam.delete();
// finish writing original input
while (iter.hasNext()) {
outWriter.addAlignment(progress.watch(iter.next()));
}
progress.finish();
iter.close();
iter = null;
sfr.close();
sfr = null;
outWriter.close();
return RETURN_OK;
} catch (Exception err) {
LOG.error(err);
return -1;
} finally {
CloserUtil.close(tmpBamWriter);
if (tmpBam != null)
tmpBam.delete();
CloserUtil.close(iterTmp);
CloserUtil.close(sfrTmp);
CloserUtil.close(iter);
CloserUtil.close(outWriter);
CloserUtil.close(sfr);
CloserUtil.close(sfw);
}
}
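The median passed to setPredictedMedianInsertSize comes from jvarkit's Percentile helper; the same number can be computed with plain Java. A minimal sketch (medianInsertSize is a hypothetical helper, not part of jvarkit):
// hypothetical helper: median of a non-empty list of absolute insert sizes
static int medianInsertSize(final java.util.List<Integer> sizes) {
final int[] a = sizes.stream().mapToInt(Integer::intValue).sorted().toArray();
final int mid = a.length / 2;
return (a.length % 2 == 1) ? a[mid] : (a[mid - 1] + a[mid]) / 2;
}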
Use of com.github.lindenb.jvarkit.util.picard.SAMSequenceDictionaryProgress in project jvarkit by lindenb.
The class SamClipIndelFraction, method doWork:
@Override
public int doWork(final List<String> args) {
SamReader sfr = null;
SAMRecordIterator iter = null;
PrintWriter pw = null;
try {
sfr = openSamReader(oneFileOrNull(args));
pw = super.openFileOrStdoutAsPrintWriter(outputFile);
long total_bases_count = 0L;
long count_clipped_reads = 0L;
long count_clipped_left_reads = 0L;
long count_clipped_right_reads = 0L;
long count_unclipped_reads = 0L;
long count_unmapped_reads = 0L;
SAMSequenceDictionaryProgress progress = new SAMSequenceDictionaryProgress(sfr.getFileHeader()).logger(LOG);
Counter<Integer> counter = new Counter<>();
iter = sfr.iterator();
while (iter.hasNext()) {
final SAMRecord record = progress.watch(iter.next());
if (record.getReadUnmappedFlag()) {
++count_unmapped_reads;
continue;
}
final Cigar cigar = record.getCigar();
int left_clip_length = 0;
int right_clip_length = 0;
int deletion_N_length = 0;
int deletion_D_length = 0;
int insertion_length = 0;
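// a clip counts as "left" until the first non-clip operator is seen; strand is used below to map left/right onto 5'/3'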
boolean onLeft = true;
for (int i = 0; i < cigar.numCigarElements(); ++i) {
final CigarElement ce = cigar.getCigarElement(i);
final CigarOperator op = ce.getOperator();
switch(op) {
case N:
{
onLeft = false;
deletion_N_length += ce.getLength();
total_bases_count += ce.getLength();
break;
}
case D:
{
onLeft = false;
deletion_D_length += ce.getLength();
total_bases_count += ce.getLength();
break;
}
case I:
{
onLeft = false;
insertion_length += ce.getLength();
total_bases_count += ce.getLength();
break;
}
case S:
case H:
{
if (onLeft) {
if (record.getReadNegativeStrandFlag()) {
right_clip_length += ce.getLength();
} else {
left_clip_length += ce.getLength();
}
} else {
if (record.getReadNegativeStrandFlag()) {
left_clip_length += ce.getLength();
} else {
right_clip_length += ce.getLength();
}
}
total_bases_count += ce.getLength();
break;
}
default:
{
onLeft = false;
if (op.consumesReadBases()) {
total_bases_count += ce.getLength();
}
break;
}
}
}
if (left_clip_length + right_clip_length == 0) {
count_unclipped_reads++;
} else {
if (left_clip_length > 0)
count_clipped_left_reads++;
if (right_clip_length > 0)
count_clipped_right_reads++;
count_clipped_reads++;
}
switch(type) {
case leftclip:
counter.incr(left_clip_length);
break;
case rightclip:
counter.incr(right_clip_length);
break;
case allclip:
counter.incr(left_clip_length + right_clip_length);
break;
case deletion:
counter.incr(deletion_D_length + deletion_N_length);
break;
case insert:
counter.incr(insertion_length);
break;
default:
LOG.error("Bad type: " + type);
return -1;
}
}
progress.finish();
pw.println("##UNMAPPED_READS=" + count_unmapped_reads);
pw.println("##MAPPED_READS=" + (count_clipped_reads + count_unclipped_reads));
pw.println("##CLIPPED_READS=" + count_clipped_reads);
pw.println("##CLIPPED_READS_5_PRIME=" + count_clipped_left_reads);
pw.println("##CLIPPED_READS_3_PRIME=" + count_clipped_right_reads);
pw.println("##UNCLIPPED_READS=" + count_unclipped_reads);
pw.println("##COUNT_BASES=" + total_bases_count);
pw.print("#");
switch(type) {
case leftclip:
pw.print("CLIP_5_PRIME");
break;
case rightclip:
pw.print("CLIP_3_PRIME");
break;
case allclip:
pw.print("CLIP");
break;
case deletion:
pw.print("DELETION");
break;
case insert:
pw.print("INSERTION");
break;
default:
LOG.error("Bad type: " + type);
return -1;
}
pw.println("\tCOUNT\tFRACTION_OF_MAPPED_READS");
for (final Integer size : new TreeSet<Integer>(counter.keySet())) {
pw.print(size);
pw.print('\t');
pw.print(counter.count(size));
pw.print('\t');
pw.println(counter.count(size) / (double) (count_clipped_reads + count_unclipped_reads));
}
pw.flush();
pw.close();
pw = null;
return 0;
} catch (final Exception err) {
LOG.error(err);
return -1;
} finally {
CloserUtil.close(iter);
CloserUtil.close(sfr);
CloserUtil.close(pw);
}
}
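The subtle point in the loop above is the strand bookkeeping: a clip that precedes the first aligned base of a reverse-strand alignment is reported as a 3' clip, not a 5' one. The same rule condensed into a hypothetical helper:
// hypothetical helper: does a clip at this end of the alignment sit at the read's 5' end?
static boolean isFivePrimeClip(final boolean clipAtAlignmentStart, final boolean negativeStrand) {
// forward strand: alignment start == read 5' end; reverse strand: the two ends swap
return clipAtAlignmentStart != negativeStrand;
}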
Use of com.github.lindenb.jvarkit.util.picard.SAMSequenceDictionaryProgress in project jvarkit by lindenb.
The class ConcatSam, method doWork:
@Override
public int doWork(final List<String> args) {
SAMFileWriter out = null;
ConcatSamIterator iter = null;
try {
final Factory factory = new Factory().setConcatenate(!this.merging);
if (!StringUtil.isBlank(this.region_str)) {
factory.addInterval(region_str);
}
iter = factory.open(args);
out = this.writingBamArgs.openSAMFileWriter(outputFile, iter.getFileHeader(), true);
final SAMSequenceDictionaryProgress progress = new SAMSequenceDictionaryProgress(iter.getFileHeader()).logger(LOG);
while (iter.hasNext()) {
out.addAlignment(progress.watch(iter.next()));
}
iter.close();
iter = null;
out.close();
out = null;
progress.finish();
return RETURN_OK;
} catch (final Exception err) {
LOG.error(err);
return -1;
} finally {
CloserUtil.close(out);
CloserUtil.close(iter);
}
}
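A minimal invocation sketch, using only the factory calls visible above (file names and the interval are hypothetical):
final Factory factory = new Factory().setConcatenate(true); // append inputs one after another instead of merge-sorting
factory.addInterval("chr1:100-200"); // optional: restrict to one region
final ConcatSamIterator iter = factory.open(java.util.Arrays.asList("a.bam", "b.bam"));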
Use of com.github.lindenb.jvarkit.util.picard.SAMSequenceDictionaryProgress in project jvarkit by lindenb.
The class DownSampleVcf, method doVcfToVcf:
@Override
protected int doVcfToVcf(final String inputName, final VcfIterator in, final VariantContextWriter out) {
final Random rand = new Random(this.seed == -1L ? System.currentTimeMillis() : this.seed);
final List<VariantContext> buffer = new ArrayList<>(this.reservoir_size);
final VCFHeader h2 = new VCFHeader(in.getHeader());
super.addMetaData(h2);
final SAMSequenceDictionaryProgress progress = new SAMSequenceDictionaryProgress(in.getHeader()).logger(LOG);
out.writeHeader(h2);
if (this.reservoir_size != 0) {
long nSeen = 0L;
while (in.hasNext()) {
final VariantContext ctx = progress.watch(in.next());
++nSeen;
if (buffer.size() < this.reservoir_size) {
buffer.add(ctx);
} else {
// reservoir sampling (Algorithm R): the i-th record replaces a random slot with probability reservoir_size/i
final long j = (long) (rand.nextDouble() * nSeen);
if (j < buffer.size()) {
buffer.set((int) j, ctx);
}
}
}
}
buffer.stream().forEach(V -> out.add(V));
progress.finish();
return 0;
}
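For reference, the sampling loop above is Algorithm R; here it is isolated into a generic, self-contained sketch:
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Random;
// Algorithm R: keep k items uniformly at random from a stream of unknown length
static <T> List<T> reservoir(final Iterator<T> stream, final int k, final Random rnd) {
final List<T> buf = new ArrayList<>(k);
long seen = 0L;
while (stream.hasNext()) {
final T item = stream.next();
++seen;
if (buf.size() < k) {
buf.add(item);
} else {
final long j = (long) (rnd.nextDouble() * seen); // uniform in [0, seen)
if (j < k) {
buf.set((int) j, item); // the i-th item enters with probability k/i
}
}
}
return buf;
}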