Example usage of htsjdk.samtools.SamReaderFactory in project ASCIIGenome (by dariober): the Utils class, method bamHasIndex.
// private static String initRegionFromUcscGenePredSource(String x) throws ClassNotFoundException, IOException, InvalidCommandLineException, InvalidGenomicCoordsException, InvalidRecordException, SQLException {
//
// String xfile= new UcscGenePred(x, 1).getTabixFile();
// GZIPInputStream gzipStream;
// InputStream fileStream = new FileInputStream(xfile);
// gzipStream = new GZIPInputStream(fileStream);
// Reader decoder = new InputStreamReader(gzipStream, "UTF-8");
// BufferedReader br = new BufferedReader(decoder);
// String line= br.readLine();
// br.close();
// List<String> xlist= Lists.newArrayList(Splitter.on("\t").split(line));
// String region= xlist.get(0) + ":" + xlist.get(3) + "-" + xlist.get(4);
// return region;
// }
/**
 * Checks whether a BAM file (local path or URL) has an associated index.
 *
 * <p>For a URL, the index is expected to live alongside the file as
 * {@code <bam>.bai}; for a local file, htsjdk locates the index itself.
 *
 * @param bam local file path or URL of the BAM file
 * @return true if the reader reports an index, false otherwise
 * @throws IOException if the BAM (or its index) cannot be opened or closed
 */
public static boolean bamHasIndex(String bam) throws IOException {
    UrlValidator urlValidator = new UrlValidator();
    // Single factory with SILENT stringency used for BOTH branches.
    // (The previous code built this factory but then used
    // SamReaderFactory.makeDefault() for URLs, silently dropping the
    // SILENT setting for remote files.)
    SamReaderFactory srf = SamReaderFactory.make();
    srf.validationStringency(ValidationStringency.SILENT);
    // try-with-resources guarantees the reader is closed even if
    // hasIndex() throws; the previous code leaked it on that path.
    try (SamReader samReader = urlValidator.isValid(bam)
            ? srf.open(SamInputResource.of(new URL(bam)).index(new URL(bam + ".bai")))
            : srf.open(new File(bam))) {
        return samReader.hasIndex();
    }
}
use of htsjdk.samtools.SamReaderFactory in project polyGembler by c-zhou.
the class SAMtools method run.
@Override
public void run() {
    // Copy up to the first 100 records of the input SAM/BAM into a new
    // file, tagging the header and every copied record with read group
    // "cz1" and marking the output as unsorted.
    final SamReaderFactory factory = SamReaderFactory.makeDefault()
            .enable(SamReaderFactory.Option.INCLUDE_SOURCE_IN_RECORDS,
                    SamReaderFactory.Option.VALIDATE_CRC_CHECKSUMS)
            .validationStringency(ValidationStringency.SILENT);
    // try-with-resources: the previous code closed the reader only on the
    // happy path and never closed the iterator; the writer leaked if an
    // exception escaped the copy loop.
    try (final SamReader inputSam = factory.open(new File(mySamFile))) {
        samHeader = inputSam.getFileHeader();
        samHeader.setSortOrder(SortOrder.unsorted);
        SAMReadGroupRecord rg = new SAMReadGroupRecord("cz1");
        rg.setSample("cz1");
        samHeader.addReadGroup(rg);
        SAMRecordIterator iter = inputSam.iterator();
        try (final SAMFileWriter outSam =
                new SAMFileWriterFactory().makeSAMOrBAMWriter(samHeader, true, new File(myOutput))) {
            // Guard with hasNext(): the previous code called next()
            // unconditionally 100 times and threw NoSuchElementException
            // on inputs with fewer than 100 records.
            for (int i = 0; i < 100 && iter.hasNext(); i++) {
                SAMRecord record = iter.next();
                record.setAttribute("RG", "cz1");
                outSam.addAlignment(record);
            }
        } finally {
            iter.close();
        }
    } catch (IOException e) {
        e.printStackTrace();
    }
    myLogger.info("exit...");
}
Example usage of htsjdk.samtools.SamReaderFactory in project polyGembler (by c-zhou): the SamFileSplit class, method run.
@Override
public void run() {
// Split each input BAM into one BAM per BED file: a read whose reference
// sequence is listed in bed file i is written (with a remapped reference
// index) to out_prefix[i]/<input bam name>. One worker task is submitted
// to the shared executor per input BAM.
Utils.makeOutputDir(bam_out);
// One output sub-directory per BED file, named after the BED minus ".bed".
final File[] beds = new File(bed_in).listFiles();
final String[] out_prefix = new String[beds.length];
for (int i = 0; i < beds.length; i++) {
out_prefix[i] = bam_out + "/" + beds[i].getName().replaceAll(".bed$", "");
Utils.makeOutputDir(out_prefix[i]);
}
// Collect all *.bam files in the input directory.
final File[] bams = new File(bam_in).listFiles(new FilenameFilter() {
@Override
public boolean accept(File dir, String name) {
return name.endsWith(".bam");
}
});
this.initial_thread_pool();
for (File bam : bams) {
// The .init(bam) idiom below gives each anonymous Runnable its own
// effectively-final copy of the loop variable.
executor.submit(new Runnable() {
private File bam;
@Override
public void run() {
try {
final SamReaderFactory factory = SamReaderFactory.makeDefault().enable(SamReaderFactory.Option.INCLUDE_SOURCE_IN_RECORDS, SamReaderFactory.Option.VALIDATE_CRC_CHECKSUMS).validationStringency(ValidationStringency.SILENT);
final SamReader inputSam = factory.open(bam);
final SAMFileHeader header = inputSam.getFileHeader();
final SAMRecordIterator iter = inputSam.iterator();
final SAMSequenceDictionary seqdic = header.getSequenceDictionary();
// Per-BED writer, sequence dictionary, and reference-name -> BED-index map.
final SAMFileWriter[] outputSam = new SAMFileWriter[beds.length];
final SAMSequenceDictionary[] seqdics = new SAMSequenceDictionary[beds.length];
final Map<String, Integer> outMap = new HashMap<String, Integer>();
final String out = bam.getName();
for (int i = 0; i < beds.length; i++) {
// First column of each BED line is the reference sequence name.
Set<String> bed_seq = new HashSet<String>();
String tmp;
// NOTE(review): br / inputSam / outputSam[] are not closed if an
// exception is thrown before the normal close calls below; the
// catch block exits the JVM, so the leak is bounded but real.
BufferedReader br = new BufferedReader(new FileReader(beds[i]));
String line;
while ((line = br.readLine()) != null) {
tmp = line.split("\\s+")[0];
bed_seq.add(tmp);
outMap.put(tmp, i);
}
br.close();
// Build a reduced header whose dictionary contains only the
// sequences named in this BED file; read groups and program
// records are copied over verbatim.
final SAMFileHeader header_i = new SAMFileHeader();
final SAMSequenceDictionary seqdic_i = new SAMSequenceDictionary();
header_i.setAttribute("VN", header.getAttribute("VN"));
header_i.setAttribute("SO", header.getAttribute("SO"));
List<SAMSequenceRecord> seqs = seqdic.getSequences();
for (SAMSequenceRecord seq : seqs) if (bed_seq.contains(seq.getSequenceName()))
seqdic_i.addSequence(seq);
header_i.setSequenceDictionary(seqdic_i);
for (SAMReadGroupRecord rg : header.getReadGroups()) header_i.addReadGroup(rg);
for (SAMProgramRecord pg : header.getProgramRecords()) header_i.addProgramRecord(pg);
outputSam[i] = new SAMFileWriterFactory().makeSAMOrBAMWriter(header_i, true, new File(out_prefix[i] + "/" + out));
seqdics[i] = seqdic_i;
}
Set<String> refs = outMap.keySet();
String ref;
int f;
// Route each record to the writer for its reference; reads whose
// reference appears in no BED file are dropped. The reference
// index must be remapped because each output has its own
// (reduced) sequence dictionary.
while (iter.hasNext()) {
SAMRecord rec = iter.next();
if (refs.contains(ref = rec.getReferenceName())) {
f = outMap.get(ref);
rec.setReferenceIndex(seqdics[f].getSequenceIndex(ref));
outputSam[f].addAlignment(rec);
}
}
iter.close();
inputSam.close();
for (int i = 0; i < outputSam.length; i++) outputSam[i].close();
myLogger.info(out + " return true");
} catch (Exception e) {
// Fatal: report through the thread's handler, stop accepting
// work and kill the JVM.
Thread t = Thread.currentThread();
t.getUncaughtExceptionHandler().uncaughtException(t, e);
e.printStackTrace();
executor.shutdown();
System.exit(1);
}
}
public Runnable init(File bam) {
this.bam = bam;
return (this);
}
}.init(bam));
}
this.waitFor();
}
Example usage of htsjdk.samtools.SamReaderFactory in project polyGembler (by c-zhou): the Anchor class, method run.
@Override
public void run() {
// Anchor query (assembly) sequences onto subject (reference) sequences:
// 1) load the assembly graph (GFA) plus query and subject sequence sets;
// 2) index buffered 'N'-gap regions of each subject sequence;
// 3) read the SAM/BAM alignments, grouping alignment segments per query;
// 4) for each subject sequence, grow a local overlap graph ("razor") from
//    each unplaced segment and trace the highest-scoring path through it.
// read assembly graph file
final GFA gfa = new GFA(query_file, asm_graph);
qry_seqs = gfa.getSequenceMap();
sub_seqs = Sequence.parseFastaFileAsMap(subject_file);
myLogger.info(" GFA vertices: " + gfa.vertexSet().size());
myLogger.info(" GFA edges : " + gfa.edgeSet().size());
// myLogger.info(" GFA edges --- ");
// for(OverlapEdge olap : gfa.edgeSet())
// myLogger.info(olap.olapInfo().toString());
// find 'N/n's in subject/reference sequences
// which could have impact on parsing the blast records
sub_gaps = new HashMap<String, TreeRangeSet<Integer>>();
for (Map.Entry<String, Sequence> entry : sub_seqs.entrySet()) {
String seq_sn = entry.getKey();
String seq_str = entry.getValue().seq_str();
// Collect each 'N'/'n' position as a 1-based closed singleton range;
// TreeRangeSet coalesces adjacent positions into runs.
final TreeRangeSet<Integer> tmp_rangeSet = TreeRangeSet.create();
for (int j = 0; j < seq_str.length(); j++) {
if (seq_str.charAt(j) == 'N' || seq_str.charAt(j) == 'n')
// blast record is 1-based closed coordination
tmp_rangeSet.add(Range.closed(j + 1, j + 1).canonical(DiscreteDomain.integers()));
}
int seq_ln = seq_str.length();
// Pad every gap run by gap_buff on both sides (clamped to the sequence
// bounds) so alignments near a gap are also treated as gap-adjacent.
final TreeRangeSet<Integer> range_set = TreeRangeSet.create();
for (Range<Integer> range : tmp_rangeSet.asRanges()) {
int lowerend = range.hasLowerBound() ? Math.max(0, range.lowerEndpoint() - gap_buff) : 0;
int upperend = range.hasUpperBound() ? Math.min(seq_ln, range.upperEndpoint() + gap_buff - 1) : seq_ln;
range_set.add(Range.closed(lowerend, upperend).canonical(DiscreteDomain.integers()));
}
sub_gaps.put(seq_sn, range_set);
}
// read alignment file and place the query sequences
// initPlace: query id (forward) or query id + "'" (reverse) -> candidate segments.
// initPseudoAssembly: subject id -> every segment aligned to that subject.
final Map<String, Set<SAMSegment>> initPlace = new HashMap<String, Set<SAMSegment>>();
final Map<String, List<SAMSegment>> initPseudoAssembly = new HashMap<String, List<SAMSegment>>();
for (String sub_seq : sub_seqs.keySet()) initPseudoAssembly.put(sub_seq, new ArrayList<SAMSegment>());
try {
final SamReaderFactory factory = SamReaderFactory.makeDefault().enable(SamReaderFactory.Option.INCLUDE_SOURCE_IN_RECORDS, SamReaderFactory.Option.VALIDATE_CRC_CHECKSUMS).validationStringency(ValidationStringency.SILENT);
final SamReader in1 = factory.open(new File(align_file));
final SAMRecordIterator iter1 = in1.iterator();
String qry;
int qry_ln;
double min_aln;
final List<SAMSegment> buff = new ArrayList<SAMSegment>();
// NOTE(review): this loop assumes next() returns null at end-of-file;
// the standard Iterator contract throws NoSuchElementException instead —
// verify htsjdk's behavior. An empty alignment file would also fail on
// this unconditional first next().
SAMRecord rc = iter1.next();
while (rc != null) {
qry = rc.getReadName();
qry_ln = qry_seqs.get(qry).seq_ln();
buff.clear();
if (!rc.getReadUnmappedFlag())
buff.add(SAMSegment.samRecord(rc, true, qry_ln));
// Gather all consecutive records for the same read name (the file is
// assumed to be grouped/sorted by read name — TODO confirm upstream).
while ((rc = iter1.next()) != null && rc.getReadName().equals(qry)) {
buff.add(SAMSegment.samRecord(rc, true, qry_ln));
}
// rc already points at the next read (or null), so continuing is safe.
if (buff.isEmpty())
continue;
min_aln = 0.9 * buff.get(0).qlength();
// keep alignment fragment that has qual>0
Set<SAMSegment> init_f = new HashSet<SAMSegment>();
Set<SAMSegment> init_r = new HashSet<SAMSegment>();
for (SAMSegment record : buff) {
if (record.qual() == 0 && record.qlength() < min_aln)
continue;
// Forward hits keep the query id; reverse hits go under qry + "'".
if (record.qseqid().equals(qry))
init_f.add(record);
else
init_r.add(record);
initPseudoAssembly.get(record.sseqid()).add(record);
}
if (!init_f.isEmpty())
initPlace.put(qry, init_f);
if (!init_r.isEmpty())
initPlace.put(qry + "'", init_r);
}
iter1.close();
in1.close();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
// Collections.sort(initPseudoAssembly.get("1_pilon"), new AlignmentSegment.SubjectCoordinationComparator());
// if(debug) {
// for(SAMSegment record : initPseudoAssembly.get("1_pilon")) {
// System.out.println(record.qseqid()+":"+record.sstart()+"-"+record.send());
// }
// }
final Set<SAMSegment> contained = new HashSet<SAMSegment>();
final Set<SAMSegment> placed = new HashSet<SAMSegment>();
// Maximum allowed subject-coordinate distance between linked segments.
final int flank_size = 10000;
int distance;
for (String sub_seq : sub_seqs.keySet()) {
// sub_seq = "Chr10";
// "Chr00" presumably holds unanchored sequence — skipped; verify.
if (sub_seq.equals("Chr00"))
continue;
myLogger.info(">>>>>>>>>>>>>" + sub_seq + "<<<<<<<<<<<<<<<<");
final List<SAMSegment> seq_by_sub = initPseudoAssembly.get(sub_seq);
Collections.sort(seq_by_sub, new AlignmentSegment.SubjectCoordinationComparator());
placed.clear();
int nSeq = seq_by_sub.size();
double edge_penalty, edge_score;
SAMSegment root_seq, source_seq, target_seq;
Set<SAMSegment> target_seqs;
Set<OverlapEdge> outgoing;
TraceableEdge edge;
String root_seqid, source_seqid, target_seqid;
TraceableVertex<String> root_vertex, source_vertex, target_vertex;
Deque<SAMSegment> deque = new ArrayDeque<SAMSegment>();
final List<TraceableVertex<String>> traceable = new ArrayList<TraceableVertex<String>>();
for (int i = 0; i < nSeq; i++) {
root_seq = seq_by_sub.get(i);
root_seqid = root_seq.qseqid();
if (placed.contains(root_seq))
continue;
// "razor": the local subgraph reachable from root_seq, restricted to
// GFA successors whose best placement lies within flank_size.
final TraceableDirectedWeightedPseudograph<String> razor = new TraceableDirectedWeightedPseudograph<String>(TraceableEdge.class);
// final ListenableDirectedWeightedGraph<TraceableVertex<String>, DefaultWeightedEdge> razor =
// new ListenableDirectedWeightedGraph<TraceableVertex<String>, DefaultWeightedEdge>(DefaultWeightedEdge.class);
// JGraphModelAdapter<TraceableVertex<String>, DefaultWeightedEdge> jgAdapter =
// new JGraphModelAdapter<TraceableVertex<String>, DefaultWeightedEdge>(razor);
// JGraph jgraph = new JGraph(jgAdapter);
// Iterative DFS over GFA out-edges, building the razor graph.
deque.clear();
deque.push(root_seq);
contained.clear();
while (!deque.isEmpty()) {
source_seq = deque.pop();
source_seqid = source_seq.qseqid();
if (contained.contains(source_seq))
continue;
contained.add(source_seq);
source_vertex = new TraceableVertex<String>(source_seqid);
source_vertex.setSAMSegment(source_seq);
if (!razor.containsVertex(source_vertex))
razor.addVertex(source_vertex);
outgoing = gfa.outgoingEdgesOf(source_seqid);
for (OverlapEdge out : outgoing) {
target_seqid = gfa.getEdgeTarget(out);
if (!initPlace.containsKey(target_seqid))
continue;
// Among the target's candidate placements, pick the one closest
// (in subject coordinates) to the current source segment.
target_seqs = initPlace.get(target_seqid);
distance = Integer.MAX_VALUE;
target_seq = null;
for (SAMSegment seq : target_seqs) {
int d = AlignmentSegment.sdistance(source_seq, seq);
if (d < distance) {
distance = d;
target_seq = seq;
}
}
if (distance <= flank_size) {
target_vertex = new TraceableVertex<String>(target_seqid);
target_vertex.setSAMSegment(target_seq);
if (!razor.containsVertex(target_vertex))
razor.addVertex(target_vertex);
if (razor.containsEdge(source_vertex, target_vertex))
continue;
edge = razor.addEdge(source_vertex, target_vertex);
// calculate edge weight
// higher weight edges are those,
/**
**
 * // 1. large/long alignment segments vertices
 * // TODO: 2*. small gaps on the reference
 * edge_weight = qry_seqs.get(source_seqid).seq_ln()+
 * qry_seqs.get(target_seqid).seq_ln()-
 * gfa.getEdge(source_seqid, target_seqid).olap();
 */
// TODO: 1*. large/long alignment segments vertices
// 2. small gaps on the reference
edge_penalty = AlignmentSegment.sdistance(source_seq, target_seq);
edge.setPenalty(edge_penalty);
// Score: combined segment lengths minus their GFA overlap.
edge_score = qry_seqs.get(source_seqid).seq_ln() + qry_seqs.get(target_seqid).seq_ln() - gfa.getEdge(source_seqid, target_seqid).olap();
edge.setScore(edge_score);
deque.push(target_seq);
}
}
}
if (ddebug)
myLogger.info(root_seqid + " " + razor.vertexSet().size() + " " + razor.edgeSet().size() + " done");
// JFrame frame = new JFrame();
// frame.getContentPane().add(jgraph);
// frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
// frame.pack();
// frame.setVisible(true);
// "pseudo"-DFS to find the route with the highest score
final Map<String, TraceableVertex<String>> razv_map = new HashMap<String, TraceableVertex<String>>();
for (TraceableVertex<String> v : razor.vertexSet()) razv_map.put(v.getId(), v);
// we use a bidirectional hashmap to simulate the deque
// this is because we may need to do deletions
// Deque<TraceableVertex<String>> queue = new ArrayDeque<TraceableVertex<String>>();
final TreeBidiMap<Long, TraceableVertex<String>> bidiQ = new TreeBidiMap<Long, TraceableVertex<String>>();
// Seed the search at the root: score = its own length, no penalty.
root_vertex = razv_map.get(root_seqid);
root_vertex.setSAMSegment(root_seq);
root_vertex.setScore(qry_seqs.get(root_seqid).seq_ln());
root_vertex.setPenalty(0);
root_vertex.setStatus(true);
bidiQ.put(0L, root_vertex);
double max_ws = Double.NEGATIVE_INFINITY, source_penalty, target_penalty, source_score, target_score, penalty, score, target_ws, source_ws, ws;
int source_ln;
Set<TraceableEdge> out_edges;
TraceableVertex<String> opt_vertex = null;
long sizeQ;
boolean isLeaf;
if (ddebug)
for (TraceableEdge e : razor.edgeSet()) myLogger.info(e.toString() + "(" + razor.getEdgeSource(e).getSAMSegment().toString() + "|" + razor.getEdgeTarget(e).getSAMSegment().toString() + "|" + e.getScore() + "-" + e.getPenalty() + ")");
while (!bidiQ.isEmpty()) {
// Pop the entry with the largest key (most recently pushed).
sizeQ = bidiQ.lastKey();
source_vertex = bidiQ.get(sizeQ);
bidiQ.remove(sizeQ);
source_ln = qry_seqs.get(source_vertex.getId()).seq_ln();
// Subtract own length: edge scores re-add it on extension.
source_score = source_vertex.getScore() - source_ln;
source_penalty = source_vertex.getPenalty();
source_ws = source_score - source_penalty;
isLeaf = true;
out_edges = razor.outgoingEdgesOf(source_vertex);
for (TraceableEdge out : out_edges) {
// this is not right because graph edges are immutable?
// target_vertex = razor.getEdgeTarget(out);
target_vertex = razv_map.get(razor.getEdgeTarget(out).getId());
target_score = target_vertex.getScore();
target_penalty = target_vertex.getPenalty();
target_ws = target_score - target_penalty;
edge_penalty = out.getPenalty();
penalty = source_penalty + edge_penalty;
edge_score = out.getScore();
score = source_score + edge_score;
ws = score - penalty;
// Skip if the edge spans too far, or the target was already visited
// with an equal-or-better weighted score, or extending would loop.
// (&& binds tighter than ||, so the penalty check stands alone.)
if (edge_penalty > flank_size || target_vertex.getStatus() && (ws <= target_ws || isLoopback(razor, source_vertex, target_vertex)))
continue;
isLeaf = false;
// Relax the target: record the better path and re-queue it.
target_vertex.setBackTrace(source_vertex);
target_vertex.setScore(score);
target_vertex.setPenalty(penalty);
target_vertex.setStatus(true);
bidiQ.put(sizeQ++, target_vertex);
}
// A dead end: candidate endpoint for the best path. The first leaf
// always beats max_ws (initialized to -inf), so opt_vertex is set
// whenever the queue was non-empty.
if (isLeaf && source_ws > max_ws) {
penalty = source_vertex.getPenalty();
score = source_vertex.getScore();
max_ws = source_ws;
opt_vertex = source_vertex;
if (ddebug) {
String trace = opt_vertex.toString() + ":" + opt_vertex.getSAMSegment().sstart() + "-" + opt_vertex.getSAMSegment().send() + "(" + opt_vertex.getScore() + "-" + opt_vertex.getPenalty() + ")";
TraceableVertex<String> optx = opt_vertex;
while ((optx = optx.getBackTrace()) != null) {
trace += "," + optx.toString() + ":" + optx.getSAMSegment().sstart() + "-" + optx.getSAMSegment().send() + "(" + optx.getScore() + "-" + optx.getPenalty() + ")";
}
myLogger.info("trace back [" + score + ", " + penalty + "]: " + trace);
}
}
}
// Record the winning endpoint and mark every segment on its back-trace
// as placed so later roots don't re-seed from them.
traceable.add(opt_vertex);
Set<TraceableVertex<String>> optx = new HashSet<TraceableVertex<String>>();
optx.add(opt_vertex);
while ((opt_vertex = opt_vertex.getBackTrace()) != null) optx.add(opt_vertex);
for (TraceableVertex<String> v : optx) placed.add(v.getSAMSegment());
}
// sort traceable by size
Collections.sort(traceable, new Comparator<TraceableVertex<String>>() {
@Override
public int compare(TraceableVertex<String> t0, TraceableVertex<String> t1) {
// Descending by score.
return Double.compare(t1.getScore(), t0.getScore());
}
});
if (debug) {
for (TraceableVertex<String> opt_vertex : traceable) {
double score = opt_vertex.getScore();
double penalty = opt_vertex.getPenalty();
String trace = opt_vertex.toString() + ":" + opt_vertex.getSAMSegment().sstart() + "-" + opt_vertex.getSAMSegment().send() + "(" + opt_vertex.getScore() + "-" + opt_vertex.getPenalty() + ")";
while ((opt_vertex = opt_vertex.getBackTrace()) != null) {
trace += "," + opt_vertex.toString() + ":" + opt_vertex.getSAMSegment().sstart() + "-" + opt_vertex.getSAMSegment().send() + "(" + opt_vertex.getScore() + "-" + opt_vertex.getPenalty() + ")";
}
myLogger.info("trace back [" + score + ", " + penalty + "]: " + trace);
}
}
// we generate a compound alignment record for each traceable
// NOTE(review): loop body is empty — this step is unimplemented (TODO).
for (TraceableVertex<String> opt_vertex : traceable) {
}
}
Example usage of htsjdk.samtools.SamReaderFactory in project polyGembler (by c-zhou): the BAMstats class, method run.
@Override
public void run() {
// Compare two BAMs produced from the same paired-end read set (assumed
// name-sorted — TODO confirm) via a merge-join on read name. Accumulates
// per-file mapping counts, insert-size distributions, and the
// alignment-score / insert-size differences between the two files, then
// writes a plain-text report to `out`.
final SamReaderFactory factory = SamReaderFactory.makeDefault().enable(SamReaderFactory.Option.INCLUDE_SOURCE_IN_RECORDS, SamReaderFactory.Option.VALIDATE_CRC_CHECKSUMS).validationStringency(ValidationStringency.SILENT);
try {
final SamReader in1 = factory.open(new File(bam_in1));
final SamReader in2 = factory.open(new File(bam_in2));
SAMRecordIterator iter1 = in1.iterator();
SAMRecordIterator iter2 = in2.iterator();
// NOTE(review): unconditional next() — an empty input BAM fails here.
SAMRecord tmp_record1 = iter1.next(), tmp_record2 = iter2.next();
// Per-file buffers for one read pair: records [0]=first, [1]=second of
// pair, with their alignment scores and mapped flags.
final SAMRecord[] sam_record1 = new SAMRecord[2], sam_record2 = new SAMRecord[2];
final float[] sam_as1 = new float[2], sam_as2 = new float[2];
final boolean[] sam_mapped1 = new boolean[2], sam_mapped2 = new boolean[2];
String record_id1 = tmp_record1.getReadName(), record_id2 = tmp_record2.getReadName();
int i_sz;
float i_as;
boolean b1, b2;
while (true) {
if (record_counter % 4000000 == 0)
myLogger.info(record_counter + " processed.");
// Synchronize the two streams: advance whichever file is behind
// (by read-name order), processing its unmatched pairs singly.
while (record_id1 != null && record_id2 != null && !record_id1.equals(record_id2)) {
while (record_id1 != null && record_id1.compareTo(record_id2) < 0) {
tmp_record1 = bufferRecord(iter1, tmp_record1, sam_record1, sam_as1, sam_mapped1, record_id1);
processBuffer(sam_record1, sam_as1, sam_mapped1, 0);
record_id1 = tmp_record1 == null ? null : tmp_record1.getReadName();
}
if (record_id1 == null)
break;
while (record_id2 != null && record_id2.compareTo(record_id1) < 0) {
tmp_record2 = bufferRecord(iter2, tmp_record2, sam_record2, sam_as2, sam_mapped2, record_id2);
processBuffer(sam_record2, sam_as2, sam_mapped2, 1);
record_id2 = tmp_record2 == null ? null : tmp_record2.getReadName();
}
}
if (record_id1 == null || record_id2 == null)
break;
// Same read name in both files: buffer the pair from each.
tmp_record1 = bufferRecord(iter1, tmp_record1, sam_record1, sam_as1, sam_mapped1, record_id1);
tmp_record2 = bufferRecord(iter2, tmp_record2, sam_record2, sam_as2, sam_mapped2, record_id2);
record_id1 = tmp_record1 == null ? null : tmp_record1.getReadName();
record_id2 = tmp_record2 == null ? null : tmp_record2.getReadName();
// Skip unless at least one file has a complete pair; process
// one-sided complete pairs on their own.
if ((sam_record2[0] == null || sam_record2[1] == null) && (sam_record1[0] == null || sam_record1[1] == null))
continue;
if ((sam_record1[0] == null || sam_record1[1] == null) && sam_record2[0] != null && sam_record2[1] != null) {
processBuffer(sam_record2, sam_as2, sam_mapped2, 1);
continue;
}
if ((sam_record2[0] == null || sam_record2[1] == null) && sam_record1[0] != null && sam_record1[1] != null) {
processBuffer(sam_record1, sam_as1, sam_mapped1, 0);
continue;
}
// Drop duplicates and reads shorter than min_seqLen after trimming
// leading/trailing N runs.
if (sam_record1[0].getDuplicateReadFlag() || sam_record1[1].getDuplicateReadFlag() || sam_record2[0].getDuplicateReadFlag() || sam_record2[1].getDuplicateReadFlag() || sam_record1[0].getReadString().replaceAll("N+$", "").replaceAll("^N+", "").length() < min_seqLen || sam_record1[1].getReadString().replaceAll("N+$", "").replaceAll("^N+", "").length() < min_seqLen || sam_record2[0].getReadString().replaceAll("N+$", "").replaceAll("^N+", "").length() < min_seqLen || sam_record2[1].getReadString().replaceAll("N+$", "").replaceAll("^N+", "").length() < min_seqLen)
// we need both reads longer than 36
continue;
record_counter += 2;
// Mapping counts: [0] file1 only, [1] file2 only, [2] mapped in both.
if (sam_mapped1[0])
record_mapped_asInf[0]++;
if (sam_mapped1[1])
record_mapped_asInf[0]++;
if (sam_mapped2[0])
record_mapped_asInf[1]++;
if (sam_mapped2[1])
record_mapped_asInf[1]++;
if (sam_mapped1[0] && sam_mapped2[0])
record_mapped_asInf[2]++;
if (sam_mapped1[1] && sam_mapped2[1])
record_mapped_asInf[2]++;
// b1/b2: pair properly mapped (both mates, neither on "Chr00") in
// file 1 / file 2; if so, tally the absolute insert size.
if (b1 = (sam_mapped1[0] && sam_mapped1[1] && !sam_record1[0].getReferenceName().equals("Chr00") && !sam_record1[1].getReferenceName().equals("Chr00"))) {
i_sz = Math.abs(sam_record1[0].getInferredInsertSize());
if (!insert_size_c1.containsKey(i_sz))
insert_size_c1.put(i_sz, 1);
else
insert_size_c1.put(i_sz, insert_size_c1.get(i_sz) + 1);
}
if (b2 = (sam_mapped2[0] && sam_mapped2[1] && !sam_record2[0].getReferenceName().equals("Chr00") && !sam_record2[1].getReferenceName().equals("Chr00"))) {
i_sz = Math.abs(sam_record2[0].getInferredInsertSize());
if (!insert_size_c2.containsKey(i_sz))
insert_size_c2.put(i_sz, 1);
else
insert_size_c2.put(i_sz, insert_size_c2.get(i_sz) + 1);
}
// The difference stats below need the pair mapped in BOTH files.
if (!(b1 & b2))
continue;
i_sz = Math.abs(sam_record1[0].getInferredInsertSize()) - Math.abs(sam_record2[0].getInferredInsertSize());
if (!insert_size_cdiff.containsKey(i_sz))
insert_size_cdiff.put(i_sz, 1);
else
insert_size_cdiff.put(i_sz, insert_size_cdiff.get(i_sz) + 1);
// alignment score for pairs always the same
i_as = sam_as1[0] - sam_as2[0];
if (!alignment_score_cdiff.containsKey(i_as))
alignment_score_cdiff.put(i_as, 1);
else
alignment_score_cdiff.put(i_as, alignment_score_cdiff.get(i_as) + 1);
// i selects the record_mapped_as0 bin: 0 = file1 scores higher,
// 1 = file2 scores higher, 2 = tie.
int i = i_as > 0 ? 0 : (i_as < 0 ? 1 : 2);
if (sam_mapped1[0] && !sam_mapped2[0])
record_mapped_as0[0]++;
if (!sam_mapped1[0] && sam_mapped2[0])
record_mapped_as0[1]++;
if (sam_mapped1[0] && sam_mapped2[0])
record_mapped_as0[i]++;
if (sam_mapped1[1] && !sam_mapped2[1])
record_mapped_as0[0]++;
if (!sam_mapped1[1] && sam_mapped2[1])
record_mapped_as0[1]++;
if (sam_mapped1[1] && sam_mapped2[1])
record_mapped_as0[i]++;
}
// Drain whichever file still has records once the other is exhausted.
if (record_id1 == null) {
while (record_id2 != null) {
tmp_record2 = bufferRecord(iter2, tmp_record2, sam_record2, sam_as2, sam_mapped2, record_id2);
processBuffer(sam_record2, sam_as2, sam_mapped2, 1);
record_id2 = tmp_record2 == null ? null : tmp_record2.getReadName();
}
}
if (record_id2 == null) {
while (record_id1 != null) {
tmp_record1 = bufferRecord(iter1, tmp_record1, sam_record1, sam_as1, sam_mapped1, record_id1);
processBuffer(sam_record1, sam_as1, sam_mapped1, 0);
record_id1 = tmp_record1 == null ? null : tmp_record1.getReadName();
}
}
iter1.close();
iter2.close();
in1.close();
in2.close();
// Write the report. NOTE(review): resources above leak if an exception
// is thrown earlier in this try block; consider try-with-resources.
BufferedWriter bw = new BufferedWriter(new FileWriter(out));
bw.write("###record_total\n" + record_counter + "\n");
bw.write("###record_mapped ( 0 )\n" + record_mapped_as0[0] + ", " + record_mapped_as0[1] + ", " + record_mapped_as0[2] + "\n");
bw.write("###record_mapped (Inf)\n" + record_mapped_asInf[0] + ", " + record_mapped_asInf[1] + ", " + record_mapped_asInf[2] + "\n");
bw.write("###alignment_score_c1\n");
for (float i : alignment_score_c1.keySet()) bw.write(i + "\t" + alignment_score_c1.get(i) + "\n");
bw.write("###alignment_score_c2\n");
for (float i : alignment_score_c2.keySet()) bw.write(i + "\t" + alignment_score_c2.get(i) + "\n");
bw.write("###alignment_score_cdiff\n");
for (float i : alignment_score_cdiff.keySet()) bw.write(i + "\t" + alignment_score_cdiff.get(i) + "\n");
bw.write("###insert_size_c1\n");
for (int i : insert_size_c1.keySet()) bw.write(i + "\t" + insert_size_c1.get(i) + "\n");
bw.write("###insert_size_c2\n");
for (int i : insert_size_c2.keySet()) bw.write(i + "\t" + insert_size_c2.get(i) + "\n");
bw.write("###insert_size_cdiff\n");
for (int i : insert_size_cdiff.keySet()) bw.write(i + "\t" + insert_size_cdiff.get(i) + "\n");
bw.close();
} catch (IOException e) {
e.printStackTrace();
}
}
Aggregations