Use of org.apache.commons.collections4.BidiMap in project fmv by f-agu.
The class FontUtils, method loadAllSystemFonts.
// ***************************************
/**
 * Loads every TrueType font found in the system font folder into the
 * bidirectional name-to-Font map and the Font-to-File map. Returns
 * immediately if the fonts have already been loaded.
 */
private static void loadAllSystemFonts() {
    if (fontMap != null) {
        return;
    }
    BidiMap<String, Font> tmpFontBidiMap = new DualHashBidiMap<>();
    Map<Font, File> fileMap = new HashMap<>();
    File folder = new File(getSystemFontPath());
    for (File file : folder.listFiles((FileFilter) new SuffixFileFilter(".ttf", IOCase.INSENSITIVE))) {
        try {
            Font font = Font.createFont(Font.TRUETYPE_FONT, file);
            tmpFontBidiMap.put(font.getName().toLowerCase(), font);
            fileMap.put(font, file);
        } catch (FontFormatException | IOException e) {
            // skip font files that cannot be parsed
        }
    }
    fontMap = tmpFontBidiMap;
    fontFileMap = fileMap;
}
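The BidiMap gives this font cache constant-time lookups in both directions: name to Font via get, and Font back to name via getKey. A minimal standalone sketch of that pattern, assuming only commons-collections4 on the classpath (the Dialog font is illustrative, not from the fmv project):

import java.awt.Font;

import org.apache.commons.collections4.BidiMap;
import org.apache.commons.collections4.bidimap.DualHashBidiMap;

public class BidiMapSketch {

    public static void main(String[] args) {
        // DualHashBidiMap keeps a second hash map internally, so reverse
        // lookups are O(1) as well.
        BidiMap<String, Font> fonts = new DualHashBidiMap<>();
        Font dialog = new Font(Font.DIALOG, Font.PLAIN, 12);
        fonts.put(dialog.getName().toLowerCase(), dialog);

        Font byName = fonts.get("dialog");      // forward: name -> Font
        String byFont = fonts.getKey(dialog);   // reverse: Font -> name
        System.out.println(byName + " <-> " + byFont);
    }
}

One caveat of the design: BidiMap values must be unique, so putting the same Font under a second key silently removes the first mapping.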
Use of org.apache.commons.collections4.BidiMap in project polyGembler by c-zhou.
The class NNsuperscaffold, method nj.
public void nj(double max_r) {
    final BidiMap<String, Integer> scaffs = new DualHashBidiMap<>();
    final BidiMap<Cluster, Integer> clusts = new DualHashBidiMap<>();
    final TreeMap<Double, Set<ClustPair>> minRfs = new TreeMap<>();
    try {
        BufferedReader br1 = Utils.getBufferedReader(rf_file);
        int c = 0;
        String line, scaff;
        String[] s;
        double minf;
        double[] allf;
        Cluster c1, c2;
        int i1, i2;
        ClustPair pair;
        long line_count = 0;
        while ((line = br1.readLine()) != null) {
            if (line.startsWith("#")) {
                // header line: register a new scaffold and its singleton cluster
                scaff = line.trim().replaceAll("^##", "");
                scaffs.put(scaff, c);
                c1 = new Cluster(c);
                clusts.put(c1, c);
                ++c;
            } else {
                ++line_count;
                if (line_count % 1000000 == 0)
                    myLogger.info("#lines loaded: " + line_count);
                s = line.trim().split("\\s+");
                minf = Double.parseDouble(s[0]);
                i1 = scaffs.get(s[5]);
                i2 = scaffs.get(s[6]);
                c1 = clusts.getKey(i1);
                c2 = clusts.getKey(i2);
                // recombination frequencies arranged in i1<i2 order
                allf = new double[4];
                allf[0] = Double.parseDouble(s[1]);
                allf[3] = Double.parseDouble(s[4]);
                if (i1 < i2) {
                    allf[1] = Double.parseDouble(s[2]);
                    allf[2] = Double.parseDouble(s[3]);
                } else {
                    allf[1] = Double.parseDouble(s[3]);
                    allf[2] = Double.parseDouble(s[2]);
                }
                c1.minf.put(c2, minf);
                c1.allf.put(c2, allf);
                c2.minf.put(c1, minf);
                c2.allf.put(c1, allf);
                // cluster pair arranged in i1<i2 order
                pair = i1 < i2 ? new ClustPair(c1, c2) : new ClustPair(c2, c1);
                minRfs.putIfAbsent(minf, new HashSet<>());
                minRfs.get(minf).add(pair);
            }
        }
        myLogger.info("####total lines loaded: " + line_count);
        br1.close();
    } catch (IOException e) {
        e.printStackTrace();
    }
    int nclust = clusts.size();
    Map.Entry<Double, Set<ClustPair>> entry;
    double f, extf;
    Set<ClustPair> pairs;
    ClustPair pair;
    Cluster c1, c2, c, cc;
    boolean j1, j2;
    Map<Cluster, Double> minf;
    Map<Cluster, double[]> allf;
    int i1, i2;
    double[] fs, fs1, fs2;
    Set<Double> fs2cl = new HashSet<>();
    while (!minRfs.isEmpty()) {
        entry = minRfs.firstEntry();
        f = entry.getKey();
        pairs = entry.getValue();
        if (f > max_r)
            break;
        myLogger.info("####" + nclust + " " + f + " " + clusts.size());
        // all pairs in this bucket share the same recombination frequency,
        // so pick the pair (and join orientation) with the smallest
        // external frequency
        pair = null;
        extf = Double.MAX_VALUE;
        for (ClustPair p : pairs) {
            p.join();
            if (p.extf < extf) {
                extf = p.extf;
                pair = p;
            }
        }
        if (pair == null)
            pair = pairs.iterator().next();
        // now join the pair
        c1 = pair.c1;
        c2 = pair.c2;
        j1 = pair.j1;
        j2 = pair.j2;
        i1 = c1.clust;
        i2 = c2.clust;
        c = new Cluster(nclust, c1, c2, j1, j2, f);
        allf = c.allf;
        minf = c.minf;
        // now update clusts
        clusts.remove(c1);
        clusts.remove(c2);
        // now update allf and minf against every remaining cluster
        for (int i : clusts.values()) {
            cc = clusts.getKey(i);
            fs = new double[4];
            // ** for c1
            fs1 = c1.allf.get(cc);
            if (i < i1) {
                if (j1) {
                    fs[0] = fs1[1];
                    fs[2] = fs1[3];
                } else {
                    fs[0] = fs1[0];
                    fs[2] = fs1[2];
                }
            } else {
                if (j1) {
                    fs[0] = fs1[2];
                    fs[2] = fs1[3];
                } else {
                    fs[0] = fs1[0];
                    fs[2] = fs1[1];
                }
            }
            // ** for c2
            fs2 = c2.allf.get(cc);
            if (i < i2) {
                if (j2) {
                    fs[1] = fs2[0];
                    fs[3] = fs2[2];
                } else {
                    fs[1] = fs2[1];
                    fs[3] = fs2[3];
                }
            } else {
                if (j2) {
                    fs[1] = fs2[0];
                    fs[3] = fs2[1];
                } else {
                    fs[1] = fs2[2];
                    fs[3] = fs2[3];
                }
            }
            f = StatUtils.min(fs);
            allf.put(cc, fs);
            minf.put(cc, f);
            cc.allf.put(c, fs);
            cc.minf.put(c, f);
        }
        // now update minf
        for (Map.Entry<Cluster, double[]> ent : allf.entrySet())
            minf.put(ent.getKey(), StatUtils.min(ent.getValue()));
        // now update minRfs
        pairs.remove(pair);
        // remove pairs involving c1 or c2
        for (Map.Entry<Cluster, Integer> ent : clusts.entrySet()) {
            cc = ent.getKey();
            int i = cc.clust;
            if (i1 < i) {
                minRfs.get(c1.minf.get(cc)).remove(new ClustPair(c1, cc));
            } else {
                minRfs.get(cc.minf.get(c1)).remove(new ClustPair(cc, c1));
            }
            if (i2 < i) {
                minRfs.get(c2.minf.get(cc)).remove(new ClustPair(c2, cc));
            } else {
                minRfs.get(cc.minf.get(c2)).remove(new ClustPair(cc, c2));
            }
        }
        // add pairs involving the new cluster c
        for (Map.Entry<Cluster, Double> ent : minf.entrySet()) {
            f = ent.getValue();
            minRfs.putIfAbsent(f, new HashSet<>());
            minRfs.get(f).add(new ClustPair(ent.getKey(), c));
        }
        // clear empty entries in minRfs
        fs2cl.clear();
        for (Map.Entry<Double, Set<ClustPair>> ent : minRfs.entrySet())
            if (ent.getValue().isEmpty())
                fs2cl.add(ent.getKey());
        for (double f2cl : fs2cl)
            minRfs.remove(f2cl);
        // clear c1 and c2 from minf and allf of the remaining clusters
        for (Cluster c0 : clusts.keySet()) {
            c0.minf.remove(c1);
            c0.minf.remove(c2);
            c0.allf.remove(c1);
            c0.allf.remove(c2);
        }
        // add new cluster c and update nclust
        clusts.put(c, nclust);
        ++nclust;
    }
    myLogger.info("####clusters: " + clusts.size());
    StringBuilder out = new StringBuilder();
    List<Integer> ids;
    List<Double> dists;
    List<Boolean> joins;
    try {
        BufferedWriter bw = Utils.getBufferedWriter(this.out_prefix + ".nns");
        for (Map.Entry<Cluster, Integer> ent : clusts.entrySet()) {
            c = ent.getKey();
            out.setLength(0);
            ids = c.ids;
            dists = c.dists;
            joins = c.joins;
            out.append("-c ");
            out.append(scaffs.getKey(ids.get(0)));
            for (int i = 1; i < ids.size(); i++) {
                out.append(":");
                out.append(scaffs.getKey(ids.get(i)));
            }
            if (dists.size() > 0) {
                out.append(" -s ");
                out.append(dists.get(0));
                for (int i = 1; i < dists.size(); i++) {
                    out.append(":");
                    out.append(dists.get(i));
                }
            }
            if (joins.size() > 1) {
                out.append(" -r ");
                out.append(joins.get(0));
                for (int i = 1; i < joins.size(); i++) {
                    out.append(":");
                    out.append(joins.get(i));
                }
            }
            myLogger.info("#" + ent.getValue() + "\t" + out.toString());
            for (int i = 0; i < ids.size() - 1; i++)
                myLogger.info(scaffs.getKey(ids.get(i)) + "\t" + dists.get(i) + "\t" + joins.get(i));
            myLogger.info(scaffs.getKey(ids.get(ids.size() - 1)) + "\t\t\t\t" + joins.get(joins.size() - 1));
            out.append("\n");
            bw.write(out.toString());
        }
        bw.close();
    } catch (IOException e) {
        e.printStackTrace();
    }
}
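Besides the two BidiMaps, the driver of this method is a TreeMap keyed on minimum recombination frequency, whose firstEntry() always yields the current best bucket of candidate joins. A minimal sketch of that bucket-queue pattern, using plain Strings in place of ClustPair (all names and scores here are illustrative):

import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;

public class BucketQueueSketch {

    public static void main(String[] args) {
        // TreeMap keeps keys sorted, so firstEntry() is always the smallest
        // score still holding candidates: a simple priority queue whose
        // buckets also allow cheap removal of invalidated pairs.
        TreeMap<Double, Set<String>> queue = new TreeMap<>();
        add(queue, 0.25, "A-B");
        add(queue, 0.10, "C-D");
        add(queue, 0.10, "A-C");

        double maxScore = 0.20;
        while (!queue.isEmpty()) {
            Map.Entry<Double, Set<String>> best = queue.firstEntry();
            if (best.getKey() > maxScore)
                break;                           // nothing joinable left
            Set<String> bucket = best.getValue();
            String pair = bucket.iterator().next();
            bucket.remove(pair);                 // consume one candidate
            if (bucket.isEmpty())
                queue.remove(best.getKey());     // drop the empty bucket
            System.out.println("join " + pair + " at " + best.getKey());
        }
    }

    private static void add(TreeMap<Double, Set<String>> q, double score, String pair) {
        q.computeIfAbsent(score, k -> new HashSet<>()).add(pair);
    }
}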
Use of org.apache.commons.collections4.BidiMap in project polyGembler by c-zhou.
The class RFUtils, method makeRMatrix.
/**
 * Builds an RData object via Renjin.
 * Requires renjin-script-engine-*-with-dependencies.jar, available from
 * https://nexus.bedatadriven.com/content/groups/public/org/renjin/renjin-script-engine/
 */
protected static void makeRMatrix(String in_rf, String out_Rmat) {
    // the engine lookup only verifies that Renjin is on the classpath
    ScriptEngineManager manager = new ScriptEngineManager();
    ScriptEngine engine = manager.getEngineByName("Renjin");
    if (engine == null) {
        throw new RuntimeException("Renjin not found!!!");
    }
    try {
        BufferedReader br = Utils.getBufferedReader(in_rf);
        final BidiMap<Integer, String> scaffs = new TreeBidiMap<Integer, String>();
        String line;
        String[] s;
        int w = 0;
        double d, l;
        // header lines ("##...") assign each scaffold its matrix index
        while ((line = br.readLine()) != null && line.startsWith("##")) {
            scaffs.put(w++, line.replaceAll("^##", ""));
        }
        int n = scaffs.size();
        int A = w * (w - 1) / 2;
        DoubleMatrixBuilder dMat = new DoubleMatrixBuilder(n, n);
        DoubleMatrixBuilder lMat = new DoubleMatrixBuilder(n, n);
        DoubleMatrixBuilder iMat = new DoubleMatrixBuilder(n, n);
        DoubleMatrixBuilder dAllMat = new DoubleMatrixBuilder(A * 2, 4);
        DoubleMatrixBuilder lAllMat = new DoubleMatrixBuilder(A * 2, 4);
        w = 0;
        while (line != null) {
            s = line.split("\\s+");
            int i = scaffs.getKey(s[5]), j = scaffs.getKey(s[6]), hs = Integer.parseInt(s[7]);
            d = Math.min(RF_MAX, Double.parseDouble(s[0]));
            l = calcLODFromRf(d, hs);
            dMat.set(i, j, d);
            dMat.set(j, i, d);
            lMat.set(i, j, l);
            lMat.set(j, i, l);
            iMat.set(i, j, w + 1);
            iMat.set(j, i, w + 1 + A);
            for (int k = 0; k < 4; k++) {
                d = Math.min(RF_MAX, Double.parseDouble(s[k + 1]));
                l = calcLODFromRf(d, hs);
                dAllMat.set(w, k, d);
                // the mirrored row (w + A) swaps the two middle columns
                dAllMat.set(w + A, (k == 0 || k == 3) ? k : (3 - k), d);
                lAllMat.set(w, k, l);
                lAllMat.set(w + A, (k == 0 || k == 3) ? k : (3 - k), l);
            }
            w++;
            line = br.readLine();
        }
        br.close();
        StringVector scf = new StringArrayVector(scaffs.values());
        dMat.setRowNames(scf);
        dMat.setColNames(scf);
        lMat.setRowNames(scf);
        lMat.setColNames(scf);
        iMat.setRowNames(scf);
        iMat.setColNames(scf);
        Context context = Context.newTopLevelContext();
        FileOutputStream fos = new FileOutputStream(out_Rmat);
        GZIPOutputStream zos = new GZIPOutputStream(fos);
        RDataWriter writer = new RDataWriter(context, zos);
        ListVector.NamedBuilder Rdat = new ListVector.NamedBuilder();
        Rdat.add("scaffs", scf);
        Rdat.add("n", n);
        Rdat.add("A", A);
        Rdat.add("distanceMat", dMat.build());
        Rdat.add("distanceAll", dAllMat.build());
        Rdat.add("lodMat", lMat.build());
        Rdat.add("lodAll", lAllMat.build());
        Rdat.add("indexMat", iMat.build());
        writer.save(Rdat.build());
        writer.close();
    } catch (IOException e) {
        e.printStackTrace();
    }
}
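Unlike the DualHashBidiMap in the earlier examples, TreeBidiMap keeps both keys and values sorted, so iterating scaffs.values() yields the scaffold names in index order, which is what lets them line up with the matrix rows above. A minimal sketch of this usage (scaffold names are made up):

import org.apache.commons.collections4.BidiMap;
import org.apache.commons.collections4.bidimap.TreeBidiMap;

public class TreeBidiMapSketch {

    public static void main(String[] args) {
        // TreeBidiMap requires both keys and values to be Comparable and
        // keeps them sorted, giving a stable index <-> name numbering.
        BidiMap<Integer, String> scaffs = new TreeBidiMap<>();
        scaffs.put(0, "scaffold_7");
        scaffs.put(1, "scaffold_2");

        String name = scaffs.get(0);                  // index -> name
        Integer index = scaffs.getKey("scaffold_2");  // name -> index
        System.out.println(name + " / " + index);

        // values() iterates names in key order, matching matrix row order
        System.out.println(scaffs.values());
    }
}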