Use of ini.trakem2.analysis.Compare.CATAParameters in project TrakEM2 by trakem2:
the class Compare, method variabilityAnalysis.
/**
* @param reference_project If null, then the first one found in the Project.getProjects() lists is used.
* @param regex A String (can be null) to filter objects by, to limit what gets processed.
* If regex is not null, then only ProjectThing nodes with the matching regex are analyzed (shallow: none of their children are questioned, but pipes will be built from them all).
* @param generate_plots Whether to generate the variability plots at all.
* @param show_plots If generate_plots, whether to show the plots in a stack image window or to save them.
* @param show_3D Whether to show any 3D data.
* @param show_condensed_3D If show_3D, whether to show the condensed vector strings, i.e. the "average" pipes.
* @param show_sources_3D If show_3D, whether to show the source pipes from which the condensed vector string was generated.
* @param sources_color_table Which colors to give to the pipes of which Project.
* @param show_envelope_3D If show_3D, whether to generate the variability envelope.
* @param envelope_alpha If show_envelope_3D, the envelope takes an alpha value between 0 (total transparency) and 1 (total opacity)
* @param delta_envelope The delta to resample the envelope to. When smaller than or equal to 1, no envelope resampling occurs.
* @param show_axes_3D If show_3D, whether to display the reference axes as well.
* @param heat_map If show_3D, whether to color the variability with a Fire LUT.
* If not show_condensed_3D, then the variability is shown in color-coded 3D spheres placed at the entry point to the neuropile.
* @param map_condensed If not null, all VectorString3D are put into this map.
* @param projects The projects to use.
*/
public static Bureaucrat variabilityAnalysis(final Project reference_project, final String regex, final String[] ignore, final boolean show_cata_dialog, final boolean generate_plots, final boolean show_plots, final String plot_dir_, final boolean show_3D, final boolean show_condensed_3D, final boolean show_sources_3D, final Map<Project, Color> sources_color_table, final boolean show_envelope_3D, final float envelope_alpha, final double delta_envelope, final int envelope_type, final boolean show_axes_3D, final boolean heat_map, final Map<String, VectorString3D> map_condensed, final Project[] projects) {
// gather all open projects
final Project[] p = null == projects ? Project.getProjects().toArray(new Project[0]) : projects;
// make the reference_project be the first in the array
if (null != reference_project && reference_project != p[0]) {
for (int i = 0; i < p.length; i++) {
if (reference_project == p[i]) {
p[i] = p[0];
p[0] = reference_project;
break;
}
}
}
final Worker worker = new Worker("Comparing all to all") {
@Override
public void run() {
startedWorking();
try {
Utils.log2("Asking for CATAParameters...");
final CATAParameters cp = new CATAParameters();
cp.regex = regex;
cp.delta_envelope = delta_envelope;
cp.envelope_type = envelope_type;
if (show_cata_dialog && !cp.setup(false, regex, true, true)) {
finishedWorking();
return;
}
// so source points are stored in VectorString3D for each resampled and interpolated point
cp.with_source = true;
// Store a series of results, depending on options
final HashMap<String, Display3D> results = new HashMap<String, Display3D>();
String plot_dir = plot_dir_;
if (generate_plots && !show_plots) {
// Save plots
if (null == plot_dir) {
final DirectoryChooser dc = new DirectoryChooser("Choose plots directory");
plot_dir = dc.getDirectory();
if (null == plot_dir) {
finishedWorking();
return;
}
}
if (IJ.isWindows())
plot_dir = plot_dir.replace('\\', '/');
if (!plot_dir.endsWith("/"))
plot_dir += "/";
}
Utils.log2("Gathering chains...");
// Gather chains that do not match the ignore regexes
// will transform them as well to the reference found in the first project in the p array
Object[] ob = gatherChains(p, cp, ignore);
ArrayList<Chain> chains = (ArrayList<Chain>) ob[0];
// to keep track of each project's chains
final ArrayList[] p_chains = (ArrayList[]) ob[1];
ob = null;
if (null == chains) {
finishedWorking();
return;
}
Utils.log2("Collecting bundles...");
final HashMap<Project, HashMap<String, VectorString3D>> axes = new HashMap<Project, HashMap<String, VectorString3D>>();
// Sort out into groups by unique names of lineage bundles
final HashMap<String, ArrayList<Chain>> bundles = new HashMap<String, ArrayList<Chain>>();
for (final Chain chain : chains) {
String title = chain.getCellTitle();
final String t = title.toLowerCase();
// unnamed
if (0 == t.indexOf('[') || 0 == t.indexOf('#'))
continue;
Utils.log("Accepting " + title);
title = title.substring(0, title.indexOf(' '));
// lineage bundle instance chains
ArrayList<Chain> bc = bundles.get(title);
if (null == bc) {
bc = new ArrayList<Chain>();
bundles.put(title, bc);
}
bc.add(chain);
}
Utils.log2("Found " + bundles.size() + " bundles.");
chains = null;
if (null != cp.regex && show_axes_3D && axes.size() < 3) {
// Must find the Mushroom Body lobes separately
final String cp_regex = cp.regex;
cp.regex = "mb";
final Object[] o = gatherChains(p, cp, ignore);
final ArrayList<Chain> lobes = (ArrayList<Chain>) o[0];
Utils.logAll("Found " + lobes.size() + " chains for lobes");
for (final Chain chain : lobes) {
final String t = chain.getCellTitle().toLowerCase();
if (-1 != t.indexOf("peduncle") || -1 != t.indexOf("medial lobe") || -1 != t.indexOf("dorsal lobe")) {
Utils.logAll("adding " + t);
final Project pr = chain.pipes.get(0).getProject();
HashMap<String, VectorString3D> m = axes.get(pr);
if (null == m) {
m = new HashMap<String, VectorString3D>();
axes.put(pr, m);
}
m.put(t, chain.vs);
continue;
}
}
cp.regex = cp_regex;
} else {
Utils.logAll("Not: cp.regex = " + cp.regex + " show_axes_3D = " + show_axes_3D + " axes.size() = " + axes.size());
}
final HashMap<String, VectorString3D> condensed = new HashMap<String, VectorString3D>();
Utils.log2("Condensing each bundle...");
// Condense each into a single VectorString3D
for (final Map.Entry<String, ArrayList<Chain>> entry : bundles.entrySet()) {
final ArrayList<Chain> bc = entry.getValue();
if (bc.size() < 2) {
Utils.log2("Skipping single: " + entry.getKey());
continue;
}
final VectorString3D[] vs = new VectorString3D[bc.size()];
for (int i = 0; i < vs.length; i++) vs[i] = bc.get(i).vs;
final VectorString3D c = condense(cp, vs, this);
c.setCalibration(p[0].getRootLayerSet().getCalibrationCopy());
condensed.put(entry.getKey(), c);
if (this.hasQuitted())
return;
}
// Store:
if (null != map_condensed) {
map_condensed.putAll(condensed);
}
if (generate_plots) {
Utils.log2("Plotting stdDev for each condensed bundle...");
// Y axis: the stdDev at each point, computed from the group of points that contribute to each
for (final Map.Entry<String, VectorString3D> e : condensed.entrySet()) {
final String name = e.getKey();
final VectorString3D c = e.getValue();
final Plot plot = makePlot(cp, name, c);
// FAILS//plot.addLabel(10, cp.plot_height-5, name); // must be added after setting size
if (show_plots)
plot.show();
else if (null != plot_dir)
new FileSaver(plot.getImagePlus()).saveAsPng(plot_dir + name.replace('/', '-') + ".png");
}
}
if (show_3D) {
final HashMap<String, Color> heat_table = new HashMap<String, Color>();
if (heat_map || show_envelope_3D) {
// Create a Fire LUT
final ImagePlus lutimp = new ImagePlus("lut", new ByteProcessor(4, 4));
IJ.run(lutimp, "Fire", "");
final IndexColorModel icm = (IndexColorModel) lutimp.getProcessor().getColorModel();
final byte[] reds = new byte[256];
final byte[] greens = new byte[256];
final byte[] blues = new byte[256];
icm.getReds(reds);
icm.getGreens(greens);
icm.getBlues(blues);
final List<String> names = new ArrayList<String>(bundles.keySet());
Collections.sort(names);
// find max stdDev
double max = 0;
final HashMap<String, Double> heats = new HashMap<String, Double>();
for (final String name : names) {
final VectorString3D vs_merged = condensed.get(name);
if (null == vs_merged) {
Utils.logAll("WARNING could not find a condensed pipe for " + name);
continue;
}
final double[] stdDev = vs_merged.getStdDevAtEachPoint();
// double avg = 0;
// for (int i=0; i<stdDev.length; i++) avg += stdDev[i];
// avg = avg/stdDev.length;
Arrays.sort(stdDev);
// median is more representative than average
final double median = stdDev[stdDev.length / 2];
if (max < median)
max = median;
heats.put(name, median);
}
for (final Map.Entry<String, Double> e : heats.entrySet()) {
final String name = e.getKey();
final double median = e.getValue();
// scale between 0 and max to get a Fire LUT color:
int index = (int) ((median / max) * 255);
if (index > 255)
index = 255;
final Color color = new Color(0xff & reds[index], 0xff & greens[index], 0xff & blues[index]);
Utils.log2(new StringBuilder(name).append('\t').append(median).append('\t').append(reds[index]).append('\t').append(greens[index]).append('\t').append(blues[index]).toString());
heat_table.put(name, color);
}
}
final LayerSet common_ls = new LayerSet(p[0], -1, "Common", 10, 10, 0, 0, 0, 512, 512, false, 2, new AffineTransform());
final Display3D d3d = Display3D.get(common_ls);
float env_alpha = envelope_alpha;
if (env_alpha < 0) {
Utils.log2("WARNING envelope_alpha is invalid: " + envelope_alpha + "\n Using 0.4f instead");
env_alpha = 0.4f;
} else if (env_alpha > 1)
env_alpha = 1.0f;
for (final String name : bundles.keySet()) {
final ArrayList<Chain> bc = bundles.get(name);
final VectorString3D vs_merged = condensed.get(name);
if (null == vs_merged) {
Utils.logAll("WARNING: could not find a condensed vs for " + name);
continue;
}
if (show_sources_3D) {
if (null != sources_color_table) {
final HashSet<String> titles = new HashSet<String>();
for (final Chain chain : bc) {
final Color c = sources_color_table.get(chain.getRoot().getProject());
final String title = chain.getCellTitle();
String t = title;
int i = 2;
while (titles.contains(t)) {
t = title + "-" + i;
i += 1;
}
titles.add(t);
Display3D.addMesh(common_ls, chain.vs, t, null != c ? c : Color.gray);
}
} else {
for (final Chain chain : bc) Display3D.addMesh(common_ls, chain.vs, chain.getCellTitle(), Color.gray);
}
}
if (show_condensed_3D) {
Display3D.addMesh(common_ls, vs_merged, name + "-condensed", heat_map ? heat_table.get(name) : Color.red);
}
if (show_envelope_3D) {
double[] widths = makeEnvelope(cp, vs_merged);
if (cp.delta_envelope > 1) {
vs_merged.addDependent(widths);
vs_merged.resample(cp.delta_envelope);
widths = vs_merged.getDependent(0);
}
Display3D.addMesh(common_ls, vs_merged, name + "-envelope", heat_map ? heat_table.get(name) : Color.red, widths, env_alpha);
} else if (heat_map) {
// Show spheres in place of envelopes, at the starting tip (neuropile entry point)
final double x = vs_merged.getPoints(0)[0];
final double y = vs_merged.getPoints(1)[0];
final double z = vs_merged.getPoints(2)[0];
final double r = 10;
final Color color = heat_table.get(name);
if (null == color) {
Utils.logAll("WARNING: heat table does not have a color for " + name);
continue;
}
final Content sphere = d3d.getUniverse().addMesh(ij3d.Mesh_Maker.createSphere(x, y, z, r), new Color3f(heat_table.get(name)), name + "-sphere", 1);
}
}
if (show_axes_3D) {
for (int i = 0; i < p.length; i++) {
final Map<String, VectorString3D> m = axes.get(p[i]);
if (null == m) {
Utils.log2("No axes found for project " + p[i]);
continue;
}
for (final Map.Entry<String, VectorString3D> e : m.entrySet()) {
Display3D.addMesh(common_ls, e.getValue(), e.getKey() + "-" + i, Color.gray);
}
}
}
results.put("d3d", Display3D.get(common_ls));
}
this.result = results;
Utils.log2("Done.");
} catch (final Exception e) {
IJError.print(e);
} finally {
finishedWorking();
}
}
};
return Bureaucrat.createAndStart(worker, p[0]);
}
Use of ini.trakem2.analysis.Compare.CATAParameters in project TrakEM2 by trakem2:
the class Compare, method gatherChains.
/**
* Gather chains for all projects considering the cp.regex, and transforms all relative to the reference Project p[0].
* Will ignore any for which a match exists in @param ignore.
*/
public static final Object[] gatherChains(final Project[] p, final CATAParameters cp, final String[] ignore) throws Exception {
String regex_exclude = null;
if (null != ignore) {
final StringBuilder sb = new StringBuilder();
for (final String ig : ignore) {
sb.append("(.*").append(ig).append(".*)|");
}
sb.setLength(sb.length() - 1);
regex_exclude = sb.toString();
}
Utils.logAll("Compare/gatherChains: using ignore string: " + regex_exclude);
Utils.logAll("Compare/gatherChains: using regex: " + cp.regex);
// gather all chains
// to keep track of each project's chains
final ArrayList[] p_chains = new ArrayList[p.length];
final ArrayList<Chain> chains = new ArrayList<Chain>();
for (int i = 0; i < p.length; i++) {
// for each project:
if (null == cp.regex) {
p_chains[i] = createPipeChains(p[i].getRootProjectThing(), p[i].getRootLayerSet(), regex_exclude);
} else {
// Search (shallow) for cp.regex matches
for (final ProjectThing pt : p[i].getRootProjectThing().findChildren(cp.regex, regex_exclude, true)) {
final ArrayList<Chain> ac = createPipeChains(pt, p[i].getRootLayerSet(), regex_exclude);
if (null == p_chains[i])
p_chains[i] = ac;
else
p_chains[i].addAll(ac);
}
// empty
if (null == p_chains[i])
p_chains[i] = new ArrayList<Chain>();
}
chains.addAll(p_chains[i]);
// calibrate
final Calibration cal = p[i].getRootLayerSet().getCalibrationCopy();
for (final Chain chain : (ArrayList<Chain>) p_chains[i]) chain.vs.calibrate(cal);
}
final int n_chains = chains.size();
// register all, or relative
if (4 == cp.transform_type) {
// compute global average delta
if (0 == cp.delta) {
for (final Chain chain : chains) {
cp.delta += (chain.vs.getAverageDelta() / n_chains);
}
}
Utils.log2("Using delta: " + cp.delta);
for (final Chain chain : chains) {
// BEFORE making it relative
chain.vs.resample(cp.delta, cp.with_source);
chain.vs.relative();
}
} else {
if (3 == cp.transform_type) {
// '3' means moving least squares computed from 3D landmarks
Utils.log2("Moving Least Squares Registration based on common fiducial points");
// Find fiducial points, if any
final HashMap<Project, Map<String, Tuple3d>> fiducials = new HashMap<Project, Map<String, Tuple3d>>();
for (final Project pr : p) {
final Set<ProjectThing> fids = pr.getRootProjectThing().findChildrenOfTypeR("fiducial_points");
if (null == fids || 0 == fids.size()) {
Utils.log("No fiducial points found in project: " + pr);
} else {
// the first fiducial group
fiducials.put(pr, Compare.extractPoints(fids.iterator().next()));
}
}
if (!fiducials.isEmpty()) {
// Register all VectorString3D relative to the first project:
final List<VectorString3D> lvs = new ArrayList<VectorString3D>();
final Calibration cal2 = p[0].getRootLayerSet().getCalibrationCopy();
for (final Chain chain : chains) {
final Project pr = chain.pipes.get(0).getProject();
// first project is reference, no need to transform.
if (pr == p[0])
continue;
lvs.clear();
lvs.add(chain.vs);
chain.vs = transferVectorStrings(lvs, fiducials.get(pr), fiducials.get(p[0])).get(0);
// Set (but do not apply!) the calibration of the reference project
chain.vs.setCalibration(cal2);
}
}
} else if (cp.transform_type < 3) {
// '0', '1' and '2' involve a 3D affine computed from the 3 axes
// no need //VectorString3D[][] vs_axes = new VectorString3D[p.length][];
Vector3d[][] o = new Vector3d[p.length][];
for (int i = 0; i < p.length; i++) {
// 1 - find pipes to work as axes for each project
final ArrayList<ZDisplayable> pipes = p[i].getRootLayerSet().getZDisplayables(Line3D.class, true);
final String[] pipe_names = new String[pipes.size()];
for (int k = 0; k < pipes.size(); k++) {
pipe_names[k] = p[i].getMeaningfulTitle(pipes.get(k));
}
final int[] s = findFirstXYZAxes(cp.preset, pipes, pipe_names);
// if axes are -1, forget it: not found
if (-1 == s[0] || -1 == s[1] || -1 == s[2]) {
Utils.log("Can't find axes for project " + p[i]);
o = null;
return null;
}
// obtain axes and origin
final Object[] pack = obtainOrigin(new Line3D[] { (Line3D) pipes.get(s[0]), (Line3D) pipes.get(s[1]), (Line3D) pipes.get(s[2]) }, cp.transform_type, // will be null for the first, which will then be non-null and act as the reference for the others.
o[0]);
// no need //vs_axes[i] = (VectorString3D[])pack[0];
o[i] = (Vector3d[]) pack[1];
}
/* // OLD WAY
// match the scales to make the largest be 1.0
final double scaling_factor = VectorString3D.matchOrigins(o, transform_type);
Utils.log2("matchOrigins scaling factor: " + scaling_factor + " for transform_type " + transform_type);
*/
// transform all except the first (which acts as reference)
final Transform3D M_ref = Compare.createTransform(o[0]);
for (int i = 1; i < p.length; i++) {
final Vector3d trans = new Vector3d(-o[i][3].x, -o[i][3].y, -o[i][3].z);
final Transform3D M_query = Compare.createTransform(o[i]);
// The transfer T transform: from query space to reference space.
final Transform3D T = new Transform3D(M_ref);
T.mulInverse(M_query);
for (final Chain chain : (ArrayList<Chain>) p_chains[i]) {
// in place
chain.vs.transform(T);
}
}
}
// compute global average delta, after correcting calibration and transformation
if (0 == cp.delta) {
for (final Chain chain : chains) {
cp.delta += (chain.vs.getAverageDelta() / n_chains);
}
}
Utils.log2("Using delta: " + cp.delta);
// After calibration and transformation, resample all to the same delta
for (final Chain chain : chains) chain.vs.resample(cp.delta, cp.with_source);
}
return new Object[] { chains, p_chains };
}
Use of ini.trakem2.analysis.Compare.CATAParameters in project TrakEM2 by trakem2:
the class Compare, method compareAllToAll.
/**
* Gets pipes for all open projects, and generates a matrix of dissimilarities, which gets passed on to the Worker thread and also to a file, if desired.
*
* @param to_file Whether to save the results to a file and popup a save dialog for it or not. In any case the results are stored in the worker's load, which you can retrieve like:
* <pre>
* Bureaucrat bu = Compare.compareAllToAll(true, null, null);
* Object result = bu.getWorker().getResult();
* float[][] scores = (float[][])result[0];
* ArrayList<Compare.Chain> chains = (ArrayList<Compare.Chain>)result[1];
* </pre>
*/
public static Bureaucrat compareAllToAll(final boolean to_file, final String regex, final String[] ignore, final Project[] projects, final boolean crop, final boolean from_end, final int max_n_elements, final String outgroup) {
// gather all open projects
final Project[] p = null == projects ? Project.getProjects().toArray(new Project[0]) : projects;
final Worker worker = new Worker("Comparing all to all") {
@Override
public void run() {
startedWorking();
try {
final CATAParameters cp = new CATAParameters();
if (!cp.setup(to_file, regex, false, false)) {
finishedWorking();
return;
}
String filename = null, dir = null;
if (to_file) {
final SaveDialog sd = new SaveDialog("Save matrix", OpenDialog.getDefaultDirectory(), null, ".csv");
filename = sd.getFileName();
if (null == filename) {
finishedWorking();
return;
}
dir = sd.getDirectory().replace('\\', '/');
if (!dir.endsWith("/"))
dir += "/";
}
Object[] ob = gatherChains(p, cp, ignore);
final ArrayList<Chain> chains = (ArrayList<Chain>) ob[0];
// to keep track of each project's chains
final ArrayList[] p_chains = (ArrayList[]) ob[1];
ob = null;
if (null == chains) {
finishedWorking();
return;
}
final int n_chains = chains.size();
// crop chains if desired
if (crop) {
for (final Chain chain : chains) {
if (from_end) {
final int start = chain.vs.length() - max_n_elements;
if (start > 0) {
chain.vs = chain.vs.substring(start, chain.vs.length());
// BEFORE making it relative
chain.vs.resample(cp.delta, cp.with_source);
}
} else {
if (max_n_elements < chain.vs.length()) {
chain.vs = chain.vs.substring(0, max_n_elements);
// BEFORE making it relative
chain.vs.resample(cp.delta, cp.with_source);
}
}
}
}
// compare all to all
final VectorString3D[] vs = new VectorString3D[n_chains];
for (int i = 0; i < n_chains; i++) vs[i] = chains.get(i).vs;
final float[][] scores = Compare.scoreAllToAll(vs, cp.distance_type, cp.delta, cp.skip_ends, cp.max_mut, cp.min_chunk, cp.direct, cp.substring_matching, this);
if (null == scores) {
finishedWorking();
return;
}
// store matrix and chains into the worker
this.result = new Object[] { scores, chains };
// write to file
if (!to_file) {
finishedWorking();
return;
}
final File f = new File(dir + filename);
// encoding in Latin 1 (for macosx not to mess around
final OutputStreamWriter dos = new OutputStreamWriter(new BufferedOutputStream(new FileOutputStream(f)), "8859_1");
// Normalize matrix to largest value of 1.0
if (cp.normalize) {
float max = 0;
for (int i = 0; i < scores.length; i++) {
// traverse half matrix ony: it's mirrored
for (int j = i; j < scores[0].length; j++) {
if (scores[i][j] > max)
max = scores[i][j];
}
}
for (int i = 0; i < scores.length; i++) {
for (int j = i; j < scores[0].length; j++) {
scores[i][j] = scores[j][i] /= max;
}
}
}
// write chain titles, with project prefix
if (cp.format.equals(cp.formats[0])) {
// as csv:
try {
final StringBuffer[] titles = new StringBuffer[n_chains];
int next = 0;
for (int i = 0; i < p.length; i++) {
final String prefix = Utils.getCharacter(i + 1);
// empty upper left corner
dos.write("\"\"");
for (final Chain chain : (ArrayList<Chain>) p_chains[i]) {
dos.write(",");
titles[next] = new StringBuffer().append('\"').append(prefix).append(' ').append(chain.getCellTitle()).append('\"');
dos.write(titles[next].toString());
next++;
}
}
dos.write("\n");
for (int i = 0; i < n_chains; i++) {
final StringBuffer line = new StringBuffer();
line.append(titles[i]);
for (int j = 0; j < n_chains; j++) line.append(',').append(scores[i][j]);
line.append('\n');
dos.write(line.toString());
}
dos.flush();
} catch (final Exception e) {
e.printStackTrace();
}
} else if (cp.format.equals(cp.formats[1])) {
// as XML:
try {
final StringBuffer sb = new StringBuffer("<?xml version=\"1.0\"?>\n<!DOCTYPE ggobidata SYSTEM \"ggobi.dtd\">\n");
sb.append("<ggobidata count=\"2\">\n");
sb.append("<data name=\"Pipe Chains\">\n");
sb.append("<description />\n");
// ggobi: what a crappy XML parser it has
sb.append("<variables count=\"0\">\n</variables>\n");
sb.append("<records count=\"").append(chains.size()).append("\" glyph=\"fr 1\" color=\"3\">\n");
int next = 0;
for (int i = 0; i < p.length; i++) {
final String prefix = Utils.getCharacter(i + 1);
final String color = new StringBuffer("color=\"").append(i + 1).append('\"').toString();
for (final Chain chain : (ArrayList<Chain>) p_chains[i]) {
sb.append("<record id=\"").append(next + 1).append("\" label=\"").append(prefix).append(' ').append(chain.getCellTitle()).append("\" ").append(color).append("></record>\n");
next++;
}
}
sb.append("</records>\n</data>\n");
sb.append("<data name=\"distances\">\n");
sb.append("<description />\n");
sb.append("<variables count=\"1\">\n<realvariable name=\"D\" />\n</variables>\n");
sb.append("<records count=\"").append(n_chains * (n_chains - 1)).append("\" glyph=\"fr 1\" color=\"0\">\n");
for (int i = 0; i < n_chains; i++) {
for (int j = 0; j < n_chains; j++) {
if (i == j)
continue;
sb.append("<record source=\"").append(i + 1).append("\" destination=\"").append(j + 1).append("\">").append(scores[i][j]).append("</record>\n");
}
}
sb.append("</records>\n</data>\n");
sb.append("</ggobidata>");
dos.write(sb.toString());
dos.flush();
} catch (final Exception e) {
e.printStackTrace();
}
} else if (cp.format.equals(cp.formats[2])) {
// as Phylip .dis
try {
// collect different projects
final ArrayList<Project> projects = new ArrayList<Project>();
for (final Chain chain : chains) {
final Project p = chain.getRoot().getProject();
if (!projects.contains(p))
projects.add(p);
}
final HashSet names = new HashSet();
final StringBuffer sb = new StringBuffer();
sb.append(scores.length).append('\n');
dos.write(sb.toString());
// unique ids, since phylip cannot handle long names
final AtomicInteger ids = new AtomicInteger(0);
final File ftags = new File(dir + filename + ".tags");
// encoding in Latin 1 (for macosx not to mess around
final OutputStreamWriter dostags = new OutputStreamWriter(new BufferedOutputStream(new FileOutputStream(ftags)), "8859_1");
for (int i = 0; i < scores.length; i++) {
sb.setLength(0);
// String title = chains.get(i).getShortCellTitle().replace(' ', '_').replace('\t', '_').replace('[', '-').replace(']', '-');
final int id = ids.incrementAndGet();
final String sid = Utils.getCharacter(id);
String name = chains.get(i).getShortCellTitle();
// If sid.length() > 10 chars, trouble!
if (sid.length() > 10) {
Utils.log2("Ignoring " + name + " : id longer than 10 chars: " + id);
continue;
}
final int k = 1;
// Prepend a project char identifier to the name
String project_name = "";
if (projects.size() > 1) {
project_name = Utils.getCharacter(projects.indexOf(chains.get(i).getRoot().getProject()) + 1).toLowerCase();
name = project_name + name;
}
dostags.write(new StringBuilder().append(sid).append('\t').append(name).append('\n').toString());
if (null != outgroup && -1 != name.indexOf(outgroup)) {
Utils.logAll("Outgroup 0-based index is " + id + ", with id " + sid + ", with name " + name);
}
//
final int len = 12;
sb.append(sid);
// pad with spaces up to len
for (int j = len - sid.length(); j > 0; j--) sb.append(' ');
int count = 0;
for (int j = 0; j < scores[0].length; j++) {
sb.append(' ').append(scores[i][j]);
count++;
if (7 == count && j < scores[0].length - 1) {
sb.append('\n');
count = 0;
while (++count < len) sb.append(' ');
sb.append(' ');
count = 0;
}
}
sb.append('\n');
dos.write(sb.toString());
}
dos.flush();
dostags.flush();
dostags.close();
} catch (final Exception e) {
e.printStackTrace();
}
}
dos.close();
} catch (final Exception e) {
e.printStackTrace();
} finally {
finishedWorking();
}
}
};
return Bureaucrat.createAndStart(worker, p);
}
Use of ini.trakem2.analysis.Compare.CATAParameters in project TrakEM2 by trakem2:
the class Compare, method reliabilityAnalysis.
public static final Bureaucrat reliabilityAnalysis(final String[] ignore, final boolean output_arff, final boolean weka_classify, final boolean show_dialog, final double delta, final double wi, final double wd, final double wm) {
// gather all open projects
final Project[] p = Project.getProjects().toArray(new Project[0]);
final Worker worker = new Worker("Reliability by name") {
@Override
public void run() {
startedWorking();
try {
final CATAParameters cp = new CATAParameters();
cp.delta = delta;
if (show_dialog && !cp.setup(false, null, false, false)) {
finishedWorking();
return;
}
Object[] ob = gatherChains(p, cp, ignore);
final ArrayList<Chain> chains = (ArrayList<Chain>) ob[0];
// to keep track of each project's chains
final ArrayList[] p_chains = (ArrayList[]) ob[1];
ob = null;
if (null == chains) {
finishedWorking();
return;
}
// For each pipe in a brain:
// - score against all other brains in which that pipe name exists,
// - record the score position within that brain.
//
final ExecutorService exec = Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors());
// for each individual lineage:
final TreeMap<String, ArrayList<Integer>> indices = new TreeMap<String, ArrayList<Integer>>();
final ArrayList<CITuple> cin = new ArrayList<CITuple>();
// for each family:
final TreeMap<String, ArrayList<Integer>> indices_f = new TreeMap<String, ArrayList<Integer>>();
final ArrayList<CITuple> cin_f = new ArrayList<CITuple>();
final ArrayList<Future> fus = new ArrayList<Future>();
// For neural network analysis:
final StringBuilder arff = output_arff ? new StringBuilder("@RELATION Lineages\n\n") : null;
if (output_arff) {
arff.append("@ATTRIBUTE APD NUMERIC\n");
arff.append("@ATTRIBUTE CPD NUMERIC\n");
arff.append("@ATTRIBUTE STD NUMERIC\n");
arff.append("@ATTRIBUTE MPD NUMERIC\n");
arff.append("@ATTRIBUTE PM NUMERIC\n");
arff.append("@ATTRIBUTE LEV NUMERIC\n");
arff.append("@ATTRIBUTE SIM NUMERIC\n");
arff.append("@ATTRIBUTE PRX NUMERIC\n");
arff.append("@ATTRIBUTE PRM NUMERIC\n");
// length ratio: len(query) / len(ref)
arff.append("@ATTRIBUTE LR NUMERIC\n");
arff.append("@ATTRIBUTE TR NUMERIC\n");
arff.append("@ATTRIBUTE CLASS {false,true}\n");
arff.append("\n@DATA\n");
}
// Count number of times when decision tree says it's good, versus number of times when it should be good
// observed
final AtomicInteger obs_good = new AtomicInteger(0);
// observed wrong
final AtomicInteger obs_wrong = new AtomicInteger(0);
// expected
final AtomicInteger exp_good = new AtomicInteger(0);
final AtomicInteger exp_bad = new AtomicInteger(0);
final AtomicInteger obs_bad_classified_good_ones = new AtomicInteger(0);
final AtomicInteger obs_well_classified_bad_ones = new AtomicInteger(0);
// inc by one when a lineage to compare is not found at all in the brain that works as reference
final AtomicInteger not_found = new AtomicInteger(0);
final AtomicInteger already_classified = new AtomicInteger(0);
Method classify_ = null;
if (weka_classify) {
try {
classify_ = Class.forName("lineage.LineageClassifier").getDeclaredMethod("classify", new Class[] { double[].class });
} catch (final Exception e) {
IJError.print(e);
}
}
final Method classify = classify_;
// All possible pairs of projects, with repetition (it's not the same, although the pipe pairwise comparison itself will be.)
for (int _i = 0; _i < p_chains.length; _i++) {
final int i = _i;
Utils.log2("Project " + p[i] + " has " + p_chains[i].size() + " chains.");
for (int _j = 0; _j < p_chains.length; _j++) {
final int j = _j;
// skip same project (would have a score of zero, identical.)
if (i == j)
continue;
final String[] titles_j = new String[p_chains[j].size()];
int next = 0;
for (final Chain cj : (ArrayList<Chain>) p_chains[j]) {
final String t = cj.getCellTitle();
titles_j[next++] = t.substring(0, t.indexOf(' '));
}
// families:
final TreeSet<String> ts_families = new TreeSet<String>();
for (int f = 0; f < titles_j.length; f++) {
// extract family name from title: read the first continuous string of capital letters
final String title = titles_j[f];
int u = 0;
for (; u < title.length(); u++) {
if (!Character.isUpperCase(title.charAt(u)))
break;
}
ts_families.add(title.substring(0, u));
}
final ArrayList<String> families = new ArrayList<String>(ts_families);
// One asynchronous job per ordered project pair (i,j): match every chain of
// project i against all chains of project j, sort the matches by score, and
// record the rank of the exact-title match (into 'indices'/'cin') and of the
// best same-family match (into 'indices_f'/'cin_f'). Optionally evaluates the
// WEKA decision tree and collects ARFF training rows.
fus.add(exec.submit(new Callable<Object>() {

    @Override
    public Object call() {
        // All chains of one project to all chains of the other:
        for (final Chain chain : (ArrayList<Chain>) p_chains[i]) {
            final VectorString3D vs1 = chain.vs;
            // Prepare title: the lineage name is the cell title up to the first space.
            String title = chain.getCellTitle();
            title = title.substring(0, title.indexOf(' '));
            // check if the other project j contains a chain of name chain.getCellTitle() up to the space.
            int title_index = -1;
            for (int k = 0; k < titles_j.length; k++) {
                if (title.equals(titles_j[k])) {
                    title_index = k;
                    break;
                }
            }
            if (-1 == title_index) {
                Utils.log2(title + " not found in project " + p[j]);
                if (weka_classify)
                    not_found.incrementAndGet();
                continue;
            }
            // should be there: exactly one homonymous (good) match expected,
            // every other candidate counts as an expected bad match.
            if (weka_classify) {
                exp_good.incrementAndGet();
                exp_bad.addAndGet(titles_j.length - 1);
            }
            final ArrayList<ChainMatch> list = new ArrayList<ChainMatch>();
            // extract family name from title: read the first continuous string of capital letters
            int u = 0;
            for (; u < title.length(); u++) {
                if (!Character.isUpperCase(title.charAt(u)))
                    break;
            }
            final String family_name = title.substring(0, u);
            String last_classify = null;
            int g = 0;
            for (final Chain cj : (ArrayList<Chain>) p_chains[j]) {
                final VectorString3D vs2 = cj.vs;
                final Object[] ob = findBestMatch(vs1, vs2, cp.delta, cp.skip_ends, cp.max_mut, cp.min_chunk, cp.distance_type, cp.direct, cp.substring_matching, wi, wd, wm);
                final Editions ed = (Editions) ob[0];
                final double[] stats = ed.getStatistics(cp.skip_ends, cp.max_mut, cp.min_chunk, cp.score_mut_only);
                final ChainMatch cm = new ChainMatch(cj, null, ed, stats, score(ed.getSimilarity(), ed.getDistance(), stats[3], Compare.W));
                cm.title = titles_j[g];
                list.add(cm);
                g++;
                if (weka_classify) {
                    // from decision tree: is it good?
                    // NOTE(review): loop variable renamed from 'p' (which shadowed
                    // the captured Project[] p) and bounded by param.length so a
                    // stats array longer than 11 cannot overflow the copy.
                    final double[] param = new double[11];
                    for (int s = 0; s < stats.length && s < param.length; s++) param[s] = stats[s];
                    try {
                        if (((Boolean) classify.invoke(null, param)).booleanValue()) {
                            if (null != last_classify) {
                                Utils.log2("ALREADY CLASSIFIED " + title + " as " + last_classify + " (now: " + cm.title + " )");
                                already_classified.incrementAndGet();
                            }
                            last_classify = cm.title;
                            if (title.equals(cm.title)) {
                                obs_good.incrementAndGet();
                            } else {
                                Utils.log2("WRONG CLASSIFICATION of " + title + " as " + cm.title);
                                obs_wrong.incrementAndGet();
                            }
                        } else {
                            if (title.equals(cm.title)) {
                                obs_bad_classified_good_ones.incrementAndGet();
                            } else {
                                obs_well_classified_bad_ones.incrementAndGet();
                            }
                        }
                    } catch (final Exception ee) {
                        IJError.print(ee);
                    }
                }
            }
            // sort scores:
            Compare.sortMatches(list, cp.distance_type, cp.distance_type_2, cp.min_matches);
            if (output_arff) {
                // Take the top 8 (or fewer, when the list is shorter — previously
                // this threw IndexOutOfBoundsException for projects with fewer
                // than 8 chains) and put them into the WEKA training set in arff format.
                for (int h = 0, hmax = Math.min(8, list.size()); h < hmax; h++) {
                    final ChainMatch cm = list.get(h);
                    final StringBuilder sb = new StringBuilder();
                    sb.append(cm.phys_dist).append(',').append(cm.cum_phys_dist).append(',').append(cm.stdDev).append(',').append(cm.median).append(',').append(cm.prop_mut).append(',').append(cm.ed.getDistance()).append(',').append(cm.seq_sim).append(',').append(cm.proximity).append(',').append(cm.proximity_mut).append(',').append(cm.prop_len).append(',').append(cm.tortuosity_ratio).append(',').append(title.equals(cm.title)).append('\n');
                    synchronized (arff) {
                        arff.append(sb);
                    }
                }
            }
            // record scoring index: walk the sorted list once, noting the rank of
            // the exact match and of the first same-family match.
            int f = 0;
            boolean found_specific = false;
            boolean found_family = false;
            for (final ChainMatch cm : list) {
                // Exact match: for each individual lineage
                if (!found_specific && title.equals(cm.title)) {
                    synchronized (indices) {
                        ArrayList<Integer> al = indices.get(title);
                        if (null == al) {
                            al = new ArrayList<Integer>();
                            indices.put(title, al);
                            // so I can keep a list of chains sorted by name
                            cin.add(new CITuple(title, chain, al));
                        }
                        al.add(f);
                    }
                    found_specific = true;
                }
                if (!found_family && cm.title.startsWith(family_name)) {
                    synchronized (indices_f) {
                        ArrayList<Integer> al = indices_f.get(family_name);
                        if (null == al) {
                            al = new ArrayList<Integer>();
                            indices_f.put(family_name, al);
                            cin_f.add(new CITuple(family_name, chain, al));
                        }
                        al.add(f);
                    }
                    found_family = true;
                }
                if (found_specific && found_family) {
                    break;
                }
                //
                f++;
            }
            if (!found_specific) {
                Utils.log2("NOT FOUND any match for " + title + " within a list of size " + list.size() + ", in project " + chain.getRoot().getProject());
            }
        }
        return null;
    }
}));
}
}
for (final Future fu : fus) {
try {
fu.get();
} catch (final Exception e) {
IJError.print(e);
}
}
exec.shutdownNow();
if (weka_classify) {
// so stateful ... it's a sin.
try {
Class.forName("lineage.LineageClassifier").getDeclaredMethod("flush", new Class[] {}).invoke(null, new Object[] {});
} catch (final Exception e) {
IJError.print(e);
}
}
// export ARFF for neural network training
if (output_arff) {
Utils.saveToFile(new File(System.getProperty("user.dir") + "/lineages.arff"), arff.toString());
}
// Show the results from indices map
final StringBuilder sb = new StringBuilder();
// scoring index vs count of occurrences
final TreeMap<Integer, Integer> sum = new TreeMap<Integer, Integer>();
// best scoring index of best family member vs count of ocurrences
final TreeMap<Integer, Integer> sum_f = new TreeMap<Integer, Integer>();
// scoring index vs count of ocurrences, within each family
final TreeMap<String, TreeMap<Integer, Integer>> sum_fw = new TreeMap<String, TreeMap<Integer, Integer>>();
// From collected data, several kinds of results:
// - a list of how well each chain scores: its index position in the sorted list of scores of one to many.
// - a list of how well each chain scores relative to family: the lowest (best) index position of a lineage of the same family in the sorted list of scores.
sb.append("List of scoring indices for each (starting at index 1, aka best possible score):\n");
for (final CITuple ci : cin) {
// sort indices in place
Collections.sort(ci.list);
// count occurrences of each scoring index
// lowest possible index
int last = 0;
int count = 1;
for (final int i : ci.list) {
if (last == i)
count++;
else {
sb.append(ci.title).append(' ').append(last + 1).append(' ').append(count).append('\n');
// reset
last = i;
count = 1;
}
// global count of occurrences
final Integer oi = new Integer(i);
sum.put(oi, (sum.containsKey(oi) ? sum.get(oi) : 0) + 1);
// Same thing but not for all lineages, but only for lineages within a family:
// extract family name from title: read the first continuous string of capital letters
int u = 0;
for (; u < ci.title.length(); u++) {
if (!Character.isUpperCase(ci.title.charAt(u)))
break;
}
final String family_name = ci.title.substring(0, u);
TreeMap<Integer, Integer> sfw = sum_fw.get(family_name);
if (null == sfw) {
sfw = new TreeMap<Integer, Integer>();
sum_fw.put(family_name, sfw);
}
sfw.put(oi, (sfw.containsKey(oi) ? sfw.get(oi) : 0) + 1);
}
if (0 != count)
sb.append(ci.title).append(' ').append(last + 1).append(' ').append(count).append('\n');
// find the very-off ones:
if (last > 6) {
Utils.log2("BAD index " + last + " for chain " + ci.title + " " + ci.chain.getRoot() + " of project " + ci.chain.getRoot().getProject());
}
}
sb.append("===============================\n");
// / family score:
for (final CITuple ci : cin_f) {
// sort indices in place
Collections.sort(ci.list);
// count occurrences of each scoring index
// lowest possible index
int last = 0;
int count = 1;
for (final int i : ci.list) {
if (last == i)
count++;
else {
// reset
last = i;
count = 1;
}
// global count of occurrences
final Integer oi = new Integer(i);
sum_f.put(oi, (sum_f.containsKey(oi) ? sum_f.get(oi) : 0) + 1);
}
}
sb.append("===============================\n");
// - a summarizing histogram that collects how many 1st, how many 2nd, etc. in total, normalized to total number of one-to-many matches performed (i.e. the number of scoring indices recorded.)
//
{
sb.append("Global count of index ocurrences:\n");
int total = 0;
int top2 = 0;
int top5 = 0;
for (final Map.Entry<Integer, Integer> e : sum.entrySet()) {
sb.append(e.getKey()).append(' ').append(e.getValue()).append('\n');
total += e.getValue();
if (e.getKey() < 2)
top2 += e.getValue();
if (e.getKey() < 5)
top5 += e.getValue();
}
sb.append("total: ").append(total).append('\n');
sb.append("top1: ").append(sum.get(sum.firstKey()) / (float) total).append('\n');
sb.append("top2: ").append(top2 / (float) total).append('\n');
sb.append("top5: ").append(top5 / (float) total).append('\n');
sb.append("===============================\n");
}
sb.append("Family-wise count of index ocurrences:\n");
for (final Map.Entry<String, TreeMap<Integer, Integer>> fe : sum_fw.entrySet()) {
int total = 0;
int top5 = 0;
for (final Map.Entry<Integer, Integer> e : fe.getValue().entrySet()) {
sb.append(fe.getKey()).append(' ').append(e.getKey()).append(' ').append(e.getValue()).append('\n');
total += e.getValue();
if (e.getKey() < 5)
top5 += e.getValue();
}
sb.append("total: ").append(total).append('\n');
sb.append("top1: ").append(fe.getValue().get(fe.getValue().firstKey()) / (float) total).append('\n');
sb.append("top5: ").append(top5 / (float) total).append('\n');
}
sb.append("===============================\n");
// - the percent of first score being the correct one:
double first = 0;
double first_5 = 0;
double all = 0;
for (final Map.Entry<Integer, Integer> e : sum.entrySet()) {
final int k = e.getKey();
final int a = e.getValue();
all += a;
if (0 == k)
first = a;
if (k < 5)
first_5 += a;
}
// STORE
this.result = new double[] { // Top one ratio
first / all, // Top 5 ratio
first_5 / all };
sb.append("Global count of index occurrences family-wise:\n");
for (final Map.Entry<Integer, Integer> e : sum_f.entrySet()) {
sb.append(e.getKey()).append(' ').append(e.getValue()).append('\n');
}
sb.append("===============================\n");
// - a summarizing histogram of how well each chain scores (4/4, 3/4, 2/4, 1/4, 0/4 only for those that have 4 homologous members.)
// Must consider that there are 5 projects taken in pairs with repetition.
sb.append("A summarizing histogram of how well each chain scores, for those that have 4 homologous members. It's the number of 1st scores (zeroes) versus the total number of scores:\n");
// First, classify them in having 4, 3, 2, 1
// For 5 brains: 5! / (5-2)! = 5 * 4 = 20 --- 5 elements taken in groups of 2, where order matters
// For 4 brains: 4! / (4-2)! = 4 * 3 = 12
// For 3 brains: 3! / (3-2)! = 3 * 2 = 6;
final TreeMap<Integer, ArrayList<String>> hsc = new TreeMap<Integer, ArrayList<String>>();
for (final CITuple ci : cin) {
final int size = ci.list.size();
ArrayList<String> al = hsc.get(size);
if (null == al) {
al = new ArrayList<String>();
hsc.put(size, al);
}
// Count the number of 0s -- top scoring
int count = 0;
for (final Integer i : ci.list) {
if (0 == i)
count++;
else
break;
}
al.add(new StringBuffer(ci.title).append(" =").append(count).append('/').append(ci.list.size()).append('\n').toString());
}
// Then just print:
for (final Map.Entry<Integer, ArrayList<String>> e : hsc.entrySet()) {
sb.append("For ").append(e.getKey()).append(" matches:\n");
for (final String s : e.getValue()) sb.append(s);
}
sb.append("=========================\n");
// Family-wise, count the number of zeros per family:
sb.append("Number of top scoring per family:\n");
final TreeMap<String, String> family_scores = new TreeMap<String, String>();
for (final CITuple ci : cin_f) {
int count = 0;
for (final Integer i : ci.list) {
if (0 == i)
count++;
else
// ci.list is sorted
break;
}
family_scores.put(ci.title, new StringBuilder().append(ci.title).append(" =").append(count).append('/').append(ci.list.size()).append('\n').toString());
}
// Now print sorted by family name:
for (final String s : family_scores.values()) {
sb.append(s);
}
sb.append("=========================\n");
if (weka_classify) {
sb.append("Decision tree:\n");
sb.append("Expected good matches: " + exp_good.get() + "\n");
sb.append("Expected bad matches: " + exp_bad.get() + "\n");
sb.append("Observed good matches: " + obs_good.get() + "\n");
sb.append("Observed bad matches: " + obs_wrong.get() + "\n");
sb.append("Observed well classified bad ones: " + obs_well_classified_bad_ones.get() + "\n");
sb.append("Observed bad classified good ones: " + obs_bad_classified_good_ones.get() + "\n");
sb.append("Not found, so skipped: " + not_found.get() + "\n");
sb.append("Already classified: " + already_classified.get() + "\n");
sb.append("=========================\n");
}
if (output_arff) {
Utils.log(sb.toString());
} else {
Utils.log2(sb.toString());
}
} catch (final Exception e) {
e.printStackTrace();
} finally {
finishedWorking();
}
}
};
return Bureaucrat.createAndStart(worker, p);
}
Use of ini.trakem2.analysis.Compare.CATAParameters in project TrakEM2 (by trakem2):
the class Compare, method condense.
/**
 * Do an all-to-all distance matrix of the given vs, then do a neighbor joining, do a weighted merge of the two VectorString3D being merged, and then finally output the resulting condensed unique VectorString3D with its source array full with all points that make each point in it. Expects VectorString3D which are already calibrated and transformed.
 *
 * @param cp Parameters used: delta (estimated from the data when 0), distance_type, skip_ends, max_mut, min_chunk, direct, substring_matching and cut_uneven_ends. NOTE: cp.delta is written back when estimated.
 * @param vs The calibrated, transformed vector strings to condense; each is resampled in place to cp.delta.
 * @param worker May be null; when non-null and quitted, the computation aborts and null is returned.
 * @return The single condensed VectorString3D; null for empty input, on error, or on cancellation.
 * @throws Exception from resampling or matching.
 */
public static VectorString3D condense(final CATAParameters cp, final VectorString3D[] vs, final Worker worker) throws Exception {
    // Trivial case 0: nothing to condense (previously fell through to a
    // division by zero in the delta estimate and a NoSuchElementException at the end).
    if (0 == vs.length)
        return null;
    // Trivial case 1: a single string condenses to itself.
    if (1 == vs.length)
        return vs[0];
    // Estimate delta as the mean of the average deltas when none was given:
    if (0 == cp.delta) {
        for (int i = 0; i < vs.length; i++) {
            cp.delta += vs[i].getAverageDelta();
        }
        cp.delta /= vs.length;
    }
    // Resample all to the same delta so point counts are comparable:
    for (int i = 0; i < vs.length; i++) vs[i].resample(cp.delta, true);
    // Trivial case 2: two strings condense to their half-way interpolation.
    // (Fix: the interpolated result used to be computed and discarded, falling
    // through to the full neighbor joining below; the try/catch that returns
    // null on failure only makes sense if this value is the result.)
    try {
        if (2 == vs.length)
            return VectorString3D.createInterpolatedPoints(new Editions(vs[0], vs[1], cp.delta, false), 0.5f);
    } catch (final Exception e) {
        IJError.print(e);
        return null;
    }
    // Else, do neighbor joining over the all-to-all score matrix
    final float[][] scores = Compare.scoreAllToAll(vs, cp.distance_type, cp.delta, cp.skip_ends, cp.max_mut, cp.min_chunk, cp.direct, cp.substring_matching, worker);
    final HashMap<Compare.Cell<VectorString3D>, Float> table = new HashMap<Compare.Cell<VectorString3D>, Float>();
    // Input the half matrix only into the table, since it's mirrored. And without the diagonal of zeros:
    for (int i = 1; i < scores.length; i++) {
        for (int j = 0; j < i; j++) {
            table.put(new Cell<VectorString3D>(vs[i], vs[j]), scores[i][j]);
        }
    }
    final HashSet<VectorString3D> remaining = new HashSet<VectorString3D>();
    for (final VectorString3D v : vs) remaining.add(v);
    while (table.size() > 0) {
        if (null != worker && worker.hasQuitted()) {
            return null;
        }
        // find the pair with the smallest (best) score
        float min = Float.MAX_VALUE;
        Cell<VectorString3D> cell = null;
        for (final Map.Entry<Cell<VectorString3D>, Float> e : table.entrySet()) {
            final float f = e.getValue();
            if (f < min) {
                min = f;
                cell = e.getKey();
            }
        }
        // Drop every cell that involves either member of the winning pair
        // (this also removes the winning cell itself):
        for (final Iterator<Cell<VectorString3D>> it = table.keySet().iterator(); it.hasNext(); ) {
            final Cell<VectorString3D> c = it.next();
            if (c.t1 == cell.t1 || c.t2 == cell.t2 || c.t2 == cell.t1 || c.t1 == cell.t2) {
                it.remove();
            }
        }
        // pop the two merged VectorString3D
        remaining.remove(cell.t1);
        remaining.remove(cell.t2);
        // merge, weighted by number of sources of each
        // in createInterpolated, the alpha is the opposite of what one would think: a 0.2 alpha means 0.8 for the first and 0.2 for the second. So alpha should be 1-alpha
        final double alpha = (double) (cell.t1.getNSources()) / (double) (cell.t1.getNSources() + cell.t2.getNSources());
        final Editions eds = new Editions(cell.t1, cell.t2, cp.delta, false);
        VectorString3D vs_merged = null;
        if (cp.cut_uneven_ends) {
            // crop ends to eliminate strings of insertions or deletions sparsed by strings of max cp.max_mut mutations inside
            // (This reduces or eliminates variability noise caused by unequal sequence length)
            final int[][] editions = eds.getEditions();
            int first = 0;
            int last = editions.length - 1;
            int n_mut = 0;
            // scan from the head for the first run exceeding cp.max_mut mutations
            for (int i = 0; i < last; i++) {
                if (Editions.MUTATION == editions[i][0]) {
                    n_mut++;
                    if (n_mut > cp.max_mut) {
                        first = i - n_mut + 1;
                        break;
                    }
                }
            }
            // reset and scan from the tail for the symmetric cut point
            n_mut = 0;
            for (int i = last; i > first; i--) {
                if (Editions.MUTATION == editions[i][0]) {
                    n_mut++;
                    if (n_mut > cp.max_mut) {
                        last = i + n_mut - 1;
                        break;
                    }
                }
            }
            vs_merged = VectorString3D.createInterpolatedPoints(eds, alpha, first, last);
        } else {
            vs_merged = VectorString3D.createInterpolatedPoints(eds, alpha);
        }
        vs_merged.resample(cp.delta, true);
        // add a new cell for each possible comparison with all other unique vs
        for (final VectorString3D v : remaining) {
            final Object[] ob = findBestMatch(vs_merged, v, cp.delta, cp.skip_ends, cp.max_mut, cp.min_chunk, cp.distance_type, cp.direct, cp.substring_matching);
            final Editions ed = (Editions) ob[0];
            final float score = (float) getScore(ed, cp.skip_ends, cp.max_mut, cp.min_chunk, cp.distance_type);
            table.put(new Cell<VectorString3D>(vs_merged, v), score);
        }
        // add the new VectorString3D
        remaining.add(vs_merged);
    }
    // Sanity check: neighbor joining must reduce everything to a single string.
    if (1 != remaining.size()) {
        Utils.log2("WARNING: remaining.size() == " + remaining.size());
    }
    return remaining.iterator().next();
}
Aggregations