
Example 16 with Worker

use of ini.trakem2.utils.Worker in project TrakEM2 by trakem2.

From the class Compare, the method scoreAllToAll:

/**
 * Returns the half matrix of scores, with values copied from one half matrix to the other, and a diagonal of zeros.
 * @param distance_type ranges from 0 to 5: 0=Levenshtein, 1=Dissimilarity, 2=Average physical distance, 3=Median physical distance, 4=Cumulative physical distance, 5=Standard deviation.
 */
public static float[][] scoreAllToAll(final VectorString3D[] vs, final int distance_type, final double delta, final boolean skip_ends, final int max_mut, final float min_chunk, final boolean direct, final boolean substring_matching, final Worker worker) {
    final float[][] scores = new float[vs.length][vs.length];
    final AtomicInteger ai = new AtomicInteger(0);
    final Thread[] threads = MultiThreading.newThreads();
    for (int ithread = 0; ithread < threads.length; ithread++) {
        threads[ithread] = new Thread() {

            @Override
            public void run() {
                for (int i = ai.getAndIncrement(); i < vs.length; i = ai.getAndIncrement()) {
                    final VectorString3D vs1 = vs[i];
                    for (int j = i + 1; j < vs.length; j++) {
                        if (null != worker && worker.hasQuitted())
                            return;
                        // TODO should add 'distance_type' as well for the selection of the best match when not direct.
                        final Object[] ob = findBestMatch(vs[i], vs[j], delta, skip_ends, max_mut, min_chunk, distance_type, direct, substring_matching);
                        /*
                        switch (distance_type) {
                            case 0: // Levenshtein
                                scores[i][j] = (float)((Editions)ob[0]).getDistance();
                                break;
                            case 1: // dissimilarity
                                scores[i][j] = (float)((Double)ob[1]).doubleValue();
                                break;
                            case 2: // average physical distance between mutation pairs
                                scores[i][j] = (float)((Editions)ob[0]).getPhysicalDistance(skip_ends, max_mut, min_chunk, true);
                                break;
                            case 3: // median physical distance between mutation pairs
                                scores[i][j] = (float)((Editions)ob[0]).getStatistics(skip_ends, max_mut, min_chunk, false)[3]; // 3 is median
                                break;
                            case 4: // cumulative physical distance between mutation pairs
                                scores[i][j] = (float)((Editions)ob[0]).getPhysicalDistance(skip_ends, max_mut, min_chunk, false);
                                break;
                            case 5: // stdDev of distances between mutation pairs
                                scores[i][j] = (float)((Editions)ob[0]).getStdDev(skip_ends, max_mut, min_chunk);
                                break;
                        }
                        */
                        final Editions ed = (Editions) ob[0];
                        scores[i][j] = (float) getScore(ed, skip_ends, max_mut, min_chunk, distance_type);
                        // mirror value
                        scores[j][i] = scores[i][j];
                    }
                }
            // //
            }
        };
    }
    MultiThreading.startAndJoin(threads);
    if (null != worker && worker.hasQuitted())
        return null;
    return scores;
}
Also used : Editions(ini.trakem2.vector.Editions) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) VectorString3D(ini.trakem2.vector.VectorString3D)
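
A minimal usage sketch (hypothetical; the parameter values are illustrative assumptions, and the VectorString3D chains are assumed to be already calibrated and resampled) showing how scoreAllToAll can be called directly, with a null Worker so that cancellation is simply never requested:

import ini.trakem2.analysis.Compare;
import ini.trakem2.vector.VectorString3D;

public class ScoreMatrixSketch {
    /** Builds the mirrored dissimilarity matrix (distance_type 1) for the given chains. */
    public static float[][] dissimilarityMatrix(final VectorString3D[] chains, final double delta) {
        return Compare.scoreAllToAll(chains,
                1,      // distance_type: 1 = dissimilarity, per the javadoc above
                delta,
                false,  // skip_ends (assumed value, for illustration only)
                5,      // max_mut (assumed)
                0.5f,   // min_chunk (assumed)
                true,   // direct (assumed)
                false,  // substring_matching (assumed)
                null);  // worker: null is explicitly tolerated by the null-checks in the method
    }
}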

Example 17 with Worker

use of ini.trakem2.utils.Worker in project TrakEM2 by trakem2.

From the class Project, the method adjustProperties:

public void adjustProperties() {
    // should be more generic, but for now it'll do
    GenericDialog gd = new GenericDialog("Properties");
    gd.addMessage("Ignore image linking for:");
    boolean link_labels = addBox(gd, DLabel.class);
    boolean nolink_segmentations = "true".equals(ht_props.get("segmentations_nolinks"));
    gd.addCheckbox("Segmentations", nolink_segmentations);
    gd.addMessage("Currently linked objects will remain so\nunless explicitly unlinked.");
    boolean dissector_zoom = "true".equals(ht_props.get("dissector_zoom"));
    gd.addCheckbox("Zoom-invariant markers for Dissector", dissector_zoom);
    gd.addChoice("Image_resizing_mode: ", Loader.MIPMAP_MODES.values().toArray(new String[Loader.MIPMAP_MODES.size()]), Loader.getMipMapModeName(mipmaps_mode));
    gd.addChoice("mipmaps format:", FSLoader.MIPMAP_FORMATS, FSLoader.MIPMAP_FORMATS[loader.getMipMapFormat()]);
    gd.addNumericField("Save mipmap images from level", this.first_mipmap_level_saved, 0);
    boolean layer_mipmaps = "true".equals(ht_props.get("layer_mipmaps"));
    gd.addCheckbox("Layer_mipmaps", layer_mipmaps);
    boolean keep_mipmaps = "true".equals(ht_props.get("keep_mipmaps"));
    // coping with the fact that there is no Action context ... there should be one in the Worker thread.
    gd.addCheckbox("Keep_mipmaps_when_deleting_images", keep_mipmaps);
    int bucket_side = (int) getProperty("bucket_side", Bucket.MIN_BUCKET_SIZE);
    gd.addNumericField("Bucket side length: ", bucket_side, 0, 6, "pixels");
    boolean no_shutdown_hook = "true".equals(ht_props.get("no_shutdown_hook"));
    gd.addCheckbox("No_shutdown_hook to save the project", no_shutdown_hook);
    int n_undo_steps = getProperty("n_undo_steps", 32);
    gd.addSlider("Undo steps", 32, 200, n_undo_steps);
    boolean flood_fill_to_image_edge = "true".equals(ht_props.get("flood_fill_to_image_edge"));
    gd.addCheckbox("AreaList_flood_fill_to_image_edges", flood_fill_to_image_edge);
    int look_ahead_cache = (int) getProperty("look_ahead_cache", 0);
    gd.addNumericField("Look_ahead_cache:", look_ahead_cache, 0, 6, "layers");
    // default: every 10 minutes
    int autosaving_interval = getProperty("autosaving_interval", 10);
    gd.addNumericField("Autosave every:", autosaving_interval, 0, 6, "minutes");
    int n_mipmap_threads = getProperty("n_mipmap_threads", 1);
    gd.addSlider("Number of threads for mipmaps", 1, n_mipmap_threads, n_mipmap_threads);
    int meshResolution = getProperty("mesh_resolution", 32);
    gd.addSlider("Default mesh resolution for images", 1, 512, meshResolution);
    // 
    gd.showDialog();
    // 
    if (gd.wasCanceled())
        return;
    setLinkProp(link_labels, gd.getNextBoolean(), DLabel.class);
    boolean nolink_segmentations2 = gd.getNextBoolean();
    if (nolink_segmentations) {
        if (!nolink_segmentations2)
            ht_props.remove("segmentations_nolinks");
    } else if (nolink_segmentations2)
        ht_props.put("segmentations_nolinks", "true");
    if (adjustProp("dissector_zoom", dissector_zoom, gd.getNextBoolean())) {
        // TODO: should repaint nested LayerSets as well
        Display.repaint(layer_set);
    }
    this.mipmaps_mode = Loader.getMipMapModeIndex(gd.getNextChoice());
    final int new_mipmap_format = gd.getNextChoiceIndex();
    final int old_mipmap_format = loader.getMipMapFormat();
    if (new_mipmap_format != old_mipmap_format) {
        YesNoDialog yn = new YesNoDialog("MipMaps format", "Changing mipmaps format to '" + FSLoader.MIPMAP_FORMATS[new_mipmap_format] + "' requires regenerating all mipmaps. Proceed?");
        if (yn.yesPressed()) {
            if (loader.setMipMapFormat(new_mipmap_format)) {
                loader.updateMipMapsFormat(old_mipmap_format, new_mipmap_format);
            }
        }
    }
    setFirstMipMapLevelSaved(gd.getNextNumber());
    boolean layer_mipmaps2 = gd.getNextBoolean();
    if (adjustProp("layer_mipmaps", layer_mipmaps, layer_mipmaps2)) {
        if (layer_mipmaps && !layer_mipmaps2) {
        // TODO
        // 1 - ask first
        // 2 - remove all existing images from layer.mipmaps folder
        } else if (!layer_mipmaps && layer_mipmaps2) {
        // TODO
        // 1 - ask first
        // 2 - create de novo all layer mipmaps in a background task
        }
    }
    adjustProp("keep_mipmaps", keep_mipmaps, gd.getNextBoolean());
    Utils.log2("keep_mipmaps: " + getBooleanProperty("keep_mipmaps"));
    // 
    bucket_side = (int) gd.getNextNumber();
    if (bucket_side > Bucket.MIN_BUCKET_SIZE) {
        setProperty("bucket_side", Integer.toString(bucket_side));
        layer_set.recreateBuckets(true);
    }
    adjustProp("no_shutdown_hook", no_shutdown_hook, gd.getNextBoolean());
    n_undo_steps = (int) gd.getNextNumber();
    if (n_undo_steps < 0)
        n_undo_steps = 0;
    setProperty("n_undo_steps", Integer.toString(n_undo_steps));
    adjustProp("flood_fill_to_image_edge", flood_fill_to_image_edge, gd.getNextBoolean());
    double d_look_ahead_cache = gd.getNextNumber();
    if (!Double.isNaN(d_look_ahead_cache) && d_look_ahead_cache >= 0) {
        setProperty("look_ahead_cache", Integer.toString((int) d_look_ahead_cache));
        if (0 == d_look_ahead_cache) {
            Display.clearColumnScreenshots(this.layer_set);
        } else {
            Utils.logAll("WARNING: look-ahead cache is incomplete.\n  Expect issues when editing objects, adding new ones, and the like.\n  Use \"Project - Flush image cache\" to fix any lack of refreshing issues you encounter.");
        }
    } else {
        Utils.log2("Ignoring invalid 'look ahead cache' value " + d_look_ahead_cache);
    }
    double autosaving_interval2 = gd.getNextNumber();
    if (((int) (autosaving_interval2)) == autosaving_interval) {
    // do nothing
    } else if (autosaving_interval2 < 0 || Double.isNaN(autosaving_interval2)) {
        Utils.log("IGNORING invalid autosaving interval: " + autosaving_interval2);
    } else {
        setProperty("autosaving_interval", Integer.toString((int) autosaving_interval2));
        restartAutosaving();
    }
    int n_mipmap_threads2 = (int) Math.max(1, gd.getNextNumber());
    if (n_mipmap_threads != n_mipmap_threads2) {
        setProperty("n_mipmap_threads", Integer.toString(n_mipmap_threads2));
        // WARNING: this does it for a static service, affecting all projects!
        FSLoader.restartMipMapThreads(n_mipmap_threads2);
    }
    int meshResolution2 = (int) gd.getNextNumber();
    if (meshResolution != meshResolution2) {
        if (meshResolution2 > 0) {
            setProperty("mesh_resolution", Integer.toString(meshResolution2));
        } else {
            Utils.log("WARNING: ignoring invalid mesh resolution value " + meshResolution2);
        }
    }
}
Also used : GenericDialog(ij.gui.GenericDialog) YesNoDialog(ini.trakem2.display.YesNoDialog)
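
The flags above follow one recurring pattern: a boolean lives in the properties table as the string "true", is shown as a GenericDialog checkbox, and is written back only when it changed. A self-contained sketch of that round-trip (class and map names are hypothetical, standing in for the project's ht_props and adjustProp):

import ij.gui.GenericDialog;
import java.util.HashMap;

public class BooleanPropertySketch {
    public static void main(final String[] args) {
        final HashMap<String, String> props = new HashMap<String, String>(); // stands in for ht_props
        final boolean keep_mipmaps = "true".equals(props.get("keep_mipmaps"));
        final GenericDialog gd = new GenericDialog("Properties");
        gd.addCheckbox("Keep_mipmaps_when_deleting_images", keep_mipmaps);
        gd.showDialog();
        if (gd.wasCanceled()) return;
        final boolean keep_mipmaps2 = gd.getNextBoolean();
        if (keep_mipmaps != keep_mipmaps2) { // mirrors what adjustProp is expected to do
            if (keep_mipmaps2) props.put("keep_mipmaps", "true");
            else props.remove("keep_mipmaps");
        }
    }
}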

Example 18 with Worker

use of ini.trakem2.utils.Worker in project TrakEM2 by trakem2.

From the class Compare, the method compareAllToAll:

/**
 * Gets pipes for all open projects, and generates a matrix of dissimilarities, which gets passed on to the Worker thread and also to a file, if desired.
 *
 * @param to_file Whether to save the results to a file (popping up a save dialog for it) or not. In any case the results are stored in the worker's result, which you can retrieve like:
 * <pre>
 * Bureaucrat bu = Compare.compareAllToAll(true, null, null, null, false, false, 0, null);
 * Object result = bu.getWorker().getResult();
 * float[][] scores = (float[][])result[0];
 * ArrayList&lt;Compare.Chain&gt; chains = (ArrayList&lt;Compare.Chain&gt;)result[1];
 * </pre>
 */
public static Bureaucrat compareAllToAll(final boolean to_file, final String regex, final String[] ignore, final Project[] projects, final boolean crop, final boolean from_end, final int max_n_elements, final String outgroup) {
    // gather all open projects
    final Project[] p = null == projects ? Project.getProjects().toArray(new Project[0]) : projects;
    final Worker worker = new Worker("Comparing all to all") {

        @Override
        public void run() {
            startedWorking();
            try {
                final CATAParameters cp = new CATAParameters();
                if (!cp.setup(to_file, regex, false, false)) {
                    finishedWorking();
                    return;
                }
                String filename = null, dir = null;
                if (to_file) {
                    final SaveDialog sd = new SaveDialog("Save matrix", OpenDialog.getDefaultDirectory(), null, ".csv");
                    filename = sd.getFileName();
                    if (null == filename) {
                        finishedWorking();
                        return;
                    }
                    dir = sd.getDirectory().replace('\\', '/');
                    if (!dir.endsWith("/"))
                        dir += "/";
                }
                Object[] ob = gatherChains(p, cp, ignore);
                final ArrayList<Chain> chains = (ArrayList<Chain>) ob[0];
                // to keep track of each project's chains
                final ArrayList[] p_chains = (ArrayList[]) ob[1];
                ob = null;
                if (null == chains) {
                    finishedWorking();
                    return;
                }
                final int n_chains = chains.size();
                // crop chains if desired
                if (crop) {
                    for (final Chain chain : chains) {
                        if (from_end) {
                            final int start = chain.vs.length() - max_n_elements;
                            if (start > 0) {
                                chain.vs = chain.vs.substring(start, chain.vs.length());
                                // BEFORE making it relative
                                chain.vs.resample(cp.delta, cp.with_source);
                            }
                        } else {
                            if (max_n_elements < chain.vs.length()) {
                                chain.vs = chain.vs.substring(0, max_n_elements);
                                // BEFORE making it relative
                                chain.vs.resample(cp.delta, cp.with_source);
                            }
                        }
                    }
                }
                // compare all to all
                final VectorString3D[] vs = new VectorString3D[n_chains];
                for (int i = 0; i < n_chains; i++) vs[i] = chains.get(i).vs;
                final float[][] scores = Compare.scoreAllToAll(vs, cp.distance_type, cp.delta, cp.skip_ends, cp.max_mut, cp.min_chunk, cp.direct, cp.substring_matching, this);
                if (null == scores) {
                    finishedWorking();
                    return;
                }
                // store matrix and chains into the worker
                this.result = new Object[] { scores, chains };
                // write to file
                if (!to_file) {
                    finishedWorking();
                    return;
                }
                final File f = new File(dir + filename);
                // encoding in Latin 1 (so that macosx does not mess it up)
                final OutputStreamWriter dos = new OutputStreamWriter(new BufferedOutputStream(new FileOutputStream(f)), "8859_1");
                // Normalize matrix to largest value of 1.0
                if (cp.normalize) {
                    float max = 0;
                    for (int i = 0; i < scores.length; i++) {
                        // traverse only half the matrix: it's mirrored
                        for (int j = i; j < scores[0].length; j++) {
                            if (scores[i][j] > max)
                                max = scores[i][j];
                        }
                    }
                    for (int i = 0; i < scores.length; i++) {
                        for (int j = i; j < scores[0].length; j++) {
                            scores[i][j] = scores[j][i] /= max;
                        }
                    }
                }
                // write chain titles, with project prefix
                if (cp.format.equals(cp.formats[0])) {
                    // as csv:
                    try {
                        final StringBuffer[] titles = new StringBuffer[n_chains];
                        int next = 0;
                        for (int i = 0; i < p.length; i++) {
                            final String prefix = Utils.getCharacter(i + 1);
                            // empty upper left corner
                            dos.write("\"\"");
                            for (final Chain chain : (ArrayList<Chain>) p_chains[i]) {
                                dos.write(",");
                                titles[next] = new StringBuffer().append('\"').append(prefix).append(' ').append(chain.getCellTitle()).append('\"');
                                dos.write(titles[next].toString());
                                next++;
                            }
                        }
                        dos.write("\n");
                        for (int i = 0; i < n_chains; i++) {
                            final StringBuffer line = new StringBuffer();
                            line.append(titles[i]);
                            for (int j = 0; j < n_chains; j++) line.append(',').append(scores[i][j]);
                            line.append('\n');
                            dos.write(line.toString());
                        }
                        dos.flush();
                    } catch (final Exception e) {
                        e.printStackTrace();
                    }
                } else if (cp.format.equals(cp.formats[1])) {
                    // as XML:
                    try {
                        final StringBuffer sb = new StringBuffer("<?xml version=\"1.0\"?>\n<!DOCTYPE ggobidata SYSTEM \"ggobi.dtd\">\n");
                        sb.append("<ggobidata count=\"2\">\n");
                        sb.append("<data name=\"Pipe Chains\">\n");
                        sb.append("<description />\n");
                        // ggobi: what a crappy XML parser it has
                        sb.append("<variables count=\"0\">\n</variables>\n");
                        sb.append("<records count=\"").append(chains.size()).append("\" glyph=\"fr 1\" color=\"3\">\n");
                        int next = 0;
                        for (int i = 0; i < p.length; i++) {
                            final String prefix = Utils.getCharacter(i + 1);
                            final String color = new StringBuffer("color=\"").append(i + 1).append('\"').toString();
                            for (final Chain chain : (ArrayList<Chain>) p_chains[i]) {
                                sb.append("<record id=\"").append(next + 1).append("\" label=\"").append(prefix).append(' ').append(chain.getCellTitle()).append("\" ").append(color).append("></record>\n");
                                next++;
                            }
                        }
                        sb.append("</records>\n</data>\n");
                        sb.append("<data name=\"distances\">\n");
                        sb.append("<description />\n");
                        sb.append("<variables count=\"1\">\n<realvariable name=\"D\" />\n</variables>\n");
                        sb.append("<records count=\"").append(n_chains * (n_chains - 1)).append("\" glyph=\"fr 1\" color=\"0\">\n");
                        for (int i = 0; i < n_chains; i++) {
                            for (int j = 0; j < n_chains; j++) {
                                if (i == j)
                                    continue;
                                sb.append("<record source=\"").append(i + 1).append("\" destination=\"").append(j + 1).append("\">").append(scores[i][j]).append("</record>\n");
                            }
                        }
                        sb.append("</records>\n</data>\n");
                        sb.append("</ggobidata>");
                        dos.write(sb.toString());
                        dos.flush();
                    } catch (final Exception e) {
                        e.printStackTrace();
                    }
                } else if (cp.format.equals(cp.formats[2])) {
                    // as Phylip .dis
                    try {
                        // collect different projects
                        final ArrayList<Project> projects = new ArrayList<Project>();
                        for (final Chain chain : chains) {
                            final Project p = chain.getRoot().getProject();
                            if (!projects.contains(p))
                                projects.add(p);
                        }
                        final HashSet names = new HashSet();
                        final StringBuffer sb = new StringBuffer();
                        sb.append(scores.length).append('\n');
                        dos.write(sb.toString());
                        // unique ids, since phylip cannot handle long names
                        final AtomicInteger ids = new AtomicInteger(0);
                        final File ftags = new File(dir + filename + ".tags");
                        // encoding in Latin 1 (so that macosx does not mess it up)
                        final OutputStreamWriter dostags = new OutputStreamWriter(new BufferedOutputStream(new FileOutputStream(ftags)), "8859_1");
                        for (int i = 0; i < scores.length; i++) {
                            sb.setLength(0);
                            // String title = chains.get(i).getShortCellTitle().replace(' ', '_').replace('\t', '_').replace('[', '-').replace(']', '-');
                            final int id = ids.incrementAndGet();
                            final String sid = Utils.getCharacter(id);
                            String name = chains.get(i).getShortCellTitle();
                            // If sid.length() > 10 chars, trouble!
                            if (sid.length() > 10) {
                                Utils.log2("Ignoring " + name + " : id longer than 10 chars: " + id);
                                continue;
                            }
                            final int k = 1;
                            // Prepend a project char identifier to the name
                            String project_name = "";
                            if (projects.size() > 1) {
                                project_name = Utils.getCharacter(projects.indexOf(chains.get(i).getRoot().getProject()) + 1).toLowerCase();
                                name = project_name + name;
                            }
                            dostags.write(new StringBuilder().append(sid).append('\t').append(name).append('\n').toString());
                            if (null != outgroup && -1 != name.indexOf(outgroup)) {
                                Utils.logAll("Outgroup 0-based index is " + id + ", with id " + sid + ", with name " + name);
                            }
                            // 
                            final int len = 12;
                            sb.append(sid);
                            // pad with spaces up to len
                            for (int j = len - sid.length(); j > 0; j--) sb.append(' ');
                            int count = 0;
                            for (int j = 0; j < scores[0].length; j++) {
                                sb.append(' ').append(scores[i][j]);
                                count++;
                                if (7 == count && j < scores[0].length - 1) {
                                    sb.append('\n');
                                    count = 0;
                                    while (++count < len) sb.append(' ');
                                    sb.append(' ');
                                    count = 0;
                                }
                            }
                            sb.append('\n');
                            dos.write(sb.toString());
                        }
                        dos.flush();
                        dostags.flush();
                        dostags.close();
                    } catch (final Exception e) {
                        e.printStackTrace();
                    }
                }
                dos.close();
            } catch (final Exception e) {
                e.printStackTrace();
            } finally {
                finishedWorking();
            }
        }
    };
    return Bureaucrat.createAndStart(worker, p);
}
Also used : CATAParameters(ini.trakem2.analysis.Compare.CATAParameters) ArrayList(java.util.ArrayList) Worker(ini.trakem2.utils.Worker) SaveDialog(ij.io.SaveDialog) BufferedOutputStream(java.io.BufferedOutputStream) HashSet(java.util.HashSet) Project(ini.trakem2.Project) VectorString3D(ini.trakem2.vector.VectorString3D) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) FileOutputStream(java.io.FileOutputStream) OutputStreamWriter(java.io.OutputStreamWriter) File(java.io.File)
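
Since compareAllToAll returns immediately with a Bureaucrat while the Worker runs in the background, retrieving the scores requires waiting for the task to finish. A sketch of that retrieval, matching the full signature above (the argument values are illustrative assumptions, and Bureaucrat is assumed to be joinable like a Thread):

import ini.trakem2.analysis.Compare;
import ini.trakem2.utils.Bureaucrat;

public class CompareAllSketch {
    public static float[][] allToAllScores() throws InterruptedException {
        final Bureaucrat bu = Compare.compareAllToAll(
                false,  // to_file: keep the matrix only in the worker's result
                null,   // regex
                null,   // ignore
                null,   // projects: null means "use all open projects"
                false,  // crop
                false,  // from_end (ignored when crop is false)
                0,      // max_n_elements (ignored when crop is false)
                null);  // outgroup
        bu.join();      // assumption: wait for the Worker to call finishedWorking()
        final Object[] result = (Object[]) bu.getWorker().getResult();
        return null == result ? null : (float[][]) result[0];
    }
}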

Example 19 with Worker

use of ini.trakem2.utils.Worker in project TrakEM2 by trakem2.

From the class Compare, the method reliabilityAnalysis:

public static final Bureaucrat reliabilityAnalysis(final String[] ignore, final boolean output_arff, final boolean weka_classify, final boolean show_dialog, final double delta, final double wi, final double wd, final double wm) {
    // gather all open projects
    final Project[] p = Project.getProjects().toArray(new Project[0]);
    final Worker worker = new Worker("Reliability by name") {

        @Override
        public void run() {
            startedWorking();
            try {
                final CATAParameters cp = new CATAParameters();
                cp.delta = delta;
                if (show_dialog && !cp.setup(false, null, false, false)) {
                    finishedWorking();
                    return;
                }
                Object[] ob = gatherChains(p, cp, ignore);
                final ArrayList<Chain> chains = (ArrayList<Chain>) ob[0];
                // to keep track of each project's chains
                final ArrayList[] p_chains = (ArrayList[]) ob[1];
                ob = null;
                if (null == chains) {
                    finishedWorking();
                    return;
                }
                // For each pipe in a brain:
                // - score against all other brains in which that pipe name exists,
                // - record the score position within that brain.
                // 
                final ExecutorService exec = Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors());
                // for each individual lineage:
                final TreeMap<String, ArrayList<Integer>> indices = new TreeMap<String, ArrayList<Integer>>();
                final ArrayList<CITuple> cin = new ArrayList<CITuple>();
                // for each family:
                final TreeMap<String, ArrayList<Integer>> indices_f = new TreeMap<String, ArrayList<Integer>>();
                final ArrayList<CITuple> cin_f = new ArrayList<CITuple>();
                final ArrayList<Future> fus = new ArrayList<Future>();
                // For neural network analysis:
                final StringBuilder arff = output_arff ? new StringBuilder("@RELATION Lineages\n\n") : null;
                if (output_arff) {
                    arff.append("@ATTRIBUTE APD NUMERIC\n");
                    arff.append("@ATTRIBUTE CPD NUMERIC\n");
                    arff.append("@ATTRIBUTE STD NUMERIC\n");
                    arff.append("@ATTRIBUTE MPD NUMERIC\n");
                    arff.append("@ATTRIBUTE PM NUMERIC\n");
                    arff.append("@ATTRIBUTE LEV NUMERIC\n");
                    arff.append("@ATTRIBUTE SIM NUMERIC\n");
                    arff.append("@ATTRIBUTE PRX NUMERIC\n");
                    arff.append("@ATTRIBUTE PRM NUMERIC\n");
                    // length ratio: len(query) / len(ref)
                    arff.append("@ATTRIBUTE LR NUMERIC\n");
                    arff.append("@ATTRIBUTE TR NUMERIC\n");
                    arff.append("@ATTRIBUTE CLASS {false,true}\n");
                    arff.append("\n@DATA\n");
                }
                // Count number of times when decision tree says it's good, versus number of times when it should be good
                // observed
                final AtomicInteger obs_good = new AtomicInteger(0);
                // observed wrong
                final AtomicInteger obs_wrong = new AtomicInteger(0);
                // expected
                final AtomicInteger exp_good = new AtomicInteger(0);
                final AtomicInteger exp_bad = new AtomicInteger(0);
                final AtomicInteger obs_bad_classified_good_ones = new AtomicInteger(0);
                final AtomicInteger obs_well_classified_bad_ones = new AtomicInteger(0);
                // inc by one when a lineage to compare is not found at all in the brain that works as reference
                final AtomicInteger not_found = new AtomicInteger(0);
                final AtomicInteger already_classified = new AtomicInteger(0);
                Method classify_ = null;
                if (weka_classify) {
                    try {
                        classify_ = Class.forName("lineage.LineageClassifier").getDeclaredMethod("classify", new Class[] { double[].class });
                    } catch (final Exception e) {
                        IJError.print(e);
                    }
                }
                final Method classify = classify_;
                // All ordered pairs of distinct projects: A vs B and B vs A are scored separately, even though each pipe pairwise comparison itself is symmetric.
                for (int _i = 0; _i < p_chains.length; _i++) {
                    final int i = _i;
                    Utils.log2("Project " + p[i] + " has " + p_chains[i].size() + " chains.");
                    for (int _j = 0; _j < p_chains.length; _j++) {
                        final int j = _j;
                        // skip same project (would have a score of zero, identical.)
                        if (i == j)
                            continue;
                        final String[] titles_j = new String[p_chains[j].size()];
                        int next = 0;
                        for (final Chain cj : (ArrayList<Chain>) p_chains[j]) {
                            final String t = cj.getCellTitle();
                            titles_j[next++] = t.substring(0, t.indexOf(' '));
                        }
                        // families:
                        final TreeSet<String> ts_families = new TreeSet<String>();
                        for (int f = 0; f < titles_j.length; f++) {
                            // extract family name from title: read the first continuous string of capital letters
                            final String title = titles_j[f];
                            int u = 0;
                            for (; u < title.length(); u++) {
                                if (!Character.isUpperCase(title.charAt(u)))
                                    break;
                            }
                            ts_families.add(title.substring(0, u));
                        }
                        final ArrayList<String> families = new ArrayList<String>(ts_families);
                        fus.add(exec.submit(new Callable() {

                            @Override
                            public Object call() {
                                // All chains of one project to all chains of the other:
                                for (final Chain chain : (ArrayList<Chain>) p_chains[i]) {
                                    final VectorString3D vs1 = chain.vs;
                                    // Prepare title
                                    String title = chain.getCellTitle();
                                    title = title.substring(0, title.indexOf(' '));
                                    // check if the other project j contains a chain of name chain.getCellTitle() up to the space.
                                    int title_index = -1;
                                    for (int k = 0; k < titles_j.length; k++) {
                                        if (title.equals(titles_j[k])) {
                                            title_index = k;
                                            break;
                                        }
                                    }
                                    if (-1 == title_index) {
                                        Utils.log2(title + " not found in project " + p[j]);
                                        if (weka_classify)
                                            not_found.incrementAndGet();
                                        continue;
                                    }
                                    // should be there:
                                    if (weka_classify) {
                                        exp_good.incrementAndGet();
                                        exp_bad.addAndGet(titles_j.length - 1);
                                    }
                                    final ArrayList<ChainMatch> list = new ArrayList<ChainMatch>();
                                    // extract family name from title: read the first continuous string of capital letters
                                    int u = 0;
                                    for (; u < title.length(); u++) {
                                        if (!Character.isUpperCase(title.charAt(u)))
                                            break;
                                    }
                                    final String family_name = title.substring(0, u);
                                    String last_classify = null;
                                    int g = 0;
                                    for (final Chain cj : (ArrayList<Chain>) p_chains[j]) {
                                        final VectorString3D vs2 = cj.vs;
                                        final Object[] ob = findBestMatch(vs1, vs2, cp.delta, cp.skip_ends, cp.max_mut, cp.min_chunk, cp.distance_type, cp.direct, cp.substring_matching, wi, wd, wm);
                                        final Editions ed = (Editions) ob[0];
                                        final double[] stats = ed.getStatistics(cp.skip_ends, cp.max_mut, cp.min_chunk, cp.score_mut_only);
                                        final ChainMatch cm = new ChainMatch(cj, null, ed, stats, score(ed.getSimilarity(), ed.getDistance(), stats[3], Compare.W));
                                        cm.title = titles_j[g];
                                        list.add(cm);
                                        g++;
                                        if (weka_classify) {
                                            // from decision tree: is it good?
                                            final double[] param = new double[11];
                                            for (int p = 0; p < stats.length; p++) param[p] = stats[p];
                                            try {
                                                if (((Boolean) classify.invoke(null, param)).booleanValue()) {
                                                    if (null != last_classify) {
                                                        Utils.log2("ALREADY CLASSIFIED " + title + " as " + last_classify + "  (now: " + cm.title + " )");
                                                        already_classified.incrementAndGet();
                                                    }
                                                    last_classify = cm.title;
                                                    if (title.equals(cm.title)) {
                                                        obs_good.incrementAndGet();
                                                    } else {
                                                        Utils.log2("WRONG CLASSIFICATION of " + title + " as " + cm.title);
                                                        obs_wrong.incrementAndGet();
                                                    }
                                                } else {
                                                    if (title.equals(cm.title)) {
                                                        obs_bad_classified_good_ones.incrementAndGet();
                                                    } else {
                                                        obs_well_classified_bad_ones.incrementAndGet();
                                                    }
                                                }
                                            } catch (final Exception ee) {
                                                IJError.print(ee);
                                            }
                                        }
                                    }
                                    // sort scores:
                                    Compare.sortMatches(list, cp.distance_type, cp.distance_type_2, cp.min_matches);
                                    if (output_arff) {
                                        // Take top 8 and put them into training set for WEKA in arff format
                                        for (int h = 0; h < 8; h++) {
                                            final ChainMatch cm = list.get(h);
                                            final StringBuilder sb = new StringBuilder();
                                            sb.append(cm.phys_dist).append(',').append(cm.cum_phys_dist).append(',')
                                                .append(cm.stdDev).append(',').append(cm.median).append(',')
                                                .append(cm.prop_mut).append(',').append(cm.ed.getDistance()).append(',')
                                                .append(cm.seq_sim).append(',').append(cm.proximity).append(',')
                                                .append(cm.proximity_mut).append(',').append(cm.prop_len).append(',')
                                                .append(cm.tortuosity_ratio).append(',').append(title.equals(cm.title))
                                                // .append('-').append(cm.title.startsWith(family_name))
                                                .append('\n');
                                            synchronized (arff) {
                                                arff.append(sb);
                                            }
                                        }
                                    }
                                    // record scoring index
                                    int f = 0;
                                    boolean found_specific = false;
                                    boolean found_family = false;
                                    for (final ChainMatch cm : list) {
                                        // Exact match: for each individual lineage
                                        if (!found_specific && title.equals(cm.title)) {
                                            synchronized (indices) {
                                                ArrayList<Integer> al = indices.get(title);
                                                if (null == al) {
                                                    al = new ArrayList<Integer>();
                                                    indices.put(title, al);
                                                    // so I can keep a list of chains sorted by name
                                                    cin.add(new CITuple(title, chain, al));
                                                }
                                                al.add(f);
                                            }
                                            found_specific = true;
                                        }
                                        if (!found_family && cm.title.startsWith(family_name)) {
                                            synchronized (indices_f) {
                                                ArrayList<Integer> al = indices_f.get(family_name);
                                                if (null == al) {
                                                    al = new ArrayList<Integer>();
                                                    indices_f.put(family_name, al);
                                                    cin_f.add(new CITuple(family_name, chain, al));
                                                }
                                                al.add(f);
                                            }
                                            found_family = true;
                                        }
                                        if (found_specific && found_family) {
                                            break;
                                        }
                                        // 
                                        f++;
                                    }
                                    if (!found_specific) {
                                        Utils.log2("NOT FOUND any match for " + title + " within a list of size " + list.size() + ", in project " + chain.getRoot().getProject());
                                    }
                                }
                                return null;
                            }
                        }));
                    }
                }
                for (final Future fu : fus) {
                    try {
                        fu.get();
                    } catch (final Exception e) {
                        IJError.print(e);
                    }
                }
                exec.shutdownNow();
                if (weka_classify) {
                    // so stateful ... it's a sin.
                    try {
                        Class.forName("lineage.LineageClassifier").getDeclaredMethod("flush", new Class[] {}).invoke(null, new Object[] {});
                    } catch (final Exception e) {
                        IJError.print(e);
                    }
                }
                // export ARFF for neural network training
                if (output_arff) {
                    Utils.saveToFile(new File(System.getProperty("user.dir") + "/lineages.arff"), arff.toString());
                }
                // Show the results from indices map
                final StringBuilder sb = new StringBuilder();
                // scoring index vs count of occurrences
                final TreeMap<Integer, Integer> sum = new TreeMap<Integer, Integer>();
                // best scoring index of best family member vs count of occurrences
                final TreeMap<Integer, Integer> sum_f = new TreeMap<Integer, Integer>();
                // scoring index vs count of occurrences, within each family
                final TreeMap<String, TreeMap<Integer, Integer>> sum_fw = new TreeMap<String, TreeMap<Integer, Integer>>();
                // From collected data, several kinds of results:
                // - a list of how well each chain scores: its index position in the sorted list of scores of one to many.
                // - a list of how well each chain scores relative to family: the lowest (best) index position of a lineage of the same family in the sorted list of scores.
                sb.append("List of scoring indices for each (starting at index 1, aka best possible score):\n");
                for (final CITuple ci : cin) {
                    // sort indices in place
                    Collections.sort(ci.list);
                    // count occurrences of each scoring index
                    // lowest possible index
                    int last = 0;
                    int count = 1;
                    for (final int i : ci.list) {
                        if (last == i)
                            count++;
                        else {
                            sb.append(ci.title).append(' ').append(last + 1).append(' ').append(count).append('\n');
                            // reset
                            last = i;
                            count = 1;
                        }
                        // global count of occurrences
                        final Integer oi = new Integer(i);
                        sum.put(oi, (sum.containsKey(oi) ? sum.get(oi) : 0) + 1);
                        // Same thing but not for all lineages, but only for lineages within a family:
                        // extract family name from title: read the first continuous string of capital letters
                        int u = 0;
                        for (; u < ci.title.length(); u++) {
                            if (!Character.isUpperCase(ci.title.charAt(u)))
                                break;
                        }
                        final String family_name = ci.title.substring(0, u);
                        TreeMap<Integer, Integer> sfw = sum_fw.get(family_name);
                        if (null == sfw) {
                            sfw = new TreeMap<Integer, Integer>();
                            sum_fw.put(family_name, sfw);
                        }
                        sfw.put(oi, (sfw.containsKey(oi) ? sfw.get(oi) : 0) + 1);
                    }
                    if (0 != count)
                        sb.append(ci.title).append(' ').append(last + 1).append(' ').append(count).append('\n');
                    // find the very-off ones:
                    if (last > 6) {
                        Utils.log2("BAD index " + last + " for chain " + ci.title + " " + ci.chain.getRoot() + " of project " + ci.chain.getRoot().getProject());
                    }
                }
                sb.append("===============================\n");
                // / family score:
                for (final CITuple ci : cin_f) {
                    // sort indices in place
                    Collections.sort(ci.list);
                    // count occurrences of each scoring index
                    // lowest possible index
                    int last = 0;
                    int count = 1;
                    for (final int i : ci.list) {
                        if (last == i)
                            count++;
                        else {
                            // reset
                            last = i;
                            count = 1;
                        }
                        // global count of occurrences
                        final Integer oi = new Integer(i);
                        sum_f.put(oi, (sum_f.containsKey(oi) ? sum_f.get(oi) : 0) + 1);
                    }
                }
                sb.append("===============================\n");
                // - a summarizing histogram that collects how many 1st, how many 2nd, etc. in total, normalized to total number of one-to-many matches performed (i.e. the number of scoring indices recorded.)
                // 
                {
                    sb.append("Global count of index ocurrences:\n");
                    int total = 0;
                    int top2 = 0;
                    int top5 = 0;
                    for (final Map.Entry<Integer, Integer> e : sum.entrySet()) {
                        sb.append(e.getKey()).append(' ').append(e.getValue()).append('\n');
                        total += e.getValue();
                        if (e.getKey() < 2)
                            top2 += e.getValue();
                        if (e.getKey() < 5)
                            top5 += e.getValue();
                    }
                    sb.append("total: ").append(total).append('\n');
                    sb.append("top1: ").append(sum.get(sum.firstKey()) / (float) total).append('\n');
                    sb.append("top2: ").append(top2 / (float) total).append('\n');
                    sb.append("top5: ").append(top5 / (float) total).append('\n');
                    sb.append("===============================\n");
                }
                sb.append("Family-wise count of index ocurrences:\n");
                for (final Map.Entry<String, TreeMap<Integer, Integer>> fe : sum_fw.entrySet()) {
                    int total = 0;
                    int top5 = 0;
                    for (final Map.Entry<Integer, Integer> e : fe.getValue().entrySet()) {
                        sb.append(fe.getKey()).append(' ').append(e.getKey()).append(' ').append(e.getValue()).append('\n');
                        total += e.getValue();
                        if (e.getKey() < 5)
                            top5 += e.getValue();
                    }
                    sb.append("total: ").append(total).append('\n');
                    sb.append("top1: ").append(fe.getValue().get(fe.getValue().firstKey()) / (float) total).append('\n');
                    sb.append("top5: ").append(top5 / (float) total).append('\n');
                }
                sb.append("===============================\n");
                // - the percent of first score being the correct one:
                double first = 0;
                double first_5 = 0;
                double all = 0;
                for (final Map.Entry<Integer, Integer> e : sum.entrySet()) {
                    final int k = e.getKey();
                    final int a = e.getValue();
                    all += a;
                    if (0 == k)
                        first = a;
                    if (k < 5)
                        first_5 += a;
                }
                // STORE
                this.result = new double[] { // Top one ratio
                first / all, // Top 5 ratio
                first_5 / all };
                sb.append("Global count of index occurrences family-wise:\n");
                for (final Map.Entry<Integer, Integer> e : sum_f.entrySet()) {
                    sb.append(e.getKey()).append(' ').append(e.getValue()).append('\n');
                }
                sb.append("===============================\n");
                // - a summarizing histogram of how well each chain scores (4/4, 3/4, 2/4, 1/4, 0/4 only for those that have 4 homologous members.)
                // Must consider that there are 5 projects taken in pairs with repetition.
                sb.append("A summarizing histogram of how well each chain scores, for those that have 4 homologous members. It's the number of 1st scores (zeroes) versus the total number of scores:\n");
                // First, classify them in having 4, 3, 2, 1
                // For 5 brains:  5! / (5-2)! = 5 * 4 = 20   --- 5 elements taken in groups of 2, where order matters
                // For 4 brains:  4! / (4-2)! = 4 * 3 = 12
                // For 3 brains:  3! / (3-2)! = 3 * 2 = 6;
                final TreeMap<Integer, ArrayList<String>> hsc = new TreeMap<Integer, ArrayList<String>>();
                for (final CITuple ci : cin) {
                    final int size = ci.list.size();
                    ArrayList<String> al = hsc.get(size);
                    if (null == al) {
                        al = new ArrayList<String>();
                        hsc.put(size, al);
                    }
                    // Count the number of 0s -- top scoring
                    int count = 0;
                    for (final Integer i : ci.list) {
                        if (0 == i)
                            count++;
                        else
                            break;
                    }
                    al.add(new StringBuffer(ci.title).append(" =").append(count).append('/').append(ci.list.size()).append('\n').toString());
                }
                // Then just print:
                for (final Map.Entry<Integer, ArrayList<String>> e : hsc.entrySet()) {
                    sb.append("For ").append(e.getKey()).append(" matches:\n");
                    for (final String s : e.getValue()) sb.append(s);
                }
                sb.append("=========================\n");
                // Family-wise, count the number of zeros per family:
                sb.append("Number of top scoring per family:\n");
                final TreeMap<String, String> family_scores = new TreeMap<String, String>();
                for (final CITuple ci : cin_f) {
                    int count = 0;
                    for (final Integer i : ci.list) {
                        if (0 == i)
                            count++;
                        else
                            // ci.list is sorted
                            break;
                    }
                    family_scores.put(ci.title, new StringBuilder().append(ci.title).append(" =").append(count).append('/').append(ci.list.size()).append('\n').toString());
                }
                // Now print sorted by family name:
                for (final String s : family_scores.values()) {
                    sb.append(s);
                }
                sb.append("=========================\n");
                if (weka_classify) {
                    sb.append("Decision tree:\n");
                    sb.append("Expected good matches: " + exp_good.get() + "\n");
                    sb.append("Expected bad matches: " + exp_bad.get() + "\n");
                    sb.append("Observed good matches: " + obs_good.get() + "\n");
                    sb.append("Observed bad matches: " + obs_wrong.get() + "\n");
                    sb.append("Observed well classified bad ones: " + obs_well_classified_bad_ones.get() + "\n");
                    sb.append("Observed bad classified good ones: " + obs_bad_classified_good_ones.get() + "\n");
                    sb.append("Not found, so skipped: " + not_found.get() + "\n");
                    sb.append("Already classified: " + already_classified.get() + "\n");
                    sb.append("=========================\n");
                }
                if (output_arff) {
                    Utils.log(sb.toString());
                } else {
                    Utils.log2(sb.toString());
                }
            } catch (final Exception e) {
                e.printStackTrace();
            } finally {
                finishedWorking();
            }
        }
    };
    return Bureaucrat.createAndStart(worker, p);
}
Also used : ArrayList(java.util.ArrayList) TreeSet(java.util.TreeSet) Method(java.lang.reflect.Method) Project(ini.trakem2.Project) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) File(java.io.File) Map(java.util.Map) HashMap(java.util.HashMap) TreeMap(java.util.TreeMap) CATAParameters(ini.trakem2.analysis.Compare.CATAParameters) Callable(java.util.concurrent.Callable) Worker(ini.trakem2.utils.Worker) TreeMap(java.util.TreeMap) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) Editions(ini.trakem2.vector.Editions) VectorString3D(ini.trakem2.vector.VectorString3D) ExecutorService(java.util.concurrent.ExecutorService) Future(java.util.concurrent.Future)
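
The stored result of the worker above boils down to two ratios computed from the per-index occurrence counts in sum. A stand-alone restatement of that final tally (hypothetical class name; sum maps a 0-based scoring index to how often it occurred):

import java.util.Map;
import java.util.TreeMap;

public class ScoringIndexTally {
    /** Returns { top-1 ratio, top-5 ratio }, the same pair stored in this.result above. */
    public static double[] topRatios(final TreeMap<Integer, Integer> sum) {
        double first = 0, first_5 = 0, all = 0;
        for (final Map.Entry<Integer, Integer> e : sum.entrySet()) {
            final int index = e.getKey();   // 0 means the correct match scored best
            final int count = e.getValue();
            all += count;
            if (0 == index) first = count;
            if (index < 5) first_5 += count;
        }
        return new double[] { first / all, first_5 / all };
    }
}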

Example 20 with Worker

use of ini.trakem2.utils.Worker in project TrakEM2 by trakem2.

From the class Compare, the method condense:

/**
 * Builds an all-to-all distance matrix of the given vs, then performs neighbor joining, doing a weighted merge of each pair of VectorString3D being joined, and finally outputs the resulting condensed, unique VectorString3D whose source array holds all the points that contribute to each of its points. Expects VectorString3D instances that are already calibrated and transformed.
 */
public static VectorString3D condense(final CATAParameters cp, final VectorString3D[] vs, final Worker worker) throws Exception {
    // Trivial case 1:
    if (1 == vs.length)
        return vs[0];
    // Estimate delta
    if (0 == cp.delta) {
        for (int i = 0; i < vs.length; i++) {
            cp.delta += vs[i].getAverageDelta();
        }
        cp.delta /= vs.length;
    }
    // Resample all:
    for (int i = 0; i < vs.length; i++) vs[i].resample(cp.delta, true);
    // Trivial case 2:
    try {
        if (2 == vs.length)
            return VectorString3D.createInterpolatedPoints(new Editions(vs[0], vs[1], cp.delta, false), 0.5f);
    } catch (final Exception e) {
        IJError.print(e);
        return null;
    }
    // Else, do neighbor joining
    final float[][] scores = Compare.scoreAllToAll(vs, cp.distance_type, cp.delta, cp.skip_ends, cp.max_mut, cp.min_chunk, cp.direct, cp.substring_matching, worker);
    final HashMap<Compare.Cell<VectorString3D>, Float> table = new HashMap<Compare.Cell<VectorString3D>, Float>();
    // Input the half matrix only into the table, since it's mirrored. And without the diagonal of zeros:
    for (int i = 1; i < scores.length; i++) {
        for (int j = 0; j < i; j++) {
            table.put(new Cell<VectorString3D>(vs[i], vs[j]), scores[i][j]);
        }
    }
    final HashSet<VectorString3D> remaining = new HashSet<VectorString3D>();
    for (final VectorString3D v : vs) remaining.add(v);
    while (table.size() > 0) {
        if (null != worker && worker.hasQuitted()) {
            return null;
        }
        // find smallest value
        float min = Float.MAX_VALUE;
        Cell<VectorString3D> cell = null;
        for (final Map.Entry<Cell<VectorString3D>, Float> e : table.entrySet()) {
            final float f = e.getValue();
            if (f < min) {
                min = f;
                cell = e.getKey();
            }
        }
        // done below//table.remove(cell);
        for (final Iterator<Cell<VectorString3D>> it = table.keySet().iterator(); it.hasNext(); ) {
            final Cell<VectorString3D> c = it.next();
            if (c.t1 == cell.t1 || c.t2 == cell.t2 || c.t2 == cell.t1 || c.t1 == cell.t2) {
                it.remove();
            }
        }
        // pop the two merged VectorString3D
        remaining.remove(cell.t1);
        remaining.remove(cell.t2);
        // merge, weighted by number of sources of each
        // in createInterpolated, the alpha is the opposite of what one would think: a 0.2 alpha means 0.8 for the first and 0.2 for the second. So alpha should be 1-alpha
        final double alpha = (double) (cell.t1.getNSources()) / (double) (cell.t1.getNSources() + cell.t2.getNSources());
        final Editions eds = new Editions(cell.t1, cell.t2, cp.delta, false);
        VectorString3D vs_merged = null;
        if (cp.cut_uneven_ends) {
            // crop the ends to eliminate runs of insertions or deletions that are only interspersed with runs of at most cp.max_mut mutations
            // (This reduces or eliminates variability noise caused by unequal sequence length)
            final int[][] editions = eds.getEditions();
            int first = 0;
            int last = editions.length - 1;
            int n_mut = 0;
            for (int i = 0; i < last; i++) {
                if (Editions.MUTATION == editions[i][0]) {
                    n_mut++;
                    if (n_mut > cp.max_mut) {
                        first = i - n_mut + 1;
                        break;
                    }
                }
            }
            // reset
            n_mut = 0;
            for (int i = last; i > first; i--) {
                if (Editions.MUTATION == editions[i][0]) {
                    n_mut++;
                    if (n_mut > cp.max_mut) {
                        last = i + n_mut - 1;
                        break;
                    }
                }
            }
            vs_merged = VectorString3D.createInterpolatedPoints(eds, alpha, first, last);
        } else {
            vs_merged = VectorString3D.createInterpolatedPoints(eds, alpha);
        }
        vs_merged.resample(cp.delta, true);
        // add a new cell for each possible comparison with all other unique vs
        for (final VectorString3D v : remaining) {
            final Object[] ob = findBestMatch(vs_merged, v, cp.delta, cp.skip_ends, cp.max_mut, cp.min_chunk, cp.distance_type, cp.direct, cp.substring_matching);
            final Editions ed = (Editions) ob[0];
            final float score = (float) getScore(ed, cp.skip_ends, cp.max_mut, cp.min_chunk, cp.distance_type);
            table.put(new Cell<VectorString3D>(vs_merged, v), score);
        }
        // add the new VectorString3D
        remaining.add(vs_merged);
    }
    // test:
    if (1 != remaining.size()) {
        Utils.log2("WARNING: remaining.size() == " + remaining.size());
    }
    return remaining.iterator().next();
}
Also used : HashMap(java.util.HashMap) Editions(ini.trakem2.vector.Editions) VectorString3D(ini.trakem2.vector.VectorString3D) Cell(mpicbg.imglib.container.cell.Cell) Map(java.util.Map) HashMap(java.util.HashMap) TreeMap(java.util.TreeMap) HashSet(java.util.HashSet)
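
A usage sketch for condense (hypothetical wrapper; it assumes the CATAParameters fields are accessible from outside Compare, as they are used above, and passes a null Worker so the quit checks never trigger). Setting delta to 0 lets condense estimate it from the chains' average deltas, as shown at the top of the method:

import ini.trakem2.analysis.Compare;
import ini.trakem2.analysis.Compare.CATAParameters;
import ini.trakem2.vector.VectorString3D;

public class CondenseSketch {
    public static VectorString3D condenseChains(final VectorString3D[] chains) throws Exception {
        final CATAParameters cp = new CATAParameters();
        cp.delta = 0; // 0 triggers the average-delta estimation in condense (field assumed accessible)
        return Compare.condense(cp, chains, null); // null Worker: cancellation is never requested
    }
}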

Aggregations

Worker (ini.trakem2.utils.Worker)20 ArrayList (java.util.ArrayList)10 ImagePlus (ij.ImagePlus)9 Patch (ini.trakem2.display.Patch)8 File (java.io.File)8 GenericDialog (ij.gui.GenericDialog)7 Displayable (ini.trakem2.display.Displayable)7 Rectangle (java.awt.Rectangle)7 HashSet (java.util.HashSet)7 IOException (java.io.IOException)6 Future (java.util.concurrent.Future)6 FormatException (loci.formats.FormatException)6 DirectoryChooser (ij.io.DirectoryChooser)5 Project (ini.trakem2.Project)5 Layer (ini.trakem2.display.Layer)5 VectorString3D (ini.trakem2.vector.VectorString3D)5 HashMap (java.util.HashMap)5 AtomicInteger (java.util.concurrent.atomic.AtomicInteger)5 Calibration (ij.measure.Calibration)4 ZDisplayable (ini.trakem2.display.ZDisplayable)4