
Example 1 with Min

Use of org.apache.commons.math3.stat.descriptive.rank.Min in project GDSC-SMLM by aherbert.

The class SearchSpace, method sampleWithoutRounding.

private static double[][] sampleWithoutRounding(int samples, RandomVectorGenerator[] generator, final int[] indices, int size, final double[] centre, final double[] lower, final double[] range) {
    RandomVectorGenerator g = createGenerator(generator, size);
    // Generate random points
    final double[][] searchSpace = new double[samples][];
    for (int i = 0; i < samples; i++) {
        final double[] r = g.nextVector();
        final double[] p = centre.clone();
        for (int j = 0; j < size; j++) {
            // Scale the unit-interval random value into the dimension range (no rounding to the min interval)
            p[indices[j]] = lower[j] + r[j] * range[j];
        }
        searchSpace[i] = p;
    }
    return searchSpace;
}
Also used : RandomVectorGenerator(org.apache.commons.math3.random.RandomVectorGenerator)
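
A minimal sketch of the same scaling idea outside GDSC-SMLM: a Commons Math RandomVectorGenerator (here a HaltonSequenceGenerator) supplies vectors in the unit cube, and each component is mapped onto lower[j] + r[j] * range[j]. The class and method names below are illustrative only and are not part of the project:

import org.apache.commons.math3.random.HaltonSequenceGenerator;
import org.apache.commons.math3.random.RandomVectorGenerator;

public class SampleBoxDemo {

    // Scale unit-cube vectors from the generator into [lower[j], lower[j] + range[j]) per dimension
    static double[][] sample(int samples, RandomVectorGenerator g, double[] lower, double[] range) {
        final double[][] points = new double[samples][];
        for (int i = 0; i < samples; i++) {
            final double[] r = g.nextVector(); // components in [0, 1)
            final double[] p = new double[lower.length];
            for (int j = 0; j < p.length; j++) {
                p[j] = lower[j] + r[j] * range[j];
            }
            points[i] = p;
        }
        return points;
    }

    public static void main(String[] args) {
        // Any RandomVectorGenerator works; a 2-D Halton sequence gives quasi-random coverage
        RandomVectorGenerator g = new HaltonSequenceGenerator(2);
        double[][] points = sample(5, g, new double[] { 10, -1 }, new double[] { 5, 2 });
        for (double[] p : points) {
            System.out.printf("%.3f, %.3f%n", p[0], p[1]);
        }
    }
}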

Example 2 with Min

Use of org.apache.commons.math3.stat.descriptive.rank.Min in project GDSC-SMLM by aherbert.

The class DensityImage, method computeRipleysPlot.

/**
	 * Compute Ripley's L-function for user-selected radii and show it on a plot.
	 * 
	 * @param results
	 *            the localisation results to analyse
	 */
private void computeRipleysPlot(MemoryPeakResults results) {
    ExtendedGenericDialog gd = new ExtendedGenericDialog(TITLE);
    gd.addMessage("Compute Ripley's L(r) - r plot");
    gd.addNumericField("Min_radius", minR, 2);
    gd.addNumericField("Max_radius", maxR, 2);
    gd.addNumericField("Increment", incrementR, 2);
    gd.addCheckbox("Confidence_intervals", confidenceIntervals);
    gd.showDialog();
    if (gd.wasCanceled())
        return;
    minR = gd.getNextNumber();
    maxR = gd.getNextNumber();
    incrementR = gd.getNextNumber();
    confidenceIntervals = gd.getNextBoolean();
    if (minR > maxR || incrementR < 0 || gd.invalidNumber()) {
        IJ.error(TITLE, "Invalid radius parameters");
        return;
    }
    DensityManager dm = createDensityManager(results);
    double[][] values = calculateLScores(dm);
    // 99% confidence intervals
    final int iterations = (confidenceIntervals) ? 99 : 0;
    double[] upper = null;
    double[] lower = null;
    Rectangle bounds = results.getBounds();
    // Use a uniform distribution for the coordinates
    HaltonSequenceGenerator dist = new HaltonSequenceGenerator(2);
    dist.skipTo(new Well19937c(System.currentTimeMillis() + System.identityHashCode(this)).nextInt());
    for (int i = 0; i < iterations; i++) {
        IJ.showProgress(i, iterations);
        IJ.showStatus(String.format("L-score confidence interval %d / %d", i + 1, iterations));
        // Randomise coordinates
        float[] x = new float[results.size()];
        float[] y = new float[x.length];
        for (int j = x.length; j-- > 0; ) {
            final double[] d = dist.nextVector();
            x[j] = (float) (d[0] * bounds.width);
            y[j] = (float) (d[1] * bounds.height);
        }
        double[][] values2 = calculateLScores(new DensityManager(x, y, bounds));
        if (upper == null) {
            upper = values2[1];
            lower = new double[upper.length];
            System.arraycopy(upper, 0, lower, 0, upper.length);
        } else {
            for (int m = upper.length; m-- > 0; ) {
                if (upper[m] < values2[1][m])
                    upper[m] = values2[1][m];
                if (lower[m] > values2[1][m])
                    lower[m] = values2[1][m];
            }
        }
    }
    String title = results.getName() + " Ripley's (L(r) - r) / r";
    Plot2 plot = new Plot2(title, "Radius", "(L(r) - r) / r", values[0], values[1]);
    // Get the limits
    double yMin = min(0, values[1]);
    double yMax = max(0, values[1]);
    if (iterations > 0) {
        yMin = min(yMin, lower);
        yMax = max(yMax, upper);
    }
    plot.setLimits(0, values[0][values[0].length - 1], yMin, yMax);
    if (iterations > 0) {
        plot.setColor(Color.BLUE);
        plot.addPoints(values[0], upper, 1);
        plot.setColor(Color.RED);
        plot.addPoints(values[0], lower, 1);
        plot.setColor(Color.BLACK);
    }
    Utils.display(title, plot);
}
Also used : Rectangle(java.awt.Rectangle) ExtendedGenericDialog(ij.gui.ExtendedGenericDialog) DensityManager(gdsc.core.clustering.DensityManager) Plot2(ij.gui.Plot2) Well19937c(org.apache.commons.math3.random.Well19937c) HaltonSequenceGenerator(org.apache.commons.math3.random.HaltonSequenceGenerator)
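
The min(0, values[1]) and max(0, values[1]) calls above are project helpers for the plot limits; the indexed class, org.apache.commons.math3.stat.descriptive.rank.Min (and its counterpart Max), computes the same quantities directly. A small standalone sketch with made-up score and envelope arrays standing in for values[1], lower and upper:

import org.apache.commons.math3.stat.descriptive.rank.Max;
import org.apache.commons.math3.stat.descriptive.rank.Min;

public class PlotLimitsDemo {

    public static void main(String[] args) {
        // Stand-ins for values[1] (the L-scores) and the confidence-interval envelope
        double[] scores = { -0.2, 0.4, 1.3, 0.9 };
        double[] lower = { -0.5, -0.1, 0.2, 0.3 };
        double[] upper = { 0.1, 0.6, 1.6, 1.1 };

        Min min = new Min();
        Max max = new Max();
        // Include zero so the (L(r) - r) / r = 0 reference line is always visible
        double yMin = Math.min(0, min.evaluate(scores));
        double yMax = Math.max(0, max.evaluate(scores));
        // Extend the limits to cover the randomised confidence intervals
        yMin = Math.min(yMin, min.evaluate(lower));
        yMax = Math.max(yMax, max.evaluate(upper));

        System.out.printf("y-axis limits: [%.3f, %.3f]%n", yMin, yMax);
    }
}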

Example 3 with Min

Use of org.apache.commons.math3.stat.descriptive.rank.Min in project GDSC-SMLM by aherbert.

The class CreateData, method drawImage.

//StoredDataStatistics rawPhotons = new StoredDataStatistics();
//StoredDataStatistics drawPhotons = new StoredDataStatistics();
//	private synchronized void addRaw(double d)
//	{
//		//rawPhotons.add(d);
//	}
//
//	private synchronized void addDraw(double d)
//	{
//		//drawPhotons.add(d);
//	}
/**
	 * Create an image from the localisations using the configured PSF width. Draws a new stack
	 * image.
	 * <p>
	 * Note that the localisations are filtered using the signal. The input list of localisations will be updated.
	 * 
	 * @param localisationSets
	 *            the localisation sets to draw
	 * @return the localisations that were drawn (i.e. that passed the signal filters)
	 */
private List<LocalisationModel> drawImage(final List<LocalisationModelSet> localisationSets) {
    if (localisationSets.isEmpty())
        return null;
    // Create a new list for all localisations that are drawn (i.e. pass the signal filters)
    List<LocalisationModelSet> newLocalisations = Collections.synchronizedList(new ArrayList<LocalisationModelSet>(localisationSets.size()));
    photonsRemoved = new AtomicInteger();
    t1Removed = new AtomicInteger();
    tNRemoved = new AtomicInteger();
    photonStats = new SummaryStatistics();
    // Add drawn spots to memory
    results = new MemoryPeakResults();
    Calibration c = new Calibration(settings.pixelPitch, settings.getTotalGain(), settings.exposureTime);
    c.setEmCCD((settings.getEmGain() > 1));
    c.setBias(settings.bias);
    c.setReadNoise(settings.readNoise * ((settings.getCameraGain() > 0) ? settings.getCameraGain() : 1));
    c.setAmplification(settings.getAmplification());
    results.setCalibration(c);
    results.setSortAfterEnd(true);
    results.begin();
    maxT = localisationSets.get(localisationSets.size() - 1).getTime();
    // Display image
    ImageStack stack = new ImageStack(settings.size, settings.size, maxT);
    final double psfSD = getPsfSD();
    if (psfSD <= 0)
        return null;
    ImagePSFModel imagePSFModel = null;
    if (imagePSF) {
        // Create one Image PSF model that can be copied
        imagePSFModel = createImagePSF(localisationSets);
        if (imagePSFModel == null)
            return null;
    }
    IJ.showStatus("Drawing image ...");
    // Multi-thread for speed
    // Note that the default Executors.newCachedThreadPool() will continue to make threads if
    // new tasks are added. We need to limit the tasks that can be added using a fixed size
    // blocking queue.
    // http://stackoverflow.com/questions/1800317/impossible-to-make-a-cached-thread-pool-with-a-size-limit
    // ExecutorService threadPool = Executors.newCachedThreadPool();
    ExecutorService threadPool = Executors.newFixedThreadPool(Prefs.getThreads());
    List<Future<?>> futures = new LinkedList<Future<?>>();
    // Count all the frames to process
    frame = 0;
    totalFrames = maxT;
    // Collect statistics on the number of photons actually simulated
    // Process all frames
    int i = 0;
    int lastT = -1;
    for (LocalisationModelSet l : localisationSets) {
        if (Utils.isInterrupted())
            break;
        if (l.getTime() != lastT) {
            lastT = l.getTime();
            futures.add(threadPool.submit(new ImageGenerator(localisationSets, newLocalisations, i, lastT, createPSFModel(imagePSFModel), results, stack, poissonNoise, new RandomDataGenerator(createRandomGenerator()))));
        }
        i++;
    }
    // Finish processing data
    Utils.waitForCompletion(futures);
    futures.clear();
    if (Utils.isInterrupted()) {
        IJ.showProgress(1);
        return null;
    }
    // Do all the frames that had no localisations
    for (int t = 1; t <= maxT; t++) {
        if (Utils.isInterrupted())
            break;
        if (stack.getPixels(t) == null) {
            futures.add(threadPool.submit(new ImageGenerator(localisationSets, newLocalisations, maxT, t, null, results, stack, poissonNoise, new RandomDataGenerator(createRandomGenerator()))));
        }
    }
    // Finish
    Utils.waitForCompletion(futures);
    threadPool.shutdown();
    IJ.showProgress(1);
    if (Utils.isInterrupted()) {
        return null;
    }
    results.end();
    // Clear memory
    imagePSFModel = null;
    threadPool = null;
    futures.clear();
    futures = null;
    if (photonsRemoved.get() > 0)
        Utils.log("Removed %d localisations with less than %.1f rendered photons", photonsRemoved.get(), settings.minPhotons);
    if (t1Removed.get() > 0)
        Utils.log("Removed %d localisations with no neighbours @ SNR %.2f", t1Removed.get(), settings.minSNRt1);
    if (tNRemoved.get() > 0)
        Utils.log("Removed %d localisations with valid neighbours @ SNR %.2f", tNRemoved.get(), settings.minSNRtN);
    if (photonStats.getN() > 0)
        Utils.log("Average photons rendered = %s +/- %s", Utils.rounded(photonStats.getMean()), Utils.rounded(photonStats.getStandardDeviation()));
    //System.out.printf("rawPhotons = %f\n", rawPhotons.getMean());
    //System.out.printf("drawPhotons = %f\n", drawPhotons.getMean());
    //Utils.showHistogram("draw photons", drawPhotons, "photons", true, 0, 1000);
    // Update with all those localisations that have been drawn
    localisationSets.clear();
    localisationSets.addAll(newLocalisations);
    newLocalisations = null;
    IJ.showStatus("Displaying image ...");
    ImageStack newStack = stack;
    if (!settings.rawImage) {
        // Get the global limits and ensure all values can be represented
        Object[] imageArray = stack.getImageArray();
        float[] limits = Maths.limits((float[]) imageArray[0]);
        for (int j = 1; j < imageArray.length; j++) limits = Maths.limits(limits, (float[]) imageArray[j]);
        // Leave bias in place
        limits[0] = 0;
        // Check if the image will fit in a 16-bit range
        if ((limits[1] - limits[0]) < 65535) {
            // Convert to 16-bit
            newStack = new ImageStack(stack.getWidth(), stack.getHeight(), stack.getSize());
            // Account for rounding
            final float min = (float) (limits[0] - 0.5);
            for (int j = 0; j < imageArray.length; j++) {
                float[] image = (float[]) imageArray[j];
                short[] pixels = new short[image.length];
                for (int k = 0; k < pixels.length; k++) {
                    pixels[k] = (short) (image[k] - min);
                }
                newStack.setPixels(pixels, j + 1);
                // Free memory
                imageArray[j] = null;
                // Attempt to stay within memory (check vs 32MB)
                if (MemoryPeakResults.freeMemory() < 33554432L)
                    MemoryPeakResults.runGCOnce();
            }
        } else {
            // Keep as 32-bit but round to whole numbers
            for (int j = 0; j < imageArray.length; j++) {
                float[] pixels = (float[]) imageArray[j];
                for (int k = 0; k < pixels.length; k++) {
                    pixels[k] = Math.round(pixels[k]);
                }
            }
        }
    }
    // Show image
    ImagePlus imp = Utils.display(CREATE_DATA_IMAGE_TITLE, newStack);
    ij.measure.Calibration cal = new ij.measure.Calibration();
    String unit = "nm";
    double unitPerPixel = settings.pixelPitch;
    if (unitPerPixel > 100) {
        unit = "um";
        unitPerPixel /= 1000.0;
    }
    cal.setUnit(unit);
    cal.pixelHeight = cal.pixelWidth = unitPerPixel;
    imp.setCalibration(cal);
    imp.setDimensions(1, 1, newStack.getSize());
    imp.resetDisplayRange();
    imp.updateAndDraw();
    saveImage(imp);
    results.setSource(new IJImageSource(imp));
    results.setName(CREATE_DATA_IMAGE_TITLE + " (" + TITLE + ")");
    results.setConfiguration(createConfiguration((float) psfSD));
    results.setBounds(new Rectangle(0, 0, settings.size, settings.size));
    MemoryPeakResults.addResults(results);
    setBenchmarkResults(imp, results);
    if (benchmarkMode && benchmarkParameters != null)
        benchmarkParameters.setPhotons(results);
    List<LocalisationModel> localisations = toLocalisations(localisationSets);
    savePulses(localisations, results, CREATE_DATA_IMAGE_TITLE);
    // Save the fixed and moving localisations into different datasets
    saveFixedAndMoving(results, CREATE_DATA_IMAGE_TITLE);
    return localisations;
}
Also used : Rectangle(java.awt.Rectangle) MemoryPeakResults(gdsc.smlm.results.MemoryPeakResults) ImagePSFModel(gdsc.smlm.model.ImagePSFModel) ImageStack(ij.ImageStack) RandomDataGenerator(org.apache.commons.math3.random.RandomDataGenerator) SummaryStatistics(org.apache.commons.math3.stat.descriptive.SummaryStatistics) Calibration(gdsc.smlm.results.Calibration) ImagePlus(ij.ImagePlus) LinkedList(java.util.LinkedList) IJImageSource(gdsc.smlm.ij.IJImageSource) LocalisationModel(gdsc.smlm.model.LocalisationModel) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) ExecutorService(java.util.concurrent.ExecutorService) Future(java.util.concurrent.Future) LocalisationModelSet(gdsc.smlm.model.LocalisationModelSet)
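
The photon statistics reported near the end of drawImage use a Commons Math SummaryStatistics, which maintains a running count, mean and standard deviation without storing the individual values. A minimal sketch of that accumulation pattern, with made-up photon counts:

import org.apache.commons.math3.stat.descriptive.SummaryStatistics;

public class PhotonStatsDemo {

    public static void main(String[] args) {
        SummaryStatistics photonStats = new SummaryStatistics();
        // Simulated rendered-photon counts for drawn localisations (illustrative values)
        double[] renderedPhotons = { 850.2, 910.7, 1020.4, 780.9, 995.1 };
        for (double photons : renderedPhotons) {
            photonStats.addValue(photons);
        }
        if (photonStats.getN() > 0) {
            System.out.printf("Average photons rendered = %.1f +/- %.1f (n=%d)%n",
                    photonStats.getMean(), photonStats.getStandardDeviation(), photonStats.getN());
        }
    }
}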

Example 4 with Min

Use of org.apache.commons.math3.stat.descriptive.rank.Min in project GDSC-SMLM by aherbert.

The class BenchmarkFilterAnalysis, method filterAnalysis.

private int filterAnalysis(FilterSet filterSet, int setNumber, DirectFilter currentOptimum, double rangeReduction) {
    // Check if the filters are the same so allowing optimisation
    final boolean allSameType = filterSet.allSameType();
    this.ga_resultsList = resultsList;
    Chromosome<FilterScore> best = null;
    String algorithm = "";
    // All the search algorithms use search dimensions.
    // Create search dimensions if needed (these are used for testing if the optimum is at the limit).
    ss_filter = null;
    ss_lower = null;
    ss_upper = null;
    FixedDimension[] originalDimensions = null;
    boolean rangeInput = false;
    boolean[] disabled = null;
    double[][] seed = null;
    boolean nonInteractive = false;
    if (allSameType) {
        // There should always be 1 filter
        ss_filter = (DirectFilter) filterSet.getFilters().get(0);
        int n = ss_filter.getNumberOfParameters();
        // Option to configure a range
        rangeInput = filterSet.getName().contains("Range");
        double[] range = new double[n];
        if (rangeInput && filterSet.size() == 4) {
            originalDimensions = new FixedDimension[n];
            // This is used as min/lower/upper/max
            final Filter minF = ss_filter;
            final Filter lowerF = filterSet.getFilters().get(1);
            final Filter upperF = filterSet.getFilters().get(2);
            final Filter maxF = filterSet.getFilters().get(3);
            for (int i = 0; i < n; i++) {
                double min = minF.getParameterValue(i);
                double lower = lowerF.getParameterValue(i);
                double upper = upperF.getParameterValue(i);
                range[i] = upper - lower;
                double max = maxF.getParameterValue(i);
                double minIncrement = ss_filter.getParameterIncrement(i);
                try {
                    originalDimensions[i] = new FixedDimension(min, max, minIncrement, lower, upper);
                } catch (IllegalArgumentException e) {
                    Utils.log(TITLE + " : Unable to configure dimension [%d] %s: " + e.getMessage(), i, ss_filter.getParameterName(i));
                    originalDimensions = null;
                    rangeInput = false;
                    break;
                }
            }
        }
        if (rangeInput && (filterSet.size() == 3 || filterSet.size() == 2)) {
            originalDimensions = new FixedDimension[n];
            // This is used as lower/upper[/increment]
            final Filter lowerF = ss_filter;
            final Filter upperF = filterSet.getFilters().get(1);
            for (int i = 0; i < n; i++) {
                // Do not disable if the increment is not set. This is left to the user to decide.
                //					if (incF.getParameterValue(i) == incF.getDisabledParameterValue(i) ||
                //							Double.isInfinite(incF.getParameterValue(i)))
                //					{
                //						// Not enabled
                //						dimensions[i] = new SearchDimension(incF.getDisabledParameterValue(i));
                //						continue;
                //					}
                double lower = lowerF.getParameterValue(i);
                double upper = upperF.getParameterValue(i);
                range[i] = upper - lower;
                ParameterType type = ss_filter.getParameterType(i);
                double min = BenchmarkSpotFit.getMin(type);
                double max = BenchmarkSpotFit.getMax(type);
                double minIncrement = ss_filter.getParameterIncrement(i);
                try {
                    originalDimensions[i] = new FixedDimension(min, max, minIncrement, lower, upper);
                } catch (IllegalArgumentException e) {
                    Utils.log(TITLE + " : Unable to configure dimension [%d] %s: " + e.getMessage(), i, ss_filter.getParameterName(i));
                    originalDimensions = null;
                    rangeInput = false;
                    break;
                }
            }
        }
        // Get the dimensions from the filters
        if (originalDimensions == null) {
            originalDimensions = new FixedDimension[n];
            // Allow inputting a filter set (e.g. saved from a previous optimisation)
            // Find the limits in the current scores
            final double[] lower = ss_filter.getParameters().clone();
            final double[] upper = lower.clone();
            // Allow the SearchSpace algorithms to be seeded with an initial population 
            // for the first evaluation of the optimum. This is done when the input filter 
            // set is not a range.
            seed = new double[filterSet.size()][];
            int c = 0;
            for (Filter f : filterSet.getFilters()) {
                final double[] point = f.getParameters();
                seed[c++] = point;
                for (int j = 0; j < lower.length; j++) {
                    if (lower[j] > point[j])
                        lower[j] = point[j];
                    if (upper[j] < point[j])
                        upper[j] = point[j];
                }
            }
            // Min/max must be set using values from BenchmarkSpotFit.
            for (int i = 0; i < n; i++) {
                if (lower[i] == upper[i]) {
                    // Not enabled
                    originalDimensions[i] = new FixedDimension(lower[i]);
                    continue;
                }
                ParameterType type = ss_filter.getParameterType(i);
                double min = BenchmarkSpotFit.getMin(type);
                double max = BenchmarkSpotFit.getMax(type);
                double minIncrement = ss_filter.getParameterIncrement(i);
                if (min > lower[i])
                    min = lower[i];
                if (max < upper[i])
                    max = upper[i];
                try {
                    originalDimensions[i] = new FixedDimension(min, max, minIncrement, lower[i], upper[i]);
                } catch (IllegalArgumentException e) {
                    Utils.log(TITLE + " : Unable to configure dimension [%d] %s: " + e.getMessage(), i, ss_filter.getParameterName(i));
                    originalDimensions = null;
                    break;
                }
            }
            if (originalDimensions == null) {
                // Failed to work out the dimensions. No optimisation will be possible.
                // Sort so that the filters are in a nice order for reporting
                filterSet.sort();
                // This will not be used when the dimensions are null
                seed = null;
            }
        }
        if (originalDimensions != null) {
            // Use the current optimum if we are doing a range optimisation
            if (currentOptimum != null && rangeInput && currentOptimum.getType().equals(ss_filter.getType()) && evolve != 0) {
                // Suppress dialogs and use the current settings
                nonInteractive = true;
                double[] p = currentOptimum.getParameters();
                // Range search uses SearchDimension and we must centre on the optimum after creation.							
                for (int i = 0; i < originalDimensions.length; i++) {
                    double centre = p[i];
                    double r = 0;
                    if (originalDimensions[i].isActive()) {
                        // Set the range around the centre.
                        // This uses the range for each param when we read the filters.
                        r = range[i];
                        // Optionally reduce the width of the dimensions. 
                        if (rangeReduction > 0 && rangeReduction < 1)
                            r *= rangeReduction;
                    }
                    double lower = centre - r * 0.5;
                    double upper = centre + r * 0.5;
                    originalDimensions[i] = originalDimensions[i].create(lower, upper);
                }
            }
            // Store the dimensions so we can do an 'at limit' check
            disabled = new boolean[originalDimensions.length];
            ss_lower = new double[originalDimensions.length];
            ss_upper = new double[originalDimensions.length];
            for (int i = 0; i < disabled.length; i++) {
                disabled[i] = !originalDimensions[i].isActive();
                ss_lower[i] = originalDimensions[i].lower;
                ss_upper[i] = originalDimensions[i].upper;
            }
        }
    } else {
        // Sort so that the filters are in a nice order for reporting
        filterSet.sort();
    }
    analysisStopWatch = StopWatch.createStarted();
    if (evolve == 1 && originalDimensions != null) {
        // Collect parameters for the genetic algorithm
        pauseFilterTimer();
        // Remember the step size settings
        double[] stepSize = stepSizeMap.get(setNumber);
        if (stepSize == null || stepSize.length != ss_filter.length()) {
            stepSize = ss_filter.mutationStepRange().clone();
            for (int j = 0; j < stepSize.length; j++) stepSize[j] *= delta;
            // See if the same number of parameters have been optimised in other algorithms
            boolean[] enabled = searchRangeMap.get(setNumber);
            if (enabled != null && enabled.length == stepSize.length) {
                for (int j = 0; j < stepSize.length; j++) if (!enabled[j])
                    stepSize[j] *= -1;
            }
        }
        GenericDialog gd = null;
        int[] indices = ss_filter.getChromosomeParameters();
        boolean runAlgorithm = nonInteractive;
        if (!nonInteractive) {
            // Ask the user for the mutation step parameters.
            gd = new GenericDialog(TITLE);
            String prefix = setNumber + "_";
            gd.addMessage("Configure the genetic algorithm for [" + setNumber + "] " + filterSet.getName());
            gd.addNumericField(prefix + "Population_size", populationSize, 0);
            gd.addNumericField(prefix + "Failure_limit", failureLimit, 0);
            gd.addNumericField(prefix + "Tolerance", tolerance, -1);
            gd.addNumericField(prefix + "Converged_count", convergedCount, 0);
            gd.addSlider(prefix + "Mutation_rate", 0.05, 1, mutationRate);
            gd.addSlider(prefix + "Crossover_rate", 0.05, 1, crossoverRate);
            gd.addSlider(prefix + "Mean_children", 0.05, 3, meanChildren);
            gd.addSlider(prefix + "Selection_fraction", 0.05, 0.5, selectionFraction);
            gd.addCheckbox(prefix + "Ramped_selection", rampedSelection);
            gd.addCheckbox(prefix + "Save_option", saveOption);
            gd.addMessage("Configure the step size for each parameter");
            for (int j = 0; j < indices.length; j++) {
                // Do not mutate parameters that were not expanded, i.e. the input did not vary them.
                final double step = (originalDimensions[indices[j]].isActive()) ? stepSize[j] : 0;
                gd.addNumericField(getDialogName(prefix, ss_filter, indices[j]), step, 2);
            }
            gd.showDialog();
            runAlgorithm = !gd.wasCanceled();
        }
        if (runAlgorithm) {
            // Used to create random sample
            FixedDimension[] dimensions = Arrays.copyOf(originalDimensions, originalDimensions.length);
            if (!nonInteractive) {
                populationSize = (int) Math.abs(gd.getNextNumber());
                if (populationSize < 10)
                    populationSize = 10;
                failureLimit = (int) Math.abs(gd.getNextNumber());
                tolerance = gd.getNextNumber();
                // Allow negatives
                convergedCount = (int) gd.getNextNumber();
                mutationRate = Math.abs(gd.getNextNumber());
                crossoverRate = Math.abs(gd.getNextNumber());
                meanChildren = Math.abs(gd.getNextNumber());
                selectionFraction = Math.abs(gd.getNextNumber());
                rampedSelection = gd.getNextBoolean();
                saveOption = gd.getNextBoolean();
                for (int j = 0; j < indices.length; j++) {
                    stepSize[j] = gd.getNextNumber();
                }
                // Store for repeat analysis
                stepSizeMap.put(setNumber, stepSize);
            }
            for (int j = 0; j < indices.length; j++) {
                // A zero step size will keep the parameter but prevent range mutation.
                if (stepSize[j] < 0) {
                    dimensions[indices[j]] = new FixedDimension(ss_filter.getDisabledParameterValue(indices[j]));
                    disabled[indices[j]] = true;
                }
            }
            //				// Reset negatives to zero
            //				stepSize = stepSize.clone();
            //				for (int j = 0; j < stepSize.length; j++)
            //					if (stepSize[j] < 0)
            //						stepSize[j] = 0;
            // Create the genetic algorithm
            RandomDataGenerator random = new RandomDataGenerator(new Well44497b());
            SimpleMutator<FilterScore> mutator = new SimpleMutator<FilterScore>(random, mutationRate);
            // Override the settings with the step length, a min of zero and the configured upper
            double[] upper = ss_filter.upperLimit();
            mutator.overrideChromosomeSettings(stepSize, new double[stepSize.length], upper);
            Recombiner<FilterScore> recombiner = new SimpleRecombiner<FilterScore>(random, crossoverRate, meanChildren);
            SelectionStrategy<FilterScore> selectionStrategy;
            // If the initial population is huge ensure that the first selection culls to the correct size
            final int selectionMax = (int) (selectionFraction * populationSize);
            if (rampedSelection)
                selectionStrategy = new RampedSelectionStrategy<FilterScore>(random, selectionFraction, selectionMax);
            else
                selectionStrategy = new SimpleSelectionStrategy<FilterScore>(random, selectionFraction, selectionMax);
            ToleranceChecker<FilterScore> ga_checker = new InterruptChecker(tolerance, tolerance * 1e-3, convergedCount);
            // Create new random filters if the population is initially below the population size
            List<Filter> filters = filterSet.getFilters();
            if (filterSet.size() < populationSize) {
                filters = new ArrayList<Filter>(populationSize);
                // Add the existing filters if they are not a range input file
                if (!rangeInput)
                    filters.addAll(filterSet.getFilters());
                // Add current optimum to seed
                if (nonInteractive)
                    filters.add(currentOptimum);
                // The GA does not use the min interval grid so sample without rounding
                double[][] sample = SearchSpace.sampleWithoutRounding(dimensions, populationSize - filters.size(), null);
                filters.addAll(searchSpaceToFilters(sample));
            }
            ga_population = new Population<FilterScore>(filters);
            ga_population.setPopulationSize(populationSize);
            ga_population.setFailureLimit(failureLimit);
            selectionStrategy.setTracker(this);
            // Evolve
            algorithm = EVOLVE[evolve];
            ga_statusPrefix = algorithm + " [" + setNumber + "] " + filterSet.getName() + " ... ";
            ga_iteration = 0;
            ga_population.setTracker(this);
            createGAWindow();
            resumeFilterTimer();
            best = ga_population.evolve(mutator, recombiner, this, selectionStrategy, ga_checker);
            if (best != null) {
                // In case optimisation was stopped
                IJ.resetEscape();
                // The GA may produce coordinates off the min interval grid
                best = enumerateMinInterval(best, stepSize, indices);
                // Now update the filter set for final assessment
                filterSet = new FilterSet(filterSet.getName(), populationToFilters(ga_population.getIndividuals()));
                // Option to save the filters
                if (saveOption)
                    saveFilterSet(filterSet, setNumber, !nonInteractive);
            }
        } else
            resumeFilterTimer();
    }
    if ((evolve == 2 || evolve == 4) && originalDimensions != null) {
        // Collect parameters for the range search algorithm
        pauseFilterTimer();
        boolean isStepSearch = evolve == 4;
        // The step search should use a multi-dimension refinement and no range reduction
        SearchSpace.RefinementMode myRefinementMode = SearchSpace.RefinementMode.MULTI_DIMENSION;
        // Remember the enabled settings
        boolean[] enabled = searchRangeMap.get(setNumber);
        int n = ss_filter.getNumberOfParameters();
        if (enabled == null || enabled.length != n) {
            enabled = new boolean[n];
            Arrays.fill(enabled, true);
            // See if the same number of parameters have been optimised in other algorithms
            double[] stepSize = stepSizeMap.get(setNumber);
            if (stepSize != null && enabled.length == stepSize.length) {
                for (int j = 0; j < stepSize.length; j++) if (stepSize[j] < 0)
                    enabled[j] = false;
            }
        }
        GenericDialog gd = null;
        boolean runAlgorithm = nonInteractive;
        if (!nonInteractive) {
            // Ask the user for the search parameters.
            gd = new GenericDialog(TITLE);
            String prefix = setNumber + "_";
            gd.addMessage("Configure the " + EVOLVE[evolve] + " algorithm for [" + setNumber + "] " + filterSet.getName());
            gd.addSlider(prefix + "Width", 1, 5, rangeSearchWidth);
            if (!isStepSearch) {
                gd.addCheckbox(prefix + "Save_option", saveOption);
                gd.addNumericField(prefix + "Max_iterations", maxIterations, 0);
                String[] modes = SettingsManager.getNames((Object[]) SearchSpace.RefinementMode.values());
                gd.addSlider(prefix + "Reduce", 0.01, 0.99, rangeSearchReduce);
                gd.addChoice("Refinement", modes, modes[refinementMode]);
            }
            gd.addNumericField(prefix + "Seed_size", seedSize, 0);
            // Add choice of fields to optimise
            for (int i = 0; i < n; i++) gd.addCheckbox(getDialogName(prefix, ss_filter, i), enabled[i]);
            gd.showDialog();
            runAlgorithm = !gd.wasCanceled();
        }
        if (runAlgorithm) {
            SearchDimension[] dimensions = new SearchDimension[n];
            if (!nonInteractive) {
                rangeSearchWidth = (int) gd.getNextNumber();
                if (!isStepSearch) {
                    saveOption = gd.getNextBoolean();
                    maxIterations = (int) gd.getNextNumber();
                    refinementMode = gd.getNextChoiceIndex();
                    rangeSearchReduce = gd.getNextNumber();
                }
                seedSize = (int) gd.getNextNumber();
                for (int i = 0; i < n; i++) enabled[i] = gd.getNextBoolean();
                searchRangeMap.put(setNumber, enabled);
            }
            if (!isStepSearch)
                myRefinementMode = SearchSpace.RefinementMode.values()[refinementMode];
            for (int i = 0; i < n; i++) {
                if (enabled[i]) {
                    try {
                        dimensions[i] = originalDimensions[i].create(rangeSearchWidth);
                        dimensions[i].setPad(true);
                        // Prevent range reduction so that the step search just does a single refinement step
                        dimensions[i].setReduceFactor((isStepSearch) ? 1 : rangeSearchReduce);
                        // Centre on current optimum
                        if (nonInteractive)
                            dimensions[i].setCentre(currentOptimum.getParameterValue(i));
                    } catch (IllegalArgumentException e) {
                        IJ.error(TITLE, String.format("Unable to configure dimension [%d] %s: " + e.getMessage(), i, ss_filter.getParameterName(i)));
                        return -1;
                    }
                } else {
                    dimensions[i] = new SearchDimension(ss_filter.getDisabledParameterValue(i));
                }
            }
            for (int i = 0; i < disabled.length; i++) disabled[i] = !dimensions[i].active;
            // Check the number of combinations is OK
            long combinations = SearchSpace.countCombinations(dimensions);
            if (!nonInteractive && combinations > 10000) {
                gd = new GenericDialog(TITLE);
                gd.addMessage(String.format("%d combinations for the configured dimensions.\n \nClick 'Yes' to optimise.", combinations));
                gd.enableYesNoCancel();
                gd.hideCancelButton();
                gd.showDialog();
                if (!gd.wasOKed()) {
                    combinations = 0;
                }
            }
            if (combinations == 0) {
                resumeFilterTimer();
            } else {
                algorithm = EVOLVE[evolve] + " " + rangeSearchWidth;
                ga_statusPrefix = algorithm + " [" + setNumber + "] " + filterSet.getName() + " ... ";
                ga_iteration = 0;
                es_optimum = null;
                SearchSpace ss = new SearchSpace();
                ss.setTracker(this);
                if (seedSize > 0) {
                    double[][] sample;
                    // Add current optimum to seed
                    if (nonInteractive) {
                        sample = new double[1][];
                        sample[0] = currentOptimum.getParameters();
                        seed = merge(seed, sample);
                    }
                    int size = (seed == null) ? 0 : seed.length;
                    // Sample without rounding as the seed will be rounded
                    sample = SearchSpace.sampleWithoutRounding(dimensions, seedSize - size, null);
                    seed = merge(seed, sample);
                }
                // Note: If we have an optimum and we are not seeding this should not matter as the dimensions 
                // have been centred on the current optimum					
                ss.seed(seed);
                ConvergenceChecker<FilterScore> checker = new InterruptConvergenceChecker(0, 0, maxIterations);
                createGAWindow();
                resumeFilterTimer();
                SearchResult<FilterScore> optimum = ss.search(dimensions, this, checker, myRefinementMode);
                if (optimum != null) {
                    // In case optimisation was stopped
                    IJ.resetEscape();
                    best = ((SimpleFilterScore) optimum.score).r.filter;
                    if (seedSize > 0) {
                    // Not required as the search now respects the min interval
                    // The optimum may be off grid if it was from the seed
                    //best = enumerateMinInterval(best, enabled);
                    }
                    // Now update the filter set for final assessment
                    filterSet = new FilterSet(filterSet.getName(), searchSpaceToFilters((DirectFilter) best, ss.getSearchSpace()));
                    // Option to save the filters
                    if (saveOption)
                        saveFilterSet(filterSet, setNumber, !nonInteractive);
                }
            }
        } else
            resumeFilterTimer();
    }
    if (evolve == 3 && originalDimensions != null) {
        // Collect parameters for the enrichment search algorithm
        pauseFilterTimer();
        boolean[] enabled = searchRangeMap.get(setNumber);
        int n = ss_filter.getNumberOfParameters();
        if (enabled == null || enabled.length != n) {
            enabled = new boolean[n];
            Arrays.fill(enabled, true);
            // See if the same number of parameters have been optimised in other algorithms
            double[] stepSize = stepSizeMap.get(setNumber);
            if (stepSize != null && enabled.length == stepSize.length) {
                for (int j = 0; j < stepSize.length; j++) if (stepSize[j] < 0)
                    enabled[j] = false;
            }
        }
        GenericDialog gd = null;
        boolean runAlgorithm = nonInteractive;
        if (!nonInteractive) {
            // Ask the user for the search parameters.
            gd = new GenericDialog(TITLE);
            String prefix = setNumber + "_";
            gd.addMessage("Configure the enrichment search algorithm for [" + setNumber + "] " + filterSet.getName());
            gd.addCheckbox(prefix + "Save_option", saveOption);
            gd.addNumericField(prefix + "Max_iterations", maxIterations, 0);
            gd.addNumericField(prefix + "Converged_count", convergedCount, 0);
            gd.addNumericField(prefix + "Samples", enrichmentSamples, 0);
            gd.addSlider(prefix + "Fraction", 0.01, 0.99, enrichmentFraction);
            gd.addSlider(prefix + "Padding", 0, 0.99, enrichmentPadding);
            // Add choice of fields to optimise
            for (int i = 0; i < n; i++) gd.addCheckbox(getDialogName(prefix, ss_filter, i), enabled[i]);
            gd.showDialog();
            runAlgorithm = !gd.wasCanceled();
        }
        if (runAlgorithm) {
            FixedDimension[] dimensions = Arrays.copyOf(originalDimensions, originalDimensions.length);
            if (!nonInteractive) {
                saveOption = gd.getNextBoolean();
                maxIterations = (int) gd.getNextNumber();
                convergedCount = (int) gd.getNextNumber();
                enrichmentSamples = (int) gd.getNextNumber();
                enrichmentFraction = gd.getNextNumber();
                enrichmentPadding = gd.getNextNumber();
                for (int i = 0; i < n; i++) enabled[i] = gd.getNextBoolean();
                searchRangeMap.put(setNumber, enabled);
            }
            for (int i = 0; i < n; i++) {
                if (!enabled[i])
                    dimensions[i] = new FixedDimension(ss_filter.getDisabledParameterValue(i));
            }
            for (int i = 0; i < disabled.length; i++) disabled[i] = !dimensions[i].active;
            algorithm = EVOLVE[evolve];
            ga_statusPrefix = algorithm + " [" + setNumber + "] " + filterSet.getName() + " ... ";
            ga_iteration = 0;
            es_optimum = null;
            SearchSpace ss = new SearchSpace();
            ss.setTracker(this);
            // Add current optimum to seed
            if (nonInteractive) {
                double[][] sample = new double[1][];
                sample[0] = currentOptimum.getParameters();
                seed = merge(seed, sample);
            }
            ss.seed(seed);
            ConvergenceChecker<FilterScore> checker = new InterruptConvergenceChecker(0, 0, maxIterations, convergedCount);
            createGAWindow();
            resumeFilterTimer();
            SearchResult<FilterScore> optimum = ss.enrichmentSearch(dimensions, this, checker, enrichmentSamples, enrichmentFraction, enrichmentPadding);
            if (optimum != null) {
                // In case optimisation was stopped
                IJ.resetEscape();
                best = ((SimpleFilterScore) optimum.score).r.filter;
                // Not required as the search now respects the min interval
                // Enumerate on the min interval to produce the final filter
                //best = enumerateMinInterval(best, enabled);
                // Now update the filter set for final assessment
                filterSet = new FilterSet(filterSet.getName(), searchSpaceToFilters((DirectFilter) best, ss.getSearchSpace()));
                // Option to save the filters
                if (saveOption)
                    saveFilterSet(filterSet, setNumber, !nonInteractive);
            }
        } else
            resumeFilterTimer();
    }
    IJ.showStatus("Analysing [" + setNumber + "] " + filterSet.getName() + " ...");
    // Do not support plotting if we used optimisation
    double[] xValues = (best != null || isHeadless || (plotTopN == 0)) ? null : new double[filterSet.size()];
    double[] yValues = (xValues == null) ? null : new double[xValues.length];
    SimpleFilterScore max = null;
    // It can just assess the top 1 required for the summary.
    if (best != null) {
        // Only assess the top 1 filter for the summary
        List<Filter> list = new ArrayList<Filter>();
        list.add((DirectFilter) best);
        filterSet = new FilterSet(filterSet.getName(), list);
    }
    // Score the filters and report the results if configured.
    FilterScoreResult[] scoreResults = scoreFilters(setUncomputedStrength(filterSet), showResultsTable);
    if (scoreResults == null)
        return -1;
    analysisStopWatch.stop();
    for (int index = 0; index < scoreResults.length; index++) {
        final FilterScoreResult scoreResult = scoreResults[index];
        if (xValues != null) {
            xValues[index] = scoreResult.filter.getNumericalValue();
            yValues[index] = scoreResult.score;
        }
        final SimpleFilterScore result = new SimpleFilterScore(scoreResult, allSameType, scoreResult.criteria >= minCriteria);
        if (result.compareTo(max) < 0) {
            max = result;
        }
    }
    if (showResultsTable) {
        BufferedTextWindow tw = null;
        if (resultsWindow != null) {
            tw = new BufferedTextWindow(resultsWindow);
            tw.setIncrement(Integer.MAX_VALUE);
        }
        for (int index = 0; index < scoreResults.length; index++) addToResultsWindow(tw, scoreResults[index].text);
        if (resultsWindow != null)
            resultsWindow.getTextPanel().updateDisplay();
    }
    // Check the top filter against the limits of the original dimensions
    char[] atLimit = null;
    if (allSameType && originalDimensions != null) {
        DirectFilter filter = max.r.filter;
        int[] indices = filter.getChromosomeParameters();
        atLimit = new char[indices.length];
        StringBuilder sb = new StringBuilder(200);
        for (int j = 0; j < indices.length; j++) {
            atLimit[j] = ComplexFilterScore.WITHIN;
            final int p = indices[j];
            if (disabled[p])
                continue;
            final double value = filter.getParameterValue(p);
            double lowerLimit = originalDimensions[p].getLower();
            double upperLimit = originalDimensions[p].getUpper();
            int c1 = Double.compare(value, lowerLimit);
            if (c1 <= 0) {
                atLimit[j] = ComplexFilterScore.FLOOR;
                sb.append(" : ").append(filter.getParameterName(p)).append(' ').append(atLimit[j]).append('[').append(Utils.rounded(value));
                if (c1 == -1) {
                    atLimit[j] = ComplexFilterScore.BELOW;
                    sb.append("<").append(Utils.rounded(lowerLimit));
                }
                sb.append("]");
            } else {
                int c2 = Double.compare(value, upperLimit);
                if (c2 >= 0) {
                    atLimit[j] = ComplexFilterScore.CEIL;
                    sb.append(" : ").append(filter.getParameterName(p)).append(' ').append(atLimit[j]).append('[').append(Utils.rounded(value));
                    if (c2 == 1) {
                        atLimit[j] = ComplexFilterScore.ABOVE;
                        sb.append(">").append(Utils.rounded(upperLimit));
                    }
                    sb.append("]");
                }
            }
        }
        if (sb.length() > 0) {
            if (max.criteriaPassed) {
                Utils.log("Warning: Top filter (%s @ %s|%s) [%s] at the limit of the expanded range%s", filter.getName(), Utils.rounded((invertScore) ? -max.score : max.score), Utils.rounded((invertCriteria) ? -minCriteria : minCriteria), limitFailCount + limitRange, sb.toString());
            } else {
                Utils.log("Warning: Top filter (%s @ -|%s) [%s] at the limit of the expanded range%s", filter.getName(), Utils.rounded((invertCriteria) ? -max.criteria : max.criteria), limitFailCount + limitRange, sb.toString());
            }
        }
    }
    // Note that max should never be null since this method is not run with an empty filter set
    // We may have no filters that pass the criteria
    String type = max.r.filter.getType();
    if (!max.criteriaPassed) {
        Utils.log("Warning: Filter does not pass the criteria: %s : Best = %s using %s", type, Utils.rounded((invertCriteria) ? -max.criteria : max.criteria), max.r.filter.getName());
        return 0;
    }
    // This could be an option?
    boolean allowDuplicates = true;
    // XXX - Commented out the requirement to be the same type to store for later analysis. 
    // This may break the code, however I think that all filter sets should be able to have a best filter
    // irrespective of whether they were the same type or not.
    //if (allSameType)
    //{
    ComplexFilterScore newFilterScore = new ComplexFilterScore(max.r, atLimit, algorithm, analysisStopWatch.getTime(), "", 0);
    addBestFilter(type, allowDuplicates, newFilterScore);
    // Add spacer at end of each result set
    if (isHeadless) {
        if (showResultsTable && filterSet.size() > 1)
            IJ.log("");
    } else {
        if (showResultsTable && filterSet.size() > 1)
            resultsWindow.append("");
        if (plotTopN > 0 && xValues != null) {
            // Check the xValues are unique. Since the filters have been sorted by their
            // numeric value we only need to compare adjacent entries.
            boolean unique = true;
            for (int ii = 0; ii < xValues.length - 1; ii++) {
                if (xValues[ii] == xValues[ii + 1]) {
                    unique = false;
                    break;
                }
            }
            String xAxisName = filterSet.getValueName();
            if (unique) {
                // Check the values all refer to the same property
                for (Filter filter : filterSet.getFilters()) {
                    if (!xAxisName.equals(filter.getNumericalValueName())) {
                        unique = false;
                        break;
                    }
                }
            }
            if (!unique) {
                // If not unique then renumber them and use an arbitrary label
                xAxisName = "Filter";
                for (int ii = 0; ii < xValues.length; ii++) xValues[ii] = ii + 1;
            }
            String title = filterSet.getName();
            // Check if a previous filter set had the same name, update if necessary
            NamedPlot p = getNamedPlot(title);
            if (p == null)
                plots.add(new NamedPlot(title, xAxisName, xValues, yValues));
            else
                p.updateValues(xAxisName, xValues, yValues);
            if (plots.size() > plotTopN) {
                Collections.sort(plots);
                p = plots.remove(plots.size() - 1);
            }
        }
    }
    return 0;
}
Also used : SimpleRecombiner(gdsc.smlm.ga.SimpleRecombiner) SearchSpace(gdsc.smlm.search.SearchSpace) ArrayList(java.util.ArrayList) RandomDataGenerator(org.apache.commons.math3.random.RandomDataGenerator) BufferedTextWindow(gdsc.core.ij.BufferedTextWindow) SearchDimension(gdsc.smlm.search.SearchDimension) FixedDimension(gdsc.smlm.search.FixedDimension) FilterSet(gdsc.smlm.results.filter.FilterSet) RampedSelectionStrategy(gdsc.smlm.ga.RampedSelectionStrategy) GenericDialog(ij.gui.GenericDialog) NonBlockingGenericDialog(ij.gui.NonBlockingGenericDialog) ParameterType(gdsc.smlm.results.filter.ParameterType) IDirectFilter(gdsc.smlm.results.filter.IDirectFilter) DirectFilter(gdsc.smlm.results.filter.DirectFilter) SimpleSelectionStrategy(gdsc.smlm.ga.SimpleSelectionStrategy) IDirectFilter(gdsc.smlm.results.filter.IDirectFilter) DirectFilter(gdsc.smlm.results.filter.DirectFilter) Filter(gdsc.smlm.results.filter.Filter) MultiPathFilter(gdsc.smlm.results.filter.MultiPathFilter) MaximaSpotFilter(gdsc.smlm.filters.MaximaSpotFilter) SimpleMutator(gdsc.smlm.ga.SimpleMutator) FilterScore(gdsc.smlm.results.filter.FilterScore) Well44497b(org.apache.commons.math3.random.Well44497b)
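
The genetic-algorithm and search branches above seed their populations by sampling points inside the configured dimensions. A small standalone sketch of that sampling step using the same Commons Math primitives (a Well44497b generator driving a RandomDataGenerator); the bounds here are hypothetical stand-ins for the FixedDimension lower/upper limits:

import org.apache.commons.math3.random.RandomDataGenerator;
import org.apache.commons.math3.random.Well44497b;

public class SeedSampleDemo {

    public static void main(String[] args) {
        RandomDataGenerator random = new RandomDataGenerator(new Well44497b(12345));
        // Hypothetical lower/upper limits for three filter parameters
        double[] lower = { 20.0, 0.5, 0.0 };
        double[] upper = { 200.0, 3.0, 50.0 };
        int samples = 4;
        double[][] seed = new double[samples][lower.length];
        for (int i = 0; i < samples; i++) {
            for (int j = 0; j < lower.length; j++) {
                // Draw each parameter uniformly within its dimension limits
                seed[i][j] = random.nextUniform(lower[j], upper[j]);
            }
            System.out.println(java.util.Arrays.toString(seed[i]));
        }
    }
}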

Example 5 with Min

Use of org.apache.commons.math3.stat.descriptive.rank.Min in project GDSC-SMLM by aherbert.

The class LVMGradientProcedureTest, method gradientProcedureSupportsPrecomputed.

private void gradientProcedureSupportsPrecomputed(final Type type) {
    int iter = 10;
    rdg = new RandomDataGenerator(new Well19937c(30051977));
    ArrayList<double[]> paramsList = new ArrayList<double[]>(iter);
    ArrayList<double[]> yList = new ArrayList<double[]>(iter);
    // 3 peaks
    createData(3, iter, paramsList, yList, true);
    for (int i = 0; i < paramsList.size(); i++) {
        final double[] y = yList.get(i);
        // Add Gaussian read noise so we have negatives
        double min = Maths.min(y);
        for (int j = 0; j < y.length; j++) y[j] = y[j] - min + rdg.nextGaussian(0, Noise);
    }
    // We want to know that:
    // y|peak1+peak2+peak3 == y|peak1+peak2+peak3(precomputed)
    // We want to know when:
    // y|peak1+peak2+peak3 != y-peak3|peak1+peak2
    // i.e. we cannot subtract a precomputed peak from the data, it must be included in the fit
    // E.G. LSQ - subtraction is OK, MLE/WLSQ - subtraction is not allowed
    Gaussian2DFunction f123 = GaussianFunctionFactory.create2D(3, blockWidth, blockWidth, GaussianFunctionFactory.FIT_ERF_FREE_CIRCLE, null);
    Gaussian2DFunction f12 = GaussianFunctionFactory.create2D(2, blockWidth, blockWidth, GaussianFunctionFactory.FIT_ERF_FREE_CIRCLE, null);
    Gaussian2DFunction f3 = GaussianFunctionFactory.create2D(1, blockWidth, blockWidth, GaussianFunctionFactory.FIT_ERF_FREE_CIRCLE, null);
    int nparams = f12.getNumberOfGradients();
    int[] indices = f12.gradientIndices();
    final double[] b = new double[f12.size()];
    double delta = 1e-3;
    DoubleEquality eq = new DoubleEquality(5e-3, 1e-6);
    double[] a1peaks = new double[7];
    final double[] y_b = new double[b.length];
    for (int i = 0; i < paramsList.size(); i++) {
        final double[] y = yList.get(i);
        double[] a3peaks = paramsList.get(i);
        double[] a2peaks = Arrays.copyOf(a3peaks, 1 + 2 * 6);
        double[] a2peaks2 = a2peaks.clone();
        for (int j = 1; j < 7; j++) a1peaks[j] = a3peaks[j + 2 * 6];
        // Evaluate peak 3 to get the background and subtract it from the data to get the new data
        f3.initialise0(a1peaks);
        f3.forEach(new ValueProcedure() {

            int k = 0;

            public void execute(double value) {
                b[k] = value;
                // Remove negatives for MLE
                if (type == Type.MLE) {
                    y[k] = Math.max(0, y[k]);
                    y_b[k] = Math.max(0, y[k] - value);
                } else {
                    y_b[k] = y[k] - value;
                }
                k++;
            }
        });
        // These should be the same
        LVMGradientProcedure p123 = LVMGradientProcedureFactory.create(y, f123, type);
        LVMGradientProcedure p12b3 = LVMGradientProcedureFactory.create(y, PrecomputedGradient1Function.wrapGradient1Function(f12, b), type);
        // This may be different
        LVMGradientProcedure p12m3 = LVMGradientProcedureFactory.create(y_b, f12, type);
        // Check they are the same
        p123.gradient(a3peaks);
        double[][] m123 = p123.getAlphaMatrix();
        p12b3.gradient(a2peaks);
        double s = p12b3.value;
        double[] beta = p12b3.beta.clone();
        double[][] alpha = p12b3.getAlphaMatrix();
        System.out.printf("MLE=%b [%d] p12b3  %f  %f\n", type, i, p123.value, s);
        Assert.assertTrue("p12b3 Not same value @ " + i, eq.almostEqualRelativeOrAbsolute(p123.value, s));
        Assert.assertTrue("p12b3 Not same gradient @ " + i, eq.almostEqualRelativeOrAbsolute(beta, p123.beta));
        for (int j = 0; j < alpha.length; j++) Assert.assertTrue("p12b3 Not same alpha @ " + j, eq.almostEqualRelativeOrAbsolute(alpha[j], m123[j]));
        // Check actual gradients are correct
        for (int j = 0; j < nparams; j++) {
            int k = indices[j];
            double d = Precision.representableDelta(a2peaks[k], (a2peaks[k] == 0) ? 1e-3 : a2peaks[k] * delta);
            a2peaks2[k] = a2peaks[k] + d;
            p12b3.value(a2peaks2);
            double s1 = p12b3.value;
            a2peaks2[k] = a2peaks[k] - d;
            p12b3.value(a2peaks2);
            double s2 = p12b3.value;
            a2peaks2[k] = a2peaks[k];
            // Apply a factor of -2 to compute the actual gradients:
            // See Numerical Recipes in C++, 2nd Ed. Equation 15.5.6 for Nonlinear Models
            beta[j] *= -2;
            double gradient = (s1 - s2) / (2 * d);
            System.out.printf("[%d,%d] %f  (%s %f+/-%f)  %f  ?=  %f  (%f)\n", i, k, s, f12.getName(k), a2peaks[k], d, beta[j], gradient, DoubleEquality.relativeError(gradient, beta[j]));
            Assert.assertTrue("Not same gradient @ " + j, eq.almostEqualRelativeOrAbsolute(beta[j], gradient));
        }
        // Check these may be different
        p12m3.gradient(a2peaks);
        s = p12m3.value;
        beta = p12m3.beta.clone();
        alpha = p12m3.getAlphaMatrix();
        System.out.printf("%s [%d] p12m3  %f  %f\n", type, i, p123.value, s);
        if (type != Type.LSQ) {
            Assert.assertFalse("p12b3 Same value @ " + i, eq.almostEqualRelativeOrAbsolute(p123.value, s));
            Assert.assertFalse("p12b3 Same gradient @ " + i, eq.almostEqualRelativeOrAbsolute(beta, p123.beta));
            for (int j = 0; j < alpha.length; j++) {
                //System.out.printf("%s != %s\n", Arrays.toString(alpha[j]), Arrays.toString(m123[j]));
                Assert.assertFalse("p12b3 Same alpha @ " + j, eq.almostEqualRelativeOrAbsolute(alpha[j], m123[j]));
            }
        } else {
            Assert.assertTrue("p12b3 Not same value @ " + i, eq.almostEqualRelativeOrAbsolute(p123.value, s));
            Assert.assertTrue("p12b3 Not same gradient @ " + i, eq.almostEqualRelativeOrAbsolute(beta, p123.beta));
            for (int j = 0; j < alpha.length; j++) Assert.assertTrue("p12b3 Not same alpha @ " + j, eq.almostEqualRelativeOrAbsolute(alpha[j], m123[j]));
        }
        // Check actual gradients are correct
        for (int j = 0; j < nparams; j++) {
            int k = indices[j];
            double d = Precision.representableDelta(a2peaks[k], (a2peaks[k] == 0) ? 1e-3 : a2peaks[k] * delta);
            a2peaks2[k] = a2peaks[k] + d;
            p12m3.value(a2peaks2);
            double s1 = p12m3.value;
            a2peaks2[k] = a2peaks[k] - d;
            p12m3.value(a2peaks2);
            double s2 = p12m3.value;
            a2peaks2[k] = a2peaks[k];
            // Apply a factor of -2 to compute the actual gradients:
            // See Numerical Recipes in C++, 2nd Ed. Equation 15.5.6 for Nonlinear Models
            beta[j] *= -2;
            double gradient = (s1 - s2) / (2 * d);
            System.out.printf("[%d,%d] %f  (%s %f+/-%f)  %f  ?=  %f  (%f)\n", i, k, s, f12.getName(k), a2peaks[k], d, beta[j], gradient, DoubleEquality.relativeError(gradient, beta[j]));
            Assert.assertTrue("Not same gradient @ " + j, eq.almostEqualRelativeOrAbsolute(beta[j], gradient));
        }
    }
}
Also used : ValueProcedure(gdsc.smlm.function.ValueProcedure) RandomDataGenerator(org.apache.commons.math3.random.RandomDataGenerator) ArrayList(java.util.ArrayList) Well19937c(org.apache.commons.math3.random.Well19937c) ErfGaussian2DFunction(gdsc.smlm.function.gaussian.erf.ErfGaussian2DFunction) Gaussian2DFunction(gdsc.smlm.function.gaussian.Gaussian2DFunction) SingleFreeCircularErfGaussian2DFunction(gdsc.smlm.function.gaussian.erf.SingleFreeCircularErfGaussian2DFunction) DoubleEquality(gdsc.core.utils.DoubleEquality)
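
The test validates the analytic gradients against central finite differences, using Precision.representableDelta to keep the step exactly representable around each parameter value. The same check applied to a simple one-dimensional function, as a standalone sketch:

import org.apache.commons.math3.util.Precision;

public class GradientCheckDemo {

    // f(x) = x^3 - 2x, with analytic derivative f'(x) = 3x^2 - 2
    static double f(double x) {
        return x * x * x - 2 * x;
    }

    static double df(double x) {
        return 3 * x * x - 2;
    }

    public static void main(String[] args) {
        double relativeDelta = 1e-3;
        for (double x : new double[] { -1.5, 0.25, 2.0 }) {
            // Choose a step that is exactly representable around x
            double d = Precision.representableDelta(x, (x == 0) ? 1e-3 : Math.abs(x) * relativeDelta);
            double numerical = (f(x + d) - f(x - d)) / (2 * d);
            double analytic = df(x);
            System.out.printf("x=%.3f  analytic=%.6f  numerical=%.6f  relError=%.2e%n",
                    x, analytic, numerical,
                    Math.abs(analytic - numerical) / Math.max(Math.abs(analytic), 1e-12));
        }
    }
}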

Aggregations

ArrayList (java.util.ArrayList) 16
List (java.util.List) 10
DescriptiveStatistics (org.apache.commons.math3.stat.descriptive.DescriptiveStatistics) 8
SummaryStatistics (org.apache.commons.math3.stat.descriptive.SummaryStatistics) 7
Map (java.util.Map) 6
UnivariateFunction (org.apache.commons.math3.analysis.UnivariateFunction) 6
MaxEval (org.apache.commons.math3.optim.MaxEval) 6
Collectors (java.util.stream.Collectors) 5
ExpressionException (cbit.vcell.parser.ExpressionException) 4
Plot2 (ij.gui.Plot2) 4
TooManyEvaluationsException (org.apache.commons.math3.exception.TooManyEvaluationsException) 4
InitialGuess (org.apache.commons.math3.optim.InitialGuess) 4
PointValuePair (org.apache.commons.math3.optim.PointValuePair) 4
RandomDataGenerator (org.apache.commons.math3.random.RandomDataGenerator) 4
VisibleForTesting (com.google.common.annotations.VisibleForTesting) 3
StoredDataStatistics (gdsc.core.utils.StoredDataStatistics) 3
HashMap (java.util.HashMap) 3
SimpsonIntegrator (org.apache.commons.math3.analysis.integration.SimpsonIntegrator) 3
BrentOptimizer (org.apache.commons.math3.optim.univariate.BrentOptimizer) 3
SearchInterval (org.apache.commons.math3.optim.univariate.SearchInterval) 3