Example 16 with Precision

Use of org.apache.commons.math3.util.Precision in project metron by apache.

From the class HLLPMeasurement, method main:

public static void main(String[] args) {
    Options options = new Options();
    try {
        CommandLineParser parser = new PosixParser();
        CommandLine cmd = null;
        try {
            cmd = ParserOptions.parse(parser, args);
        } catch (ParseException pe) {
            pe.printStackTrace();
            final HelpFormatter usageFormatter = new HelpFormatter();
            usageFormatter.printHelp("HLLPMeasurement", null, options, null, true);
            System.exit(-1);
        }
        if (cmd.hasOption("h")) {
            final HelpFormatter usageFormatter = new HelpFormatter();
            usageFormatter.printHelp("HLLPMeasurement", null, options, null, true);
            System.exit(0);
        }
        final String chartDelim = ParserOptions.CHART_DELIM.get(cmd, "|");
        final int numTrials = Integer.parseInt(ParserOptions.NUM_TRIALS.get(cmd, "5000"));
        final int cardMin = Integer.parseInt(ParserOptions.CARD_MIN.get(cmd, "200"));
        final int cardMax = Integer.parseInt(ParserOptions.CARD_MAX.get(cmd, "1000"));
        final int cardStep = Integer.parseInt(ParserOptions.CARD_STEP.get(cmd, "200"));
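        // Round cardMin up to the nearest multiple of cardStep: the first cardinality tested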
        final int cardStart = (((cardMin - 1) / cardStep) * cardStep) + cardStep;
        final int spMin = Integer.parseInt(ParserOptions.SP_MIN.get(cmd, "4"));
        final int spMax = Integer.parseInt(ParserOptions.SP_MAX.get(cmd, "32"));
        final int spStep = Integer.parseInt(ParserOptions.SP_STEP.get(cmd, "4"));
        final int pMin = Integer.parseInt(ParserOptions.P_MIN.get(cmd, "4"));
        final int pMax = Integer.parseInt(ParserOptions.P_MAX.get(cmd, "32"));
        final int pStep = Integer.parseInt(ParserOptions.P_STEP.get(cmd, "4"));
        final double errorPercentile = Double.parseDouble(ParserOptions.ERR_PERCENTILE.get(cmd, "50"));
        final double timePercentile = Double.parseDouble(ParserOptions.TIME_PERCENTILE.get(cmd, "50"));
        final double sizePercentile = Double.parseDouble(ParserOptions.SIZE_PERCENTILE.get(cmd, "50"));
        final boolean formatErrPercent = Boolean.parseBoolean(ParserOptions.ERR_FORMAT_PERCENT.get(cmd, "true"));
        final int errMultiplier = formatErrPercent ? 100 : 1;
        final Function<Double, String> errorFormatter = (v -> ERR_FORMAT.format(v * errMultiplier));
        final Function<Double, String> timeFormatter = (v -> TIME_FORMAT.format(v / NANO_TO_MILLIS));
        final Function<Double, String> sizeFormatter = (v -> SIZE_FORMAT.format(v));
        final String[] chartKey = new String[] {
            "card: cardinality",
            "sp: sparse precision value",
            "p: normal precision value",
            "err: error as a percent of the expected cardinality; ",
            "time: total time to add all values to the hllp estimator and calculate a cardinality estimate",
            "size: size of the hllp set in bytes once all values have been added for the specified cardinality",
            "l=low, m=mid(based on percentile chosen), h=high, std=standard deviation"
        };
        final String[] chartHeader = new String[] { "card", "sp", "p", "err l/m/h/std (% of actual)", "time l/m/h/std (ms)", "size l/m/h/std (b)" };
        final int[] chartPadding = new int[] { 10, 10, 10, 40, 40, 30 };
        if (spMin < pMin) {
            throw new IllegalArgumentException("p must be <= sp");
        }
        if (spMax < pMax) {
            throw new IllegalArgumentException("p must be <= sp");
        }
        println("Options Used");
        println("------------");
        println("num trials: " + numTrials);
        println("card min: " + cardMin);
        println("card max: " + cardMax);
        println("card step: " + cardStep);
        println("card start: " + cardStart);
        println("sp min: " + spMin);
        println("sp max: " + spMax);
        println("sp step: " + spStep);
        println("p min: " + pMin);
        println("p max: " + pMax);
        println("p step: " + pStep);
        println("error percentile: " + errorPercentile);
        println("time percentile: " + timePercentile);
        println("size percentile: " + sizePercentile);
        println("format err as %: " + formatErrPercent);
        println("");
        printHeading(chartKey, chartHeader, chartPadding, chartDelim);
        for (int c = cardStart; c <= cardMax; c += cardStep) {
            for (int sp = spMin; sp <= spMax; sp += spStep) {
                for (int p = pMin; p <= pMax; p += pStep) {
                    DescriptiveStatistics errorStats = new DescriptiveStatistics();
                    DescriptiveStatistics timeStats = new DescriptiveStatistics();
                    DescriptiveStatistics sizeStats = new DescriptiveStatistics();
                    for (int i = 0; i < numTrials; i++) {
                        List<Object> trialSet = buildTrialSet(c);
                        Set<Object> unique = new HashSet<>(trialSet);
                        long distinctVals = unique.size();
                        HyperLogLogPlus hllp = new HyperLogLogPlus(p, sp);
                        long timeStart = System.nanoTime();
                        hllp.addAll(trialSet);
                        long dvEstimate = hllp.cardinality();
                        long timeEnd = System.nanoTime();
                        long timeElapsed = timeEnd - timeStart;
                        double rawError = Math.abs(dvEstimate - distinctVals) / (double) distinctVals;
                        errorStats.addValue(rawError);
                        timeStats.addValue(timeElapsed);
                        sizeStats.addValue(SerDeUtils.toBytes(hllp).length);
                    }
                    MeasureResultFormatter errorRF = new MeasureResultFormatter(errorStats, errorFormatter, errorPercentile);
                    MeasureResultFormatter timeRF = new MeasureResultFormatter(timeStats, timeFormatter, timePercentile);
                    MeasureResultFormatter sizeRF = new MeasureResultFormatter(sizeStats, sizeFormatter, sizePercentile);
                    println(formatWithPadding(new String[] { "" + c, "" + sp, "" + p, errorRF.getFormattedResults(), timeRF.getFormattedResults(), sizeRF.getFormattedResults() }, chartPadding, chartDelim));
                }
            }
        }
    } catch (Exception e) {
        e.printStackTrace();
        System.exit(-1);
    }
}
Also used: DescriptiveStatistics (org.apache.commons.math3.stat.descriptive.DescriptiveStatistics)
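
For context, a minimal standalone sketch of the estimator being measured, assuming the stream-lib API (com.clearspring.analytics.stream.cardinality.HyperLogLogPlus) that Metron's HLLP wrapper builds on; the class name, parameters, and values below are illustrative, not Metron's:

import com.clearspring.analytics.stream.cardinality.HyperLogLogPlus;

public class HllpSketch {
    public static void main(String[] args) {
        // p: normal precision (2^p registers); sp: sparse precision, must be >= p
        HyperLogLogPlus hllp = new HyperLogLogPlus(14, 25);
        int actual = 100000;
        for (int i = 0; i < actual; i++) {
            hllp.offer("value-" + i);  // hash and record each element
        }
        long dvEstimate = hllp.cardinality();
        // Relative error, the same measure the trials above accumulate
        double rawError = Math.abs(dvEstimate - actual) / (double) actual;
        System.out.printf("estimate=%d, relative error=%.4f%n", dvEstimate, rawError);
    }
}

Raising p lowers the relative error (roughly 1.04/sqrt(2^p)) at the cost of a larger serialized sketch, which is exactly the trade-off the chart above sweeps.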

Example 17 with Precision

Use of org.apache.commons.math3.util.Precision in project GDSC-SMLM by aherbert.

From the class CreateData, method showSummary:

private double showSummary(List<? extends FluorophoreSequenceModel> fluorophores, List<LocalisationModel> localisations) {
    IJ.showStatus("Calculating statistics ...");
    createSummaryTable();
    Statistics[] stats = new Statistics[NAMES.length];
    for (int i = 0; i < stats.length; i++) {
        stats[i] = (settings.showHistograms || alwaysRemoveOutliers[i]) ? new StoredDataStatistics() : new Statistics();
    }
    // Find the largest timepoint
    ImagePlus outputImp = WindowManager.getImage(benchmarkImageId);
    int nFrames;
    if (outputImp == null) {
        sortLocalisationsByTime(localisations);
        nFrames = localisations.get(localisations.size() - 1).getTime();
    } else {
        nFrames = outputImp.getStackSize();
    }
    int[] countHistogram = new int[nFrames + 1];
    // Use the localisations that were drawn to create the sampled on/off times
    rebuildNeighbours(localisations);
    // Assume that there is at least one localisation
    LocalisationModel first = localisations.get(0);
    // The current localisation
    int currentId = first.getId();
    // The last time this localisation was on
    int lastT = first.getTime();
    // Number of blinks
    int blinks = 0;
    // On-time of current pulse
    int currentT = 0;
    double signal = 0;
    final double centreOffset = settings.size * 0.5;
    // Used to convert the sampled times in frames into seconds
    final double framesPerSecond = 1000.0 / settings.exposureTime;
    final double gain = (settings.getTotalGain() > 0) ? settings.getTotalGain() : 1;
    for (LocalisationModel l : localisations) {
        if (l.getData() == null)
            System.out.println("No localisation data. This should not happen!");
        final double noise = (l.getData() != null) ? l.getData()[1] : 1;
        final double intensity = (l.getData() != null) ? l.getData()[4] : l.getIntensity();
        final double intensityInPhotons = intensity / gain;
        // Q. What if the noise is zero, i.e. no background photon / read noise?
        // Just ignore it for now.
        final double snr = intensity / noise;
        stats[SIGNAL].add(intensityInPhotons);
        stats[NOISE].add(noise / gain);
        if (noise != 0)
            stats[SNR].add(snr);
        //if (l.isContinuous())
        if (l.getNext() != null && l.getPrevious() != null) {
            stats[SIGNAL_CONTINUOUS].add(intensityInPhotons);
            if (noise != 0)
                stats[SNR_CONTINUOUS].add(snr);
        }
        int id = l.getId();
        // Check if this is a new fluorophore
        if (currentId != id) {
            // Add previous fluorophore
            stats[SAMPLED_BLINKS].add(blinks);
            stats[SAMPLED_T_ON].add(currentT / framesPerSecond);
            stats[TOTAL_SIGNAL].add(signal);
            // Reset
            blinks = 0;
            currentT = 1;
            currentId = id;
            signal = intensityInPhotons;
        } else {
            signal += intensityInPhotons;
            // Check if the current fluorophore pulse is broken (i.e. a blink)
            if (l.getTime() - 1 > lastT) {
                blinks++;
                stats[SAMPLED_T_ON].add(currentT / framesPerSecond);
                currentT = 1;
                stats[SAMPLED_T_OFF].add(((l.getTime() - 1) - lastT) / framesPerSecond);
            } else {
                // Continuous on-time
                currentT++;
            }
        }
        lastT = l.getTime();
        countHistogram[lastT]++;
        stats[X].add((l.getX() - centreOffset) * settings.pixelPitch);
        stats[Y].add((l.getY() - centreOffset) * settings.pixelPitch);
        stats[Z].add(l.getZ() * settings.pixelPitch);
    }
    // Final fluorophore
    stats[SAMPLED_BLINKS].add(blinks);
    stats[SAMPLED_T_ON].add(currentT / framesPerSecond);
    stats[TOTAL_SIGNAL].add(signal);
    // Samples per frame
    for (int t = 1; t < countHistogram.length; t++) stats[SAMPLES].add(countHistogram[t]);
    if (fluorophores != null) {
        for (FluorophoreSequenceModel f : fluorophores) {
            stats[BLINKS].add(f.getNumberOfBlinks());
            // On-time
            for (double t : f.getOnTimes()) stats[T_ON].add(t);
            // Off-time
            for (double t : f.getOffTimes()) stats[T_OFF].add(t);
        }
    } else {
        // show no blinks
        stats[BLINKS].add(0);
        stats[T_ON].add(1);
    //stats[T_OFF].add(0);
    }
    if (results != null) {
        final boolean emCCD = (settings.getEmGain() > 1);
        // Convert depth-of-field to pixels
        final double depth = settings.depthOfField / settings.pixelPitch;
        for (PeakResult r : results.getResults()) {
            final double precision = r.getPrecision(settings.pixelPitch, gain, emCCD);
            stats[PRECISION].add(precision);
            // The error stores the z-depth in pixels
            if (Math.abs(r.error) < depth)
                stats[PRECISION_IN_FOCUS].add(precision);
            stats[WIDTH].add(r.getSD());
        }
        // Compute density per frame. Multithread for speed
        if (settings.densityRadius > 0) {
            IJ.showStatus("Calculating density ...");
            ExecutorService threadPool = Executors.newFixedThreadPool(Prefs.getThreads());
            List<Future<?>> futures = new LinkedList<Future<?>>();
            final ArrayList<float[]> coords = new ArrayList<float[]>();
            int t = results.getHead().getFrame();
            final Statistics densityStats = stats[DENSITY];
            final float radius = (float) (settings.densityRadius * getHWHM());
            final Rectangle bounds = results.getBounds();
            currentIndex = 0;
            finalIndex = results.getTail().getFrame();
            // Store the density for each result.
            int[] allDensity = new int[results.size()];
            int allIndex = 0;
            for (PeakResult r : results.getResults()) {
                if (t != r.getFrame()) {
                    allIndex += runDensityCalculation(threadPool, futures, coords, densityStats, radius, bounds, allDensity, allIndex);
                }
                coords.add(new float[] { r.getXPosition(), r.getYPosition() });
                t = r.getFrame();
            }
            runDensityCalculation(threadPool, futures, coords, densityStats, radius, bounds, allDensity, allIndex);
            Utils.waitForCompletion(futures);
            threadPool.shutdownNow();
            threadPool = null;
            IJ.showProgress(1);
            // Split results into singles (density = 0) and clustered (density > 0)
            MemoryPeakResults singles = copyMemoryPeakResults("No Density");
            MemoryPeakResults clustered = copyMemoryPeakResults("Density");
            int i = 0;
            for (PeakResult r : results.getResults()) {
                // Store density in the original value field
                r.origValue = allDensity[i];
                if (allDensity[i++] == 0)
                    singles.add(r);
                else
                    clustered.add(r);
            }
        }
    }
    StringBuilder sb = new StringBuilder();
    sb.append(datasetNumber).append("\t");
    sb.append((fluorophores == null) ? localisations.size() : fluorophores.size()).append("\t");
    sb.append(stats[SAMPLED_BLINKS].getN() + (int) stats[SAMPLED_BLINKS].getSum()).append("\t");
    sb.append(localisations.size()).append("\t");
    sb.append(nFrames).append("\t");
    sb.append(Utils.rounded(areaInUm)).append("\t");
    sb.append(Utils.rounded(localisations.size() / (areaInUm * nFrames), 4)).append("\t");
    sb.append(Utils.rounded(getHWHM(), 4)).append("\t");
    double s = getPsfSD();
    sb.append(Utils.rounded(s, 4)).append("\t");
    s *= settings.pixelPitch;
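    // Adjust the PSF SD for the square pixel size (sa) and convert back to pixel units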
    final double sa = PSFCalculator.squarePixelAdjustment(s, settings.pixelPitch) / settings.pixelPitch;
    sb.append(Utils.rounded(sa, 4)).append("\t");
    // Width not valid for the Image PSF
    int nStats = (imagePSF) ? stats.length - 1 : stats.length;
    for (int i = 0; i < nStats; i++) {
        double centre = (alwaysRemoveOutliers[i]) ? ((StoredDataStatistics) stats[i]).getStatistics().getPercentile(50) : stats[i].getMean();
        sb.append(Utils.rounded(centre, 4)).append("\t");
    }
    if (java.awt.GraphicsEnvironment.isHeadless()) {
        IJ.log(sb.toString());
        return stats[SIGNAL].getMean();
    } else {
        summaryTable.append(sb.toString());
    }
    // Show histograms
    if (settings.showHistograms) {
        IJ.showStatus("Calculating histograms ...");
        boolean[] chosenHistograms = getChoosenHistograms();
        WindowOrganiser wo = new WindowOrganiser();
        boolean requireRetile = false;
        for (int i = 0; i < NAMES.length; i++) {
            if (chosenHistograms[i]) {
                wo.add(Utils.showHistogram(TITLE, (StoredDataStatistics) stats[i], NAMES[i], (integerDisplay[i]) ? 1 : 0, (settings.removeOutliers || alwaysRemoveOutliers[i]) ? 2 : 0, settings.histogramBins * ((integerDisplay[i]) ? 100 : 1)));
                requireRetile = requireRetile || Utils.isNewWindow();
            }
        }
        wo.tile();
    }
    IJ.showStatus("");
    return stats[SIGNAL].getMean();
}
Also used:
StoredDataStatistics (gdsc.core.utils.StoredDataStatistics)
ArrayList (java.util.ArrayList)
Rectangle (java.awt.Rectangle)
WindowOrganiser (ij.plugin.WindowOrganiser)
Statistics (gdsc.core.utils.Statistics)
SummaryStatistics (org.apache.commons.math3.stat.descriptive.SummaryStatistics)
ImagePlus (ij.ImagePlus)
PeakResult (gdsc.smlm.results.PeakResult)
IdPeakResult (gdsc.smlm.results.IdPeakResult)
ExtendedPeakResult (gdsc.smlm.results.ExtendedPeakResult)
LinkedList (java.util.LinkedList)
LocalisationModel (gdsc.smlm.model.LocalisationModel)
FluorophoreSequenceModel (gdsc.smlm.model.FluorophoreSequenceModel)
ExecutorService (java.util.concurrent.ExecutorService)
Future (java.util.concurrent.Future)
MemoryPeakResults (gdsc.smlm.results.MemoryPeakResults)
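
The summary row above reports, for each statistic, either a robust centre (the 50th percentile of a StoredDataStatistics) or the plain mean, depending on whether outliers are always removed. A minimal sketch of that choice, using commons-math3's DescriptiveStatistics in place of the GDSC Statistics classes; the sample values and class name are made up:

import org.apache.commons.math3.stat.descriptive.DescriptiveStatistics;

public class CentreSketch {
    public static void main(String[] args) {
        DescriptiveStatistics stats = new DescriptiveStatistics();
        for (double v : new double[] { 1.1, 1.2, 1.0, 1.3, 50.0 }) {
            stats.addValue(v);  // 50.0 plays the role of an outlier
        }
        boolean removeOutliers = true;
        // The median (50th percentile) resists the outlier; the mean does not
        double centre = removeOutliers ? stats.getPercentile(50) : stats.getMean();
        System.out.printf("median=%.2f mean=%.2f centre=%.2f%n",
                stats.getPercentile(50), stats.getMean(), centre);
    }
}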

Example 18 with Precision

Use of org.apache.commons.math3.util.Precision in project GDSC-SMLM by aherbert.

From the class PulseActivationAnalysis, method simulateActivations:

private int simulateActivations(RandomDataGenerator rdg, BinomialDistribution bd, float[][] molecules, MemoryPeakResults results, int t, double precision, int id) {
    if (bd == null)
        return 0;
    int n = molecules.length;
    int k = bd.sample();
    // Sample
    RandomGenerator rand = rdg.getRandomGenerator();
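    // Pick k distinct molecule indices from the n available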
    int[] sample = Random.sample(k, n, rand);
    while (k-- > 0) {
        float[] xy = molecules[sample[k]];
        float x, y;
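        // Redraw each Gaussian-jittered coordinate until it lies within bounds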
        do {
            x = (float) (xy[0] + rand.nextGaussian() * precision);
        } while (outOfBounds(x));
        do {
            y = (float) (xy[1] + rand.nextGaussian() * precision);
        } while (outOfBounds(y));
        results.add(createResult(t, x, y));
    }
    return sample.length;
}
Also used: RandomGenerator (org.apache.commons.math3.random.RandomGenerator)
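
simulateActivations draws the number of activations per frame from a binomial distribution and jitters each activated molecule's true position with Gaussian noise scaled by the localisation precision. A minimal standalone sketch of that pattern with commons-math3; the molecule array, probabilities, and class name are made up, and the distinct-index sampling and bounds rejection of the original are simplified away:

import org.apache.commons.math3.distribution.BinomialDistribution;
import org.apache.commons.math3.random.RandomGenerator;
import org.apache.commons.math3.random.Well19937c;

public class ActivationSketch {
    public static void main(String[] args) {
        RandomGenerator rand = new Well19937c(42);
        // n molecules, each activating this frame with probability pActivation
        int n = 100;
        double pActivation = 0.1;
        double precision = 30;  // localisation precision (nm), the Gaussian jitter scale
        float[][] molecules = new float[n][2];  // true positions (all at the origin here)
        // Number of molecules activated this frame
        int k = new BinomialDistribution(rand, n, pActivation).sample();
        for (int i = 0; i < k; i++) {
            float[] xy = molecules[i];  // the original samples k distinct indices instead
            // Observed position = true position + Gaussian localisation error
            double x = xy[0] + rand.nextGaussian() * precision;
            double y = xy[1] + rand.nextGaussian() * precision;
            System.out.printf("activation %d at (%.1f, %.1f)%n", i, x, y);
        }
    }
}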

Example 19 with Precision

Use of org.apache.commons.math3.util.Precision in project GDSC-SMLM by aherbert.

From the class PCPALMFitting, method fitRandomModel:

/**
	 * Fits the correlation curve with r>0 to the random model using the estimated density and precision. Parameters
	 * must be fit within a tolerance of the starting values.
	 * 
	 * @param gr
	 *            The correlation curve (gr[0][] = radius r, gr[1][] = g(r))
	 * @param sigmaS
	 *            The estimated precision
	 * @param proteinDensity
	 *            The estimated protein density
	 * @return The fitted parameters [precision, density]
	 */
private double[] fitRandomModel(double[][] gr, double sigmaS, double proteinDensity, String resultColour) {
    final RandomModelFunction function = new RandomModelFunction();
    randomModel = function;
    log("Fitting %s: Estimated precision = %f nm, estimated protein density = %g um^-2", randomModel.getName(), sigmaS, proteinDensity * 1e6);
    randomModel.setLogging(true);
    for (int i = offset; i < gr[0].length; i++) {
        // Only fit the curve above the estimated resolution (points below it will be subject to error)
        if (gr[0][i] > sigmaS * fitAboveEstimatedPrecision)
            randomModel.addPoint(gr[0][i], gr[1][i]);
    }
    LevenbergMarquardtOptimizer optimizer = new LevenbergMarquardtOptimizer();
    Optimum optimum;
    try {
        //@formatter:off
        LeastSquaresProblem problem = new LeastSquaresBuilder()
                .maxEvaluations(Integer.MAX_VALUE)
                .maxIterations(3000)
                .start(new double[] { sigmaS, proteinDensity })
                .target(function.getY())
                .weight(new DiagonalMatrix(function.getWeights()))
                .model(function, new MultivariateMatrixFunction() {
                    public double[][] value(double[] point) throws IllegalArgumentException {
                        return function.jacobian(point);
                    }
                })
                .build();
        //@formatter:on
        optimum = optimizer.optimize(problem);
    } catch (TooManyIterationsException e) {
        log("Failed to fit %s: Too many iterations (%s)", randomModel.getName(), e.getMessage());
        return null;
    } catch (ConvergenceException e) {
        log("Failed to fit %s: %s", randomModel.getName(), e.getMessage());
        return null;
    }
    randomModel.setLogging(false);
    double[] parameters = optimum.getPoint().toArray();
    // Ensure the width is positive
    parameters[0] = Math.abs(parameters[0]);
    double ss = optimum.getResiduals().dotProduct(optimum.getResiduals());
    ic1 = Maths.getAkaikeInformationCriterionFromResiduals(ss, randomModel.size(), parameters.length);
    final double fitSigmaS = parameters[0];
    final double fitProteinDensity = parameters[1];
    // Check the fitted parameters are within tolerance of the initial estimates
    double e1 = parameterDrift(sigmaS, fitSigmaS);
    double e2 = parameterDrift(proteinDensity, fitProteinDensity);
    log("  %s fit: SS = %f. cAIC = %f. %d evaluations", randomModel.getName(), ss, ic1, optimum.getEvaluations());
    log("  %s parameters:", randomModel.getName());
    log("    Average precision = %s nm (%s%%)", Utils.rounded(fitSigmaS, 4), Utils.rounded(e1, 4));
    log("    Average protein density = %s um^-2 (%s%%)", Utils.rounded(fitProteinDensity * 1e6, 4), Utils.rounded(e2, 4));
    valid1 = true;
    if (fittingTolerance > 0 && (Math.abs(e1) > fittingTolerance || Math.abs(e2) > fittingTolerance)) {
        log("  Failed to fit %s within tolerance (%s%%): Average precision = %f nm (%s%%), average protein density = %g um^-2 (%s%%)", randomModel.getName(), Utils.rounded(fittingTolerance, 4), fitSigmaS, Utils.rounded(e1, 4), fitProteinDensity * 1e6, Utils.rounded(e2, 4));
        valid1 = false;
    }
    if (valid1) {
        // ---------
        // TODO - My data does not comply with this criterion.
        // This could be due to the PC-PALM Molecule code limiting the nmPerPixel to fit the images in memory 
        // thus removing correlations at small r.
        // It could also be due to the nature of the random simulations being 3D not 2D membranes 
        // as per the PC-PALM paper. 
        // ---------
        // Evaluate g(r)protein where:
        // g(r)peaks = g(r)protein + g(r)stoch
        // g(r)peaks ~ 1           + g(r)stoch
        // Verify g(r)protein should be <1.5 for all r>0
        double[] gr_stoch = randomModel.value(parameters);
        double[] gr_peaks = randomModel.getY();
        double[] gr_ = randomModel.getX();
        //SummaryStatistics stats = new SummaryStatistics();
        for (int i = 0; i < gr_peaks.length; i++) {
            // Only evaluate above the fitted average precision 
            if (gr_[i] < fitSigmaS)
                continue;
            // Note the RandomModelFunction evaluates g(r)stoch + 1;
            double gr_protein_i = gr_peaks[i] - (gr_stoch[i] - 1);
            if (gr_protein_i > gr_protein_threshold) {
                // Failed fit
                log("  Failed to fit %s: g(r)protein %s > %s @ r=%s", randomModel.getName(), Utils.rounded(gr_protein_i, 4), Utils.rounded(gr_protein_threshold, 4), Utils.rounded(gr_[i], 4));
                valid1 = false;
            }
        //stats.addValue(gr_i);
        //System.out.printf("g(r)protein @ %f = %f\n", gr[0][i], gr_protein_i);
        }
    }
    addResult(randomModel.getName(), resultColour, valid1, fitSigmaS, fitProteinDensity, 0, 0, 0, 0, ic1);
    return parameters;
}
Also used:
Optimum (org.apache.commons.math3.fitting.leastsquares.LeastSquaresOptimizer.Optimum)
LevenbergMarquardtOptimizer (org.apache.commons.math3.fitting.leastsquares.LevenbergMarquardtOptimizer)
DiagonalMatrix (org.apache.commons.math3.linear.DiagonalMatrix)
ConvergenceException (org.apache.commons.math3.exception.ConvergenceException)
TooManyIterationsException (org.apache.commons.math3.exception.TooManyIterationsException)
LeastSquaresProblem (org.apache.commons.math3.fitting.leastsquares.LeastSquaresProblem)
MultivariateMatrixFunction (org.apache.commons.math3.analysis.MultivariateMatrixFunction)
LeastSquaresBuilder (org.apache.commons.math3.fitting.leastsquares.LeastSquaresBuilder)
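
For reference, the same LeastSquaresBuilder / LevenbergMarquardtOptimizer pattern in isolation, fitting a straight line y = a*x + b; the data, model, and class name are made up, and only the commons-math3 calls mirror the snippet above:

import org.apache.commons.math3.analysis.MultivariateMatrixFunction;
import org.apache.commons.math3.analysis.MultivariateVectorFunction;
import org.apache.commons.math3.fitting.leastsquares.LeastSquaresBuilder;
import org.apache.commons.math3.fitting.leastsquares.LeastSquaresOptimizer.Optimum;
import org.apache.commons.math3.fitting.leastsquares.LeastSquaresProblem;
import org.apache.commons.math3.fitting.leastsquares.LevenbergMarquardtOptimizer;

public class LineFitSketch {
    public static void main(String[] args) {
        final double[] x = { 0, 1, 2, 3 };
        final double[] y = { 1.1, 2.9, 5.2, 6.8 };  // roughly y = 2x + 1
        // Model value: predicted y for parameters p = [a, b]
        MultivariateVectorFunction value = p -> {
            double[] v = new double[x.length];
            for (int i = 0; i < x.length; i++) v[i] = p[0] * x[i] + p[1];
            return v;
        };
        // Jacobian: d(model)/da = x, d(model)/db = 1
        MultivariateMatrixFunction jacobian = p -> {
            double[][] j = new double[x.length][2];
            for (int i = 0; i < x.length; i++) { j[i][0] = x[i]; j[i][1] = 1; }
            return j;
        };
        LeastSquaresProblem problem = new LeastSquaresBuilder()
                .start(new double[] { 1, 0 })  // initial guess [a, b]
                .target(y)
                .model(value, jacobian)
                .maxEvaluations(1000)
                .maxIterations(1000)
                .build();
        Optimum optimum = new LevenbergMarquardtOptimizer().optimize(problem);
        double[] fit = optimum.getPoint().toArray();
        System.out.printf("a=%.3f b=%.3f evaluations=%d%n",
                fit[0], fit[1], optimum.getEvaluations());
    }
}

The builder takes the model value and Jacobian separately, just as fitRandomModel supplies function and function.jacobian.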

Example 20 with Precision

Use of org.apache.commons.math3.util.Precision in project GDSC-SMLM by aherbert.

From the class PCPALMFitting, method fitClusteredModel:

/**
	 * Fits the correlation curve with r>0 to the clustered model using the estimated density and precision. Parameters
	 * must be fit within a tolerance of the starting values.
	 * 
	 * @param gr
	 *            The correlation curve (gr[0][] = radius r, gr[1][] = g(r))
	 * @param sigmaS
	 *            The estimated precision
	 * @param proteinDensity
	 *            The estimated protein density
	 * @return The fitted parameters [precision, density, clusterRadius, clusterDensity]
	 */
private double[] fitClusteredModel(double[][] gr, double sigmaS, double proteinDensity, String resultColour) {
    final ClusteredModelFunctionGradient function = new ClusteredModelFunctionGradient();
    clusteredModel = function;
    log("Fitting %s: Estimated precision = %f nm, estimated protein density = %g um^-2", clusteredModel.getName(), sigmaS, proteinDensity * 1e6);
    clusteredModel.setLogging(true);
    for (int i = offset; i < gr[0].length; i++) {
        // Only fit the curve above the estimated resolution (points below it will be subject to error)
        if (gr[0][i] > sigmaS * fitAboveEstimatedPrecision)
            clusteredModel.addPoint(gr[0][i], gr[1][i]);
    }
    double[] parameters;
    // The model is: sigma, density, range, amplitude
    double[] initialSolution = new double[] { sigmaS, proteinDensity, sigmaS * 5, 1 };
    int evaluations = 0;
    // Constrain the fitting to be close to the estimated precision (sigmaS) and protein density.
    // LVM fitting does not support constrained fitting so use a bounded optimiser.
    SumOfSquaresModelFunction clusteredModelMulti = new SumOfSquaresModelFunction(clusteredModel);
    double[] x = clusteredModelMulti.x;
    // Put some bounds around the initial guess. Use the fitting tolerance (in %) if provided.
    double limit = (fittingTolerance > 0) ? 1 + fittingTolerance / 100 : 2;
    double[] lB = new double[] { initialSolution[0] / limit, initialSolution[1] / limit, 0, 0 };
    // The amplitude and range should not extend beyond the limits of the g(r) curve.
    double[] uB = new double[] { initialSolution[0] * limit, initialSolution[1] * limit, Maths.max(x), Maths.max(gr[1]) };
    log("Fitting %s using a bounded search: %s < precision < %s & %s < density < %s", clusteredModel.getName(), Utils.rounded(lB[0], 4), Utils.rounded(uB[0], 4), Utils.rounded(lB[1] * 1e6, 4), Utils.rounded(uB[1] * 1e6, 4));
    PointValuePair constrainedSolution = runBoundedOptimiser(gr, initialSolution, lB, uB, clusteredModelMulti);
    if (constrainedSolution == null)
        return null;
    parameters = constrainedSolution.getPointRef();
    evaluations = boundedEvaluations;
    // Re-fit using LVM
    if (useLSE) {
        log("Re-fitting %s using a gradient optimisation", clusteredModel.getName());
        LevenbergMarquardtOptimizer optimizer = new LevenbergMarquardtOptimizer();
        Optimum lvmSolution;
        try {
            //@formatter:off
            LeastSquaresProblem problem = new LeastSquaresBuilder()
                    .maxEvaluations(Integer.MAX_VALUE)
                    .maxIterations(3000)
                    .start(parameters)
                    .target(function.getY())
                    .weight(new DiagonalMatrix(function.getWeights()))
                    .model(function, new MultivariateMatrixFunction() {
                        public double[][] value(double[] point) throws IllegalArgumentException {
                            return function.jacobian(point);
                        }
                    })
                    .build();
            //@formatter:on
            lvmSolution = optimizer.optimize(problem);
            evaluations += lvmSolution.getEvaluations();
            double ss = lvmSolution.getResiduals().dotProduct(lvmSolution.getResiduals());
            if (ss < constrainedSolution.getValue()) {
                log("Re-fitting %s improved the SS from %s to %s (-%s%%)", clusteredModel.getName(), Utils.rounded(constrainedSolution.getValue(), 4), Utils.rounded(ss, 4), Utils.rounded(100 * (constrainedSolution.getValue() - ss) / constrainedSolution.getValue(), 4));
                parameters = lvmSolution.getPoint().toArray();
            }
        } catch (TooManyIterationsException e) {
            log("Failed to re-fit %s: Too many iterations (%s)", clusteredModel.getName(), e.getMessage());
        } catch (ConvergenceException e) {
            log("Failed to re-fit %s: %s", clusteredModel.getName(), e.getMessage());
        }
    }
    clusteredModel.setLogging(false);
    // Ensure the width is positive
    parameters[0] = Math.abs(parameters[0]);
    //parameters[2] = Math.abs(parameters[2]);
    double ss = 0;
    double[] obs = clusteredModel.getY();
    double[] exp = clusteredModel.value(parameters);
    for (int i = 0; i < obs.length; i++) ss += (obs[i] - exp[i]) * (obs[i] - exp[i]);
    ic2 = Maths.getAkaikeInformationCriterionFromResiduals(ss, clusteredModel.size(), parameters.length);
    final double fitSigmaS = parameters[0];
    final double fitProteinDensity = parameters[1];
    // The radius of the cluster domain
    final double domainRadius = parameters[2];
    // The density of the cluster domain
    final double domainDensity = parameters[3];
    // This is from the PC-PALM paper. However, that paper fits the g(r)protein exponential convolved in 2D
    // with the g(r)PSF. In this method we have just fit the exponential.
    final double nCluster = 2 * domainDensity * Math.PI * domainRadius * domainRadius * fitProteinDensity;
    double e1 = parameterDrift(sigmaS, fitSigmaS);
    double e2 = parameterDrift(proteinDensity, fitProteinDensity);
    log("  %s fit: SS = %f. cAIC = %f. %d evaluations", clusteredModel.getName(), ss, ic2, evaluations);
    log("  %s parameters:", clusteredModel.getName());
    log("    Average precision = %s nm (%s%%)", Utils.rounded(fitSigmaS, 4), Utils.rounded(e1, 4));
    log("    Average protein density = %s um^-2 (%s%%)", Utils.rounded(fitProteinDensity * 1e6, 4), Utils.rounded(e2, 4));
    log("    Domain radius = %s nm", Utils.rounded(domainRadius, 4));
    log("    Domain density = %s", Utils.rounded(domainDensity, 4));
    log("    nCluster = %s", Utils.rounded(nCluster, 4));
    // Check the fitted parameters are within tolerance of the initial estimates
    valid2 = true;
    if (fittingTolerance > 0 && (Math.abs(e1) > fittingTolerance || Math.abs(e2) > fittingTolerance)) {
        log("  Failed to fit %s within tolerance (%s%%): Average precision = %f nm (%s%%), average protein density = %g um^-2 (%s%%)", clusteredModel.getName(), Utils.rounded(fittingTolerance, 4), fitSigmaS, Utils.rounded(e1, 4), fitProteinDensity * 1e6, Utils.rounded(e2, 4));
        valid2 = false;
    }
    // Check extra parameters. Domain radius should be higher than the precision. Density should be positive
    if (domainRadius < fitSigmaS) {
        log("  Failed to fit %s: Domain radius is smaller than the average precision (%s < %s)", clusteredModel.getName(), Utils.rounded(domainRadius, 4), Utils.rounded(fitSigmaS, 4));
        valid2 = false;
    }
    if (domainDensity < 0) {
        log("  Failed to fit %s: Domain density is negative (%s)", clusteredModel.getName(), Utils.rounded(domainDensity, 4));
        valid2 = false;
    }
    if (ic2 > ic1) {
        log("  Failed to fit %s - Information Criterion has increased %s%%", clusteredModel.getName(), Utils.rounded((100 * (ic2 - ic1) / ic1), 4));
        valid2 = false;
    }
    addResult(clusteredModel.getName(), resultColour, valid2, fitSigmaS, fitProteinDensity, domainRadius, domainDensity, nCluster, 0, ic2);
    return parameters;
}
Also used:
PointValuePair (org.apache.commons.math3.optim.PointValuePair)
LeastSquaresBuilder (org.apache.commons.math3.fitting.leastsquares.LeastSquaresBuilder)
Optimum (org.apache.commons.math3.fitting.leastsquares.LeastSquaresOptimizer.Optimum)
LevenbergMarquardtOptimizer (org.apache.commons.math3.fitting.leastsquares.LevenbergMarquardtOptimizer)
DiagonalMatrix (org.apache.commons.math3.linear.DiagonalMatrix)
ConvergenceException (org.apache.commons.math3.exception.ConvergenceException)
TooManyIterationsException (org.apache.commons.math3.exception.TooManyIterationsException)
LeastSquaresProblem (org.apache.commons.math3.fitting.leastsquares.LeastSquaresProblem)
MultivariateMatrixFunction (org.apache.commons.math3.analysis.MultivariateMatrixFunction)
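
The bounded search that precedes the gradient re-fit is hidden inside runBoundedOptimiser, which is not shown. As one plausible shape for it, a minimal sketch of box-constrained minimisation with commons-math3's BOBYQAOptimizer; the objective, bounds, and class name are made up:

import org.apache.commons.math3.optim.InitialGuess;
import org.apache.commons.math3.optim.MaxEval;
import org.apache.commons.math3.optim.PointValuePair;
import org.apache.commons.math3.optim.SimpleBounds;
import org.apache.commons.math3.optim.nonlinear.scalar.GoalType;
import org.apache.commons.math3.optim.nonlinear.scalar.ObjectiveFunction;
import org.apache.commons.math3.optim.nonlinear.scalar.noderiv.BOBYQAOptimizer;

public class BoundedFitSketch {
    public static void main(String[] args) {
        // Sum-of-squares objective with its minimum at (3, 5)
        ObjectiveFunction objective = new ObjectiveFunction(
                p -> Math.pow(p[0] - 3, 2) + Math.pow(p[1] - 5, 2));
        // Constrain the search to a box around the initial estimates,
        // analogous to the precision/density bounds lB and uB above
        SimpleBounds bounds = new SimpleBounds(new double[] { 0, 0 },
                                               new double[] { 10, 10 });
        // BOBYQA needs n+2 to (n+1)(n+2)/2 interpolation points; 2n+1 is typical
        BOBYQAOptimizer optimizer = new BOBYQAOptimizer(5);
        PointValuePair solution = optimizer.optimize(
                new MaxEval(1000),
                objective,
                GoalType.MINIMIZE,
                new InitialGuess(new double[] { 1, 1 }),
                bounds);
        double[] p = solution.getPointRef();
        System.out.printf("minimum at (%.3f, %.3f), value=%.6f%n",
                p[0], p[1], solution.getValue());
    }
}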

Aggregations

WeightedObservedPoint (org.apache.commons.math3.fitting.WeightedObservedPoint): 8 uses
StoredDataStatistics (gdsc.core.utils.StoredDataStatistics): 7 uses
PeakResult (gdsc.smlm.results.PeakResult): 6 uses
ArrayList (java.util.ArrayList): 6 uses
RandomGenerator (org.apache.commons.math3.random.RandomGenerator): 6 uses
ConvergenceException (org.apache.commons.math3.exception.ConvergenceException): 5 uses
TooManyIterationsException (org.apache.commons.math3.exception.TooManyIterationsException): 5 uses
DescriptiveStatistics (org.apache.commons.math3.stat.descriptive.DescriptiveStatistics): 5 uses
ClusterPoint (gdsc.core.clustering.ClusterPoint): 4 uses
MemoryPeakResults (gdsc.smlm.results.MemoryPeakResults): 4 uses
MultivariateMatrixFunction (org.apache.commons.math3.analysis.MultivariateMatrixFunction): 4 uses
LeastSquaresBuilder (org.apache.commons.math3.fitting.leastsquares.LeastSquaresBuilder): 4 uses
Optimum (org.apache.commons.math3.fitting.leastsquares.LeastSquaresOptimizer.Optimum): 4 uses
LeastSquaresProblem (org.apache.commons.math3.fitting.leastsquares.LeastSquaresProblem): 4 uses
LevenbergMarquardtOptimizer (org.apache.commons.math3.fitting.leastsquares.LevenbergMarquardtOptimizer): 4 uses
DiagonalMatrix (org.apache.commons.math3.linear.DiagonalMatrix): 4 uses
Statistics (gdsc.core.utils.Statistics): 3 uses
Plot2 (ij.gui.Plot2): 3 uses
WindowOrganiser (ij.plugin.WindowOrganiser): 3 uses
BasePoint (gdsc.core.match.BasePoint): 2 uses