
Example 21 with StoredDataStatistics

Use of gdsc.core.utils.StoredDataStatistics in project GDSC-SMLM by aherbert.

From the class TraceDiffusion, method summarise.

private void summarise(Trace[] traces, double[] fitMSDResult, int n, double[][] jdParams) {
    IJ.showStatus("Calculating summary ...");
    // Create summary table
    createSummaryTable();
    Statistics[] stats = new Statistics[NAMES.length];
    for (int i = 0; i < stats.length; i++) {
        stats[i] = (settings.showHistograms) ? new StoredDataStatistics() : new Statistics();
    }
    for (Trace trace : traces) {
        stats[T_ON].add(trace.getOnTime() * exposureTime);
        final double signal = trace.getSignal() / results.getGain();
        stats[TOTAL_SIGNAL].add(signal);
        stats[SIGNAL_PER_FRAME].add(signal / trace.size());
    }
    // Add to the summary table
    StringBuilder sb = new StringBuilder(title);
    sb.append('\t').append(createCombinedName());
    sb.append("\t");
    sb.append(Utils.rounded(exposureTime * 1000, 3)).append("\t");
    sb.append(Utils.rounded(settings.distanceThreshold, 3)).append("\t");
    sb.append(Utils.rounded(settings.distanceExclusion, 3)).append("\t");
    sb.append(settings.minimumTraceLength).append("\t");
    sb.append(settings.ignoreEnds).append("\t");
    sb.append(settings.truncate).append("\t");
    sb.append(settings.internalDistances).append("\t");
    sb.append(settings.fitLength).append("\t");
    sb.append(settings.msdCorrection).append("\t");
    sb.append(settings.precisionCorrection).append("\t");
    sb.append(settings.mle).append("\t");
    sb.append(traces.length).append("\t");
    sb.append(Utils.rounded(precision, 4)).append("\t");
    double D = 0, s = 0;
    if (fitMSDResult != null) {
        D = fitMSDResult[0];
        s = fitMSDResult[1];
    }
    sb.append(Utils.rounded(D, 4)).append("\t");
    sb.append(Utils.rounded(s * 1000, 4)).append("\t");
    sb.append(Utils.rounded(settings.jumpDistance * exposureTime)).append("\t");
    sb.append(n).append("\t");
    sb.append(Utils.rounded(beta, 4)).append("\t");
    if (jdParams == null) {
        sb.append("\t\t\t");
    } else {
        sb.append(format(jdParams[0])).append("\t");
        sb.append(format(jdParams[1])).append("\t");
        sb.append(Utils.rounded(ic)).append("\t");
    }
    for (int i = 0; i < stats.length; i++) {
        sb.append(Utils.rounded(stats[i].getMean(), 3)).append("\t");
    }
    if (java.awt.GraphicsEnvironment.isHeadless()) {
        IJ.log(sb.toString());
        return;
    } else {
        summaryTable.append(sb.toString());
    }
    if (settings.showHistograms) {
        IJ.showStatus("Calculating histograms ...");
        for (int i = 0; i < NAMES.length; i++) {
            if (displayHistograms[i]) {
                showHistogram((StoredDataStatistics) stats[i], NAMES[i], alwaysRemoveOutliers[i], ROUNDED[i], false);
            }
        }
    }
    tileNewWindows();
    IJ.showStatus("Finished " + TITLE);
}
Also used : Trace(gdsc.smlm.results.Trace) StoredDataStatistics(gdsc.core.utils.StoredDataStatistics) Statistics(gdsc.core.utils.Statistics)
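In this example a StoredDataStatistics is created only when histograms will be shown: the stored variant keeps every added value, which is what makes the later cast before showHistogram safe, while the plain Statistics only accumulates summary moments. A minimal, hypothetical sketch of that pattern (assuming StoredDataStatistics extends Statistics, as the cast in the example implies; the class wrapper and sample data are invented for illustration):

import gdsc.core.utils.Statistics;
import gdsc.core.utils.StoredDataStatistics;

public class StatsChoiceSketch {
    public static void main(String[] args) {
        // Hypothetical stand-ins for settings.showHistograms and the per-trace data
        boolean showHistograms = true;
        double[] data = { 1.2, 3.4, 2.2, 5.1 };

        // Only pay the memory cost of storing raw values when a histogram is needed
        Statistics stats = showHistograms ? new StoredDataStatistics() : new Statistics();
        for (double d : data)
            stats.add(d);

        // Summary values are available from either implementation
        System.out.println("Mean = " + stats.getMean());

        // The raw values needed to plot a histogram exist only in the stored variant
        if (showHistograms) {
            double[] values = ((StoredDataStatistics) stats).getValues();
            System.out.println("Stored " + values.length + " values");
        }
    }
}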

Example 22 with StoredDataStatistics

Use of gdsc.core.utils.StoredDataStatistics in project GDSC-SMLM by aherbert.

From the class BlinkEstimatorTest, method estimateBlinking.

private TIntHashSet estimateBlinking(double nBlinks, double tOn, double tOff, int particles, double fixedFraction, boolean timeAtLowerBound, boolean doAssert) {
    SpatialIllumination activationIllumination = new UniformIllumination(100);
    int totalSteps = 100;
    double eAct = totalSteps * 0.3 * activationIllumination.getAveragePhotons();
    ImageModel imageModel = new ActivationEnergyImageModel(eAct, activationIllumination, tOn, 0, tOff, 0, nBlinks);
    imageModel.setRandomGenerator(rand);
    double[] max = new double[] { 256, 256, 32 };
    double[] min = new double[3];
    SpatialDistribution distribution = new UniformDistribution(min, max, rand.nextInt());
    List<CompoundMoleculeModel> compounds = new ArrayList<CompoundMoleculeModel>(1);
    CompoundMoleculeModel c = new CompoundMoleculeModel(1, 0, 0, 0, Arrays.asList(new MoleculeModel(0, 0, 0, 0)));
    c.setDiffusionRate(diffusionRate);
    c.setDiffusionType(DiffusionType.RANDOM_WALK);
    compounds.add(c);
    List<CompoundMoleculeModel> molecules = imageModel.createMolecules(compounds, particles, distribution, false);
    // Activate fluorophores
    List<? extends FluorophoreSequenceModel> fluorophores = imageModel.createFluorophores(molecules, totalSteps);
    totalSteps = checkTotalSteps(totalSteps, fluorophores);
    List<LocalisationModel> localisations = imageModel.createImage(molecules, fixedFraction, totalSteps, photons, 0.5, false);
    //		// Remove localisations to simulate missed counts. 
    //		List<LocalisationModel> newLocalisations = new ArrayList<LocalisationModel>(localisations.size());
    //		boolean[] id = new boolean[fluorophores.size() + 1];
    //		Statistics photonStats = new Statistics();
    //		for (LocalisationModel l : localisations)
    //		{
    //			photonStats.add(l.getIntensity());
    //			// Remove by intensity threshold and optionally at random.
    //			if (l.getIntensity() < minPhotons || rand.nextDouble() < pDelete)
    //				continue;
    //			newLocalisations.add(l);
    //			id[l.getId()] = true;
    //		}
    //		localisations = newLocalisations;
    //		System.out.printf("Photons = %f\n", photonStats.getMean());
    //
    //		List<FluorophoreSequenceModel> newFluorophores = new ArrayList<FluorophoreSequenceModel>(fluorophores.size());
    //		for (FluorophoreSequenceModel f : fluorophores)
    //		{
    //			if (id[f.getId()])
    //				newFluorophores.add(f);
    //		}
    //		fluorophores = newFluorophores;
    MemoryPeakResults results = new MemoryPeakResults();
    results.setCalibration(new Calibration(pixelPitch, 1, msPerFrame));
    for (LocalisationModel l : localisations) {
        // Remove by intensity threshold and optionally at random.
        if (l.getIntensity() < minPhotons || rand.nextDouble() < pDelete)
            continue;
        float[] params = new float[7];
        params[Gaussian2DFunction.X_POSITION] = (float) l.getX();
        params[Gaussian2DFunction.Y_POSITION] = (float) l.getY();
        params[Gaussian2DFunction.X_SD] = params[Gaussian2DFunction.Y_SD] = psfWidth;
        params[Gaussian2DFunction.SIGNAL] = (float) (l.getIntensity());
        results.addf(l.getTime(), 0, 0, 0, 0, 0, params, null);
    }
    // Add random localisations
    for (int i = (int) (localisations.size() * pAdd); i-- > 0; ) {
        float[] params = new float[7];
        params[Gaussian2DFunction.X_POSITION] = (float) (rand.nextDouble() * max[0]);
        params[Gaussian2DFunction.Y_POSITION] = (float) (rand.nextDouble() * max[1]);
        params[Gaussian2DFunction.X_SD] = params[Gaussian2DFunction.Y_SD] = psfWidth;
        // Intensity doesn't matter at the moment for tracing
        params[Gaussian2DFunction.SIGNAL] = (float) (photons);
        results.addf(1 + rand.nextInt(totalSteps), 0, 0, 0, 0, 0, params, null);
    }
    // Get actual simulated stats ...
    Statistics statsNBlinks = new Statistics();
    Statistics statsTOn = new Statistics();
    Statistics statsTOff = new Statistics();
    Statistics statsSampledNBlinks = new Statistics();
    Statistics statsSampledTOn = new Statistics();
    StoredDataStatistics statsSampledTOff = new StoredDataStatistics();
    for (FluorophoreSequenceModel f : fluorophores) {
        statsNBlinks.add(f.getNumberOfBlinks());
        statsTOn.add(f.getOnTimes());
        statsTOff.add(f.getOffTimes());
        int[] on = f.getSampledOnTimes();
        statsSampledNBlinks.add(on.length);
        statsSampledTOn.add(on);
        statsSampledTOff.add(f.getSampledOffTimes());
    }
    System.out.printf("N = %d (%d), N-blinks = %f, tOn = %f, tOff = %f, Fixed = %f\n", fluorophores.size(), localisations.size(), nBlinks, tOn, tOff, fixedFraction);
    System.out.printf("Actual N-blinks = %f (%f), tOn = %f (%f), tOff = %f (%f), 95%% = %f, max = %f\n", statsNBlinks.getMean(), statsSampledNBlinks.getMean(), statsTOn.getMean(), statsSampledTOn.getMean(), statsTOff.getMean(), statsSampledTOff.getMean(), statsSampledTOff.getStatistics().getPercentile(95), statsSampledTOff.getStatistics().getMax());
    System.out.printf("-=-=--=-\n");
    BlinkEstimator be = new BlinkEstimator();
    be.maxDarkTime = (int) (tOff * 10);
    be.msPerFrame = msPerFrame;
    be.relativeDistance = false;
    double d = ImageModel.getRandomMoveDistance(diffusionRate);
    be.searchDistance = (fixedFraction < 1) ? Math.sqrt(2 * d * d) * 3 : 0;
    be.timeAtLowerBound = timeAtLowerBound;
    be.showPlots = false;
    //Assert.assertTrue("Max dark time must exceed the dark time of the data (otherwise no plateau)",
    //		be.maxDarkTime > statsSampledTOff.getStatistics().getMax());
    int nMolecules = fluorophores.size();
    if (usePopulationStatistics) {
        nBlinks = statsNBlinks.getMean();
        tOff = statsTOff.getMean();
    } else {
        nBlinks = statsSampledNBlinks.getMean();
        tOff = statsSampledTOff.getMean();
    }
    // See if any fitting regime gets a correct answer
    TIntHashSet ok = new TIntHashSet();
    for (int nFittedPoints = MIN_FITTED_POINTS; nFittedPoints <= MAX_FITTED_POINTS; nFittedPoints++) {
        be.nFittedPoints = nFittedPoints;
        be.computeBlinkingRate(results, true);
        double moleculesError = DoubleEquality.relativeError(nMolecules, be.getNMolecules());
        double blinksError = DoubleEquality.relativeError(nBlinks, be.getNBlinks());
        double offError = DoubleEquality.relativeError(tOff * msPerFrame, be.getTOff());
        System.out.printf("Error %d: N = %f, blinks = %f, tOff = %f : %f\n", nFittedPoints, moleculesError, blinksError, offError, (moleculesError + blinksError + offError) / 3);
        if (moleculesError < relativeError && blinksError < relativeError && offError < relativeError) {
            ok.add(nFittedPoints);
            System.out.printf("-=-=--=-\n");
            System.out.printf("*** Correct at %d fitted points ***\n", nFittedPoints);
            if (doAssert)
                break;
        }
    //if (!be.isIncreaseNFittedPoints())
    //	break;
    }
    System.out.printf("-=-=--=-\n");
    if (doAssert)
        Assert.assertFalse(ok.isEmpty());
    //Assert.assertEquals("Invalid t-off", tOff * msPerFrame, be.getTOff(), tOff * msPerFrame * relativeError);
    return ok;
}
Also used : ActivationEnergyImageModel(gdsc.smlm.model.ActivationEnergyImageModel) CompoundMoleculeModel(gdsc.smlm.model.CompoundMoleculeModel) ArrayList(java.util.ArrayList) TIntHashSet(gnu.trove.set.hash.TIntHashSet) MoleculeModel(gdsc.smlm.model.MoleculeModel) SpatialIllumination(gdsc.smlm.model.SpatialIllumination) MemoryPeakResults(gdsc.smlm.results.MemoryPeakResults) SpatialDistribution(gdsc.smlm.model.SpatialDistribution) UniformDistribution(gdsc.smlm.model.UniformDistribution) StoredDataStatistics(gdsc.core.utils.StoredDataStatistics) Calibration(gdsc.smlm.results.Calibration) Statistics(gdsc.core.utils.Statistics) UniformIllumination(gdsc.smlm.model.UniformIllumination) LocalisationModel(gdsc.smlm.model.LocalisationModel) FluorophoreSequenceModel(gdsc.smlm.model.FluorophoreSequenceModel) ImageModel(gdsc.smlm.model.ImageModel)

Example 23 with StoredDataStatistics

Use of gdsc.core.utils.StoredDataStatistics in project GDSC-SMLM by aherbert.

From the class EMGainAnalysis, method pdfEMGain.

/**
 * Calculate the probability density function for EM-gain. The maximum count to evaluate is calculated dynamically
 * so that the cumulative probability does not change.
 * <p>
 * See Ulbrich & Isacoff (2007). Nature Methods 4, 319-321, SI equation 3.
 *
 * @param p
 *            The average number of photons per pixel input to the EM-camera
 * @param m
 *            The multiplication factor (gain)
 * @return The PDF
 */
private double[] pdfEMGain(final double p, final double m) {
    StoredDataStatistics stats = new StoredDataStatistics(100);
    stats.add(FastMath.exp(-p));
    for (int c = 1; ; c++) {
        final double g = pEMGain(c, p, m);
        stats.add(g);
        final double delta = g / stats.getSum();
        if (delta < 1e-5)
            break;
    }
    return stats.getValues();
}
Also used : StoredDataStatistics(gdsc.core.utils.StoredDataStatistics) Point(java.awt.Point)
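For reference, the pEMGain(c, p, m) term accumulated above presumably evaluates the EM-gain output distribution of Ulbrich & Isacoff (2007), SI equation 3. A LaTeX sketch of that density (I_1 is the modified Bessel function of the first kind, p the mean input photons per pixel, m the gain):

G(c) =
  \begin{cases}
    e^{-p} & c = 0,\\
    \sqrt{\dfrac{p}{c\,m}}\; e^{-c/m - p}\; I_1\!\left(2\sqrt{\dfrac{c\,p}{m}}\right) & c > 0.
  \end{cases}

This is consistent with the method seeding the statistics with exp(-p) for c = 0 and then accumulating c = 1, 2, ... until the relative contribution g / sum drops below 1e-5.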

Example 24 with StoredDataStatistics

Use of gdsc.core.utils.StoredDataStatistics in project GDSC-SMLM by aherbert.

From the class FIRE, method runQEstimation.

private void runQEstimation() {
    IJ.showStatus(TITLE + " ...");
    if (!showQEstimationInputDialog())
        return;
    MemoryPeakResults results = ResultsManager.loadInputResults(inputOption, false);
    if (results == null || results.size() == 0) {
        IJ.error(TITLE, "No results could be loaded");
        return;
    }
    if (results.getCalibration() == null) {
        IJ.error(TITLE, "The results are not calibrated");
        return;
    }
    results = cropToRoi(results);
    if (results.size() < 2) {
        IJ.error(TITLE, "No results within the crop region");
        return;
    }
    initialise(results, null);
    // We need localisation precision.
    // Build a histogram of the localisation precision.
    // Get the initial mean and SD and plot as a Gaussian.
    PrecisionHistogram histogram = calculatePrecisionHistogram();
    if (histogram == null) {
        IJ.error(TITLE, "No localisation precision available.\n \nPlease choose " + PrecisionMethod.FIXED + " and enter a precision mean and SD.");
        return;
    }
    StoredDataStatistics precision = histogram.precision;
    //String name = results.getName();
    double fourierImageScale = SCALE_VALUES[imageScaleIndex];
    int imageSize = IMAGE_SIZE_VALUES[imageSizeIndex];
    // Create the image and compute the numerator of FRC. 
    // Do not use the signal so results.size() is the number of localisations.
    IJ.showStatus("Computing FRC curve ...");
    FireImages images = createImages(fourierImageScale, imageSize, false);
    // DEBUGGING - Save the two images to disk. Load the images into the Matlab 
    // code that calculates the Q-estimation and make this plugin match the functionality.
    //IJ.save(new ImagePlus("i1", images.ip1), "/scratch/i1.tif");
    //IJ.save(new ImagePlus("i2", images.ip2), "/scratch/i2.tif");
    FRC frc = new FRC();
    frc.progress = progress;
    frc.setFourierMethod(fourierMethod);
    frc.setSamplingMethod(samplingMethod);
    frc.setPerimeterSamplingFactor(perimeterSamplingFactor);
    FRCCurve frcCurve = frc.calculateFrcCurve(images.ip1, images.ip2, images.nmPerPixel);
    if (frcCurve == null) {
        IJ.error(TITLE, "Failed to compute FRC curve");
        return;
    }
    IJ.showStatus("Running Q-estimation ...");
    // Note:
    // The method implemented here is based on Matlab code provided by Bernd Rieger.
    // The idea is to compute the spurious correlation component of the FRC Numerator
    // using an initial estimate of the distribution of the localisation precision (assumed
    // to be Gaussian). This component is the contribution of repeat localisations of 
    // the same molecule to the numerator and is modelled as an exponential decay
    // (exp_decay). The component is scaled by the Q-value which
    // is the average number of times a molecule is seen in addition to the first time.
    // At large spatial frequencies the scaled component should match the numerator,
    // i.e. at high resolution (low FIRE number) the numerator is made up of repeat 
    // localisations of the same molecule and not actual structure in the image.
    // The best fit is where the numerator equals the scaled component, i.e. num / (q*exp_decay) == 1.
    // The FRC Numerator is plotted and Q can be determined by
    // adjusting Q and the precision mean and SD to maximise the cost function.
    // This can be done interactively by the user with the effect on the FRC curve
    // dynamically updated and displayed.
    // Compute the scaled FRC numerator
    double qNorm = (1 / frcCurve.mean1 + 1 / frcCurve.mean2);
    double[] frcnum = new double[frcCurve.getSize()];
    for (int i = 0; i < frcnum.length; i++) {
        FRCCurveResult r = frcCurve.get(i);
        frcnum[i] = qNorm * r.getNumerator() / r.getNumberOfSamples();
    }
    // Compute the spatial frequency and the region for curve fitting
    double[] q = FRC.computeQ(frcCurve, false);
    int low = 0, high = q.length;
    while (high > 0 && q[high - 1] > maxQ) high--;
    while (low < q.length && q[low] < minQ) low++;
    // Require we fit at least 10% of the curve
    if (high - low < q.length * 0.1) {
        IJ.error(TITLE, "Not enough points for Q estimation");
        return;
    }
    // Obtain initial estimate of Q plateau height and decay.
    // This can be done by fitting the precision histogram and then fixing the mean and sigma.
    // Or it can be done by allowing the precision to be sampled and the mean and sigma
    // become parameters for fitting.
    // Check if we can sample precision values
    boolean sampleDecay = precision != null && FIRE.sampleDecay;
    double[] exp_decay;
    if (sampleDecay) {
        // Random sample of precision values from the distribution is used to 
        // construct the decay curve
        int[] sample = Random.sample(10000, precision.getN(), new Well19937c());
        final double four_pi2 = 4 * Math.PI * Math.PI;
        double[] pre = new double[q.length];
        for (int i = 1; i < q.length; i++) pre[i] = -four_pi2 * q[i] * q[i];
        // Sample
        final int n = sample.length;
        double[] hq = new double[n];
        for (int j = 0; j < n; j++) {
            // Scale to SR pixels
            double s2 = precision.getValue(sample[j]) / images.nmPerPixel;
            s2 *= s2;
            for (int i = 1; i < q.length; i++) hq[i] += FastMath.exp(pre[i] * s2);
        }
        for (int i = 1; i < q.length; i++) hq[i] /= n;
        exp_decay = new double[q.length];
        exp_decay[0] = 1;
        for (int i = 1; i < q.length; i++) {
            double sinc_q = sinc(Math.PI * q[i]);
            exp_decay[i] = sinc_q * sinc_q * hq[i];
        }
    } else {
        // Note: The mean and std of the precision (sigma) should be in units of super-resolution
        // pixels, so scale to SR pixels
        exp_decay = computeExpDecay(histogram.mean / images.nmPerPixel, histogram.sigma / images.nmPerPixel, q);
    }
    // Smoothing
    double[] smooth;
    if (loessSmoothing) {
        // Note: This computes the log then smooths it 
        double bandwidth = 0.1;
        int robustness = 0;
        double[] l = new double[exp_decay.length];
        for (int i = 0; i < l.length; i++) {
            // Original Matlab code computes the log for each array.
            // This is equivalent to a single log on the fraction of the two.
            // Perhaps the two log method is more numerically stable.
            //l[i] = Math.log(Math.abs(frcnum[i])) - Math.log(exp_decay[i]);
            l[i] = Math.log(Math.abs(frcnum[i] / exp_decay[i]));
        }
        try {
            LoessInterpolator loess = new LoessInterpolator(bandwidth, robustness);
            smooth = loess.smooth(q, l);
        } catch (Exception e) {
            IJ.error(TITLE, "LOESS smoothing failed");
            return;
        }
    } else {
        // Note: This smooths the curve before computing the log 
        double[] norm = new double[exp_decay.length];
        for (int i = 0; i < norm.length; i++) {
            norm[i] = frcnum[i] / exp_decay[i];
        }
        // Median window of 5 == radius of 2
        MedianWindow mw = new MedianWindow(norm, 2);
        smooth = new double[exp_decay.length];
        for (int i = 0; i < norm.length; i++) {
            smooth[i] = Math.log(Math.abs(mw.getMedian()));
            mw.increment();
        }
    }
    // Fit with quadratic to find the initial guess.
    // Note: example Matlab code frc_Qcorrection7.m identifies regions of the 
    // smoothed log curve with low derivative and only fits those. The fit is 
    // used for the final estimate. Fitting a subset with low derivative is not 
    // implemented here since the initial estimate is subsequently optimised 
    // to maximise a cost function. 
    Quadratic curve = new Quadratic();
    SimpleCurveFitter fit = SimpleCurveFitter.create(curve, new double[2]);
    WeightedObservedPoints points = new WeightedObservedPoints();
    for (int i = low; i < high; i++) points.add(q[i], smooth[i]);
    double[] estimate = fit.fit(points.toList());
    double qValue = FastMath.exp(estimate[0]);
    //System.out.printf("Initial q-estimate = %s => %.3f\n", Arrays.toString(estimate), qValue);
    // This could be made an option. Just use for debugging
    boolean debug = false;
    if (debug) {
        // Plot the initial fit and the fit curve
        double[] qScaled = FRC.computeQ(frcCurve, true);
        double[] line = new double[q.length];
        for (int i = 0; i < q.length; i++) line[i] = curve.value(q[i], estimate);
        String title = TITLE + " Initial fit";
        Plot2 plot = new Plot2(title, "Spatial Frequency (nm^-1)", "FRC Numerator");
        String label = String.format("Q = %.3f", qValue);
        plot.addPoints(qScaled, smooth, Plot.LINE);
        plot.setColor(Color.red);
        plot.addPoints(qScaled, line, Plot.LINE);
        plot.setColor(Color.black);
        plot.addLabel(0, 0, label);
        Utils.display(title, plot, Utils.NO_TO_FRONT);
    }
    if (fitPrecision) {
        // Q - Should this be optional?
        if (sampleDecay) {
            // If a sample of the precision was used to construct the data for the initial fit 
            // then update the estimate using the fit result since it will be a better start point. 
            histogram.sigma = precision.getStandardDeviation();
            // Normalise sum-of-squares to the SR pixel size
            double meanSumOfSquares = (precision.getSumOfSquares() / (images.nmPerPixel * images.nmPerPixel)) / precision.getN();
            histogram.mean = images.nmPerPixel * Math.sqrt(meanSumOfSquares - estimate[1] / (4 * Math.PI * Math.PI));
        }
        // Do a multivariate fit ...
        SimplexOptimizer opt = new SimplexOptimizer(1e-6, 1e-10);
        PointValuePair p = null;
        MultiPlateauness f = new MultiPlateauness(frcnum, q, low, high);
        double[] initial = new double[] { histogram.mean / images.nmPerPixel, histogram.sigma / images.nmPerPixel, qValue };
        p = findMin(p, opt, f, scale(initial, 0.1));
        p = findMin(p, opt, f, scale(initial, 0.5));
        p = findMin(p, opt, f, initial);
        p = findMin(p, opt, f, scale(initial, 2));
        p = findMin(p, opt, f, scale(initial, 10));
        if (p != null) {
            double[] point = p.getPointRef();
            histogram.mean = point[0] * images.nmPerPixel;
            histogram.sigma = point[1] * images.nmPerPixel;
            qValue = point[2];
        }
    } else {
        // If so then this should be optional.
        if (sampleDecay) {
            if (precisionMethod != PrecisionMethod.FIXED) {
                histogram.sigma = precision.getStandardDeviation();
                // Normalise sum-of-squares to the SR pixel size
                double meanSumOfSquares = (precision.getSumOfSquares() / (images.nmPerPixel * images.nmPerPixel)) / precision.getN();
                histogram.mean = images.nmPerPixel * Math.sqrt(meanSumOfSquares - estimate[1] / (4 * Math.PI * Math.PI));
            }
            exp_decay = computeExpDecay(histogram.mean / images.nmPerPixel, histogram.sigma / images.nmPerPixel, q);
        }
        // Estimate spurious component by promoting plateauness.
        // The Matlab code used random initial points for a Simplex optimiser.
        // A Brent line search should be pretty deterministic so do simple repeats.
        // However it will proceed downhill so if the initial point is wrong then 
        // it will find a sub-optimal result.
        UnivariateOptimizer o = new BrentOptimizer(1e-3, 1e-6);
        Plateauness f = new Plateauness(frcnum, exp_decay, low, high);
        UnivariatePointValuePair p = null;
        p = findMin(p, o, f, qValue, 0.1);
        p = findMin(p, o, f, qValue, 0.2);
        p = findMin(p, o, f, qValue, 0.333);
        p = findMin(p, o, f, qValue, 0.5);
        // Do some Simplex repeats as well
        SimplexOptimizer opt = new SimplexOptimizer(1e-6, 1e-10);
        p = findMin(p, opt, f, qValue * 0.1);
        p = findMin(p, opt, f, qValue * 0.5);
        p = findMin(p, opt, f, qValue);
        p = findMin(p, opt, f, qValue * 2);
        p = findMin(p, opt, f, qValue * 10);
        if (p != null)
            qValue = p.getPoint();
    }
    QPlot qplot = new QPlot(frcCurve, qValue, low, high);
    // Interactive dialog to estimate Q (blinking events per fluorophore) using
    // sliders for the mean and standard deviation of the localisation precision.
    showQEstimationDialog(histogram, qplot, frcCurve, images.nmPerPixel);
    IJ.showStatus(TITLE + " complete");
}
Also used : BrentOptimizer(org.apache.commons.math3.optim.univariate.BrentOptimizer) Plot2(ij.gui.Plot2) Well19937c(org.apache.commons.math3.random.Well19937c) PointValuePair(org.apache.commons.math3.optim.PointValuePair) UnivariatePointValuePair(org.apache.commons.math3.optim.univariate.UnivariatePointValuePair) LoessInterpolator(org.apache.commons.math3.analysis.interpolation.LoessInterpolator) WeightedObservedPoints(org.apache.commons.math3.fitting.WeightedObservedPoints) SimplexOptimizer(org.apache.commons.math3.optim.nonlinear.scalar.noderiv.SimplexOptimizer) MemoryPeakResults(gdsc.smlm.results.MemoryPeakResults) MedianWindow(gdsc.core.utils.MedianWindow) SimpleCurveFitter(org.apache.commons.math3.fitting.SimpleCurveFitter) FRCCurveResult(gdsc.smlm.ij.frc.FRC.FRCCurveResult) StoredDataStatistics(gdsc.core.utils.StoredDataStatistics) WeightedObservedPoint(org.apache.commons.math3.fitting.WeightedObservedPoint) TooManyEvaluationsException(org.apache.commons.math3.exception.TooManyEvaluationsException) FRCCurve(gdsc.smlm.ij.frc.FRC.FRCCurve) FRC(gdsc.smlm.ij.frc.FRC) UnivariateOptimizer(org.apache.commons.math3.optim.univariate.UnivariateOptimizer)
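The comment block and the sampled-decay loop in runQEstimation amount to the following model, written here as a LaTeX sketch using the code's own names (q is spatial frequency in super-resolution pixel units, sigma_j the sampled localisation precisions scaled to SR pixels, frcnum the normalised FRC numerator):

\mathrm{exp\_decay}(q) = \operatorname{sinc}^2(\pi q)\,\frac{1}{n}\sum_{j=1}^{n} e^{-4\pi^2 \sigma_j^2 q^2},
\qquad
\frac{\mathrm{frcnum}(q)}{Q \cdot \mathrm{exp\_decay}(q)} \approx 1 \ \text{at high}\ q.

The quadratic fit to log(frcnum / exp_decay) over the [low, high) region therefore supplies the initial estimate as qValue = exp(estimate[0]), which the subsequent Brent or simplex optimisation of the Plateauness cost function refines.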

Example 25 with StoredDataStatistics

Use of gdsc.core.utils.StoredDataStatistics in project GDSC-SMLM by aherbert.

From the class FIRE, method calculatePrecisionHistogram.

/**
 * Calculate a histogram of the precision. The precision can be either stored in the results or calculated using
 * the Mortensen formula. If the precision method for Q estimation is not fixed then the histogram is fitted with
 * a Gaussian to create an initial estimate.
 *
 * @return The precision histogram
 */
private PrecisionHistogram calculatePrecisionHistogram() {
    boolean logFitParameters = false;
    String title = results.getName() + " Precision Histogram";
    // Check if the results have the precision stored already or if it can be computed.
    boolean canUseStored = canUseStoredPrecision(results);
    boolean canCalculatePrecision = canCalculatePrecision(results);
    // Set the method to compute a histogram. Default to the user selected option.
    PrecisionMethod m = null;
    if (canUseStored && precisionMethod == PrecisionMethod.STORED)
        m = precisionMethod;
    else if (canCalculatePrecision && precisionMethod == PrecisionMethod.CALCULATE)
        m = precisionMethod;
    if (m == null) {
        // We only have two choices so if one is available then select it.
        if (canUseStored)
            m = PrecisionMethod.STORED;
        else if (canCalculatePrecision)
            m = PrecisionMethod.CALCULATE;
        // If the user selected a method not available then log a warning
        if (m != null && precisionMethod != PrecisionMethod.FIXED) {
            IJ.log(String.format("%s : Selected precision method '%s' not available, switching to '%s'", TITLE, precisionMethod, m.getName()));
        }
        if (m == null) {
            // This does not matter if the user has provided a fixed input.
            if (precisionMethod == PrecisionMethod.FIXED) {
                PrecisionHistogram histogram = new PrecisionHistogram(title);
                histogram.mean = mean;
                histogram.sigma = sigma;
                return histogram;
            }
            // No precision
            return null;
        }
    }
    // We get here if we can compute precision.
    // Build the histogram 
    StoredDataStatistics precision = new StoredDataStatistics(results.size());
    if (m == PrecisionMethod.STORED) {
        for (PeakResult r : results.getResults()) {
            precision.add(r.getPrecision());
        }
    } else {
        final double nmPerPixel = results.getNmPerPixel();
        final double gain = results.getGain();
        final boolean emCCD = results.isEMCCD();
        for (PeakResult r : results.getResults()) {
            precision.add(r.getPrecision(nmPerPixel, gain, emCCD));
        }
    }
    //System.out.printf("Raw p = %f\n", precision.getMean());
    double yMin = Double.NEGATIVE_INFINITY, yMax = 0;
    // Set the min and max y-values using ~1.5 x IQR either side of the median (Q1 - IQR to Q3 + IQR)
    DescriptiveStatistics stats = precision.getStatistics();
    double lower = stats.getPercentile(25);
    double upper = stats.getPercentile(75);
    if (Double.isNaN(lower) || Double.isNaN(upper)) {
        if (logFitParameters)
            Utils.log("Error computing IQR: %f - %f", lower, upper);
    } else {
        double iqr = upper - lower;
        yMin = FastMath.max(lower - iqr, stats.getMin());
        yMax = FastMath.min(upper + iqr, stats.getMax());
        if (logFitParameters)
            Utils.log("  Data range: %f - %f. Plotting 1.5x IQR: %f - %f", stats.getMin(), stats.getMax(), yMin, yMax);
    }
    if (yMin == Double.NEGATIVE_INFINITY) {
        int n = 5;
        yMin = Math.max(stats.getMin(), stats.getMean() - n * stats.getStandardDeviation());
        yMax = Math.min(stats.getMax(), stats.getMean() + n * stats.getStandardDeviation());
        if (logFitParameters)
            Utils.log("  Data range: %f - %f. Plotting mean +/- %dxSD: %f - %f", stats.getMin(), stats.getMax(), n, yMin, yMax);
    }
    // Get the data within the range
    double[] data = precision.getValues();
    precision = new StoredDataStatistics(data.length);
    for (double d : data) {
        if (d < yMin || d > yMax)
            continue;
        precision.add(d);
    }
    int histogramBins = Utils.getBins(precision, Utils.BinMethod.SCOTT);
    float[][] hist = Utils.calcHistogram(precision.getFloatValues(), yMin, yMax, histogramBins);
    PrecisionHistogram histogram = new PrecisionHistogram(hist, precision, title);
    if (precisionMethod == PrecisionMethod.FIXED) {
        histogram.mean = mean;
        histogram.sigma = sigma;
        return histogram;
    }
    // Fitting of the histogram to produce the initial estimate
    // Extract non-zero data
    float[] x = Arrays.copyOf(hist[0], hist[0].length);
    float[] y = hist[1];
    int count = 0;
    float dx = (x[1] - x[0]) * 0.5f;
    for (int i = 0; i < y.length; i++) if (y[i] > 0) {
        x[count] = x[i] + dx;
        y[count] = y[i];
        count++;
    }
    x = Arrays.copyOf(x, count);
    y = Arrays.copyOf(y, count);
    // Sense check the data to be fitted: get the mean and SD of the histogram
    double[] stats2 = Utils.getHistogramStatistics(x, y);
    if (logFitParameters)
        Utils.log("  Initial Statistics: %f +/- %f", stats2[0], stats2[1]);
    histogram.mean = stats2[0];
    histogram.sigma = stats2[1];
    // Standard Gaussian fit
    double[] parameters = fitGaussian(x, y);
    if (parameters == null) {
        Utils.log("  Failed to fit initial Gaussian");
        return histogram;
    }
    double newMean = parameters[1];
    double error = Math.abs(stats2[0] - newMean) / stats2[1];
    if (error > 3) {
        Utils.log("  Failed to fit Gaussian: %f standard deviations from histogram mean", error);
        return histogram;
    }
    if (newMean < yMin || newMean > yMax) {
        Utils.log("  Failed to fit Gaussian: %f outside data range %f - %f", newMean, yMin, yMax);
        return histogram;
    }
    if (logFitParameters)
        Utils.log("  Initial Gaussian: %f @ %f +/- %f", parameters[0], parameters[1], parameters[2]);
    histogram.mean = parameters[1];
    histogram.sigma = parameters[2];
    return histogram;
}
Also used : DescriptiveStatistics(org.apache.commons.math3.stat.descriptive.DescriptiveStatistics) StoredDataStatistics(gdsc.core.utils.StoredDataStatistics) PeakResult(gdsc.smlm.results.PeakResult) WeightedObservedPoint(org.apache.commons.math3.fitting.WeightedObservedPoint)
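The Javadoc above names the Mortensen formula for the calculated-precision branch. As a hedged sketch (this is the least-squares form from Mortensen et al. 2010; whether r.getPrecision(nmPerPixel, gain, emCCD) evaluates exactly this variant is an assumption), with s the Gaussian PSF standard deviation and a the pixel size (both in nm), N the photon count and b^2 the background variance per pixel:

\sigma_a^2 = s^2 + \frac{a^2}{12},
\qquad
\mathrm{precision}^2 = \frac{\sigma_a^2}{N}\left(\frac{16}{9} + \frac{8\pi\,\sigma_a^2\, b^2}{N\,a^2}\right),

with the emCCD flag presumably applying the usual excess-noise factor of 2 to the variance.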

Aggregations

StoredDataStatistics (gdsc.core.utils.StoredDataStatistics) 30
Statistics (gdsc.core.utils.Statistics) 10
ArrayList (java.util.ArrayList) 10
MemoryPeakResults (gdsc.smlm.results.MemoryPeakResults) 7
WindowOrganiser (ij.plugin.WindowOrganiser) 7
Plot2 (ij.gui.Plot2) 5
BasePoint (gdsc.core.match.BasePoint) 4
PeakResult (gdsc.smlm.results.PeakResult) 4
RandomGenerator (org.apache.commons.math3.random.RandomGenerator) 4
Well19937c (org.apache.commons.math3.random.Well19937c) 4
DescriptiveStatistics (org.apache.commons.math3.stat.descriptive.DescriptiveStatistics) 4
ClusterPoint (gdsc.core.clustering.ClusterPoint) 3
PeakResultPoint (gdsc.smlm.ij.plugins.ResultsMatchCalculator.PeakResultPoint) 3
FluorophoreSequenceModel (gdsc.smlm.model.FluorophoreSequenceModel) 3
LocalisationModel (gdsc.smlm.model.LocalisationModel) 3
Trace (gdsc.smlm.results.Trace) 3
ImageStack (ij.ImageStack) 3
PlotWindow (ij.gui.PlotWindow) 3
Point (java.awt.Point) 3
Rectangle (java.awt.Rectangle) 3