Use of uk.ac.sussex.gdsc.core.utils.StoredDataStatistics in project GDSC-SMLM by aherbert.
The class PsfCreator, method runUsingFitting.
private void runUsingFitting() {
if (!showFittingDialog()) {
return;
}
if (!loadConfiguration()) {
return;
}
final BasePoint[] spots = getSpots(0, true);
if (spots.length == 0) {
IJ.error(TITLE, "No spots without neighbours within " + (boxRadius * 2) + "px");
return;
}
final ImageStack stack = getImageStack();
final int width = imp.getWidth();
final int height = imp.getHeight();
final int currentSlice = imp.getSlice();
// Adjust settings for a single maxima
config.setIncludeNeighbours(false);
final ArrayList<double[]> centres = new ArrayList<>(spots.length);
final int iterations = 1;
final LoessInterpolator loess = new LoessInterpolator(settings.getSmoothing(), iterations);
// TODO - The fitting routine may not produce many points. In this instance the LOESS interpolator
// fails to smooth the data very well. A higher bandwidth helps this but perhaps try a different
// smoothing method.
// For each spot
ImageJUtils.log(TITLE + ": " + imp.getTitle());
ImageJUtils.log("Finding spot locations...");
ImageJUtils.log(" %d spot%s without neighbours within %dpx", spots.length, ((spots.length == 1) ? "" : "s"), (boxRadius * 2));
final StoredDataStatistics averageSd = new StoredDataStatistics();
final StoredDataStatistics averageA = new StoredDataStatistics();
final Statistics averageRange = new Statistics();
final MemoryPeakResults allResults = new MemoryPeakResults();
allResults.setCalibration(fitConfig.getCalibration());
allResults.setPsf(fitConfig.getPsf());
allResults.setName(TITLE);
allResults.setBounds(new Rectangle(0, 0, width, height));
MemoryPeakResults.addResults(allResults);
for (int n = 1; n <= spots.length; n++) {
final BasePoint spot = spots[n - 1];
final int x = (int) spot.getX();
final int y = (int) spot.getY();
final MemoryPeakResults results = fitSpot(stack, width, height, x, y);
allResults.add(results);
if (results.size() < 5) {
ImageJUtils.log(" Spot %d: Not enough fit results %d", n, results.size());
continue;
}
// Get the results for the spot centre and width
final double[] z = new double[results.size()];
final double[] xCoord = new double[z.length];
final double[] yCoord = new double[z.length];
final double[] sd;
final double[] a;
final Counter counter = new Counter();
// We have fit the results so they will be in the preferred units
results.forEach(new PeakResultProcedure() {
@Override
public void execute(PeakResult peak) {
final int i = counter.getAndIncrement();
z[i] = peak.getFrame();
xCoord[i] = peak.getXPosition() - x;
yCoord[i] = peak.getYPosition() - y;
}
});
final WidthResultProcedure wp = new WidthResultProcedure(results, DistanceUnit.PIXEL);
wp.getW();
sd = SimpleArrayUtils.toDouble(wp.wx);
final HeightResultProcedure hp = new HeightResultProcedure(results, IntensityUnit.COUNT);
hp.getH();
a = SimpleArrayUtils.toDouble(hp.heights);
// Smooth the amplitude plot
final double[] smoothA = loess.smooth(z, a);
// Find the maximum amplitude
int maximumIndex = findMaximumIndex(smoothA);
// Find the range at a fraction of the max. This is smoothed to find the X/Y centre
int start = 0;
int stop = smoothA.length - 1;
final double limit = smoothA[maximumIndex] * settings.getAmplitudeFraction();
for (int j = 0; j < smoothA.length; j++) {
if (smoothA[j] > limit) {
start = j;
break;
}
}
for (int j = smoothA.length; j-- > 0; ) {
if (smoothA[j] > limit) {
stop = j;
break;
}
}
averageRange.add(stop - start + 1);
// Extract xy centre coords and smooth
double[] smoothX = new double[stop - start + 1];
double[] smoothY = new double[smoothX.length];
double[] smoothSd = new double[smoothX.length];
final double[] newZ = new double[smoothX.length];
for (int j = start, k = 0; j <= stop; j++, k++) {
smoothX[k] = xCoord[j];
smoothY[k] = yCoord[j];
smoothSd[k] = sd[j];
newZ[k] = z[j];
}
smoothX = loess.smooth(newZ, smoothX);
smoothY = loess.smooth(newZ, smoothY);
smoothSd = loess.smooth(newZ, smoothSd);
// Since the amplitude is not very consistent, move from this peak to the
// lowest width, which is the in-focus spot.
maximumIndex = findMinimumIndex(smoothSd, maximumIndex - start);
// Find the centre at the amplitude peak
final double cx = smoothX[maximumIndex] + x;
final double cy = smoothY[maximumIndex] + y;
int cz = (int) newZ[maximumIndex];
double csd = smoothSd[maximumIndex];
double ca = smoothA[maximumIndex + start];
// The average should weight the SD using the signal for each spot
averageSd.add(smoothSd[maximumIndex]);
averageA.add(ca);
if (ignoreSpot(n, z, a, smoothA, xCoord, yCoord, sd, newZ, smoothX, smoothY, smoothSd, cx, cy, cz, csd)) {
ImageJUtils.log(" Spot %d was ignored", n);
continue;
}
// Store result - it may have been moved interactively
maximumIndex += this.slice - cz;
cz = (int) newZ[maximumIndex];
csd = smoothSd[maximumIndex];
ca = smoothA[maximumIndex + start];
ImageJUtils.log(" Spot %d => x=%.2f, y=%.2f, z=%d, sd=%.2f, A=%.2f", n, cx, cy, cz, csd, ca);
centres.add(new double[] { cx, cy, cz, csd, n });
}
if (settings.getInteractiveMode()) {
imp.setSlice(currentSlice);
imp.setOverlay(null);
// Hide the amplitude and spot plots
ImageJUtils.hide(TITLE_AMPLITUDE);
ImageJUtils.hide(TITLE_PSF_PARAMETERS);
}
if (centres.isEmpty()) {
final String msg = "No suitable spots could be identified";
ImageJUtils.log(msg);
IJ.error(TITLE, msg);
return;
}
// Find the limits of the z-centre
int minz = (int) centres.get(0)[2];
int maxz = minz;
for (final double[] centre : centres) {
if (minz > centre[2]) {
minz = (int) centre[2];
} else if (maxz < centre[2]) {
maxz = (int) centre[2];
}
}
IJ.showStatus("Creating PSF image");
// Create a stack that can hold all the data.
final ImageStack psf = createStack(stack, minz, maxz, settings.getMagnification());
// For each spot
final Statistics stats = new Statistics();
boolean ok = true;
for (int i = 0; ok && i < centres.size(); i++) {
final double increment = 1.0 / (stack.getSize() * centres.size());
setProgress((double) i / centres.size());
final double[] centre = centres.get(i);
// Extract the spot
final float[][] spot = new float[stack.getSize()][];
Rectangle regionBounds = null;
for (int slice = 1; slice <= stack.getSize(); slice++) {
final ImageExtractor ie = ImageExtractor.wrap((float[]) stack.getPixels(slice), width, height);
if (regionBounds == null) {
regionBounds = ie.getBoxRegionBounds((int) centre[0], (int) centre[1], boxRadius);
}
spot[slice - 1] = ie.crop(regionBounds);
}
if (regionBounds == null) {
// Empty stack
continue;
}
final int n = (int) centre[4];
final float b = getBackground(n, spot);
if (!subtractBackgroundAndWindow(spot, b, regionBounds.width, regionBounds.height, centre, loess)) {
ImageJUtils.log(" Spot %d was ignored", n);
continue;
}
stats.add(b);
// Adjust the centre using the crop
centre[0] -= regionBounds.x;
centre[1] -= regionBounds.y;
// This takes a long time so this should track progress
ok = addToPsf(maxz, settings.getMagnification(), psf, centre, spot, regionBounds, increment, settings.getCentreEachSlice());
}
if (settings.getInteractiveMode()) {
ImageJUtils.hide(TITLE_INTENSITY);
}
IJ.showProgress(1);
if (!ok || stats.getN() == 0) {
return;
}
final double avSd = getAverage(averageSd, averageA, 2);
ImageJUtils.log(" Average background = %.2f, Av. SD = %s px", stats.getMean(), MathUtils.rounded(avSd, 4));
normalise(psf, maxz, avSd * settings.getMagnification(), false);
IJ.showProgress(1);
psfImp = ImageJUtils.display(TITLE_PSF, psf);
psfImp.setSlice(maxz);
psfImp.resetDisplayRange();
psfImp.updateAndDraw();
final double[][] fitCom = new double[2][psf.getSize()];
Arrays.fill(fitCom[0], Double.NaN);
Arrays.fill(fitCom[1], Double.NaN);
final double fittedSd = fitPsf(psf, loess, maxz, averageRange.getMean(), fitCom);
// Compute the drift in the PSF:
// - Use fitted centre if available; otherwise find CoM for each frame
// - express relative to the average centre
final double[][] com = calculateCentreOfMass(psf, fitCom, nmPerPixel / settings.getMagnification());
final double[] slice = SimpleArrayUtils.newArray(psf.getSize(), 1, 1.0);
final String title = TITLE + " CoM Drift";
final Plot plot = new Plot(title, "Slice", "Drift (nm)");
plot.addLabel(0, 0, "Red = X; Blue = Y");
// double[] limitsX = Maths.limits(com[0]);
// double[] limitsY = Maths.limits(com[1]);
final double[] limitsX = getLimits(com[0]);
final double[] limitsY = getLimits(com[1]);
plot.setLimits(1, psf.getSize(), Math.min(limitsX[0], limitsY[0]), Math.max(limitsX[1], limitsY[1]));
plot.setColor(Color.red);
plot.addPoints(slice, com[0], Plot.DOT);
plot.addPoints(slice, loess.smooth(slice, com[0]), Plot.LINE);
plot.setColor(Color.blue);
plot.addPoints(slice, com[1], Plot.DOT);
plot.addPoints(slice, loess.smooth(slice, com[1]), Plot.LINE);
ImageJUtils.display(title, plot);
// TODO - Redraw the PSF with drift correction applied.
// This means that the final image should have no drift.
// This is relevant when combining PSF images. It doesn't matter too much for simulations
// unless the drift is large.
// Add Image properties containing the PSF details
final double fwhm = getFwhm(psf, maxz);
psfImp.setProperty("Info", ImagePsfHelper.toString(ImagePsfHelper.create(maxz, nmPerPixel / settings.getMagnification(), settings.getNmPerSlice(), stats.getN(), fwhm, createNote())));
ImageJUtils.log("%s : z-centre = %d, nm/Pixel = %s, nm/Slice = %s, %d images, " + "PSF SD = %s nm, FWHM = %s px\n", psfImp.getTitle(), maxz, MathUtils.rounded(nmPerPixel / settings.getMagnification(), 3), MathUtils.rounded(settings.getNmPerSlice(), 3), stats.getN(), MathUtils.rounded(fittedSd * nmPerPixel, 4), MathUtils.rounded(fwhm));
createInteractivePlots(psf, maxz, nmPerPixel / settings.getMagnification(), fittedSd * nmPerPixel);
IJ.showStatus("");
}
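The method above accumulates the per-spot width (SD) and amplitude into StoredDataStatistics instances and smooths each z-profile with a LoessInterpolator before locating the in-focus slice at the minimum smoothed width. Below is a minimal sketch of that accumulate-then-smooth pattern, assuming the Apache Commons Math3 LoessInterpolator and only the StoredDataStatistics methods visible in this excerpt (add, getN); the class name and profile values are hypothetical.

import org.apache.commons.math3.analysis.interpolation.LoessInterpolator;
import uk.ac.sussex.gdsc.core.utils.StoredDataStatistics;

public class ZProfileSmoothingSketch {
  public static void main(String[] args) {
    // Collected in-focus width estimates, one per accepted spot
    final StoredDataStatistics averageSd = new StoredDataStatistics();

    // Hypothetical z-profile of fitted widths for one spot (slice number vs SD in pixels)
    final double[] z = {1, 2, 3, 4, 5, 6, 7, 8, 9};
    final double[] sd = {2.1, 1.8, 1.5, 1.3, 1.2, 1.3, 1.6, 1.9, 2.2};

    // Smooth the profile; a larger bandwidth helps when only a few points were fitted
    final LoessInterpolator loess = new LoessInterpolator(0.5, 1);
    final double[] smoothSd = loess.smooth(z, sd);

    // Take the in-focus slice as the minimum of the smoothed width
    int minIndex = 0;
    for (int i = 1; i < smoothSd.length; i++) {
      if (smoothSd[i] < smoothSd[minIndex]) {
        minIndex = i;
      }
    }
    averageSd.add(smoothSd[minIndex]);

    System.out.printf("In-focus slice=%d, SD=%.3f px, spots=%d%n",
        (int) z[minIndex], smoothSd[minIndex], averageSd.getN());
  }
}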
Use of uk.ac.sussex.gdsc.core.utils.StoredDataStatistics in project GDSC-SMLM by aherbert.
The class Fire, method calculatePrecisionHistogram.
/**
* Calculate a histogram of the precision. The precision can be either stored in the results or
* calculated using the Mortensen formula. If the precision method for Q estimation is not fixed
* then the histogram is fitted with a Gaussian to create an initial estimate.
*
* @return The precision histogram
*/
private PrecisionHistogram calculatePrecisionHistogram() {
final boolean logFitParameters = false;
final String title = results.getName() + " Precision Histogram";
// Check if the results has the precision already or if it can be computed.
final boolean canUseStored = canUseStoredPrecision(results);
final boolean canCalculatePrecision = canCalculatePrecision(results);
// Set the method to compute a histogram. Default to the user selected option.
PrecisionMethod method = null;
if ((canUseStored && precisionMethod == PrecisionMethod.STORED) || (canCalculatePrecision && precisionMethod == PrecisionMethod.CALCULATE)) {
method = precisionMethod;
}
if (method == null) {
// We only have two choices so if one is available then select it.
if (canUseStored) {
method = PrecisionMethod.STORED;
} else if (canCalculatePrecision) {
method = PrecisionMethod.CALCULATE;
}
// If the user selected a method not available then log a warning
if (method != null && precisionMethod != PrecisionMethod.FIXED) {
IJ.log(String.format("%s : Selected precision method '%s' not available, switching to '%s'", pluginTitle, precisionMethod, method.getName()));
}
if (method == null) {
// This does not matter if the user has provided a fixed input.
if (precisionMethod == PrecisionMethod.FIXED) {
final PrecisionHistogram histogram = new PrecisionHistogram(title);
histogram.mean = settings.mean;
histogram.sigma = settings.sigma;
return histogram;
}
// No precision
return null;
}
}
// We get here if we can compute precision.
// Build the histogram
StoredDataStatistics precision = new StoredDataStatistics(results.size());
if (method == PrecisionMethod.STORED) {
final StoredDataStatistics p = precision;
results.forEach((PeakResultProcedure) result -> p.add(result.getPrecision()));
} else {
precision.add(pp.precisions);
}
double yMin = Double.NEGATIVE_INFINITY;
double yMax = 0;
// Set the min and max y-values using 1.5 x IQR
final DescriptiveStatistics stats = precision.getStatistics();
final double lower = stats.getPercentile(25);
final double upper = stats.getPercentile(75);
if (Double.isNaN(lower) || Double.isNaN(upper)) {
if (logFitParameters) {
ImageJUtils.log("Error computing IQR: %f - %f", lower, upper);
}
} else {
final double iqr = upper - lower;
yMin = Math.max(lower - iqr, stats.getMin());
yMax = Math.min(upper + iqr, stats.getMax());
if (logFitParameters) {
ImageJUtils.log(" Data range: %f - %f. Plotting 1.5x IQR: %f - %f", stats.getMin(), stats.getMax(), yMin, yMax);
}
}
if (yMin == Double.NEGATIVE_INFINITY) {
final int n = 5;
yMin = Math.max(stats.getMin(), stats.getMean() - n * stats.getStandardDeviation());
yMax = Math.min(stats.getMax(), stats.getMean() + n * stats.getStandardDeviation());
if (logFitParameters) {
ImageJUtils.log(" Data range: %f - %f. Plotting mean +/- %dxSD: %f - %f", stats.getMin(), stats.getMax(), n, yMin, yMax);
}
}
// Get the data within the range
final double[] data = precision.getValues();
precision = new StoredDataStatistics(data.length);
for (final double d : data) {
if (d < yMin || d > yMax) {
continue;
}
precision.add(d);
}
final int histogramBins = HistogramPlot.getBins(precision, HistogramPlot.BinMethod.SCOTT);
final float[][] hist = HistogramPlot.calcHistogram(precision.getFloatValues(), yMin, yMax, histogramBins);
final PrecisionHistogram histogram = new PrecisionHistogram(hist, precision, title);
if (precisionMethod == PrecisionMethod.FIXED) {
histogram.mean = settings.mean;
histogram.sigma = settings.sigma;
return histogram;
}
// Fitting of the histogram to produce the initial estimate
// Extract non-zero data
float[] x = Arrays.copyOf(hist[0], hist[0].length);
float[] y = Arrays.copyOf(hist[1], hist[1].length);
int count = 0;
for (int i = 0; i < y.length; i++) {
if (y[i] > 0) {
x[count] = x[i];
y[count] = y[i];
count++;
}
}
x = Arrays.copyOf(x, count);
y = Arrays.copyOf(y, count);
// Sanity check the data to be fitted. Get the mean and SD of the histogram.
final double[] stats2 = HistogramPlot.getHistogramStatistics(x, y);
if (logFitParameters) {
ImageJUtils.log(" Initial Statistics: %f +/- %f", stats2[0], stats2[1]);
}
histogram.mean = stats2[0];
histogram.sigma = stats2[1];
// Standard Gaussian fit
final double[] parameters = fitGaussian(x, y);
if (parameters == null) {
ImageJUtils.log(" Failed to fit initial Gaussian");
return histogram;
}
final double newMean = parameters[1];
final double error = Math.abs(stats2[0] - newMean) / stats2[1];
if (error > 3) {
ImageJUtils.log(" Failed to fit Gaussian: %f standard deviations from histogram mean", error);
return histogram;
}
if (newMean < yMin || newMean > yMax) {
ImageJUtils.log(" Failed to fit Gaussian: %f outside data range %f - %f", newMean, yMin, yMax);
return histogram;
}
if (logFitParameters) {
ImageJUtils.log(" Initial Gaussian: %f @ %f +/- %f", parameters[0], parameters[1], parameters[2]);
}
histogram.mean = parameters[1];
histogram.sigma = parameters[2];
return histogram;
}
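Before building the histogram, the method clips the precision data to a range derived from the interquartile range and re-populates a fresh StoredDataStatistics. Here is a minimal sketch of that clipping step, assuming only the API visible above (the capacity constructor, add, getValues, getN, and getStatistics returning a commons-math DescriptiveStatistics); the helper class and method names are hypothetical.

import org.apache.commons.math3.stat.descriptive.DescriptiveStatistics;
import uk.ac.sussex.gdsc.core.utils.StoredDataStatistics;

public class PrecisionClippingSketch {
  /** Return a copy of the precision data restricted to [Q1 - IQR, Q3 + IQR]. */
  static StoredDataStatistics clipToIqrRange(StoredDataStatistics precision) {
    final DescriptiveStatistics stats = precision.getStatistics();
    final double lower = stats.getPercentile(25);
    final double upper = stats.getPercentile(75);
    final double iqr = upper - lower;
    final double yMin = Math.max(lower - iqr, stats.getMin());
    final double yMax = Math.min(upper + iqr, stats.getMax());

    final double[] data = precision.getValues();
    final StoredDataStatistics clipped = new StoredDataStatistics(data.length);
    for (final double d : data) {
      if (d >= yMin && d <= yMax) {
        clipped.add(d);
      }
    }
    return clipped;
  }

  public static void main(String[] args) {
    final StoredDataStatistics precision = new StoredDataStatistics();
    for (final double d : new double[] {8, 9, 10, 11, 12, 60}) {
      precision.add(d); // the outlier (60) falls outside the clipped range
    }
    System.out.println("Kept " + clipToIqrRange(precision).getN() + " of " + precision.getN());
  }
}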
Use of uk.ac.sussex.gdsc.core.utils.StoredDataStatistics in project GDSC-SMLM by aherbert.
The class BenchmarkFilterAnalysis, method expandFilters.
/**
* Expand filters. Set the increment for any parameter not expanded to zero. Set the input
* parameters array to the lower bounds. Set the input parameters2 array to the upper bounds.
*
* <p>If a parameter is not expanded because the increment is infinite, the parameter is disabled. If
* it was not expanded for any other reason (increment is zero, NaN values, etc.), the weakest
* parameter of the two input arrays is used and set as both the lower and upper bound.
*
* @param baseFilter the base filter
* @param parameters the parameters
* @param parameters2 the parameters 2
* @param increment the increment
* @return the list
*/
private static List<Filter> expandFilters(Filter baseFilter, double[] parameters, double[] parameters2, double[] increment) {
final int n = baseFilter.getNumberOfParameters();
if (parameters.length < n || parameters2.length < n || increment.length < n) {
throw new IllegalArgumentException("Input arrays must be at least the length of the number of parameters");
}
final double[] increment2 = increment.clone();
final int capacity = (int) countCombinations(parameters, parameters2, increment);
final ArrayList<Filter> list = new ArrayList<>(capacity);
// Initialise with a filter set at the minimum for each parameter.
// Get the weakest parameters for those not expanded.
Filter f1 = baseFilter.create(parameters);
final double[] p = parameters2.clone();
f1.weakestParameters(p);
for (int i = 0; i < n; i++) {
if (increment[i] == 0) {
// Disable if Infinite increment, otherwise use the weakest parameter
parameters[i] = parameters2[i] = (Double.isInfinite(increment2[i])) ? baseFilter.getDisabledParameterValue(i) : p[i];
}
}
f1 = baseFilter.create(parameters);
list.add(f1);
for (int i = 0; i < n; i++) {
if (increment[i] == 0) {
continue;
}
final double min = parameters[i];
final double max = parameters2[i];
final double inc = increment[i];
final double max2 = max + inc;
// Set the upper bounds for the output and store the expansion params
final StoredDataStatistics stats = new StoredDataStatistics(10);
for (double value = min + inc; value < max || value - max < max2 - value; value += inc) {
parameters2[i] = value;
stats.add(value);
}
final double[] values = stats.getValues();
final ArrayList<Filter> list2 = new ArrayList<>((values.length + 1) * list.size());
for (int k = 0; k < list.size(); k++) {
final Filter f = list.get(k);
// Copy params of the filter
for (int j = 0; j < n; j++) {
p[j] = f.getParameterValue(j);
}
for (int l = 0; l < values.length; l++) {
p[i] = values[l];
list2.add(f.create(p));
}
}
list.addAll(list2);
}
// Sort the filters
Collections.sort(list);
return list;
}
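The expansion loop above enumerates grid values min + inc, min + 2*inc, ... and keeps going while the value is below max + inc/2, so the last value is the grid point nearest to max and may overshoot it by up to half an increment (the lower bound itself is covered by the base filter added first). A small sketch of just that enumeration rule, using only the StoredDataStatistics methods shown here (capacity constructor, add, getValues); the class and method names are hypothetical.

import java.util.Arrays;
import uk.ac.sussex.gdsc.core.utils.StoredDataStatistics;

public class ParameterExpansionSketch {
  /** Enumerate min + inc, min + 2*inc, ... while the value is below max + inc/2. */
  static double[] expand(double min, double max, double inc) {
    final double max2 = max + inc;
    final StoredDataStatistics stats = new StoredDataStatistics(10);
    for (double value = min + inc; value < max || value - max < max2 - value; value += inc) {
      stats.add(value);
    }
    return stats.getValues();
  }

  public static void main(String[] args) {
    // Expanding a parameter from 10 to 20 in steps of 3 gives [13.0, 16.0, 19.0];
    // the lower bound (10) is represented by the base filter added before the expansion.
    System.out.println(Arrays.toString(expand(10, 20, 3)));
  }
}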
Use of uk.ac.sussex.gdsc.core.utils.StoredDataStatistics in project GDSC-SMLM by aherbert.
The class TraceDiffusion, method calculateDiffusionCoefficient.
/**
* Calculate the diffusion coefficient (D) of the molecule. This is done using the mean-squared
* displacement between frames divided by the time interval (delta) between frames. This is divided
* by 4 to produce the diffusion coefficient for two-dimensional distance analysis.
*
* <p>See Uphoff, et al, 2013. Single-molecule DNA repair in live bacteria, PNAS 110, 8063-8068
*
* @param msdPerMoleculeAdjacent the MSD per molecule adjacent
* @return The D per molecule
*/
private static StoredDataStatistics calculateDiffusionCoefficient(StoredDataStatistics msdPerMoleculeAdjacent) {
final StoredDataStatistics dPerMolecule = new StoredDataStatistics();
final double diffusionCoefficientConversion = 1.0 / 4.0;
for (final double msd : msdPerMoleculeAdjacent.getValues()) {
dPerMolecule.add(msd * diffusionCoefficientConversion);
}
return dPerMolecule;
}
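A small worked example of the D = MSD / 4 relation applied here: in run() the per-molecule MSD values are already converted to um^2/s (via px2ToUm2PerSecond), so this method only applies the factor of 1/4 for two-dimensional analysis. The numbers below are hypothetical.

public class DiffusionCoefficientExample {
  public static void main(String[] args) {
    final double msdUm2 = 0.16;        // mean-squared displacement between adjacent frames (um^2)
    final double exposureTime = 0.04;  // frame interval (s)
    final double msdPerSecond = msdUm2 / exposureTime; // 4.0 um^2/s
    final double d = msdPerSecond / 4.0;               // D = 1.0 um^2/s
    System.out.printf("D = %.2f um^2/s%n", d);
  }
}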
Use of uk.ac.sussex.gdsc.core.utils.StoredDataStatistics in project GDSC-SMLM by aherbert.
The class TraceDiffusion, method run.
@Override
public void run(String arg) {
SmlmUsageTracker.recordPlugin(this.getClass(), arg);
jumpDistanceParametersRef.set(null);
extraOptions = ImageJUtils.isExtraOptions();
if (MemoryPeakResults.isMemoryEmpty()) {
IJ.error(TITLE, "No localisations in memory");
return;
}
settings = Settings.load();
// Saved by reference so just save now
settings.save();
final ArrayList<MemoryPeakResults> allResults = new ArrayList<>();
// Option to pick multiple input datasets together using a list box.
if ("multi".equals(arg) && !showMultiDialog(allResults)) {
return;
}
// This shows the dialog for selecting trace options
if (!showTraceDialog(allResults)) {
return;
}
if (allResults.isEmpty()) {
return;
}
ImageJUtils.log(TITLE + "...");
// - Trace each single dataset (and store in memory)
// - Combine trace results held in memory
final Trace[] traces = getTraces(allResults);
// This still allows a zero entry in the results table.
if (traces.length > 0 && !showDialog()) {
return;
}
final int count = traces.length;
double[] fitMsdResult = null;
int numberOfDataPoints = 0;
double[][] jdParams = null;
if (count > 0) {
calculatePrecision(traces, allResults.size() > 1);
// --- MSD Analysis ---
// Conversion constants
final double px2ToUm2 = MathUtils.pow2(results.getCalibrationReader().getNmPerPixel()) / 1e6;
final double px2ToUm2PerSecond = px2ToUm2 / exposureTime;
// Get the maximum trace length
int length = clusteringSettings.getMinimumTraceLength();
if (!clusteringSettings.getTruncate()) {
for (final Trace trace : traces) {
if (length < trace.size()) {
length = trace.size();
}
}
}
// Get the localisation error (4s^2) in um^2
final double error = (clusteringSettings.getPrecisionCorrection()) ? 4 * precision * precision / 1e6 : 0;
// Pre-calculate MSD correction factors. This accounts for the fact that the distance moved
// in the start/end frames is reduced due to the averaging of the particle location over the
// entire frame into a single point. The true MSD may be restored by applying a factor.
// Note: These are used for the calculation of the diffusion coefficients per molecule and
// the MSD passed to the Jump Distance analysis. However the error is not included in the
// jump distance analysis so will be subtracted from the fitted D coefficients later.
final double[] factors;
if (clusteringSettings.getMsdCorrection()) {
factors = new double[length];
for (int t = 1; t < length; t++) {
factors[t] = JumpDistanceAnalysis.getConversionfactor(t);
}
} else {
factors = SimpleArrayUtils.newArray(length, 0.0, 1.0);
}
// Extract the mean-squared distance statistics
final Statistics[] stats = new Statistics[length];
for (int i = 0; i < stats.length; i++) {
stats[i] = new Statistics();
}
final ArrayList<double[]> distances = (settings.saveTraceDistances || settings.displayTraceLength) ? new ArrayList<>(traces.length) : null;
// Store all the jump distances at the specified interval
final StoredDataStatistics jumpDistances = new StoredDataStatistics();
final int jumpDistanceInterval = clusteringSettings.getJumpDistance();
// Compute squared distances
final StoredDataStatistics msdPerMoleculeAllVsAll = new StoredDataStatistics();
final StoredDataStatistics msdPerMoleculeAdjacent = new StoredDataStatistics();
for (final Trace trace : traces) {
final PeakResultStoreList results = trace.getPoints();
// Sum the MSD and the time
final int traceLength = (clusteringSettings.getTruncate()) ? clusteringSettings.getMinimumTraceLength() : trace.size();
// Get the mean for each time separation
final double[] sumDistance = new double[traceLength + 1];
final double[] sumTime = new double[sumDistance.length];
// Do the distances to the origin (saving if necessary)
final float x0 = results.get(0).getXPosition();
final float y0 = results.get(0).getYPosition();
if (distances != null) {
final double[] msd = new double[traceLength - 1];
for (int j = 1; j < traceLength; j++) {
final int t = j;
final double d = distance2(x0, y0, results.get(j));
msd[j - 1] = px2ToUm2 * d;
if (t == jumpDistanceInterval) {
jumpDistances.add(msd[j - 1]);
}
sumDistance[t] += d;
sumTime[t] += t;
}
distances.add(msd);
} else {
for (int j = 1; j < traceLength; j++) {
final int t = j;
final double d = distance2(x0, y0, results.get(j));
if (t == jumpDistanceInterval) {
jumpDistances.add(px2ToUm2 * d);
}
sumDistance[t] += d;
sumTime[t] += t;
}
}
if (clusteringSettings.getInternalDistances()) {
// Do the internal distances
for (int i = 1; i < traceLength; i++) {
final float x = results.get(i).getXPosition();
final float y = results.get(i).getYPosition();
for (int j = i + 1; j < traceLength; j++) {
final int t = j - i;
final double d = distance2(x, y, results.get(j));
if (t == jumpDistanceInterval) {
jumpDistances.add(px2ToUm2 * d);
}
sumDistance[t] += d;
sumTime[t] += t;
}
}
// Add the average distance per time separation to the population
for (int t = 1; t < traceLength; t++) {
// Note: (traceLength - t) == count
stats[t].add(sumDistance[t] / (traceLength - t));
}
} else {
// Add the distance per time separation to the population
for (int t = 1; t < traceLength; t++) {
stats[t].add(sumDistance[t]);
}
}
// Fix this for the precision and MSD adjustment.
// It may be necessary to:
// - sum the raw distances for each time interval (this is sumDistance[t])
// - subtract the precision error
// - apply correction factor for the n-frames to get actual MSD
// - sum the actual MSD
double sumD = 0;
final double sumD_adjacent = Math.max(0, sumDistance[1] - error) * factors[1];
double sumT = 0;
final double sumT_adjacent = sumTime[1];
for (int t = 1; t < traceLength; t++) {
sumD += Math.max(0, sumDistance[t] - error) * factors[t];
sumT += sumTime[t];
}
// Calculate the average displacement for the trace (do not simply use the largest
// time separation since this will miss moving molecules that end up at the origin)
msdPerMoleculeAllVsAll.add(px2ToUm2PerSecond * sumD / sumT);
msdPerMoleculeAdjacent.add(px2ToUm2PerSecond * sumD_adjacent / sumT_adjacent);
}
StoredDataStatistics dperMoleculeAllVsAll = null;
StoredDataStatistics dperMoleculeAdjacent = null;
if (settings.saveTraceDistances || (clusteringSettings.getShowHistograms() && settings.displayDHistogram)) {
dperMoleculeAllVsAll = calculateDiffusionCoefficient(msdPerMoleculeAllVsAll);
dperMoleculeAdjacent = calculateDiffusionCoefficient(msdPerMoleculeAdjacent);
}
if (settings.saveTraceDistances) {
saveTraceDistances(traces.length, distances, msdPerMoleculeAllVsAll, msdPerMoleculeAdjacent, dperMoleculeAllVsAll, dperMoleculeAdjacent);
}
if (settings.displayTraceLength) {
final StoredDataStatistics lengths = calculateTraceLengths(distances);
showHistogram(lengths, "Trace length (um)");
}
if (settings.displayTraceSize) {
final StoredDataStatistics sizes = calculateTraceSizes(traces);
showHistogram(sizes, "Trace size", true);
}
// Plot the per-trace histogram of MSD and D
if (clusteringSettings.getShowHistograms()) {
if (settings.displayMsdHistogram) {
showHistogram(msdPerMoleculeAllVsAll, "MSD/Molecule (all-vs-all)");
showHistogram(msdPerMoleculeAdjacent, "MSD/Molecule (adjacent)");
}
if (settings.displayDHistogram) {
showHistogram(dperMoleculeAllVsAll, "D/Molecule (all-vs-all)");
showHistogram(dperMoleculeAdjacent, "D/Molecule (adjacent)");
}
}
// Calculate the mean squared distance (MSD)
final double[] x = new double[stats.length];
final double[] y = new double[x.length];
final double[] sd = new double[x.length];
// Intercept is 4s^2 (in um^2)
y[0] = 4 * precision * precision / 1e6;
for (int i = 1; i < stats.length; i++) {
x[i] = i * exposureTime;
y[i] = stats[i].getMean() * px2ToUm2;
// sd[i] = stats[i].getStandardDeviation() * px2ToUm2;
sd[i] = stats[i].getStandardError() * px2ToUm2;
}
final String title = TITLE + " MSD";
final Plot plot = plotMsd(x, y, sd, title);
// Fit the MSD using a linear fit
fitMsdResult = fitMsd(x, y, title, plot);
// Jump Distance analysis
if (settings.saveRawData) {
saveStatistics(jumpDistances, "Jump Distance", "Distance (um^2)", false);
}
// Calculate the cumulative jump-distance histogram
final double[][] jdHistogram = JumpDistanceAnalysis.cumulativeHistogram(jumpDistances.getValues());
// Always show the jump distance histogram
jdTitle = TITLE + " Jump Distance";
jdPlot = new Plot(jdTitle, "Distance (um^2)", "Cumulative Probability");
jdPlot.addPoints(jdHistogram[0], jdHistogram[1], Plot.LINE);
display(jdTitle, jdPlot);
// Fit Jump Distance cumulative probability
numberOfDataPoints = jumpDistances.getN();
jdParams = fitJumpDistance(jumpDistances, jdHistogram);
jumpDistanceParametersRef.set(jdParams);
}
summarise(traces, fitMsdResult, numberOfDataPoints, jdParams);
}
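A simplified, hypothetical sketch of the core per-trace accumulation in the MSD section above: adjacent-frame squared displacements are pooled as jump distances and averaged into a per-molecule MSD. Precision subtraction, the MSD correction factors and the GDSC Trace/PeakResult types are omitted; positions are plain arrays and all values are made up.

import uk.ac.sussex.gdsc.core.utils.StoredDataStatistics;

public class JumpDistanceSketch {
  public static void main(String[] args) {
    // One hypothetical trace: x/y positions (pixels) in consecutive frames
    final double[] x = {0.0, 0.5, 0.9, 1.6, 2.0};
    final double[] y = {0.0, 0.4, 1.1, 1.5, 2.1};

    final double nmPerPixel = 107;
    final double exposureTime = 0.04; // seconds per frame
    final double px2ToUm2 = nmPerPixel * nmPerPixel / 1e6;
    final double px2ToUm2PerSecond = px2ToUm2 / exposureTime;

    final StoredDataStatistics jumpDistances = new StoredDataStatistics();
    final StoredDataStatistics msdPerMoleculeAdjacent = new StoredDataStatistics();

    // Pool adjacent-frame squared displacements (the jump distance interval is 1 frame here)
    double sumD = 0;
    for (int j = 1; j < x.length; j++) {
      final double dx = x[j] - x[j - 1];
      final double dy = y[j] - y[j - 1];
      final double d2 = dx * dx + dy * dy;
      sumD += d2;
      jumpDistances.add(px2ToUm2 * d2);
    }

    // Average per molecule and convert to um^2/s
    final double msdAdjacent = px2ToUm2PerSecond * sumD / (x.length - 1);
    msdPerMoleculeAdjacent.add(msdAdjacent);

    System.out.printf("n=%d jump distances, adjacent MSD = %.4f um^2/s%n",
        jumpDistances.getN(), msdAdjacent);
  }
}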