use of uk.ac.sussex.gdsc.smlm.results.PeakResult in project GDSC-SMLM by aherbert.
the class BenchmarkFilterAnalysis method getCoordinates.
private static TIntObjectHashMap<UniqueIdPeakResult[]> getCoordinates(MemoryPeakResults results) {
  final TIntObjectHashMap<UniqueIdPeakResult[]> coords = new TIntObjectHashMap<>();
  if (results.size() > 0) {
    // Do not use HashMap directly to build the coords object since there
    // will be many calls to getEntry(). Instead sort the results and use
    // a new list for each time point
    results.sort();
    final Counter uniqueId = new Counter();
    final FrameCounter counter = new FrameCounter();
    final LocalList<PeakResult> tmp = new LocalList<>();
    // Add the results to the lists
    results.forEach((PeakResultProcedure) result -> {
      if (counter.advanceAndReset(result.getFrame()) && !tmp.isEmpty()) {
        coords.put(counter.previousFrame(), tmp.toArray(new UniqueIdPeakResult[0]));
        tmp.clear();
      }
      tmp.add(new UniqueIdPeakResult(tmp.size(), uniqueId.getAndIncrement(), result));
    });
    if (!tmp.isEmpty()) {
      coords.put(counter.currentFrame(), tmp.toArray(new UniqueIdPeakResult[0]));
    }
  }
  return coords;
}
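The pattern above is a sort-then-flush grouping: the results are sorted by frame so each frame's localisations are contiguous, collected into a temporary list, and flushed into the map whenever the frame counter advances. A minimal stand-alone sketch of the same pattern using only standard Java collections is shown below; it is illustrative only (the project code uses Trove's TIntObjectHashMap and a FrameCounter to avoid boxing), and the int[][] input of (frame, value) pairs is a hypothetical stand-in for PeakResult objects. Requires java.util imports (ArrayList, Arrays, Comparator, HashMap, List, Map).
static Map<Integer, int[]> groupByFrame(int[][] frameValuePairs) {
  // Sort by frame so each frame's entries are contiguous.
  Arrays.sort(frameValuePairs, Comparator.comparingInt((int[] p) -> p[0]));
  final Map<Integer, int[]> coords = new HashMap<>();
  final List<Integer> tmp = new ArrayList<>();
  int previousFrame = Integer.MIN_VALUE;
  for (final int[] pair : frameValuePairs) {
    // Flush the temporary list when a new frame starts.
    if (pair[0] != previousFrame && !tmp.isEmpty()) {
      coords.put(previousFrame, tmp.stream().mapToInt(Integer::intValue).toArray());
      tmp.clear();
    }
    previousFrame = pair[0];
    tmp.add(pair[1]);
  }
  if (!tmp.isEmpty()) {
    coords.put(previousFrame, tmp.stream().mapToInt(Integer::intValue).toArray());
  }
  return coords;
}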
use of uk.ac.sussex.gdsc.smlm.results.PeakResult in project GDSC-SMLM by aherbert.
the class TraceDiffusion method calculatePrecision.
/**
* Calculate the average precision of the localisations in the traces.
*
* @param traces the traces
* @param multi true if the traces were from multiple input results
*/
private void calculatePrecision(Trace[] traces, boolean multi) {
  // Check the diffusion simulation for a precision
  if (DiffusionRateTest.isSimulated(results.getName()) && !multi) {
    precision = DiffusionRateTest.getLastSimulationPrecision();
  } else {
    precision = 999;
    try {
      final Gaussian2DPeakResultCalculator calculator = Gaussian2DPeakResultHelper.create(
          results.getPsf(), results.getCalibration(), Gaussian2DPeakResultHelper.LSE_PRECISION);
      // Get the average precision of the localisations
      precision = 0;
      int count = 0;
      for (final Trace trace : traces) {
        for (int k = 0; k < trace.size(); k++) {
          final PeakResult r = trace.get(k);
          precision += calculator.getLsePrecision(r.getParameters(), r.getNoise());
        }
        count += trace.size();
      }
      precision /= count;
    } catch (final ConfigurationException ex) {
      // Ignore this and we will ask the user for the precision
    }
  }
  if (precision > 100) {
    final ExtendedGenericDialog gd = new ExtendedGenericDialog(TITLE);
    gd.addMessage("The average precision of the traced results is "
        + MathUtils.rounded(precision, 4) + " nm.\nPlease verify the precision.");
    gd.addSlider("Precision (nm)", 5, 100, precision);
    gd.showDialog();
    if (!(gd.wasCanceled() || gd.invalidNumber())) {
      precision = Math.abs(gd.getNextNumber());
    }
  }
}
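The core of this method is a plain nested average: the LSE precision of every localisation in every trace is summed and divided by the total count, with 999 nm acting as a sentinel that later triggers the verification dialog. A minimal sketch of that accumulation, assuming per-localisation precisions are already available as plain arrays (the real code computes each value with a Gaussian2DPeakResultCalculator), is:
// Hypothetical stand-in data: each inner array holds the per-localisation precision
// estimates (nm) of one trace.
static double averagePrecision(double[][] traces) {
  double sum = 0;
  int count = 0;
  for (final double[] trace : traces) {
    for (final double p : trace) {
      sum += p;
    }
    count += trace.length;
  }
  // Fall back to the 999 nm sentinel used above when there is nothing to average.
  return count == 0 ? 999 : sum / count;
}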
use of uk.ac.sussex.gdsc.smlm.results.PeakResult in project GDSC-SMLM by aherbert.
the class TranslateResults method run.
@Override
public void run(String arg) {
  SmlmUsageTracker.recordPlugin(this.getClass(), arg);
  if (MemoryPeakResults.isMemoryEmpty()) {
    IJ.error(TITLE, "There are no fitting results in memory");
    return;
  }
  final TranslateResultsSettings.Builder settings =
      SettingsManager.readTranslateResultsSettings(0).toBuilder();
  // Show a dialog allowing a results set to be selected and the translation to be configured
  final ExtendedGenericDialog gd = new ExtendedGenericDialog(TITLE);
  gd.addMessage("Select a dataset to translate");
  ResultsManager.addInput(gd, settings.getInputOption(), InputSource.MEMORY);
  gd.addNumericField("x", settings.getDx(), 3);
  gd.addNumericField("y", settings.getDy(), 3);
  gd.addNumericField("z", settings.getDz(), 3);
  gd.addChoice("Distance_unit", SettingsManager.getDistanceUnitNames(),
      settings.getDistanceUnitValue());
  gd.addHelp(HelpUrls.getUrl("translate-results"));
  gd.showDialog();
  if (gd.wasCanceled()) {
    return;
  }
  settings.setInputOption(ResultsManager.getInputSource(gd));
  settings.setDx(gd.getNextNumber());
  settings.setDy(gd.getNextNumber());
  settings.setDz(gd.getNextNumber());
  settings.setDistanceUnitValue(gd.getNextChoiceIndex());
  SettingsManager.writeSettings(settings);
  final MemoryPeakResults results =
      ResultsManager.loadInputResults(settings.getInputOption(), false, null, null);
  if (MemoryPeakResults.isEmpty(results)) {
    IJ.error(TITLE, "No results could be loaded");
    return;
  }
  TypeConverter<DistanceUnit> converter;
  try {
    converter = results.getDistanceConverter(settings.getDistanceUnit());
  } catch (final DataException ex) {
    IJ.error(TITLE, "Unit conversion error: " + ex.getMessage());
    return;
  }
  final float x = (float) converter.convertBack(settings.getDx());
  final float y = (float) converter.convertBack(settings.getDy());
  final float z = (float) converter.convertBack(settings.getDz());
  // Reset the 2D bounds
  if (x != 0 || y != 0) {
    results.setBounds(null);
  }
  results.forEach((PeakResultProcedure) peakResult -> {
    final float[] params = peakResult.getParameters();
    params[PeakResult.X] += x;
    params[PeakResult.Y] += y;
    params[PeakResult.Z] += z;
  });
}
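Note the direction of the unit conversion: the user enters the shift in the chosen distance unit, and convertBack maps it into the unit the parameters are stored in before the shift is added in place. A minimal sketch with hypothetical numbers (assuming nm input and pixel storage with a 100 nm/pixel calibration; not project code) is:
static void translateX(float[] xCoordinatesInPixels) {
  final double nmPerPixel = 100.0;               // assumed calibration
  final float dx = (float) (250.0 / nmPerPixel); // a 250 nm shift "converted back" to pixels
  for (int i = 0; i < xCoordinatesInPixels.length; i++) {
    xCoordinatesInPixels[i] += dx;               // translate in place
  }
}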
use of uk.ac.sussex.gdsc.smlm.results.PeakResult in project GDSC-SMLM by aherbert.
the class TrackPopulationAnalysis method extractTrackData.
/**
* Extract the track data. This extracts different descriptors of the track using a rolling local
* window.
*
* <p>Distances are converted to {@code unit} using the provided converter and time units are
* converted from frames to seconds (s). The diffusion coefficient is in unit^2/s.
*
* <p>If categories are added they are remapped to be a natural sequence starting from 0.
*
* @param tracks the tracks
* @param distanceConverter the distance converter
* @param deltaT the time step of each frame in seconds (delta T)
* @param hasCategory if true add the category from the result to the end of the data
* @return the track data (track lengths and descriptors)
*/
private Pair<int[], double[][]> extractTrackData(List<Trace> tracks,
    TypeConverter<DistanceUnit> distanceConverter, double deltaT, boolean hasCategory) {
  final List<double[]> data = new LocalList<>(tracks.size());
  double[] x = new double[0];
  double[] y = new double[0];
  final int wm1 = settings.window - 1;
  final int valuesLength = hasCategory ? 5 : 4;
  final int mid = settings.window / 2;
  final MsdFitter msdFitter = new MsdFitter(settings, deltaT);
  final double significance = settings.significance;
  final int[] fitResult = new int[4];
  // Factor for the diffusion coefficient: 1/N * 1/(2*dimensions*deltaT) = 1 / 4Nt
  // with N the number of points to average.
  final double diffusionCoefficientFactor = 1.0 / (4 * wm1 * deltaT);
  // Used for the standard deviations
  final Statistics statsX = new Statistics();
  final Statistics statsY = new Statistics();
  final Ticker ticker =
      ImageJUtils.createTicker(tracks.size(), 1, "Computing track features...");
  // Collect the track categories. Later these are renumbered to a natural sequence from 0.
  final TIntHashSet catSet = new TIntHashSet();
  // final StoredDataStatistics statsAlpha = new StoredDataStatistics(tracks.size());
  // Process each track
  final TIntArrayList lengths = new TIntArrayList(tracks.size());
  for (final Trace track : tracks) {
    // Get xy coordinates
    final int size = track.size();
    if (x.length < size) {
      x = new double[size];
      y = new double[size];
    }
    for (int i = 0; i < size; i++) {
      final PeakResult peak = track.get(i);
      x[i] = distanceConverter.convert(peak.getXPosition());
      y[i] = distanceConverter.convert(peak.getYPosition());
    }
    final int smwm1 = size - wm1;
    final int previousSize = data.size();
    for (int k = 0; k < smwm1; k++) {
      final double[] values = new double[valuesLength];
      data.add(values);
      // middle of even sized windows is between two localisations.
      if (hasCategory) {
        final int cat = track.get(k + mid).getCategory();
        values[4] = cat;
        catSet.add(cat);
      }
      // First point in window = k
      // Last point in window = k + w - 1 = k + wm1
      final int end = k + wm1;
      // 1. Anomalous exponent.
      msdFitter.setData(x, y);
      try {
        msdFitter.fit(k, null);
        // statsAlpha.add(msdFitter.alpha);
        int fitIndex = msdFitter.positiveSlope ? 0 : 2;
        // If better then this is anomalous diffusion
        final double alpha = msdFitter.lvmSolution2.getPoint().getEntry(2);
        values[0] = alpha;
        if (msdFitter.pValue > significance) {
          fitIndex++;
        }
        fitResult[fitIndex]++;
        // values[0] = msdFitter.alpha;
        // // Debug
        // if (
        // // msdFitter.pValue < 0.2
        // msdFitter.pValue < 0.2 && values[0] < 0
        // // !msdFitter.positiveSlope
        // ) {
        // final RealVector p = msdFitter.lvmSolution2.getPoint();
        // final String title = "anomalous exponent";
        // final Plot plot = new Plot(title, "time (s)", "MSD (um^2)");
        // final double[] t = SimpleArrayUtils.newArray(msdFitter.s.length, deltaT, deltaT);
        // plot.addLabel(0, 0, msdFitter.lvmSolution2.getPoint().toString() + " p="
        // + msdFitter.pValue + ". " + msdFitter.lvmSolution1.getPoint().toString());
        // plot.addPoints(t, msdFitter.s, Plot.CROSS);
        // plot.addPoints(t, msdFitter.model2.value(p).getFirst().toArray(), Plot.LINE);
        // plot.setColor(Color.BLUE);
        // plot.addPoints(t,
        // msdFitter.model1.value(msdFitter.lvmSolution1.getPoint()).getFirst().toArray(),
        // Plot.LINE);
        // plot.setColor(Color.RED);
        // final double[] yy = Arrays.stream(t).map(msdFitter.reg::predict).toArray();
        // plot.addPoints(t, yy, Plot.CIRCLE);
        // System.out.printf("%s : %s", msdFitter.lvmSolution2.getPoint(), values[0]);
        // ImageJUtils.display(title, plot, ImageJUtils.NO_TO_FRONT);
        // System.out.println();
        // }
      } catch (TooManyIterationsException | ConvergenceException ex) {
        if (settings.debug) {
          ImageJUtils.log("Failed to fit anomalous exponent: " + ex.getMessage());
        }
        // Ignore this and leave as Brownian motion
        values[0] = 1.0;
      }
      // Referenced papers:
      // Hozé, N. & Holcman, D. (2017) Statistical methods for large ensembles of super-resolution
      // stochastic single particle trajectories in cell biology.
      // Annual Review of Statistics and Its Application 4, 189-223
      //
      // Amitai, A., Seeber, A., Gasser, S. M. & Holcman, D. (2017) Visualization of Chromatin
      // Decompaction and Break Site Extrusion as Predicted by Statistical Polymer
      // Modeling of Single-Locus Trajectories. Cell reports 18, 1200-1214
      // 2. Effective diffusion coefficient (Hozé, eq 10).
      // This is the average squared jump distance between successive points
      // divided by (2 * dimensions * deltaT), i.e. 4 * deltaT.
      double sum = 0;
      for (int i = k; i < end; i++) {
        sum += MathUtils.distance2(x[i], y[i], x[i + 1], y[i + 1]);
      }
      values[1] = sum * diffusionCoefficientFactor;
      // 3. Length of confinement (Amitai et al, eq 1).
      // Compute the average of the standard deviation of the position in each dimension.
      statsX.reset();
      statsY.reset();
      for (int i = k; i <= end; i++) {
        statsX.add(x[i]);
        statsY.add(y[i]);
      }
      values[2] = (statsX.getStandardDeviation() + statsY.getStandardDeviation()) / 2;
      // 4. Magnitude of drift vector (Hozé, eq 9).
      // Note: The drift field is given as the expected distance between successive points, i.e.
      // the average step. Since all track windows are the same length this is the same
      // as the distance between the first and last point divided by the number of steps
      // (window - 1). The drift field in each dimension is combined to create a drift norm,
      // i.e. Euclidean distance.
      values[3] = MathUtils.distance(x[k], y[k], x[end], y[end]) / wm1;
    }
    lengths.add(data.size() - previousSize);
    ticker.tick();
  }
  ImageJUtils.finished();
  if (settings.debug) {
    ImageJUtils.log(" +Slope, significant: %d", fitResult[0]);
    ImageJUtils.log(" +Slope, insignificant: %d", fitResult[1]);
    ImageJUtils.log(" -Slope, significant: %d", fitResult[2]);
    ImageJUtils.log(" -Slope, insignificant: %d", fitResult[3]);
  }
  ImageJUtils.log("Insignificant anomalous exponents: %d / %d", fitResult[1] + fitResult[3],
      data.size());
  // System.out.println(statsAlpha.getStatistics().toString());
  final double[][] trackData = data.toArray(new double[0][0]);
  if (hasCategory) {
    final int[] categories = catSet.toArray();
    Arrays.sort(categories);
    // Only remap if non-compact (i.e. not 0 to n)
    final int max = categories[categories.length - 1];
    if (categories[0] != 0 || max + 1 != categories.length) {
      final int[] remap = new int[max + 1];
      for (int i = 0; i < categories.length; i++) {
        remap[categories[i]] = i;
      }
      final int end = valuesLength - 1;
      for (final double[] values : trackData) {
        values[end] = remap[(int) values[end]];
      }
    }
  }
  return Pair.create(lengths.toArray(), trackData);
}
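Of the four descriptors, only the anomalous exponent needs the MsdFitter; the other three are simple window statistics. The sketch below (illustrative only, not project code) computes them for a single window of n points with time step deltaT, assuming the coordinates are already in the output distance unit; it uses a population standard deviation helper, whereas the project's Statistics class may use the sample formula.
// Returns {effective diffusion coefficient, confinement length, drift magnitude}.
static double[] windowDescriptors(double[] x, double[] y, double deltaT) {
  final int n = x.length;   // window size
  final int jumps = n - 1;  // number of successive jumps in the window
  // Effective diffusion coefficient: mean squared jump / (2 * dimensions * deltaT).
  double sum = 0;
  for (int i = 0; i < jumps; i++) {
    final double dx = x[i + 1] - x[i];
    final double dy = y[i + 1] - y[i];
    sum += dx * dx + dy * dy;
  }
  final double diffusion = sum / (4 * jumps * deltaT);
  // Length of confinement: mean of the per-dimension standard deviations.
  final double confinement = (standardDeviation(x) + standardDeviation(y)) / 2;
  // Drift magnitude: first-to-last displacement divided by the number of jumps.
  final double dx = x[jumps] - x[0];
  final double dy = y[jumps] - y[0];
  final double drift = Math.sqrt(dx * dx + dy * dy) / jumps;
  return new double[] {diffusion, confinement, drift};
}

// Simple (population) standard deviation helper used above.
static double standardDeviation(double[] v) {
  double mean = 0;
  for (final double d : v) {
    mean += d;
  }
  mean /= v.length;
  double ss = 0;
  for (final double d : v) {
    ss += (d - mean) * (d - mean);
  }
  return Math.sqrt(ss / v.length);
}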
use of uk.ac.sussex.gdsc.smlm.results.PeakResult in project GDSC-SMLM by aherbert.
the class TrackPopulationAnalysis method getTracks.
/**
* Gets the tracks. Each track has contiguous frames and the length is enough to fit
* {@code minTrackLength} overlapping windows of the specified size:
*
* <pre>
* length >= window + minTrackLength - 1
* </pre>
*
* @param combinedResults the combined results
* @param window the window size
* @param minTrackLength the minimum track length (assumed to be {@code >= 1})
* @return the tracks
*/
private static List<Trace> getTracks(List<MemoryPeakResults> combinedResults, int window,
    int minTrackLength) {
  final LocalList<Trace> tracks = new LocalList<>();
  final Statistics stats = new Statistics();
  final int minSize = window + Math.max(minTrackLength, 1) - 1;
  combinedResults.forEach(results -> {
    final int start = tracks.size();
    // Sort by id then frame
    results = results.copy();
    results.sort(IdFramePeakResultComparator.INSTANCE);
    final int size = results.size();
    // Skip IDs not associated with clustering
    int index = 0;
    while (index < size && results.get(index).getId() < 1) {
      index++;
    }
    // Initialise current id and frame
    int id = results.get(index).getId() - 1;
    int frame = results.get(index).getFrame();
    Trace track = new Trace();
    for (; index < size; index++) {
      final PeakResult result = results.get(index);
      // Same ID and contiguous frames
      if (result.getId() != id || result.getFrame() != frame + 1) {
        addTrack(minSize, tracks, track);
        track = new Trace();
      }
      id = result.getId();
      frame = result.getFrame();
      track.add(result);
    }
    addTrack(minSize, tracks, track);
    stats.reset();
    for (int i = start; i < tracks.size(); i++) {
      stats.add(tracks.unsafeGet(i).size());
    }
    final StringBuilder sb = new StringBuilder(256);
    TextUtils.formatTo(sb, "%s tracks=%d, length=%s +/- %s", results.getName(), stats.getN(),
        MathUtils.rounded(stats.getMean(), 3), MathUtils.rounded(stats.getStandardDeviation(), 3));
    // Limit of diffusion coefficient from the localisation precision.
    // Just use the entire dataset for simplicity (i.e. not the tracks of min length).
    final PrecisionResultProcedure pp = new PrecisionResultProcedure(results);
    try {
      pp.getPrecision();
      final Mean mean = new Mean();
      for (final double p : pp.precisions) {
        mean.add(p);
      }
      // 2nDt = MSD (n = number of dimensions)
      // D = MSD / 2nt
      final CalibrationReader reader = results.getCalibrationReader();
      final double t = reader.getExposureTime() / 1000.0;
      // Assume computed in nm. Convert to um.
      final double x = mean.getMean() / 1000;
      final double d = x * x / (2 * t);
      TextUtils.formatTo(sb, ", precision=%s nm, D limit=%s um^2/s",
          MathUtils.rounded(x * 1000, 4), MathUtils.rounded(d, 4));
    } catch (final DataException ex) {
      // No precision
    }
    IJ.log(sb.toString());
  });
  return tracks;
}
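The splitting rule is worth spelling out: after sorting by id then frame, a track is closed whenever the id changes or a frame is skipped, and it is kept only if its length allows at least minTrackLength overlapping windows (length >= window + minTrackLength - 1, enforced here via addTrack). A self-contained sketch of that rule on plain (id, frame) pairs, illustrative only and not project code, is below. Requires java.util imports (ArrayList, List).
static List<List<int[]>> splitTracks(int[][] idFramePairs, int window, int minTrackLength) {
  // Input is assumed sorted by id then frame, mirroring IdFramePeakResultComparator.
  final int minSize = window + Math.max(minTrackLength, 1) - 1;
  final List<List<int[]>> tracks = new ArrayList<>();
  List<int[]> track = new ArrayList<>();
  int id = Integer.MIN_VALUE;
  int frame = Integer.MIN_VALUE;
  for (final int[] r : idFramePairs) {
    // Close the current run when the id changes or the frames are not contiguous.
    if (r[0] != id || r[1] != frame + 1) {
      if (track.size() >= minSize) {
        tracks.add(track);
      }
      track = new ArrayList<>();
    }
    id = r[0];
    frame = r[1];
    track.add(r);
  }
  // Keep the final run if it is long enough.
  if (track.size() >= minSize) {
    tracks.add(track);
  }
  return tracks;
}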