Example usage of uk.ac.sussex.gdsc.smlm.results.MemoryPeakResults in the GDSC-SMLM project by aherbert.
Taken from the class ResultsMatchCalculator, method getCoordinates.
/**
 * Build a map from the peak id (time point) to the list of coordinates present in that frame.
 *
 * <p>Note: the integer conversion uses an {@code (int)} cast, which truncates towards zero;
 * this equals rounding down only for non-negative positions (assumed here — TODO confirm).
 *
 * @param results the results
 * @param coordinateMethod the coordinate method
 * @param integerCoordinates True if the values should be rounded down to integers
 * @return the coordinates
 */
public static TIntObjectHashMap<List<Coordinate>> getCoordinates(MemoryPeakResults results,
    CoordinateMethod coordinateMethod, final boolean integerCoordinates) {
  final TIntObjectHashMap<List<Coordinate>> map = new TIntObjectHashMap<>();
  if (results.size() > 0) {
    // Avoid repeated hash-map look-ups while building: sort the results once and
    // accumulate into an indexed list with one slot per time frame.
    results.sort();
    final int firstFrame = results.getFirstFrame();
    final int lastFrame = results.getLastFrame();
    // Create one empty list per frame in [firstFrame, lastFrame]
    final ArrayList<ArrayList<Coordinate>> perFrame = new ArrayList<>(lastFrame - firstFrame + 1);
    for (int frame = firstFrame; frame <= lastFrame; frame++) {
      perFrame.add(new ArrayList<>());
    }
    // Add each result to the list(s) for the frame(s) it covers
    results.forEach((PeakResultProcedure) result -> {
      float x = result.getXPosition();
      float y = result.getYPosition();
      float z = result.getZPosition();
      if (integerCoordinates) {
        x = (int) x;
        y = (int) y;
        z = (int) z;
      }
      final int start = getStartFrame(result, coordinateMethod);
      final int end = getEndFrame(result, coordinateMethod);
      for (int frame = start; frame <= end; frame++) {
        perFrame.get(frame - firstFrame).add(new PeakResultPoint(frame, x, y, z, result));
      }
    });
    // Transfer the indexed lists into the keyed map
    for (int frame = firstFrame; frame <= lastFrame; frame++) {
      map.put(frame, perFrame.get(frame - firstFrame));
    }
  }
  return map;
}
Example usage of uk.ac.sussex.gdsc.smlm.results.MemoryPeakResults in the GDSC-SMLM project by aherbert.
Taken from the class ResultsMatchCalculator, method getIds.
/**
 * Extract the set of unique ids from the results.
 *
 * @param results the results
 * @return the unique ids
 */
private static TIntHashSet getIds(MemoryPeakResults results) {
  // Pre-size to the result count to avoid rehashing while adding
  final TIntHashSet uniqueIds = new TIntHashSet(results.size());
  results.forEach((PeakResultProcedure) peakResult -> uniqueIds.add(peakResult.getId()));
  return uniqueIds;
}
Example usage of uk.ac.sussex.gdsc.smlm.results.MemoryPeakResults in the GDSC-SMLM project by aherbert.
Taken from the class TrackPopulationAnalysis, method run.
/**
 * Run the Track Population Analysis plugin.
 *
 * <p>Loads traced localisation datasets from memory, converts them to tracks, computes local
 * track features over a sliding window and assigns each point to a population — either using
 * the localisation category or by fitting a multivariate Gaussian mixture model. Histograms
 * of the features (coloured by population) and model/track tables are displayed.
 *
 * @param arg the plugin argument (passed to the usage tracker; otherwise unused)
 */
@Override
public void run(String arg) {
SmlmUsageTracker.recordPlugin(this.getClass(), arg);
if (MemoryPeakResults.isMemoryEmpty()) {
IJ.error(TITLE, "No localisations in memory");
return;
}
settings = Settings.load();
// Saved by reference so just save now
settings.save();
// Read in multiple traced datasets
// All datasets must have the same pixel pitch and exposure time
// Get parameters
// Convert datasets to tracks
// For each track compute the 4 local track features using the configured window
//
// Optional:
// Fit a multi-variate Gaussian mixture model to the data
// (using the configured number of components/populations)
// Assign each point in the track using the model.
// Smooth the assignments.
//
// The alternative is to use the localisation category to assign populations.
//
// Plot histograms of each track parameter, coloured by component
final List<MemoryPeakResults> combinedResults = new LocalList<>();
if (!showInputDialog(combinedResults)) {
return;
}
final boolean hasCategory = showHasCategoryDialog(combinedResults);
if (!showDialog(hasCategory)) {
return;
}
ImageJUtils.log(TITLE + "...");
final List<Trace> tracks = getTracks(combinedResults, settings.window, settings.minTrackLength);
if (tracks.isEmpty()) {
IJ.error(TITLE, "No tracks. Please check the input data and min track length setting.");
return;
}
// Calibration is read from the first dataset only; the datasets are required to match
// (presumably enforced by the input dialog — TODO confirm)
final Calibration cal = combinedResults.get(0).getCalibration();
final CalibrationReader cr = new CalibrationReader(cal);
// Use micrometer / second
final TypeConverter<DistanceUnit> distanceConverter = cr.getDistanceConverter(DistanceUnit.UM);
// NOTE(review): divides by 1000, i.e. assumes the exposure time is stored in
// milliseconds and converts it to seconds — confirm against the calibration source
final double exposureTime = cr.getExposureTime() / 1000.0;
final Pair<int[], double[][]> trackData = extractTrackData(tracks, distanceConverter, exposureTime, hasCategory);
final double[][] data = trackData.getValue();
// Histogram the raw data.
final Array2DRowRealMatrix raw = new Array2DRowRealMatrix(data, false);
final WindowOrganiser wo = new WindowOrganiser();
// Store the histogram data for plotting the components
final double[][] columns = new double[FEATURE_NAMES.length][];
final double[][] limits = new double[FEATURE_NAMES.length][];
// Get column data
for (int i = 0; i < FEATURE_NAMES.length; i++) {
columns[i] = raw.getColumn(i);
if (i == FEATURE_D) {
// Plot using a logarithmic scale
SimpleArrayUtils.apply(columns[i], Math::log10);
}
limits[i] = MathUtils.limits(columns[i]);
}
// Compute histogram bins
final int[] bins = new int[FEATURE_NAMES.length];
if (settings.histogramBins > 0) {
// Fixed user-configured bin count for every feature
Arrays.fill(bins, settings.histogramBins);
} else {
// Auto bin count per feature using the Freedman-Diaconis rule
for (int i = 0; i < FEATURE_NAMES.length; i++) {
bins[i] = HistogramPlot.getBins(StoredData.create(columns[i]), BinMethod.FD);
}
// Use the maximum so all histograms look the same
Arrays.fill(bins, MathUtils.max(bins));
}
// Compute plots
final Plot[] plots = new Plot[FEATURE_NAMES.length];
for (int i = 0; i < FEATURE_NAMES.length; i++) {
final double[][] hist = HistogramPlot.calcHistogram(columns[i], limits[i][0], limits[i][1], bins[i]);
plots[i] = new Plot(TITLE + " " + FEATURE_NAMES[i], getFeatureLabel(i, i == FEATURE_D), "Frequency");
plots[i].addPoints(hist[0], hist[1], Plot.BAR);
ImageJUtils.display(plots[i].getTitle(), plots[i], ImageJUtils.NO_TO_FRONT, wo);
}
wo.tile();
// The component for each data point
int[] component;
// The number of components
int numComponents;
// Data used to fit the Gaussian mixture model
double[][] fitData;
// The fitted model
MixtureMultivariateGaussianDistribution model;
if (hasCategory) {
// Use the category as the component.
// No fit data and no output model
fitData = null;
model = null;
// The component is stored at the end of the raw track data.
final int end = data[0].length - 1;
component = Arrays.stream(data).mapToInt(d -> (int) d[end]).toArray();
numComponents = MathUtils.max(component) + 1;
// In the EM algorithm the probability of each data point is computed and normalised to
// sum to 1. The normalised probabilities are averaged to create the weights.
// Note the probability of each data point uses the previous weight and the algorithm
// iterates.
// This is not a fitted model but the input model so use
// zero weights to indicate no fitting was performed.
final double[] weights = new double[numComponents];
// Remove the trailing component to show the 'model' in a table.
createModelTable(Arrays.stream(data).map(d -> Arrays.copyOf(d, end)).toArray(double[][]::new), weights, component);
} else {
// Multivariate Gaussian mixture EM
// Provide option to not use the anomalous exponent in the population mix.
int sortDimension = SORT_DIMENSION;
if (settings.ignoreAlpha) {
// Remove index 0. This shifts the sort dimension.
sortDimension--;
fitData = Arrays.stream(data).map(d -> Arrays.copyOfRange(d, 1, d.length)).toArray(double[][]::new);
} else {
fitData = SimpleArrayUtils.deepCopy(data);
}
final MultivariateGaussianMixtureExpectationMaximization mixed = fitGaussianMixture(fitData, sortDimension);
if (mixed == null) {
IJ.error(TITLE, "Failed to fit a mixture model");
return;
}
model = sortComponents(mixed.getFittedModel(), sortDimension);
// For the best model, assign to the most likely population.
component = assignData(fitData, model);
// Table of the final model using the original data (i.e. not normalised)
final double[] weights = model.getWeights();
numComponents = weights.length;
createModelTable(data, weights, component);
}
// Output coloured histograms of the populations.
final LUT lut = LutHelper.createLut(settings.lutIndex);
IntFunction<Color> colourMap;
// If the LUT maps 0 to black, skip that entry so every component is visible
if (LutHelper.getColour(lut, 0).equals(Color.BLACK)) {
colourMap = i -> LutHelper.getNonZeroColour(lut, i, 0, numComponents - 1);
} else {
colourMap = i -> LutHelper.getColour(lut, i, 0, numComponents - 1);
}
for (int i = 0; i < FEATURE_NAMES.length; i++) {
// Extract the data for each component
final double[] col = columns[i];
final Plot plot = plots[i];
for (int n = 0; n < numComponents; n++) {
final StoredData feature = new StoredData();
for (int j = 0; j < component.length; j++) {
if (component[j] == n) {
feature.add(col[j]);
}
}
if (feature.size() == 0) {
continue;
}
// Overlay this component's histogram on the existing feature plot
final double[][] hist = HistogramPlot.calcHistogram(feature.values(), limits[i][0], limits[i][1], bins[i]);
// Colour the points
plot.setColor(colourMap.apply(n));
plot.addPoints(hist[0], hist[1], Plot.BAR);
}
plot.updateImage();
}
createTrackDataTable(tracks, trackData, fitData, model, component, cal, colourMap);
// Analysis.
// Assign the original localisations to their track component.
// Q. What about the start/end not covered by the window?
// Save tracks as a dataset labelled with the sub-track ID?
// Output for the bound component and free components track parameters.
// Compute dwell times.
// Other ...
// Track analysis plugin:
// Extract all continuous segments of the same component.
// Produce MSD plot with error bars.
// Fit using FBM model.
}
Example usage of uk.ac.sussex.gdsc.smlm.results.MemoryPeakResults in the GDSC-SMLM project by aherbert.
Taken from the class UpdateResultsBounds, method run.
/**
 * Run the Update Results Bounds plugin.
 *
 * <p>Prompts for an input dataset, loads it from memory and, if the user completes the
 * second dialog, reports the updated dataset name in the ImageJ status bar.
 *
 * @param arg the plugin argument (passed to the usage tracker; otherwise unused)
 */
@Override
public void run(String arg) {
  SmlmUsageTracker.recordPlugin(this.getClass(), arg);
  if (!showInputDialog()) {
    return;
  }
  final MemoryPeakResults loaded =
      ResultsManager.loadInputResults(settings.inputOption, true, null, null);
  if (MemoryPeakResults.isEmpty(loaded)) {
    IJ.error(TITLE, "No results could be loaded");
  } else if (showDialog(loaded)) {
    IJ.showStatus("Updated " + loaded.getName());
  }
}
Example usage of uk.ac.sussex.gdsc.smlm.results.MemoryPeakResults in the GDSC-SMLM project by aherbert.
Taken from the class ClassificationMatchCalculator, method runCompareClassifications.
/**
 * Compare the classifications (id and/or category) of localisations that occupy matching
 * positions in the two results sets.
 *
 * <p>Localisations are matched in 3D frame-by-frame within the configured match distance.
 * Each matched localisation's id/category pair is mapped to a cluster label and the agreement
 * between the two labellings is scored using the Rand index and adjusted Rand index. A summary
 * row is appended to a shared results table.
 *
 * @param results1 the first results set
 * @param results2 the second results set
 */
private void runCompareClassifications(MemoryPeakResults results1, MemoryPeakResults results2) {
final List<PointPair> allMatches = new LinkedList<>();
// Optionally exclude results which do not have an id and/or category
Predicate<PeakResult> test = settings.useId == ClassAnalysis.IGNORE_ZERO ? r -> r.getId() != 0 : null;
if (settings.useCategory == ClassAnalysis.IGNORE_ZERO) {
final Predicate<PeakResult> test2 = r -> r.getCategory() != 0;
test = test == null ? test2 : test.and(test2);
} else if (test == null) {
// No filtering: accept everything
test = r -> true;
}
// Divide the results into time points
final TIntObjectHashMap<List<PeakResultPoint>> coordinates1 = getCoordinates(results1, test);
final TIntObjectHashMap<List<PeakResultPoint>> coordinates2 = getCoordinates(results2, test);
// Process each time point, accumulating matches and the candidate counts
int n1 = 0;
int n2 = 0;
for (final int t : getTimepoints(coordinates1, coordinates2)) {
final Coordinate[] c1 = getCoordinates(coordinates1, t);
final Coordinate[] c2 = getCoordinates(coordinates2, t);
n1 += c1.length;
n2 += c2.length;
final List<PointPair> matches = new LinkedList<>();
MatchCalculator.analyseResults3D(c1, c2, settings.matchDistance, null, null, null, matches);
allMatches.addAll(matches);
}
if (allMatches.isEmpty()) {
IJ.error(TITLE, "No localisation matches between the two results sets");
return;
}
// Get the unique Ids and Categories in the matches.
final Mapper ids = getMapper(allMatches, PeakResult::getId, settings.useId);
final Mapper cats = getMapper(allMatches, PeakResult::getCategory, settings.useCategory);
// Map id/category to an index = stride * cat + id
final int stride = ids.size();
// Any integer is allowed as an index (2^32 distinct int values), so only fail above that
if ((long) stride * cats.size() > 1L << 32) {
IJ.error(TITLE, "Too many combinations of id and category to assign unique labels");
return;
}
// Extract indices: one label per match from each results set
final int[] set1 = new int[allMatches.size()];
final int[] set2 = new int[allMatches.size()];
int i = 0;
for (final PointPair r : allMatches) {
set1[i] = toIndex(stride, ids, cats, ((PeakResultPoint) r.getPoint1()).getPeakResult());
set2[i] = toIndex(stride, ids, cats, ((PeakResultPoint) r.getPoint2()).getPeakResult());
i++;
}
// Renumber the labels to compact sequential values for the Rand index computation
final Resequencer re = new Resequencer();
re.setCacheMap(true);
re.renumber(set1);
re.renumber(set2);
// Compare
final RandIndex r = new RandIndex().compute(set1, set2);
final TextWindow resultsWindow = ImageJUtils.refresh(resultsWindowRef, () -> new TextWindow(TITLE + " Results", "Results1\tResults2\tID\tCategory\tn1\tc1\tn2\tc2\tMatched\tRand Index\tAdjusted RI", "", 900, 300));
try (BufferedTextWindow bw = new BufferedTextWindow(resultsWindow)) {
final StringBuilder sb = new StringBuilder(2048);
sb.append(results1.getName()).append('\t');
sb.append(results2.getName()).append('\t');
sb.append(ANALYSIS_OPTION[settings.useId.ordinal()]).append('\t');
sb.append(ANALYSIS_OPTION[settings.useCategory.ordinal()]).append('\t');
sb.append(n1).append('\t');
sb.append(MathUtils.max(set1) + 1).append('\t');
sb.append(n2).append('\t');
sb.append(MathUtils.max(set2) + 1).append('\t');
sb.append(set1.length).append('\t');
sb.append(MathUtils.rounded(r.getRandIndex())).append('\t');
sb.append(MathUtils.rounded(r.getAdjustedRandIndex())).append('\t');
bw.append(sb.toString());
}
}
Aggregations