Use of gdsc.smlm.results.PeakResult in project GDSC-SMLM by aherbert.
The class CreateData, method saveFixedAndMoving:
private void saveFixedAndMoving(MemoryPeakResults results, String title) {
if (simpleMode || benchmarkMode || spotMode)
return;
if (settings.diffusionRate <= 0 || settings.fixedFraction >= 1)
return;
MemoryPeakResults fixedResults = copyMemoryPeakResults("Fixed");
MemoryPeakResults movingResults = copyMemoryPeakResults("Moving");
List<PeakResult> peakResults = results.getResults();
// Sort using the ID
Collections.sort(peakResults, new Comparator<PeakResult>() {
public int compare(PeakResult o1, PeakResult o2) {
return o1.getId() - o2.getId();
}
});
int currentId = -1;
MemoryPeakResults currentResults = movingResults;
for (PeakResult p : peakResults) {
// The ID was stored in the result's parameter standard deviation array
if (currentId != p.getId()) {
currentId = p.getId();
currentResults = (movingMolecules.contains(currentId)) ? movingResults : fixedResults;
}
currentResults.add(p);
}
movingResults.end();
fixedResults.end();
// Restore the time ordering of the input results
results.sort();
}
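Below is a minimal sketch of the same sort-then-partition pattern in isolation, using only the gdsc.smlm.results calls seen above; the class name SplitByIdExample, the helper splitById and the movingIds set are hypothetical stand-ins for the plugin's own fields.
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.Set;
import gdsc.smlm.results.MemoryPeakResults;
import gdsc.smlm.results.PeakResult;

class SplitByIdExample {
  // Partition results into 'fixed' and 'moving' sets keyed on the molecule ID
  static void splitById(MemoryPeakResults results, Set<Integer> movingIds,
      MemoryPeakResults fixed, MemoryPeakResults moving) {
    List<PeakResult> list = results.getResults();
    // Group results from the same molecule together
    Collections.sort(list, new Comparator<PeakResult>() {
      public int compare(PeakResult o1, PeakResult o2) {
        // Compare by molecule ID (Integer.compare is overflow-safe)
        return Integer.compare(o1.getId(), o2.getId());
      }
    });
    for (PeakResult p : list)
      (movingIds.contains(p.getId()) ? moving : fixed).add(p);
    moving.end();
    fixed.end();
    // Restore the time ordering of the input results
    results.sort();
  }
}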
Use of gdsc.smlm.results.PeakResult in project GDSC-SMLM by aherbert.
The class CreateData, method showSummary:
private double showSummary(List<? extends FluorophoreSequenceModel> fluorophores, List<LocalisationModel> localisations) {
IJ.showStatus("Calculating statistics ...");
createSummaryTable();
Statistics[] stats = new Statistics[NAMES.length];
for (int i = 0; i < stats.length; i++) {
stats[i] = (settings.showHistograms || alwaysRemoveOutliers[i]) ? new StoredDataStatistics() : new Statistics();
}
// Find the largest timepoint
ImagePlus outputImp = WindowManager.getImage(benchmarkImageId);
int nFrames;
if (outputImp == null) {
sortLocalisationsByTime(localisations);
nFrames = localisations.get(localisations.size() - 1).getTime();
} else {
nFrames = outputImp.getStackSize();
}
int[] countHistogram = new int[nFrames + 1];
// Use the localisations that were drawn to create the sampled on/off times
rebuildNeighbours(localisations);
// Assume that there is at least one localisation
LocalisationModel first = localisations.get(0);
// The current localisation
int currentId = first.getId();
// The last time this localisation was on
int lastT = first.getTime();
// Number of blinks
int blinks = 0;
// On-time of current pulse
int currentT = 0;
double signal = 0;
final double centreOffset = settings.size * 0.5;
// Used to convert the sampled times in frames into seconds
final double framesPerSecond = 1000.0 / settings.exposureTime;
final double gain = (settings.getTotalGain() > 0) ? settings.getTotalGain() : 1;
for (LocalisationModel l : localisations) {
if (l.getData() == null)
System.out.println("No localisation data. This should not happen!");
final double noise = (l.getData() != null) ? l.getData()[1] : 1;
final double intensity = (l.getData() != null) ? l.getData()[4] : l.getIntensity();
final double intensityInPhotons = intensity / gain;
// Q. What if the noise is zero, i.e. no background photon / read noise?
// Just ignore it for now.
final double snr = intensity / noise;
stats[SIGNAL].add(intensityInPhotons);
stats[NOISE].add(noise / gain);
if (noise != 0)
stats[SNR].add(snr);
//if (l.isContinuous())
if (l.getNext() != null && l.getPrevious() != null) {
stats[SIGNAL_CONTINUOUS].add(intensityInPhotons);
if (noise != 0)
stats[SNR_CONTINUOUS].add(snr);
}
int id = l.getId();
// Check if this is a new fluorophore
if (currentId != id) {
// Add previous fluorophore
stats[SAMPLED_BLINKS].add(blinks);
stats[SAMPLED_T_ON].add(currentT / framesPerSecond);
stats[TOTAL_SIGNAL].add(signal);
// Reset
blinks = 0;
currentT = 1;
currentId = id;
signal = intensityInPhotons;
} else {
signal += intensityInPhotons;
// Check if the current fluorophore pulse is broken (i.e. a blink)
if (l.getTime() - 1 > lastT) {
blinks++;
stats[SAMPLED_T_ON].add(currentT / framesPerSecond);
currentT = 1;
stats[SAMPLED_T_OFF].add(((l.getTime() - 1) - lastT) / framesPerSecond);
} else {
// Continuous on-time
currentT++;
}
}
lastT = l.getTime();
countHistogram[lastT]++;
stats[X].add((l.getX() - centreOffset) * settings.pixelPitch);
stats[Y].add((l.getY() - centreOffset) * settings.pixelPitch);
stats[Z].add(l.getZ() * settings.pixelPitch);
}
// Final fluorophore
stats[SAMPLED_BLINKS].add(blinks);
stats[SAMPLED_T_ON].add(currentT / framesPerSecond);
stats[TOTAL_SIGNAL].add(signal);
// Samples per frame
for (int t = 1; t < countHistogram.length; t++) stats[SAMPLES].add(countHistogram[t]);
if (fluorophores != null) {
for (FluorophoreSequenceModel f : fluorophores) {
stats[BLINKS].add(f.getNumberOfBlinks());
// On-time
for (double t : f.getOnTimes()) stats[T_ON].add(t);
// Off-time
for (double t : f.getOffTimes()) stats[T_OFF].add(t);
}
} else {
// show no blinks
stats[BLINKS].add(0);
stats[T_ON].add(1);
//stats[T_OFF].add(0);
}
if (results != null) {
final boolean emCCD = (settings.getEmGain() > 1);
// Convert depth-of-field to pixels
final double depth = settings.depthOfField / settings.pixelPitch;
for (PeakResult r : results.getResults()) {
final double precision = r.getPrecision(settings.pixelPitch, gain, emCCD);
stats[PRECISION].add(precision);
// The error stores the z-depth in pixels
if (Math.abs(r.error) < depth)
stats[PRECISION_IN_FOCUS].add(precision);
stats[WIDTH].add(r.getSD());
}
// Compute density per frame. Multithread for speed
if (settings.densityRadius > 0) {
IJ.showStatus("Calculating density ...");
ExecutorService threadPool = Executors.newFixedThreadPool(Prefs.getThreads());
List<Future<?>> futures = new LinkedList<Future<?>>();
final ArrayList<float[]> coords = new ArrayList<float[]>();
int t = results.getHead().getFrame();
final Statistics densityStats = stats[DENSITY];
final float radius = (float) (settings.densityRadius * getHWHM());
final Rectangle bounds = results.getBounds();
currentIndex = 0;
finalIndex = results.getTail().getFrame();
// Store the density for each result.
int[] allDensity = new int[results.size()];
int allIndex = 0;
for (PeakResult r : results.getResults()) {
if (t != r.getFrame()) {
allIndex += runDensityCalculation(threadPool, futures, coords, densityStats, radius, bounds, allDensity, allIndex);
}
coords.add(new float[] { r.getXPosition(), r.getYPosition() });
t = r.getFrame();
}
runDensityCalculation(threadPool, futures, coords, densityStats, radius, bounds, allDensity, allIndex);
Utils.waitForCompletion(futures);
threadPool.shutdownNow();
threadPool = null;
IJ.showProgress(1);
// Split results into singles (density = 0) and clustered (density > 0)
MemoryPeakResults singles = copyMemoryPeakResults("No Density");
MemoryPeakResults clustered = copyMemoryPeakResults("Density");
int i = 0;
for (PeakResult r : results.getResults()) {
// Store density in the original value field
r.origValue = allDensity[i];
if (allDensity[i++] == 0)
singles.add(r);
else
clustered.add(r);
}
singles.end();
clustered.end();
}
}
StringBuilder sb = new StringBuilder();
sb.append(datasetNumber).append("\t");
sb.append((fluorophores == null) ? localisations.size() : fluorophores.size()).append("\t");
sb.append(stats[SAMPLED_BLINKS].getN() + (int) stats[SAMPLED_BLINKS].getSum()).append("\t");
sb.append(localisations.size()).append("\t");
sb.append(nFrames).append("\t");
sb.append(Utils.rounded(areaInUm)).append("\t");
sb.append(Utils.rounded(localisations.size() / (areaInUm * nFrames), 4)).append("\t");
sb.append(Utils.rounded(getHWHM(), 4)).append("\t");
double s = getPsfSD();
sb.append(Utils.rounded(s, 4)).append("\t");
s *= settings.pixelPitch;
final double sa = PSFCalculator.squarePixelAdjustment(s, settings.pixelPitch) / settings.pixelPitch;
sb.append(Utils.rounded(sa, 4)).append("\t");
// Width not valid for the Image PSF
int nStats = (imagePSF) ? stats.length - 1 : stats.length;
for (int i = 0; i < nStats; i++) {
double centre = (alwaysRemoveOutliers[i]) ? ((StoredDataStatistics) stats[i]).getStatistics().getPercentile(50) : stats[i].getMean();
sb.append(Utils.rounded(centre, 4)).append("\t");
}
if (java.awt.GraphicsEnvironment.isHeadless()) {
IJ.log(sb.toString());
return stats[SIGNAL].getMean();
} else {
summaryTable.append(sb.toString());
}
// Show histograms
if (settings.showHistograms) {
IJ.showStatus("Calculating histograms ...");
boolean[] chosenHistograms = getChoosenHistograms();
WindowOrganiser wo = new WindowOrganiser();
boolean requireRetile = false;
for (int i = 0; i < NAMES.length; i++) {
if (chosenHistograms[i]) {
wo.add(Utils.showHistogram(TITLE, (StoredDataStatistics) stats[i], NAMES[i], (integerDisplay[i]) ? 1 : 0, (settings.removeOutliers || alwaysRemoveOutliers[i]) ? 2 : 0, settings.histogramBins * ((integerDisplay[i]) ? 100 : 1)));
requireRetile = requireRetile || Utils.isNewWindow();
}
}
wo.tile();
}
IJ.showStatus("");
return stats[SIGNAL].getMean();
}
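A minimal sketch of the precision and width accumulation from the summary pass above, assuming a MemoryPeakResults named results and the same Statistics accumulator; the calibration values are placeholders, not the plugin's settings.
// Hypothetical calibration values; in the plugin these come from 'settings'
final double pixelPitch = 100; // nm per pixel
final double gain = 37.7; // total gain (counts per photon)
final boolean emCCD = true; // EM-gain camera model
Statistics precisionStats = new Statistics();
Statistics widthStats = new Statistics();
for (PeakResult r : results.getResults()) {
  precisionStats.add(r.getPrecision(pixelPitch, gain, emCCD));
  widthStats.add(r.getSD());
}
System.out.printf("Precision = %.1f nm (n=%d), width = %.2f px%n",
    precisionStats.getMean(), precisionStats.getN(), widthStats.getMean());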
Use of gdsc.smlm.results.PeakResult in project GDSC-SMLM by aherbert.
The class CreateData, method setNoise:
/**
* Sets the noise in the results if missing.
*
* @param results
* the results
*/
private void setNoise(MemoryPeakResults results, ImagePlus imp) {
// Loaded results may not have noise: if any result already has a noise estimate,
// assume noise is set for all and return
for (PeakResult r : results.getResults()) {
if (r.noise != 0)
return;
}
// Compute noise per frame
ImageStack stack = imp.getImageStack();
final int width = stack.getWidth();
final int height = stack.getHeight();
final IJImageSource source = new IJImageSource(imp);
final float[] noise = new float[source.getFrames() + 1];
for (int slice = 1; slice < noise.length; slice++) {
stack.getPixels(slice);
float[] data = source.next();
// Use the trimmed method as there may be a lot of spots in the frame
noise[slice] = (float) FitWorker.estimateNoise(data, width, height, NoiseEstimator.Method.QUICK_RESIDUALS_LEAST_TRIMMED_OF_SQUARES);
}
Statistics stats = new Statistics(Arrays.copyOfRange(noise, 1, noise.length));
System.out.printf("Noise = %.3f +/- %.3f (%d)\n", stats.getMean(), stats.getStandardDeviation(), stats.getN());
for (PeakResult p : results.getResults()) {
if (p.getFrame() < noise.length)
p.noise = noise[p.getFrame()];
}
}
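A minimal sketch of the same back-fill idea using a single global estimate instead of per-frame values; globalNoise is a hypothetical number standing in for the FitWorker.estimateNoise output.
// Fill in a noise value only where it is missing (noise == 0)
final float globalNoise = 4.8f; // hypothetical estimate in counts
for (PeakResult p : results.getResults()) {
  if (p.noise == 0)
    p.noise = globalNoise;
}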
Use of gdsc.smlm.results.PeakResult in project GDSC-SMLM by aherbert.
The class ResultsManager, method run:
/*
* (non-Javadoc)
*
* @see ij.plugin.PlugIn#run(java.lang.String)
*/
public void run(String arg) {
SMLMUsageTracker.recordPlugin(this.getClass(), arg);
if (arg != null && arg.startsWith("clear")) {
Collection<MemoryPeakResults> allResults;
boolean removeAll = false;
if (arg.contains("multi")) {
MultiDialog md = new MultiDialog(TITLE, new MultiDialog.MemoryResultsItems());
md.addSelected(selected);
md.showDialog();
if (md.wasCanceled())
return;
selected = md.getSelectedResults();
if (selected.isEmpty())
return;
allResults = new ArrayList<MemoryPeakResults>(selected.size());
for (String name : selected) {
MemoryPeakResults r = MemoryPeakResults.getResults(name);
if (r != null)
allResults.add(r);
}
} else {
removeAll = true;
allResults = MemoryPeakResults.getAllResults();
}
if (allResults.isEmpty())
return;
long memorySize = 0;
int size = 0;
for (MemoryPeakResults results : allResults) {
memorySize += MemoryPeakResults.estimateMemorySize(results.getResults());
size += results.size();
}
String memory = MemoryPeakResults.memorySizeString(memorySize);
String count = Utils.pleural(size, "result");
String sets = Utils.pleural(allResults.size(), "set");
GenericDialog gd = new GenericDialog(TITLE);
gd.addMessage(String.format("Do you want to remove %s from memory (%s, %s)?", count, sets, memory));
gd.enableYesNoCancel();
gd.showDialog();
if (!gd.wasOKed())
return;
if (removeAll)
MemoryPeakResults.clearMemory();
else {
for (MemoryPeakResults results : allResults) MemoryPeakResults.removeResults(results.getName());
}
SummariseResults.clearSummaryTable();
IJ.log(String.format("Cleared %s (%s, %s)", count, sets, memory));
return;
}
if (!showDialog())
return;
MemoryPeakResults results = loadResults(inputOption);
if (results == null || results.size() == 0) {
IJ.error(TITLE, "No results could be loaded");
IJ.showStatus("");
return;
}
results = cropToRoi(results);
if (results.size() == 0) {
IJ.error(TITLE, "No results within the crop region");
return;
}
if (resultsSettings.resultsInMemory && fileInput)
MemoryPeakResults.addResults(results);
IJ.showStatus("Processing outputs ...");
Rectangle bounds = results.getBounds(true);
boolean showDeviations = resultsSettings.showDeviations && canShowDeviations(results);
boolean showEndFrame = canShowEndFrame(results);
boolean showId = canShowId(results);
// Display the configured output
PeakResultsList outputList = new PeakResultsList();
outputList.copySettings(results);
//String title = results.getSource();
//if (title == null || title.length() == 0)
// output.setSource(TITLE);
addTableResults(results, outputList, showDeviations, showEndFrame);
addImageResults(outputList, results.getName(), bounds, results.getNmPerPixel(), results.getGain());
addFileResults(outputList, showDeviations, showEndFrame, showId);
// Reduce to single object for speed
PeakResults output = (outputList.numberOfOutputs() == 1) ? outputList.toArray()[0] : outputList;
output.begin();
// Process in batches to provide progress
List<PeakResult> list = results.getResults();
int progress = 0;
int totalProgress = list.size();
int stepProgress = Utils.getProgressInterval(totalProgress);
TurboList<PeakResult> batch = new TurboList<PeakResult>(stepProgress);
for (PeakResult result : list) {
if (progress % stepProgress == 0) {
IJ.showProgress(progress, totalProgress);
}
progress++;
batch.addf(result);
if (batch.size() == stepProgress) {
output.addAll(batch);
batch.clearf();
if (isInterrupted())
break;
}
}
// Flush any remaining partial batch before closing the output
output.addAll(batch);
IJ.showProgress(1);
output.end();
IJ.showStatus(String.format("Processed %d result%s", results.size(), (results.size() > 1) ? "s" : ""));
}
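A minimal sketch of the batched-output loop on its own, assuming results (MemoryPeakResults) and a configured PeakResults output as above; note the final partial batch is flushed before end().
List<PeakResult> list = results.getResults();
final int interval = Utils.getProgressInterval(list.size());
TurboList<PeakResult> batch = new TurboList<PeakResult>(interval);
output.begin();
int done = 0;
for (PeakResult r : list) {
  if (done % interval == 0)
    IJ.showProgress(done, list.size());
  done++;
  batch.addf(r);
  if (batch.size() == interval) {
    output.addAll(batch); // flush a full batch to the output
    batch.clearf();
  }
}
// Flush the remaining partial batch
output.addAll(batch);
output.end();
IJ.showProgress(1);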
Use of gdsc.smlm.results.PeakResult in project GDSC-SMLM by aherbert.
The class ResultsMatchCalculator, method compareCoordinates:
private void compareCoordinates(MemoryPeakResults results1, MemoryPeakResults results2, double dThreshold, int increments, double delta) {
boolean requirePairs = showPairs || saveClassifications;
FilePeakResults fileResults = createFilePeakResults(results2);
List<PointPair> allMatches = new LinkedList<PointPair>();
List<PointPair> pairs = (requirePairs) ? new LinkedList<PointPair>() : null;
List<PeakResult> actualPoints = results1.getResults();
List<PeakResult> predictedPoints = results2.getResults();
double maxDistance = dThreshold + increments * delta;
// Old implementation
//// Process each time point
//for (Integer t : getTimepoints(actualPoints, predictedPoints))
//{
// Coordinate[] actual = getCoordinates(actualPoints, t);
// Coordinate[] predicted = getCoordinates(predictedPoints, t);
// Divide the results into time points
TIntObjectHashMap<ArrayList<Coordinate>> actualCoordinates = getCoordinates(actualPoints);
TIntObjectHashMap<ArrayList<Coordinate>> predictedCoordinates = getCoordinates(predictedPoints);
int n1 = 0;
int n2 = 0;
// Process each time point
for (Integer t : getTimepoints(actualCoordinates, predictedCoordinates)) {
Coordinate[] actual = getCoordinates(actualCoordinates, t);
Coordinate[] predicted = getCoordinates(predictedCoordinates, t);
List<Coordinate> TP = null;
List<Coordinate> FP = null;
List<Coordinate> FN = null;
List<PointPair> matches = new LinkedList<PointPair>();
if (requirePairs) {
FP = new LinkedList<Coordinate>();
FN = new LinkedList<Coordinate>();
}
MatchCalculator.analyseResults2D(actual, predicted, maxDistance, TP, FP, FN, matches);
// Aggregate
n1 += actual.length;
n2 += predicted.length;
allMatches.addAll(matches);
if (showPairs) {
pairs.addAll(matches);
for (Coordinate c : FN) pairs.add(new PointPair(c, null));
for (Coordinate c : FP) pairs.add(new PointPair(null, c));
}
if (fileResults != null) {
// Matches are marked in the original value with 1 for true, 0 for false
for (PointPair pair : matches) {
PeakResult p = ((PeakResultPoint) pair.getPoint2()).peakResult;
fileResults.add(p.getFrame(), p.origX, p.origY, 1, p.error, p.noise, p.params, null);
}
for (Coordinate c : FP) {
PeakResult p = ((PeakResultPoint) c).peakResult;
fileResults.add(p.getFrame(), p.origX, p.origY, 0, p.error, p.noise, p.params, null);
}
}
}
if (fileResults != null)
fileResults.end();
// XXX : DEBUGGING : Output for signal correlation and fitting analysis
/*
* try
* {
* OutputStreamWriter o = new OutputStreamWriter(new FileOutputStream("/tmp/ResultsMatchCalculator.txt"));
* FilePeakResults r1 = new FilePeakResults("/tmp/" + results1.getName() + ".txt", false);
* FilePeakResults r2 = new FilePeakResults("/tmp/" + results2.getName() + ".txt", false);
* r1.begin();
* r2.begin();
* //OutputStreamWriter o2 = new OutputStreamWriter(new FileOutputStream("/tmp/"+results1.getName()+".txt"));
* //OutputStreamWriter o3 = new OutputStreamWriter(new FileOutputStream("/tmp/"+results2.getName()+".txt"));
* for (PointPair pair : allMatches)
* {
* PeakResult p1 = ((PeakResultPoint) pair.getPoint1()).peakResult;
* PeakResult p2 = ((PeakResultPoint) pair.getPoint2()).peakResult;
* r1.add(p1);
* r2.add(p2);
* o.write(Float.toString(p1.getSignal()));
* o.write('\t');
* o.write(Float.toString(p2.getSignal()));
* o.write('\n');
* }
* o.close();
* r1.end();
* r2.end();
* }
* catch (Exception e)
* {
* e.printStackTrace();
* }
*/
boolean doIdAnalysis1 = (idAnalysis) ? haveIds(results1) : false;
boolean doIdAnalysis2 = (idAnalysis) ? haveIds(results2) : false;
boolean doIdAnalysis = doIdAnalysis1 || doIdAnalysis2;
// Create output
if (!java.awt.GraphicsEnvironment.isHeadless()) {
String header = createResultsHeader(doIdAnalysis);
Utils.refreshHeadings(resultsWindow, header, true);
if (showTable && (resultsWindow == null || !resultsWindow.isShowing())) {
resultsWindow = new TextWindow(TITLE + " Results", header, "", 900, 300);
}
if (showPairs) {
if (pairsWindow == null || !pairsWindow.isShowing()) {
pairsWindow = new TextWindow(TITLE + " Pairs", createPairsHeader(pairs), "", 900, 300);
if (resultsWindow != null) {
Point p = resultsWindow.getLocation();
p.y += resultsWindow.getHeight();
pairsWindow.setLocation(p);
}
pairPainter = new ImageROIPainter(pairsWindow.getTextPanel(), "", this);
}
pairsWindow.getTextPanel().clear();
String title = "Results 1";
if (results1.getSource() != null && results1.getSource().getOriginal().getName().length() > 0)
title = results1.getSource().getOriginal().getName();
pairPainter.setTitle(title);
IJ.showStatus("Writing pairs table");
IJ.showProgress(0);
int c = 0;
final int total = pairs.size();
final int step = Utils.getProgressInterval(total);
final ArrayList<String> list = new ArrayList<String>(total);
boolean flush = true;
for (PointPair pair : pairs) {
if (++c % step == 0)
IJ.showProgress(c, total);
list.add(addPairResult(pair));
if (flush && c == 9) {
pairsWindow.getTextPanel().append(list);
list.clear();
flush = false;
}
}
pairsWindow.getTextPanel().append(list);
IJ.showProgress(1);
}
} else {
if (writeHeader && showTable) {
writeHeader = false;
IJ.log(createResultsHeader(idAnalysis));
}
}
if (!showTable)
return;
// We have the results for the largest distance.
// Now reduce the distance threshold and recalculate the results
double[] distanceThresholds = getDistances(dThreshold, increments, delta);
double[] pairDistances = getPairDistances(allMatches);
// Re-use storage for the ID analysis
TIntHashSet id1 = null, id2 = null, matchId1 = null, matchId2 = null;
if (doIdAnalysis) {
if (doIdAnalysis1) {
id1 = getIds(results1);
matchId1 = new TIntHashSet(id1.size());
}
if (doIdAnalysis2) {
id2 = getIds(results2);
matchId2 = new TIntHashSet(id2.size());
}
}
for (double distanceThreshold : distanceThresholds) {
double rms = 0;
int tp2 = 0;
final double d2 = distanceThreshold * distanceThreshold;
for (double d : pairDistances) {
if (d <= d2) {
rms += d;
tp2++;
}
}
// All non-true positives must be added to the false totals.
int fp2 = n2 - tp2;
int fn2 = n1 - tp2;
MatchResult result = new MatchResult(tp2, fp2, fn2, (tp2 > 0) ? Math.sqrt(rms / tp2) : 0);
MatchResult idResult1 = null, idResult2 = null;
if (doIdAnalysis) {
if (doIdAnalysis1)
matchId1.clear();
if (doIdAnalysis2)
matchId2.clear();
int i = 0;
for (PointPair pair : allMatches) {
if (pairDistances[i++] <= d2) {
if (doIdAnalysis1)
matchId1.add(((PeakResultPoint) pair.getPoint1()).peakResult.getId());
if (doIdAnalysis2)
matchId2.add(((PeakResultPoint) pair.getPoint2()).peakResult.getId());
}
}
// => Only the recall will be valid: tp / (tp + fn)
if (doIdAnalysis1)
idResult1 = new MatchResult(matchId1.size(), 0, id1.size() - matchId1.size(), 0);
if (doIdAnalysis2)
idResult2 = new MatchResult(matchId2.size(), 0, id2.size() - matchId2.size(), 0);
}
addResult(inputOption1, inputOption2, distanceThreshold, result, idResult1, idResult2);
}
}
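A minimal sketch of a single matching call for one time point, using the same argument order as the loop above; actual, predicted and maxDistance are assumed to be prepared as in the method.
List<PointPair> matches = new LinkedList<PointPair>();
List<Coordinate> falsePositives = new LinkedList<Coordinate>();
List<Coordinate> falseNegatives = new LinkedList<Coordinate>();
// Pair each actual point with its nearest predicted point within maxDistance;
// unmatched points are collected in the false-positive / false-negative lists.
// The true-positive list is not needed here so null is passed (as above).
MatchCalculator.analyseResults2D(actual, predicted, maxDistance,
    null, falsePositives, falseNegatives, matches);
int tp = matches.size();
int fp = falsePositives.size();
int fn = falseNegatives.size();
// RMSD is omitted (0) in this sketch
MatchResult result = new MatchResult(tp, fp, fn, 0);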