Use of gdsc.smlm.results.MemoryPeakResults in project GDSC-SMLM by aherbert.
The class SummariseResults, method run.
/*
 * (non-Javadoc)
 *
 * @see ij.plugin.PlugIn#run(java.lang.String)
 */
public void run(String arg) {
    SMLMUsageTracker.recordPlugin(this.getClass(), arg);
    if (MemoryPeakResults.isMemoryEmpty()) {
        IJ.error(TITLE, "There are no fitting results in memory");
        clearSummaryTable();
        return;
    }
    createSummaryTable();
    StringBuilder sb = new StringBuilder();
    int i = 0;
    int nextFlush = 9;
    for (MemoryPeakResults result : MemoryPeakResults.getAllResults()) {
        addSummary(sb, result);
        if (++i == nextFlush) {
            summary.append(sb.toString());
            sb.setLength(0);
        }
    }
    summary.append(sb.toString());
    summary.append("");
    summary.toFront();
}
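For context, a minimal sketch of inspecting the results held in memory before summarising them; it reuses only calls already shown on this page (isMemoryEmpty, getAllResults, getName, size) and is an illustrative assumption, not part of the plugin:

// Hedged sketch: list the result sets currently held in memory.
if (!MemoryPeakResults.isMemoryEmpty()) {
    for (MemoryPeakResults r : MemoryPeakResults.getAllResults()) {
        // getName() and size() are used by other snippets on this page
        IJ.log(r.getName() + " : " + r.size() + " results");
    }
}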
Use of gdsc.smlm.results.MemoryPeakResults in project GDSC-SMLM by aherbert.
The class TraceDiffusion, method getTraces.
private Trace[] getTraces(ArrayList<MemoryPeakResults> allResults) {
    this.results = allResults.get(0);
    // Results should be checked for calibration by this point
    exposureTime = results.getCalibration().getExposureTime() / 1000;
    final double nmPerPixel = results.getCalibration().getNmPerPixel();
    ArrayList<Trace> allTraces = new ArrayList<Trace>();
    additionalDatasets = -1;
    for (MemoryPeakResults r : allResults) {
        additionalDatasets++;
        TraceManager manager = new TraceManager(r);
        // Run the tracing
        manager.setTracker(new IJTrackProgress());
        manager.setDistanceExclusion(settings.distanceExclusion / nmPerPixel);
        manager.traceMolecules(settings.distanceThreshold / nmPerPixel, 1);
        Trace[] traces = manager.getTraces();
        traces = filterTraces(r.getName(), traces, settings.minimumTraceLength, settings.ignoreEnds);
        allTraces.addAll(Arrays.asList(traces));
        // --- Save results ---
        if (traces.length > 0) {
            // Save the traces to memory
            TraceMolecules.saveResults(r, traces, "Tracks");
            if (settings.saveTraces) {
                // Sort traces by time to assist the results source in extracting frames sequentially.
                // Do this before saving to assist in debugging using the saved traces file.
                TraceMolecules.sortByTime(traces);
                String newFilename = TraceMolecules.saveTraces(r, traces, createSettingsComment(), tracesFilename, additionalDatasets);
                // Only keep the main filename in memory
                if (additionalDatasets == 0)
                    tracesFilename = newFilename;
            }
        }
    }
    Trace[] all = allTraces.toArray(new Trace[allTraces.size()]);
    if (additionalDatasets > 0) {
        Utils.log("Multiple inputs provide %d traces", allTraces.size());
        MemoryPeakResults tracedResults = TraceManager.toPeakResults(all, results.getCalibration(), true);
        tracedResults.copySettings(results);
        tracedResults.setName(createCombinedName() + " Tracks");
        MemoryPeakResults.addResults(tracedResults);
    }
    return all;
}
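For a single dataset the same tracing steps can be exercised directly. The sketch below reuses only TraceManager and MemoryPeakResults calls that appear in the method above; the distance values in nm are illustrative assumptions, not plugin settings:

// Hedged sketch: trace one results set and store the traced centroids in memory.
// 'r' is an existing MemoryPeakResults (e.g. from MemoryPeakResults.getAllResults()).
double nmPerPixel = r.getCalibration().getNmPerPixel();
double distanceThresholdNm = 400; // assumed example value
double distanceExclusionNm = 800; // assumed example value
TraceManager manager = new TraceManager(r);
manager.setTracker(new IJTrackProgress());
manager.setDistanceExclusion(distanceExclusionNm / nmPerPixel);
manager.traceMolecules(distanceThresholdNm / nmPerPixel, 1);
Trace[] traces = manager.getTraces();
// Convert the traces back to a results set, as done above for the combined datasets
MemoryPeakResults traced = TraceManager.toPeakResults(traces, r.getCalibration(), true);
traced.setName(r.getName() + " Tracks");
MemoryPeakResults.addResults(traced);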
Use of gdsc.smlm.results.MemoryPeakResults in project GDSC-SMLM by aherbert.
The class CreateData, method drawImage.
//StoredDataStatistics rawPhotons = new StoredDataStatistics();
//StoredDataStatistics drawPhotons = new StoredDataStatistics();

// private synchronized void addRaw(double d)
// {
//     //rawPhotons.add(d);
// }
//
// private synchronized void addDraw(double d)
// {
//     //drawPhotons.add(d);
// }

/**
 * Create an image from the localisations using the configured PSF width. Draws a new stack
 * image.
 * <p>
 * Note that the localisations are filtered using the signal. The input list of localisations will be updated.
 *
 * @param localisationSets
 *            the localisation sets to draw
 * @return The localisations
 */
private List<LocalisationModel> drawImage(final List<LocalisationModelSet> localisationSets) {
    if (localisationSets.isEmpty())
        return null;
    // Create a new list for all localisations that are drawn (i.e. pass the signal filters)
    List<LocalisationModelSet> newLocalisations = Collections.synchronizedList(new ArrayList<LocalisationModelSet>(localisationSets.size()));
    photonsRemoved = new AtomicInteger();
    t1Removed = new AtomicInteger();
    tNRemoved = new AtomicInteger();
    photonStats = new SummaryStatistics();
    // Add drawn spots to memory
    results = new MemoryPeakResults();
    Calibration c = new Calibration(settings.pixelPitch, settings.getTotalGain(), settings.exposureTime);
    c.setEmCCD((settings.getEmGain() > 1));
    c.setBias(settings.bias);
    c.setReadNoise(settings.readNoise * ((settings.getCameraGain() > 0) ? settings.getCameraGain() : 1));
    c.setAmplification(settings.getAmplification());
    results.setCalibration(c);
    results.setSortAfterEnd(true);
    results.begin();
    maxT = localisationSets.get(localisationSets.size() - 1).getTime();
    // Display image
    ImageStack stack = new ImageStack(settings.size, settings.size, maxT);
    final double psfSD = getPsfSD();
    if (psfSD <= 0)
        return null;
    ImagePSFModel imagePSFModel = null;
    if (imagePSF) {
        // Create one Image PSF model that can be copied
        imagePSFModel = createImagePSF(localisationSets);
        if (imagePSFModel == null)
            return null;
    }
    IJ.showStatus("Drawing image ...");
    // Multi-thread for speed.
    // Note that the default Executors.newCachedThreadPool() will continue to make threads if
    // new tasks are added. We need to limit the tasks that can be added using a fixed size
    // blocking queue.
    // http://stackoverflow.com/questions/1800317/impossible-to-make-a-cached-thread-pool-with-a-size-limit
    // ExecutorService threadPool = Executors.newCachedThreadPool();
    ExecutorService threadPool = Executors.newFixedThreadPool(Prefs.getThreads());
    List<Future<?>> futures = new LinkedList<Future<?>>();
    // Count all the frames to process
    frame = 0;
    totalFrames = maxT;
    // Collect statistics on the number of photons actually simulated
    // Process all frames
    int i = 0;
    int lastT = -1;
    for (LocalisationModelSet l : localisationSets) {
        if (Utils.isInterrupted())
            break;
        if (l.getTime() != lastT) {
            lastT = l.getTime();
            futures.add(threadPool.submit(new ImageGenerator(localisationSets, newLocalisations, i, lastT, createPSFModel(imagePSFModel), results, stack, poissonNoise, new RandomDataGenerator(createRandomGenerator()))));
        }
        i++;
    }
    // Finish processing data
    Utils.waitForCompletion(futures);
    futures.clear();
    if (Utils.isInterrupted()) {
        IJ.showProgress(1);
        return null;
    }
    // Do all the frames that had no localisations
    for (int t = 1; t <= maxT; t++) {
        if (Utils.isInterrupted())
            break;
        if (stack.getPixels(t) == null) {
            futures.add(threadPool.submit(new ImageGenerator(localisationSets, newLocalisations, maxT, t, null, results, stack, poissonNoise, new RandomDataGenerator(createRandomGenerator()))));
        }
    }
    // Finish
    Utils.waitForCompletion(futures);
    threadPool.shutdown();
    IJ.showProgress(1);
    if (Utils.isInterrupted()) {
        return null;
    }
    results.end();
    // Clear memory
    imagePSFModel = null;
    threadPool = null;
    futures.clear();
    futures = null;
    if (photonsRemoved.get() > 0)
        Utils.log("Removed %d localisations with less than %.1f rendered photons", photonsRemoved.get(), settings.minPhotons);
    if (t1Removed.get() > 0)
        Utils.log("Removed %d localisations with no neighbours @ SNR %.2f", t1Removed.get(), settings.minSNRt1);
    if (tNRemoved.get() > 0)
        Utils.log("Removed %d localisations with valid neighbours @ SNR %.2f", tNRemoved.get(), settings.minSNRtN);
    if (photonStats.getN() > 0)
        Utils.log("Average photons rendered = %s +/- %s", Utils.rounded(photonStats.getMean()), Utils.rounded(photonStats.getStandardDeviation()));
    //System.out.printf("rawPhotons = %f\n", rawPhotons.getMean());
    //System.out.printf("drawPhotons = %f\n", drawPhotons.getMean());
    //Utils.showHistogram("draw photons", drawPhotons, "photons", true, 0, 1000);
    // Update with all the localisations that have been drawn
    localisationSets.clear();
    localisationSets.addAll(newLocalisations);
    newLocalisations = null;
    IJ.showStatus("Displaying image ...");
    ImageStack newStack = stack;
    if (!settings.rawImage) {
        // Get the global limits and ensure all values can be represented
        Object[] imageArray = stack.getImageArray();
        float[] limits = Maths.limits((float[]) imageArray[0]);
        for (int j = 1; j < imageArray.length; j++)
            limits = Maths.limits(limits, (float[]) imageArray[j]);
        // Leave bias in place
        limits[0] = 0;
        // Check if the image will fit in a 16-bit range
        if ((limits[1] - limits[0]) < 65535) {
            // Convert to 16-bit
            newStack = new ImageStack(stack.getWidth(), stack.getHeight(), stack.getSize());
            // Account for rounding
            final float min = (float) (limits[0] - 0.5);
            for (int j = 0; j < imageArray.length; j++) {
                float[] image = (float[]) imageArray[j];
                short[] pixels = new short[image.length];
                for (int k = 0; k < pixels.length; k++) {
                    pixels[k] = (short) (image[k] - min);
                }
                newStack.setPixels(pixels, j + 1);
                // Free memory
                imageArray[j] = null;
                // Attempt to stay within memory (check vs 32MB)
                if (MemoryPeakResults.freeMemory() < 33554432L)
                    MemoryPeakResults.runGCOnce();
            }
        } else {
            // Keep as 32-bit but round to whole numbers
            for (int j = 0; j < imageArray.length; j++) {
                float[] pixels = (float[]) imageArray[j];
                for (int k = 0; k < pixels.length; k++) {
                    pixels[k] = Math.round(pixels[k]);
                }
            }
        }
    }
    // Show image
    ImagePlus imp = Utils.display(CREATE_DATA_IMAGE_TITLE, newStack);
    ij.measure.Calibration cal = new ij.measure.Calibration();
    String unit = "nm";
    double unitPerPixel = settings.pixelPitch;
    if (unitPerPixel > 100) {
        unit = "um";
        unitPerPixel /= 1000.0;
    }
    cal.setUnit(unit);
    cal.pixelHeight = cal.pixelWidth = unitPerPixel;
    imp.setCalibration(cal);
    imp.setDimensions(1, 1, newStack.getSize());
    imp.resetDisplayRange();
    imp.updateAndDraw();
    saveImage(imp);
    results.setSource(new IJImageSource(imp));
    results.setName(CREATE_DATA_IMAGE_TITLE + " (" + TITLE + ")");
    results.setConfiguration(createConfiguration((float) psfSD));
    results.setBounds(new Rectangle(0, 0, settings.size, settings.size));
    MemoryPeakResults.addResults(results);
    setBenchmarkResults(imp, results);
    if (benchmarkMode && benchmarkParameters != null)
        benchmarkParameters.setPhotons(results);
    List<LocalisationModel> localisations = toLocalisations(localisationSets);
    savePulses(localisations, results, CREATE_DATA_IMAGE_TITLE);
    // Save the fixed and moving localisations into different datasets
    saveFixedAndMoving(results, CREATE_DATA_IMAGE_TITLE);
    return localisations;
}
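The float-to-16-bit conversion above is the non-obvious step: the lower limit is forced to 0 so the camera bias stays in the pixel values, and 0.5 is subtracted from that minimum so the float-to-short cast rounds to the nearest integer instead of truncating. A standalone sketch of just that step in plain Java (no GDSC utilities); the helper name is hypothetical:

// Hedged sketch of the 16-bit conversion used above.
// Assumes (max - min) < 65535 has already been checked, as in the method.
static short[] toShortPixels(float[] image, float minValue) {
    final float min = minValue - 0.5f; // offset so the cast of positive values rounds to nearest
    short[] pixels = new short[image.length];
    for (int k = 0; k < pixels.length; k++) {
        pixels[k] = (short) (image[k] - min);
    }
    return pixels;
}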
Use of gdsc.smlm.results.MemoryPeakResults in project GDSC-SMLM by aherbert.
The class CreateData, method loadBenchmarkData.
/**
 * Load benchmark data using an open image and an XYZ text file.
 */
private void loadBenchmarkData() {
    if (!showLoadDialog()) {
        //resetMemory();
        return;
    }
    // Load the image
    ImagePlus imp = WindowManager.getImage(benchmarkImage);
    if (imp == null) {
        IJ.error(TITLE, "No benchmark image: " + benchmarkImage);
        //resetMemory();
        return;
    }
    // Load the results
    MemoryPeakResults results = getSimulationResults();
    if (results == null) {
        IJ.error(TITLE, "No benchmark results: " + benchmarkResultsName);
        //resetMemory();
        return;
    }
    results.setName(imp.getTitle() + " (Results)");
    results.setBounds(new Rectangle(0, 0, imp.getWidth(), imp.getHeight()));
    results.setSource(new IJImageSource(imp));
    // Get the calibration
    simulationParameters = showSimulationParametersDialog(imp, results);
    if (simulationParameters != null) {
        setBackground(results);
        setNoise(results, imp);
        setBenchmarkResults(imp, results);
        IJ.showStatus("Loaded " + Utils.pleural(results.size(), "result"));
    } else {
        resetMemory();
    }
}
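The essential wiring here is attaching the loaded results to the open image so later plugins can find both. A minimal sketch of that step alone, assuming 'imp' and 'results' already exist (e.g. from WindowManager and a reader), and ending with an explicit add to memory for illustration:

// Hedged sketch: attach loaded results to an open image and store them in memory.
results.setName(imp.getTitle() + " (Results)");
results.setBounds(new Rectangle(0, 0, imp.getWidth(), imp.getHeight()));
results.setSource(new IJImageSource(imp));
MemoryPeakResults.addResults(results);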
Use of gdsc.smlm.results.MemoryPeakResults in project GDSC-SMLM by aherbert.
The class CreateData, method getSimulationResults.
private MemoryPeakResults getSimulationResults() {
    if (benchmarkAuto) {
        // Load directly from a results file. This is mainly to be used to load simulations
        // saved to memory then saved to file. This is because the z-depth must be in the
        // error field of the results.
        PeakResultsReader r = new PeakResultsReader(benchmarkFile);
        MemoryPeakResults results = r.getResults();
        if (results != null) {
            ResultsManager.checkCalibration(results);
            return results;
        }
    }
    // Load using a universal text file
    LocalisationList localisations = LoadLocalisations.loadLocalisations(benchmarkFile);
    if (localisations.isEmpty())
        return null;
    return localisations.toPeakResults();
}
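A minimal sketch of the same fallback pattern on its own, reusing only the PeakResultsReader and LoadLocalisations calls above; the file path is a hypothetical example:

// Hedged sketch: read a results file, falling back to the universal text loader.
String file = "/tmp/simulation.results.xls"; // hypothetical path
MemoryPeakResults results = new PeakResultsReader(file).getResults();
if (results == null) {
    LocalisationList localisations = LoadLocalisations.loadLocalisations(file);
    results = localisations.isEmpty() ? null : localisations.toPeakResults();
}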