Use of uk.ac.sussex.gdsc.smlm.results.PeakResult in project GDSC-SMLM by aherbert.
The class PeakFit, method getSingleFrame.
/**
 * Gets the single frame containing all the results (if they are all in a single frame), else 0.
 *
 * @param results the results (must not be empty)
 * @return the single frame (or zero)
 */
private static int getSingleFrame(MemoryPeakResults results) {
  // Seed the counter with the first frame. The procedure returns true (halting iteration)
  // as soon as a result from a different frame is seen.
  final FrameCounter counter = new FrameCounter(results.getFirstFrame());
  results.forEach((PeakResultProcedureX) peakResult -> counter.advance(peakResult.getFrame()));
  // If the counter never advanced then every result was in the same frame.
  return (counter.currentFrame() == counter.previousFrame()) ? counter.currentFrame() : 0;
}
Use of uk.ac.sussex.gdsc.smlm.results.PeakResult in project GDSC-SMLM by aherbert.
The class PeakFit, method runMaximaFitting.
/**
 * Load the selected results from memory. All multiple frame results are added directly to the
 * results. All single frame results are added to a list of candidate maxima per frame and fitted
 * using the configured parameters.
 */
private void runMaximaFitting() {
// Load the chosen input results in pixel units.
final MemoryPeakResults memoryResults = ResultsManager.loadInputResults(settings.inputOption, false, DistanceUnit.PIXEL);
if (memoryResults == null || memoryResults.size() == 0) {
// Nothing to fit.
log("No results for maxima fitting");
return;
}
// The total frames (for progress reporting)
int totalFrames;
// A function that can convert a frame into a set of candidate indices
final IntFunction<int[]> frameToMaxIndices;
// The frames to process (should be sorted ascending)
Supplier<IntStream> frames;
// Support fitting all time frames with the same results.
if (settings.fitAcrossAllFrames) {
// Check if the input spans multiple frames
if (getSingleFrame(memoryResults) == 0) {
final int min = memoryResults.getMinFrame();
final int max = memoryResults.getMaxFrame();
// Warn the user and ask for confirmation before proceeding with multi-frame candidates.
final GenericDialog gd = new GenericDialog(TITLE);
gd.enableYesNoCancel();
gd.hideCancelButton();
ImageJUtils.addMessage(gd, "Candidate maxima for fitting span multiple frames (%d-%d).\n \n" + "Please confirm the %s are correct.", min, max, TextUtils.pleural(memoryResults.size(), "candidate"));
gd.showDialog();
if (!gd.wasOKed()) {
return;
}
}
// The same fixed set of candidate indices is reused for every frame.
final int[] maxIndices = getMaxIndices(Arrays.asList(memoryResults.toArray()));
// This may not work correctly if using for example a series image source that
// incorrectly estimates the number of frames
totalFrames = source.getFrames();
frameToMaxIndices = frame -> maxIndices;
frames = () -> IntStream.rangeClosed(1, totalFrames);
} else {
// Build a map between the time-frame and the results in that frame.
// Note: results spanning multiple frames (frame != endFrame) are filtered out here.
final Map<Integer, List<PeakResult>> map = Arrays.stream(memoryResults.toArray()).parallel().filter(peakResult -> peakResult.getFrame() == peakResult.getEndFrame()).collect(Collectors.groupingBy(PeakResult::getFrame));
totalFrames = map.size();
// Build a function that can convert a frame into a set of candidate indices
frameToMaxIndices = frame -> getMaxIndices(map.get(frame));
// Process frames in ascending order.
frames = () -> map.keySet().stream().mapToInt(Integer::intValue).sorted();
}
// Optionally collect each processed frame into a stack for display at the end.
final ImageStack stack = (extraSettings.showProcessedFrames) ? new ImageStack(bounds.width, bounds.height) : null;
// Use the FitEngine to allow multi-threading.
final FitEngine engine = createFitEngine(getNumberOfThreads(totalFrames));
if (engine == null) {
return;
}
// Report progress every 'step' slices.
final int step = ImageJUtils.getProgressInterval(totalFrames);
// No crop bounds are supported.
// To pre-process data for noise estimation
final boolean isFitCameraCounts = fitConfig.isFitCameraCounts();
final CameraModel cameraModel = fitConfig.getCameraModel();
runTime = System.nanoTime();
final AtomicBoolean shutdown = new AtomicBoolean();
final String format = String.format("Slice: %%d / %d (Results=%%d)", totalFrames);
frames.get().forEachOrdered(slice -> {
// Stop if a previous slice failed or the user pressed escape.
if (shutdown.get() || escapePressed()) {
shutdown.set(true);
return;
}
final float[] data = source.get(slice);
if (data == null) {
// The source has no data for this slice; halt processing.
shutdown.set(true);
return;
}
if (slice % step == 0) {
if (ImageJUtils.showStatus(() -> String.format(format, slice, results.size()))) {
IJ.showProgress(slice, totalFrames);
}
}
// We must pre-process the data before noise estimation
final float[] data2 = data.clone();
if (isFitCameraCounts) {
cameraModel.removeBias(data2);
} else {
cameraModel.removeBiasAndGain(data2);
}
final float noise = FitWorker.estimateNoise(data2, source.getWidth(), source.getHeight(), config.getNoiseMethod());
if (stack != null) {
// The original (unprocessed) data is shown.
stack.addSlice(String.format("Frame %d - %d", source.getStartFrameNumber(), source.getEndFrameNumber()), data);
}
// Get the frame number from the source to allow for interlaced and aggregated data
engine.run(createMaximaFitJob(frameToMaxIndices.apply(slice), source.getStartFrameNumber(), source.getEndFrameNumber(), data, bounds, noise));
});
// Finish fitting; the flag indicates whether processing was stopped early.
engine.end(shutdown.get());
time = engine.getTime();
runTime = System.nanoTime() - runTime;
if (stack != null) {
ImageJUtils.display("Processed frames", stack);
}
showResults();
source.close();
}
Use of uk.ac.sussex.gdsc.smlm.results.PeakResult in project GDSC-SMLM by aherbert.
The class PeakFit, method getMaxIndices.
/**
 * Convert the candidate results for a slice into linear pixel indices within the fit bounds.
 *
 * @param sliceCandidates the candidate results
 * @return the linear indices (origX + width * origY)
 */
private int[] getMaxIndices(List<PeakResult> sliceCandidates) {
  final int[] indices = new int[sliceCandidates.size()];
  int i = 0;
  for (final PeakResult candidate : sliceCandidates) {
    // Linearise the original (x, y) pixel coordinates using the bounds width.
    indices[i] = candidate.getOrigX() + bounds.width * candidate.getOrigY();
    i++;
  }
  return indices;
}
Use of uk.ac.sussex.gdsc.smlm.results.PeakResult in project GDSC-SMLM by aherbert.
The class Fire, method createImages.
/**
 * Creates the images to use for the FIRE calculation. This must be called after
 * {@link #initialise(MemoryPeakResults, MemoryPeakResults)}.
 *
 * @param fourierImageScale the fourier image scale (set to zero to auto compute)
 * @param imageSize the image size
 * @param useSignal Use the localisation signal to weight the intensity. The default uses a value
 * of 1 per localisation.
 * @return the fire images
 */
public FireImages createImages(double fourierImageScale, int imageSize, boolean useSignal) {
if (results == null) {
return null;
}
// Weight each localisation by its signal only when requested and the data has intensity.
final SignalProvider signalProvider = (useSignal && (results.hasIntensity())) ? new PeakSignalProvider() : new FixedSignalProvider();
// Draw images using the existing IJ routines.
final Rectangle bounds = new Rectangle((int) Math.ceil(dataBounds.getWidth()), (int) Math.ceil(dataBounds.getHeight()));
final ResultsImageSettings.Builder builder = ResultsImageSettings.newBuilder().setImageType(ResultsImageType.DRAW_NONE).setWeighted(true).setEqualised(false).setImageMode(ResultsImageMode.IMAGE_ADD);
if (fourierImageScale > 0) {
// Use an explicit scale factor for the super-resolution image.
builder.setImageSizeMode(ResultsImageSizeMode.SCALED);
builder.setScale(fourierImageScale);
} else {
// Auto-compute the scale from a target output image size.
builder.setImageSizeMode(ResultsImageSizeMode.IMAGE_SIZE);
builder.setImageSize(imageSize);
}
ImageJImagePeakResults image1 = createPeakResultsImage(bounds, builder, "IP1");
ImageJImagePeakResults image2 = createPeakResultsImage(bounds, builder, "IP2");
// Offsets to translate localisations to the origin of the data bounds.
final float minx = (float) dataBounds.getX();
final float miny = (float) dataBounds.getY();
if (this.results2 != null) {
// Two image comparison
final ImageJImagePeakResults i1 = image1;
results.forEach((PeakResultProcedure) result -> {
final float x = result.getXPosition() - minx;
final float y = result.getYPosition() - miny;
i1.add(x, y, signalProvider.getSignal(result));
});
final ImageJImagePeakResults i2 = image2;
results2.forEach((PeakResultProcedure) result -> {
final float x = result.getXPosition() - minx;
final float y = result.getYPosition() - miny;
i2.add(x, y, signalProvider.getSignal(result));
});
} else {
// Block sampling.
// Ensure we have at least 2 even sized blocks.
int blockSize = Math.min(results.size() / 2, Math.max(1, settings.blockSize));
int nblocks = (int) Math.ceil((double) results.size() / blockSize);
// Shrink the block size until the results split into more than one block.
while (nblocks <= 1 && blockSize > 1) {
blockSize /= 2;
nblocks = (int) Math.ceil((double) results.size() / blockSize);
}
if (nblocks <= 1) {
// This should not happen since the results should contain at least 2 localisations
return null;
}
if (blockSize != settings.blockSize) {
IJ.log(pluginTitle + " Warning: Changed block size to " + blockSize);
}
// Partition the results (in iteration order) into consecutive fixed-size blocks.
final Counter i = new Counter();
final Counter block = new Counter();
final int finalBlockSize = blockSize;
final PeakResult[][] blocks = new PeakResult[nblocks][blockSize];
results.forEach((PeakResultProcedure) result -> {
if (i.getCount() == finalBlockSize) {
block.increment();
i.reset();
}
blocks[block.getCount()][i.getAndIncrement()] = result;
});
// Truncate last block
blocks[block.getCount()] = Arrays.copyOf(blocks[block.getCount()], i.getCount());
final int[] indices = SimpleArrayUtils.natural(block.getCount() + 1);
if (settings.randomSplit) {
// Randomise which blocks go to which image.
MathArrays.shuffle(indices);
}
for (final int index : indices) {
// Split alternating so just rotate
final ImageJImagePeakResults image = image1;
image1 = image2;
image2 = image;
for (final PeakResult p : blocks[index]) {
final float x = p.getXPosition() - minx;
final float y = p.getYPosition() - miny;
image.add(x, y, signalProvider.getSignal(p));
}
}
}
// Finalise rendering and extract the pixel data.
image1.end();
final ImageProcessor ip1 = image1.getImagePlus().getProcessor();
image2.end();
final ImageProcessor ip2 = image2.getImagePlus().getProcessor();
if (settings.maxPerBin > 0 && signalProvider instanceof FixedSignalProvider) {
// We can eliminate over-sampled pixels
for (int i = ip1.getPixelCount(); i-- > 0; ) {
if (ip1.getf(i) > settings.maxPerBin) {
ip1.setf(i, settings.maxPerBin);
}
if (ip2.getf(i) > settings.maxPerBin) {
ip2.setf(i, settings.maxPerBin);
}
}
}
return new FireImages(ip1, ip2, nmPerUnit / image1.getScale());
}
Use of uk.ac.sussex.gdsc.smlm.results.PeakResult in project GDSC-SMLM by aherbert.
The class Fire, method showFrcTimeEvolution.
/**
 * Plot the FRC resolution against the number of frames included in the computation, ending with
 * the FIRE number computed from all the results.
 *
 * @param name the name used for the plot title
 * @param fireNumber the FIRE number computed from the full results
 * @param thresholdMethod the FRC threshold method
 * @param fourierImageScale the fourier image scale (set to zero to auto compute)
 * @param imageSize the image size
 */
private void showFrcTimeEvolution(String name, double fireNumber, ThresholdMethod thresholdMethod, double fourierImageScale, int imageSize) {
IJ.showStatus("Calculating FRC time evolution curve...");
// Sort by time
results.sort();
final int nSteps = 10;
int maxT = results.getLastFrame();
if (maxT == 0) {
// No frame information; fall back to using the result count as the time axis.
maxT = results.size();
}
final int step = maxT / nSteps;
final TDoubleArrayList x = new TDoubleArrayList();
final TDoubleArrayList y = new TDoubleArrayList();
double yMin = fireNumber;
double yMax = fireNumber;
// Cumulative subset of the results up to the current time point.
final MemoryPeakResults newResults = new MemoryPeakResults();
newResults.copySettings(results);
int index = 0;
final PeakResult[] list = results.toArray();
for (int t = step; t <= maxT - step; t += step) {
// Add all results up to and including frame t (the list is sorted by time).
while (index < list.length) {
final PeakResult r = list[index];
if (r.getFrame() <= t) {
newResults.add(r);
index++;
} else {
break;
}
}
x.add(t);
// NOTE(review): newResults is accumulated but not visibly passed to the copy; presumably
// copy()/calculateFireNumber pick up the truncated results via shared state - confirm.
final Fire f = this.copy();
final FireResult result = f.calculateFireNumber(fourierMethod, samplingMethod, thresholdMethod, fourierImageScale, imageSize);
// A failed calculation is plotted as zero resolution.
final double fire = (result == null) ? 0 : result.fireNumber;
y.add(fire);
yMin = Math.min(yMin, fire);
yMax = Math.max(yMax, fire);
}
// Add the final fire number
x.add(maxT);
y.add(fireNumber);
final double[] xValues = x.toArray();
final double[] yValues = y.toArray();
// Use calibrated units for the resolution axis when available.
String units = "px";
if (results.getCalibration() != null) {
nmPerUnit = results.getNmPerPixel();
units = "nm";
}
final String title = name + " FRC Time Evolution";
final Plot plot = new Plot(title, "Frames", "Resolution (" + units + ")");
// Pad the y-axis limits by 5% of the range (at least 0.05).
final double range = Math.max(1, yMax - yMin) * 0.05;
plot.setLimits(xValues[0], xValues[xValues.length - 1], yMin - range, yMax + range);
plot.setColor(Color.red);
plot.addPoints(xValues, yValues, Plot.CONNECTED_CIRCLES);
ImageJUtils.display(title, plot);
}
Aggregations