Use of uk.ac.sussex.gdsc.smlm.model.camera.CameraModel in project GDSC-SMLM by aherbert:
class FitEngineConfiguration, method createSpotFilter.
/**
 * Create the spot filter for identifying candidate maxima. The actual border, search width and
 * smoothing parameters can be configured relative to the configured standard deviations or left
 * absolute. The standard deviation is used to determine the Half-Width at Half-Maximum (HWHM) for
 * each dimension and the parameters set as follows.
 *
 * <pre>
 * int search = (int) Math.ceil(getSearch() * hwhmMax);
 * int border = (int) Math.floor(getBorder() * hwhmMax);
 * // For each filter
 * double smooth = getSmooth(i) * hwhmMin;
 * </pre>
 *
 * @return the maxima spot filter
 * @throws IllegalStateException if a per-pixel camera type is configured but the filter is not
 *         weighted, or no per-pixel camera model is available
 */
public MaximaSpotFilter createSpotFilter() {
  // Respect the absolute flag of each distance parameter.
  // Get the half-width at half maximum
  final double hwhmMin = getHwhmMin();
  final double hwhmMax = getHwhmMax();
  // Note: rounding to 2 decimal places is a simple method for removing small errors
  // in floating point precision from creating an incorrect integer
  // Region for maxima finding; clamp to the minimum of 1 pixel
  final int search = Math.max(1, (int) Math.ceil(convert(getSearchParameter(), hwhmMax, 2)));
  // Border where peaks are ignored; cannot be negative
  final int border = Math.max(0, (int) Math.floor(convert(getBorderParameter(), hwhmMax, 2)));
  final DataProcessor processor0 = createDataProcessor(border, 0, hwhmMin);
  final DataFilterSettings f = fitEngineSettings.getDataFilterSettings();
  final int filterCount = f.getDataFiltersCount();
  final MaximaSpotFilter spotFilter;
  if (f.getDataFilterType() == DataFilterType.JURY && filterCount > 1) {
    // Jury filter: combine all configured data processors
    final DataProcessor[] processors = new DataProcessor[filterCount];
    processors[0] = processor0;
    for (int i = 1; i < filterCount; i++) {
      processors[i] = createDataProcessor(border, i, hwhmMin);
    }
    spotFilter = new JurySpotFilter(search, border, processors);
  } else if (f.getDataFilterType() == DataFilterType.DIFFERENCE && filterCount > 1) {
    // Difference filter: uses the first two configured data processors
    final DataProcessor processor1 = createDataProcessor(border, 1, hwhmMin);
    spotFilter = new DifferenceSpotFilter(search, border, processor0, processor1);
  } else {
    spotFilter = new SingleSpotFilter(search, border, processor0);
  }
  // Per-pixel camera types (e.g. sCMOS) require per-pixel weighting of the filtered data
  if (getFitConfiguration().isPerPixelCameraType()) {
    if (!spotFilter.isWeighted()) {
      throw new IllegalStateException(
          "Camera type requires a weighted spot filter: " + fitConfiguration.getCameraType());
    }
    final CameraModel model = fitConfiguration.getCameraModel();
    if (model == null || !model.isPerPixelModel()) {
      throw new IllegalStateException("Weighted spot filter requires a per-pixel camera model");
    }
  }
  return spotFilter;
}
Use of uk.ac.sussex.gdsc.smlm.model.camera.CameraModel in project GDSC-SMLM by aherbert:
class SpotFinderPreview, method run.
/**
 * Run the spot filter on the given slice and show the candidate maxima as an image overlay.
 * When ground-truth coordinates are available ({@code label != null}) the candidates are scored
 * against them and precision/recall plots are shown; otherwise the candidate intensities and
 * neighbour counts are plotted.
 *
 * @param ip the processor for the current slice (its ROI defines the analysed region)
 * @param filter the maxima spot filter
 */
private void run(ImageProcessor ip, MaximaSpotFilter filter) {
// Ignore calls made while the preview is being refreshed programmatically
if (refreshing) {
return;
}
currentSlice = imp.getCurrentSlice();
final Rectangle bounds = ip.getRoi();
// Crop to the ROI
FloatProcessor fp = ip.crop().toFloat(0, null);
float[] data = (float[]) fp.getPixels();
final int width = fp.getWidth();
final int height = fp.getHeight();
// Store the mean bias and gain of the region data.
// This is used to correctly overlay the filtered data on the original image.
double bias = 0;
double gain = 1;
boolean adjust = false;
// Set weights
// A fake per-pixel model has no real per-pixel data so weighting/bias removal is skipped
final CameraModel cameraModel = fitConfig.getCameraModel();
if (!(cameraModel instanceof FakePerPixelCameraModel)) {
// This should be done on the normalised data
final float[] w = cameraModel.getNormalisedWeights(bounds);
filter.setWeights(w, width, height);
// Clone so bias/gain removal does not modify the processor's pixel array
data = data.clone();
if (data.length < ip.getPixelCount()) {
// Only a sub-region is processed: remember the mean bias/gain so the filtered
// data can be scaled back for display on the original image
adjust = true;
bias = MathUtils.sum(cameraModel.getBias(bounds)) / data.length;
gain = MathUtils.sum(cameraModel.getGain(bounds)) / data.length;
}
cameraModel.removeBiasAndGain(bounds, data);
}
final Spot[] spots = filter.rank(data, width, height);
// Use the filter's pre-processed (smoothed) data for the preview image
data = filter.getPreprocessedData();
final int size = spots.length;
if (topNScrollBar != null) {
topNScrollBar.setMaximum(size);
selectScrollBar.setMaximum(size);
}
// Build the preview: the original image with the filtered region inserted on top
fp = new FloatProcessor(width, height, data);
final FloatProcessor out = new FloatProcessor(ip.getWidth(), ip.getHeight());
out.copyBits(ip, 0, 0, Blitter.COPY);
if (adjust) {
// Approximately undo the normalisation so the region matches the surrounding image
fp.multiply(gain);
fp.add(bias);
}
out.insert(fp, bounds.x, bounds.y);
final double min = fp.getMin();
final double max = fp.getMax();
out.setMinAndMax(min, max);
final Overlay o = new Overlay();
o.add(new ImageRoi(0, 0, out));
if (label != null) {
// Get results for frame
final Coordinate[] actual = ResultsMatchCalculator.getCoordinates(actualCoordinates, imp.getCurrentSlice());
final Coordinate[] predicted = new Coordinate[size];
for (int i = 0; i < size; i++) {
// Candidate coordinates are relative to the crop; shift back to image coordinates
predicted[i] = new BasePoint(spots[i].x + bounds.x, spots[i].y + bounds.y);
}
// Compute assignments
final LocalList<FractionalAssignment> fractionalAssignments = new LocalList<>(3 * predicted.length);
final double matchDistance = settings.distance * fitConfig.getInitialPeakStdDev();
final RampedScore score = RampedScore.of(matchDistance, matchDistance * settings.lowerDistance / 100, false);
// Squared distance threshold avoids sqrt in the rejection test below
final double dmin = matchDistance * matchDistance;
final int nActual = actual.length;
final int nPredicted = predicted.length;
for (int j = 0; j < nPredicted; j++) {
// Centre in the middle of the pixel
final float x = predicted[j].getX() + 0.5f;
final float y = predicted[j].getY() + 0.5f;
// Any spots that match
for (int i = 0; i < nActual; i++) {
final double dx = (x - actual[i].getX());
final double dy = (y - actual[i].getY());
final double d2 = dx * dx + dy * dy;
if (d2 <= dmin) {
final double d = Math.sqrt(d2);
final double s = score.score(d);
if (s == 0) {
continue;
}
double distance = 1 - s;
if (distance == 0) {
// In the case of a match below the distance thresholds
// the distance will be 0. To distinguish between candidates all below
// the thresholds just take the closest.
// We know d2 is below dmin so we subtract the delta.
distance -= (dmin - d2);
}
// Store the match
fractionalAssignments.add(new ImmutableFractionalAssignment(i, j, distance, s));
}
}
}
final FractionalAssignment[] assignments = fractionalAssignments.toArray(new FractionalAssignment[0]);
// Compute matches
// NOTE(review): nActual - 1 / nPredicted - 1 appear to be the maximum zero-based ids
// expected by the calculator - confirm against RankedScoreCalculator.create
final RankedScoreCalculator calc = RankedScoreCalculator.create(assignments, nActual - 1, nPredicted - 1);
final boolean save = settings.showTP || settings.showFP;
final double[] calcScore = calc.score(nPredicted, settings.multipleMatches, save);
final ClassificationResult result = RankedScoreCalculator.toClassificationResult(calcScore, nActual);
// Compute AUC and max jaccard (and plot)
final double[][] curve = RankedScoreCalculator.getPrecisionRecallCurve(assignments, nActual, nPredicted);
final double[] precision = curve[0];
final double[] recall = curve[1];
final double[] jaccard = curve[2];
final double auc = AucCalculator.auc(precision, recall);
// Show scores
final String scoreLabel = String.format("Slice=%d, AUC=%s, R=%s, Max J=%s", imp.getCurrentSlice(), MathUtils.rounded(auc), MathUtils.rounded(result.getRecall()), MathUtils.rounded(MathUtils.maxDefault(0, jaccard)));
setLabel(scoreLabel);
// Plot
String title = TITLE + " Performance";
Plot plot = new Plot(title, "Spot Rank", "");
final double[] rank = SimpleArrayUtils.newArray(precision.length, 0, 1.0);
plot.setLimits(0, nPredicted, 0, 1.05);
plot.setColor(Color.blue);
plot.addPoints(rank, precision, Plot.LINE);
plot.setColor(Color.red);
plot.addPoints(rank, recall, Plot.LINE);
plot.setColor(Color.black);
plot.addPoints(rank, jaccard, Plot.LINE);
plot.setColor(Color.black);
plot.addLabel(0, 0, scoreLabel);
final WindowOrganiser windowOrganiser = new WindowOrganiser();
ImageJUtils.display(title, plot, 0, windowOrganiser);
title = TITLE + " Precision-Recall";
plot = new Plot(title, "Recall", "Precision");
plot.setLimits(0, 1, 0, 1.05);
plot.setColor(Color.red);
plot.addPoints(recall, precision, Plot.LINE);
// Drop a vertical from the final point of the curve to the x-axis
plot.drawLine(recall[recall.length - 1], precision[recall.length - 1], recall[recall.length - 1], 0);
plot.setColor(Color.black);
plot.addLabel(0, 0, scoreLabel);
ImageJUtils.display(title, plot, 0, windowOrganiser);
windowOrganiser.tile();
// Create Rois for TP and FP
if (save) {
final double[] matchScore = RankedScoreCalculator.getMatchScore(calc.getScoredAssignments(), nPredicted);
// A non-zero match score marks a matched (true-positive) candidate
int matches = 0;
for (int i = 0; i < matchScore.length; i++) {
if (matchScore[i] != 0) {
matches++;
}
}
if (settings.showTP) {
final float[] x = new float[matches];
final float[] y = new float[x.length];
int count = 0;
for (int i = 0; i < matchScore.length; i++) {
if (matchScore[i] != 0) {
final BasePoint p = (BasePoint) predicted[i];
// Centre in the middle of the pixel
x[count] = p.getX() + 0.5f;
y[count] = p.getY() + 0.5f;
count++;
}
}
addRoi(0, o, x, y, count, Color.green);
}
if (settings.showFP) {
final float[] x = new float[nPredicted - matches];
final float[] y = new float[x.length];
int count = 0;
for (int i = 0; i < matchScore.length; i++) {
if (matchScore[i] == 0) {
final BasePoint p = (BasePoint) predicted[i];
x[count] = p.getX() + 0.5f;
y[count] = p.getY() + 0.5f;
count++;
}
}
addRoi(0, o, x, y, count, Color.red);
}
}
} else {
// No ground truth: plot candidate intensity vs rank and neighbour counts
final WindowOrganiser wo = new WindowOrganiser();
// Option to show the number of neighbours within a set pixel box radius
final int[] count = spotFilterHelper.countNeighbours(spots, width, height, settings.neighbourRadius);
// Show as histogram the totals...
new HistogramPlotBuilder(TITLE, StoredData.create(count), "Neighbours").setIntegerBins(true).setPlotLabel("Radius = " + settings.neighbourRadius).show(wo);
// TODO - Draw n=0, n=1 on the image overlay
final LUT lut = LutHelper.createLut(LutColour.FIRE_LIGHT);
// These are copied by the ROI
final float[] x = new float[1];
final float[] y = new float[1];
// Plot the intensity
final double[] intensity = new double[size];
final double[] rank = SimpleArrayUtils.newArray(size, 1, 1.0);
final int top = (settings.topN > 0) ? settings.topN : size;
final int size_1 = size - 1;
for (int i = 0; i < size; i++) {
intensity[i] = spots[i].intensity;
if (i < top) {
// Mark the top-N candidates on the overlay, coloured by rank
x[0] = spots[i].x + bounds.x + 0.5f;
y[0] = spots[i].y + bounds.y + 0.5f;
final Color c = LutHelper.getColour(lut, size_1 - i, size);
addRoi(0, o, x, y, 1, c, 2, 1);
}
}
final String title = TITLE + " Intensity";
final Plot plot = new Plot(title, "Rank", "Intensity");
plot.setColor(Color.blue);
plot.addPoints(rank, intensity, Plot.LINE);
if (settings.topN > 0 && settings.topN < size) {
// Mark the top-N cut-off on the plot
plot.setColor(Color.magenta);
plot.drawLine(settings.topN, 0, settings.topN, intensity[settings.topN - 1]);
}
if (settings.select > 0 && settings.select < size) {
// Highlight the user-selected candidate on the plot and the overlay
plot.setColor(Color.yellow);
final int index = settings.select - 1;
final double in = intensity[index];
plot.drawLine(settings.select, 0, settings.select, in);
x[0] = spots[index].x + bounds.x + 0.5f;
y[0] = spots[index].y + bounds.y + 0.5f;
final Color c = LutHelper.getColour(lut, size_1 - settings.select, size);
addRoi(0, o, x, y, 1, c, 3, 3);
plot.setColor(Color.black);
plot.addLabel(0, 0, "Selected spot intensity = " + MathUtils.rounded(in));
}
ImageJUtils.display(title, plot, 0, wo);
wo.tile();
}
imp.setOverlay(o);
}
Use of uk.ac.sussex.gdsc.smlm.model.camera.CameraModel in project GDSC-SMLM by aherbert:
class PeakFit, method checkCameraModel.
/**
 * Check the camera model covers the region of the source.
 *
 * <p>For an sCMOS calibration with known source bounds the model is cropped so that its origin is
 * relative to the source bounds origin, optionally initialised, and stored back in the fit
 * configuration. For any other camera type (or unknown bounds) there is nothing to check.
 *
 * @param fitConfig the fit config
 * @param sourceBounds the source bounds of the input image
 * @param cropBounds the crop bounds (relative to the input image)
 * @param initialise the initialise flag
 * @return true, if successful
 * @throws IllegalStateException if no camera model exists for the camera type
 */
private static boolean checkCameraModel(FitConfiguration fitConfig, Rectangle sourceBounds,
    Rectangle cropBounds, boolean initialise) {
  if (!fitConfig.getCalibrationReader().isScmos() || sourceBounds == null) {
    // No per-pixel model is required, or there are no bounds to check against
    return true;
  }
  // The camera model origin must be reset to be relative to the source bounds origin
  final CameraModel model =
      cropCameraModel(fitConfig.getCameraModel(), sourceBounds, cropBounds, true);
  if (model == null) {
    return false;
  }
  if (initialise && model instanceof PerPixelCameraModel) {
    ((PerPixelCameraModel) model).initialise();
  }
  fitConfig.setCameraModel(model);
  return true;
}
Use of uk.ac.sussex.gdsc.smlm.model.camera.CameraModel in project GDSC-SMLM by aherbert:
class PeakFit, method runMaximaFitting.
/**
 * Load the selected results from memory. All multiple frame results are added directly to the
 * results. All single frame results are added to a list of candidate maxima per frame and fitted
 * using the configured parameters.
 */
private void runMaximaFitting() {
final MemoryPeakResults memoryResults = ResultsManager.loadInputResults(settings.inputOption, false, DistanceUnit.PIXEL);
if (memoryResults == null || memoryResults.size() == 0) {
log("No results for maxima fitting");
return;
}
// The total frames (for progress reporting)
int totalFrames;
// A function that can convert a frame into a set of candidate indices
final IntFunction<int[]> frameToMaxIndices;
// The frames to process (should be sorted ascending)
Supplier<IntStream> frames;
// Support fitting all time frames with the same results.
if (settings.fitAcrossAllFrames) {
// Check if the input spans multiple frames
// NOTE(review): getSingleFrame == 0 appears to indicate the results are not from a single
// frame - confirm against getSingleFrame's contract
if (getSingleFrame(memoryResults) == 0) {
final int min = memoryResults.getMinFrame();
final int max = memoryResults.getMaxFrame();
// Warn the user before applying multi-frame candidates to every frame
final GenericDialog gd = new GenericDialog(TITLE);
gd.enableYesNoCancel();
gd.hideCancelButton();
ImageJUtils.addMessage(gd, "Candidate maxima for fitting span multiple frames (%d-%d).\n \n" + "Please confirm the %s are correct.", min, max, TextUtils.pleural(memoryResults.size(), "candidate"));
gd.showDialog();
if (!gd.wasOKed()) {
return;
}
}
// The same candidate indices are used for every frame
final int[] maxIndices = getMaxIndices(Arrays.asList(memoryResults.toArray()));
// This may not work correctly if using for example a series image source that
// incorrectly estimates the number of frames
totalFrames = source.getFrames();
frameToMaxIndices = frame -> maxIndices;
frames = () -> IntStream.rangeClosed(1, totalFrames);
} else {
// Build a map between the time-frame and the results in that frame.
// Only single-frame results (frame == endFrame) become fitting candidates
final Map<Integer, List<PeakResult>> map = Arrays.stream(memoryResults.toArray()).parallel().filter(peakResult -> peakResult.getFrame() == peakResult.getEndFrame()).collect(Collectors.groupingBy(PeakResult::getFrame));
totalFrames = map.size();
// Build a function that can convert a frame into a set of candidate indices
frameToMaxIndices = frame -> getMaxIndices(map.get(frame));
frames = () -> map.keySet().stream().mapToInt(Integer::intValue).sorted();
}
// Optionally collect the processed frames for display at the end
final ImageStack stack = (extraSettings.showProcessedFrames) ? new ImageStack(bounds.width, bounds.height) : null;
// Use the FitEngine to allow multi-threading.
final FitEngine engine = createFitEngine(getNumberOfThreads(totalFrames));
if (engine == null) {
return;
}
final int step = ImageJUtils.getProgressInterval(totalFrames);
// No crop bounds are supported.
// To pre-process data for noise estimation
final boolean isFitCameraCounts = fitConfig.isFitCameraCounts();
final CameraModel cameraModel = fitConfig.getCameraModel();
runTime = System.nanoTime();
final AtomicBoolean shutdown = new AtomicBoolean();
final String format = String.format("Slice: %%d / %d (Results=%%d)", totalFrames);
frames.get().forEachOrdered(slice -> {
// Stop submitting jobs if a previous slice failed or the user pressed escape
if (shutdown.get() || escapePressed()) {
shutdown.set(true);
return;
}
final float[] data = source.get(slice);
if (data == null) {
shutdown.set(true);
return;
}
// Periodic progress reporting
if (slice % step == 0) {
if (ImageJUtils.showStatus(() -> String.format(format, slice, results.size()))) {
IJ.showProgress(slice, totalFrames);
}
}
// We must pre-process the data before noise estimation
// Clone so the raw data passed to the fit job is left untouched
final float[] data2 = data.clone();
if (isFitCameraCounts) {
cameraModel.removeBias(data2);
} else {
cameraModel.removeBiasAndGain(data2);
}
final float noise = FitWorker.estimateNoise(data2, source.getWidth(), source.getHeight(), config.getNoiseMethod());
if (stack != null) {
stack.addSlice(String.format("Frame %d - %d", source.getStartFrameNumber(), source.getEndFrameNumber()), data);
}
// Get the frame number from the source to allow for interlaced and aggregated data
engine.run(createMaximaFitJob(frameToMaxIndices.apply(slice), source.getStartFrameNumber(), source.getEndFrameNumber(), data, bounds, noise));
});
// Wait for the engine to finish (discarding queued jobs if shutdown was requested)
engine.end(shutdown.get());
time = engine.getTime();
runTime = System.nanoTime() - runTime;
if (stack != null) {
ImageJUtils.display("Processed frames", stack);
}
showResults();
source.close();
}
Use of uk.ac.sussex.gdsc.smlm.model.camera.CameraModel in project GDSC-SMLM by aherbert:
class CameraModelManager, method runFilterImage.
/**
 * Interactively select an open grey-scale image and a camera model, then show a copy of the
 * image with the model's per-pixel bias and gain removed from every frame.
 */
private void runFilterImage() {
  // Select an image
  GenericDialog gd = new GenericDialog(TITLE);
  final String[] list = ImageJUtils.getImageList(ImageJUtils.GREY_SCALE);
  gd.addChoice("Image", list, pluginSettings.getImage());
  gd.showDialog();
  if (gd.wasCanceled()) {
    return;
  }
  final String image = gd.getNextChoice();
  pluginSettings.setImage(image);
  final ImagePlus imp = WindowManager.getImage(image);
  if (imp == null) {
    IJ.error(TITLE, "Failed to find image: " + image);
    return;
  }
  // Select the model
  gd = new GenericDialog(TITLE);
  final String[] models = listCameraModels(false);
  gd.addChoice("Model", models, pluginSettings.getSelected());
  gd.addHelp(HelpUrls.getUrl("camera-model-manager-filter"));
  gd.showDialog();
  if (gd.wasCanceled()) {
    return;
  }
  final String name = gd.getNextChoice();
  pluginSettings.setSelected(name);
  CameraModel cameraModel = load(name);
  if (cameraModel == null) {
    IJ.error(TITLE, "Failed to find camera data for model: " + name);
    return;
  }
  // Crop the model if appropriate
  try {
    Rectangle bounds;
    try {
      bounds = IJImageSource.getBounds(imp);
    } catch (final IllegalArgumentException ex) {
      // The image has no stored bounds: fall back to the origin from the plugin settings
      bounds = new Rectangle(pluginSettings.getOriginX(), pluginSettings.getOriginY(),
          imp.getWidth(), imp.getHeight());
    }
    cameraModel = PeakFit.cropCameraModel(cameraModel, bounds, null, false);
  } catch (final IllegalArgumentException ex) {
    IJ.error(TITLE, ex.getMessage());
    return;
  }
  // Remember the crop origin for the next invocation
  final Rectangle bounds = cameraModel.getBounds();
  pluginSettings.setOriginX(bounds.x);
  pluginSettings.setOriginY(bounds.y);
  // Reset origin for fast filtering
  cameraModel = cameraModel.copy();
  cameraModel.setOrigin(0, 0);
  // Filter all the frames
  final ImageSource source = new IJImageSource(imp);
  if (!source.open()) {
    IJ.error(TITLE, "Cannot open image: " + image);
    // Bug fix: previously execution fell through and attempted to filter an unopened source
    return;
  }
  final ImageStack stack = new ImageStack(imp.getWidth(), imp.getHeight());
  for (float[] data = source.next(); data != null; data = source.next()) {
    cameraModel.removeBiasAndGain(data);
    stack.addSlice(null, data);
  }
  // Release the source, consistent with other plugins in this project
  source.close();
  final ImagePlus imp2 = new ImagePlus(imp.getTitle() + " Filtered", stack);
  imp2.copyScale(imp);
  imp2.show();
}
Aggregations