Use of uk.ac.sussex.gdsc.core.annotation.Nullable in project GDSC-SMLM by aherbert.
Class PcPalmAnalysis, method computeAutoCorrelationCurveFht.
/**
* Compute the auto-correlation curve using the FHT (ImageJ's built-in Fast Hartley Transform).
* Computes the correlation image and then samples it at radii up to the specified maximum radius
* to get the average correlation at each radius.
*
* @param im the image
* @param wp the weighted image
* @param maxRadius the max radius
* @param nmPerPixel the nm per pixel
* @param density the density
* @return { distances[], gr[], gr_se[] }
*/
@Nullable
private double[][] computeAutoCorrelationCurveFht(ImageProcessor im, ImageProcessor wp,
    int maxRadius, double nmPerPixel, double density) {
log("Creating Hartley transforms");
final Fht fht2Im = padToFht2(im);
final Fht fht2W = padToFht2(wp);
if (fht2Im == null || fht2W == null) {
error("Unable to perform Hartley transform");
return null;
}
log("Performing correlation");
final FloatProcessor corrIm = computeAutoCorrelationFht(fht2Im);
final FloatProcessor corrW = computeAutoCorrelationFht(fht2W);
IJ.showProgress(1);
final int centre = corrIm.getHeight() / 2;
final Rectangle crop = new Rectangle(centre - maxRadius, centre - maxRadius, maxRadius * 2, maxRadius * 2);
if (settings.showCorrelationImages) {
displayCorrelation(corrIm, "Image correlation", crop);
displayCorrelation(corrW, "Window correlation", crop);
}
log("Normalising correlation");
final FloatProcessor correlation = normaliseCorrelation(corrIm, corrW, density);
if (settings.showCorrelationImages) {
displayCorrelation(correlation, "Normalised correlation", crop);
}
return computeRadialAverage(maxRadius, nmPerPixel, correlation);
}
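The radial averaging helper called on the last line of the method above is not shown in this snippet. The following is a minimal, self-contained sketch of what such a helper could look like, assuming a square correlation image with the zero-shift peak at its centre and 1-pixel-wide annuli; the method name and binning scheme are illustrative, not the project's actual computeRadialAverage implementation.
import ij.process.FloatProcessor;

/**
 * Sketch: average a correlation image over 1-pixel annuli around the centre,
 * returning { distances[], gr[], gr_se[] } as described in the javadoc above.
 */
static double[][] radialAverageSketch(int maxRadius, double nmPerPixel, FloatProcessor correlation) {
  final int centre = correlation.getHeight() / 2;
  final double[] sum = new double[maxRadius + 1];
  final double[] sumSq = new double[maxRadius + 1];
  final int[] count = new int[maxRadius + 1];
  for (int y = centre - maxRadius; y <= centre + maxRadius; y++) {
    for (int x = centre - maxRadius; x <= centre + maxRadius; x++) {
      final double dx = x - centre;
      final double dy = y - centre;
      final int r = (int) Math.round(Math.sqrt(dx * dx + dy * dy));
      if (r <= maxRadius) {
        final double v = correlation.getf(x, y);
        sum[r] += v;
        sumSq[r] += v * v;
        count[r]++;
      }
    }
  }
  final double[] distances = new double[maxRadius + 1];
  final double[] gr = new double[maxRadius + 1];
  final double[] grSe = new double[maxRadius + 1];
  for (int r = 0; r <= maxRadius; r++) {
    distances[r] = r * nmPerPixel;
    if (count[r] > 0) {
      gr[r] = sum[r] / count[r];
      // Standard error of the mean within the annulus
      final double variance = sumSq[r] / count[r] - gr[r] * gr[r];
      grSe[r] = Math.sqrt(Math.max(0, variance) / count[r]);
    }
  }
  return new double[][] {distances, gr, grSe};
}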
Use of uk.ac.sussex.gdsc.core.annotation.Nullable in project GDSC-SMLM by aherbert.
Class PcPalmFitting, method fitRandomModel.
/**
* Fits the correlation curve for r > 0 to the random model using the estimated density and
* precision. The fitted parameters must lie within a tolerance of the starting values.
*
* @param gr the correlation curve
* @param sigmaS The estimated precision
* @param proteinDensity The estimated protein density
* @param resultColour the result colour
* @return The fitted parameters [precision, density]
*/
@Nullable
private double[] fitRandomModel(double[][] gr, double sigmaS, double proteinDensity,
    String resultColour) {
final RandomModelFunction function = new RandomModelFunction();
randomModel = function;
ImageJUtils.log("Fitting %s: Estimated precision = %f nm, estimated protein density = %g um^-2", randomModel.getName(), sigmaS, proteinDensity * 1e6);
randomModel.setLogging(true);
for (int i = offset; i < gr[0].length; i++) {
// Only fit points above the estimated precision (scaled by the configured threshold);
// below this radius the curve is dominated by the stochastic repeat-localisation term.
if (gr[0][i] > sigmaS * settings.fitAboveEstimatedPrecision) {
randomModel.addPoint(gr[0][i], gr[1][i]);
}
}
final LevenbergMarquardtOptimizer optimizer = new LevenbergMarquardtOptimizer();
Optimum optimum;
try {
// @formatter:off
final LeastSquaresProblem problem = new LeastSquaresBuilder()
    .maxEvaluations(Integer.MAX_VALUE)
    .maxIterations(3000)
    .start(new double[] {sigmaS, proteinDensity})
    .target(function.getY())
    .weight(new DiagonalMatrix(function.getWeights()))
    .model(function, function::jacobian)
    .build();
// @formatter:on
optimum = optimizer.optimize(problem);
} catch (final TooManyIterationsException ex) {
ImageJUtils.log("Failed to fit %s: Too many iterations (%s)", randomModel.getName(), ex.getMessage());
return null;
} catch (final ConvergenceException ex) {
ImageJUtils.log("Failed to fit %s: %s", randomModel.getName(), ex.getMessage());
return null;
}
randomModel.setLogging(false);
final double[] parameters = optimum.getPoint().toArray();
// Ensure the width is positive
parameters[0] = Math.abs(parameters[0]);
final double ss = optimum.getResiduals().dotProduct(optimum.getResiduals());
final double totalSumSquares = MathUtils.getTotalSumOfSquares(randomModel.getY());
randomModelAdjustedR2 = MathUtils.getAdjustedCoefficientOfDetermination(ss, totalSumSquares,
    randomModel.size(), parameters.length);
final double fitSigmaS = parameters[0];
final double fitProteinDensity = parameters[1];
// Check the fitted parameters are within tolerance of the initial estimates
final double e1 = parameterDrift(sigmaS, fitSigmaS);
final double e2 = parameterDrift(proteinDensity, fitProteinDensity);
ImageJUtils.log(" %s fit: SS = %f. Adj.R^2 = %f. %d evaluations", randomModel.getName(), ss, randomModelAdjustedR2, optimum.getEvaluations());
ImageJUtils.log(" %s parameters:", randomModel.getName());
ImageJUtils.log(" Average precision = %s nm (%s%%)", MathUtils.rounded(fitSigmaS, 4), MathUtils.rounded(e1, 4));
ImageJUtils.log(" Average protein density = %s um^-2 (%s%%)", MathUtils.rounded(fitProteinDensity * 1e6, 4), MathUtils.rounded(e2, 4));
valid1 = true;
if (settings.fittingTolerance > 0
    && (Math.abs(e1) > settings.fittingTolerance || Math.abs(e2) > settings.fittingTolerance)) {
  ImageJUtils.log(
      " Failed to fit %s within tolerance (%s%%): Average precision = %f nm (%s%%),"
          + " average protein density = %g um^-2 (%s%%)",
      randomModel.getName(), MathUtils.rounded(settings.fittingTolerance, 4), fitSigmaS,
      MathUtils.rounded(e1, 4), fitProteinDensity * 1e6, MathUtils.rounded(e2, 4));
valid1 = false;
}
if (valid1) {
// ---------
// TODO - My data does not comply with this criterion.
// This could be due to the PC-PALM Molecule code limiting the nmPerPixel to fit the images in
// memory thus removing correlations at small r.
// It could also be due to the nature of the random simulations being 3D not 2D membranes
// as per the PC-PALM paper.
// ---------
// Evaluate g(r)protein where:
// g(r)peaks = g(r)protein + g(r)stoch
// g(r)peaks ~ 1 + g(r)stoch
// Verify that g(r)protein stays below the configured threshold (nominally 1.5) for all r > 0
final double[] grStoch = randomModel.value(parameters);
final double[] grPeaks = randomModel.getY();
final double[] radius = randomModel.getX();
for (int i = 0; i < grPeaks.length; i++) {
// Only evaluate above the fitted average precision
if (radius[i] < fitSigmaS) {
continue;
}
// Note the RandomModelFunction evaluates g(r)stoch + 1
final double grProtein = grPeaks[i] - (grStoch[i] - 1);
if (grProtein > settings.grProteinThreshold) {
// Failed fit
ImageJUtils.log(" Failed to fit %s: g(r)protein %s > %s @ r=%s", randomModel.getName(), MathUtils.rounded(grProtein, 4), MathUtils.rounded(settings.grProteinThreshold, 4), MathUtils.rounded(radius[i], 4));
valid1 = false;
}
}
}
addResult(randomModel.getName(), resultColour, valid1, fitSigmaS, fitProteinDensity, 0, 0, 0, 0,
    randomModelAdjustedR2);
return parameters;
}
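The parameterDrift helper used to compute e1 and e2 is not shown in this snippet. A plausible sketch, assuming it returns the signed percentage change of the fitted value relative to the starting estimate; the name and call sites are from the source, the body is an assumption:
/**
 * Sketch: signed percentage drift of a fitted parameter from its starting estimate.
 * Assumed implementation, consistent with the values being logged as "(%s%%)" above.
 */
private static double parameterDrift(double start, double fitted) {
  return 100.0 * (fitted - start) / start;
}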
Use of uk.ac.sussex.gdsc.core.annotation.Nullable in project GDSC-SMLM by aherbert.
Class BenchmarkFilterAnalysis, method scoreFilters.
@Nullable
private FilterScoreResult[] scoreFilters(FilterSet filterSet, boolean createTextResult) {
if (filterSet.size() == 0) {
return null;
}
initialiseScoring(filterSet);
FilterScoreResult[] scoreResults = new FilterScoreResult[filterSet.size()];
if (scoreResults.length == 1) {
// No need to multi-thread this
scoreResults[0] = scoreFilter((DirectFilter) filterSet.getFilters().get(0),
    DEFAULT_MINIMUM_FILTER, createTextResult, coordinateStore);
} else {
// Multi-thread the scoring of all the results
final int nThreads = getThreads(scoreResults.length);
final BlockingQueue<ScoreJob> jobs = new ArrayBlockingQueue<>(nThreads * 2);
final List<Thread> threads = new LinkedList<>();
final Ticker ticker = ImageJUtils.createTicker(scoreResults.length, nThreads, "Scoring Filters");
for (int i = 0; i < nThreads; i++) {
final ScoreWorker worker = new ScoreWorker(jobs, scoreResults, createTextResult,
    (coordinateStore == null) ? null : coordinateStore.newInstance(), ticker);
final Thread t = new Thread(worker);
threads.add(t);
t.start();
}
int index = 0;
for (final Filter filter : filterSet.getFilters()) {
if (IJ.escapePressed()) {
break;
}
put(jobs, new ScoreJob((DirectFilter) filter, index++));
}
// Finish all the worker threads by passing in a null job
for (int i = threads.size(); i-- != 0; ) {
put(jobs, new ScoreJob(null, -1));
}
// Wait for all to finish
for (final Thread thread : threads) {
try {
thread.join();
} catch (final InterruptedException ex) {
Logger.getLogger(BenchmarkFilterAnalysis.class.getName()).log(Level.WARNING, "Interrupted!", ex);
Thread.currentThread().interrupt();
throw new ConcurrentRuntimeException("Unexpected interruption", ex);
}
}
threads.clear();
ImageJUtils.finished();
// In case the threads were interrupted
if (ImageJUtils.isInterrupted()) {
scoreResults = null;
}
}
finishScoring();
return scoreResults;
}
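The scoring loop above is a bounded producer-consumer: the main thread puts ScoreJob instances on a blocking queue and finishes each worker by submitting a job with a null filter and index -1 (a "poison pill"). A minimal self-contained sketch of that pattern in plain Java follows; the Job record and the stand-in scoring are illustrative, not the project's ScoreWorker implementation.
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;

/** Minimal poison-pill producer-consumer sketch mirroring the scoring loop above. */
class PoisonPillDemo {
  /** A job with a negative index acts as the poison pill. */
  record Job(int index) {}

  public static void main(String[] args) throws InterruptedException {
    final BlockingQueue<Job> jobs = new ArrayBlockingQueue<>(4);
    final double[] results = new double[8];
    final Thread worker = new Thread(() -> {
      try {
        for (;;) {
          final Job job = jobs.take();
          if (job.index() < 0) {
            break; // Poison pill: stop consuming
          }
          // Stand-in for scoreFilter(...): store a dummy score at the job's index
          results[job.index()] = job.index() * 2.0;
        }
      } catch (final InterruptedException ex) {
        Thread.currentThread().interrupt();
      }
    });
    worker.start();
    for (int i = 0; i < results.length; i++) {
      jobs.put(new Job(i));
    }
    jobs.put(new Job(-1)); // One poison pill per worker thread
    worker.join();
  }
}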
Use of uk.ac.sussex.gdsc.core.annotation.Nullable in project GDSC-SMLM by aherbert.
Class BenchmarkFilterAnalysis, method showOverlay.
/**
* Show overlay.
*
* <ul>
* <li>Green = TP</li>
* <li>Red = FP</li>
* <li>Magenta = FP (Ignored from analysis)</li>
* <li>Yellow = FN</li>
* <li>Orange = FN (Outside border)</li>
* </ul>
*
* @param allAssignments The assignments generated from running the filter (or null)
* @param filter the filter
* @return The results from running the filter (or null)
*/
@Nullable
@SuppressWarnings("null")
private PreprocessedPeakResult[] showOverlay(List<FractionalAssignment[]> allAssignments,
    DirectFilter filter) {
final ImagePlus imp = CreateData.getImage();
if (imp == null) {
return null;
}
// Run the filter manually to get the results that pass.
if (allAssignments == null) {
allAssignments = getAssignments(filter);
}
final Overlay o = new Overlay();
// Do TP
final IntOpenHashSet actual = new IntOpenHashSet();
final IntOpenHashSet predicted = new IntOpenHashSet();
for (final FractionalAssignment[] assignments : allAssignments) {
if (assignments == null || assignments.length == 0) {
continue;
}
float[] tx = null;
float[] ty = null;
int count = 0;
if (settings.showTP) {
tx = new float[assignments.length];
ty = new float[assignments.length];
}
int frame = 0;
for (final FractionalAssignment assignment : assignments) {
final CustomFractionalAssignment c = (CustomFractionalAssignment) assignment;
final UniqueIdPeakResult peak = (UniqueIdPeakResult) c.peak;
final BasePreprocessedPeakResult spot = (BasePreprocessedPeakResult) c.peakResult;
actual.add(peak.uniqueId);
predicted.add(spot.getUniqueId());
frame = spot.getFrame();
if (settings.showTP) {
tx[count] = spot.getX();
ty[count++] = spot.getY();
}
}
if (settings.showTP) {
SpotFinderPreview.addRoi(frame, o, tx, ty, count, Color.green);
}
}
float[] x = new float[10];
float[] y = new float[x.length];
float[] x2 = new float[10];
float[] y2 = new float[x2.length];
// Do FP (all remaining results that are not a TP)
PreprocessedPeakResult[] filterResults = null;
if (settings.showFP) {
final MultiPathFilter multiPathFilter = createMpf(filter, DEFAULT_MINIMUM_FILTER);
filterResults = filterResults(multiPathFilter);
int frame = 0;
int c1 = 0;
int c2 = 0;
for (final PreprocessedPeakResult filterResult : filterResults) {
if (frame != filterResult.getFrame()) {
if (c1 != 0) {
SpotFinderPreview.addRoi(frame, o, x, y, c1, Color.red);
}
if (c2 != 0) {
SpotFinderPreview.addRoi(frame, o, x2, y2, c2, Color.magenta);
}
c1 = c2 = 0;
}
frame = filterResult.getFrame();
if (predicted.contains(filterResult.getUniqueId())) {
continue;
}
if (filterResult.ignore()) {
if (x2.length == c2) {
x2 = Arrays.copyOf(x2, c2 * 2);
y2 = Arrays.copyOf(y2, c2 * 2);
}
x2[c2] = filterResult.getX();
y2[c2++] = filterResult.getY();
} else {
if (x.length == c1) {
x = Arrays.copyOf(x, c1 * 2);
y = Arrays.copyOf(y, c1 * 2);
}
x[c1] = filterResult.getX();
y[c1++] = filterResult.getY();
}
}
if (c1 != 0) {
SpotFinderPreview.addRoi(frame, o, x, y, c1, Color.red);
}
if (c2 != 0) {
SpotFinderPreview.addRoi(frame, o, x2, y2, c2, Color.magenta);
}
}
// Do FN (all remaining peaks that have not been matched)
if (settings.showFN) {
final boolean checkBorder = (filterResult.analysisBorder != null && filterResult.analysisBorder.x != 0);
final float border;
final float xlimit;
final float ylimit;
if (checkBorder) {
final Rectangle lastAnalysisBorder = filterResult.analysisBorder;
border = lastAnalysisBorder.x;
xlimit = lastAnalysisBorder.x + lastAnalysisBorder.width;
ylimit = lastAnalysisBorder.y + lastAnalysisBorder.height;
} else {
border = xlimit = ylimit = 0;
}
// Add the results to the lists
actualCoordinates.forEach(new CustomIntObjectProcedure(x, y, x2, y2) {
@Override
public void accept(int frame, UniqueIdPeakResult[] results) {
int c1 = 0;
int c2 = 0;
if (x.length <= results.length) {
x = new float[results.length];
y = new float[results.length];
}
if (x2.length <= results.length) {
x2 = new float[results.length];
y2 = new float[results.length];
}
for (final UniqueIdPeakResult result : results) {
// Ignore those that were matched by TP
if (actual.contains(result.uniqueId)) {
continue;
}
if (checkBorder && outsideBorder(result, border, xlimit, ylimit)) {
x2[c2] = result.getXPosition();
y2[c2++] = result.getYPosition();
} else {
x[c1] = result.getXPosition();
y[c1++] = result.getYPosition();
}
}
if (c1 != 0) {
SpotFinderPreview.addRoi(frame, o, x, y, c1, Color.yellow);
}
if (c2 != 0) {
SpotFinderPreview.addRoi(frame, o, x2, y2, c2, Color.orange);
}
}
});
}
imp.setOverlay(o);
return filterResults;
}
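The outsideBorder check that routes false negatives to the orange category is not shown in this snippet. A plausible sketch, assuming it simply tests whether the localisation lies outside the rectangular analysis border; only the call site above is from the source, the body is an assumption:
/**
 * Sketch: true if the result lies outside the analysis border region.
 * Assumed implementation matching the call outsideBorder(result, border, xlimit, ylimit).
 */
private static boolean outsideBorder(UniqueIdPeakResult result, float border, float xlimit,
    float ylimit) {
  final float x = result.getXPosition();
  final float y = result.getYPosition();
  return x < border || x > xlimit || y < border || y > ylimit;
}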
Use of uk.ac.sussex.gdsc.core.annotation.Nullable in project GDSC-SMLM by aherbert.
Class CameraModelFisherInformationAnalysis, method createExponents.
@Nullable
private double[] createExponents() {
final int n = 1 + Math.max(0, settings.getSubDivisions());
final double minExp = settings.getMinExponent();
final double maxExp = settings.getMaxExponent();
final double size = (maxExp - minExp) * n + 1;
if (size > 100) {
final ExtendedGenericDialog gd = new ExtendedGenericDialog(TITLE);
gd.addMessage("Number of exponents is " + Math.ceil(size) + ". OK to continue?");
gd.showDialog();
if (gd.wasCanceled()) {
return null;
}
}
final double h = 1.0 / n;
final DoubleArrayList list = new DoubleArrayList();
for (int i = 0; ; i++) {
final double e = minExp + i * h;
list.add(e);
if (e >= settings.getMaxExponent()) {
break;
}
}
return list.toDoubleArray();
}
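For illustration, with subDivisions = 1 (so n = 2 and the step h = 0.5), a minimum exponent of 2 and a maximum of 4, the loop above produces {2.0, 2.5, 3.0, 3.5, 4.0}, and the dialog guard is skipped because size = 5. Below is a standalone sketch of the same stepping logic, using example values in place of the plugin's Settings object:
import java.util.ArrayList;
import java.util.List;

/** Standalone illustration of the exponent stepping used in createExponents above. */
class ExponentsDemo {
  public static void main(String[] args) {
    final double minExp = 2;    // stands in for settings.getMinExponent()
    final double maxExp = 4;    // stands in for settings.getMaxExponent()
    final int subDivisions = 1; // stands in for settings.getSubDivisions()
    final int n = 1 + Math.max(0, subDivisions);
    final double h = 1.0 / n;
    final List<Double> list = new ArrayList<>();
    for (int i = 0;; i++) {
      final double e = minExp + i * h;
      list.add(e);
      if (e >= maxExp) {
        break;
      }
    }
    // Prints [2.0, 2.5, 3.0, 3.5, 4.0]
    System.out.println(list);
  }
}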