Use of uk.ac.sussex.gdsc.smlm.results.filter.MultiPathFilter in project GDSC-SMLM by aherbert.
The class BenchmarkFilterAnalysis, method createResults.
/**
* Create peak results.
*
* @param filterResults The results from running the filter (or null)
* @param filter the filter
* @param withBorder true to exclude results within the spot filter border of the image edge (to match the FitWorker)
* @return the peak results
*/
private MemoryPeakResults createResults(PreprocessedPeakResult[] filterResults, DirectFilter filter, boolean withBorder) {
if (filterResults == null) {
final MultiPathFilter multiPathFilter = createMpf(filter, defaultMinimalFilter);
filterResults = filterResults(multiPathFilter);
}
final MemoryPeakResults newResults = new MemoryPeakResults();
newResults.copySettings(this.results);
newResults.setName(TITLE);
if (withBorder) {
// To produce the same results as the PeakFit plugin we must implement the border
// functionality used in the FitWorker. This respects the border of the spot filter.
final FitEngineConfiguration config = new FitEngineConfiguration();
updateAllConfiguration(config);
final MaximaSpotFilter spotFilter = config.createSpotFilter();
final int border = spotFilter.getBorder();
final Rectangle bounds = getBounds();
final int borderLimitX = bounds.x + bounds.width - border;
final int borderLimitY = bounds.y + bounds.height - border;
for (final PreprocessedPeakResult spot : filterResults) {
if (spot.getX() > border && spot.getX() < borderLimitX && spot.getY() > border && spot.getY() < borderLimitY) {
final double[] p = spot.toGaussian2DParameters();
final float[] params = new float[p.length];
for (int j = 0; j < p.length; j++) {
params[j] = (float) p[j];
}
final int frame = spot.getFrame();
final int origX = (int) p[Gaussian2DFunction.X_POSITION];
final int origY = (int) p[Gaussian2DFunction.Y_POSITION];
newResults.add(frame, origX, origY, 0, 0, spot.getNoise(), spot.getMeanSignal(), params, null);
}
}
} else {
for (final PreprocessedPeakResult spot : filterResults) {
final double[] p = spot.toGaussian2DParameters();
final float[] params = new float[p.length];
for (int j = 0; j < p.length; j++) {
params[j] = (float) p[j];
}
final int frame = spot.getFrame();
final int origX = (int) p[Gaussian2DFunction.X_POSITION];
final int origY = (int) p[Gaussian2DFunction.Y_POSITION];
newResults.add(frame, origX, origY, 0, 0, spot.getNoise(), spot.getMeanSignal(), params, null);
}
}
return newResults;
}
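The conversion body shared by both branches can be distilled into a stand-alone helper. This is a sketch only: the import paths are inferred from the package names quoted on this page, the helper name is illustrative, and the add(...) call mirrors the one used above.

import uk.ac.sussex.gdsc.smlm.function.gaussian.Gaussian2DFunction;
import uk.ac.sussex.gdsc.smlm.results.MemoryPeakResults;
import uk.ac.sussex.gdsc.smlm.results.filter.PreprocessedPeakResult;

/** Convert results that passed the MultiPathFilter into a MemoryPeakResults set (sketch). */
private static MemoryPeakResults toPeakResults(PreprocessedPeakResult[] accepted, String title) {
  final MemoryPeakResults out = new MemoryPeakResults();
  out.setName(title);
  for (final PreprocessedPeakResult spot : accepted) {
    // Convert the double Gaussian 2D parameters to the float array stored in a PeakResult
    final double[] p = spot.toGaussian2DParameters();
    final float[] params = new float[p.length];
    for (int j = 0; j < p.length; j++) {
      params[j] = (float) p[j];
    }
    out.add(spot.getFrame(), (int) p[Gaussian2DFunction.X_POSITION],
        (int) p[Gaussian2DFunction.Y_POSITION], 0, 0, spot.getNoise(), spot.getMeanSignal(),
        params, null);
  }
  return out;
}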
Use of uk.ac.sussex.gdsc.smlm.results.filter.MultiPathFilter in project GDSC-SMLM by aherbert.
The class BenchmarkFilterAnalysis, method showOverlay.
/**
* Show overlay.
*
* <ul>
* <li>Green = TP
* <li>Red = FP
* <li>Magenta = FP (Ignored from analysis)
* <li>Yellow = FN
* <li>Orange = FN (Outside border)
* </ul>
*
* @param allAssignments The assignments generated from running the filter (or null)
* @param filter the filter
* @return The results from running the filter (or null)
*/
@Nullable
@SuppressWarnings("null")
private PreprocessedPeakResult[] showOverlay(ArrayList<FractionalAssignment[]> allAssignments, DirectFilter filter) {
final ImagePlus imp = CreateData.getImage();
if (imp == null) {
return null;
}
// Run the filter manually to get the results that pass.
if (allAssignments == null) {
allAssignments = getAssignments(filter);
}
final Overlay o = new Overlay();
// Do TP
final TIntHashSet actual = new TIntHashSet();
final TIntHashSet predicted = new TIntHashSet();
for (final FractionalAssignment[] assignments : allAssignments) {
if (assignments == null || assignments.length == 0) {
continue;
}
float[] tx = null;
float[] ty = null;
int count = 0;
if (settings.showTP) {
tx = new float[assignments.length];
ty = new float[assignments.length];
}
int frame = 0;
for (int i = 0; i < assignments.length; i++) {
final CustomFractionalAssignment c = (CustomFractionalAssignment) assignments[i];
final UniqueIdPeakResult peak = (UniqueIdPeakResult) c.peak;
final BasePreprocessedPeakResult spot = (BasePreprocessedPeakResult) c.peakResult;
actual.add(peak.uniqueId);
predicted.add(spot.getUniqueId());
frame = spot.getFrame();
if (settings.showTP) {
tx[count] = spot.getX();
ty[count++] = spot.getY();
}
}
if (settings.showTP) {
SpotFinderPreview.addRoi(frame, o, tx, ty, count, Color.green);
}
}
float[] x = new float[10];
float[] y = new float[x.length];
float[] x2 = new float[10];
float[] y2 = new float[x2.length];
// Do FP (all remaining results that are not a TP)
PreprocessedPeakResult[] filterResults = null;
if (settings.showFP) {
final MultiPathFilter multiPathFilter = createMpf(filter, defaultMinimalFilter);
filterResults = filterResults(multiPathFilter);
int frame = 0;
int c1 = 0;
int c2 = 0;
for (int i = 0; i < filterResults.length; i++) {
if (frame != filterResults[i].getFrame()) {
if (c1 != 0) {
SpotFinderPreview.addRoi(frame, o, x, y, c1, Color.red);
}
if (c2 != 0) {
SpotFinderPreview.addRoi(frame, o, x2, y2, c2, Color.magenta);
}
c1 = c2 = 0;
}
frame = filterResults[i].getFrame();
if (predicted.contains(filterResults[i].getUniqueId())) {
continue;
}
if (filterResults[i].ignore()) {
if (x2.length == c2) {
x2 = Arrays.copyOf(x2, c2 * 2);
y2 = Arrays.copyOf(y2, c2 * 2);
}
x2[c2] = filterResults[i].getX();
y2[c2++] = filterResults[i].getY();
} else {
if (x.length == c1) {
x = Arrays.copyOf(x, c1 * 2);
y = Arrays.copyOf(y, c1 * 2);
}
x[c1] = filterResults[i].getX();
y[c1++] = filterResults[i].getY();
}
}
if (c1 != 0) {
SpotFinderPreview.addRoi(frame, o, x, y, c1, Color.red);
}
if (c2 != 0) {
SpotFinderPreview.addRoi(frame, o, x2, y2, c2, Color.magenta);
}
}
// Do FN (all remaining peaks that have not been matched)
if (settings.showFN) {
final boolean checkBorder = (filterResult.analysisBorder != null && filterResult.analysisBorder.x != 0);
final float border;
final float xlimit;
final float ylimit;
if (checkBorder) {
final Rectangle lastAnalysisBorder = filterResult.analysisBorder;
border = lastAnalysisBorder.x;
xlimit = lastAnalysisBorder.x + lastAnalysisBorder.width;
ylimit = lastAnalysisBorder.y + lastAnalysisBorder.height;
} else {
border = xlimit = ylimit = 0;
}
// Add the results to the lists
actualCoordinates.forEachEntry(new CustomTIntObjectProcedure(x, y, x2, y2) {
@Override
public boolean execute(int frame, UniqueIdPeakResult[] results) {
int c1 = 0;
int c2 = 0;
if (x.length <= results.length) {
x = new float[results.length];
y = new float[results.length];
}
if (x2.length <= results.length) {
x2 = new float[results.length];
y2 = new float[results.length];
}
for (int i = 0; i < results.length; i++) {
// Ignore those that were matched by TP
if (actual.contains(results[i].uniqueId)) {
continue;
}
if (checkBorder && outsideBorder(results[i], border, xlimit, ylimit)) {
x2[c2] = results[i].getXPosition();
y2[c2++] = results[i].getYPosition();
} else {
x[c1] = results[i].getXPosition();
y[c1++] = results[i].getYPosition();
}
}
if (c1 != 0) {
SpotFinderPreview.addRoi(frame, o, x, y, c1, Color.yellow);
}
if (c2 != 0) {
SpotFinderPreview.addRoi(frame, o, x2, y2, c2, Color.orange);
}
return true;
}
});
}
imp.setOverlay(o);
return filterResults;
}
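The colour coding above boils down to a classification of each filtered result against the set of matched unique IDs. A minimal sketch using the same Trove set type as the plugin (the method name and the returned array layout are illustrative):

import gnu.trove.set.hash.TIntHashSet;
import uk.ac.sussex.gdsc.smlm.results.filter.PreprocessedPeakResult;

/** Count TP (green), FP (red) and ignored FP (magenta) results against the matched IDs. */
private static int[] classify(PreprocessedPeakResult[] filterResults, TIntHashSet predicted) {
  int tp = 0;
  int fp = 0;
  int ignored = 0;
  for (final PreprocessedPeakResult r : filterResults) {
    if (predicted.contains(r.getUniqueId())) {
      tp++; // matched an actual result
    } else if (r.ignore()) {
      ignored++; // false positive excluded from the analysis
    } else {
      fp++; // false positive counted in the analysis
    }
  }
  return new int[] {tp, fp, ignored};
}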
Use of uk.ac.sussex.gdsc.smlm.results.filter.MultiPathFilter in project GDSC-SMLM by aherbert.
The class BenchmarkSpotFit, method summariseResults.
private void summariseResults(BenchmarkSpotFitResult spotFitResults, long runTime, final PreprocessedPeakResult[] preprocessedPeakResults, int uniqueIdCount, CandidateData candidateData, TIntObjectHashMap<List<Coordinate>> actualCoordinates) {
// Summarise the fitting results. N fits, N failures.
// Optimal match statistics if filtering is perfect (since fitting is not perfect).
final StoredDataStatistics distanceStats = new StoredDataStatistics();
final StoredDataStatistics depthStats = new StoredDataStatistics();
// Get stats for all fitted results and those that match
// Signal, SNR, Width, xShift, yShift, Precision
createFilterCriteria();
final StoredDataStatistics[][] stats = new StoredDataStatistics[3][filterCriteria.length];
for (int i = 0; i < stats.length; i++) {
for (int j = 0; j < stats[i].length; j++) {
stats[i][j] = new StoredDataStatistics();
}
}
final double nmPerPixel = simulationParameters.pixelPitch;
double tp = 0;
double fp = 0;
int failCtp = 0;
int failCfp = 0;
int ctp = 0;
int cfp = 0;
final int[] singleStatus = new int[FitStatus.values().length];
final int[] multiStatus = new int[singleStatus.length];
final int[] doubletStatus = new int[singleStatus.length];
final int[] multiDoubletStatus = new int[singleStatus.length];
// Easier to materialise the values since we have a lot of non-final variables to manipulate
final TIntObjectHashMap<FilterCandidates> fitResults = spotFitResults.fitResults;
final int[] frames = new int[fitResults.size()];
final FilterCandidates[] candidates = new FilterCandidates[fitResults.size()];
final int[] counter = new int[1];
fitResults.forEachEntry((frame, candidate) -> {
frames[counter[0]] = frame;
candidates[counter[0]] = candidate;
counter[0]++;
return true;
});
for (final FilterCandidates result : candidates) {
// Count the number of fit results that matched (tp) and did not match (fp)
tp += result.tp;
fp += result.fp;
for (int i = 0; i < result.fitResult.length; i++) {
if (result.spots[i].match) {
ctp++;
} else {
cfp++;
}
final MultiPathFitResult fitResult = result.fitResult[i];
if (singleStatus != null && result.spots[i].match) {
// Debugging reasons for fit failure
addStatus(singleStatus, fitResult.getSingleFitResult());
addStatus(multiStatus, fitResult.getMultiFitResult());
addStatus(doubletStatus, fitResult.getDoubletFitResult());
addStatus(multiDoubletStatus, fitResult.getMultiDoubletFitResult());
}
if (noMatch(fitResult)) {
if (result.spots[i].match) {
failCtp++;
} else {
failCfp++;
}
}
// We have multi-path results.
// We want statistics for:
// [0] all fitted spots
// [1] fitted spots that match a result
// [2] fitted spots that do not match a result
addToStats(fitResult.getSingleFitResult(), stats);
addToStats(fitResult.getMultiFitResult(), stats);
addToStats(fitResult.getDoubletFitResult(), stats);
addToStats(fitResult.getMultiDoubletFitResult(), stats);
}
// Statistics on spots that fit an actual result
for (int i = 0; i < result.match.length; i++) {
if (!result.match[i].isFitResult()) {
// For now just ignore the candidates that matched
continue;
}
final FitMatch fitMatch = (FitMatch) result.match[i];
distanceStats.add(fitMatch.distance * nmPerPixel);
depthStats.add(fitMatch.zdepth * nmPerPixel);
}
}
if (tp == 0) {
IJ.error(TITLE, "No fit results matched the simulation actual results");
return;
}
// Store data for computing correlation
final double[] i1 = new double[depthStats.getN()];
final double[] i2 = new double[i1.length];
final double[] is = new double[i1.length];
int ci = 0;
for (final FilterCandidates result : candidates) {
for (int i = 0; i < result.match.length; i++) {
if (!result.match[i].isFitResult()) {
// For now just ignore the candidates that matched
continue;
}
final FitMatch fitMatch = (FitMatch) result.match[i];
final ScoredSpot spot = result.spots[fitMatch.index];
i1[ci] = fitMatch.predictedSignal;
i2[ci] = fitMatch.actualSignal;
is[ci] = spot.spot.intensity;
ci++;
}
}
// We want to compute the Jaccard against the spot metric
// Filter the results using the multi-path filter
final ArrayList<MultiPathFitResults> multiPathResults = new ArrayList<>(fitResults.size());
for (int i = 0; i < frames.length; i++) {
final int frame = frames[i];
final MultiPathFitResult[] multiPathFitResults = candidates[i].fitResult;
final int totalCandidates = candidates[i].spots.length;
final List<Coordinate> list = actualCoordinates.get(frame);
final int nActual = (list == null) ? 0 : list.size();
multiPathResults.add(new MultiPathFitResults(frame, multiPathFitResults, totalCandidates, nActual));
}
// Score the results and count the number returned
final List<FractionalAssignment[]> assignments = new ArrayList<>();
final TIntHashSet set = new TIntHashSet(uniqueIdCount);
final FractionScoreStore scoreStore = set::add;
final MultiPathFitResults[] multiResults = multiPathResults.toArray(new MultiPathFitResults[0]);
// Filter with no filter
final MultiPathFilter mpf = new MultiPathFilter(new SignalFilter(0), null, multiFilter.residualsThreshold);
mpf.fractionScoreSubset(multiResults, NullFailCounter.INSTANCE, this.results.size(), assignments, scoreStore, CoordinateStoreFactory.create(0, 0, imp.getWidth(), imp.getHeight(), config.convertUsingHwhMax(config.getDuplicateDistanceParameter())));
final double[][] matchScores = new double[set.size()][];
int count = 0;
for (int i = 0; i < assignments.size(); i++) {
final FractionalAssignment[] a = assignments.get(i);
if (a == null) {
continue;
}
for (int j = 0; j < a.length; j++) {
final PreprocessedPeakResult r = ((PeakFractionalAssignment) a[j]).peakResult;
set.remove(r.getUniqueId());
final double precision = Math.sqrt(r.getLocationVariance());
final double signal = r.getSignal();
final double snr = r.getSnr();
final double width = r.getXSdFactor();
final double xShift = r.getXRelativeShift2();
final double yShift = r.getYRelativeShift2();
// These two are combined for filtering and the max is what matters.
final double shift = (xShift > yShift) ? Math.sqrt(xShift) : Math.sqrt(yShift);
final double eshift = Math.sqrt(xShift + yShift);
final double[] score = new double[8];
score[FILTER_SIGNAL] = signal;
score[FILTER_SNR] = snr;
score[FILTER_MIN_WIDTH] = width;
score[FILTER_MAX_WIDTH] = width;
score[FILTER_SHIFT] = shift;
score[FILTER_ESHIFT] = eshift;
score[FILTER_PRECISION] = precision;
score[FILTER_PRECISION + 1] = a[j].getScore();
matchScores[count++] = score;
}
}
// Add the rest
set.forEach(new CustomTIntProcedure(count) {
@Override
public boolean execute(int uniqueId) {
// This should not be null or something has gone wrong
final PreprocessedPeakResult r = preprocessedPeakResults[uniqueId];
if (r == null) {
throw new IllegalArgumentException("Missing result: " + uniqueId);
}
final double precision = Math.sqrt(r.getLocationVariance());
final double signal = r.getSignal();
final double snr = r.getSnr();
final double width = r.getXSdFactor();
final double xShift = r.getXRelativeShift2();
final double yShift = r.getYRelativeShift2();
// These two are combined for filtering and the max is what matters.
final double shift = (xShift > yShift) ? Math.sqrt(xShift) : Math.sqrt(yShift);
final double eshift = Math.sqrt(xShift + yShift);
final double[] score = new double[8];
score[FILTER_SIGNAL] = signal;
score[FILTER_SNR] = snr;
score[FILTER_MIN_WIDTH] = width;
score[FILTER_MAX_WIDTH] = width;
score[FILTER_SHIFT] = shift;
score[FILTER_ESHIFT] = eshift;
score[FILTER_PRECISION] = precision;
matchScores[count++] = score;
return true;
}
});
final FitConfiguration fitConfig = config.getFitConfiguration();
// Debug the reasons the fit failed
if (singleStatus != null) {
String name = PeakFit.getSolverName(fitConfig);
if (fitConfig.getFitSolver() == FitSolver.MLE && fitConfig.isModelCamera()) {
name += " Camera";
}
IJ.log("Failure counts: " + name);
printFailures("Single", singleStatus);
printFailures("Multi", multiStatus);
printFailures("Doublet", doubletStatus);
printFailures("Multi doublet", multiDoubletStatus);
}
final StringBuilder sb = new StringBuilder(300);
// Add information about the simulation
final double signal = simulationParameters.averageSignal;
final int n = results.size();
sb.append(imp.getStackSize()).append('\t');
final int w = imp.getWidth();
final int h = imp.getHeight();
sb.append(w).append('\t');
sb.append(h).append('\t');
sb.append(n).append('\t');
final double density = ((double) n / imp.getStackSize()) / (w * h) / (simulationParameters.pixelPitch * simulationParameters.pixelPitch / 1e6);
sb.append(MathUtils.rounded(density)).append('\t');
sb.append(MathUtils.rounded(signal)).append('\t');
sb.append(MathUtils.rounded(simulationParameters.sd)).append('\t');
sb.append(MathUtils.rounded(simulationParameters.pixelPitch)).append('\t');
sb.append(MathUtils.rounded(simulationParameters.depth)).append('\t');
sb.append(simulationParameters.fixedDepth).append('\t');
sb.append(MathUtils.rounded(simulationParameters.gain)).append('\t');
sb.append(MathUtils.rounded(simulationParameters.readNoise)).append('\t');
sb.append(MathUtils.rounded(simulationParameters.background)).append('\t');
sb.append(MathUtils.rounded(simulationParameters.noise)).append('\t');
if (simulationParameters.fullSimulation) {
// The total signal is spread over frames
}
sb.append(MathUtils.rounded(signal / simulationParameters.noise)).append('\t');
sb.append(MathUtils.rounded(simulationParameters.sd / simulationParameters.pixelPitch)).append('\t');
sb.append(spotFilter.getDescription());
// nP and nN is the fractional score of the spot candidates
addCount(sb, (double) candidateData.countPositive + candidateData.countNegative);
addCount(sb, candidateData.countPositive);
addCount(sb, candidateData.countNegative);
addCount(sb, candidateData.fractionPositive);
addCount(sb, candidateData.fractionNegative);
String name = PeakFit.getSolverName(fitConfig);
if (fitConfig.getFitSolver() == FitSolver.MLE && fitConfig.isModelCamera()) {
name += " Camera";
}
add(sb, name);
add(sb, config.getFitting());
spotFitResults.resultPrefix = sb.toString();
// Q. Should I add other fit configuration here?
// The fraction of positive and negative candidates that were included
add(sb, (100.0 * ctp) / candidateData.countPositive);
add(sb, (100.0 * cfp) / candidateData.countNegative);
// Score the fitting results compared to the original simulation.
// Score the candidate selection:
add(sb, ctp + cfp);
add(sb, ctp);
add(sb, cfp);
// TP are all candidates that can be matched to a spot
// FP are all candidates that cannot be matched to a spot
// FN = The number of missed spots
FractionClassificationResult match = new FractionClassificationResult(ctp, cfp, 0, simulationParameters.molecules - ctp);
add(sb, match.getRecall());
add(sb, match.getPrecision());
add(sb, match.getF1Score());
add(sb, match.getJaccard());
// Score the fitting results:
add(sb, failCtp);
add(sb, failCfp);
// TP are all fit results that can be matched to a spot
// FP are all fit results that cannot be matched to a spot
// FN = The number of missed spots
add(sb, tp);
add(sb, fp);
match = new FractionClassificationResult(tp, fp, 0, simulationParameters.molecules - tp);
add(sb, match.getRecall());
add(sb, match.getPrecision());
add(sb, match.getF1Score());
add(sb, match.getJaccard());
// Do it again but pretend we can perfectly filter all the false positives
// add(sb, tp);
match = new FractionClassificationResult(tp, 0, 0, simulationParameters.molecules - tp);
// Recall is unchanged
// Precision will be 100%
add(sb, match.getF1Score());
add(sb, match.getJaccard());
// The mean may be subject to extreme outliers so use the median
double median = distanceStats.getMedian();
add(sb, median);
final WindowOrganiser wo = new WindowOrganiser();
String label = String.format("Recall = %s. n = %d. Median = %s nm. SD = %s nm", MathUtils.rounded(match.getRecall()), distanceStats.getN(), MathUtils.rounded(median), MathUtils.rounded(distanceStats.getStandardDeviation()));
new HistogramPlotBuilder(TITLE, distanceStats, "Match Distance (nm)").setPlotLabel(label).show(wo);
median = depthStats.getMedian();
add(sb, median);
// Sort by spot intensity and produce correlation
double[] correlation = null;
double[] rankCorrelation = null;
double[] rank = null;
final FastCorrelator fastCorrelator = new FastCorrelator();
final ArrayList<Ranking> pc1 = new ArrayList<>();
final ArrayList<Ranking> pc2 = new ArrayList<>();
ci = 0;
if (settings.showCorrelation) {
final int[] indices = SimpleArrayUtils.natural(i1.length);
SortUtils.sortData(indices, is, settings.rankByIntensity, true);
correlation = new double[i1.length];
rankCorrelation = new double[i1.length];
rank = new double[i1.length];
for (final int ci2 : indices) {
fastCorrelator.add(Math.round(i1[ci2]), Math.round(i2[ci2]));
pc1.add(new Ranking(i1[ci2], ci));
pc2.add(new Ranking(i2[ci2], ci));
correlation[ci] = fastCorrelator.getCorrelation();
rankCorrelation[ci] = Correlator.correlation(rank(pc1), rank(pc2));
if (settings.rankByIntensity) {
rank[ci] = is[0] - is[ci];
} else {
rank[ci] = ci;
}
ci++;
}
} else {
for (int i = 0; i < i1.length; i++) {
fastCorrelator.add(Math.round(i1[i]), Math.round(i2[i]));
pc1.add(new Ranking(i1[i], i));
pc2.add(new Ranking(i2[i], i));
}
}
final double pearsonCorr = fastCorrelator.getCorrelation();
final double rankedCorr = Correlator.correlation(rank(pc1), rank(pc2));
// Get the regression
final SimpleRegression regression = new SimpleRegression(false);
for (int i = 0; i < pc1.size(); i++) {
regression.addData(pc1.get(i).value, pc2.get(i).value);
}
// final double intercept = regression.getIntercept();
final double slope = regression.getSlope();
if (settings.showCorrelation) {
String title = TITLE + " Intensity";
Plot plot = new Plot(title, "Candidate", "Spot");
final double[] limits1 = MathUtils.limits(i1);
final double[] limits2 = MathUtils.limits(i2);
plot.setLimits(limits1[0], limits1[1], limits2[0], limits2[1]);
label = String.format("Correlation=%s; Ranked=%s; Slope=%s", MathUtils.rounded(pearsonCorr), MathUtils.rounded(rankedCorr), MathUtils.rounded(slope));
plot.addLabel(0, 0, label);
plot.setColor(Color.red);
plot.addPoints(i1, i2, Plot.DOT);
if (slope > 1) {
plot.drawLine(limits1[0], limits1[0] * slope, limits1[1], limits1[1] * slope);
} else {
plot.drawLine(limits2[0] / slope, limits2[0], limits2[1] / slope, limits2[1]);
}
ImageJUtils.display(title, plot, wo);
title = TITLE + " Correlation";
plot = new Plot(title, "Spot Rank", "Correlation");
final double[] xlimits = MathUtils.limits(rank);
double[] ylimits = MathUtils.limits(correlation);
ylimits = MathUtils.limits(ylimits, rankCorrelation);
plot.setLimits(xlimits[0], xlimits[1], ylimits[0], ylimits[1]);
plot.setColor(Color.red);
plot.addPoints(rank, correlation, Plot.LINE);
plot.setColor(Color.blue);
plot.addPoints(rank, rankCorrelation, Plot.LINE);
plot.setColor(Color.black);
plot.addLabel(0, 0, label);
ImageJUtils.display(title, plot, wo);
}
add(sb, pearsonCorr);
add(sb, rankedCorr);
add(sb, slope);
label = String.format("n = %d. Median = %s nm", depthStats.getN(), MathUtils.rounded(median));
new HistogramPlotBuilder(TITLE, depthStats, "Match Depth (nm)").setRemoveOutliersOption(1).setPlotLabel(label).show(wo);
// Plot histograms of the stats on the same window
final double[] lower = new double[filterCriteria.length];
final double[] upper = new double[lower.length];
final double[] min = new double[lower.length];
final double[] max = new double[lower.length];
for (int i = 0; i < stats[0].length; i++) {
final double[] limits = showDoubleHistogram(stats, i, wo, matchScores);
lower[i] = limits[0];
upper[i] = limits[1];
min[i] = limits[2];
max[i] = limits[3];
}
// Reconfigure some of the range limits
// Make this a bit bigger
upper[FILTER_SIGNAL] *= 2;
// Make this a bit bigger
upper[FILTER_SNR] *= 2;
final double factor = 0.25;
if (lower[FILTER_MIN_WIDTH] != 0) {
// (assuming lower is less than 1)
upper[FILTER_MIN_WIDTH] = 1 - Math.max(0, factor * (1 - lower[FILTER_MIN_WIDTH]));
}
if (upper[FILTER_MIN_WIDTH] != 0) {
// (assuming upper is more than 1)
lower[FILTER_MAX_WIDTH] = 1 + Math.max(0, factor * (upper[FILTER_MAX_WIDTH] - 1));
}
// Round the ranges
final double[] interval = new double[stats[0].length];
interval[FILTER_SIGNAL] = SignalFilter.DEFAULT_INCREMENT;
interval[FILTER_SNR] = SnrFilter.DEFAULT_INCREMENT;
interval[FILTER_MIN_WIDTH] = WidthFilter2.DEFAULT_MIN_INCREMENT;
interval[FILTER_MAX_WIDTH] = WidthFilter.DEFAULT_INCREMENT;
interval[FILTER_SHIFT] = ShiftFilter.DEFAULT_INCREMENT;
interval[FILTER_ESHIFT] = EShiftFilter.DEFAULT_INCREMENT;
interval[FILTER_PRECISION] = PrecisionFilter.DEFAULT_INCREMENT;
interval[FILTER_ITERATIONS] = 0.1;
interval[FILTER_EVALUATIONS] = 0.1;
// Create a range increment
final double[] increment = new double[lower.length];
for (int i = 0; i < increment.length; i++) {
lower[i] = MathUtils.floor(lower[i], interval[i]);
upper[i] = MathUtils.ceil(upper[i], interval[i]);
final double range = upper[i] - lower[i];
// Allow clipping if the range is small compared to the min increment
double multiples = range / interval[i];
// Use 8 multiples for the equivalent of +/- 4 steps around the centre
if (multiples < 8) {
multiples = Math.ceil(multiples);
} else {
multiples = 8;
}
increment[i] = MathUtils.ceil(range / multiples, interval[i]);
if (i == FILTER_MIN_WIDTH) {
// Requires clipping based on the upper limit
lower[i] = upper[i] - increment[i] * multiples;
} else {
upper[i] = lower[i] + increment[i] * multiples;
}
}
for (int i = 0; i < stats[0].length; i++) {
lower[i] = MathUtils.round(lower[i]);
upper[i] = MathUtils.round(upper[i]);
min[i] = MathUtils.round(min[i]);
max[i] = MathUtils.round(max[i]);
increment[i] = MathUtils.round(increment[i]);
sb.append('\t').append(min[i]).append(':').append(lower[i]).append('-').append(upper[i]).append(':').append(max[i]);
}
// Disable some filters
increment[FILTER_SIGNAL] = Double.POSITIVE_INFINITY;
// increment[FILTER_SHIFT] = Double.POSITIVE_INFINITY;
increment[FILTER_ESHIFT] = Double.POSITIVE_INFINITY;
wo.tile();
sb.append('\t').append(TextUtils.nanosToString(runTime));
createTable().append(sb.toString());
if (settings.saveFilterRange) {
GUIFilterSettings filterSettings = SettingsManager.readGuiFilterSettings(0);
String filename = (silent) ? filterSettings.getFilterSetFilename() : ImageJUtils.getFilename("Filter_range_file", filterSettings.getFilterSetFilename());
if (filename == null) {
return;
}
// Remove extension to store the filename
filename = FileUtils.replaceExtension(filename, ".xml");
filterSettings = filterSettings.toBuilder().setFilterSetFilename(filename).build();
// Create a filter set using the ranges
final ArrayList<Filter> filters = new ArrayList<>(4);
// Create the multi-filter using the same precision type as that used during fitting.
// Currently no support for z-filter as 3D astigmatism fitting is experimental.
final PrecisionMethod precisionMethod = getPrecisionMethod((DirectFilter) multiFilter.getFilter());
Function<double[], Filter> generator;
if (precisionMethod == PrecisionMethod.POISSON_CRLB) {
generator = parameters -> new MultiFilterCrlb(parameters[FILTER_SIGNAL], (float) parameters[FILTER_SNR], parameters[FILTER_MIN_WIDTH], parameters[FILTER_MAX_WIDTH], parameters[FILTER_SHIFT], parameters[FILTER_ESHIFT], parameters[FILTER_PRECISION], 0f, 0f);
} else if (precisionMethod == PrecisionMethod.MORTENSEN) {
generator = parameters -> new MultiFilter(parameters[FILTER_SIGNAL], (float) parameters[FILTER_SNR], parameters[FILTER_MIN_WIDTH], parameters[FILTER_MAX_WIDTH], parameters[FILTER_SHIFT], parameters[FILTER_ESHIFT], parameters[FILTER_PRECISION], 0f, 0f);
} else {
// Default
generator = parameters -> new MultiFilter2(parameters[FILTER_SIGNAL], (float) parameters[FILTER_SNR], parameters[FILTER_MIN_WIDTH], parameters[FILTER_MAX_WIDTH], parameters[FILTER_SHIFT], parameters[FILTER_ESHIFT], parameters[FILTER_PRECISION], 0f, 0f);
}
filters.add(generator.apply(lower));
filters.add(generator.apply(upper));
filters.add(generator.apply(increment));
if (saveFilters(filename, filters)) {
SettingsManager.writeSettings(filterSettings);
}
// Create a filter set using the min/max and the initial bounds.
// Set sensible limits
min[FILTER_SIGNAL] = Math.max(min[FILTER_SIGNAL], 30);
max[FILTER_SNR] = Math.min(max[FILTER_SNR], 10000);
max[FILTER_PRECISION] = Math.min(max[FILTER_PRECISION], 100);
// Make the 4-set filters the same as the 3-set filters.
filters.clear();
filters.add(generator.apply(min));
filters.add(generator.apply(lower));
filters.add(generator.apply(upper));
filters.add(generator.apply(max));
saveFilters(FileUtils.replaceExtension(filename, ".4.xml"), filters);
}
spotFitResults.min = min;
spotFitResults.max = max;
}
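The range rounding near the end of the method (floor/ceil to the filter increment, then cap the span at 8 increments) is easier to follow in isolation. Below is a plain-Java sketch of the generic branch, assuming MathUtils.floor/ceil round to a multiple of the given interval and that upper > lower; the FILTER_MIN_WIDTH case above clips from the upper limit instead.

/** Round [lower, upper] to the filter increment and cap the range at 8 increments (sketch). */
private static double[] roundRange(double lower, double upper, double interval) {
  lower = Math.floor(lower / interval) * interval;
  upper = Math.ceil(upper / interval) * interval;
  final double range = upper - lower;
  // Use at most 8 multiples for the equivalent of +/- 4 steps around the centre
  double multiples = range / interval;
  multiples = (multiples < 8) ? Math.ceil(multiples) : 8;
  // Round the increment up to a multiple of the interval
  final double increment = Math.ceil(range / multiples / interval) * interval;
  return new double[] {lower, lower + increment * multiples, increment};
}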
Use of uk.ac.sussex.gdsc.smlm.results.filter.MultiPathFilter in project GDSC-SMLM by aherbert.
The class BenchmarkFilterAnalysis, method getAssignments.
/**
* Score the filter using the results list and the configured fail count.
*
* @param filter the filter
* @return The assignments generated from scoring the filter (one array per frame)
*/
private ArrayList<FractionalAssignment[]> getAssignments(DirectFilter filter) {
final MultiPathFilter multiPathFilter = createMpf(filter, defaultMinimalFilter);
final ArrayList<FractionalAssignment[]> allAssignments = new ArrayList<>(fitResultData.resultsList.length);
multiPathFilter.fractionScoreSubset(fitResultData.resultsList, createFailCounter(settings.failCount), fitResultData.countActual, allAssignments, null, coordinateStore);
return allAssignments;
}
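The same scoring call generalises to any pre-computed result set. Below is a sketch with the plugin fields passed in explicitly; the import locations and the helper signature are assumptions, while the MultiPathFilter constructor and the fractionScoreSubset arguments mirror the calls quoted on this page.

import java.util.ArrayList;
import java.util.List;
import uk.ac.sussex.gdsc.core.match.FractionalAssignment;
import uk.ac.sussex.gdsc.smlm.results.count.FailCounter;
import uk.ac.sussex.gdsc.smlm.results.filter.CoordinateStore;
import uk.ac.sussex.gdsc.smlm.results.filter.DirectFilter;
import uk.ac.sussex.gdsc.smlm.results.filter.MultiPathFilter;
import uk.ac.sussex.gdsc.smlm.results.filter.MultiPathFitResults;

/** Score a candidate filter against pre-computed multi-path fit results (sketch). */
private static List<FractionalAssignment[]> score(DirectFilter filter, DirectFilter minimalFilter,
    double residualsThreshold, MultiPathFitResults[] resultsList, FailCounter failCounter,
    int countActual, CoordinateStore coordinateStore) {
  final MultiPathFilter multiPathFilter =
      new MultiPathFilter(filter, minimalFilter, residualsThreshold);
  final ArrayList<FractionalAssignment[]> allAssignments = new ArrayList<>(resultsList.length);
  // A null score store: only the per-frame assignments are collected
  multiPathFilter.fractionScoreSubset(resultsList, failCounter, countActual, allAssignments,
      null, coordinateStore);
  return allAssignments;
}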
Use of uk.ac.sussex.gdsc.smlm.results.filter.MultiPathFilter in project GDSC-SMLM by aherbert.
The class FitWorker, method run.
/**
* Locate all the peaks in the image specified by the fit job.
*
* <p>WARNING: The FitWorker fits a sub-region of the data for each maxima. It then updates the
* FitResult parameters with an offset reflecting the position. The initialParameters are not
* updated with this offset unless configured.
*
* @param job The fit job
*/
public void run(FitJob job) {
final long start = System.nanoTime();
job.start();
this.job = job;
benchmarking = false;
this.slice = job.slice;
// Used for debugging
// if (logger == null) logger = new gdsc.fitting.logging.ConsoleLogger();
// Crop to the ROI
cc = new CoordinateConverter(job.bounds);
// Note if the bounds change for efficient caching.
newBounds = !cc.dataBounds.equals(lastBounds);
if (newBounds) {
lastBounds = cc.dataBounds;
}
final int width = cc.dataBounds.width;
final int height = cc.dataBounds.height;
borderLimitX = width - border;
borderLimitY = height - border;
data = job.data;
// This is tied to the input data
dataEstimator = null;
// relative to the global origin.
if (isFitCameraCounts) {
cameraModel.removeBias(cc.dataBounds, data);
} else {
cameraModel.removeBiasAndGain(cc.dataBounds, data);
}
final FitParameters params = job.getFitParameters();
this.endT = (params != null) ? params.endT : -1;
candidates = indentifySpots(job, width, height, params);
if (candidates.getSize() == 0) {
finishJob(job, start);
return;
}
fittedBackground = new Statistics();
// Always get the noise and store it with the results.
if (params != null && !Float.isNaN(params.noise)) {
noise = params.noise;
fitConfig.setNoise(noise);
} else if (calculateNoise) {
noise = estimateNoise();
fitConfig.setNoise(noise);
}
// System.out.printf("Slice %d : Noise = %g\n", slice, noise);
if (logger != null) {
LoggerUtils.log(logger, Level.INFO, "Slice %d: Noise = %f", slice, noise);
}
final ImageExtractor ie = ImageExtractor.wrap(data, width, height);
double[] region = null;
final float offsetx = cc.dataBounds.x;
final float offsety = cc.dataBounds.y;
if (params != null && params.fitTask == FitTask.MAXIMA_IDENITIFICATION) {
final float sd0 = (float) xsd;
final float sd1 = (float) ysd;
for (int n = 0; n < candidates.getSize(); n++) {
// Find the background using the perimeter of the data.
// TODO - Perhaps the Gaussian Fitter should be used to produce the initial estimates but no
// actual fit done.
// This would produce coords using the centre-of-mass.
final Candidate candidate = candidates.get(n);
int x = candidate.x;
int y = candidate.y;
final Rectangle regionBounds = ie.getBoxRegionBounds(x, y, fitting);
region = ie.crop(regionBounds, region);
final float b = (float) Gaussian2DFitter.getBackground(region, regionBounds.width, regionBounds.height, 1);
// Offset the coords to the centre of the pixel. Note the bounds will be added later.
// Subtract the background to get the amplitude estimate then convert to signal.
final float amplitude = candidate.intensity - ((relativeIntensity) ? 0 : b);
final float signal = (float) (amplitude * 2.0 * Math.PI * sd0 * sd1);
final int index = y * width + x;
x += offsetx;
y += offsety;
final float[] peakParams = new float[1 + Gaussian2DFunction.PARAMETERS_PER_PEAK];
peakParams[Gaussian2DFunction.BACKGROUND] = b;
peakParams[Gaussian2DFunction.SIGNAL] = signal;
peakParams[Gaussian2DFunction.X_POSITION] = x + 0.5f;
peakParams[Gaussian2DFunction.Y_POSITION] = y + 0.5f;
// peakParams[Gaussian2DFunction.Z_POSITION] = 0;
peakParams[Gaussian2DFunction.X_SD] = sd0;
peakParams[Gaussian2DFunction.Y_SD] = sd1;
// peakParams[Gaussian2DFunction.ANGLE] = 0;
final float u = (float) Gaussian2DPeakResultHelper.getMeanSignalUsingP05(signal, sd0, sd1);
sliceResults.add(createResult(x, y, data[index], 0, noise, u, peakParams, null, n, 0));
}
} else {
initialiseFitting();
// Smooth the data to provide initial background estimates
final float[] smoothedData = backgroundSmoothing.process(data, width, height);
final ImageExtractor ie2 = ImageExtractor.wrap(smoothedData, width, height);
// Perform the Gaussian fit
// The SpotFitter is used to create a dynamic MultiPathFitResult object.
// This is then passed to a multi-path filter. Thus the same fitting decision process
// is used when benchmarking and when running on actual data.
// Note: The SpotFitter labels each PreprocessedFitResult using the offset in the FitResult
// object.
// The initial params and deviations can then be extracted for the results that pass the
// filter.
MultiPathFilter filter;
final IMultiPathFitResults multiPathResults = this;
final SelectedResultStore store = this;
coordinateStore = coordinateStore.resize(cc.dataBounds.x, cc.dataBounds.y, width, height);
if (params != null && params.fitTask == FitTask.BENCHMARKING) {
// Run filtering as normal. However in the event that a candidate is missed or some
// results are not generated we must generate them. This is done in the complete(int)
// method if we set the benchmarking flag.
benchmarking = true;
// Filter using the benchmark filter
filter = params.benchmarkFilter;
if (filter == null) {
// Create a default filter using the standard FitConfiguration to ensure sensible fits
// are stored as the current slice results.
// Note the current fit configuration for benchmarking may have minimal filtering settings
// so we do not use that object.
final FitConfiguration tmp = new FitConfiguration();
final double residualsThreshold = 0.4;
filter = new MultiPathFilter(tmp, createMinimalFilter(PrecisionMethod.POISSON_CRLB), residualsThreshold);
}
} else {
// Filter using the configuration.
if (this.filter == null) {
// This can be cached. Q. Clone the config?
this.filter = new MultiPathFilter(fitConfig, createMinimalFilter(fitConfig.getPrecisionMethod()), config.getResidualsThreshold());
}
filter = this.filter;
}
// If we are benchmarking then do not generate results dynamically since we will store all
// results in the fit job.
dynamicMultiPathFitResult = new DynamicMultiPathFitResult(ie, ie2, !benchmarking);
// dynamicMultiPathFitResult = new DynamicMultiPathFitResult(ie, false);
// The local background computation is only required for the precision method.
// Also compute it when benchmarking.
localBackground = benchmarking || fitConfig.getPrecisionMethodValue() == PrecisionMethod.MORTENSEN_LOCAL_BACKGROUND_VALUE;
// Debug where the fit config may be different between benchmarking and fitting
if (slice == -1) {
fitConfig.initialise(1, 1, 1);
final String newLine = System.lineSeparator();
final String tmpdir = System.getProperty("java.io.tmpdir");
try (BufferedWriter writer = Files.newBufferedWriter(Paths.get(tmpdir, String.format("config.%d.txt", slice)))) {
JsonFormat.printer().appendTo(config.getFitEngineSettings(), writer);
} catch (final IOException ex) {
logger.log(Level.SEVERE, "Unable to write message", ex);
}
FileUtils.save(Paths.get(tmpdir, String.format("filter.%d.xml", slice)).toString(), filter.toXml());
// filter.setDebugFile(String.format("/tmp/fitWorker.%b.txt", benchmarking));
final StringBuilder sb = new StringBuilder();
sb.append((benchmarking) ? ((uk.ac.sussex.gdsc.smlm.results.filter.Filter) filter.getFilter()).toXml() : fitConfig.getSmartFilterString()).append(newLine);
sb.append(((uk.ac.sussex.gdsc.smlm.results.filter.Filter) filter.getMinimalFilter()).toXml()).append(newLine);
sb.append(filter.residualsThreshold).append(newLine);
sb.append(config.getFailuresLimit()).append(newLine);
sb.append(config.getDuplicateDistance()).append(":");
sb.append(config.getDuplicateDistanceAbsolute()).append(newLine);
if (spotFilter != null) {
sb.append(spotFilter.getDescription()).append(newLine);
}
sb.append("MaxCandidate = ").append(candidates.getSize()).append(newLine);
for (int i = 0, len = candidates.getLength(); i < len; i++) {
TextUtils.formatTo(sb, "Fit %d [%d,%d = %.1f]%n", i, candidates.get(i).x, candidates.get(i).y, candidates.get(i).intensity);
}
FileUtils.save(Paths.get(tmpdir, String.format("candidates.%d.xml", slice)).toString(), sb.toString());
}
FailCounter failCounter = config.getFailCounter();
if (!benchmarking && params != null && params.pass != null) {
// We want to store the pass/fail for consecutive candidates
params.pass = new boolean[candidates.getLength()];
failCounter = new RecordingFailCounter(params.pass, failCounter);
filter.select(multiPathResults, failCounter, true, store, coordinateStore);
} else {
filter.select(multiPathResults, failCounter, true, store, coordinateStore);
}
// Note: We go deeper into the candidate list than max candidate
// for any candidate where we have a good fit result as an estimate.
// Q. Should this only be for benchmarking?
// if (benchmarking)
// System.out.printf("Slice %d: %d + %d\n", slice, dynamicMultiPathFitResult.extra,
// candidates.getSize());
// Create the slice results
final CandidateList fitted = gridManager.getFittedCandidates();
sliceResults.ensureCapacity(fitted.getSize());
for (int i = 0; i < fitted.getSize(); i++) {
if (fitted.get(i).fit) {
sliceResults.push(createResult(offsetx, offsety, fitted.get(i)));
}
}
if (logger != null) {
LoggerUtils.log(logger, Level.INFO, "Slice %d: %d / %d = %s", slice, success, candidates.getSize(), TextUtils.pleural(fitted.getSize(), "result"));
}
}
this.results.addAll(sliceResults);
finishJob(job, start);
}
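In the maxima-identification branch above, no fit is performed: the background-subtracted candidate amplitude is converted to a total signal (amplitude * 2 * pi * sx * sy) and packed into a Gaussian parameter array, with a half-pixel offset placing the result at the pixel centre. A sketch of that packing step (the method name is illustrative; the constants are those used above):

import uk.ac.sussex.gdsc.smlm.function.gaussian.Gaussian2DFunction;

/** Build the Gaussian 2D parameters for a candidate maximum without fitting (sketch). */
private static float[] createMaximaParams(float background, float amplitude, int x, int y,
    float sx, float sy) {
  // Total signal of a 2D Gaussian = amplitude * 2 * pi * sx * sy
  final float signal = (float) (amplitude * 2.0 * Math.PI * sx * sy);
  final float[] peakParams = new float[1 + Gaussian2DFunction.PARAMETERS_PER_PEAK];
  peakParams[Gaussian2DFunction.BACKGROUND] = background;
  peakParams[Gaussian2DFunction.SIGNAL] = signal;
  peakParams[Gaussian2DFunction.X_POSITION] = x + 0.5f;
  peakParams[Gaussian2DFunction.Y_POSITION] = y + 0.5f;
  peakParams[Gaussian2DFunction.X_SD] = sx;
  peakParams[Gaussian2DFunction.Y_SD] = sy;
  return peakParams;
}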