Use of uk.ac.sussex.gdsc.core.utils.OpenHashMaps.CustomInt2ObjectOpenHashMap in project gdsc-smlm by aherbert.
From class BenchmarkSpotFit, method subsetFilterResults.
/**
* Extract all the filter candidates in order until the desired number of positives have been
* reached and the number of negatives matches the configured parameters.
*
* @param filterResults the filter results
* @param fitting the fitting
* @return The filter candidate data
*/
private CandidateData subsetFilterResults(CustomInt2ObjectOpenHashMap<FilterResult> filterResults, int fitting) {
// Convert fractions from percent
final double f1 = Math.min(1, settings.fractionPositives / 100.0);
final double f2 = settings.fractionNegativesAfterAllPositives / 100.0;
final int[] counter = new int[2];
final Int2ObjectOpenHashMap<FilterCandidates> subset = new Int2ObjectOpenHashMap<>();
final double[] fX = new double[2];
final int[] nX = new int[2];
filterResults.forEach((int frame, FilterResult result) -> {
// Determine the number of positives to find. This score may be fractional.
fX[0] += result.result.getTruePositives();
fX[1] += result.result.getFalsePositives();
// Q. Is r.result.getTruePositives() not the same as the total of r.spots[i].match?
// A. Not if we used fractional scoring.
int count = 0;
for (int i = result.spots.length; i-- > 0; ) {
if (result.spots[i].match) {
count++;
}
}
nX[0] += count;
nX[1] += (result.spots.length - count);
// Make the target use the fractional score
final double np2 = result.result.getTruePositives() * f1;
double targetP = np2;
// Set the target using the closest
if (f1 < 1) {
double np = 0;
double min = result.result.getTruePositives();
for (final ScoredSpot spot : result.spots) {
if (spot.match) {
np += spot.getScore();
final double d = np2 - np;
if (d < min) {
min = d;
targetP = np;
} else {
break;
}
}
}
}
// Count the number of positives & negatives
int pos = 0;
int neg = 0;
double np = 0;
double nn = 0;
boolean reachedTarget = false;
int countAfter = 0;
count = 0;
for (final ScoredSpot spot : result.spots) {
count++;
nn += spot.antiScore();
if (spot.match) {
np += spot.getScore();
pos++;
if (!reachedTarget) {
reachedTarget = np >= targetP;
}
} else {
neg++;
if (reachedTarget) {
countAfter++;
}
}
// Check if we have reached both limits
if (reachedTarget && countAfter >= settings.negativesAfterAllPositives && (double) neg / (neg + pos) >= f2) {
break;
}
}
counter[0] += count;
counter[1] += result.spots.length;
// We can use all the candidates but only fit up to count
subset.put(frame, new FilterCandidates(pos, neg, np, nn, result.spots, count));
});
// We now add all the candidates but only fit the first N
final int target = counter[0];
final int total = counter[1];
final int added = total - target;
if (extraOptions && added > target) {
ImageJUtils.log("Added %s to %s (total = %d)", TextUtils.pleural(added, "neighbour"), TextUtils.pleural(target, "candidate"), total);
}
return new CandidateData(subset, filterResult.id, fX[0], fX[1], nX[0], nX[1], settings, fitting);
}
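A minimal, self-contained sketch of the subsetting rule above, using a hypothetical Spot record in place of the project's FilterResult/ScoredSpot classes: candidates are kept in order until the fractional positive target is reached, then until both the configured count of trailing negatives and the negative fraction are satisfied.

import java.util.List;

/** Illustrative only: not the gdsc-smlm classes. */
class CandidateSubsetSketch {
  record Spot(boolean match, double score) {}

  /** Returns how many ordered candidates to keep for fitting. */
  static int subsetCount(List<Spot> spots, double targetPositiveScore,
      int negativesAfterAllPositives, double negativeFraction) {
    int pos = 0;
    int neg = 0;
    double np = 0;
    boolean reachedTarget = false;
    int negAfter = 0;
    int count = 0;
    for (final Spot spot : spots) {
      count++;
      if (spot.match()) {
        np += spot.score();
        pos++;
        reachedTarget = reachedTarget || np >= targetPositiveScore;
      } else {
        neg++;
        if (reachedTarget) {
          negAfter++;
        }
      }
      if (reachedTarget && negAfter >= negativesAfterAllPositives
          && (double) neg / (neg + pos) >= negativeFraction) {
        break;
      }
    }
    return count;
  }

  public static void main(String[] args) {
    final List<Spot> spots = List.of(
        new Spot(true, 0.9), new Spot(false, 0), new Spot(true, 0.6),
        new Spot(false, 0), new Spot(false, 0), new Spot(true, 0.2));
    // Keep candidates until a positive score of 1.0 is reached, then require at least
    // 1 trailing negative and a 40% negative fraction.
    System.out.println(subsetCount(spots, 1.0, 1, 0.4)); // prints 4
  }
}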
Use of uk.ac.sussex.gdsc.core.utils.OpenHashMaps.CustomInt2ObjectOpenHashMap in project gdsc-smlm by aherbert.
From class BenchmarkSpotFit, method summariseResults.
private void summariseResults(BenchmarkSpotFitResult spotFitResults, long runTime,
    final PreprocessedPeakResult[] preprocessedPeakResults, int uniqueIdCount,
    CandidateData candidateData, Int2ObjectOpenHashMap<List<Coordinate>> actualCoordinates) {
// Summarise the fitting results. N fits, N failures.
// Optimal match statistics if filtering is perfect (since fitting is not perfect).
final StoredDataStatistics distanceStats = new StoredDataStatistics();
final StoredDataStatistics depthStats = new StoredDataStatistics();
// Get stats for all fitted results and those that match
// Signal, SNR, Width, xShift, yShift, Precision
createFilterCriteria();
final StoredDataStatistics[][] stats = new StoredDataStatistics[3][filterCriteria.length];
for (int i = 0; i < stats.length; i++) {
for (int j = 0; j < stats[i].length; j++) {
stats[i][j] = new StoredDataStatistics();
}
}
final double nmPerPixel = simulationParameters.pixelPitch;
double tp = 0;
double fp = 0;
int failCtp = 0;
int failCfp = 0;
int ctp = 0;
int cfp = 0;
final int[] singleStatus = new int[FitStatus.values().length];
final int[] multiStatus = new int[singleStatus.length];
final int[] doubletStatus = new int[singleStatus.length];
final int[] multiDoubletStatus = new int[singleStatus.length];
// Easier to materialise the values since we have a lot of non final variables to manipulate
final CustomInt2ObjectOpenHashMap<FilterCandidates> fitResults = spotFitResults.fitResults;
final int[] frames = new int[fitResults.size()];
final FilterCandidates[] candidates = new FilterCandidates[fitResults.size()];
final int[] counter = new int[1];
fitResults.forEach((int key, FilterCandidates value) -> {
frames[counter[0]] = key;
candidates[counter[0]] = value;
counter[0]++;
});
for (final FilterCandidates result : candidates) {
// Count the number of fit results that matched (tp) and did not match (fp)
tp += result.tp;
fp += result.fp;
for (int i = 0; i < result.fitResult.length; i++) {
if (result.spots[i].match) {
ctp++;
} else {
cfp++;
}
final MultiPathFitResult fitResult = result.fitResult[i];
if (singleStatus != null && result.spots[i].match) {
// Debugging reasons for fit failure
addStatus(singleStatus, fitResult.getSingleFitResult());
addStatus(multiStatus, fitResult.getMultiFitResult());
addStatus(doubletStatus, fitResult.getDoubletFitResult());
addStatus(multiDoubletStatus, fitResult.getMultiDoubletFitResult());
}
if (noMatch(fitResult)) {
if (result.spots[i].match) {
failCtp++;
} else {
failCfp++;
}
}
// We have multi-path results.
// We want statistics for:
// [0] all fitted spots
// [1] fitted spots that match a result
// [2] fitted spots that do not match a result
addToStats(fitResult.getSingleFitResult(), stats);
addToStats(fitResult.getMultiFitResult(), stats);
addToStats(fitResult.getDoubletFitResult(), stats);
addToStats(fitResult.getMultiDoubletFitResult(), stats);
}
// Statistics on spots that fit an actual result
for (int i = 0; i < result.match.length; i++) {
if (!result.match[i].isFitResult()) {
// For now just ignore the candidates that matched
continue;
}
final FitMatch fitMatch = (FitMatch) result.match[i];
distanceStats.add(fitMatch.distance * nmPerPixel);
depthStats.add(fitMatch.zdepth * nmPerPixel);
}
}
if (tp == 0) {
IJ.error(TITLE, "No fit results matched the simulation actual results");
return;
}
// Store data for computing correlation
final double[] i1 = new double[depthStats.getN()];
final double[] i2 = new double[i1.length];
final double[] is = new double[i1.length];
int ci = 0;
for (final FilterCandidates result : candidates) {
for (int i = 0; i < result.match.length; i++) {
if (!result.match[i].isFitResult()) {
// For now just ignore the candidates that matched
continue;
}
final FitMatch fitMatch = (FitMatch) result.match[i];
final ScoredSpot spot = result.spots[fitMatch.index];
i1[ci] = fitMatch.predictedSignal;
i2[ci] = fitMatch.actualSignal;
is[ci] = spot.spot.intensity;
ci++;
}
}
// We want to compute the Jaccard against the spot metric
// Filter the results using the multi-path filter
final ArrayList<MultiPathFitResults> multiPathResults = new ArrayList<>(fitResults.size());
for (int i = 0; i < frames.length; i++) {
final int frame = frames[i];
final MultiPathFitResult[] multiPathFitResults = candidates[i].fitResult;
final int totalCandidates = candidates[i].spots.length;
final List<Coordinate> list = actualCoordinates.get(frame);
final int nActual = (list == null) ? 0 : list.size();
multiPathResults.add(new MultiPathFitResults(frame, multiPathFitResults, totalCandidates, nActual));
}
// Score the results and count the number returned
final List<FractionalAssignment[]> assignments = new ArrayList<>();
final IntOpenHashSet set = new IntOpenHashSet(uniqueIdCount);
final FractionScoreStore scoreStore = set::add;
final MultiPathFitResults[] multiResults = multiPathResults.toArray(new MultiPathFitResults[0]);
// Filter with no filter
final MultiPathFilter mpf = new MultiPathFilter(new SignalFilter(0), null, multiFilter.residualsThreshold);
mpf.fractionScoreSubset(multiResults, NullFailCounter.INSTANCE, this.results.size(), assignments,
    scoreStore, CoordinateStoreFactory.create(0, 0, imp.getWidth(), imp.getHeight(),
        config.convertUsingHwhMax(config.getDuplicateDistanceParameter())));
final double[][] matchScores = new double[set.size()][];
int count = 0;
for (final FractionalAssignment[] assignmentArray : assignments) {
if (assignmentArray == null) {
continue;
}
for (final FractionalAssignment assignment : assignmentArray) {
final PreprocessedPeakResult r = ((PeakFractionalAssignment) assignment).peakResult;
set.remove(r.getUniqueId());
final double precision = Math.sqrt(r.getLocationVariance());
final double signal = r.getSignal();
final double snr = r.getSnr();
final double width = r.getXSdFactor();
final double xShift = r.getXRelativeShift2();
final double yShift = r.getYRelativeShift2();
// xShift and yShift are squared relative shifts: the shift filter uses the maximum,
// while the extended shift (eshift) combines both.
final double shift = (xShift > yShift) ? Math.sqrt(xShift) : Math.sqrt(yShift);
final double eshift = Math.sqrt(xShift + yShift);
final double[] score = new double[8];
score[FILTER_SIGNAL] = signal;
score[FILTER_SNR] = snr;
score[FILTER_MIN_WIDTH] = width;
score[FILTER_MAX_WIDTH] = width;
score[FILTER_SHIFT] = shift;
score[FILTER_ESHIFT] = eshift;
score[FILTER_PRECISION] = precision;
score[FILTER_PRECISION + 1] = assignment.getScore();
matchScores[count++] = score;
}
}
// Add the rest
set.forEach(new CustomIntConsumer(count) {
@Override
public void accept(int uniqueId) {
// This should not be null or something has gone wrong
final PreprocessedPeakResult r = preprocessedPeakResults[uniqueId];
if (r == null) {
throw new IllegalArgumentException("Missing result: " + uniqueId);
}
final double precision = Math.sqrt(r.getLocationVariance());
final double signal = r.getSignal();
final double snr = r.getSnr();
final double width = r.getXSdFactor();
final double xShift = r.getXRelativeShift2();
final double yShift = r.getYRelativeShift2();
// xShift and yShift are squared relative shifts: the shift filter uses the maximum,
// while the extended shift (eshift) combines both.
final double shift = (xShift > yShift) ? Math.sqrt(xShift) : Math.sqrt(yShift);
final double eshift = Math.sqrt(xShift + yShift);
final double[] score = new double[8];
score[FILTER_SIGNAL] = signal;
score[FILTER_SNR] = snr;
score[FILTER_MIN_WIDTH] = width;
score[FILTER_MAX_WIDTH] = width;
score[FILTER_SHIFT] = shift;
score[FILTER_ESHIFT] = eshift;
score[FILTER_PRECISION] = precision;
matchScores[count++] = score;
}
});
final FitConfiguration fitConfig = config.getFitConfiguration();
// Debug the reasons the fit failed
if (singleStatus != null) {
String name = PeakFit.getSolverName(fitConfig);
if (fitConfig.getFitSolver() == FitSolver.MLE && fitConfig.isModelCamera()) {
name += " Camera";
}
IJ.log("Failure counts: " + name);
printFailures("Single", singleStatus);
printFailures("Multi", multiStatus);
printFailures("Doublet", doubletStatus);
printFailures("Multi doublet", multiDoubletStatus);
}
final StringBuilder sb = new StringBuilder(300);
// Add information about the simulation
final double signal = simulationParameters.averageSignal;
final int n = results.size();
final int w = imp.getWidth();
final int h = imp.getHeight();
sb.append(imp.getStackSize()).append('\t').append(w).append('\t').append(h).append('\t').append(n).append('\t');
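// Localisation density in molecules / frame / um^2 (pixelPitch is assumed to be in nm,
// as for nmPerPixel above; pixelPitch^2 / 1e6 converts pixel area to um^2)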
final double density = ((double) n / imp.getStackSize()) / (w * h) / (simulationParameters.pixelPitch * simulationParameters.pixelPitch / 1e6);
sb.append(MathUtils.rounded(density)).append('\t')
    .append(MathUtils.rounded(signal)).append('\t')
    .append(MathUtils.rounded(simulationParameters.sd)).append('\t')
    .append(MathUtils.rounded(simulationParameters.pixelPitch)).append('\t')
    .append(MathUtils.rounded(simulationParameters.depth)).append('\t')
    .append(simulationParameters.fixedDepth).append('\t')
    .append(MathUtils.rounded(simulationParameters.gain)).append('\t')
    .append(MathUtils.rounded(simulationParameters.readNoise)).append('\t')
    .append(MathUtils.rounded(simulationParameters.background)).append('\t')
    .append(MathUtils.rounded(simulationParameters.noise)).append('\t');
if (simulationParameters.fullSimulation) {
// The total signal is spread over frames
}
sb.append(MathUtils.rounded(signal / simulationParameters.noise)).append('\t')
    .append(MathUtils.rounded(simulationParameters.sd / simulationParameters.pixelPitch)).append('\t')
    .append(spotFilter.getDescription());
// nP and nN are the fractional scores of the spot candidates
addCount(sb, (double) candidateData.countPositive + candidateData.countNegative);
addCount(sb, candidateData.countPositive);
addCount(sb, candidateData.countNegative);
addCount(sb, candidateData.fractionPositive);
addCount(sb, candidateData.fractionNegative);
String name = PeakFit.getSolverName(fitConfig);
if (fitConfig.getFitSolver() == FitSolver.MLE && fitConfig.isModelCamera()) {
name += " Camera";
}
add(sb, name);
add(sb, config.getFitting());
spotFitResults.resultPrefix = sb.toString();
// Q. Should I add other fit configuration here?
// The fraction of positive and negative candidates that were included
add(sb, (100.0 * ctp) / candidateData.countPositive);
add(sb, (100.0 * cfp) / candidateData.countNegative);
// Score the fitting results compared to the original simulation.
// Score the candidate selection:
add(sb, ctp + cfp);
add(sb, ctp);
add(sb, cfp);
// TP are all candidates that can be matched to a spot
// FP are all candidates that cannot be matched to a spot
// FN = The number of missed spots
FractionClassificationResult match = new FractionClassificationResult(ctp, cfp, 0, simulationParameters.molecules - ctp);
add(sb, match.getRecall());
add(sb, match.getPrecision());
add(sb, match.getF1Score());
add(sb, match.getJaccard());
// Score the fitting results:
add(sb, failCtp);
add(sb, failCfp);
// TP are all fit results that can be matched to a spot
// FP are all fit results that cannot be matched to a spot
// FN = The number of missed spots
add(sb, tp);
add(sb, fp);
match = new FractionClassificationResult(tp, fp, 0, simulationParameters.molecules - tp);
add(sb, match.getRecall());
add(sb, match.getPrecision());
add(sb, match.getF1Score());
add(sb, match.getJaccard());
// Do it again but pretend we can perfectly filter all the false positives
// add(sb, tp);
match = new FractionClassificationResult(tp, 0, 0, simulationParameters.molecules - tp);
// Recall is unchanged
// Precision will be 100%
add(sb, match.getF1Score());
add(sb, match.getJaccard());
// The mean may be subject to extreme outliers so use the median
double median = distanceStats.getMedian();
add(sb, median);
final WindowOrganiser wo = new WindowOrganiser();
String label = String.format("Recall = %s. n = %d. Median = %s nm. SD = %s nm", MathUtils.rounded(match.getRecall()), distanceStats.getN(), MathUtils.rounded(median), MathUtils.rounded(distanceStats.getStandardDeviation()));
new HistogramPlotBuilder(TITLE, distanceStats, "Match Distance (nm)").setPlotLabel(label).show(wo);
median = depthStats.getMedian();
add(sb, median);
// Sort by spot intensity and produce correlation
double[] correlation = null;
double[] rankCorrelation = null;
double[] rank = null;
final FastCorrelator fastCorrelator = new FastCorrelator();
final ArrayList<Ranking> pc1 = new ArrayList<>();
final ArrayList<Ranking> pc2 = new ArrayList<>();
ci = 0;
if (settings.showCorrelation) {
final int[] indices = SimpleArrayUtils.natural(i1.length);
SortUtils.sortData(indices, is, settings.rankByIntensity, true);
correlation = new double[i1.length];
rankCorrelation = new double[i1.length];
rank = new double[i1.length];
for (final int ci2 : indices) {
fastCorrelator.add(Math.round(i1[ci2]), Math.round(i2[ci2]));
pc1.add(new Ranking(i1[ci2], ci));
pc2.add(new Ranking(i2[ci2], ci));
correlation[ci] = fastCorrelator.getCorrelation();
rankCorrelation[ci] = Correlator.correlation(rank(pc1), rank(pc2));
if (settings.rankByIntensity) {
rank[ci] = is[0] - is[ci];
} else {
rank[ci] = ci;
}
ci++;
}
} else {
for (int i = 0; i < i1.length; i++) {
fastCorrelator.add(Math.round(i1[i]), Math.round(i2[i]));
pc1.add(new Ranking(i1[i], i));
pc2.add(new Ranking(i2[i], i));
}
}
final double pearsonCorr = fastCorrelator.getCorrelation();
final double rankedCorr = Correlator.correlation(rank(pc1), rank(pc2));
// Get the regression
final SimpleRegression regression = new SimpleRegression(false);
for (int i = 0; i < pc1.size(); i++) {
regression.addData(pc1.get(i).value, pc2.get(i).value);
}
// final double intercept = regression.getIntercept();
final double slope = regression.getSlope();
if (settings.showCorrelation) {
String title = TITLE + " Intensity";
Plot plot = new Plot(title, "Candidate", "Spot");
final double[] limits1 = MathUtils.limits(i1);
final double[] limits2 = MathUtils.limits(i2);
plot.setLimits(limits1[0], limits1[1], limits2[0], limits2[1]);
label = String.format("Correlation=%s; Ranked=%s; Slope=%s", MathUtils.rounded(pearsonCorr), MathUtils.rounded(rankedCorr), MathUtils.rounded(slope));
plot.addLabel(0, 0, label);
plot.setColor(Color.red);
plot.addPoints(i1, i2, Plot.DOT);
if (slope > 1) {
plot.drawLine(limits1[0], limits1[0] * slope, limits1[1], limits1[1] * slope);
} else {
plot.drawLine(limits2[0] / slope, limits2[0], limits2[1] / slope, limits2[1]);
}
ImageJUtils.display(title, plot, wo);
title = TITLE + " Correlation";
plot = new Plot(title, "Spot Rank", "Correlation");
final double[] xlimits = MathUtils.limits(rank);
double[] ylimits = MathUtils.limits(correlation);
ylimits = MathUtils.limits(ylimits, rankCorrelation);
plot.setLimits(xlimits[0], xlimits[1], ylimits[0], ylimits[1]);
plot.setColor(Color.red);
plot.addPoints(rank, correlation, Plot.LINE);
plot.setColor(Color.blue);
plot.addPoints(rank, rankCorrelation, Plot.LINE);
plot.setColor(Color.black);
plot.addLabel(0, 0, label);
ImageJUtils.display(title, plot, wo);
}
add(sb, pearsonCorr);
add(sb, rankedCorr);
add(sb, slope);
label = String.format("n = %d. Median = %s nm", depthStats.getN(), MathUtils.rounded(median));
new HistogramPlotBuilder(TITLE, depthStats, "Match Depth (nm)").setRemoveOutliersOption(1).setPlotLabel(label).show(wo);
// Plot histograms of the stats on the same window
final double[] lower = new double[filterCriteria.length];
final double[] upper = new double[lower.length];
final double[] min = new double[lower.length];
final double[] max = new double[lower.length];
for (int i = 0; i < stats[0].length; i++) {
final double[] limits = showDoubleHistogram(stats, i, wo, matchScores);
lower[i] = limits[0];
upper[i] = limits[1];
min[i] = limits[2];
max[i] = limits[3];
}
// Reconfigure some of the range limits
// Make this a bit bigger
upper[FILTER_SIGNAL] *= 2;
// Make this a bit bigger
upper[FILTER_SNR] *= 2;
final double factor = 0.25;
if (lower[FILTER_MIN_WIDTH] != 0) {
// (assuming lower is less than 1)
upper[FILTER_MIN_WIDTH] = 1 - Math.max(0, factor * (1 - lower[FILTER_MIN_WIDTH]));
}
if (upper[FILTER_MIN_WIDTH] != 0) {
// (assuming upper is more than 1)
lower[FILTER_MAX_WIDTH] = 1 + Math.max(0, factor * (upper[FILTER_MAX_WIDTH] - 1));
}
// Round the ranges
final double[] interval = new double[stats[0].length];
interval[FILTER_SIGNAL] = SignalFilter.DEFAULT_INCREMENT;
interval[FILTER_SNR] = SnrFilter.DEFAULT_INCREMENT;
interval[FILTER_MIN_WIDTH] = WidthFilter2.DEFAULT_MIN_INCREMENT;
interval[FILTER_MAX_WIDTH] = WidthFilter.DEFAULT_INCREMENT;
interval[FILTER_SHIFT] = ShiftFilter.DEFAULT_INCREMENT;
interval[FILTER_ESHIFT] = EShiftFilter.DEFAULT_INCREMENT;
interval[FILTER_PRECISION] = PrecisionFilter.DEFAULT_INCREMENT;
interval[FILTER_ITERATIONS] = 0.1;
interval[FILTER_EVALUATIONS] = 0.1;
// Create a range increment
final double[] increment = new double[lower.length];
for (int i = 0; i < increment.length; i++) {
lower[i] = MathUtils.floor(lower[i], interval[i]);
upper[i] = MathUtils.ceil(upper[i], interval[i]);
final double range = upper[i] - lower[i];
// Allow clipping if the range is small compared to the min increment
double multiples = range / interval[i];
// Use 8 multiples for the equivalent of +/- 4 steps around the centre
if (multiples < 8) {
multiples = Math.ceil(multiples);
} else {
multiples = 8;
}
increment[i] = MathUtils.ceil(range / multiples, interval[i]);
if (i == FILTER_MIN_WIDTH) {
// Requires clipping based on the upper limit
lower[i] = upper[i] - increment[i] * multiples;
} else {
upper[i] = lower[i] + increment[i] * multiples;
}
}
for (int i = 0; i < stats[0].length; i++) {
lower[i] = MathUtils.round(lower[i]);
upper[i] = MathUtils.round(upper[i]);
min[i] = MathUtils.round(min[i]);
max[i] = MathUtils.round(max[i]);
increment[i] = MathUtils.round(increment[i]);
sb.append('\t').append(min[i]).append(':').append(lower[i]).append('-').append(upper[i]).append(':').append(max[i]);
}
// Disable some filters
increment[FILTER_SIGNAL] = Double.POSITIVE_INFINITY;
// increment[FILTER_SHIFT] = Double.POSITIVE_INFINITY;
increment[FILTER_ESHIFT] = Double.POSITIVE_INFINITY;
wo.tile();
sb.append('\t').append(TextUtils.nanosToString(runTime));
createTable().append(sb.toString());
if (settings.saveFilterRange) {
GUIFilterSettings filterSettings = SettingsManager.readGuiFilterSettings(0);
String filename = (silent) ? filterSettings.getFilterSetFilename() : ImageJUtils.getFilename("Filter_range_file", filterSettings.getFilterSetFilename());
if (filename == null) {
return;
}
// Remove extension to store the filename
filename = FileUtils.replaceExtension(filename, ".xml");
filterSettings = filterSettings.toBuilder().setFilterSetFilename(filename).build();
// Create a filter set using the ranges
final ArrayList<Filter> filters = new ArrayList<>(4);
// Create the multi-filter using the same precision type as that used during fitting.
// Currently no support for z-filter as 3D astigmatism fitting is experimental.
final PrecisionMethod precisionMethod = getPrecisionMethod((DirectFilter) multiFilter.getFilter());
Function<double[], Filter> generator;
if (precisionMethod == PrecisionMethod.POISSON_CRLB) {
generator = parameters -> new MultiFilterCrlb(parameters[FILTER_SIGNAL],
    (float) parameters[FILTER_SNR], parameters[FILTER_MIN_WIDTH], parameters[FILTER_MAX_WIDTH],
    parameters[FILTER_SHIFT], parameters[FILTER_ESHIFT], parameters[FILTER_PRECISION], 0f, 0f);
} else if (precisionMethod == PrecisionMethod.MORTENSEN) {
generator = parameters -> new MultiFilter(parameters[FILTER_SIGNAL],
    (float) parameters[FILTER_SNR], parameters[FILTER_MIN_WIDTH], parameters[FILTER_MAX_WIDTH],
    parameters[FILTER_SHIFT], parameters[FILTER_ESHIFT], parameters[FILTER_PRECISION], 0f, 0f);
} else {
// Default
generator = parameters -> new MultiFilter2(parameters[FILTER_SIGNAL],
    (float) parameters[FILTER_SNR], parameters[FILTER_MIN_WIDTH], parameters[FILTER_MAX_WIDTH],
    parameters[FILTER_SHIFT], parameters[FILTER_ESHIFT], parameters[FILTER_PRECISION], 0f, 0f);
}
filters.add(generator.apply(lower));
filters.add(generator.apply(upper));
filters.add(generator.apply(increment));
if (saveFilters(filename, filters)) {
SettingsManager.writeSettings(filterSettings);
}
// Create a filter set using the min/max and the initial bounds.
// Set sensible limits
min[FILTER_SIGNAL] = Math.max(min[FILTER_SIGNAL], 30);
max[FILTER_SNR] = Math.min(max[FILTER_SNR], 10000);
max[FILTER_PRECISION] = Math.min(max[FILTER_PRECISION], 100);
// Make the 4-set filters the same as the 3-set filters.
filters.clear();
filters.add(generator.apply(min));
filters.add(generator.apply(lower));
filters.add(generator.apply(upper));
filters.add(generator.apply(max));
saveFilters(FileUtils.replaceExtension(filename, ".4.xml"), filters);
}
spotFitResults.min = min;
spotFitResults.max = max;
}
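The range rounding used in the method above (floor/ceil the limits to the filter increment, then clip the range to at most 8 increments) can be sketched in isolation. The helper below uses plain Math.floor/Math.ceil as a stand-in for MathUtils.floor/ceil and omits the FILTER_MIN_WIDTH special case:

/** Illustrative only: round a [lower, upper] range outwards to a multiple of the
 * filter interval and clip it to at most 8 increments. */
class RangeRoundingSketch {
  static double[] round(double lower, double upper, double interval) {
    lower = Math.floor(lower / interval) * interval;
    upper = Math.ceil(upper / interval) * interval;
    final double range = upper - lower;
    // Use 8 multiples for the equivalent of +/- 4 steps around the centre
    final double multiples = Math.min(8, Math.ceil(range / interval));
    final double increment = Math.ceil(range / multiples / interval) * interval;
    upper = lower + increment * multiples;
    return new double[] {lower, upper, increment};
  }

  public static void main(String[] args) {
    // e.g. an SNR range of [7.3, 123.4] with a default increment of 1
    System.out.println(java.util.Arrays.toString(round(7.3, 123.4, 1))); // [7.0, 127.0, 15.0]
  }
}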
Use of uk.ac.sussex.gdsc.core.utils.OpenHashMaps.CustomInt2ObjectOpenHashMap in project gdsc-smlm by aherbert.
From class BenchmarkSmartSpotRanking, method runAnalysis.
private void runAnalysis() {
// Extract all the results in memory into a list per frame. This can be cached
boolean refresh = false;
final Pair<Integer, Int2ObjectOpenHashMap<List<Coordinate>>> coords = coordinateCache.get();
Int2ObjectOpenHashMap<List<Coordinate>> actualCoordinates;
if (coords.getKey() != simulationParameters.id) {
// Do not get integer coordinates
// The Coordinate objects will be PeakResultPoint objects that store the original PeakResult
// from the MemoryPeakResults
actualCoordinates = ResultsMatchCalculator.getCoordinates(results, false);
coordinateCache.set(Pair.of(simulationParameters.id, actualCoordinates));
refresh = true;
} else {
actualCoordinates = coords.getValue();
}
// Extract all the candidates into a list per frame. This can be cached if the settings have not
// changed.
CandidateData candidateData = candidateDataCache.get();
if (refresh || candidateData == null || candidateData.differentSettings(filterResult.id, settings)) {
candidateData = subsetFilterResults(filterResult.filterResults);
candidateDataCache.set(candidateData);
}
final Int2ObjectOpenHashMap<FilterCandidates> filterCandidates = candidateData.filterCandidates;
final ImageStack stack = imp.getImageStack();
// Create a pool of workers
final int nThreads = Prefs.getThreads();
final BlockingQueue<Integer> jobs = new ArrayBlockingQueue<>(nThreads * 2);
final List<Worker> workers = new LinkedList<>();
final List<Thread> threads = new LinkedList<>();
final Ticker ticker = ImageJUtils.createTicker(filterCandidates.size(), nThreads);
for (int i = 0; i < nThreads; i++) {
final Worker worker = new Worker(jobs, stack, actualCoordinates, filterCandidates, ticker);
final Thread t = new Thread(worker);
workers.add(worker);
threads.add(t);
t.start();
}
// Process the frames
filterCandidates.keySet().forEach((IntConsumer) value -> put(jobs, value));
// Finish all the worker threads by passing in a null job
for (int i = threads.size(); i-- != 0; ) {
put(jobs, -1);
}
// Wait for all to finish
for (final Thread thread : threads) {
try {
thread.join();
} catch (final InterruptedException ex) {
Thread.currentThread().interrupt();
throw new ConcurrentRuntimeException("Unexpected interrupt", ex);
}
}
threads.clear();
IJ.showProgress(1);
if (ImageJUtils.isInterrupted()) {
IJ.showStatus("Aborted");
return;
}
IJ.showStatus("Collecting results ...");
final CustomInt2ObjectOpenHashMap<RankResults> rankResults = new CustomInt2ObjectOpenHashMap<>();
for (final Worker w : workers) {
rankResults.putAll(w.results);
}
summariseResults(rankResults, candidateData);
IJ.showStatus("");
}
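The frame-processing loop above is a standard bounded producer/consumer pattern: frames are put on a blocking queue, each worker takes jobs until it receives a poison-pill value (-1 here), and the caller joins all threads. A minimal sketch of the pattern, independent of the plugin classes:

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;

/** Illustrative only: bounded job queue with poison-pill shutdown. */
class WorkerPoolSketch {
  public static void main(String[] args) throws InterruptedException {
    final int nThreads = Runtime.getRuntime().availableProcessors();
    final BlockingQueue<Integer> jobs = new ArrayBlockingQueue<>(nThreads * 2);
    final List<Thread> threads = new ArrayList<>();
    for (int i = 0; i < nThreads; i++) {
      final Thread t = new Thread(() -> {
        try {
          for (int frame; (frame = jobs.take()) != -1;) {
            // process the frame ...
          }
        } catch (final InterruptedException ex) {
          Thread.currentThread().interrupt();
        }
      });
      threads.add(t);
      t.start();
    }
    for (int frame = 1; frame <= 100; frame++) {
      jobs.put(frame); // blocks when the queue is full
    }
    for (int i = 0; i < nThreads; i++) {
      jobs.put(-1); // one poison pill per worker
    }
    for (final Thread t : threads) {
      t.join();
    }
  }
}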
Use of uk.ac.sussex.gdsc.core.utils.OpenHashMaps.CustomInt2ObjectOpenHashMap in project gdsc-smlm by aherbert.
From class BenchmarkFilterAnalysis, method readResults.
private MultiPathFitResults[] readResults() {
// Extract all the results in memory into a list per frame. This can be cached
boolean update = false;
Pair<Integer, CustomInt2ObjectOpenHashMap<UniqueIdPeakResult[]>> coords = COORDINATE_CACHE.get();
if (coords.getKey() != simulationParameters.id) {
coords = Pair.of(simulationParameters.id, getCoordinates(results));
COORDINATE_CACHE.set(coords);
update = true;
}
actualCoordinates = coords.getValue();
spotFitResults = BenchmarkSpotFit.getBenchmarkSpotFitResults();
FitResultData localFitResultData = FIT_RESULTS_DATA_CACHE.get();
final SettingsList scoreSettings = new SettingsList(settings.partialMatchDistance, settings.upperMatchDistance, settings.partialSignalFactor, settings.upperSignalFactor);
final boolean equalScoreSettings = scoreSettings.equals(localFitResultData.scoreSettings);
if (update || localFitResultData.fittingId != spotFitResults.id || !equalScoreSettings || localFitResultData.differentSettings(settings)) {
IJ.showStatus("Reading results ...");
if (localFitResultData.fittingId < 0) {
// Copy the settings from the fitter if this is the first run.
// This just starts the plugin with sensible settings.
// Q. Should this be per new simulation or fitting result instead?
final FitEngineConfiguration config = BenchmarkSpotFit.getFitEngineConfiguration();
settings.failCount = config.getFailuresLimit();
settings.duplicateDistance = config.getDuplicateDistance();
settings.duplicateDistanceAbsolute = config.getDuplicateDistanceAbsolute();
settings.residualsThreshold = (BenchmarkSpotFit.getComputeDoublets()) ? BenchmarkSpotFit.getMultiFilter().residualsThreshold : 1;
}
// This functionality is for choosing the optimum filter for the given scoring metric.
if (!equalScoreSettings) {
filterAnalysisResult.scores.clear();
}
localFitResultData = new FitResultData(spotFitResults.id, scoreSettings, settings);
// @formatter:off
// -=-=-=-
// The scoring is designed to find the best fitter+filter combination for the given spot
// candidates. The ideal combination would correctly fit+pick all the candidate positions
// that are close to a localisation.
//
// Use the following scoring scheme for all candidates:
//
// Candidates
// +----------------------------------------+
// | Actual matches |
// | +-----------+ TN |
// | | FN | |
// | | +---------- |
// | | | TP | | Fitted |
// | +-----------+ | spots |
// | | FP | |
// | +---------+ |
// +----------------------------------------+
//
// Candidates = All the spot candidates
// Actual matches = Any spot candidate or fitted spot candidate that matches a localisation
// Fitted spots = Any spot candidate that was successfully fitted
//
// TP = A spot candidate that was fitted and matches a localisation and is accepted
// FP = A spot candidate that was fitted but does not match a localisation and is accepted
// FN = A spot candidate that failed to be fitted but matches a localisation
// = A spot candidate that was fitted and matches a localisation and is rejected
// TN = A spot candidate that failed to be fitted and does not match a localisation
// = A spot candidate that was fitted and does not match a localisation and is rejected
//
// When fitting only produces one result it is possible to compute the TN score.
// Since unfitted candidates can only be TN or FN we could accumulate these scores and cache
// them. This was the old method of benchmarking single spot fitting and allowed more scores
// to be computed.
//
// When fitting produces multiple results then we have to score each fit result against all
// possible actual results and keep a record of the scores. These can then be assessed when
// the specific results have been chosen by result filtering.
//
// Using a distance ramped scoring function the degree of match can be varied from 0 to 1.
// Using a signal-factor ramped scoring function the degree of fitted can be varied from 0
// to 1. When using ramped scoring functions the fractional allocation of scores using the
// above scheme is performed, i.e. candidates are treated as if they both match and unmatch.
// This results in an equivalent to multiple analysis using different thresholds and averaging
// of the scores.
//
// The totals TP+FP+TN+FN must equal the number of spot candidates. This allows different
// fitting methods to be compared since the total number of candidates is the same.
//
// Precision = TP / (TP+FP) : This is always valid as a minimum criteria score
// Recall = TP / (TP+FN) : This is valid between different fitting methods since a
// method that fits more spots will have a potentially lower FN
// Jaccard = TP / (TP+FN+FP) : This is valid between fitting methods
//
// -=-=-=-
// As an alternative scoring system, different fitting methods can be compared using the same
// TP value but calculating FN = localisations - TP and FP as Positives - TP. This creates a
// score against the original number of simulated molecules using everything that was passed
// through the filter (Positives). This score is comparable when a different spot candidate
// filter has been used and the total number of candidates is different, e.g. Mean filtering
// vs. Gaussian filtering
// -=-=-=-
// @formatter:on
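// Worked example (illustrative numbers only): for 100 spot candidates suppose the
// fractional scores sum to TP=40, FP=10, FN=20, TN=30, so TP+FP+TN+FN = 100 = candidates.
// Precision = TP / (TP+FP)    = 40 / 50 = 0.80
// Recall    = TP / (TP+FN)    = 40 / 60 ~ 0.67
// Jaccard   = TP / (TP+FN+FP) = 40 / 70 ~ 0.57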
final RampedScore distanceScore = RampedScore.of(
    spotFitResults.distanceInPixels * settings.upperMatchDistance / 100.0,
    spotFitResults.distanceInPixels * settings.partialMatchDistance / 100.0, false);
localFitResultData.lowerDistanceInPixels = distanceScore.edge1;
localFitResultData.distanceInPixels = distanceScore.edge0;
final double matchDistance = MathUtils.pow2(localFitResultData.distanceInPixels);
localFitResultData.resultsPrefix3 =
    "\t" + MathUtils.rounded(distanceScore.edge1 * simulationParameters.pixelPitch)
    + "\t" + MathUtils.rounded(distanceScore.edge0 * simulationParameters.pixelPitch);
localFitResultData.limitRange =
    ", d=" + MathUtils.rounded(distanceScore.edge1 * simulationParameters.pixelPitch)
    + "-" + MathUtils.rounded(distanceScore.edge0 * simulationParameters.pixelPitch);
// Signal factor must be greater than 1
final RampedScore signalScore;
final double spotSignalFactor = BenchmarkSpotFit.getSignalFactor();
if (spotSignalFactor > 0 && settings.upperSignalFactor > 0) {
signalScore = RampedScore.of(spotSignalFactor * settings.upperSignalFactor / 100.0, spotSignalFactor * settings.partialSignalFactor / 100.0, false);
localFitResultData.lowerSignalFactor = signalScore.edge1;
localFitResultData.signalFactor = signalScore.edge0;
localFitResultData.resultsPrefix3 += "\t" + MathUtils.rounded(signalScore.edge1) + "\t" + MathUtils.rounded(signalScore.edge0);
localFitResultData.limitRange += ", s=" + MathUtils.rounded(signalScore.edge1) + "-" + MathUtils.rounded(signalScore.edge0);
} else {
signalScore = null;
localFitResultData.resultsPrefix3 += "\t0\t0";
localFitResultData.lowerSignalFactor = localFitResultData.signalFactor = 0;
}
// Store all the results
final ArrayList<MultiPathFitResults> multiPathFitResults = new ArrayList<>(spotFitResults.fitResults.size());
final List<MultiPathFitResults> syncResults = Collections.synchronizedList(multiPathFitResults);
// This could be multi-threaded ...
final int nThreads = getThreads(spotFitResults.fitResults.size());
final BlockingQueue<Job> jobs = new ArrayBlockingQueue<>(nThreads * 2);
final List<FitResultsWorker> workers = new LinkedList<>();
final List<Thread> threads = new LinkedList<>();
final AtomicInteger uniqueId = new AtomicInteger();
final CoordinateStore localCoordinateStore = createCoordinateStore();
final Ticker ticker = ImageJUtils.createTicker(spotFitResults.fitResults.size(), nThreads, null);
for (int i = 0; i < nThreads; i++) {
final FitResultsWorker worker = new FitResultsWorker(jobs, syncResults, matchDistance, distanceScore, signalScore, uniqueId, localCoordinateStore.newInstance(), ticker, actualCoordinates);
final Thread t = new Thread(worker);
workers.add(worker);
threads.add(t);
t.start();
}
spotFitResults.fitResults.int2ObjectEntrySet().forEach(e -> put(jobs, new Job(e.getIntKey(), e.getValue())));
// Finish all the worker threads by passing in a null job
for (int i = threads.size(); i-- != 0; ) {
put(jobs, new Job(0, null));
}
// Wait for all to finish
for (int i = 0; i < threads.size(); i++) {
try {
threads.get(i).join();
final FitResultsWorker worker = workers.get(i);
localFitResultData.matches += worker.matches;
localFitResultData.fittedResults += worker.included;
localFitResultData.totalResults += worker.total;
localFitResultData.notDuplicateCount += worker.notDuplicateCount;
localFitResultData.newResultCount += worker.newResultCount;
localFitResultData.countActual += worker.includedActual;
if (i == 0) {
localFitResultData.depthStats = worker.depthStats;
localFitResultData.depthFitStats = worker.depthFitStats;
localFitResultData.signalFactorStats = worker.signalFactorStats;
localFitResultData.distanceStats = worker.distanceStats;
} else {
localFitResultData.depthStats.add(worker.depthStats);
localFitResultData.depthFitStats.add(worker.depthFitStats);
localFitResultData.signalFactorStats.add(worker.signalFactorStats);
localFitResultData.distanceStats.add(worker.distanceStats);
}
} catch (final InterruptedException ex) {
Thread.currentThread().interrupt();
throw new ConcurrentRuntimeException("Unexpected interrupt", ex);
}
}
threads.clear();
ImageJUtils.finished();
localFitResultData.maxUniqueId = uniqueId.get();
localFitResultData.resultsList = multiPathFitResults.toArray(new MultiPathFitResults[0]);
Arrays.sort(localFitResultData.resultsList, (o1, o2) -> Integer.compare(o1.getFrame(), o2.getFrame()));
MultiPathFilter.resetValidationFlag(localFitResultData.resultsList);
FIT_RESULTS_DATA_CACHE.set(localFitResultData);
}
fitResultData = localFitResultData;
return localFitResultData.resultsList;
}
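For reference, the ramped scoring described in the comment block of readResults maps a match distance to a weight in [0, 1]: full credit at or inside the lower edge, none at or beyond the upper edge, and a partial score in between. A simplified linear version (the gdsc-core RampedScore may use a smoother ramp; this is only to illustrate the idea):

/** Illustrative only: linear ramp, 1 at or below edge1, 0 at or above edge0. */
class LinearRampSketch {
  final double edge1; // lower edge: full score
  final double edge0; // upper edge: zero score

  LinearRampSketch(double edge1, double edge0) {
    this.edge1 = edge1;
    this.edge0 = edge0;
  }

  double score(double distance) {
    if (distance <= edge1) {
      return 1;
    }
    if (distance >= edge0) {
      return 0;
    }
    return (edge0 - distance) / (edge0 - edge1);
  }

  public static void main(String[] args) {
    // e.g. a full match within 1 pixel, no match beyond 2 pixels
    final LinearRampSketch ramp = new LinearRampSketch(1, 2);
    System.out.println(ramp.score(0.5)); // 1.0
    System.out.println(ramp.score(1.5)); // 0.5
    System.out.println(ramp.score(2.5)); // 0.0
  }
}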