Example use of gnu.trove.map.hash.TIntObjectHashMap from the GDSC-SMLM project by aherbert.
Source: the BenchmarkSpotFit class, method runFitting.
/**
 * Fit the candidate spots in every frame of the benchmark image stack using a pool of
 * worker threads, collect the fit results per frame, assign each preprocessed peak result
 * a unique ID, and summarise the fitting against the simulated ground-truth coordinates.
 *
 * <p>Ground-truth coordinates and the candidate subset are cached and only recomputed when
 * the simulation or filter settings have changed.
 *
 * @return the benchmark spot fit results, or null if the user interrupted the fitting
 */
private BenchmarkSpotFitResult runFitting() {
// Extract all the results in memory into a list per frame. This can be cached
boolean refresh = false;
Pair<Integer, TIntObjectHashMap<List<Coordinate>>> coords = coordinateCache.get();
// Rebuild the cached coordinates if they belong to a different simulation.
if (coords.getKey() != simulationParameters.id) {
// Do not get integer coordinates
// The Coordinate objects will be PeakResultPoint objects that store the original PeakResult
// from the MemoryPeakResults
coords = Pair.of(simulationParameters.id, ResultsMatchCalculator.getCoordinates(results, false));
coordinateCache.set(coords);
refresh = true;
}
final TIntObjectHashMap<List<Coordinate>> actualCoordinates = coords.getValue();
// Extract all the candidates into a list per frame. This can be cached if the settings have not
// changed
final int width = (config.isIncludeNeighbours()) ? config.getFittingWidth() : 0;
CandidateData candidateData = candidateDataCache.get();
// Invalidate the candidate cache when the coordinates were refreshed or settings differ.
if (refresh || candidateData == null || candidateData.differentSettings(filterResult.id, settings, width)) {
candidateData = subsetFilterResults(filterResult.filterResults, width);
candidateDataCache.set(candidateData);
}
final StopWatch stopWatch = StopWatch.createStarted();
final ImageStack stack = imp.getImageStack();
clearFitResults();
// Save results to memory
final MemoryPeakResults peakResults = new MemoryPeakResults();
peakResults.copySettings(this.results);
peakResults.setName(TITLE);
config.configureOutputUnits();
final FitConfiguration fitConfig = config.getFitConfiguration();
peakResults.setCalibration(fitConfig.getCalibration());
MemoryPeakResults.addResults(peakResults);
// Create a pool of workers
final int nThreads = Prefs.getThreads();
// Bounded queue so frame submission applies back-pressure on the producer.
final BlockingQueue<Integer> jobs = new ArrayBlockingQueue<>(nThreads * 2);
final List<Worker> workers = new LinkedList<>();
final List<Thread> threads = new LinkedList<>();
final Ticker ticker = ImageJUtils.createTicker(stack.getSize(), nThreads, "Fitting frames ...");
// Synchronise the shared results sink across the worker threads.
final PeakResults syncResults = SynchronizedPeakResults.create(peakResults, nThreads);
for (int i = 0; i < nThreads; i++) {
final Worker worker = new Worker(jobs, stack, actualCoordinates, candidateData.filterCandidates, syncResults, ticker);
final Thread t = new Thread(worker);
workers.add(worker);
threads.add(t);
t.start();
}
// Fit the frames (stack slices are 1-based)
final long startTime = System.nanoTime();
for (int i = 1; i <= stack.getSize(); i++) {
put(jobs, i);
}
// Finish all the worker threads by passing in a poison-pill job (-1), one per worker
for (int i = 0; i < threads.size(); i++) {
put(jobs, -1);
}
// Wait for all to finish
for (int i = 0; i < threads.size(); i++) {
try {
threads.get(i).join();
} catch (final InterruptedException ex) {
// Restore the interrupt status before propagating as an unchecked exception.
Thread.currentThread().interrupt();
throw new ConcurrentRuntimeException(ex);
}
}
final long runTime = System.nanoTime() - startTime;
threads.clear();
ImageJUtils.finished();
if (ImageJUtils.isInterrupted()) {
return null;
}
stopWatch.stop();
final String timeString = stopWatch.toString();
IJ.log("Spot fit time : " + timeString);
IJ.showStatus("Collecting results ...");
if (fitConfig.isFitCameraCounts()) {
// Convert to photons for consistency
// NOTE(review): this converts the simulation 'results', not the new fit results
// ('peakResults') -- confirm this is the intended target.
results.convertToPreferredUnits();
}
// Merge the per-frame fit results from every worker into one map keyed by frame.
final TIntObjectHashMap<FilterCandidates> fitResults = new TIntObjectHashMap<>();
for (final Worker w : workers) {
fitResults.putAll(w.results);
}
// Assign a unique ID to each result
int count = 0;
// Materialise into an array since we use it twice
final FilterCandidates[] candidates = fitResults.values(new FilterCandidates[fitResults.size()]);
// First pass: count all stored fit results across the four fit paths.
for (final FilterCandidates result : candidates) {
for (int i = 0; i < result.fitResult.length; i++) {
final MultiPathFitResult fitResult = result.fitResult[i];
count += count(fitResult.getSingleFitResult());
count += count(fitResult.getMultiFitResult());
count += count(fitResult.getDoubletFitResult());
count += count(fitResult.getMultiDoubletFitResult());
}
}
// Second pass: store each result at its unique ID index (store() returns the updated count).
final PreprocessedPeakResult[] preprocessedPeakResults = new PreprocessedPeakResult[count];
count = 0;
for (final FilterCandidates result : candidates) {
for (int i = 0; i < result.fitResult.length; i++) {
final MultiPathFitResult fitResult = result.fitResult[i];
count = store(fitResult.getSingleFitResult(), count, preprocessedPeakResults);
count = store(fitResult.getMultiFitResult(), count, preprocessedPeakResults);
count = store(fitResult.getDoubletFitResult(), count, preprocessedPeakResults);
count = store(fitResult.getMultiDoubletFitResult(), count, preprocessedPeakResults);
}
}
final BenchmarkSpotFitResult newSpotFitResults = new BenchmarkSpotFitResult(simulationParameters.id, fitResults);
newSpotFitResults.distanceInPixels = distanceInPixels;
newSpotFitResults.lowerDistanceInPixels = lowerDistanceInPixels;
newSpotFitResults.stopWatch = stopWatch;
summariseResults(newSpotFitResults, runTime, preprocessedPeakResults, count, candidateData, actualCoordinates);
IJ.showStatus("");
spotFitResults.set(newSpotFitResults);
return newSpotFitResults;
}
Example use of gnu.trove.map.hash.TIntObjectHashMap from the GDSC-SMLM project by aherbert.
Source: the BenchmarkSpotFit class, method summariseResults.
/**
 * Summarise the fitting results against the simulated ground truth: count fit successes and
 * failures, compute match/classification statistics (recall, precision, Jaccard), plot match
 * distance/depth histograms and intensity correlations, derive suggested filter parameter
 * ranges from the observed statistics, and optionally save those ranges as filter sets.
 *
 * <p>Appends a summary row to the results table and stores the min/max filter limits on the
 * spot fit results.
 *
 * @param spotFitResults the benchmark spot fit results to summarise (updated in place)
 * @param runTime the fitting run time in nanoseconds
 * @param preprocessedPeakResults all fit results indexed by their unique ID
 * @param uniqueIdCount the number of unique fit result IDs
 * @param candidateData the candidate subset data used for fitting
 * @param actualCoordinates the ground-truth coordinates per frame
 */
private void summariseResults(BenchmarkSpotFitResult spotFitResults, long runTime, final PreprocessedPeakResult[] preprocessedPeakResults, int uniqueIdCount, CandidateData candidateData, TIntObjectHashMap<List<Coordinate>> actualCoordinates) {
// Summarise the fitting results. N fits, N failures.
// Optimal match statistics if filtering is perfect (since fitting is not perfect).
final StoredDataStatistics distanceStats = new StoredDataStatistics();
final StoredDataStatistics depthStats = new StoredDataStatistics();
// Get stats for all fitted results and those that match
// Signal, SNR, Width, xShift, yShift, Precision
createFilterCriteria();
// stats[0] = all fitted spots, stats[1] = fitted spots matching a result, stats[2] = non-matching.
final StoredDataStatistics[][] stats = new StoredDataStatistics[3][filterCriteria.length];
for (int i = 0; i < stats.length; i++) {
for (int j = 0; j < stats[i].length; j++) {
stats[i][j] = new StoredDataStatistics();
}
}
final double nmPerPixel = simulationParameters.pixelPitch;
// tp/fp: fractional scores of fit results; ctp/cfp: candidate counts; failCtp/failCfp:
// candidates where every fit path failed, split by whether the candidate matched a spot.
double tp = 0;
double fp = 0;
int failCtp = 0;
int failCfp = 0;
int ctp = 0;
int cfp = 0;
// Histograms of FitStatus codes per fit path, used to debug reasons for fit failure.
final int[] singleStatus = new int[FitStatus.values().length];
final int[] multiStatus = new int[singleStatus.length];
final int[] doubletStatus = new int[singleStatus.length];
final int[] multiDoubletStatus = new int[singleStatus.length];
// Easier to materialise the values since we have a lot of non final variables to manipulate
final TIntObjectHashMap<FilterCandidates> fitResults = spotFitResults.fitResults;
final int[] frames = new int[fitResults.size()];
final FilterCandidates[] candidates = new FilterCandidates[fitResults.size()];
final int[] counter = new int[1];
// Flatten the map into parallel frame/candidate arrays (iteration order is map order).
fitResults.forEachEntry((frame, candidate) -> {
frames[counter[0]] = frame;
candidates[counter[0]] = candidate;
counter[0]++;
return true;
});
for (final FilterCandidates result : candidates) {
// Count the number of fit results that matched (tp) and did not match (fp)
tp += result.tp;
fp += result.fp;
for (int i = 0; i < result.fitResult.length; i++) {
if (result.spots[i].match) {
ctp++;
} else {
cfp++;
}
final MultiPathFitResult fitResult = result.fitResult[i];
// NOTE(review): 'singleStatus' is a local array and can never be null here; the
// null check is redundant (only the match flag is effective).
if (singleStatus != null && result.spots[i].match) {
// Debugging reasons for fit failure
addStatus(singleStatus, fitResult.getSingleFitResult());
addStatus(multiStatus, fitResult.getMultiFitResult());
addStatus(doubletStatus, fitResult.getDoubletFitResult());
addStatus(multiDoubletStatus, fitResult.getMultiDoubletFitResult());
}
// Count candidates where no fit path produced a result.
if (noMatch(fitResult)) {
if (result.spots[i].match) {
failCtp++;
} else {
failCfp++;
}
}
// We have multi-path results.
// We want statistics for:
// [0] all fitted spots
// [1] fitted spots that match a result
// [2] fitted spots that do not match a result
addToStats(fitResult.getSingleFitResult(), stats);
addToStats(fitResult.getMultiFitResult(), stats);
addToStats(fitResult.getDoubletFitResult(), stats);
addToStats(fitResult.getMultiDoubletFitResult(), stats);
}
// Statistics on spots that fit an actual result
for (int i = 0; i < result.match.length; i++) {
if (!result.match[i].isFitResult()) {
// For now just ignore the candidates that matched
continue;
}
final FitMatch fitMatch = (FitMatch) result.match[i];
// Convert pixel distances to nm for the histograms.
distanceStats.add(fitMatch.distance * nmPerPixel);
depthStats.add(fitMatch.zdepth * nmPerPixel);
}
}
// Abort early if nothing matched the ground truth.
if (tp == 0) {
IJ.error(TITLE, "No fit results matched the simulation actual results");
return;
}
// Store data for computing correlation
// i1 = predicted (fitted) signal, i2 = actual signal, is = candidate spot intensity.
final double[] i1 = new double[depthStats.getN()];
final double[] i2 = new double[i1.length];
final double[] is = new double[i1.length];
int ci = 0;
for (final FilterCandidates result : candidates) {
for (int i = 0; i < result.match.length; i++) {
if (!result.match[i].isFitResult()) {
// For now just ignore the candidates that matched
continue;
}
final FitMatch fitMatch = (FitMatch) result.match[i];
final ScoredSpot spot = result.spots[fitMatch.index];
i1[ci] = fitMatch.predictedSignal;
i2[ci] = fitMatch.actualSignal;
is[ci] = spot.spot.intensity;
ci++;
}
}
// We want to compute the Jaccard against the spot metric
// Filter the results using the multi-path filter
final ArrayList<MultiPathFitResults> multiPathResults = new ArrayList<>(fitResults.size());
for (int i = 0; i < frames.length; i++) {
final int frame = frames[i];
final MultiPathFitResult[] multiPathFitResults = candidates[i].fitResult;
final int totalCandidates = candidates[i].spots.length;
final List<Coordinate> list = actualCoordinates.get(frame);
final int nActual = (list == null) ? 0 : list.size();
multiPathResults.add(new MultiPathFitResults(frame, multiPathFitResults, totalCandidates, nActual));
}
// Score the results and count the number returned
final List<FractionalAssignment[]> assignments = new ArrayList<>();
// Collect the unique IDs of every result accepted by the scoring.
final TIntHashSet set = new TIntHashSet(uniqueIdCount);
final FractionScoreStore scoreStore = set::add;
final MultiPathFitResults[] multiResults = multiPathResults.toArray(new MultiPathFitResults[0]);
// Filter with no filter (SignalFilter(0) accepts everything)
final MultiPathFilter mpf = new MultiPathFilter(new SignalFilter(0), null, multiFilter.residualsThreshold);
mpf.fractionScoreSubset(multiResults, NullFailCounter.INSTANCE, this.results.size(), assignments, scoreStore, CoordinateStoreFactory.create(0, 0, imp.getWidth(), imp.getHeight(), config.convertUsingHwhMax(config.getDuplicateDistanceParameter())));
// Build filter-criteria score vectors: first for assigned (matched) results, removing
// their IDs from the set as we go, then for the remaining unmatched IDs below.
final double[][] matchScores = new double[set.size()][];
int count = 0;
for (int i = 0; i < assignments.size(); i++) {
final FractionalAssignment[] a = assignments.get(i);
if (a == null) {
continue;
}
for (int j = 0; j < a.length; j++) {
final PreprocessedPeakResult r = ((PeakFractionalAssignment) a[j]).peakResult;
set.remove(r.getUniqueId());
final double precision = Math.sqrt(r.getLocationVariance());
final double signal = r.getSignal();
final double snr = r.getSnr();
final double width = r.getXSdFactor();
final double xShift = r.getXRelativeShift2();
final double yShift = r.getYRelativeShift2();
// Since these two are combined for filtering and the max is what matters.
final double shift = (xShift > yShift) ? Math.sqrt(xShift) : Math.sqrt(yShift);
final double eshift = Math.sqrt(xShift + yShift);
final double[] score = new double[8];
score[FILTER_SIGNAL] = signal;
score[FILTER_SNR] = snr;
score[FILTER_MIN_WIDTH] = width;
score[FILTER_MAX_WIDTH] = width;
score[FILTER_SHIFT] = shift;
score[FILTER_ESHIFT] = eshift;
score[FILTER_PRECISION] = precision;
// Extra slot holds the assignment score for matched results.
score[FILTER_PRECISION + 1] = a[j].getScore();
matchScores[count++] = score;
}
}
// Add the rest (unmatched results; their extra score slot stays 0)
set.forEach(new CustomTIntProcedure(count) {
@Override
public boolean execute(int uniqueId) {
// This should not be null or something has gone wrong
final PreprocessedPeakResult r = preprocessedPeakResults[uniqueId];
if (r == null) {
throw new IllegalArgumentException("Missing result: " + uniqueId);
}
final double precision = Math.sqrt(r.getLocationVariance());
final double signal = r.getSignal();
final double snr = r.getSnr();
final double width = r.getXSdFactor();
final double xShift = r.getXRelativeShift2();
final double yShift = r.getYRelativeShift2();
// Since these two are combined for filtering and the max is what matters.
final double shift = (xShift > yShift) ? Math.sqrt(xShift) : Math.sqrt(yShift);
final double eshift = Math.sqrt(xShift + yShift);
final double[] score = new double[8];
score[FILTER_SIGNAL] = signal;
score[FILTER_SNR] = snr;
score[FILTER_MIN_WIDTH] = width;
score[FILTER_MAX_WIDTH] = width;
score[FILTER_SHIFT] = shift;
score[FILTER_ESHIFT] = eshift;
score[FILTER_PRECISION] = precision;
matchScores[count++] = score;
return true;
}
});
final FitConfiguration fitConfig = config.getFitConfiguration();
// Debug the reasons the fit failed
// NOTE(review): 'singleStatus' is a local array, so this condition is always true.
if (singleStatus != null) {
String name = PeakFit.getSolverName(fitConfig);
if (fitConfig.getFitSolver() == FitSolver.MLE && fitConfig.isModelCamera()) {
name += " Camera";
}
IJ.log("Failure counts: " + name);
printFailures("Single", singleStatus);
printFailures("Multi", multiStatus);
printFailures("Doublet", doubletStatus);
printFailures("Multi doublet", multiDoubletStatus);
}
// Build the tab-separated summary row. Column order must match the results table header.
final StringBuilder sb = new StringBuilder(300);
// Add information about the simulation
final double signal = simulationParameters.averageSignal;
final int n = results.size();
sb.append(imp.getStackSize()).append('\t');
final int w = imp.getWidth();
final int h = imp.getHeight();
sb.append(w).append('\t');
sb.append(h).append('\t');
sb.append(n).append('\t');
// Localisation density in molecules per square micrometre per frame.
final double density = ((double) n / imp.getStackSize()) / (w * h) / (simulationParameters.pixelPitch * simulationParameters.pixelPitch / 1e6);
sb.append(MathUtils.rounded(density)).append('\t');
sb.append(MathUtils.rounded(signal)).append('\t');
sb.append(MathUtils.rounded(simulationParameters.sd)).append('\t');
sb.append(MathUtils.rounded(simulationParameters.pixelPitch)).append('\t');
sb.append(MathUtils.rounded(simulationParameters.depth)).append('\t');
sb.append(simulationParameters.fixedDepth).append('\t');
sb.append(MathUtils.rounded(simulationParameters.gain)).append('\t');
sb.append(MathUtils.rounded(simulationParameters.readNoise)).append('\t');
sb.append(MathUtils.rounded(simulationParameters.background)).append('\t');
sb.append(MathUtils.rounded(simulationParameters.noise)).append('\t');
// NOTE(review): empty branch -- the full-simulation case appears unhandled; the comment
// suggests the signal should be adjusted when spread over frames. TODO confirm intent.
if (simulationParameters.fullSimulation) {
// The total signal is spread over frames
}
sb.append(MathUtils.rounded(signal / simulationParameters.noise)).append('\t');
sb.append(MathUtils.rounded(simulationParameters.sd / simulationParameters.pixelPitch)).append('\t');
sb.append(spotFilter.getDescription());
// nP and nN is the fractional score of the spot candidates
addCount(sb, (double) candidateData.countPositive + candidateData.countNegative);
addCount(sb, candidateData.countPositive);
addCount(sb, candidateData.countNegative);
addCount(sb, candidateData.fractionPositive);
addCount(sb, candidateData.fractionNegative);
String name = PeakFit.getSolverName(fitConfig);
if (fitConfig.getFitSolver() == FitSolver.MLE && fitConfig.isModelCamera()) {
name += " Camera";
}
add(sb, name);
add(sb, config.getFitting());
// Snapshot the prefix columns before appending the scoring columns.
spotFitResults.resultPrefix = sb.toString();
// Q. Should I add other fit configuration here?
// The fraction of positive and negative candidates that were included
add(sb, (100.0 * ctp) / candidateData.countPositive);
add(sb, (100.0 * cfp) / candidateData.countNegative);
// Score the fitting results compared to the original simulation.
// Score the candidate selection:
add(sb, ctp + cfp);
add(sb, ctp);
add(sb, cfp);
// TP are all candidates that can be matched to a spot
// FP are all candidates that cannot be matched to a spot
// FN = The number of missed spots
FractionClassificationResult match = new FractionClassificationResult(ctp, cfp, 0, simulationParameters.molecules - ctp);
add(sb, match.getRecall());
add(sb, match.getPrecision());
add(sb, match.getF1Score());
add(sb, match.getJaccard());
// Score the fitting results:
add(sb, failCtp);
add(sb, failCfp);
// TP are all fit results that can be matched to a spot
// FP are all fit results that cannot be matched to a spot
// FN = The number of missed spots
add(sb, tp);
add(sb, fp);
match = new FractionClassificationResult(tp, fp, 0, simulationParameters.molecules - tp);
add(sb, match.getRecall());
add(sb, match.getPrecision());
add(sb, match.getF1Score());
add(sb, match.getJaccard());
// Do it again but pretend we can perfectly filter all the false positives
// add(sb, tp);
match = new FractionClassificationResult(tp, 0, 0, simulationParameters.molecules - tp);
// Recall is unchanged
// Precision will be 100%
add(sb, match.getF1Score());
add(sb, match.getJaccard());
// The mean may be subject to extreme outliers so use the median
double median = distanceStats.getMedian();
add(sb, median);
final WindowOrganiser wo = new WindowOrganiser();
String label = String.format("Recall = %s. n = %d. Median = %s nm. SD = %s nm", MathUtils.rounded(match.getRecall()), distanceStats.getN(), MathUtils.rounded(median), MathUtils.rounded(distanceStats.getStandardDeviation()));
new HistogramPlotBuilder(TITLE, distanceStats, "Match Distance (nm)").setPlotLabel(label).show(wo);
median = depthStats.getMedian();
add(sb, median);
// Sort by spot intensity and produce correlation
double[] correlation = null;
double[] rankCorrelation = null;
double[] rank = null;
final FastCorrelator fastCorrelator = new FastCorrelator();
final ArrayList<Ranking> pc1 = new ArrayList<>();
final ArrayList<Ranking> pc2 = new ArrayList<>();
ci = 0;
if (settings.showCorrelation) {
// Process spots in intensity (or natural) order, recording running correlations.
final int[] indices = SimpleArrayUtils.natural(i1.length);
SortUtils.sortData(indices, is, settings.rankByIntensity, true);
correlation = new double[i1.length];
rankCorrelation = new double[i1.length];
rank = new double[i1.length];
for (final int ci2 : indices) {
fastCorrelator.add(Math.round(i1[ci2]), Math.round(i2[ci2]));
pc1.add(new Ranking(i1[ci2], ci));
pc2.add(new Ranking(i2[ci2], ci));
correlation[ci] = fastCorrelator.getCorrelation();
rankCorrelation[ci] = Correlator.correlation(rank(pc1), rank(pc2));
if (settings.rankByIntensity) {
// Rank axis = intensity drop from the brightest spot.
rank[ci] = is[0] - is[ci];
} else {
rank[ci] = ci;
}
ci++;
}
} else {
for (int i = 0; i < i1.length; i++) {
fastCorrelator.add(Math.round(i1[i]), Math.round(i2[i]));
pc1.add(new Ranking(i1[i], i));
pc2.add(new Ranking(i2[i], i));
}
}
final double pearsonCorr = fastCorrelator.getCorrelation();
final double rankedCorr = Correlator.correlation(rank(pc1), rank(pc2));
// Get the regression (through the origin) of actual signal vs candidate signal.
final SimpleRegression regression = new SimpleRegression(false);
for (int i = 0; i < pc1.size(); i++) {
regression.addData(pc1.get(i).value, pc2.get(i).value);
}
// final double intercept = regression.getIntercept();
final double slope = regression.getSlope();
if (settings.showCorrelation) {
String title = TITLE + " Intensity";
Plot plot = new Plot(title, "Candidate", "Spot");
final double[] limits1 = MathUtils.limits(i1);
final double[] limits2 = MathUtils.limits(i2);
plot.setLimits(limits1[0], limits1[1], limits2[0], limits2[1]);
label = String.format("Correlation=%s; Ranked=%s; Slope=%s", MathUtils.rounded(pearsonCorr), MathUtils.rounded(rankedCorr), MathUtils.rounded(slope));
plot.addLabel(0, 0, label);
plot.setColor(Color.red);
plot.addPoints(i1, i2, Plot.DOT);
// Draw the regression line clipped to whichever axis limits it first.
if (slope > 1) {
plot.drawLine(limits1[0], limits1[0] * slope, limits1[1], limits1[1] * slope);
} else {
plot.drawLine(limits2[0] / slope, limits2[0], limits2[1] / slope, limits2[1]);
}
ImageJUtils.display(title, plot, wo);
title = TITLE + " Correlation";
plot = new Plot(title, "Spot Rank", "Correlation");
final double[] xlimits = MathUtils.limits(rank);
double[] ylimits = MathUtils.limits(correlation);
ylimits = MathUtils.limits(ylimits, rankCorrelation);
plot.setLimits(xlimits[0], xlimits[1], ylimits[0], ylimits[1]);
plot.setColor(Color.red);
plot.addPoints(rank, correlation, Plot.LINE);
plot.setColor(Color.blue);
plot.addPoints(rank, rankCorrelation, Plot.LINE);
plot.setColor(Color.black);
plot.addLabel(0, 0, label);
ImageJUtils.display(title, plot, wo);
}
add(sb, pearsonCorr);
add(sb, rankedCorr);
add(sb, slope);
label = String.format("n = %d. Median = %s nm", depthStats.getN(), MathUtils.rounded(median));
new HistogramPlotBuilder(TITLE, depthStats, "Match Depth (nm)").setRemoveOutliersOption(1).setPlotLabel(label).show(wo);
// Plot histograms of the stats on the same window
// The per-criterion limits returned are [lower, upper, min, max].
final double[] lower = new double[filterCriteria.length];
final double[] upper = new double[lower.length];
final double[] min = new double[lower.length];
final double[] max = new double[lower.length];
for (int i = 0; i < stats[0].length; i++) {
final double[] limits = showDoubleHistogram(stats, i, wo, matchScores);
lower[i] = limits[0];
upper[i] = limits[1];
min[i] = limits[2];
max[i] = limits[3];
}
// Reconfigure some of the range limits
// Make this a bit bigger
upper[FILTER_SIGNAL] *= 2;
// Make this a bit bigger
upper[FILTER_SNR] *= 2;
final double factor = 0.25;
if (lower[FILTER_MIN_WIDTH] != 0) {
// (assuming lower is less than 1)
upper[FILTER_MIN_WIDTH] = 1 - Math.max(0, factor * (1 - lower[FILTER_MIN_WIDTH]));
}
if (upper[FILTER_MIN_WIDTH] != 0) {
// (assuming upper is more than 1)
lower[FILTER_MAX_WIDTH] = 1 + Math.max(0, factor * (upper[FILTER_MAX_WIDTH] - 1));
}
// Round the ranges
final double[] interval = new double[stats[0].length];
interval[FILTER_SIGNAL] = SignalFilter.DEFAULT_INCREMENT;
interval[FILTER_SNR] = SnrFilter.DEFAULT_INCREMENT;
interval[FILTER_MIN_WIDTH] = WidthFilter2.DEFAULT_MIN_INCREMENT;
interval[FILTER_MAX_WIDTH] = WidthFilter.DEFAULT_INCREMENT;
interval[FILTER_SHIFT] = ShiftFilter.DEFAULT_INCREMENT;
interval[FILTER_ESHIFT] = EShiftFilter.DEFAULT_INCREMENT;
interval[FILTER_PRECISION] = PrecisionFilter.DEFAULT_INCREMENT;
interval[FILTER_ITERATIONS] = 0.1;
interval[FILTER_EVALUATIONS] = 0.1;
// Create a range increment
final double[] increment = new double[lower.length];
for (int i = 0; i < increment.length; i++) {
lower[i] = MathUtils.floor(lower[i], interval[i]);
upper[i] = MathUtils.ceil(upper[i], interval[i]);
final double range = upper[i] - lower[i];
// Allow clipping if the range is small compared to the min increment
double multiples = range / interval[i];
// Use 8 multiples for the equivalent of +/- 4 steps around the centre
if (multiples < 8) {
multiples = Math.ceil(multiples);
} else {
multiples = 8;
}
increment[i] = MathUtils.ceil(range / multiples, interval[i]);
if (i == FILTER_MIN_WIDTH) {
// Requires clipping based on the upper limit
lower[i] = upper[i] - increment[i] * multiples;
} else {
upper[i] = lower[i] + increment[i] * multiples;
}
}
// Append the rounded min:lower-upper:max range for each criterion to the summary row.
for (int i = 0; i < stats[0].length; i++) {
lower[i] = MathUtils.round(lower[i]);
upper[i] = MathUtils.round(upper[i]);
min[i] = MathUtils.round(min[i]);
max[i] = MathUtils.round(max[i]);
increment[i] = MathUtils.round(increment[i]);
sb.append('\t').append(min[i]).append(':').append(lower[i]).append('-').append(upper[i]).append(':').append(max[i]);
}
// Disable some filters
increment[FILTER_SIGNAL] = Double.POSITIVE_INFINITY;
// increment[FILTER_SHIFT] = Double.POSITIVE_INFINITY;
increment[FILTER_ESHIFT] = Double.POSITIVE_INFINITY;
wo.tile();
sb.append('\t').append(TextUtils.nanosToString(runTime));
createTable().append(sb.toString());
if (settings.saveFilterRange) {
GUIFilterSettings filterSettings = SettingsManager.readGuiFilterSettings(0);
// In silent mode reuse the stored filename; otherwise prompt the user.
String filename = (silent) ? filterSettings.getFilterSetFilename() : ImageJUtils.getFilename("Filter_range_file", filterSettings.getFilterSetFilename());
if (filename == null) {
return;
}
// Remove extension to store the filename
filename = FileUtils.replaceExtension(filename, ".xml");
filterSettings = filterSettings.toBuilder().setFilterSetFilename(filename).build();
// Create a filter set using the ranges
final ArrayList<Filter> filters = new ArrayList<>(4);
// Create the multi-filter using the same precision type as that used during fitting.
// Currently no support for z-filter as 3D astigmatism fitting is experimental.
final PrecisionMethod precisionMethod = getPrecisionMethod((DirectFilter) multiFilter.getFilter());
Function<double[], Filter> generator;
if (precisionMethod == PrecisionMethod.POISSON_CRLB) {
generator = parameters -> new MultiFilterCrlb(parameters[FILTER_SIGNAL], (float) parameters[FILTER_SNR], parameters[FILTER_MIN_WIDTH], parameters[FILTER_MAX_WIDTH], parameters[FILTER_SHIFT], parameters[FILTER_ESHIFT], parameters[FILTER_PRECISION], 0f, 0f);
} else if (precisionMethod == PrecisionMethod.MORTENSEN) {
generator = parameters -> new MultiFilter(parameters[FILTER_SIGNAL], (float) parameters[FILTER_SNR], parameters[FILTER_MIN_WIDTH], parameters[FILTER_MAX_WIDTH], parameters[FILTER_SHIFT], parameters[FILTER_ESHIFT], parameters[FILTER_PRECISION], 0f, 0f);
} else {
// Default
generator = parameters -> new MultiFilter2(parameters[FILTER_SIGNAL], (float) parameters[FILTER_SNR], parameters[FILTER_MIN_WIDTH], parameters[FILTER_MAX_WIDTH], parameters[FILTER_SHIFT], parameters[FILTER_ESHIFT], parameters[FILTER_PRECISION], 0f, 0f);
}
filters.add(generator.apply(lower));
filters.add(generator.apply(upper));
filters.add(generator.apply(increment));
if (saveFilters(filename, filters)) {
SettingsManager.writeSettings(filterSettings);
}
// Create a filter set using the min/max and the initial bounds.
// Set sensible limits
min[FILTER_SIGNAL] = Math.max(min[FILTER_SIGNAL], 30);
max[FILTER_SNR] = Math.min(max[FILTER_SNR], 10000);
max[FILTER_PRECISION] = Math.min(max[FILTER_PRECISION], 100);
// Make the 4-set filters the same as the 3-set filters.
filters.clear();
filters.add(generator.apply(min));
filters.add(generator.apply(lower));
filters.add(generator.apply(upper));
filters.add(generator.apply(max));
saveFilters(FileUtils.replaceExtension(filename, ".4.xml"), filters);
}
spotFitResults.min = min;
spotFitResults.max = max;
}
Example use of gnu.trove.map.hash.TIntObjectHashMap from the GDSC-SMLM project by aherbert.
Source: the BenchmarkSmartSpotRanking class, method runAnalysis.
/**
 * Rank the filter candidates in each frame using a pool of worker threads and summarise
 * the ranking results.
 *
 * <p>Ground-truth coordinates and the candidate subset are cached and only recomputed
 * when the simulation or filter settings have changed.
 */
private void runAnalysis() {
// Extract all the results in memory into a list per frame. This can be cached
boolean refresh = false;
final Pair<Integer, TIntObjectHashMap<List<Coordinate>>> coords = coordinateCache.get();
TIntObjectHashMap<List<Coordinate>> actualCoordinates;
// Rebuild the cached coordinates if they belong to a different simulation.
if (coords.getKey() != simulationParameters.id) {
// Do not get integer coordinates
// The Coordinate objects will be PeakResultPoint objects that store the original PeakResult
// from the MemoryPeakResults
actualCoordinates = ResultsMatchCalculator.getCoordinates(results, false);
coordinateCache.set(Pair.of(simulationParameters.id, actualCoordinates));
refresh = true;
} else {
actualCoordinates = coords.getValue();
}
// Extract all the candidates into a list per frame. This can be cached if the settings have not
// changed.
CandidateData candidateData = candidateDataCache.get();
// Invalidate the candidate cache when the coordinates were refreshed or settings differ.
if (refresh || candidateData == null || candidateData.differentSettings(filterResult.id, settings)) {
candidateData = subsetFilterResults(filterResult.filterResults);
candidateDataCache.set(candidateData);
}
final TIntObjectHashMap<FilterCandidates> filterCandidates = candidateData.filterCandidates;
final ImageStack stack = imp.getImageStack();
// Create a pool of workers
final int nThreads = Prefs.getThreads();
// Bounded queue so frame submission applies back-pressure on the producer.
final BlockingQueue<Integer> jobs = new ArrayBlockingQueue<>(nThreads * 2);
final List<Worker> workers = new LinkedList<>();
final List<Thread> threads = new LinkedList<>();
final Ticker ticker = ImageJUtils.createTicker(filterCandidates.size(), nThreads);
for (int i = 0; i < nThreads; i++) {
final Worker worker = new Worker(jobs, stack, actualCoordinates, filterCandidates, ticker);
final Thread t = new Thread(worker);
workers.add(worker);
threads.add(t);
t.start();
}
// Process the frames (only frames that have filter candidates are submitted)
filterCandidates.forEachKey(value -> {
put(jobs, value);
return true;
});
// Finish all the worker threads by passing in a poison-pill job (-1), one per worker
for (int i = 0; i < threads.size(); i++) {
put(jobs, -1);
}
// Wait for all to finish
for (int i = 0; i < threads.size(); i++) {
try {
threads.get(i).join();
} catch (final InterruptedException ex) {
// Restore the interrupt status before propagating as an unchecked exception.
Thread.currentThread().interrupt();
throw new ConcurrentRuntimeException("Unexpected interrupt", ex);
}
}
threads.clear();
IJ.showProgress(1);
if (ImageJUtils.isInterrupted()) {
IJ.showStatus("Aborted");
return;
}
IJ.showStatus("Collecting results ...");
// Merge the per-frame rank results from every worker into one map keyed by frame.
final TIntObjectHashMap<RankResults> rankResults = new TIntObjectHashMap<>();
for (final Worker w : workers) {
rankResults.putAll(w.results);
}
summariseResults(rankResults, candidateData);
IJ.showStatus("");
}
Example use of gnu.trove.map.hash.TIntObjectHashMap from the GDSC-SMLM project by aherbert.
Source: the BenchmarkSmartSpotRanking class, method subsetFilterResults.
/**
 * Extract all the filter candidates in order until the desired number of positives have been
 * reached and the number of negatives matches the configured parameters.
 *
 * <p>Positives may be scored fractionally; the positive target per frame is the configured
 * fraction of the frame's true-positive score, snapped to the closest cumulative spot score.
 *
 * @param filterResults the filter results, keyed by frame
 * @return The filter candidate data (the per-frame subsets plus the aggregate
 *         positive/negative fractional scores and counts)
 */
private CandidateData subsetFilterResults(TIntObjectHashMap<FilterResult> filterResults) {
// Convert fractions from percent
final double f1 = Math.min(1, settings.fractionPositives / 100.0);
final double f2 = settings.fractionNegativesAfterAllPositives / 100.0;
final TIntObjectHashMap<FilterCandidates> subset = new TIntObjectHashMap<>();
// Single-element arrays act as mutable accumulators inside the lambda:
// fX = [total TP score, total FP score]; nX = [positive count, negative count].
final double[] fX = new double[2];
final int[] nX = new int[2];
filterResults.forEachEntry((frame, result) -> {
// Determine the number of positives to find. This score may be fractional.
fX[0] += result.result.getTruePositives();
fX[1] += result.result.getFalsePositives();
// Q. Is r.result.getTruePositives() not the same as the total of r.spots[i].match?
// A. Not if we used fractional scoring.
int count = 0;
for (int i = result.spots.length; i-- > 0; ) {
if (result.spots[i].match) {
count++;
}
}
nX[0] += count;
nX[1] += (result.spots.length - count);
// Make the target use the fractional score
final double np2 = result.result.getTruePositives() * f1;
double targetP = np2;
// Set the target using the closest
if (f1 < 1) {
// Walk the cumulative matched-spot score and snap the target to the cumulative
// value closest to np2 (stop once the distance starts increasing).
double np = 0;
double min = result.result.getTruePositives();
for (final ScoredSpot spot : result.spots) {
if (spot.match) {
np += spot.getScore();
final double d = np2 - np;
if (d < min) {
min = d;
targetP = np;
} else {
break;
}
}
}
}
// Count the number of positive & negatives
int pos = 0;
int neg = 0;
double np = 0;
double nn = 0;
boolean reachedTarget = false;
// Negatives encountered after the positive target was reached.
int negAfter = 0;
count = 0;
for (final ScoredSpot spot : result.spots) {
count++;
nn += spot.antiScore();
if (spot.match) {
np += spot.getScore();
pos++;
if (!reachedTarget) {
reachedTarget = np >= targetP;
}
} else {
neg++;
if (reachedTarget) {
negAfter++;
}
}
if (reachedTarget && // Check if we have reached both the limits
negAfter >= settings.negativesAfterAllPositives && (double) neg / (neg + pos) >= f2) {
break;
}
}
// TODO - This is different from BenchmarkSpotFit where all the candidates are
// included but only the first N are processed. Should this be changed here too.
// Store only the first 'count' candidates for this frame.
subset.put(frame, new FilterCandidates(pos, neg, np, nn, Arrays.copyOf(result.spots, count)));
return true;
});
return new CandidateData(subset, filterResult.id, fX[0], fX[1], nX[0], nX[1], settings);
}
Example use of gnu.trove.map.hash.TIntObjectHashMap from the GDSC-SMLM project by aherbert.
Source: the BenchmarkSpotFilter class, method addSpotsToMemory.
/**
 * Add all the true-positive scored spots to memory as a new results set.
 *
 * <p>The results set is named using the plugin title plus an auto-incrementing ID.
 *
 * @param filterResults the filter results, keyed by frame
 */
private static void addSpotsToMemory(TIntObjectHashMap<FilterResult> filterResults) {
  final MemoryPeakResults truePositives = new MemoryPeakResults();
  truePositives.setName(TITLE + " TP " + truePositivesResultsSetId.getAndIncrement());
  filterResults.forEachEntry((TIntObjectProcedure<FilterResult>) (frame, result) -> {
    for (final ScoredSpot scoredSpot : result.spots) {
      // Only spots that matched the ground truth are stored.
      if (!scoredSpot.match) {
        continue;
      }
      final float[] params =
          new float[] { 0, scoredSpot.getIntensity(), 0, scoredSpot.spot.x, scoredSpot.spot.y, 0, 0 };
      truePositives.add(frame, scoredSpot.spot.x, scoredSpot.spot.y, scoredSpot.getIntensity(),
          0d, 0f, 0f, params, null);
    }
    return true;
  });
  MemoryPeakResults.addResults(truePositives);
}
Aggregations