Use of ij.plugin.WindowOrganiser in project GDSC-SMLM by aherbert.
The class FilterAnalysis, method showPlots:
private void showPlots() {
if (plots.isEmpty())
return;
// Display the top N plots
int[] list = new int[plots.size()];
int i = 0;
for (NamedPlot p : plots) {
Plot2 plot = new Plot2(p.name, p.xAxisName, "Jaccard", p.xValues, p.yValues);
plot.setLimits(p.xValues[0], p.xValues[p.xValues.length - 1], 0, 1);
plot.setColor(Color.RED);
plot.draw();
plot.setColor(Color.BLUE);
plot.addPoints(p.xValues, p.yValues, Plot2.CROSS);
PlotWindow plotWindow = Utils.display(p.name, plot);
list[i++] = plotWindow.getImagePlus().getID();
}
new WindowOrganiser().tileWindows(list);
}
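Every example on this page follows the same basic pattern: display each plot, collect the ImagePlus window IDs, then tile the windows in a single call. A minimal, self-contained sketch of that pattern is shown below; the titles and data are placeholders, and it assumes an ImageJ version where WindowOrganiser.tileWindows(int[]) is publicly accessible, as in the calls above.
import ij.gui.Plot;
import ij.gui.PlotWindow;
import ij.plugin.WindowOrganiser;
public class TileDemo {
  // Display one plot per data series, remember each window ID, then tile them all.
  static void showAndTile(double[][] x, double[][] y) {
    int[] ids = new int[x.length];
    for (int i = 0; i < x.length; i++) {
      Plot plot = new Plot("Series " + i, "X", "Y", x[i], y[i]);
      PlotWindow pw = plot.show();
      ids[i] = pw.getImagePlus().getID();
    }
    new WindowOrganiser().tileWindows(ids);
  }
}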
Use of ij.plugin.WindowOrganiser in project GDSC-SMLM by aherbert.
The class BenchmarkFit, method run:
private void run() {
// Initialise the answer. Convert to units of the image (ADUs and pixels)
answer[Gaussian2DFunction.BACKGROUND] = benchmarkParameters.getBackground() * benchmarkParameters.gain;
answer[Gaussian2DFunction.SIGNAL] = benchmarkParameters.getSignal() * benchmarkParameters.gain;
answer[Gaussian2DFunction.X_POSITION] = benchmarkParameters.x;
answer[Gaussian2DFunction.Y_POSITION] = benchmarkParameters.y;
answer[Gaussian2DFunction.X_SD] = benchmarkParameters.s / benchmarkParameters.a;
answer[Gaussian2DFunction.Y_SD] = benchmarkParameters.s / benchmarkParameters.a;
// Set up the fit region. Always round down since 0.5 is the centre of the pixel.
int x = (int) benchmarkParameters.x;
int y = (int) benchmarkParameters.y;
region = new Rectangle(x - regionSize, y - regionSize, 2 * regionSize + 1, 2 * regionSize + 1);
if (!new Rectangle(0, 0, imp.getWidth(), imp.getHeight()).contains(region)) {
// Check if it is incorrect by only 1 pixel
if (region.width <= imp.getWidth() + 1 && region.height <= imp.getHeight() + 1) {
Utils.log("Adjusting region %s to fit within image bounds (%dx%d)", region.toString(), imp.getWidth(), imp.getHeight());
region = new Rectangle(0, 0, imp.getWidth(), imp.getHeight());
} else {
IJ.error(TITLE, "Fit region does not fit within the image");
return;
}
}
// Adjust the centre & account for 0.5 pixel offset during fitting
x -= region.x;
y -= region.y;
answer[Gaussian2DFunction.X_POSITION] -= (region.x + 0.5);
answer[Gaussian2DFunction.Y_POSITION] -= (region.y + 0.5);
// Configure for fitting
fitConfig.setBackgroundFitting(backgroundFitting);
fitConfig.setNotSignalFitting(!signalFitting);
fitConfig.setComputeDeviations(false);
final ImageStack stack = imp.getImageStack();
// Create a pool of workers
int nThreads = Prefs.getThreads();
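// The bounded queue throttles submission: put() blocks when it is full, so frames are queued no faster than the workers consume them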
BlockingQueue<Integer> jobs = new ArrayBlockingQueue<Integer>(nThreads * 2);
List<Worker> workers = new LinkedList<Worker>();
List<Thread> threads = new LinkedList<Thread>();
for (int i = 0; i < nThreads; i++) {
Worker worker = new Worker(jobs, stack, region, fitConfig);
Thread t = new Thread(worker);
workers.add(worker);
threads.add(t);
t.start();
}
final int totalFrames = benchmarkParameters.frames;
// Store all the fitting results
results = new double[totalFrames * getNumberOfStartPoints()][];
resultsTime = new long[results.length];
// Fit the frames
totalProgress = totalFrames;
stepProgress = Utils.getProgressInterval(totalProgress);
progress = 0;
for (int i = 0; i < totalFrames; i++) {
// Only fit if there were simulated photons
if (benchmarkParameters.p[i] > 0) {
put(jobs, i);
}
}
// Finish all the worker threads by passing in a null job
for (int i = 0; i < threads.size(); i++) {
put(jobs, -1);
}
// Wait for all to finish
for (int i = 0; i < threads.size(); i++) {
try {
threads.get(i).join();
} catch (InterruptedException e) {
e.printStackTrace();
}
}
threads.clear();
if (comFitting)
Utils.log(TITLE + ": CoM within start offset = %d / %d (%s%%)", comValid.intValue(), totalFrames, Utils.rounded((100.0 * comValid.intValue()) / totalFrames));
IJ.showProgress(1);
IJ.showStatus("Collecting results ...");
// Collect the results
Statistics[] stats = new Statistics[NAMES.length];
for (int i = 0; i < workers.size(); i++) {
Statistics[] next = workers.get(i).stats;
for (int j = 0; j < next.length; j++) {
if (stats[j] == null)
stats[j] = next[j];
else
stats[j].add(next[j]);
}
}
workers.clear();
// Show a table of the results
summariseResults(stats);
// Optionally show histograms
if (showHistograms) {
IJ.showStatus("Calculating histograms ...");
int[] idList = new int[NAMES.length];
int count = 0;
double[] convert = getConversionFactors();
boolean requireRetile = false;
for (int i = 0; i < NAMES.length; i++) {
if (displayHistograms[i] && convert[i] != 0) {
// We will have to convert the values...
double[] tmp = ((StoredDataStatistics) stats[i]).getValues();
for (int j = 0; j < tmp.length; j++) tmp[j] *= convert[i];
StoredDataStatistics tmpStats = new StoredDataStatistics(tmp);
idList[count++] = Utils.showHistogram(TITLE, tmpStats, NAMES[i], 0, 0, histogramBins, String.format("%s +/- %s", Utils.rounded(tmpStats.getMean()), Utils.rounded(tmpStats.getStandardDeviation())));
requireRetile = requireRetile || Utils.isNewWindow();
}
}
if (count > 0 && requireRetile) {
idList = Arrays.copyOf(idList, count);
new WindowOrganiser().tileWindows(idList);
}
}
if (saveRawData) {
String dir = Utils.getDirectory("Data_directory", rawDataDirectory);
if (dir != null)
saveData(stats, dir);
}
IJ.showStatus("");
}
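The frame fitting above is a simple producer/consumer scheme: worker threads take frame indices from a bounded queue and a negative index acts as the shutdown signal. A stripped-down sketch of that pattern, with the fitting work elided and the thread count and job payload as placeholders:
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;
public class PoisonPillDemo {
  public static void main(String[] args) throws InterruptedException {
    final int nThreads = Runtime.getRuntime().availableProcessors();
    final BlockingQueue<Integer> jobs = new ArrayBlockingQueue<Integer>(nThreads * 2);
    List<Thread> threads = new ArrayList<Thread>();
    for (int i = 0; i < nThreads; i++) {
      Thread t = new Thread(new Runnable() {
        public void run() {
          try {
            for (;;) {
              int job = jobs.take();
              if (job < 0)
                break; // poison pill: stop this worker
              // ... process frame 'job' here ...
            }
          } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
          }
        }
      });
      threads.add(t);
      t.start();
    }
    for (int frame = 0; frame < 100; frame++)
      jobs.put(frame); // submit work
    for (int i = 0; i < threads.size(); i++)
      jobs.put(-1); // one pill per worker
    for (Thread t : threads)
      t.join(); // wait for completion
  }
}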
Use of ij.plugin.WindowOrganiser in project GDSC-SMLM by aherbert.
The class BenchmarkSpotFilter, method run:
/*
* (non-Javadoc)
*
* @see ij.plugin.PlugIn#run(java.lang.String)
*/
public void run(String arg) {
SMLMUsageTracker.recordPlugin(this.getClass(), arg);
extraOptions = Utils.isExtraOptions();
batchMode = "batch".equals(arg);
simulationParameters = CreateData.simulationParameters;
if (simulationParameters == null) {
IJ.error(TITLE, "No benchmark spot parameters in memory");
return;
}
imp = CreateData.getImage();
if (imp == null) {
IJ.error(TITLE, "No benchmark image");
return;
}
results = CreateData.getResults();
if (results == null) {
IJ.error(TITLE, "No benchmark results in memory");
return;
}
if (!showDialog())
return;
// Clear old results to free memory
if (filterResult != null) {
filterResult.filterResults.clear();
filterResult.filterResults = null;
filterResult = null;
}
// For graphs
windowOrganiser = new WindowOrganiser();
if (batchMode) {
// Batch mode to test enumeration of filters
final double sd = simulationParameters.s / simulationParameters.a;
final int limit = (int) Math.floor(3 * sd);
double[] searchParam = getRange(minSearch, maxSearch, 1);
// Continuous parameters
double[] pEmpty = new double[0];
double[] mParam = (batchMean) ? getRange(limit, 0.05) : pEmpty;
double[] gParam = (batchGaussian) ? getRange(limit, 0.05) : pEmpty;
// Less continuous parameters
double[] cParam = (batchCircular) ? getRange(limit, 0.5) : pEmpty;
// Discrete parameters
double[] medParam = (batchMedian) ? getRange(limit, 1) : pEmpty;
setupProgress(imp.getImageStackSize() * searchParam.length * (mParam.length + gParam.length + cParam.length + medParam.length), "Frame");
ArrayList<BatchResult[]> batchResults = new ArrayList<BatchResult[]>(cachedBatchResults.size());
config.setDataFilterType(DataFilterType.SINGLE);
for (double search : searchParam) {
// Run all, store the results for plotting.
// Re-use results from the cache where available to allow quick reanalysis.
config.setSearch(search);
if (batchMean)
batchResults.add(addToCache(DataFilter.MEAN, mParam, search));
if (batchGaussian)
batchResults.add(addToCache(DataFilter.GAUSSIAN, gParam, search));
if (batchCircular)
batchResults.add(addToCache(DataFilter.CIRCULAR_MEAN, cParam, search));
if (batchMedian)
batchResults.add(addToCache(DataFilter.MEDIAN, medParam, search));
}
IJ.showProgress(-1);
IJ.showStatus("");
if (Utils.isInterrupted())
return;
// Analysis options
GenericDialog gd = new GenericDialog(TITLE);
gd.addMessage("Choose performance plots:");
for (int i = 0; i < batchPlot.length; i++) gd.addCheckbox(batchPlotNames[i], batchPlot[i]);
gd.addChoice("Selection", SELECTION, SELECTION[selection]);
gd.addCheckbox("Show_plots", showPlot);
gd.addCheckbox("Plot_rank_by_intensity", rankByIntensity);
gd.addCheckbox("Show_failures_plots", showFailuresPlot);
gd.addCheckbox("Show_TP", showTP);
gd.addCheckbox("Show_FP", showFP);
gd.addCheckbox("Show_FN", showFN);
gd.showDialog();
if (gd.wasCanceled())
return;
for (int i = 0; i < batchPlot.length; i++) batchPlot[i] = gd.getNextBoolean();
selection = gd.getNextChoiceIndex();
showPlot = gd.getNextBoolean();
rankByIntensity = gd.getNextBoolean();
showFailuresPlot = gd.getNextBoolean();
showTP = gd.getNextBoolean();
showFP = gd.getNextBoolean();
showFN = gd.getNextBoolean();
// Plot charts
for (int i = 0; i < batchPlot.length; i++) plot(i, batchResults);
// Store in global singleton
filterResult = analyse(batchResults);
} else {
// Single filter mode
setupProgress(imp.getImageStackSize(), "Frame");
filterResult = run(config, filterRelativeDistances);
}
IJ.showProgress(-1);
IJ.showStatus("");
getTable(false).flush();
if (filterResult == null)
return;
// Store a clone of the config
filterResult.config = filterResult.config.clone();
// Debugging the matches
if (debug)
addSpotsToMemory(filterResult.filterResults);
if (showFailuresPlot)
showFailuresPlot(filterResult);
if (showPlot)
showPlot(filterResult);
if (isShowOverlay())
showOverlay(imp, filterResult);
windowOrganiser.tile();
}
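The batch-mode dialog above relies on ImageJ's GenericDialog contract: values are read back with the getNext* methods in exactly the same order the fields were added. A minimal sketch of that checkbox round trip, with a placeholder title and field names:
import ij.gui.GenericDialog;
public class DialogDemo {
  // Read checkbox values back in the same order they were added.
  static boolean[] askOptions(String[] names, boolean[] defaults) {
    GenericDialog gd = new GenericDialog("Options");
    for (int i = 0; i < names.length; i++)
      gd.addCheckbox(names[i], defaults[i]);
    gd.showDialog();
    if (gd.wasCanceled())
      return null;
    boolean[] values = new boolean[names.length];
    for (int i = 0; i < values.length; i++)
      values[i] = gd.getNextBoolean(); // same order as addCheckbox calls
    return values;
  }
}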
Use of ij.plugin.WindowOrganiser in project GDSC-SMLM by aherbert.
The class BenchmarkSpotFit, method summariseResults:
private void summariseResults(TIntObjectHashMap<FilterCandidates> filterCandidates, long runTime, final PreprocessedPeakResult[] preprocessedPeakResults, int nUniqueIDs) {
createTable();
// Summarise the fitting results. N fits, N failures.
// Optimal match statistics if filtering is perfect (since fitting is not perfect).
StoredDataStatistics distanceStats = new StoredDataStatistics();
StoredDataStatistics depthStats = new StoredDataStatistics();
// Get stats for all fitted results and those that match
// Signal, SNR, Width, xShift, yShift, Precision
createFilterCriteria();
StoredDataStatistics[][] stats = new StoredDataStatistics[3][filterCriteria.length];
for (int i = 0; i < stats.length; i++) for (int j = 0; j < stats[i].length; j++) stats[i][j] = new StoredDataStatistics();
final double nmPerPixel = simulationParameters.a;
double tp = 0, fp = 0;
int failcTP = 0, failcFP = 0;
int cTP = 0, cFP = 0;
int[] singleStatus = null, multiStatus = null, doubletStatus = null, multiDoubletStatus = null;
singleStatus = new int[FitStatus.values().length];
multiStatus = new int[singleStatus.length];
doubletStatus = new int[singleStatus.length];
multiDoubletStatus = new int[singleStatus.length];
// Easier to materialise the values since we have a lot of non-final variables to manipulate
final int[] frames = new int[filterCandidates.size()];
final FilterCandidates[] candidates = new FilterCandidates[filterCandidates.size()];
final int[] counter = new int[1];
filterCandidates.forEachEntry(new TIntObjectProcedure<FilterCandidates>() {
public boolean execute(int a, FilterCandidates b) {
frames[counter[0]] = a;
candidates[counter[0]] = b;
counter[0]++;
return true;
}
});
for (FilterCandidates result : candidates) {
// Count the number of fit results that matched (tp) and did not match (fp)
tp += result.tp;
fp += result.fp;
for (int i = 0; i < result.fitResult.length; i++) {
if (result.spots[i].match)
cTP++;
else
cFP++;
final MultiPathFitResult fitResult = result.fitResult[i];
if (singleStatus != null && result.spots[i].match) {
// Debugging reasons for fit failure
addStatus(singleStatus, fitResult.getSingleFitResult());
addStatus(multiStatus, fitResult.getMultiFitResult());
addStatus(doubletStatus, fitResult.getDoubletFitResult());
addStatus(multiDoubletStatus, fitResult.getMultiDoubletFitResult());
}
if (noMatch(fitResult)) {
if (result.spots[i].match)
failcTP++;
else
failcFP++;
}
// We have multi-path results.
// We want statistics for:
// [0] all fitted spots
// [1] fitted spots that match a result
// [2] fitted spots that do not match a result
addToStats(fitResult.getSingleFitResult(), stats);
addToStats(fitResult.getMultiFitResult(), stats);
addToStats(fitResult.getDoubletFitResult(), stats);
addToStats(fitResult.getMultiDoubletFitResult(), stats);
}
// Statistics on spots that fit an actual result
for (int i = 0; i < result.match.length; i++) {
if (!result.match[i].isFitResult())
// For now just ignore the candidates that matched
continue;
FitMatch fitMatch = (FitMatch) result.match[i];
distanceStats.add(fitMatch.d * nmPerPixel);
depthStats.add(fitMatch.z * nmPerPixel);
}
}
// Store data for computing correlation
double[] i1 = new double[depthStats.getN()];
double[] i2 = new double[i1.length];
double[] is = new double[i1.length];
int ci = 0;
for (FilterCandidates result : candidates) {
for (int i = 0; i < result.match.length; i++) {
if (!result.match[i].isFitResult())
// For now just ignore the candidates that matched
continue;
FitMatch fitMatch = (FitMatch) result.match[i];
ScoredSpot spot = result.spots[fitMatch.i];
i1[ci] = fitMatch.predictedSignal;
i2[ci] = fitMatch.actualSignal;
is[ci] = spot.spot.intensity;
ci++;
}
}
// We want to compute the Jaccard against the spot metric
// Filter the results using the multi-path filter
ArrayList<MultiPathFitResults> multiPathResults = new ArrayList<MultiPathFitResults>(filterCandidates.size());
for (int i = 0; i < frames.length; i++) {
int frame = frames[i];
MultiPathFitResult[] multiPathFitResults = candidates[i].fitResult;
int totalCandidates = candidates[i].spots.length;
int nActual = actualCoordinates.get(frame).size();
multiPathResults.add(new MultiPathFitResults(frame, multiPathFitResults, totalCandidates, nActual));
}
// Score the results and count the number returned
List<FractionalAssignment[]> assignments = new ArrayList<FractionalAssignment[]>();
final TIntHashSet set = new TIntHashSet(nUniqueIDs);
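// Record the unique ID of every result accepted during scoring; matched IDs are removed in the loop below and the remainder are appended afterwards as unmatched results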
FractionScoreStore scoreStore = new FractionScoreStore() {
public void add(int uniqueId) {
set.add(uniqueId);
}
};
MultiPathFitResults[] multiResults = multiPathResults.toArray(new MultiPathFitResults[multiPathResults.size()]);
// Filter with no filter
MultiPathFilter mpf = new MultiPathFilter(new SignalFilter(0), null, multiFilter.residualsThreshold);
FractionClassificationResult fractionResult = mpf.fractionScoreSubset(multiResults, Integer.MAX_VALUE, this.results.size(), assignments, scoreStore, CoordinateStoreFactory.create(imp.getWidth(), imp.getHeight(), fitConfig.getDuplicateDistance()));
double nPredicted = fractionResult.getTP() + fractionResult.getFP();
final double[][] matchScores = new double[set.size()][];
int count = 0;
for (int i = 0; i < assignments.size(); i++) {
FractionalAssignment[] a = assignments.get(i);
if (a == null)
continue;
for (int j = 0; j < a.length; j++) {
final PreprocessedPeakResult r = ((PeakFractionalAssignment) a[j]).peakResult;
set.remove(r.getUniqueId());
final double precision = Math.sqrt(r.getLocationVariance());
final double signal = r.getSignal();
final double snr = r.getSNR();
final double width = r.getXSDFactor();
final double xShift = r.getXRelativeShift2();
final double yShift = r.getYRelativeShift2();
// These two are combined for filtering; the max is what matters.
final double shift = (xShift > yShift) ? Math.sqrt(xShift) : Math.sqrt(yShift);
final double eshift = Math.sqrt(xShift + yShift);
final double[] score = new double[8];
score[FILTER_SIGNAL] = signal;
score[FILTER_SNR] = snr;
score[FILTER_MIN_WIDTH] = width;
score[FILTER_MAX_WIDTH] = width;
score[FILTER_SHIFT] = shift;
score[FILTER_ESHIFT] = eshift;
score[FILTER_PRECISION] = precision;
score[FILTER_PRECISION + 1] = a[j].getScore();
matchScores[count++] = score;
}
}
// Add the rest
set.forEach(new CustomTIntProcedure(count) {
public boolean execute(int uniqueId) {
// This should not be null; otherwise something has gone wrong
PreprocessedPeakResult r = preprocessedPeakResults[uniqueId];
if (r == null)
throw new RuntimeException("Missing result: " + uniqueId);
final double precision = Math.sqrt(r.getLocationVariance());
final double signal = r.getSignal();
final double snr = r.getSNR();
final double width = r.getXSDFactor();
final double xShift = r.getXRelativeShift2();
final double yShift = r.getYRelativeShift2();
// These two are combined for filtering; the max is what matters.
final double shift = (xShift > yShift) ? Math.sqrt(xShift) : Math.sqrt(yShift);
final double eshift = Math.sqrt(xShift + yShift);
final double[] score = new double[8];
score[FILTER_SIGNAL] = signal;
score[FILTER_SNR] = snr;
score[FILTER_MIN_WIDTH] = width;
score[FILTER_MAX_WIDTH] = width;
score[FILTER_SHIFT] = shift;
score[FILTER_ESHIFT] = eshift;
score[FILTER_PRECISION] = precision;
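// 'c' is the running index carried by CustomTIntProcedure (constructed with 'count' above) so unmatched scores follow the matched ones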
matchScores[c++] = score;
return true;
}
});
// Debug the reasons the fit failed
if (singleStatus != null) {
String name = PeakFit.getSolverName(fitConfig);
if (fitConfig.getFitSolver() == FitSolver.MLE && fitConfig.isModelCamera())
name += " Camera";
System.out.println("Failure counts: " + name);
printFailures("Single", singleStatus);
printFailures("Multi", multiStatus);
printFailures("Doublet", doubletStatus);
printFailures("Multi doublet", multiDoubletStatus);
}
StringBuilder sb = new StringBuilder(300);
// Add information about the simulation
//(simulationParameters.minSignal + simulationParameters.maxSignal) * 0.5;
final double signal = simulationParameters.signalPerFrame;
final int n = results.size();
sb.append(imp.getStackSize()).append("\t");
final int w = imp.getWidth();
final int h = imp.getHeight();
sb.append(w).append("\t");
sb.append(h).append("\t");
sb.append(n).append("\t");
double density = ((double) n / imp.getStackSize()) / (w * h) / (simulationParameters.a * simulationParameters.a / 1e6);
sb.append(Utils.rounded(density)).append("\t");
sb.append(Utils.rounded(signal)).append("\t");
sb.append(Utils.rounded(simulationParameters.s)).append("\t");
sb.append(Utils.rounded(simulationParameters.a)).append("\t");
sb.append(Utils.rounded(simulationParameters.depth)).append("\t");
sb.append(simulationParameters.fixedDepth).append("\t");
sb.append(Utils.rounded(simulationParameters.gain)).append("\t");
sb.append(Utils.rounded(simulationParameters.readNoise)).append("\t");
sb.append(Utils.rounded(simulationParameters.b)).append("\t");
sb.append(Utils.rounded(simulationParameters.b2)).append("\t");
// Compute the noise
double noise = simulationParameters.b2;
if (simulationParameters.emCCD) {
// The b2 parameter was computed without application of the EM-CCD noise factor of 2.
//final double b2 = backgroundVariance + readVariance
// = simulationParameters.b + readVariance
// This should be applied only to the background variance.
final double readVariance = noise - simulationParameters.b;
noise = simulationParameters.b * 2 + readVariance;
}
if (simulationParameters.fullSimulation) {
// The total signal is spread over frames
}
sb.append(Utils.rounded(signal / Math.sqrt(noise))).append("\t");
sb.append(Utils.rounded(simulationParameters.s / simulationParameters.a)).append("\t");
sb.append(spotFilter.getDescription());
// nP and nN are the fractional scores of the spot candidates
addCount(sb, nP + nN);
addCount(sb, nP);
addCount(sb, nN);
addCount(sb, fP);
addCount(sb, fN);
String name = PeakFit.getSolverName(fitConfig);
if (fitConfig.getFitSolver() == FitSolver.MLE && fitConfig.isModelCamera())
name += " Camera";
add(sb, name);
add(sb, config.getFitting());
resultPrefix = sb.toString();
// Q. Should I add other fit configuration here?
// The fraction of positive and negative candidates that were included
add(sb, (100.0 * cTP) / nP);
add(sb, (100.0 * cFP) / nN);
// Score the fitting results compared to the original simulation.
// Score the candidate selection:
add(sb, cTP + cFP);
add(sb, cTP);
add(sb, cFP);
// TP are all candidates that can be matched to a spot
// FP are all candidates that cannot be matched to a spot
// FN = The number of missed spots
FractionClassificationResult m = new FractionClassificationResult(cTP, cFP, 0, simulationParameters.molecules - cTP);
add(sb, m.getRecall());
add(sb, m.getPrecision());
add(sb, m.getF1Score());
add(sb, m.getJaccard());
// Score the fitting results:
add(sb, failcTP);
add(sb, failcFP);
// TP are all fit results that can be matched to a spot
// FP are all fit results that cannot be matched to a spot
// FN = The number of missed spots
add(sb, tp);
add(sb, fp);
m = new FractionClassificationResult(tp, fp, 0, simulationParameters.molecules - tp);
add(sb, m.getRecall());
add(sb, m.getPrecision());
add(sb, m.getF1Score());
add(sb, m.getJaccard());
// Do it again but pretend we can perfectly filter all the false positives
//add(sb, tp);
m = new FractionClassificationResult(tp, 0, 0, simulationParameters.molecules - tp);
// Recall is unchanged
// Precision will be 100%
add(sb, m.getF1Score());
add(sb, m.getJaccard());
// The mean may be subject to extreme outliers so use the median
double median = distanceStats.getMedian();
add(sb, median);
WindowOrganiser wo = new WindowOrganiser();
String label = String.format("Recall = %s. n = %d. Median = %s nm. SD = %s nm", Utils.rounded(m.getRecall()), distanceStats.getN(), Utils.rounded(median), Utils.rounded(distanceStats.getStandardDeviation()));
int id = Utils.showHistogram(TITLE, distanceStats, "Match Distance (nm)", 0, 0, 0, label);
if (Utils.isNewWindow())
wo.add(id);
median = depthStats.getMedian();
add(sb, median);
// Sort by spot intensity and produce correlation
int[] indices = Utils.newArray(i1.length, 0, 1);
if (showCorrelation)
Sort.sort(indices, is, rankByIntensity);
double[] r = (showCorrelation) ? new double[i1.length] : null;
double[] sr = (showCorrelation) ? new double[i1.length] : null;
double[] rank = (showCorrelation) ? new double[i1.length] : null;
ci = 0;
FastCorrelator fastCorrelator = new FastCorrelator();
ArrayList<Ranking> pc1 = new ArrayList<Ranking>();
ArrayList<Ranking> pc2 = new ArrayList<Ranking>();
for (int ci2 : indices) {
fastCorrelator.add((long) Math.round(i1[ci2]), (long) Math.round(i2[ci2]));
pc1.add(new Ranking(i1[ci2], ci));
pc2.add(new Ranking(i2[ci2], ci));
if (showCorrelation) {
r[ci] = fastCorrelator.getCorrelation();
sr[ci] = Correlator.correlation(rank(pc1), rank(pc2));
if (rankByIntensity)
rank[ci] = is[0] - is[ci];
else
rank[ci] = ci;
}
ci++;
}
final double pearsonCorr = fastCorrelator.getCorrelation();
final double rankedCorr = Correlator.correlation(rank(pc1), rank(pc2));
// Get the regression
SimpleRegression regression = new SimpleRegression(false);
for (int i = 0; i < pc1.size(); i++) regression.addData(pc1.get(i).value, pc2.get(i).value);
//final double intercept = regression.getIntercept();
final double slope = regression.getSlope();
if (showCorrelation) {
String title = TITLE + " Intensity";
Plot plot = new Plot(title, "Candidate", "Spot");
double[] limits1 = Maths.limits(i1);
double[] limits2 = Maths.limits(i2);
plot.setLimits(limits1[0], limits1[1], limits2[0], limits2[1]);
label = String.format("Correlation=%s; Ranked=%s; Slope=%s", Utils.rounded(pearsonCorr), Utils.rounded(rankedCorr), Utils.rounded(slope));
plot.addLabel(0, 0, label);
plot.setColor(Color.red);
plot.addPoints(i1, i2, Plot.DOT);
if (slope > 1)
plot.drawLine(limits1[0], limits1[0] * slope, limits1[1], limits1[1] * slope);
else
plot.drawLine(limits2[0] / slope, limits2[0], limits2[1] / slope, limits2[1]);
PlotWindow pw = Utils.display(title, plot);
if (Utils.isNewWindow())
wo.add(pw);
title = TITLE + " Correlation";
plot = new Plot(title, "Spot Rank", "Correlation");
double[] xlimits = Maths.limits(rank);
double[] ylimits = Maths.limits(r);
ylimits = Maths.limits(ylimits, sr);
plot.setLimits(xlimits[0], xlimits[1], ylimits[0], ylimits[1]);
plot.setColor(Color.red);
plot.addPoints(rank, r, Plot.LINE);
plot.setColor(Color.blue);
plot.addPoints(rank, sr, Plot.LINE);
plot.setColor(Color.black);
plot.addLabel(0, 0, label);
pw = Utils.display(title, plot);
if (Utils.isNewWindow())
wo.add(pw);
}
add(sb, pearsonCorr);
add(sb, rankedCorr);
add(sb, slope);
label = String.format("n = %d. Median = %s nm", depthStats.getN(), Utils.rounded(median));
id = Utils.showHistogram(TITLE, depthStats, "Match Depth (nm)", 0, 1, 0, label);
if (Utils.isNewWindow())
wo.add(id);
// Plot histograms of the stats on the same window
double[] lower = new double[filterCriteria.length];
double[] upper = new double[lower.length];
min = new double[lower.length];
max = new double[lower.length];
for (int i = 0; i < stats[0].length; i++) {
double[] limits = showDoubleHistogram(stats, i, wo, matchScores, nPredicted);
lower[i] = limits[0];
upper[i] = limits[1];
min[i] = limits[2];
max[i] = limits[3];
}
// Reconfigure some of the range limits
// Make this a bit bigger
upper[FILTER_SIGNAL] *= 2;
// Make this a bit bigger
upper[FILTER_SNR] *= 2;
double factor = 0.25;
if (lower[FILTER_MIN_WIDTH] != 0)
// (assuming lower is less than 1)
upper[FILTER_MIN_WIDTH] = 1 - Math.max(0, factor * (1 - lower[FILTER_MIN_WIDTH]));
if (upper[FILTER_MAX_WIDTH] != 0)
// (assuming upper is more than 1)
lower[FILTER_MAX_WIDTH] = 1 + Math.max(0, factor * (upper[FILTER_MAX_WIDTH] - 1));
// Round the ranges
final double[] interval = new double[stats[0].length];
interval[FILTER_SIGNAL] = SignalFilter.DEFAULT_INCREMENT;
interval[FILTER_SNR] = SNRFilter.DEFAULT_INCREMENT;
interval[FILTER_MIN_WIDTH] = WidthFilter2.DEFAULT_MIN_INCREMENT;
interval[FILTER_MAX_WIDTH] = WidthFilter.DEFAULT_INCREMENT;
interval[FILTER_SHIFT] = ShiftFilter.DEFAULT_INCREMENT;
interval[FILTER_ESHIFT] = EShiftFilter.DEFAULT_INCREMENT;
interval[FILTER_PRECISION] = PrecisionFilter.DEFAULT_INCREMENT;
interval[FILTER_ITERATIONS] = 0.1;
interval[FILTER_EVALUATIONS] = 0.1;
// Create a range increment
double[] increment = new double[lower.length];
for (int i = 0; i < increment.length; i++) {
lower[i] = Maths.floor(lower[i], interval[i]);
upper[i] = Maths.ceil(upper[i], interval[i]);
double range = upper[i] - lower[i];
// Allow clipping if the range is small compared to the min increment
double multiples = range / interval[i];
// Use 8 multiples for the equivalent of +/- 4 steps around the centre
if (multiples < 8) {
multiples = Math.ceil(multiples);
} else
multiples = 8;
increment[i] = Maths.ceil(range / multiples, interval[i]);
if (i == FILTER_MIN_WIDTH)
// Requires clipping based on the upper limit
lower[i] = upper[i] - increment[i] * multiples;
else
upper[i] = lower[i] + increment[i] * multiples;
}
for (int i = 0; i < stats[0].length; i++) {
lower[i] = Maths.round(lower[i]);
upper[i] = Maths.round(upper[i]);
min[i] = Maths.round(min[i]);
max[i] = Maths.round(max[i]);
increment[i] = Maths.round(increment[i]);
sb.append("\t").append(min[i]).append(':').append(lower[i]).append('-').append(upper[i]).append(':').append(max[i]);
}
// Disable some filters
increment[FILTER_SIGNAL] = Double.POSITIVE_INFINITY;
//increment[FILTER_SHIFT] = Double.POSITIVE_INFINITY;
increment[FILTER_ESHIFT] = Double.POSITIVE_INFINITY;
wo.tile();
sb.append("\t").append(Utils.timeToString(runTime / 1000000.0));
summaryTable.append(sb.toString());
if (saveFilterRange) {
GlobalSettings gs = SettingsManager.loadSettings();
FilterSettings filterSettings = gs.getFilterSettings();
String filename = (silent) ? filterSettings.filterSetFilename : Utils.getFilename("Filter_range_file", filterSettings.filterSetFilename);
if (filename == null)
return;
// Remove extension to store the filename
filename = Utils.replaceExtension(filename, ".xml");
filterSettings.filterSetFilename = filename;
// Create a filter set using the ranges
ArrayList<Filter> filters = new ArrayList<Filter>(3);
filters.add(new MultiFilter2(lower[0], (float) lower[1], lower[2], lower[3], lower[4], lower[5], lower[6]));
filters.add(new MultiFilter2(upper[0], (float) upper[1], upper[2], upper[3], upper[4], upper[5], upper[6]));
filters.add(new MultiFilter2(increment[0], (float) increment[1], increment[2], increment[3], increment[4], increment[5], increment[6]));
if (saveFilters(filename, filters))
SettingsManager.saveSettings(gs);
// Create a filter set using the min/max and the initial bounds.
// Set sensible limits
min[FILTER_SIGNAL] = Math.max(min[FILTER_SIGNAL], 30);
max[FILTER_PRECISION] = Math.min(max[FILTER_PRECISION], 100);
// Commented this out so that the 4-set filters are the same as the 3-set filters.
// Any difference between them would lead to inconsistencies when optimising.
// // Use half the initial bounds (hoping this is a good starting guess for the optimum)
// final boolean[] limitToLower = new boolean[min.length];
// limitToLower[FILTER_SIGNAL] = true;
// limitToLower[FILTER_SNR] = true;
// limitToLower[FILTER_MIN_WIDTH] = true;
// limitToLower[FILTER_MAX_WIDTH] = false;
// limitToLower[FILTER_SHIFT] = false;
// limitToLower[FILTER_ESHIFT] = false;
// limitToLower[FILTER_PRECISION] = true;
// for (int i = 0; i < limitToLower.length; i++)
// {
// final double range = (upper[i] - lower[i]) / 2;
// if (limitToLower[i])
// upper[i] = lower[i] + range;
// else
// lower[i] = upper[i] - range;
// }
filters = new ArrayList<Filter>(4);
filters.add(new MultiFilter2(min[0], (float) min[1], min[2], min[3], min[4], min[5], min[6]));
filters.add(new MultiFilter2(lower[0], (float) lower[1], lower[2], lower[3], lower[4], lower[5], lower[6]));
filters.add(new MultiFilter2(upper[0], (float) upper[1], upper[2], upper[3], upper[4], upper[5], upper[6]));
filters.add(new MultiFilter2(max[0], (float) max[1], max[2], max[3], max[4], max[5], max[6]));
saveFilters(Utils.replaceExtension(filename, ".4.xml"), filters);
}
}
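The intensity analysis above fits a regression forced through the origin using Apache Commons Math's SimpleRegression(false) before reporting the slope alongside the Pearson and ranked correlations. A standalone sketch of that step with invented toy data (the real values come from the candidate/fit match lists):
import org.apache.commons.math3.stat.regression.SimpleRegression;
public class RegressionDemo {
  public static void main(String[] args) {
    double[] candidate = { 100, 200, 300, 400 };
    double[] fitted = { 95, 210, 290, 405 };
    // 'false' omits the intercept so the fit is y = slope * x
    SimpleRegression regression = new SimpleRegression(false);
    for (int i = 0; i < candidate.length; i++)
      regression.addData(candidate[i], fitted[i]);
    System.out.println("Slope = " + regression.getSlope());
  }
}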
Use of ij.plugin.WindowOrganiser in project GDSC-SMLM by aherbert.
The class PSFDrift, method computeDrift:
private void computeDrift() {
// Create a grid of XY offset positions between 0-1 for PSF insert
final double[] grid = new double[gridSize];
for (int i = 0; i < grid.length; i++) grid[i] = (double) i / gridSize;
// Configure fitting region
final int w = 2 * regionSize + 1;
centrePixel = w / 2;
// Check region size using the image PSF
double newPsfWidth = (double) imp.getWidth() / scale;
if (Math.ceil(newPsfWidth) > w)
Utils.log(TITLE + ": Fitted region size (%d) is smaller than the scaled PSF (%.1f)", w, newPsfWidth);
// Create robust PSF fitting settings
final double a = psfSettings.nmPerPixel * scale;
final double sa = PSFCalculator.squarePixelAdjustment(psfSettings.nmPerPixel * (psfSettings.fwhm / Gaussian2DFunction.SD_TO_FWHM_FACTOR), a);
fitConfig.setInitialPeakStdDev(sa / a);
fitConfig.setBackgroundFitting(backgroundFitting);
fitConfig.setNotSignalFitting(false);
fitConfig.setComputeDeviations(false);
fitConfig.setDisableSimpleFilter(true);
// Create the PSF over the desired z-depth
int depth = (int) Math.round(zDepth / psfSettings.nmPerSlice);
int startSlice = psfSettings.zCentre - depth;
int endSlice = psfSettings.zCentre + depth;
int nSlices = imp.getStackSize();
startSlice = (startSlice < 1) ? 1 : (startSlice > nSlices) ? nSlices : startSlice;
endSlice = (endSlice < 1) ? 1 : (endSlice > nSlices) ? nSlices : endSlice;
ImagePSFModel psf = createImagePSF(startSlice, endSlice);
int minz = startSlice - psfSettings.zCentre;
int maxz = endSlice - psfSettings.zCentre;
final int nZ = maxz - minz + 1;
final int gridSize2 = grid.length * grid.length;
total = nZ * gridSize2;
// Store all the fitting results
int nStartPoints = getNumberOfStartPoints();
results = new double[total * nStartPoints][];
// TODO - Add ability to iterate this, adjusting the current offset in the PSF
// each iteration
// Create a pool of workers
int nThreads = Prefs.getThreads();
BlockingQueue<Job> jobs = new ArrayBlockingQueue<Job>(nThreads * 2);
List<Worker> workers = new LinkedList<Worker>();
List<Thread> threads = new LinkedList<Thread>();
for (int i = 0; i < nThreads; i++) {
Worker worker = new Worker(jobs, psf, w, fitConfig);
Thread t = new Thread(worker);
workers.add(worker);
threads.add(t);
t.start();
}
// Fit
Utils.showStatus("Fitting ...");
final int step = Utils.getProgressInterval(total);
outer: for (int z = minz, i = 0; z <= maxz; z++) {
for (int x = 0; x < grid.length; x++) for (int y = 0; y < grid.length; y++, i++) {
if (IJ.escapePressed()) {
break outer;
}
put(jobs, new Job(z, grid[x], grid[y], i));
if (i % step == 0) {
IJ.showProgress(i, total);
}
}
}
// If escape was pressed then we do not need to stop the workers, just return
if (Utils.isInterrupted()) {
IJ.showProgress(1);
return;
}
// Finish all the worker threads by passing in a null job
for (int i = 0; i < threads.size(); i++) {
put(jobs, new Job());
}
// Wait for all to finish
for (int i = 0; i < threads.size(); i++) {
try {
threads.get(i).join();
} catch (InterruptedException e) {
e.printStackTrace();
}
}
threads.clear();
IJ.showProgress(1);
IJ.showStatus("");
// Plot the average and SE for the drift curve
// Plot the recall
double[] zPosition = new double[nZ];
double[] avX = new double[nZ];
double[] seX = new double[nZ];
double[] avY = new double[nZ];
double[] seY = new double[nZ];
double[] recall = new double[nZ];
for (int z = minz, i = 0; z <= maxz; z++, i++) {
Statistics statsX = new Statistics();
Statistics statsY = new Statistics();
for (int s = 0; s < nStartPoints; s++) {
int resultPosition = i * gridSize2 + s * total;
final int endResultPosition = resultPosition + gridSize2;
while (resultPosition < endResultPosition) {
if (results[resultPosition] != null) {
statsX.add(results[resultPosition][0]);
statsY.add(results[resultPosition][1]);
}
resultPosition++;
}
}
zPosition[i] = z * psfSettings.nmPerSlice;
avX[i] = statsX.getMean();
seX[i] = statsX.getStandardError();
avY[i] = statsY.getMean();
seY[i] = statsY.getStandardError();
recall[i] = (double) statsX.getN() / (nStartPoints * gridSize2);
}
// Find the range from the z-centre above the recall limit
int centre = 0;
for (int slice = startSlice, i = 0; slice <= endSlice; slice++, i++) {
if (slice == psfSettings.zCentre) {
centre = i;
break;
}
}
if (recall[centre] < recallLimit)
return;
int start = centre, end = centre;
for (int i = centre; i-- > 0; ) {
if (recall[i] < recallLimit)
break;
start = i;
}
for (int i = centre; ++i < recall.length; ) {
if (recall[i] < recallLimit)
break;
end = i;
}
int iterations = 1;
LoessInterpolator loess = null;
if (smoothing > 0)
loess = new LoessInterpolator(smoothing, iterations);
double[][] smoothx = displayPlot("Drift X", "X (nm)", zPosition, avX, seX, loess, start, end);
double[][] smoothy = displayPlot("Drift Y", "Y (nm)", zPosition, avY, seY, loess, start, end);
displayPlot("Recall", "Recall", zPosition, recall, null, null, start, end);
WindowOrganiser wo = new WindowOrganiser();
wo.tileWindows(idList);
// Ask the user if they would like to store them in the image
GenericDialog gd = new GenericDialog(TITLE);
gd.enableYesNoCancel();
gd.hideCancelButton();
startSlice = psfSettings.zCentre - (centre - start);
endSlice = psfSettings.zCentre + (end - centre);
gd.addMessage(String.format("Save the drift to the PSF?\n \nSlices %d (%s nm) - %d (%s nm) above recall limit", startSlice, Utils.rounded(zPosition[start]), endSlice, Utils.rounded(zPosition[end])));
gd.addMessage("Optionally average the end points to set drift outside the limits.\n(Select zero to ignore)");
gd.addSlider("Number_of_points", 0, 10, positionsToAverage);
gd.showDialog();
if (gd.wasOKed()) {
positionsToAverage = Math.abs((int) gd.getNextNumber());
ArrayList<PSFOffset> offset = new ArrayList<PSFOffset>();
final double pitch = psfSettings.nmPerPixel;
int j = 0, jj = 0;
for (int i = start, slice = startSlice; i <= end; slice++, i++) {
j = findCentre(zPosition[i], smoothx, j);
if (j == -1) {
Utils.log("Failed to find the offset for depth %.2f", zPosition[i]);
continue;
}
// The offset should store the difference to the centre in pixels so divide by the pixel pitch
double cx = smoothx[1][j] / pitch;
double cy = smoothy[1][j] / pitch;
jj = findOffset(slice, jj);
if (jj != -1) {
cx += psfSettings.offset[jj].cx;
cy += psfSettings.offset[jj].cy;
}
offset.add(new PSFOffset(slice, cx, cy));
}
addMissingOffsets(startSlice, endSlice, nSlices, offset);
psfSettings.offset = offset.toArray(new PSFOffset[offset.size()]);
psfSettings.addNote(TITLE, String.format("Solver=%s, Region=%d", PeakFit.getSolverName(fitConfig), regionSize));
imp.setProperty("Info", XmlUtils.toXML(psfSettings));
}
}
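The drift curves above are smoothed with Apache Commons Math's LoessInterpolator before the per-slice offsets are stored back into the PSF settings. A self-contained sketch of that smoothing step with invented z positions and drift values (the bandwidth and iteration count mirror the plugin's smoothing fields, but every number here is a placeholder):
import org.apache.commons.math3.analysis.interpolation.LoessInterpolator;
public class LoessDemo {
  public static void main(String[] args) {
    double[] z = { -400, -300, -200, -100, 0, 100, 200, 300, 400 };
    double[] driftX = { 5.1, 3.9, 2.2, 1.1, 0.0, -0.8, -2.4, -3.7, -5.3 };
    double bandwidth = 0.3; // fraction of points used in each local fit
    int iterations = 1; // robustness iterations
    LoessInterpolator loess = new LoessInterpolator(bandwidth, iterations);
    double[] smoothed = loess.smooth(z, driftX);
    for (int i = 0; i < z.length; i++)
      System.out.printf("z=%.0f raw=%.2f smooth=%.2f%n", z[i], driftX[i], smoothed[i]);
  }
}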