Use of uk.ac.sussex.gdsc.core.utils.Statistics in project GDSC-SMLM by aherbert.
Class Fire, method run:
@Override
public void run(String arg) {
extraOptions = ImageJUtils.isExtraOptions();
SmlmUsageTracker.recordPlugin(this.getClass(), arg);
// Require some fit results and selected regions
final int size = MemoryPeakResults.countMemorySize();
if (size == 0) {
IJ.error(pluginTitle, "There are no fitting results in memory");
return;
}
settings = Settings.load();
settings.save();
if ("q".equals(arg)) {
pluginTitle += " Q estimation";
runQEstimation();
return;
}
IJ.showStatus(pluginTitle + " ...");
if (!showInputDialog()) {
return;
}
MemoryPeakResults inputResults1 = ResultsManager.loadInputResults(settings.inputOption, false, null, null);
if (MemoryPeakResults.isEmpty(inputResults1)) {
IJ.error(pluginTitle, "No results could be loaded");
return;
}
MemoryPeakResults inputResults2 = ResultsManager.loadInputResults(settings.inputOption2, false, null, null);
inputResults1 = cropToRoi(inputResults1);
if (inputResults1.size() < 2) {
IJ.error(pluginTitle, "No results within the crop region");
return;
}
if (inputResults2 != null) {
inputResults2 = cropToRoi(inputResults2);
if (inputResults2.size() < 2) {
IJ.error(pluginTitle, "No results2 within the crop region");
return;
}
}
initialise(inputResults1, inputResults2);
if (!showDialog()) {
return;
}
final long start = System.currentTimeMillis();
// Compute FIRE
String name = inputResults1.getName();
final double fourierImageScale = Settings.scaleValues[settings.imageScaleIndex];
final int imageSize = Settings.imageSizeValues[settings.imageSizeIndex];
if (this.results2 != null) {
name += " vs " + this.results2.getName();
final FireResult result = calculateFireNumber(fourierMethod, samplingMethod, thresholdMethod, fourierImageScale, imageSize);
if (result != null) {
logResult(name, result);
if (settings.showFrcCurve) {
showFrcCurve(name, result, thresholdMethod);
}
}
} else {
FireResult result = null;
final int repeats = (settings.randomSplit) ? Math.max(1, settings.repeats) : 1;
setProgress(repeats);
if (repeats == 1) {
result = calculateFireNumber(fourierMethod, samplingMethod, thresholdMethod, fourierImageScale, imageSize);
if (result != null) {
logResult(name, result);
if (settings.showFrcCurve) {
showFrcCurve(name, result, thresholdMethod);
}
}
} else {
// Multi-thread this ...
final int nThreads = MathUtils.min(repeats, getThreads());
final ExecutorService executor = Executors.newFixedThreadPool(nThreads);
final LocalList<Future<?>> futures = new LocalList<>(repeats);
final LocalList<FireWorker> workers = new LocalList<>(repeats);
IJ.showProgress(0);
IJ.showStatus(pluginTitle + " computing ...");
for (int i = 1; i <= repeats; i++) {
final FireWorker w = new FireWorker(i, fourierImageScale, imageSize);
workers.add(w);
futures.add(executor.submit(w));
}
// Wait for all to finish
executor.shutdown();
ConcurrencyUtils.waitForCompletionUnchecked(futures);
IJ.showProgress(1);
// Show a combined FRC curve plot of all the smoothed curves if we have multiples.
final LUT valuesLut = LutHelper.createLut(LutColour.FIRE_GLOW);
final LutHelper.DefaultLutMapper mapper = new LutHelper.DefaultLutMapper(0, repeats);
final FrcCurvePlot curve = new FrcCurvePlot();
final Statistics stats = new Statistics();
final WindowOrganiser wo = new WindowOrganiser();
boolean oom = false;
for (int i = 0; i < repeats; i++) {
final FireWorker w = workers.get(i);
if (w.oom) {
oom = true;
}
if (w.result == null) {
continue;
}
result = w.result;
if (!Double.isNaN(result.fireNumber)) {
stats.add(result.fireNumber);
}
if (settings.showFrcCurveRepeats) {
// Output each FRC curve using a suffix.
logResult(w.name, result);
wo.add(ImageJUtils.display(w.plot.getTitle(), w.plot));
}
if (settings.showFrcCurve) {
final int index = mapper.map(i + 1);
curve.add(name, result, thresholdMethod, LutHelper.getColour(valuesLut, index), Color.blue, null);
}
}
if (result != null) {
wo.cascade();
final double mean = stats.getMean();
logResult(name, result, mean, stats);
if (settings.showFrcCurve) {
curve.addResolution(mean);
final Plot plot = curve.getPlot();
ImageJUtils.display(plot.getTitle(), plot);
}
}
if (oom) {
// @formatter:off
IJ.error(pluginTitle, "ERROR - Parallel computation out-of-memory.\n \n" + TextUtils.wrap("The number of results will be reduced. " + "Please reduce the size of the Fourier image " + "or change the number of threads " + "using the extra options (hold down the 'Shift' " + "key when running the plugin).", 80));
// @formatter:on
}
}
// Only do this once
if (settings.showFrcTimeEvolution && result != null && !Double.isNaN(result.fireNumber)) {
showFrcTimeEvolution(name, result.fireNumber, thresholdMethod, nmPerUnit / result.getNmPerPixel(), imageSize);
}
}
IJ.showStatus(pluginTitle + " complete : " + TextUtils.millisToString(System.currentTimeMillis() - start));
}
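The pattern above pools the FIRE number from each repeat into a Statistics object and reports the mean alongside the combined FRC curve. A minimal, self-contained sketch of that aggregation, assuming only that gdsc-core is on the classpath (the repeat values are illustrative):
import uk.ac.sussex.gdsc.core.utils.Statistics;
/** Sketch: summarise per-repeat FIRE numbers, skipping NaN results as the plugin does. */
public class FireRepeatSummary {
  public static void main(String[] args) {
    // Hypothetical FIRE numbers (nm) from repeated random splits; NaN marks a failed estimate.
    final double[] fireNumbers = {48.2, 51.7, Double.NaN, 49.9, 50.4};
    final Statistics stats = new Statistics();
    for (final double f : fireNumbers) {
      if (!Double.isNaN(f)) {
        stats.add(f); // only valid resolutions contribute to the summary
      }
    }
    System.out.printf("FIRE = %.2f +/- %.2f nm (n=%d)%n",
        stats.getMean(), stats.getStandardDeviation(), stats.getN());
  }
}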
Use of uk.ac.sussex.gdsc.core.utils.Statistics in project GDSC-SMLM by aherbert.
Class PcPalmMolecules, method runSimulation:
private void runSimulation(boolean resultsAvailable) {
if (resultsAvailable && !showSimulationDialog()) {
return;
}
startLog();
log("Simulation parameters");
if (settings.blinkingDistribution == 3) {
log(" - Clusters = %d", settings.numberOfMolecules);
log(" - Simulation size = %s um", MathUtils.rounded(settings.simulationSize, 4));
log(" - Molecules/cluster = %s", MathUtils.rounded(settings.blinkingRate, 4));
log(" - Blinking distribution = %s", Settings.BLINKING_DISTRIBUTION[settings.blinkingDistribution]);
log(" - p-Value = %s", MathUtils.rounded(settings.pvalue, 4));
} else {
log(" - Molecules = %d", settings.numberOfMolecules);
log(" - Simulation size = %s um", MathUtils.rounded(settings.simulationSize, 4));
log(" - Blinking rate = %s", MathUtils.rounded(settings.blinkingRate, 4));
log(" - Blinking distribution = %s", Settings.BLINKING_DISTRIBUTION[settings.blinkingDistribution]);
}
log(" - Average precision = %s nm", MathUtils.rounded(settings.sigmaS, 4));
log(" - Clusters simulation = " + Settings.CLUSTER_SIMULATION[settings.clusterSimulation]);
if (settings.clusterSimulation > 0) {
log(" - Cluster number = %s +/- %s", MathUtils.rounded(settings.clusterNumber, 4), MathUtils.rounded(settings.clusterNumberStdDev, 4));
log(" - Cluster radius = %s nm", MathUtils.rounded(settings.clusterRadius, 4));
}
final double nmPerPixel = 100;
final double width = settings.simulationSize * 1000.0;
final UniformRandomProvider rng = UniformRandomProviders.create();
final UniformDistribution dist = new UniformDistribution(null, new double[] { width, width, 0 }, rng.nextInt());
final NormalizedGaussianSampler gauss = SamplerUtils.createNormalizedGaussianSampler(rng);
settings.molecules = new ArrayList<>(settings.numberOfMolecules);
// Create some dummy results since the calibration is required for later analysis
settings.results = new MemoryPeakResults(PsfHelper.create(PSFType.CUSTOM));
settings.results.setCalibration(CalibrationHelper.create(nmPerPixel, 1, 100));
settings.results.setSource(new NullSource("Molecule Simulation"));
settings.results.begin();
int count = 0;
// Generate a sequence of coordinates
final ArrayList<double[]> xyz = new ArrayList<>((int) (settings.numberOfMolecules * 1.1));
final Statistics statsRadius = new Statistics();
final Statistics statsSize = new Statistics();
final String maskTitle = TITLE + " Cluster Mask";
ByteProcessor bp = null;
double maskScale = 0;
if (settings.clusterSimulation > 0) {
// Simulate clusters.
// Note: In the Veatch et al. paper (Plos 1, e31457) correlation functions are built using
// circles with small radii of 4-8 Arbitrary Units (AU) or large radii of 10-30 AU. A
// fluctuations model is created at T = 1.075 Tc. It is not clear exactly how the particles
// are distributed.
// It may be that a mask is created first using the model. The particles are placed on the
// mask using a specified density. This simulation produces a figure to show either a damped
// cosine function (circles) or an exponential (fluctuations). The number of particles in
// each circle may be randomly determined just by density. The figure does not discuss the
// derivation of the cluster size statistic.
//
// If this plugin simulation is run with a uniform distribution and blinking rate of 1 then
// the damped cosine function is reproduced. The curve crosses g(r)=1 at a value equivalent
// to the average distance to the centre-of-mass of each drawn cluster, not the input cluster
// radius parameter (which is a hard upper limit on the distance to centre).
final int maskSize = settings.lowResolutionImageSize;
int[] mask = null;
// scale is in nm/pixel
maskScale = width / maskSize;
final ArrayList<double[]> clusterCentres = new ArrayList<>();
int totalSteps = 1 + (int) Math.ceil(settings.numberOfMolecules / settings.clusterNumber);
if (settings.clusterSimulation == 2 || settings.clusterSimulation == 3) {
// Clusters are non-overlapping circles
// Ensure the circles do not overlap by using an exclusion mask that accumulates
// out-of-bounds pixels by drawing the last cluster (plus some border) on an image. When no
// more pixels are available then stop generating molecules.
// This is done by cumulatively filling a mask and using the MaskDistribution to select
// a new point. This may be slow but it works.
// TODO - Allow clusters of different sizes...
mask = new int[maskSize * maskSize];
Arrays.fill(mask, 255);
MaskDistribution maskDistribution = new MaskDistribution(mask, maskSize, maskSize, 0, maskScale, maskScale, rng);
double[] centre;
IJ.showStatus("Computing clusters mask");
final int roiRadius = (int) Math.round((settings.clusterRadius * 2) / maskScale);
if (settings.clusterSimulation == 3) {
// Generate a mask of circles then sample from that.
// If we want to fill the mask completely then adjust the total steps to be the number of
// circles that can fit inside the mask.
totalSteps = (int) (maskSize * maskSize / (Math.PI * MathUtils.pow2(settings.clusterRadius / maskScale)));
}
while ((centre = maskDistribution.next()) != null && clusterCentres.size() < totalSteps) {
IJ.showProgress(clusterCentres.size(), totalSteps);
// The mask returns the coordinates with the centre of the image at 0,0
centre[0] += width / 2;
centre[1] += width / 2;
clusterCentres.add(centre);
// Fill in the mask around the centre to exclude any more circles that could overlap
final double cx = centre[0] / maskScale;
final double cy = centre[1] / maskScale;
fillMask(mask, maskSize, (int) cx, (int) cy, roiRadius, 0);
try {
maskDistribution = new MaskDistribution(mask, maskSize, maskSize, 0, maskScale, maskScale, rng);
} catch (final IllegalArgumentException ex) {
// This can happen when there are no more non-zero pixels
log("WARNING: No more room for clusters on the mask area (created %d of estimated %d)", clusterCentres.size(), totalSteps);
break;
}
}
ImageJUtils.finished();
} else {
// Pick centres randomly from the distribution
while (clusterCentres.size() < totalSteps) {
clusterCentres.add(dist.next());
}
}
final double scaledRadius = settings.clusterRadius / maskScale;
if (settings.showClusterMask || settings.clusterSimulation == 3) {
// Show the mask for the clusters
if (mask == null) {
mask = new int[maskSize * maskSize];
} else {
Arrays.fill(mask, 0);
}
final int roiRadius = (int) Math.round(scaledRadius);
for (final double[] c : clusterCentres) {
final double cx = c[0] / maskScale;
final double cy = c[1] / maskScale;
fillMask(mask, maskSize, (int) cx, (int) cy, roiRadius, 1);
}
if (settings.clusterSimulation == 3) {
// We have the mask. Now pick points at random from the mask.
final MaskDistribution maskDistribution = new MaskDistribution(mask, maskSize, maskSize, 0, maskScale, maskScale, rng);
// Allocate each molecule position to a parent circle, thus defining clusters.
final int[][] clusters = new int[clusterCentres.size()][];
final int[] clusterSize = new int[clusters.length];
for (int i = 0; i < settings.numberOfMolecules; i++) {
final double[] centre = maskDistribution.next();
// The mask returns the coordinates with the centre of the image at 0,0
centre[0] += width / 2;
centre[1] += width / 2;
xyz.add(centre);
// Output statistics on cluster size and number.
// TODO - Finding the closest cluster could be done better than an all-vs-all comparison
double max = distance2(centre, clusterCentres.get(0));
int cluster = 0;
for (int j = 1; j < clusterCentres.size(); j++) {
final double d2 = distance2(centre, clusterCentres.get(j));
if (d2 < max) {
max = d2;
cluster = j;
}
}
// Assign point i to cluster
centre[2] = cluster;
if (clusterSize[cluster] == 0) {
clusters[cluster] = new int[10];
}
if (clusters[cluster].length <= clusterSize[cluster]) {
clusters[cluster] = Arrays.copyOf(clusters[cluster], (int) (clusters[cluster].length * 1.5));
}
clusters[cluster][clusterSize[cluster]++] = i;
}
// Generate real cluster size statistics
for (int j = 0; j < clusterSize.length; j++) {
final int size = clusterSize[j];
if (size == 0) {
continue;
}
statsSize.add(size);
if (size == 1) {
statsRadius.add(0);
continue;
}
// Find centre of cluster and add the distance to each point
final double[] com = new double[2];
for (int n = 0; n < size; n++) {
final double[] xy = xyz.get(clusters[j][n]);
for (int k = 0; k < 2; k++) {
com[k] += xy[k];
}
}
for (int k = 0; k < 2; k++) {
com[k] /= size;
}
for (int n = 0; n < size; n++) {
final double dx = xyz.get(clusters[j][n])[0] - com[0];
final double dy = xyz.get(clusters[j][n])[1] - com[1];
statsRadius.add(Math.sqrt(dx * dx + dy * dy));
}
}
}
if (settings.showClusterMask) {
bp = new ByteProcessor(maskSize, maskSize);
for (int i = 0; i < mask.length; i++) {
if (mask[i] != 0) {
bp.set(i, 128);
}
}
ImageJUtils.display(maskTitle, bp);
}
}
// Use the simulated cluster centres to create clusters of the desired size
if (settings.clusterSimulation == 1 || settings.clusterSimulation == 2) {
for (final double[] clusterCentre : clusterCentres) {
final int clusterN = (int) Math.round((settings.clusterNumberStdDev > 0) ? settings.clusterNumber + gauss.sample() * settings.clusterNumberStdDev : settings.clusterNumber);
if (clusterN < 1) {
continue;
}
if (clusterN == 1) {
// No need for a cluster around a point
xyz.add(clusterCentre);
statsRadius.add(0);
statsSize.add(1);
} else {
// Generate N random points within a circle of the chosen cluster radius.
// Locate the centre-of-mass and the average distance to the centre.
final double[] com = new double[3];
int size = 0;
while (size < clusterN) {
// Generate a random point within a circle uniformly
// http://stackoverflow.com/questions/5837572/generate-a-random-point-within-a-circle-uniformly
final double t = 2.0 * Math.PI * rng.nextDouble();
final double u = rng.nextDouble() + rng.nextDouble();
final double r = settings.clusterRadius * ((u > 1) ? 2 - u : u);
final double x = r * Math.cos(t);
final double y = r * Math.sin(t);
final double[] xy = new double[] { clusterCentre[0] + x, clusterCentre[1] + y };
xyz.add(xy);
for (int k = 0; k < 2; k++) {
com[k] += xy[k];
}
size++;
}
// Add the distance of the points from the centre of the cluster.
// Note this does not account for the movement due to precision.
statsSize.add(size);
if (size == 1) {
statsRadius.add(0);
} else {
for (int k = 0; k < 2; k++) {
com[k] /= size;
}
while (size > 0) {
final double dx = xyz.get(xyz.size() - size)[0] - com[0];
final double dy = xyz.get(xyz.size() - size)[1] - com[1];
statsRadius.add(Math.sqrt(dx * dx + dy * dy));
size--;
}
}
}
}
}
} else {
// Random distribution
for (int i = 0; i < settings.numberOfMolecules; i++) {
xyz.add(dist.next());
}
}
// The Gaussian sigma should be applied so the overall distance from the centre
// ( sqrt(x^2+y^2) ) has a standard deviation of sigmaS?
final double sigma1D = settings.sigmaS / Math.sqrt(2);
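// For example: dx and dy below are independent Gaussian offsets with standard deviation sigma1D,
// so E[dx^2 + dy^2] = 2 * sigma1D^2 and the root-mean-square radial displacement is
// sqrt(2) * sigma1D = sigmaS. This is why sqrt(statsSigma.getMean()), reported at the end as the
// mean-displacement precision, approximately recovers the input sigmaS.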
// Show optional histograms
StoredDataStatistics intraDistances = null;
StoredData blinks = null;
if (settings.showHistograms) {
final int capacity = (int) (xyz.size() * settings.blinkingRate);
intraDistances = new StoredDataStatistics(capacity);
blinks = new StoredData(capacity);
}
final Statistics statsSigma = new Statistics();
for (int i = 0; i < xyz.size(); i++) {
int occurrences = getBlinks(rng, settings.blinkingRate);
if (blinks != null) {
blinks.add(occurrences);
}
final int size = settings.molecules.size();
// Get coordinates in nm
final double[] moleculeXyz = xyz.get(i);
if (bp != null && occurrences > 0) {
bp.putPixel((int) Math.round(moleculeXyz[0] / maskScale), (int) Math.round(moleculeXyz[1] / maskScale), 255);
}
while (occurrences-- > 0) {
final double[] localisationXy = Arrays.copyOf(moleculeXyz, 2);
// Add random precision
if (sigma1D > 0) {
final double dx = gauss.sample() * sigma1D;
final double dy = gauss.sample() * sigma1D;
localisationXy[0] += dx;
localisationXy[1] += dy;
if (!dist.isWithinXy(localisationXy)) {
continue;
}
// Calculate mean-squared displacement
statsSigma.add(dx * dx + dy * dy);
}
final double x = localisationXy[0];
final double y = localisationXy[1];
settings.molecules.add(new Molecule(x, y, i, 1));
// Store in pixels
final float xx = (float) (x / nmPerPixel);
final float yy = (float) (y / nmPerPixel);
final float[] params = PeakResult.createParams(0, 0, xx, yy, 0);
settings.results.add(i + 1, (int) xx, (int) yy, 0, 0, 0, 0, params, null);
}
if (settings.molecules.size() > size) {
count++;
if (intraDistances != null) {
final int newCount = settings.molecules.size() - size;
if (newCount == 1) {
// No intra-molecule distances
continue;
}
// Get the distance matrix between these molecules
final double[][] matrix = new double[newCount][newCount];
for (int ii = size, x = 0; ii < settings.molecules.size(); ii++, x++) {
for (int jj = size + 1, y = 1; jj < settings.molecules.size(); jj++, y++) {
final double d2 = settings.molecules.get(ii).distance2(settings.molecules.get(jj));
matrix[x][y] = matrix[y][x] = d2;
}
}
// Get the maximum distance for particle linkage clustering of this molecule
double max = 0;
for (int x = 0; x < newCount; x++) {
// Compare to all other molecules and get the minimum distance
// needed to join at least one
double linkDistance = Double.POSITIVE_INFINITY;
for (int y = 0; y < newCount; y++) {
if (x == y) {
continue;
}
if (matrix[x][y] < linkDistance) {
linkDistance = matrix[x][y];
}
}
// Check if this is larger
if (max < linkDistance) {
max = linkDistance;
}
}
intraDistances.add(Math.sqrt(max));
}
}
}
settings.results.end();
if (bp != null) {
final ImagePlus imp = ImageJUtils.display(maskTitle, bp);
final Calibration cal = imp.getCalibration();
cal.setUnit("nm");
cal.pixelWidth = cal.pixelHeight = maskScale;
}
log("Simulation results");
log(" * Molecules = %d (%d activated)", xyz.size(), count);
log(" * Blinking rate = %s", MathUtils.rounded((double) settings.molecules.size() / xyz.size(), 4));
log(" * Precision (Mean-displacement) = %s nm", (statsSigma.getN() > 0) ? MathUtils.rounded(Math.sqrt(statsSigma.getMean()), 4) : "0");
if (intraDistances != null) {
if (intraDistances.getN() == 0) {
log(" * Mean Intra-Molecule particle linkage distance = 0 nm");
log(" * Fraction of inter-molecule particle linkage @ 0 nm = 0 %%");
} else {
plot(blinks, "Blinks/Molecule", true);
final double[][] intraHist = plot(intraDistances, "Intra-molecule particle linkage distance", false);
// Determine 95th and 99th percentile
// Will not be null as we requested a non-integer histogram.
int p99 = intraHist[0].length - 1;
final double limit1 = 0.99 * intraHist[1][p99];
final double limit2 = 0.95 * intraHist[1][p99];
while (intraHist[1][p99] > limit1 && p99 > 0) {
p99--;
}
int p95 = p99;
while (intraHist[1][p95] > limit2 && p95 > 0) {
p95--;
}
log(" * Mean Intra-Molecule particle linkage distance = %s nm" + " (95%% = %s, 99%% = %s, 100%% = %s)", MathUtils.rounded(intraDistances.getMean(), 4), MathUtils.rounded(intraHist[0][p95], 4), MathUtils.rounded(intraHist[0][p99], 4), MathUtils.rounded(intraHist[0][intraHist[0].length - 1], 4));
if (settings.distanceAnalysis) {
performDistanceAnalysis(intraHist, p99);
}
}
}
if (settings.clusterSimulation > 0) {
log(" * Cluster number = %s +/- %s", MathUtils.rounded(statsSize.getMean(), 4), MathUtils.rounded(statsSize.getStandardDeviation(), 4));
log(" * Cluster radius = %s +/- %s nm (mean distance to centre-of-mass)", MathUtils.rounded(statsRadius.getMean(), 4), MathUtils.rounded(statsRadius.getStandardDeviation(), 4));
}
}
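The cluster generation above draws points uniformly within a circle using the sum-of-two-uniform-deviates trick. A standalone sketch of that idiom using only the JDK (the seed, centre and radius are illustrative):
import java.util.SplittableRandom;
/** Sketch: sample points uniformly inside a circle, as used when building simulated clusters. */
public class CirclePointSampler {
  /** Returns an {x, y} point uniformly distributed inside a circle of the given centre and radius. */
  static double[] samplePoint(SplittableRandom rng, double cx, double cy, double radius) {
    final double t = 2.0 * Math.PI * rng.nextDouble();     // uniform angle
    final double u = rng.nextDouble() + rng.nextDouble();  // sum of two uniforms in [0, 2)
    final double r = radius * ((u > 1) ? 2 - u : u);       // density proportional to r => uniform over the disc area
    return new double[] {cx + r * Math.cos(t), cy + r * Math.sin(t)};
  }
  public static void main(String[] args) {
    final SplittableRandom rng = new SplittableRandom(42);
    for (int i = 0; i < 5; i++) {
      final double[] p = samplePoint(rng, 0, 0, 50); // e.g. a 50 nm cluster radius
      System.out.printf("%.2f, %.2f%n", p[0], p[1]);
    }
  }
}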
Use of uk.ac.sussex.gdsc.core.utils.Statistics in project GDSC-SMLM by aherbert.
Class PsfDrift, method computeDrift:
private void computeDrift() {
// Create a grid of XY offset positions between 0-1 for PSF insert
final double[] grid = new double[settings.gridSize];
for (int i = 0; i < grid.length; i++) {
grid[i] = (double) i / settings.gridSize;
}
// Configure fitting region
final int w = 2 * settings.regionSize + 1;
centrePixel = w / 2;
// Check region size using the image PSF
final double newPsfWidth = imp.getWidth() / settings.scale;
if (Math.ceil(newPsfWidth) > w) {
ImageJUtils.log(TITLE + ": Fitted region size (%d) is smaller than the scaled PSF (%.1f)", w, newPsfWidth);
}
// Create robust PSF fitting settings
final double a = psfSettings.getPixelSize() * settings.scale;
final double sa = PsfCalculator.squarePixelAdjustment(psfSettings.getPixelSize() * (psfSettings.getFwhm() / Gaussian2DFunction.SD_TO_FWHM_FACTOR), a);
fitConfig.setInitialPeakStdDev(sa / a);
fitConfig.setBackgroundFitting(settings.backgroundFitting);
fitConfig.setNotSignalFitting(false);
fitConfig.setComputeDeviations(false);
fitConfig.setDisableSimpleFilter(true);
// Create the PSF over the desired z-depth
final int depth = (int) Math.round(settings.zDepth / psfSettings.getPixelDepth());
int startSlice = psfSettings.getCentreImage() - depth;
int endSlice = psfSettings.getCentreImage() + depth;
final int nSlices = imp.getStackSize();
startSlice = MathUtils.clip(1, nSlices, startSlice);
endSlice = MathUtils.clip(1, nSlices, endSlice);
final ImagePsfModel psf = createImagePsf(startSlice, endSlice, settings.scale);
final int minz = startSlice - psfSettings.getCentreImage();
final int maxz = endSlice - psfSettings.getCentreImage();
final int nZ = maxz - minz + 1;
final int gridSize2 = grid.length * grid.length;
total = nZ * gridSize2;
// Store all the fitting results
final int nStartPoints = getNumberOfStartPoints();
results = new double[total * nStartPoints][];
// TODO - Add ability to iterate this, adjusting the current offset in the PSF
// each iteration
// Create a pool of workers
final int threadCount = Prefs.getThreads();
final Ticker ticker = ImageJUtils.createTicker(total, threadCount, "Fitting...");
final BlockingQueue<Job> jobs = new ArrayBlockingQueue<>(threadCount * 2);
final List<Thread> threads = new LinkedList<>();
for (int i = 0; i < threadCount; i++) {
final Worker worker = new Worker(jobs, psf, w, fitConfig, ticker);
final Thread t = new Thread(worker);
threads.add(t);
t.start();
}
// Fit
outer: for (int z = minz, i = 0; z <= maxz; z++) {
for (int x = 0; x < grid.length; x++) {
for (int y = 0; y < grid.length; y++, i++) {
if (IJ.escapePressed()) {
break outer;
}
put(jobs, new Job(z, grid[x], grid[y], i));
}
}
}
// If escape was pressed then we do not need to stop the workers, just return
if (ImageJUtils.isInterrupted()) {
ImageJUtils.finished();
return;
}
// Finish all the worker threads by passing in a null job
for (int i = 0; i < threads.size(); i++) {
put(jobs, new Job());
}
// Wait for all to finish
for (int i = 0; i < threads.size(); i++) {
try {
threads.get(i).join();
} catch (final InterruptedException ex) {
Thread.currentThread().interrupt();
throw new ConcurrentRuntimeException("Unexpected interrupt", ex);
}
}
threads.clear();
ImageJUtils.finished();
// Plot the average and SE for the drift curve
// Plot the recall
final double[] zPosition = new double[nZ];
final double[] avX = new double[nZ];
final double[] seX = new double[nZ];
final double[] avY = new double[nZ];
final double[] seY = new double[nZ];
final double[] recall = new double[nZ];
for (int z = minz, i = 0; z <= maxz; z++, i++) {
final Statistics statsX = new Statistics();
final Statistics statsY = new Statistics();
for (int s = 0; s < nStartPoints; s++) {
int resultPosition = i * gridSize2 + s * total;
final int endResultPosition = resultPosition + gridSize2;
while (resultPosition < endResultPosition) {
if (results[resultPosition] != null) {
statsX.add(results[resultPosition][0]);
statsY.add(results[resultPosition][1]);
}
resultPosition++;
}
}
zPosition[i] = z * psfSettings.getPixelDepth();
avX[i] = statsX.getMean();
seX[i] = statsX.getStandardError();
avY[i] = statsY.getMean();
seY[i] = statsY.getStandardError();
recall[i] = (double) statsX.getN() / (nStartPoints * gridSize2);
}
// Find the range from the z-centre above the recall limit
int centre = 0;
for (int slice = startSlice, i = 0; slice <= endSlice; slice++, i++) {
if (slice == psfSettings.getCentreImage()) {
centre = i;
break;
}
}
if (recall[centre] < settings.recallLimit) {
return;
}
int start = centre;
int end = centre;
for (int i = centre; i-- > 0; ) {
if (recall[i] < settings.recallLimit) {
break;
}
start = i;
}
for (int i = centre; ++i < recall.length; ) {
if (recall[i] < settings.recallLimit) {
break;
}
end = i;
}
final int iterations = 1;
LoessInterpolator loess = null;
if (settings.smoothing > 0) {
loess = new LoessInterpolator(settings.smoothing, iterations);
}
final double[][] smoothx = displayPlot("Drift X", "X (nm)", zPosition, avX, seX, loess, start, end);
final double[][] smoothy = displayPlot("Drift Y", "Y (nm)", zPosition, avY, seY, loess, start, end);
displayPlot("Recall", "Recall", zPosition, recall, null, null, start, end);
windowOrganiser.tile();
// Ask the user if they would like to store them in the image
final GenericDialog gd = new GenericDialog(TITLE);
gd.enableYesNoCancel();
gd.hideCancelButton();
startSlice = psfSettings.getCentreImage() - (centre - start);
endSlice = psfSettings.getCentreImage() + (end - centre);
ImageJUtils.addMessage(gd, "Save the drift to the PSF?\n \nSlices %d (%s nm) - %d (%s nm) above recall limit", startSlice, MathUtils.rounded(zPosition[start]), endSlice, MathUtils.rounded(zPosition[end]));
gd.addMessage("Optionally average the end points to set drift outside the limits.\n" + "(Select zero to ignore)");
gd.addSlider("Number_of_points", 0, 10, settings.positionsToAverage);
gd.showDialog();
if (gd.wasOKed()) {
settings.positionsToAverage = Math.abs((int) gd.getNextNumber());
final Map<Integer, Offset> oldOffset = psfSettings.getOffsetsMap();
final boolean useOldOffset = settings.useOffset && !oldOffset.isEmpty();
final LocalList<double[]> offset = new LocalList<>();
final double pitch = psfSettings.getPixelSize();
int index = 0;
for (int i = start, slice = startSlice; i <= end; slice++, i++) {
index = findCentre(zPosition[i], smoothx, index);
if (index == -1) {
ImageJUtils.log("Failed to find the offset for depth %.2f", zPosition[i]);
continue;
}
// The offset should store the difference to the centre in pixels so divide by the pixel
// pitch
double cx = smoothx[1][index] / pitch;
double cy = smoothy[1][index] / pitch;
if (useOldOffset) {
final Offset o = oldOffset.get(slice);
if (o != null) {
cx += o.getCx();
cy += o.getCy();
}
}
offset.add(new double[] { slice, cx, cy });
}
addMissingOffsets(startSlice, endSlice, nSlices, offset);
final Offset.Builder offsetBuilder = Offset.newBuilder();
final ImagePSF.Builder imagePsfBuilder = psfSettings.toBuilder();
for (final double[] o : offset) {
final int slice = (int) o[0];
offsetBuilder.setCx(o[1]);
offsetBuilder.setCy(o[2]);
imagePsfBuilder.putOffsets(slice, offsetBuilder.build());
}
imagePsfBuilder.putNotes(TITLE, String.format("Solver=%s, Region=%d", PeakFit.getSolverName(fitConfig), settings.regionSize));
imp.setProperty("Info", ImagePsfHelper.toString(imagePsfBuilder));
}
}
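Each z position above is summarised by pooling the fitted X and Y offsets into Statistics objects and reporting the mean, standard error and recall. A minimal sketch of that per-slice summary, assuming gdsc-core is on the classpath (the offsets are illustrative; null marks a failed fit):
import uk.ac.sussex.gdsc.core.utils.Statistics;
/** Sketch: summarise fitted drift offsets for one z-slice as mean +/- standard error plus recall. */
public class DriftSliceSummary {
  public static void main(String[] args) {
    final Double[] fittedX = {1.2, 0.9, null, 1.1, 1.4, null, 1.0}; // hypothetical X offsets (nm)
    final Statistics statsX = new Statistics();
    for (final Double x : fittedX) {
      if (x != null) {
        statsX.add(x);
      }
    }
    final double recall = (double) statsX.getN() / fittedX.length;
    System.out.printf("X drift = %.3f +/- %.3f nm, recall = %.2f%n",
        statsX.getMean(), statsX.getStandardError(), recall);
  }
}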
Use of uk.ac.sussex.gdsc.core.utils.Statistics in project GDSC-SMLM by aherbert.
Class MeanVarianceTest, method run:
@Override
public void run(String arg) {
SmlmUsageTracker.recordPlugin(this.getClass(), arg);
settings = Settings.load();
settings.save();
String helpKey = "mean-variance-test";
if (ImageJUtils.isExtraOptions()) {
final ImagePlus imp = WindowManager.getCurrentImage();
if (imp.getStackSize() > 1) {
final GenericDialog gd = new GenericDialog(TITLE);
gd.addMessage("Perform single image analysis on the current image?");
gd.addNumericField("Bias", settings.bias, 0);
gd.addHelp(HelpUrls.getUrl(helpKey));
gd.showDialog();
if (gd.wasCanceled()) {
return;
}
singleImage = true;
settings.bias = Math.abs(gd.getNextNumber());
} else {
IJ.error(TITLE, "Single-image mode requires a stack");
return;
}
}
List<ImageSample> images;
String inputDirectory = "";
if (singleImage) {
IJ.showStatus("Loading images...");
images = getImages();
if (images.size() == 0) {
IJ.error(TITLE, "Not enough images for analysis");
return;
}
} else {
inputDirectory = IJ.getDirectory("Select image series ...");
if (inputDirectory == null) {
return;
}
final SeriesOpener series = new SeriesOpener(inputDirectory);
series.setVariableSize(true);
if (series.getNumberOfImages() < 3) {
IJ.error(TITLE, "Not enough images in the selected directory");
return;
}
if (!IJ.showMessageWithCancel(TITLE, String.format("Analyse %d images, first image:\n%s", series.getNumberOfImages(), series.getImageList()[0]))) {
return;
}
IJ.showStatus("Loading images");
images = getImages(series);
if (images.size() < 3) {
IJ.error(TITLE, "Not enough images for analysis");
return;
}
if (images.get(0).exposure != 0) {
IJ.error(TITLE, "First image in series must have exposure 0 (Bias image)");
return;
}
}
final boolean emMode = (arg != null && arg.contains("em"));
GenericDialog gd = new GenericDialog(TITLE);
gd.addMessage("Set the output options:");
gd.addCheckbox("Show_table", settings.showTable);
gd.addCheckbox("Show_charts", settings.showCharts);
if (emMode) {
// Ask the user for the camera gain ...
gd.addMessage("Estimating the EM-gain requires the camera gain without EM readout enabled");
gd.addNumericField("Camera_gain (Count/e-)", settings.cameraGain, 4);
}
if (emMode) {
helpKey += "-em-ccd";
}
gd.addHelp(HelpUrls.getUrl(helpKey));
gd.showDialog();
if (gd.wasCanceled()) {
return;
}
settings.showTable = gd.getNextBoolean();
settings.showCharts = gd.getNextBoolean();
if (emMode) {
settings.cameraGain = gd.getNextNumber();
}
IJ.showStatus("Computing mean & variance");
final double nImages = images.size();
for (int i = 0; i < images.size(); i++) {
IJ.showStatus(String.format("Computing mean & variance %d/%d", i + 1, images.size()));
images.get(i).compute(singleImage, i / nImages, (i + 1) / nImages);
}
IJ.showProgress(1);
IJ.showStatus("Computing results");
// Allow user to input multiple bias images
int start = 0;
final Statistics biasStats = new Statistics();
final Statistics noiseStats = new Statistics();
final double bias;
if (singleImage) {
bias = settings.bias;
} else {
while (start < images.size()) {
final ImageSample sample = images.get(start);
if (sample.exposure == 0) {
biasStats.add(sample.means);
for (final PairSample pair : sample.samples) {
noiseStats.add(pair.variance);
}
start++;
} else {
break;
}
}
bias = biasStats.getMean();
}
// Get the mean-variance data
int total = 0;
for (int i = start; i < images.size(); i++) {
total += images.get(i).samples.size();
}
if (settings.showTable && total > 2000) {
gd = new GenericDialog(TITLE);
gd.addMessage("Table output requires " + total + " entries.\n \nYou may want to disable the table.");
gd.addCheckbox("Show_table", settings.showTable);
gd.showDialog();
if (gd.wasCanceled()) {
return;
}
settings.showTable = gd.getNextBoolean();
}
final TextWindow results = (settings.showTable) ? createResultsWindow() : null;
double[] mean = new double[total];
double[] variance = new double[mean.length];
final Statistics gainStats = (singleImage) ? new StoredDataStatistics(total) : new Statistics();
final WeightedObservedPoints obs = new WeightedObservedPoints();
for (int i = (singleImage) ? 0 : start, j = 0; i < images.size(); i++) {
final StringBuilder sb = (settings.showTable) ? new StringBuilder() : null;
final ImageSample sample = images.get(i);
for (final PairSample pair : sample.samples) {
if (j % 16 == 0) {
IJ.showProgress(j, total);
}
mean[j] = pair.getMean();
variance[j] = pair.variance;
// Gain is in Count / e
double gain = variance[j] / (mean[j] - bias);
gainStats.add(gain);
obs.add(mean[j], variance[j]);
if (emMode) {
gain /= (2 * settings.cameraGain);
}
if (sb != null) {
sb.append(sample.title).append('\t');
sb.append(sample.exposure).append('\t');
sb.append(pair.slice1).append('\t');
sb.append(pair.slice2).append('\t');
sb.append(IJ.d2s(pair.mean1, 2)).append('\t');
sb.append(IJ.d2s(pair.mean2, 2)).append('\t');
sb.append(IJ.d2s(mean[j], 2)).append('\t');
sb.append(IJ.d2s(variance[j], 2)).append('\t');
sb.append(MathUtils.rounded(gain, 4)).append("\n");
}
j++;
}
if (results != null && sb != null) {
results.append(sb.toString());
}
}
IJ.showProgress(1);
if (singleImage) {
StoredDataStatistics stats = (StoredDataStatistics) gainStats;
ImageJUtils.log(TITLE);
if (emMode) {
final double[] values = stats.getValues();
MathArrays.scaleInPlace(0.5, values);
stats = StoredDataStatistics.create(values);
}
if (settings.showCharts) {
// Plot the gain over time
final String title = TITLE + " Gain vs Frame";
final Plot plot = new Plot(title, "Slice", "Gain");
plot.addPoints(SimpleArrayUtils.newArray(gainStats.getN(), 1, 1.0), stats.getValues(), Plot.LINE);
final PlotWindow pw = ImageJUtils.display(title, plot);
// Show a histogram
final String label = String.format("Mean = %s, Median = %s", MathUtils.rounded(stats.getMean()), MathUtils.rounded(stats.getMedian()));
final WindowOrganiser wo = new WindowOrganiser();
final PlotWindow pw2 = new HistogramPlotBuilder(TITLE, stats, "Gain").setRemoveOutliersOption(1).setPlotLabel(label).show(wo);
if (wo.isNotEmpty()) {
final Point point = pw.getLocation();
point.y += pw.getHeight();
pw2.setLocation(point);
}
}
ImageJUtils.log("Single-image mode: %s camera", (emMode) ? "EM-CCD" : "Standard");
final double gain = stats.getMedian();
if (emMode) {
final double totalGain = gain;
final double emGain = totalGain / settings.cameraGain;
ImageJUtils.log(" Gain = 1 / %s (Count/e-)", MathUtils.rounded(settings.cameraGain, 4));
ImageJUtils.log(" EM-Gain = %s", MathUtils.rounded(emGain, 4));
ImageJUtils.log(" Total Gain = %s (Count/e-)", MathUtils.rounded(totalGain, 4));
} else {
settings.cameraGain = gain;
ImageJUtils.log(" Gain = 1 / %s (Count/e-)", MathUtils.rounded(settings.cameraGain, 4));
}
} else {
IJ.showStatus("Computing fit");
// Sort
final int[] indices = rank(mean);
mean = reorder(mean, indices);
variance = reorder(variance, indices);
// Compute optimal coefficients.
// a - b x
final double[] init = { 0, 1 / gainStats.getMean() };
final PolynomialCurveFitter fitter = PolynomialCurveFitter.create(2).withStartPoint(init);
final double[] best = fitter.fit(obs.toList());
// Construct the polynomial that best fits the data.
final PolynomialFunction fitted = new PolynomialFunction(best);
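// The fit is the photon transfer relation used in the log output below:
// variance ~= best[0] + best[1] * mean, where the gradient best[1] is the observed gain
// in Count/e- (and is stored as the camera gain in the non-EM branch).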
if (settings.showCharts) {
// Plot mean versus variance. Gradient is gain in Count/e.
final String title = TITLE + " results";
final Plot plot = new Plot(title, "Mean", "Variance");
final double[] xlimits = MathUtils.limits(mean);
final double[] ylimits = MathUtils.limits(variance);
double xrange = (xlimits[1] - xlimits[0]) * 0.05;
if (xrange == 0) {
xrange = 0.05;
}
double yrange = (ylimits[1] - ylimits[0]) * 0.05;
if (yrange == 0) {
yrange = 0.05;
}
plot.setLimits(xlimits[0] - xrange, xlimits[1] + xrange, ylimits[0] - yrange, ylimits[1] + yrange);
plot.setColor(Color.blue);
plot.addPoints(mean, variance, Plot.CROSS);
plot.setColor(Color.red);
plot.addPoints(new double[] { mean[0], mean[mean.length - 1] }, new double[] { fitted.value(mean[0]), fitted.value(mean[mean.length - 1]) }, Plot.LINE);
ImageJUtils.display(title, plot);
}
final double avBiasNoise = Math.sqrt(noiseStats.getMean());
ImageJUtils.log(TITLE);
ImageJUtils.log(" Directory = %s", inputDirectory);
ImageJUtils.log(" Bias = %s +/- %s (Count)", MathUtils.rounded(bias, 4), MathUtils.rounded(avBiasNoise, 4));
ImageJUtils.log(" Variance = %s + %s * mean", MathUtils.rounded(best[0], 4), MathUtils.rounded(best[1], 4));
if (emMode) {
// The gradient is the observed gain of the noise.
// In an EM-CCD there is a noise factor of 2.
// Q. Is this true for a correct noise factor calibration:
// double noiseFactor = (Read Noise EM-CCD) / (Read Noise CCD)
// Em-gain is the observed gain divided by the noise factor multiplied by camera gain
final double emGain = best[1] / (2 * settings.cameraGain);
// Compute total gain
final double totalGain = emGain * settings.cameraGain;
final double readNoise = avBiasNoise / settings.cameraGain;
// Effective noise is standard deviation of the bias image divided by the total gain (in
// Count/e-)
final double readNoiseE = avBiasNoise / totalGain;
ImageJUtils.log(" Read Noise = %s (e-) [%s (Count)]", MathUtils.rounded(readNoise, 4), MathUtils.rounded(avBiasNoise, 4));
ImageJUtils.log(" Gain = 1 / %s (Count/e-)", MathUtils.rounded(1 / settings.cameraGain, 4));
ImageJUtils.log(" EM-Gain = %s", MathUtils.rounded(emGain, 4));
ImageJUtils.log(" Total Gain = %s (Count/e-)", MathUtils.rounded(totalGain, 4));
ImageJUtils.log(" Effective Read Noise = %s (e-) (Read Noise/Total Gain)", MathUtils.rounded(readNoiseE, 4));
} else {
// The gradient is the observed gain of the noise.
settings.cameraGain = best[1];
// Noise is standard deviation of the bias image divided by the gain (in Count/e-)
final double readNoise = avBiasNoise / settings.cameraGain;
ImageJUtils.log(" Read Noise = %s (e-) [%s (Count)]", MathUtils.rounded(readNoise, 4), MathUtils.rounded(avBiasNoise, 4));
ImageJUtils.log(" Gain = 1 / %s (Count/e-)", MathUtils.rounded(1 / settings.cameraGain, 4));
}
}
IJ.showStatus("");
}
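The per-pair gain estimate above is variance / (mean - bias), with the EM-CCD branch dividing by the noise factor of 2 times the camera gain. A minimal sketch of that arithmetic, assuming gdsc-core is on the classpath (the bias, camera gain and pair values are illustrative):
import uk.ac.sussex.gdsc.core.utils.Statistics;
/** Sketch: gain from mean-variance pairs, as computed per pair in the plugin's table. */
public class MeanVarianceGain {
  public static void main(String[] args) {
    final double bias = 500;        // camera bias (Count)
    final double cameraGain = 0.05; // Count/e-, only needed for the EM-CCD conversion
    final double[][] pairs = {{1500, 2450}, {2600, 5150}, {4100, 8820}}; // {mean, variance} (Count)
    final Statistics gainStats = new Statistics();
    for (final double[] p : pairs) {
      gainStats.add(p[1] / (p[0] - bias)); // observed gain in Count/e-
    }
    System.out.printf("Gain = %.4f Count/e-%n", gainStats.getMean());
    // An EM-CCD has a noise factor of 2, so the EM-gain is observed gain / (2 * camera gain).
    System.out.printf("EM-gain = %.2f%n", gainStats.getMean() / (2 * cameraGain));
  }
}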
Use of uk.ac.sussex.gdsc.core.utils.Statistics in project GDSC-SMLM by aherbert.
Class FitWorker, method run:
/**
* Locate all the peaks in the image specified by the fit job.
*
* <p>WARNING: The FitWorker fits a sub-region of the data for each maxima. It then updates the
* FitResult parameters with an offset reflecting the position. The initialParameters are not
* updated with this offset unless configured.
*
* @param job The fit job
*/
public void run(FitJob job) {
final long start = System.nanoTime();
job.start();
this.job = job;
benchmarking = false;
this.slice = job.slice;
// Used for debugging
// if (logger == null) logger = new gdsc.fitting.logging.ConsoleLogger();
// Crop to the ROI
cc = new CoordinateConverter(job.bounds);
// Note if the bounds change for efficient caching.
newBounds = !cc.dataBounds.equals(lastBounds);
if (newBounds) {
lastBounds = cc.dataBounds;
}
final int width = cc.dataBounds.width;
final int height = cc.dataBounds.height;
borderLimitX = width - border;
borderLimitY = height - border;
data = job.data;
// This is tied to the input data
dataEstimator = null;
// The data bounds used by the camera model are relative to the global origin.
if (isFitCameraCounts) {
cameraModel.removeBias(cc.dataBounds, data);
} else {
cameraModel.removeBiasAndGain(cc.dataBounds, data);
}
final FitParameters params = job.getFitParameters();
this.endT = (params != null) ? params.endT : -1;
candidates = indentifySpots(job, width, height, params);
if (candidates.getSize() == 0) {
finishJob(job, start);
return;
}
fittedBackground = new Statistics();
// Always get the noise and store it with the results.
if (params != null && !Float.isNaN(params.noise)) {
noise = params.noise;
fitConfig.setNoise(noise);
} else if (calculateNoise) {
noise = estimateNoise();
fitConfig.setNoise(noise);
}
// System.out.printf("Slice %d : Noise = %g\n", slice, noise);
if (logger != null) {
LoggerUtils.log(logger, Level.INFO, "Slice %d: Noise = %f", slice, noise);
}
final ImageExtractor ie = ImageExtractor.wrap(data, width, height);
double[] region = null;
final float offsetx = cc.dataBounds.x;
final float offsety = cc.dataBounds.y;
if (params != null && params.fitTask == FitTask.MAXIMA_IDENITIFICATION) {
final float sd0 = (float) xsd;
final float sd1 = (float) ysd;
for (int n = 0; n < candidates.getSize(); n++) {
// Find the background using the perimeter of the data.
// TODO - Perhaps the Gaussian Fitter should be used to produce the initial estimates but no
// actual fit done.
// This would produce coords using the centre-of-mass.
final Candidate candidate = candidates.get(n);
int x = candidate.x;
int y = candidate.y;
final Rectangle regionBounds = ie.getBoxRegionBounds(x, y, fitting);
region = ie.crop(regionBounds, region);
final float b = (float) Gaussian2DFitter.getBackground(region, regionBounds.width, regionBounds.height, 1);
// Offset the coords to the centre of the pixel. Note the bounds will be added later.
// Subtract the background to get the amplitude estimate then convert to signal.
final float amplitude = candidate.intensity - ((relativeIntensity) ? 0 : b);
final float signal = (float) (amplitude * 2.0 * Math.PI * sd0 * sd1);
final int index = y * width + x;
x += offsetx;
y += offsety;
final float[] peakParams = new float[1 + Gaussian2DFunction.PARAMETERS_PER_PEAK];
peakParams[Gaussian2DFunction.BACKGROUND] = b;
peakParams[Gaussian2DFunction.SIGNAL] = signal;
peakParams[Gaussian2DFunction.X_POSITION] = x + 0.5f;
peakParams[Gaussian2DFunction.Y_POSITION] = y + 0.5f;
// peakParams[Gaussian2DFunction.Z_POSITION] = 0;
peakParams[Gaussian2DFunction.X_SD] = sd0;
peakParams[Gaussian2DFunction.Y_SD] = sd1;
// peakParams[Gaussian2DFunction.ANGLE] = 0;
final float u = (float) Gaussian2DPeakResultHelper.getMeanSignalUsingP05(signal, sd0, sd1);
sliceResults.add(createResult(x, y, data[index], 0, noise, u, peakParams, null, n, 0));
}
} else {
initialiseFitting();
// Smooth the data to provide initial background estimates
final float[] smoothedData = backgroundSmoothing.process(data, width, height);
final ImageExtractor ie2 = ImageExtractor.wrap(smoothedData, width, height);
// Perform the Gaussian fit
// The SpotFitter is used to create a dynamic MultiPathFitResult object.
// This is then passed to a multi-path filter. Thus the same fitting decision process
// is used when benchmarking and when running on actual data.
// Note: The SpotFitter labels each PreprocessedFitResult using the offset in the FitResult
// object.
// The initial params and deviations can then be extracted for the results that pass the
// filter.
MultiPathFilter filter;
final IMultiPathFitResults multiPathResults = this;
final SelectedResultStore store = this;
coordinateStore = coordinateStore.resize(cc.dataBounds.x, cc.dataBounds.y, width, height);
if (params != null && params.fitTask == FitTask.BENCHMARKING) {
// Run filtering as normal. However in the event that a candidate is missed or some
// results are not generated we must generate them. This is done in the complete(int)
// method if we set the benchmarking flag.
benchmarking = true;
// Filter using the benchmark filter
filter = params.benchmarkFilter;
if (filter == null) {
// Create a default filter using the standard FitConfiguration to ensure sensible fits
// are stored as the current slice results.
// Note the current fit configuration for benchmarking may have minimal filtering settings
// so we do not use that object.
final FitConfiguration tmp = new FitConfiguration();
final double residualsThreshold = 0.4;
filter = new MultiPathFilter(tmp, createMinimalFilter(PrecisionMethod.POISSON_CRLB), residualsThreshold);
}
} else {
// Filter using the configuration.
if (this.filter == null) {
// This can be cached. Q. Clone the config?
this.filter = new MultiPathFilter(fitConfig, createMinimalFilter(fitConfig.getPrecisionMethod()), config.getResidualsThreshold());
}
filter = this.filter;
}
// If we are benchmarking then do not generate results dynamically since we will store all
// results in the fit job.
dynamicMultiPathFitResult = new DynamicMultiPathFitResult(ie, ie2, !benchmarking);
// dynamicMultiPathFitResult = new DynamicMultiPathFitResult(ie, false);
// The local background computation is only required for the precision method.
// Also compute it when benchmarking.
localBackground = benchmarking || fitConfig.getPrecisionMethodValue() == PrecisionMethod.MORTENSEN_LOCAL_BACKGROUND_VALUE;
// Debug where the fit config may be different between benchmarking and fitting
if (slice == -1) {
fitConfig.initialise(1, 1, 1);
final String newLine = System.lineSeparator();
final String tmpdir = System.getProperty("java.io.tmpdir");
try (BufferedWriter writer = Files.newBufferedWriter(Paths.get(tmpdir, String.format("config.%d.txt", slice)))) {
JsonFormat.printer().appendTo(config.getFitEngineSettings(), writer);
} catch (final IOException ex) {
logger.log(Level.SEVERE, "Unable to write message", ex);
}
FileUtils.save(Paths.get(tmpdir, String.format("filter.%d.xml", slice)).toString(), filter.toXml());
// filter.setDebugFile(String.format("/tmp/fitWorker.%b.txt", benchmarking));
final StringBuilder sb = new StringBuilder();
sb.append((benchmarking) ? ((uk.ac.sussex.gdsc.smlm.results.filter.Filter) filter.getFilter()).toXml() : fitConfig.getSmartFilterString()).append(newLine);
sb.append(((uk.ac.sussex.gdsc.smlm.results.filter.Filter) filter.getMinimalFilter()).toXml()).append(newLine);
sb.append(filter.residualsThreshold).append(newLine);
sb.append(config.getFailuresLimit()).append(newLine);
sb.append(config.getDuplicateDistance()).append(":");
sb.append(config.getDuplicateDistanceAbsolute()).append(newLine);
if (spotFilter != null) {
sb.append(spotFilter.getDescription()).append(newLine);
}
sb.append("MaxCandidate = ").append(candidates.getSize()).append(newLine);
for (int i = 0, len = candidates.getLength(); i < len; i++) {
TextUtils.formatTo(sb, "Fit %d [%d,%d = %.1f]%n", i, candidates.get(i).x, candidates.get(i).y, candidates.get(i).intensity);
}
FileUtils.save(Paths.get(tmpdir, String.format("candidates.%d.xml", slice)).toString(), sb.toString());
}
FailCounter failCounter = config.getFailCounter();
if (!benchmarking && params != null && params.pass != null) {
// We want to store the pass/fail for consecutive candidates
params.pass = new boolean[candidates.getLength()];
failCounter = new RecordingFailCounter(params.pass, failCounter);
filter.select(multiPathResults, failCounter, true, store, coordinateStore);
} else {
filter.select(multiPathResults, failCounter, true, store, coordinateStore);
}
// Note: We go deeper into the candidate list than max candidate
// for any candidate where we have a good fit result as an estimate.
// Q. Should this only be for benchmarking?
// if (benchmarking)
// System.out.printf("Slice %d: %d + %d\n", slice, dynamicMultiPathFitResult.extra,
// candidates.getSize());
// Create the slice results
final CandidateList fitted = gridManager.getFittedCandidates();
sliceResults.ensureCapacity(fitted.getSize());
for (int i = 0; i < fitted.getSize(); i++) {
if (fitted.get(i).fit) {
sliceResults.push(createResult(offsetx, offsety, fitted.get(i)));
}
}
if (logger != null) {
LoggerUtils.log(logger, Level.INFO, "Slice %d: %d / %d = %s", slice, success, candidates.getSize(), TextUtils.pleural(fitted.getSize(), "result"));
}
}
this.results.addAll(sliceResults);
finishJob(job, start);
}
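In the maxima-identification branch above, a background-subtracted amplitude is converted to a total signal by integrating a 2D Gaussian: signal = amplitude * 2 * pi * sd0 * sd1. A standalone sketch of that conversion (the amplitude and widths are illustrative):
/** Sketch: estimate the total signal of a 2D Gaussian peak from its amplitude and widths. */
public class SignalEstimate {
  /** Volume under an elliptical 2D Gaussian with the given peak amplitude and x/y standard deviations. */
  static double signal(double amplitude, double sx, double sy) {
    return amplitude * 2.0 * Math.PI * sx * sy;
  }
  public static void main(String[] args) {
    final double amplitude = 120; // candidate intensity minus local background
    final double sx = 1.2;        // PSF standard deviation in x (pixels)
    final double sy = 1.2;        // PSF standard deviation in y (pixels)
    System.out.printf("Signal = %.1f%n", signal(amplitude, sx, sy));
  }
}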