Use of uk.ac.sussex.gdsc.core.math.SimpleArrayMoment in project GDSC-SMLM by aherbert.
The class CmosAnalysis, method runAnalysis:
private void runAnalysis() {
  final long start = System.currentTimeMillis();
  // Create thread pool and workers. The system is likely to be IO limited
  // so reduce the computation threads to allow the reading thread in the
  // SeriesImageSource to run.
  // If the images are small enough to fit into memory then 3 threads are used,
  // otherwise it is 1.
  final int nThreads = Math.max(1, getThreads() - 3);
  final ExecutorService executor = Executors.newFixedThreadPool(nThreads);
  final LocalList<Future<?>> futures = new LocalList<>(nThreads);
  final LocalList<ImageWorker> workers = new LocalList<>(nThreads);
  final double[][] data = new double[subDirs.size() * 2][];
  double[] pixelOffset = null;
  double[] pixelVariance = null;
  Statistics statsOffset = null;
  Statistics statsVariance = null;
  // For each sub-directory compute the mean and variance
  final int nSubDirs = subDirs.size();
  boolean error = false;
  int width = 0;
  int height = 0;
  for (int n = 0; n < nSubDirs; n++) {
    ImageJUtils.showSlowProgress(n, nSubDirs);
    final SubDir sd = subDirs.unsafeGet(n);
    ImageJUtils.showStatus(() -> "Analysing " + sd.name);
    final StopWatch sw = StopWatch.createStarted();
    // Option to reuse data
    final File file = new File(settings.directory, "perPixel" + sd.name + ".tif");
    boolean found = false;
    if (settings.reuseProcessedData && file.exists()) {
      final Opener opener = new Opener();
      opener.setSilentMode(true);
      final ImagePlus imp = opener.openImage(file.getPath());
      if (imp != null && imp.getStackSize() == 2 && imp.getBitDepth() == 32) {
        if (n == 0) {
          width = imp.getWidth();
          height = imp.getHeight();
        } else if (width != imp.getWidth() || height != imp.getHeight()) {
          error = true;
          IJ.error(TITLE, "Image width/height mismatch in image series: " + file.getPath()
              + String.format("\n \nExpected %dx%d, Found %dx%d", width, height, imp.getWidth(),
                  imp.getHeight()));
          break;
        }
        final ImageStack stack = imp.getImageStack();
        data[2 * n] = SimpleArrayUtils.toDouble((float[]) stack.getPixels(1));
        data[2 * n + 1] = SimpleArrayUtils.toDouble((float[]) stack.getPixels(2));
        found = true;
      }
    }
    if (!found) {
      // Open the series
      final SeriesImageSource source = new SeriesImageSource(sd.name, sd.path.getPath());
      if (!source.open()) {
        error = true;
        IJ.error(TITLE, "Failed to open image series: " + sd.path.getPath());
        break;
      }
      if (n == 0) {
        width = source.getWidth();
        height = source.getHeight();
      } else if (width != source.getWidth() || height != source.getHeight()) {
        error = true;
        IJ.error(TITLE, "Image width/height mismatch in image series: " + sd.path.getPath()
            + String.format("\n \nExpected %dx%d, Found %dx%d", width, height, source.getWidth(),
                source.getHeight()));
        break;
      }
      // Use frames + 1 so the progress bar remains at 99% until the workers have finished.
      final Ticker ticker = ImageJUtils.createTicker(source.getFrames() + 1L, nThreads);
      // Open the first frame to get the bit depth.
      // Assume the first pixels are not empty as the source is open.
      Object pixels = source.nextRaw();
      final int bitDepth = ImageJUtils.getBitDepth(pixels);
      ArrayMoment moment;
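      // Implementation choice (an assumption based on the class names and the overflow check
      // below): the rolling moment uses a numerically stable streaming update, the integer
      // moment keeps exact integer sums while they cannot overflow, and the simple moment
      // keeps plain double sums.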
      if (settings.rollingAlgorithm) {
        moment = new RollingArrayMoment();
        // We assume 16-bit camera at the maximum
      } else if (bitDepth <= 16
          && IntegerArrayMoment.isValid(IntegerType.UNSIGNED_16, source.getFrames())) {
        moment = new IntegerArrayMoment();
      } else {
        moment = new SimpleArrayMoment();
      }
      final BlockingQueue<Object> jobs = new ArrayBlockingQueue<>(nThreads * 2);
      for (int i = 0; i < nThreads; i++) {
        final ImageWorker worker = new ImageWorker(ticker, jobs, moment);
        workers.add(worker);
        futures.add(executor.submit(worker));
      }
      // Process the raw pixel data
      long lastTime = 0;
      while (pixels != null) {
        final long time = System.currentTimeMillis();
        if (time - lastTime > 150) {
          if (ImageJUtils.isInterrupted()) {
            error = true;
            break;
          }
          lastTime = time;
          IJ.showStatus("Analysing " + sd.name + " Frame " + source.getStartFrameNumber());
        }
        put(jobs, pixels);
        pixels = source.nextRaw();
      }
      source.close();
      if (error) {
        // Kill the workers
        workers.stream().forEach(worker -> worker.finished = true);
        // Clear the queue
        jobs.clear();
        // Signal any waiting workers
        workers.stream().forEach(worker -> jobs.add(ImageWorker.STOP_SIGNAL));
        // Cancel by interruption. We set the finished flag so the ImageWorker should
        // ignore the interrupt.
        futures.stream().forEach(future -> future.cancel(true));
        break;
      }
      // Finish all the worker threads cleanly
      workers.stream().forEach(worker -> jobs.add(ImageWorker.STOP_SIGNAL));
      // Wait for all to finish
      ConcurrencyUtils.waitForCompletionUnchecked(futures);
      // Create the final aggregate statistics
      for (final ImageWorker w : workers) {
        moment.add(w.moment);
      }
      data[2 * n] = moment.getMean();
      data[2 * n + 1] = moment.getVariance();
      // Get the processing speed.
      sw.stop();
      // The ticker holds the number of frames processed
      final double bits =
          (double) bitDepth * source.getFrames() * source.getWidth() * source.getHeight();
      final double bps = bits / sw.getTime(TimeUnit.SECONDS);
      final SiPrefix prefix = SiPrefix.getSiPrefix(bps);
      ImageJUtils.log("Processed %d frames. Time = %s. Rate = %s %sbits/s", moment.getN(),
          sw.toString(), MathUtils.rounded(prefix.convert(bps)), prefix.getPrefix());
      // Reset
      futures.clear();
      workers.clear();
      final ImageStack stack = new ImageStack(width, height);
      stack.addSlice("Mean", SimpleArrayUtils.toFloat(data[2 * n]));
      stack.addSlice("Variance", SimpleArrayUtils.toFloat(data[2 * n + 1]));
      IJ.save(new ImagePlus("PerPixel", stack), file.getPath());
    }
    final Statistics s = Statistics.create(data[2 * n]);
    if (pixelOffset != null) {
      // Compute mean ADU
      final Statistics signal = new Statistics();
      final double[] mean = data[2 * n];
      for (int i = 0; i < pixelOffset.length; i++) {
        signal.add(mean[i] - pixelOffset[i]);
      }
      ImageJUtils.log("%s Mean = %s +/- %s. Signal = %s +/- %s ADU", sd.name,
          MathUtils.rounded(s.getMean()), MathUtils.rounded(s.getStandardDeviation()),
          MathUtils.rounded(signal.getMean()), MathUtils.rounded(signal.getStandardDeviation()));
    } else {
      // Set the offset assuming the first sub-directory is the bias image
      pixelOffset = data[0];
      pixelVariance = data[1];
      statsOffset = s;
      statsVariance = Statistics.create(pixelVariance);
      ImageJUtils.log("%s Offset = %s +/- %s. Variance = %s +/- %s", sd.name,
          MathUtils.rounded(s.getMean()), MathUtils.rounded(s.getStandardDeviation()),
          MathUtils.rounded(statsVariance.getMean()),
          MathUtils.rounded(statsVariance.getStandardDeviation()));
    }
    IJ.showProgress(1);
  }
  ImageJUtils.clearSlowProgress();
  if (error) {
    executor.shutdownNow();
    IJ.showStatus(TITLE + " cancelled");
    return;
  }
  executor.shutdown();
  if (pixelOffset == null || pixelVariance == null) {
    IJ.showStatus(TITLE + " error: no bias image");
    return;
  }
  // Compute the gain
  ImageJUtils.showStatus("Computing gain");
  final double[] pixelGain = new double[pixelOffset.length];
  final double[] bibiT = new double[pixelGain.length];
  final double[] biaiT = new double[pixelGain.length];
  // Ignore first as this is the 0 exposure image
  for (int n = 1; n < nSubDirs; n++) {
    // Use equation 2.5 from the Huang et al paper.
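    // The accumulation below computes the per-pixel gain g_i = sum_k(dMean_ik * dVar_ik)
    // / sum_k(dMean_ik^2), where dMean is the mean minus the bias offset and dVar is the
    // variance minus the read-noise variance of that pixel.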
    final double[] b = data[2 * n];
    final double[] a = data[2 * n + 1];
    for (int i = 0; i < pixelGain.length; i++) {
      final double bi = b[i] - pixelOffset[i];
      final double ai = a[i] - pixelVariance[i];
      bibiT[i] += bi * bi;
      biaiT[i] += bi * ai;
    }
  }
  for (int i = 0; i < pixelGain.length; i++) {
    pixelGain[i] = biaiT[i] / bibiT[i];
  }
  final Statistics statsGain = Statistics.create(pixelGain);
  ImageJUtils.log("Gain Mean = %s +/- %s", MathUtils.rounded(statsGain.getMean()),
      MathUtils.rounded(statsGain.getStandardDeviation()));
  // Histogram of offset, variance and gain
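  // Sturges' rule gives roughly ceil(log2(n)) + 1 bins; the count is doubled here.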
  final int bins = 2 * HistogramPlot.getBinsSturgesRule(pixelGain.length);
  final WindowOrganiser wo = new WindowOrganiser();
  showHistogram("Offset (ADU)", pixelOffset, bins, statsOffset, wo);
  showHistogram("Variance (ADU^2)", pixelVariance, bins, statsVariance, wo);
  showHistogram("Gain (ADU/e)", pixelGain, bins, statsGain, wo);
  wo.tile();
  // Save
  final float[] bias = SimpleArrayUtils.toFloat(pixelOffset);
  final float[] variance = SimpleArrayUtils.toFloat(pixelVariance);
  final float[] gain = SimpleArrayUtils.toFloat(pixelGain);
  measuredStack = new ImageStack(width, height);
  measuredStack.addSlice("Offset", bias);
  measuredStack.addSlice("Variance", variance);
  measuredStack.addSlice("Gain", gain);
  final ExtendedGenericDialog egd = new ExtendedGenericDialog(TITLE);
  egd.addMessage("Save the sCMOS camera model?");
  if (settings.modelDirectory == null) {
    settings.modelDirectory = settings.directory;
    settings.modelName = "sCMOS Camera";
  }
  egd.addStringField("Model_name", settings.modelName, 30);
  egd.addDirectoryField("Model_directory", settings.modelDirectory);
  egd.showDialog();
  if (!egd.wasCanceled()) {
    settings.modelName = egd.getNextString();
    settings.modelDirectory = egd.getNextString();
    saveCameraModel(width, height, bias, gain, variance);
  }
  // Remove the status from the ij.io.ImageWriter class
  IJ.showStatus("");
  ImageJUtils.log(
      "Analysis time = " + TextUtils.millisToString(System.currentTimeMillis() - start));
}
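For reference, a minimal self-contained sketch of the per-pixel accumulation pattern used above, assuming only the SimpleArrayMoment calls that appear in this method (add(double[]), getMean(), getVariance(), getN()). The class name and the synthetic frame data are illustrative only, standing in for the frames delivered by SeriesImageSource.nextRaw().

import java.util.concurrent.ThreadLocalRandom;
import uk.ac.sussex.gdsc.core.math.SimpleArrayMoment;

public class PerPixelMomentSketch {
  public static void main(String[] args) {
    final int width = 4;
    final int height = 3;
    final int frames = 100;
    // Accumulates a running per-element (per-pixel) mean and variance.
    final SimpleArrayMoment moment = new SimpleArrayMoment();
    final ThreadLocalRandom rng = ThreadLocalRandom.current();
    for (int f = 0; f < frames; f++) {
      // One synthetic frame: offset ~100 ADU with Gaussian read noise (illustrative values).
      final double[] pixels = new double[width * height];
      for (int i = 0; i < pixels.length; i++) {
        pixels[i] = 100 + 10 * rng.nextGaussian();
      }
      moment.add(pixels);
    }
    // Per-pixel statistics over all frames, as stored in the "PerPixel" stack above.
    final double[] mean = moment.getMean();
    final double[] variance = moment.getVariance();
    System.out.printf("n=%d, mean[0]=%.2f, var[0]=%.2f%n", moment.getN(), mean[0], variance[0]);
  }
}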
Use of uk.ac.sussex.gdsc.core.math.SimpleArrayMoment in project GDSC-SMLM by aherbert.
The class BaseFunctionSolverTest, method canFitSingleGaussian:
void canFitSingleGaussian(RandomSeed seed, FunctionSolver solver, boolean applyBounds,
    NoiseModel noiseModel) {
  // Allow reporting the fit deviations
  final boolean report = false;
  double[] crlb = null;
  SimpleArrayMoment moment = null;
  final double[] noise = getNoise(seed, noiseModel);
  if (solver.isWeighted()) {
    solver.setWeights(getWeights(seed, noiseModel));
  }
  final UniformRandomProvider rg = RngUtils.create(seed.getSeed());
  for (final double s : signal) {
    final double[] expected = createParams(1, s, 0, 0, 1);
    final double[] lower = createParams(0, s * 0.5, -0.3, -0.3, 0.8);
    final double[] upper = createParams(3, s * 2, 0.3, 0.3, 1.2);
    if (applyBounds) {
      solver.setBounds(lower, upper);
    }
    if (report) {
      // Compute the CRLB for a Poisson process
      final PoissonGradientProcedure gp = PoissonGradientProcedureUtils
          .create((Gradient1Function) ((BaseFunctionSolver) solver).getGradientFunction());
      gp.computeFisherInformation(expected);
      final FisherInformationMatrix f =
          new FisherInformationMatrix(gp.getLinear(), gp.numberOfGradients);
      crlb = f.crlbSqrt();
      // Compute the deviations.
      // Note this is not the same as the CRLB as the fit is repeated
      // against the same data.
      // It should be repeated against different data generated with constant
      // parameters and variable noise.
      moment = new SimpleArrayMoment();
    }
    final double[] data = drawGaussian(expected, noise, noiseModel, rg);
    for (final double db : base) {
      for (final double dx : shift) {
        for (final double dy : shift) {
          for (final double dsx : factor) {
            final double[] p = createParams(db, s, dx, dy, dsx);
            final double[] fp = fitGaussian(solver, data, p, expected);
            for (int i = 0; i < expected.length; i++) {
              if (fp[i] < lower[i]) {
                Assertions.fail(FunctionUtils.getSupplier("Fit Failed: [%d] %.2f < %.2f: %s != %s",
                    i, fp[i], lower[i], Arrays.toString(fp), Arrays.toString(expected)));
              }
              if (fp[i] > upper[i]) {
                Assertions.fail(FunctionUtils.getSupplier("Fit Failed: [%d] %.2f > %.2f: %s != %s",
                    i, fp[i], upper[i], Arrays.toString(fp), Arrays.toString(expected)));
              }
              if (report) {
                fp[i] = expected[i] - fp[i];
              }
            }
            // Store the deviations
            if (report) {
              moment.add(fp);
            }
          }
        }
      }
    }
    // Report
    if (report) {
      logger.info(FunctionUtils.getSupplier("%s %s %f : CRLB = %s, Deviations = %s",
          solver.getClass().getSimpleName(), noiseModel, s, Arrays.toString(crlb),
          Arrays.toString(moment.getStandardDeviation())));
    }
  }
}
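A stripped-down sketch of the deviation bookkeeping used when report is true: per-fit deviations (expected - fitted) are accumulated element-wise in a SimpleArrayMoment and the per-parameter spread is read back with getStandardDeviation() for comparison against the CRLB. The class name and deviation values below are fabricated for illustration.

import java.util.Arrays;
import uk.ac.sussex.gdsc.core.math.SimpleArrayMoment;

public class DeviationMomentSketch {
  public static void main(String[] args) {
    final SimpleArrayMoment moment = new SimpleArrayMoment();
    // Each array is one fit's per-parameter deviation (expected - fitted); values are made up.
    moment.add(new double[] {0.10, -2.5, 0.02, -0.01, 0.05});
    moment.add(new double[] {-0.08, 1.9, -0.03, 0.02, -0.04});
    moment.add(new double[] {0.02, 0.7, 0.01, 0.00, 0.01});
    // Element-wise standard deviation of the accumulated arrays, analogous to the value
    // logged alongside the CRLB in the test above.
    System.out.println(Arrays.toString(moment.getStandardDeviation()));
  }
}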