Example use of gdsc.core.math.ArrayMoment in project GDSC-SMLM by aherbert.
Source: the CMOSAnalysis class, method run.
private void run() {
  long start = System.currentTimeMillis();

  // Avoid all the file saves from updating the progress bar and status line
  Utils.setShowProgress(false);
  Utils.setShowStatus(false);
  JLabel statusLine = Utils.getStatusLine();
  progressBar = Utils.getProgressBar();

  // Create thread pool and workers
  ExecutorService executor = Executors.newFixedThreadPool(getThreads());
  TurboList<Future<?>> futures = new TurboList<Future<?>>(nThreads);
  TurboList<ImageWorker> workers = new TurboList<ImageWorker>(nThreads);

  double[][][] data = new double[subDirs.size()][2][];
  double[] pixelOffset = null, pixelVariance = null;
  Statistics statsOffset = null, statsVariance = null;

  // For each sub-directory compute the mean and variance
  final int nSubDirs = subDirs.size();
  boolean error = false;
  for (int n = 0; n < nSubDirs; n++) {
    SubDir sd = subDirs.getf(n);
    statusLine.setText("Analysing " + sd.name);

    // Open the series
    SeriesImageSource source = new SeriesImageSource(sd.name, sd.path.getPath());
    //source.setLogProgress(true);
    if (!source.open()) {
      error = true;
      IJ.error(TITLE, "Failed to open image series: " + sd.path.getPath());
      break;
    }

    // So the bar remains at 99% when workers have finished
    totalProgress = source.getFrames() + 1;
    stepProgress = Utils.getProgressInterval(totalProgress);
    progress = 0;
    progressBar.show(0);

    ArrayMoment moment = (rollingAlgorithm) ? new RollingArrayMoment() : new SimpleArrayMoment();
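    // The rolling algorithm updates the per-pixel moments incrementally as each frame arrives;
    // the simple algorithm accumulates per-pixel sums and computes the moments at the end.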
    final BlockingQueue<ImageJob> jobs = new ArrayBlockingQueue<ImageJob>(nThreads * 2);
    for (int i = 0; i < nThreads; i++) {
      final ImageWorker worker = new ImageWorker(jobs, moment);
      workers.add(worker);
      futures.add(executor.submit(worker));
    }

    // Process the data
    for (float[] pixels = source.next(); pixels != null; pixels = source.next()) {
      put(jobs, new ImageJob(pixels));
    }

    // Finish all the worker threads by passing in a null job
    for (int i = 0; i < nThreads; i++) {
      put(jobs, new ImageJob(null));
    }

    // Wait for all to finish
    for (int t = futures.size(); t-- > 0; ) {
      try {
        // The future .get() method will block until completed
        futures.get(t).get();
      } catch (Exception e) {
        // This should not happen.
        e.printStackTrace();
      }
    }

    // Create the final aggregate statistics
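    // Each worker accumulated a partial moment over the frames it consumed from the queue;
    // adding the worker moments combines them into the moment for the whole series.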
    for (ImageWorker w : workers)
      moment.add(w.moment);
    data[n][0] = moment.getFirstMoment();
    data[n][1] = moment.getVariance();

    // Reset
    futures.clear();
    workers.clear();

    Statistics s = new Statistics(data[n][0]);
    if (n != 0) {
      // Compute mean ADU
      Statistics signal = new Statistics();
      double[] mean = data[n][0];
      for (int i = 0; i < pixelOffset.length; i++)
        signal.add(mean[i] - pixelOffset[i]);
      Utils.log("%s Mean = %s +/- %s. Signal = %s +/- %s ADU", sd.name,
          Utils.rounded(s.getMean()), Utils.rounded(s.getStandardDeviation()),
          Utils.rounded(signal.getMean()), Utils.rounded(signal.getStandardDeviation()));

      // TODO - optionally save
      ImageStack stack = new ImageStack(source.getWidth(), source.getHeight());
      stack.addSlice("Mean", Utils.toFloat(data[n][0]));
      stack.addSlice("Variance", Utils.toFloat(data[n][1]));
      IJ.save(new ImagePlus("PerPixel", stack),
          new File(directory, "perPixel" + sd.name + ".tif").getPath());
    } else {
      pixelOffset = data[0][0];
      pixelVariance = data[0][1];
      statsOffset = s;
      statsVariance = new Statistics(pixelVariance);
      Utils.log("%s Offset = %s +/- %s. Variance = %s +/- %s", sd.name,
          Utils.rounded(s.getMean()), Utils.rounded(s.getStandardDeviation()),
          Utils.rounded(statsVariance.getMean()),
          Utils.rounded(statsVariance.getStandardDeviation()));
    }
  }

  Utils.setShowStatus(true);
  Utils.setShowProgress(true);
  IJ.showProgress(1);
  executor.shutdown();

  if (error)
    return;

  // Compute the gain
  statusLine.setText("Computing gain");
  double[] pixelGain = new double[pixelOffset.length];

  // Ignore first as this is the 0 exposure image
  for (int i = 0; i < pixelGain.length; i++) {
    // Use equation 2.5 from the Huang et al paper.
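    // With b(n) = mean(n) - offset and a(n) = variance(n) - read variance for each non-zero
    // exposure n, the per-pixel gain is sum(b*a) / sum(b*b), i.e. the least-squares slope of
    // the variance-vs-mean line through the origin.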
    double bibiT = 0;
    double biaiT = 0;
    for (int n = 1; n < nSubDirs; n++) {
      double bi = data[n][0][i] - pixelOffset[i];
      double ai = data[n][1][i] - pixelVariance[i];
      bibiT += bi * bi;
      biaiT += bi * ai;
    }
    pixelGain[i] = biaiT / bibiT;
  }

  Statistics statsGain = new Statistics(pixelGain);
  Utils.log("Gain Mean = %s +/- %s", Utils.rounded(statsGain.getMean()),
      Utils.rounded(statsGain.getStandardDeviation()));

  // Histogram of offset, variance and gain
  int bins = Utils.getBinsSturges(pixelGain.length);
  WindowOrganiser wo = new WindowOrganiser();
  showHistogram("Offset", pixelOffset, bins, statsOffset, wo);
  showHistogram("Variance", pixelVariance, bins, statsVariance, wo);
  showHistogram("Gain", pixelGain, bins, statsGain, wo);
  wo.tile();

  // Save
  measuredStack = new ImageStack(size, size);
  measuredStack.addSlice("Offset", Utils.toFloat(pixelOffset));
  measuredStack.addSlice("Variance", Utils.toFloat(pixelVariance));
  measuredStack.addSlice("Gain", Utils.toFloat(pixelGain));
  IJ.save(new ImagePlus("PerPixel", measuredStack), new File(directory, "perPixel.tif").getPath());

  // Remove the status from the ij.io.ImageWriter class
  IJ.showStatus("");

  Utils.log("Analysis time = " + Utils.timeToString(System.currentTimeMillis() - start));
}
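
The key to the multi-threaded analysis above is the merging step moment.add(w.moment): each ImageWorker accumulates per-pixel moments for the frames it takes from the queue, and the partial moments are combined into one result before getFirstMoment() and getVariance() are read. The sketch below illustrates that pattern in isolation. It is not the gdsc.core.math.ArrayMoment implementation; the class PixelMomentSketch and its methods are hypothetical, using Welford's online update per frame and the standard pairwise combination of moments for the merge.

// Hypothetical sketch, not the GDSC ArrayMoment API.
class PixelMomentSketch {
  private long n; // number of frames added
  private double[] mean; // running per-pixel mean
  private double[] m2; // running per-pixel sum of squared deviations from the mean

  /** Add one frame of pixel data (Welford's online update). */
  void add(float[] frame) {
    if (mean == null) {
      mean = new double[frame.length];
      m2 = new double[frame.length];
    }
    n++;
    for (int i = 0; i < frame.length; i++) {
      double delta = frame[i] - mean[i];
      mean[i] += delta / n;
      m2[i] += delta * (frame[i] - mean[i]);
    }
  }

  /** Merge a partial accumulator from another worker (pairwise combination of moments). */
  void add(PixelMomentSketch other) {
    if (other.n == 0)
      return;
    if (n == 0) {
      n = other.n;
      mean = other.mean.clone();
      m2 = other.m2.clone();
      return;
    }
    long total = n + other.n;
    for (int i = 0; i < mean.length; i++) {
      double delta = other.mean[i] - mean[i];
      m2[i] += other.m2[i] + delta * delta * n * other.n / total;
      mean[i] += delta * other.n / total;
    }
    n = total;
  }

  /** Per-pixel mean. */
  double[] getFirstMoment() {
    return mean;
  }

  /** Unbiased per-pixel sample variance. */
  double[] getVariance() {
    double[] variance = new double[mean.length];
    for (int i = 0; i < mean.length; i++)
      variance[i] = (n > 1) ? m2[i] / (n - 1) : 0;
    return variance;
  }
}

In this sketch each worker thread would call add(float[]) on its own instance and the main thread would merge them with add(PixelMomentSketch), mirroring the loop over workers that merges the ArrayMoment instances above.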