Use of uk.ac.sussex.gdsc.core.utils.LocalList in the project GDSC-SMLM by aherbert.
From the class CubicSplineFunctionTest, method speedTest.
// Benchmarks CubicSplineFunction evaluation (double and single precision)
// against equivalent Gaussian 2D functions over a grid of (x, y, z) positions.
// This only logs a timing report; there are no correctness assertions.
// n = number of peaks (1 or 2); order = gradient order passed to the timing task.
@SuppressWarnings("null")
private void speedTest(int n, int order) {
// No assertions, this is just a report
Assumptions.assumeTrue(logger.isLoggable(Level.INFO));
// Assumptions.assumeTrue(TestSettings.allow(TestComplexity.MEDIUM));
// Pick the pre-built spline function for the requested number of peaks.
final CubicSplineFunction cf = (n == 2) ? f2 : f1;
Assumptions.assumeTrue(null != cf);
// Single-precision variant of the same spline function.
final CubicSplineFunction cff = (n == 2) ? f2f : f1f;
// Reference Gaussian functions: astigmatism model, and (for order < 2 only)
// a simple free-circle model with explicit per-frame SD parameters.
final ErfGaussian2DFunction gf = (ErfGaussian2DFunction) GaussianFunctionFactory.create2D(n, maxx, maxy, GaussianFunctionFactory.FIT_ASTIGMATISM, zModel);
final Gaussian2DFunction gf2 = (order < 2) ? GaussianFunctionFactory.create2D(n, maxx, maxy, GaussianFunctionFactory.FIT_SIMPLE_FREE_CIRCLE, zModel) : null;
// Parameter sets: l1 = spline params, l2 = astigmatism params, l3 = free-circle params.
final LocalList<double[]> l1 = new LocalList<>();
final LocalList<double[]> l2 = new LocalList<>();
final LocalList<double[]> l3 = new LocalList<>();
final double[] a = new double[1 + n * CubicSplineFunction.PARAMETERS_PER_PEAK];
final double[] b = new double[1 + n * Gaussian2DFunction.PARAMETERS_PER_PEAK];
double[] bb = null;
a[CubicSplineFunction.BACKGROUND] = 0.1;
b[Gaussian2DFunction.BACKGROUND] = 0.1;
for (int i = 0; i < n; i++) {
a[i * CubicSplineFunction.PARAMETERS_PER_PEAK + CubicSplineFunction.SIGNAL] = 10;
b[i * Gaussian2DFunction.PARAMETERS_PER_PEAK + Gaussian2DFunction.SIGNAL] = 10;
}
if (n == 2) {
// Fix second peak parameters
a[CubicSplineFunction.PARAMETERS_PER_PEAK + CubicSplineFunction.X_POSITION] = testcx1[0];
a[CubicSplineFunction.PARAMETERS_PER_PEAK + CubicSplineFunction.Y_POSITION] = testcy1[0];
a[CubicSplineFunction.PARAMETERS_PER_PEAK + CubicSplineFunction.Z_POSITION] = testcz1[0];
b[Gaussian2DFunction.PARAMETERS_PER_PEAK + Gaussian2DFunction.X_POSITION] = testcx1[0];
b[Gaussian2DFunction.PARAMETERS_PER_PEAK + Gaussian2DFunction.Y_POSITION] = testcy1[0];
b[Gaussian2DFunction.PARAMETERS_PER_PEAK + Gaussian2DFunction.Z_POSITION] = testcz1[0];
}
if (gf2 != null) {
// The free-circle model has no z parameter; SD values are derived from the
// astigmatism z-model instead.
bb = b.clone();
if (n == 2) {
// Fix second peak parameters
bb[Gaussian2DFunction.PARAMETERS_PER_PEAK + Gaussian2DFunction.X_SD] = zModel.getSx(testcz1[0]);
bb[Gaussian2DFunction.PARAMETERS_PER_PEAK + Gaussian2DFunction.Y_SD] = zModel.getSy(testcz1[0]);
}
}
// Enumerate every (x, y, z) position for the first peak and snapshot the
// parameter arrays.
// NOTE(review): the loops use <= so positions include the maxx/maxy boundary
// (one past the usual 0..max-1 pixel range) — confirm this is intended.
for (int x = 0; x <= maxx; x++) {
a[CubicSplineFunction.X_POSITION] = x;
b[Gaussian2DFunction.X_POSITION] = x;
for (int y = 0; y <= maxy; y++) {
a[CubicSplineFunction.Y_POSITION] = y;
b[Gaussian2DFunction.Y_POSITION] = y;
for (int z = -zDepth; z <= zDepth; z++) {
a[CubicSplineFunction.Z_POSITION] = z;
b[Gaussian2DFunction.Z_POSITION] = z;
l1.add(a.clone());
l2.add(b.clone());
if (gf2 != null) {
bb[Gaussian2DFunction.X_SD] = zModel.getSx(z);
bb[Gaussian2DFunction.Y_SD] = zModel.getSy(z);
l3.add(bb.clone());
}
}
}
}
final double[][] x1 = l1.toArray(new double[0][]);
final double[][] x2 = l2.toArray(new double[0][]);
final double[][] x3 = l3.toArray(new double[0][]);
// Time each function over all parameter sets; 5 = timing-service run count.
final TimingService ts = new TimingService(5);
ts.execute(new FunctionTimingTask(gf, x2, order));
if (gf2 != null) {
ts.execute(new FunctionTimingTask(gf2, x3, order));
}
ts.execute(new FunctionTimingTask(cf, x1, order));
ts.execute(new FunctionTimingTask(cff, x1, order, " single-precision"));
final int size = ts.getSize();
ts.repeat(size);
logger.info(ts.getReport(size));
}
Use of uk.ac.sussex.gdsc.core.utils.LocalList in the project GDSC-SMLM by aherbert.
From the class FastLogTest, method canTestDoubleErrorRange.
@SeededTest
void canTestDoubleErrorRange(RandomSeed seed) {
  // Reporting test: only run when INFO logging and HIGH test complexity are enabled.
  Assumptions.assumeTrue(logger.isLoggable(Level.INFO));
  Assumptions.assumeTrue(TestSettings.allow(TestComplexity.HIGH));
  final UniformRandomProvider random = RngUtils.create(seed.getSeed());
  // Fast-log implementations under test, all built with the same table precision.
  final int precision = 13;
  final LocalList<TestFastLog> implementations = new LocalList<>();
  implementations.add(new TestFastLog(IcsiFastLog.create(precision, DataType.DOUBLE)));
  implementations.add(new TestFastLog(new FFastLog(precision)));
  implementations.add(new TestFastLog(new DFastLog(precision)));
  implementations.add(new TestFastLog(new TurboLog(precision)));
  // Scratch buffer reused across range tests; expected logs are computed on demand.
  final double[] values = new double[10000000];
  final double[] expectedLogs = null;
  // The full exponent range could be scanned in blocks, e.g.
  // testDoubleErrorRange(random, implementations, precision, values, expectedLogs, 0, 255, 0)
  // but significant errors only occur near the double minimum and around x == 1,
  // so restrict the scan to those exponent ranges.
  testDoubleErrorRange(random, implementations, precision, values, expectedLogs, 1021, 1026, 0);
  testDoubleErrorRange(random, implementations, precision, values, expectedLogs, 2045, 2047, 0);
}
Use of uk.ac.sussex.gdsc.core.utils.LocalList in the project GDSC-SMLM by aherbert.
From the class CmosAnalysis, method runAnalysis.
// Computes per-pixel offset, variance and gain maps for an sCMOS camera from a
// series of image directories (one exposure level per sub-directory). The gain
// uses equation 2.5 of the Huang et al paper. Results are shown as histograms
// and may optionally be saved as a camera model.
private void runAnalysis() {
final long start = System.currentTimeMillis();
// Create thread pool and workers. The system is likely to be IO limited
// so reduce the computation threads to allow the reading thread in the
// SeriesImageSource to run.
// If the images are small enough to fit into memory then 3 threads are used,
// otherwise it is 1.
final int nThreads = Math.max(1, getThreads() - 3);
final ExecutorService executor = Executors.newFixedThreadPool(nThreads);
final LocalList<Future<?>> futures = new LocalList<>(nThreads);
final LocalList<ImageWorker> workers = new LocalList<>(nThreads);
// data[2*n] = per-pixel mean, data[2*n+1] = per-pixel variance for sub-directory n.
final double[][] data = new double[subDirs.size() * 2][];
// Bias (zero-exposure) statistics, filled from the first sub-directory.
double[] pixelOffset = null;
double[] pixelVariance = null;
Statistics statsOffset = null;
Statistics statsVariance = null;
// For each sub-directory compute the mean and variance
final int nSubDirs = subDirs.size();
boolean error = false;
int width = 0;
int height = 0;
for (int n = 0; n < nSubDirs; n++) {
ImageJUtils.showSlowProgress(n, nSubDirs);
final SubDir sd = subDirs.unsafeGet(n);
ImageJUtils.showStatus(() -> "Analysing " + sd.name);
final StopWatch sw = StopWatch.createStarted();
// Option to reuse data
final File file = new File(settings.directory, "perPixel" + sd.name + ".tif");
boolean found = false;
if (settings.reuseProcessedData && file.exists()) {
final Opener opener = new Opener();
opener.setSilentMode(true);
final ImagePlus imp = opener.openImage(file.getPath());
// Reusable data must be a 2-slice 32-bit stack (mean + variance).
if (imp != null && imp.getStackSize() == 2 && imp.getBitDepth() == 32) {
if (n == 0) {
// First image defines the expected dimensions.
width = imp.getWidth();
height = imp.getHeight();
} else if (width != imp.getWidth() || height != imp.getHeight()) {
error = true;
IJ.error(TITLE, "Image width/height mismatch in image series: " + file.getPath() + String.format("\n \nExpected %dx%d, Found %dx%d", width, height, imp.getWidth(), imp.getHeight()));
break;
}
final ImageStack stack = imp.getImageStack();
data[2 * n] = SimpleArrayUtils.toDouble((float[]) stack.getPixels(1));
data[2 * n + 1] = SimpleArrayUtils.toDouble((float[]) stack.getPixels(2));
found = true;
}
}
if (!found) {
// Open the series
final SeriesImageSource source = new SeriesImageSource(sd.name, sd.path.getPath());
if (!source.open()) {
error = true;
IJ.error(TITLE, "Failed to open image series: " + sd.path.getPath());
break;
}
if (n == 0) {
width = source.getWidth();
height = source.getHeight();
} else if (width != source.getWidth() || height != source.getHeight()) {
error = true;
IJ.error(TITLE, "Image width/height mismatch in image series: " + sd.path.getPath() + String.format("\n \nExpected %dx%d, Found %dx%d", width, height, source.getWidth(), source.getHeight()));
break;
}
// So the bar remains at 99% when workers have finished use frames + 1
final Ticker ticker = ImageJUtils.createTicker(source.getFrames() + 1L, nThreads);
// Open the first frame to get the bit depth.
// Assume the first pixels are not empty as the source is open.
Object pixels = source.nextRaw();
final int bitDepth = ImageJUtils.getBitDepth(pixels);
// Choose the moment accumulator: rolling (numerically stable), exact
// integer (when the data fits 16-bit unsigned), or simple sums.
ArrayMoment moment;
if (settings.rollingAlgorithm) {
moment = new RollingArrayMoment();
// We assume 16-bit camera at the maximum
} else if (bitDepth <= 16 && IntegerArrayMoment.isValid(IntegerType.UNSIGNED_16, source.getFrames())) {
moment = new IntegerArrayMoment();
} else {
moment = new SimpleArrayMoment();
}
// Bounded queue so the reader cannot run far ahead of the workers.
final BlockingQueue<Object> jobs = new ArrayBlockingQueue<>(nThreads * 2);
for (int i = 0; i < nThreads; i++) {
final ImageWorker worker = new ImageWorker(ticker, jobs, moment);
workers.add(worker);
futures.add(executor.submit(worker));
}
// Process the raw pixel data
long lastTime = 0;
while (pixels != null) {
final long time = System.currentTimeMillis();
// Throttle the interrupt check / status update to every 150 ms.
if (time - lastTime > 150) {
if (ImageJUtils.isInterrupted()) {
error = true;
break;
}
lastTime = time;
IJ.showStatus("Analysing " + sd.name + " Frame " + source.getStartFrameNumber());
}
put(jobs, pixels);
pixels = source.nextRaw();
}
source.close();
if (error) {
// Kill the workers
workers.stream().forEach(worker -> worker.finished = true);
// Clear the queue
jobs.clear();
// Signal any waiting workers
workers.stream().forEach(worker -> jobs.add(ImageWorker.STOP_SIGNAL));
// Cancel by interruption. We set the finished flag so the ImageWorker should
// ignore the interrupt.
futures.stream().forEach(future -> future.cancel(true));
break;
}
// Finish all the worker threads cleanly
workers.stream().forEach(worker -> jobs.add(ImageWorker.STOP_SIGNAL));
// Wait for all to finish
ConcurrencyUtils.waitForCompletionUnchecked(futures);
// Create the final aggregate statistics
for (final ImageWorker w : workers) {
moment.add(w.moment);
}
data[2 * n] = moment.getMean();
data[2 * n + 1] = moment.getVariance();
// Get the processing speed.
sw.stop();
// ticker holds the number of frames processed
final double bits = (double) bitDepth * source.getFrames() * source.getWidth() * source.getHeight();
final double bps = bits / sw.getTime(TimeUnit.SECONDS);
final SiPrefix prefix = SiPrefix.getSiPrefix(bps);
ImageJUtils.log("Processed %d frames. Time = %s. Rate = %s %sbits/s", moment.getN(), sw.toString(), MathUtils.rounded(prefix.convert(bps)), prefix.getPrefix());
// Reset
futures.clear();
workers.clear();
// Cache the result so it can be reused on the next run.
final ImageStack stack = new ImageStack(width, height);
stack.addSlice("Mean", SimpleArrayUtils.toFloat(data[2 * n]));
stack.addSlice("Variance", SimpleArrayUtils.toFloat(data[2 * n + 1]));
IJ.save(new ImagePlus("PerPixel", stack), file.getPath());
}
final Statistics s = Statistics.create(data[2 * n]);
if (pixelOffset != null) {
// Compute mean ADU
final Statistics signal = new Statistics();
final double[] mean = data[2 * n];
for (int i = 0; i < pixelOffset.length; i++) {
signal.add(mean[i] - pixelOffset[i]);
}
ImageJUtils.log("%s Mean = %s +/- %s. Signal = %s +/- %s ADU", sd.name, MathUtils.rounded(s.getMean()), MathUtils.rounded(s.getStandardDeviation()), MathUtils.rounded(signal.getMean()), MathUtils.rounded(signal.getStandardDeviation()));
} else {
// Set the offset assuming the first sub-directory is the bias image
pixelOffset = data[0];
pixelVariance = data[1];
statsOffset = s;
statsVariance = Statistics.create(pixelVariance);
ImageJUtils.log("%s Offset = %s +/- %s. Variance = %s +/- %s", sd.name, MathUtils.rounded(s.getMean()), MathUtils.rounded(s.getStandardDeviation()), MathUtils.rounded(statsVariance.getMean()), MathUtils.rounded(statsVariance.getStandardDeviation()));
}
IJ.showProgress(1);
}
ImageJUtils.clearSlowProgress();
if (error) {
executor.shutdownNow();
IJ.showStatus(TITLE + " cancelled");
return;
}
executor.shutdown();
if (pixelOffset == null || pixelVariance == null) {
IJ.showStatus(TITLE + " error: no bias image");
return;
}
// Compute the gain
ImageJUtils.showStatus("Computing gain");
final double[] pixelGain = new double[pixelOffset.length];
// Accumulators for the per-pixel least-squares gain estimate.
final double[] bibiT = new double[pixelGain.length];
final double[] biaiT = new double[pixelGain.length];
// Ignore first as this is the 0 exposure image
for (int n = 1; n < nSubDirs; n++) {
// Use equation 2.5 from the Huang et al paper.
final double[] b = data[2 * n];
final double[] a = data[2 * n + 1];
for (int i = 0; i < pixelGain.length; i++) {
// bi = bias-corrected mean; ai = bias-corrected variance.
final double bi = b[i] - pixelOffset[i];
final double ai = a[i] - pixelVariance[i];
bibiT[i] += bi * bi;
biaiT[i] += bi * ai;
}
}
for (int i = 0; i < pixelGain.length; i++) {
pixelGain[i] = biaiT[i] / bibiT[i];
}
final Statistics statsGain = Statistics.create(pixelGain);
ImageJUtils.log("Gain Mean = %s +/- %s", MathUtils.rounded(statsGain.getMean()), MathUtils.rounded(statsGain.getStandardDeviation()));
// Histogram of offset, variance and gain
final int bins = 2 * HistogramPlot.getBinsSturgesRule(pixelGain.length);
final WindowOrganiser wo = new WindowOrganiser();
showHistogram("Offset (ADU)", pixelOffset, bins, statsOffset, wo);
showHistogram("Variance (ADU^2)", pixelVariance, bins, statsVariance, wo);
showHistogram("Gain (ADU/e)", pixelGain, bins, statsGain, wo);
wo.tile();
// Save
final float[] bias = SimpleArrayUtils.toFloat(pixelOffset);
final float[] variance = SimpleArrayUtils.toFloat(pixelVariance);
final float[] gain = SimpleArrayUtils.toFloat(pixelGain);
measuredStack = new ImageStack(width, height);
measuredStack.addSlice("Offset", bias);
measuredStack.addSlice("Variance", variance);
measuredStack.addSlice("Gain", gain);
// Optionally save the camera model.
final ExtendedGenericDialog egd = new ExtendedGenericDialog(TITLE);
egd.addMessage("Save the sCMOS camera model?");
if (settings.modelDirectory == null) {
settings.modelDirectory = settings.directory;
settings.modelName = "sCMOS Camera";
}
egd.addStringField("Model_name", settings.modelName, 30);
egd.addDirectoryField("Model_directory", settings.modelDirectory);
egd.showDialog();
if (!egd.wasCanceled()) {
settings.modelName = egd.getNextString();
settings.modelDirectory = egd.getNextString();
saveCameraModel(width, height, bias, gain, variance);
}
// Remove the status from the ij.io.ImageWriter class
IJ.showStatus("");
ImageJUtils.log("Analysis time = " + TextUtils.millisToString(System.currentTimeMillis() - start));
}
Use of uk.ac.sussex.gdsc.core.utils.LocalList in the project GDSC-SMLM by aherbert.
From the class CmosAnalysis, method simulate.
/**
 * Simulates an sCMOS camera. Generates random per-pixel offset (Poisson),
 * variance (exponential) and gain (Gaussian) maps, saves them as a stack, then
 * renders simulated frames for each configured photon level using a thread pool.
 *
 * @throws IOException if an output directory cannot be created
 */
private void simulate() throws IOException {
  // Create the offset, variance and gain for each pixel
  final int n = settings.size * settings.size;
  final float[] pixelOffset = new float[n];
  final float[] pixelVariance = new float[n];
  final float[] pixelGain = new float[n];
  IJ.showStatus("Creating random per-pixel readout");
  final long start = System.currentTimeMillis();
  final UniformRandomProvider rg = UniformRandomProviders.create();
  final DiscreteSampler pd = PoissonSamplerUtils.createPoissonSampler(rg, settings.offset);
  final ContinuousSampler ed = SamplerUtils.createExponentialSampler(rg, settings.variance);
  final SharedStateContinuousSampler gauss = SamplerUtils.createGaussianSampler(rg, settings.gain, settings.gainStdDev);
  Ticker ticker = ImageJUtils.createTicker(n, 0);
  for (int i = 0; i < n; i++) {
    // Q. Should these be clipped to a sensible range?
    pixelOffset[i] = pd.sample();
    pixelVariance[i] = (float) ed.sample();
    pixelGain[i] = (float) gauss.sample();
    ticker.tick();
  }
  IJ.showProgress(1);
  // Save to the directory as a stack
  final ImageStack simulationStack = new ImageStack(settings.size, settings.size);
  simulationStack.addSlice("Offset", pixelOffset);
  simulationStack.addSlice("Variance", pixelVariance);
  simulationStack.addSlice("Gain", pixelGain);
  simulationImp = new ImagePlus("PerPixel", simulationStack);
  // Only the info property is saved to the TIFF file
  simulationImp.setProperty("Info", String.format("Offset=%s; Variance=%s; Gain=%s +/- %s", MathUtils.rounded(settings.offset), MathUtils.rounded(settings.variance), MathUtils.rounded(settings.gain), MathUtils.rounded(settings.gainStdDev)));
  IJ.save(simulationImp, new File(settings.directory, "perPixelSimulation.tif").getPath());
  // Create thread pool and workers
  final int threadCount = getThreads();
  final ExecutorService executor = Executors.newFixedThreadPool(threadCount);
  // Fix: size the list and divide the work by the pool's actual thread count
  // (previously an unrelated 'numberOfThreads' was used).
  final LocalList<Future<?>> futures = new LocalList<>(threadCount);
  // Simulate the exposure input.
  final int[] photons = settings.getPhotons();
  // For saving stacks
  final int blockSize = 10;
  int numberPerThread = (int) Math.ceil((double) settings.frames / threadCount);
  // Convert to fit the block size
  numberPerThread = (int) Math.ceil((double) numberPerThread / blockSize) * blockSize;
  final Pcg32 rng = Pcg32.xshrs(start);
  // Note the bias is increased by 3-fold so add 2 to the length
  ticker = Ticker.createStarted(new ImageJTrackProgress(true), (long) (photons.length + 2) * settings.frames, threadCount > 1);
  // Fix: ensure the pool is shut down even if directory creation throws,
  // otherwise the non-daemon worker threads leak.
  try {
    for (final int p : photons) {
      ImageJUtils.showStatus(() -> "Simulating " + TextUtils.pleural(p, "photon"));
      // Create the directory
      final Path out = Paths.get(settings.directory, String.format("photon%03d", p));
      Files.createDirectories(out);
      // Increase frames for bias image
      final int frames = settings.frames * (p == 0 ? 3 : 1);
      for (int from = 0; from < frames; ) {
        final int to = Math.min(from + numberPerThread, frames);
        futures.add(executor.submit(new SimulationWorker(ticker, rng.split(), out.toString(), simulationStack, from, to, blockSize, p)));
        from = to;
      }
      // Wait for the current photon level before starting the next.
      ConcurrencyUtils.waitForCompletionUnchecked(futures);
      futures.clear();
    }
  } finally {
    executor.shutdown();
  }
  final String msg = "Simulation time = " + TextUtils.millisToString(System.currentTimeMillis() - start);
  IJ.showStatus(msg);
  ImageJUtils.clearSlowProgress();
  ImageJUtils.log(msg);
}
Use of uk.ac.sussex.gdsc.core.utils.LocalList in the project GDSC-SMLM by aherbert.
From the class FailCountManager, method loadData.
/**
 * Load the fail count data from a file.
 *
 * <p>The file must contain a header line (ignored) followed by records of three
 * tab/comma/space separated fields: {@code id, candidate, status}. Ids and
 * candidates are strictly positive; candidates for each id must start at 1 and
 * form a continuous sequence. On success the parsed data replaces the shared
 * reference; on failure an error dialog is shown and the reference is unchanged.
 */
private void loadData() {
final String filename = ImageJUtils.getFilename("Fail_count_data_filename", settings.getFilename());
if (filename == null) {
return;
}
settings.setFilename(filename);
final LocalList<FailCountData> countData = new LocalList<>();
try (BufferedReader br = Files.newBufferedReader(Paths.get(filename))) {
final Pattern pattern = Pattern.compile("[\t, ]+");
// Ignore the first line
String line = br.readLine();
// Accumulates the pass/fail status sequence for the current id.
final BooleanArray array = new BooleanArray(100);
int lastId = 0;
int lastCandidate = 0;
while ((line = br.readLine()) != null) {
final String[] data = pattern.split(line);
if (data.length != 3) {
throw new IOException("Require 3 fields in the data");
}
final int id = Integer.parseInt(data[0]);
if (id < 1) {
throw new IOException("ID must be strictly positive");
}
final int candidate = Integer.parseInt(data[1]);
if (candidate < 1) {
throw new IOException("Candidate must be strictly positive");
}
final boolean ok = guessStatus(data[2]);
if (lastId != id) {
// New id: flush the data collected for the previous id.
if (array.size() > 0) {
countData.add(new FailCountData(lastId, array.toArray()));
array.clear();
}
if (candidate != 1) {
throw new IOException("Candidate must start at 1");
}
lastId = id;
// Ensure continuous
lastCandidate = candidate - 1;
}
// Require continuous sequence
if (candidate - lastCandidate == 1) {
array.add(ok);
lastCandidate = candidate;
} else {
// Make impossible to add any more for this ID
lastCandidate = -1;
}
}
// Final ID
if (array.size() > 0) {
countData.add(new FailCountData(lastId, array.toArray()));
}
IJ.showMessage(TITLE, "Loaded " + TextUtils.pleural(countData.size(), "sequence"));
failCountDataRef.set(countData);
} catch (final NumberFormatException | IOException ex) {
IJ.error(TITLE, "Failed to load data:\n" + ex.getMessage());
}
}
Aggregations