Example 1 with CameraType

Use of uk.ac.sussex.gdsc.smlm.data.config.CalibrationProtos.CameraType in project GDSC-SMLM by aherbert.

From the class CreateData, the method validateCameraOptions:

private void validateCameraOptions() {
    final CameraType cameraType = settings.getCameraType();
    final boolean isCcd = CalibrationProtosHelper.isCcdCameraType(cameraType);
    if (isCcd) {
        if (cameraType == CameraType.EMCCD) {
            ParameterUtils.isPositive("EM gain", settings.getEmGain());
        }
        ParameterUtils.isPositive("Camera gain", settings.getCameraGain());
        ParameterUtils.isPositive("Read noise", settings.getReadNoise());
        final double noiseRange = settings.getReadNoise() * settings.getCameraGain() * 4;
        ParameterUtils.isEqualOrAbove("Bias must prevent clipping the read noise (@ +/- 4 StdDev) so ", settings.getBias(), noiseRange);
        cameraModel = createCcdCameraModel();
    } else if (cameraType == CameraType.SCMOS) {
        // Load the model
        cameraModel = CameraModelManager.load(settings.getCameraModelName());
        if (cameraModel == null) {
            throw new IllegalArgumentException("Unknown camera model for name: " + settings.getCameraModelName());
        }
        // Check the width is above the selected size
        Rectangle modelBounds = cameraModel.getBounds();
        final int size = settings.getSize();
        if (modelBounds.width < size || modelBounds.height < size) {
            throw new IllegalArgumentException(String.format("Camera model bounds [x=%d,y=%d,width=%d,height=%d] are smaller than " + "simulation size [%d]", modelBounds.x, modelBounds.y, modelBounds.width, modelBounds.height, size));
        }
        // Ask for a crop
        if (modelBounds.width > size || modelBounds.height > size) {
            final GenericDialog gd = new GenericDialog(TITLE);
            // @formatter:off
            ImageJUtils.addMessage(gd, "WARNING:\n \nCamera model bounds\n[x=%d,y=%d,width=%d,height=%d]\n" + "are larger than the simulation size [=%d].\n \nCrop the model?", modelBounds.x, modelBounds.y, modelBounds.width, modelBounds.height, size);
            // @formatter:on
            gd.addCheckbox("Random_crop", settings.getRandomCrop());
            final int upperx = modelBounds.x + modelBounds.width - size;
            final int uppery = modelBounds.y + modelBounds.height - size;
            gd.addSlider("Origin_x", modelBounds.x, upperx, MathUtils.clip(modelBounds.x, upperx, settings.getOriginX()));
            gd.addSlider("Origin_y", modelBounds.y, uppery, MathUtils.clip(modelBounds.y, uppery, settings.getOriginY()));
            gd.addHelp(HelpUrls.getUrl(helpKey));
            gd.showDialog();
            if (gd.wasCanceled()) {
                throw new IllegalArgumentException("Unknown camera model crop");
            }
            settings.setRandomCrop(gd.getNextBoolean());
            settings.setOriginX((int) gd.getNextNumber());
            settings.setOriginY((int) gd.getNextNumber());
            SettingsManager.writeSettings(settings.build());
            int ox;
            int oy;
            if (settings.getRandomCrop()) {
                final UniformRandomProvider rng = createRandomGenerator();
                ox = new DiscreteUniformSampler(rng, modelBounds.x, upperx).sample();
                oy = new DiscreteUniformSampler(rng, modelBounds.y, uppery).sample();
            } else {
                ox = settings.getOriginX();
                oy = settings.getOriginY();
            }
            final Rectangle bounds = new Rectangle(ox, oy, size, size);
            cameraModel = cameraModel.crop(bounds, false);
            modelBounds = cameraModel.getBounds();
            if (modelBounds.width != size || modelBounds.height != size) {
                throw new IllegalArgumentException("Failed to crop camera model to bounds: " + bounds);
            }
        }
    } else {
        throw new IllegalArgumentException("Unsupported camera type: " + CalibrationProtosHelper.getName(cameraType));
    }
}
Also used : DiscreteUniformSampler(org.apache.commons.rng.sampling.distribution.DiscreteUniformSampler) GenericDialog(ij.gui.GenericDialog) ExtendedGenericDialog(uk.ac.sussex.gdsc.core.ij.gui.ExtendedGenericDialog) Rectangle(java.awt.Rectangle) UniformRandomProvider(org.apache.commons.rng.UniformRandomProvider) CameraType(uk.ac.sussex.gdsc.smlm.data.config.CalibrationProtos.CameraType)
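
The random crop branch above draws a uniform origin inside the camera model bounds and clamps the slider defaults with MathUtils.clip. The standalone sketch below isolates that sampling pattern with Commons RNG; the bounds, the simulation size and the SPLIT_MIX_64 source are placeholder assumptions, not values taken from the plugin.

import java.awt.Rectangle;

import org.apache.commons.rng.UniformRandomProvider;
import org.apache.commons.rng.sampling.distribution.DiscreteUniformSampler;
import org.apache.commons.rng.simple.RandomSource;

public class RandomCropSketch {
    public static void main(String[] args) {
        // Hypothetical camera model bounds and simulation size.
        final Rectangle modelBounds = new Rectangle(0, 0, 512, 512);
        final int size = 64;
        // Upper origin limits keep a size x size crop fully inside the model
        // (the same arithmetic as upperx/uppery in validateCameraOptions).
        final int upperX = modelBounds.x + modelBounds.width - size;
        final int upperY = modelBounds.y + modelBounds.height - size;
        // Assumes commons-rng-simple 1.3+ for the instance create() method.
        final UniformRandomProvider rng = RandomSource.SPLIT_MIX_64.create();
        final int ox = new DiscreteUniformSampler(rng, modelBounds.x, upperX).sample();
        final int oy = new DiscreteUniformSampler(rng, modelBounds.y, upperY).sample();
        // The crop passed to cameraModel.crop(bounds, false) is then:
        final Rectangle crop = new Rectangle(ox, oy, size, size);
        System.out.println("Crop = " + crop);
    }
}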

Example 2 with CameraType

Use of uk.ac.sussex.gdsc.smlm.data.config.CalibrationProtos.CameraType in project GDSC-SMLM by aherbert.

From the class CreateData, the method showSimulationParametersDialog:

private SimulationParameters showSimulationParametersDialog(ImagePlus imp, MemoryPeakResults results) {
    final int molecules = results.size();
    // Get the missing parameters from the user
    boolean fullSimulation = false;
    double sd = -1;
    if (!results.convertToPreferredUnits()) {
        IJ.error(TITLE, String.format("Results should be in the preferred units (%s,%s)", UnitHelper.getName(MemoryPeakResults.PREFERRED_DISTANCE_UNIT), UnitHelper.getName(MemoryPeakResults.PREFERRED_INTENSITY_UNIT)));
        return null;
    }
    // Get these from the data
    final RawResultProcedure sp = new RawResultProcedure(results);
    sp.getBixyz();
    final float[] signal = sp.intensity;
    float[] limits = MathUtils.limits(signal);
    final double minSignal = limits[0];
    final double maxSignal = limits[1];
    final double signalPerFrame = MathUtils.sum(signal) / molecules;
    final float[] depths = sp.z;
    limits = MathUtils.limits(depths);
    float depth = Math.max(Math.abs(limits[0]), Math.abs(limits[1]));
    final boolean fixedDepth = Double.compare(limits[0], limits[1]) == 0;
    final CalibrationWriter cal = results.getCalibrationWriter();
    final String iUnits = " " + UnitHelper.getName(cal.getIntensityUnit());
    final String zUnits = " " + UnitHelper.getName(cal.getDistanceUnit());
    // Get this from the user
    double background = -1;
    // Use last simulation parameters for missing settings.
    // This is good if we are re-running the plugin to load data.
    Rectangle lastCameraBounds = null;
    if (simulationParameters != null && simulationParameters.isLoaded()) {
        fullSimulation = simulationParameters.fullSimulation;
        sd = simulationParameters.sd;
        background = simulationParameters.background;
        if (!cal.hasBias()) {
            cal.setBias(simulationParameters.bias);
        }
        if (!cal.hasCountPerPhoton()) {
            cal.setCountPerPhoton(simulationParameters.gain);
        }
        if (!cal.hasQuantumEfficiency()) {
            cal.setQuantumEfficiency(simulationParameters.qe);
        }
        if (!cal.hasReadNoise()) {
            cal.setReadNoise(simulationParameters.readNoise);
        }
        if (!cal.hasCameraType()) {
            cal.setCameraType(simulationParameters.cameraType);
        }
        if (!cal.hasNmPerPixel()) {
            cal.setNmPerPixel(simulationParameters.pixelPitch);
        }
        if (!cal.hasCameraModelName()) {
            cal.setCameraModelName(simulationParameters.cameraModelName);
        }
        lastCameraBounds = simulationParameters.cameraBounds;
    }
    // Show a dialog to confirm settings
    final ExtendedGenericDialog gd = new ExtendedGenericDialog(TITLE);
    final StringBuilder sb = new StringBuilder();
    sb.append("Results contain ").append(TextUtils.pleural(molecules, "molecule")).append('\n');
    sb.append("Min signal = ").append(MathUtils.rounded(minSignal)).append(iUnits).append('\n');
    sb.append("Max signal = ").append(MathUtils.rounded(maxSignal)).append(iUnits).append('\n');
    sb.append("Av signal = ").append(MathUtils.rounded(signalPerFrame)).append(iUnits).append('\n');
    if (fixedDepth) {
        sb.append("Fixed depth = ").append(MathUtils.rounded(depth)).append(zUnits).append('\n');
    }
    gd.addMessage(sb.toString());
    gd.addCheckbox("Flourophore_simulation", fullSimulation);
    gd.addNumericField("Gaussian_SD", sd, 3, 8, "nm");
    gd.addNumericField("Pixel_pitch", cal.getNmPerPixel(), 3, 8, "nm");
    gd.addNumericField("Background", background, 3, 8, "photon");
    // Camera type does not need the full simulation settings. Plus the units are different
    // so just re-implement.
    gd.addChoice("Camera_type", SettingsManager.getCameraTypeNames(), CalibrationProtosHelper.getName(cal.getCameraType()), new OptionListener<Integer>() {

        @Override
        public boolean collectOptions(Integer field) {
            cal.setCameraType(SettingsManager.getCameraTypeValues()[field]);
            return collectOptions(false);
        }

        @Override
        public boolean collectOptions() {
            return collectOptions(true);
        }

        private boolean collectOptions(boolean silent) {
            final CameraType cameraType = cal.getCameraType();
            final boolean isCcd = CalibrationProtosHelper.isCcdCameraType(cameraType);
            final ExtendedGenericDialog egd = new ExtendedGenericDialog(TITLE, null);
            if (isCcd) {
                egd.addNumericField("Total_gain", cal.getCountPerPhoton(), 3, 8, "count/photon");
                egd.addNumericField("Quantum_efficiency", cal.getQuantumEfficiency(), 3, 8, "e-/photon");
                egd.addNumericField("Read_noise", cal.getReadNoise(), 3, 8, "count");
                egd.addNumericField("Bias", cal.getBias(), 3, 8, "count");
            } else if (cameraType == CameraType.SCMOS) {
                final String[] models = CameraModelManager.listCameraModels(true);
                egd.addChoice("Camera_model_name", models, cal.getCameraModelName());
                egd.addNumericField("Quantum_efficiency", cal.getQuantumEfficiency(), 2, 6, "electron/photon");
            } else {
                IJ.error("Unsupported camera type " + CalibrationProtosHelper.getName(cameraType));
                return false;
            }
            egd.setSilent(silent);
            egd.showDialog(true, gd);
            if (egd.wasCanceled()) {
                return false;
            }
            if (isCcd) {
                cal.setCountPerPhoton(egd.getNextNumber());
                cal.setQuantumEfficiency(egd.getNextNumber());
                cal.setReadNoise(egd.getNextNumber());
                cal.setBias(egd.getNextNumber());
            } else if (cameraType == CameraType.SCMOS) {
                cal.setCameraModelName(egd.getNextChoice());
                cal.setQuantumEfficiency(Math.abs(egd.getNextNumber()));
            }
            return true;
        }
    });
    if (!fixedDepth) {
        gd.addNumericField("Depth", depth, 3, 8, "pixel");
    }
    gd.addHelp(HelpUrls.getUrl("load-benchmark-data"));
    gd.showDialog();
    if (gd.wasCanceled()) {
        return null;
    }
    fullSimulation = gd.getNextBoolean();
    sd = gd.getNextNumber();
    cal.setNmPerPixel(gd.getNextNumber());
    background = gd.getNextNumber();
    cal.setCameraType(SettingsManager.getCameraTypeValues()[gd.getNextChoiceIndex()]);
    float myDepth = depth;
    if (!fixedDepth) {
        myDepth = (float) gd.getNextNumber();
        if (myDepth < depth) {
            IJ.error(TITLE, String.format("Input depth is smaller than the depth guessed from the data: %f < %f", myDepth, depth));
            return null;
        }
        depth = myDepth;
    }
    gd.collectOptions();
    // Validate settings
    Rectangle modelBounds = null;
    try {
        ParameterUtils.isAboveZero("Gaussian SD", sd);
        ParameterUtils.isAboveZero("Pixel pitch", cal.getNmPerPixel());
        ParameterUtils.isPositive("Background", background);
        ParameterUtils.isAboveZero("Quantum efficiency", cal.getQuantumEfficiency());
        ParameterUtils.isEqualOrBelow("Quantum efficiency", cal.getQuantumEfficiency(), 1);
        if (cal.isCcdCamera()) {
            ParameterUtils.isAboveZero("Total gain", cal.getCountPerPhoton());
            ParameterUtils.isPositive("Read noise", cal.getReadNoise());
            ParameterUtils.isPositive("Bias", cal.getBias());
        } else if (cal.isScmos()) {
            // Load the model
            cameraModel = CameraModelManager.load(cal.getCameraModelName());
            if (cameraModel == null) {
                IJ.error(TITLE, "Unknown camera model for name: " + cal.getCameraModelName());
                return null;
            }
            int ox = 0;
            int oy = 0;
            if (lastCameraBounds != null) {
                ox = lastCameraBounds.x;
                oy = lastCameraBounds.y;
            }
            cameraModel = PeakFit.cropCameraModel(cameraModel, new Rectangle(ox, oy, imp.getWidth(), imp.getHeight()), null, false);
            modelBounds = cameraModel.getBounds();
            final IJImageSource imageSource = (IJImageSource) results.getSource();
            imageSource.setOrigin(modelBounds.x, modelBounds.y);
            cal.clearGlobalCameraSettings();
        } else {
            IJ.error(TITLE, "Unknown camera type: " + cal.getCameraType());
            return null;
        }
    } catch (final IllegalArgumentException ex) {
        IJ.error(TITLE, ex.getMessage());
        return null;
    }
    // Store calibration
    results.setCalibration(cal.getCalibration());
    final double a = cal.getNmPerPixel();
    final double bias = cal.getBias();
    final double gain = cal.getCountPerPhoton();
    final double readNoise = cal.getReadNoise();
    final double qe = cal.getQuantumEfficiency();
    // Note: The calibration will throw an exception if the converter cannot be created.
    // This is OK as the data will be invalid.
    // Convert +/- depth to total depth in nm
    depth = cal.getDistanceConverter(DistanceUnit.NM).convert(depth * 2);
    // Compute total background variance in photons
    final double backgroundVariance = background;
    // Do not add EM-CCD noise factor. The Mortensen formula also includes this factor
    // so this is "double-counting" the EM-CCD.
    // if (emCCD)
    // backgroundVariance *= 2;
    // Read noise is in ADUs. Convert to Photons to get contribution to background variance
    final double readNoiseInPhotons = readNoise / gain;
    // Get the expected value at each pixel in photons. Assuming a Poisson distribution this
    // is equal to the total variance at the pixel.
    final double b2 = backgroundVariance + readNoiseInPhotons * readNoiseInPhotons;
    // Convert values to photons
    final TypeConverter<IntensityUnit> ic = cal.getIntensityConverter(IntensityUnit.PHOTON);
    final SimulationParameters p = new SimulationParameters(molecules, fullSimulation, sd, a, ic.convert(minSignal), ic.convert(maxSignal), ic.convert(signalPerFrame), depth, fixedDepth, bias, gain, qe, readNoise, cal.getCameraType(), cal.getCameraModelName(), modelBounds, background, b2, createPsf(sd / a));
    p.loaded = true;
    return p;
}
Also used : RawResultProcedure(uk.ac.sussex.gdsc.smlm.results.procedures.RawResultProcedure) Rectangle(java.awt.Rectangle) ExtendedGenericDialog(uk.ac.sussex.gdsc.core.ij.gui.ExtendedGenericDialog) ReadHint(uk.ac.sussex.gdsc.smlm.results.ImageSource.ReadHint) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) IJImageSource(uk.ac.sussex.gdsc.smlm.ij.IJImageSource) CalibrationWriter(uk.ac.sussex.gdsc.smlm.data.config.CalibrationWriter) IntensityUnit(uk.ac.sussex.gdsc.smlm.data.config.UnitProtos.IntensityUnit) CameraType(uk.ac.sussex.gdsc.smlm.data.config.CalibrationProtos.CameraType)
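
The comments near the end of this method explain how the per-pixel noise estimate b2 is assembled: a Poisson background contributes its mean as variance, and the read noise (in counts) is divided by the total gain to express it in photons before squaring. A minimal arithmetic sketch with made-up calibration values:

// Hypothetical calibration values, for illustration only.
final double background = 5.0;  // photons/pixel (Poisson mean == variance)
final double readNoise = 6.0;   // counts (ADU)
final double gain = 45.0;       // counts/photon

// Convert the read noise to photons, then add its variance to the
// Poisson background variance to get the expected per-pixel variance b2.
final double readNoiseInPhotons = readNoise / gain;                     // ~0.133 photons
final double b2 = background + readNoiseInPhotons * readNoiseInPhotons; // ~5.018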

Example 3 with CameraType

Use of uk.ac.sussex.gdsc.smlm.data.config.CalibrationProtos.CameraType in project GDSC-SMLM by aherbert.

From the class BenchmarkFit, the method summariseResults:

private void summariseResults(Statistics[] stats, CameraModel cameraModel) {
    createTable();
    final StringBuilder sb = new StringBuilder();
    // Create the benchmark settings and the fitting settings
    sb.append(benchmarkParameters.getMolecules()).append('\t');
    sb.append(MathUtils.rounded(benchmarkParameters.getSignal())).append('\t');
    sb.append(MathUtils.rounded(benchmarkParameters.sd)).append('\t');
    sb.append(MathUtils.rounded(benchmarkParameters.pixelPitch)).append('\t');
    sb.append(MathUtils.rounded(getSa() * benchmarkParameters.pixelPitch)).append('\t');
    // Report XY in nm from the pixel centre
    sb.append(MathUtils.rounded(distanceFromCentre(benchmarkParameters.x))).append('\t');
    sb.append(MathUtils.rounded(distanceFromCentre(benchmarkParameters.y))).append('\t');
    sb.append(MathUtils.rounded(benchmarkParameters.pixelPitch * benchmarkParameters.z)).append('\t');
    final CameraType cameraType = benchmarkParameters.cameraType;
    if (cameraType == CameraType.SCMOS) {
        sb.append("sCMOS (").append(benchmarkParameters.cameraModelName).append(") ");
        final Rectangle bounds = benchmarkParameters.cameraBounds;
        final Rectangle cropBounds = cameraModel.getBounds();
        sb.append(" ").append(bounds.x + cropBounds.x).append(",").append(bounds.y + cropBounds.y);
        sb.append(" ").append(region.width).append("x").append(region.width);
    } else {
        sb.append(CalibrationProtosHelper.getName(cameraType));
        sb.append(" Gain=").append(benchmarkParameters.gain);
        sb.append(" B=").append(benchmarkParameters.bias);
    }
    sb.append('\t');
    sb.append(MathUtils.rounded(benchmarkParameters.getBackground())).append('\t');
    sb.append(MathUtils.rounded(benchmarkParameters.noise)).append('\t');
    sb.append(MathUtils.rounded(benchmarkParameters.getSignal() / benchmarkParameters.noise)).append('\t');
    sb.append(MathUtils.rounded(benchmarkParameters.precisionN)).append('\t');
    sb.append(MathUtils.rounded(benchmarkParameters.precisionX)).append('\t');
    sb.append(MathUtils.rounded(benchmarkParameters.precisionXml)).append('\t');
    sb.append(region.width).append("x");
    sb.append(region.height).append('\t');
    sb.append(MathUtils.rounded(fitConfig.getInitialPeakStdDev() * benchmarkParameters.pixelPitch)).append('\t');
    sb.append(PsfProtosHelper.getName(fitConfig.getPsf().getPsfType()));
    // Only fixed fitting can ignore the signal
    if (fitConfig.isFixedPsf() && !signalFitting) {
        sb.append("NS");
    }
    if (!backgroundFitting) {
        sb.append("NB");
    }
    sb.append(":").append(PeakFit.getSolverName(fitConfig));
    if (fitConfig.isModelCameraMle()) {
        sb.append(":Camera\t");
        // Add details of the noise model for the MLE
        final CalibrationReader r = new CalibrationReader(fitConfig.getCalibration());
        sb.append("EM=").append(r.isEmCcd());
        sb.append(":G=").append(r.getCountPerPhoton());
        sb.append(":N=").append(r.getReadNoise());
    } else {
        sb.append('\t');
    }
    // Convert to units of the image (ADUs and pixels)
    final double[] convert = getConversionFactors();
    // Store the results for fitting on this benchmark dataset
    final BenchmarkResult benchmarkResult = new BenchmarkResult(benchmarkParameters, answer, sb.toString(), convert, this.results, this.resultsTime);
    if (!benchmarkResults.isEmpty()) {
        // Clear the results if the benchmark has changed
        if (benchmarkResults.getFirst().benchmarkParameters.id != benchmarkParameters.id) {
            benchmarkResults.clear();
        }
    }
    benchmarkResults.add(benchmarkResult);
    // Now output the actual results ...
    sb.append('\t');
    final double recall = (stats[0].getN() / (double) startPoints.length) / benchmarkParameters.getMolecules();
    sb.append(MathUtils.rounded(recall));
    for (int i = 0; i < stats.length; i++) {
        if (convert[i] != 0) {
            sb.append('\t').append(MathUtils.rounded(stats[i].getMean() * convert[i], 6)).append('\t').append(MathUtils.rounded(stats[i].getStandardDeviation() * convert[i]));
        } else {
            sb.append("\t0\t0");
        }
    }
    summaryTable.append(sb.toString());
}
Also used : Rectangle(java.awt.Rectangle) CameraType(uk.ac.sussex.gdsc.smlm.data.config.CalibrationProtos.CameraType) CalibrationReader(uk.ac.sussex.gdsc.smlm.data.config.CalibrationReader)
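
The recall appended at the end divides the number of converged fits by the number of start points and again by the molecule count, i.e. the fraction of all fit attempts that succeeded. A worked example with hypothetical counts:

// Hypothetical counts, for illustration only.
final long fitted = 900;    // stats[0].getN(): fits that converged
final int startPoints = 4;  // startPoints.length: positions tried per molecule
final int molecules = 250;  // benchmarkParameters.getMolecules()

// Every molecule is attempted once per start point, so there are
// startPoints * molecules attempts and recall is the successful fraction.
final double recall = (fitted / (double) startPoints) / molecules; // 0.9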

Example 4 with CameraType

Use of uk.ac.sussex.gdsc.smlm.data.config.CalibrationProtos.CameraType in project GDSC-SMLM by aherbert.

From the class CreateData, the method addCameraOptionsHeader:

private void addCameraOptionsHeader(Formatter formatter) {
    final CameraType cameraType = settings.getCameraType();
    final boolean isCcd = CalibrationProtosHelper.isCcdCameraType(cameraType);
    if (isCcd) {
        if (cameraType == CameraType.EMCCD) {
            addHeaderLine(formatter, "EM_gain", settings.getEmGain());
        }
        addHeaderLine(formatter, "Camera_gain", settings.getCameraGain());
        addHeaderLine(formatter, "Quantum_efficiency", getQuantumEfficiency());
        addHeaderLine(formatter, "Read_noise", settings.getReadNoise());
        addHeaderLine(formatter, "Bias", settings.getBias());
    } else if (cameraType == CameraType.SCMOS) {
        addHeaderLine(formatter, "Camera_model_name", settings.getCameraModelName());
        addHeaderLine(formatter, "Quantum_efficiency", getQuantumEfficiency());
    }
}
Also used : CameraType(uk.ac.sussex.gdsc.smlm.data.config.CalibrationProtos.CameraType)
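
The addHeaderLine helper is defined elsewhere in CreateData and is not shown on this page. A minimal sketch of the shape such a helper might take, using java.util.Formatter, offered purely as an assumption about its behaviour:

import java.util.Formatter;

public class HeaderSketch {
    // Hypothetical stand-in for CreateData.addHeaderLine: writes one
    // "# name = value" line to the supplied Formatter.
    static void addHeaderLine(Formatter formatter, String name, Object value) {
        formatter.format("# %s = %s%n", name, value);
    }

    public static void main(String[] args) {
        final Formatter out = new Formatter(new StringBuilder());
        // Mirrors the EM-CCD branch of addCameraOptionsHeader with dummy values.
        addHeaderLine(out, "EM_gain", 255.0);
        addHeaderLine(out, "Camera_gain", 55.0);
        System.out.print(out);
    }
}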

Example 5 with CameraType

Use of uk.ac.sussex.gdsc.smlm.data.config.CalibrationProtos.CameraType in project GDSC-SMLM by aherbert.

From the class CreateData, the method createLikelihoodFunction:

/**
 * Creates the likelihood function. This is used for CRLB computation.
 */
private void createLikelihoodFunction() {
    final CameraType cameraType = settings.getCameraType();
    final boolean isCcd = CalibrationProtosHelper.isCcdCameraType(cameraType);
    fiFunction = new BasePoissonFisherInformation[settings.getSize() * settings.getSize()];
    if (isCcd) {
        BasePoissonFisherInformation fi;
        final CreateDataSettingsHelper helper = new CreateDataSettingsHelper(settings);
        final double readNoise = helper.getReadNoiseInCounts();
        if (cameraType == CameraType.EMCCD) {
            // We only want the amplification (without QE applied)
            final double amp = helper.getAmplification();
            // This should be interpolated from a stored curve
            final InterpolatedPoissonFisherInformation i = CameraModelFisherInformationAnalysis.loadFunction(CameraModelFisherInformationAnalysis.CameraType.EM_CCD, amp, readNoise);
            if (i == null) {
                throw new IllegalStateException("No stored Fisher information for EM-CCD camera with gain " + amp + " and noise " + readNoise + "\n \nPlease generate using the " + CameraModelFisherInformationAnalysis.TITLE);
            }
            fi = i;
        } else {
            // This is fast enough to compute dynamically.
            // Read noise is in electrons so use directly.
            fi = new PoissonGaussianFisherInformation(settings.getReadNoise());
        }
        Arrays.fill(fiFunction, fi);
    } else if (cameraType == CameraType.SCMOS) {
        // Build per-pixel likelihood function.
        // Get the normalised variance per pixel.
        final float[] v = cameraModel.getNormalisedVariance(cameraModel.getBounds());
        // Build the function
        for (int i = 0; i < fiFunction.length; i++) {
            fiFunction[i] = new PoissonGaussianFisherInformation(Math.sqrt(v[i]));
        }
    } else {
        throw new IllegalArgumentException("Unsupported camera type: " + CalibrationProtosHelper.getName(cameraType));
    }
}
Also used : CreateDataSettingsHelper(uk.ac.sussex.gdsc.smlm.ij.settings.CreateDataSettingsHelper) BasePoissonFisherInformation(uk.ac.sussex.gdsc.smlm.function.BasePoissonFisherInformation) PoissonGaussianFisherInformation(uk.ac.sussex.gdsc.smlm.function.PoissonGaussianFisherInformation) CameraType(uk.ac.sussex.gdsc.smlm.data.config.CalibrationProtos.CameraType) InterpolatedPoissonFisherInformation(uk.ac.sussex.gdsc.smlm.function.InterpolatedPoissonFisherInformation)
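
For the non-EM CCD branch, the Fisher information of a Poisson signal with mean mu read out through Gaussian noise of standard deviation s is commonly approximated by 1/(mu + s^2); PoissonGaussianFisherInformation evaluates this more accurately, but the approximation shows why the sCMOS branch builds one function per pixel from the square root of its normalised variance. A rough sketch of that approximation, not the exact computation performed by the class:

// Approximate per-pixel Fisher information for a Poisson mean observed
// through additive Gaussian read noise. This is the simple variance-based
// approximation, not the series evaluated by PoissonGaussianFisherInformation.
static double approxFisherInformation(double mean, double gaussianSd) {
    return 1.0 / (mean + gaussianSd * gaussianSd);
}

// Per-pixel use mirroring the sCMOS branch, where v[i] is the
// normalised variance of pixel i:
//   double fi = approxFisherInformation(photons, Math.sqrt(v[i]));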

Aggregations

CameraType (uk.ac.sussex.gdsc.smlm.data.config.CalibrationProtos.CameraType): 8 uses
ExtendedGenericDialog (uk.ac.sussex.gdsc.core.ij.gui.ExtendedGenericDialog): 4 uses
Rectangle (java.awt.Rectangle): 3 uses
AtomicInteger (java.util.concurrent.atomic.AtomicInteger): 2 uses
CalibrationReader (uk.ac.sussex.gdsc.smlm.data.config.CalibrationReader): 2 uses
CalibrationWriter (uk.ac.sussex.gdsc.smlm.data.config.CalibrationWriter): 2 uses
IntensityUnit (uk.ac.sussex.gdsc.smlm.data.config.UnitProtos.IntensityUnit): 2 uses
GenericDialog (ij.gui.GenericDialog): 1 use
UniformRandomProvider (org.apache.commons.rng.UniformRandomProvider): 1 use
DiscreteUniformSampler (org.apache.commons.rng.sampling.distribution.DiscreteUniformSampler): 1 use
Calibration (uk.ac.sussex.gdsc.smlm.data.config.CalibrationProtos.Calibration): 1 use
AngleUnit (uk.ac.sussex.gdsc.smlm.data.config.UnitProtos.AngleUnit): 1 use
DistanceUnit (uk.ac.sussex.gdsc.smlm.data.config.UnitProtos.DistanceUnit): 1 use
BasePoissonFisherInformation (uk.ac.sussex.gdsc.smlm.function.BasePoissonFisherInformation): 1 use
InterpolatedPoissonFisherInformation (uk.ac.sussex.gdsc.smlm.function.InterpolatedPoissonFisherInformation): 1 use
PoissonGaussianFisherInformation (uk.ac.sussex.gdsc.smlm.function.PoissonGaussianFisherInformation): 1 use
IJImageSource (uk.ac.sussex.gdsc.smlm.ij.IJImageSource): 1 use
CreateDataSettingsHelper (uk.ac.sussex.gdsc.smlm.ij.settings.CreateDataSettingsHelper): 1 use
ReadHint (uk.ac.sussex.gdsc.smlm.results.ImageSource.ReadHint): 1 use
RawResultProcedure (uk.ac.sussex.gdsc.smlm.results.procedures.RawResultProcedure): 1 use