Use of uk.ac.sussex.gdsc.smlm.ij.IJImageSource in project GDSC-SMLM by aherbert.
Class CreateData, method setNoise.
/**
 * Sets the noise in the results if missing.
 *
 * <p>The noise is estimated per frame from the image data and converted from counts to the
 * intensity units of the results. Results whose frame is outside the image are left unset.
 *
 * @param results the results
 * @param imp the image the results were fitted from
 */
private static void setNoise(MemoryPeakResults results, ImagePlus imp) {
  // Loaded results do not have noise
  if (results.hasNoise()) {
    return;
  }

  IJ.showStatus("Estimating noise ...");

  // Compute noise per frame
  final ImageStack stack = imp.getImageStack();
  final int width = stack.getWidth();
  final int height = stack.getHeight();
  final IJImageSource source = new IJImageSource(imp);
  // Index 0 is unused: frames are 1-based
  final float[] noise = new float[source.getFrames() + 1];
  source.setReadHint(ReadHint.SEQUENTIAL);
  if (!source.open()) {
    // Cannot read the image data; leave the noise unset rather than risk an NPE below
    IJ.showStatus("");
    return;
  }
  for (int slice = 1; slice < noise.length; slice++) {
    final float[] data = source.next();
    if (data == null) {
      // Unexpected end of the image source; remaining frames keep noise = 0
      break;
    }
    // Use the trimmed method as there may be a lot of spots in the frame
    noise[slice] = FitWorker.estimateNoise(data, width, height,
        NoiseEstimatorMethod.QUICK_RESIDUALS_LEAST_TRIMMED_OF_SQUARES);
  }

  // Convert noise units from counts to the result format (index 0 is unused, skip it)
  final TypeConverter<IntensityUnit> c = results.getIntensityConverter(IntensityUnit.COUNT);
  for (int i = 1; i < noise.length; i++) {
    noise[i] = c.convertBack(noise[i]);
  }

  // Assign the per-frame noise to each result
  results.forEach((PeakResultProcedure) result -> {
    if (result.getFrame() < noise.length) {
      result.setNoise(noise[result.getFrame()]);
    }
  });
}
Use of uk.ac.sussex.gdsc.smlm.ij.IJImageSource in project GDSC-SMLM by aherbert.
Class CreateData, method loadBenchmarkData.
/**
 * Load benchmark data using an open image and a XYZ text file.
 *
 * <p>Prompts for the benchmark image and results, attaches the image as the source of the
 * results, collects the simulation parameters and stores the benchmark data for analysis.
 */
private void loadBenchmarkData() {
// Ask the user for the benchmark image/results names; abort silently on cancel
if (!showLoadDialog()) {
// resetMemory();
return;
}
// Load the image
final ImagePlus imp = WindowManager.getImage(benchmarkImage);
if (imp == null) {
IJ.error(TITLE, "No benchmark image: " + benchmarkImage);
// resetMemory();
return;
}
// Load the results
final MemoryPeakResults results = getSimulationResults();
if (results == null) {
IJ.error(TITLE, "No benchmark results: " + benchmarkResultsName);
// resetMemory();
return;
}
// Bind the results to the image before showing the parameters dialog:
// the dialog reads the results source/bounds configured here.
results.setName(imp.getTitle() + " (Results)");
results.setBounds(new Rectangle(0, 0, imp.getWidth(), imp.getHeight()));
final IJImageSource imageSource = new IJImageSource(imp);
results.setSource(imageSource);
// Load the settings as these are used in the dialog
settings = SettingsManager.readCreateDataSettings(0).toBuilder();
simulationParameters = showSimulationParametersDialog(imp, results);
if (simulationParameters != null) {
// Convert data to allow analysis as if a Gaussian2D PSF
final boolean isGaussian2D = PsfHelper.isGaussian2D(results.getPsf());
if (isGaussian2D) {
Gaussian2DPeakResultHelper.addMeanIntensity(results.getPsf(), results);
} else if (simulationParameters.sd > 0) {
// Not a Gaussian 2D PSF: install a one-axis Gaussian PSF using the simulation SD
// (converted from nm back to the results' distance units).
final TypeConverter<DistanceUnit> dc = results.getDistanceConverter(DistanceUnit.NM);
final PSF.Builder psf = PsfProtosHelper.getDefaultPsf(PSFType.ONE_AXIS_GAUSSIAN_2D).toBuilder();
psf.getParametersBuilder(0).setValue(dc.convertBack(simulationParameters.sd));
results.setPsf(psf.build());
// Update all the results. This assumes the results do not have data for a custom PSF,
// i.e. the parameters only have [t,i,x,y,z]
final LocalList<PeakResult> newResults = new LocalList<>(results.size());
final float sd = (float) dc.convertBack(simulationParameters.sd);
// Mean signal factor for a Gaussian of this SD (P05 definition)
final double meanFactor = Gaussian2DPeakResultHelper.getMeanSignalUsingP05(1, sd, sd);
results.forEach((PeakResultProcedure) r -> {
// Append one parameter slot for the SD and set the derived mean intensity
final PeakResult peak = r.resize(PeakResult.STANDARD_PARAMETERS + 1);
peak.setMeanIntensity((float) (peak.getIntensity() * meanFactor));
peak.setParameter(PeakResult.STANDARD_PARAMETERS, sd);
newResults.add(peak);
});
// Replace the contents of the results with the resized copies
results.begin();
results.addAll(newResults);
results.end();
}
// Derive background and per-frame noise, then publish the benchmark data
setBackground(results);
setNoise(results, imp);
setBenchmarkResults(imp, results);
IJ.showStatus("Loaded " + TextUtils.pleural(results.size(), "result"));
} else {
// Dialog cancelled or parameters invalid: discard any stored benchmark state
resetMemory();
}
}
Use of uk.ac.sussex.gdsc.smlm.ij.IJImageSource in project GDSC-SMLM by aherbert.
Class CreateData, method showSimulationParametersDialog.
/**
 * Show a dialog to collect the simulation parameters that are missing from the benchmark
 * results, seeding defaults from the data and from the previous simulation where available.
 *
 * <p>NOTE: the order of the {@code gd.getNext*} calls below must exactly mirror the order of
 * the {@code gd.add*} calls; do not reorder either section independently.
 *
 * @param imp the benchmark image
 * @param results the benchmark results (converted to preferred units by this method)
 * @return the simulation parameters, or null if cancelled or the settings are invalid
 */
private SimulationParameters showSimulationParametersDialog(ImagePlus imp, MemoryPeakResults results) {
final int molecules = results.size();
// Get the missing parameters from the user
boolean fullSimulation = false;
double sd = -1;
// The statistics below assume the preferred distance/intensity units
if (!results.convertToPreferredUnits()) {
IJ.error(TITLE, String.format("Results should be in the preferred units (%s,%s)", UnitHelper.getName(MemoryPeakResults.PREFERRED_DISTANCE_UNIT), UnitHelper.getName(MemoryPeakResults.PREFERRED_INTENSITY_UNIT)));
return null;
}
// Get these from the data
final RawResultProcedure sp = new RawResultProcedure(results);
sp.getBixyz();
final float[] signal = sp.intensity;
float[] limits = MathUtils.limits(signal);
final double minSignal = limits[0];
final double maxSignal = limits[1];
final double signalPerFrame = MathUtils.sum(signal) / molecules;
// Depth is the largest |z| seen in the data
final float[] depths = sp.z;
limits = MathUtils.limits(depths);
float depth = Math.max(Math.abs(limits[0]), Math.abs(limits[1]));
// All z values equal => the depth is fixed (no z range in the data)
final boolean fixedDepth = Double.compare(limits[0], limits[1]) == 0;
final CalibrationWriter cal = results.getCalibrationWriter();
final String iUnits = " " + UnitHelper.getName(cal.getIntensityUnit());
final String zUnits = " " + UnitHelper.getName(cal.getDistanceUnit());
// Get this from the user
double background = -1;
// Use last simulation parameters for missing settings.
// This is good if we are re-running the plugin to load data.
Rectangle lastCameraBounds = null;
if (simulationParameters != null && simulationParameters.isLoaded()) {
fullSimulation = simulationParameters.fullSimulation;
sd = simulationParameters.sd;
background = simulationParameters.background;
// Only fill in calibration fields that are missing; never overwrite loaded values
if (!cal.hasBias()) {
cal.setBias(simulationParameters.bias);
}
if (!cal.hasCountPerPhoton()) {
cal.setCountPerPhoton(simulationParameters.gain);
}
if (!cal.hasQuantumEfficiency()) {
cal.setQuantumEfficiency(simulationParameters.qe);
}
if (!cal.hasReadNoise()) {
cal.setReadNoise(simulationParameters.readNoise);
}
if (!cal.hasCameraType()) {
cal.setCameraType(simulationParameters.cameraType);
}
if (!cal.hasNmPerPixel()) {
cal.setNmPerPixel(simulationParameters.pixelPitch);
}
if (!cal.hasCameraModelName()) {
cal.setCameraModelName(simulationParameters.cameraModelName);
}
lastCameraBounds = simulationParameters.cameraBounds;
}
// Show a dialog to confirm settings
final ExtendedGenericDialog gd = new ExtendedGenericDialog(TITLE);
final StringBuilder sb = new StringBuilder();
sb.append("Results contain ").append(TextUtils.pleural(molecules, "molecule")).append('\n');
sb.append("Min signal = ").append(MathUtils.rounded(minSignal)).append(iUnits).append('\n');
sb.append("Max signal = ").append(MathUtils.rounded(maxSignal)).append(iUnits).append('\n');
sb.append("Av signal = ").append(MathUtils.rounded(signalPerFrame)).append(iUnits).append('\n');
if (fixedDepth) {
sb.append("Fixed depth = ").append(MathUtils.rounded(depth)).append(zUnits).append('\n');
}
gd.addMessage(sb.toString());
// NOTE(review): "Flourophore" is misspelled ("Fluorophore"); renaming the label would
// change the recorded macro option name - confirm before fixing.
gd.addCheckbox("Flourophore_simulation", fullSimulation);
gd.addNumericField("Gaussian_SD", sd, 3, 8, "nm");
gd.addNumericField("Pixel_pitch", cal.getNmPerPixel(), 3, 8, "nm");
gd.addNumericField("Background", background, 3, 8, "photon");
// Camera type does not need the full simulation settings. Plus the units are different
// so just re-implement.
gd.addChoice("Camera_type", SettingsManager.getCameraTypeNames(), CalibrationProtosHelper.getName(cal.getCameraType()), new OptionListener<Integer>() {
@Override
public boolean collectOptions(Integer field) {
// Invoked when the camera type choice changes: update the calibration then prompt
cal.setCameraType(SettingsManager.getCameraTypeValues()[field]);
return collectOptions(false);
}
@Override
public boolean collectOptions() {
// Invoked when collecting options without a change event (silent mode)
return collectOptions(true);
}
private boolean collectOptions(boolean silent) {
final CameraType cameraType = cal.getCameraType();
final boolean isCcd = CalibrationProtosHelper.isCcdCameraType(cameraType);
final ExtendedGenericDialog egd = new ExtendedGenericDialog(TITLE, null);
// Field order here must match the getNext* order below
if (isCcd) {
egd.addNumericField("Total_gain", cal.getCountPerPhoton(), 3, 8, "count/photon");
egd.addNumericField("Quantum_efficiency", cal.getQuantumEfficiency(), 3, 8, "e-/photon");
egd.addNumericField("Read_noise", cal.getReadNoise(), 3, 8, "count");
egd.addNumericField("Bias", cal.getBias(), 3, 8, "count");
} else if (cameraType == CameraType.SCMOS) {
// sCMOS cameras use a named per-pixel camera model instead of scalar gain/noise
final String[] models = CameraModelManager.listCameraModels(true);
egd.addChoice("Camera_model_name", models, cal.getCameraModelName());
egd.addNumericField("Quantum_efficiency", cal.getQuantumEfficiency(), 2, 6, "electron/photon");
} else {
IJ.error("Unsupported camera type " + CalibrationProtosHelper.getName(cameraType));
return false;
}
egd.setSilent(silent);
egd.showDialog(true, gd);
if (egd.wasCanceled()) {
return false;
}
// Read back the fields in the same order they were added
if (isCcd) {
cal.setCountPerPhoton(egd.getNextNumber());
cal.setQuantumEfficiency(egd.getNextNumber());
cal.setReadNoise(egd.getNextNumber());
cal.setBias(egd.getNextNumber());
} else if (cameraType == CameraType.SCMOS) {
cal.setCameraModelName(egd.getNextChoice());
cal.setQuantumEfficiency(Math.abs(egd.getNextNumber()));
}
return true;
}
});
if (!fixedDepth) {
// NOTE(review): the unit label says "pixel" but depth was measured from the z data in
// the results' distance units (zUnits) - confirm which is intended.
gd.addNumericField("Depth", depth, 3, 8, "pixel");
}
gd.addHelp(HelpUrls.getUrl("load-benchmark-data"));
gd.showDialog();
if (gd.wasCanceled()) {
return null;
}
// Read back in the exact order the fields were added above
fullSimulation = gd.getNextBoolean();
sd = gd.getNextNumber();
cal.setNmPerPixel(gd.getNextNumber());
background = gd.getNextNumber();
cal.setCameraType(SettingsManager.getCameraTypeValues()[gd.getNextChoiceIndex()]);
float myDepth = depth;
if (!fixedDepth) {
myDepth = (float) gd.getNextNumber();
// The user cannot enter a depth smaller than the range observed in the data
if (myDepth < depth) {
IJ.error(TITLE, String.format("Input depth is smaller than the depth guessed from the data: %f < %f", myDepth, depth));
return null;
}
depth = myDepth;
}
// Trigger the camera-type OptionListener to collect any pending camera options
gd.collectOptions();
// Validate settings
Rectangle modelBounds = null;
try {
ParameterUtils.isAboveZero("Gaussian SD", sd);
ParameterUtils.isAboveZero("Pixel pitch", cal.getNmPerPixel());
ParameterUtils.isPositive("Background", background);
ParameterUtils.isAboveZero("Quantum efficiency", cal.getQuantumEfficiency());
ParameterUtils.isEqualOrBelow("Quantum efficiency", cal.getQuantumEfficiency(), 1);
if (cal.isCcdCamera()) {
ParameterUtils.isAboveZero("Total gain", cal.getCountPerPhoton());
ParameterUtils.isPositive("Read noise", cal.getReadNoise());
ParameterUtils.isPositive("Bias", cal.getBias());
} else if (cal.isScmos()) {
// Load the model
cameraModel = CameraModelManager.load(cal.getCameraModelName());
if (cameraModel == null) {
IJ.error(TITLE, "Unknown camera model for name: " + cal.getCameraModelName());
return null;
}
// Crop the camera model to the image size at the last-used origin (if any)
int ox = 0;
int oy = 0;
if (lastCameraBounds != null) {
ox = lastCameraBounds.x;
oy = lastCameraBounds.y;
}
cameraModel = PeakFit.cropCameraModel(cameraModel, new Rectangle(ox, oy, imp.getWidth(), imp.getHeight()), null, false);
modelBounds = cameraModel.getBounds();
final IJImageSource imageSource = (IJImageSource) results.getSource();
imageSource.setOrigin(modelBounds.x, modelBounds.y);
// Per-pixel model replaces the scalar (global) camera settings
cal.clearGlobalCameraSettings();
} else {
IJ.error(TITLE, "Unknown camera type: " + cal.getCameraType());
return null;
}
} catch (final IllegalArgumentException ex) {
// ParameterUtils throws IllegalArgumentException with a descriptive message
IJ.error(TITLE, ex.getMessage());
return null;
}
// Store calibration
results.setCalibration(cal.getCalibration());
final double a = cal.getNmPerPixel();
final double bias = cal.getBias();
final double gain = cal.getCountPerPhoton();
final double readNoise = cal.getReadNoise();
final double qe = cal.getQuantumEfficiency();
// Note: The calibration will throw an exception if the converter cannot be created.
// This is OK as the data will be invalid.
// Convert +/- depth to total depth in nm
depth = cal.getDistanceConverter(DistanceUnit.NM).convert(depth * 2);
// Compute total background variance in photons
final double backgroundVariance = background;
// Do not add EM-CCD noise factor. The Mortensen formula also includes this factor
// so this is "double-counting" the EM-CCD.
// if (emCCD)
// backgroundVariance *= 2;
// Read noise is in ADUs. Convert to Photons to get contribution to background variance
final double readNoiseInPhotons = readNoise / gain;
// Get the expected value at each pixel in photons. Assuming a Poisson distribution this
// is equal to the total variance at the pixel.
final double b2 = backgroundVariance + readNoiseInPhotons * readNoiseInPhotons;
// Convert values to photons
final TypeConverter<IntensityUnit> ic = cal.getIntensityConverter(IntensityUnit.PHOTON);
final SimulationParameters p = new SimulationParameters(molecules, fullSimulation, sd, a, ic.convert(minSignal), ic.convert(maxSignal), ic.convert(signalPerFrame), depth, fixedDepth, bias, gain, qe, readNoise, cal.getCameraType(), cal.getCameraModelName(), modelBounds, background, b2, createPsf(sd / a));
// Mark the parameters as loaded (rather than simulated) benchmark data
p.loaded = true;
return p;
}
Use of uk.ac.sussex.gdsc.smlm.ij.IJImageSource in project GDSC-SMLM by aherbert.
Class PeakResultTableModelFrame, method doSourceShowImage.
/**
 * Displays the image source of the table model: brings the window to the front if the image
 * is already open, otherwise re-opens it from file (IJImageSource) or shows it as a virtual
 * stack (SeriesImageSource).
 */
private void doSourceShowImage() {
  final PeakResultTableModel model = getModel();
  if (model == null) {
    return;
  }
  final ImageSource source = model.getSource();
  if (source == null) {
    return;
  }

  // Already open? Just bring the existing window to the front.
  final ImagePlus existing = WindowManager.getImage(source.getName());
  if (existing != null) {
    existing.getWindow().toFront();
    return;
  }

  if (source instanceof IJImageSource) {
    // Re-open the original image from its file path, if the file still exists
    final String path = ((IJImageSource) source).getPath();
    if (path == null || !new File(path).exists()) {
      IJ.log("Cannot find the image source: " + path);
    } else {
      IJ.showStatus("Opening image ...");
      IJ.open(path);
      IJ.showStatus("");
    }
  } else if (source instanceof SeriesImageSource) {
    // Show a TIFF series as a virtual stack: no memory buffer, random slice access
    final SeriesImageSource series = (SeriesImageSource) source;
    series.setBufferLimit(0);
    series.setReadHint(ReadHint.NONSEQUENTIAL);
    if (source.open()) {
      new TiffSeriesVirtualStack(series).show();
    } else {
      IJ.log("Cannot open the series image source");
    }
  }
}
Use of uk.ac.sussex.gdsc.smlm.ij.IJImageSource in project GDSC-SMLM by aherbert.
Class PeakFit, method addSingleFrameOverlay.
/**
 * If a single frame was processed, offers to add the fitted localisations held in memory as a
 * point-ROI overlay on the source image, coloured by their position in the results.
 */
private void addSingleFrameOverlay() {
  // Resolve the image to annotate; for fit-maxima mode look it up by the source name
  ImagePlus targetImp = this.imp;
  if (fitMaxima && singleFrame > 0 && source instanceof IJImageSource) {
    targetImp = WindowManager.getImage(source.getName());
  }
  if (singleFrame <= 0 || targetImp == null) {
    return;
  }

  // Find the first in-memory results set; nothing to draw without one
  MemoryPeakResults memoryResults = null;
  for (final PeakResults r : this.results.toArray()) {
    if (r instanceof MemoryPeakResults) {
      memoryResults = (MemoryPeakResults) r;
      break;
    }
  }
  if (memoryResults == null || memoryResults.size() == 0) {
    return;
  }

  // Confirm with the user before modifying the image overlay
  final ExtendedGenericDialog gd = new ExtendedGenericDialog(TITLE);
  gd.enableYesNoCancel();
  gd.hideCancelButton();
  gd.addMessage("Add the fitted localisations as an overlay?");
  gd.showDialog();
  if (!gd.wasOKed()) {
    return;
  }

  // One point ROI per localisation, coloured along the LUT by result order
  final LUT lut = LutHelper.createLut(LutColour.ICE);
  final Overlay overlay = new Overlay();
  final int total = memoryResults.size();
  final Counter remaining = new Counter(total);
  final ImagePlus overlayImp = targetImp;
  memoryResults.forEach(DistanceUnit.PIXEL, (XyResultProcedure) (x, y) -> {
    final PointRoi roi = new OffsetPointRoi(x, y);
    final Color colour = LutHelper.getColour(lut, remaining.decrementAndGet(), total);
    roi.setStrokeColor(colour);
    roi.setFillColor(colour);
    if (overlayImp.getStackSize() > 1) {
      // Pin the ROI to the processed frame when the image is a stack
      roi.setPosition(singleFrame);
    }
    overlay.add(roi);
  });
  overlayImp.setOverlay(overlay);
  overlayImp.getWindow().toFront();
}
Aggregations