Use of uk.ac.sussex.gdsc.core.annotation.Nullable in project gdsc by aherbert: class FindFoci_PlugIn, method getBatchImages.
/**
 * Gets the batch images from a directory.
 *
 * <p>Lists the files in the directory, keeps only image files (as determined by the ImageJ
 * FolderOpener) and excludes mask images.
 *
 * @param directory the directory to search
 * @return the batch image filenames (or null if the directory cannot be listed or contains no
 *         image files)
 */
@Nullable
public static String[] getBatchImages(String directory) {
  if (directory == null) {
    return null;
  }
  // Get a list of files
  final File[] fileList = (new File(directory)).listFiles();
  if (fileList == null) {
    return null;
  }
  // Exclude directories
  String[] list = new String[fileList.length];
  int count = 0;
  for (int i = 0; i < list.length; i++) {
    if (fileList[i].isFile()) {
      list[count++] = fileList[i].getName();
    }
  }
  list = Arrays.copyOf(list, count);
  // Now exclude non-image files as per the ImageJ FolderOpener
  final FolderOpener fo = new FolderOpener();
  list = fo.trimFileList(list);
  if (list == null) {
    return null;
  }
  list = fo.sortFileList(list);
  // Now exclude mask images
  count = 0;
  for (final String name : list) {
    if (name.contains("mask.")) {
      continue;
    }
    list[count++] = name;
  }
  return Arrays.copyOf(list, count);
}
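Because the method is annotated @Nullable, callers must handle a null return (a null or unreadable directory, or one with no image files). A minimal caller sketch, assuming FindFoci_PlugIn and ij.IJ are on the classpath; the directory path and log messages are illustrative assumptions, not taken from the plugin:

// Hypothetical batch directory, for illustration only.
final String[] images = FindFoci_PlugIn.getBatchImages("/data/findfoci/batch");
if (images == null) {
  // Directory missing, unreadable, or containing no image files.
  IJ.log("No batch images found");
} else {
  for (final String name : images) {
    IJ.log("Batch image: " + name);
  }
}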
Use of uk.ac.sussex.gdsc.core.annotation.Nullable in project gdsc by aherbert: class FindFociOptimiser_PlugIn, method runOptimiser.
/**
 * Enumerate the parameters for FindFoci on the provided image.
 *
 * <p>Returns null if the image is invalid, there are no ROI points inside the mask, the
 * algorithm was cancelled or cannot produce results.
 *
 * @param imp the image
 * @param mask the mask
 * @param ticker the progress ticker
 * @return the optimiser results (or null)
 */
@Nullable
private OptimiserResult runOptimiser(ImagePlus imp, ImagePlus mask, Ticker ticker) {
  if (invalidImage(imp)) {
    return null;
  }
  final AssignedPoint[] roiPoints = extractRoiPoints(imp, mask);
  if (roiPoints.length == 0) {
    IJ.showMessage("Error", "Image must have a point ROI or corresponding ROI file");
    return null;
  }
  final ArrayList<Result> results = new ArrayList<>(combinations);
  // Set the threshold for assigning point matches as a fraction of the image size
  final double distanceThreshold =
      getDistanceThreshold(imp, settings.matchSearchMethod, settings.matchSearchDistance);
  final ToDoubleBiFunction<Coordinate, Coordinate> distanceFunction =
      CoordinateUtils.getSquaredDistanceFunction(imp.getCalibration(), is3D(roiPoints));
  // The stopwatch for the total run-time
  final StopWatch sw = new StopWatch();
  // The total time for analysis
  long analysisTime = 0;
  final FindFociBaseProcessor ff = new FindFoci_PlugIn().createFindFociProcessor(imp);
  final FindFociProcessorOptions processorOptions = new FindFociProcessorOptions(true);
  // Only one supported peak method.
  // The parameter values for absolute height and relative height are on a different scale
  // and using both methods is not yet supported.
  processorOptions.setPeakMethod(settings.peakMethod);
  processorOptions.setMaxPeaks(settings.maxPeaks);
  int id = 0;
  for (int blurCount = 0; blurCount < blurArray.length; blurCount++) {
    final double blur = blurArray[blurCount];
    processorOptions.setGaussianBlur(blur);
    final StopWatch sw0 = new StopWatch();
    final ImagePlus imp2 = ff.blur(imp, blur);
    sw0.stop();
    // Iterate over the options
    int thresholdMethodIndex = 0;
    for (int b = 0; b < backgroundMethodArray.length; b++) {
      processorOptions.setBackgroundMethod(backgroundMethodArray[b]);
      if (backgroundMethodArray[b] == BackgroundMethod.AUTO_THRESHOLD) {
        processorOptions.setThresholdMethod(thresholdMethodArray[thresholdMethodIndex++]);
      }
      final StatisticsMethod[] statisticsMethods =
          backgroundMethodHasStatisticsMode(backgroundMethodArray[b])
              ? statisticsMethodArray
              : new StatisticsMethod[] {StatisticsMethod.ALL};
      for (final StatisticsMethod statisticsMethod : statisticsMethods) {
        processorOptions.setStatisticsMethod(statisticsMethod);
        final StopWatch sw1 = sw0.create();
        final FindFociInitResults initResults =
            ff.findMaximaInit(imp, imp2, mask, processorOptions);
        sw1.stop();
        if (initResults == null) {
          return null;
        }
        FindFociInitResults searchInitArray = null;
        for (double backgroundParameter = backgroundParameterMinArray[b];
            backgroundParameter <= backgroundParameterMax;
            backgroundParameter += backgroundParameterInterval) {
          // Use zero when there is no parameter
          processorOptions.setBackgroundParameter(
              backgroundMethodHasParameter(backgroundMethodArray[b]) ? backgroundParameter : 0);
          // Log on first blur iteration
          boolean logBackground = (blurCount == 0) && !multiMode;
          for (int s = 0; s < searchMethodArray.length; s++) {
            processorOptions.setSearchMethod(searchMethodArray[s]);
            for (double searchParameter = searchParameterMinArray[s];
                searchParameter <= searchParameterMax;
                searchParameter += searchParameterInterval) {
              // Use zero when there is no parameter
              processorOptions.setSearchParameter(
                  searchMethodHasParameter(searchMethodArray[s]) ? searchParameter : 0);
              searchInitArray = ff.copyForStagedProcessing(initResults, searchInitArray);
              final StopWatch sw2 = sw1.create();
              final FindFociSearchResults searchArray =
                  ff.findMaximaSearch(searchInitArray, processorOptions);
              sw2.stop();
              if (searchArray == null) {
                return null;
              }
              FindFociInitResults mergeInitArray = null;
              if (logBackground) {
                // Log the background level on the first occurrence
                final float backgroundLevel = searchInitArray.stats.background;
                logBackground = false;
                IJ.log(String.format("Background level - %s %s: %s = %g",
                    backgroundMethodArray[b].getDescription(),
                    backgroundMethodHasStatisticsMode(backgroundMethodArray[b])
                        ? "(" + statisticsMethod + ") "
                        : "",
                    backgroundMethodHasParameter(backgroundMethodArray[b])
                        ? IJ.d2s(backgroundParameter, 2)
                        : processorOptions.getThresholdMethod().getDescription(),
                    backgroundLevel));
              }
              // Note: Currently only 1 PeakMethod is supported so there is no iteration over this
              for (double peakParameter = peakParameterMin;
                  peakParameter <= peakParameterMax;
                  peakParameter += peakParameterInterval) {
                processorOptions.setPeakParameter(peakParameter);
                final StopWatch sw3 = sw2.create();
                final FindFociMergeTempResults mergePeakResults =
                    ff.findMaximaMergePeak(searchInitArray, searchArray, processorOptions);
                sw3.stop();
                for (int minSize = minSizeMin; minSize <= minSizeMax; minSize += minSizeInterval) {
                  processorOptions.setMinSize(minSize);
                  final StopWatch sw4 = sw3.create();
                  final FindFociMergeTempResults mergeSizeResults =
                      ff.findMaximaMergeSize(searchInitArray, mergePeakResults, processorOptions);
                  sw4.stop();
                  for (final EnumSet<AlgorithmOption> options : optionsArray) {
                    processorOptions.setOptions(options);
                    mergeInitArray = ff.copyForStagedProcessing(searchInitArray, mergeInitArray);
                    final StopWatch sw5 = sw4.create();
                    final FindFociMergeResults mergeArray =
                        ff.findMaximaMergeFinal(mergeInitArray, mergeSizeResults, processorOptions);
                    sw5.stop();
                    if (mergeArray == null) {
                      return null;
                    }
                    for (final SortMethod sortMethod : sortMethodArray) {
                      processorOptions.setSortMethod(sortMethod);
                      for (int c = 0; c < centreMethodArray.length; c++) {
                        processorOptions.setCentreMethod(centreMethodArray[c]);
                        for (double centreParameter = centreParameterMinArray[c];
                            centreParameter <= centreParameterMaxArray[c];
                            centreParameter += centreParameterIntervalArray[c]) {
                          processorOptions.setCentreParameter(centreParameter);
                          final StopWatch sw6 = sw5.create();
                          final FindFociResults peakResults =
                              ff.findMaximaResults(mergeInitArray, mergeArray, processorOptions);
                          final long time = sw6.stop();
                          ticker.tick();
                          if (peakResults != null) {
                            // Get the results.
                            // The analysis time is not included in the speed-up factor.
                            final long start = System.nanoTime();
                            final Parameters runOptions = new Parameters(processorOptions);
                            final Result result = analyseResults(id, roiPoints,
                                peakResults.results, distanceThreshold, runOptions, time,
                                settings.beta, distanceFunction);
                            results.add(result);
                            analysisTime += System.nanoTime() - start;
                          }
                          id++;
                          if (IJ.escapePressed()) {
                            return null;
                          }
                        }
                      }
                    }
                  }
                }
              }
            }
          }
        }
      }
    }
  }
  sw.stop();
  // All possible results sort methods are highest first
  sortResults(results, settings.resultsSortMethod);
  return new OptimiserResult(results, sw.getTime(), analysisTime);
}
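The nested loops above enumerate every combination of blur, background, statistics, search, peak, size, option, sort and centre parameters, which is why the results list is pre-sized with combinations and why the ticker advances once per innermost iteration. A self-contained sketch of the same grid-enumeration pattern, using illustrative parameter ranges rather than the optimiser's defaults:

// Count the combinations of an illustrative three-level parameter grid.
final double[] blurs = {0, 0.5, 1};
final double backgroundMin = 1;
final double backgroundMax = 3;
final double backgroundInterval = 1;
final double searchMin = 0;
final double searchMax = 1;
final double searchInterval = 0.5;

int combinations = 0;
for (final double blur : blurs) {
  for (double b = backgroundMin; b <= backgroundMax; b += backgroundInterval) {
    for (double s = searchMin; s <= searchMax; s += searchInterval) {
      combinations++;
    }
  }
}
// 3 blurs x 3 background values x 3 search values = 27 combinations
System.out.println("Total parameter combinations: " + combinations);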
Use of uk.ac.sussex.gdsc.core.annotation.Nullable in project gdsc by aherbert: class FindFociOptimisedIntProcessor, method getSortedMaxpoints.
/**
* {@inheritDoc}
*/
@Nullable
@Override
protected Coordinate[] getSortedMaxpoints(Object pixels, int[] maxima, byte[] types,
    float globalMin, float threshold) {
  final ArrayList<Coordinate> maxpoints = new ArrayList<>(500);
  // Working list for expanding local plateaus
  final IntArrayList pointList = new IntArrayList();
  // Int processing
  final int valueMin = (int) globalMin;
  final int valueThreshold = (int) threshold;
  int id = 0;
  final int[] xyz = new int[3];
  setPixels(pixels);
  if (is2D()) {
    for (int i = maxxByMaxyByMaxz; i-- > 0;) {
      if ((types[i] & (EXCLUDED | MAX_AREA | PLATEAU | NOT_MAXIMUM)) != 0) {
        continue;
      }
      final int v = image[i];
      if (v < valueThreshold || v == valueMin) {
        continue;
      }
      getXy(i, xyz);
      final int x = xyz[0];
      final int y = xyz[1];
      // Check whether we have a local maximum.
      final boolean isInnerXy = (y != 0 && y != ylimit) && (x != 0 && x != xlimit);
      boolean isMax = true;
      boolean equalNeighbour = false;
      for (int d = 8; d-- > 0;) {
        if (isInnerXy || isWithinXy(x, y, d)) {
          final int vNeighbor = image[i + offset[d]];
          if (vNeighbor > v) {
            isMax = false;
            break;
          } else if (vNeighbor == v) {
            // Neighbour is equal, this is a potential plateau maximum
            equalNeighbour = true;
          } else {
            // This is lower so cannot be a maximum
            types[i + offset[d]] |= NOT_MAXIMUM;
          }
        }
      }
      if (isMax) {
        id++;
        if (id >= searchCapacity) {
          IJ.log("The number of potential maxima exceeds the search capacity: " + searchCapacity
              + ". Try using a denoising/smoothing filter or increase the capacity.");
          return null;
        }
        if (equalNeighbour) {
          // Search the local area marking all equal neighbour points as maximum
          if (!expandMaximum(maxima, types, valueMin, valueThreshold, i, v, id, maxpoints,
              pointList)) {
            // Not a true maximum, ignore this
            id--;
          }
        } else {
          types[i] |= MAXIMUM | MAX_AREA;
          maxima[i] = id;
          maxpoints.add(new Coordinate(getIndex(x, y), id, v));
        }
      }
    }
  } else {
    for (int i = maxxByMaxyByMaxz; i-- > 0;) {
      if ((types[i] & (EXCLUDED | MAX_AREA | PLATEAU | NOT_MAXIMUM)) != 0) {
        continue;
      }
      final int v = image[i];
      if (v < valueThreshold || v == valueMin) {
        continue;
      }
      getXyz(i, xyz);
      final int x = xyz[0];
      final int y = xyz[1];
      final int z = xyz[2];
      // Check whether we have a local maximum.
      final boolean isInnerXy = (y != 0 && y != ylimit) && (x != 0 && x != xlimit);
      final boolean isInnerXyz = (zlimit == 0) ? isInnerXy : isInnerXy && (z != 0 && z != zlimit);
      boolean isMax = true;
      boolean equalNeighbour = false;
      for (int d = 26; d-- > 0;) {
        if (isInnerXyz || (isInnerXy && isWithinZ(z, d)) || isWithinXyz(x, y, z, d)) {
          final int vNeighbor = image[i + offset[d]];
          if (vNeighbor > v) {
            isMax = false;
            break;
          } else if (vNeighbor == v) {
            // Neighbour is equal, this is a potential plateau maximum
            equalNeighbour = true;
          } else {
            // This is lower so cannot be a maximum
            types[i + offset[d]] |= NOT_MAXIMUM;
          }
        }
      }
      if (isMax) {
        id++;
        if (id >= searchCapacity) {
          IJ.log("The number of potential maxima exceeds the search capacity: " + searchCapacity
              + ". Try using a denoising/smoothing filter or increase the capacity.");
          return null;
        }
        if (equalNeighbour) {
          // Search the local area marking all equal neighbour points as maximum
          if (!expandMaximum(maxima, types, valueMin, valueThreshold, i, v, id, maxpoints,
              pointList)) {
            // Not a true maximum, ignore this
            id--;
          }
        } else {
          types[i] |= MAXIMUM | MAX_AREA;
          maxima[i] = id;
          maxpoints.add(new Coordinate(getIndex(x, y, z), id, v));
        }
      }
    }
  }
  if (ImageJUtils.isInterrupted()) {
    return null;
  }
  for (int i = maxxByMaxyByMaxz; i-- > 0;) {
    // Reset attributes no longer needed
    types[i] &= ~NOT_MAXIMUM;
  }
  Collections.sort(maxpoints, Coordinate::compare);
  // Build a map between the original id and the new id following the sort
  final int[] idMap = new int[maxpoints.size() + 1];
  // Label the points
  for (int i = 0; i < maxpoints.size(); i++) {
    final int newId = (i + 1);
    final int oldId = maxpoints.get(i).getId();
    idMap[oldId] = newId;
    maxpoints.get(i).setId(newId);
  }
  reassignMaxima(maxima, idMap);
  return maxpoints.toArray(new Coordinate[0]);
}
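After the candidate maxima are sorted, idMap translates each original id into its rank-ordered id and reassignMaxima relabels the pixel-indexed maxima array accordingly. A standalone sketch of that relabelling step; the array contents are illustrative and the processor's actual reassignMaxima implementation may differ:

// Relabel a maxima array using an old-id -> new-id map (index 0 is background).
final int[] maxima = {0, 3, 3, 1, 0, 2};
final int[] idMap = {0, 2, 3, 1}; // old id 1 -> 2, old id 2 -> 3, old id 3 -> 1
for (int i = 0; i < maxima.length; i++) {
  maxima[i] = idMap[maxima[i]];
}
// maxima is now {0, 1, 1, 2, 0, 3}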
Use of uk.ac.sussex.gdsc.core.annotation.Nullable in project gdsc by aherbert: class AssignFociToClusters_PlugIn, method getPoints.
@Nullable
private ArrayList<ClusterPoint> getPoints() {
if (results == null) {
return null;
}
final ArrayList<ClusterPoint> points = new ArrayList<>(results.size());
// Image values correspond to the reverse order of the results.
for (int i = 0, id = results.size(); i < results.size(); i++, id--) {
final FindFociResult result = results.get(i);
points.add(ClusterPoint.newClusterPoint(id, result.x, result.y, getWeight(result)));
}
return points;
}
Use of uk.ac.sussex.gdsc.core.annotation.Nullable in project gdsc-smlm by aherbert: class DriftCalculator, method calculateUsingFrames.
/**
 * Calculates drift using images from N consecutive frames aligned to the overall image.
 *
 * @param results the localisation results
 * @param limits the frame limits of the results
 * @param reconstructionSize the size of the image reconstructed for alignment
 * @return the drift { dx[], dy[] }
 */
@Nullable
private double[][] calculateUsingFrames(MemoryPeakResults results, int[] limits,
    int reconstructionSize) {
  // Extract the localisations into blocks of N consecutive frames
  final BlockPeakResultProcedure p = new BlockPeakResultProcedure(settings);
  results.sort();
  results.forEach(p);
  final List<List<Localisation>> blocks = p.blocks;
  if (blocks.size() <= 1) {
    tracker.log("ERROR : Require at least 2 images for drift calculation");
    return null;
  }
  // Check the final block has enough localisations
  final List<Localisation> nextBlock = p.nextBlock;
  if (nextBlock.size() < settings.minimimLocalisations) {
    blocks.remove(blocks.size() - 1);
    if (blocks.size() <= 1) {
      tracker.log("ERROR : Require at least 2 images for drift calculation");
      return null;
    }
    final List<Localisation> combinedBlock = blocks.get(blocks.size() - 1);
    combinedBlock.addAll(nextBlock);
  }
  // Find the average time point for each block
  final int[] blockT = new int[blocks.size()];
  int time = 0;
  for (final List<Localisation> block : blocks) {
    long sum = 0;
    for (final Localisation r : block) {
      sum += r.time;
    }
    blockT[time++] = (int) (sum / block.size());
  }
  // Calculate a scale to use when constructing the images for alignment
  final Rectangle bounds = results.getBounds(true);
  final ResultsImageSettings.Builder builder = ResultsImageSettings.newBuilder()
      .setImageSizeMode(ResultsImageSizeMode.IMAGE_SIZE)
      .setImageSize(reconstructionSize);
  final float scale = ImagePeakResultsFactory.getScale(builder, bounds, 1);
  executor = Executors.newFixedThreadPool(Prefs.getThreads());
  final double[] dx = new double[limits[1] + 1];
  final double[] dy = new double[dx.length];
  final double[] originalDriftTimePoints = getOriginalDriftTimePoints(dx, blockT);
  lastdx = null;
  final double smoothing = updateSmoothingParameter(originalDriftTimePoints);
  double change = calculateDriftUsingFrames(blocks, blockT, bounds, scale, dx, dy,
      originalDriftTimePoints, smoothing, settings.iterations);
  if (Double.isNaN(change) || tracker.isEnded()) {
    return null;
  }
  plotDrift(limits, dx, dy);
  ImageJUtils.log("Drift Calculator : Initial drift " + MathUtils.rounded(change));
  for (int i = 1; i <= settings.maxIterations; i++) {
    change = calculateDriftUsingFrames(blocks, blockT, bounds, scale, dx, dy,
        originalDriftTimePoints, smoothing, settings.iterations);
    if (Double.isNaN(change)) {
      return null;
    }
    plotDrift(limits, dx, dy);
    if (converged(i, change, getTotalDrift(dx, dy, originalDriftTimePoints))) {
      break;
    }
  }
  if (tracker.isEnded()) {
    return null;
  }
  plotDrift(limits, dx, dy);
  return new double[][] {dx, dy};
}
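Each block of consecutive frames is reduced to a single representative time point, the integer mean of its localisation frames, and these time points are then used as the known drift time points when building the dx/dy arrays. A standalone sketch of that per-block averaging; the frame numbers are illustrative:

// Average frame time per block of localisations (illustrative frame numbers).
final int[][] blocks = {{1, 2, 3}, {10, 12}, {20, 21, 22, 23}};
final int[] blockT = new int[blocks.length];
for (int b = 0; b < blocks.length; b++) {
  long sum = 0;
  for (final int frame : blocks[b]) {
    sum += frame;
  }
  blockT[b] = (int) (sum / blocks[b].length);
}
// blockT = {2, 11, 21}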