Use of uk.ac.sussex.gdsc.smlm.results.PeakResultPoint in project GDSC-SMLM by aherbert.
Class: ResultsMatchCalculator; method: addPairResult.
/**
 * Format a matched pair as a tab-delimited row: frame, both points, then the XY distance
 * (or "-" when the pair is unmatched).
 */
private static String addPairResult(StringBuilder sb, PointPair pair) {
// The builder is reused across rows; clear any previous content.
sb.setLength(0);
final PeakResultPoint first = (PeakResultPoint) pair.getPoint1();
final PeakResultPoint second = (PeakResultPoint) pair.getPoint2();
// At least one point is present; take the frame from whichever one exists.
final int frame = first == null ? second.getTime() : first.getTime();
sb.append(frame).append('\t');
addPoint(sb, first);
addPoint(sb, second);
final double distance = pair.getXyDistance();
if (distance < 0) {
// A negative distance marks a pair with a missing point.
sb.append("-\t");
} else {
sb.append(rounder.round(distance)).append('\t');
}
return sb.toString();
}
Use of uk.ac.sussex.gdsc.smlm.results.PeakResultPoint in project GDSC-SMLM by aherbert.
Class: ResultsMatchCalculator; method: getCoordinates.
/**
 * Build a map between the peak id (time point) and a list of coordinates.
 *
 * @param results the results
 * @param coordinateMethod the coordinate method
 * @param integerCoordinates True if the values should be rounded down to integers
 * @return the coordinates
 */
public static TIntObjectHashMap<List<Coordinate>> getCoordinates(MemoryPeakResults results, CoordinateMethod coordinateMethod, final boolean integerCoordinates) {
final TIntObjectHashMap<List<Coordinate>> coords = new TIntObjectHashMap<>();
if (results.size() > 0) {
// Avoid repeated map lookups while building: sort the results once and
// accumulate into one list per frame, then transfer the lists to the map.
results.sort();
final int minT = results.getFirstFrame();
final int maxT = results.getLastFrame();
// One list per frame in [minT, maxT]
final ArrayList<ArrayList<Coordinate>> frameLists = new ArrayList<>(maxT - minT + 1);
for (int frame = minT; frame <= maxT; frame++) {
frameLists.add(new ArrayList<Coordinate>());
}
// Distribute each result over its frame range
results.forEach((PeakResultProcedure) result -> {
final float x;
final float y;
final float z;
if (integerCoordinates) {
// Truncate towards zero to integer pixel coordinates
x = (int) result.getXPosition();
y = (int) result.getYPosition();
z = (int) result.getZPosition();
} else {
x = result.getXPosition();
y = result.getYPosition();
z = result.getZPosition();
}
final int startFrame = getStartFrame(result, coordinateMethod);
final int endFrame = getEndFrame(result, coordinateMethod);
for (int frame = startFrame; frame <= endFrame; frame++) {
frameLists.get(frame - minT).add(new PeakResultPoint(frame, x, y, z, result));
}
});
// Transfer the per-frame lists into the map keyed by frame
for (int frame = minT; frame <= maxT; frame++) {
coords.put(frame, frameLists.get(frame - minT));
}
}
return coords;
}
Use of uk.ac.sussex.gdsc.smlm.results.PeakResultPoint in project GDSC-SMLM by aherbert.
Class: ClassificationMatchCalculator; method: runCompareClassifications.
/**
 * Compare the classifications (id and/or category) of localisations that match spatially
 * between the two results sets, and report the Rand index and adjusted Rand index of the
 * two resulting clusterings to a results table.
 *
 * @param results1 the first results set
 * @param results2 the second results set
 */
private void runCompareClassifications(MemoryPeakResults results1, MemoryPeakResults results2) {
// ArrayList: the matches are appended, sized and iterated (no mid-list removal).
final List<PointPair> allMatches = new ArrayList<>();
// Optionally exclude results which do not have an id and/or category
Predicate<PeakResult> test = settings.useId == ClassAnalysis.IGNORE_ZERO ? r -> r.getId() != 0 : null;
if (settings.useCategory == ClassAnalysis.IGNORE_ZERO) {
final Predicate<PeakResult> test2 = r -> r.getCategory() != 0;
test = test == null ? test2 : test.and(test2);
} else if (test == null) {
// No filtering: accept everything
test = r -> true;
}
// Divide the results into time points
final TIntObjectHashMap<List<PeakResultPoint>> coordinates1 = getCoordinates(results1, test);
final TIntObjectHashMap<List<PeakResultPoint>> coordinates2 = getCoordinates(results2, test);
// Process each time point, accumulating the spatial matches
int n1 = 0;
int n2 = 0;
for (final int t : getTimepoints(coordinates1, coordinates2)) {
final Coordinate[] c1 = getCoordinates(coordinates1, t);
final Coordinate[] c2 = getCoordinates(coordinates2, t);
n1 += c1.length;
n2 += c2.length;
final List<PointPair> matches = new ArrayList<>();
MatchCalculator.analyseResults3D(c1, c2, settings.matchDistance, null, null, null, matches);
allMatches.addAll(matches);
}
if (allMatches.isEmpty()) {
IJ.error(TITLE, "No localisation matches between the two results sets");
return;
}
// Get the unique Ids and Categories in the matches.
final Mapper ids = getMapper(allMatches, PeakResult::getId, settings.useId);
final Mapper cats = getMapper(allMatches, PeakResult::getCategory, settings.useCategory);
// Map id/category to an index = stride * cat + id
final int stride = ids.size();
// Any integer is allowed as an index (the Resequencer renumbers arbitrary int labels),
// so up to 2^32 distinct combinations fit in the int bit patterns.
if ((long) stride * cats.size() > 1L << 32) {
IJ.error(TITLE, "Too many combinations of id and category to assign unique labels");
return;
}
// Extract a class label per match for each results set
final int[] set1 = new int[allMatches.size()];
final int[] set2 = new int[allMatches.size()];
int i = 0;
for (final PointPair r : allMatches) {
set1[i] = toIndex(stride, ids, cats, ((PeakResultPoint) r.getPoint1()).getPeakResult());
set2[i] = toIndex(stride, ids, cats, ((PeakResultPoint) r.getPoint2()).getPeakResult());
i++;
}
// Renumber labels to compact sequences for the Rand index computation
final Resequencer re = new Resequencer();
re.setCacheMap(true);
re.renumber(set1);
re.renumber(set2);
// Compare the two clusterings
final RandIndex r = new RandIndex().compute(set1, set2);
final TextWindow resultsWindow = ImageJUtils.refresh(resultsWindowRef, () -> new TextWindow(TITLE + " Results", "Results1\tResults2\tID\tCategory\tn1\tc1\tn2\tc2\tMatched\tRand Index\tAdjusted RI", "", 900, 300));
try (BufferedTextWindow bw = new BufferedTextWindow(resultsWindow)) {
final StringBuilder sb = new StringBuilder(2048);
sb.append(results1.getName()).append('\t');
sb.append(results2.getName()).append('\t');
sb.append(ANALYSIS_OPTION[settings.useId.ordinal()]).append('\t');
sb.append(ANALYSIS_OPTION[settings.useCategory.ordinal()]).append('\t');
sb.append(n1).append('\t');
sb.append(MathUtils.max(set1) + 1).append('\t');
sb.append(n2).append('\t');
sb.append(MathUtils.max(set2) + 1).append('\t');
sb.append(set1.length).append('\t');
sb.append(MathUtils.rounded(r.getRandIndex())).append('\t');
sb.append(MathUtils.rounded(r.getAdjustedRandIndex())).append('\t');
bw.append(sb.toString());
}
}
Use of uk.ac.sussex.gdsc.smlm.results.PeakResultPoint in project GDSC-SMLM by aherbert.
Class: ClassificationMatchCalculator; method: getMapper.
/**
 * Gets the mapper that can create a value from a natural sequence starting from 0 for each unique
 * key in the results. If the analysis is set to ignore then a single mapping to zero is created.
 *
 * @param allMatches the all matches
 * @param fun the function to get the key value
 * @param analysis the type of analysis
 * @return the mapper
 */
private static Mapper getMapper(List<PointPair> allMatches, ToIntFunction<PeakResult> fun, ClassAnalysis analysis) {
if (analysis == ClassAnalysis.IGNORE) {
return Mapper.single();
}
// Find the unique values
final TIntHashSet set = new TIntHashSet();
for (final PointPair r : allMatches) {
set.add(fun.applyAsInt(((PeakResultPoint) r.getPoint1()).getPeakResult()));
set.add(fun.applyAsInt(((PeakResultPoint) r.getPoint2()).getPeakResult()));
}
// Edge case of 1 value
if (set.size() == 1) {
return Mapper.single();
}
// Map to a natural sequence from zero
final int[] keys = set.toArray();
Arrays.sort(keys);
// Check if a discrete sequence already.
// Use long arithmetic: the int subtraction (max - min) overflows when the key span exceeds
// Integer.MAX_VALUE and could then coincidentally equal the set size, selecting an
// incorrect offset mapper.
if ((long) keys[keys.length - 1] - keys[0] == set.size() - 1) {
return Mapper.offset(set.size(), keys[0]);
}
// Map each key to a value starting from 0 (keys are sorted so the mapping is ordered)
final TIntIntHashMap map = new TIntIntHashMap(keys.length);
for (final int k : keys) {
map.put(k, map.size());
}
return new Mapper() {
@Override
public int size() {
return map.size();
}
@Override
public int map(int key) {
return map.get(key);
}
};
}
Use of uk.ac.sussex.gdsc.smlm.results.PeakResultPoint in project GDSC-SMLM by aherbert.
Class: BenchmarkSpotFit; method: runFitting.
/**
 * Run the fitting on every frame of the benchmark image stack and collect the results.
 *
 * <p>Actual coordinates and filter candidates are cached between invocations and refreshed when
 * the simulation id or the candidate settings change. Fitting is performed by a pool of worker
 * threads consuming frame indices from a shared queue.
 *
 * @return the benchmark spot fit results, or null if the run was interrupted
 */
private BenchmarkSpotFitResult runFitting() {
// Extract all the results in memory into a list per frame. This can be cached
boolean refresh = false;
Pair<Integer, TIntObjectHashMap<List<Coordinate>>> coords = coordinateCache.get();
if (coords.getKey() != simulationParameters.id) {
// Do not get integer coordinates
// The Coordinate objects will be PeakResultPoint objects that store the original PeakResult
// from the MemoryPeakResults
coords = Pair.of(simulationParameters.id, ResultsMatchCalculator.getCoordinates(results, false));
coordinateCache.set(coords);
// The coordinates changed so the candidates must also be refreshed below
refresh = true;
}
final TIntObjectHashMap<List<Coordinate>> actualCoordinates = coords.getValue();
// Extract all the candidates into a list per frame. This can be cached if the settings have not
// changed
final int width = (config.isIncludeNeighbours()) ? config.getFittingWidth() : 0;
CandidateData candidateData = candidateDataCache.get();
if (refresh || candidateData == null || candidateData.differentSettings(filterResult.id, settings, width)) {
candidateData = subsetFilterResults(filterResult.filterResults, width);
candidateDataCache.set(candidateData);
}
final StopWatch stopWatch = StopWatch.createStarted();
final ImageStack stack = imp.getImageStack();
clearFitResults();
// Save results to memory
final MemoryPeakResults peakResults = new MemoryPeakResults();
peakResults.copySettings(this.results);
peakResults.setName(TITLE);
config.configureOutputUnits();
final FitConfiguration fitConfig = config.getFitConfiguration();
peakResults.setCalibration(fitConfig.getCalibration());
MemoryPeakResults.addResults(peakResults);
// Create a pool of workers
final int nThreads = Prefs.getThreads();
final BlockingQueue<Integer> jobs = new ArrayBlockingQueue<>(nThreads * 2);
final List<Worker> workers = new LinkedList<>();
final List<Thread> threads = new LinkedList<>();
final Ticker ticker = ImageJUtils.createTicker(stack.getSize(), nThreads, "Fitting frames ...");
// Wrap the shared results sink so concurrent workers can add safely
final PeakResults syncResults = SynchronizedPeakResults.create(peakResults, nThreads);
for (int i = 0; i < nThreads; i++) {
final Worker worker = new Worker(jobs, stack, actualCoordinates, candidateData.filterCandidates, syncResults, ticker);
final Thread t = new Thread(worker);
workers.add(worker);
threads.add(t);
t.start();
}
// Fit the frames
final long startTime = System.nanoTime();
for (int i = 1; i <= stack.getSize(); i++) {
put(jobs, i);
}
// Finish all the worker threads by passing in a null job
// NOTE(review): the poison pill is -1, not null; presumably the Worker treats a
// negative job index as the stop signal — confirm against the Worker implementation.
for (int i = 0; i < threads.size(); i++) {
put(jobs, -1);
}
// Wait for all to finish
for (int i = 0; i < threads.size(); i++) {
try {
threads.get(i).join();
} catch (final InterruptedException ex) {
// Restore the interrupt status before propagating as unchecked
Thread.currentThread().interrupt();
throw new ConcurrentRuntimeException(ex);
}
}
final long runTime = System.nanoTime() - startTime;
threads.clear();
ImageJUtils.finished();
if (ImageJUtils.isInterrupted()) {
return null;
}
stopWatch.stop();
final String timeString = stopWatch.toString();
IJ.log("Spot fit time : " + timeString);
IJ.showStatus("Collecting results ...");
if (fitConfig.isFitCameraCounts()) {
// Convert to photons for consistency
// NOTE(review): this converts the source 'results' field, not the new 'peakResults'
// created above — confirm this is the intended target of the conversion.
results.convertToPreferredUnits();
}
// Merge the per-worker fit results into one map keyed by frame
final TIntObjectHashMap<FilterCandidates> fitResults = new TIntObjectHashMap<>();
for (final Worker w : workers) {
fitResults.putAll(w.results);
}
// Assign a unique ID to each result
int count = 0;
// Materialise into an array since we use it twice
final FilterCandidates[] candidates = fitResults.values(new FilterCandidates[fitResults.size()]);
// First pass: count the results across the four fit types to size the array
for (final FilterCandidates result : candidates) {
for (int i = 0; i < result.fitResult.length; i++) {
final MultiPathFitResult fitResult = result.fitResult[i];
count += count(fitResult.getSingleFitResult());
count += count(fitResult.getMultiFitResult());
count += count(fitResult.getDoubletFitResult());
count += count(fitResult.getMultiDoubletFitResult());
}
}
final PreprocessedPeakResult[] preprocessedPeakResults = new PreprocessedPeakResult[count];
count = 0;
// Second pass: store the results, 'count' tracks the next free slot
for (final FilterCandidates result : candidates) {
for (int i = 0; i < result.fitResult.length; i++) {
final MultiPathFitResult fitResult = result.fitResult[i];
count = store(fitResult.getSingleFitResult(), count, preprocessedPeakResults);
count = store(fitResult.getMultiFitResult(), count, preprocessedPeakResults);
count = store(fitResult.getDoubletFitResult(), count, preprocessedPeakResults);
count = store(fitResult.getMultiDoubletFitResult(), count, preprocessedPeakResults);
}
}
final BenchmarkSpotFitResult newSpotFitResults = new BenchmarkSpotFitResult(simulationParameters.id, fitResults);
newSpotFitResults.distanceInPixels = distanceInPixels;
newSpotFitResults.lowerDistanceInPixels = lowerDistanceInPixels;
newSpotFitResults.stopWatch = stopWatch;
summariseResults(newSpotFitResults, runTime, preprocessedPeakResults, count, candidateData, actualCoordinates);
IJ.showStatus("");
spotFitResults.set(newSpotFitResults);
return newSpotFitResults;
}
Aggregations