Use of gdsc.core.match.FractionClassificationResult in project GDSC-SMLM by aherbert.
Class BenchmarkFilterAnalysis, method findOptimum:
public SearchResult<FilterScore> findOptimum(double[][] points) {
    ga_iteration++;
    SimpleFilterScore max = es_optimum;
    final FilterScoreResult[] scoreResults = scoreFilters(setStrength(new FilterSet(searchSpaceToFilters(points))), false);
    if (scoreResults == null)
        return null;
    for (int index = 0; index < scoreResults.length; index++) {
        final FilterScoreResult scoreResult = scoreResults[index];
        final SimpleFilterScore result = new SimpleFilterScore(scoreResult, true, scoreResult.criteria >= minCriteria);
        if (result.compareTo(max) < 0) {
            max = result;
        }
    }
    es_optimum = max;
    // Add the best filter to the table
    // This filter may not have been part of the scored subset so use the entire results set for reporting
    DirectFilter filter = max.r.filter;
    FractionClassificationResult r = scoreFilter(filter, minimalFilter, ga_resultsList, coordinateStore);
    final StringBuilder text = createResult(filter, r);
    add(text, ga_iteration);
    gaWindow.append(text.toString());
    return new SearchResult<FilterScore>(filter.getParameters(), max);
}
Use of gdsc.core.match.FractionClassificationResult in project GDSC-SMLM by aherbert.
Class BenchmarkFilterAnalysis, method reportResults:
private ComplexFilterScore reportResults(boolean newResults, List<ComplexFilterScore> filters) {
    if (filters.isEmpty()) {
        IJ.log("Warning: No filters pass the criteria");
        return null;
    }
    getCoordinateStore();
    Collections.sort(filters);
    FractionClassificationResult topFilterClassificationResult = null;
    ArrayList<FractionalAssignment[]> topFilterResults = null;
    String topFilterSummary = null;
    if (showSummaryTable || saveTemplate) {
        createSummaryWindow();
        int n = 0;
        final double range = (summaryDepth / simulationParameters.a) * 0.5;
        int np = 0;
        for (double depth : depthStats) {
            if (Math.abs(depth) < range)
                np++;
        }
        for (ComplexFilterScore fs : filters) {
            final ArrayList<FractionalAssignment[]> list = new ArrayList<FractionalAssignment[]>(resultsList.length);
            final FractionClassificationResult r = scoreFilter(fs.getFilter(), minimalFilter, resultsList, list, coordinateStore);
            final StringBuilder sb = createResult(fs.getFilter(), r);
            if (topFilterResults == null) {
                topFilterResults = list;
                topFilterClassificationResult = r;
            }
            // Show the recall at the specified depth. Sum the distance and signal factor of all scored spots.
            int scored = 0;
            double tp = 0, d = 0, sf = 0, rmsd = 0;
            SimpleRegression regression = new SimpleRegression(false);
            for (FractionalAssignment[] assignments : list) {
                if (assignments == null)
                    continue;
                for (int i = 0; i < assignments.length; i++) {
                    final CustomFractionalAssignment c = (CustomFractionalAssignment) assignments[i];
                    if (Math.abs(c.peak.error) <= range)
                        tp += c.getScore();
                    d += c.d;
                    sf += c.getSignalFactor();
                    rmsd += c.d * c.d;
                    regression.addData(c.peakResult.getSignal(), c.peak.getSignal());
                }
                scored += assignments.length;
            }
            final double slope = regression.getSlope();
            sb.append('\t');
            sb.append(Utils.rounded((double) tp / np)).append('\t');
            sb.append(Utils.rounded(d / scored)).append('\t');
            sb.append(Utils.rounded(sf / scored)).append('\t');
            sb.append(Utils.rounded(Math.sqrt(rmsd / scored))).append('\t');
            sb.append(Utils.rounded(slope)).append('\t');
            if (fs.atLimit() != null)
                sb.append(fs.atLimit());
            String text = sb.toString();
            if (topFilterSummary == null) {
                topFilterSummary = text;
                if (!showSummaryTable)
                    break;
            }
            if (fs.time != 0) {
                sb.append('\t');
                sb.append(fs.algorithm);
                sb.append('\t');
                sb.append(org.apache.commons.lang3.time.DurationFormatUtils.formatDurationHMS(fs.time));
            } else
                sb.append("\t\t");
            if (fs.paramTime != 0) {
                sb.append('\t');
                sb.append(fs.getParamAlgorithm());
                sb.append('\t');
                sb.append(org.apache.commons.lang3.time.DurationFormatUtils.formatDurationHMS(fs.paramTime));
            } else
                sb.append("\t\t");
            text = sb.toString();
            if (isHeadless)
                IJ.log(text);
            else
                summaryWindow.append(text);
            n++;
            if (summaryTopN > 0 && n >= summaryTopN)
                break;
        }
        // Add a spacer to the summary table if we have multiple results
        if (n > 1 && showSummaryTable) {
            if (isHeadless)
                IJ.log("");
            else
                summaryWindow.append("");
        }
    }
    DirectFilter bestFilter = filters.get(0).getFilter();
    if (saveBestFilter)
        saveFilter(bestFilter);
    if (topFilterClassificationResult == null) {
        topFilterResults = new ArrayList<FractionalAssignment[]>(resultsList.length);
        topFilterClassificationResult = scoreFilter(bestFilter, minimalFilter, resultsList, topFilterResults, coordinateStore);
    }
    if (newResults || scores.isEmpty()) {
        scores.add(new FilterResult(failCount, residualsThreshold, duplicateDistance, filters.get(0)));
    }
    if (saveTemplate)
        saveTemplate(topFilterSummary);
    showPlots();
    calculateSensitivity();
    topFilterResults = depthAnalysis(topFilterResults, bestFilter);
    topFilterResults = scoreAnalysis(topFilterResults, bestFilter);
    componentAnalysis(topFilterClassificationResult, filters.get(0));
    PreprocessedPeakResult[] filterResults = null;
    if (isShowOverlay())
        filterResults = showOverlay(topFilterResults, bestFilter);
    saveResults(filterResults, bestFilter);
    wo.tile();
    return filters.get(0);
}
Use of gdsc.core.match.FractionClassificationResult in project GDSC-SMLM by aherbert.
Class BenchmarkFilterAnalysis, method scoreFilter:
private FilterScoreResult scoreFilter(DirectFilter filter, DirectFilter minFilter, boolean createTextResult, CoordinateStore coordinateStore) {
    FractionClassificationResult r = scoreFilter(filter, minFilter, ga_resultsListToScore, coordinateStore);
    // // DEBUG - Test if the two methods produce the same results
    // FractionClassificationResult r2 = scoreFilter(filter, minFilter, BenchmarkFilterAnalysis.clonedResultsList);
    // if (!gdsc.core.utils.DoubleEquality.almostEqualRelativeOrAbsolute(r.getTP(), r2.getTP(), 1e-6, 1e-10) ||
    //     !gdsc.core.utils.DoubleEquality.almostEqualRelativeOrAbsolute(r.getFP(), r2.getFP(), 1e-6, 1e-10) ||
    //     !gdsc.core.utils.DoubleEquality.almostEqualRelativeOrAbsolute(r.getFN(), r2.getFN(), 1e-6, 1e-10))
    // {
    //     System.out.printf("TP %f != %f, FP %f != %f, FN %f != %f : %s\n", r.getTP(), r2.getTP(), r.getFP(),
    //             r2.getFP(), r.getFN(), r2.getFN(), filter.getName());
    //
    //     // // Debug
    //     // MultiPathFilter multiPathFilter = createMPF(filter, minFilter);
    //     // multiPathFilter.setDebugFile("/tmp/1.txt");
    //     // multiPathFilter.fractionScoreSubset(ga_resultsListToScore, failCount, nActual, null);
    //     // multiPathFilter = createMPF(filter, minFilter);
    //     // multiPathFilter.setDebugFile("/tmp/2.txt");
    //     // multiPathFilter.fractionScoreSubset(BenchmarkFilterAnalysis.clonedResultsList, failCount,
    //     //         nActual, null);
    // }
    // else
    // {
    //     // System.out.println("Matched scores");
    // }
    final double score = getScore(r);
    final double criteria = getCriteria(r);
    // Show the result if it achieves the criteria limit
    final String text = (createTextResult && criteria >= minCriteria) ? createResult(filter, r).toString() : null;
    return new FilterScoreResult(score, criteria, filter, text);
}
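The getScore and getCriteria calls above reduce the FractionClassificationResult to single numbers. The actual metrics are user-configurable in the plugin, so the following is only a sketch of one plausible mapping (Jaccard as the score, precision as the criteria). It relies only on the getTP()/getFP()/getFN() accessors used by the commented-out debug code above; the method names getScoreSketch and getCriteriaSketch are hypothetical.

// Sketch only: the real getScore/getCriteria depend on the user-selected metrics.
private static double getScoreSketch(FractionClassificationResult r) {
    // Jaccard index = TP / (TP + FP + FN)
    final double tp = r.getTP(), fp = r.getFP(), fn = r.getFN();
    final double sum = tp + fp + fn;
    return (sum == 0) ? 0 : tp / sum;
}

private static double getCriteriaSketch(FractionClassificationResult r) {
    // Precision = TP / (TP + FP)
    final double tp = r.getTP(), fp = r.getFP();
    return (tp + fp == 0) ? 0 : tp / (tp + fp);
}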
Use of gdsc.core.match.FractionClassificationResult in project GDSC-SMLM by aherbert.
Class Filter, method fractionScoreSubset:
/**
 * Filter the results and return the performance score. Allows benchmarking the filter by marking the results as
 * true or false.
 * <p>
 * Input PeakResults must be allocated a score for true positive, false positive, true negative and false negative
 * (accessed via the object property get methods). The filter is run and results that pass accumulate scores for
 * true positive and false positive, otherwise the scores are accumulated for true negative and false negative. The
 * simplest scoring scheme is to mark valid results as tp=fn=1 and fp=tn=0 and invalid results the opposite.
 * <p>
 * The number of consecutive rejections is counted per frame. When the configured number of failures is reached, all
 * remaining results for the frame are rejected. This assumes the results are ordered by frame.
 * <p>
 * Note that this method is intended to score a subset that was generated using
 * {@link #filterSubset(MemoryPeakResults, int)}, since the number of consecutive failures before each peak is
 * expected to be stored in the origX property.
 *
 * @param resultsList
 *            a list of results to analyse
 * @param failures
 *            the number of failures to allow per frame before all peaks are rejected
 * @param tn
 *            The initial true negatives (used when the results have been pre-filtered)
 * @param fn
 *            The initial false negatives (used when the results have been pre-filtered)
 * @param n
 *            The initial negatives (used when the results have been pre-filtered)
 * @return the score
 */
public FractionClassificationResult fractionScoreSubset(List<MemoryPeakResults> resultsList, int failures, double tn, double fn, int n) {
    int p = 0;
    double fp = 0;
    double tp = 0;
    for (MemoryPeakResults peakResults : resultsList) {
        setup(peakResults);
        int frame = -1;
        int failCount = 0;
        for (PeakResult peak : peakResults.getResults()) {
            // Reset fail count for new frames
            if (frame != peak.getFrame()) {
                frame = peak.getFrame();
                failCount = 0;
            }
            failCount += peak.origX;
            // Reject all peaks if we have exceeded the fail count
            final boolean isPositive;
            if (failCount > failures) {
                isPositive = false;
            } else {
                // Otherwise assess the peak
                isPositive = accept(peak);
            }
            if (isPositive) {
                failCount = 0;
            } else {
                failCount++;
            }
            if (isPositive) {
                p++;
                tp += peak.getTruePositiveScore();
                fp += peak.getFalsePositiveScore();
            } else {
                fn += peak.getFalseNegativeScore();
                tn += peak.getTrueNegativeScore();
            }
        }
        n += peakResults.size();
        end();
    }
    n -= p;
    return new FractionClassificationResult(tp, fp, tn, fn, p, n);
}
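A caller would typically convert the returned FractionClassificationResult into familiar classification metrics. The fragment below is a minimal usage sketch: filter (a concrete Filter implementation), subset (a list produced by filterSubset as described in the javadoc) and failures are assumed to exist, and the initial tn/fn/n arguments are shown as zeros purely as placeholders. Only the getTP()/getFP()/getFN() accessors seen elsewhere on this page are used.

// Sketch only: placeholder inputs; the initial tn/fn/n values would come from the pre-filtering step.
FractionClassificationResult score = filter.fractionScoreSubset(subset, failures, 0, 0, 0);
// Convert the fractional counts into standard metrics
final double tp = score.getTP(), fp = score.getFP(), fn = score.getFN();
final double precision = (tp + fp == 0) ? 0 : tp / (tp + fp);
final double recall = (tp + fn == 0) ? 0 : tp / (tp + fn);
final double jaccard = (tp + fp + fn == 0) ? 0 : tp / (tp + fp + fn);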
Use of gdsc.core.match.FractionClassificationResult in project GDSC-SMLM by aherbert.
Class Filter, method fractionScore2:
/**
 * Filter the results and return the performance score. Allows benchmarking the filter by marking the results as
 * true or false.
 * <p>
 * Input PeakResults must be allocated a score for true positive, false positive, true negative and false negative
 * (accessed via the object property get methods). The filter is run and results that pass accumulate scores for
 * true positive and false positive, otherwise the scores are accumulated for true negative and false negative. The
 * simplest scoring scheme is to mark valid results as tp=fn=1 and fp=tn=0 and invalid results the opposite.
 * <p>
 * The number of consecutive rejections is counted per frame. When the configured number of failures is reached, all
 * remaining results for the frame are rejected. This assumes the results are ordered by frame.
 * <p>
 * Note that this method is intended to score a set of results that may have been extracted from a larger set,
 * since the number of consecutive failures before each peak is expected to be stored in the origY property. Set
 * this to zero and the results should be identical to {@link #fractionScore(List, int)}.
 *
 * @param resultsList
 *            a list of results to analyse
 * @param failures
 *            the number of failures to allow per frame before all peaks are rejected
 * @return the score
 */
public FractionClassificationResult fractionScore2(List<MemoryPeakResults> resultsList, int failures) {
    int p = 0, n = 0;
    double fp = 0, fn = 0;
    double tp = 0, tn = 0;
    for (MemoryPeakResults peakResults : resultsList) {
        setup(peakResults);
        int frame = -1;
        int failCount = 0;
        for (PeakResult peak : peakResults.getResults()) {
            // Reset fail count for new frames
            if (frame != peak.getFrame()) {
                frame = peak.getFrame();
                failCount = 0;
            }
            failCount += peak.origY;
            // Reject all peaks if we have exceeded the fail count
            final boolean isPositive;
            if (failCount > failures) {
                isPositive = false;
            } else {
                // Otherwise assess the peak
                isPositive = accept(peak);
            }
            if (isPositive) {
                failCount = 0;
            } else {
                failCount++;
            }
            if (isPositive) {
                p++;
                tp += peak.getTruePositiveScore();
                fp += peak.getFalsePositiveScore();
            } else {
                fn += peak.getFalseNegativeScore();
                tn += peak.getTrueNegativeScore();
            }
        }
        n += peakResults.size();
        end();
    }
    n -= p;
    return new FractionClassificationResult(tp, fp, tn, fn, p, n);
}
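The javadoc above states that, when every origY count is zero, this method should give the same result as fractionScore(List, int). A minimal sketch of that check, assuming filter, resultsList and failures are already set up, and reusing the DoubleEquality comparison seen in the commented-out debug code earlier on this page:

// Sketch only: verifies the documented equivalence when no prior failures are recorded in origY.
FractionClassificationResult a = filter.fractionScore(resultsList, failures);
FractionClassificationResult b = filter.fractionScore2(resultsList, failures);
boolean same = gdsc.core.utils.DoubleEquality.almostEqualRelativeOrAbsolute(a.getTP(), b.getTP(), 1e-6, 1e-10)
        && gdsc.core.utils.DoubleEquality.almostEqualRelativeOrAbsolute(a.getFP(), b.getFP(), 1e-6, 1e-10)
        && gdsc.core.utils.DoubleEquality.almostEqualRelativeOrAbsolute(a.getFN(), b.getFN(), 1e-6, 1e-10);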