Use of org.apache.commons.math3.stat.descriptive.rank.Min in project GDSC-SMLM by aherbert.
In the class FIRE, the method calculatePrecisionHistogram.
/**
* Calculate a histogram of the precision. The precision can be either stored in the results or calculated using the
* Mortensen formula. If the precision method for Q estimation is not fixed then the histogram is fitted with a
* Gaussian to create an initial estimate.
*
* @return The precision histogram
*/
private PrecisionHistogram calculatePrecisionHistogram() {
boolean logFitParameters = false;
String title = results.getName() + " Precision Histogram";
// Check if the results have the precision stored already or if it can be computed.
boolean canUseStored = canUseStoredPrecision(results);
boolean canCalculatePrecision = canCalculatePrecision(results);
// Set the method to compute a histogram. Default to the user selected option.
PrecisionMethod m = null;
if (canUseStored && precisionMethod == PrecisionMethod.STORED)
m = precisionMethod;
else if (canCalculatePrecision && precisionMethod == PrecisionMethod.CALCULATE)
m = precisionMethod;
if (m == null) {
// We only have two choices so if one is available then select it.
if (canUseStored)
m = PrecisionMethod.STORED;
else if (canCalculatePrecision)
m = PrecisionMethod.CALCULATE;
// If the user selected a method not available then log a warning
if (m != null && precisionMethod != PrecisionMethod.FIXED) {
IJ.log(String.format("%s : Selected precision method '%s' not available, switching to '%s'", TITLE, precisionMethod, m.getName()));
}
if (m == null) {
// This does not matter if the user has provided a fixed input.
if (precisionMethod == PrecisionMethod.FIXED) {
PrecisionHistogram histogram = new PrecisionHistogram(title);
histogram.mean = mean;
histogram.sigma = sigma;
return histogram;
}
// No precision
return null;
}
}
// We get here if we can compute precision.
// Build the histogram
StoredDataStatistics precision = new StoredDataStatistics(results.size());
if (m == PrecisionMethod.STORED) {
for (PeakResult r : results.getResults()) {
precision.add(r.getPrecision());
}
} else {
final double nmPerPixel = results.getNmPerPixel();
final double gain = results.getGain();
final boolean emCCD = results.isEMCCD();
for (PeakResult r : results.getResults()) {
precision.add(r.getPrecision(nmPerPixel, gain, emCCD));
}
}
//System.out.printf("Raw p = %f\n", precision.getMean());
double yMin = Double.NEGATIVE_INFINITY, yMax = 0;
// Set the min and max y-values using 1.5 x IQR
DescriptiveStatistics stats = precision.getStatistics();
double lower = stats.getPercentile(25);
double upper = stats.getPercentile(75);
if (Double.isNaN(lower) || Double.isNaN(upper)) {
if (logFitParameters)
Utils.log("Error computing IQR: %f - %f", lower, upper);
} else {
double iqr = 1.5 * (upper - lower);
yMin = FastMath.max(lower - iqr, stats.getMin());
yMax = FastMath.min(upper + iqr, stats.getMax());
if (logFitParameters)
Utils.log(" Data range: %f - %f. Plotting 1.5x IQR: %f - %f", stats.getMin(), stats.getMax(), yMin, yMax);
}
if (yMin == Double.NEGATIVE_INFINITY) {
int n = 5;
yMin = Math.max(stats.getMin(), stats.getMean() - n * stats.getStandardDeviation());
yMax = Math.min(stats.getMax(), stats.getMean() + n * stats.getStandardDeviation());
if (logFitParameters)
Utils.log(" Data range: %f - %f. Plotting mean +/- %dxSD: %f - %f", stats.getMin(), stats.getMax(), n, yMin, yMax);
}
// Get the data within the range
double[] data = precision.getValues();
precision = new StoredDataStatistics(data.length);
for (double d : data) {
if (d < yMin || d > yMax)
continue;
precision.add(d);
}
int histogramBins = Utils.getBins(precision, Utils.BinMethod.SCOTT);
float[][] hist = Utils.calcHistogram(precision.getFloatValues(), yMin, yMax, histogramBins);
PrecisionHistogram histogram = new PrecisionHistogram(hist, precision, title);
if (precisionMethod == PrecisionMethod.FIXED) {
histogram.mean = mean;
histogram.sigma = sigma;
return histogram;
}
// Fitting of the histogram to produce the initial estimate
// Extract non-zero data
float[] x = Arrays.copyOf(hist[0], hist[0].length);
float[] y = hist[1];
int count = 0;
float dx = (x[1] - x[0]) * 0.5f;
for (int i = 0; i < y.length; i++) if (y[i] > 0) {
x[count] = x[i] + dx;
y[count] = y[i];
count++;
}
x = Arrays.copyOf(x, count);
y = Arrays.copyOf(y, count);
// Sanity check the fitted data: get the mean and SD of the histogram
double[] stats2 = Utils.getHistogramStatistics(x, y);
if (logFitParameters)
Utils.log(" Initial Statistics: %f +/- %f", stats2[0], stats2[1]);
histogram.mean = stats2[0];
histogram.sigma = stats2[1];
// Standard Gaussian fit
double[] parameters = fitGaussian(x, y);
if (parameters == null) {
Utils.log(" Failed to fit initial Gaussian");
return histogram;
}
double newMean = parameters[1];
double error = Math.abs(stats2[0] - newMean) / stats2[1];
if (error > 3) {
Utils.log(" Failed to fit Gaussian: %f standard deviations from histogram mean", error);
return histogram;
}
if (newMean < yMin || newMean > yMax) {
Utils.log(" Failed to fit Gaussian: %f outside data range %f - %f", newMean, yMin, yMax);
return histogram;
}
if (logFitParameters)
Utils.log(" Initial Gaussian: %f @ %f +/- %f", parameters[0], parameters[1], parameters[2]);
histogram.mean = parameters[1];
histogram.sigma = parameters[2];
return histogram;
}
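The IQR clipping above can be reproduced with stock Commons Math alone. A minimal sketch of the same 1.5x IQR rule, assuming a plain double[] input (the helper name clipToIqr is illustrative, not part of GDSC-SMLM):

import java.util.Arrays;
import org.apache.commons.math3.stat.descriptive.DescriptiveStatistics;

// Clip data to [Q1 - 1.5*IQR, Q3 + 1.5*IQR], bounded by the data range,
// mirroring the yMin/yMax computation in calculatePrecisionHistogram.
static double[] clipToIqr(double[] data) {
    DescriptiveStatistics stats = new DescriptiveStatistics(data);
    double q1 = stats.getPercentile(25);
    double q3 = stats.getPercentile(75);
    double iqr = 1.5 * (q3 - q1);
    double lo = Math.max(q1 - iqr, stats.getMin());
    double hi = Math.min(q3 + iqr, stats.getMax());
    return Arrays.stream(data).filter(d -> d >= lo && d <= hi).toArray();
}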
Use of org.apache.commons.math3.stat.descriptive.rank.Min in project GDSC-SMLM by aherbert.
In the class BenchmarkSpotFit, the method summariseResults.
private void summariseResults(TIntObjectHashMap<FilterCandidates> filterCandidates, long runTime, final PreprocessedPeakResult[] preprocessedPeakResults, int nUniqueIDs) {
createTable();
// Summarise the fitting results. N fits, N failures.
// Optimal match statistics if filtering is perfect (since fitting is not perfect).
StoredDataStatistics distanceStats = new StoredDataStatistics();
StoredDataStatistics depthStats = new StoredDataStatistics();
// Get stats for all fitted results and those that match
// Signal, SNR, Width, xShift, yShift, Precision
createFilterCriteria();
StoredDataStatistics[][] stats = new StoredDataStatistics[3][filterCriteria.length];
for (int i = 0; i < stats.length; i++) for (int j = 0; j < stats[i].length; j++) stats[i][j] = new StoredDataStatistics();
final double nmPerPixel = simulationParameters.a;
double tp = 0, fp = 0;
int failcTP = 0, failcFP = 0;
int cTP = 0, cFP = 0;
int[] singleStatus = null, multiStatus = null, doubletStatus = null, multiDoubletStatus = null;
singleStatus = new int[FitStatus.values().length];
multiStatus = new int[singleStatus.length];
doubletStatus = new int[singleStatus.length];
multiDoubletStatus = new int[singleStatus.length];
// Easier to materialise the values since we have a lot of non-final variables to manipulate
final int[] frames = new int[filterCandidates.size()];
final FilterCandidates[] candidates = new FilterCandidates[filterCandidates.size()];
final int[] counter = new int[1];
filterCandidates.forEachEntry(new TIntObjectProcedure<FilterCandidates>() {
public boolean execute(int a, FilterCandidates b) {
frames[counter[0]] = a;
candidates[counter[0]] = b;
counter[0]++;
return true;
}
});
for (FilterCandidates result : candidates) {
// Count the number of fit results that matched (tp) and did not match (fp)
tp += result.tp;
fp += result.fp;
for (int i = 0; i < result.fitResult.length; i++) {
if (result.spots[i].match)
cTP++;
else
cFP++;
final MultiPathFitResult fitResult = result.fitResult[i];
if (singleStatus != null && result.spots[i].match) {
// Debugging reasons for fit failure
addStatus(singleStatus, fitResult.getSingleFitResult());
addStatus(multiStatus, fitResult.getMultiFitResult());
addStatus(doubletStatus, fitResult.getDoubletFitResult());
addStatus(multiDoubletStatus, fitResult.getMultiDoubletFitResult());
}
if (noMatch(fitResult)) {
if (result.spots[i].match)
failcTP++;
else
failcFP++;
}
// We have multi-path results.
// We want statistics for:
// [0] all fitted spots
// [1] fitted spots that match a result
// [2] fitted spots that do not match a result
addToStats(fitResult.getSingleFitResult(), stats);
addToStats(fitResult.getMultiFitResult(), stats);
addToStats(fitResult.getDoubletFitResult(), stats);
addToStats(fitResult.getMultiDoubletFitResult(), stats);
}
// Statistics on spots that fit an actual result
for (int i = 0; i < result.match.length; i++) {
if (!result.match[i].isFitResult())
// For now just ignore the candidates that matched
continue;
FitMatch fitMatch = (FitMatch) result.match[i];
distanceStats.add(fitMatch.d * nmPerPixel);
depthStats.add(fitMatch.z * nmPerPixel);
}
}
// Store data for computing correlation
double[] i1 = new double[depthStats.getN()];
double[] i2 = new double[i1.length];
double[] is = new double[i1.length];
int ci = 0;
for (FilterCandidates result : candidates) {
for (int i = 0; i < result.match.length; i++) {
if (!result.match[i].isFitResult())
// For now just ignore the candidates that matched
continue;
FitMatch fitMatch = (FitMatch) result.match[i];
ScoredSpot spot = result.spots[fitMatch.i];
i1[ci] = fitMatch.predictedSignal;
i2[ci] = fitMatch.actualSignal;
is[ci] = spot.spot.intensity;
ci++;
}
}
// We want to compute the Jaccard against the spot metric
// Filter the results using the multi-path filter
ArrayList<MultiPathFitResults> multiPathResults = new ArrayList<MultiPathFitResults>(filterCandidates.size());
for (int i = 0; i < frames.length; i++) {
int frame = frames[i];
MultiPathFitResult[] multiPathFitResults = candidates[i].fitResult;
int totalCandidates = candidates[i].spots.length;
int nActual = actualCoordinates.get(frame).size();
multiPathResults.add(new MultiPathFitResults(frame, multiPathFitResults, totalCandidates, nActual));
}
// Score the results and count the number returned
List<FractionalAssignment[]> assignments = new ArrayList<FractionalAssignment[]>();
final TIntHashSet set = new TIntHashSet(nUniqueIDs);
FractionScoreStore scoreStore = new FractionScoreStore() {
public void add(int uniqueId) {
set.add(uniqueId);
}
};
MultiPathFitResults[] multiResults = multiPathResults.toArray(new MultiPathFitResults[multiPathResults.size()]);
// Filter with no filter
MultiPathFilter mpf = new MultiPathFilter(new SignalFilter(0), null, multiFilter.residualsThreshold);
FractionClassificationResult fractionResult = mpf.fractionScoreSubset(multiResults, Integer.MAX_VALUE, this.results.size(), assignments, scoreStore, CoordinateStoreFactory.create(imp.getWidth(), imp.getHeight(), fitConfig.getDuplicateDistance()));
double nPredicted = fractionResult.getTP() + fractionResult.getFP();
final double[][] matchScores = new double[set.size()][];
int count = 0;
for (int i = 0; i < assignments.size(); i++) {
FractionalAssignment[] a = assignments.get(i);
if (a == null)
continue;
for (int j = 0; j < a.length; j++) {
final PreprocessedPeakResult r = ((PeakFractionalAssignment) a[j]).peakResult;
set.remove(r.getUniqueId());
final double precision = Math.sqrt(r.getLocationVariance());
final double signal = r.getSignal();
final double snr = r.getSNR();
final double width = r.getXSDFactor();
final double xShift = r.getXRelativeShift2();
final double yShift = r.getYRelativeShift2();
// Since these two are combined for filtering, the max is what matters.
final double shift = (xShift > yShift) ? Math.sqrt(xShift) : Math.sqrt(yShift);
final double eshift = Math.sqrt(xShift + yShift);
final double[] score = new double[8];
score[FILTER_SIGNAL] = signal;
score[FILTER_SNR] = snr;
score[FILTER_MIN_WIDTH] = width;
score[FILTER_MAX_WIDTH] = width;
score[FILTER_SHIFT] = shift;
score[FILTER_ESHIFT] = eshift;
score[FILTER_PRECISION] = precision;
score[FILTER_PRECISION + 1] = a[j].getScore();
matchScores[count++] = score;
}
}
// Add the rest
set.forEach(new CustomTIntProcedure(count) {
public boolean execute(int uniqueId) {
// This should not be null or something has gone wrong
PreprocessedPeakResult r = preprocessedPeakResults[uniqueId];
if (r == null)
throw new RuntimeException("Missing result: " + uniqueId);
final double precision = Math.sqrt(r.getLocationVariance());
final double signal = r.getSignal();
final double snr = r.getSNR();
final double width = r.getXSDFactor();
final double xShift = r.getXRelativeShift2();
final double yShift = r.getYRelativeShift2();
// Since these two are combined for filtering, the max is what matters.
final double shift = (xShift > yShift) ? Math.sqrt(xShift) : Math.sqrt(yShift);
final double eshift = Math.sqrt(xShift + yShift);
final double[] score = new double[8];
score[FILTER_SIGNAL] = signal;
score[FILTER_SNR] = snr;
score[FILTER_MIN_WIDTH] = width;
score[FILTER_MAX_WIDTH] = width;
score[FILTER_SHIFT] = shift;
score[FILTER_ESHIFT] = eshift;
score[FILTER_PRECISION] = precision;
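// Note: 'c' is assumed to be the running index field of the GDSC CustomTIntProcedure, seeded with 'count' passed to the constructor above.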
matchScores[c++] = score;
return true;
}
});
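As a worked example of the shift combination used in both loops above (values are illustrative):

// xShift/yShift are squared relative shifts, so the largest single-axis
// shift and the combined Euclidean shift follow by square roots:
double xShift2 = 0.09, yShift2 = 0.16;                // illustrative values
double shift = Math.sqrt(Math.max(xShift2, yShift2)); // = 0.4
double eshift = Math.sqrt(xShift2 + yShift2);         // = 0.5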
// Debug the reasons the fit failed
if (singleStatus != null) {
String name = PeakFit.getSolverName(fitConfig);
if (fitConfig.getFitSolver() == FitSolver.MLE && fitConfig.isModelCamera())
name += " Camera";
System.out.println("Failure counts: " + name);
printFailures("Single", singleStatus);
printFailures("Multi", multiStatus);
printFailures("Doublet", doubletStatus);
printFailures("Multi doublet", multiDoubletStatus);
}
StringBuilder sb = new StringBuilder(300);
// Add information about the simulation
//(simulationParameters.minSignal + simulationParameters.maxSignal) * 0.5;
final double signal = simulationParameters.signalPerFrame;
final int n = results.size();
sb.append(imp.getStackSize()).append("\t");
final int w = imp.getWidth();
final int h = imp.getHeight();
sb.append(w).append("\t");
sb.append(h).append("\t");
sb.append(n).append("\t");
double density = ((double) n / imp.getStackSize()) / (w * h) / (simulationParameters.a * simulationParameters.a / 1e6);
sb.append(Utils.rounded(density)).append("\t");
sb.append(Utils.rounded(signal)).append("\t");
sb.append(Utils.rounded(simulationParameters.s)).append("\t");
sb.append(Utils.rounded(simulationParameters.a)).append("\t");
sb.append(Utils.rounded(simulationParameters.depth)).append("\t");
sb.append(simulationParameters.fixedDepth).append("\t");
sb.append(Utils.rounded(simulationParameters.gain)).append("\t");
sb.append(Utils.rounded(simulationParameters.readNoise)).append("\t");
sb.append(Utils.rounded(simulationParameters.b)).append("\t");
sb.append(Utils.rounded(simulationParameters.b2)).append("\t");
// Compute the noise
double noise = simulationParameters.b2;
if (simulationParameters.emCCD) {
// The b2 parameter was computed without application of the EM-CCD noise factor of 2.
//final double b2 = backgroundVariance + readVariance
// = simulationParameters.b + readVariance
// This should be applied only to the background variance.
final double readVariance = noise - simulationParameters.b;
noise = simulationParameters.b * 2 + readVariance;
}
if (simulationParameters.fullSimulation) {
// The total signal is spread over frames
}
sb.append(Utils.rounded(signal / Math.sqrt(noise))).append("\t");
sb.append(Utils.rounded(simulationParameters.s / simulationParameters.a)).append("\t");
sb.append(spotFilter.getDescription());
// nP and nN are the fractional scores of the spot candidates
addCount(sb, nP + nN);
addCount(sb, nP);
addCount(sb, nN);
addCount(sb, fP);
addCount(sb, fN);
String name = PeakFit.getSolverName(fitConfig);
if (fitConfig.getFitSolver() == FitSolver.MLE && fitConfig.isModelCamera())
name += " Camera";
add(sb, name);
add(sb, config.getFitting());
resultPrefix = sb.toString();
// Q. Should I add other fit configuration here?
// The fraction of positive and negative candidates that were included
add(sb, (100.0 * cTP) / nP);
add(sb, (100.0 * cFP) / nN);
// Score the fitting results compared to the original simulation.
// Score the candidate selection:
add(sb, cTP + cFP);
add(sb, cTP);
add(sb, cFP);
// TP are all candidates that can be matched to a spot
// FP are all candidates that cannot be matched to a spot
// FN = The number of missed spots
FractionClassificationResult m = new FractionClassificationResult(cTP, cFP, 0, simulationParameters.molecules - cTP);
add(sb, m.getRecall());
add(sb, m.getPrecision());
add(sb, m.getF1Score());
add(sb, m.getJaccard());
// Score the fitting results:
add(sb, failcTP);
add(sb, failcFP);
// TP are all fit results that can be matched to a spot
// FP are all fit results that cannot be matched to a spot
// FN = The number of missed spots
add(sb, tp);
add(sb, fp);
m = new FractionClassificationResult(tp, fp, 0, simulationParameters.molecules - tp);
add(sb, m.getRecall());
add(sb, m.getPrecision());
add(sb, m.getF1Score());
add(sb, m.getJaccard());
// Do it again but pretend we can perfectly filter all the false positives
//add(sb, tp);
m = new FractionClassificationResult(tp, 0, 0, simulationParameters.molecules - tp);
// Recall is unchanged
// Precision will be 100%
add(sb, m.getF1Score());
add(sb, m.getJaccard());
// The mean may be subject to extreme outliers so use the median
double median = distanceStats.getMedian();
add(sb, median);
WindowOrganiser wo = new WindowOrganiser();
String label = String.format("Recall = %s. n = %d. Median = %s nm. SD = %s nm", Utils.rounded(m.getRecall()), distanceStats.getN(), Utils.rounded(median), Utils.rounded(distanceStats.getStandardDeviation()));
int id = Utils.showHistogram(TITLE, distanceStats, "Match Distance (nm)", 0, 0, 0, label);
if (Utils.isNewWindow())
wo.add(id);
median = depthStats.getMedian();
add(sb, median);
// Sort by spot intensity and produce correlation
int[] indices = Utils.newArray(i1.length, 0, 1);
if (showCorrelation)
Sort.sort(indices, is, rankByIntensity);
double[] r = (showCorrelation) ? new double[i1.length] : null;
double[] sr = (showCorrelation) ? new double[i1.length] : null;
double[] rank = (showCorrelation) ? new double[i1.length] : null;
ci = 0;
FastCorrelator fastCorrelator = new FastCorrelator();
ArrayList<Ranking> pc1 = new ArrayList<Ranking>();
ArrayList<Ranking> pc2 = new ArrayList<Ranking>();
for (int ci2 : indices) {
fastCorrelator.add((long) Math.round(i1[ci2]), (long) Math.round(i2[ci2]));
pc1.add(new Ranking(i1[ci2], ci));
pc2.add(new Ranking(i2[ci2], ci));
if (showCorrelation) {
r[ci] = fastCorrelator.getCorrelation();
sr[ci] = Correlator.correlation(rank(pc1), rank(pc2));
if (rankByIntensity)
rank[ci] = is[0] - is[ci];
else
rank[ci] = ci;
}
ci++;
}
final double pearsonCorr = fastCorrelator.getCorrelation();
final double rankedCorr = Correlator.correlation(rank(pc1), rank(pc2));
// Get the regression
SimpleRegression regression = new SimpleRegression(false);
for (int i = 0; i < pc1.size(); i++) regression.addData(pc1.get(i).value, pc2.get(i).value);
//final double intercept = regression.getIntercept();
final double slope = regression.getSlope();
if (showCorrelation) {
String title = TITLE + " Intensity";
Plot plot = new Plot(title, "Candidate", "Spot");
double[] limits1 = Maths.limits(i1);
double[] limits2 = Maths.limits(i2);
plot.setLimits(limits1[0], limits1[1], limits2[0], limits2[1]);
label = String.format("Correlation=%s; Ranked=%s; Slope=%s", Utils.rounded(pearsonCorr), Utils.rounded(rankedCorr), Utils.rounded(slope));
plot.addLabel(0, 0, label);
plot.setColor(Color.red);
plot.addPoints(i1, i2, Plot.DOT);
if (slope > 1)
plot.drawLine(limits1[0], limits1[0] * slope, limits1[1], limits1[1] * slope);
else
plot.drawLine(limits2[0] / slope, limits2[0], limits2[1] / slope, limits2[1]);
PlotWindow pw = Utils.display(title, plot);
if (Utils.isNewWindow())
wo.add(pw);
title = TITLE + " Correlation";
plot = new Plot(title, "Spot Rank", "Correlation");
double[] xlimits = Maths.limits(rank);
double[] ylimits = Maths.limits(r);
ylimits = Maths.limits(ylimits, sr);
plot.setLimits(xlimits[0], xlimits[1], ylimits[0], ylimits[1]);
plot.setColor(Color.red);
plot.addPoints(rank, r, Plot.LINE);
plot.setColor(Color.blue);
plot.addPoints(rank, sr, Plot.LINE);
plot.setColor(Color.black);
plot.addLabel(0, 0, label);
pw = Utils.display(title, plot);
if (Utils.isNewWindow())
wo.add(pw);
}
add(sb, pearsonCorr);
add(sb, rankedCorr);
add(sb, slope);
label = String.format("n = %d. Median = %s nm", depthStats.getN(), Utils.rounded(median));
id = Utils.showHistogram(TITLE, depthStats, "Match Depth (nm)", 0, 1, 0, label);
if (Utils.isNewWindow())
wo.add(id);
// Plot histograms of the stats on the same window
double[] lower = new double[filterCriteria.length];
double[] upper = new double[lower.length];
min = new double[lower.length];
max = new double[lower.length];
for (int i = 0; i < stats[0].length; i++) {
double[] limits = showDoubleHistogram(stats, i, wo, matchScores, nPredicted);
lower[i] = limits[0];
upper[i] = limits[1];
min[i] = limits[2];
max[i] = limits[3];
}
// Reconfigure some of the range limits
// Make this a bit bigger
upper[FILTER_SIGNAL] *= 2;
// Make this a bit bigger
upper[FILTER_SNR] *= 2;
double factor = 0.25;
if (lower[FILTER_MIN_WIDTH] != 0)
// (assuming lower is less than 1)
upper[FILTER_MIN_WIDTH] = 1 - Math.max(0, factor * (1 - lower[FILTER_MIN_WIDTH]));
if (upper[FILTER_MAX_WIDTH] != 0)
// (assuming upper is more than 1)
lower[FILTER_MAX_WIDTH] = 1 + Math.max(0, factor * (upper[FILTER_MAX_WIDTH] - 1));
// Round the ranges
final double[] interval = new double[stats[0].length];
interval[FILTER_SIGNAL] = SignalFilter.DEFAULT_INCREMENT;
interval[FILTER_SNR] = SNRFilter.DEFAULT_INCREMENT;
interval[FILTER_MIN_WIDTH] = WidthFilter2.DEFAULT_MIN_INCREMENT;
interval[FILTER_MAX_WIDTH] = WidthFilter.DEFAULT_INCREMENT;
interval[FILTER_SHIFT] = ShiftFilter.DEFAULT_INCREMENT;
interval[FILTER_ESHIFT] = EShiftFilter.DEFAULT_INCREMENT;
interval[FILTER_PRECISION] = PrecisionFilter.DEFAULT_INCREMENT;
interval[FILTER_ITERATIONS] = 0.1;
interval[FILTER_EVALUATIONS] = 0.1;
// Create a range increment
double[] increment = new double[lower.length];
for (int i = 0; i < increment.length; i++) {
lower[i] = Maths.floor(lower[i], interval[i]);
upper[i] = Maths.ceil(upper[i], interval[i]);
double range = upper[i] - lower[i];
// Allow clipping if the range is small compared to the min increment
double multiples = range / interval[i];
// Use 8 multiples for the equivalent of +/- 4 steps around the centre
if (multiples < 8) {
multiples = Math.ceil(multiples);
} else
multiples = 8;
increment[i] = Maths.ceil(range / multiples, interval[i]);
if (i == FILTER_MIN_WIDTH)
// Requires clipping based on the upper limit
lower[i] = upper[i] - increment[i] * multiples;
else
upper[i] = lower[i] + increment[i] * multiples;
}
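Maths.floor and Maths.ceil here are GDSC helpers, not java.lang.Math. A plausible reading, snapping a value outward to a multiple of the interval (these stand-ins are assumptions, not the GDSC source):

// Hypothetical equivalents of the GDSC Maths.floor/Maths.ceil used above:
static double floorToInterval(double value, double interval) {
    return Math.floor(value / interval) * interval; // round down to a multiple of interval
}

static double ceilToInterval(double value, double interval) {
    return Math.ceil(value / interval) * interval; // round up to a multiple of interval
}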
for (int i = 0; i < stats[0].length; i++) {
lower[i] = Maths.round(lower[i]);
upper[i] = Maths.round(upper[i]);
min[i] = Maths.round(min[i]);
max[i] = Maths.round(max[i]);
increment[i] = Maths.round(increment[i]);
sb.append("\t").append(min[i]).append(':').append(lower[i]).append('-').append(upper[i]).append(':').append(max[i]);
}
// Disable some filters
increment[FILTER_SIGNAL] = Double.POSITIVE_INFINITY;
//increment[FILTER_SHIFT] = Double.POSITIVE_INFINITY;
increment[FILTER_ESHIFT] = Double.POSITIVE_INFINITY;
wo.tile();
sb.append("\t").append(Utils.timeToString(runTime / 1000000.0));
summaryTable.append(sb.toString());
if (saveFilterRange) {
GlobalSettings gs = SettingsManager.loadSettings();
FilterSettings filterSettings = gs.getFilterSettings();
String filename = (silent) ? filterSettings.filterSetFilename : Utils.getFilename("Filter_range_file", filterSettings.filterSetFilename);
if (filename == null)
return;
// Remove extension to store the filename
filename = Utils.replaceExtension(filename, ".xml");
filterSettings.filterSetFilename = filename;
// Create a filter set using the ranges
ArrayList<Filter> filters = new ArrayList<Filter>(3);
filters.add(new MultiFilter2(lower[0], (float) lower[1], lower[2], lower[3], lower[4], lower[5], lower[6]));
filters.add(new MultiFilter2(upper[0], (float) upper[1], upper[2], upper[3], upper[4], upper[5], upper[6]));
filters.add(new MultiFilter2(increment[0], (float) increment[1], increment[2], increment[3], increment[4], increment[5], increment[6]));
if (saveFilters(filename, filters))
SettingsManager.saveSettings(gs);
// Create a filter set using the min/max and the initial bounds.
// Set sensible limits
min[FILTER_SIGNAL] = Math.max(min[FILTER_SIGNAL], 30);
max[FILTER_PRECISION] = Math.min(max[FILTER_PRECISION], 100);
// Commented this out so that the 4-set filters are the same as the 3-set filters.
// The difference leads to differences when optimising.
// // Use half the initial bounds (hoping this is a good starting guess for the optimum)
// final boolean[] limitToLower = new boolean[min.length];
// limitToLower[FILTER_SIGNAL] = true;
// limitToLower[FILTER_SNR] = true;
// limitToLower[FILTER_MIN_WIDTH] = true;
// limitToLower[FILTER_MAX_WIDTH] = false;
// limitToLower[FILTER_SHIFT] = false;
// limitToLower[FILTER_ESHIFT] = false;
// limitToLower[FILTER_PRECISION] = true;
// for (int i = 0; i < limitToLower.length; i++)
// {
// final double range = (upper[i] - lower[i]) / 2;
// if (limitToLower[i])
// upper[i] = lower[i] + range;
// else
// lower[i] = upper[i] - range;
// }
filters = new ArrayList<Filter>(4);
filters.add(new MultiFilter2(min[0], (float) min[1], min[2], min[3], min[4], min[5], min[6]));
filters.add(new MultiFilter2(lower[0], (float) lower[1], lower[2], lower[3], lower[4], lower[5], lower[6]));
filters.add(new MultiFilter2(upper[0], (float) upper[1], upper[2], upper[3], upper[4], upper[5], upper[6]));
filters.add(new MultiFilter2(max[0], (float) max[1], max[2], max[3], max[4], max[5], max[6]));
saveFilters(Utils.replaceExtension(filename, ".4.xml"), filters);
}
}
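FastCorrelator, Correlator and the rank helper above are GDSC utilities; stock Commons Math can produce the same final summary numbers. A minimal sketch, assuming i1 and i2 are the candidate/fitted signal arrays built above:

import org.apache.commons.math3.stat.correlation.PearsonsCorrelation;
import org.apache.commons.math3.stat.correlation.SpearmansCorrelation;
import org.apache.commons.math3.stat.regression.SimpleRegression;

double pearson = new PearsonsCorrelation().correlation(i1, i2);
double spearman = new SpearmansCorrelation().correlation(i1, i2);
// Regression through the origin: SimpleRegression(false) disables the intercept
SimpleRegression regression = new SimpleRegression(false);
for (int k = 0; k < i1.length; k++)
    regression.addData(i1[k], i2[k]);
double slope = regression.getSlope();

(FastCorrelator accumulates rounded long values, so its result may differ slightly from PearsonsCorrelation on the raw doubles.)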
Use of org.apache.commons.math3.stat.descriptive.rank.Min in project GDSC-SMLM by aherbert.
In the class BenchmarkSpotFit, the method showDoubleHistogram.
private double[] showDoubleHistogram(StoredDataStatistics[][] stats, final int i, WindowOrganiser wo, double[][] matchScores, double nPredicted) {
String xLabel = filterCriteria[i].name;
LowerLimit lower = filterCriteria[i].lower;
UpperLimit upper = filterCriteria[i].upper;
double[] j = null;
double[] metric = null;
double maxJ = 0;
if (i <= FILTER_PRECISION && (showFilterScoreHistograms || upper.requiresJaccard || lower.requiresJaccard)) {
// Jaccard score versus the range of the metric
Arrays.sort(matchScores, new Comparator<double[]>() {
public int compare(double[] o1, double[] o2) {
if (o1[i] < o2[i])
return -1;
if (o1[i] > o2[i])
return 1;
return 0;
}
});
final int scoreIndex = FILTER_PRECISION + 1;
int n = results.size();
double tp = 0;
double fp = 0;
j = new double[matchScores.length + 1];
metric = new double[j.length];
for (int k = 0; k < matchScores.length; k++) {
final double score = matchScores[k][scoreIndex];
tp += score;
fp += (1 - score);
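// Fractional Jaccard: tp / (tp + fp + fn); with fn = n - tp this simplifies to tp / (fp + n)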
j[k + 1] = tp / (fp + n);
metric[k + 1] = matchScores[k][i];
}
metric[0] = metric[1];
maxJ = Maths.max(j);
if (showFilterScoreHistograms) {
String title = TITLE + " Jaccard " + xLabel;
Plot plot = new Plot(title, xLabel, "Jaccard", metric, j);
// Remove outliers
double[] limitsx = Maths.limits(metric);
Percentile p = new Percentile();
double l = p.evaluate(metric, 25);
double u = p.evaluate(metric, 75);
double iqr = 1.5 * (u - l);
limitsx[1] = Math.min(limitsx[1], u + iqr);
plot.setLimits(limitsx[0], limitsx[1], 0, Maths.max(j));
PlotWindow pw = Utils.display(title, plot);
if (Utils.isNewWindow())
wo.add(pw);
}
}
// [0] is all
// [1] is matches
// [2] is no match
StoredDataStatistics s1 = stats[0][i];
StoredDataStatistics s2 = stats[1][i];
StoredDataStatistics s3 = stats[2][i];
if (s1.getN() == 0)
return new double[4];
DescriptiveStatistics d = s1.getStatistics();
double median = 0;
Plot2 plot = null;
String title = null;
if (showFilterScoreHistograms) {
median = d.getPercentile(50);
String label = String.format("n = %d. Median = %s nm", s1.getN(), Utils.rounded(median));
int id = Utils.showHistogram(TITLE, s1, xLabel, filterCriteria[i].minBinWidth, (filterCriteria[i].restrictRange) ? 1 : 0, 0, label);
if (id == 0) {
IJ.log("Failed to show the histogram: " + xLabel);
return new double[4];
}
if (Utils.isNewWindow())
wo.add(id);
title = WindowManager.getImage(id).getTitle();
// Reverse engineer the histogram settings
plot = Utils.plot;
double[] xValues = Utils.xValues;
int bins = xValues.length;
double yMin = xValues[0];
double binSize = xValues[1] - xValues[0];
double yMax = xValues[0] + (bins - 1) * binSize;
if (s2.getN() > 0) {
double[] values = s2.getValues();
double[][] hist = Utils.calcHistogram(values, yMin, yMax, bins);
if (hist[0].length > 0) {
plot.setColor(Color.red);
plot.addPoints(hist[0], hist[1], Plot2.BAR);
Utils.display(title, plot);
}
}
if (s3.getN() > 0) {
double[] values = s3.getValues();
double[][] hist = Utils.calcHistogram(values, yMin, yMax, bins);
if (hist[0].length > 0) {
plot.setColor(Color.blue);
plot.addPoints(hist[0], hist[1], Plot2.BAR);
Utils.display(title, plot);
}
}
}
// Do cumulative histogram
double[][] h1 = Maths.cumulativeHistogram(s1.getValues(), true);
double[][] h2 = Maths.cumulativeHistogram(s2.getValues(), true);
double[][] h3 = Maths.cumulativeHistogram(s3.getValues(), true);
if (showFilterScoreHistograms) {
title = TITLE + " Cumul " + xLabel;
plot = new Plot2(title, xLabel, "Frequency");
// Find limits
double[] xlimit = Maths.limits(h1[0]);
xlimit = Maths.limits(xlimit, h2[0]);
xlimit = Maths.limits(xlimit, h3[0]);
// Restrict using the inter-quartile range
if (filterCriteria[i].restrictRange) {
double q1 = d.getPercentile(25);
double q2 = d.getPercentile(75);
double iqr = (q2 - q1) * 2.5;
xlimit[0] = Maths.max(xlimit[0], median - iqr);
xlimit[1] = Maths.min(xlimit[1], median + iqr);
}
plot.setLimits(xlimit[0], xlimit[1], 0, 1.05);
plot.addPoints(h1[0], h1[1], Plot.LINE);
plot.setColor(Color.red);
plot.addPoints(h2[0], h2[1], Plot.LINE);
plot.setColor(Color.blue);
plot.addPoints(h3[0], h3[1], Plot.LINE);
}
// Determine the maximum difference between the TP and FP
double maxx1 = 0;
double maxx2 = 0;
double max1 = 0;
double max2 = 0;
// We cannot compute the delta histogram, or use percentiles
if (s2.getN() == 0) {
upper = UpperLimit.ZERO;
lower = LowerLimit.ZERO;
}
final boolean requireLabel = (showFilterScoreHistograms && filterCriteria[i].requireLabel);
if (requireLabel || upper.requiresDeltaHistogram() || lower.requiresDeltaHistogram()) {
if (s2.getN() != 0 && s3.getN() != 0) {
LinearInterpolator li = new LinearInterpolator();
PolynomialSplineFunction f1 = li.interpolate(h2[0], h2[1]);
PolynomialSplineFunction f2 = li.interpolate(h3[0], h3[1]);
for (double x : h1[0]) {
if (x < h2[0][0] || x < h3[0][0])
continue;
try {
double v1 = f1.value(x);
double v2 = f2.value(x);
double diff = v2 - v1;
if (diff > 0) {
if (max1 < diff) {
max1 = diff;
maxx1 = x;
}
} else {
if (max2 > diff) {
max2 = diff;
maxx2 = x;
}
}
} catch (OutOfRangeException e) {
// Because we reached the end
break;
}
}
} else {
// Switch to percentiles if we have no delta histogram
if (upper.requiresDeltaHistogram())
upper = UpperLimit.NINETY_NINE_PERCENT;
if (lower.requiresDeltaHistogram())
lower = LowerLimit.ONE_PERCENT;
}
// System.out.printf("Bounds %s : %s, pos %s, neg %s, %s\n", xLabel, Utils.rounded(getPercentile(h2, 0.01)),
// Utils.rounded(maxx1), Utils.rounded(maxx2), Utils.rounded(getPercentile(h1, 0.99)));
}
if (showFilterScoreHistograms) {
// We use bins=1 on charts where we do not need a label
if (requireLabel) {
String label = String.format("Max+ %s @ %s, Max- %s @ %s", Utils.rounded(max1), Utils.rounded(maxx1), Utils.rounded(max2), Utils.rounded(maxx2));
plot.setColor(Color.black);
plot.addLabel(0, 0, label);
}
PlotWindow pw = Utils.display(title, plot);
if (Utils.isNewWindow())
wo.add(pw.getImagePlus().getID());
}
// Now compute the bounds using the desired limit
double l, u;
switch(lower) {
case ONE_PERCENT:
l = getPercentile(h2, 0.01);
break;
case MAX_NEGATIVE_CUMUL_DELTA:
l = maxx2;
break;
case ZERO:
l = 0;
break;
case HALF_MAX_JACCARD_VALUE:
l = getValue(metric, j, maxJ * 0.5);
break;
default:
throw new RuntimeException("Missing lower limit method");
}
switch(upper) {
case MAX_POSITIVE_CUMUL_DELTA:
u = maxx1;
break;
case NINETY_NINE_PERCENT:
u = getPercentile(h2, 0.99);
break;
case NINETY_NINE_NINE_PERCENT:
u = getPercentile(h2, 0.999);
break;
case ZERO:
u = 0;
break;
case MAX_JACCARD2:
u = getValue(metric, j, maxJ) * 2;
//System.out.printf("MaxJ = %.4f @ %.3f\n", maxJ, u / 2);
break;
default:
throw new RuntimeException("Missing upper limit method");
}
double min = getPercentile(h1, 0);
double max = getPercentile(h1, 1);
return new double[] { l, u, min, max };
}
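The interpolation step above can be isolated. A minimal sketch of finding where one cumulative histogram most exceeds another, using the same Commons Math classes (the helper name maxPositiveDelta is illustrative):

import org.apache.commons.math3.analysis.interpolation.LinearInterpolator;
import org.apache.commons.math3.analysis.polynomials.PolynomialSplineFunction;
import org.apache.commons.math3.exception.OutOfRangeException;

// h2, h3 are [x values, cumulative frequencies]; xs are the evaluation points.
static double maxPositiveDelta(double[][] h2, double[][] h3, double[] xs) {
    PolynomialSplineFunction f1 = new LinearInterpolator().interpolate(h2[0], h2[1]);
    PolynomialSplineFunction f2 = new LinearInterpolator().interpolate(h3[0], h3[1]);
    double max = 0, maxx = Double.NaN;
    for (double x : xs) {
        if (x < h2[0][0] || x < h3[0][0])
            continue; // before the start of an interpolation range
        try {
            double diff = f2.value(x) - f1.value(x);
            if (diff > max) {
                max = diff;
                maxx = x;
            }
        } catch (OutOfRangeException e) {
            break; // past the end of an interpolation range
        }
    }
    return maxx;
}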
Use of org.apache.commons.math3.stat.descriptive.rank.Min in project lucene-solr by apache.
In the class EmpiricalDistributionEvaluator, the method evaluate.
public Tuple evaluate(Tuple tuple) throws IOException {
if (subEvaluators.size() != 1) {
throw new IOException("Empirical dist expects 1 column as a parameter");
}
StreamEvaluator colEval1 = subEvaluators.get(0);
List<Number> numbers1 = (List<Number>) colEval1.evaluate(tuple);
double[] column1 = new double[numbers1.size()];
for (int i = 0; i < numbers1.size(); i++) {
column1[i] = numbers1.get(i).doubleValue();
}
Arrays.sort(column1);
EmpiricalDistribution empiricalDistribution = new EmpiricalDistribution();
empiricalDistribution.load(column1);
Map map = new HashMap();
StatisticalSummary statisticalSummary = empiricalDistribution.getSampleStats();
map.put("max", statisticalSummary.getMax());
map.put("mean", statisticalSummary.getMean());
map.put("min", statisticalSummary.getMin());
map.put("stdev", statisticalSummary.getStandardDeviation());
map.put("sum", statisticalSummary.getSum());
map.put("N", statisticalSummary.getN());
map.put("var", statisticalSummary.getVariance());
return new EmpiricalDistributionTuple(empiricalDistribution, column1, map);
}
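A standalone sketch of the EmpiricalDistribution usage above (the input values are illustrative):

import java.util.Arrays;
import org.apache.commons.math3.random.EmpiricalDistribution;
import org.apache.commons.math3.stat.descriptive.StatisticalSummary;

double[] column = { 3.1, 1.4, 1.5, 9.2, 6.5 };
Arrays.sort(column);
EmpiricalDistribution dist = new EmpiricalDistribution();
dist.load(column);
StatisticalSummary summary = dist.getSampleStats();
System.out.printf("min=%.1f max=%.1f mean=%.2f n=%d%n",
        summary.getMin(), summary.getMax(), summary.getMean(), summary.getN());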
Use of org.apache.commons.math3.stat.descriptive.rank.Min in project lucene-solr by apache.
In the class HistogramEvaluator, the method evaluate.
public List<Map> evaluate(Tuple tuple) throws IOException {
StreamEvaluator colEval1 = subEvaluators.get(0);
List<Number> numbers1 = (List<Number>) colEval1.evaluate(tuple);
double[] column1 = new double[numbers1.size()];
for (int i = 0; i < numbers1.size(); i++) {
column1[i] = numbers1.get(i).doubleValue();
}
int bins = 10;
if (subEvaluators.size() == 2) {
StreamEvaluator binsEval = subEvaluators.get(1);
Number binsNum = (Number) binsEval.evaluate(tuple);
bins = binsNum.intValue();
}
EmpiricalDistribution empiricalDistribution = new EmpiricalDistribution(bins);
empiricalDistribution.load(column1);
List<Map> binList = new ArrayList();
List<SummaryStatistics> summaries = empiricalDistribution.getBinStats();
for (SummaryStatistics statisticalSummary : summaries) {
Map map = new HashMap();
map.put("max", statisticalSummary.getMax());
map.put("mean", statisticalSummary.getMean());
map.put("min", statisticalSummary.getMin());
map.put("stdev", statisticalSummary.getStandardDeviation());
map.put("sum", statisticalSummary.getSum());
map.put("N", statisticalSummary.getN());
map.put("var", statisticalSummary.getVariance());
binList.add(map);
}
return binList;
}
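For reference, the Min class named in these headings is a simple rank statistic; the snippets above reach the minimum indirectly through StatisticalSummary.getMin(). Direct use is a one-liner:

import org.apache.commons.math3.stat.descriptive.rank.Min;

double[] values = { 3.1, 1.4, 9.2 };
double min = new Min().evaluate(values); // 1.4

// Min is also a storeless statistic that can be updated incrementally:
Min m = new Min();
m.increment(3.1);
m.increment(1.4);
double runningMin = m.getResult(); // 1.4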