use of org.apache.commons.math3.stat.descriptive.summary.Sum in project GDSC-SMLM by aherbert.
the class EMGainAnalysis method simulateFromPDF.
/**
 * Randomly sample from the cumulative probability distribution function used during fitting.
 *
 * @return The histogram
 */
private int[] simulateFromPDF() {
    final double[] g = pdf(0, _photons, _gain, _noise, (int) _bias);
    // Debug: show the PDF
    double[] x = Utils.newArray(g.length, 0, 1.0);
    Utils.display(TITLE + " PDF", new Plot(TITLE + " PDF", "ADU", "P", x, Arrays.copyOf(g, g.length)));
    // Convert the PDF into a cumulative probability in place
    double sum = 0;
    for (int i = 0; i < g.length; i++) {
        final double p = g[i];
        g[i] += sum;
        sum += p;
    }
    // Normalise so the distribution sums to 1
    for (int i = 0; i < g.length; i++)
        g[i] /= sum;
    // Guard against rounding error: ensure a value of exactly 1 at the end
    g[g.length - 1] = 1;
    // Randomly sample by inverse transform: find the first bin whose
    // cumulative probability covers a uniform deviate
    RandomGenerator random = new Well44497b(System.currentTimeMillis() + System.identityHashCode(this));
    int[] h = new int[g.length];
    final int steps = simulationSize;
    for (int n = 0; n < steps; n++) {
        if (n % 64 == 0)
            IJ.showProgress(n, steps);
        final double p = random.nextDouble();
        for (int i = 0; i < g.length; i++)
            if (p <= g[i]) {
                h[i]++;
                break;
            }
    }
    return h;
}
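The inner loop above is a linear scan, which costs O(n) per sample. Because the cumulative array is sorted, the same inverse transform sampling can use a binary search instead. A minimal standalone sketch, assuming a normalised CDF as input (the class and method names are illustrative, not part of GDSC-SMLM):

import java.util.Arrays;
import java.util.Random;

public class InverseTransformSampling {

    /** Sample counts from a normalised cumulative distribution where cdf[cdf.length - 1] == 1. */
    static int[] sample(double[] cdf, int samples, Random random) {
        int[] histogram = new int[cdf.length];
        for (int n = 0; n < samples; n++) {
            final double p = random.nextDouble();
            // binarySearch returns -(insertionPoint) - 1 when p is not an exact key;
            // the insertion point is the first index with cdf[i] > p
            int i = Arrays.binarySearch(cdf, p);
            if (i < 0)
                i = -(i + 1);
            histogram[i]++;
        }
        return histogram;
    }
}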
use of org.apache.commons.math3.stat.descriptive.summary.Sum in project GDSC-SMLM by aherbert.
the class DiffusionRateTest method plotJumpDistances.
/**
 * Plot a cumulative histogram and standard histogram of the jump distances.
 *
 * @param title
 *            the title
 * @param jumpDistances
 *            the jump distances
 * @param dimensions
 *            the number of dimensions for the jumps
 */
private void plotJumpDistances(String title, DoubleData jumpDistances, int dimensions) {
    // Cumulative histogram
    // --------------------
    double[] values = jumpDistances.values();
    String title2 = title + " Cumulative Jump Distance " + dimensions + "D";
    double[][] jdHistogram = JumpDistanceAnalysis.cumulativeHistogram(values);
    Plot2 jdPlot = new Plot2(title2, "Distance (um^2)", "Cumulative Probability", jdHistogram[0], jdHistogram[1]);
    PlotWindow pw2 = Utils.display(title2, jdPlot);
    if (Utils.isNewWindow())
        idList[idCount++] = pw2.getImagePlus().getID();
    // Plot the expected function.
    // The squared jump distance follows a Chi-squared distribution: the sum of the squares
    // of k independent standard normal random variables with k = dimensions. This is a
    // special case of the gamma distribution. If the normals have non-unit variance the
    // distribution is scaled:
    // Chi^2 ~ Gamma(k/2, 2) (using the scale parameterisation of the gamma)
    // s^2 * Chi^2 ~ Gamma(k/2, 2*s^2)
    // So if s^2 = 2D:
    // 2D * Chi^2 ~ Gamma(k/2, 4D)
    double estimatedD = simpleD * simpleSteps;
    double max = Maths.max(values);
    double[] x = Utils.newArray(1000, 0, max / 1000);
    double k = dimensions / 2.0; // the gamma shape parameter
    double mean = 4 * estimatedD; // the gamma scale parameter (4D)
    GammaDistribution dist = new GammaDistribution(k, mean);
    double[] y = new double[x.length];
    for (int i = 0; i < x.length; i++)
        y[i] = dist.cumulativeProbability(x[i]);
    jdPlot.setColor(Color.red);
    jdPlot.addPoints(x, y, Plot.LINE);
    Utils.display(title2, jdPlot);
    // Histogram
    // ---------
    title2 = title + " Jump " + dimensions + "D";
    int plotId = Utils.showHistogram(title2, jumpDistances, "Distance (um^2)", 0, 0, Math.max(20, values.length / 1000));
    if (Utils.isNewWindow())
        idList[idCount++] = plotId;
    // Recompute the expected function as a density
    for (int i = 0; i < x.length; i++)
        y[i] = dist.density(x[i]);
    // Scale the density to have the same area as the histogram
    if (Utils.xValues.length > 1) {
        final double area1 = jumpDistances.size() * (Utils.xValues[1] - Utils.xValues[0]);
        final double area2 = dist.cumulativeProbability(x[x.length - 1]);
        final double scaleFactor = area1 / area2;
        for (int i = 0; i < y.length; i++)
            y[i] *= scaleFactor;
    }
    jdPlot = Utils.plot;
    jdPlot.setColor(Color.red);
    jdPlot.addPoints(x, y, Plot.LINE);
    Utils.display(WindowManager.getImage(plotId).getTitle(), jdPlot);
}
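The expected curve above hinges on the Gamma(k/2, 4D) relation for squared jump distances. A standalone sketch of the same calculation using org.apache.commons.math3.distribution.GammaDistribution, with illustrative values for the diffusion coefficient and dimensions:

import org.apache.commons.math3.distribution.GammaDistribution;

public class JumpDistanceCurve {

    public static void main(String[] args) {
        double d = 0.5; // diffusion coefficient (illustrative)
        int dimensions = 2; // 2D jumps
        // Squared jump distance: 2D * Chi^2(k) ~ Gamma(k/2, 4D)
        GammaDistribution dist = new GammaDistribution(dimensions / 2.0, 4 * d);
        for (double x = 0; x <= 4; x += 0.5)
            System.out.printf("x=%.1f CDF=%.4f PDF=%.4f%n",
                    x, dist.cumulativeProbability(x), dist.density(x));
    }
}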
use of org.apache.commons.math3.stat.descriptive.summary.Sum in project GDSC-SMLM by aherbert.
the class BenchmarkFilterAnalysis method reportResults.
private ComplexFilterScore reportResults(boolean newResults, List<ComplexFilterScore> filters) {
    if (filters.isEmpty()) {
        IJ.log("Warning: No filters pass the criteria");
        return null;
    }
    getCoordinateStore();
    Collections.sort(filters);
    FractionClassificationResult topFilterClassificationResult = null;
    ArrayList<FractionalAssignment[]> topFilterResults = null;
    String topFilterSummary = null;
    if (showSummaryTable || saveTemplate) {
        createSummaryWindow();
        int n = 0;
        final double range = (summaryDepth / simulationParameters.a) * 0.5;
        // Count the actual results within the depth range
        int np = 0;
        for (double depth : depthStats) {
            if (Math.abs(depth) < range)
                np++;
        }
        for (ComplexFilterScore fs : filters) {
            final ArrayList<FractionalAssignment[]> list = new ArrayList<FractionalAssignment[]>(resultsList.length);
            final FractionClassificationResult r = scoreFilter(fs.getFilter(), minimalFilter, resultsList, list, coordinateStore);
            final StringBuilder sb = createResult(fs.getFilter(), r);
            if (topFilterResults == null) {
                topFilterResults = list;
                topFilterClassificationResult = r;
            }
            // Show the recall at the specified depth. Sum the distance and signal factor of all scored spots.
            int scored = 0;
            double tp = 0, d = 0, sf = 0, rmsd = 0;
            // Regression of the fitted signal against the actual signal, forced through the origin
            SimpleRegression regression = new SimpleRegression(false);
            for (FractionalAssignment[] assignments : list) {
                if (assignments == null)
                    continue;
                for (int i = 0; i < assignments.length; i++) {
                    final CustomFractionalAssignment c = (CustomFractionalAssignment) assignments[i];
                    if (Math.abs(c.peak.error) <= range)
                        tp += c.getScore();
                    d += c.d;
                    sf += c.getSignalFactor();
                    rmsd += c.d * c.d;
                    regression.addData(c.peakResult.getSignal(), c.peak.getSignal());
                }
                scored += assignments.length;
            }
            final double slope = regression.getSlope();
            sb.append('\t');
            sb.append(Utils.rounded(tp / np)).append('\t');
            sb.append(Utils.rounded(d / scored)).append('\t');
            sb.append(Utils.rounded(sf / scored)).append('\t');
            sb.append(Utils.rounded(Math.sqrt(rmsd / scored))).append('\t');
            sb.append(Utils.rounded(slope)).append('\t');
            if (fs.atLimit() != null)
                sb.append(fs.atLimit());
            String text = sb.toString();
            if (topFilterSummary == null) {
                topFilterSummary = text;
                if (!showSummaryTable)
                    break;
            }
            if (fs.time != 0) {
                sb.append('\t');
                sb.append(fs.algorithm);
                sb.append('\t');
                sb.append(org.apache.commons.lang3.time.DurationFormatUtils.formatDurationHMS(fs.time));
            } else
                sb.append("\t\t");
            if (fs.paramTime != 0) {
                sb.append('\t');
                sb.append(fs.getParamAlgorithm());
                sb.append('\t');
                sb.append(org.apache.commons.lang3.time.DurationFormatUtils.formatDurationHMS(fs.paramTime));
            } else
                sb.append("\t\t");
            text = sb.toString();
            if (isHeadless)
                IJ.log(text);
            else
                summaryWindow.append(text);
            n++;
            if (summaryTopN > 0 && n >= summaryTopN)
                break;
        }
        // Add a spacer to the summary table if we have multiple results
        if (n > 1 && showSummaryTable) {
            if (isHeadless)
                IJ.log("");
            else
                summaryWindow.append("");
        }
    }
    DirectFilter bestFilter = filters.get(0).getFilter();
    if (saveBestFilter)
        saveFilter(bestFilter);
    if (topFilterClassificationResult == null) {
        topFilterResults = new ArrayList<FractionalAssignment[]>(resultsList.length);
        topFilterClassificationResult = scoreFilter(bestFilter, minimalFilter, resultsList, topFilterResults, coordinateStore);
    }
    if (newResults || scores.isEmpty()) {
        scores.add(new FilterResult(failCount, residualsThreshold, duplicateDistance, filters.get(0)));
    }
    if (saveTemplate)
        saveTemplate(topFilterSummary);
    showPlots();
    calculateSensitivity();
    topFilterResults = depthAnalysis(topFilterResults, bestFilter);
    topFilterResults = scoreAnalysis(topFilterResults, bestFilter);
    componentAnalysis(topFilterClassificationResult, filters.get(0));
    PreprocessedPeakResult[] filterResults = null;
    if (isShowOverlay())
        filterResults = showOverlay(topFilterResults, bestFilter);
    saveResults(filterResults, bestFilter);
    wo.tile();
    return filters.get(0);
}
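The slope column in the summary comes from org.apache.commons.math3.stat.regression.SimpleRegression with the intercept forced through the origin. A minimal standalone sketch of that call pattern (the class name and data values are illustrative):

import org.apache.commons.math3.stat.regression.SimpleRegression;

public class SignalSlope {

    public static void main(String[] args) {
        // false = no intercept: fit y = slope * x through the origin
        SimpleRegression regression = new SimpleRegression(false);
        double[][] pairs = { { 100, 95 }, { 200, 210 }, { 400, 390 } }; // (actual, fitted) signal
        for (double[] p : pairs)
            regression.addData(p[0], p[1]);
        System.out.println("slope = " + regression.getSlope());
    }
}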
use of org.apache.commons.math3.stat.descriptive.summary.Sum in project lucene-solr by apache.
the class EmpiricalDistributionEvaluator method evaluate.
public Tuple evaluate(Tuple tuple) throws IOException {
    if (subEvaluators.size() != 1) {
        throw new IOException("Empirical dist expects 1 column as a parameter");
    }
    StreamEvaluator colEval1 = subEvaluators.get(0);
    List<Number> numbers1 = (List<Number>) colEval1.evaluate(tuple);
    double[] column1 = new double[numbers1.size()];
    for (int i = 0; i < numbers1.size(); i++) {
        column1[i] = numbers1.get(i).doubleValue();
    }
    // Sort the data; the sorted column is retained in the returned tuple
    Arrays.sort(column1);
    EmpiricalDistribution empiricalDistribution = new EmpiricalDistribution();
    empiricalDistribution.load(column1);
    // Report the sample statistics alongside the distribution
    Map<String, Object> map = new HashMap<>();
    StatisticalSummary statisticalSummary = empiricalDistribution.getSampleStats();
    map.put("max", statisticalSummary.getMax());
    map.put("mean", statisticalSummary.getMean());
    map.put("min", statisticalSummary.getMin());
    map.put("stdev", statisticalSummary.getStandardDeviation());
    map.put("sum", statisticalSummary.getSum());
    map.put("N", statisticalSummary.getN());
    map.put("var", statisticalSummary.getVariance());
    return new EmpiricalDistributionTuple(empiricalDistribution, column1, map);
}
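The evaluator wraps org.apache.commons.math3.random.EmpiricalDistribution, which computes whole-sample statistics on load. A standalone sketch of the calls it relies on (the class name and data values are illustrative):

import org.apache.commons.math3.random.EmpiricalDistribution;
import org.apache.commons.math3.stat.descriptive.StatisticalSummary;

public class EmpiricalStats {

    public static void main(String[] args) {
        double[] column = { 1.0, 2.0, 2.5, 3.0, 7.5 };
        EmpiricalDistribution dist = new EmpiricalDistribution();
        dist.load(column);
        StatisticalSummary stats = dist.getSampleStats();
        System.out.println("mean=" + stats.getMean() + " sum=" + stats.getSum()
                + " stdev=" + stats.getStandardDeviation() + " N=" + stats.getN());
    }
}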
use of org.apache.commons.math3.stat.descriptive.summary.Sum in project lucene-solr by apache.
the class HistogramEvaluator method evaluate.
public List<Map> evaluate(Tuple tuple) throws IOException {
    StreamEvaluator colEval1 = subEvaluators.get(0);
    List<Number> numbers1 = (List<Number>) colEval1.evaluate(tuple);
    double[] column1 = new double[numbers1.size()];
    for (int i = 0; i < numbers1.size(); i++) {
        column1[i] = numbers1.get(i).doubleValue();
    }
    // An optional second parameter sets the bin count; default to 10 bins
    int bins = 10;
    if (subEvaluators.size() == 2) {
        StreamEvaluator binsEval = subEvaluators.get(1);
        Number binsNum = (Number) binsEval.evaluate(tuple);
        bins = binsNum.intValue();
    }
    EmpiricalDistribution empiricalDistribution = new EmpiricalDistribution(bins);
    empiricalDistribution.load(column1);
    // Report the summary statistics of each bin
    List<Map> binList = new ArrayList<>();
    List<SummaryStatistics> summaries = empiricalDistribution.getBinStats();
    for (SummaryStatistics statisticalSummary : summaries) {
        Map<String, Object> map = new HashMap<>();
        map.put("max", statisticalSummary.getMax());
        map.put("mean", statisticalSummary.getMean());
        map.put("min", statisticalSummary.getMin());
        map.put("stdev", statisticalSummary.getStandardDeviation());
        map.put("sum", statisticalSummary.getSum());
        map.put("N", statisticalSummary.getN());
        map.put("var", statisticalSummary.getVariance());
        binList.add(map);
    }
    return binList;
}
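Here the distribution is constructed with an explicit bin count and getBinStats() returns one SummaryStatistics per bin. A standalone sketch of that per-bin usage (the class name and data are illustrative):

import java.util.List;
import org.apache.commons.math3.random.EmpiricalDistribution;
import org.apache.commons.math3.stat.descriptive.SummaryStatistics;

public class HistogramBins {

    public static void main(String[] args) {
        double[] column = new double[100];
        for (int i = 0; i < column.length; i++)
            column[i] = i; // uniform 0..99 (illustrative)
        EmpiricalDistribution dist = new EmpiricalDistribution(5); // 5 bins
        dist.load(column);
        List<SummaryStatistics> bins = dist.getBinStats();
        for (SummaryStatistics bin : bins)
            System.out.println("N=" + bin.getN() + " mean=" + bin.getMean()
                    + " sum=" + bin.getSum());
    }
}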