Usage of org.apache.commons.math3.distribution.RealDistribution in the GDSC-SMLM project (by aherbert): class CreateData, method createPhotonDistribution.
/**
 * Creates the photon distribution.
 *
 * @return A photon distribution loaded from a file of floating-point values with the specified
 *         population mean, or null when no distribution is required (fixed/correlated modes).
 */
private RealDistribution createPhotonDistribution() {
    if (PHOTON_DISTRIBUTION[PHOTON_CUSTOM].equals(settings.photonDistribution)) {
        // Get the distribution file
        final String filename = Utils.getFilename("Photon_distribution", settings.photonDistributionFile);
        if (filename != null) {
            settings.photonDistributionFile = filename;
            // try-with-resources closes the reader (and the underlying FileInputStream) even if
            // the UnicodeReader/BufferedReader constructors or the read loop throw. The previous
            // version leaked the stream if a constructor threw before the finally block applied.
            try (BufferedReader in = new BufferedReader(
                    new UnicodeReader(new FileInputStream(new File(settings.photonDistributionFile)), null))) {
                final StoredDataStatistics stats = new StoredDataStatistics();
                String str;
                // One floating-point value per line.
                while ((str = in.readLine()) != null) {
                    stats.add(Double.parseDouble(str));
                }
                if (stats.getSum() > 0) {
                    // Update the statistics to the desired mean.
                    final double scale = (double) settings.photonsPerSecond / stats.getMean();
                    final double[] values = stats.getValues();
                    for (int i = 0; i < values.length; i++) {
                        values[i] *= scale;
                    }
                    // TODO - Investigate the limits of this distribution.
                    // How far above and below the input data will values be generated.
                    // Create the distribution using the recommended number of bins
                    final int binCount = stats.getN() / 10;
                    final EmpiricalDistribution dist = new EmpiricalDistribution(binCount, createRandomGenerator());
                    dist.load(values);
                    return dist;
                }
            } catch (final IOException | NullArgumentException | NumberFormatException ignored) {
                // Deliberately ignored: fall through and report the failure below.
            }
        }
        Utils.log("Failed to load custom photon distribution from file: %s. Default to fixed.", settings.photonDistributionFile);
    } else if (PHOTON_DISTRIBUTION[PHOTON_UNIFORM].equals(settings.photonDistribution)) {
        // Uniform requires a non-empty range.
        if (settings.photonsPerSecond < settings.photonsPerSecondMaximum) {
            return new UniformRealDistribution(createRandomGenerator(), settings.photonsPerSecond, settings.photonsPerSecondMaximum);
        }
    } else if (PHOTON_DISTRIBUTION[PHOTON_GAMMA].equals(settings.photonDistribution)) {
        // Gamma parameterised so that mean = shape * scale = photonsPerSecond.
        final double scaleParameter = settings.photonsPerSecond / settings.photonShape;
        return new GammaDistribution(createRandomGenerator(), settings.photonShape, scaleParameter, ExponentialDistribution.DEFAULT_INVERSE_ABSOLUTE_ACCURACY);
    } else if (PHOTON_DISTRIBUTION[PHOTON_CORRELATED].equals(settings.photonDistribution)) {
        // No distribution required
        return null;
    }
    // Anything unrecognised (or a failed custom load) falls back to the fixed distribution.
    settings.photonDistribution = PHOTON_DISTRIBUTION[PHOTON_FIXED];
    return null;
}
Usage of org.apache.commons.math3.distribution.RealDistribution in the tetrad project (by cmu-phil): class GeneralizedSemEstimator, method estimate.
/**
 * Maximizes likelihood equation by equation. Assumes the equations are recursive and that
 * each has exactly one error term.
 *
 * @param pm the generalized SEM parameterized model
 * @param data the data set to fit
 * @return the estimated instantiated model
 */
public GeneralizedSemIm estimate(GeneralizedSemPm pm, DataSet data) {
    final GeneralizedSemIm estIm = new GeneralizedSemIm(pm);
    final MyContext context = new MyContext();
    final StringBuilder reportBuilder = new StringBuilder();

    // Estimate one equation per non-error variable.
    final List<Node> nodes = pm.getGraph().getNodes();
    nodes.removeAll(pm.getErrorNodes());

    final List<List<Double>> residualsPerEquation = new ArrayList<>();
    final List<RealDistribution> distributionPerEquation = new ArrayList<>();

    for (int index = 0; index < nodes.size(); index++) {
        final Node node = nodes.get(index);

        // Parameters referenced by the node's expression and by its error term.
        final List<String> parameters = new ArrayList<>(pm.getReferencedParameters(node));
        final Node error = pm.getErrorNode(node);
        parameters.addAll(pm.getReferencedParameters(error));

        final LikelihoodFittingFunction2 fittingFunction =
                new LikelihoodFittingFunction2(index, pm, parameters, nodes, data, context);

        // Starting point: each parameter's estimation-initialization expression.
        final double[] startValues = new double[parameters.size()];
        for (int j = 0; j < parameters.size(); j++) {
            startValues[j] = pm.getParameterEstimationInitializationExpression(parameters.get(j))
                    .evaluate(new MyContext());
        }

        final double[] optimum = optimize(fittingFunction, startValues, 1);
        for (int j = 0; j < parameters.size(); j++) {
            estIm.setParameterValue(parameters.get(j), optimum[j]);
        }

        final List<Double> residuals = fittingFunction.getResiduals();
        residualsPerEquation.add(residuals);
        final RealDistribution distribution = fittingFunction.getDistribution();
        distributionPerEquation.add(distribution);

        // Per-equation goodness-of-fit via Anderson-Darling.
        final GeneralAndersonDarlingTest equationTest = new GeneralAndersonDarlingTest(residuals, distribution);
        reportBuilder.append("\nEquation: ").append(node).append(" := ")
                .append(estIm.getNodeSubstitutedString(node));
        reportBuilder.append("\n\twhere ").append(pm.getErrorNode(node)).append(" ~ ")
                .append(estIm.getNodeSubstitutedString(pm.getErrorNode(node)));
        reportBuilder.append("\nAnderson Darling A^2* for this equation = ")
                .append(equationTest.getASquaredStar()).append("\n");
    }

    // Final joint optimization pass. NOTE(review): this parameter list is empty in the
    // original code, so the loop below is a no-op — preserved as-is; confirm intent upstream.
    final List<String> jointParameters = new ArrayList<>();
    final double[] jointValues = new double[jointParameters.size()];
    for (int i = 0; i < jointParameters.size(); i++) {
        jointValues[i] = estIm.getParameterValue(jointParameters.get(i));
    }
    final LikelihoodFittingFunction jointFunction =
            new LikelihoodFittingFunction(pm, jointParameters, nodes, data, context);
    optimize(jointFunction, jointValues, 1);

    // Model-level Anderson-Darling statistic across all equations.
    final MultiGeneralAndersonDarlingTest modelTest =
            new MultiGeneralAndersonDarlingTest(residualsPerEquation, distributionPerEquation);
    final double aSquaredStar = modelTest.getASquaredStar();
    this.aSquaredStar = aSquaredStar;
    this.report = "Report:\n" + "\nModel A^2* (Anderson Darling) = " + aSquaredStar + "\n" + reportBuilder;
    return estIm;
}
Usage of org.apache.commons.math3.distribution.RealDistribution in the GDSC-SMLM project (by aherbert): class CameraModelAnalysis, method execute.
/**
 * Execute the analysis: sample a histogram from the configured camera model, compare its
 * cumulative distribution against the model likelihood function (Kolmogorov distance plus a
 * Kolmogorov-Smirnov p-value), and plot both the CDF comparison and the histogram.
 *
 * @return false if the settings are invalid (no gain or no photons), true otherwise
 */
private boolean execute() {
dirty = false;
final CameraModelAnalysisSettings settings = this.settings.build();
// Validate settings before doing any work.
if (!(getGain(settings) > 0)) {
ImageJUtils.log(TITLE + "Error: No total gain");
return false;
}
if (!(settings.getPhotons() > 0)) {
ImageJUtils.log(TITLE + "Error: No photons");
return false;
}
// Avoid repeating the same analysis
if (settings.equals(lastSettings)) {
return true;
}
lastSettings = settings;
final IntHistogram h = getHistogram(settings);
// Build cumulative distribution
final double[][] cdf1 = cumulativeHistogram(h);
final double[] x1 = cdf1[0];
final double[] y1 = cdf1[1];
// Interpolate to 300 steps faster evaluation?
// Get likelihood function
final LikelihoodFunction f = getLikelihoodFunction(settings);
// Create likelihood cumulative distribution
final double[][] cdf2 = cumulativeDistribution(settings, cdf1, f);
// Compute Komolgorov distance
final double[] distanceAndValue = getDistance(cdf1, cdf2);
final double distance = distanceAndValue[0];
final double value = distanceAndValue[1];
final double area = distanceAndValue[2];
final double[] x2 = cdf2[0];
final double[] y2 = cdf2[1];
// Fill y1: align the sampled CDF (x1/y1) onto the model's x-axis (x2) by left-padding with
// zeros up to x1[0] and right-padding with the final CDF value (1) after the data ends.
int offset = 0;
while (x2[offset] < x1[0]) {
offset++;
}
final double[] y1b = new double[y2.length];
System.arraycopy(y1, 0, y1b, offset, y1.length);
Arrays.fill(y1b, offset + y1.length, y2.length, y1[y1.length - 1]);
// KolmogorovSmirnovTest
// n is the number of samples used to build the probability distribution.
final int n = (int) MathUtils.sum(h.histogramCounts);
// From KolmogorovSmirnovTest.kolmogorovSmirnovTest(RealDistribution distribution, double[]
// data, boolean exact):
// Returns the p-value associated with the null hypothesis that data is a sample from
// distribution.
// E.g. If p<0.05 then the null hypothesis is rejected and the data do not match the
// distribution.
double pvalue = Double.NaN;
try {
pvalue = 1d - kolmogorovSmirnovTest.cdf(distance, n);
} catch (final MathArithmeticException ex) {
// Cannot be computed to leave at NaN
}
// Plot
final WindowOrganiser wo = new WindowOrganiser();
String title = TITLE + " CDF";
Plot plot = new Plot(title, "Count", "CDF");
// Leave 5% head-room above the largest CDF value.
final double max = 1.05 * MathUtils.maxDefault(1, y2);
plot.setLimits(x2[0], x2[x2.length - 1], 0, max);
plot.setColor(Color.blue);
plot.addPoints(x2, y1b, Plot.BAR);
plot.setColor(Color.red);
plot.addPoints(x2, y2, Plot.BAR);
plot.setColor(Color.magenta);
// Vertical marker at the count where the maximum distance occurred.
plot.drawLine(value, 0, value, max);
plot.setColor(Color.black);
plot.addLegend("CDF\nModel");
plot.addLabel(0, 0, String.format("Distance=%s @ %.0f (Mean=%s) : p=%s", MathUtils.rounded(distance), value, MathUtils.rounded(area), MathUtils.rounded(pvalue)));
ImageJUtils.display(title, plot, ImageJUtils.NO_TO_FRONT, wo);
// Show the histogram
title = TITLE + " Histogram";
plot = new Plot(title, "Count", "Frequency");
plot.setLimits(x1[0] - 0.5, x1[x1.length - 1] + 1.5, 0, MathUtils.max(h.histogramCounts) * 1.05);
plot.setColor(Color.blue);
plot.addPoints(x1, SimpleArrayUtils.toDouble(h.histogramCounts), Plot.BAR);
plot.setColor(Color.red);
final double[] x = floatHistogram[0].clone();
final double[] y = floatHistogram[1].clone();
// Scale the expected (float) histogram so its total matches the n sampled counts,
// accounting for the bin width (x[1] - x[0]).
final double scale = n / (MathUtils.sum(y) * (x[1] - x[0]));
for (int i = 0; i < y.length; i++) {
y[i] *= scale;
}
plot.addPoints(x, y, Plot.LINE);
plot.setColor(Color.black);
plot.addLegend("Sample\nExpected");
ImageJUtils.display(title, plot, ImageJUtils.NO_TO_FRONT, wo);
wo.tile();
return true;
}
Usage of org.apache.commons.math3.distribution.RealDistribution in the tetrad project (by cmu-phil): class IndTestRegressionAD, method isIndependent.
/**
 * Determines whether variable x is independent of variable y given a list of conditioning
 * variables z, by an Anderson-Darling test of the regression residuals against a normal
 * distribution.
 *
 * @param xVar the one variable being compared.
 * @param yVar the second variable being compared.
 * @param zList the list of conditioning variables.
 * @return true iff x _||_ y | z.
 * @throws RuntimeException if a matrix singularity is encountered.
 * @throws NullPointerException if zList is null or contains a null element.
 */
public boolean isIndependent(Node xVar, Node yVar, List<Node> zList) {
    if (zList == null) {
        throw new NullPointerException();
    }
    for (Node node : zList) {
        if (node == null) {
            throw new NullPointerException();
        }
    }
    // Regress x on {y} + z. v1 holds the full residuals; v2 the residuals with the first
    // regressor (y) omitted. (Removed a catch(Exception){throw e;} wrapper that added nothing.)
    List<Node> regressors = new ArrayList<>();
    regressors.add(dataSet.getVariable(yVar.getName()));
    for (Node zVar : zList) {
        regressors.add(dataSet.getVariable(zVar.getName()));
    }
    RegressionDataset regression = new RegressionDataset(dataSet);
    RegressionResult result = regression.regress(xVar, regressors);
    TetradVector v1 = result.getResiduals();
    TetradVector v2 = regression.getResidualsWithoutFirstRegressor();
    // Full residuals as a list for the Anderson-Darling test.
    List<Double> d1 = new ArrayList<>();
    for (int i = 0; i < v1.size(); i++) {
        d1.add(v1.get(i));
    }
    // Reduced residuals as an array for the standard-deviation estimate.
    double[] f2 = new double[v2.size()];
    for (int i = 0; i < v2.size(); i++) {
        f2[i] = v2.get(i);
    }
    double sd = StatUtils.sd(f2);
    // Compare the full residuals to a zero-mean normal whose sd is that of the reduced
    // residuals.
    RealDistribution c2 = new NormalDistribution(0, sd);
    GeneralAndersonDarlingTest test = new GeneralAndersonDarlingTest(d1, c2);
    double aSquaredStar = test.getASquaredStar();
    System.out.println("A squared star = " + aSquaredStar + " p = " + test.getP());
    double p = test.getP();
    // NOTE(review): the stored 'pvalue' field is 1 - tanh(A^2*), not the test's p-value,
    // while the independence decision uses the real p-value — confirm this is intentional.
    double aa2 = 1 - tanh(aSquaredStar);
    boolean independent = p > alpha;
    this.pvalue = aa2;
    if (verbose) {
        if (independent) {
            TetradLogger.getInstance().log("independencies", SearchLogUtils.independenceFactMsg(xVar, yVar, zList, 0.));
        } else {
            TetradLogger.getInstance().log("dependencies", SearchLogUtils.dependenceFactMsg(xVar, yVar, zList, 0.));
        }
    }
    return independent;
}
Usage of org.apache.commons.math3.distribution.RealDistribution in the GDSC-SMLM project (by aherbert): class CreateData, method createPhotonDistribution (updated version).
/**
 * Creates the photon distribution.
 *
 * @return A photon distribution loaded from a file of floating-point values with the specified
 *         population mean, or null when no distribution is needed.
 */
private RealDistribution createPhotonDistribution() {
    if (PHOTON_DISTRIBUTION[PHOTON_CUSTOM].equals(settings.getPhotonDistribution())) {
        // Ask the user for the file containing the raw photon values.
        final String chosenFile = ImageJUtils.getFilename("Photon_distribution", settings.getPhotonDistributionFile());
        if (chosenFile != null) {
            settings.setPhotonDistributionFile(chosenFile);
            try (BufferedReader reader = new BufferedReader(new UnicodeReader(new FileInputStream(new File(settings.getPhotonDistributionFile())), null))) {
                final StoredDataStatistics sample = new StoredDataStatistics();
                // One floating-point value per line.
                for (String line = reader.readLine(); line != null; line = reader.readLine()) {
                    sample.add(Double.parseDouble(line));
                }
                if (sample.getSum() > 0) {
                    // Rescale the sample so its mean matches the configured photon rate.
                    final double rescale = settings.getPhotonsPerSecond() / sample.getMean();
                    final double[] data = sample.getValues();
                    for (int i = 0; i < data.length; i++) {
                        data[i] *= rescale;
                    }
                    // TODO - Investigate the limits of this distribution.
                    // How far above and below the input data will values be generated.
                    // Use the recommended bin count (n/10) for the empirical distribution.
                    final EmpiricalDistribution empirical = new EmpiricalDistribution(sample.getN() / 10, new RandomGeneratorAdapter(createRandomGenerator()));
                    empirical.load(data);
                    return empirical;
                }
            } catch (final IOException | NullArgumentException | NumberFormatException ex) {
                // Ignore: fall through and report the failure below.
            }
        }
        ImageJUtils.log("Failed to load custom photon distribution from file: %s. Default to fixed.", settings.getPhotonDistributionFile());
    } else if (PHOTON_DISTRIBUTION[PHOTON_UNIFORM].equals(settings.getPhotonDistribution())) {
        // Uniform requires a non-empty range.
        if (settings.getPhotonsPerSecond() < settings.getPhotonsPerSecondMaximum()) {
            return new UniformRealDistribution(new RandomGeneratorAdapter(createRandomGenerator()), settings.getPhotonsPerSecond(), settings.getPhotonsPerSecondMaximum());
        }
    } else if (PHOTON_DISTRIBUTION[PHOTON_GAMMA].equals(settings.getPhotonDistribution())) {
        // Gamma parameterised so that mean = shape * scale = photonsPerSecond.
        final double gammaScale = settings.getPhotonsPerSecond() / settings.getPhotonShape();
        return new GammaDistribution(new RandomGeneratorAdapter(createRandomGenerator()), settings.getPhotonShape(), gammaScale, ExponentialDistribution.DEFAULT_INVERSE_ABSOLUTE_ACCURACY);
    } else if (PHOTON_DISTRIBUTION[PHOTON_CORRELATED].equals(settings.getPhotonDistribution())) {
        // No distribution required
        return null;
    }
    // Anything unrecognised (or a failed custom load) falls back to the fixed distribution.
    settings.setPhotonDistribution(PHOTON_DISTRIBUTION[PHOTON_FIXED]);
    return null;
}
Aggregations