Use of org.apache.commons.math3.random.Well19937c in project GDSC-SMLM by aherbert.
From the class FastMLEGradient2ProcedureTest, method gradientProcedureIsNotSlowerThanGradientCalculator:
private void gradientProcedureIsNotSlowerThanGradientCalculator(final int nparams) {
    org.junit.Assume.assumeTrue(speedTests || TestSettings.RUN_SPEED_TESTS);

    final int iter = 1000;
    rdg = new RandomDataGenerator(new Well19937c(30051977));

    final ArrayList<double[]> paramsList = new ArrayList<double[]>(iter);
    final ArrayList<double[]> yList = new ArrayList<double[]>(iter);

    createFakeData(nparams, iter, paramsList, yList);
    final FakeGradientFunction func = new FakeGradientFunction(blockWidth, nparams);

    // Warm up both implementations before timing
    MLEGradientCalculator calc = (MLEGradientCalculator) GradientCalculatorFactory.newCalculator(nparams, true);
    for (int i = 0; i < paramsList.size(); i++)
        calc.logLikelihood(yList.get(i), paramsList.get(i), func);
    for (int i = 0; i < paramsList.size(); i++) {
        FastMLEGradient2Procedure p = FastMLEGradient2ProcedureFactory.createUnrolled(yList.get(i), func);
        p.computeLogLikelihood(paramsList.get(i));
    }

    // Realistic loops for an optimisation
    final int loops = 15;

    // Run until the timing is stable
    Timer t1 = new Timer() {
        @Override
        void run() {
            for (int i = 0, k = 0; i < iter; i++) {
                MLEGradientCalculator calc = (MLEGradientCalculator) GradientCalculatorFactory.newCalculator(nparams, true);
                for (int j = loops; j-- > 0;)
                    calc.logLikelihood(yList.get(i), paramsList.get(k++ % iter), func);
            }
        }
    };
    long time1 = t1.getTime();

    Timer t2 = new Timer(t1.loops) {
        @Override
        void run() {
            for (int i = 0, k = 0; i < iter; i++) {
                FastMLEGradient2Procedure p = FastMLEGradient2ProcedureFactory.createUnrolled(yList.get(i), func);
                for (int j = loops; j-- > 0;)
                    p.computeLogLikelihood(paramsList.get(k++ % iter));
            }
        }
    };
    long time2 = t2.getTime();

    log("GradientCalculator = %d : FastMLEGradient2Procedure %d = %d : %fx\n", time1, nparams, time2, (1.0 * time1) / time2);
    if (TestSettings.ASSERT_SPEED_TESTS) {
        // Add contingency
        Assert.assertTrue(time2 < time1 * 1.5);
    }
}
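The test above uses the project's Timer helper and TestSettings flags to warm up both implementations and then compare their running times with a 1.5x contingency. As a rough stand-in, the following is a minimal sketch of the same warm-up-then-measure pattern using only the JDK; the class and method names (SimpleSpeedCheck, time, reference, candidate) are hypothetical and are not part of GDSC-SMLM.

// Illustrative only: a minimal warm-up-then-measure timing pattern, similar in spirit to
// the project's Timer helper used above. All names here are hypothetical, not GDSC-SMLM API.
public class SimpleSpeedCheck {
    // Time a task over several loops and return the best (fastest) elapsed time in nanoseconds.
    static long time(Runnable task, int loops) {
        long best = Long.MAX_VALUE;
        for (int i = 0; i < loops; i++) {
            long start = System.nanoTime();
            task.run();
            best = Math.min(best, System.nanoTime() - start);
        }
        return best;
    }

    public static void main(String[] args) {
        Runnable reference = () -> Math.log(Math.random() + 1);  // stand-in for the old implementation
        Runnable candidate = () -> Math.log1p(Math.random());    // stand-in for the new implementation

        // Warm up both tasks so the JIT has compiled the hot paths before measuring
        time(reference, 1000);
        time(candidate, 1000);

        long t1 = time(reference, 15);
        long t2 = time(candidate, 15);
        System.out.printf("reference = %d ns : candidate = %d ns : %fx%n", t1, t2, (1.0 * t1) / t2);
        // As in the test above, a 1.5x contingency could be allowed before declaring the candidate slower.
    }
}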
Use of org.apache.commons.math3.random.Well19937c in project GDSC-SMLM by aherbert.
From the class FastMLEGradient2ProcedureTest, method gradientCalculatorComputesGradient:
private void gradientCalculatorComputesGradient(ErfGaussian2DFunction func) {
    // Check the first and second derivatives
    int nparams = func.getNumberOfGradients();
    int[] indices = func.gradientIndices();

    int iter = 100;
    rdg = new RandomDataGenerator(new Well19937c(30051977));

    ArrayList<double[]> paramsList = new ArrayList<double[]>(iter);
    ArrayList<double[]> yList = new ArrayList<double[]>(iter);

    createData(1, iter, paramsList, yList, true);

    double delta = 1e-5;
    DoubleEquality eq = new DoubleEquality(1e-4, 1e-3);

    for (int i = 0; i < paramsList.size(); i++) {
        double[] y = yList.get(i);
        double[] a = paramsList.get(i);
        double[] a2 = a.clone();
        FastMLEGradient2Procedure p = FastMLEGradient2ProcedureFactory.create(y, func);
        //double ll = p.computeLogLikelihood(a);
        p.computeSecondDerivative(a);
        double[] d1 = p.d1.clone();
        double[] d2 = p.d2.clone();
        for (int j = 0; j < nparams; j++) {
            int k = indices[j];
            double d = Precision.representableDelta(a[k], (a[k] == 0) ? delta : a[k] * delta);
            a2[k] = a[k] + d;
            double llh = p.computeLogLikelihood(a2);
            p.computeFirstDerivative(a2);
            double[] d1h = p.d1.clone();
            a2[k] = a[k] - d;
            double lll = p.computeLogLikelihood(a2);
            p.computeFirstDerivative(a2);
            double[] d1l = p.d1.clone();
            a2[k] = a[k];
            double gradient1 = (llh - lll) / (2 * d);
            double gradient2 = (d1h[j] - d1l[j]) / (2 * d);
            //System.out.printf("[%d,%d] ll - %f (%s %f+/-%f) d1 %f ?= %f : d2 %f ?= %f\n", i, k, ll, func.getName(k), a[k], d,
            //        gradient1, d1[j], gradient2, d2[j]);
            Assert.assertTrue("Not same gradient1 @ " + j, eq.almostEqualRelativeOrAbsolute(gradient1, d1[j]));
            Assert.assertTrue("Not same gradient2 @ " + j, eq.almostEqualRelativeOrAbsolute(gradient2, d2[j]));
        }
    }
}
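The idea being tested is central finite differencing: the analytic first derivative is compared against (ll(a+d) - ll(a-d)) / (2d), and the analytic second derivative against the same central difference applied to the first derivative, with Precision.representableDelta choosing a step that is exactly representable around the parameter value. The sketch below applies the same check to a toy 1D function; f and df are made-up functions, not GDSC-SMLM API, and only the commons-math3 Precision call is shared with the test.

import org.apache.commons.math3.util.Precision;

// Illustrative only: central-difference check of an analytic derivative for a toy 1D function.
// The project test applies the same idea to the Poisson log-likelihood and its gradient,
// one parameter at a time.
public class CentralDifferenceCheck {
    static double f(double x) {
        return Math.exp(-x) + x * x; // toy function
    }

    static double df(double x) {
        return -Math.exp(-x) + 2 * x; // analytic derivative to verify
    }

    public static void main(String[] args) {
        double x = 1.5;
        double delta = 1e-5;
        // Choose a step so that x + d and x - d are exactly representable doubles
        double d = Precision.representableDelta(x, (x == 0) ? delta : x * delta);
        double numeric = (f(x + d) - f(x - d)) / (2 * d);
        double analytic = df(x);
        double relativeError = Math.abs(numeric - analytic)
                / Math.max(Math.abs(numeric), Math.abs(analytic));
        System.out.printf("numeric=%g analytic=%g relative error=%g%n", numeric, analytic, relativeError);
    }
}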
Use of org.apache.commons.math3.random.Well19937c in project GDSC-SMLM by aherbert.
From the class FastMLEGradient2ProcedureTest, method gradientProcedureComputesSameWithPrecomputed:
@Test
public void gradientProcedureComputesSameWithPrecomputed() {
    int iter = 10;
    rdg = new RandomDataGenerator(new Well19937c(30051977));

    ErfGaussian2DFunction f1 = (ErfGaussian2DFunction) GaussianFunctionFactory.create2D(1, 10, 10,
            GaussianFunctionFactory.FIT_ERF_FREE_CIRCLE, null);
    ErfGaussian2DFunction f2 = (ErfGaussian2DFunction) GaussianFunctionFactory.create2D(2, 10, 10,
            GaussianFunctionFactory.FIT_ERF_FREE_CIRCLE, null);

    double[] a1 = new double[7];
    double[] a2 = new double[13];

    final double[] x = new double[f1.size()];
    final double[] b = new double[f1.size()];

    for (int i = 0; i < iter; i++) {
        a2[Gaussian2DFunction.BACKGROUND] = rdg.nextUniform(0.1, 0.3);
        a2[Gaussian2DFunction.SIGNAL] = rdg.nextUniform(100, 300);
        a2[Gaussian2DFunction.X_POSITION] = rdg.nextUniform(3, 5);
        a2[Gaussian2DFunction.Y_POSITION] = rdg.nextUniform(3, 5);
        a2[Gaussian2DFunction.X_SD] = rdg.nextUniform(1, 1.3);
        a2[Gaussian2DFunction.Y_SD] = rdg.nextUniform(1, 1.3);
        a2[6 + Gaussian2DFunction.SIGNAL] = rdg.nextUniform(100, 300);
        a2[6 + Gaussian2DFunction.X_POSITION] = rdg.nextUniform(5, 7);
        a2[6 + Gaussian2DFunction.Y_POSITION] = rdg.nextUniform(5, 7);
        a2[6 + Gaussian2DFunction.X_SD] = rdg.nextUniform(1, 1.3);
        a2[6 + Gaussian2DFunction.Y_SD] = rdg.nextUniform(1, 1.3);

        // Simulate Poisson data
        f2.initialise0(a2);
        f1.forEach(new ValueProcedure() {
            int k = 0;

            public void execute(double value) {
                x[k++] = (value > 0) ? rdg.nextPoisson(value) : 0;
            }
        });

        // Precompute peak 2 (no background)
        a1[Gaussian2DFunction.BACKGROUND] = 0;
        for (int j = 1; j < 7; j++)
            a1[j] = a2[6 + j];
        f1.initialise0(a1);
        f1.forEach(new ValueProcedure() {
            int k = 0;

            public void execute(double value) {
                b[k++] = value;
            }
        });

        // Reset to peak 1
        for (int j = 0; j < 7; j++)
            a1[j] = a2[j];

        // Compute peak 1+2
        FastMLEGradient2Procedure p12 = FastMLEGradient2ProcedureFactory.create(x, f2);
        p12.computeSecondDerivative(a2);
        double[] d11 = Arrays.copyOf(p12.d1, f1.getNumberOfGradients());
        double[] d21 = Arrays.copyOf(p12.d2, f1.getNumberOfGradients());

        // Compute peak 1 + (precomputed peak 2)
        FastMLEGradient2Procedure p1b2 = FastMLEGradient2ProcedureFactory.create(x,
                PrecomputedGradient2Function.wrapGradient2Function(f1, b));
        p1b2.computeSecondDerivative(a1);
        double[] d12 = p1b2.d1;
        double[] d22 = p1b2.d2;

        Assert.assertArrayEquals(" Result: Not same @ " + i, p12.u, p1b2.u, 1e-10);
        Assert.assertArrayEquals(" D1: Not same @ " + i, d11, d12, 1e-10);
        Assert.assertArrayEquals(" D2: Not same @ " + i, d21, d22, 1e-10);

        double[] v1 = p12.computeValue(a2);
        double[] v2 = p1b2.computeValue(a1);
        Assert.assertArrayEquals(" Value: Not same @ " + i, v1, v2, 1e-10);

        double[] d1 = Arrays.copyOf(p12.computeFirstDerivative(a2), f1.getNumberOfGradients());
        double[] d2 = p1b2.computeFirstDerivative(a1);
        Assert.assertArrayEquals(" 1st derivative: Not same @ " + i, d1, d2, 1e-10);
    }
}
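All of these tests seed the Well19937c generator with the fixed value 30051977 so that RandomDataGenerator produces the same uniform parameters and Poisson counts on every run, making the assertions reproducible. The following is a minimal, self-contained sketch of that simulation pattern using only commons-math3; the expected-value model here (a constant background plus one bump) is a toy stand-in, not the Gaussian 2D functions used above.

import org.apache.commons.math3.random.RandomDataGenerator;
import org.apache.commons.math3.random.Well19937c;

// Illustrative only: reproducible Poisson data simulation with a fixed-seed Well19937c,
// the same pattern used throughout these tests. The model below is a toy example.
public class PoissonSimulation {
    public static void main(String[] args) {
        RandomDataGenerator rdg = new RandomDataGenerator(new Well19937c(30051977));
        int n = 100;
        double[] expected = new double[n];
        double[] data = new double[n];
        double background = rdg.nextUniform(0.1, 0.3);
        for (int i = 0; i < n; i++) {
            // Toy model: background plus a Gaussian-shaped signal centred at n/2
            expected[i] = background + 200 * Math.exp(-Math.pow(i - n / 2.0, 2) / (2 * 1.2 * 1.2));
            // Poisson counts are only defined for a positive mean
            data[i] = (expected[i] > 0) ? rdg.nextPoisson(expected[i]) : 0;
        }
        System.out.printf("first count = %.0f, last count = %.0f%n", data[0], data[n - 1]);
    }
}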
Use of org.apache.commons.math3.random.Well19937c in project GDSC-SMLM by aherbert.
From the class PoissonCalculatorTest, method cannotSubtractConstantBackgroundAndComputeLogLikelihoodRatio:
private void cannotSubtractConstantBackgroundAndComputeLogLikelihoodRatio(BaseNonLinearFunction nlf1,
        BaseNonLinearFunction nlf2, BaseNonLinearFunction nlf3) {
    //System.out.println(nlf1.name);

    int n = maxx * maxx;

    double[] a = new double[] { 1 };

    // Simulate Poisson process of 3 combined functions
    nlf1.initialise(a);
    nlf2.initialise(a);
    nlf3.initialise(a);
    RandomDataGenerator rdg = new RandomDataGenerator(new Well19937c(30051977));
    double[] x = Utils.newArray(n, 0, 1.0);
    double[] u = new double[x.length];
    double[] b1 = new double[x.length];
    double[] b2 = new double[x.length];
    double[] b3 = new double[x.length];
    for (int i = 0; i < n; i++) {
        b1[i] = nlf1.eval(i);
        b2[i] = nlf2.eval(i);
        b3[i] = nlf3.eval(i);
        u[i] = b1[i] + b2[i] + b3[i];
        if (u[i] > 0)
            x[i] = rdg.nextPoisson(u[i]);
    }

    // x is the target data
    // b1 is the already computed background
    // b2 is the first function to add to the model
    // b3 is the second function to add to the model

    // Compute the LLR of adding b3 to b2 given we already have b1 modelling data x
    double[] b12 = add(b1, b2);
    double ll1a = PoissonCalculator.logLikelihood(b12, x);
    double ll2a = PoissonCalculator.logLikelihood(add(b12, b3), x);
    double llra = -2 * (ll1a - ll2a);
    //System.out.printf("x|(a+b+c) ll1=%f, ll2=%f, llra=%f\n", ll1a, ll2a, llra);

    // Compute the LLR of adding b3 to b2 given we already have x minus b1
    x = subtract(x, b1);
    double ll1b = PoissonCalculator.logLikelihood(b2, x);
    double ll2b = PoissonCalculator.logLikelihood(add(b2, b3), x);
    double llrb = -2 * (ll1b - ll2b);
    //System.out.printf("x-a|(b+c) : ll1=%f, ll2=%f, llrb=%f\n", ll1b, ll2b, llrb);
    //System.out.printf("llr=%f (%g), llr2=%f (%g)\n", llra, PoissonCalculator.computePValue(llra, 1), llrb,
    //        PoissonCalculator.computePValue(llrb, 1));

    Assert.assertNotEquals("Log-likelihood ratio", llra, llrb, llra * 1e-10);
}
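The Poisson log-likelihood of a model u for counts x is log L(u | x) = sum_i [ x_i * ln(u_i) - u_i - ln(x_i!) ], and the likelihood ratio for nested models is LLR = -2 * (ll(simple) - ll(rich)). Because the x_i * ln(u_i) and ln(x_i!) terms are nonlinear in the data, subtracting a constant background from both the data and the model does not leave the ratio unchanged, which is exactly what the assertNotEquals above demonstrates. The sketch below computes such a ratio directly with plain Java and commons-math3's Gamma.logGamma for ln(x!); it is not the project's PoissonCalculator, and the example counts and means are made up.

import org.apache.commons.math3.special.Gamma;

// Illustrative only: a Poisson log-likelihood ratio for nested models, using
// ln(x!) = logGamma(x + 1). A plain sketch of the standard formula, not GDSC-SMLM API.
public class PoissonLogLikelihoodRatio {
    // log L(u | x) = sum_i [ x_i * ln(u_i) - u_i - ln(x_i!) ]
    static double logLikelihood(double[] u, double[] x) {
        double ll = 0;
        for (int i = 0; i < x.length; i++)
            ll += x[i] * Math.log(u[i]) - u[i] - Gamma.logGamma(x[i] + 1);
        return ll;
    }

    public static void main(String[] args) {
        double[] x = { 4, 7, 12, 6 };           // observed counts
        double[] u1 = { 3.5, 6.0, 10.0, 5.0 };  // simpler (nested) model
        double[] u2 = { 4.2, 6.8, 11.5, 5.9 };  // richer model
        // LLR = -2 * (ll(simple) - ll(rich)); larger values favour the richer model
        double llr = -2 * (logLikelihood(u1, x) - logLikelihood(u2, x));
        System.out.printf("llr = %f%n", llr);
    }
}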
Use of org.apache.commons.math3.random.Well19937c in project GDSC-SMLM by aherbert.
From the class PoissonGradientProcedureTest, method gradientProcedureUnrolledComputesSameAsGradientProcedure:
private void gradientProcedureUnrolledComputesSameAsGradientProcedure(int nparams, boolean precomputed) {
    int iter = 10;
    rdg = new RandomDataGenerator(new Well19937c(30051977));

    ArrayList<double[]> paramsList = new ArrayList<double[]>(iter);

    createFakeParams(nparams, iter, paramsList);
    Gradient1Function func = new FakeGradientFunction(blockWidth, nparams);
    if (precomputed)
        func = PrecomputedGradient1Function.wrapGradient1Function(func, Utils.newArray(func.size(), 0.1, 1.3));

    String name = String.format("[%d]", nparams);
    for (int i = 0; i < paramsList.size(); i++) {
        PoissonGradientProcedure p1 = new PoissonGradientProcedure(func);
        p1.computeFisherInformation(paramsList.get(i));

        PoissonGradientProcedure p2 = PoissonGradientProcedureFactory.create(func);
        p2.computeFisherInformation(paramsList.get(i));

        // Exactly the same ...
        Assert.assertArrayEquals(name + " Observations: Not same alpha @ " + i, p1.getLinear(), p2.getLinear(), 0);

        double[][] am1 = p1.getMatrix();
        double[][] am2 = p2.getMatrix();
        for (int j = 0; j < nparams; j++)
            Assert.assertArrayEquals(name + " Observations: Not same alpha @ " + i, am1[j], am2[j], 0);
    }
}
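The quantity both procedures compute is the Fisher information matrix for a Poisson model, whose (j, k) entry is I[j][k] = sum_i (1/u_i) * (du_i/da_j) * (du_i/da_k); the test asserts the unrolled factory version matches the generic one exactly (delta 0). The sketch below evaluates that formula from precomputed values and gradients; the class name, method name, and toy arrays are hypothetical and not part of GDSC-SMLM.

// Illustrative only: the Poisson Fisher information matrix,
// I[j][k] = sum_i (1/u_i) * (du_i/da_j) * (du_i/da_k),
// computed from model values u_i and their gradients. The arrays here are toy data;
// the test above obtains them from FakeGradientFunction via the procedure classes.
public class PoissonFisherInformation {
    static double[][] fisherInformation(double[] u, double[][] dudA) {
        int nparams = dudA[0].length;
        double[][] I = new double[nparams][nparams];
        for (int i = 0; i < u.length; i++) {
            double w = 1.0 / u[i];
            for (int j = 0; j < nparams; j++)
                for (int k = 0; k <= j; k++)
                    I[j][k] += w * dudA[i][j] * dudA[i][k];
        }
        // Mirror the lower triangle into the upper triangle (the matrix is symmetric)
        for (int j = 0; j < nparams; j++)
            for (int k = j + 1; k < nparams; k++)
                I[j][k] = I[k][j];
        return I;
    }

    public static void main(String[] args) {
        double[] u = { 2.0, 3.5, 5.0 };                          // model values per observation
        double[][] dudA = { { 1, 0.2 }, { 1, 0.5 }, { 1, 0.9 } }; // d u_i / d a_j
        double[][] I = fisherInformation(u, dudA);
        System.out.printf("I[0][0]=%f I[0][1]=%f I[1][1]=%f%n", I[0][0], I[0][1], I[1][1]);
    }
}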