Example usage of org.apache.commons.math3.random.Well19937c in project GDSC-SMLM by aherbert: class LVMGradientProcedureTest, method gradientProcedureIsNotSlowerThanGradientCalculator.
/**
 * Speed test: the LVMGradientProcedure must not be significantly slower than the
 * legacy GradientCalculator for the same linearised-gradient computation.
 *
 * @param nparams number of gradient parameters in the fake function
 * @param mle     true to time the MLE variant, false for least-squares
 */
private void gradientProcedureIsNotSlowerThanGradientCalculator(final int nparams, final boolean mle) {
    // Speed tests are opt-in; skip unless explicitly enabled.
    org.junit.Assume.assumeTrue(speedTests || TestSettings.RUN_SPEED_TESTS);

    final int iter = 1000;
    rdg = new RandomDataGenerator(new Well19937c(30051977));

    final double[][] alpha = new double[nparams][nparams];
    final double[] beta = new double[nparams];
    final ArrayList<double[]> paramsList = new ArrayList<double[]>(iter);
    final ArrayList<double[]> yList = new ArrayList<double[]>(iter);
    final int n = createFakeData(nparams, iter, paramsList, yList).length;
    final FakeGradientFunction func = new FakeGradientFunction(blockWidth, nparams);

    // Warm up both implementations before any timing is taken.
    final GradientCalculator warmupCalc = GradientCalculatorFactory.newCalculator(nparams, mle);
    for (int i = 0; i < paramsList.size(); i++) {
        warmupCalc.findLinearised(n, yList.get(i), paramsList.get(i), alpha, beta, func);
    }
    final Type type = (mle) ? Type.MLE : Type.LSQ;
    for (int i = 0; i < paramsList.size(); i++) {
        final LVMGradientProcedure warmup = LVMGradientProcedureFactory.create(yList.get(i), func, type);
        warmup.gradient(paramsList.get(i));
    }

    // Realistic loops for an optimisation
    final int loops = 15;

    // Run till stable timing
    final Timer t1 = new Timer() {
        @Override
        void run() {
            for (int i = 0, k = 0; i < iter; i++) {
                final GradientCalculator calc = GradientCalculatorFactory.newCalculator(nparams, mle);
                for (int j = loops; j-- > 0;) {
                    calc.findLinearised(n, yList.get(i), paramsList.get(k++ % iter), alpha, beta, func);
                }
            }
        }
    };
    final long time1 = t1.getTime();

    // Re-use the stabilised loop count so both timings do equal work.
    final Timer t2 = new Timer(t1.loops) {
        @Override
        void run() {
            for (int i = 0, k = 0; i < iter; i++) {
                final LVMGradientProcedure p = LVMGradientProcedureFactory.create(yList.get(i), func, type);
                for (int j = loops; j-- > 0;) {
                    p.gradient(paramsList.get(k++ % iter));
                }
            }
        }
    };
    final long time2 = t2.getTime();

    log("GradientCalculator = %d : LVMGradientProcedure %d %b = %d : %fx\n", time1, nparams, mle, time2, (1.0 * time1) / time2);
    if (TestSettings.ASSERT_SPEED_TESTS) {
        // Add contingency
        Assert.assertTrue(time2 < time1 * 1.5);
    }
}
Example usage of org.apache.commons.math3.random.Well19937c in project GDSC-SMLM by aherbert: class LVMGradientProcedureTest, method gradientProcedureComputesGradient.
/**
 * Checks the analytic gradient (beta) computed by the LVMGradientProcedure against a
 * central finite-difference approximation of the value function.
 *
 * @param func        the Gaussian function under test
 * @param type        the procedure type (LSQ/MLE/...)
 * @param precomputed true to wrap the function with a pre-computed background
 */
private void gradientProcedureComputesGradient(ErfGaussian2DFunction func, Type type, boolean precomputed) {
    final int nparams = func.getNumberOfGradients();
    final int[] indices = func.gradientIndices();

    final int iter = 100;
    rdg = new RandomDataGenerator(new Well19937c(30051977));
    final ArrayList<double[]> paramsList = new ArrayList<double[]>(iter);
    final ArrayList<double[]> yList = new ArrayList<double[]>(iter);
    createData(1, iter, paramsList, yList, true);

    // Relative step size for the finite difference and tolerance for the comparison.
    final double delta = 1e-3;
    final DoubleEquality eq = new DoubleEquality(1e-3, 1e-3);

    final double[] b = (precomputed) ? new double[func.size()] : null;

    for (int i = 0; i < paramsList.size(); i++) {
        final double[] y = yList.get(i);
        final double[] a = paramsList.get(i);
        final double[] a2 = a.clone();

        final LVMGradientProcedure p;
        if (precomputed) {
            // Mock fitting part of the function already
            for (int j = 0; j < b.length; j++) {
                b[j] = y[j] * 0.5;
            }
            p = LVMGradientProcedureFactory.create(y, PrecomputedGradient1Function.wrapGradient1Function(func, b), type);
        } else {
            p = LVMGradientProcedureFactory.create(y, func, type);
        }

        p.gradient(a);
        final double[] analytic = p.beta.clone();

        for (int j = 0; j < nparams; j++) {
            final int k = indices[j];
            // Pick a step that is exactly representable around a[k].
            final double d = Precision.representableDelta(a[k], (a[k] == 0) ? 1e-3 : a[k] * delta);
            a2[k] = a[k] + d;
            p.value(a2);
            final double upper = p.value;
            a2[k] = a[k] - d;
            p.value(a2);
            final double lower = p.value;
            a2[k] = a[k];

            // Apply a factor of -2 to compute the actual gradients:
            // See Numerical Recipes in C++, 2nd Ed. Equation 15.5.6 for Nonlinear Models
            analytic[j] *= -2;
            final double numeric = (upper - lower) / (2 * d);
            Assert.assertTrue("Not same gradient @ " + j, eq.almostEqualRelativeOrAbsolute(analytic[j], numeric));
        }
    }
}
Example usage of org.apache.commons.math3.random.Well19937c in project GDSC-SMLM by aherbert: class LVMGradientProcedureTest, method gradientProcedureUnrolledComputesSameAsGradientProcedure.
/**
 * Checks an unrolled (size-specialised) procedure produces exactly the same value,
 * beta vector and alpha matrix as the generic factory-created procedure.
 *
 * @param nparams     number of gradient parameters
 * @param type        the procedure type (LSQ/MLE/...)
 * @param precomputed true to wrap the function with a pre-computed baseline
 */
private void gradientProcedureUnrolledComputesSameAsGradientProcedure(int nparams, Type type, boolean precomputed) {
    final int iter = 10;
    rdg = new RandomDataGenerator(new Well19937c(30051977));

    final ArrayList<double[]> paramsList = new ArrayList<double[]>(iter);
    final ArrayList<double[]> yList = new ArrayList<double[]>(iter);
    createFakeData(nparams, iter, paramsList, yList);
    Gradient1Function func = new FakeGradientFunction(blockWidth, nparams);
    if (precomputed) {
        final double[] b = Utils.newArray(func.size(), 0.1, 1.3);
        func = PrecomputedGradient1Function.wrapGradient1Function(func, b);
    }

    // BUG FIX: the original used "%b" which formats ANY non-null reference as "true",
    // so the assertion label never showed the Type. "%s" prints the enum name.
    final String name = String.format("[%d] %s", nparams, type);

    for (int i = 0; i < paramsList.size(); i++) {
        final LVMGradientProcedure p1 = createProcedure(type, yList.get(i), func);
        p1.gradient(paramsList.get(i));
        final LVMGradientProcedure p2 = LVMGradientProcedureFactory.create(yList.get(i), func, type);
        p2.gradient(paramsList.get(i));

        // Exactly the same ...
        Assert.assertEquals(name + " Result: Not same @ " + i, p1.value, p2.value, 0);
        Assert.assertArrayEquals(name + " Observations: Not same beta @ " + i, p1.beta, p2.beta, 0);
        Assert.assertArrayEquals(name + " Observations: Not same alpha @ " + i, p1.getAlphaLinear(), p2.getAlphaLinear(), 0);
        final double[][] am1 = p1.getAlphaMatrix();
        final double[][] am2 = p2.getAlphaMatrix();
        for (int j = 0; j < nparams; j++) {
            Assert.assertArrayEquals(name + " Observations: Not same alpha @ " + i, am1[j], am2[j], 0);
        }
    }
}
Example usage of org.apache.commons.math3.random.Well19937c in project GDSC-SMLM by aherbert: class PoissonGradientProcedureTest, method gradientProcedureIsNotSlowerThanGradientCalculator.
/**
 * Speed test: the PoissonGradientProcedure must not be significantly slower than the
 * legacy GradientCalculator when computing the Fisher information matrix.
 *
 * @param nparams number of gradient parameters in the fake function
 */
private void gradientProcedureIsNotSlowerThanGradientCalculator(final int nparams) {
    // Speed tests are opt-in; skip unless explicitly enabled.
    org.junit.Assume.assumeTrue(speedTests || TestSettings.RUN_SPEED_TESTS);

    final int iter = 1000;
    rdg = new RandomDataGenerator(new Well19937c(30051977));
    final ArrayList<double[]> paramsList = new ArrayList<double[]>(iter);
    createFakeParams(nparams, iter, paramsList);

    final int n = blockWidth * blockWidth;
    final FakeGradientFunction func = new FakeGradientFunction(blockWidth, nparams);

    // Warm up both implementations before any timing is taken.
    final GradientCalculator warmupCalc = GradientCalculatorFactory.newCalculator(nparams, false);
    for (int i = 0; i < paramsList.size(); i++) {
        warmupCalc.fisherInformationMatrix(n, paramsList.get(i), func);
    }
    for (int i = 0; i < paramsList.size(); i++) {
        final PoissonGradientProcedure warmup = PoissonGradientProcedureFactory.create(func);
        warmup.computeFisherInformation(paramsList.get(i));
    }

    // Realistic loops for an optimisation
    final int loops = 15;

    // Run till stable timing
    final Timer t1 = new Timer() {
        @Override
        void run() {
            for (int i = 0, k = 0; i < iter; i++) {
                final GradientCalculator calc = GradientCalculatorFactory.newCalculator(nparams, false);
                for (int j = loops; j-- > 0;) {
                    calc.fisherInformationMatrix(n, paramsList.get(k++ % iter), func);
                }
            }
        }
    };
    final long time1 = t1.getTime();

    // Re-use the stabilised loop count so both timings do equal work.
    final Timer t2 = new Timer(t1.loops) {
        @Override
        void run() {
            for (int i = 0, k = 0; i < iter; i++) {
                final PoissonGradientProcedure p = PoissonGradientProcedureFactory.create(func);
                for (int j = loops; j-- > 0;) {
                    p.computeFisherInformation(paramsList.get(k++ % iter));
                }
            }
        }
    };
    final long time2 = t2.getTime();

    log("GradientCalculator = %d : PoissonGradientProcedure %d = %d : %fx\n", time1, nparams, time2, (1.0 * time1) / time2);
    if (TestSettings.ASSERT_SPEED_TESTS) {
        // Add contingency
        Assert.assertTrue(time2 < time1 * 1.5);
    }
}
Example usage of org.apache.commons.math3.random.Well19937c in project GDSC-SMLM by aherbert: class PoissonGradientProcedureTest, method crlbIsHigherWithPrecomputed.
/**
 * Checks that adding a pre-computed background to the function raises the
 * Cramér–Rao lower bound for every parameter (extra background = less information).
 */
@Test
public void crlbIsHigherWithPrecomputed() {
    final int iter = 10;
    rdg = new RandomDataGenerator(new Well19937c(30051977));

    final ErfGaussian2DFunction func = (ErfGaussian2DFunction) GaussianFunctionFactory.create2D(1, 10, 10, GaussianFunctionFactory.FIT_ERF_FREE_CIRCLE, null);
    final double[] a = new double[7];
    final int n = func.getNumberOfGradients();

    // Get a background
    // NOTE: the RNG draw order below must stay fixed to keep the test deterministic.
    final double[] b = new double[func.size()];
    for (int i = 0; i < b.length; i++) {
        b[i] = rdg.nextUniform(1, 2);
    }

    for (int i = 0; i < iter; i++) {
        a[Gaussian2DFunction.BACKGROUND] = rdg.nextUniform(0.1, 0.3);
        a[Gaussian2DFunction.SIGNAL] = rdg.nextUniform(100, 300);
        a[Gaussian2DFunction.X_POSITION] = rdg.nextUniform(4, 6);
        a[Gaussian2DFunction.Y_POSITION] = rdg.nextUniform(4, 6);
        a[Gaussian2DFunction.X_SD] = rdg.nextUniform(1, 1.3);
        a[Gaussian2DFunction.Y_SD] = rdg.nextUniform(1, 1.3);

        final PoissonGradientProcedure p1 = PoissonGradientProcedureFactory.create(func);
        p1.computeFisherInformation(a);
        final PoissonGradientProcedure p2 = PoissonGradientProcedureFactory.create(PrecomputedGradient1Function.wrapGradient1Function(func, b));
        p2.computeFisherInformation(a);

        final double[] crlb1 = new FisherInformationMatrix(p1.getLinear(), n).crlb();
        final double[] crlb2 = new FisherInformationMatrix(p2.getLinear(), n).crlb();
        Assert.assertNotNull(crlb1);
        Assert.assertNotNull(crlb2);
        // The precomputed-background bound must be strictly higher per parameter.
        for (int j = 0; j < n; j++) {
            Assert.assertTrue(crlb1[j] < crlb2[j]);
        }
    }
}
Aggregations