Use of org.apache.commons.math3.random.RandomDataGenerator in project GDSC-SMLM by aherbert.
The class PoissonGradientProcedureTest, method gradientProcedureComputesSameAsGradientCalculator.
private void gradientProcedureComputesSameAsGradientCalculator(int nparams) {
    int iter = 10;
    rdg = new RandomDataGenerator(new Well19937c(30051977));
    ArrayList<double[]> paramsList = new ArrayList<double[]>(iter);
    createFakeParams(nparams, iter, paramsList);
    int n = blockWidth * blockWidth;
    FakeGradientFunction func = new FakeGradientFunction(blockWidth, nparams);
    GradientCalculator calc = GradientCalculatorFactory.newCalculator(nparams, false);
    String name = String.format("[%d]", nparams);
    for (int i = 0; i < paramsList.size(); i++) {
        PoissonGradientProcedure p = PoissonGradientProcedureFactory.create(func);
        p.computeFisherInformation(paramsList.get(i));
        double[][] m = calc.fisherInformationMatrix(n, paramsList.get(i), func);
        // Exactly the same ...
        double[] al = p.getLinear();
        Assert.assertArrayEquals(name + " Observations: Not same alpha @ " + i, al, new DenseMatrix64F(m).data, 0);
        double[][] am = p.getMatrix();
        for (int j = 0; j < nparams; j++)
            Assert.assertArrayEquals(name + " Observations: Not same alpha @ " + i, am[j], m[j], 0);
    }
}
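The helper createFakeParams used above is defined elsewhere in the test class and is not reproduced on this page. As an illustration of the seeded, reproducible parameter generation it relies on, a minimal stand-alone sketch using the same RandomDataGenerator/Well19937c combination might look like the following (the class name, parameter range and sizes are illustrative assumptions, not the project's code):

import java.util.ArrayList;
import java.util.Arrays;
import org.apache.commons.math3.random.RandomDataGenerator;
import org.apache.commons.math3.random.Well19937c;

public class FakeParamsSketch {
    public static void main(String[] args) {
        // A fixed seed means every run of the test sees the same "random" parameters
        RandomDataGenerator rdg = new RandomDataGenerator(new Well19937c(30051977));
        int nparams = 4, iter = 10;
        ArrayList<double[]> paramsList = new ArrayList<double[]>(iter);
        for (int i = 0; i < iter; i++) {
            double[] params = new double[nparams];
            for (int j = 0; j < nparams; j++)
                params[j] = rdg.nextUniform(0, 100); // arbitrary illustrative range
            paramsList.add(params);
        }
        System.out.println("First parameter set: " + Arrays.toString(paramsList.get(0)));
    }
}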
Use of org.apache.commons.math3.random.RandomDataGenerator in project GDSC-SMLM by aherbert.
The class GradientCalculatorSpeedTest, method gradientCalculatorNIsFasterThanGradientCalculator.
private void gradientCalculatorNIsFasterThanGradientCalculator(Gaussian2DFunction func, int nparams, boolean mle) {
    org.junit.Assume.assumeTrue(speedTests || TestSettings.RUN_SPEED_TESTS);
    // Check the function is the correct size
    Assert.assertEquals(nparams, func.gradientIndices().length);
    int iter = 10000;
    rdg = new RandomDataGenerator(new Well19937c(30051977));
    double[][] alpha = new double[nparams][nparams];
    double[] beta = new double[nparams];
    ArrayList<double[]> paramsList = new ArrayList<double[]>(iter);
    ArrayList<double[]> yList = new ArrayList<double[]>(iter);
    int[] x = createData(1, iter, paramsList, yList);
    GradientCalculator calc = (mle) ? new MLEGradientCalculator(beta.length) : new GradientCalculator(beta.length);
    GradientCalculator calc2 = GradientCalculatorFactory.newCalculator(nparams, mle);
    for (int i = 0; i < paramsList.size(); i++)
        calc.findLinearised(x, yList.get(i), paramsList.get(i), alpha, beta, func);
    for (int i = 0; i < paramsList.size(); i++)
        calc2.findLinearised(x, yList.get(i), paramsList.get(i), alpha, beta, func);
    long start1 = System.nanoTime();
    for (int i = 0; i < paramsList.size(); i++)
        calc.findLinearised(x, yList.get(i), paramsList.get(i), alpha, beta, func);
    start1 = System.nanoTime() - start1;
    long start2 = System.nanoTime();
    for (int i = 0; i < paramsList.size(); i++)
        calc2.findLinearised(x, yList.get(i), paramsList.get(i), alpha, beta, func);
    start2 = System.nanoTime() - start2;
    log("%sLinearised GradientCalculator = %d : GradientCalculator%d = %d : %fx\n", (mle) ? "MLE " : "", start1, nparams, start2, (1.0 * start1) / start2);
    if (TestSettings.ASSERT_SPEED_TESTS)
        Assert.assertTrue(start2 < start1);
    for (int i = 0; i < paramsList.size(); i++)
        calc.fisherInformationDiagonal(x.length, paramsList.get(i), func);
    for (int i = 0; i < paramsList.size(); i++)
        calc2.fisherInformationDiagonal(x.length, paramsList.get(i), func);
    start1 = System.nanoTime();
    for (int i = 0; i < paramsList.size(); i++)
        calc.fisherInformationDiagonal(x.length, paramsList.get(i), func);
    start1 = System.nanoTime() - start1;
    start2 = System.nanoTime();
    for (int i = 0; i < paramsList.size(); i++)
        calc2.fisherInformationDiagonal(x.length, paramsList.get(i), func);
    start2 = System.nanoTime() - start2;
    log("%sFisher Diagonal GradientCalculator = %d : GradientCalculator%d = %d : %fx\n", (mle) ? "MLE " : "", start1, nparams, start2, (1.0 * start1) / start2);
    if (TestSettings.ASSERT_SPEED_TESTS)
        Assert.assertTrue(start2 < start1);
}
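The speed comparison above follows the usual warm-up-then-measure pattern with System.nanoTime(): both calculators are run once untimed so the JIT can compile the hot paths, then each is timed over the same inputs. A generic, self-contained sketch of that pattern (the workloads below are placeholders, not the GDSC-SMLM calculators):

public class NanoTimingSketch {
    static double sink; // accumulate results so the JIT cannot discard the work

    static long time(Runnable task, int reps) {
        long start = System.nanoTime();
        for (int i = 0; i < reps; i++)
            task.run();
        return System.nanoTime() - start;
    }

    public static void main(String[] args) {
        Runnable baseline = () -> sink += Math.log(sink + 2);
        Runnable candidate = () -> sink += Math.sqrt(sink + 2);
        int reps = 100000;
        // Warm-up passes before the timed runs
        time(baseline, reps);
        time(candidate, reps);
        long t1 = time(baseline, reps);
        long t2 = time(candidate, reps);
        System.out.printf("baseline = %d ns : candidate = %d ns : %fx%n", t1, t2, (1.0 * t1) / t2);
    }
}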
Use of org.apache.commons.math3.random.RandomDataGenerator in project GDSC-SMLM by aherbert.
The class LSQLVMGradientProcedureTest, method gradientProcedureUnrolledComputesSameAsGradientProcedure.
private void gradientProcedureUnrolledComputesSameAsGradientProcedure(int nparams) {
    int iter = 10;
    rdg = new RandomDataGenerator(new Well19937c(30051977));
    ArrayList<double[]> paramsList = new ArrayList<double[]>(iter);
    ArrayList<double[]> yList = new ArrayList<double[]>(iter);
    createFakeData(nparams, iter, paramsList, yList);
    FakeGradientFunction func = new FakeGradientFunction(blockWidth, nparams);
    String name = GradientCalculator.class.getSimpleName();
    for (int i = 0; i < paramsList.size(); i++) {
        BaseLSQLVMGradientProcedure p1 = LSQLVMGradientProcedureFactory.create(yList.get(i), func);
        p1.gradient(paramsList.get(i));
        BaseLSQLVMGradientProcedure p2 = new LSQLVMGradientProcedure(yList.get(i), func);
        p2.gradient(paramsList.get(i));
        // Exactly the same ...
        Assert.assertEquals(name + " Result: Not same @ " + i, p1.value, p2.value, 0);
        Assert.assertArrayEquals(name + " Observations: Not same beta @ " + i, p1.beta, p2.beta, 0);
        Assert.assertArrayEquals(name + " Observations: Not same alpha @ " + i, p1.getAlphaLinear(), p2.getAlphaLinear(), 0);
        double[][] am1 = p1.getAlphaMatrix();
        double[][] am2 = p2.getAlphaMatrix();
        for (int j = 0; j < nparams; j++)
            Assert.assertArrayEquals(name + " Observations: Not same alpha @ " + i, am1[j], am2[j], 0);
    }
}
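createFakeData fills paramsList and yList from the seeded generator; its body is not shown on this page. One plausible shape for such a helper, drawing uniform parameters and Poisson-distributed observations with RandomDataGenerator, is sketched below (the means, ranges and sizes are illustrative assumptions only):

import java.util.ArrayList;
import org.apache.commons.math3.random.RandomDataGenerator;
import org.apache.commons.math3.random.Well19937c;

public class FakeDataSketch {
    public static void main(String[] args) {
        RandomDataGenerator rdg = new RandomDataGenerator(new Well19937c(30051977));
        int nparams = 5, iter = 10, size = 10 * 10; // e.g. blockWidth * blockWidth pixels
        ArrayList<double[]> paramsList = new ArrayList<double[]>(iter);
        ArrayList<double[]> yList = new ArrayList<double[]>(iter);
        for (int i = 0; i < iter; i++) {
            double[] params = new double[nparams];
            for (int j = 0; j < nparams; j++)
                params[j] = rdg.nextUniform(0, 100);
            // Observations drawn as Poisson counts around an arbitrary mean,
            // mimicking photon-count data fitted by the gradient procedures
            double[] y = new double[size];
            for (int k = 0; k < size; k++)
                y[k] = rdg.nextPoisson(10);
            paramsList.add(params);
            yList.add(y);
        }
        System.out.println("y[0][0] = " + yList.get(0)[0]);
    }
}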
Use of org.apache.commons.math3.random.RandomDataGenerator in project GDSC-SMLM by aherbert.
The class LSQLVMGradientProcedureTest, method gradientProcedureLinearIsFasterThanGradientProcedureMatrix.
private void gradientProcedureLinearIsFasterThanGradientProcedureMatrix(final int nparams, final BaseLSQLVMGradientProcedureFactory factory1, final BaseLSQLVMGradientProcedureFactory factory2, boolean doAssert) {
    org.junit.Assume.assumeTrue(speedTests || TestSettings.RUN_SPEED_TESTS);
    final int iter = 100;
    rdg = new RandomDataGenerator(new Well19937c(30051977));
    final ArrayList<double[]> paramsList = new ArrayList<double[]>(iter);
    final ArrayList<double[]> yList = new ArrayList<double[]>(iter);
    createData(1, iter, paramsList, yList);
    // Remove the timing of the function call by creating a dummy function
    final Gradient1Function func = new FakeGradientFunction(blockWidth, nparams);
    for (int i = 0; i < paramsList.size(); i++) {
        BaseLSQLVMGradientProcedure p = factory1.createProcedure(yList.get(i), func);
        p.gradient(paramsList.get(i));
        p.gradient(paramsList.get(i));
        BaseLSQLVMGradientProcedure p2 = factory2.createProcedure(yList.get(i), func);
        p2.gradient(paramsList.get(i));
        p2.gradient(paramsList.get(i));
        // Check they are the same
        Assert.assertArrayEquals("A " + i, p.getAlphaLinear(), p2.getAlphaLinear(), 0);
        Assert.assertArrayEquals("B " + i, p.beta, p2.beta, 0);
    }
    // Realistic loops for an optimisation
    final int loops = 15;
    // Run till stable timing
    Timer t1 = new Timer() {
        @Override
        void run() {
            for (int i = 0, k = 0; i < paramsList.size(); i++) {
                BaseLSQLVMGradientProcedure p = factory1.createProcedure(yList.get(i), func);
                for (int j = loops; j-- > 0;)
                    p.gradient(paramsList.get(k++ % iter));
            }
        }
    };
    long time1 = t1.getTime();
    Timer t2 = new Timer(t1.loops) {
        @Override
        void run() {
            for (int i = 0, k = 0; i < paramsList.size(); i++) {
                BaseLSQLVMGradientProcedure p2 = factory2.createProcedure(yList.get(i), func);
                for (int j = loops; j-- > 0;)
                    p2.gradient(paramsList.get(k++ % iter));
            }
        }
    };
    long time2 = t2.getTime();
    log("Standard %s = %d : Unrolled %s %d = %d : %fx\n", factory1.getClass().getSimpleName(), time1, factory2.getClass().getSimpleName(), nparams, time2, (1.0 * time1) / time2);
    if (doAssert)
        Assert.assertTrue(time2 < time1);
}
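The Timer used above is a project helper whose source is not reproduced on this page. Judging only from how the tests use it (an abstract run(), a getTime() accessor, and a loops count that the second timer reuses so both procedures do comparable work), a rough stand-in might look like the sketch below; this is an assumption about its behaviour, not the actual implementation:

// Simplified Timer-like harness: repeat run() for a number of passes and keep
// the fastest pass as the representative time. Purely illustrative.
abstract class SimpleTimer {
    int loops;

    SimpleTimer() {
        this(1);
    }

    SimpleTimer(int loops) {
        this.loops = Math.max(1, loops);
    }

    abstract void run();

    long getTime() {
        long best = Long.MAX_VALUE;
        for (int pass = 0; pass < 3; pass++) {
            long start = System.nanoTime();
            for (int i = 0; i < loops; i++)
                run();
            long t = System.nanoTime() - start;
            if (t < best)
                best = t;
        }
        return best;
    }
}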
Use of org.apache.commons.math3.random.RandomDataGenerator in project GDSC-SMLM by aherbert.
The class FastMLEGradient2ProcedureTest, method gradientProcedureIsNotSlowerThanGradientCalculator.
private void gradientProcedureIsNotSlowerThanGradientCalculator(final int nparams) {
    org.junit.Assume.assumeTrue(speedTests || TestSettings.RUN_SPEED_TESTS);
    final int iter = 1000;
    rdg = new RandomDataGenerator(new Well19937c(30051977));
    final ArrayList<double[]> paramsList = new ArrayList<double[]>(iter);
    final ArrayList<double[]> yList = new ArrayList<double[]>(iter);
    createFakeData(nparams, iter, paramsList, yList);
    final FakeGradientFunction func = new FakeGradientFunction(blockWidth, nparams);
    MLEGradientCalculator calc = (MLEGradientCalculator) GradientCalculatorFactory.newCalculator(nparams, true);
    for (int i = 0; i < paramsList.size(); i++)
        calc.logLikelihood(yList.get(i), paramsList.get(i), func);
    for (int i = 0; i < paramsList.size(); i++) {
        FastMLEGradient2Procedure p = FastMLEGradient2ProcedureFactory.createUnrolled(yList.get(i), func);
        p.computeLogLikelihood(paramsList.get(i));
    }
    // Realistic loops for an optimisation
    final int loops = 15;
    // Run till stable timing
    Timer t1 = new Timer() {
        @Override
        void run() {
            for (int i = 0, k = 0; i < iter; i++) {
                MLEGradientCalculator calc = (MLEGradientCalculator) GradientCalculatorFactory.newCalculator(nparams, true);
                for (int j = loops; j-- > 0;)
                    calc.logLikelihood(yList.get(i), paramsList.get(k++ % iter), func);
            }
        }
    };
    long time1 = t1.getTime();
    Timer t2 = new Timer(t1.loops) {
        @Override
        void run() {
            for (int i = 0, k = 0; i < iter; i++) {
                FastMLEGradient2Procedure p = FastMLEGradient2ProcedureFactory.createUnrolled(yList.get(i), func);
                for (int j = loops; j-- > 0;)
                    p.computeLogLikelihood(paramsList.get(k++ % iter));
            }
        }
    };
    long time2 = t2.getTime();
    log("GradientCalculator = %d : FastMLEGradient2Procedure %d = %d : %fx\n", time1, nparams, time2, (1.0 * time1) / time2);
    if (TestSettings.ASSERT_SPEED_TESTS) {
        // Add contingency
        Assert.assertTrue(time2 < time1 * 1.5);
    }
}
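The quantity both implementations compute above is a Poisson log-likelihood of the observed counts y given the model's predicted values f, ll = sum_i (y_i * ln f_i - f_i - ln y_i!). A plain reference computation of that formula is sketched below using Commons Math's log-gamma for the factorial term; the project's optimised code may drop the constant term or handle edge cases such as f_i <= 0 differently:

import org.apache.commons.math3.special.Gamma;

public class PoissonLogLikelihoodSketch {
    static double logLikelihood(double[] y, double[] f) {
        double ll = 0;
        for (int i = 0; i < y.length; i++) {
            // ln(y!) computed via the log-gamma function: ln Gamma(y + 1)
            ll += y[i] * Math.log(f[i]) - f[i] - Gamma.logGamma(y[i] + 1);
        }
        return ll;
    }

    public static void main(String[] args) {
        double[] y = { 3, 7, 2, 5 };
        double[] f = { 3.2, 6.5, 2.4, 4.9 };
        System.out.println("log-likelihood = " + logLikelihood(y, f));
    }
}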