Example 6 with Optimum

Use of org.apache.commons.math3.fitting.leastsquares.LeastSquaresOptimizer.Optimum in project GDSC-SMLM by aherbert.

The class ApacheLVMFitter, method computeFit.

public FitStatus computeFit(double[] y, final double[] y_fit, double[] a, double[] a_dev) {
    int n = y.length;
    try {
        // Different convergence thresholds seem to have no effect on the resulting fit, only the number of
        // iterations for convergence
        final double initialStepBoundFactor = 100;
        final double costRelativeTolerance = 1e-10;
        final double parRelativeTolerance = 1e-10;
        final double orthoTolerance = 1e-10;
        final double threshold = Precision.SAFE_MIN;
        // Extract the parameters to be fitted
        final double[] initialSolution = getInitialSolution(a);
        // TODO - Pass in more advanced stopping criteria.
        // Create the target and weight arrays
        final double[] yd = new double[n];
        final double[] w = new double[n];
        for (int i = 0; i < n; i++) {
            yd[i] = y[i];
            w[i] = 1;
        }
        LevenbergMarquardtOptimizer optimizer = new LevenbergMarquardtOptimizer(initialStepBoundFactor, costRelativeTolerance, parRelativeTolerance, orthoTolerance, threshold);
        //@formatter:off
        LeastSquaresBuilder builder = new LeastSquaresBuilder()
                .maxEvaluations(Integer.MAX_VALUE)
                .maxIterations(getMaxEvaluations())
                .start(initialSolution)
                .target(yd)
                .weight(new DiagonalMatrix(w));
        if (f instanceof ExtendedNonLinearFunction && ((ExtendedNonLinearFunction) f).canComputeValuesAndJacobian()) {
            // Compute together, or each individually
            builder.model(new ValueAndJacobianFunction() {

                final ExtendedNonLinearFunction fun = (ExtendedNonLinearFunction) f;

                public Pair<RealVector, RealMatrix> value(RealVector point) {
                    final double[] p = point.toArray();
                    final Pair<double[], double[][]> result = fun.computeValuesAndJacobian(p);
                    return new Pair<RealVector, RealMatrix>(new ArrayRealVector(result.getFirst(), false), new Array2DRowRealMatrix(result.getSecond(), false));
                }

                public RealVector computeValue(double[] params) {
                    return new ArrayRealVector(fun.computeValues(params), false);
                }

                public RealMatrix computeJacobian(double[] params) {
                    return new Array2DRowRealMatrix(fun.computeJacobian(params), false);
                }
            });
        } else {
            // Compute separately
            builder.model(new MultivariateVectorFunctionWrapper((NonLinearFunction) f, a, n), new MultivariateMatrixFunctionWrapper((NonLinearFunction) f, a, n));
        }
        LeastSquaresProblem problem = builder.build();
        Optimum optimum = optimizer.optimize(problem);
        final double[] parameters = optimum.getPoint().toArray();
        setSolution(a, parameters);
        iterations = optimum.getIterations();
        evaluations = optimum.getEvaluations();
        if (a_dev != null) {
            try {
                double[][] covar = optimum.getCovariances(threshold).getData();
                setDeviationsFromMatrix(a_dev, covar);
            } catch (SingularMatrixException e) {
                // Matrix inversion failed. In order to return a solution, use the
                // reciprocal of the diagonal of the Fisher information matrix for a
                // loose bound on the limit.
                final int[] gradientIndices = f.gradientIndices();
                final int nparams = gradientIndices.length;
                GradientCalculator calculator = GradientCalculatorFactory.newCalculator(nparams);
                double[][] alpha = new double[nparams][nparams];
                double[] beta = new double[nparams];
                calculator.findLinearised(nparams, y, a, alpha, beta, (NonLinearFunction) f);
                FisherInformationMatrix m = new FisherInformationMatrix(alpha);
                setDeviations(a_dev, m.crlb(true));
            }
        }
        // Compute function value
        if (y_fit != null) {
            Gaussian2DFunction f = (Gaussian2DFunction) this.f;
            f.initialise0(a);
            f.forEach(new ValueProcedure() {

                int i = 0;

                public void execute(double value) {
                    y_fit[i] = value;
                }
            });
        }
        // As this is unweighted we can do this to get the sum of squared residuals.
        // This is the same as optimum.getCost() * optimum.getCost(); the getCost() function
        // just computes the square root of this dot product anyway.
        value = optimum.getResiduals().dotProduct(optimum.getResiduals());
    } catch (TooManyEvaluationsException e) {
        return FitStatus.TOO_MANY_EVALUATIONS;
    } catch (TooManyIterationsException e) {
        return FitStatus.TOO_MANY_ITERATIONS;
    } catch (ConvergenceException e) {
        // Occurs when QR decomposition fails - mark as a singular non-linear model (no solution)
        return FitStatus.SINGULAR_NON_LINEAR_MODEL;
    } catch (Exception e) {
        // TODO - Find out the other exceptions from the fitter and add return values to match. 
        return FitStatus.UNKNOWN;
    }
    return FitStatus.OK;
}
Also used : ValueProcedure(gdsc.smlm.function.ValueProcedure) ExtendedNonLinearFunction(gdsc.smlm.function.ExtendedNonLinearFunction) NonLinearFunction(gdsc.smlm.function.NonLinearFunction) LeastSquaresBuilder(org.apache.commons.math3.fitting.leastsquares.LeastSquaresBuilder) TooManyEvaluationsException(org.apache.commons.math3.exception.TooManyEvaluationsException) Array2DRowRealMatrix(org.apache.commons.math3.linear.Array2DRowRealMatrix) Gaussian2DFunction(gdsc.smlm.function.gaussian.Gaussian2DFunction) ValueAndJacobianFunction(org.apache.commons.math3.fitting.leastsquares.ValueAndJacobianFunction) DiagonalMatrix(org.apache.commons.math3.linear.DiagonalMatrix) RealVector(org.apache.commons.math3.linear.RealVector) ArrayRealVector(org.apache.commons.math3.linear.ArrayRealVector) ConvergenceException(org.apache.commons.math3.exception.ConvergenceException) SingularMatrixException(org.apache.commons.math3.linear.SingularMatrixException) TooManyIterationsException(org.apache.commons.math3.exception.TooManyIterationsException) LeastSquaresProblem(org.apache.commons.math3.fitting.leastsquares.LeastSquaresProblem) GradientCalculator(gdsc.smlm.fitting.nonlinear.gradient.GradientCalculator) Pair(org.apache.commons.math3.util.Pair) FisherInformationMatrix(gdsc.smlm.fitting.FisherInformationMatrix) MultivariateMatrixFunctionWrapper(gdsc.smlm.function.MultivariateMatrixFunctionWrapper) Optimum(org.apache.commons.math3.fitting.leastsquares.LeastSquaresOptimizer.Optimum) LevenbergMarquardtOptimizer(org.apache.commons.math3.fitting.leastsquares.LevenbergMarquardtOptimizer) RealMatrix(org.apache.commons.math3.linear.RealMatrix) MultivariateVectorFunctionWrapper(gdsc.smlm.function.MultivariateVectorFunctionWrapper)
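
For reference, a minimal self-contained sketch of the same Commons Math 3 pattern used above: build a LeastSquaresProblem with LeastSquaresBuilder, run a LevenbergMarquardtOptimizer and read the fitted point, iteration count and residuals from the returned Optimum. The quadratic model, data and limits below are illustrative assumptions, not part of GDSC-SMLM.

import org.apache.commons.math3.fitting.leastsquares.LeastSquaresBuilder;
import org.apache.commons.math3.fitting.leastsquares.LeastSquaresOptimizer.Optimum;
import org.apache.commons.math3.fitting.leastsquares.LeastSquaresProblem;
import org.apache.commons.math3.fitting.leastsquares.LevenbergMarquardtOptimizer;
import org.apache.commons.math3.fitting.leastsquares.MultivariateJacobianFunction;
import org.apache.commons.math3.linear.Array2DRowRealMatrix;
import org.apache.commons.math3.linear.ArrayRealVector;
import org.apache.commons.math3.linear.RealMatrix;
import org.apache.commons.math3.linear.RealVector;
import org.apache.commons.math3.util.Pair;

public class LvmOptimumSketch {
    public static void main(String[] args) {
        // Illustrative data for the model y = a * x^2 (not GDSC-SMLM data)
        final double[] x = { 1, 2, 3, 4 };
        final double[] target = { 2.1, 7.9, 18.2, 31.8 };

        // Model value and Jacobian computed together, as in the ValueAndJacobianFunction branch above
        MultivariateJacobianFunction model = new MultivariateJacobianFunction() {
            public Pair<RealVector, RealMatrix> value(RealVector point) {
                final double a = point.getEntry(0);
                final double[] value = new double[x.length];
                final double[][] jacobian = new double[x.length][1];
                for (int i = 0; i < x.length; i++) {
                    value[i] = a * x[i] * x[i];
                    // d(value)/da
                    jacobian[i][0] = x[i] * x[i];
                }
                return new Pair<RealVector, RealMatrix>(new ArrayRealVector(value, false),
                        new Array2DRowRealMatrix(jacobian, false));
            }
        };

        LeastSquaresProblem problem = new LeastSquaresBuilder()
                .start(new double[] { 1 })
                .model(model)
                .target(target)
                .maxEvaluations(1000)
                .maxIterations(1000)
                .build();

        Optimum optimum = new LevenbergMarquardtOptimizer().optimize(problem);
        System.out.println("a          = " + optimum.getPoint().getEntry(0));
        System.out.println("iterations = " + optimum.getIterations());
        // Unweighted sum of squared residuals; getCost() is the square root of this value
        System.out.println("ss         = " + optimum.getResiduals().dotProduct(optimum.getResiduals()));
    }
}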

Example 7 with Optimum

Use of org.apache.commons.math3.fitting.leastsquares.LeastSquaresOptimizer.Optimum in project GDSC-SMLM by aherbert.

The class TraceDiffusion, method fitMSD.

/**
	 * Fit the MSD using a linear fit that must pass through 0,0.
	 * <p>
	 * Update the plot by adding the fit line.
	 * 
	 * @param x the x values (time)
	 * @param y the y values (MSD)
	 * @param title the plot title
	 * @param plot the plot
	 * @return [D, precision]
	 */
private double[] fitMSD(double[] x, double[] y, String title, Plot2 plot) {
    // The Weimann paper (Plos One e64287) fits:
    // MSD(n dt) = 4D n dt + 4s^2
    // n = number of jumps
    // dt = time difference between frames
    // s = localisation precision
    // Thus we should fit an intercept as well.
    // From the fit D = gradient / (4*exposureTime)
    double D = 0;
    double intercept = 0;
    double precision = 0;
    LevenbergMarquardtOptimizer optimizer = new LevenbergMarquardtOptimizer();
    Optimum lvmSolution;
    double ic = 0;
    // Fit with no intercept
    try {
        final LinearFunction function = new LinearFunction(x, y, settings.fitLength);
        double[] parameters = new double[] { function.guess() };
        //@formatter:off
        LeastSquaresProblem problem = new LeastSquaresBuilder()
                .maxEvaluations(Integer.MAX_VALUE)
                .maxIterations(3000)
                .start(parameters)
                .target(function.getY())
                .weight(new DiagonalMatrix(function.getWeights()))
                .model(function, new MultivariateMatrixFunction() {

            public double[][] value(double[] point) throws IllegalArgumentException {
                return function.jacobian(point);
            }
        }).build();
        //@formatter:on
        lvmSolution = optimizer.optimize(problem);
        double ss = lvmSolution.getResiduals().dotProduct(lvmSolution.getResiduals());
        //double ss = 0;
        //double[] obs = function.getY();
        //double[] exp = lvmSolution.getValue();
        //for (int i = 0; i < obs.length; i++)
        //	ss += (obs[i] - exp[i]) * (obs[i] - exp[i]);
        ic = Maths.getAkaikeInformationCriterionFromResiduals(ss, function.getY().length, 1);
        double gradient = lvmSolution.getPoint().getEntry(0);
        D = gradient / 4;
        Utils.log("Linear fit (%d points) : Gradient = %s, D = %s um^2/s, SS = %s, IC = %s (%d evaluations)", function.getY().length, Utils.rounded(gradient, 4), Utils.rounded(D, 4), Utils.rounded(ss), Utils.rounded(ic), lvmSolution.getEvaluations());
    } catch (TooManyIterationsException e) {
        Utils.log("Failed to fit : Too many iterations (%s)", e.getMessage());
    } catch (ConvergenceException e) {
        Utils.log("Failed to fit : %s", e.getMessage());
    }
    // Fit with intercept.
    // Optionally include the intercept (which is the estimated precision).
    boolean fitIntercept = true;
    try {
        final LinearFunctionWithIntercept function = new LinearFunctionWithIntercept(x, y, settings.fitLength, fitIntercept);
        //@formatter:off
        LeastSquaresProblem problem = new LeastSquaresBuilder()
                .maxEvaluations(Integer.MAX_VALUE)
                .maxIterations(3000)
                .start(function.guess())
                .target(function.getY())
                .weight(new DiagonalMatrix(function.getWeights()))
                .model(function, new MultivariateMatrixFunction() {

            public double[][] value(double[] point) throws IllegalArgumentException {
                return function.jacobian(point);
            }
        }).build();
        //@formatter:on
        lvmSolution = optimizer.optimize(problem);
        double ss = lvmSolution.getResiduals().dotProduct(lvmSolution.getResiduals());
        //double ss = 0;
        //double[] obs = function.getY();
        //double[] exp = lvmSolution.getValue();
        //for (int i = 0; i < obs.length; i++)
        //	ss += (obs[i] - exp[i]) * (obs[i] - exp[i]);
        double ic2 = Maths.getAkaikeInformationCriterionFromResiduals(ss, function.getY().length, 2);
        double gradient = lvmSolution.getPoint().getEntry(0);
        final double s = lvmSolution.getPoint().getEntry(1);
        double intercept2 = 4 * s * s;
        if (ic2 < ic || debugFitting) {
            // Convert fitted precision in um to nm
            Utils.log("Linear fit with intercept (%d points) : Gradient = %s, Intercept = %s, D = %s um^2/s, precision = %s nm, SS = %s, IC = %s (%d evaluations)", function.getY().length, Utils.rounded(gradient, 4), Utils.rounded(intercept2, 4), Utils.rounded(gradient / 4, 4), Utils.rounded(s * 1000, 4), Utils.rounded(ss), Utils.rounded(ic2), lvmSolution.getEvaluations());
        }
        if (lvmSolution == null || ic2 < ic) {
            intercept = intercept2;
            D = gradient / 4;
            precision = s;
        }
    } catch (TooManyIterationsException e) {
        Utils.log("Failed to fit with intercept : Too many iterations (%s)", e.getMessage());
    } catch (ConvergenceException e) {
        Utils.log("Failed to fit with intercept : %s", e.getMessage());
    }
    if (settings.msdCorrection) {
        // With the MSD correction the intercept is allowed to be a small negative.
        try {
            // This function fits the jump distance (n) not the time (nt) so update x
            double[] x2 = new double[x.length];
            for (int i = 0; i < x2.length; i++) x2[i] = x[i] / exposureTime;
            final LinearFunctionWithMSDCorrectedIntercept function = new LinearFunctionWithMSDCorrectedIntercept(x2, y, settings.fitLength, fitIntercept);
            //@formatter:off
            LeastSquaresProblem problem = new LeastSquaresBuilder()
                    .maxEvaluations(Integer.MAX_VALUE)
                    .maxIterations(3000)
                    .start(function.guess())
                    .target(function.getY())
                    .weight(new DiagonalMatrix(function.getWeights()))
                    .model(function, new MultivariateMatrixFunction() {

                public double[][] value(double[] point) throws IllegalArgumentException {
                    return function.jacobian(point);
                }
            }).build();
            //@formatter:on
            lvmSolution = optimizer.optimize(problem);
            double ss = lvmSolution.getResiduals().dotProduct(lvmSolution.getResiduals());
            //double ss = 0;
            //double[] obs = function.getY();
            //double[] exp = lvmSolution.getValue();
            //for (int i = 0; i < obs.length; i++)
            //	ss += (obs[i] - exp[i]) * (obs[i] - exp[i]);
            double ic2 = Maths.getAkaikeInformationCriterionFromResiduals(ss, function.getY().length, 2);
            double gradient = lvmSolution.getPoint().getEntry(0);
            final double s = lvmSolution.getPoint().getEntry(1);
            double intercept2 = 4 * s * s - gradient / 3;
            // Q. Is this working?
            // Try fixed precision fitting. Is the gradient correct?
            // Revisit all the equations to see if they are wrong.
            // Try adding the x[0] datapoint using the precision.
            // Change the formula to not be linear at x[0] and to just fit the precision, i.e. the intercept2 = 4 * s * s - gradient / 3 is wrong as the 
            // equation is not linear below n=1.
            // Incorporate the exposure time into the gradient to allow comparison to other fits 
            gradient /= exposureTime;
            if (ic2 < ic || debugFitting) {
                // Convert fitted precision in um to nm
                Utils.log("Linear fit with MSD corrected intercept (%d points) : Gradient = %s, Intercept = %s, D = %s um^2/s, precision = %s nm, SS = %s, IC = %s (%d evaluations)", function.getY().length, Utils.rounded(gradient, 4), Utils.rounded(intercept2, 4), Utils.rounded(gradient / 4, 4), Utils.rounded(s * 1000, 4), Utils.rounded(ss), Utils.rounded(ic2), lvmSolution.getEvaluations());
            }
            if (lvmSolution == null || ic2 < ic) {
                intercept = intercept2;
                D = gradient / 4;
                precision = s;
            }
        } catch (TooManyIterationsException e) {
            Utils.log("Failed to fit with MSD corrected intercept : Too many iterations (%s)", e.getMessage());
        } catch (ConvergenceException e) {
            Utils.log("Failed to fit with MSD corrected intercept : %s", e.getMessage());
        }
    }
    // Add the fit to the plot
    if (D > 0) {
        plot.setColor(Color.magenta);
        plot.drawLine(0, intercept, x[x.length - 1], 4 * D * x[x.length - 1] + intercept);
        display(title, plot);
        checkTraceDistance(D);
    }
    return new double[] { D, precision };
}
Also used : LeastSquaresBuilder(org.apache.commons.math3.fitting.leastsquares.LeastSquaresBuilder) Optimum(org.apache.commons.math3.fitting.leastsquares.LeastSquaresOptimizer.Optimum) LevenbergMarquardtOptimizer(org.apache.commons.math3.fitting.leastsquares.LevenbergMarquardtOptimizer) DiagonalMatrix(org.apache.commons.math3.linear.DiagonalMatrix) ConvergenceException(org.apache.commons.math3.exception.ConvergenceException) TooManyIterationsException(org.apache.commons.math3.exception.TooManyIterationsException) LeastSquaresProblem(org.apache.commons.math3.fitting.leastsquares.LeastSquaresProblem) MultivariateMatrixFunction(org.apache.commons.math3.analysis.MultivariateMatrixFunction)
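
The fitMSD method above selects between the no-intercept and intercept fits using an information criterion computed from the sum of squared residuals. Below is a hedged sketch of that selection logic, assuming the common residual-based form AIC = n ln(SS/n) + 2k; the project's Maths.getAkaikeInformationCriterionFromResiduals helper may differ by constant terms, which cancel when comparing fits on the same data.

public class MsdFitSelectionSketch {

    // Residual-based AIC, assuming AIC = n * ln(SS / n) + 2k (constant terms omitted).
    static double aic(double sumSquaredResiduals, int n, int numberOfParameters) {
        return n * Math.log(sumSquaredResiduals / n) + 2 * numberOfParameters;
    }

    // Choose between the no-intercept fit (1 parameter) and the intercept fit (2 parameters)
    // and convert the selected gradient/precision to [D, intercept, precision] using the
    // Weimann model MSD(n dt) = 4 D n dt + 4 s^2.
    static double[] selectFit(double ssNoIntercept, double gradientNoIntercept,
            double ssWithIntercept, double gradientWithIntercept, double s, int n) {
        final double ic1 = aic(ssNoIntercept, n, 1);
        final double ic2 = aic(ssWithIntercept, n, 2);
        if (ic2 < ic1) {
            // Intercept model preferred: D = gradient / 4, intercept = 4 s^2
            return new double[] { gradientWithIntercept / 4, 4 * s * s, s };
        }
        // No-intercept model preferred: D = gradient / 4, no precision estimate
        return new double[] { gradientNoIntercept / 4, 0, 0 };
    }
}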

Example 8 with Optimum

Use of org.apache.commons.math3.fitting.leastsquares.LeastSquaresOptimizer.Optimum in project GDSC-SMLM by aherbert.

The class CustomPowellOptimizer, method doOptimize.

/** {@inheritDoc} */
@Override
protected PointValuePair doOptimize() {
    final GoalType goal = getGoalType();
    final double[] guess = getStartPoint();
    final int n = guess.length;
    // Mark when we have modified the basis vectors
    boolean nonBasis = false;
    double[][] direc = createBasisVectors(n);
    final ConvergenceChecker<PointValuePair> checker = getConvergenceChecker();
    //int resets = 0;
    //PointValuePair solution = null;
    //PointValuePair finalSolution = null;
    //int solutionIter = 0, solutionEval = 0;
    //double startValue = 0;
    //try
    //{
    double[] x = guess;
    // Ensure the point is within bounds
    applyBounds(x);
    double fVal = computeObjectiveValue(x);
    //startValue = fVal;
    double[] x1 = x.clone();
    while (true) {
        incrementIterationCount();
        final double fX = fVal;
        double fX2 = 0;
        double delta = 0;
        int bigInd = 0;
        for (int i = 0; i < n; i++) {
            fX2 = fVal;
            final UnivariatePointValuePair optimum = line.search(x, direc[i]);
            fVal = optimum.getValue();
            x = newPoint(x, direc[i], optimum.getPoint());
            if ((fX2 - fVal) > delta) {
                delta = fX2 - fVal;
                bigInd = i;
            }
        }
        boolean stop = false;
        if (positionChecker != null) {
            // Check for convergence on the position
            stop = positionChecker.converged(x1, x);
        }
        if (!stop) {
            // Check if we have improved from an impossible position
            if (Double.isInfinite(fX) || Double.isNaN(fX)) {
                if (Double.isInfinite(fVal) || Double.isNaN(fVal)) {
                    // Nowhere to go 
                    stop = true;
                }
            // else: this is better as we now have a value, so continue
            } else {
                stop = DoubleEquality.almostEqualRelativeOrAbsolute(fX, fVal, relativeThreshold, absoluteThreshold);
            }
        }
        final PointValuePair previous = new PointValuePair(x1, fX);
        final PointValuePair current = new PointValuePair(x, fVal);
        if (!stop && checker != null) {
            // User-defined stopping criteria.
            stop = checker.converged(getIterations(), previous, current);
        }
        boolean reset = false;
        if (stop) {
            // Only allow convergence using the basis vectors, i.e. we cannot move along any dimension
            if (basisConvergence && nonBasis) {
                // Reset to the basis vectors and continue
                reset = true;
            //resets++;
            } else {
                //System.out.printf("Resets = %d\n", resets);
                final PointValuePair answer;
                if (goal == GoalType.MINIMIZE) {
                    answer = (fVal < fX) ? current : previous;
                } else {
                    answer = (fVal > fX) ? current : previous;
                }
                return answer;
            // XXX Debugging
            // Continue the algorithm to see how far it goes
            //if (solution == null)
            //{
            //	solution = answer;
            //	solutionIter = getIterations();
            //	solutionEval = getEvaluations();
            //}
            //finalSolution = answer;
            }
        }
        if (reset) {
            direc = createBasisVectors(n);
            nonBasis = false;
        }
        final double[] d = new double[n];
        final double[] x2 = new double[n];
        for (int i = 0; i < n; i++) {
            d[i] = x[i] - x1[i];
            x2[i] = x[i] + d[i];
        }
        applyBounds(x2);
        x1 = x.clone();
        fX2 = computeObjectiveValue(x2);
        // See if we can continue along the overall search direction to find a better value
        if (fX > fX2) {
            // Check if:
            // 1. The decrease along the average direction was not due to any single direction's decrease
            // 2. There is a substantial second derivative along the average direction and we are close to
            // its minimum
            double t = 2 * (fX + fX2 - 2 * fVal);
            double temp = fX - fVal - delta;
            t *= temp * temp;
            temp = fX - fX2;
            t -= delta * temp * temp;
            if (t < 0.0) {
                final UnivariatePointValuePair optimum = line.search(x, d);
                fVal = optimum.getValue();
                if (reset) {
                    x = newPoint(x, d, optimum.getPoint());
                    continue;
                } else {
                    final double[][] result = newPointAndDirection(x, d, optimum.getPoint());
                    x = result[0];
                    final int lastInd = n - 1;
                    direc[bigInd] = direc[lastInd];
                    direc[lastInd] = result[1];
                    nonBasis = true;
                }
            }
        }
    }
//}
//catch (RuntimeException e)
//{
//	if (solution != null)
//	{
//		System.out.printf("Start %f : Initial %f (%d,%d) : Final %f (%d,%d) : %f\n", startValue,
//				solution.getValue(), solutionIter, solutionEval, finalSolution.getValue(), getIterations(),
//				getEvaluations(), DoubleEquality.relativeError(finalSolution.getValue(), solution.getValue()));
//		return finalSolution;
//	}
//	throw e;
//}
}
Also used : GoalType(org.apache.commons.math3.optim.nonlinear.scalar.GoalType) UnivariatePointValuePair(org.apache.commons.math3.optim.univariate.UnivariatePointValuePair) PointValuePair(org.apache.commons.math3.optim.PointValuePair)
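
CustomPowellOptimizer is a project-specific variant of Powell's direction-set method that adds bounds handling and optional basis-vector convergence. For comparison, a minimal sketch using the stock Commons Math PowellOptimizer on a toy quadratic; the objective, tolerances and start point are illustrative assumptions only.

import org.apache.commons.math3.analysis.MultivariateFunction;
import org.apache.commons.math3.optim.InitialGuess;
import org.apache.commons.math3.optim.MaxEval;
import org.apache.commons.math3.optim.PointValuePair;
import org.apache.commons.math3.optim.nonlinear.scalar.GoalType;
import org.apache.commons.math3.optim.nonlinear.scalar.ObjectiveFunction;
import org.apache.commons.math3.optim.nonlinear.scalar.noderiv.PowellOptimizer;

public class PowellSketch {
    public static void main(String[] args) {
        // Minimise (x-1)^2 + (y-2)^2 without derivatives (illustrative objective)
        MultivariateFunction f = new MultivariateFunction() {
            public double value(double[] p) {
                final double dx = p[0] - 1;
                final double dy = p[1] - 2;
                return dx * dx + dy * dy;
            }
        };
        // Relative and absolute convergence thresholds are illustrative values
        PowellOptimizer optimizer = new PowellOptimizer(1e-10, 1e-30);
        PointValuePair optimum = optimizer.optimize(
                new MaxEval(10000),
                new ObjectiveFunction(f),
                GoalType.MINIMIZE,
                new InitialGuess(new double[] { 0, 0 }));
        System.out.println(java.util.Arrays.toString(optimum.getPoint()) + " -> " + optimum.getValue());
    }
}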

Example 9 with Optimum

Use of org.apache.commons.math3.fitting.leastsquares.LeastSquaresOptimizer.Optimum in project GDSC-SMLM by aherbert.

The class MaximumLikelihoodFitter, method computeFit.

/*
	 * (non-Javadoc)
	 * 
	 * @see gdsc.smlm.fitting.nonlinear.BaseFunctionSolver#computeFit(double[], double[], double[], double[])
	 */
public FitStatus computeFit(double[] y, double[] y_fit, double[] a, double[] a_dev) {
    final int n = y.length;
    LikelihoodWrapper maximumLikelihoodFunction = createLikelihoodWrapper((NonLinearFunction) f, n, y, a);
    @SuppressWarnings("rawtypes") BaseOptimizer baseOptimiser = null;
    try {
        double[] startPoint = getInitialSolution(a);
        PointValuePair optimum = null;
        if (searchMethod == SearchMethod.POWELL || searchMethod == SearchMethod.POWELL_BOUNDED || searchMethod == SearchMethod.POWELL_ADAPTER) {
            // Non-differentiable version using Powell Optimiser
            // This is as per the method in Numerical Recipes 10.5 (Direction Set (Powell's) method)
            // I could extend the optimiser and implement bounds on the directions moved. However the mapping
            // adapter seems to work OK.
            final boolean basisConvergence = false;
            // Perhaps these thresholds should be tighter?
            // The default is to use the sqrt() of the overall tolerance
            //final double lineRel = FastMath.sqrt(relativeThreshold);
            //final double lineAbs = FastMath.sqrt(absoluteThreshold);
            //final double lineRel = relativeThreshold * 1e2;
            //final double lineAbs = absoluteThreshold * 1e2;
            // Since we are fitting only a small number of parameters, just use the same tolerance
            // for each search direction
            final double lineRel = relativeThreshold;
            final double lineAbs = absoluteThreshold;
            CustomPowellOptimizer o = new CustomPowellOptimizer(relativeThreshold, absoluteThreshold, lineRel, lineAbs, null, basisConvergence);
            baseOptimiser = o;
            OptimizationData maxIterationData = null;
            if (getMaxIterations() > 0)
                maxIterationData = new MaxIter(getMaxIterations());
            if (searchMethod == SearchMethod.POWELL_ADAPTER) {
                // Try using the mapping adapter for a bounded Powell search
                MultivariateFunctionMappingAdapter adapter = new MultivariateFunctionMappingAdapter(new MultivariateLikelihood(maximumLikelihoodFunction), lower, upper);
                optimum = o.optimize(maxIterationData, new MaxEval(getMaxEvaluations()), new ObjectiveFunction(adapter), GoalType.MINIMIZE, new InitialGuess(adapter.boundedToUnbounded(startPoint)));
                double[] solution = adapter.unboundedToBounded(optimum.getPointRef());
                optimum = new PointValuePair(solution, optimum.getValue());
            } else {
                if (powellFunction == null) {
                    // Map the Gaussian signal and background (as in the Python code) using the sqrt of the number of photons and background.
                    if (mapGaussian) {
                        Gaussian2DFunction gf = (Gaussian2DFunction) f;
                        // Re-map signal and background using the sqrt
                        int[] indices = gf.gradientIndices();
                        int[] map = new int[indices.length];
                        int count = 0;
                        // Background is always first
                        if (indices[0] == Gaussian2DFunction.BACKGROUND) {
                            map[count++] = 0;
                        }
                        // Look for the Signal in multiple peak 2D Gaussians
                        for (int i = 1; i < indices.length; i++) if (indices[i] % 6 == Gaussian2DFunction.SIGNAL) {
                            map[count++] = i;
                        }
                        if (count > 0) {
                            powellFunction = new MappedMultivariateLikelihood(maximumLikelihoodFunction, Arrays.copyOf(map, count));
                        }
                    }
                    if (powellFunction == null) {
                        powellFunction = new MultivariateLikelihood(maximumLikelihoodFunction);
                    }
                }
                // Update the maximum likelihood function in the Powell function wrapper
                powellFunction.fun = maximumLikelihoodFunction;
                OptimizationData positionChecker = null;
                // new org.apache.commons.math3.optim.PositionChecker(relativeThreshold, absoluteThreshold);
                SimpleBounds simpleBounds = null;
                if (powellFunction.isMapped()) {
                    MappedMultivariateLikelihood adapter = (MappedMultivariateLikelihood) powellFunction;
                    if (searchMethod == SearchMethod.POWELL_BOUNDED)
                        simpleBounds = new SimpleBounds(adapter.map(lower), adapter.map(upper));
                    optimum = o.optimize(maxIterationData, new MaxEval(getMaxEvaluations()), new ObjectiveFunction(powellFunction), GoalType.MINIMIZE, new InitialGuess(adapter.map(startPoint)), positionChecker, simpleBounds);
                    double[] solution = adapter.unmap(optimum.getPointRef());
                    optimum = new PointValuePair(solution, optimum.getValue());
                } else {
                    if (searchMethod == SearchMethod.POWELL_BOUNDED)
                        simpleBounds = new SimpleBounds(lower, upper);
                    optimum = o.optimize(maxIterationData, new MaxEval(getMaxEvaluations()), new ObjectiveFunction(powellFunction), GoalType.MINIMIZE, new InitialGuess(startPoint), positionChecker, simpleBounds);
                }
            }
        } else if (searchMethod == SearchMethod.BOBYQA) {
            // Differentiable approximation using Powell's BOBYQA algorithm.
            // This is slower than the Powell optimiser and requires a high number of evaluations.
            int numberOfInterpolationPoints = this.getNumberOfFittedParameters() + 2;
            BOBYQAOptimizer o = new BOBYQAOptimizer(numberOfInterpolationPoints);
            baseOptimiser = o;
            optimum = o.optimize(new MaxEval(getMaxEvaluations()), new ObjectiveFunction(new MultivariateLikelihood(maximumLikelihoodFunction)), GoalType.MINIMIZE, new InitialGuess(startPoint), new SimpleBounds(lower, upper));
        } else if (searchMethod == SearchMethod.CMAES) {
            // TODO - Understand why the CMAES optimiser does not fit very well on test data. It appears 
            // to converge too early and the likelihood scores are not as low as the other optimisers.
            // CMAESOptimiser based on Matlab code:
            // https://www.lri.fr/~hansen/cmaes.m
            // Take the defaults from the Matlab documentation
            //Double.NEGATIVE_INFINITY;
            double stopFitness = 0;
            boolean isActiveCMA = true;
            int diagonalOnly = 0;
            int checkFeasableCount = 1;
            RandomGenerator random = new Well19937c();
            boolean generateStatistics = false;
            // The sigma determines the search range for the variables. It should be 1/3 of the initial search region.
            double[] sigma = new double[lower.length];
            for (int i = 0; i < sigma.length; i++) sigma[i] = (upper[i] - lower[i]) / 3;
            int popSize = (int) (4 + Math.floor(3 * Math.log(sigma.length)));
            // The CMAES optimiser is random and restarting can overcome problems with quick convergence.
            // The Apache Commons documentation states that convergence should occur between 30N and 300N^2
            // function evaluations
            final int n30 = FastMath.min(sigma.length * sigma.length * 30, getMaxEvaluations() / 2);
            evaluations = 0;
            OptimizationData[] data = new OptimizationData[] { new InitialGuess(startPoint), new CMAESOptimizer.PopulationSize(popSize), new MaxEval(getMaxEvaluations()), new CMAESOptimizer.Sigma(sigma), new ObjectiveFunction(new MultivariateLikelihood(maximumLikelihoodFunction)), GoalType.MINIMIZE, new SimpleBounds(lower, upper) };
            // Iterate to prevent early convergence
            int repeat = 0;
            while (evaluations < n30) {
                if (repeat++ > 1) {
                    // Update the start point and population size
                    data[0] = new InitialGuess(optimum.getPointRef());
                    popSize *= 2;
                    data[1] = new CMAESOptimizer.PopulationSize(popSize);
                }
                CMAESOptimizer o = new CMAESOptimizer(getMaxIterations(), stopFitness, isActiveCMA, diagonalOnly, checkFeasableCount, random, generateStatistics, new SimpleValueChecker(relativeThreshold, absoluteThreshold));
                baseOptimiser = o;
                PointValuePair result = o.optimize(data);
                iterations += o.getIterations();
                evaluations += o.getEvaluations();
                if (optimum == null || result.getValue() < optimum.getValue()) {
                    optimum = result;
                }
            }
            // Prevent incrementing the iterations again
            baseOptimiser = null;
        } else if (searchMethod == SearchMethod.BFGS) {
            // BFGS can use an approximate line search minimisation whereas Powell and conjugate gradient
            // methods require a more accurate line minimisation. The BFGS search does not do a full 
            // minimisation but takes appropriate steps in the direction of the current gradient.
            // Do not use the convergence checker on the value of the function. Use the convergence on the 
            // point coordinate and gradient
            //BFGSOptimizer o = new BFGSOptimizer(new SimpleValueChecker(rel, abs));
            BFGSOptimizer o = new BFGSOptimizer();
            baseOptimiser = o;
            // Configure maximum step length for each dimension using the bounds
            double[] stepLength = new double[lower.length];
            for (int i = 0; i < stepLength.length; i++) {
                stepLength[i] = (upper[i] - lower[i]) * 0.3333333;
                if (stepLength[i] <= 0)
                    stepLength[i] = Double.POSITIVE_INFINITY;
            }
            // The GoalType is always minimise so no need to pass this in
            OptimizationData positionChecker = null;
            //new org.apache.commons.math3.optim.PositionChecker(relativeThreshold, absoluteThreshold);
            optimum = o.optimize(new MaxEval(getMaxEvaluations()), new ObjectiveFunctionGradient(new MultivariateVectorLikelihood(maximumLikelihoodFunction)), new ObjectiveFunction(new MultivariateLikelihood(maximumLikelihoodFunction)), new InitialGuess(startPoint), new SimpleBounds(lowerConstraint, upperConstraint), new BFGSOptimizer.GradientTolerance(relativeThreshold), positionChecker, new BFGSOptimizer.StepLength(stepLength));
        } else {
            // The line search algorithm often fails. This is due to searching into a region where the 
            // function evaluates to a negative so has been clipped. This means the upper bound of the line
            // cannot be found.
            // Note that running it on an easy problem (200 photons with fixed fitting (no background)) the algorithm
            // does sometimes produce results better than the Powell algorithm but it is slower.
            BoundedNonLinearConjugateGradientOptimizer o = new BoundedNonLinearConjugateGradientOptimizer((searchMethod == SearchMethod.CONJUGATE_GRADIENT_FR) ? Formula.FLETCHER_REEVES : Formula.POLAK_RIBIERE, new SimpleValueChecker(relativeThreshold, absoluteThreshold));
            baseOptimiser = o;
            // Note: The gradients may become unstable at the edge of the bounds. Or they will not change 
            // direction if the true solution is on the bounds since the gradient will always continue 
            // towards the bounds. This is key to the conjugate gradient method. It searches along a vector 
            // until the direction of the gradient is in the opposite direction (using dot products, i.e. 
            // cosine of angle between them)
            // NR 10.7 states there is no advantage of the variable metric DFP or BFGS methods over
            // conjugate gradient methods. So I will try these first.
            // Try this:
            // Adapt the conjugate gradient optimiser to use the gradient to pick the search direction
            // and then for the line minimisation. However if the function is out of bounds then clip the 
            // variables at the bounds and continue. 
            // If the current point is at the bounds and the gradient is to continue out of bounds then 
            // clip the gradient too.
            // Or: just use the gradient for the search direction then use the line minimisation/rest
            // as per the Powell optimiser. The bounds should limit the search.
            // I tried a Bounded conjugate gradient optimiser with clipped variables:
            // This sometimes works. However when the variables go a long way out of the expected range the gradients
            // can have vastly different magnitudes. This results in the algorithm stalling since the gradients
            // can be close to zero and some of the parameters are no longer adjusted.
            // Perhaps this can be looked for and the algorithm then gives up and resorts to a Powell optimiser from 
            // the current point.
            // Changed the bracketing step to very small (default is 1, changed to 0.001). This improves the 
            // performance. The gradient direction is very sensitive to small changes in the coordinates so a 
            // tighter bracketing of the line search helps.
            // Tried using a non-gradient method for the line search copied from the Powell optimiser:
            // This also works when the bracketing step is small but the number of iterations is higher.
            // 24.10.2014: I have tried to get conjugate gradient to work but the gradient function
            // does not appear to behave suitably for the optimiser. In the current state both methods of using a
            // Bounded Conjugate Gradient Optimiser perform poorly relative to other optimisers:
            // Simulated : n=1000, signal=200, x=0.53, y=0.47
            // LVM : n=1000, signal=171, x=0.537, y=0.471 (1.003s)
            // Powell : n=1000, signal=187, x=0.537, y=0.48 (1.238s)
            // Gradient based PR (constrained): n=858, signal=161, x=0.533, y=0.474 (2.54s)
            // Gradient based PR (bounded): n=948, signal=161, x=0.533, y=0.473 (2.67s)
            // Non-gradient based : n=1000, signal=151.47, x=0.535, y=0.474 (1.626s)
            // The conjugate optimisers are slower, under-predict the signal the most and, in the case of
            // the gradient-based optimiser, fail to converge on some problems. This is worse when constrained
            // fitting is used rather than tightly bounded fitting.
            // I will leave the code in as an option but would not recommend using it. I may remove it in the 
            // future.
            // Note: It is strange that the non-gradient based line minimisation is more successful.
            // It may be that the gradient function is not accurate (due to round off error) or that it is
            // simply wrong when far from the optimum. My JUnit tests only evaluate the function within the 
            // expected range of the answer.
            // Note the default step size on the Powell optimiser is 1 but the initial directions are unit vectors.
            // So our bracketing step should be a minimum of 1 / average length of the first gradient vector to prevent
            // the first step being too large when bracketing.
            final double[] gradient = new double[startPoint.length];
            maximumLikelihoodFunction.likelihood(startPoint, gradient);
            double l = 0;
            for (double d : gradient) l += d * d;
            final double bracketingStep = FastMath.min(0.001, ((l > 1) ? 1.0 / l : 1));
            //System.out.printf("Bracketing step = %f (length=%f)\n", bracketingStep, l);
            o.setUseGradientLineSearch(gradientLineMinimisation);
            optimum = o.optimize(new MaxEval(getMaxEvaluations()), new ObjectiveFunctionGradient(new MultivariateVectorLikelihood(maximumLikelihoodFunction)), new ObjectiveFunction(new MultivariateLikelihood(maximumLikelihoodFunction)), GoalType.MINIMIZE, new InitialGuess(startPoint), new SimpleBounds(lowerConstraint, upperConstraint), new BoundedNonLinearConjugateGradientOptimizer.BracketingStep(bracketingStep));
        //maximumLikelihoodFunction.value(solution, gradient);
        //System.out.printf("Iter = %d, %g @ %s : %s\n", iterations, ll, Arrays.toString(solution),
        //		Arrays.toString(gradient));
        }
        final double[] solution = optimum.getPointRef();
        setSolution(a, solution);
        if (a_dev != null) {
            // Assume the Maximum Likelihood estimator returns the optimum fit (achieves the Cramer-Rao
            // lower bounds) and so the covariance can be obtained from the Fisher Information Matrix.
            FisherInformationMatrix m = new FisherInformationMatrix(maximumLikelihoodFunction.fisherInformation(a));
            setDeviations(a_dev, m.crlb(true));
        }
        // Reverse negative log likelihood for maximum likelihood score
        value = -optimum.getValue();
    } catch (TooManyIterationsException e) {
        //e.printStackTrace();
        return FitStatus.TOO_MANY_ITERATIONS;
    } catch (TooManyEvaluationsException e) {
        //e.printStackTrace();
        return FitStatus.TOO_MANY_EVALUATIONS;
    } catch (ConvergenceException e) {
        //System.out.printf("Singular non linear model = %s\n", e.getMessage());
        return FitStatus.SINGULAR_NON_LINEAR_MODEL;
    } catch (BFGSOptimizer.LineSearchRoundoffException e) {
        //e.printStackTrace();
        return FitStatus.FAILED_TO_CONVERGE;
    } catch (Exception e) {
        //System.out.printf("Unknown error = %s\n", e.getMessage());
        e.printStackTrace();
        return FitStatus.UNKNOWN;
    } finally {
        if (baseOptimiser != null) {
            iterations += baseOptimiser.getIterations();
            evaluations += baseOptimiser.getEvaluations();
        }
    }
    // Check this as likelihood functions can go wrong
    if (Double.isInfinite(value) || Double.isNaN(value))
        return FitStatus.INVALID_LIKELIHOOD;
    return FitStatus.OK;
}
Also used : MaxEval(org.apache.commons.math3.optim.MaxEval) InitialGuess(org.apache.commons.math3.optim.InitialGuess) BOBYQAOptimizer(org.apache.commons.math3.optim.nonlinear.scalar.noderiv.BOBYQAOptimizer) SimpleBounds(org.apache.commons.math3.optim.SimpleBounds) ObjectiveFunction(org.apache.commons.math3.optim.nonlinear.scalar.ObjectiveFunction) Well19937c(org.apache.commons.math3.random.Well19937c) SimpleValueChecker(org.apache.commons.math3.optim.SimpleValueChecker) RandomGenerator(org.apache.commons.math3.random.RandomGenerator) BFGSOptimizer(org.apache.commons.math3.optim.nonlinear.scalar.gradient.BFGSOptimizer) PointValuePair(org.apache.commons.math3.optim.PointValuePair) TooManyEvaluationsException(org.apache.commons.math3.exception.TooManyEvaluationsException) Gaussian2DFunction(gdsc.smlm.function.gaussian.Gaussian2DFunction) ConvergenceException(org.apache.commons.math3.exception.ConvergenceException) BoundedNonLinearConjugateGradientOptimizer(org.apache.commons.math3.optim.nonlinear.scalar.gradient.BoundedNonLinearConjugateGradientOptimizer) TooManyIterationsException(org.apache.commons.math3.exception.TooManyIterationsException) BaseOptimizer(org.apache.commons.math3.optim.BaseOptimizer) CMAESOptimizer(org.apache.commons.math3.optim.nonlinear.scalar.noderiv.CMAESOptimizer) FisherInformationMatrix(gdsc.smlm.fitting.FisherInformationMatrix) PoissonGammaGaussianLikelihoodWrapper(gdsc.smlm.function.PoissonGammaGaussianLikelihoodWrapper) PoissonGaussianLikelihoodWrapper(gdsc.smlm.function.PoissonGaussianLikelihoodWrapper) PoissonLikelihoodWrapper(gdsc.smlm.function.PoissonLikelihoodWrapper) LikelihoodWrapper(gdsc.smlm.function.LikelihoodWrapper) ObjectiveFunctionGradient(org.apache.commons.math3.optim.nonlinear.scalar.ObjectiveFunctionGradient) MultivariateFunctionMappingAdapter(org.apache.commons.math3.optim.nonlinear.scalar.MultivariateFunctionMappingAdapter) OptimizationData(org.apache.commons.math3.optim.OptimizationData) CustomPowellOptimizer(org.apache.commons.math3.optim.nonlinear.scalar.noderiv.CustomPowellOptimizer) MaxIter(org.apache.commons.math3.optim.MaxIter)
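
The POWELL_ADAPTER branch above wraps the likelihood in a MultivariateFunctionMappingAdapter so a bounded problem can be handed to an unbounded optimiser and the result mapped back into the bounded space. A minimal sketch of that adapter pattern with a stand-in objective follows; the stock PowellOptimizer is used here for brevity instead of the project's CustomPowellOptimizer, and all numbers are illustrative.

import org.apache.commons.math3.analysis.MultivariateFunction;
import org.apache.commons.math3.optim.InitialGuess;
import org.apache.commons.math3.optim.MaxEval;
import org.apache.commons.math3.optim.PointValuePair;
import org.apache.commons.math3.optim.nonlinear.scalar.GoalType;
import org.apache.commons.math3.optim.nonlinear.scalar.MultivariateFunctionMappingAdapter;
import org.apache.commons.math3.optim.nonlinear.scalar.ObjectiveFunction;
import org.apache.commons.math3.optim.nonlinear.scalar.noderiv.PowellOptimizer;

public class MappedPowellSketch {
    public static void main(String[] args) {
        // Stand-in objective with its minimum at {2, 3}; a real use would wrap the negative log-likelihood
        MultivariateFunction objective = new MultivariateFunction() {
            public double value(double[] p) {
                final double dx = p[0] - 2;
                final double dy = p[1] - 3;
                return dx * dx + dy * dy;
            }
        };
        final double[] lower = { 0, 0 };
        final double[] upper = { 10, 10 };
        final double[] start = { 5, 5 };

        // Map the bounded parameter space to an unbounded one so a plain Powell search can be used
        MultivariateFunctionMappingAdapter adapter =
                new MultivariateFunctionMappingAdapter(objective, lower, upper);
        PowellOptimizer optimizer = new PowellOptimizer(1e-8, 1e-30);
        PointValuePair optimum = optimizer.optimize(
                new MaxEval(20000),
                new ObjectiveFunction(adapter),
                GoalType.MINIMIZE,
                new InitialGuess(adapter.boundedToUnbounded(start)));

        // Map the result back into the original bounded space
        double[] solution = adapter.unboundedToBounded(optimum.getPoint());
        System.out.println(java.util.Arrays.toString(solution) + " -> " + optimum.getValue());
    }
}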

Example 10 with Optimum

Use of org.apache.commons.math3.fitting.leastsquares.LeastSquaresOptimizer.Optimum in project GDSC-SMLM by aherbert.

The class BenchmarkFilterAnalysis, method filterAnalysis.

private int filterAnalysis(FilterSet filterSet, int setNumber, DirectFilter currentOptimum, double rangeReduction) {
    // Check if the filters are the same so allowing optimisation
    final boolean allSameType = filterSet.allSameType();
    this.ga_resultsList = resultsList;
    Chromosome<FilterScore> best = null;
    String algorithm = "";
    // All the search algorithms use search dimensions.
    // Create search dimensions if needed (these are used for testing if the optimum is at the limit).
    ss_filter = null;
    ss_lower = null;
    ss_upper = null;
    FixedDimension[] originalDimensions = null;
    boolean rangeInput = false;
    boolean[] disabled = null;
    double[][] seed = null;
    boolean nonInteractive = false;
    if (allSameType) {
        // There should always be 1 filter
        ss_filter = (DirectFilter) filterSet.getFilters().get(0);
        int n = ss_filter.getNumberOfParameters();
        // Option to configure a range
        rangeInput = filterSet.getName().contains("Range");
        double[] range = new double[n];
        if (rangeInput && filterSet.size() == 4) {
            originalDimensions = new FixedDimension[n];
            // This is used as min/lower/upper/max
            final Filter minF = ss_filter;
            final Filter lowerF = filterSet.getFilters().get(1);
            final Filter upperF = filterSet.getFilters().get(2);
            final Filter maxF = filterSet.getFilters().get(3);
            for (int i = 0; i < n; i++) {
                double min = minF.getParameterValue(i);
                double lower = lowerF.getParameterValue(i);
                double upper = upperF.getParameterValue(i);
                range[i] = upper - lower;
                double max = maxF.getParameterValue(i);
                double minIncrement = ss_filter.getParameterIncrement(i);
                try {
                    originalDimensions[i] = new FixedDimension(min, max, minIncrement, lower, upper);
                } catch (IllegalArgumentException e) {
                    Utils.log(TITLE + " : Unable to configure dimension [%d] %s: " + e.getMessage(), i, ss_filter.getParameterName(i));
                    originalDimensions = null;
                    rangeInput = false;
                    break;
                }
            }
        }
        if (rangeInput && (filterSet.size() == 3 || filterSet.size() == 2)) {
            originalDimensions = new FixedDimension[n];
            // This is used as lower/upper[/increment]
            final Filter lowerF = ss_filter;
            final Filter upperF = filterSet.getFilters().get(1);
            for (int i = 0; i < n; i++) {
                // Do not disable if the increment is not set. This is left to the user to decide.
                //					if (incF.getParameterValue(i) == incF.getDisabledParameterValue(i) ||
                //							Double.isInfinite(incF.getParameterValue(i)))
                //					{
                //						// Not enabled
                //						dimensions[i] = new SearchDimension(incF.getDisabledParameterValue(i));
                //						continue;
                //					}
                double lower = lowerF.getParameterValue(i);
                double upper = upperF.getParameterValue(i);
                range[i] = upper - lower;
                ParameterType type = ss_filter.getParameterType(i);
                double min = BenchmarkSpotFit.getMin(type);
                double max = BenchmarkSpotFit.getMax(type);
                double minIncrement = ss_filter.getParameterIncrement(i);
                try {
                    originalDimensions[i] = new FixedDimension(min, max, minIncrement, lower, upper);
                } catch (IllegalArgumentException e) {
                    Utils.log(TITLE + " : Unable to configure dimension [%d] %s: " + e.getMessage(), i, ss_filter.getParameterName(i));
                    originalDimensions = null;
                    rangeInput = false;
                    break;
                }
            }
        }
        // Get the dimensions from the filters
        if (originalDimensions == null) {
            originalDimensions = new FixedDimension[n];
            // Allow inputting a filter set (e.g. saved from a previous optimisation)
            // Find the limits in the current scores
            final double[] lower = ss_filter.getParameters().clone();
            final double[] upper = lower.clone();
            // Allow the SearchSpace algorithms to be seeded with an initial population 
            // for the first evaluation of the optimum. This is done when the input filter 
            // set is not a range.
            seed = new double[filterSet.size()][];
            int c = 0;
            for (Filter f : filterSet.getFilters()) {
                final double[] point = f.getParameters();
                seed[c++] = point;
                for (int j = 0; j < lower.length; j++) {
                    if (lower[j] > point[j])
                        lower[j] = point[j];
                    if (upper[j] < point[j])
                        upper[j] = point[j];
                }
            }
            // Min/max must be set using values from BenchmarkSpotFit.
            for (int i = 0; i < n; i++) {
                if (lower[i] == upper[i]) {
                    // Not enabled
                    originalDimensions[i] = new FixedDimension(lower[i]);
                    continue;
                }
                ParameterType type = ss_filter.getParameterType(i);
                double min = BenchmarkSpotFit.getMin(type);
                double max = BenchmarkSpotFit.getMax(type);
                double minIncrement = ss_filter.getParameterIncrement(i);
                if (min > lower[i])
                    min = lower[i];
                if (max < upper[i])
                    max = upper[i];
                try {
                    originalDimensions[i] = new FixedDimension(min, max, minIncrement, lower[i], upper[i]);
                } catch (IllegalArgumentException e) {
                    Utils.log(TITLE + " : Unable to configure dimension [%d] %s: " + e.getMessage(), i, ss_filter.getParameterName(i));
                    originalDimensions = null;
                    break;
                }
            }
            if (originalDimensions == null) {
                // Failed to work out the dimensions. No optimisation will be possible.
                // Sort so that the filters are in a nice order for reporting
                filterSet.sort();
                // This will not be used when the dimensions are null
                seed = null;
            }
        }
        if (originalDimensions != null) {
            // Use the current optimum if we are doing a range optimisation
            if (currentOptimum != null && rangeInput && currentOptimum.getType().equals(ss_filter.getType()) && evolve != 0) {
                // Suppress dialogs and use the current settings
                nonInteractive = true;
                double[] p = currentOptimum.getParameters();
                // Range search uses SearchDimension and we must centre on the optimum after creation.							
                for (int i = 0; i < originalDimensions.length; i++) {
                    double centre = p[i];
                    double r = 0;
                    if (originalDimensions[i].isActive()) {
                        // Set the range around the centre.
                        // This uses the range for each param when we read the filters.
                        r = range[i];
                        // Optionally reduce the width of the dimensions. 
                        if (rangeReduction > 0 && rangeReduction < 1)
                            r *= rangeReduction;
                    }
                    double lower = centre - r * 0.5;
                    double upper = centre + r * 0.5;
                    originalDimensions[i] = originalDimensions[i].create(lower, upper);
                }
            }
            // Store the dimensions so we can do an 'at limit' check
            disabled = new boolean[originalDimensions.length];
            ss_lower = new double[originalDimensions.length];
            ss_upper = new double[originalDimensions.length];
            for (int i = 0; i < disabled.length; i++) {
                disabled[i] = !originalDimensions[i].isActive();
                ss_lower[i] = originalDimensions[i].lower;
                ss_upper[i] = originalDimensions[i].upper;
            }
        }
    } else {
        // Sort so that the filters are in a nice order for reporting
        filterSet.sort();
    }
    analysisStopWatch = StopWatch.createStarted();
    if (evolve == 1 && originalDimensions != null) {
        // Collect parameters for the genetic algorithm
        pauseFilterTimer();
        // Remember the step size settings
        double[] stepSize = stepSizeMap.get(setNumber);
        if (stepSize == null || stepSize.length != ss_filter.length()) {
            stepSize = ss_filter.mutationStepRange().clone();
            for (int j = 0; j < stepSize.length; j++) stepSize[j] *= delta;
            // See if the same number of parameters have been optimised in other algorithms
            boolean[] enabled = searchRangeMap.get(setNumber);
            if (enabled != null && enabled.length == stepSize.length) {
                for (int j = 0; j < stepSize.length; j++) if (!enabled[j])
                    stepSize[j] *= -1;
            }
        }
        GenericDialog gd = null;
        int[] indices = ss_filter.getChromosomeParameters();
        boolean runAlgorithm = nonInteractive;
        if (!nonInteractive) {
            // Ask the user for the mutation step parameters.
            gd = new GenericDialog(TITLE);
            String prefix = setNumber + "_";
            gd.addMessage("Configure the genetic algorithm for [" + setNumber + "] " + filterSet.getName());
            gd.addNumericField(prefix + "Population_size", populationSize, 0);
            gd.addNumericField(prefix + "Failure_limit", failureLimit, 0);
            gd.addNumericField(prefix + "Tolerance", tolerance, -1);
            gd.addNumericField(prefix + "Converged_count", convergedCount, 0);
            gd.addSlider(prefix + "Mutation_rate", 0.05, 1, mutationRate);
            gd.addSlider(prefix + "Crossover_rate", 0.05, 1, crossoverRate);
            gd.addSlider(prefix + "Mean_children", 0.05, 3, meanChildren);
            gd.addSlider(prefix + "Selection_fraction", 0.05, 0.5, selectionFraction);
            gd.addCheckbox(prefix + "Ramped_selection", rampedSelection);
            gd.addCheckbox(prefix + "Save_option", saveOption);
            gd.addMessage("Configure the step size for each parameter");
            for (int j = 0; j < indices.length; j++) {
                // Do not mutate parameters that were not expanded, i.e. the input did not vary them.
                final double step = (originalDimensions[indices[j]].isActive()) ? stepSize[j] : 0;
                gd.addNumericField(getDialogName(prefix, ss_filter, indices[j]), step, 2);
            }
            gd.showDialog();
            runAlgorithm = !gd.wasCanceled();
        }
        if (runAlgorithm) {
            // Used to create random sample
            FixedDimension[] dimensions = Arrays.copyOf(originalDimensions, originalDimensions.length);
            if (!nonInteractive) {
                populationSize = (int) Math.abs(gd.getNextNumber());
                if (populationSize < 10)
                    populationSize = 10;
                failureLimit = (int) Math.abs(gd.getNextNumber());
                tolerance = gd.getNextNumber();
                // Allow negatives
                convergedCount = (int) gd.getNextNumber();
                mutationRate = Math.abs(gd.getNextNumber());
                crossoverRate = Math.abs(gd.getNextNumber());
                meanChildren = Math.abs(gd.getNextNumber());
                selectionFraction = Math.abs(gd.getNextNumber());
                rampedSelection = gd.getNextBoolean();
                saveOption = gd.getNextBoolean();
                for (int j = 0; j < indices.length; j++) {
                    stepSize[j] = gd.getNextNumber();
                }
                // Store for repeat analysis
                stepSizeMap.put(setNumber, stepSize);
            }
            for (int j = 0; j < indices.length; j++) {
                // A zero step size will keep the parameter but prevent range mutation.
                if (stepSize[j] < 0) {
                    dimensions[indices[j]] = new FixedDimension(ss_filter.getDisabledParameterValue(indices[j]));
                    disabled[indices[j]] = true;
                }
            }
            // Reset negatives to zero:
            //   stepSize = stepSize.clone();
            //   for (int j = 0; j < stepSize.length; j++)
            //       if (stepSize[j] < 0)
            //           stepSize[j] = 0;
            // Create the genetic algorithm
            RandomDataGenerator random = new RandomDataGenerator(new Well44497b());
            SimpleMutator<FilterScore> mutator = new SimpleMutator<FilterScore>(random, mutationRate);
            // Override the settings with the step length, a min of zero and the configured upper
            double[] upper = ss_filter.upperLimit();
            mutator.overrideChromosomeSettings(stepSize, new double[stepSize.length], upper);
            Recombiner<FilterScore> recombiner = new SimpleRecombiner<FilterScore>(random, crossoverRate, meanChildren);
            SelectionStrategy<FilterScore> selectionStrategy;
            // If the initial population is huge ensure that the first selection culls to the correct size
            final int selectionMax = (int) (selectionFraction * populationSize);
            if (rampedSelection)
                selectionStrategy = new RampedSelectionStrategy<FilterScore>(random, selectionFraction, selectionMax);
            else
                selectionStrategy = new SimpleSelectionStrategy<FilterScore>(random, selectionFraction, selectionMax);
            ToleranceChecker<FilterScore> ga_checker = new InterruptChecker(tolerance, tolerance * 1e-3, convergedCount);
            // Create new random filters if the population is initially below the population size
            List<Filter> filters = filterSet.getFilters();
            if (filterSet.size() < populationSize) {
                filters = new ArrayList<Filter>(populationSize);
                // Add the existing filters if they are not a range input file
                if (!rangeInput)
                    filters.addAll(filterSet.getFilters());
                // Add current optimum to seed
                if (nonInteractive)
                    filters.add(currentOptimum);
                // The GA does not use the min interval grid so sample without rounding
                double[][] sample = SearchSpace.sampleWithoutRounding(dimensions, populationSize - filters.size(), null);
                filters.addAll(searchSpaceToFilters(sample));
            }
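            // The population now contains the input filters (plus the current optimum when running
            // non-interactively), padded to the population size with random samples of the search space.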
            ga_population = new Population<FilterScore>(filters);
            ga_population.setPopulationSize(populationSize);
            ga_population.setFailureLimit(failureLimit);
            selectionStrategy.setTracker(this);
            // Evolve
            algorithm = EVOLVE[evolve];
            ga_statusPrefix = algorithm + " [" + setNumber + "] " + filterSet.getName() + " ... ";
            ga_iteration = 0;
            ga_population.setTracker(this);
            createGAWindow();
            resumeFilterTimer();
            best = ga_population.evolve(mutator, recombiner, this, selectionStrategy, ga_checker);
            if (best != null) {
                // In case optimisation was stopped
                IJ.resetEscape();
                // The GA may produce coordinates off the min interval grid
                best = enumerateMinInterval(best, stepSize, indices);
                // Now update the filter set for final assessment
                filterSet = new FilterSet(filterSet.getName(), populationToFilters(ga_population.getIndividuals()));
                // Option to save the filters
                if (saveOption)
                    saveFilterSet(filterSet, setNumber, !nonInteractive);
            }
        } else
            resumeFilterTimer();
    }
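    // evolve == 2: range search with iterative range reduction; evolve == 4: step search, which
    // uses a multi-dimension refinement and no range reduction.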
    if ((evolve == 2 || evolve == 4) && originalDimensions != null) {
        // Collect parameters for the range search algorithm
        pauseFilterTimer();
        boolean isStepSearch = evolve == 4;
        // The step search should use a multi-dimension refinement and no range reduction
        SearchSpace.RefinementMode myRefinementMode = SearchSpace.RefinementMode.MULTI_DIMENSION;
        // Remember the enabled settings
        boolean[] enabled = searchRangeMap.get(setNumber);
        int n = ss_filter.getNumberOfParameters();
        if (enabled == null || enabled.length != n) {
            enabled = new boolean[n];
            Arrays.fill(enabled, true);
            // See if the same number of parameters have been optimised in other algorithms
            double[] stepSize = stepSizeMap.get(setNumber);
            if (stepSize != null && enabled.length == stepSize.length) {
                for (int j = 0; j < stepSize.length; j++) if (stepSize[j] < 0)
                    enabled[j] = false;
            }
        }
        GenericDialog gd = null;
        boolean runAlgorithm = nonInteractive;
        if (!nonInteractive) {
            // Ask the user for the search parameters.
            gd = new GenericDialog(TITLE);
            String prefix = setNumber + "_";
            gd.addMessage("Configure the " + EVOLVE[evolve] + " algorithm for [" + setNumber + "] " + filterSet.getName());
            gd.addSlider(prefix + "Width", 1, 5, rangeSearchWidth);
            if (!isStepSearch) {
                gd.addCheckbox(prefix + "Save_option", saveOption);
                gd.addNumericField(prefix + "Max_iterations", maxIterations, 0);
                String[] modes = SettingsManager.getNames((Object[]) SearchSpace.RefinementMode.values());
                gd.addSlider(prefix + "Reduce", 0.01, 0.99, rangeSearchReduce);
                gd.addChoice("Refinement", modes, modes[refinementMode]);
            }
            gd.addNumericField(prefix + "Seed_size", seedSize, 0);
            // Add choice of fields to optimise
            for (int i = 0; i < n; i++) gd.addCheckbox(getDialogName(prefix, ss_filter, i), enabled[i]);
            gd.showDialog();
            runAlgorithm = !gd.wasCanceled();
        }
        if (runAlgorithm) {
            SearchDimension[] dimensions = new SearchDimension[n];
            if (!nonInteractive) {
                rangeSearchWidth = (int) gd.getNextNumber();
                if (!isStepSearch) {
                    saveOption = gd.getNextBoolean();
                    maxIterations = (int) gd.getNextNumber();
                    refinementMode = gd.getNextChoiceIndex();
                    rangeSearchReduce = gd.getNextNumber();
                }
                seedSize = (int) gd.getNextNumber();
                for (int i = 0; i < n; i++) enabled[i] = gd.getNextBoolean();
                searchRangeMap.put(setNumber, enabled);
            }
            if (!isStepSearch)
                myRefinementMode = SearchSpace.RefinementMode.values()[refinementMode];
            for (int i = 0; i < n; i++) {
                if (enabled[i]) {
                    try {
                        dimensions[i] = originalDimensions[i].create(rangeSearchWidth);
                        dimensions[i].setPad(true);
                        // Prevent range reduction so that the step search just does a single refinement step
                        dimensions[i].setReduceFactor((isStepSearch) ? 1 : rangeSearchReduce);
                        // Centre on current optimum
                        if (nonInteractive)
                            dimensions[i].setCentre(currentOptimum.getParameterValue(i));
                    } catch (IllegalArgumentException e) {
                        IJ.error(TITLE, String.format("Unable to configure dimension [%d] %s: %s", i, ss_filter.getParameterName(i), e.getMessage()));
                        return -1;
                    }
                } else {
                    dimensions[i] = new SearchDimension(ss_filter.getDisabledParameterValue(i));
                }
            }
            for (int i = 0; i < disabled.length; i++) disabled[i] = !dimensions[i].active;
            // Check the number of combinations is OK
            long combinations = SearchSpace.countCombinations(dimensions);
            if (!nonInteractive && combinations > 10000) {
                gd = new GenericDialog(TITLE);
                gd.addMessage(String.format("%d combinations for the configured dimensions.\n \nClick 'Yes' to optimise.", combinations));
                gd.enableYesNoCancel();
                gd.hideCancelButton();
                gd.showDialog();
                if (!gd.wasOKed()) {
                    combinations = 0;
                }
            }
            if (combinations == 0) {
                resumeFilterTimer();
            } else {
                algorithm = EVOLVE[evolve] + " " + rangeSearchWidth;
                ga_statusPrefix = algorithm + " [" + setNumber + "] " + filterSet.getName() + " ... ";
                ga_iteration = 0;
                es_optimum = null;
                SearchSpace ss = new SearchSpace();
                ss.setTracker(this);
                if (seedSize > 0) {
                    double[][] sample;
                    // Add current optimum to seed
                    if (nonInteractive) {
                        sample = new double[1][];
                        sample[0] = currentOptimum.getParameters();
                        seed = merge(seed, sample);
                    }
                    int size = (seed == null) ? 0 : seed.length;
                    // Sample without rounding as the seed will be rounded
                    sample = SearchSpace.sampleWithoutRounding(dimensions, seedSize - size, null);
                    seed = merge(seed, sample);
                }
                // Note: if we already have an optimum and are not seeding, this does not matter
                // because the dimensions have been centred on the current optimum.
                ss.seed(seed);
                ConvergenceChecker<FilterScore> checker = new InterruptConvergenceChecker(0, 0, maxIterations);
                createGAWindow();
                resumeFilterTimer();
                SearchResult<FilterScore> optimum = ss.search(dimensions, this, checker, myRefinementMode);
                if (optimum != null) {
                    // In case optimisation was stopped
                    IJ.resetEscape();
                    best = ((SimpleFilterScore) optimum.score).r.filter;
                    if (seedSize > 0) {
                        // Not required as the search now respects the min interval
                        // The optimum may be off grid if it was from the seed
                        //best = enumerateMinInterval(best, enabled);
                    }
                    // Now update the filter set for final assessment
                    filterSet = new FilterSet(filterSet.getName(), searchSpaceToFilters((DirectFilter) best, ss.getSearchSpace()));
                    // Option to save the filters
                    if (saveOption)
                        saveFilterSet(filterSet, setNumber, !nonInteractive);
                }
            }
        } else
            resumeFilterTimer();
    }
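    // evolve == 3: enrichment search over the enabled dimensions, driven by the configured
    // sample count, enrichment fraction and padding.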
    if (evolve == 3 && originalDimensions != null) {
        // Collect parameters for the enrichment search algorithm
        pauseFilterTimer();
        boolean[] enabled = searchRangeMap.get(setNumber);
        int n = ss_filter.getNumberOfParameters();
        if (enabled == null || enabled.length != n) {
            enabled = new boolean[n];
            Arrays.fill(enabled, true);
            // See if the same number of parameters have been optimised in other algorithms
            double[] stepSize = stepSizeMap.get(setNumber);
            if (stepSize != null && enabled.length == stepSize.length) {
                for (int j = 0; j < stepSize.length; j++) if (stepSize[j] < 0)
                    enabled[j] = false;
            }
        }
        GenericDialog gd = null;
        boolean runAlgorithm = nonInteractive;
        if (!nonInteractive) {
            // Ask the user for the search parameters.
            gd = new GenericDialog(TITLE);
            String prefix = setNumber + "_";
            gd.addMessage("Configure the enrichment search algorithm for [" + setNumber + "] " + filterSet.getName());
            gd.addCheckbox(prefix + "Save_option", saveOption);
            gd.addNumericField(prefix + "Max_iterations", maxIterations, 0);
            gd.addNumericField(prefix + "Converged_count", convergedCount, 0);
            gd.addNumericField(prefix + "Samples", enrichmentSamples, 0);
            gd.addSlider(prefix + "Fraction", 0.01, 0.99, enrichmentFraction);
            gd.addSlider(prefix + "Padding", 0, 0.99, enrichmentPadding);
            // Add choice of fields to optimise
            for (int i = 0; i < n; i++) gd.addCheckbox(getDialogName(prefix, ss_filter, i), enabled[i]);
            gd.showDialog();
            runAlgorithm = !gd.wasCanceled();
        }
        if (runAlgorithm) {
            FixedDimension[] dimensions = Arrays.copyOf(originalDimensions, originalDimensions.length);
            if (!nonInteractive) {
                saveOption = gd.getNextBoolean();
                maxIterations = (int) gd.getNextNumber();
                convergedCount = (int) gd.getNextNumber();
                enrichmentSamples = (int) gd.getNextNumber();
                enrichmentFraction = gd.getNextNumber();
                enrichmentPadding = gd.getNextNumber();
                for (int i = 0; i < n; i++) enabled[i] = gd.getNextBoolean();
                searchRangeMap.put(setNumber, enabled);
            }
            for (int i = 0; i < n; i++) {
                if (!enabled[i])
                    dimensions[i] = new FixedDimension(ss_filter.getDisabledParameterValue(i));
            }
            for (int i = 0; i < disabled.length; i++) disabled[i] = !dimensions[i].active;
            algorithm = EVOLVE[evolve];
            ga_statusPrefix = algorithm + " [" + setNumber + "] " + filterSet.getName() + " ... ";
            ga_iteration = 0;
            es_optimum = null;
            SearchSpace ss = new SearchSpace();
            ss.setTracker(this);
            // Add current optimum to seed
            if (nonInteractive) {
                double[][] sample = new double[1][];
                sample[0] = currentOptimum.getParameters();
                seed = merge(seed, sample);
            }
            ss.seed(seed);
            ConvergenceChecker<FilterScore> checker = new InterruptConvergenceChecker(0, 0, maxIterations, convergedCount);
            createGAWindow();
            resumeFilterTimer();
            SearchResult<FilterScore> optimum = ss.enrichmentSearch(dimensions, this, checker, enrichmentSamples, enrichmentFraction, enrichmentPadding);
            if (optimum != null) {
                // In case optimisation was stopped
                IJ.resetEscape();
                best = ((SimpleFilterScore) optimum.score).r.filter;
                // Not required as the search now respects the min interval
                // Enumerate on the min interval to produce the final filter
                //best = enumerateMinInterval(best, enabled);
                // Now update the filter set for final assessment
                filterSet = new FilterSet(filterSet.getName(), searchSpaceToFilters((DirectFilter) best, ss.getSearchSpace()));
                // Option to save the filters
                if (saveOption)
                    saveFilterSet(filterSet, setNumber, !nonInteractive);
            }
        } else
            resumeFilterTimer();
    }
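    // Assessment: when an optimisation produced a best filter only that filter is scored for the
    // summary; otherwise every filter in the set is scored and, if configured, plotted.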
    IJ.showStatus("Analysing [" + setNumber + "] " + filterSet.getName() + " ...");
    // Do not support plotting if we used optimisation
    double[] xValues = (best != null || isHeadless || (plotTopN == 0)) ? null : new double[filterSet.size()];
    double[] yValues = (xValues == null) ? null : new double[xValues.length];
    SimpleFilterScore max = null;
    // When optimisation was used, only the top filter needs to be assessed for the summary.
    if (best != null) {
        // Only assess the top 1 filter for the summary
        List<Filter> list = new ArrayList<Filter>();
        list.add((DirectFilter) best);
        filterSet = new FilterSet(filterSet.getName(), list);
    }
    // Score the filters and report the results if configured.
    FilterScoreResult[] scoreResults = scoreFilters(setUncomputedStrength(filterSet), showResultsTable);
    if (scoreResults == null)
        return -1;
    analysisStopWatch.stop();
    for (int index = 0; index < scoreResults.length; index++) {
        final FilterScoreResult scoreResult = scoreResults[index];
        if (xValues != null) {
            xValues[index] = scoreResult.filter.getNumericalValue();
            yValues[index] = scoreResult.score;
        }
        final SimpleFilterScore result = new SimpleFilterScore(scoreResult, allSameType, scoreResult.criteria >= minCriteria);
        if (result.compareTo(max) < 0) {
            max = result;
        }
    }
    if (showResultsTable) {
        BufferedTextWindow tw = null;
        if (resultsWindow != null) {
            tw = new BufferedTextWindow(resultsWindow);
            tw.setIncrement(Integer.MAX_VALUE);
        }
        for (int index = 0; index < scoreResults.length; index++) addToResultsWindow(tw, scoreResults[index].text);
        if (resultsWindow != null)
            resultsWindow.getTextPanel().updateDisplay();
    }
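    // Compare each enabled parameter of the top filter with the lower and upper limits of the
    // original dimensions and record any value that sits at, or beyond, the expanded range.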
    // Check the top filter against the limits of the original dimensions
    char[] atLimit = null;
    if (allSameType && originalDimensions != null) {
        DirectFilter filter = max.r.filter;
        int[] indices = filter.getChromosomeParameters();
        atLimit = new char[indices.length];
        StringBuilder sb = new StringBuilder(200);
        for (int j = 0; j < indices.length; j++) {
            atLimit[j] = ComplexFilterScore.WITHIN;
            final int p = indices[j];
            if (disabled[p])
                continue;
            final double value = filter.getParameterValue(p);
            double lowerLimit = originalDimensions[p].getLower();
            double upperLimit = originalDimensions[p].getUpper();
            int c1 = Double.compare(value, lowerLimit);
            if (c1 <= 0) {
                atLimit[j] = ComplexFilterScore.FLOOR;
                sb.append(" : ").append(filter.getParameterName(p)).append(' ').append(atLimit[j]).append('[').append(Utils.rounded(value));
                if (c1 == -1) {
                    atLimit[j] = ComplexFilterScore.BELOW;
                    sb.append("<").append(Utils.rounded(lowerLimit));
                }
                sb.append("]");
            } else {
                int c2 = Double.compare(value, upperLimit);
                if (c2 >= 0) {
                    atLimit[j] = ComplexFilterScore.CEIL;
                    sb.append(" : ").append(filter.getParameterName(p)).append(' ').append(atLimit[j]).append('[').append(Utils.rounded(value));
                    if (c2 == 1) {
                        atLimit[j] = ComplexFilterScore.ABOVE;
                        sb.append(">").append(Utils.rounded(upperLimit));
                    }
                    sb.append("]");
                }
            }
        }
        if (sb.length() > 0) {
            if (max.criteriaPassed) {
                Utils.log("Warning: Top filter (%s @ %s|%s) [%s] at the limit of the expanded range%s", filter.getName(), Utils.rounded((invertScore) ? -max.score : max.score), Utils.rounded((invertCriteria) ? -minCriteria : minCriteria), limitFailCount + limitRange, sb.toString());
            } else {
                Utils.log("Warning: Top filter (%s @ -|%s) [%s] at the limit of the expanded range%s", filter.getName(), Utils.rounded((invertCriteria) ? -max.criteria : max.criteria), limitFailCount + limitRange, sb.toString());
            }
        }
    }
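    // Finally report a warning if the criteria were not passed, store the best filter for this
    // filter type and optionally plot the score against each filter's numerical value.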
    // Note that max should never be null since this method is not run with an empty filter set
    // We may have no filters that pass the criteria
    String type = max.r.filter.getType();
    if (!max.criteriaPassed) {
        Utils.log("Warning: Filter does not pass the criteria: %s : Best = %s using %s", type, Utils.rounded((invertCriteria) ? -max.criteria : max.criteria), max.r.filter.getName());
        return 0;
    }
    // This could be an option?
    boolean allowDuplicates = true;
    // XXX - Commented out the requirement to be the same type to store for later analysis.
    // This may break the code; however, I think all filter sets should be able to have a best
    // filter irrespective of whether they were the same type or not.
    //if (allSameType)
    //{
    ComplexFilterScore newFilterScore = new ComplexFilterScore(max.r, atLimit, algorithm, analysisStopWatch.getTime(), "", 0);
    addBestFilter(type, allowDuplicates, newFilterScore);
    // Add spacer at end of each result set
    if (isHeadless) {
        if (showResultsTable && filterSet.size() > 1)
            IJ.log("");
    } else {
        if (showResultsTable && filterSet.size() > 1)
            resultsWindow.append("");
        if (plotTopN > 0 && xValues != null) {
            // Check the xValues are unique. Since the filters have been sorted by their
            // numeric value we only need to compare adjacent entries.
            boolean unique = true;
            for (int ii = 0; ii < xValues.length - 1; ii++) {
                if (xValues[ii] == xValues[ii + 1]) {
                    unique = false;
                    break;
                }
            }
            String xAxisName = filterSet.getValueName();
            if (unique) {
                // Check the values all refer to the same property
                for (Filter filter : filterSet.getFilters()) {
                    if (!xAxisName.equals(filter.getNumericalValueName())) {
                        unique = false;
                        break;
                    }
                }
            }
            if (!unique) {
                // If not unique then renumber them and use an arbitrary label
                xAxisName = "Filter";
                for (int ii = 0; ii < xValues.length; ii++) xValues[ii] = ii + 1;
            }
            String title = filterSet.getName();
            // Check if a previous filter set had the same name, update if necessary
            NamedPlot p = getNamedPlot(title);
            if (p == null)
                plots.add(new NamedPlot(title, xAxisName, xValues, yValues));
            else
                p.updateValues(xAxisName, xValues, yValues);
            if (plots.size() > plotTopN) {
                Collections.sort(plots);
                p = plots.remove(plots.size() - 1);
            }
        }
    }
    return 0;
}
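The limit flags used in the warning above come from a simple three-way comparison of each parameter value against the lower and upper bounds of its search dimension. The snippet below is a minimal, standalone sketch of that classification; the flag characters and bound values are hypothetical stand-ins for the ComplexFilterScore constants and the configured dimensions, not the project's implementation.

public class AtLimitSketch {
    // Hypothetical flag characters; the project defines its own constants in ComplexFilterScore
    static final char WITHIN = '-', FLOOR = '<', BELOW = '!', CEIL = '>', ABOVE = '+';

    // Classify a value against its [lower, upper] search range
    static char classify(double value, double lower, double upper) {
        final int c1 = Double.compare(value, lower);
        if (c1 <= 0)
            return (c1 < 0) ? BELOW : FLOOR;  // under, or exactly on, the lower limit
        final int c2 = Double.compare(value, upper);
        if (c2 >= 0)
            return (c2 > 0) ? ABOVE : CEIL;   // over, or exactly on, the upper limit
        return WITHIN;
    }

    public static void main(String[] args) {
        final double lower = 0, upper = 5;
        for (double v : new double[] { -1, 0, 2.5, 5, 7.5 })
            // Prints the BELOW, FLOOR, WITHIN, CEIL and ABOVE flags for a [0, 5] range
            System.out.println(v + " -> " + classify(v, lower, upper));
    }
}

Seeding the search with random samples relies on uniform sampling within each dimension's range. The following is a small sketch of that idea using the commons-math3 RandomDataGenerator and Well44497b generator seen in the listing; the bounds and seed are invented for illustration and this is not the SearchSpace.sampleWithoutRounding implementation.

import java.util.Arrays;
import org.apache.commons.math3.random.RandomDataGenerator;
import org.apache.commons.math3.random.Well44497b;

public class UniformSampleSketch {
    public static void main(String[] args) {
        // Seeded generator so the sample is reproducible (the seed value is arbitrary)
        final RandomDataGenerator random = new RandomDataGenerator(new Well44497b(42));
        final double[] lower = { 0, 10, 0.5 };  // hypothetical lower bounds, one per parameter
        final double[] upper = { 5, 50, 2.0 };  // hypothetical upper bounds, one per parameter
        final int samples = 4;
        final double[][] sample = new double[samples][lower.length];
        for (int i = 0; i < samples; i++)
            for (int j = 0; j < lower.length; j++)
                // Uniform value in the open interval (lower, upper) for each parameter
                sample[i][j] = random.nextUniform(lower[j], upper[j]);
        System.out.println(Arrays.deepToString(sample));
    }
}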
Also used :
SimpleRecombiner (gdsc.smlm.ga.SimpleRecombiner)
SearchSpace (gdsc.smlm.search.SearchSpace)
ArrayList (java.util.ArrayList)
RandomDataGenerator (org.apache.commons.math3.random.RandomDataGenerator)
BufferedTextWindow (gdsc.core.ij.BufferedTextWindow)
SearchDimension (gdsc.smlm.search.SearchDimension)
FixedDimension (gdsc.smlm.search.FixedDimension)
FilterSet (gdsc.smlm.results.filter.FilterSet)
RampedSelectionStrategy (gdsc.smlm.ga.RampedSelectionStrategy)
GenericDialog (ij.gui.GenericDialog)
NonBlockingGenericDialog (ij.gui.NonBlockingGenericDialog)
ParameterType (gdsc.smlm.results.filter.ParameterType)
IDirectFilter (gdsc.smlm.results.filter.IDirectFilter)
DirectFilter (gdsc.smlm.results.filter.DirectFilter)
SimpleSelectionStrategy (gdsc.smlm.ga.SimpleSelectionStrategy)
Filter (gdsc.smlm.results.filter.Filter)
MultiPathFilter (gdsc.smlm.results.filter.MultiPathFilter)
MaximaSpotFilter (gdsc.smlm.filters.MaximaSpotFilter)
SimpleMutator (gdsc.smlm.ga.SimpleMutator)
FilterScore (gdsc.smlm.results.filter.FilterScore)
Well44497b (org.apache.commons.math3.random.Well44497b)

Aggregations

PointValuePair (org.apache.commons.math3.optim.PointValuePair) 12
TooManyIterationsException (org.apache.commons.math3.exception.TooManyIterationsException) 11
ConvergenceException (org.apache.commons.math3.exception.ConvergenceException) 10
LeastSquaresBuilder (org.apache.commons.math3.fitting.leastsquares.LeastSquaresBuilder) 9
Optimum (org.apache.commons.math3.fitting.leastsquares.LeastSquaresOptimizer.Optimum) 9
LeastSquaresProblem (org.apache.commons.math3.fitting.leastsquares.LeastSquaresProblem) 9
LevenbergMarquardtOptimizer (org.apache.commons.math3.fitting.leastsquares.LevenbergMarquardtOptimizer) 9
DiagonalMatrix (org.apache.commons.math3.linear.DiagonalMatrix) 9
MultivariateMatrixFunction (org.apache.commons.math3.analysis.MultivariateMatrixFunction) 8
TooManyEvaluationsException (org.apache.commons.math3.exception.TooManyEvaluationsException) 6
InitialGuess (org.apache.commons.math3.optim.InitialGuess) 5
MaxEval (org.apache.commons.math3.optim.MaxEval) 5
ObjectiveFunction (org.apache.commons.math3.optim.nonlinear.scalar.ObjectiveFunction) 5
OptimizationData (org.apache.commons.math3.optim.OptimizationData) 4
SimpleBounds (org.apache.commons.math3.optim.SimpleBounds) 4
CMAESOptimizer (org.apache.commons.math3.optim.nonlinear.scalar.noderiv.CMAESOptimizer) 4
CustomPowellOptimizer (org.apache.commons.math3.optim.nonlinear.scalar.noderiv.CustomPowellOptimizer) 4
SimpleValueChecker (org.apache.commons.math3.optim.SimpleValueChecker) 3
GoalType (org.apache.commons.math3.optim.nonlinear.scalar.GoalType) 3
MaximaSpotFilter (gdsc.smlm.filters.MaximaSpotFilter) 2