Example 31 with Covariance

use of org.apache.commons.math3.stat.correlation.Covariance in project knime-core by knime.

the class CovarianceMatrixCalculatorTest method computeCovarianceOfRandomDataWithMissingValues.

/**
 * Tests the covariance computation on data containing missing values.
 *
 * @throws InvalidSettingsException if the node settings are invalid
 * @throws CanceledExecutionException if the execution is canceled
 */
@Test
public void computeCovarianceOfRandomDataWithMissingValues() throws InvalidSettingsException, CanceledExecutionException {
    // a fixed seed keeps the test reproducible; log it so failures can be reproduced
    final long seed = 47;
    System.out.println("Covariance test random seed: " + seed);
    final Random random = new Random(seed);
    double[][] data = new double[10][];
    BufferedDataContainer inTableCont = generateData(random, data, SPEC_2);
    // add two rows with missing values; both should be ignored by the computation
    DataCell[] row = new DataCell[2];
    row[0] = new DoubleCell(random.nextDouble());
    row[1] = DataType.getMissingCell();
    inTableCont.addRowToTable(new DefaultRow(new RowKey("Missing!1"), row));
    row[1] = new DoubleCell(random.nextDouble());
    row[0] = DataType.getMissingCell();
    inTableCont.addRowToTable(new DefaultRow(new RowKey("Missing!2"), row));
    inTableCont.close();
    BufferedDataTable inTable = inTableCont.getTable();
    // As the rows with missing values are ignored, the computed covariance matrix should match the reference computed on the complete data
    CovarianceMatrixCalculator covMatrixCalculator = new CovarianceMatrixCalculator(SPEC_2, SPEC_2.getColumnNames());
    BufferedDataContainer covDataContainer = m_exec.createDataContainer(covMatrixCalculator.getResultSpec());
    RealMatrix covMatrixUnderTest = covMatrixCalculator.computeCovarianceMatrix(m_exec, inTable, covDataContainer);
    covDataContainer.close();
    Covariance covariance = new Covariance(data);
    RealMatrix referenceCovarianceMatrix = covariance.getCovarianceMatrix();
    BufferedDataTable covTableUnderTest = covDataContainer.getTable();
    // The diagonal holds the variances, which are affected by how missing values are handled,
    // so only the upper-right triangle of the covariance matrix is compared.
    assertCovarianceMatrixEquality(covMatrixUnderTest, referenceCovarianceMatrix, covTableUnderTest, SPEC_2, false);
}
Also used : BufferedDataContainer(org.knime.core.node.BufferedDataContainer) RowKey(org.knime.core.data.RowKey) DoubleCell(org.knime.core.data.def.DoubleCell) Random(java.util.Random) RealMatrix(org.apache.commons.math3.linear.RealMatrix) Covariance(org.apache.commons.math3.stat.correlation.Covariance) BufferedDataTable(org.knime.core.node.BufferedDataTable) DataCell(org.knime.core.data.DataCell) DefaultRow(org.knime.core.data.def.DefaultRow) Test(org.junit.Test)
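
For reference, a minimal sketch of how such a Commons Math reference matrix is produced on its own (the data values are invented for illustration):

import org.apache.commons.math3.linear.RealMatrix;
import org.apache.commons.math3.stat.correlation.Covariance;

public class CovarianceSketch {
    public static void main(String[] args) {
        // rows are observations, columns are variables
        double[][] data = {
            { 1.0, 2.0 },
            { 2.0, 1.5 },
            { 3.0, 3.5 },
            { 4.0, 3.0 }
        };
        // bias-corrected by default (divides by n - 1), as in the reference above
        Covariance covariance = new Covariance(data);
        RealMatrix cov = covariance.getCovarianceMatrix();
        System.out.println(cov);
    }
}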

Example 32 with Covariance

use of org.apache.commons.math3.stat.correlation.Covariance in project imagingbook-common by imagingbook.

the class MahalanobisDistance method conditionCovarianceMatrix.

/**
 * Conditions the supplied covariance matrix by enforcing a positive
 * lower bound on its eigenvalues.
 *
 * @param S original covariance matrix
 * @param minDiagonal the minimum admissible diagonal entry of the eigenvalue matrix D
 * @return conditioned covariance matrix
 */
private RealMatrix conditionCovarianceMatrix(RealMatrix S, double minDiagonal) {
    // S  ->  V . D . V^T
    EigenDecomposition ed = new EigenDecomposition(S);
    RealMatrix V = ed.getV();
    // diagonal matrix of eigenvalues
    RealMatrix D = ed.getD();
    RealMatrix VT = ed.getVT();
    for (int i = 0; i < D.getRowDimension(); i++) {
        // clamping eigenvalues at zero is not enough; enforce a positive minimum
        D.setEntry(i, i, Math.max(D.getEntry(i, i), minDiagonal));
    }
    return V.multiply(D).multiply(VT);
}
Also used : EigenDecomposition(org.apache.commons.math3.linear.EigenDecomposition) RealMatrix(org.apache.commons.math3.linear.RealMatrix)
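
A hedged usage sketch of the same eigenvalue-clamping recipe on a nearly singular matrix (class name and values invented for illustration); the Cholesky factorization at the end succeeds only if the result is positive definite:

import org.apache.commons.math3.linear.CholeskyDecomposition;
import org.apache.commons.math3.linear.EigenDecomposition;
import org.apache.commons.math3.linear.MatrixUtils;
import org.apache.commons.math3.linear.RealMatrix;

public class ConditioningSketch {
    public static void main(String[] args) {
        // nearly singular: the second eigenvalue is ~0
        RealMatrix S = MatrixUtils.createRealMatrix(new double[][] {
            { 1.0, 1.0 },
            { 1.0, 1.0 + 1e-12 }
        });
        EigenDecomposition ed = new EigenDecomposition(S);
        RealMatrix V = ed.getV();
        RealMatrix D = ed.getD();
        for (int i = 0; i < D.getRowDimension(); i++) {
            D.setEntry(i, i, Math.max(D.getEntry(i, i), 1e-6));
        }
        RealMatrix conditioned = V.multiply(D).multiply(ed.getVT());
        // relaxed symmetry threshold, since V * D * V^T is symmetric only up to rounding
        new CholeskyDecomposition(conditioned, 1e-10, 1e-12);
        System.out.println(conditioned);
    }
}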

Example 33 with Covariance

use of org.apache.commons.math3.stat.correlation.Covariance in project FSensor by KalebKE.

the class RotationKalmanFilter method correct.

/**
 * Correct the current state estimate with an actual measurement.
 *
 * @param z
 *            the measurement vector
 * @throws NullArgumentException
 *             if the measurement vector is {@code null}
 * @throws DimensionMismatchException
 *             if the dimension of the measurement vector does not fit
 * @throws SingularMatrixException
 *             if the covariance matrix could not be inverted
 */
public void correct(final RealVector z) throws NullArgumentException, DimensionMismatchException, SingularMatrixException {
    // sanity checks
    MathUtils.checkNotNull(z);
    if (z.getDimension() != measurementMatrix.getRowDimension()) {
        throw new DimensionMismatchException(z.getDimension(), measurementMatrix.getRowDimension());
    }
    // S = H * P(k) * H' + R
    RealMatrix s = measurementMatrix.multiply(errorCovariance).multiply(measurementMatrixT).add(measurementModel.getMeasurementNoise());
    // Inn = z(k) - H * xHat(k)-
    RealVector innovation = z.subtract(measurementMatrix.operate(stateEstimation));
    // calculate gain matrix
    // K(k) = P(k)- * H' * (H * P(k)- * H' + R)^-1
    //      = P(k)- * H' * S^-1
    // Instead of inverting S explicitly, rearrange the formula and solve the
    // linear system A * X = B with A = S', X = K(k)', B = (H * P(k)-)':
    //   K(k) * S = P(k)- * H'
    //   S' * K(k)' = H * P(k)-'
    RealMatrix kalmanGain = new CholeskyDecomposition(s).getSolver().solve(measurementMatrix.multiply(errorCovariance.transpose())).transpose();
    // update estimate with measurement z(k)
    // xHat(k) = xHat(k)- + K * Inn
    stateEstimation = stateEstimation.add(kalmanGain.operate(innovation));
    // update covariance of prediction error
    // P(k) = (I - K * H) * P(k)-
    RealMatrix identity = MatrixUtils.createRealIdentityMatrix(kalmanGain.getRowDimension());
    errorCovariance = identity.subtract(kalmanGain.multiply(measurementMatrix)).multiply(errorCovariance);
}
Also used : CholeskyDecomposition(org.apache.commons.math3.linear.CholeskyDecomposition) DimensionMismatchException(org.apache.commons.math3.exception.DimensionMismatchException) MatrixDimensionMismatchException(org.apache.commons.math3.linear.MatrixDimensionMismatchException) Array2DRowRealMatrix(org.apache.commons.math3.linear.Array2DRowRealMatrix) RealMatrix(org.apache.commons.math3.linear.RealMatrix) RealVector(org.apache.commons.math3.linear.RealVector) ArrayRealVector(org.apache.commons.math3.linear.ArrayRealVector)
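
A standalone sketch (matrices invented for illustration) of the gain computation trick used above: solving S' * K' = H * P' with a Cholesky solver yields the same K as the textbook P * H' * S^-1 with an explicit inverse, but avoids forming the inverse of the symmetric positive definite S:

import org.apache.commons.math3.linear.CholeskyDecomposition;
import org.apache.commons.math3.linear.LUDecomposition;
import org.apache.commons.math3.linear.MatrixUtils;
import org.apache.commons.math3.linear.RealMatrix;

public class KalmanGainSketch {
    public static void main(String[] args) {
        RealMatrix P = MatrixUtils.createRealMatrix(new double[][] { { 2.0, 0.5 }, { 0.5, 1.0 } });
        RealMatrix H = MatrixUtils.createRealMatrix(new double[][] { { 1.0, 0.0 } });
        RealMatrix R = MatrixUtils.createRealMatrix(new double[][] { { 0.1 } });
        // S = H * P * H' + R (innovation covariance, symmetric positive definite)
        RealMatrix S = H.multiply(P).multiply(H.transpose()).add(R);
        // Cholesky route: solve S' * K' = H * P' for K', then transpose
        RealMatrix kChol = new CholeskyDecomposition(S).getSolver()
                .solve(H.multiply(P.transpose())).transpose();
        // textbook route with an explicit inverse, for comparison
        RealMatrix kInv = P.multiply(H.transpose())
                .multiply(new LUDecomposition(S).getSolver().getInverse());
        System.out.println(kChol.subtract(kInv).getNorm()); // ~0
    }
}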

Example 34 with Covariance

use of org.apache.commons.math3.stat.correlation.Covariance in project narchy by automenta.

the class MyCMAESOptimizer method doOptimize.

/**
 * {@inheritDoc}
 */
@Override
protected PointValuePair doOptimize() {
    // -------------------- Initialization --------------------------------
    isMinimize = getGoalType().equals(GoalType.MINIMIZE);
    final MyCMAESOptimizer.FitnessFunction fitfun = new MyCMAESOptimizer.FitnessFunction();
    final double[] guess = getStartPoint();
    // number of objective variables/problem dimension
    dimension = guess.length;
    initializeCMA(guess);
    iterations = 0;
    MyCMAESOptimizer.ValuePenaltyPair valuePenalty = fitfun.value(guess);
    double bestValue = valuePenalty.value + valuePenalty.penalty;
    push(fitnessHistory, bestValue);
    PointValuePair optimum = new PointValuePair(guess, isMinimize ? bestValue : -bestValue);
    PointValuePair lastResult = null;
    final double[] lB = MyCMAESOptimizer.this.getLowerBound();
    final double[] uB = MyCMAESOptimizer.this.getUpperBound();
    generationLoop: for (iterations = 1; iterations <= maxIterations; iterations++) {
        incrementIterationCount();
        // Generate and evaluate lambda offspring
        final RealMatrix arz = randn1(dimension, lambda);
        final RealMatrix arx = zeros(dimension, lambda);
        final double[] fitness = new double[lambda];
        final MyCMAESOptimizer.ValuePenaltyPair[] valuePenaltyPairs = new MyCMAESOptimizer.ValuePenaltyPair[lambda];
        // generate random offspring
        for (int k = 0; k < lambda; k++) {
            RealMatrix arzK = arz.getColumnMatrix(k);
            RealMatrix arxk = null;
            for (int i = 0; i < checkFeasableCount + 1; i++) {
                if (diagonalOnly <= 0) {
                    // m + sig * Normal(0, C)
                    arxk = xmean.add(BD.multiply(arzK).scalarMultiply(sigma));
                } else {
                    arxk = xmean.add(times(diagD, arzK).scalarMultiply(sigma));
                }
                if (i >= checkFeasableCount || fitfun.isFeasible(arxk.getColumn(0), lB, uB)) {
                    break;
                }
                // regenerate the random arguments for this column
                arz.setColumn(k, randn(dimension));
            }
            copyColumn(arxk, 0, arx, k);
            try {
                // compute fitness
                valuePenaltyPairs[k] = fitfun.value(arx.getColumn(k));
            } catch (TooManyEvaluationsException e) {
                break generationLoop;
            }
        }
        // Compute fitnesses by adding value and penalty after scaling by value range.
        double valueRange = valueRange(valuePenaltyPairs);
        for (int iValue = 0; iValue < valuePenaltyPairs.length; iValue++) {
            fitness[iValue] = valuePenaltyPairs[iValue].value + valuePenaltyPairs[iValue].penalty * valueRange;
        }
        // Sort by fitness and compute weighted mean into xmean
        final int[] arindex = sortedIndices(fitness);
        // Calculate new xmean, this is selection and recombination
        // for speed up of Eq. (2) and (3)
        final RealMatrix xold = xmean;
        int[] arMu = MathArrays.copyOf(arindex, mu);
        final RealMatrix bestArx = selectColumns(arx, arMu);
        xmean = bestArx.multiply(weights);
        final RealMatrix bestArz = selectColumns(arz, arMu);
        final RealMatrix zmean = bestArz.multiply(weights);
        final boolean hsig = updateEvolutionPaths(zmean, xold);
        if (diagonalOnly <= 0) {
            updateCovariance(hsig, bestArx, arz, arindex, xold);
        } else {
            updateCovarianceDiagonalOnly(hsig, bestArz);
        }
        // Adapt step size sigma - Eq. (5)
        sigma *= Math.exp(Math.min(1, (normps / chiN - 1) * cs / damps));
        final double bestFitness = fitness[arindex[0]];
        final double worstFitness = fitness[arindex[arindex.length - 1]];
        if (bestValue > bestFitness) {
            bestValue = bestFitness;
            lastResult = optimum;
            optimum = new PointValuePair(fitfun.repair(bestArx.getColumn(0)), isMinimize ? bestFitness : -bestFitness);
            if (getConvergenceChecker() != null && getConvergenceChecker().converged(iterations, optimum, lastResult)) {
                break generationLoop;
            }
        }
        // Break if the fitness is good enough; stopFitness == stopFitness is a
        // NaN guard (x == x is false only when x is NaN)
        if (stopFitness == stopFitness && bestFitness < (isMinimize ? stopFitness : -stopFitness)) {
            break generationLoop;
        }
        final double[] sqrtDiagC = sqrt(diagC).getColumn(0);
        final double[] pcCol = pc.getColumn(0);
        // TolX criterion: stop when the step is below stopTolX in every coordinate
        for (int i = 0; i < dimension; i++) {
            if (sigma * Math.max(Math.abs(pcCol[i]), sqrtDiagC[i]) > stopTolX) {
                break;
            }
            if (i >= dimension - 1) {
                break generationLoop;
            }
        }
        // TolUpX criterion: stop when the step has grown too large in any coordinate
        for (int i = 0; i < dimension; i++) {
            if (sigma * sqrtDiagC[i] > stopTolUpX) {
                break generationLoop;
            }
        }
        final double historyBest = min(fitnessHistory);
        final double historyWorst = max(fitnessHistory);
        if (iterations > 2 && Math.max(historyWorst, worstFitness) - Math.min(historyBest, bestFitness) < stopTolFun) {
            break generationLoop;
        }
        if (iterations > fitnessHistory.length && historyWorst - historyBest < stopTolHistFun) {
            break generationLoop;
        }
        // diagD holds the square roots of C's eigenvalues, so a ratio above 1e7
        // means the condition number of the covariance matrix exceeds 1e14
        if (max(diagD) / min(diagD) > tENmILLION) {
            break generationLoop;
        }
        // user defined termination
        if (getConvergenceChecker() != null) {
            final PointValuePair current = new PointValuePair(bestArx.getColumn(0), isMinimize ? bestFitness : -bestFitness);
            if (lastResult != null && getConvergenceChecker().converged(iterations, current, lastResult)) {
                break generationLoop;
            }
            lastResult = current;
        }
        // Adjust step size in case of equal function values (flat fitness)
        if (bestValue == fitness[arindex[(int) (0.1 + lambda / 4.0)]]) {
            sigma *= Math.exp(0.2 + cs / damps);
        }
        if (iterations > 2 && Math.max(historyWorst, bestFitness) - Math.min(historyBest, bestFitness) == 0) {
            sigma *= Math.exp(0.2 + cs / damps);
        }
        // store best in history
        push(fitnessHistory, bestFitness);
        if (generateStatistics) {
            statisticsSigmaHistory.add(sigma);
            statisticsFitnessHistory.add(bestFitness);
            statisticsMeanHistory.add(xmean.transpose());
            statisticsDHistory.add(diagD.transpose().scalarMultiply(hUNDreDtHOUSAND));
        }
    }
    return optimum;
}
Also used : TooManyEvaluationsException(org.apache.commons.math3.exception.TooManyEvaluationsException) Array2DRowRealMatrix(org.apache.commons.math3.linear.Array2DRowRealMatrix) RealMatrix(org.apache.commons.math3.linear.RealMatrix) PointValuePair(org.apache.commons.math3.optim.PointValuePair)
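
Since MyCMAESOptimizer is project-internal (it appears to be a fork of Commons Math's CMAESOptimizer), here is a hedged sketch of driving the stock optimizer through the same doOptimize loop on a simple quadratic; all parameter values are illustrative:

import org.apache.commons.math3.optim.InitialGuess;
import org.apache.commons.math3.optim.MaxEval;
import org.apache.commons.math3.optim.PointValuePair;
import org.apache.commons.math3.optim.SimpleBounds;
import org.apache.commons.math3.optim.nonlinear.scalar.GoalType;
import org.apache.commons.math3.optim.nonlinear.scalar.ObjectiveFunction;
import org.apache.commons.math3.optim.nonlinear.scalar.noderiv.CMAESOptimizer;
import org.apache.commons.math3.random.MersenneTwister;

public class CmaesSketch {
    public static void main(String[] args) {
        CMAESOptimizer optimizer = new CMAESOptimizer(
                1000,     // maxIterations
                0,        // stopFitness (0 disables the check in the stock class)
                true,     // isActiveCMA
                0,        // diagonalOnly
                10,       // checkFeasableCount
                new MersenneTwister(42),
                false,    // generateStatistics
                null);    // no extra convergence checker
        PointValuePair result = optimizer.optimize(
                new MaxEval(100_000),
                new ObjectiveFunction(x ->
                        (x[0] - 1) * (x[0] - 1) + (x[1] + 2) * (x[1] + 2)),
                GoalType.MINIMIZE,
                new InitialGuess(new double[] { 0, 0 }),
                new SimpleBounds(new double[] { -5, -5 }, new double[] { 5, 5 }),
                new CMAESOptimizer.Sigma(new double[] { 0.5, 0.5 }),
                new CMAESOptimizer.PopulationSize(16));
        System.out.println(java.util.Arrays.toString(result.getPoint())); // ~[1, -2]
    }
}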

Example 35 with Covariance

use of org.apache.commons.math3.stat.correlation.Covariance in project narchy by automenta.

the class MyCMAESOptimizer method updateCovariance.

/**
 * Update of the covariance matrix C.
 *
 * @param hsig Flag indicating a small correction.
 * @param bestArx Fitness-sorted matrix of the argument vectors producing the
 * current offspring.
 * @param arz Unsorted matrix containing the Gaussian random values of the
 * current offspring.
 * @param arindex Indices indicating the fitness-order of the current offspring.
 * @param xold xmean matrix of the previous generation.
 */
private void updateCovariance(boolean hsig, final RealMatrix bestArx, final RealMatrix arz, final int[] arindex, final RealMatrix xold) {
    double negccov = 0;
    if (ccov1 + ccovmu > 0) {
        // mu difference vectors
        final RealMatrix arpos = bestArx.subtract(repmat(xold, 1, mu)).scalarMultiply(1 / sigma);
        // rank-one update
        final RealMatrix roneu = pc.multiply(pc.transpose()).scalarMultiply(ccov1);
        // minor correction if hsig==false
        double oldFac = hsig ? 0 : ccov1 * cc * (2 - cc);
        oldFac += 1 - ccov1 - ccovmu;
        if (isActiveCMA) {
            // Adapt covariance matrix C active CMA
            negccov = (1 - ccovmu) * 0.25 * mueff / (Math.pow(dimension + 2, 1.5) + 2 * mueff);
            // keep at least 0.66 in all directions, small popsize are most
            // critical
            final double negminresidualvariance = 0.66;
            // where to make up for the variance loss
            final double negalphaold = 0.5;
            // prepare vectors, compute negative updating matrix Cneg
            final int[] arReverseIndex = reverse(arindex);
            RealMatrix arzneg = selectColumns(arz, MathArrays.copyOf(arReverseIndex, mu));
            RealMatrix arnorms = sqrt(sumRows(square(arzneg)));
            final int[] idxnorms = sortedIndices(arnorms.getRow(0));
            final RealMatrix arnormsSorted = selectColumns(arnorms, idxnorms);
            final int[] idxReverse = reverse(idxnorms);
            final RealMatrix arnormsReverse = selectColumns(arnorms, idxReverse);
            arnorms = divide(arnormsReverse, arnormsSorted);
            final int[] idxInv = inverse(idxnorms);
            final RealMatrix arnormsInv = selectColumns(arnorms, idxInv);
            // check and set learning rate negccov
            final double negcovMax = (1 - negminresidualvariance) / square(arnormsInv).multiply(weights).getEntry(0, 0);
            if (negccov > negcovMax) {
                negccov = negcovMax;
            }
            arzneg = times(arzneg, repmat(arnormsInv, dimension, 1));
            final RealMatrix artmp = BD.multiply(arzneg);
            final RealMatrix Cneg = artmp.multiply(diag(weights)).multiply(artmp.transpose());
            oldFac += negalphaold * negccov;
            C = C.scalarMultiply(oldFac) // regard old matrix
                    .add(roneu) // plus rank-one update
                    .add(arpos.scalarMultiply(ccovmu + (1 - negalphaold) * negccov) // plus rank-mu update
                            .multiply(times(repmat(weights, 1, dimension), arpos.transpose())))
                    .subtract(Cneg.scalarMultiply(negccov));
        } else {
            // Adapt covariance matrix C - non-active CMA
            C = C.scalarMultiply(oldFac) // regard old matrix
                    .add(roneu) // plus rank-one update
                    .add(arpos.scalarMultiply(ccovmu) // plus rank-mu update
                            .multiply(times(repmat(weights, 1, dimension), arpos.transpose())));
        }
    }
    updateBD(negccov);
}
Also used : Array2DRowRealMatrix(org.apache.commons.math3.linear.Array2DRowRealMatrix) RealMatrix(org.apache.commons.math3.linear.RealMatrix)
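
In formula form, the non-active branch above implements the standard CMA-ES update C <- (1 - c1 - cmu) * C + c1 * pc * pc^T + cmu * sum_i w_i * y_i * y_i^T, with y_i = (x_i - xold) / sigma. A self-contained sketch of that rank-one plus rank-mu update, with invented learning rates and toy matrices:

import org.apache.commons.math3.linear.MatrixUtils;
import org.apache.commons.math3.linear.RealMatrix;
import org.apache.commons.math3.linear.RealVector;

public class CovarianceUpdateSketch {
    public static void main(String[] args) {
        double c1 = 0.1, cmu = 0.2;                      // learning rates
        double[] w = { 0.6, 0.4 };                       // recombination weights
        RealMatrix C = MatrixUtils.createRealIdentityMatrix(2);
        RealVector pc = MatrixUtils.createRealVector(new double[] { 0.3, -0.1 });
        // y_i = (x_i - xold) / sigma, one column per selected offspring
        RealMatrix y = MatrixUtils.createRealMatrix(new double[][] {
            { 0.5, -0.2 },
            { 0.1, 0.4 }
        });
        // rank-one term: c1 * pc * pc^T
        RealMatrix rankOne = pc.outerProduct(pc).scalarMultiply(c1);
        // rank-mu term: cmu * sum_i w_i * y_i * y_i^T
        RealMatrix rankMu = MatrixUtils.createRealMatrix(2, 2);
        for (int i = 0; i < w.length; i++) {
            RealVector yi = y.getColumnVector(i);
            rankMu = rankMu.add(yi.outerProduct(yi).scalarMultiply(w[i]));
        }
        rankMu = rankMu.scalarMultiply(cmu);
        C = C.scalarMultiply(1 - c1 - cmu).add(rankOne).add(rankMu);
        System.out.println(C);
    }
}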

Aggregations

RealMatrix (org.apache.commons.math3.linear.RealMatrix): 27 uses
Array2DRowRealMatrix (org.apache.commons.math3.linear.Array2DRowRealMatrix): 10 uses
IntStream (java.util.stream.IntStream): 9 uses
ArrayList (java.util.ArrayList): 8 uses
List (java.util.List): 7 uses
Nonnull (javax.annotation.Nonnull): 7 uses
Collectors (java.util.stream.Collectors): 6 uses
Stream (java.util.stream.Stream): 5 uses
Covariance (org.apache.commons.math3.stat.correlation.Covariance): 5 uses
java.util (java.util): 4 uses
Supplier (java.util.function.Supplier): 4 uses
Nullable (javax.annotation.Nullable): 4 uses
ImmutablePair (org.apache.commons.lang3.tuple.ImmutablePair): 4 uses
TooManyEvaluationsException (org.apache.commons.math3.exception.TooManyEvaluationsException): 4 uses
EigenDecomposition (org.apache.commons.math3.linear.EigenDecomposition): 4 uses
Logger (org.apache.logging.log4j.Logger): 4 uses
UserException (org.broadinstitute.hellbender.exceptions.UserException): 4 uses
Nd4jIOUtils (org.broadinstitute.hellbender.tools.coveragemodel.nd4jutils.Nd4jIOUtils): 4 uses
Utils (org.broadinstitute.hellbender.utils.Utils): 4 uses
ParamUtils (org.broadinstitute.hellbender.utils.param.ParamUtils): 4 uses