Example 11 with MultivariateNormalDistribution

Use of dr.math.distributions.MultivariateNormalDistribution in project beast-mcmc by beast-dev.

The class NewLatentLiabilityGibbs, method sampleNode:

private double sampleNode(NodeRef node, WrappedNormalSufficientStatistics statistics) {
    final int thisNumber = node.getNumber();
    final int[] obsInds = maskDelegate.getObservedIndices(node);
    final int obsDim = obsInds.length;
    if (obsDim == dim) {
        return 0;
    }
    ReadableVector mean = statistics.getMean();
    ReadableMatrix thisP = statistics.getPrecision();
    double precisionScalar = statistics.getPrecisionScalar();
    for (int i = 0; i < mean.getDim(); ++i) {
        fcdMean[i] = mean.get(i);
    }
    for (int i = 0; i < mean.getDim(); ++i) {
        for (int j = 0; j < mean.getDim(); ++j) {
            fcdPrecision[i][j] = thisP.get(i, j) * precisionScalar;
        }
    }
    if (repeatedMeasuresModel != null) {
        // TODO: preallocate memory for all these matrices/vectors
        // storing original fcd precision
        DenseMatrix64F Q = new DenseMatrix64F(fcdPrecision);
        double[] tipPartial = repeatedMeasuresModel.getTipPartial(thisNumber, false);
        int offset = dim;
        DenseMatrix64F P = MissingOps.wrap(tipPartial, offset, dim, dim);
        DenseMatrix64F preOrderMean = new DenseMatrix64F(dim, 1);
        for (int i = 0; i < dim; i++) {
            preOrderMean.set(i, 0, fcdMean[i]);
        }
        DenseMatrix64F dataMean = new DenseMatrix64F(dim, 1);
        for (int i = 0; i < dim; i++) {
            dataMean.set(i, 0, tipPartial[i]);
        }
        D1Matrix64F bufferMean = new DenseMatrix64F(dim, 1);
        // bufferMean = Q * preOrderMean
        mult(Q, preOrderMean, bufferMean);
        // bufferMean = Q * preOrderMean + P * dataMean
        multAdd(P, dataMean, bufferMean);
        // P = P + Q
        CommonOps.addEquals(P, Q);
        DenseMatrix64F V = new DenseMatrix64F(dim, dim);
        // V = inv(P + Q)
        CommonOps.invert(P, V);
        // dataMean = inv(P + Q) * (Q * preOrderMean + P * dataMean)
        mult(V, bufferMean, dataMean);
        for (int i = 0; i < dim; i++) {
            fcdMean[i] = dataMean.get(i);
            for (int j = 0; j < dim; j++) {
                fcdPrecision[i][j] = P.get(i, j);
            }
        }
    }
    // TODO: should this not be declared until 'else' statement?
    MultivariateNormalDistribution fullDistribution = new MultivariateNormalDistribution(fcdMean, fcdPrecision);
    MultivariateNormalDistribution drawDistribution;
    if (mask != null && obsDim > 0) {
        addMaskOnContiuousTraitsPrecisionSpace(thisNumber);
        drawDistribution = new MultivariateNormalDistribution(maskedMean, maskedPrecision);
    } else {
        drawDistribution = fullDistribution;
    }
    double[] oldValue = getNodeTrait(node);
    int attempt = 0;
    boolean validTip = false;
    while (!validTip && attempt < maxAttempts) {
        setNodeTrait(node, drawDistribution.nextMultivariateNormal());
        if (latentLiability.validTraitForTip(thisNumber)) {
            validTip = true;
        }
        attempt++;
    }
    if (!validTip) {
        // no valid draw was found within maxAttempts; reject the move
        return Double.NEGATIVE_INFINITY;
    }
    double[] newValue = getNodeTrait(node);
    if (latentLiability instanceof DummyLatentTruncationProvider) {
        return Double.POSITIVE_INFINITY;
    } else {
        return fullDistribution.logPdf(oldValue) - fullDistribution.logPdf(newValue);
    }
}
Also used: DummyLatentTruncationProvider (dr.evomodel.continuous.DummyLatentTruncationProvider), D1Matrix64F (org.ejml.data.D1Matrix64F), MultivariateNormalDistribution (dr.math.distributions.MultivariateNormalDistribution), DenseMatrix64F (org.ejml.data.DenseMatrix64F)
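The repeatedMeasuresModel branch above fuses the pre-order (tree) message N(preOrderMean, Q^-1) with the tip's repeated-measures likelihood N(dataMean, P^-1) in precision space. The stand-alone sketch below is not beast-mcmc code (the class and method names are invented), but it uses the same legacy EJML calls as the snippet to show the underlying identity: the product of the two densities is proportional to a normal with precision P + Q and mean (P + Q)^-1 (Q m1 + P m2).

import org.ejml.data.DenseMatrix64F;
import org.ejml.ops.CommonOps;

// Illustrative sketch only; not part of beast-mcmc.
public final class PrecisionFusionSketch {

    /**
     * Returns {mean, precision} of the Gaussian proportional to N(m1, Q^-1) * N(m2, P^-1):
     * precision = P + Q, mean = (P + Q)^-1 (Q * m1 + P * m2).
     */
    static DenseMatrix64F[] fuse(DenseMatrix64F m1, DenseMatrix64F q,
                                 DenseMatrix64F m2, DenseMatrix64F p) {
        int dim = m1.getNumRows();

        // rhs = Q * m1 + P * m2
        DenseMatrix64F rhs = new DenseMatrix64F(dim, 1);
        CommonOps.mult(q, m1, rhs);
        CommonOps.multAdd(p, m2, rhs);

        // combined precision = P + Q (copied so the inputs stay untouched)
        DenseMatrix64F precision = new DenseMatrix64F(p);
        CommonOps.addEquals(precision, q);

        // combined mean = (P + Q)^-1 * rhs
        DenseMatrix64F variance = new DenseMatrix64F(dim, dim);
        CommonOps.invert(precision, variance);
        DenseMatrix64F mean = new DenseMatrix64F(dim, 1);
        CommonOps.mult(variance, rhs, mean);

        return new DenseMatrix64F[] { mean, precision };
    }
}

Forming the explicit inverse, as the snippet also does, is fine for small trait dimensions; for larger dim, solving the linear system (P + Q) m = Q m1 + P m2 would be preferable to inverting.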

Example 12 with MultivariateNormalDistribution

Use of dr.math.distributions.MultivariateNormalDistribution in project beast-mcmc by beast-dev.

The class LatentLiabilityGibbs, method sampleNode2:

public double sampleNode2(NodeRef node) {
    final int thisNumber = node.getNumber();
    // (an earlier version computed the conditional mean and precision by hand from
    //  preMeans, preP and precisionParam; this is now delegated to traitModel below)
    double[] mean = traitModel.getConditionalMean(thisNumber);
    double[][] thisP = traitModel.getConditionalPrecision(thisNumber);
    double[] oldValue = getNodeTrait(node);
    double[] value = oldValue;
    int attempt = 0;
    boolean validTip = false;
    if (hasMask) {
        double[] oldCompVal = new double[numUpdate];
        double[] newValue = new double[numUpdate];
        double[][] condP = new double[numUpdate][numUpdate];
        for (int i = 0; i < numUpdate; i++) {
            oldCompVal[i] = value[doUpdate[i]];
            for (int j = 0; j < numUpdate; j++) {
                condP[i][j] = thisP[doUpdate[i]][doUpdate[j]];
            }
        }
        double[] condMean = getComponentConditionalMean(thisP, oldValue, mean, condP);
        // Start sampling
        MultivariateNormalDistribution distribution = new MultivariateNormalDistribution(condMean, condP);
        // estourou++;
        while (!validTip && attempt < 10000) {
            newValue = distribution.nextMultivariateNormal();
            for (int i = 0; i < numUpdate; i++) {
                value[doUpdate[i]] = newValue[i];
            }
            setNodeTrait(node, value);
            if (latentLiability.validTraitForTip(thisNumber)) {
                validTip = true;
            // estourou--;
            }
            attempt++;
        }
        // TODO Failure rate should be stored somewhere and polled later for diagnostics
        /*
        if (Math.floorMod(getCount(), 10000) == 1) {
            printInformation(estourou / 10000.0, "Could not sample truncated normal");
            estourou = 0;
            printInformation(((double) getAcceptCount() - ultimoAccept) / 10000.0, "Accept probability");
            ultimoAccept = (double) getAcceptCount();
        }
        */
        double pOld = distribution.logPdf(oldCompVal);
        double pNew = distribution.logPdf(newValue);
        double logq = pOld - pNew;
        traitModel.getTraitParameter().getParameter(thisNumber).fireParameterChangedEvent();
        return logq;
    } else {
        // Sample all traits together as one multivariate normal draw
        MultivariateNormalDistribution distribution = new MultivariateNormalDistribution(mean, thisP);
        while (!validTip && attempt < 10000) {
            value = distribution.nextMultivariateNormal();
            setNodeTrait(node, value);
            if (latentLiability.validTraitForTip(thisNumber)) {
                validTip = true;
            }
            attempt++;
        }
        double pOld = distribution.logPdf(oldValue);
        double pNew = distribution.logPdf(value);
        double logq = pOld - pNew;
        traitModel.getTraitParameter().getParameter(thisNumber).fireParameterChangedEvent();
        return logq;
    }
}
Also used: MultivariateNormalDistribution (dr.math.distributions.MultivariateNormalDistribution)
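With a mask, sampleNode2 resamples only the entries listed in doUpdate, handing MultivariateNormalDistribution the corresponding sub-block condP of the precision matrix and a mean shifted by getComponentConditionalMean. The sketch below uses hypothetical names and is not the beast-mcmc implementation; it shows the standard identity such a helper typically relies on: if x ~ N(mu, P^-1), the conditional distribution of the updated entries U given the fixed entries O has precision P_UU and mean mu_U - P_UU^-1 P_UO (x_O - mu_O).

import org.ejml.data.DenseMatrix64F;
import org.ejml.ops.CommonOps;

// Illustrative sketch only; not part of beast-mcmc.
public final class ConditionalNormalSketch {

    /**
     * Conditional mean of x[update] given x[fixed] when x ~ N(mu, P^-1):
     * mu_U - P_UU^-1 * P_UO * (x_O - mu_O). The conditional precision is simply P_UU.
     */
    static double[] conditionalMean(double[][] precision, double[] x, double[] mu,
                                    int[] update, int[] fixed) {
        int nU = update.length;
        int nO = fixed.length;

        // Extract the P_UU and P_UO blocks and the residual (x_O - mu_O)
        DenseMatrix64F pUU = new DenseMatrix64F(nU, nU);
        DenseMatrix64F pUO = new DenseMatrix64F(nU, nO);
        DenseMatrix64F residual = new DenseMatrix64F(nO, 1);
        for (int i = 0; i < nU; i++) {
            for (int j = 0; j < nU; j++) {
                pUU.set(i, j, precision[update[i]][update[j]]);
            }
            for (int j = 0; j < nO; j++) {
                pUO.set(i, j, precision[update[i]][fixed[j]]);
            }
        }
        for (int j = 0; j < nO; j++) {
            residual.set(j, 0, x[fixed[j]] - mu[fixed[j]]);
        }

        // shift = P_UU^-1 * P_UO * (x_O - mu_O)
        DenseMatrix64F b = new DenseMatrix64F(nU, 1);
        CommonOps.mult(pUO, residual, b);
        DenseMatrix64F pUUInv = new DenseMatrix64F(nU, nU);
        CommonOps.invert(pUU, pUUInv);
        DenseMatrix64F shift = new DenseMatrix64F(nU, 1);
        CommonOps.mult(pUUInv, b, shift);

        double[] conditionalMean = new double[nU];
        for (int i = 0; i < nU; i++) {
            conditionalMean[i] = mu[update[i]] - shift.get(i, 0);
        }
        return conditionalMean;
    }
}

Note that the conditional precision is just the P_UU sub-block, which is why the snippet can build condP by direct indexing without any matrix algebra.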

Example 13 with MultivariateNormalDistribution

Use of dr.math.distributions.MultivariateNormalDistribution in project beast-mcmc by beast-dev.

The class LatentLiabilityGibbs, method sampleNode:

public double sampleNode(NodeRef node) {
    final int thisNumber = node.getNumber();
    double[] traitValue = getNodeTrait(node);
    double[] mean = new double[dim];
    for (int i = 0; i < dim; i++) {
        mean[i] = preMeans[thisNumber][i];
    }
    double p = preP[thisNumber];
    double[][] thisP = new double[dim][dim];
    for (int i = 0; i < dim; i++) {
        for (int j = 0; j < dim; j++) {
            thisP[i][j] = p * precisionParam.getParameterValue(i, j);
        }
    }
    /*
     * Earlier approach: sample all traits together from a single multivariate normal:
     *
     *     MultivariateNormalDistribution distribution = new MultivariateNormalDistribution(mean, thisP);
     *     double[] oldValue = getNodeTrait(node);
     *     double[] value = distribution.nextMultivariateNormal();
     *     setNodeTrait(node, value);
     *     double pOld = distribution.logPdf(oldValue);
     *     double pNew = distribution.logPdf(value);
     */
    // Instead, Gibbs sample a single randomly chosen entry of the latent trait vector
    // (an earlier variant looped over all entries)
    int entry = MathUtils.nextInt(dim);
    double thisMean = getConditionalMean(entry, thisP, traitValue, mean);
    double SD = Math.sqrt(1 / thisP[entry][entry]);
    double oldValue = getNodeTrait(node, entry);
    double value = MathUtils.nextGaussian();
    value *= SD;
    value += thisMean;
    // printInformation(oldValue);
    // printInformation(value);
    NormalDistribution distribution = new NormalDistribution(thisMean, SD);
    double pOld = distribution.logPdf(oldValue);
    double pNew = distribution.logPdf(value);
    setNodeTrait(node, entry, value);
    double logq = pOld - pNew;
    traitModel.getTraitParameter().getParameter(thisNumber).fireParameterChangedEvent();
    return logq;
}
Also used: MultivariateNormalDistribution (dr.math.distributions.MultivariateNormalDistribution), NormalDistribution (dr.math.distributions.NormalDistribution)
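This variant Gibbs-samples one randomly chosen entry of the latent trait vector. For x ~ N(mu, P^-1), the full conditional of entry i given the remaining entries is univariate normal with mean mu_i - (1/P_ii) * sum over j != i of P_ij (x_j - mu_j) and standard deviation sqrt(1/P_ii), which is what getConditionalMean and the SD = Math.sqrt(1 / thisP[entry][entry]) line compute. A minimal plain-Java sketch with hypothetical names (not the beast-mcmc code):

// Illustrative sketch only; not part of beast-mcmc.
public final class EntryConditionalSketch {

    /** Returns {conditionalMean, conditionalSd} for entry i of x ~ N(mu, P^-1). */
    static double[] entryConditional(int i, double[][] precision, double[] x, double[] mu) {
        // shift = sum over j != i of P[i][j] * (x[j] - mu[j])
        double shift = 0.0;
        for (int j = 0; j < x.length; j++) {
            if (j != i) {
                shift += precision[i][j] * (x[j] - mu[j]);
            }
        }
        double conditionalMean = mu[i] - shift / precision[i][i];
        double conditionalSd = Math.sqrt(1.0 / precision[i][i]);
        return new double[] { conditionalMean, conditionalSd };
    }
}

The returned logq = pOld - pNew in the snippet is the usual Hastings correction for a proposal drawn independently from this conditional distribution.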

Aggregations

MultivariateNormalDistribution (dr.math.distributions.MultivariateNormalDistribution): 13
Matrix (dr.math.matrixAlgebra.Matrix): 4
IllegalDimension (dr.math.matrixAlgebra.IllegalDimension): 3
Tree (dr.evolution.tree.Tree): 2
MultivariateDistributionLikelihood (dr.inference.distribution.MultivariateDistributionLikelihood): 2
Vector (dr.math.matrixAlgebra.Vector): 2
BranchRates (dr.evolution.tree.BranchRates): 1
NodeRef (dr.evolution.tree.NodeRef): 1
DummyLatentTruncationProvider (dr.evomodel.continuous.DummyLatentTruncationProvider): 1
DistributionLikelihood (dr.inference.distribution.DistributionLikelihood): 1
MultivariateNormalDistributionModel (dr.inference.distribution.MultivariateNormalDistributionModel): 1
NormalDistributionModel (dr.inference.distribution.NormalDistributionModel): 1
ParametricDistributionModel (dr.inference.distribution.ParametricDistributionModel): 1
CompoundParameter (dr.inference.model.CompoundParameter): 1
MaskedParameter (dr.inference.model.MaskedParameter): 1
Parameter (dr.inference.model.Parameter): 1
EllipticalSliceOperator (dr.inference.operators.EllipticalSliceOperator): 1
GaussianProcessRandomGenerator (dr.math.distributions.GaussianProcessRandomGenerator): 1
NormalDistribution (dr.math.distributions.NormalDistribution): 1
WishartSufficientStatistics (dr.math.distributions.WishartSufficientStatistics): 1