
Example 1 with SiteRateModel

Use of dr.evomodel.siteratemodel.SiteRateModel in project beast-mcmc by beast-dev.

In the class MultiPartitionDataLikelihoodDelegate, the method calculateLikelihood:

/**
 * Calculate the log likelihood of the current state.
 *
 * @return the log likelihood.
 */
@Override
public double calculateLikelihood(List<BranchOperation> branchOperations, List<NodeOperation> nodeOperations, int rootNodeNumber) throws LikelihoodException {
    boolean throwLikelihoodRescalingException = false;
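    // Per-partition rescaling policy: ALWAYS and DELAYED always use and recompute
    // scale factors once triggered; DYNAMIC recomputes them for RESCALE_TIMES
    // evaluations after rescalingCount exceeds rescalingFrequency, and throws
    // LikelihoodRescalingException so the caller repeats the evaluation with
    // rescaling switched on.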
    if (!initialEvaluation) {
        for (int i = 0; i < partitionCount; i++) {
            if (!this.delayRescalingUntilUnderflow || everUnderflowed[i]) {
                if (this.rescalingScheme == PartialsRescalingScheme.ALWAYS || this.rescalingScheme == PartialsRescalingScheme.DELAYED) {
                    useScaleFactors[i] = true;
                    recomputeScaleFactors[i] = true;
                } else if (this.rescalingScheme == PartialsRescalingScheme.DYNAMIC) {
                    useScaleFactors[i] = true;
                    if (DEBUG) {
                        System.out.println("rescalingCount[" + i + "] = " + rescalingCount[i]);
                    }
                    if (rescalingCount[i] > rescalingFrequency) {
                        if (DEBUG) {
                            System.out.println("rescalingCount > rescalingFrequency");
                        }
                        rescalingCount[i] = 0;
                        rescalingCountInner[i] = 0;
                    }
                    if (DEBUG) {
                        System.out.println("rescalingCountInner = " + rescalingCountInner[i]);
                    }
                    if (rescalingCountInner[i] < RESCALE_TIMES) {
                        if (DEBUG) {
                            System.out.println("rescalingCountInner < RESCALE_TIMES");
                        }
                        recomputeScaleFactors[i] = true;
                        updatePartition[i] = true;
                        rescalingCountInner[i]++;
                        throwLikelihoodRescalingException = true;
                    }
                }
            }
        }
        if (throwLikelihoodRescalingException) {
            throw new LikelihoodRescalingException();
        }
    }
    if (RESCALING_OFF) {
        // a debugging switch
        for (int i = 0; i < partitionCount; i++) {
            useScaleFactors[i] = false;
            recomputeScaleFactors[i] = false;
        }
    }
    int k = 0;
    for (EvolutionaryProcessDelegate evolutionaryProcessDelegate : evolutionaryProcessDelegates) {
        if (updateSubstitutionModels[k]) {
            // TODO: More efficient to update only the substitution model that changed, instead of all
            // TODO: flip currently assumes 1 substitution model per partition
            evolutionaryProcessDelegate.updateSubstitutionModels(beagle, flip[k]);
            updatePartition[k] = true;
            if (DEBUG) {
                System.out.println("updateSubstitutionModels, updatePartition[" + k + "] = " + updatePartition[k]);
            }
            updateAllPartitions = false;
        // we are currently assuming a no-category model...
        }
        k++;
    }
    k = 0;
    for (SiteRateModel siteRateModel : siteRateModels) {
        if (updateSiteRateModels[k]) {
            double[] categoryRates = siteRateModel.getCategoryRates();
            if (categoryRates == null) {
                // The category rates could not be computed (probably a very small alpha), so reject the move.
                return Double.NEGATIVE_INFINITY;
            }
            if (flip[k]) {
                categoryRateBufferHelper[k].flipOffset(0);
            }
            beagle.setCategoryRatesWithIndex(categoryRateBufferHelper[k].getOffsetIndex(0), categoryRates);
            updatePartition[k] = true;
            if (DEBUG) {
                System.out.println("updateSiteRateModels, updatePartition[" + k + "] = " + updatePartition[k]);
            }
            updateAllPartitions = false;
        }
        k++;
    }
    int branchUpdateCount = 0;
    for (BranchOperation op : branchOperations) {
        branchUpdateIndices[branchUpdateCount] = op.getBranchNumber();
        branchLengths[branchUpdateCount] = op.getBranchLength();
        branchUpdateCount++;
    }
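    // Flatten the (updated partition x updated branch) transition-matrix work into
    // parallel arrays so a single BEAGLE call below can update all partitions at once.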
    if (branchUpdateCount > 0) {
        // TODO below only applies to homogenous substitution models
        int[] eigenDecompositionIndices = new int[branchUpdateCount * partitionCount];
        int[] categoryRateIndices = new int[branchUpdateCount * partitionCount];
        int[] probabilityIndices = new int[branchUpdateCount * partitionCount];
        double[] edgeLengths = new double[branchUpdateCount * partitionCount];
        int op = 0;
        int partition = 0;
        for (EvolutionaryProcessDelegate evolutionaryProcessDelegate : evolutionaryProcessDelegates) {
            if (updatePartition[partition] || updateAllPartitions) {
                if (flip[partition]) {
                    evolutionaryProcessDelegate.flipTransitionMatrices(branchUpdateIndices, branchUpdateCount);
                }
                for (int i = 0; i < branchUpdateCount; i++) {
                    eigenDecompositionIndices[op] = evolutionaryProcessDelegate.getEigenIndex(0);
                    categoryRateIndices[op] = categoryRateBufferHelper[partition].getOffsetIndex(0);
                    probabilityIndices[op] = evolutionaryProcessDelegate.getMatrixIndex(branchUpdateIndices[i]);
                    edgeLengths[op] = branchLengths[i];
                    op++;
                }
            }
            partition++;
        }
        beagle.updateTransitionMatricesWithMultipleModels(
                eigenDecompositionIndices,
                categoryRateIndices,
                probabilityIndices,
                null, // firstDerivativeIndices
                null, // secondDerivativeIndices
                edgeLengths,
                op);
    }
    for (int i = 0; i < partitionCount; i++) {
        if (updatePartition[i] || updateAllPartitions) {
            if (DEBUG) {
                System.out.println("updatePartition[" + i + "] = " + updatePartition[i] + ", updateAllPartitions = " + updateAllPartitions);
            }
            if (flip[i]) {
                for (NodeOperation op : nodeOperations) {
                    partialBufferHelper[i].flipOffset(op.getNodeNumber());
                }
            }
        }
    }
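    // Pack one PARTITION_OPERATION_TUPLE_SIZE-int tuple per (node operation, partition):
    // destination partials, write-scale index, read-scale index, child-1 partials,
    // child-1 matrix, child-2 partials, child-2 matrix, partition index and the
    // cumulative scale index (left as Beagle.NONE at this point).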
    int operationCount = 0;
    k = 0;
    for (NodeOperation op : nodeOperations) {
        int nodeNum = op.getNodeNumber();
        int[] writeScale = new int[partitionCount];
        int[] readScale = new int[partitionCount];
        for (int i = 0; i < partitionCount; i++) {
            if (updatePartition[i] || updateAllPartitions) {
                if (useScaleFactors[i]) {
                    // get the index of this scaling buffer
                    int n = nodeNum - tipCount;
                    if (recomputeScaleFactors[i]) {
                        // flip the indicator: can take either n or (internalNodeCount + 1) - n
                        scaleBufferHelper[i].flipOffset(n);
                        // store the index
                        scaleBufferIndices[i][n] = scaleBufferHelper[i].getOffsetIndex(n);
                        // Write new scaleFactor
                        writeScale[i] = scaleBufferIndices[i][n];
                        readScale[i] = Beagle.NONE;
                    } else {
                        writeScale[i] = Beagle.NONE;
                        // Read existing scaleFactor
                        readScale[i] = scaleBufferIndices[i][n];
                    }
                } else {
                    // Not using scaleFactors
                    writeScale[i] = Beagle.NONE;
                    readScale[i] = Beagle.NONE;
                }
            }
        }
        //Example 1: 1 partition with 1 evolutionary model & -beagle_instances 3
        //partition 0 -> model 0
        //partition 1 -> model 0
        //partition 2 -> model 0
        //Example 2: 3 partitions with 3 evolutionary models & -beagle_instances 2
        //partitions 0 & 1 -> model 0
        //partitions 2 & 3 -> model 1
        //partitions 4 & 5 -> model 2
        int mapPartition = partitionCount / evolutionaryProcessDelegates.size();
        for (int i = 0; i < partitionCount; i++) {
            if (updatePartition[i] || updateAllPartitions) {
                EvolutionaryProcessDelegate evolutionaryProcessDelegate = evolutionaryProcessDelegates.get(i / mapPartition);
                /*
                if (evolutionaryProcessDelegates.size() == partitionCount) {
                    evolutionaryProcessDelegate = evolutionaryProcessDelegates.get(i);
                } else {
                    evolutionaryProcessDelegate = evolutionaryProcessDelegates.get(0);
                }
                */
                operations[k] = partialBufferHelper[i].getOffsetIndex(nodeNum);
                operations[k + 1] = writeScale[i];
                operations[k + 2] = readScale[i];
                // source node 1
                operations[k + 3] = partialBufferHelper[i].getOffsetIndex(op.getLeftChild());
                // source matrix 1
                operations[k + 4] = evolutionaryProcessDelegate.getMatrixIndex(op.getLeftChild());
                // source node 2
                operations[k + 5] = partialBufferHelper[i].getOffsetIndex(op.getRightChild());
                // source matrix 2
                operations[k + 6] = evolutionaryProcessDelegate.getMatrixIndex(op.getRightChild());
                operations[k + 7] = i;
                //TODO: we don't know the cumulateScaleBufferIndex here yet (see below)
                operations[k + 8] = Beagle.NONE;
                if (DEBUG) {
                    if (k == 0 || (k == Beagle.PARTITION_OPERATION_TUPLE_SIZE)) {
                        System.out.println("write = " + writeScale[i] + "; read = " + readScale[i] + "; parent = " + operations[k] + ", k = " + k + ", i = " + i);
                    }
                }
                k += Beagle.PARTITION_OPERATION_TUPLE_SIZE;
                operationCount++;
            }
        }
    }
    beagle.updatePartialsByPartition(operations, operationCount);
    //double[] rootPartials = new double[totalPatternCount * stateCount];
    //beagle.getPartials(rootIndex, 0, rootPartials);
    int[] cumulativeScaleIndices = new int[partitionCount];
    for (int i = 0; i < partitionCount; i++) {
        cumulativeScaleIndices[i] = Beagle.NONE;
        if (useScaleFactors[i]) {
            if (recomputeScaleFactors[i] && (updatePartition[i] || updateAllPartitions)) {
                scaleBufferHelper[i].flipOffset(internalNodeCount);
                cumulativeScaleIndices[i] = scaleBufferHelper[i].getOffsetIndex(internalNodeCount);
                //TODO: check with Daniel if calling these methods using an iteration can be done more efficiently
                beagle.resetScaleFactorsByPartition(cumulativeScaleIndices[i], i);
                beagle.accumulateScaleFactorsByPartition(scaleBufferIndices[i], internalNodeCount, cumulativeScaleIndices[i], i);
            } else {
                cumulativeScaleIndices[i] = scaleBufferHelper[i].getOffsetIndex(internalNodeCount);
            }
        }
    }
    // these could be set only when they change but store/restore would need to be considered
    for (int i = 0; i < siteRateModels.size(); i++) {
        double[] categoryWeights = this.siteRateModels.get(i).getCategoryProportions();
        beagle.setCategoryWeights(i, categoryWeights);
        // This should probably explicitly be the state frequencies for the root node...
        double[] frequencies = evolutionaryProcessDelegates.get(i).getRootStateFrequencies();
        beagle.setStateFrequencies(i, frequencies);
    }
    if (DEBUG) {
        for (int i = 0; i < partitionCount; i++) {
            System.out.println("useScaleFactors=" + useScaleFactors[i] + " recomputeScaleFactors=" + recomputeScaleFactors[i] + " (partition: " + i + ")");
        }
    }
    /*System.out.println("partitionCount = " + partitionCount);
                for (int i = 0; i < partitionCount; i++) {
                    System.out.println("partitionIndices[" + i + "] = " + partitionIndices[i]);
                }*/
    int[] partitionIndices = new int[partitionCount];
    int[] rootIndices = new int[partitionCount];
    int[] categoryWeightsIndices = new int[partitionCount];
    int[] stateFrequenciesIndices = new int[partitionCount];
    double[] sumLogLikelihoods = new double[1];
    double[] sumLogLikelihoodsByPartition = new double[partitionCount];
    // create a list of partitions that have been updated
    int updatedPartitionCount = 0;
    for (int i = 0; i < partitionCount; i++) {
        if (updatePartition[i] || updateAllPartitions) {
            partitionIndices[updatedPartitionCount] = i;
            rootIndices[updatedPartitionCount] = partialBufferHelper[i].getOffsetIndex(rootNodeNumber);
            categoryWeightsIndices[updatedPartitionCount] = i % siteRateModels.size();
            stateFrequenciesIndices[updatedPartitionCount] = i % siteRateModels.size();
            cumulativeScaleIndices[updatedPartitionCount] = cumulativeScaleIndices[i];
            updatedPartitionCount++;
        }
    }
    //TODO: check these arguments with Daniel
    //TODO: partitionIndices needs to be set according to which partitions need updating?
    beagle.calculateRootLogLikelihoodsByPartition(rootIndices, categoryWeightsIndices, stateFrequenciesIndices, cumulativeScaleIndices, partitionIndices, updatedPartitionCount, 1, sumLogLikelihoodsByPartition, sumLogLikelihoods);
    // write updated partition likelihoods to the cached array
    for (int i = 0; i < updatedPartitionCount; i++) {
        cachedLogLikelihoodsByPartition[partitionIndices[i]] = sumLogLikelihoodsByPartition[i];
        // clear the global flags
        updatePartition[partitionIndices[i]] = false;
        recomputeScaleFactors[partitionIndices[i]] = false;
        partitionWasUpdated[partitionIndices[i]] = true;
    }
    double tmpLogL = sumLogLikelihoods[0];
    if (DEBUG) {
        for (int i = 0; i < partitionCount; i++) {
            System.out.println("partition " + i + ": " + cachedLogLikelihoodsByPartition[i] + (partitionWasUpdated[i] ? " [updated]" : ""));
        }
    }
    // If the per-pattern site log likelihoods are needed:
    // if (patternLogLikelihoods == null) {
    //     patternLogLikelihoods = new double[totalPatternCount];
    // }
    // beagle.getSiteLogLikelihoods(patternLogLikelihoods);
    updateSubstitutionModels(false);
    updateSiteRateModels(false);
    updateAllPartitions = true;
    if (Double.isNaN(tmpLogL) || Double.isInfinite(tmpLogL)) {
        if (DEBUG) {
            System.out.println("Double.isNaN(logL) || Double.isInfinite(logL)");
        }
        for (int i = 0; i < updatedPartitionCount; i++) {
            if (Double.isNaN(sumLogLikelihoodsByPartition[i]) || Double.isInfinite(sumLogLikelihoodsByPartition[i])) {
                everUnderflowed[partitionIndices[i]] = true;
            }
        }
        if (firstRescaleAttempt) {
            if (delayRescalingUntilUnderflow || rescalingScheme == PartialsRescalingScheme.DELAYED) {
                // show a message but only every 1000 rescales
                if (rescalingMessageCount % 1000 == 0) {
                    if (rescalingMessageCount > 0) {
                        Logger.getLogger("dr.evomodel").info("Underflow calculating likelihood (" + rescalingMessageCount + " messages not shown).");
                    } else {
                        Logger.getLogger("dr.evomodel").info("Underflow calculating likelihood. Attempting a rescaling... (" + getId() + ")");
                    }
                }
                rescalingMessageCount += 1;
            }
            for (int i = 0; i < updatedPartitionCount; i++) {
                if (delayRescalingUntilUnderflow || rescalingScheme == PartialsRescalingScheme.DELAYED) {
                    if (Double.isNaN(sumLogLikelihoodsByPartition[i]) || Double.isInfinite(sumLogLikelihoodsByPartition[i])) {
                        useScaleFactors[partitionIndices[i]] = true;
                        recomputeScaleFactors[partitionIndices[i]] = true;
                        updatePartition[partitionIndices[i]] = true;
                        // turn off double buffer flipping so the next call overwrites the
                        // underflowed buffers. Flip will be turned on again in storeState for
                        // next step
                        flip[partitionIndices[i]] = false;
                        updateAllPartitions = false;
                        if (DEBUG) {
                            System.out.println("Double.isNaN(logL) || Double.isInfinite(logL) (partition index: " + partitionIndices[i] + ")");
                        }
                    }
                }
            }
            firstRescaleAttempt = false;
            throw new LikelihoodUnderflowException();
        }
        return Double.NEGATIVE_INFINITY;
    } else {
        for (int i = 0; i < updatedPartitionCount; i++) {
            if (partitionWasUpdated[partitionIndices[i]]) {
                if (!this.delayRescalingUntilUnderflow || everUnderflowed[partitionIndices[i]]) {
                    if (this.rescalingScheme == PartialsRescalingScheme.DYNAMIC) {
                        if (!initialEvaluation) {
                            rescalingCount[partitionIndices[i]]++;
                        }
                    }
                }
                partitionWasUpdated[partitionIndices[i]] = false;
            }
            //TODO: probably better to only switch back those booleans that were actually altered
            recomputeScaleFactors[partitionIndices[i]] = false;
            flip[partitionIndices[i]] = true;
        }
        firstRescaleAttempt = true;
        initialEvaluation = false;
    }
    //********************************************************************
    double logL = 0.0;
    for (double l : cachedLogLikelihoodsByPartition) {
        logL += l;
    }
    return logL;
}
Also used: SiteRateModel (dr.evomodel.siteratemodel.SiteRateModel)
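
The method above leans on a flip/offset double-buffering pattern (flipOffset, getOffsetIndex) so that a rejected MCMC move can be undone without copying partials, matrices or scale buffers back. A minimal sketch of that pattern, using a simplified stand-in rather than BEAST's actual BufferIndexHelper:

// Simplified stand-in for the buffer-index helpers used above; illustration only.
public class SimpleBufferIndexHelper {

    private final int bufferCount; // number of logical buffers
    private final int[] offsets;   // per buffer: 0 = first bank, 1 = second bank

    public SimpleBufferIndexHelper(int bufferCount) {
        this.bufferCount = bufferCount;
        this.offsets = new int[bufferCount];
    }

    // Switch logical buffer i to its other physical bank. New values are then
    // written to the bank that does not hold the last accepted state, so a
    // rejected move is undone by simply flipping back.
    public void flipOffset(int i) {
        offsets[i] = 1 - offsets[i];
    }

    // Physical index of logical buffer i: the two banks are laid out
    // back to back, so the second bank starts at bufferCount.
    public int getOffsetIndex(int i) {
        return i + offsets[i] * bufferCount;
    }
}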

Example 2 with SiteRateModel

Use of dr.evomodel.siteratemodel.SiteRateModel in project beast-mcmc by beast-dev.

In the class DataLikelihoodTester2, the method main:

public static void main(String[] args) {
    // turn off logging to avoid screen noise...
    Logger logger = Logger.getLogger("dr");
    logger.setUseParentHandlers(false);
    SimpleAlignment alignment = createAlignment(sequences, Nucleotides.INSTANCE);
    TreeModel treeModel;
    try {
        treeModel = createSpecifiedTree("((human:0.1,chimp:0.1):0.1,gorilla:0.2)");
    } catch (Exception e) {
        throw new RuntimeException("Unable to parse Newick tree", e);
    }
    System.out.print("\nTest BeagleTreeLikelihood (kappa = 1): ");
    //substitutionModel
    Parameter freqs = new Parameter.Default(new double[] { 0.25, 0.25, 0.25, 0.25 });
    Parameter kappa = new Parameter.Default(HKYParser.KAPPA, 1.0, 0, 100);
    FrequencyModel f = new FrequencyModel(Nucleotides.INSTANCE, freqs);
    HKY hky = new HKY(kappa, f);
    //siteModel
    double alpha = 0.5;
    GammaSiteRateModel siteRateModel = new GammaSiteRateModel("gammaModel", alpha, 4);
    //        GammaSiteRateModel siteRateModel = new GammaSiteRateModel("siteRateModel");
    siteRateModel.setSubstitutionModel(hky);
    Parameter mu = new Parameter.Default(GammaSiteModelParser.SUBSTITUTION_RATE, 1.0, 0, Double.POSITIVE_INFINITY);
    siteRateModel.setRelativeRateParameter(mu);
    FrequencyModel f2 = new FrequencyModel(Nucleotides.INSTANCE, freqs);
    Parameter kappa2 = new Parameter.Default(HKYParser.KAPPA, 10.0, 0, 100);
    HKY hky2 = new HKY(kappa2, f2);
    GammaSiteRateModel siteRateModel2 = new GammaSiteRateModel("gammaModel", alpha, 4);
    siteRateModel2.setSubstitutionModel(hky2);
    siteRateModel2.setRelativeRateParameter(mu);
    //treeLikelihood
    SitePatterns patterns = new SitePatterns(alignment, null, 0, -1, 1, true);
    BranchModel branchModel = new HomogeneousBranchModel(siteRateModel.getSubstitutionModel(), siteRateModel.getSubstitutionModel().getFrequencyModel());
    BranchModel branchModel2 = new HomogeneousBranchModel(siteRateModel2.getSubstitutionModel(), siteRateModel2.getSubstitutionModel().getFrequencyModel());
    BranchRateModel branchRateModel = new DefaultBranchRateModel();
    BeagleTreeLikelihood treeLikelihood = new BeagleTreeLikelihood(patterns, treeModel, branchModel, siteRateModel, branchRateModel, null, false, PartialsRescalingScheme.AUTO, true);
    double logLikelihood = treeLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    System.out.print("\nTest BeagleDataLikelihoodDelegate (kappa = 1): ");
    BeagleDataLikelihoodDelegate dataLikelihoodDelegate = new BeagleDataLikelihoodDelegate(treeModel, patterns, branchModel, siteRateModel, false, PartialsRescalingScheme.NONE, false);
    TreeDataLikelihood treeDataLikelihood = new TreeDataLikelihood(dataLikelihoodDelegate, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    hky.setKappa(5.0);
    System.out.print("\nTest BeagleDataLikelihoodDelegate (kappa = 5): ");
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    System.out.print("\nTest BeagleDataLikelihoodDelegate (kappa = 10): ");
    dataLikelihoodDelegate = new BeagleDataLikelihoodDelegate(treeModel, patterns, branchModel2, siteRateModel2, false, PartialsRescalingScheme.NONE, false);
    treeDataLikelihood = new TreeDataLikelihood(dataLikelihoodDelegate, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    hky2.setKappa(11.0);
    System.out.print("\nTest BeagleDataLikelihoodDelegate (kappa = 11): ");
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    hky.setKappa(1.0);
    hky2.setKappa(10.0);
    MultiPartitionDataLikelihoodDelegate multiPartitionDataLikelihoodDelegate;
    System.out.print("\nTest MultiPartitionDataLikelihoodDelegate 1 partition (kappa = 1):");
    multiPartitionDataLikelihoodDelegate = new MultiPartitionDataLikelihoodDelegate(treeModel, Collections.singletonList((PatternList) patterns), Collections.singletonList((BranchModel) branchModel), Collections.singletonList((SiteRateModel) siteRateModel), true, PartialsRescalingScheme.NONE, false);
    treeDataLikelihood = new TreeDataLikelihood(multiPartitionDataLikelihoodDelegate, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    hky.setKappa(5.0);
    System.out.print("\nTest MultiPartitionDataLikelihoodDelegate 1 partition (kappa = 5):");
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    hky.setKappa(1.0);
    System.out.print("\nTest MultiPartitionDataLikelihoodDelegate 1 partition (kappa = 10):");
    multiPartitionDataLikelihoodDelegate = new MultiPartitionDataLikelihoodDelegate(treeModel, Collections.singletonList((PatternList) patterns), Collections.singletonList((BranchModel) branchModel2), Collections.singletonList((SiteRateModel) siteRateModel2), true, PartialsRescalingScheme.NONE, false);
    treeDataLikelihood = new TreeDataLikelihood(multiPartitionDataLikelihoodDelegate, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    System.out.print("\nTest MultiPartitionDataLikelihoodDelegate 2 partitions (kappa = 1, 10): ");
    List<PatternList> patternLists = new ArrayList<PatternList>();
    patternLists.add(patterns);
    patternLists.add(patterns);
    List<SiteRateModel> siteRateModels = new ArrayList<SiteRateModel>();
    siteRateModels.add(siteRateModel);
    siteRateModels.add(siteRateModel2);
    List<BranchModel> branchModels = new ArrayList<BranchModel>();
    branchModels.add(branchModel);
    branchModels.add(branchModel2);
    multiPartitionDataLikelihoodDelegate = new MultiPartitionDataLikelihoodDelegate(treeModel, patternLists, branchModels, siteRateModels, true, PartialsRescalingScheme.NONE, false);
    treeDataLikelihood = new TreeDataLikelihood(multiPartitionDataLikelihoodDelegate, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + " (NOT OK: this is 2x the logLikelihood of the 2nd partition)\n\n");
    // NB: this exit makes every additional test below unreachable; remove or
    // comment it out to run them.
    System.exit(0);
    //START ADDITIONAL TEST #1 - Guy Baele
    System.out.println("-- Test #1 SiteRateModels -- ");
    //alpha in partition 1 reject followed by alpha in partition 2 reject
    System.out.print("Adjust alpha in partition 1: ");
    siteRateModel.setAlpha(0.4);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    System.out.print("Return alpha in partition 1 to original value: ");
    siteRateModel.setAlpha(0.5);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + " (i.e. reject: OK)\n");
    System.out.print("Adjust alpha in partition 2: ");
    siteRateModel2.setAlpha(0.35);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    System.out.print("Return alpha in partition 2 to original value: ");
    siteRateModel2.setAlpha(0.5);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + " (i.e. reject: OK)\n");
    //alpha in partition 1 accept followed by alpha in partition 2 accept
    System.out.print("Adjust alpha in partition 1: ");
    siteRateModel.setAlpha(0.4);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    System.out.print("Adjust alpha in partition 2: ");
    siteRateModel2.setAlpha(0.35);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + " (NOT OK: same logLikelihood as only setting alpha in partition 2)");
    System.out.print("Return alpha in partition 1 to original value: ");
    siteRateModel.setAlpha(0.5);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + " (NOT OK: alpha in partition 2 has not been returned to original value yet)");
    System.out.print("Return alpha in partition 2 to original value: ");
    siteRateModel2.setAlpha(0.5);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + "\n");
    //adjusting alphas in both partitions without explicitly calling getLogLikelihood() in between
    System.out.print("Adjust both alphas in partitions 1 and 2: ");
    siteRateModel.setAlpha(0.4);
    siteRateModel2.setAlpha(0.35);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    System.out.print("Return alpha in partition 2 to original value: ");
    siteRateModel2.setAlpha(0.5);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + " (NOT OK: alpha in partition 1 has not been returned to original value yet)");
    System.out.print("Return alpha in partition 1 to original value: ");
    siteRateModel.setAlpha(0.5);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + "\n\n");
    //END ADDITIONAL TEST - Guy Baele
    //START ADDITIONAL TEST #2 - Guy Baele
    System.out.println("-- Test #2 SiteRateModels -- ");
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    //1 siteRateModel shared across 2 partitions
    siteRateModels = new ArrayList<SiteRateModel>();
    siteRateModels.add(siteRateModel);
    multiPartitionDataLikelihoodDelegate = new MultiPartitionDataLikelihoodDelegate(treeModel, patternLists, branchModels, siteRateModels, true, PartialsRescalingScheme.NONE, false);
    treeDataLikelihood = new TreeDataLikelihood(multiPartitionDataLikelihoodDelegate, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + "\n");
    System.out.print("Adjust alpha in shared siteRateModel: ");
    siteRateModel.setAlpha(0.4);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + " (NOT OK: same logLikelihood as only adjusted alpha for partition 1)");
    siteRateModel.setAlpha(0.5);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + "\n\n");
    //END ADDITIONAL TEST - Guy Baele
    //START ADDITIONAL TEST #3 - Guy Baele
    System.out.println("-- Test #3 SiteRateModels -- ");
    siteRateModel = new GammaSiteRateModel("gammaModel");
    siteRateModel.setSubstitutionModel(hky);
    siteRateModel.setRelativeRateParameter(mu);
    siteRateModel2 = new GammaSiteRateModel("gammaModel2");
    siteRateModel2.setSubstitutionModel(hky2);
    siteRateModel2.setRelativeRateParameter(mu);
    siteRateModels = new ArrayList<SiteRateModel>();
    siteRateModels.add(siteRateModel);
    siteRateModels.add(siteRateModel2);
    multiPartitionDataLikelihoodDelegate = new MultiPartitionDataLikelihoodDelegate(treeModel, patternLists, branchModels, siteRateModels, true, PartialsRescalingScheme.NONE, false);
    treeDataLikelihood = new TreeDataLikelihood(multiPartitionDataLikelihoodDelegate, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + "\n");
    System.out.print("Adjust kappa in partition 1: ");
    hky.setKappa(5.0);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + " (NOT OK: logLikelihood has not changed?)");
    System.out.print("Return kappa in partition 1 to original value: ");
    hky.setKappa(1.0);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + "\n");
    System.out.print("Adjust kappa in partition 2: ");
    hky2.setKappa(11.0);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    System.out.print("Return kappa in partition 2 to original value: ");
    hky2.setKappa(10.0);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + " (i.e. reject: OK)\n\n");
    //END ADDITIONAL TEST - Guy Baele
    //START ADDITIONAL TEST #4 - Guy Baele
    System.out.println("-- Test #4 SiteRateModels -- ");
    SimpleAlignment secondAlignment = createAlignment(moreSequences, Nucleotides.INSTANCE);
    SitePatterns morePatterns = new SitePatterns(secondAlignment, null, 0, -1, 1, true);
    BeagleDataLikelihoodDelegate dataLikelihoodDelegateOne = new BeagleDataLikelihoodDelegate(treeModel, patterns, branchModel, siteRateModel, false, PartialsRescalingScheme.NONE, false);
    TreeDataLikelihood treeDataLikelihoodOne = new TreeDataLikelihood(dataLikelihoodDelegateOne, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihoodOne.getLogLikelihood();
    System.out.println("\nBeagleDataLikelihoodDelegate logLikelihood partition 1 (kappa = 1) = " + logLikelihood);
    hky.setKappa(10.0);
    logLikelihood = treeDataLikelihoodOne.getLogLikelihood();
    System.out.println("BeagleDataLikelihoodDelegate logLikelihood partition 1 (kappa = 10) = " + logLikelihood);
    hky.setKappa(1.0);
    BeagleDataLikelihoodDelegate dataLikelihoodDelegateTwo = new BeagleDataLikelihoodDelegate(treeModel, morePatterns, branchModel2, siteRateModel2, false, PartialsRescalingScheme.NONE, false);
    TreeDataLikelihood treeDataLikelihoodTwo = new TreeDataLikelihood(dataLikelihoodDelegateTwo, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihoodTwo.getLogLikelihood();
    System.out.println("BeagleDataLikelihoodDelegate logLikelihood partition 2 (kappa = 10) = " + logLikelihood + "\n");
    multiPartitionDataLikelihoodDelegate = new MultiPartitionDataLikelihoodDelegate(treeModel, Collections.singletonList((PatternList) patterns), Collections.singletonList((BranchModel) branchModel), Collections.singletonList((SiteRateModel) siteRateModel), true, PartialsRescalingScheme.NONE, false);
    treeDataLikelihood = new TreeDataLikelihood(multiPartitionDataLikelihoodDelegate, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.print("Test MultiPartitionDataLikelihoodDelegate 1st partition (kappa = 1):");
    System.out.println("logLikelihood = " + logLikelihood);
    hky.setKappa(10.0);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.print("Test MultiPartitionDataLikelihoodDelegate 1st partition (kappa = 10):");
    System.out.println("logLikelihood = " + logLikelihood);
    hky.setKappa(1.0);
    multiPartitionDataLikelihoodDelegate = new MultiPartitionDataLikelihoodDelegate(treeModel, Collections.singletonList((PatternList) morePatterns), Collections.singletonList((BranchModel) branchModel2), Collections.singletonList((SiteRateModel) siteRateModel2), true, PartialsRescalingScheme.NONE, false);
    treeDataLikelihood = new TreeDataLikelihood(multiPartitionDataLikelihoodDelegate, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.print("Test MultiPartitionDataLikelihoodDelegate 2nd partition (kappa = 10):");
    System.out.println("logLikelihood = " + logLikelihood + "\n");
    patternLists = new ArrayList<PatternList>();
    patternLists.add(patterns);
    patternLists.add(morePatterns);
    multiPartitionDataLikelihoodDelegate = new MultiPartitionDataLikelihoodDelegate(treeModel, patternLists, branchModels, siteRateModels, true, PartialsRescalingScheme.NONE, false);
    treeDataLikelihood = new TreeDataLikelihood(multiPartitionDataLikelihoodDelegate, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.print("Test MultiPartitionDataLikelihoodDelegate 2 partitions (kappa = 1, 10): ");
    System.out.println("logLikelihood = " + logLikelihood + " (NOT OK: should be the sum of both separate logLikelihoods)\nKappa value of partition 2 is used to compute logLikelihood for both partitions?");
//END ADDITIONAL TEST - Guy Baele
}
Also used: FrequencyModel (dr.evomodel.substmodel.FrequencyModel), ArrayList (java.util.ArrayList), PatternList (dr.evolution.alignment.PatternList), Logger (java.util.logging.Logger), HomogeneousBranchModel (dr.evomodel.branchmodel.HomogeneousBranchModel), BranchModel (dr.evomodel.branchmodel.BranchModel), DefaultBranchRateModel (dr.evomodel.branchratemodel.DefaultBranchRateModel), BranchRateModel (dr.evomodel.branchratemodel.BranchRateModel), SiteRateModel (dr.evomodel.siteratemodel.SiteRateModel), GammaSiteRateModel (dr.evomodel.siteratemodel.GammaSiteRateModel), TreeModel (dr.evomodel.tree.TreeModel), SimpleAlignment (dr.evolution.alignment.SimpleAlignment), SitePatterns (dr.evolution.alignment.SitePatterns), BeagleTreeLikelihood (dr.evomodel.treelikelihood.BeagleTreeLikelihood), HKY (dr.evomodel.substmodel.nucleotide.HKY), Parameter (dr.inference.model.Parameter)
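
The OK / NOT OK verdicts in this test are judged by eye from the printed values. A hypothetical helper (not present in the source; the name and tolerance are this sketch's assumptions) could make the comparison explicit:

// Hypothetical: compare a recomputed log likelihood against a reference value
// instead of inspecting the console output by eye.
public class LogLikelihoodCheck {

    static void check(String label, double reference, double recomputed) {
        // Assumed absolute tolerance; tune to the magnitude of the values compared.
        double tolerance = 1e-10;
        boolean ok = Math.abs(reference - recomputed) < tolerance;
        System.out.println(label + ": reference = " + reference
                + ", recomputed = " + recomputed + (ok ? " (OK)" : " (NOT OK)"));
    }

    public static void main(String[] args) {
        check("identical values", -1000.0, -1000.0);
        check("divergent values", -1000.0, -999.5);
    }
}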

Example 3 with SiteRateModel

Use of dr.evomodel.siteratemodel.SiteRateModel in project beast-mcmc by beast-dev.

In the class MultiPartitionDataLikelihoodParser, the method parseXMLObject:

public Object parseXMLObject(XMLObject xo) throws XMLParseException {
    boolean useAmbiguities = xo.getAttribute(USE_AMBIGUITIES, false);
    int instanceCount = xo.getAttribute(INSTANCE_COUNT, 1);
    if (instanceCount < 1) {
        instanceCount = 1;
    }
    String ic = System.getProperty(BEAGLE_INSTANCE_COUNT);
    if (ic != null && ic.length() > 0) {
        instanceCount = Integer.parseInt(ic);
    }
    if (DEBUG) {
        System.out.println("instanceCount: " + instanceCount);
    }
    List<PatternList> patternLists = xo.getAllChildren(PatternList.class);
    TreeModel treeModel = (TreeModel) xo.getChild(TreeModel.class);
    List<SiteRateModel> siteRateModels = xo.getAllChildren(SiteRateModel.class);
    FrequencyModel rootFreqModel = (FrequencyModel) xo.getChild(FrequencyModel.class);
    List<BranchModel> branchModels = xo.getAllChildren(BranchModel.class);
    if (branchModels == null) {
        if (DEBUG) {
            System.out.println("branchModels == null");
        }
        branchModels = new ArrayList<BranchModel>();
        List<SubstitutionModel> substitutionModels = xo.getAllChildren(SubstitutionModel.class);
        if (substitutionModels == null) {
            if (DEBUG) {
                System.out.println("substitutionModels == null");
            }
            for (SiteRateModel siteRateModel : siteRateModels) {
                SubstitutionModel substitutionModel = ((GammaSiteRateModel) siteRateModel).getSubstitutionModel();
                if (substitutionModel == null) {
                    throw new XMLParseException("No substitution model available for TreeDataLikelihood: " + xo.getId());
                }
                branchModels.add(new HomogeneousBranchModel(substitutionModel, rootFreqModel));
            }
        }
        if (DEBUG) {
            System.out.println("branchModels size: " + branchModels.size());
        }
        for (BranchModel branchModel : branchModels) {
            System.out.println("  " + branchModel.getId() + "  " + branchModel.getModelName());
        }
    }
    BranchRateModel branchRateModel = (BranchRateModel) xo.getChild(BranchRateModel.class);
    if (branchRateModel == null) {
        branchRateModel = new DefaultBranchRateModel();
    }
    if (DEBUG) {
        System.out.println("BranchRateModel: " + branchRateModel.getId());
    }
    TipStatesModel tipStatesModel = (TipStatesModel) xo.getChild(TipStatesModel.class);
    PartialsRescalingScheme scalingScheme = PartialsRescalingScheme.DEFAULT;
    boolean delayScaling = true;
    if (xo.hasAttribute(SCALING_SCHEME)) {
        scalingScheme = PartialsRescalingScheme.parseFromString(xo.getStringAttribute(SCALING_SCHEME));
        if (scalingScheme == null) {
            throw new XMLParseException("Unknown scaling scheme '" + xo.getStringAttribute(SCALING_SCHEME) + "' in OldBeagleTreeLikelihood object '" + xo.getId() + "'");
        }
    }
    if (xo.hasAttribute(DELAY_SCALING)) {
        delayScaling = xo.getBooleanAttribute(DELAY_SCALING);
    }
    if (instanceCount == 1) {
        if (DEBUG) {
            System.out.println("instanceCount == 1");
        }
        return createTreeDataLikelihood(patternLists, treeModel, branchModels, siteRateModels, branchRateModel, tipStatesModel, useAmbiguities, scalingScheme, delayScaling, xo);
    }
    if (tipStatesModel != null) {
        throw new XMLParseException("BEAGLE_INSTANCES option cannot be used with a TipStateModel (i.e., a sequence error model).");
    }
    List<PatternList> patternInstanceLists = new ArrayList<PatternList>();
    for (int j = 0; j < patternLists.size(); j++) {
        for (int i = 0; i < instanceCount; i++) {
            patternInstanceLists.add(new Patterns(patternLists.get(j), i, instanceCount));
        }
    }
    // Use the per-instance pattern lists built above, not the original lists.
    return createTreeDataLikelihood(patternInstanceLists, treeModel, branchModels, siteRateModels, branchRateModel, null, useAmbiguities, scalingScheme, delayScaling, xo);
}
Also used: FrequencyModel (dr.evomodel.substmodel.FrequencyModel), PatternList (dr.evolution.alignment.PatternList), Patterns (dr.evolution.alignment.Patterns), ArrayList (java.util.ArrayList), HomogeneousBranchModel (dr.evomodel.branchmodel.HomogeneousBranchModel), BranchModel (dr.evomodel.branchmodel.BranchModel), GammaSiteRateModel (dr.evomodel.siteratemodel.GammaSiteRateModel), SiteRateModel (dr.evomodel.siteratemodel.SiteRateModel), PartialsRescalingScheme (dr.evomodel.treelikelihood.PartialsRescalingScheme), SubstitutionModel (dr.evomodel.substmodel.SubstitutionModel), DefaultBranchRateModel (dr.evomodel.branchratemodel.DefaultBranchRateModel), BranchRateModel (dr.evomodel.branchratemodel.BranchRateModel), TipStatesModel (dr.evomodel.tipstatesmodel.TipStatesModel), TreeModel (dr.evomodel.tree.TreeModel)
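
When instanceCount > 1, the parser cuts every pattern list into instanceCount disjoint subsets via the Patterns(patternList, i, instanceCount) constructor, one subset per BEAGLE instance. A standalone sketch of such a split, using contiguous blocks purely for illustration (the exact layout is whatever Patterns implements):

import java.util.ArrayList;
import java.util.List;

// Illustration only: divide pattern indices 0..patternCount-1 into subSetCount
// disjoint, exhaustive chunks, one per BEAGLE instance. The real
// dr.evolution.alignment.Patterns(list, subSet, subSetCount) constructor
// performs the equivalent split on the actual site patterns.
public class PatternSplitSketch {
    public static void main(String[] args) {
        int patternCount = 10;
        int subSetCount = 3;
        List<List<Integer>> subsets = new ArrayList<List<Integer>>();
        for (int s = 0; s < subSetCount; s++) {
            subsets.add(new ArrayList<Integer>());
        }
        for (int p = 0; p < patternCount; p++) {
            // contiguous blocks: pattern p lands in chunk p * subSetCount / patternCount
            subsets.get(p * subSetCount / patternCount).add(p);
        }
        for (int s = 0; s < subSetCount; s++) {
            System.out.println("instance " + s + " -> patterns " + subsets.get(s));
        }
    }
}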

Example 4 with SiteRateModel

Use of dr.evomodel.siteratemodel.SiteRateModel in project beast-mcmc by beast-dev.

In the class TreeDataLikelihoodParser, the method parseXMLObject:

public Object parseXMLObject(XMLObject xo) throws XMLParseException {
    boolean useAmbiguities = xo.getAttribute(USE_AMBIGUITIES, false);
    // TreeDataLikelihood doesn't currently support Instances defined from the command line
    //        int instanceCount = xo.getAttribute(INSTANCE_COUNT, 1);
    //        if (instanceCount < 1) {
    //            instanceCount = 1;
    //        }
    //
    //        String ic = System.getProperty(BEAGLE_INSTANCE_COUNT);
    //        if (ic != null && ic.length() > 0) {
    //            instanceCount = Integer.parseInt(ic);
    //        }
    List<PatternList> patternLists = new ArrayList<PatternList>();
    List<SiteRateModel> siteRateModels = new ArrayList<SiteRateModel>();
    List<BranchModel> branchModels = new ArrayList<BranchModel>();
    boolean hasSinglePartition = false;
    PatternList patternList = (PatternList) xo.getChild(PatternList.class);
    if (patternList != null) {
        hasSinglePartition = true;
        patternLists.add(patternList);
        GammaSiteRateModel siteRateModel = (GammaSiteRateModel) xo.getChild(GammaSiteRateModel.class);
        siteRateModels.add(siteRateModel);
        FrequencyModel rootFreqModel = (FrequencyModel) xo.getChild(FrequencyModel.class);
        BranchModel branchModel = (BranchModel) xo.getChild(BranchModel.class);
        if (branchModel == null) {
            SubstitutionModel substitutionModel = (SubstitutionModel) xo.getChild(SubstitutionModel.class);
            if (substitutionModel == null) {
                substitutionModel = siteRateModel.getSubstitutionModel();
            }
            if (substitutionModel == null) {
                throw new XMLParseException("No substitution model available for partition in DataTreeLikelihood: " + xo.getId());
            }
            branchModel = new HomogeneousBranchModel(substitutionModel, rootFreqModel);
        }
        branchModels.add(branchModel);
    }
    int k = 0;
    for (int i = 0; i < xo.getChildCount(); i++) {
        if (xo.getChildName(i).equals(PARTITION)) {
            if (hasSinglePartition) {
                throw new XMLParseException("Either a single set of patterns should be given or multiple 'partitions' elements within DataTreeLikelihood: " + xo.getId());
            }
            k += 1;
            XMLObject cxo = (XMLObject) xo.getChild(i);
            patternList = (PatternList) cxo.getChild(PatternList.class);
            patternLists.add(patternList);
            GammaSiteRateModel siteRateModel = (GammaSiteRateModel) cxo.getChild(GammaSiteRateModel.class);
            siteRateModels.add(siteRateModel);
            FrequencyModel rootFreqModel = (FrequencyModel) xo.getChild(FrequencyModel.class);
            BranchModel branchModel = (BranchModel) cxo.getChild(BranchModel.class);
            if (branchModel == null) {
                SubstitutionModel substitutionModel = (SubstitutionModel) xo.getChild(SubstitutionModel.class);
                if (substitutionModel == null) {
                    substitutionModel = siteRateModel.getSubstitutionModel();
                }
                if (substitutionModel == null) {
                    throw new XMLParseException("No substitution model available for partition " + k + " in DataTreeLikelihood: " + xo.getId());
                }
                branchModel = new HomogeneousBranchModel(substitutionModel, rootFreqModel);
            }
            branchModels.add(branchModel);
        }
    }
    if (patternLists.size() == 0) {
        throw new XMLParseException("Either a single set of patterns should be given or multiple 'partitions' elements within DataTreeLikelihood: " + xo.getId());
    }
    TreeModel treeModel = (TreeModel) xo.getChild(TreeModel.class);
    BranchRateModel branchRateModel = (BranchRateModel) xo.getChild(BranchRateModel.class);
    if (branchRateModel == null) {
        branchRateModel = new DefaultBranchRateModel();
    }
    TipStatesModel tipStatesModel = (TipStatesModel) xo.getChild(TipStatesModel.class);
    PartialsRescalingScheme scalingScheme = PartialsRescalingScheme.DEFAULT;
    boolean delayScaling = true;
    if (xo.hasAttribute(SCALING_SCHEME)) {
        scalingScheme = PartialsRescalingScheme.parseFromString(xo.getStringAttribute(SCALING_SCHEME));
        if (scalingScheme == null) {
            throw new XMLParseException("Unknown scaling scheme '" + xo.getStringAttribute(SCALING_SCHEME) + "' in BeagleDataLikelihood object '" + xo.getId() + "'");
        }
    }
    if (xo.hasAttribute(DELAY_SCALING)) {
        delayScaling = xo.getBooleanAttribute(DELAY_SCALING);
    }
    if (tipStatesModel != null) {
        throw new XMLParseException("BEAGLE_INSTANCES option cannot be used with a TipStateModel (i.e., a sequence error model).");
    }
    return createTreeDataLikelihood(patternLists, branchModels, siteRateModels, treeModel, branchRateModel, null, useAmbiguities, scalingScheme, delayScaling);
}
Also used: FrequencyModel (dr.evomodel.substmodel.FrequencyModel), ArrayList (java.util.ArrayList), PatternList (dr.evolution.alignment.PatternList), HomogeneousBranchModel (dr.evomodel.branchmodel.HomogeneousBranchModel), BranchModel (dr.evomodel.branchmodel.BranchModel), GammaSiteRateModel (dr.evomodel.siteratemodel.GammaSiteRateModel), SiteRateModel (dr.evomodel.siteratemodel.SiteRateModel), PartialsRescalingScheme (dr.evomodel.treelikelihood.PartialsRescalingScheme), SubstitutionModel (dr.evomodel.substmodel.SubstitutionModel), DefaultBranchRateModel (dr.evomodel.branchratemodel.DefaultBranchRateModel), BranchRateModel (dr.evomodel.branchratemodel.BranchRateModel), TipStatesModel (dr.evomodel.tipstatesmodel.TipStatesModel), TreeModel (dr.evomodel.tree.TreeModel)
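
Both parsers above repeat the same fallback chain when no BranchModel is supplied: use an explicit SubstitutionModel child if present, otherwise the site rate model's own substitution model, otherwise fail. A hypothetical helper capturing that chain (the method and its name are this sketch's invention; it assumes the parser's existing imports):

// Hypothetical refactoring of the duplicated branch-model fallback above.
static BranchModel resolveBranchModel(BranchModel branchModel,
                                      SubstitutionModel explicitModel,
                                      GammaSiteRateModel siteRateModel,
                                      FrequencyModel rootFreqModel,
                                      String id) throws XMLParseException {
    if (branchModel != null) {
        return branchModel; // an explicit branch model always wins
    }
    SubstitutionModel substitutionModel = explicitModel;
    if (substitutionModel == null) {
        substitutionModel = siteRateModel.getSubstitutionModel();
    }
    if (substitutionModel == null) {
        throw new XMLParseException(
                "No substitution model available for partition in DataTreeLikelihood: " + id);
    }
    return new HomogeneousBranchModel(substitutionModel, rootFreqModel);
}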

Example 5 with SiteRateModel

Use of dr.evomodel.siteratemodel.SiteRateModel in project beast-mcmc by beast-dev.

In the class MarkovModulatedSubstitutionModelParser, the method parseXMLObject:

public Object parseXMLObject(XMLObject xo) throws XMLParseException {
    DataType dataType = DataTypeUtils.getDataType(xo);
    if (!(dataType instanceof HiddenDataType)) {
        throw new XMLParseException("Must construct " + MARKOV_MODULATED_MODEL + " with hidden data types");
    }
    Parameter switchingRates = (Parameter) xo.getElementFirstChild(SWITCHING_RATES);
    List<SubstitutionModel> substModels = new ArrayList<SubstitutionModel>();
    for (int i = 0; i < xo.getChildCount(); i++) {
        Object cxo = xo.getChild(i);
        if (cxo instanceof SubstitutionModel) {
            substModels.add((SubstitutionModel) cxo);
        }
    }
    boolean geometricRates = xo.getAttribute(GEOMETRIC_RATES, false);
    Parameter rateScalar = xo.hasChildNamed(RATE_SCALAR) ? (Parameter) xo.getChild(RATE_SCALAR).getChild(Parameter.class) : null;
    SiteRateModel siteRateModel = (SiteRateModel) xo.getChild(SiteRateModel.class);
    if (siteRateModel != null) {
        // Note: an exact match or any whole multiple of the category count is accepted.
        if (siteRateModel.getCategoryCount() != substModels.size() && substModels.size() % siteRateModel.getCategoryCount() != 0) {
            throw new XMLParseException("Number of substitution models must equal or be a multiple of the number of gamma categories in " + xo.getId());
        }
    }
    MarkovModulatedSubstitutionModel mmsm = new MarkovModulatedSubstitutionModel(xo.getId(), substModels, switchingRates, dataType, null, rateScalar, geometricRates, siteRateModel);
    if (xo.getAttribute(RENORMALIZE, false)) {
        mmsm.setNormalization(true);
    }
    return mmsm;
}
Also used: MarkovModulatedSubstitutionModel (dr.evomodel.substmodel.MarkovModulatedSubstitutionModel), SubstitutionModel (dr.evomodel.substmodel.SubstitutionModel), ArrayList (java.util.ArrayList), HiddenDataType (dr.evolution.datatype.HiddenDataType), DataType (dr.evolution.datatype.DataType), Parameter (dr.inference.model.Parameter), SiteRateModel (dr.evomodel.siteratemodel.SiteRateModel)
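
The category-count check above accepts either an exact match or a whole multiple: with a 4-category site rate model, 4, 8 or 12 substitution models pass, while 6 is rejected. A tiny standalone check mirroring that condition:

// Mirrors the validity condition above: the number of substitution models must
// be a whole multiple of the site rate model's category count. (An exact match
// already satisfies the modulo test, so the equality clause in the original
// condition is redundant.)
public class CategoryCountCheck {

    static boolean compatible(int categoryCount, int substModelCount) {
        return substModelCount % categoryCount == 0;
    }

    public static void main(String[] args) {
        int categoryCount = 4;
        for (int n : new int[] { 4, 6, 8, 12 }) {
            System.out.println(n + " substitution models, " + categoryCount
                    + " categories -> " + (compatible(categoryCount, n) ? "OK" : "rejected"));
        }
    }
}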

Aggregations

SiteRateModel (dr.evomodel.siteratemodel.SiteRateModel): 7 uses
ArrayList (java.util.ArrayList): 6 uses
GammaSiteRateModel (dr.evomodel.siteratemodel.GammaSiteRateModel): 5 uses
PatternList (dr.evolution.alignment.PatternList): 4 uses
BranchModel (dr.evomodel.branchmodel.BranchModel): 4 uses
HomogeneousBranchModel (dr.evomodel.branchmodel.HomogeneousBranchModel): 4 uses
BranchRateModel (dr.evomodel.branchratemodel.BranchRateModel): 4 uses
DefaultBranchRateModel (dr.evomodel.branchratemodel.DefaultBranchRateModel): 4 uses
FrequencyModel (dr.evomodel.substmodel.FrequencyModel): 4 uses
TreeModel (dr.evomodel.tree.TreeModel): 4 uses
Parameter (dr.inference.model.Parameter): 4 uses
SubstitutionModel (dr.evomodel.substmodel.SubstitutionModel): 3 uses
SimpleAlignment (dr.evolution.alignment.SimpleAlignment): 2 uses
SitePatterns (dr.evolution.alignment.SitePatterns): 2 uses
HKY (dr.evomodel.substmodel.nucleotide.HKY): 2 uses
TipStatesModel (dr.evomodel.tipstatesmodel.TipStatesModel): 2 uses
BeagleTreeLikelihood (dr.evomodel.treelikelihood.BeagleTreeLikelihood): 2 uses
PartialsRescalingScheme (dr.evomodel.treelikelihood.PartialsRescalingScheme): 2 uses
Patterns (dr.evolution.alignment.Patterns): 1 use
DataType (dr.evolution.datatype.DataType): 1 use