Example 1 with BeagleTreeLikelihood

Use of dr.evomodel.treelikelihood.BeagleTreeLikelihood in project beast-mcmc by beast-dev.

From the class SiteLogLikelihoodLoggerParser, the method parseXMLObject:

@Override
public Object parseXMLObject(XMLObject xo) throws XMLParseException {
    SiteLogLikelihoodLogger siteLogLikelihoodLogger;
    BeagleTreeLikelihood beagleTreeLikelihood = null;
    for (int i = 0; i < xo.getChildCount(); i++) {
        beagleTreeLikelihood = (BeagleTreeLikelihood) xo.getChild(i);
    }
    siteLogLikelihoodLogger = new SiteLogLikelihoodLogger(beagleTreeLikelihood);
    return siteLogLikelihoodLogger;
}
Also used: BeagleTreeLikelihood(dr.evomodel.treelikelihood.BeagleTreeLikelihood) SiteLogLikelihoodLogger(dr.app.beagle.tools.SiteLogLikelihoodLogger)
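The loop in this parser keeps whatever the last child element is and casts it, so a non-likelihood child would surface as a ClassCastException rather than a clean parse error. As a point of comparison only, a slightly more defensive variant (our sketch, not code from beast-mcmc) could filter on the child type and report a missing likelihood explicitly, using only the XMLObject calls already shown above:

@Override
public Object parseXMLObject(XMLObject xo) throws XMLParseException {
    BeagleTreeLikelihood beagleTreeLikelihood = null;
    for (int i = 0; i < xo.getChildCount(); i++) {
        Object child = xo.getChild(i);
        if (child instanceof BeagleTreeLikelihood) {
            // keep the (last) BeagleTreeLikelihood child element
            beagleTreeLikelihood = (BeagleTreeLikelihood) child;
        }
    }
    if (beagleTreeLikelihood == null) {
        throw new XMLParseException("Expected a BeagleTreeLikelihood child element");
    }
    return new SiteLogLikelihoodLogger(beagleTreeLikelihood);
}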

Example 2 with BeagleTreeLikelihood

Use of dr.evomodel.treelikelihood.BeagleTreeLikelihood in project beast-mcmc by beast-dev.

From the class DataLikelihoodTester2, the method main:

public static void main(String[] args) {
    // turn off logging to avoid screen noise...
    Logger logger = Logger.getLogger("dr");
    logger.setUseParentHandlers(false);
    SimpleAlignment alignment = createAlignment(sequences, Nucleotides.INSTANCE);
    TreeModel treeModel;
    try {
        treeModel = createSpecifiedTree("((human:0.1,chimp:0.1):0.1,gorilla:0.2)");
    } catch (Exception e) {
        throw new RuntimeException("Unable to parse Newick tree");
    }
    System.out.print("\nTest BeagleTreeLikelihood (kappa = 1): ");
    //substitutionModel
    Parameter freqs = new Parameter.Default(new double[] { 0.25, 0.25, 0.25, 0.25 });
    Parameter kappa = new Parameter.Default(HKYParser.KAPPA, 1.0, 0, 100);
    FrequencyModel f = new FrequencyModel(Nucleotides.INSTANCE, freqs);
    HKY hky = new HKY(kappa, f);
    //siteModel
    double alpha = 0.5;
    GammaSiteRateModel siteRateModel = new GammaSiteRateModel("gammaModel", alpha, 4);
    //        GammaSiteRateModel siteRateModel = new GammaSiteRateModel("siteRateModel");
    siteRateModel.setSubstitutionModel(hky);
    Parameter mu = new Parameter.Default(GammaSiteModelParser.SUBSTITUTION_RATE, 1.0, 0, Double.POSITIVE_INFINITY);
    siteRateModel.setRelativeRateParameter(mu);
    FrequencyModel f2 = new FrequencyModel(Nucleotides.INSTANCE, freqs);
    Parameter kappa2 = new Parameter.Default(HKYParser.KAPPA, 10.0, 0, 100);
    HKY hky2 = new HKY(kappa2, f2);
    GammaSiteRateModel siteRateModel2 = new GammaSiteRateModel("gammaModel", alpha, 4);
    siteRateModel2.setSubstitutionModel(hky2);
    siteRateModel2.setRelativeRateParameter(mu);
    //treeLikelihood
    SitePatterns patterns = new SitePatterns(alignment, null, 0, -1, 1, true);
    BranchModel branchModel = new HomogeneousBranchModel(siteRateModel.getSubstitutionModel(), siteRateModel.getSubstitutionModel().getFrequencyModel());
    BranchModel branchModel2 = new HomogeneousBranchModel(siteRateModel2.getSubstitutionModel(), siteRateModel2.getSubstitutionModel().getFrequencyModel());
    BranchRateModel branchRateModel = new DefaultBranchRateModel();
    BeagleTreeLikelihood treeLikelihood = new BeagleTreeLikelihood(patterns, treeModel, branchModel, siteRateModel, branchRateModel, null, false, PartialsRescalingScheme.AUTO, true);
    double logLikelihood = treeLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    System.out.print("\nTest BeagleDataLikelihoodDelegate (kappa = 1): ");
    BeagleDataLikelihoodDelegate dataLikelihoodDelegate = new BeagleDataLikelihoodDelegate(treeModel, patterns, branchModel, siteRateModel, false, PartialsRescalingScheme.NONE, false);
    TreeDataLikelihood treeDataLikelihood = new TreeDataLikelihood(dataLikelihoodDelegate, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    hky.setKappa(5.0);
    System.out.print("\nTest BeagleDataLikelihoodDelegate (kappa = 5): ");
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    System.out.print("\nTest BeagleDataLikelihoodDelegate (kappa = 10): ");
    dataLikelihoodDelegate = new BeagleDataLikelihoodDelegate(treeModel, patterns, branchModel2, siteRateModel2, false, PartialsRescalingScheme.NONE, false);
    treeDataLikelihood = new TreeDataLikelihood(dataLikelihoodDelegate, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    hky2.setKappa(11.0);
    System.out.print("\nTest BeagleDataLikelihoodDelegate (kappa = 11): ");
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    hky.setKappa(1.0);
    hky2.setKappa(10.0);
    MultiPartitionDataLikelihoodDelegate multiPartitionDataLikelihoodDelegate;
    System.out.print("\nTest MultiPartitionDataLikelihoodDelegate 1 partition (kappa = 1):");
    multiPartitionDataLikelihoodDelegate = new MultiPartitionDataLikelihoodDelegate(treeModel, Collections.singletonList((PatternList) patterns), Collections.singletonList((BranchModel) branchModel), Collections.singletonList((SiteRateModel) siteRateModel), true, PartialsRescalingScheme.NONE, false);
    treeDataLikelihood = new TreeDataLikelihood(multiPartitionDataLikelihoodDelegate, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    hky.setKappa(5.0);
    System.out.print("\nTest MultiPartitionDataLikelihoodDelegate 1 partition (kappa = 5):");
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    hky.setKappa(1.0);
    System.out.print("\nTest MultiPartitionDataLikelihoodDelegate 1 partition (kappa = 10):");
    multiPartitionDataLikelihoodDelegate = new MultiPartitionDataLikelihoodDelegate(treeModel, Collections.singletonList((PatternList) patterns), Collections.singletonList((BranchModel) branchModel2), Collections.singletonList((SiteRateModel) siteRateModel2), true, PartialsRescalingScheme.NONE, false);
    treeDataLikelihood = new TreeDataLikelihood(multiPartitionDataLikelihoodDelegate, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    System.out.print("\nTest MultiPartitionDataLikelihoodDelegate 2 partitions (kappa = 1, 10): ");
    List<PatternList> patternLists = new ArrayList<PatternList>();
    patternLists.add(patterns);
    patternLists.add(patterns);
    List<SiteRateModel> siteRateModels = new ArrayList<SiteRateModel>();
    siteRateModels.add(siteRateModel);
    siteRateModels.add(siteRateModel2);
    List<BranchModel> branchModels = new ArrayList<BranchModel>();
    branchModels.add(branchModel);
    branchModels.add(branchModel2);
    multiPartitionDataLikelihoodDelegate = new MultiPartitionDataLikelihoodDelegate(treeModel, patternLists, branchModels, siteRateModels, true, PartialsRescalingScheme.NONE, false);
    treeDataLikelihood = new TreeDataLikelihood(multiPartitionDataLikelihoodDelegate, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + " (NOT OK: this is 2x the logLikelihood of the 2nd partition)\n\n");
    System.exit(0);
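    // NOTE: the additional tests below never execute while this System.exit(0) call remains in place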
    //START ADDITIONAL TEST #1 - Guy Baele
    System.out.println("-- Test #1 SiteRateModels -- ");
    //alpha in partition 1 reject followed by alpha in partition 2 reject
    System.out.print("Adjust alpha in partition 1: ");
    siteRateModel.setAlpha(0.4);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    System.out.print("Return alpha in partition 1 to original value: ");
    siteRateModel.setAlpha(0.5);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + " (i.e. reject: OK)\n");
    System.out.print("Adjust alpha in partition 2: ");
    siteRateModel2.setAlpha(0.35);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    System.out.print("Return alpha in partition 2 to original value: ");
    siteRateModel2.setAlpha(0.5);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + " (i.e. reject: OK)\n");
    //alpha in partition 1 accept followed by alpha in partition 2 accept
    System.out.print("Adjust alpha in partition 1: ");
    siteRateModel.setAlpha(0.4);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    System.out.print("Adjust alpha in partition 2: ");
    siteRateModel2.setAlpha(0.35);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + " (NOT OK: same logLikelihood as only setting alpha in partition 2)");
    System.out.print("Return alpha in partition 1 to original value: ");
    siteRateModel.setAlpha(0.5);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + " (NOT OK: alpha in partition 2 has not been returned to original value yet)");
    System.out.print("Return alpha in partition 2 to original value: ");
    siteRateModel2.setAlpha(0.5);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + "\n");
    //adjusting alphas in both partitions without explicitly calling getLogLikelihood() in between
    System.out.print("Adjust both alphas in partitions 1 and 2: ");
    siteRateModel.setAlpha(0.4);
    siteRateModel2.setAlpha(0.35);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    System.out.print("Return alpha in partition 2 to original value: ");
    siteRateModel2.setAlpha(0.5);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + " (NOT OK: alpha in partition 1 has not been returned to original value yet)");
    System.out.print("Return alpha in partition 1 to original value: ");
    siteRateModel.setAlpha(0.5);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + "\n\n");
    //END ADDITIONAL TEST - Guy Baele
    //START ADDITIONAL TEST #2 - Guy Baele
    System.out.println("-- Test #2 SiteRateModels -- ");
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    //1 siteRateModel shared across 2 partitions
    siteRateModels = new ArrayList<SiteRateModel>();
    siteRateModels.add(siteRateModel);
    multiPartitionDataLikelihoodDelegate = new MultiPartitionDataLikelihoodDelegate(treeModel, patternLists, branchModels, siteRateModels, true, PartialsRescalingScheme.NONE, false);
    treeDataLikelihood = new TreeDataLikelihood(multiPartitionDataLikelihoodDelegate, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + "\n");
    System.out.print("Adjust alpha in shared siteRateModel: ");
    siteRateModel.setAlpha(0.4);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + " (NOT OK: same logLikelihood as only adjusted alpha for partition 1)");
    siteRateModel.setAlpha(0.5);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + "\n\n");
    //END ADDITIONAL TEST - Guy Baele
    //START ADDITIONAL TEST #3 - Guy Baele
    System.out.println("-- Test #3 SiteRateModels -- ");
    siteRateModel = new GammaSiteRateModel("gammaModel");
    siteRateModel.setSubstitutionModel(hky);
    siteRateModel.setRelativeRateParameter(mu);
    siteRateModel2 = new GammaSiteRateModel("gammaModel2");
    siteRateModel2.setSubstitutionModel(hky2);
    siteRateModel2.setRelativeRateParameter(mu);
    siteRateModels = new ArrayList<SiteRateModel>();
    siteRateModels.add(siteRateModel);
    siteRateModels.add(siteRateModel2);
    multiPartitionDataLikelihoodDelegate = new MultiPartitionDataLikelihoodDelegate(treeModel, patternLists, branchModels, siteRateModels, true, PartialsRescalingScheme.NONE, false);
    treeDataLikelihood = new TreeDataLikelihood(multiPartitionDataLikelihoodDelegate, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + "\n");
    System.out.print("Adjust kappa in partition 1: ");
    hky.setKappa(5.0);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + " (NOT OK: logLikelihood has not changed?)");
    System.out.print("Return kappa in partition 1 to original value: ");
    hky.setKappa(1.0);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + "\n");
    System.out.print("Adjust kappa in partition 2: ");
    hky2.setKappa(11.0);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    System.out.print("Return kappa in partition 2 to original value: ");
    hky2.setKappa(10.0);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + " (i.e. reject: OK)\n\n");
    //END ADDITIONAL TEST - Guy Baele
    //START ADDITIONAL TEST #4 - Guy Baele
    System.out.println("-- Test #4 SiteRateModels -- ");
    SimpleAlignment secondAlignment = createAlignment(moreSequences, Nucleotides.INSTANCE);
    SitePatterns morePatterns = new SitePatterns(secondAlignment, null, 0, -1, 1, true);
    BeagleDataLikelihoodDelegate dataLikelihoodDelegateOne = new BeagleDataLikelihoodDelegate(treeModel, patterns, branchModel, siteRateModel, false, PartialsRescalingScheme.NONE, false);
    TreeDataLikelihood treeDataLikelihoodOne = new TreeDataLikelihood(dataLikelihoodDelegateOne, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihoodOne.getLogLikelihood();
    System.out.println("\nBeagleDataLikelihoodDelegate logLikelihood partition 1 (kappa = 1) = " + logLikelihood);
    hky.setKappa(10.0);
    logLikelihood = treeDataLikelihoodOne.getLogLikelihood();
    System.out.println("BeagleDataLikelihoodDelegate logLikelihood partition 1 (kappa = 10) = " + logLikelihood);
    hky.setKappa(1.0);
    BeagleDataLikelihoodDelegate dataLikelihoodDelegateTwo = new BeagleDataLikelihoodDelegate(treeModel, morePatterns, branchModel2, siteRateModel2, false, PartialsRescalingScheme.NONE, false);
    TreeDataLikelihood treeDataLikelihoodTwo = new TreeDataLikelihood(dataLikelihoodDelegateTwo, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihoodTwo.getLogLikelihood();
    System.out.println("BeagleDataLikelihoodDelegate logLikelihood partition 2 (kappa = 10) = " + logLikelihood + "\n");
    multiPartitionDataLikelihoodDelegate = new MultiPartitionDataLikelihoodDelegate(treeModel, Collections.singletonList((PatternList) patterns), Collections.singletonList((BranchModel) branchModel), Collections.singletonList((SiteRateModel) siteRateModel), true, PartialsRescalingScheme.NONE, false);
    treeDataLikelihood = new TreeDataLikelihood(multiPartitionDataLikelihoodDelegate, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.print("Test MultiPartitionDataLikelihoodDelegate 1st partition (kappa = 1):");
    System.out.println("logLikelihood = " + logLikelihood);
    hky.setKappa(10.0);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.print("Test MultiPartitionDataLikelihoodDelegate 1st partition (kappa = 10):");
    System.out.println("logLikelihood = " + logLikelihood);
    hky.setKappa(1.0);
    multiPartitionDataLikelihoodDelegate = new MultiPartitionDataLikelihoodDelegate(treeModel, Collections.singletonList((PatternList) morePatterns), Collections.singletonList((BranchModel) branchModel2), Collections.singletonList((SiteRateModel) siteRateModel2), true, PartialsRescalingScheme.NONE, false);
    treeDataLikelihood = new TreeDataLikelihood(multiPartitionDataLikelihoodDelegate, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.print("Test MultiPartitionDataLikelihoodDelegate 2nd partition (kappa = 10):");
    System.out.println("logLikelihood = " + logLikelihood + "\n");
    patternLists = new ArrayList<PatternList>();
    patternLists.add(patterns);
    patternLists.add(morePatterns);
    multiPartitionDataLikelihoodDelegate = new MultiPartitionDataLikelihoodDelegate(treeModel, patternLists, branchModels, siteRateModels, true, PartialsRescalingScheme.NONE, false);
    treeDataLikelihood = new TreeDataLikelihood(multiPartitionDataLikelihoodDelegate, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.print("Test MultiPartitionDataLikelihoodDelegate 2 partitions (kappa = 1, 10): ");
    System.out.println("logLikelihood = " + logLikelihood + " (NOT OK: should be the sum of both separate logLikelihoods)\nKappa value of partition 2 is used to compute logLikelihood for both partitions?");
//END ADDITIONAL TEST - Guy Baele
}
Also used: FrequencyModel(dr.evomodel.substmodel.FrequencyModel) ArrayList(java.util.ArrayList) PatternList(dr.evolution.alignment.PatternList) Logger(java.util.logging.Logger) HomogeneousBranchModel(dr.evomodel.branchmodel.HomogeneousBranchModel) BranchModel(dr.evomodel.branchmodel.BranchModel) DefaultBranchRateModel(dr.evomodel.branchratemodel.DefaultBranchRateModel) SiteRateModel(dr.evomodel.siteratemodel.SiteRateModel) GammaSiteRateModel(dr.evomodel.siteratemodel.GammaSiteRateModel) TreeModel(dr.evomodel.tree.TreeModel) SimpleAlignment(dr.evolution.alignment.SimpleAlignment) SitePatterns(dr.evolution.alignment.SitePatterns) BeagleTreeLikelihood(dr.evomodel.treelikelihood.BeagleTreeLikelihood) BranchRateModel(dr.evomodel.branchratemodel.BranchRateModel) HKY(dr.evomodel.substmodel.nucleotide.HKY) Parameter(dr.inference.model.Parameter)
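Several of the "NOT OK" printouts above (the initial two-partition run and Test #4) come down to one invariant: the log likelihood reported by a two-partition MultiPartitionDataLikelihoodDelegate should equal the sum of the two single-partition likelihoods. A minimal helper that states that check explicitly might look like the sketch below; the method name and tolerance are ours, and only getLogLikelihood() comes from the code above.

static void checkPartitionSum(TreeDataLikelihood combined, TreeDataLikelihood partitionOne, TreeDataLikelihood partitionTwo) {
    double sum = partitionOne.getLogLikelihood() + partitionTwo.getLogLikelihood();
    double joint = combined.getLogLikelihood();
    if (Math.abs(joint - sum) > 1e-10) {
        // this is the condition the test prints as "NOT OK"
        System.out.println("Mismatch: joint = " + joint + ", sum of partitions = " + sum);
    }
}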

Example 3 with BeagleTreeLikelihood

Use of dr.evomodel.treelikelihood.BeagleTreeLikelihood in project beast-mcmc by beast-dev.

From the class ThreadedCompoundLikelihood, the method getLogLikelihood:

public double getLogLikelihood() {
    double logLikelihood = 0.0;
    boolean knownLikelihoods = true;
    for (Likelihood likelihood : likelihoods) {
        if (!((BeagleTreeLikelihood) likelihood).isLikelihoodKnown()) {
            knownLikelihoods = false;
            break;
        } else {
            logLikelihood += likelihood.getLogLikelihood();
        }
    }
    if (knownLikelihoods) {
        if (DEBUG) {
            //System.err.println("BTLs are known; total logLikelihood = " + logLikelihood);
            //double check if the total loglikelihood will be identical by recalculating
            double backupLikelihood = logLikelihood;
            logLikelihood = 0.0;
            if (threads == null) {
                // first call so setup a thread for each likelihood...
                threads = new LikelihoodThread[likelihoodCallers.size()];
                for (int i = 0; i < threads.length; i++) {
                    // and start them running...
                    threads[i] = new LikelihoodThread();
                    threads[i].start();
                }
            }
            for (int i = 0; i < threads.length; i++) {
                // set the caller which will be called in each thread
                threads[i].setCaller(likelihoodCallers.get(i));
            }
            for (LikelihoodThread thread : threads) {
                // now wait for the results to be set...
                Double result = thread.getResult();
                while (result == null) {
                    result = thread.getResult();
                }
                logLikelihood += result;
            }
            if (backupLikelihood != logLikelihood) {
                //System.err.println("Likelihood recalculation does not return stored likelihood");
                throw new RuntimeException("Likelihood recalculation does not return stored likelihood");
            }
        }
    } else {
        //System.err.println("BTLs are not known: recalculate");
        logLikelihood = 0.0;
        if (threads == null) {
            //System.err.println("threads == null");
            // first call so setup a thread for each likelihood...
            threads = new LikelihoodThread[likelihoodCallers.size()];
            //System.err.println("LikelihoodThreads: " + threads.length);
            for (int i = 0; i < threads.length; i++) {
                // and start them running...
                threads[i] = new LikelihoodThread();
                threads[i].start();
            }
        }
        //double setStart = System.nanoTime();
        for (int i = 0; i < threads.length; i++) {
            // set the caller which will be called in each thread
            threads[i].setCaller(likelihoodCallers.get(i));
        }
        //start = System.nanoTime();
        for (LikelihoodThread thread : threads) {
            //double testone = System.nanoTime();
            // now wait for the results to be set...
            Double result = thread.getResult();
            while (result == null) {
                result = thread.getResult();
            }
            logLikelihood += result;
        //double testtwo = System.nanoTime();
        //System.err.println(thread.getName() + " - result = " + result + ": " + (testtwo - testone));
        }
    //end = System.nanoTime();
    //double end = System.nanoTime();
    //System.err.println("TCL total time: " + (end - start));
    }
    // * weightFactor;
    return logLikelihood;
}
Also used: BeagleTreeLikelihood(dr.evomodel.treelikelihood.BeagleTreeLikelihood)
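The DEBUG branch and the recalculation branch above repeat the same thread set-up and busy-wait polling. Below is a sketch of that shared block factored into one helper, assuming only the LikelihoodThread methods already used here (start, setCaller, getResult); the helper name is ours:

private double sumThreadedLogLikelihoods() {
    if (threads == null) {
        // first call, so set up and start one thread per likelihood caller
        threads = new LikelihoodThread[likelihoodCallers.size()];
        for (int i = 0; i < threads.length; i++) {
            threads[i] = new LikelihoodThread();
            threads[i].start();
        }
    }
    for (int i = 0; i < threads.length; i++) {
        // set the caller which will be evaluated in each thread
        threads[i].setCaller(likelihoodCallers.get(i));
    }
    double total = 0.0;
    for (LikelihoodThread thread : threads) {
        // spin until the worker publishes its result
        Double result = thread.getResult();
        while (result == null) {
            result = thread.getResult();
        }
        total += result;
    }
    return total;
}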

Example 4 with BeagleTreeLikelihood

Use of dr.evomodel.treelikelihood.BeagleTreeLikelihood in project beast-mcmc by beast-dev.

From the class BalancedBeagleTreeLikelihoodParser, the method parseXMLObject:

public Object parseXMLObject(XMLObject xo) throws XMLParseException {
    boolean useAmbiguities = xo.getAttribute(BeagleTreeLikelihoodParser.USE_AMBIGUITIES, false);
    /*int instanceCount = xo.getAttribute(INSTANCE_COUNT, 1);
        if (instanceCount < 1) {
            instanceCount = 1;
        }

        String ic = System.getProperty(BEAGLE_INSTANCE_COUNT);
        if (ic != null && ic.length() > 0) {
            instanceCount = Integer.parseInt(ic);
        }*/
    PatternList patternList = (PatternList) xo.getChild(PatternList.class);
    TreeModel treeModel = (TreeModel) xo.getChild(TreeModel.class);
    GammaSiteRateModel siteRateModel = (GammaSiteRateModel) xo.getChild(GammaSiteRateModel.class);
    FrequencyModel rootFreqModel = (FrequencyModel) xo.getChild(FrequencyModel.class);
    BranchModel branchModel = (BranchModel) xo.getChild(BranchModel.class);
    if (branchModel == null) {
        SubstitutionModel substitutionModel = (SubstitutionModel) xo.getChild(SubstitutionModel.class);
        if (substitutionModel == null) {
            substitutionModel = siteRateModel.getSubstitutionModel();
        }
        if (substitutionModel == null) {
            throw new XMLParseException("No substitution model available for TreeLikelihood: " + xo.getId());
        }
        branchModel = new HomogeneousBranchModel(substitutionModel, rootFreqModel);
    }
    BranchRateModel branchRateModel = (BranchRateModel) xo.getChild(BranchRateModel.class);
    TipStatesModel tipStatesModel = (TipStatesModel) xo.getChild(TipStatesModel.class);
    //        if (xo.getChild(TipStatesModel.class) != null) {
    //            throw new XMLParseException("Sequence Error Models are not supported under BEAGLE yet. Please use Native BEAST Likelihood.");
    //        }
    PartialsRescalingScheme scalingScheme = PartialsRescalingScheme.DEFAULT;
    if (xo.hasAttribute(BeagleTreeLikelihoodParser.SCALING_SCHEME)) {
        //            scalingScheme = PartialsRescalingScheme.parseFromString(xo.getStringAttribute(BeagleTreeLikelihoodParser.SCALING_SCHEME));
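        // NOTE: with the parseFromString call above commented out, scalingScheme keeps its DEFAULT value, so the null check below never fires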
        if (scalingScheme == null)
            throw new XMLParseException("Unknown scaling scheme '" + xo.getStringAttribute(BeagleTreeLikelihoodParser.SCALING_SCHEME) + "' in " + "OldBeagleTreeLikelihood object '" + xo.getId());
    }
    boolean delayScaling = true;
    Map<Set<String>, Parameter> partialsRestrictions = null;
    if (xo.hasChildNamed(PARTIALS_RESTRICTION)) {
        XMLObject cxo = xo.getChild(PARTIALS_RESTRICTION);
        TaxonList taxonList = (TaxonList) cxo.getChild(TaxonList.class);
        //            Parameter parameter = (Parameter) cxo.getChild(Parameter.class);
        try {
            TreeUtils.getLeavesForTaxa(treeModel, taxonList);
        } catch (TreeUtils.MissingTaxonException e) {
            throw new XMLParseException("Unable to parse taxon list: " + e.getMessage());
        }
        throw new XMLParseException("Restricting internal nodes is not yet implemented.  Contact Marc");
    }
    /*if (instanceCount == 1 || patternList.getPatternCount() < instanceCount) {
            return createTreeLikelihood(
                    patternList,
                    treeModel,
                    branchModel,
                    siteRateModel,
                    branchRateModel,
                    tipStatesModel,
                    useAmbiguities,
                    scalingScheme,
                    partialsRestrictions,
                    xo
            );
        }*/
    //first run a test for instanceCount == 1
    System.err.println("\nTesting instanceCount == 1");
    Likelihood baseLikelihood = createTreeLikelihood(patternList, treeModel, branchModel, siteRateModel, branchRateModel, tipStatesModel, useAmbiguities, scalingScheme, delayScaling, partialsRestrictions, xo);
    double start = System.nanoTime();
    for (int i = 0; i < TEST_RUNS; i++) {
        baseLikelihood.makeDirty();
        baseLikelihood.getLogLikelihood();
    }
    double end = System.nanoTime();
    double baseResult = end - start;
    System.err.println("Evaluation took: " + baseResult);
    if (!(patternList instanceof SitePatterns)) {
        throw new XMLParseException("BEAGLE_INSTANCES option cannot be used with BEAUti-selected codon partitioning.");
    }
    if (tipStatesModel != null) {
        throw new XMLParseException("BEAGLE_INSTANCES option cannot be used with a TipStateModel (i.e., a sequence error model).");
    }
    //List<Likelihood> likelihoods = new ArrayList<Likelihood>();
    List<Likelihood> likelihoods = null;
    CompoundLikelihood compound = null;
    int instanceCount = 2;
    boolean optimal = false;
    while (optimal == false) {
        System.err.println("\nCreating instanceCount == " + instanceCount);
        likelihoods = new ArrayList<Likelihood>();
        for (int i = 0; i < instanceCount; i++) {
            Patterns subPatterns = new Patterns((SitePatterns) patternList, 0, 0, 1, i, instanceCount);
            AbstractTreeLikelihood treeLikelihood = createTreeLikelihood(subPatterns, treeModel, branchModel, siteRateModel, branchRateModel, null, useAmbiguities, scalingScheme, delayScaling, partialsRestrictions, xo);
            treeLikelihood.setId(xo.getId() + "_" + instanceCount);
            likelihoods.add(treeLikelihood);
        }
        //construct compoundLikelihood
        compound = new CompoundLikelihood(instanceCount, likelihoods);
        //test timings 
        System.err.println("\nTesting instanceCount == " + instanceCount);
        start = System.nanoTime();
        for (int i = 0; i < TEST_RUNS; i++) {
            compound.makeDirty();
            compound.getLogLikelihood();
        }
        end = System.nanoTime();
        double newResult = end - start;
        System.err.println("Evaluation took: " + newResult);
        if (baseResult / newResult > TEST_CUTOFF) {
            instanceCount++;
            baseResult = newResult;
        } else {
            optimal = true;
            instanceCount--;
            System.err.println("\nCreating final BeagleTreeLikelihood with instanceCount: " + instanceCount);
            likelihoods = new ArrayList<Likelihood>();
            for (int i = 0; i < instanceCount; i++) {
                Patterns subPatterns = new Patterns((SitePatterns) patternList, 0, 0, 1, i, instanceCount);
                AbstractTreeLikelihood treeLikelihood = createTreeLikelihood(subPatterns, treeModel, branchModel, siteRateModel, branchRateModel, null, useAmbiguities, scalingScheme, delayScaling, partialsRestrictions, xo);
                treeLikelihood.setId(xo.getId() + "_" + instanceCount);
                likelihoods.add(treeLikelihood);
            }
            //construct compoundLikelihood
            compound = new CompoundLikelihood(instanceCount, likelihoods);
        }
    }
    return compound;
/*for (int i = 0; i < instanceCount; i++) {

            Patterns subPatterns = new Patterns((SitePatterns)patternList, 0, 0, 1, i, instanceCount);

            AbstractTreeLikelihood treeLikelihood = createTreeLikelihood(
                    subPatterns,
                    treeModel,
                    branchModel,
                    siteRateModel,
                    branchRateModel,
                    null,
                    useAmbiguities,
                    scalingScheme,
                    partialsRestrictions,
                    xo);
            treeLikelihood.setId(xo.getId() + "_" + instanceCount);
            likelihoods.add(treeLikelihood);
        }

        return new CompoundLikelihood(likelihoods);*/
}
Also used: FrequencyModel(dr.evomodel.substmodel.FrequencyModel) Set(java.util.Set) CompoundLikelihood(dr.inference.model.CompoundLikelihood) Likelihood(dr.inference.model.Likelihood) BeagleTreeLikelihood(dr.evomodel.treelikelihood.BeagleTreeLikelihood) AbstractTreeLikelihood(dr.evomodel.treelikelihood.AbstractTreeLikelihood) PatternList(dr.evolution.alignment.PatternList) PartialsRescalingScheme(dr.evomodel.treelikelihood.PartialsRescalingScheme) BranchModel(dr.evomodel.branchmodel.BranchModel) HomogeneousBranchModel(dr.evomodel.branchmodel.HomogeneousBranchModel) TreeModel(dr.evomodel.tree.TreeModel) Patterns(dr.evolution.alignment.Patterns) SitePatterns(dr.evolution.alignment.SitePatterns) TreeUtils(dr.evolution.tree.TreeUtils) TaxonList(dr.evolution.util.TaxonList) GammaSiteRateModel(dr.evomodel.siteratemodel.GammaSiteRateModel) SubstitutionModel(dr.evomodel.substmodel.SubstitutionModel) TipStatesModel(dr.evomodel.tipstatesmodel.TipStatesModel) BranchRateModel(dr.evomodel.branchratemodel.BranchRateModel) Parameter(dr.inference.model.Parameter)
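The parser above benchmarks the single-instance likelihood and then keeps increasing instanceCount until the speed-up over the previous configuration drops to TEST_CUTOFF or below. The timing loop appears twice; here is a sketch of it as one reusable helper, using only the makeDirty() and getLogLikelihood() calls already shown (the helper name is ours):

private static double timeEvaluations(Likelihood likelihood, int runs) {
    double start = System.nanoTime();
    for (int i = 0; i < runs; i++) {
        likelihood.makeDirty();          // force a full recalculation
        likelihood.getLogLikelihood();
    }
    return System.nanoTime() - start;    // elapsed nanoseconds for 'runs' evaluations
}

With that helper, the search loop would compare timeEvaluations(baseLikelihood, TEST_RUNS) against timeEvaluations(compound, TEST_RUNS) for each candidate instanceCount.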

Example 5 with BeagleTreeLikelihood

Use of dr.evomodel.treelikelihood.BeagleTreeLikelihood in project beast-mcmc by beast-dev.

From the class BeagleSeqSimTest, the method simulateCodon:

// END: simulateAminoAcid
static void simulateCodon() {
    try {
        boolean calculateLikelihood = true;
        System.out.println("Test case 6: simulate codons");
        MathUtils.setSeed(666);
        int sequenceLength = 10;
        ArrayList<Partition> partitionsList = new ArrayList<Partition>();
        // create tree
        NewickImporter importer = new NewickImporter("(SimSeq1:73.7468,(SimSeq2:25.256989999999995,SimSeq3:45.256989999999995):18.48981);");
        Tree tree = importer.importTree(null);
        TreeModel treeModel = new TreeModel(tree);
        // create site model
        GammaSiteRateModel siteRateModel = new GammaSiteRateModel("siteModel");
        // create branch rate model
        BranchRateModel branchRateModel = new DefaultBranchRateModel();
        // create Frequency Model
        Parameter freqs = new Parameter.Default(Utils.UNIFORM_CODON_FREQUENCIES);
        FrequencyModel freqModel = new FrequencyModel(Codons.UNIVERSAL, freqs);
        // create substitution model
        Parameter alpha = new Parameter.Default(1, 10);
        Parameter beta = new Parameter.Default(1, 5);
        //			Parameter kappa = new Parameter.Default(1, 1);
        MG94CodonModel mg94 = new MG94CodonModel(Codons.UNIVERSAL, alpha, beta, freqModel);
        HomogeneousBranchModel substitutionModel = new HomogeneousBranchModel(mg94);
        // create partition
        Partition partition1 = new Partition(treeModel,
                substitutionModel,
                siteRateModel,
                branchRateModel,
                freqModel,
                0, // from
                sequenceLength - 1, // to
                1); // every
        partitionsList.add(partition1);
        // feed to sequence simulator and generate data
        BeagleSequenceSimulator simulator = new BeagleSequenceSimulator(partitionsList);
        Alignment alignment = simulator.simulate(simulateInPar, false);
        System.out.println(alignment.toString());
        if (calculateLikelihood) {
            // NewBeagleSequenceLikelihood nbtl = new
            // NewBeagleSequenceLikelihood(alignment, treeModel,
            // substitutionModel, (SiteModel) siteRateModel,
            // branchRateModel, null, false,
            // PartialsRescalingScheme.DEFAULT);
            ConvertAlignment convert = new ConvertAlignment(Nucleotides.INSTANCE, GeneticCode.UNIVERSAL, alignment);
            BeagleTreeLikelihood nbtl = new BeagleTreeLikelihood(convert,
                    treeModel,
                    substitutionModel,
                    siteRateModel,
                    branchRateModel,
                    null,
                    false,
                    PartialsRescalingScheme.DEFAULT,
                    true);
            System.out.println("likelihood = " + nbtl.getLogLikelihood());
        }
    } catch (Exception e) {
        e.printStackTrace();
        System.exit(-1);
    }
// END: try-catch
}
Also used: FrequencyModel(dr.evomodel.substmodel.FrequencyModel) Partition(dr.app.beagle.tools.Partition) BeagleTreeLikelihood(dr.evomodel.treelikelihood.BeagleTreeLikelihood) MG94CodonModel(dr.evomodel.substmodel.codon.MG94CodonModel) ArrayList(java.util.ArrayList) HomogeneousBranchModel(dr.evomodel.branchmodel.HomogeneousBranchModel) GammaSiteRateModel(dr.evomodel.siteratemodel.GammaSiteRateModel) BeagleSequenceSimulator(dr.app.beagle.tools.BeagleSequenceSimulator) ImportException(dr.evolution.io.Importer.ImportException) IOException(java.io.IOException) DefaultBranchRateModel(dr.evomodel.branchratemodel.DefaultBranchRateModel) TreeModel(dr.evomodel.tree.TreeModel) ConvertAlignment(dr.evolution.alignment.ConvertAlignment) Alignment(dr.evolution.alignment.Alignment) SimpleAlignment(dr.evolution.alignment.SimpleAlignment) BranchRateModel(dr.evomodel.branchratemodel.BranchRateModel) NewickImporter(dr.evolution.io.NewickImporter) Tree(dr.evolution.tree.Tree) Parameter(dr.inference.model.Parameter)
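The same Partition/BeagleSequenceSimulator pipeline also works for plain nucleotide data; only the frequency and substitution models change. The sketch below is ours, assembled from constructors that already appear on this page (the HKY and frequency set-up comes from Example 2), and it assumes the two boolean flags to simulate() behave as in simulateCodon() above:

static void simulateNucleotides() {
    try {
        MathUtils.setSeed(666);
        int sequenceLength = 10;
        // same toy tree, site model and branch rates as simulateCodon()
        NewickImporter importer = new NewickImporter("(SimSeq1:73.7468,(SimSeq2:25.256989999999995,SimSeq3:45.256989999999995):18.48981);");
        TreeModel treeModel = new TreeModel(importer.importTree(null));
        GammaSiteRateModel siteRateModel = new GammaSiteRateModel("siteModel");
        BranchRateModel branchRateModel = new DefaultBranchRateModel();
        // nucleotide frequencies and an HKY substitution model, as in Example 2
        Parameter freqs = new Parameter.Default(new double[] { 0.25, 0.25, 0.25, 0.25 });
        FrequencyModel freqModel = new FrequencyModel(Nucleotides.INSTANCE, freqs);
        Parameter kappa = new Parameter.Default(HKYParser.KAPPA, 1.0, 0, 100);
        HomogeneousBranchModel branchModel = new HomogeneousBranchModel(new HKY(kappa, freqModel));
        // one partition covering the whole simulated sequence
        Partition partition = new Partition(treeModel, branchModel, siteRateModel, branchRateModel, freqModel,
                0, // from
                sequenceLength - 1, // to
                1); // every
        ArrayList<Partition> partitionsList = new ArrayList<Partition>();
        partitionsList.add(partition);
        BeagleSequenceSimulator simulator = new BeagleSequenceSimulator(partitionsList);
        Alignment alignment = simulator.simulate(false, false); // flags mirror the simulateCodon() call
        System.out.println(alignment.toString());
    } catch (Exception e) {
        e.printStackTrace();
    }
}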

Aggregations

BeagleTreeLikelihood (dr.evomodel.treelikelihood.BeagleTreeLikelihood): 10 usages
BranchRateModel (dr.evomodel.branchratemodel.BranchRateModel): 7 usages
GammaSiteRateModel (dr.evomodel.siteratemodel.GammaSiteRateModel): 7 usages
TreeModel (dr.evomodel.tree.TreeModel): 7 usages
HomogeneousBranchModel (dr.evomodel.branchmodel.HomogeneousBranchModel): 6 usages
FrequencyModel (dr.evomodel.substmodel.FrequencyModel): 6 usages
Parameter (dr.inference.model.Parameter): 6 usages
BranchModel (dr.evomodel.branchmodel.BranchModel): 5 usages
SitePatterns (dr.evolution.alignment.SitePatterns): 4 usages
DefaultBranchRateModel (dr.evomodel.branchratemodel.DefaultBranchRateModel): 4 usages
ArrayList (java.util.ArrayList): 4 usages
BeagleSequenceSimulator (dr.app.beagle.tools.BeagleSequenceSimulator): 3 usages
Partition (dr.app.beagle.tools.Partition): 3 usages
Alignment (dr.evolution.alignment.Alignment): 3 usages
PatternList (dr.evolution.alignment.PatternList): 3 usages
SimpleAlignment (dr.evolution.alignment.SimpleAlignment): 3 usages
NewickImporter (dr.evolution.io.NewickImporter): 3 usages
SubstitutionModel (dr.evomodel.substmodel.SubstitutionModel): 3 usages
HKY (dr.evomodel.substmodel.nucleotide.HKY): 3 usages
ConvertAlignment (dr.evolution.alignment.ConvertAlignment): 2 usages