
Example 1 with SitePatterns

Use of dr.evolution.alignment.SitePatterns in the beast-mcmc project by beast-dev.

From the class DataLikelihoodTester2, method main:

public static void main(String[] args) {
    // turn off logging to avoid screen noise...
    Logger logger = Logger.getLogger("dr");
    logger.setUseParentHandlers(false);
    SimpleAlignment alignment = createAlignment(sequences, Nucleotides.INSTANCE);
    TreeModel treeModel;
    try {
        treeModel = createSpecifiedTree("((human:0.1,chimp:0.1):0.1,gorilla:0.2)");
    } catch (Exception e) {
        throw new RuntimeException("Unable to parse Newick tree", e);
    }
    System.out.print("\nTest BeagleTreeLikelihood (kappa = 1): ");
    //substitutionModel
    Parameter freqs = new Parameter.Default(new double[] { 0.25, 0.25, 0.25, 0.25 });
    Parameter kappa = new Parameter.Default(HKYParser.KAPPA, 1.0, 0, 100);
    FrequencyModel f = new FrequencyModel(Nucleotides.INSTANCE, freqs);
    HKY hky = new HKY(kappa, f);
    //siteModel
    double alpha = 0.5;
    GammaSiteRateModel siteRateModel = new GammaSiteRateModel("gammaModel", alpha, 4);
    //        GammaSiteRateModel siteRateModel = new GammaSiteRateModel("siteRateModel");
    siteRateModel.setSubstitutionModel(hky);
    Parameter mu = new Parameter.Default(GammaSiteModelParser.SUBSTITUTION_RATE, 1.0, 0, Double.POSITIVE_INFINITY);
    siteRateModel.setRelativeRateParameter(mu);
    FrequencyModel f2 = new FrequencyModel(Nucleotides.INSTANCE, freqs);
    Parameter kappa2 = new Parameter.Default(HKYParser.KAPPA, 10.0, 0, 100);
    HKY hky2 = new HKY(kappa2, f2);
    GammaSiteRateModel siteRateModel2 = new GammaSiteRateModel("gammaModel", alpha, 4);
    siteRateModel2.setSubstitutionModel(hky2);
    siteRateModel2.setRelativeRateParameter(mu);
    //treeLikelihood
    SitePatterns patterns = new SitePatterns(alignment, null, 0, -1, 1, true);
    BranchModel branchModel = new HomogeneousBranchModel(siteRateModel.getSubstitutionModel(), siteRateModel.getSubstitutionModel().getFrequencyModel());
    BranchModel branchModel2 = new HomogeneousBranchModel(siteRateModel2.getSubstitutionModel(), siteRateModel2.getSubstitutionModel().getFrequencyModel());
    BranchRateModel branchRateModel = new DefaultBranchRateModel();
    BeagleTreeLikelihood treeLikelihood = new BeagleTreeLikelihood(patterns, treeModel, branchModel, siteRateModel, branchRateModel, null, false, PartialsRescalingScheme.AUTO, true);
    double logLikelihood = treeLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    System.out.print("\nTest BeagleDataLikelihoodDelegate (kappa = 1): ");
    BeagleDataLikelihoodDelegate dataLikelihoodDelegate = new BeagleDataLikelihoodDelegate(treeModel, patterns, branchModel, siteRateModel, false, PartialsRescalingScheme.NONE, false);
    TreeDataLikelihood treeDataLikelihood = new TreeDataLikelihood(dataLikelihoodDelegate, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    hky.setKappa(5.0);
    System.out.print("\nTest BeagleDataLikelihoodDelegate (kappa = 5): ");
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    System.out.print("\nTest BeagleDataLikelihoodDelegate (kappa = 10): ");
    dataLikelihoodDelegate = new BeagleDataLikelihoodDelegate(treeModel, patterns, branchModel2, siteRateModel2, false, PartialsRescalingScheme.NONE, false);
    treeDataLikelihood = new TreeDataLikelihood(dataLikelihoodDelegate, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    hky2.setKappa(11.0);
    System.out.print("\nTest BeagleDataLikelihoodDelegate (kappa = 11): ");
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    hky.setKappa(1.0);
    hky2.setKappa(10.0);
    MultiPartitionDataLikelihoodDelegate multiPartitionDataLikelihoodDelegate;
    System.out.print("\nTest MultiPartitionDataLikelihoodDelegate 1 partition (kappa = 1):");
    multiPartitionDataLikelihoodDelegate = new MultiPartitionDataLikelihoodDelegate(treeModel, Collections.singletonList((PatternList) patterns), Collections.singletonList((BranchModel) branchModel), Collections.singletonList((SiteRateModel) siteRateModel), true, PartialsRescalingScheme.NONE, false);
    treeDataLikelihood = new TreeDataLikelihood(multiPartitionDataLikelihoodDelegate, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    hky.setKappa(5.0);
    System.out.print("\nTest MultiPartitionDataLikelihoodDelegate 1 partition (kappa = 5):");
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    hky.setKappa(1.0);
    System.out.print("\nTest MultiPartitionDataLikelihoodDelegate 1 partition (kappa = 10):");
    multiPartitionDataLikelihoodDelegate = new MultiPartitionDataLikelihoodDelegate(treeModel, Collections.singletonList((PatternList) patterns), Collections.singletonList((BranchModel) branchModel2), Collections.singletonList((SiteRateModel) siteRateModel2), true, PartialsRescalingScheme.NONE, false);
    treeDataLikelihood = new TreeDataLikelihood(multiPartitionDataLikelihoodDelegate, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    System.out.print("\nTest MultiPartitionDataLikelihoodDelegate 2 partitions (kappa = 1, 10): ");
    List<PatternList> patternLists = new ArrayList<PatternList>();
    patternLists.add(patterns);
    patternLists.add(patterns);
    List<SiteRateModel> siteRateModels = new ArrayList<SiteRateModel>();
    siteRateModels.add(siteRateModel);
    siteRateModels.add(siteRateModel2);
    List<BranchModel> branchModels = new ArrayList<BranchModel>();
    branchModels.add(branchModel);
    branchModels.add(branchModel2);
    multiPartitionDataLikelihoodDelegate = new MultiPartitionDataLikelihoodDelegate(treeModel, patternLists, branchModels, siteRateModels, true, PartialsRescalingScheme.NONE, false);
    treeDataLikelihood = new TreeDataLikelihood(multiPartitionDataLikelihoodDelegate, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + " (NOT OK: this is 2x the logLikelihood of the 2nd partition)\n\n");
    // NB: exiting here skips the additional tests that follow; remove this call to run them
    System.exit(0);
    //START ADDITIONAL TEST #1 - Guy Baele
    System.out.println("-- Test #1 SiteRateModels -- ");
    //alpha in partition 1 reject followed by alpha in partition 2 reject
    System.out.print("Adjust alpha in partition 1: ");
    siteRateModel.setAlpha(0.4);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    System.out.print("Return alpha in partition 1 to original value: ");
    siteRateModel.setAlpha(0.5);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + " (i.e. reject: OK)\n");
    System.out.print("Adjust alpha in partition 2: ");
    siteRateModel2.setAlpha(0.35);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    System.out.print("Return alpha in partition 2 to original value: ");
    siteRateModel2.setAlpha(0.5);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + " (i.e. reject: OK)\n");
    //alpha in partition 1 accept followed by alpha in partition 2 accept
    System.out.print("Adjust alpha in partition 1: ");
    siteRateModel.setAlpha(0.4);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    System.out.print("Adjust alpha in partition 2: ");
    siteRateModel2.setAlpha(0.35);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + " (NOT OK: same logLikelihood as only setting alpha in partition 2)");
    System.out.print("Return alpha in partition 1 to original value: ");
    siteRateModel.setAlpha(0.5);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + " (NOT OK: alpha in partition 2 has not been returned to original value yet)");
    System.out.print("Return alpha in partition 2 to original value: ");
    siteRateModel2.setAlpha(0.5);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + "\n");
    //adjusting alphas in both partitions without explicitly calling getLogLikelihood() in between
    System.out.print("Adjust both alphas in partitions 1 and 2: ");
    siteRateModel.setAlpha(0.4);
    siteRateModel2.setAlpha(0.35);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    System.out.print("Return alpha in partition 2 to original value: ");
    siteRateModel2.setAlpha(0.5);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + " (NOT OK: alpha in partition 1 has not been returned to original value yet)");
    System.out.print("Return alpha in partition 1 to original value: ");
    siteRateModel.setAlpha(0.5);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + "\n\n");
    //END ADDITIONAL TEST - Guy Baele
    //START ADDITIONAL TEST #2 - Guy Baele
    System.out.println("-- Test #2 SiteRateModels -- ");
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    //1 siteRateModel shared across 2 partitions
    siteRateModels = new ArrayList<SiteRateModel>();
    siteRateModels.add(siteRateModel);
    multiPartitionDataLikelihoodDelegate = new MultiPartitionDataLikelihoodDelegate(treeModel, patternLists, branchModels, siteRateModels, true, PartialsRescalingScheme.NONE, false);
    treeDataLikelihood = new TreeDataLikelihood(multiPartitionDataLikelihoodDelegate, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + "\n");
    System.out.print("Adjust alpha in shared siteRateModel: ");
    siteRateModel.setAlpha(0.4);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + " (NOT OK: same logLikelihood as only adjusted alpha for partition 1)");
    siteRateModel.setAlpha(0.5);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + "\n\n");
    //END ADDITIONAL TEST - Guy Baele
    //START ADDITIONAL TEST #3 - Guy Baele
    System.out.println("-- Test #3 SiteRateModels -- ");
    siteRateModel = new GammaSiteRateModel("gammaModel");
    siteRateModel.setSubstitutionModel(hky);
    siteRateModel.setRelativeRateParameter(mu);
    siteRateModel2 = new GammaSiteRateModel("gammaModel2");
    siteRateModel2.setSubstitutionModel(hky2);
    siteRateModel2.setRelativeRateParameter(mu);
    siteRateModels = new ArrayList<SiteRateModel>();
    siteRateModels.add(siteRateModel);
    siteRateModels.add(siteRateModel2);
    multiPartitionDataLikelihoodDelegate = new MultiPartitionDataLikelihoodDelegate(treeModel, patternLists, branchModels, siteRateModels, true, PartialsRescalingScheme.NONE, false);
    treeDataLikelihood = new TreeDataLikelihood(multiPartitionDataLikelihoodDelegate, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + "\n");
    System.out.print("Adjust kappa in partition 1: ");
    hky.setKappa(5.0);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + " (NOT OK: logLikelihood has not changed?)");
    System.out.print("Return kappa in partition 1 to original value: ");
    hky.setKappa(1.0);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + "\n");
    System.out.print("Adjust kappa in partition 2: ");
    hky2.setKappa(11.0);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    System.out.print("Return kappa in partition 2 to original value: ");
    hky2.setKappa(10.0);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + " (i.e. reject: OK)\n\n");
    //END ADDITIONAL TEST - Guy Baele
    //START ADDITIONAL TEST #4 - Guy Baele
    System.out.println("-- Test #4 SiteRateModels -- ");
    SimpleAlignment secondAlignment = createAlignment(moreSequences, Nucleotides.INSTANCE);
    SitePatterns morePatterns = new SitePatterns(secondAlignment, null, 0, -1, 1, true);
    BeagleDataLikelihoodDelegate dataLikelihoodDelegateOne = new BeagleDataLikelihoodDelegate(treeModel, patterns, branchModel, siteRateModel, false, PartialsRescalingScheme.NONE, false);
    TreeDataLikelihood treeDataLikelihoodOne = new TreeDataLikelihood(dataLikelihoodDelegateOne, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihoodOne.getLogLikelihood();
    System.out.println("\nBeagleDataLikelihoodDelegate logLikelihood partition 1 (kappa = 1) = " + logLikelihood);
    hky.setKappa(10.0);
    logLikelihood = treeDataLikelihoodOne.getLogLikelihood();
    System.out.println("BeagleDataLikelihoodDelegate logLikelihood partition 1 (kappa = 10) = " + logLikelihood);
    hky.setKappa(1.0);
    BeagleDataLikelihoodDelegate dataLikelihoodDelegateTwo = new BeagleDataLikelihoodDelegate(treeModel, morePatterns, branchModel2, siteRateModel2, false, PartialsRescalingScheme.NONE, false);
    TreeDataLikelihood treeDataLikelihoodTwo = new TreeDataLikelihood(dataLikelihoodDelegateTwo, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihoodTwo.getLogLikelihood();
    System.out.println("BeagleDataLikelihoodDelegate logLikelihood partition 2 (kappa = 10) = " + logLikelihood + "\n");
    multiPartitionDataLikelihoodDelegate = new MultiPartitionDataLikelihoodDelegate(treeModel, Collections.singletonList((PatternList) patterns), Collections.singletonList((BranchModel) branchModel), Collections.singletonList((SiteRateModel) siteRateModel), true, PartialsRescalingScheme.NONE, false);
    treeDataLikelihood = new TreeDataLikelihood(multiPartitionDataLikelihoodDelegate, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.print("Test MultiPartitionDataLikelihoodDelegate 1st partition (kappa = 1):");
    System.out.println("logLikelihood = " + logLikelihood);
    hky.setKappa(10.0);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.print("Test MultiPartitionDataLikelihoodDelegate 1st partition (kappa = 10):");
    System.out.println("logLikelihood = " + logLikelihood);
    hky.setKappa(1.0);
    multiPartitionDataLikelihoodDelegate = new MultiPartitionDataLikelihoodDelegate(treeModel, Collections.singletonList((PatternList) morePatterns), Collections.singletonList((BranchModel) branchModel2), Collections.singletonList((SiteRateModel) siteRateModel2), true, PartialsRescalingScheme.NONE, false);
    treeDataLikelihood = new TreeDataLikelihood(multiPartitionDataLikelihoodDelegate, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.print("Test MultiPartitionDataLikelihoodDelegate 2nd partition (kappa = 10):");
    System.out.println("logLikelihood = " + logLikelihood + "\n");
    patternLists = new ArrayList<PatternList>();
    patternLists.add(patterns);
    patternLists.add(morePatterns);
    multiPartitionDataLikelihoodDelegate = new MultiPartitionDataLikelihoodDelegate(treeModel, patternLists, branchModels, siteRateModels, true, PartialsRescalingScheme.NONE, false);
    treeDataLikelihood = new TreeDataLikelihood(multiPartitionDataLikelihoodDelegate, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.print("Test MultiPartitionDataLikelihoodDelegate 2 partitions (kappa = 1, 10): ");
    System.out.println("logLikelihood = " + logLikelihood + " (NOT OK: should be the sum of both separate logLikelihoods)\nKappa value of partition 2 is used to compute logLikelihood for both partitions?");
    //END ADDITIONAL TEST - Guy Baele
}
Also used: FrequencyModel(dr.evomodel.substmodel.FrequencyModel), ArrayList(java.util.ArrayList), PatternList(dr.evolution.alignment.PatternList), Logger(java.util.logging.Logger), HomogeneousBranchModel(dr.evomodel.branchmodel.HomogeneousBranchModel), BranchModel(dr.evomodel.branchmodel.BranchModel), DefaultBranchRateModel(dr.evomodel.branchratemodel.DefaultBranchRateModel), SiteRateModel(dr.evomodel.siteratemodel.SiteRateModel), GammaSiteRateModel(dr.evomodel.siteratemodel.GammaSiteRateModel), TreeModel(dr.evomodel.tree.TreeModel), SimpleAlignment(dr.evolution.alignment.SimpleAlignment), SitePatterns(dr.evolution.alignment.SitePatterns), BeagleTreeLikelihood(dr.evomodel.treelikelihood.BeagleTreeLikelihood), BranchRateModel(dr.evomodel.branchratemodel.BranchRateModel), HKY(dr.evomodel.substmodel.nucleotide.HKY), Parameter(dr.inference.model.Parameter)
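
Condensed to its essentials, the single-partition setup that Example 1 repeats looks like the sketch below. This is a reading aid rather than code from the repository: it reuses only constructors and calls that appear in Example 1, and it assumes, as the example does, that createAlignment(...), createSpecifiedTree(...) and the sequences field are helpers of the DataLikelihoodTester2 test class (imports as in that class).

// Minimal single-partition BEAGLE likelihood, distilled from Example 1 above.
SimpleAlignment alignment = createAlignment(sequences, Nucleotides.INSTANCE);
TreeModel treeModel;
try {
    treeModel = createSpecifiedTree("((human:0.1,chimp:0.1):0.1,gorilla:0.2)");
} catch (Exception e) {
    throw new RuntimeException("Unable to parse Newick tree", e);
}
// HKY substitution model with equal base frequencies and kappa = 1.
Parameter freqs = new Parameter.Default(new double[] { 0.25, 0.25, 0.25, 0.25 });
FrequencyModel freqModel = new FrequencyModel(Nucleotides.INSTANCE, freqs);
HKY hky = new HKY(new Parameter.Default(HKYParser.KAPPA, 1.0, 0, 100), freqModel);
// Discrete-gamma site-rate model (alpha = 0.5, 4 categories) wrapping the substitution model.
GammaSiteRateModel siteRateModel = new GammaSiteRateModel("gammaModel", 0.5, 4);
siteRateModel.setSubstitutionModel(hky);
siteRateModel.setRelativeRateParameter(new Parameter.Default(GammaSiteModelParser.SUBSTITUTION_RATE, 1.0, 0, Double.POSITIVE_INFINITY));
// Compress the alignment into weighted site patterns and build the data likelihood.
SitePatterns patterns = new SitePatterns(alignment, null, 0, -1, 1, true);
BranchModel branchModel = new HomogeneousBranchModel(hky, freqModel);
BranchRateModel branchRateModel = new DefaultBranchRateModel();
BeagleDataLikelihoodDelegate delegate = new BeagleDataLikelihoodDelegate(treeModel, patterns, branchModel, siteRateModel, false, PartialsRescalingScheme.NONE, false);
TreeDataLikelihood treeDataLikelihood = new TreeDataLikelihood(delegate, treeModel, branchRateModel);
double logLikelihood = treeDataLikelihood.getLogLikelihood();

Swapping in a second HKY/GammaSiteRateModel pair and a MultiPartitionDataLikelihoodDelegate, as the example goes on to do, turns this into the multi-partition variant.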

Example 2 with SitePatterns

Use of dr.evolution.alignment.SitePatterns in the beast-mcmc project by beast-dev.

From the class StrictClockTest, method testStrictClock:

public void testStrictClock() throws Exception {
    Parameter popSize = new Parameter.Default(ConstantPopulationModelParser.POPULATION_SIZE, 380.0, 0, 38000.0);
    ConstantPopulationModel constantModel = createRandomInitialTree(popSize);
    CoalescentLikelihood coalescent = new CoalescentLikelihood(treeModel, null, new ArrayList<TaxonList>(), constantModel);
    coalescent.setId("coalescent");
    // clock model
    Parameter rateParameter = new Parameter.Default(StrictClockBranchRates.RATE, 2.3E-5, 0, 100.0);
    StrictClockBranchRates branchRateModel = new StrictClockBranchRates(rateParameter);
    // Sub model
    Parameter freqs = new Parameter.Default(alignment.getStateFrequencies());
    Parameter kappa = new Parameter.Default(HKYParser.KAPPA, 1.0, 0, 100.0);
    FrequencyModel f = new FrequencyModel(Nucleotides.INSTANCE, freqs);
    HKY hky = new HKY(kappa, f);
    //siteModel
    GammaSiteModel siteModel = new GammaSiteModel(hky);
    Parameter mu = new Parameter.Default(GammaSiteModelParser.MUTATION_RATE, 1.0, 0, Double.POSITIVE_INFINITY);
    siteModel.setMutationRateParameter(mu);
    //treeLikelihood
    SitePatterns patterns = new SitePatterns(alignment, null, 0, -1, 1, true);
    TreeLikelihood treeLikelihood = new TreeLikelihood(patterns, treeModel, siteModel, branchRateModel, null, false, false, true, false, false);
    treeLikelihood.setId(TreeLikelihoodParser.TREE_LIKELIHOOD);
    // Operators
    OperatorSchedule schedule = new SimpleOperatorSchedule();
    MCMCOperator operator = new ScaleOperator(kappa, 0.75);
    operator.setWeight(1.0);
    schedule.addOperator(operator);
    operator = new ScaleOperator(rateParameter, 0.75);
    operator.setWeight(3.0);
    schedule.addOperator(operator);
    Parameter allInternalHeights = treeModel.createNodeHeightsParameter(true, true, false);
    operator = new UpDownOperator(new Scalable[] { new Scalable.Default(rateParameter) }, new Scalable[] { new Scalable.Default(allInternalHeights) }, 0.75, 3.0, CoercionMode.COERCION_ON);
    schedule.addOperator(operator);
    operator = new ScaleOperator(popSize, 0.75);
    operator.setWeight(3.0);
    schedule.addOperator(operator);
    Parameter rootHeight = treeModel.getRootHeightParameter();
    rootHeight.setId(TREE_HEIGHT);
    operator = new ScaleOperator(rootHeight, 0.75);
    operator.setWeight(3.0);
    schedule.addOperator(operator);
    Parameter internalHeights = treeModel.createNodeHeightsParameter(false, true, false);
    operator = new UniformOperator(internalHeights, 30.0);
    schedule.addOperator(operator);
    operator = new SubtreeSlideOperator(treeModel, 15.0, 1.0, true, false, false, false, CoercionMode.COERCION_ON);
    schedule.addOperator(operator);
    operator = new ExchangeOperator(ExchangeOperator.NARROW, treeModel, 15.0);
    //        operator.doOperation();
    schedule.addOperator(operator);
    operator = new ExchangeOperator(ExchangeOperator.WIDE, treeModel, 3.0);
    //        operator.doOperation();
    schedule.addOperator(operator);
    operator = new WilsonBalding(treeModel, 3.0);
    //        operator.doOperation();
    schedule.addOperator(operator);
    //CompoundLikelihood
    List<Likelihood> likelihoods = new ArrayList<Likelihood>();
    likelihoods.add(coalescent);
    Likelihood prior = new CompoundLikelihood(0, likelihoods);
    prior.setId(CompoundLikelihoodParser.PRIOR);
    likelihoods.clear();
    likelihoods.add(treeLikelihood);
    Likelihood likelihood = new CompoundLikelihood(-1, likelihoods);
    likelihoods.clear();
    likelihoods.add(prior);
    likelihoods.add(likelihood);
    Likelihood posterior = new CompoundLikelihood(0, likelihoods);
    posterior.setId(CompoundLikelihoodParser.POSTERIOR);
    // Log
    ArrayLogFormatter formatter = new ArrayLogFormatter(false);
    MCLogger[] loggers = new MCLogger[2];
    loggers[0] = new MCLogger(formatter, 500, false);
    loggers[0].add(posterior);
    loggers[0].add(treeLikelihood);
    loggers[0].add(rootHeight);
    loggers[0].add(rateParameter);
    loggers[0].add(popSize);
    loggers[0].add(kappa);
    loggers[0].add(coalescent);
    loggers[1] = new MCLogger(new TabDelimitedFormatter(System.out), 10000, false);
    loggers[1].add(posterior);
    loggers[1].add(treeLikelihood);
    loggers[1].add(rootHeight);
    loggers[1].add(rateParameter);
    loggers[1].add(coalescent);
    // MCMC
    MCMC mcmc = new MCMC("mcmc1");
    MCMCOptions options = new MCMCOptions(1000000);
    mcmc.setShowOperatorAnalysis(true);
    mcmc.init(options, posterior, schedule, loggers);
    mcmc.run();
    // time
    System.out.println(mcmc.getTimer().toString());
    // Tracer
    List<Trace> traces = formatter.getTraces();
    ArrayTraceList traceList = new ArrayTraceList("RandomLocalClockTest", traces, 0);
    for (int i = 1; i < traces.size(); i++) {
        traceList.analyseTrace(i);
    }
    //        <expectation name="posterior" value="-3928.71"/>
    //        <expectation name="clock.rate" value="8.04835E-4"/>
    //        <expectation name="constant.popSize" value="37.3762"/>
    //        <expectation name="hky.kappa" value="18.2782"/>
    //        <expectation name="treeModel.rootHeight" value="69.0580"/>
    //        <expectation name="treeLikelihood" value="-3856.59"/>
    //        <expectation name="coalescent" value="-72.1285"/>
    TraceCorrelation likelihoodStats = traceList.getCorrelationStatistics(traceList.getTraceIndex(CompoundLikelihoodParser.POSTERIOR));
    assertExpectation(CompoundLikelihoodParser.POSTERIOR, likelihoodStats, -3928.71);
    likelihoodStats = traceList.getCorrelationStatistics(traceList.getTraceIndex(TreeLikelihoodParser.TREE_LIKELIHOOD));
    assertExpectation(TreeLikelihoodParser.TREE_LIKELIHOOD, likelihoodStats, -3856.59);
    TraceCorrelation treeHeightStats = traceList.getCorrelationStatistics(traceList.getTraceIndex(TREE_HEIGHT));
    assertExpectation(TREE_HEIGHT, treeHeightStats, 69.0580);
    TraceCorrelation kappaStats = traceList.getCorrelationStatistics(traceList.getTraceIndex(HKYParser.KAPPA));
    assertExpectation(HKYParser.KAPPA, kappaStats, 18.2782);
    TraceCorrelation rateStats = traceList.getCorrelationStatistics(traceList.getTraceIndex(StrictClockBranchRates.RATE));
    assertExpectation(StrictClockBranchRates.RATE, rateStats, 8.04835E-4);
    TraceCorrelation popStats = traceList.getCorrelationStatistics(traceList.getTraceIndex(ConstantPopulationModelParser.POPULATION_SIZE));
    assertExpectation(ConstantPopulationModelParser.POPULATION_SIZE, popStats, 37.3762);
    TraceCorrelation coalescentStats = traceList.getCorrelationStatistics(traceList.getTraceIndex("coalescent"));
    assertExpectation("coalescent", coalescentStats, -72.1285);
}
Also used: FrequencyModel(dr.oldevomodel.substmodel.FrequencyModel), CompoundLikelihood(dr.inference.model.CompoundLikelihood), Likelihood(dr.inference.model.Likelihood), TreeLikelihood(dr.oldevomodel.treelikelihood.TreeLikelihood), CoalescentLikelihood(dr.evomodel.coalescent.CoalescentLikelihood), ExchangeOperator(dr.evomodel.operators.ExchangeOperator), ArrayList(java.util.ArrayList), MCMC(dr.inference.mcmc.MCMC), SubtreeSlideOperator(dr.evomodel.operators.SubtreeSlideOperator), StrictClockBranchRates(dr.evomodel.branchratemodel.StrictClockBranchRates), MCMCOptions(dr.inference.mcmc.MCMCOptions), ArrayLogFormatter(dr.inference.loggers.ArrayLogFormatter), WilsonBalding(dr.evomodel.operators.WilsonBalding), TraceCorrelation(dr.inference.trace.TraceCorrelation), SitePatterns(dr.evolution.alignment.SitePatterns), ConstantPopulationModel(dr.evomodel.coalescent.ConstantPopulationModel), TaxonList(dr.evolution.util.TaxonList), TabDelimitedFormatter(dr.inference.loggers.TabDelimitedFormatter), Trace(dr.inference.trace.Trace), GammaSiteModel(dr.oldevomodel.sitemodel.GammaSiteModel), ArrayTraceList(dr.inference.trace.ArrayTraceList), HKY(dr.oldevomodel.substmodel.HKY), Parameter(dr.inference.model.Parameter), MCLogger(dr.inference.loggers.MCLogger)

Example 3 with SitePatterns

Use of dr.evolution.alignment.SitePatterns in the beast-mcmc project by beast-dev.

From the class BalancedBeagleTreeLikelihoodParser, method parseXMLObject:

public Object parseXMLObject(XMLObject xo) throws XMLParseException {
    boolean useAmbiguities = xo.getAttribute(BeagleTreeLikelihoodParser.USE_AMBIGUITIES, false);
    /*int instanceCount = xo.getAttribute(INSTANCE_COUNT, 1);
        if (instanceCount < 1) {
            instanceCount = 1;
        }

        String ic = System.getProperty(BEAGLE_INSTANCE_COUNT);
        if (ic != null && ic.length() > 0) {
            instanceCount = Integer.parseInt(ic);
        }*/
    PatternList patternList = (PatternList) xo.getChild(PatternList.class);
    TreeModel treeModel = (TreeModel) xo.getChild(TreeModel.class);
    GammaSiteRateModel siteRateModel = (GammaSiteRateModel) xo.getChild(GammaSiteRateModel.class);
    FrequencyModel rootFreqModel = (FrequencyModel) xo.getChild(FrequencyModel.class);
    BranchModel branchModel = (BranchModel) xo.getChild(BranchModel.class);
    if (branchModel == null) {
        SubstitutionModel substitutionModel = (SubstitutionModel) xo.getChild(SubstitutionModel.class);
        if (substitutionModel == null) {
            substitutionModel = siteRateModel.getSubstitutionModel();
        }
        if (substitutionModel == null) {
            throw new XMLParseException("No substitution model available for TreeLikelihood: " + xo.getId());
        }
        branchModel = new HomogeneousBranchModel(substitutionModel, rootFreqModel);
    }
    BranchRateModel branchRateModel = (BranchRateModel) xo.getChild(BranchRateModel.class);
    TipStatesModel tipStatesModel = (TipStatesModel) xo.getChild(TipStatesModel.class);
    //        if (xo.getChild(TipStatesModel.class) != null) {
    //            throw new XMLParseException("Sequence Error Models are not supported under BEAGLE yet. Please use Native BEAST Likelihood.");
    //        }
    PartialsRescalingScheme scalingScheme = PartialsRescalingScheme.DEFAULT;
    if (xo.hasAttribute(BeagleTreeLikelihoodParser.SCALING_SCHEME)) {
        // NB: the parse call below is commented out in the source, so scalingScheme keeps its
        // DEFAULT value here and the null check that follows can never trigger.
        //            scalingScheme = PartialsRescalingScheme.parseFromString(xo.getStringAttribute(BeagleTreeLikelihoodParser.SCALING_SCHEME));
        if (scalingScheme == null)
            throw new XMLParseException("Unknown scaling scheme '" + xo.getStringAttribute(BeagleTreeLikelihoodParser.SCALING_SCHEME) + "' in " + "OldBeagleTreeLikelihood object '" + xo.getId());
    }
    boolean delayScaling = true;
    Map<Set<String>, Parameter> partialsRestrictions = null;
    if (xo.hasChildNamed(PARTIALS_RESTRICTION)) {
        XMLObject cxo = xo.getChild(PARTIALS_RESTRICTION);
        TaxonList taxonList = (TaxonList) cxo.getChild(TaxonList.class);
        //            Parameter parameter = (Parameter) cxo.getChild(Parameter.class);
        try {
            TreeUtils.getLeavesForTaxa(treeModel, taxonList);
        } catch (TreeUtils.MissingTaxonException e) {
            throw new XMLParseException("Unable to parse taxon list: " + e.getMessage());
        }
        throw new XMLParseException("Restricting internal nodes is not yet implemented.  Contact Marc");
    }
    /*if (instanceCount == 1 || patternList.getPatternCount() < instanceCount) {
            return createTreeLikelihood(
                    patternList,
                    treeModel,
                    branchModel,
                    siteRateModel,
                    branchRateModel,
                    tipStatesModel,
                    useAmbiguities,
                    scalingScheme,
                    partialsRestrictions,
                    xo
            );
        }*/
    //first run a test for instanceCount == 1
    System.err.println("\nTesting instanceCount == 1");
    Likelihood baseLikelihood = createTreeLikelihood(patternList, treeModel, branchModel, siteRateModel, branchRateModel, tipStatesModel, useAmbiguities, scalingScheme, delayScaling, partialsRestrictions, xo);
    double start = System.nanoTime();
    for (int i = 0; i < TEST_RUNS; i++) {
        baseLikelihood.makeDirty();
        baseLikelihood.getLogLikelihood();
    }
    double end = System.nanoTime();
    double baseResult = end - start;
    System.err.println("Evaluation took: " + baseResult);
    if (!(patternList instanceof SitePatterns)) {
        throw new XMLParseException("BEAGLE_INSTANCES option cannot be used with BEAUti-selected codon partitioning.");
    }
    if (tipStatesModel != null) {
        throw new XMLParseException("BEAGLE_INSTANCES option cannot be used with a TipStateModel (i.e., a sequence error model).");
    }
    //List<Likelihood> likelihoods = new ArrayList<Likelihood>();
    List<Likelihood> likelihoods = null;
    CompoundLikelihood compound = null;
    int instanceCount = 2;
    boolean optimal = false;
    while (optimal == false) {
        System.err.println("\nCreating instanceCount == " + instanceCount);
        likelihoods = new ArrayList<Likelihood>();
        for (int i = 0; i < instanceCount; i++) {
            Patterns subPatterns = new Patterns((SitePatterns) patternList, 0, 0, 1, i, instanceCount);
            AbstractTreeLikelihood treeLikelihood = createTreeLikelihood(subPatterns, treeModel, branchModel, siteRateModel, branchRateModel, null, useAmbiguities, scalingScheme, delayScaling, partialsRestrictions, xo);
            treeLikelihood.setId(xo.getId() + "_" + instanceCount);
            likelihoods.add(treeLikelihood);
        }
        //construct compoundLikelihood
        compound = new CompoundLikelihood(instanceCount, likelihoods);
        //test timings 
        System.err.println("\nTesting instanceCount == " + instanceCount);
        start = System.nanoTime();
        for (int i = 0; i < TEST_RUNS; i++) {
            compound.makeDirty();
            compound.getLogLikelihood();
        }
        end = System.nanoTime();
        double newResult = end - start;
        System.err.println("Evaluation took: " + newResult);
        if (baseResult / newResult > TEST_CUTOFF) {
            instanceCount++;
            baseResult = newResult;
        } else {
            optimal = true;
            instanceCount--;
            System.err.println("\nCreating final BeagleTreeLikelihood with instanceCount: " + instanceCount);
            likelihoods = new ArrayList<Likelihood>();
            for (int i = 0; i < instanceCount; i++) {
                Patterns subPatterns = new Patterns((SitePatterns) patternList, 0, 0, 1, i, instanceCount);
                AbstractTreeLikelihood treeLikelihood = createTreeLikelihood(subPatterns, treeModel, branchModel, siteRateModel, branchRateModel, null, useAmbiguities, scalingScheme, delayScaling, partialsRestrictions, xo);
                treeLikelihood.setId(xo.getId() + "_" + instanceCount);
                likelihoods.add(treeLikelihood);
            }
            //construct compoundLikelihood
            compound = new CompoundLikelihood(instanceCount, likelihoods);
        }
    }
    return compound;
/*for (int i = 0; i < instanceCount; i++) {

            Patterns subPatterns = new Patterns((SitePatterns)patternList, 0, 0, 1, i, instanceCount);

            AbstractTreeLikelihood treeLikelihood = createTreeLikelihood(
                    subPatterns,
                    treeModel,
                    branchModel,
                    siteRateModel,
                    branchRateModel,
                    null,
                    useAmbiguities,
                    scalingScheme,
                    partialsRestrictions,
                    xo);
            treeLikelihood.setId(xo.getId() + "_" + instanceCount);
            likelihoods.add(treeLikelihood);
        }

        return new CompoundLikelihood(likelihoods);*/
}
Also used: FrequencyModel(dr.evomodel.substmodel.FrequencyModel), Set(java.util.Set), CompoundLikelihood(dr.inference.model.CompoundLikelihood), Likelihood(dr.inference.model.Likelihood), BeagleTreeLikelihood(dr.evomodel.treelikelihood.BeagleTreeLikelihood), AbstractTreeLikelihood(dr.evomodel.treelikelihood.AbstractTreeLikelihood), PatternList(dr.evolution.alignment.PatternList), PartialsRescalingScheme(dr.evomodel.treelikelihood.PartialsRescalingScheme), BranchModel(dr.evomodel.branchmodel.BranchModel), HomogeneousBranchModel(dr.evomodel.branchmodel.HomogeneousBranchModel), TreeModel(dr.evomodel.tree.TreeModel), Patterns(dr.evolution.alignment.Patterns), SitePatterns(dr.evolution.alignment.SitePatterns), TreeUtils(dr.evolution.tree.TreeUtils), TaxonList(dr.evolution.util.TaxonList), GammaSiteRateModel(dr.evomodel.siteratemodel.GammaSiteRateModel), SubstitutionModel(dr.evomodel.substmodel.SubstitutionModel), TipStatesModel(dr.evomodel.tipstatesmodel.TipStatesModel), BranchRateModel(dr.evomodel.branchratemodel.BranchRateModel), Parameter(dr.inference.model.Parameter)
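
The part of this parser that actually exercises SitePatterns is the splitting step: the full pattern list is divided into subsets, one per BEAGLE instance, each evaluated by its own tree likelihood and summed through a CompoundLikelihood. The sketch below isolates just that step, reusing only calls that appear in the method above; it assumes the surrounding variables (patternList, treeModel, branchModel, siteRateModel, branchRateModel, useAmbiguities, scalingScheme, delayScaling, partialsRestrictions, xo) are in scope exactly as they are inside parseXMLObject.

// Split the patterns into `instanceCount` subsets (subset i of instanceCount) and sum
// the per-subset likelihoods; this is the kernel of the instance-count search above.
int instanceCount = 2;
List<Likelihood> likelihoods = new ArrayList<Likelihood>();
for (int i = 0; i < instanceCount; i++) {
    Patterns subPatterns = new Patterns((SitePatterns) patternList, 0, 0, 1, i, instanceCount);
    AbstractTreeLikelihood treeLikelihood = createTreeLikelihood(subPatterns, treeModel, branchModel, siteRateModel, branchRateModel, null, useAmbiguities, scalingScheme, delayScaling, partialsRestrictions, xo);
    likelihoods.add(treeLikelihood);
}
CompoundLikelihood compound = new CompoundLikelihood(instanceCount, likelihoods);
double logLikelihood = compound.getLogLikelihood();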

Example 4 with SitePatterns

Use of dr.evolution.alignment.SitePatterns in the beast-mcmc project by beast-dev.

From the class BeagleOperationParser, method parseXMLObject:

public Object parseXMLObject(XMLObject xo) throws XMLParseException {
    SitePatterns patternList = (SitePatterns) xo.getChild(PatternList.class);
    TreeModel treeModel = (TreeModel) xo.getChild(TreeModel.class);
    BranchRateModel rateModel = (BranchRateModel) xo.getChild(BranchRateModel.class);
    Alignment alignment = (Alignment) xo.getChild(Alignment.class);
    GammaSiteRateModel substitutionModel = (GammaSiteRateModel) xo.getChild(GammaSiteRateModel.class);
    PrintWriter branch = null, operation = null;
    if (xo.hasAttribute(BRANCH_FILE_NAME)) {
        branch = XMLParser.getFilePrintWriter(xo, OPERATION_REPORT, BRANCH_FILE_NAME);
    }
    if (xo.hasAttribute(OPERATION_FILE_NAME)) {
        operation = XMLParser.getFilePrintWriter(xo, OPERATION_REPORT, OPERATION_FILE_NAME);
    }
    return new BeagleOperationReport(treeModel, patternList, rateModel, substitutionModel, alignment, branch, operation);
}
Also used: SitePatterns(dr.evolution.alignment.SitePatterns), TreeModel(dr.evomodel.tree.TreeModel), Alignment(dr.evolution.alignment.Alignment), BeagleOperationReport(dr.evomodel.treelikelihood.BeagleOperationReport), BranchRateModel(dr.evomodel.branchratemodel.BranchRateModel), PatternList(dr.evolution.alignment.PatternList), GammaSiteRateModel(dr.evomodel.siteratemodel.GammaSiteRateModel), PrintWriter(java.io.PrintWriter)

Example 5 with SitePatterns

Use of dr.evolution.alignment.SitePatterns in the beast-mcmc project by beast-dev.

From the class GeneralSubstitutionModelTest, method testGeneralSubstitutionModel:

public void testGeneralSubstitutionModel() {
    // Sub model
    FrequencyModel freqModel = new FrequencyModel(dataType, alignment.getStateFrequencies());
    // dimension="5" value="1.0"
    Parameter ratesPara = new Parameter.Default(GeneralSubstitutionModelParser.RATES, 5, 1.0);
    // relativeTo="5"
    GeneralSubstitutionModel generalSubstitutionModel = new GeneralSubstitutionModel(dataType, freqModel, ratesPara, 4);
    //siteModel
    GammaSiteModel siteModel = new GammaSiteModel(generalSubstitutionModel);
    Parameter mu = new Parameter.Default(GammaSiteModelParser.MUTATION_RATE, 1.0, 0, Double.POSITIVE_INFINITY);
    siteModel.setMutationRateParameter(mu);
    //treeLikelihood
    SitePatterns patterns = new SitePatterns(alignment, null, 0, -1, 1, true);
    TreeLikelihood treeLikelihood = new TreeLikelihood(patterns, treeModel, siteModel, null, null, false, false, true, false, false);
    treeLikelihood.setId(TreeLikelihoodParser.TREE_LIKELIHOOD);
    // Operators
    OperatorSchedule schedule = new SimpleOperatorSchedule();
    MCMCOperator operator = new ScaleOperator(ratesPara, 0.5);
    operator.setWeight(1.0);
    schedule.addOperator(operator);
    Parameter rootHeight = treeModel.getRootHeightParameter();
    rootHeight.setId(TREE_HEIGHT);
    operator = new ScaleOperator(rootHeight, 0.5);
    operator.setWeight(1.0);
    schedule.addOperator(operator);
    Parameter internalHeights = treeModel.createNodeHeightsParameter(false, true, false);
    operator = new UniformOperator(internalHeights, 10.0);
    schedule.addOperator(operator);
    operator = new SubtreeSlideOperator(treeModel, 1, 1, true, false, false, false, CoercionMode.COERCION_ON);
    schedule.addOperator(operator);
    operator = new ExchangeOperator(ExchangeOperator.NARROW, treeModel, 1.0);
    //        operator.doOperation();
    schedule.addOperator(operator);
    operator = new ExchangeOperator(ExchangeOperator.WIDE, treeModel, 1.0);
    //        operator.doOperation();
    schedule.addOperator(operator);
    operator = new WilsonBalding(treeModel, 1.0);
    //        operator.doOperation();
    schedule.addOperator(operator);
    // Log
    ArrayLogFormatter formatter = new ArrayLogFormatter(false);
    MCLogger[] loggers = new MCLogger[2];
    loggers[0] = new MCLogger(formatter, 1000, false);
    loggers[0].add(treeLikelihood);
    loggers[0].add(rootHeight);
    loggers[0].add(ratesPara);
    loggers[1] = new MCLogger(new TabDelimitedFormatter(System.out), 100000, false);
    loggers[1].add(treeLikelihood);
    loggers[1].add(rootHeight);
    loggers[1].add(ratesPara);
    // MCMC
    MCMC mcmc = new MCMC("mcmc1");
    MCMCOptions options = new MCMCOptions(10000000);
    mcmc.setShowOperatorAnalysis(true);
    mcmc.init(options, treeLikelihood, schedule, loggers);
    mcmc.run();
    // time
    System.out.println(mcmc.getTimer().toString());
    // Tracer
    List<Trace> traces = formatter.getTraces();
    ArrayTraceList traceList = new ArrayTraceList("GeneralSubstitutionModelTest", traces, 0);
    for (int i = 1; i < traces.size(); i++) {
        traceList.analyseTrace(i);
    }
    //      <expectation name="likelihood" value="-1815.75"/>
    //		<expectation name="treeModel.rootHeight" value="6.42048E-2"/>
    //		<expectation name="rateAC" value="6.08986E-2"/>
    TraceCorrelation likelihoodStats = traceList.getCorrelationStatistics(traceList.getTraceIndex(TreeLikelihoodParser.TREE_LIKELIHOOD));
    assertExpectation(TreeLikelihoodParser.TREE_LIKELIHOOD, likelihoodStats, -1815.75);
    TraceCorrelation treeHeightStats = traceList.getCorrelationStatistics(traceList.getTraceIndex(TREE_HEIGHT));
    assertExpectation(TREE_HEIGHT, treeHeightStats, 0.0640787258170083);
    TraceCorrelation rateACStats = traceList.getCorrelationStatistics(traceList.getTraceIndex(GeneralSubstitutionModelParser.RATES + "1"));
    assertExpectation(GeneralSubstitutionModelParser.RATES + "1", rateACStats, 0.061071756742081366);
}
Also used: FrequencyModel(dr.oldevomodel.substmodel.FrequencyModel), TreeLikelihood(dr.oldevomodel.treelikelihood.TreeLikelihood), ExchangeOperator(dr.evomodel.operators.ExchangeOperator), MCMC(dr.inference.mcmc.MCMC), SubtreeSlideOperator(dr.evomodel.operators.SubtreeSlideOperator), MCMCOptions(dr.inference.mcmc.MCMCOptions), GeneralSubstitutionModel(dr.oldevomodel.substmodel.GeneralSubstitutionModel), ArrayLogFormatter(dr.inference.loggers.ArrayLogFormatter), WilsonBalding(dr.evomodel.operators.WilsonBalding), TraceCorrelation(dr.inference.trace.TraceCorrelation), SitePatterns(dr.evolution.alignment.SitePatterns), TabDelimitedFormatter(dr.inference.loggers.TabDelimitedFormatter), Trace(dr.inference.trace.Trace), GammaSiteModel(dr.oldevomodel.sitemodel.GammaSiteModel), ArrayTraceList(dr.inference.trace.ArrayTraceList), Parameter(dr.inference.model.Parameter), MCLogger(dr.inference.loggers.MCLogger)

Aggregations

SitePatterns (dr.evolution.alignment.SitePatterns): 30
Parameter (dr.inference.model.Parameter): 20
FrequencyModel (dr.oldevomodel.substmodel.FrequencyModel): 16
GammaSiteModel (dr.oldevomodel.sitemodel.GammaSiteModel): 15
TreeLikelihood (dr.oldevomodel.treelikelihood.TreeLikelihood): 15
HKY (dr.oldevomodel.substmodel.HKY): 11
ArrayList (java.util.ArrayList): 7
BranchRateModel (dr.evomodel.branchratemodel.BranchRateModel): 6
GammaSiteRateModel (dr.evomodel.siteratemodel.GammaSiteRateModel): 6
TaxonList (dr.evolution.util.TaxonList): 5
BranchModel (dr.evomodel.branchmodel.BranchModel): 5
ExchangeOperator (dr.evomodel.operators.ExchangeOperator): 5
SubtreeSlideOperator (dr.evomodel.operators.SubtreeSlideOperator): 5
WilsonBalding (dr.evomodel.operators.WilsonBalding): 5
TreeModel (dr.evomodel.tree.TreeModel): 5
ArrayLogFormatter (dr.inference.loggers.ArrayLogFormatter): 5
MCLogger (dr.inference.loggers.MCLogger): 5
TabDelimitedFormatter (dr.inference.loggers.TabDelimitedFormatter): 5
MCMC (dr.inference.mcmc.MCMC): 5
MCMCOptions (dr.inference.mcmc.MCMCOptions): 5
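
Across the five examples, the pairing that dominates this aggregation list is a SitePatterns object feeding a TreeLikelihood through a GammaSiteModel (the dr.oldevomodel classes, as in Examples 2 and 5). A minimal sketch of that recurring pattern, assuming an alignment and a treeModel are already available as fields of the test class, as they are in those examples:

// Recurring pattern from Examples 2 and 5: alignment -> site patterns -> site model -> tree likelihood.
FrequencyModel freqModel = new FrequencyModel(Nucleotides.INSTANCE, new Parameter.Default(alignment.getStateFrequencies()));
HKY hky = new HKY(new Parameter.Default(HKYParser.KAPPA, 1.0, 0, 100.0), freqModel);
GammaSiteModel siteModel = new GammaSiteModel(hky);
siteModel.setMutationRateParameter(new Parameter.Default(GammaSiteModelParser.MUTATION_RATE, 1.0, 0, Double.POSITIVE_INFINITY));
// Compress the alignment into unique, weighted site patterns (all sites, no taxon subset).
SitePatterns patterns = new SitePatterns(alignment, null, 0, -1, 1, true);
// No branch-rate model or tip-states model, as in Example 5.
TreeLikelihood treeLikelihood = new TreeLikelihood(patterns, treeModel, siteModel, null, null, false, false, true, false, false);
double logLikelihood = treeLikelihood.getLogLikelihood();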