
Example 91 with TreeModel

Use of dr.evomodel.tree.TreeModel in project beast-mcmc by beast-dev.

The class DataLikelihoodTester, method main.

public static void main(String[] args) {
    // turn off logging to avoid screen noise...
    Logger logger = Logger.getLogger("dr");
    logger.setUseParentHandlers(false);
    SimpleAlignment alignment = createAlignment(sequences, Nucleotides.INSTANCE);
    TreeModel treeModel;
    try {
        treeModel = createSpecifiedTree("((human:0.1,chimp:0.1):0.1,gorilla:0.2)");
    } catch (Exception e) {
        throw new RuntimeException("Unable to parse Newick tree", e);
    }
    System.out.print("\nTest BeagleTreeLikelihood (kappa = 1): ");
    //substitutionModel
    Parameter freqs = new Parameter.Default(new double[] { 0.25, 0.25, 0.25, 0.25 });
    Parameter kappa = new Parameter.Default(HKYParser.KAPPA, 1.0, 0, 100);
    FrequencyModel f = new FrequencyModel(Nucleotides.INSTANCE, freqs);
    HKY hky = new HKY(kappa, f);
    //siteModel
    double alpha = 0.5;
    GammaSiteRateModel siteRateModel = new GammaSiteRateModel("gammaModel", alpha, 4);
    //        GammaSiteRateModel siteRateModel = new GammaSiteRateModel("siteRateModel");
    siteRateModel.setSubstitutionModel(hky);
    Parameter mu = new Parameter.Default(GammaSiteModelParser.SUBSTITUTION_RATE, 1.0, 0, Double.POSITIVE_INFINITY);
    siteRateModel.setRelativeRateParameter(mu);
    FrequencyModel f2 = new FrequencyModel(Nucleotides.INSTANCE, freqs);
    Parameter kappa2 = new Parameter.Default(HKYParser.KAPPA, 10.0, 0, 100);
    HKY hky2 = new HKY(kappa2, f2);
    GammaSiteRateModel siteRateModel2 = new GammaSiteRateModel("gammaModel", alpha, 4);
    siteRateModel2.setSubstitutionModel(hky2);
    siteRateModel2.setRelativeRateParameter(mu);
    //treeLikelihood
    SitePatterns patterns = new SitePatterns(alignment, null, 0, -1, 1, true);
    BranchModel branchModel = new HomogeneousBranchModel(siteRateModel.getSubstitutionModel(), siteRateModel.getSubstitutionModel().getFrequencyModel());
    BranchModel branchModel2 = new HomogeneousBranchModel(siteRateModel2.getSubstitutionModel(), siteRateModel2.getSubstitutionModel().getFrequencyModel());
    BranchRateModel branchRateModel = new DefaultBranchRateModel();
    BeagleTreeLikelihood treeLikelihood = new BeagleTreeLikelihood(patterns, treeModel, branchModel, siteRateModel, branchRateModel, null, false, PartialsRescalingScheme.AUTO, true);
    double logLikelihood = treeLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    System.out.print("\nTest BeagleDataLikelihoodDelegate (kappa = 1): ");
    BeagleDataLikelihoodDelegate dataLikelihoodDelegate = new BeagleDataLikelihoodDelegate(treeModel, patterns, branchModel, siteRateModel, false, PartialsRescalingScheme.NONE, false);
    TreeDataLikelihood treeDataLikelihood = new TreeDataLikelihood(dataLikelihoodDelegate, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    hky.setKappa(5.0);
    System.out.print("\nTest BeagleDataLikelihoodDelegate (kappa = 5): ");
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    System.out.print("\nTest BeagleDataLikelihoodDelegate (kappa = 10): ");
    dataLikelihoodDelegate = new BeagleDataLikelihoodDelegate(treeModel, patterns, branchModel2, siteRateModel2, false, PartialsRescalingScheme.NONE, false);
    treeDataLikelihood = new TreeDataLikelihood(dataLikelihoodDelegate, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    hky2.setKappa(11.0);
    System.out.print("\nTest BeagleDataLikelihoodDelegate (kappa = 11): ");
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    hky.setKappa(1.0);
    hky2.setKappa(10.0);
    MultiPartitionDataLikelihoodDelegate multiPartitionDataLikelihoodDelegate;
    System.out.print("\nTest MultiPartitionDataLikelihoodDelegate 1 partition (kappa = 1):");
    multiPartitionDataLikelihoodDelegate = new MultiPartitionDataLikelihoodDelegate(treeModel, Collections.singletonList((PatternList) patterns), Collections.singletonList((BranchModel) branchModel), Collections.singletonList((SiteRateModel) siteRateModel), true, PartialsRescalingScheme.NONE, false);
    treeDataLikelihood = new TreeDataLikelihood(multiPartitionDataLikelihoodDelegate, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    hky.setKappa(5.0);
    System.out.print("\nTest MultiPartitionDataLikelihoodDelegate 1 partition (kappa = 5):");
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    hky.setKappa(1.0);
    System.out.print("\nTest MultiPartitionDataLikelihoodDelegate 1 partition (kappa = 10):");
    multiPartitionDataLikelihoodDelegate = new MultiPartitionDataLikelihoodDelegate(treeModel, Collections.singletonList((PatternList) patterns), Collections.singletonList((BranchModel) branchModel2), Collections.singletonList((SiteRateModel) siteRateModel2), true, PartialsRescalingScheme.NONE, false);
    treeDataLikelihood = new TreeDataLikelihood(multiPartitionDataLikelihoodDelegate, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    System.out.print("\nTest MultiPartitionDataLikelihoodDelegate 2 partitions (kappa = 1, 10): ");
    List<PatternList> patternLists = new ArrayList<PatternList>();
    patternLists.add(patterns);
    patternLists.add(patterns);
    List<SiteRateModel> siteRateModels = new ArrayList<SiteRateModel>();
    siteRateModels.add(siteRateModel);
    siteRateModels.add(siteRateModel2);
    List<BranchModel> branchModels = new ArrayList<BranchModel>();
    branchModels.add(branchModel);
    branchModels.add(branchModel2);
    multiPartitionDataLikelihoodDelegate = new MultiPartitionDataLikelihoodDelegate(treeModel, patternLists, branchModels, siteRateModels, true, PartialsRescalingScheme.NONE, false);
    treeDataLikelihood = new TreeDataLikelihood(multiPartitionDataLikelihoodDelegate, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + " (NOT OK: this is 2x the logLikelihood of the 2nd partition)\n\n");
    System.exit(0);
    //START ADDITIONAL TEST #1 - Guy Baele
    System.out.println("-- Test #1 SiteRateModels -- ");
    //alpha in partition 1 reject followed by alpha in partition 2 reject
    System.out.print("Adjust alpha in partition 1: ");
    siteRateModel.setAlpha(0.4);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    System.out.print("Return alpha in partition 1 to original value: ");
    siteRateModel.setAlpha(0.5);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + " (i.e. reject: OK)\n");
    System.out.print("Adjust alpha in partition 2: ");
    siteRateModel2.setAlpha(0.35);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    System.out.print("Return alpha in partition 2 to original value: ");
    siteRateModel2.setAlpha(0.5);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + " (i.e. reject: OK)\n");
    //alpha in partition 1 accept followed by alpha in partition 2 accept
    System.out.print("Adjust alpha in partition 1: ");
    siteRateModel.setAlpha(0.4);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    System.out.print("Adjust alpha in partition 2: ");
    siteRateModel2.setAlpha(0.35);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + " (NOT OK: same logLikelihood as only setting alpha in partition 2)");
    System.out.print("Return alpha in partition 1 to original value: ");
    siteRateModel.setAlpha(0.5);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + " (NOT OK: alpha in partition 2 has not been returned to original value yet)");
    System.out.print("Return alpha in partition 2 to original value: ");
    siteRateModel2.setAlpha(0.5);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + "\n");
    //adjusting alphas in both partitions without explicitly calling getLogLikelihood() in between
    System.out.print("Adjust both alphas in partitions 1 and 2: ");
    siteRateModel.setAlpha(0.4);
    siteRateModel2.setAlpha(0.35);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    System.out.print("Return alpha in partition 2 to original value: ");
    siteRateModel2.setAlpha(0.5);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + " (NOT OK: alpha in partition 1 has not been returned to original value yet)");
    System.out.print("Return alpha in partition 1 to original value: ");
    siteRateModel.setAlpha(0.5);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + "\n\n");
    //END ADDITIONAL TEST - Guy Baele
    //START ADDITIONAL TEST #2 - Guy Baele
    System.out.println("-- Test #2 SiteRateModels -- ");
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    //1 siteRateModel shared across 2 partitions
    siteRateModels = new ArrayList<SiteRateModel>();
    siteRateModels.add(siteRateModel);
    multiPartitionDataLikelihoodDelegate = new MultiPartitionDataLikelihoodDelegate(treeModel, patternLists, branchModels, siteRateModels, true, PartialsRescalingScheme.NONE, false);
    treeDataLikelihood = new TreeDataLikelihood(multiPartitionDataLikelihoodDelegate, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + "\n");
    System.out.print("Adjust alpha in shared siteRateModel: ");
    siteRateModel.setAlpha(0.4);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + " (NOT OK: same logLikelihood as only adjusted alpha for partition 1)");
    siteRateModel.setAlpha(0.5);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + "\n\n");
    //END ADDITIONAL TEST - Guy Baele
    //START ADDITIONAL TEST #3 - Guy Baele
    System.out.println("-- Test #3 SiteRateModels -- ");
    siteRateModel = new GammaSiteRateModel("gammaModel");
    siteRateModel.setSubstitutionModel(hky);
    siteRateModel.setRelativeRateParameter(mu);
    siteRateModel2 = new GammaSiteRateModel("gammaModel2");
    siteRateModel2.setSubstitutionModel(hky2);
    siteRateModel2.setRelativeRateParameter(mu);
    siteRateModels = new ArrayList<SiteRateModel>();
    siteRateModels.add(siteRateModel);
    siteRateModels.add(siteRateModel2);
    multiPartitionDataLikelihoodDelegate = new MultiPartitionDataLikelihoodDelegate(treeModel, patternLists, branchModels, siteRateModels, true, PartialsRescalingScheme.NONE, false);
    treeDataLikelihood = new TreeDataLikelihood(multiPartitionDataLikelihoodDelegate, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + "\n");
    System.out.print("Adjust kappa in partition 1: ");
    hky.setKappa(5.0);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + " (NOT OK: logLikelihood has not changed?)");
    System.out.print("Return kappa in partition 1 to original value: ");
    hky.setKappa(1.0);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + "\n");
    System.out.print("Adjust kappa in partition 2: ");
    hky2.setKappa(11.0);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    System.out.print("Return kappa in partition 2 to original value: ");
    hky2.setKappa(10.0);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + " (i.e. reject: OK)\n\n");
    //END ADDITIONAL TEST - Guy Baele
    //START ADDITIONAL TEST #4 - Guy Baele
    System.out.println("-- Test #4 SiteRateModels -- ");
    SimpleAlignment secondAlignment = createAlignment(moreSequences, Nucleotides.INSTANCE);
    SitePatterns morePatterns = new SitePatterns(secondAlignment, null, 0, -1, 1, true);
    BeagleDataLikelihoodDelegate dataLikelihoodDelegateOne = new BeagleDataLikelihoodDelegate(treeModel, patterns, branchModel, siteRateModel, false, PartialsRescalingScheme.NONE, false);
    TreeDataLikelihood treeDataLikelihoodOne = new TreeDataLikelihood(dataLikelihoodDelegateOne, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihoodOne.getLogLikelihood();
    System.out.println("\nBeagleDataLikelihoodDelegate logLikelihood partition 1 (kappa = 1) = " + logLikelihood);
    hky.setKappa(10.0);
    logLikelihood = treeDataLikelihoodOne.getLogLikelihood();
    System.out.println("BeagleDataLikelihoodDelegate logLikelihood partition 1 (kappa = 10) = " + logLikelihood);
    hky.setKappa(1.0);
    BeagleDataLikelihoodDelegate dataLikelihoodDelegateTwo = new BeagleDataLikelihoodDelegate(treeModel, morePatterns, branchModel2, siteRateModel2, false, PartialsRescalingScheme.NONE, false);
    TreeDataLikelihood treeDataLikelihoodTwo = new TreeDataLikelihood(dataLikelihoodDelegateTwo, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihoodTwo.getLogLikelihood();
    System.out.println("BeagleDataLikelihoodDelegate logLikelihood partition 2 (kappa = 10) = " + logLikelihood + "\n");
    multiPartitionDataLikelihoodDelegate = new MultiPartitionDataLikelihoodDelegate(treeModel, Collections.singletonList((PatternList) patterns), Collections.singletonList((BranchModel) branchModel), Collections.singletonList((SiteRateModel) siteRateModel), true, PartialsRescalingScheme.NONE, false);
    treeDataLikelihood = new TreeDataLikelihood(multiPartitionDataLikelihoodDelegate, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.print("Test MultiPartitionDataLikelihoodDelegate 1st partition (kappa = 1):");
    System.out.println("logLikelihood = " + logLikelihood);
    hky.setKappa(10.0);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.print("Test MultiPartitionDataLikelihoodDelegate 1st partition (kappa = 10):");
    System.out.println("logLikelihood = " + logLikelihood);
    hky.setKappa(1.0);
    multiPartitionDataLikelihoodDelegate = new MultiPartitionDataLikelihoodDelegate(treeModel, Collections.singletonList((PatternList) morePatterns), Collections.singletonList((BranchModel) branchModel2), Collections.singletonList((SiteRateModel) siteRateModel2), true, PartialsRescalingScheme.NONE, false);
    treeDataLikelihood = new TreeDataLikelihood(multiPartitionDataLikelihoodDelegate, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.print("Test MultiPartitionDataLikelihoodDelegate 2nd partition (kappa = 10):");
    System.out.println("logLikelihood = " + logLikelihood + "\n");
    patternLists = new ArrayList<PatternList>();
    patternLists.add(patterns);
    patternLists.add(morePatterns);
    multiPartitionDataLikelihoodDelegate = new MultiPartitionDataLikelihoodDelegate(treeModel, patternLists, branchModels, siteRateModels, true, PartialsRescalingScheme.NONE, false);
    treeDataLikelihood = new TreeDataLikelihood(multiPartitionDataLikelihoodDelegate, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.print("Test MultiPartitionDataLikelihoodDelegate 2 partitions (kappa = 1, 10): ");
    System.out.println("logLikelihood = " + logLikelihood + " (NOT OK: should be the sum of both separate logLikelihoods)\nKappa value of partition 2 is used to compute logLikelihood for both partitions?");
//END ADDITIONAL TEST - Guy Baele
}
Also used : FrequencyModel(dr.evomodel.substmodel.FrequencyModel) ArrayList(java.util.ArrayList) PatternList(dr.evolution.alignment.PatternList) HomogeneousBranchModel(dr.evomodel.branchmodel.HomogeneousBranchModel) BranchModel(dr.evomodel.branchmodel.BranchModel) DefaultBranchRateModel(dr.evomodel.branchratemodel.DefaultBranchRateModel) SiteRateModel(dr.evomodel.siteratemodel.SiteRateModel) GammaSiteRateModel(dr.evomodel.siteratemodel.GammaSiteRateModel) TreeModel(dr.evomodel.tree.TreeModel) SimpleAlignment(dr.evolution.alignment.SimpleAlignment) SitePatterns(dr.evolution.alignment.SitePatterns) BeagleTreeLikelihood(dr.evomodel.treelikelihood.BeagleTreeLikelihood) BranchRateModel(dr.evomodel.branchratemodel.BranchRateModel) HKY(dr.evomodel.substmodel.nucleotide.HKY) Parameter(dr.inference.model.Parameter)
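
The test above exercises the single-partition and multi-partition BEAGLE likelihood paths and flags several results as "NOT OK". For reference, the multi-partition construction pattern it repeats can be distilled as the sketch below; it is a sketch only, reusing the treeModel, patterns, branch models, site rate models and branchRateModel built earlier in main. The list variable names are hypothetical and java.util.Arrays is an extra import not present in the original.

// A minimal sketch of the multi-partition pattern used in the test above.
// Assumes the objects built earlier in main() are in scope; java.util.Arrays is an added import.
List<PatternList> partitionPatterns = Arrays.asList((PatternList) patterns, (PatternList) patterns);
List<BranchModel> partitionBranchModels = Arrays.asList(branchModel, branchModel2);
List<SiteRateModel> partitionSiteModels = Arrays.<SiteRateModel>asList(siteRateModel, siteRateModel2);
// One delegate spans all partitions; TreeDataLikelihood wraps it with the tree and the clock model.
MultiPartitionDataLikelihoodDelegate delegate = new MultiPartitionDataLikelihoodDelegate(
        treeModel, partitionPatterns, partitionBranchModels, partitionSiteModels,
        true, PartialsRescalingScheme.NONE, false);
double multiPartitionLogL =
        new TreeDataLikelihood(delegate, treeModel, branchRateModel).getLogLikelihood();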

Example 92 with TreeModel

Use of dr.evomodel.tree.TreeModel in project beast-mcmc by beast-dev.

The class DataLikelihoodTester, method createSpecifiedTree.

private static TreeModel createSpecifiedTree(String t) throws Exception {
    NewickImporter importer = new NewickImporter(t);
    Tree tree = importer.importTree(null);
    //treeModel
    return new TreeModel(tree);
}
Also used : TreeModel(dr.evomodel.tree.TreeModel) NewickImporter(dr.evolution.io.NewickImporter) Tree(dr.evolution.tree.Tree)
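
This is the helper that Example 91 calls to build its three-taxon tree. A minimal usage sketch (assuming importTree(null) derives the taxon list from the Newick labels themselves):

// Hypothetical call site; any Newick string with branch lengths should work.
TreeModel treeModel = createSpecifiedTree("((human:0.1,chimp:0.1):0.1,gorilla:0.2)");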

Example 93 with TreeModel

Use of dr.evomodel.tree.TreeModel in project beast-mcmc by beast-dev.

The class GaussianProcessSkytrackLikelihoodParser, method parseXMLObject.

public Object parseXMLObject(XMLObject xo) throws XMLParseException {
    XMLObject cxo = xo.getChild(POPULATION_PARAMETER);
    Parameter popParameter = (Parameter) cxo.getChild(Parameter.class);
    cxo = xo.getChild(PRECISION_PARAMETER);
    Parameter precParameter = (Parameter) cxo.getChild(Parameter.class);
    //        cxo = xo.getChild(LAMBDA_BOUND_PARAMETER);
    //        Parameter lambda_bound = (Parameter) cxo.getChild(Parameter.class);
    //
    //        cxo = xo.getChild(LAMBDA_PARAMETER);
    //        Parameter lambda_parameter = (Parameter) cxo.getChild(Parameter.class);
    cxo = xo.getChild(POPULATION_TREE);
    List<Tree> treeList = new ArrayList<Tree>();
    for (int i = 0; i < cxo.getChildCount(); i++) {
        Object testObject = cxo.getChild(i);
        if (testObject instanceof Tree) {
            // cast to Tree, the list's declared element type; the TreeModel requirement
            // for randomization is enforced separately below
            treeList.add((Tree) testObject);
        }
    }
    //        TreeModel treeModel = (TreeModel) cxo.getChild(TreeModel.class);
    //        cxo = xo.getChild(GROUP_SIZES);
    //        Parameter groupParameter = null;
    //        if (cxo != null) {
    //            groupParameter = (Parameter) cxo.getChild(Parameter.class);
    //
    //            if (popParameter.getDimension() != groupParameter.getDimension())
    //                throw new XMLParseException("Population and group size parameters must have the same length");
    //        }
    Parameter lambda_parameter;
    if (xo.getChild(LAMBDA_PARAMETER) != null) {
        cxo = xo.getChild(LAMBDA_PARAMETER);
        lambda_parameter = (Parameter) cxo.getChild(Parameter.class);
    } else {
        lambda_parameter = new Parameter.Default(1.0);
    }
    Parameter GPtype;
    if (xo.getChild(GPTYPE) != null) {
        cxo = xo.getChild(GPTYPE);
        GPtype = (Parameter) cxo.getChild(Parameter.class);
    } else {
        GPtype = new Parameter.Default(1.0);
    }
    Parameter Tmrca;
    if (xo.getChild(TMRCA) != null) {
        cxo = xo.getChild(TMRCA);
        Tmrca = (Parameter) cxo.getChild(Parameter.class);
    } else {
        Tmrca = new Parameter.Default(1.0);
    }
    Parameter numPoints;
    if (xo.getChild(NUMBER_POINTS) != null) {
        cxo = xo.getChild(NUMBER_POINTS);
        numPoints = (Parameter) cxo.getChild(Parameter.class);
    } else {
        numPoints = new Parameter.Default(1.0);
    }
    Parameter CoalCounts;
    if (xo.getChild(COALCOUNT) != null) {
        cxo = xo.getChild(COALCOUNT);
        CoalCounts = (Parameter) cxo.getChild(Parameter.class);
    } else {
        CoalCounts = new Parameter.Default(1.0);
    }
    Parameter GPcounts;
    if (xo.getChild(GPCOUNTS) != null) {
        cxo = xo.getChild(GPCOUNTS);
        GPcounts = (Parameter) cxo.getChild(Parameter.class);
    } else {
        GPcounts = new Parameter.Default(1.0);
    }
    Parameter coalfactor;
    if (xo.getChild(COALFACTOR) != null) {
        cxo = xo.getChild(COALFACTOR);
        coalfactor = (Parameter) cxo.getChild(Parameter.class);
    } else {
        coalfactor = new Parameter.Default(1.0);
    }
    Parameter lambda_bound;
    if (xo.getChild(LAMBDA_BOUND_PARAMETER) != null) {
        cxo = xo.getChild(LAMBDA_BOUND_PARAMETER);
        lambda_bound = (Parameter) cxo.getChild(Parameter.class);
    } else {
        lambda_bound = new Parameter.Default(1.0);
    }
    Parameter alpha_parameter;
    if (xo.getChild(ALPHA_PARAMETER) != null) {
        cxo = xo.getChild(ALPHA_PARAMETER);
        alpha_parameter = (Parameter) cxo.getChild(Parameter.class);
    } else {
        alpha_parameter = new Parameter.Default(0.001);
    }
    Parameter beta_parameter;
    if (xo.getChild(BETA_PARAMETER) != null) {
        cxo = xo.getChild(BETA_PARAMETER);
        beta_parameter = (Parameter) cxo.getChild(Parameter.class);
    } else {
        beta_parameter = new Parameter.Default(0.001);
    }
    Parameter change_points;
    if (xo.getChild(CHANGE_POINTS) != null) {
        cxo = xo.getChild(CHANGE_POINTS);
        change_points = (Parameter) cxo.getChild(Parameter.class);
    } else {
        change_points = new Parameter.Default(0, 1);
    }
    if (xo.getAttribute(RANDOMIZE_TREE, false)) {
        for (Tree tree : treeList) {
            if (tree instanceof TreeModel) {
                GaussianProcessSkytrackLikelihood.checkTree((TreeModel) tree);
            } else {
                throw new XMLParseException("Can not randomize a fixed tree");
            }
        }
    }
    //        XMLObject latentChild = xo.getChild(LATENT_PARAMETER);
    //        Parameter latentPoints = (Parameter) latentChild.getChild(Parameter.class);
    boolean rescaleByRootHeight = xo.getAttribute(RESCALE_BY_ROOT_ISSUE, true);
    if (treeList.size() == 1) {
        return new GaussianProcessSkytrackLikelihood(treeList, precParameter, rescaleByRootHeight, lambda_bound, lambda_parameter, popParameter, alpha_parameter, beta_parameter, change_points, GPtype, GPcounts, coalfactor, CoalCounts, numPoints, Tmrca);
    } else {
        return new GaussianProcessMultilocusSkytrackLikelihood(treeList, precParameter, rescaleByRootHeight, lambda_bound, lambda_parameter, popParameter, alpha_parameter, beta_parameter, change_points, GPtype, GPcounts, coalfactor, CoalCounts, numPoints, Tmrca);
    }
}
Also used : ArrayList(java.util.ArrayList) GaussianProcessMultilocusSkytrackLikelihood(dr.evomodel.coalescent.GaussianProcessMultilocusSkytrackLikelihood) TreeModel(dr.evomodel.tree.TreeModel) GaussianProcessSkytrackLikelihood(dr.evomodel.coalescent.GaussianProcessSkytrackLikelihood) Parameter(dr.inference.model.Parameter) Tree(dr.evolution.tree.Tree)
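
The parser above repeats the same "optional child element, else default Parameter" block nine times. A hypothetical helper (not part of beast-mcmc) could collapse that repetition using only the XMLObject and Parameter calls already shown; a minimal sketch:

// Hypothetical helper, not in the BEAST code base: returns the Parameter nested in the
// named child element, or a one-dimensional default if the element is absent.
private static Parameter optionalParameter(XMLObject xo, String childName, double defaultValue) {
    if (xo.getChild(childName) != null) {
        XMLObject cxo = xo.getChild(childName);
        return (Parameter) cxo.getChild(Parameter.class);
    }
    return new Parameter.Default(defaultValue);
}

With it, each block above would reduce to a single line, e.g. Parameter lambda_parameter = optionalParameter(xo, LAMBDA_PARAMETER, 1.0);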

Example 94 with TreeModel

Use of dr.evomodel.tree.TreeModel in project beast-mcmc by beast-dev.

The class BayesianSkylineLikelihoodParser, method parseXMLObject.

public Object parseXMLObject(XMLObject xo) throws XMLParseException {
    XMLObject cxo = xo.getChild(POPULATION_SIZES);
    Parameter param = (Parameter) cxo.getChild(Parameter.class);
    cxo = xo.getChild(GROUP_SIZES);
    Parameter param2 = (Parameter) cxo.getChild(Parameter.class);
    cxo = xo.getChild(CoalescentLikelihoodParser.POPULATION_TREE);
    TreeModel treeModel = (TreeModel) cxo.getChild(TreeModel.class);
    int type = BayesianSkylineLikelihood.LINEAR_TYPE;
    String typeName = LINEAR;
    if (xo.hasAttribute(LINEAR) && !xo.getBooleanAttribute(LINEAR)) {
        type = BayesianSkylineLikelihood.STEPWISE_TYPE;
        typeName = STEPWISE;
    }
    if (xo.hasAttribute(TYPE)) {
        if (xo.getStringAttribute(TYPE).equalsIgnoreCase(STEPWISE)) {
            type = BayesianSkylineLikelihood.STEPWISE_TYPE;
            typeName = STEPWISE;
        } else if (xo.getStringAttribute(TYPE).equalsIgnoreCase(LINEAR)) {
            type = BayesianSkylineLikelihood.LINEAR_TYPE;
            typeName = LINEAR;
        } else if (xo.getStringAttribute(TYPE).equalsIgnoreCase(EXPONENTIAL)) {
            type = BayesianSkylineLikelihood.EXPONENTIAL_TYPE;
            typeName = EXPONENTIAL;
        } else
            throw new XMLParseException("Unknown Bayesian Skyline type: " + xo.getStringAttribute(TYPE));
    }
    if (param2.getDimension() > (treeModel.getExternalNodeCount() - 1)) {
        throw new XMLParseException("There are more groups (" + param2.getDimension() + ") than coalescent nodes in the tree (" + (treeModel.getExternalNodeCount() - 1) + ").");
    }
    Logger.getLogger("dr.evomodel").info("Bayesian skyline plot: " + param.getDimension() + " " + typeName + " control points");
    return new BayesianSkylineLikelihood(treeModel, param, param2, type);
}
Also used : TreeModel(dr.evomodel.tree.TreeModel) Parameter(dr.inference.model.Parameter) BayesianSkylineLikelihood(dr.evomodel.coalescent.BayesianSkylineLikelihood)
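
For comparison with the XML route, the likelihood can also be built directly with the constructor and constants visible above. The sketch below uses hypothetical parameter values; the group-size dimension must not exceed the tree's coalescent node count, as the check in the parser enforces.

// Direct-construction sketch; treeModel as in Examples 91-92, all values hypothetical.
Parameter popSizes = new Parameter.Default(new double[] { 1.0, 1.0 });
Parameter groupSizes = new Parameter.Default(new double[] { 1.0, 1.0 });
BayesianSkylineLikelihood skyline = new BayesianSkylineLikelihood(
        treeModel, popSizes, groupSizes, BayesianSkylineLikelihood.STEPWISE_TYPE);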

Example 95 with TreeModel

Use of dr.evomodel.tree.TreeModel in project beast-mcmc by beast-dev.

The class VariableDemographicModelParser, method parseXMLObject.

public Object parseXMLObject(XMLObject xo) throws XMLParseException {
    XMLObject cxo = xo.getChild(POPULATION_SIZES);
    Parameter popParam = (Parameter) cxo.getChild(Parameter.class);
    cxo = xo.getChild(INDICATOR_PARAMETER);
    Parameter indicatorParam = (Parameter) cxo.getChild(Parameter.class);
    cxo = xo.getChild(POPULATION_TREES);
    final int nc = cxo.getChildCount();
    TreeModel[] treeModels = new TreeModel[nc];
    double[] populationFactor = new double[nc];
    for (int k = 0; k < treeModels.length; ++k) {
        final XMLObject child = (XMLObject) cxo.getChild(k);
        populationFactor[k] = child.hasAttribute(PLOIDY) ? child.getDoubleAttribute(PLOIDY) : 1.0;
        treeModels[k] = (TreeModel) child.getChild(TreeModel.class);
    }
    VariableDemographicModel.Type type = VariableDemographicModel.Type.STEPWISE;
    if (xo.hasAttribute(TYPE)) {
        final String s = xo.getStringAttribute(TYPE);
        if (s.equalsIgnoreCase(VariableDemographicModel.Type.STEPWISE.toString())) {
            type = VariableDemographicModel.Type.STEPWISE;
        } else if (s.equalsIgnoreCase(VariableDemographicModel.Type.LINEAR.toString())) {
            type = VariableDemographicModel.Type.LINEAR;
        } else if (s.equalsIgnoreCase(VariableDemographicModel.Type.EXPONENTIAL.toString())) {
            type = VariableDemographicModel.Type.EXPONENTIAL;
        } else {
            throw new XMLParseException("Unknown Bayesian Skyline type: " + s);
        }
    }
    final boolean logSpace = xo.getAttribute(LOG_SPACE, false) || type == VariableDemographicModel.Type.EXPONENTIAL;
    final boolean useMid = xo.getAttribute(USE_MIDPOINTS, false);
    Logger.getLogger("dr.evomodel").info("Variable demographic: " + type.toString() + " control points");
    return new VariableDemographicModel(treeModels, populationFactor, popParam, indicatorParam, type, logSpace, useMid);
}
Also used : TreeModel(dr.evomodel.tree.TreeModel) VariableDemographicModel(dr.evomodel.coalescent.VariableDemographicModel) Parameter(dr.inference.model.Parameter)
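
As with the skyline parser, a direct-construction sketch that mirrors the return statement above (trees, ploidy factors, population sizes, indicators, type, logSpace, useMidpoints); the array contents and parameter dimensions are hypothetical.

// Direct-construction sketch with hypothetical values; treeModel as in earlier examples.
TreeModel[] trees = new TreeModel[] { treeModel };
double[] populationFactors = new double[] { 1.0 };
Parameter popSizes = new Parameter.Default(new double[] { 1.0, 1.0, 1.0 });
Parameter indicators = new Parameter.Default(new double[] { 0.0, 0.0 });
VariableDemographicModel model = new VariableDemographicModel(
        trees, populationFactors, popSizes, indicators,
        VariableDemographicModel.Type.STEPWISE, false, false);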

Aggregations

TreeModel (dr.evomodel.tree.TreeModel): 142
Parameter (dr.inference.model.Parameter): 62
BranchRateModel (dr.evomodel.branchratemodel.BranchRateModel): 28
ArrayList (java.util.ArrayList): 28
Tree (dr.evolution.tree.Tree): 26
NewickImporter (dr.evolution.io.NewickImporter): 21
GammaSiteRateModel (dr.evomodel.siteratemodel.GammaSiteRateModel): 20
FrequencyModel (dr.evomodel.substmodel.FrequencyModel): 19
HomogeneousBranchModel (dr.evomodel.branchmodel.HomogeneousBranchModel): 18
DefaultBranchRateModel (dr.evomodel.branchratemodel.DefaultBranchRateModel): 15
SubstitutionModel (dr.evomodel.substmodel.SubstitutionModel): 15
PatternList (dr.evolution.alignment.PatternList): 14
NodeRef (dr.evolution.tree.NodeRef): 14
Partition (dr.app.beagle.tools.Partition): 12
BranchModel (dr.evomodel.branchmodel.BranchModel): 12
IOException (java.io.IOException): 12
BeagleSequenceSimulator (dr.app.beagle.tools.BeagleSequenceSimulator): 11
Taxon (dr.evolution.util.Taxon): 11
TaxonList (dr.evolution.util.TaxonList): 11
Patterns (dr.evolution.alignment.Patterns): 9