Search in sources:

Example 1 with PatternList

Use of dr.evolution.alignment.PatternList in project beast-mcmc by beast-dev.

From class CheckPointTreeModifier, method incorporateAdditionalTaxa:

/**
 * Add the remaining taxa, which can be identified through the TreeDataLikelihood XML elements.
 */
public ArrayList<NodeRef> incorporateAdditionalTaxa(CheckPointUpdaterApp.UpdateChoice choice, BranchRates rateModel) {
    System.out.println("Tree before adding taxa:\n" + treeModel.toString() + "\n");
    ArrayList<NodeRef> newTaxaNodes = new ArrayList<NodeRef>();
    for (String str : newTaxaNames) {
        for (int i = 0; i < treeModel.getExternalNodeCount(); i++) {
            if (treeModel.getNodeTaxon(treeModel.getExternalNode(i)).getId().equals(str)) {
                newTaxaNodes.add(treeModel.getExternalNode(i));
                //always take into account Taxon dates vs. dates set through a TreeModel
                System.out.println(treeModel.getNodeTaxon(treeModel.getExternalNode(i)).getId() + " with height " + treeModel.getNodeHeight(treeModel.getExternalNode(i)) + " or " + treeModel.getNodeTaxon(treeModel.getExternalNode(i)).getHeight());
            }
        }
    }
    System.out.println("newTaxaNodes length = " + newTaxaNodes.size());
    ArrayList<Taxon> currentTaxa = new ArrayList<Taxon>();
    for (int i = 0; i < treeModel.getExternalNodeCount(); i++) {
        boolean taxonFound = false;
        for (String str : newTaxaNames) {
            if (str.equals((treeModel.getNodeTaxon(treeModel.getExternalNode(i))).getId())) {
                taxonFound = true;
            }
        }
        if (!taxonFound) {
            System.out.println("Adding " + treeModel.getNodeTaxon(treeModel.getExternalNode(i)).getId() + " to list of current taxa");
            currentTaxa.add(treeModel.getNodeTaxon(treeModel.getExternalNode(i)));
        }
    }
    System.out.println("Current taxa count = " + currentTaxa.size());
    //iterate over both current taxa and to be added taxa
    boolean originTaxon = true;
    for (Taxon taxon : currentTaxa) {
        if (taxon.getHeight() == 0.0) {
            originTaxon = false;
            System.out.println("Current taxon " + taxon.getId() + " has node height 0.0");
        }
    }
    for (NodeRef newTaxon : newTaxaNodes) {
        if (treeModel.getNodeTaxon(newTaxon).getHeight() == 0.0) {
            originTaxon = false;
            System.out.println("New taxon " + treeModel.getNodeTaxon(newTaxon).getId() + " has node height 0.0");
        }
    }
    //check the Tree(Data)Likelihoods in the connected set of likelihoods
    //focus on TreeDataLikelihood, which has getTree() to get the tree for each likelihood
    //also get the DataLikelihoodDelegate from TreeDataLikelihood
    ArrayList<TreeDataLikelihood> likelihoods = new ArrayList<TreeDataLikelihood>();
    ArrayList<Tree> trees = new ArrayList<Tree>();
    ArrayList<DataLikelihoodDelegate> delegates = new ArrayList<DataLikelihoodDelegate>();
    for (Likelihood likelihood : Likelihood.CONNECTED_LIKELIHOOD_SET) {
        if (likelihood instanceof TreeDataLikelihood) {
            likelihoods.add((TreeDataLikelihood) likelihood);
            trees.add(((TreeDataLikelihood) likelihood).getTree());
            delegates.add(((TreeDataLikelihood) likelihood).getDataLikelihoodDelegate());
        }
    }
    //suggested to go through TreeDataLikelihoodParser and give it an extra option to create a HashMap
    //keyed by the tree; am currently not overly fond of this approach
    ArrayList<PatternList> patternLists = new ArrayList<PatternList>();
    for (DataLikelihoodDelegate del : delegates) {
        if (del instanceof BeagleDataLikelihoodDelegate) {
            patternLists.add(((BeagleDataLikelihoodDelegate) del).getPatternList());
        } else if (del instanceof MultiPartitionDataLikelihoodDelegate) {
            MultiPartitionDataLikelihoodDelegate mpdld = (MultiPartitionDataLikelihoodDelegate) del;
            List<PatternList> list = mpdld.getPatternLists();
            for (PatternList pList : list) {
                patternLists.add(pList);
            }
        }
    }
    if (patternLists.size() == 0) {
        throw new RuntimeException("No patterns detected. Please make sure the XML file is BEAST 1.9 compatible.");
    }
    //aggregate all patterns to create distance matrix
    //TODO What about different trees for different partitions?
    Patterns patterns = new Patterns(patternLists.get(0));
    if (patternLists.size() > 1) {
        for (int i = 1; i < patternLists.size(); i++) {
            patterns.addPatterns(patternLists.get(i));
        }
    }
    //set the patterns for the distance matrix computations
    choice.setPatterns(patterns);
    //add new taxa one at a time
    System.out.println("Adding " + newTaxaNodes.size() + " taxa ...");
    for (NodeRef newTaxon : newTaxaNodes) {
        treeModel.setNodeHeight(newTaxon, treeModel.getNodeTaxon(newTaxon).getHeight());
        System.out.println("\nadding Taxon: " + newTaxon + " (height = " + treeModel.getNodeHeight(newTaxon) + ")");
        //check if this taxon has a more recent sampling date than all other nodes in the current TreeModel
        double offset = checkCurrentTreeNodes(newTaxon, treeModel.getRoot());
        System.out.println("Sampling date offset when adding " + newTaxon + " = " + offset);
        //AND set its current node height to 0.0 IF no originTaxon has been found
        if (offset < 0.0) {
            if (!originTaxon) {
                System.out.println("Updating all node heights with offset " + Math.abs(offset));
                updateAllTreeNodes(Math.abs(offset), treeModel.getRoot());
                treeModel.setNodeHeight(newTaxon, 0.0);
            }
        } else if (offset == 0.0) {
            if (!originTaxon) {
                treeModel.setNodeHeight(newTaxon, 0.0);
            }
        }
        //get the closest Taxon to the Taxon that needs to be added
        //take into account which taxa can currently be chosen
        Taxon closest = choice.getClosestTaxon(treeModel.getNodeTaxon(newTaxon), currentTaxa);
        System.out.println("\nclosest Taxon: " + closest + " with original height: " + closest.getHeight());
        //get the distance between these two taxa
        double distance = choice.getDistance(treeModel.getNodeTaxon(newTaxon), closest);
        System.out.println("at distance: " + distance);
        //TODO what if distance == 0.0 ??? how to choose closest taxon then (in absence of geo info)?
        //find the NodeRef for the closest Taxon (do not rely on node numbering)
        NodeRef closestRef = null;
        //careful with node numbering and subtract number of new taxa
        for (int i = 0; i < treeModel.getExternalNodeCount(); i++) {
            if (treeModel.getNodeTaxon(treeModel.getExternalNode(i)) == closest) {
                closestRef = treeModel.getExternalNode(i);
            }
        }
        System.out.println(closestRef + " with height " + treeModel.getNodeHeight(closestRef));
        //System.out.println("trying to set node height: " + closestRef + " from " + treeModel.getNodeHeight(closestRef) + " to " + closest.getHeight());
        //treeModel.setNodeHeight(closestRef, closest.getHeight());
        double timeForDistance = distance / rateModel.getBranchRate(treeModel, closestRef);
        System.out.println("timeForDistance = " + timeForDistance);
        //get parent node of branch that will be split
        NodeRef parent = treeModel.getParent(closestRef);
        //determine height of new node
        double insertHeight;
        if (treeModel.getNodeHeight(closestRef) == treeModel.getNodeHeight(newTaxon)) {
            insertHeight = treeModel.getNodeHeight(closestRef) + timeForDistance / 2.0;
            System.out.println("treeModel.getNodeHeight(closestRef) == treeModel.getNodeHeight(newTaxon): " + insertHeight);
            if (insertHeight >= treeModel.getNodeHeight(parent)) {
                insertHeight = treeModel.getNodeHeight(closestRef) + EPSILON * (treeModel.getNodeHeight(parent) - treeModel.getNodeHeight(closestRef));
            }
        } else {
            double remainder = (timeForDistance - Math.abs(treeModel.getNodeHeight(closestRef) - treeModel.getNodeHeight(newTaxon))) / 2.0;
            if (remainder > 0) {
                insertHeight = Math.max(treeModel.getNodeHeight(closestRef), treeModel.getNodeHeight(newTaxon)) + remainder;
                System.out.println("remainder > 0: " + insertHeight);
                if (insertHeight >= treeModel.getNodeHeight(parent)) {
                    insertHeight = treeModel.getNodeHeight(closestRef) + EPSILON * (treeModel.getNodeHeight(parent) - treeModel.getNodeHeight(closestRef));
                }
            } else {
                insertHeight = EPSILON * (treeModel.getNodeHeight(parent) - Math.max(treeModel.getNodeHeight(closestRef), treeModel.getNodeHeight(newTaxon)));
                insertHeight += Math.max(treeModel.getNodeHeight(closestRef), treeModel.getNodeHeight(newTaxon));
                System.out.println("remainder <= 0: " + insertHeight);
            }
        }
        System.out.println("insert at height: " + insertHeight);
        //pass on all the necessary variables to a method that adds the new taxon to the tree
        addTaxonAlongBranch(newTaxon, parent, closestRef, insertHeight);
        //option to print tree after each taxon addition
        System.out.println("\nTree after adding taxon " + newTaxon + ":\n" + treeModel.toString());
        //add newly added Taxon to list of current taxa
        currentTaxa.add(treeModel.getNodeTaxon(newTaxon));
    }
    return newTaxaNodes;
}
Also used: Likelihood(dr.inference.model.Likelihood) TreeDataLikelihood(dr.evomodel.treedatalikelihood.TreeDataLikelihood) Taxon(dr.evolution.util.Taxon) ArrayList(java.util.ArrayList) PatternList(dr.evolution.alignment.PatternList) BeagleDataLikelihoodDelegate(dr.evomodel.treedatalikelihood.BeagleDataLikelihoodDelegate) NodeRef(dr.evolution.tree.NodeRef) MultiPartitionDataLikelihoodDelegate(dr.evomodel.treedatalikelihood.MultiPartitionDataLikelihoodDelegate) DataLikelihoodDelegate(dr.evomodel.treedatalikelihood.DataLikelihoodDelegate) Tree(dr.evolution.tree.Tree) List(java.util.List) Patterns(dr.evolution.alignment.Patterns)
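
The delegate handling in this example can be condensed into a small helper that deals only with PatternList extraction. This is a sketch based on the calls already used above (getPatternList() on BeagleDataLikelihoodDelegate and getPatternLists() on MultiPartitionDataLikelihoodDelegate); the helper name collectPatternLists is illustrative and is not part of CheckPointTreeModifier.

private static List<PatternList> collectPatternLists(List<DataLikelihoodDelegate> delegates) {
    List<PatternList> patternLists = new ArrayList<PatternList>();
    for (DataLikelihoodDelegate del : delegates) {
        if (del instanceof BeagleDataLikelihoodDelegate) {
            // a single-partition delegate exposes one pattern list
            patternLists.add(((BeagleDataLikelihoodDelegate) del).getPatternList());
        } else if (del instanceof MultiPartitionDataLikelihoodDelegate) {
            // a multi-partition delegate exposes one pattern list per partition
            patternLists.addAll(((MultiPartitionDataLikelihoodDelegate) del).getPatternLists());
        }
    }
    return patternLists;
}

The returned lists are then merged into a single Patterns object (via the Patterns copy constructor and addPatterns, as shown above) before the distance matrix is computed.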

Example 2 with PatternList

Use of dr.evolution.alignment.PatternList in project beast-mcmc by beast-dev.

From class MultiPartitionTreeLikelihood, method setPartials:

/**
 * Sets the partials from a sequence in an alignment.
 *
 * @param beagle        beagle
 * @param sequenceIndex sequenceIndex
 * @param nodeIndex     nodeIndex
 */
protected final void setPartials(Beagle beagle, int sequenceIndex, int nodeIndex) {
    double[] partials = new double[patternCount * stateCount * categoryCount];
    boolean[] stateSet;
    int v = 0;
    for (PatternList patternList : patternLists) {
        for (int i = 0; i < patternList.getPatternCount(); i++) {
            int state = patternList.getPatternState(sequenceIndex, i);
            stateSet = dataType.getStateSet(state);
            for (int j = 0; j < stateCount; j++) {
                if (stateSet[j]) {
                    partials[v] = 1.0;
                } else {
                    partials[v] = 0.0;
                }
                v++;
            }
        }
    }
    // if there is more than one category then replicate the partials for each
    int n = patternCount * stateCount;
    int k = n;
    for (int i = 1; i < categoryCount; i++) {
        System.arraycopy(partials, 0, partials, k, n);
        k += n;
    }
    beagle.setPartials(nodeIndex, partials);
}
Also used: PatternList(dr.evolution.alignment.PatternList)
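
The flat partials layout built by setPartials can be illustrated for a single pattern of a single sequence. This sketch reuses only calls shown in these examples (getPatternState and DataType.getStateSet); patternList, sequenceIndex and patternIndex are assumed to be in scope, and getDataType() is assumed to return the pattern list's data type, as elsewhere on this page.

// expand one observed (possibly ambiguous) state into a 0/1 partials vector of length stateCount
int state = patternList.getPatternState(sequenceIndex, patternIndex);
boolean[] stateSet = patternList.getDataType().getStateSet(state);
double[] tipPartials = new double[stateSet.length];
for (int j = 0; j < stateSet.length; j++) {
    tipPartials[j] = stateSet[j] ? 1.0 : 0.0;
}

In the full method these stateCount values are written consecutively for every pattern of every partition, and the resulting block of patternCount * stateCount values is then replicated once per rate category with System.arraycopy.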

Example 3 with PatternList

Use of dr.evolution.alignment.PatternList in project beast-mcmc by beast-dev.

From class MultiPartitionDataLikelihoodDelegate, method setStates:

/**
 * Sets the tip states from a sequence in an alignment.
 *
 * @param beagle        beagle
 * @param patternLists  patternLists
 * @param taxonId       taxonId
 * @param nodeIndex     nodeIndex
 */
private final void setStates(Beagle beagle, List<PatternList> patternLists, String taxonId, int nodeIndex) throws TaxonList.MissingTaxonException {
    int[] states = new int[totalPatternCount];
    int v = 0;
    for (PatternList patternList : patternLists) {
        int sequenceIndex = patternList.getTaxonIndex(taxonId);
        if (sequenceIndex == -1) {
            throw new TaxonList.MissingTaxonException("Taxon, " + taxonId + ", not found in patternList, " + patternList.getId());
        }
        for (int i = 0; i < patternList.getPatternCount(); i++) {
            states[v] = patternList.getPatternState(sequenceIndex, i);
            v++;
        }
    }
    beagle.setTipStates(nodeIndex, states);
}
Also used: PatternList(dr.evolution.alignment.PatternList)
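
The sequential fill above requires that totalPatternCount (a field of the delegate) equals the sum of the per-partition pattern counts. A minimal sketch of that relationship, assuming only the getPatternCount() call used in these examples:

// the length of the per-tip state array is the sum of the partition pattern counts
int totalPatternCount = 0;
for (PatternList patternList : patternLists) {
    totalPatternCount += patternList.getPatternCount();
}
int[] states = new int[totalPatternCount];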

Example 4 with PatternList

Use of dr.evolution.alignment.PatternList in project beast-mcmc by beast-dev.

From class DataLikelihoodTester2, method main:

public static void main(String[] args) {
    // turn off logging to avoid screen noise...
    Logger logger = Logger.getLogger("dr");
    logger.setUseParentHandlers(false);
    SimpleAlignment alignment = createAlignment(sequences, Nucleotides.INSTANCE);
    TreeModel treeModel;
    try {
        treeModel = createSpecifiedTree("((human:0.1,chimp:0.1):0.1,gorilla:0.2)");
    } catch (Exception e) {
        throw new RuntimeException("Unable to parse Newick tree");
    }
    System.out.print("\nTest BeagleTreeLikelihood (kappa = 1): ");
    //substitutionModel
    Parameter freqs = new Parameter.Default(new double[] { 0.25, 0.25, 0.25, 0.25 });
    Parameter kappa = new Parameter.Default(HKYParser.KAPPA, 1.0, 0, 100);
    FrequencyModel f = new FrequencyModel(Nucleotides.INSTANCE, freqs);
    HKY hky = new HKY(kappa, f);
    //siteModel
    double alpha = 0.5;
    GammaSiteRateModel siteRateModel = new GammaSiteRateModel("gammaModel", alpha, 4);
    //        GammaSiteRateModel siteRateModel = new GammaSiteRateModel("siteRateModel");
    siteRateModel.setSubstitutionModel(hky);
    Parameter mu = new Parameter.Default(GammaSiteModelParser.SUBSTITUTION_RATE, 1.0, 0, Double.POSITIVE_INFINITY);
    siteRateModel.setRelativeRateParameter(mu);
    FrequencyModel f2 = new FrequencyModel(Nucleotides.INSTANCE, freqs);
    Parameter kappa2 = new Parameter.Default(HKYParser.KAPPA, 10.0, 0, 100);
    HKY hky2 = new HKY(kappa2, f2);
    GammaSiteRateModel siteRateModel2 = new GammaSiteRateModel("gammaModel", alpha, 4);
    siteRateModel2.setSubstitutionModel(hky2);
    siteRateModel2.setRelativeRateParameter(mu);
    //treeLikelihood
    SitePatterns patterns = new SitePatterns(alignment, null, 0, -1, 1, true);
    BranchModel branchModel = new HomogeneousBranchModel(siteRateModel.getSubstitutionModel(), siteRateModel.getSubstitutionModel().getFrequencyModel());
    BranchModel branchModel2 = new HomogeneousBranchModel(siteRateModel2.getSubstitutionModel(), siteRateModel2.getSubstitutionModel().getFrequencyModel());
    BranchRateModel branchRateModel = new DefaultBranchRateModel();
    BeagleTreeLikelihood treeLikelihood = new BeagleTreeLikelihood(patterns, treeModel, branchModel, siteRateModel, branchRateModel, null, false, PartialsRescalingScheme.AUTO, true);
    double logLikelihood = treeLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    System.out.print("\nTest BeagleDataLikelihoodDelegate (kappa = 1): ");
    BeagleDataLikelihoodDelegate dataLikelihoodDelegate = new BeagleDataLikelihoodDelegate(treeModel, patterns, branchModel, siteRateModel, false, PartialsRescalingScheme.NONE, false);
    TreeDataLikelihood treeDataLikelihood = new TreeDataLikelihood(dataLikelihoodDelegate, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    hky.setKappa(5.0);
    System.out.print("\nTest BeagleDataLikelihoodDelegate (kappa = 5): ");
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    System.out.print("\nTest BeagleDataLikelihoodDelegate (kappa = 10): ");
    dataLikelihoodDelegate = new BeagleDataLikelihoodDelegate(treeModel, patterns, branchModel2, siteRateModel2, false, PartialsRescalingScheme.NONE, false);
    treeDataLikelihood = new TreeDataLikelihood(dataLikelihoodDelegate, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    hky2.setKappa(11.0);
    System.out.print("\nTest BeagleDataLikelihoodDelegate (kappa = 11): ");
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    hky.setKappa(1.0);
    hky2.setKappa(10.0);
    MultiPartitionDataLikelihoodDelegate multiPartitionDataLikelihoodDelegate;
    System.out.print("\nTest MultiPartitionDataLikelihoodDelegate 1 partition (kappa = 1):");
    multiPartitionDataLikelihoodDelegate = new MultiPartitionDataLikelihoodDelegate(treeModel, Collections.singletonList((PatternList) patterns), Collections.singletonList((BranchModel) branchModel), Collections.singletonList((SiteRateModel) siteRateModel), true, PartialsRescalingScheme.NONE, false);
    treeDataLikelihood = new TreeDataLikelihood(multiPartitionDataLikelihoodDelegate, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    hky.setKappa(5.0);
    System.out.print("\nTest MultiPartitionDataLikelihoodDelegate 1 partition (kappa = 5):");
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    hky.setKappa(1.0);
    System.out.print("\nTest MultiPartitionDataLikelihoodDelegate 1 partition (kappa = 10):");
    multiPartitionDataLikelihoodDelegate = new MultiPartitionDataLikelihoodDelegate(treeModel, Collections.singletonList((PatternList) patterns), Collections.singletonList((BranchModel) branchModel2), Collections.singletonList((SiteRateModel) siteRateModel2), true, PartialsRescalingScheme.NONE, false);
    treeDataLikelihood = new TreeDataLikelihood(multiPartitionDataLikelihoodDelegate, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    System.out.print("\nTest MultiPartitionDataLikelihoodDelegate 2 partitions (kappa = 1, 10): ");
    List<PatternList> patternLists = new ArrayList<PatternList>();
    patternLists.add(patterns);
    patternLists.add(patterns);
    List<SiteRateModel> siteRateModels = new ArrayList<SiteRateModel>();
    siteRateModels.add(siteRateModel);
    siteRateModels.add(siteRateModel2);
    List<BranchModel> branchModels = new ArrayList<BranchModel>();
    branchModels.add(branchModel);
    branchModels.add(branchModel2);
    multiPartitionDataLikelihoodDelegate = new MultiPartitionDataLikelihoodDelegate(treeModel, patternLists, branchModels, siteRateModels, true, PartialsRescalingScheme.NONE, false);
    treeDataLikelihood = new TreeDataLikelihood(multiPartitionDataLikelihoodDelegate, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + " (NOT OK: this is 2x the logLikelihood of the 2nd partition)\n\n");
    System.exit(0);
    //START ADDITIONAL TEST #1 - Guy Baele
    System.out.println("-- Test #1 SiteRateModels -- ");
    //alpha in partition 1 reject followed by alpha in partition 2 reject
    System.out.print("Adjust alpha in partition 1: ");
    siteRateModel.setAlpha(0.4);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    System.out.print("Return alpha in partition 1 to original value: ");
    siteRateModel.setAlpha(0.5);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + " (i.e. reject: OK)\n");
    System.out.print("Adjust alpha in partition 2: ");
    siteRateModel2.setAlpha(0.35);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    System.out.print("Return alpha in partition 2 to original value: ");
    siteRateModel2.setAlpha(0.5);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + " (i.e. reject: OK)\n");
    //alpha in partition 1 accept followed by alpha in partition 2 accept
    System.out.print("Adjust alpha in partition 1: ");
    siteRateModel.setAlpha(0.4);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    System.out.print("Adjust alpha in partition 2: ");
    siteRateModel2.setAlpha(0.35);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + " (NOT OK: same logLikelihood as only setting alpha in partition 2)");
    System.out.print("Return alpha in partition 1 to original value: ");
    siteRateModel.setAlpha(0.5);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + " (NOT OK: alpha in partition 2 has not been returned to original value yet)");
    System.out.print("Return alpha in partition 2 to original value: ");
    siteRateModel2.setAlpha(0.5);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + "\n");
    //adjusting alphas in both partitions without explicitly calling getLogLikelihood() in between
    System.out.print("Adjust both alphas in partitions 1 and 2: ");
    siteRateModel.setAlpha(0.4);
    siteRateModel2.setAlpha(0.35);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    System.out.print("Return alpha in partition 2 to original value: ");
    siteRateModel2.setAlpha(0.5);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + " (NOT OK: alpha in partition 1 has not been returned to original value yet)");
    System.out.print("Return alpha in partition 1 to original value: ");
    siteRateModel.setAlpha(0.5);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + "\n\n");
    //END ADDITIONAL TEST - Guy Baele
    //START ADDITIONAL TEST #2 - Guy Baele
    System.out.println("-- Test #2 SiteRateModels -- ");
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    //1 siteRateModel shared across 2 partitions
    siteRateModels = new ArrayList<SiteRateModel>();
    siteRateModels.add(siteRateModel);
    multiPartitionDataLikelihoodDelegate = new MultiPartitionDataLikelihoodDelegate(treeModel, patternLists, branchModels, siteRateModels, true, PartialsRescalingScheme.NONE, false);
    treeDataLikelihood = new TreeDataLikelihood(multiPartitionDataLikelihoodDelegate, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + "\n");
    System.out.print("Adjust alpha in shared siteRateModel: ");
    siteRateModel.setAlpha(0.4);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + " (NOT OK: same logLikelihood as only adjusted alpha for partition 1)");
    siteRateModel.setAlpha(0.5);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + "\n\n");
    //END ADDITIONAL TEST - Guy Baele
    //START ADDITIONAL TEST #3 - Guy Baele
    System.out.println("-- Test #3 SiteRateModels -- ");
    siteRateModel = new GammaSiteRateModel("gammaModel");
    siteRateModel.setSubstitutionModel(hky);
    siteRateModel.setRelativeRateParameter(mu);
    siteRateModel2 = new GammaSiteRateModel("gammaModel2");
    siteRateModel2.setSubstitutionModel(hky2);
    siteRateModel2.setRelativeRateParameter(mu);
    siteRateModels = new ArrayList<SiteRateModel>();
    siteRateModels.add(siteRateModel);
    siteRateModels.add(siteRateModel2);
    multiPartitionDataLikelihoodDelegate = new MultiPartitionDataLikelihoodDelegate(treeModel, patternLists, branchModels, siteRateModels, true, PartialsRescalingScheme.NONE, false);
    treeDataLikelihood = new TreeDataLikelihood(multiPartitionDataLikelihoodDelegate, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + "\n");
    System.out.print("Adjust kappa in partition 1: ");
    hky.setKappa(5.0);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + " (NOT OK: logLikelihood has not changed?)");
    System.out.print("Return kappa in partition 1 to original value: ");
    hky.setKappa(1.0);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + "\n");
    System.out.print("Adjust kappa in partition 2: ");
    hky2.setKappa(11.0);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    System.out.print("Return kappa in partition 2 to original value: ");
    hky2.setKappa(10.0);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + " (i.e. reject: OK)\n\n");
    //END ADDITIONAL TEST - Guy Baele
    //START ADDITIONAL TEST #4 - Guy Baele
    System.out.println("-- Test #4 SiteRateModels -- ");
    SimpleAlignment secondAlignment = createAlignment(moreSequences, Nucleotides.INSTANCE);
    SitePatterns morePatterns = new SitePatterns(secondAlignment, null, 0, -1, 1, true);
    BeagleDataLikelihoodDelegate dataLikelihoodDelegateOne = new BeagleDataLikelihoodDelegate(treeModel, patterns, branchModel, siteRateModel, false, PartialsRescalingScheme.NONE, false);
    TreeDataLikelihood treeDataLikelihoodOne = new TreeDataLikelihood(dataLikelihoodDelegateOne, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihoodOne.getLogLikelihood();
    System.out.println("\nBeagleDataLikelihoodDelegate logLikelihood partition 1 (kappa = 1) = " + logLikelihood);
    hky.setKappa(10.0);
    logLikelihood = treeDataLikelihoodOne.getLogLikelihood();
    System.out.println("BeagleDataLikelihoodDelegate logLikelihood partition 1 (kappa = 10) = " + logLikelihood);
    hky.setKappa(1.0);
    BeagleDataLikelihoodDelegate dataLikelihoodDelegateTwo = new BeagleDataLikelihoodDelegate(treeModel, morePatterns, branchModel2, siteRateModel2, false, PartialsRescalingScheme.NONE, false);
    TreeDataLikelihood treeDataLikelihoodTwo = new TreeDataLikelihood(dataLikelihoodDelegateTwo, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihoodTwo.getLogLikelihood();
    System.out.println("BeagleDataLikelihoodDelegate logLikelihood partition 2 (kappa = 10) = " + logLikelihood + "\n");
    multiPartitionDataLikelihoodDelegate = new MultiPartitionDataLikelihoodDelegate(treeModel, Collections.singletonList((PatternList) patterns), Collections.singletonList((BranchModel) branchModel), Collections.singletonList((SiteRateModel) siteRateModel), true, PartialsRescalingScheme.NONE, false);
    treeDataLikelihood = new TreeDataLikelihood(multiPartitionDataLikelihoodDelegate, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.print("Test MultiPartitionDataLikelihoodDelegate 1st partition (kappa = 1):");
    System.out.println("logLikelihood = " + logLikelihood);
    hky.setKappa(10.0);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.print("Test MultiPartitionDataLikelihoodDelegate 1st partition (kappa = 10):");
    System.out.println("logLikelihood = " + logLikelihood);
    hky.setKappa(1.0);
    multiPartitionDataLikelihoodDelegate = new MultiPartitionDataLikelihoodDelegate(treeModel, Collections.singletonList((PatternList) morePatterns), Collections.singletonList((BranchModel) branchModel2), Collections.singletonList((SiteRateModel) siteRateModel2), true, PartialsRescalingScheme.NONE, false);
    treeDataLikelihood = new TreeDataLikelihood(multiPartitionDataLikelihoodDelegate, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.print("Test MultiPartitionDataLikelihoodDelegate 2nd partition (kappa = 10):");
    System.out.println("logLikelihood = " + logLikelihood + "\n");
    patternLists = new ArrayList<PatternList>();
    patternLists.add(patterns);
    patternLists.add(morePatterns);
    multiPartitionDataLikelihoodDelegate = new MultiPartitionDataLikelihoodDelegate(treeModel, patternLists, branchModels, siteRateModels, true, PartialsRescalingScheme.NONE, false);
    treeDataLikelihood = new TreeDataLikelihood(multiPartitionDataLikelihoodDelegate, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.print("Test MultiPartitionDataLikelihoodDelegate 2 partitions (kappa = 1, 10): ");
    System.out.println("logLikelihood = " + logLikelihood + " (NOT OK: should be the sum of both separate logLikelihoods)\nKappa value of partition 2 is used to compute logLikelihood for both partitions?");
//END ADDITIONAL TEST - Guy Baele
}
Also used: FrequencyModel(dr.evomodel.substmodel.FrequencyModel) ArrayList(java.util.ArrayList) PatternList(dr.evolution.alignment.PatternList) Logger(java.util.logging.Logger) HomogeneousBranchModel(dr.evomodel.branchmodel.HomogeneousBranchModel) BranchModel(dr.evomodel.branchmodel.BranchModel) DefaultBranchRateModel(dr.evomodel.branchratemodel.DefaultBranchRateModel) SiteRateModel(dr.evomodel.siteratemodel.SiteRateModel) GammaSiteRateModel(dr.evomodel.siteratemodel.GammaSiteRateModel) TreeModel(dr.evomodel.tree.TreeModel) SimpleAlignment(dr.evolution.alignment.SimpleAlignment) SitePatterns(dr.evolution.alignment.SitePatterns) BeagleTreeLikelihood(dr.evomodel.treelikelihood.BeagleTreeLikelihood) BranchRateModel(dr.evomodel.branchratemodel.BranchRateModel) HKY(dr.evomodel.substmodel.nucleotide.HKY) Parameter(dr.inference.model.Parameter)
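
Stripped of the test scaffolding, the single-partition setup exercised repeatedly in this test reduces to the construction chain below. Every constructor and argument value is taken verbatim from the code above; only the local variable names are illustrative.

// alignment patterns -> BEAGLE data-likelihood delegate -> tree data likelihood
SitePatterns sitePatterns = new SitePatterns(alignment, null, 0, -1, 1, true);
BeagleDataLikelihoodDelegate delegate = new BeagleDataLikelihoodDelegate(
        treeModel, sitePatterns, branchModel, siteRateModel,
        false, PartialsRescalingScheme.NONE, false);
TreeDataLikelihood likelihood = new TreeDataLikelihood(delegate, treeModel, branchRateModel);
double logL = likelihood.getLogLikelihood();

The MultiPartitionDataLikelihoodDelegate variants replace the single PatternList, BranchModel and SiteRateModel arguments with parallel lists, which is where the test observes the logLikelihood discrepancies flagged as NOT OK.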

Example 5 with PatternList

Use of dr.evolution.alignment.PatternList in project beast-mcmc by beast-dev.

From class MultiPartitionDataLikelihoodParser, method parseXMLObject:

public Object parseXMLObject(XMLObject xo) throws XMLParseException {
    boolean useAmbiguities = xo.getAttribute(USE_AMBIGUITIES, false);
    int instanceCount = xo.getAttribute(INSTANCE_COUNT, 1);
    if (instanceCount < 1) {
        instanceCount = 1;
    }
    String ic = System.getProperty(BEAGLE_INSTANCE_COUNT);
    if (ic != null && ic.length() > 0) {
        instanceCount = Integer.parseInt(ic);
    }
    if (DEBUG) {
        System.out.println("instanceCount: " + instanceCount);
    }
    List<PatternList> patternLists = xo.getAllChildren(PatternList.class);
    TreeModel treeModel = (TreeModel) xo.getChild(TreeModel.class);
    List<SiteRateModel> siteRateModels = xo.getAllChildren(SiteRateModel.class);
    FrequencyModel rootFreqModel = (FrequencyModel) xo.getChild(FrequencyModel.class);
    List<BranchModel> branchModels = xo.getAllChildren(BranchModel.class);
    if (branchModels == null) {
        if (DEBUG) {
            System.out.println("branchModels == null");
        }
        branchModels = new ArrayList<BranchModel>();
        List<SubstitutionModel> substitutionModels = xo.getAllChildren(SubstitutionModel.class);
        if (substitutionModels == null) {
            if (DEBUG) {
                System.out.println("substitutionModels == null");
            }
            for (SiteRateModel siteRateModel : siteRateModels) {
                SubstitutionModel substitutionModel = ((GammaSiteRateModel) siteRateModel).getSubstitutionModel();
                if (substitutionModel == null) {
                    throw new XMLParseException("No substitution model available for TreeDataLikelihood: " + xo.getId());
                }
                branchModels.add(new HomogeneousBranchModel(substitutionModel, rootFreqModel));
            }
        }
        if (DEBUG) {
            System.out.println("branchModels size: " + branchModels.size());
        }
        for (BranchModel branchModel : branchModels) {
            System.out.println("  " + branchModel.getId() + "  " + branchModel.getModelName());
        }
    }
    BranchRateModel branchRateModel = (BranchRateModel) xo.getChild(BranchRateModel.class);
    if (branchRateModel == null) {
        branchRateModel = new DefaultBranchRateModel();
    }
    if (DEBUG) {
        System.out.println("BranchRateModel: " + branchRateModel.getId());
    }
    TipStatesModel tipStatesModel = (TipStatesModel) xo.getChild(TipStatesModel.class);
    PartialsRescalingScheme scalingScheme = PartialsRescalingScheme.DEFAULT;
    boolean delayScaling = true;
    if (xo.hasAttribute(SCALING_SCHEME)) {
        scalingScheme = PartialsRescalingScheme.parseFromString(xo.getStringAttribute(SCALING_SCHEME));
        if (scalingScheme == null)
            throw new XMLParseException("Unknown scaling scheme '" + xo.getStringAttribute(SCALING_SCHEME) + "' in " + "OldBeagleTreeLikelihood object '" + xo.getId());
    }
    if (xo.hasAttribute(DELAY_SCALING)) {
        delayScaling = xo.getBooleanAttribute(DELAY_SCALING);
    }
    if (instanceCount == 1) {
        if (DEBUG) {
            System.out.println("instanceCount == 1");
        }
        return createTreeDataLikelihood(patternLists, treeModel, branchModels, siteRateModels, branchRateModel, tipStatesModel, useAmbiguities, scalingScheme, delayScaling, xo);
    }
    if (tipStatesModel != null) {
        throw new XMLParseException("BEAGLE_INSTANCES option cannot be used with a TipStateModel (i.e., a sequence error model).");
    }
    List<PatternList> patternInstanceLists = new ArrayList<PatternList>();
    for (int j = 0; j < patternLists.size(); j++) {
        for (int i = 0; i < instanceCount; i++) {
            patternInstanceLists.add(new Patterns(patternLists.get(j), i, instanceCount));
        }
    }
    return createTreeDataLikelihood(patternLists, treeModel, branchModels, siteRateModels, branchRateModel, null, useAmbiguities, scalingScheme, delayScaling, xo);
}
Also used: FrequencyModel(dr.evomodel.substmodel.FrequencyModel) PatternList(dr.evolution.alignment.PatternList) ArrayList(java.util.ArrayList) HomogeneousBranchModel(dr.evomodel.branchmodel.HomogeneousBranchModel) GammaSiteRateModel(dr.evomodel.siteratemodel.GammaSiteRateModel) PartialsRescalingScheme(dr.evomodel.treelikelihood.PartialsRescalingScheme) BranchModel(dr.evomodel.branchmodel.BranchModel) SubstitutionModel(dr.evomodel.substmodel.SubstitutionModel) SiteRateModel(dr.evomodel.siteratemodel.SiteRateModel) DefaultBranchRateModel(dr.evomodel.branchratemodel.DefaultBranchRateModel) TipStatesModel(dr.evomodel.tipstatesmodel.TipStatesModel) TreeModel(dr.evomodel.tree.TreeModel) BranchRateModel(dr.evomodel.branchratemodel.BranchRateModel) Patterns(dr.evolution.alignment.Patterns)
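
When instanceCount > 1, the parser splits each PatternList into per-instance subsets using the Patterns(PatternList, int, int) constructor seen above; the exact partitioning is whatever that constructor implements (its arguments suggest subset i of instanceCount). A sketch of just that splitting step:

List<PatternList> patternInstanceLists = new ArrayList<PatternList>();
for (PatternList patternList : patternLists) {
    for (int i = 0; i < instanceCount; i++) {
        // take the i-th of instanceCount subsets of this partition's patterns
        patternInstanceLists.add(new Patterns(patternList, i, instanceCount));
    }
}

Note that the final createTreeDataLikelihood call in the parser above passes the original patternLists rather than the patternInstanceLists built here.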

Aggregations

PatternList (dr.evolution.alignment.PatternList): 22
TreeModel (dr.evomodel.tree.TreeModel): 14
BranchRateModel (dr.evomodel.branchratemodel.BranchRateModel): 12
Parameter (dr.inference.model.Parameter): 11
GammaSiteRateModel (dr.evomodel.siteratemodel.GammaSiteRateModel): 7
FrequencyModel (dr.evomodel.substmodel.FrequencyModel): 7
BranchModel (dr.evomodel.branchmodel.BranchModel): 6
HomogeneousBranchModel (dr.evomodel.branchmodel.HomogeneousBranchModel): 6
ArrayList (java.util.ArrayList): 6
TipStatesModel (dr.evomodel.tipstatesmodel.TipStatesModel): 5
Patterns (dr.evolution.alignment.Patterns): 4
SitePatterns (dr.evolution.alignment.SitePatterns): 4
Taxon (dr.evolution.util.Taxon): 4
DefaultBranchRateModel (dr.evomodel.branchratemodel.DefaultBranchRateModel): 4
SiteRateModel (dr.evomodel.siteratemodel.SiteRateModel): 4
SubstitutionModel (dr.evomodel.substmodel.SubstitutionModel): 4
BeagleTreeLikelihood (dr.evomodel.treelikelihood.BeagleTreeLikelihood): 4
PartialsRescalingScheme (dr.evomodel.treelikelihood.PartialsRescalingScheme): 4
SiteModel (dr.oldevomodel.sitemodel.SiteModel): 4
DataType (dr.evolution.datatype.DataType): 3