Example 46 with Node

use of beast.evolution.tree.Node in project bacter by tgvaughan.

the class AddRemoveRedundantConversion method getRedundantConversions.

/**
 * Obtain list of redundant conversions.
 *
 * @param cfNode node identifying an edge on the clonal frame (CF)
 * @return list of conversions deemed redundant with respect to that edge
 */
private List<Conversion> getRedundantConversions(Node cfNode) {
    Node cfParent = cfNode.getParent();
    List<Conversion> redundantConversions = new ArrayList<>();
    double maxL = acg.getRoot().getHeight() * apertureInput.get();
    for (Locus locus : acg.getLoci()) {
        for (Conversion conv : acg.getConversions(locus)) {
            if ((conv.getNode1() == cfNode || conv.getNode1().getParent() == cfNode)
                    && Math.abs(conv.getHeight1() - cfNode.getHeight()) < maxL
                    && (conv.getNode2() == cfParent || conv.getNode2().getParent() == cfParent)
                    && Math.abs(conv.getHeight2() - cfParent.getHeight()) < maxL) {
                redundantConversions.add(conv);
            }
        }
    }
    return redundantConversions;
}
Also used : Node(beast.evolution.tree.Node) ArrayList(java.util.ArrayList) Locus(bacter.Locus) Conversion(bacter.Conversion)
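The window maxL above is a fraction of the clonal-frame root height, controlled by apertureInput. A minimal sketch of how that operator input might be declared, assuming the XML attribute name and default value (neither appears on this page):

// Hypothetical declaration using beast.core.Input; the name and default are assumptions.
public Input<Double> apertureInput = new Input<>("aperture",
        "Fraction of the root height within which conversion attachment points "
        + "count as redundant with respect to a clonal-frame edge.",
        0.1);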

Example 47 with Node

use of beast.evolution.tree.Node in project bacter by tgvaughan.

the class AddRemoveRedundantConversion method proposal.

@Override
public double proposal() {
    double logHGF = 0;
    // Select non-root CF node
    Node cfNode = acg.getNode(Randomizer.nextInt(acg.getNodeCount() - 1));
    Node cfParent = cfNode.getParent();
    double maxL = apertureInput.get() * acg.getRoot().getHeight();
    if (Randomizer.nextBoolean()) {
        // Add redundant conversion
        Conversion newConv = new Conversion();
        double L = Math.min(getEdgeLength(cfNode), maxL);
        for (Node child : cfNode.getChildren()) L += Math.min(getEdgeLength(child), maxL);
        logHGF -= Math.log(1.0 / L);
        double fromPoint = L * Randomizer.nextDouble();
        if (fromPoint < Math.min(getEdgeLength(cfNode), maxL)) {
            newConv.setNode1(cfNode);
            newConv.setHeight1(cfNode.getHeight() + fromPoint);
        } else {
            fromPoint -= Math.min(getEdgeLength(cfNode), maxL);
            for (Node child : cfNode.getChildren()) {
                if (fromPoint < Math.min(getEdgeLength(child), maxL)) {
                    newConv.setNode1(child);
                    newConv.setHeight1(cfNode.getHeight() - fromPoint);
                    break;
                }
                fromPoint -= Math.min(getEdgeLength(child), maxL);
            }
        }
        L = Math.min(getEdgeLength(cfParent), maxL);
        for (Node child : cfParent.getChildren()) L += Math.min(getEdgeLength(child), maxL);
        logHGF -= Math.log(1.0 / L);
        double toPoint = L * Randomizer.nextDouble();
        if (toPoint < Math.min(getEdgeLength(cfParent), maxL)) {
            newConv.setNode2(cfParent);
            newConv.setHeight2(cfParent.getHeight() + toPoint);
        } else {
            toPoint -= Math.min(getEdgeLength(cfParent), maxL);
            for (Node child : cfParent.getChildren()) {
                if (toPoint < Math.min(getEdgeLength(child), maxL)) {
                    newConv.setNode2(child);
                    newConv.setHeight2(cfParent.getHeight() - toPoint);
                    break;
                }
                toPoint -= Math.min(getEdgeLength(child), maxL);
            }
        }
        if (newConv.getHeight1() > newConv.getHeight2())
            return Double.NEGATIVE_INFINITY;
        logHGF -= drawAffectedRegion(newConv);
        // Add conversion
        acg.addConversion(newConv);
        // Add probability of reverse move deleting this conversion
        // to HGF:
        logHGF += Math.log(1.0 / getRedundantConversions(cfNode).size());
    } else {
        // Remove a redundant conversion
        // Identify redundant conversions
        List<Conversion> redundantConversions = getRedundantConversions(cfNode);
        if (redundantConversions.size() == 0)
            return Double.NEGATIVE_INFINITY;
        // Choose conversion to remove
        Conversion conv = redundantConversions.get(Randomizer.nextInt(redundantConversions.size()));
        logHGF -= Math.log(1.0 / redundantConversions.size());
        // Add probability of reverse move generating this conversion
        // to HGF:
        double L = Math.min(getEdgeLength(cfNode), maxL);
        for (Node child : cfNode.getChildren()) L += Math.min(getEdgeLength(child), maxL);
        logHGF += Math.log(1.0 / L);
        L = Math.min(getEdgeLength(cfParent), maxL);
        for (Node child : cfParent.getChildren()) L += Math.min(getEdgeLength(child), maxL);
        logHGF += Math.log(1.0 / L);
        logHGF += getAffectedRegionProb(conv);
        // Remove conversion
        acg.deleteConversion(conv);
    }
    return logHGF;
}
Also used : Node(beast.evolution.tree.Node) Conversion(bacter.Conversion)
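The proposal above repeatedly calls getEdgeLength, which is not shown in this example. A plausible sketch of that helper, assuming the root is treated as having an effectively infinite parent edge so that Math.min(getEdgeLength(node), maxL) always yields a finite window:

// Assumed behaviour, not taken from this page: length of the edge above node.
private double getEdgeLength(Node node) {
    if (node.isRoot())
        return Double.POSITIVE_INFINITY;
    return node.getParent().getHeight() - node.getHeight();
}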

Example 48 with Node

use of beast.evolution.tree.Node in project bacter by tgvaughan.

the class CFConversionSwap method proposal.

@Override
public double proposal() {
    double logHGF = 0.0;
    // Determine whether we can apply this operator:
    if (acg.getLeafNodeCount() < 3 || acg.getTotalConvCount() == 0)
        return Double.NEGATIVE_INFINITY;
    // Acquire list of conversions compatible with swap:
    List<Conversion> compatible = getCompatibleConversions();
    if (compatible.isEmpty())
        return Double.NEGATIVE_INFINITY;
    // Select conversion to replace
    Conversion replaceConversion = compatible.get(Randomizer.nextInt(compatible.size()));
    logHGF -= Math.log(1.0 / compatible.size());
    acg.deleteConversion(replaceConversion);
    Node srcNode = replaceConversion.getNode1();
    Node destNode = replaceConversion.getNode2();
    Node srcNodeP = srcNode.getParent();
    Node srcNodeS = getSibling(srcNode);
    double t_srcNodeP = srcNodeP.getHeight();
    if (destNode == srcNode.getParent())
        destNode = srcNodeS;
    double newTime = replaceConversion.getHeight2();
    // Conversion modification:
    replaceConversion.setNode2(srcNodeS);
    replaceConversion.setHeight2(t_srcNodeP);
    logHGF += getAffectedRegionProb(replaceConversion);
    logHGF -= drawAffectedRegion(replaceConversion);
    acg.addConversion(replaceConversion);
    if (!includeRootInput.get() && (srcNodeP.isRoot() || destNode.isRoot()))
        return Double.NEGATIVE_INFINITY;
    // Perform necessary conversion expansions/collapses:
    if (newTime < t_srcNodeP) {
        logHGF += collapseConversions(srcNode, destNode, newTime);
    } else {
        logHGF -= expandConversions(srcNode, destNode, newTime);
    }
    logHGF += Math.log(1.0 / getCompatibleConversions().size());
    assert !acg.isInvalid() : "CFCS proposed invalid state.";
    return logHGF;
}
Also used : Node(beast.evolution.tree.Node) Conversion(bacter.Conversion)
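getSibling, getCompatibleConversions, collapseConversions and expandConversions are defined elsewhere in the operator hierarchy and are not shown here. As one example, a minimal sketch of what the sibling lookup could look like on a strictly binary clonal frame (an illustrative assumption, not necessarily the project's actual helper):

// Hypothetical sketch: return the other child of node's parent.
private Node getSibling(Node node) {
    Node parent = node.getParent();
    return parent.getLeft() == node ? parent.getRight() : parent.getLeft();
}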

Example 49 with Node

use of beast.evolution.tree.Node in project bacter by tgvaughan.

the class ACGLikelihood method setStates.

/**
 * Set leaf states in a likelihood core.
 *
 * @param lhc       likelihood core object
 * @param patterns  leaf state patterns
 */
void setStates(LikelihoodCore lhc, Multiset<int[]> patterns) {
    for (Node node : acg.getExternalNodes()) {
        int[] states = new int[patterns.elementSet().size()];
        int taxon = alignment.getTaxonIndex(node.getID());
        int i = 0;
        for (int[] pattern : patterns.elementSet()) {
            int code = pattern[taxon];
            int[] statesForCode = alignment.getDataType().getStatesForCode(code);
            if (statesForCode.length == 1)
                states[i] = statesForCode[0];
            else
                // Causes ambiguous states to be ignored.
                states[i] = code;
            i += 1;
        }
        lhc.setNodeStates(node.getNr(), states);
    }
}
Also used : Node(beast.evolution.tree.Node)
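For context, a hedged sketch of how a core might be created and handed to setStates, assuming a patterns multiset like the one passed above; the state count and the initialize() argument order are assumptions for illustration, not taken from this page:

// Hypothetical wiring only.
LikelihoodCore lhc = new BeerLikelihoodCore(4);                  // 4 nucleotide states
lhc.initialize(acg.getNodeCount(), patterns.elementSet().size(),
        siteModel.getCategoryCount(), true, false);              // assumed signature
setStates(lhc, patterns);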

Example 50 with Node

use of beast.evolution.tree.Node in project bacter by tgvaughan.

the class ACGLikelihood method traverseNoRecurse.

/**
 * Traverse a marginal tree, computing partial likelihoods on the way.
 * This version avoids potentially expensive recursive function calls.
 *
 * @param root   root of the marginal tree
 * @param region alignment region corresponding to the marginal tree
 */
void traverseNoRecurse(MarginalNode root, Region region) {
    computePostOrder(root);
    LikelihoodCore lhc = likelihoodCores.get(region);
    for (MarginalNode node : postOrderNodes) {
        if (!node.isRoot()) {
            lhc.setNodeMatrixForUpdate(node.getNr());
            boolean cfEdge = node.cfNodeNr >= 0
                    && !acg.getNode(node.cfNodeNr).isRoot()
                    && acg.getNode(node.cfNodeNr).getParent().getNr()
                            == ((MarginalNode) node.getParent()).cfNodeNr;
            if (!cfEdge) {
                cacheMisses += 1;
                for (int i = 0; i < siteModel.getCategoryCount(); i++) {
                    double jointBranchRate = siteModel.getRateForCategory(i, node) * branchRateModel.getRateForBranch(node);
                    double parentHeight = node.getParent().getHeight();
                    double nodeHeight = node.getHeight();
                    substitutionModel.getTransitionProbabilities(node, parentHeight, nodeHeight, jointBranchRate, probabilities);
                    lhc.setNodeMatrix(node.getNr(), i, probabilities);
                }
            } else {
                cacheHits += 1;
                for (int i = 0; i < siteModel.getCategoryCount(); i++) {
                    lhc.setNodeMatrix(node.getNr(), i, cfTransitionProbs[node.cfNodeNr][i]);
                }
            }
        }
        if (!node.isLeaf()) {
            // LikelihoodCore only supports binary trees.
            List<Node> children = node.getChildren();
            lhc.setNodePartialsForUpdate(node.getNr());
            lhc.setNodeStatesForUpdate(node.getNr());
            lhc.calculatePartials(children.get(0).getNr(), children.get(1).getNr(), node.getNr());
            if (node.isRoot()) {
                double[] frequencies = substitutionModel.getFrequencies();
                double[] proportions = siteModel.getCategoryProportions(node);
                lhc.integratePartials(node.getNr(), proportions, rootPartials.get(region));
                for (int idx : constantPatterns.get(region)) {
                    rootPartials.get(region)[idx] += siteModel.getProportionInvariant();
                }
                lhc.calculateLogLikelihoods(rootPartials.get(region), frequencies, patternLogLikelihoods.get(region));
            }
        }
    }
}
Also used : Node(beast.evolution.tree.Node) LikelihoodCore(beast.evolution.likelihood.LikelihoodCore) BeerLikelihoodCore(beast.evolution.likelihood.BeerLikelihoodCore)
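computePostOrder and postOrderNodes are declared elsewhere in ACGLikelihood and are not shown here. A minimal sketch of a non-recursive post-order fill, assuming postOrderNodes is a List<MarginalNode> (the real implementation may instead use a preallocated array):

// Hypothetical sketch using java.util.ArrayDeque and java.util.Collections:
// build an order in which children always precede their parents, without recursion.
void computePostOrder(MarginalNode root) {
    postOrderNodes.clear();
    Deque<MarginalNode> stack = new ArrayDeque<>();
    stack.push(root);
    while (!stack.isEmpty()) {
        MarginalNode node = stack.pop();
        postOrderNodes.add(node);
        for (Node child : node.getChildren())
            stack.push((MarginalNode) child);
    }
    Collections.reverse(postOrderNodes);  // children now come before parents
}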

Aggregations

Node (beast.evolution.tree.Node) 140
Conversion (bacter.Conversion) 24
MultiTypeNode (beast.evolution.tree.MultiTypeNode) 22
Locus (bacter.Locus) 17
ArrayList (java.util.ArrayList) 15
Tree (beast.evolution.tree.Tree) 14
Test (org.junit.Test) 9
CalculationNode (beast.core.CalculationNode) 8
RealParameter (beast.core.parameter.RealParameter) 8
TreeInterface (beast.evolution.tree.TreeInterface) 7
ClusterTree (beast.util.ClusterTree) 7
ConversionGraph (bacter.ConversionGraph) 6
Alignment (beast.evolution.alignment.Alignment) 6
TaxonSet (beast.evolution.alignment.TaxonSet) 6
SiteModel (beast.evolution.sitemodel.SiteModel) 5
BitSet (java.util.BitSet) 5
StateNode (beast.core.StateNode) 4
JukesCantor (beast.evolution.substitutionmodel.JukesCantor) 4
TreeParser (beast.util.TreeParser) 3
CFEventList (bacter.CFEventList) 2