Use of dr.evolution.tree.NodeRef in project beast-mcmc by beast-dev.
The class TreeTraversal, method updateNodeAndAncestors.
public final void updateNodeAndAncestors(final NodeRef node) {
    updateNode[node.getNumber()] = true;
    if (!treeModel.isRoot(node)) {
        final NodeRef parent = treeModel.getParent(node);
        updateNodeAndAncestors(parent);
    }
}
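The recursion walks from the node up to the root, flagging each ancestor along the way. On very deep trees an iterative form avoids stack growth; a minimal sketch, assuming only the TreeModel methods already used above (the method name is hypothetical):

public final void updateNodeAndAncestorsIterative(final NodeRef node) {
    // Flag the node itself, then every ancestor up to and including the root.
    NodeRef current = node;
    while (current != null) {
        updateNode[current.getNumber()] = true;
        current = treeModel.isRoot(current) ? null : treeModel.getParent(current);
    }
}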
Use of dr.evolution.tree.NodeRef in project beast-mcmc by beast-dev.
The class TreeTraversal, method updateNodeAndChildren.
public final void updateNodeAndChildren(final NodeRef node) {
    updateNode[node.getNumber()] = true;
    for (int i = 0; i < treeModel.getChildCount(node); i++) {
        final NodeRef child = treeModel.getChild(node, i);
        updateNode[child.getNumber()] = true;
    }
}
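Note that this flags only the node and its direct children, not the subtree below them. For contrast, a hedged sketch of a whole-subtree variant (the name and method are illustrative, not part of the class as shown here):

public final void updateNodeAndDescendants(final NodeRef node) {
    updateNode[node.getNumber()] = true;
    // Recurse into every child, flagging the entire subtree below the node.
    for (int i = 0; i < treeModel.getChildCount(node); i++) {
        updateNodeAndDescendants(treeModel.getChild(node, i));
    }
}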
Use of dr.evolution.tree.NodeRef in project beast-mcmc by beast-dev.
The class AbstractTreeLikelihood, method updateNodeAndChildren.
/**
 * Set the update flag for a node and its direct children.
 */
protected void updateNodeAndChildren(NodeRef node) {
    if (COUNT_TOTAL_OPERATIONS)
        totalRateUpdateSingleCount++;

    updateNode[node.getNumber()] = true;

    for (int i = 0; i < treeModel.getChildCount(node); i++) {
        if (COUNT_TOTAL_OPERATIONS)
            totalRateUpdateSingleCount++;

        NodeRef child = treeModel.getChild(node, i);
        updateNode[child.getNumber()] = true;
    }

    likelihoodKnown = false;
}
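Setting likelihoodKnown to false is what forces a recomputation on the next likelihood query. A minimal sketch of the caching pattern these flags support, assuming a getLogLikelihood/calculateLogLikelihood pair in the style of this class (illustrative, not the class's exact code):

public double getLogLikelihood() {
    if (!likelihoodKnown) {
        // Recompute, visiting only nodes whose updateNode flag is set.
        logLikelihood = calculateLogLikelihood();
        likelihoodKnown = true;
        // Clear the per-node flags once the recomputation has consumed them.
        for (int i = 0; i < updateNode.length; i++) {
            updateNode[i] = false;
        }
    }
    return logLikelihood;
}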
Use of dr.evolution.tree.NodeRef in project beast-mcmc by beast-dev.
The class AncestralStateBeagleTreeLikelihood, method traverseSample.
public void traverseSample(TreeModel tree, NodeRef node, int[] parentState, int[] rateCategory) {

    int nodeNum = node.getNumber();
    NodeRef parent = tree.getParent(node);

    // This function assumes that all partial likelihoods have already been calculated.
    // If the node is internal, sample its state given the state of its parent (pre-order traversal).

    double[] conditionalProbabilities = new double[stateCount];
    int[] state = new int[patternCount];

    if (!tree.isExternal(node)) {

        if (parent == null) {
            // This is the root node
            getPartials(nodeNum, partials);

            boolean sampleCategory = categoryCount > 1;
            double[] posteriorWeightedCategory = null;
            double[] priorWeightedCategory = null;

            if (sampleCategory) {
                rateCategory = new int[patternCount];
                posteriorWeightedCategory = new double[categoryCount];
                priorWeightedCategory = siteRateModel.getCategoryProportions();
            }

            for (int j = 0; j < patternCount; j++) {

                // Sample across-site-rate-variation, if it exists
                if (sampleCategory) {
                    for (int r = 0; r < categoryCount; r++) {
                        posteriorWeightedCategory[r] = 0;
                        for (int k = 0; k < stateCount; k++) {
                            // partials are laid out [category][pattern][state], flattened
                            posteriorWeightedCategory[r] += partials[r * stateCount * patternCount + j * stateCount + k];
                        }
                        posteriorWeightedCategory[r] *= priorWeightedCategory[r];
                    }
                    rateCategory[j] = drawChoice(posteriorWeightedCategory);
                }

                // Sample root character state
                int partialsIndex = (rateCategory == null ? 0 : rateCategory[j]) * stateCount * patternCount;
                System.arraycopy(partials, partialsIndex + j * stateCount, conditionalProbabilities, 0, stateCount);

                // TODO May have more than one set of frequencies
                double[] frequencies = substitutionModelDelegate.getRootStateFrequencies();
                for (int i = 0; i < stateCount; i++) {
                    conditionalProbabilities[i] *= frequencies[i];
                }

                try {
                    state[j] = drawChoice(conditionalProbabilities);
                } catch (Error e) {
                    System.err.println(e.toString());
                    System.err.println("Please report error to Marc");
                    state[j] = 0;
                }
                reconstructedStates[nodeNum][j] = state[j];

                if (!returnMarginalLogLikelihood) {
                    jointLogLikelihood += Math.log(frequencies[state[j]]);
                }
            }

            if (sampleCategory) {
                if (this.rateCategory == null) {
                    this.rateCategory = new int[patternCount];
                }
                System.arraycopy(rateCategory, 0, this.rateCategory, 0, patternCount);
            }

        } else {
            // This is an internal node, but not the root
            double[] partialLikelihood = new double[stateCount * patternCount * categoryCount];
            getPartials(nodeNum, partialLikelihood);

            // Sibon says that this actually works now
            // if (categoryCount > 1)
            //     throw new RuntimeException("Reconstruction not implemented for multiple categories yet.");

            getMatrix(nodeNum, probabilities);

            for (int j = 0; j < patternCount; j++) {

                int parentIndex = parentState[j] * stateCount;
                int childIndex = j * stateCount;

                int category = rateCategory == null ? 0 : rateCategory[j];
                int matrixIndex = category * stateCount * stateCount;
                int partialIndex = category * stateCount * patternCount;

                for (int i = 0; i < stateCount; i++) {
                    conditionalProbabilities[i] = partialLikelihood[partialIndex + childIndex + i]
                            * probabilities[matrixIndex + parentIndex + i];
                }

                state[j] = drawChoice(conditionalProbabilities);
                reconstructedStates[nodeNum][j] = state[j];

                if (!returnMarginalLogLikelihood) {
                    double contrib = probabilities[parentIndex + state[j]];
                    jointLogLikelihood += Math.log(contrib);
                }
            }

            hookCalculation(tree, parent, node, parentState, state, probabilities, rateCategory);
        }

        // Traverse down the two child nodes
        NodeRef child1 = tree.getChild(node, 0);
        traverseSample(tree, child1, state, rateCategory);

        NodeRef child2 = tree.getChild(node, 1);
        traverseSample(tree, child2, state, rateCategory);

    } else {

        if (useAmbiguities()) {

            getMatrix(nodeNum, probabilities);
            double[] partials = tipPartials[nodeNum];

            for (int j = 0; j < patternCount; j++) {

                final int parentIndex = parentState[j] * stateCount;
                int category = rateCategory == null ? 0 : rateCategory[j];
                int matrixIndex = category * stateCount * stateCount;

                System.arraycopy(probabilities, parentIndex + matrixIndex, conditionalProbabilities, 0, stateCount);
                for (int k = 0; k < stateCount; ++k) {
                    conditionalProbabilities[k] *= partials[j * stateCount + k];
                }

                reconstructedStates[nodeNum][j] = drawChoice(conditionalProbabilities);

                if (!returnMarginalLogLikelihood) {
                    double contrib = probabilities[parentIndex + reconstructedStates[nodeNum][j]];
                    jointLogLikelihood += Math.log(contrib);
                }
            }

        } else {

            getTipStates(nodeNum, reconstructedStates[nodeNum]);

            for (int j = 0; j < patternCount; j++) {

                final int thisState = reconstructedStates[nodeNum][j];

                if (dataType.isAmbiguousState(thisState)) {

                    final int parentIndex = parentState[j] * stateCount;
                    int category = rateCategory == null ? 0 : rateCategory[j];
                    int matrixIndex = category * stateCount * stateCount;

                    getMatrix(nodeNum, probabilities);
                    System.arraycopy(probabilities, parentIndex + matrixIndex, conditionalProbabilities, 0, stateCount);

                    if (useAmbiguities && !dataType.isUnknownState(thisState)) {
                        // Not completely unknown: zero out states excluded by the ambiguity code
                        boolean[] stateSet = dataType.getStateSet(thisState);
                        for (int k = 0; k < stateCount; k++) {
                            if (!stateSet[k]) {
                                conditionalProbabilities[k] = 0.0;
                            }
                        }
                    }

                    reconstructedStates[nodeNum][j] = drawChoice(conditionalProbabilities);
                }

                if (!returnMarginalLogLikelihood) {
                    final int parentIndex = parentState[j] * stateCount;
                    getMatrix(nodeNum, probabilities);
                    double contrib = probabilities[parentIndex + reconstructedStates[nodeNum][j]];
                    jointLogLikelihood += Math.log(contrib);
                }
            }
        }

        hookCalculation(tree, parent, node, parentState, reconstructedStates[nodeNum], null, rateCategory);
    }
}
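Each call to drawChoice above samples an index in proportion to a vector of unnormalized weights. A self-contained sketch of that kind of sampler, assuming java.util.Random (the project's actual implementation may differ; the thrown Error mirrors the one caught at the root):

static int drawChoiceSketch(double[] weights, java.util.Random rng) {
    double total = 0.0;
    for (double w : weights) {
        total += w;
    }
    // Invert the cumulative distribution with a single uniform draw.
    double u = rng.nextDouble() * total;
    double cumulative = 0.0;
    for (int i = 0; i < weights.length; i++) {
        cumulative += weights[i];
        if (u < cumulative) {
            return i;
        }
    }
    throw new Error("all weights were zero, negative, or NaN");
}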
Use of dr.evolution.tree.NodeRef in project beast-mcmc by beast-dev.
The class BeagleOperationReport, method calculateLogLikelihood.
protected double calculateLogLikelihood() {

    if (matrixUpdateIndices == null) {
        matrixUpdateIndices = new int[eigenCount][nodeCount];
        branchLengths = new double[eigenCount][nodeCount];
        branchUpdateCount = new int[eigenCount];
        // scaleBufferIndices = new int[internalNodeCount];
        // storedScaleBufferIndices = new int[internalNodeCount];
    }

    if (operations == null) {
        operations = new int[numRestrictedPartials + 1][internalNodeCount * Beagle.OPERATION_TUPLE_SIZE];
        operationCount = new int[numRestrictedPartials + 1];
    }

    recomputeScaleFactors = false;

    for (int i = 0; i < eigenCount; i++) {
        branchUpdateCount[i] = 0;
    }
    operationListCount = 0;

    if (hasRestrictedPartials) {
        for (int i = 0; i <= numRestrictedPartials; i++) {
            operationCount[i] = 0;
        }
    } else {
        operationCount[0] = 0;
    }

    System.out.println(alignmentString.toString());

    final NodeRef root = treeModel.getRoot();
    // Do not flip buffers
    traverse(treeModel, root, null, false);

    for (int i = 0; i < eigenCount; i++) {
        if (branchUpdateCount[i] > 0) {

            if (DEBUG_BEAGLE_OPERATIONS) {
                StringBuilder sb = new StringBuilder();
                sb.append("eval = ").append(new Vector(substitutionModel.getEigenDecomposition().getEigenValues())).append("\n");
                sb.append("evec = ").append(new Vector(substitutionModel.getEigenDecomposition().getEigenVectors())).append("\n");
                sb.append("ivec = ").append(new Vector(substitutionModel.getEigenDecomposition().getInverseEigenVectors())).append("\n");
                sb.append("Branch count: ").append(branchUpdateCount[i]);
                sb.append("\nNode indices:\n");

                if (SINGLE_LINE) {
                    sb.append("int n[] = {");
                }
                for (int k = 0; k < branchUpdateCount[i]; ++k) {
                    if (SINGLE_LINE) {
                        sb.append(" ").append(matrixUpdateIndices[i][k]);
                        if (k < (branchUpdateCount[i] - 1)) {
                            sb.append(",");
                        }
                    } else {
                        sb.append(matrixUpdateIndices[i][k]).append("\n");
                    }
                }
                if (SINGLE_LINE) {
                    sb.append(" };\n");
                }

                sb.append("\nBranch lengths:\n");
                if (SINGLE_LINE) {
                    sb.append("double b[] = {");
                }
                for (int k = 0; k < branchUpdateCount[i]; ++k) {
                    if (SINGLE_LINE) {
                        sb.append(" ").append(branchLengths[i][k]);
                        if (k < (branchUpdateCount[i] - 1)) {
                            sb.append(",");
                        }
                    } else {
                        sb.append(branchLengths[i][k]).append("\n");
                    }
                }
                if (SINGLE_LINE) {
                    sb.append(" };\n");
                }

                System.out.println(sb.toString());
            }
        }
    }

    if (DEBUG_BEAGLE_OPERATIONS) {
        StringBuilder sb = new StringBuilder();
        sb.append("Operation count: ").append(operationCount[0]);
        sb.append("\nOperations:\n");

        if (SINGLE_LINE) {
            sb.append("BeagleOperation o[] = {");
        }
        for (int k = 0; k < operationCount[0] * Beagle.OPERATION_TUPLE_SIZE; ++k) {
            if (SINGLE_LINE) {
                sb.append(" ").append(operations[0][k]);
                if (k < (operationCount[0] * Beagle.OPERATION_TUPLE_SIZE - 1)) {
                    sb.append(",");
                }
            } else {
                sb.append(operations[0][k]).append("\n");
            }
        }
        if (SINGLE_LINE) {
            sb.append(" };\n");
        }

        sb.append("Use scale factors: ").append(useScaleFactors).append("\n");
        System.out.println(sb.toString());
    }

    int rootIndex = partialBufferHelper.getOffsetIndex(root.getNumber());
    System.out.println("Root node: " + rootIndex);

    return 0.0;
}
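The flat operations array printed above packs one fixed-size integer tuple per partials operation. A hedged sketch that groups it into tuples for readability, assuming only that each operation occupies Beagle.OPERATION_TUPLE_SIZE consecutive ints (the helper name is illustrative):

static String formatOperations(int[] ops, int operationCount, int tupleSize) {
    StringBuilder sb = new StringBuilder();
    for (int op = 0; op < operationCount; op++) {
        sb.append("op ").append(op).append(": {");
        for (int field = 0; field < tupleSize; field++) {
            if (field > 0) {
                sb.append(", ");
            }
            sb.append(ops[op * tupleSize + field]);
        }
        sb.append("}\n");
    }
    return sb.toString();
}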