Use of beast.evolution.tree.Node in project beast2 by CompEvol.
The class CalibratedYuleModel, method initAndValidate.
@Override
public void initAndValidate() {
    super.initAndValidate();
    type = correctionTypeInput.get();
    final TreeInterface tree = treeInput.get();
    // shallow copy; we shall change cals later
    final List<CalibrationPoint> cals = new ArrayList<>(calibrationsInput.get());
    int calCount = cals.size();
    final List<TaxonSet> taxaSets = new ArrayList<>(calCount);
    if (cals.size() > 0) {
        xclades = new int[calCount][];
        // convenience
        for (final CalibrationPoint cal : cals) {
            taxaSets.add(cal.taxa());
        }
    } else {
        // find calibration points from prior
        for (final Object beastObject : getOutputs()) {
            if (beastObject instanceof CompoundDistribution) {
                final CompoundDistribution prior = (CompoundDistribution) beastObject;
                for (final Distribution distr : prior.pDistributions.get()) {
                    if (distr instanceof MRCAPrior) {
                        final MRCAPrior _MRCAPrior = (MRCAPrior) distr;
                        if (_MRCAPrior.distInput.get() != null) {
                            // make sure the MRCAPrior is monophyletic
                            if (!_MRCAPrior.isMonophyleticInput.get()) {
                                throw new IllegalArgumentException("MRCAPriors must be monophyletic for Calibrated Yule prior");
                            }
                            // create a CalibrationPoint from the MRCAPrior
                            final CalibrationPoint cal = new CalibrationPoint();
                            cal.distInput.setValue(_MRCAPrior.distInput.get(), cal);
                            cal.taxonsetInput.setValue(_MRCAPrior.taxonsetInput.get(), cal);
                            cal.forParentInput.setValue(_MRCAPrior.useOriginateInput.get(), cal);
                            cal.initAndValidate();
                            cals.add(cal);
                            taxaSets.add(cal.taxa());
                            cal.taxa().initAndValidate();
                            calCount++;
                            calcCalibrations = false;
                        } else {
                            if (_MRCAPrior.isMonophyleticInput.get()) {
                                Log.warning.println("WARNING: MRCAPriors (" + _MRCAPrior.getID() + ") must have a distribution when monophyletic. Ignored for Calibrated Yule prior");
                            } else {
                                Log.warning.println("WARNING: MRCAPriors (" + _MRCAPrior.getID() + ") found that is not monophyletic. Ignored for Calibrated Yule prior");
                            }
                        }
                    }
                }
            }
        }
        xclades = new int[calCount][];
    }
    if (calCount == 0) {
        Log.warning.println("WARNING: Calibrated Yule prior could not find any properly configured calibrations. Expect this to crash in a BEAST run.");
        // assume we are in BEAUti; back off for now
        return;
    }
    // check that calibrated clades are either disjoint or nested
    for (int k = 0; k < calCount; ++k) {
        final TaxonSet tk = taxaSets.get(k);
        for (int i = k + 1; i < calCount; ++i) {
            final TaxonSet ti = taxaSets.get(i);
            if (ti.containsAny(tk)) {
                if (!(ti.containsAll(tk) || tk.containsAll(ti))) {
                    throw new IllegalArgumentException("Overlapping taxaSets??");
                }
            }
        }
    }
    orderedCalibrations = new CalibrationPoint[calCount];
    {
        int loc = taxaSets.size() - 1;
        while (loc >= 0) {
            assert loc == taxaSets.size() - 1;
            // place maximal taxaSets at the end, one at a time
            int k = 0;
            for (; k < taxaSets.size(); ++k) {
                if (isMaximal(taxaSets, k)) {
                    break;
                }
            }
            final List<String> tk = taxaSets.get(k).asStringList();
            final int tkcount = tk.size();
            this.xclades[loc] = new int[tkcount];
            for (int nt = 0; nt < tkcount; ++nt) {
                final int taxonIndex = getTaxonIndex(tree, tk.get(nt));
                this.xclades[loc][nt] = taxonIndex;
                if (taxonIndex < 0) {
                    throw new IllegalArgumentException("Taxon not found in tree: " + tk.get(nt));
                }
            }
            orderedCalibrations[loc] = cals.remove(k);
            taxaSets.remove(k);
            // cals and taxaSets should match
            --loc;
        }
    }
    // tio[i] will contain all taxaSets contained in the i'th clade, in the form of their index into orderedCalibrations
    @SuppressWarnings("unchecked") final List<Integer>[] tio = new List[orderedCalibrations.length];
    for (int k = 0; k < orderedCalibrations.length; ++k) {
        tio[k] = new ArrayList<>();
    }
    for (int k = 0; k < orderedCalibrations.length; ++k) {
        final TaxonSet txk = orderedCalibrations[k].taxa();
        for (int i = k + 1; i < orderedCalibrations.length; ++i) {
            if (orderedCalibrations[i].taxa().containsAll(txk)) {
                tio[i].add(k);
                break;
            }
        }
    }
    this.taxaPartialOrder = new int[orderedCalibrations.length][];
    for (int k = 0; k < orderedCalibrations.length; ++k) {
        final List<Integer> tiok = tio[k];
        this.taxaPartialOrder[k] = new int[tiok.size()];
        for (int j = 0; j < tiok.size(); ++j) {
            this.taxaPartialOrder[k][j] = tiok.get(j);
        }
    }
    // true if clade is not contained in any other clade
    final boolean[] maximal = new boolean[calCount];
    for (int k = 0; k < calCount; ++k) {
        maximal[k] = true;
    }
    for (int k = 0; k < calCount; ++k) {
        for (final int i : this.taxaPartialOrder[k]) {
            maximal[i] = false;
        }
    }
    userPDF = userMarInput.get();
    if (userPDF == null) {
        if (type == Type.OVER_ALL_TOPOS) {
            if (calCount == 1) {
                // closed-form formula
            } else {
                boolean anyParent = false;
                for (final CalibrationPoint c : orderedCalibrations) {
                    if (c.forParentInput.get()) {
                        anyParent = true;
                    }
                }
                if (anyParent) {
                    throw new IllegalArgumentException("Sorry, not implemented: calibration on parent for more than one clade.");
                }
                if (calCount == 2 && orderedCalibrations[1].taxa().containsAll(orderedCalibrations[0].taxa())) {
                    // closed-form formulas
                } else {
                    setUpTables(tree.getLeafNodeCount() + 1);
                    linsIter = new CalibrationLineagesIterator(this.xclades, this.taxaPartialOrder, maximal, tree.getLeafNodeCount());
                    lastHeights = new double[calCount];
                }
            }
        } else if (type == Type.OVER_RANKED_COUNTS) {
            setUpTables(tree.getLeafNodeCount() + 1);
        }
    }
    // the Calibrated Yule model assumes contemporaneous tips; warn if tips are dated
    final List<Node> leafs = tree.getExternalNodes();
    final double height = leafs.get(0).getHeight();
    for (final Node leaf : leafs) {
        if (Math.abs(leaf.getHeight() - height) > 1e-8) {
            Log.warning.println("WARNING: Calibrated Yule Model cannot handle dated tips. Use for example a coalescent prior instead.");
            break;
        }
    }
}
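The ordering loop above depends on isMaximal(taxaSets, k) to pick, at each step, a clade that is not nested inside any remaining clade. A minimal sketch of what such a helper could look like, consistent with how it is used here (hypothetical, not the verbatim beast2 implementation):

// Hypothetical sketch: a taxon set is "maximal" if no other set in the
// list strictly contains it, so maximal clades can be placed last.
private static boolean isMaximal(final List<TaxonSet> taxaSets, final int k) {
    final TaxonSet tk = taxaSets.get(k);
    for (int i = 0; i < taxaSets.size(); i++) {
        if (i != k && taxaSets.get(i).containsAll(tk) && !tk.containsAll(taxaSets.get(i))) {
            return false; // tk is nested inside another calibrated clade
        }
    }
    return true;
}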
Use of beast.evolution.tree.Node in project beast2 by CompEvol.
The class Exchange, method wide.
/**
 * WARNING: Assumes a strictly bifurcating beast.tree.
 *
 * @param tree the tree to operate on
 * @return the log Hastings ratio (0), or Double.NEGATIVE_INFINITY when the proposal is invalid
 */
public double wide(final Tree tree) {
    final int nodeCount = tree.getNodeCount();
    Node i = tree.getRoot();
    while (i.isRoot()) {
        i = tree.getNode(Randomizer.nextInt(nodeCount));
    }
    Node j = i;
    while (j.getNr() == i.getNr() || j.isRoot()) {
        j = tree.getNode(Randomizer.nextInt(nodeCount));
    }
    final Node p = i.getParent();
    final Node jP = j.getParent();
    if ((p != jP) && (i != jP) && (j != p) && (j.getHeight() < p.getHeight()) && (i.getHeight() < jP.getHeight())) {
        exchangeNodes(i, j, p, jP);
        // all nodes on the path between the two parents have a changed topology,
        // so they need to be marked FILTHY
        if (markCladesInput.get()) {
            Node iup = p;
            Node jup = jP;
            while (iup != jup) {
                if (iup.getHeight() < jup.getHeight()) {
                    assert !iup.isRoot();
                    iup = iup.getParent();
                    iup.makeDirty(Tree.IS_FILTHY);
                } else {
                    assert !jup.isRoot();
                    jup = jup.getParent();
                    jup.makeDirty(Tree.IS_FILTHY);
                }
            }
        }
        return 0;
    }
    // reject instead of counting (as we do for narrow)
    return Double.NEGATIVE_INFINITY;
}
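The wide move delegates the actual subtree swap to exchangeNodes(i, j, p, jP), defined in the operator's base class. A minimal sketch of the swap, assuming TreeOperator-style replace semantics (hypothetical, for illustration):

// Hypothetical sketch: swap subtrees i and j between their parents p and jP.
// Precondition: p -> i and jP -> j; postcondition: p -> j and jP -> i.
protected void exchangeNodes(final Node i, final Node j, final Node p, final Node jP) {
    replace(p, i, j);
    replace(jP, j, i);
}

// Replace 'child' under 'parent' with 'replacement' and mark both dirty.
protected void replace(final Node parent, final Node child, final Node replacement) {
    parent.removeChild(child);
    parent.addChild(replacement);
    parent.makeDirty(Tree.IS_FILTHY);
    replacement.makeDirty(Tree.IS_FILTHY);
}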
Use of beast.evolution.tree.Node in project beast2 by CompEvol.
The class BeagleTreeLikelihood, method traverse.
/**
 * Traverse the tree calculating partial likelihoods.
 *
 * @param node           the node to traverse from
 * @param operatorNumber out-parameter receiving the operation index, or -1
 * @param flip           whether to flip buffer offsets before writing
 * @return the node's update flag (Tree.IS_CLEAN, or a dirty flag if anything changed)
 */
private int traverse(Node node, int[] operatorNumber, boolean flip) {
    int nodeNum = node.getNr();
    if (operatorNumber != null) {
        operatorNumber[0] = -1;
    }
    // First update the transition probability matrix(ices) for this branch
    int update = (node.isDirty() | hasDirt);
    // if (parent != null) {
    //     update |= parent.isDirty();
    // }
    final double branchRate = branchRateModel.getRateForBranch(node);
    final double branchTime = node.getLength() * branchRate;
    if (!node.isRoot() && (update != Tree.IS_CLEAN || branchTime != m_branchLengths[nodeNum])) {
        m_branchLengths[nodeNum] = branchTime;
        if (branchTime < 0.0) {
            throw new RuntimeException("Negative branch length: " + branchTime);
        }
        if (flip) {
            // first flip the matrixBufferHelper
            matrixBufferHelper.flipOffset(nodeNum);
        }
        // then set which matrix to update
        final int eigenIndex = 0; // = m_substitutionModel.getBranchIndex(node);
        final int updateCount = branchUpdateCount[eigenIndex];
        matrixUpdateIndices[eigenIndex][updateCount] = matrixBufferHelper.getOffsetIndex(nodeNum);
        // if (!m_substitutionModel.canReturnDiagonalization()) {
        //     m_substitutionModel.getTransitionProbabilities(node, parent.getHeight(), node.getHeight(), branchRate, m_fProbabilities);
        //     int matrixIndex = matrixBufferHelper.getOffsetIndex(nodeNum);
        //     beagle.setTransitionMatrix(matrixIndex, m_fProbabilities, 1);
        // }
        branchLengths[eigenIndex][updateCount] = branchTime;
        branchUpdateCount[eigenIndex]++;
        update |= Tree.IS_DIRTY;
    }
    // If the node is internal, update the partial likelihoods.
    if (!node.isLeaf()) {
        // Traverse down the two child nodes
        Node child1 = node.getLeft();
        final int[] op1 = { -1 };
        final int update1 = traverse(child1, op1, flip);
        Node child2 = node.getRight();
        final int[] op2 = { -1 };
        final int update2 = traverse(child2, op2, flip);
        // If either child node was updated then update this node too
        if (update1 != Tree.IS_CLEAN || update2 != Tree.IS_CLEAN) {
            int x = operationCount[operationListCount] * Beagle.OPERATION_TUPLE_SIZE;
            if (flip) {
                // first flip the partialBufferHelper
                partialBufferHelper.flipOffset(nodeNum);
            }
            final int[] operations = this.operations[operationListCount];
            operations[x] = partialBufferHelper.getOffsetIndex(nodeNum);
            if (useScaleFactors) {
                // get the index of this scaling buffer
                int n = nodeNum - tipCount;
                if (recomputeScaleFactors) {
                    // flip the indicator: can take either n or (internalNodeCount + 1) - n
                    scaleBufferHelper.flipOffset(n);
                    // store the index
                    scaleBufferIndices[n] = scaleBufferHelper.getOffsetIndex(n);
                    // write new scaleFactor
                    operations[x + 1] = scaleBufferIndices[n];
                    operations[x + 2] = Beagle.NONE;
                } else {
                    operations[x + 1] = Beagle.NONE;
                    // read existing scaleFactor
                    operations[x + 2] = scaleBufferIndices[n];
                }
            } else {
                if (useAutoScaling) {
                    scaleBufferIndices[nodeNum - tipCount] = partialBufferHelper.getOffsetIndex(nodeNum);
                }
                // not using scaleFactors
                operations[x + 1] = Beagle.NONE;
                operations[x + 2] = Beagle.NONE;
            }
            operations[x + 3] = partialBufferHelper.getOffsetIndex(child1.getNr()); // source node 1
            operations[x + 4] = matrixBufferHelper.getOffsetIndex(child1.getNr()); // source matrix 1
            operations[x + 5] = partialBufferHelper.getOffsetIndex(child2.getNr()); // source node 2
            operations[x + 6] = matrixBufferHelper.getOffsetIndex(child2.getNr()); // source matrix 2
            operationCount[operationListCount]++;
            update |= (update1 | update2);
        }
    }
    return update;
}
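traverse leans on the flipOffset/getOffsetIndex pair for BEAGLE's double buffering: each flippable node owns two buffer slots, so a rejected MCMC proposal can be restored without recomputation. A simplified sketch of the idea (hypothetical; the real BufferIndexHelper also supports store/restore):

// Hypothetical simplification of BEAGLE-style double buffering.
class BufferIndexHelper {
    private final int minIndexValue; // indices below this (e.g. tips) are never flipped
    private final int offsetCount;   // number of flippable buffers
    private final int[] indexOffsets;

    BufferIndexHelper(final int maxIndexValue, final int minIndexValue) {
        this.minIndexValue = minIndexValue;
        this.offsetCount = maxIndexValue - minIndexValue;
        this.indexOffsets = new int[offsetCount];
    }

    int getBufferCount() {
        return 2 * offsetCount + minIndexValue; // two slots per flippable buffer
    }

    void flipOffset(final int i) {
        if (i >= minIndexValue) {
            // toggle between the two slots: the offset is either 0 or offsetCount
            indexOffsets[i - minIndexValue] = offsetCount - indexOffsets[i - minIndexValue];
        }
    }

    int getOffsetIndex(final int i) {
        return i < minIndexValue ? i : indexOffsets[i - minIndexValue] + i;
    }
}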
Use of beast.evolution.tree.Node in project beast2 by CompEvol.
The class BeagleTreeLikelihood, method initialize.
private boolean initialize() {
    m_nNodeCount = treeInput.get().getNodeCount();
    m_bUseAmbiguities = m_useAmbiguities.get();
    m_bUseTipLikelihoods = m_useTipLikelihoods.get();
    if (!(siteModelInput.get() instanceof SiteModel.Base)) {
        throw new IllegalArgumentException("siteModel input should be of type SiteModel.Base");
    }
    m_siteModel = (SiteModel.Base) siteModelInput.get();
    m_siteModel.setDataType(dataInput.get().getDataType());
    substitutionModel = m_siteModel.substModelInput.get();
    branchRateModel = branchRateModelInput.get();
    if (branchRateModel == null) {
        branchRateModel = new StrictClockModel();
    }
    m_branchLengths = new double[m_nNodeCount];
    storedBranchLengths = new double[m_nNodeCount];
    m_nStateCount = dataInput.get().getMaxStateCount();
    patternCount = dataInput.get().getPatternCount();
    // System.err.println("Attempt to load BEAGLE TreeLikelihood");
    // this.branchSubstitutionModel.getEigenCount();
    eigenCount = 1;
    double[] categoryRates = m_siteModel.getCategoryRates(null);
    // check for invariant rates category
    if (m_siteModel.hasPropInvariantCategory) {
        for (int i = 0; i < categoryRates.length; i++) {
            if (categoryRates[i] == 0) {
                proportionInvariant = m_siteModel.getRateForCategory(i, null);
                int stateCount = dataInput.get().getMaxStateCount();
                int patterns = dataInput.get().getPatternCount();
                calcConstantPatternIndices(patterns, stateCount);
                invariantCategory = i;
                double[] tmp = new double[categoryRates.length - 1];
                for (int k = 0; k < invariantCategory; k++) {
                    tmp[k] = categoryRates[k];
                }
                for (int k = invariantCategory + 1; k < categoryRates.length; k++) {
                    tmp[k - 1] = categoryRates[k];
                }
                categoryRates = tmp;
                break;
            }
        }
        if (constantPattern != null && constantPattern.size() > dataInput.get().getPatternCount()) {
            // If there are many more constant patterns than site patterns (each pattern can
            // yield one constant pattern per state), the constant-sites optimisation is less
            // efficient than just calculating the TreeLikelihood directly.
            Log.debug("switch off constant sites optimisiation: calculating through separate TreeLikelihood category (as in the olden days)");
            invariantCategory = -1;
            proportionInvariant = 0;
            constantPattern = null;
            categoryRates = m_siteModel.getCategoryRates(null);
        }
    }
    this.categoryCount = m_siteModel.getCategoryCount() - (invariantCategory >= 0 ? 1 : 0);
    tipCount = treeInput.get().getLeafNodeCount();
    internalNodeCount = m_nNodeCount - tipCount;
    int compactPartialsCount = tipCount;
    if (m_bUseAmbiguities) {
        // if we are using ambiguities then we don't use tip partials
        compactPartialsCount = 0;
    }
    // one partials buffer for each tip and two for each internal node (for store/restore)
    partialBufferHelper = new BufferIndexHelper(m_nNodeCount, tipCount);
    // two eigen buffers for each decomposition, for store and restore
    eigenBufferHelper = new BufferIndexHelper(eigenCount, 0);
    // two matrices for each node less the root
    matrixBufferHelper = new BufferIndexHelper(m_nNodeCount, 0);
    // one scaling buffer for each internal node plus an extra for the accumulation, then doubled for store/restore
    scaleBufferHelper = new BufferIndexHelper(getScaleBufferCount(), 0);
    // attempt to get the resource order from the system properties
    if (resourceOrder == null) {
        resourceOrder = parseSystemPropertyIntegerArray(RESOURCE_ORDER_PROPERTY);
    }
    if (preferredOrder == null) {
        preferredOrder = parseSystemPropertyIntegerArray(PREFERRED_FLAGS_PROPERTY);
    }
    if (requiredOrder == null) {
        requiredOrder = parseSystemPropertyIntegerArray(REQUIRED_FLAGS_PROPERTY);
    }
    if (scalingOrder == null) {
        scalingOrder = parseSystemPropertyStringArray(SCALING_PROPERTY);
    }
    // first set the rescaling scheme to use from the parser
    rescalingScheme = PartialsRescalingScheme.DEFAULT; // = rescalingScheme;
    rescalingScheme = DEFAULT_RESCALING_SCHEME;
    int[] resourceList = null;
    long preferenceFlags = 0;
    long requirementFlags = 0;
    if (scalingOrder.size() > 0) {
        this.rescalingScheme = PartialsRescalingScheme.parseFromString(scalingOrder.get(instanceCount % scalingOrder.size()));
    }
    if (resourceOrder.size() > 0) {
        // the zero is added on the end so that a CPU is selected if the requested resource fails
        resourceList = new int[] { resourceOrder.get(instanceCount % resourceOrder.size()), 0 };
        if (resourceList[0] > 0) {
            // add preference weight against CPU
            preferenceFlags |= BeagleFlag.PROCESSOR_GPU.getMask();
        }
    }
    if (preferredOrder.size() > 0) {
        preferenceFlags = preferredOrder.get(instanceCount % preferredOrder.size());
    }
    if (requiredOrder.size() > 0) {
        requirementFlags = requiredOrder.get(instanceCount % requiredOrder.size());
    }
    if (scaling.get().equals(Scaling.always)) {
        this.rescalingScheme = PartialsRescalingScheme.ALWAYS;
    }
    if (scaling.get().equals(Scaling.none)) {
        this.rescalingScheme = PartialsRescalingScheme.NONE;
    }
    // define default behaviour here
    if (this.rescalingScheme == PartialsRescalingScheme.DEFAULT) {
        // if GPU: the default was dynamic scaling in BEAST, now NONE
        if (resourceList != null && resourceList[0] > 1) {
            // this.rescalingScheme = PartialsRescalingScheme.DYNAMIC;
            this.rescalingScheme = PartialsRescalingScheme.NONE;
        } else {
            // if CPU: just run as fast as possible
            // this.rescalingScheme = PartialsRescalingScheme.NONE;
            // dynamic should run as fast as none until the first underflow
            this.rescalingScheme = PartialsRescalingScheme.DYNAMIC;
        }
    }
    if (this.rescalingScheme == PartialsRescalingScheme.AUTO) {
        preferenceFlags |= BeagleFlag.SCALING_AUTO.getMask();
        useAutoScaling = true;
    } else {
        // preferenceFlags |= BeagleFlag.SCALING_MANUAL.getMask();
    }
    String r = System.getProperty(RESCALE_FREQUENCY_PROPERTY);
    if (r != null) {
        rescalingFrequency = Integer.parseInt(r);
        if (rescalingFrequency < 1) {
            rescalingFrequency = RESCALE_FREQUENCY;
        }
    }
    if (preferenceFlags == 0 && resourceList == null) {
        // else determine dataset characteristics
        if (m_nStateCount == 4 && patternCount < 10000) { // TODO determine good cut-off
            preferenceFlags |= BeagleFlag.PROCESSOR_CPU.getMask();
        }
    }
    if (substitutionModel.canReturnComplexDiagonalization()) {
        requirementFlags |= BeagleFlag.EIGEN_COMPLEX.getMask();
    }
    instanceCount++;
    try {
        beagle = BeagleFactory.loadBeagleInstance(
                tipCount,
                partialBufferHelper.getBufferCount(),
                compactPartialsCount,
                m_nStateCount,
                patternCount,
                eigenBufferHelper.getBufferCount(), // eigenBufferCount
                matrixBufferHelper.getBufferCount(),
                categoryCount,
                scaleBufferHelper.getBufferCount(), // always allocate; they may become necessary
                resourceList,
                preferenceFlags,
                requirementFlags);
    } catch (Exception e) {
        beagle = null;
    }
    if (beagle == null) {
        return false;
    }
    InstanceDetails instanceDetails = beagle.getDetails();
    ResourceDetails resourceDetails = null;
    if (instanceDetails != null) {
        resourceDetails = BeagleFactory.getResourceDetails(instanceDetails.getResourceNumber());
        if (resourceDetails != null) {
            StringBuilder sb = new StringBuilder(" Using BEAGLE version: " + BeagleInfo.getVersion() + " resource ");
            sb.append(resourceDetails.getNumber()).append(": ");
            sb.append(resourceDetails.getName()).append("\n");
            if (resourceDetails.getDescription() != null) {
                String[] description = resourceDetails.getDescription().split("\\|");
                for (String desc : description) {
                    if (desc.trim().length() > 0) {
                        sb.append(" ").append(desc.trim()).append("\n");
                    }
                }
            }
            sb.append(" with instance flags: ").append(instanceDetails.toString());
            Log.info.println(sb.toString());
        } else {
            Log.warning.println(" Error retrieving BEAGLE resource for instance: " + instanceDetails.toString());
            beagle = null;
            return false;
        }
    } else {
        Log.warning.println(" No external BEAGLE resources available, or resource list/requirements not met, using Java implementation");
        beagle = null;
        return false;
    }
    Log.warning.println(" " + (m_bUseAmbiguities ? "Using" : "Ignoring") + " ambiguities in tree likelihood.");
    Log.warning.println(" " + (m_bUseTipLikelihoods ? "Using" : "Ignoring") + " character uncertainty in tree likelihood.");
    Log.warning.println(" With " + patternCount + " unique site patterns.");
    Node[] nodes = treeInput.get().getNodesAsArray();
    for (int i = 0; i < tipCount; i++) {
        int taxon = dataInput.get().getTaxonIndex(nodes[i].getID());
        if (m_bUseAmbiguities || m_bUseTipLikelihoods) {
            setPartials(beagle, i, taxon);
        } else {
            setStates(beagle, i, taxon);
        }
    }
    if (dataInput.get().isAscertained) {
        ascertainedSitePatterns = true;
    }
    double[] patternWeights = new double[patternCount];
    for (int i = 0; i < patternCount; i++) {
        patternWeights[i] = dataInput.get().getPatternWeight(i);
    }
    beagle.setPatternWeights(patternWeights);
    if (this.rescalingScheme == PartialsRescalingScheme.AUTO
            && resourceDetails != null
            && (resourceDetails.getFlags() & BeagleFlag.SCALING_AUTO.getMask()) == 0) {
        // if auto scaling in BEAGLE is not supported then do it here
        this.rescalingScheme = PartialsRescalingScheme.DYNAMIC;
        Log.warning.println(" Auto rescaling not supported in BEAGLE, using : " + this.rescalingScheme.getText());
    } else {
        Log.warning.println(" Using rescaling scheme : " + this.rescalingScheme.getText());
    }
    if (this.rescalingScheme == PartialsRescalingScheme.DYNAMIC) {
        // if false, BEAST does not rescale until the first under-/over-flow
        everUnderflowed = false;
    }
    updateSubstitutionModel = true;
    updateSiteModel = true;
    // some subst models (e.g. WAG) never become dirty, so set up subst models right now
    setUpSubstModel();
    // set up the site model
    beagle.setCategoryRates(categoryRates);
    currentCategoryRates = categoryRates;
    currentFreqs = new double[m_nStateCount];
    currentCategoryWeights = new double[categoryRates.length];
    return true;
}
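initialize reads BEAGLE resource and flag orders from system properties via parseSystemPropertyIntegerArray. A plausible sketch of such a parser (hypothetical; the error handling in beast2 may differ):

// Hypothetical sketch: parse a comma-separated integer list from a system
// property, e.g. -Dbeagle.resource.order=1,0
private static List<Integer> parseSystemPropertyIntegerArray(final String propertyName) {
    final List<Integer> order = new ArrayList<>();
    final String value = System.getProperty(propertyName);
    if (value != null) {
        for (String part : value.split(",")) {
            try {
                order.add(Integer.parseInt(part.trim()));
            } catch (NumberFormatException nfe) {
                Log.warning.println("Invalid entry '" + part + "' in " + propertyName);
            }
        }
    }
    return order;
}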
Use of beast.evolution.tree.Node in project beast2 by CompEvol.
The class RateStatistic, method calcValues.
/**
 * Calculate the three statistics from scratch.
 */
public double[] calcValues() {
    int length = 0;
    int offset = 0;
    final int nrOfLeafs = tree.getLeafNodeCount();
    if (external) {
        length += nrOfLeafs;
    }
    if (internal) {
        length += tree.getInternalNodeCount() - 1;
    }
    final double[] rates = new double[length];
    // needed only for the mean
    final double[] branchLengths = new double[length];
    final Node[] nodes = tree.getNodesAsArray();
    // handle leaf nodes
    if (external) {
        for (int i = 0; i < nrOfLeafs; i++) {
            final Node child = nodes[i];
            final Node parent = child.getParent();
            branchLengths[i] = parent.getHeight() - child.getHeight();
            rates[i] = branchRateModel.getRateForBranch(child);
        }
        offset = nrOfLeafs;
    }
    // handle internal nodes
    if (internal) {
        final int n = tree.getNodeCount();
        int k = offset;
        for (int i = nrOfLeafs; i < n; i++) {
            final Node child = nodes[i];
            if (!child.isRoot()) {
                final Node parent = child.getParent();
                branchLengths[k] = parent.getHeight() - child.getHeight();
                rates[k] = branchRateModel.getRateForBranch(child);
                k++;
            }
        }
    }
    final double[] values = new double[3];
    double totalWeightedRate = 0.0;
    double totalTreeLength = 0.0;
    for (int i = 0; i < rates.length; i++) {
        totalWeightedRate += rates[i] * branchLengths[i];
        totalTreeLength += branchLengths[i];
    }
    values[MEAN] = totalWeightedRate / totalTreeLength;
    // Q2R why not?
    // final double mean = DiscreteStatistics.mean(rates);
    // values[VARIANCE] = DiscreteStatistics.variance(rates, mean);
    // values[COEFFICIENT_OF_VARIATION] = Math.sqrt(values[VARIANCE]) / mean;
    values[VARIANCE] = DiscreteStatistics.variance(rates);
    final double mean = DiscreteStatistics.mean(rates);
    values[COEFFICIENT_OF_VARIATION] = Math.sqrt(DiscreteStatistics.variance(rates, mean)) / mean;
    return values;
}
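For reference, the three statistics above can be computed standalone as follows (a minimal sketch; in beast2 the mean and variance come from DiscreteStatistics, whose variance is assumed here to be the sample variance):

// Hypothetical standalone version of the three rate statistics.
static double[] rateStatistics(final double[] rates, final double[] branchLengths) {
    double totalWeightedRate = 0.0, totalTreeLength = 0.0;
    for (int i = 0; i < rates.length; i++) {
        totalWeightedRate += rates[i] * branchLengths[i];
        totalTreeLength += branchLengths[i];
    }
    double mean = 0.0;
    for (double rate : rates) {
        mean += rate;
    }
    mean /= rates.length;
    double variance = 0.0;
    for (double rate : rates) {
        variance += (rate - mean) * (rate - mean);
    }
    variance /= (rates.length - 1);
    return new double[] {
        totalWeightedRate / totalTreeLength, // MEAN: branch-length-weighted rate
        variance,                            // VARIANCE of the raw rates
        Math.sqrt(variance) / mean           // COEFFICIENT_OF_VARIATION
    };
}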