Usage of beast.math.distributions.MRCAPrior in the CompEvol beast2 project: class BeautiDoc, method deepCopyPlugin.
/**
 * Create a deep copy of a beastObject, but in a different partition context.
 * First, find all beastObjects that are predecessors of the beastObject to be copied
 * that are ancestors of StateNodes.
 *
 * @param beastObject the object to deep-copy
 * @param parent      object the copy will be attached to; never copied itself
 * @param mcmc        analysis supplying the state, posterior, operators and loggers,
 *                    all of which are excluded from copying
 * @param oldContext  partition context the original lives in
 * @param newContext  partition context the copy should live in
 * @param doc         document used to look up and register objects by ID
 * @param tabooList   additional objects that must not be copied (may be null)
 * @return the copy of beastObject registered under the renamed ID, or null when
 *         renaming produced the same ID (i.e. no copy was made)
 */
public static BEASTInterface deepCopyPlugin(BEASTInterface beastObject, BEASTInterface parent, MCMC mcmc, PartitionContext oldContext, PartitionContext newContext, BeautiDoc doc, List<BEASTInterface> tabooList) {
    /**
     * taboo = list of beastObjects that should not be copied *
     */
    Set<BEASTInterface> taboo = new HashSet<>();
    taboo.add(parent);
    // add state
    taboo.add(mcmc.startStateInput.get());
    // add likelihood and prior
    if (mcmc.posteriorInput.get() instanceof CompoundDistribution) {
        for (Distribution distr : ((CompoundDistribution) mcmc.posteriorInput.get()).pDistributions.get()) {
            if (distr instanceof CompoundDistribution) {
                taboo.add(distr);
            }
        }
    }
    // add posterior
    taboo.add(mcmc.posteriorInput.get());
    // parent of operators
    taboo.add(mcmc);
    // add loggers
    taboo.addAll(mcmc.loggersInput.get());
    // add exception for *BEAST logger (perhaps need to be generalised?)
    if (doc.pluginmap.containsKey("SpeciesTreeLoggerX")) {
        taboo.add(doc.pluginmap.get("SpeciesTreeLoggerX"));
    }
    // add trees
    for (StateNode node : mcmc.startStateInput.get().stateNodeInput.get()) {
        if (node instanceof Tree) {
            taboo.add(node);
        }
    }
    // add MRCAPriors
    for (String id : doc.pluginmap.keySet()) {
        BEASTInterface o = doc.pluginmap.get(id);
        if (o instanceof MRCAPrior) {
            taboo.add(o);
        }
    }
    if (tabooList != null) {
        taboo.addAll(tabooList);
    }

    // find predecessors of beastObject to be copied
    List<BEASTInterface> predecessors = new ArrayList<>();
    collectPredecessors(beastObject, predecessors);

    // find ancestors of StateNodes that are predecessors + the beastObject itself
    Set<BEASTInterface> ancestors = new HashSet<>();
    collectAncestors(beastObject, ancestors, taboo);
    Log.info.print(Arrays.toString(ancestors.toArray()));
    for (BEASTInterface beastObject2 : predecessors) {
        if (beastObject2 instanceof StateNode) {
            Set<BEASTInterface> ancestors2 = new HashSet<>();
            collectAncestors(beastObject2, ancestors2, taboo);
            ancestors.addAll(ancestors2);
        } else if (beastObject2 instanceof Alignment || beastObject2 instanceof FilteredAlignment) {
            // for alignments, also pull in the ancestors of every non-taboo output
            for (Object output : beastObject2.getOutputs()) {
                if (!taboo.contains(output)) {
                    Set<BEASTInterface> ancestors2 = new HashSet<>();
                    collectAncestors((BEASTInterface) output, ancestors2, taboo);
                    ancestors.addAll(ancestors2);
                }
            }
        }
    }

    // collect priors
    predecessors.addAll(ancestors);
    for (BEASTInterface o : predecessors) {
        if (o instanceof Prior) {
            List<BEASTInterface> priorPredecessors = new ArrayList<>();
            collectPredecessors(o, priorPredecessors);
            ancestors.addAll(priorPredecessors);
        }
    }

    // debug output: for each ancestor, list which of its active objects are also ancestors
    Log.info.print(Arrays.toString(predecessors.toArray()));
    for (BEASTInterface p : ancestors) {
        Log.info.print("(");
        try {
            for (BEASTInterface p2 : p.listActiveBEASTObjects()) {
                if (ancestors.contains(p2)) {
                    Log.info.print(p2.getID() + " ");
                }
            }
        } catch (IllegalArgumentException e) {
            e.printStackTrace();
        }
        Log.info.print(") ");
        Log.info.println(p.getID());
    }

    // now the ancestors contain all beastObjects to be copied
    // make a copy of all individual BEASTObjects, before connecting them up
    Map<String, BEASTInterface> copySet = new HashMap<>();
    for (BEASTInterface beastObject2 : ancestors) {
        String id = beastObject2.getID();
        if (id == null) {
            // generate a unique ID from the short class name of the object being copied.
            // BUG FIX: derive the name from beastObject2 (the current loop element),
            // not from the outer beastObject argument
            id = beastObject2.getClass().getName().replaceAll(".*\\.", "");
            int i = 0;
            while (doc.pluginmap.containsKey(id + "." + i)) {
                i++;
            }
            id = id + "." + i;
            beastObject2.setID(id);
        }
        String copyID = renameId(id, oldContext, newContext);
        if (!id.equals(copyID)) {
            if (doc.pluginmap.containsKey(copyID)) {
                // a copy with this ID already exists in the document; reuse it
                BEASTInterface org = doc.pluginmap.get(copyID);
                copySet.put(id, org);
            } else {
                BEASTInterface copy;
                try {
                    // getDeclaredConstructor().newInstance() replaces the deprecated
                    // Class.newInstance(), which silently propagates any exception
                    // thrown by the constructor
                    copy = beastObject2.getClass().getDeclaredConstructor().newInstance();
                    copy.setID(copyID);
                    copySet.put(id, copy);
                } catch (ReflectiveOperationException e) {
                    e.printStackTrace();
                    throw new RuntimeException("Programmer error: every object in the model should have a default constructor that is publicly accessible");
                }
            }
        }
        Log.warning.println("Copy: " + id + " -> " + copyID);
    }

    // set all inputs of copied beastObjects + outputs to taboo
    for (BEASTInterface beastObject2 : ancestors) {
        String id = beastObject2.getID();
        BEASTInterface copy = copySet.get(id);
        if (copy != null) {
            Log.warning.println("Processing: " + id + " -> " + copy.getID());
            // set inputs
            for (Input<?> input : beastObject2.listInputs()) {
                if (input.get() != null) {
                    if (input.get() instanceof List) {
                        // ((List)copy.getInput(input.getName())).clear();
                        for (Object o : (List<?>) input.get()) {
                            if (o instanceof BEASTInterface) {
                                BEASTInterface value = getCopyValue((BEASTInterface) o, copySet, oldContext, newContext, doc);
                                // make sure it is not already in the list
                                Object o2 = copy.getInput(input.getName()).get();
                                boolean alreadyInList = false;
                                if (o2 instanceof List) {
                                    List<?> currentList = (List<?>) o2;
                                    for (Object v : currentList) {
                                        if (v == value) {
                                            alreadyInList = true;
                                            break;
                                        }
                                    }
                                }
                                if (!alreadyInList) {
                                    // add to the list
                                    copy.setInputValue(input.getName(), value);
                                }
                            } else {
                                // it is a primitive value
                                if (copy instanceof Parameter.Base && input.getName().equals("value")) {
                                    // prevent appending to parameter values
                                    Parameter.Base<?> p = ((Parameter.Base<?>) copy);
                                    ((List<?>) p.valuesInput.get()).clear();
                                }
                                copy.setInputValue(input.getName(), input.get());
                            }
                        }
                    } else if (input.get() instanceof BEASTInterface) {
                        // handle BEASTObject
                        BEASTInterface value = getCopyValue((BEASTInterface) input.get(), copySet, oldContext, newContext, doc);
                        copy.setInputValue(input.getName(), value);
                    } else if (input.get() instanceof String) {
                        // may need to replace partition info embedded in the string
                        String s = (String) input.get();
                        s = s.replaceAll("\\.c:[a-zA-Z0-9_]*", ".c:" + newContext.clockModel);
                        s = s.replaceAll("\\.s:[a-zA-Z0-9_]*", ".s:" + newContext.siteModel);
                        s = s.replaceAll("\\.t:[a-zA-Z0-9_]*", ".t:" + newContext.tree);
                        copy.setInputValue(input.getName(), s);
                    } else {
                        // it is a primitive value
                        copy.setInputValue(input.getName(), input.get());
                    }
                }
            }
            // set outputs: reconnect taboo outputs (except parent) to the copy
            for (Object output : beastObject2.getOutputs()) {
                if (taboo.contains(output) && output != parent) {
                    BEASTInterface output2 = getCopyValue((BEASTInterface) output, copySet, oldContext, newContext, doc);
                    for (Input<?> input : ((BEASTInterface) output).listInputs()) {
                        // do not add state node initialisers automatically
                        if (input.get() instanceof List && // do not update state node initialisers
                                !(taboo.contains(output2) && input.getName().equals("init"))) {
                            List<?> list = (List<?>) input.get();
                            if (list.contains(beastObject2)) {
                                List<?> list2 = (List<?>) output2.getInput(input.getName()).get();
                                if (!list2.contains(copy)) {
                                    output2.setInputValue(input.getName(), copy);
                                }
                            }
                        }
                    }
                }
            }
            copySet.put(id, copy);
            // Log.warning.println(base.operatorsAsString());
        }
    }

    // deep copy must be obtained from copyset, before sorting
    // since the sorting changes (deletes items) from the copySet map
    BEASTInterface deepCopy = copySet.get(beastObject.getID());

    // first need to sort copySet by topology, before we can initAndValidate them.
    // NOTE: removing from 'values' mutates the backing copySet map; objects with no
    // remaining dependencies may be appended to 'sorted' more than once, which is
    // tolerated by the 'done' set below
    List<BEASTInterface> sorted = new ArrayList<>();
    Collection<BEASTInterface> values = copySet.values();
    while (values.size() > 0) {
        for (BEASTInterface copy : values) {
            boolean found = false;
            for (BEASTInterface beastObject2 : copy.listActiveBEASTObjects()) {
                if (values.contains(beastObject2)) {
                    found = true;
                    break;
                }
            }
            if (!found) {
                sorted.add(copy);
            }
        }
        values.remove(sorted.get(sorted.size() - 1));
    }

    // initialise copied beastObjects
    Set<BEASTInterface> done = new HashSet<>();
    for (BEASTInterface copy : sorted) {
        try {
            if (!done.contains(copy)) {
                copy.initAndValidate();
                done.add(copy);
            }
        } catch (Exception e) {
            // ignore: best-effort initialisation, some copies cannot validate yet
            Log.warning.print(e.getMessage());
        }
        if (doc != null) {
            doc.addPlugin(copy);
        }
    }
    doc.scrubAll(true, false);
    return deepCopy;
}
Usage of beast.math.distributions.MRCAPrior in the CompEvol beast2 project: class BeautiDoc, method setClockRate.
/**
 * Synchronise the 'estimate' flag on each partition's clock rate.
 * The rate of the first tree-likelihood partition is the reference; subsequent
 * partitions need estimation when they do not share that reference rate (or the
 * reference itself is estimated). Tip dates or MRCAPrior calibrations on a
 * partition tree (or on the *BEAST species tree) also force estimation.
 */
void setClockRate() {
    boolean needsEstimationBySPTree = false;
    if (pluginmap.containsKey("Tree.t:Species")) {
        Tree sptree = (Tree) pluginmap.get("Tree.t:Species");
        // check whether there is a calibration on the species tree
        for (Object beastObject : sptree.getOutputs()) {
            if (beastObject instanceof MRCAPrior) {
                MRCAPrior prior = (MRCAPrior) beastObject;
                if (prior.distInput.get() != null) {
                    needsEstimationBySPTree = true;
                }
            }
        }
    }

    BEASTInterface likelihood = pluginmap.get("likelihood");
    if (likelihood instanceof CompoundDistribution) {
        int i = 0;
        RealParameter firstClock = null;
        for (Distribution distr : ((CompoundDistribution) likelihood).pDistributions.get()) {
            if (distr instanceof GenericTreeLikelihood) {
                GenericTreeLikelihood treeLikelihood = (GenericTreeLikelihood) distr;
                boolean needsEstimation = needsEstimationBySPTree;
                if (i > 0) {
                    BranchRateModel.Base model = treeLikelihood.branchRateModelInput.get();
                    // BUG FIX: guard against a partition without a clock model and against
                    // firstClock still being null (first partition had no model); the
                    // unguarded dereference used to throw a NullPointerException, while the
                    // same method null-checks the model further down
                    if (model != null && firstClock != null) {
                        needsEstimation = (model.meanRateInput.get() != firstClock) || firstClock.isEstimatedInput.get();
                    }
                } else {
                    // TODO: this might not be a valid type conversion from TreeInterface to Tree
                    Tree tree = (Tree) treeLikelihood.treeInput.get();
                    // check whether there are tip dates
                    if (tree.hasDateTrait()) {
                        needsEstimation = true;
                    }
                    // check whether there is a calibration
                    for (Object beastObject : tree.getOutputs()) {
                        if (beastObject instanceof MRCAPrior) {
                            MRCAPrior prior = (MRCAPrior) beastObject;
                            if (prior.distInput.get() != null) {
                                needsEstimation = true;
                            }
                        }
                    }
                }
                BranchRateModel.Base model = treeLikelihood.branchRateModelInput.get();
                if (model != null) {
                    RealParameter clockRate = model.meanRateInput.get();
                    clockRate.isEstimatedInput.setValue(needsEstimation, clockRate);
                    if (firstClock == null) {
                        firstClock = clockRate;
                    }
                }
                i++;
            }
        }
    }
}
Usage of beast.math.distributions.MRCAPrior in the CompEvol beast2 project: class BeautiDoc, method scrubAll.
// TreeDistribution getTreePrior(String partition) {
// int k = 0;
// for (Alignment data : alignments) {
// if (data.getID().equals(partition)) {
// return treePriors.get(k);
// }
// k++;
// }
// return null;
// }
/**
 * Reconcile the whole model after a change in BEAUti: update clock rates,
 * (dis)connect MRCAPriors depending on whether their tree is estimated, and
 * repeatedly apply the BEAUti template connector rules until the posterior's
 * predecessor set stops changing (a fixed point is reached).
 *
 * @param useNotEstimatedStateNodes currently unused here; passed through by callers
 * @param isInitial                 true on the first scrub after loading a template
 */
public synchronized void scrubAll(boolean useNotEstimatedStateNodes, boolean isInitial) {
    try {
        if (autoSetClockRate) {
            setClockRate();
        }
        if (autoUpdateFixMeanSubstRate) {
            SiteModelInputEditor.customConnector(this);
        }

        // the species tree in a *BEAST analysis is always estimated
        if (pluginmap.containsKey("Tree.t:Species")) {
            Tree tree = (Tree) pluginmap.get("Tree.t:Species");
            tree.isEstimatedInput.setValue(true, tree);
        }

        // go through all templates, and process connectors in relevant ones
        boolean progress = true;
        while (progress) {
            warning("============================ start scrubbing ===========================");
            progress = false;
            setUpActivePlugins();

            // process MRCA priors: only keep them connected while their tree is estimated
            for (String id : pluginmap.keySet()) {
                if (id != null && id.endsWith(".prior")) {
                    BEASTInterface beastObject = pluginmap.get(id);
                    if (beastObject instanceof MRCAPrior) {
                        MRCAPrior prior = (MRCAPrior) beastObject;
                        if (prior.treeInput.get().isEstimatedInput.get() == false) {
                            // disconnect
                            disconnect(beastObject, "prior", "distribution");
                        } else {
                            // connect
                            connect(beastObject, "prior", "distribution");
                        }
                    }
                }
            }

            List<BeautiSubTemplate> templates = new ArrayList<>();
            templates.add(beautiConfig.partitionTemplate.get());
            templates.addAll(beautiConfig.subTemplates);

            for (PartitionContext context : possibleContexts) {
                applyBeautiRules(templates, isInitial, context);
            }
            // add 'Species' as special partition name
            applyBeautiRules(templates, isInitial, new PartitionContext("Species"));

            // if the model changed, some rules that use inposterior() may
            // not have been triggered properly
            // so we need to check that the model changed, and if so,
            // revisit the BeautiConnectors
            List<BEASTInterface> posteriorPredecessors2 = new ArrayList<>();
            collectPredecessors(((MCMC) mcmc.get()).posteriorInput.get(), posteriorPredecessors2);
            if (posteriorPredecessors.size() != posteriorPredecessors2.size()) {
                progress = true;
            } else {
                for (BEASTInterface beastObject : posteriorPredecessors2) {
                    if (!posteriorPredecessors.contains(beastObject)) {
                        progress = true;
                        break;
                    }
                }
            }
        }

        // apply hyper-prior template to all plain "parameter.*" RealParameters
        List<BeautiSubTemplate> templates = new ArrayList<>();
        templates.add(beautiConfig.hyperPriorTemplate);
        for (BEASTInterface beastObject : pluginmap.values()) {
            if (beastObject instanceof RealParameter) {
                if (beastObject.getID() != null && beastObject.getID().startsWith("parameter.")) {
                    PartitionContext context = new PartitionContext(beastObject.getID().substring("parameter.".length()));
                    applyBeautiRules(templates, isInitial, context);
                }
            }
        }

        collectClockModels();
        // collectTreePriors();

        Log.warning.println("PARTITIONS:\n");
        Log.warning.println(Arrays.toString(currentPartitions));

        determineLinks();
    } catch (Exception e) {
        // BUG FIX: also dump the stack trace — getMessage() alone may be null and
        // loses all context of where scrubbing failed
        Log.err.println(e.getMessage());
        e.printStackTrace();
    }
}
Usage of beast.math.distributions.MRCAPrior in the CompEvol beast2 project: class RandomTree, method initStateNodes.
// taxonset intersection test
// private boolean intersects(final BitSet bitSet, final BitSet bitSet2) {
// for (int k = bitSet.nextSetBit(0); k >= 0; k = bitSet.nextSetBit(k + 1)) {
// if (bitSet2.get(k)) {
// return true;
// }
// }
// return false;
// }
// returns true if bitSet is a subset of bitSet2
// private boolean isSubset(final BitSet bitSet, final BitSet bitSet2) {
// boolean isSubset = true;
// for (int k = bitSet.nextSetBit(0); isSubset && k >= 0; k = bitSet.nextSetBit(k + 1)) {
// isSubset = bitSet2.get(k);
// }
// return isSubset;
// }
@SuppressWarnings("unchecked")
@Override
public void initStateNodes() {
    // Collect all MRCAPrior calibrations relevant to this tree (explicit input,
    // outputs of this tree, and outputs of m_initial), order the monophyletic
    // constraints topologically, then simulate a random starting tree that
    // respects the constraints and their distribution bounds.

    // find taxon sets we are dealing with
    taxonSets = new ArrayList<>();
    m_bounds = new ArrayList<>();
    distributions = new ArrayList<>();
    taxonSetIDs = new ArrayList<>();
    lastMonophyletic = 0;

    // taxa come either from the explicit taxa input or from the taxon set
    if (taxaInput.get() != null) {
        taxa.addAll(taxaInput.get().getTaxaNames());
    } else {
        taxa.addAll(m_taxonset.get().asStringList());
    }

    // pick up constraints from outputs, m_inititial input tree and output tree, if any
    List<MRCAPrior> calibrations = new ArrayList<>();
    calibrations.addAll(calibrationsInput.get());
    // pick up constraints in m_initial tree
    for (final Object beastObject : getOutputs()) {
        if (beastObject instanceof MRCAPrior && !calibrations.contains(beastObject)) {
            calibrations.add((MRCAPrior) beastObject);
        }
    }
    if (m_initial.get() != null) {
        for (final Object beastObject : m_initial.get().getOutputs()) {
            if (beastObject instanceof MRCAPrior && !calibrations.contains(beastObject)) {
                calibrations.add((MRCAPrior) beastObject);
            }
        }
    }

    // translate each calibration into a taxon set + distribution + bounds triple
    for (final MRCAPrior prior : calibrations) {
        final TaxonSet taxonSet = prior.taxonsetInput.get();
        if (taxonSet != null && !prior.onlyUseTipsInput.get()) {
            final Set<String> usedTaxa = new LinkedHashSet<>();
            if (taxonSet.asStringList() == null) {
                taxonSet.initAndValidate();
            }
            for (final String taxonID : taxonSet.asStringList()) {
                if (!taxa.contains(taxonID)) {
                    throw new IllegalArgumentException("Taxon <" + taxonID + "> could not be found in list of taxa. Choose one of " + taxa);
                }
                usedTaxa.add(taxonID);
            }
            final ParametricDistribution distr = prior.distInput.get();
            final Bound bounds = new Bound();
            if (distr != null) {
                // initialise the distribution's predecessors bottom-up so quantiles can be computed
                List<BEASTInterface> beastObjects = new ArrayList<>();
                distr.getPredecessors(beastObjects);
                for (int i = beastObjects.size() - 1; i >= 0; i--) {
                    beastObjects.get(i).initAndValidate();
                }
                try {
                    // hard bounds = 0% and 100% quantiles, shifted by the distribution offset
                    bounds.lower = distr.inverseCumulativeProbability(0.0) + distr.offsetInput.get();
                    bounds.upper = distr.inverseCumulativeProbability(1.0) + distr.offsetInput.get();
                } catch (MathException e) {
                    Log.warning.println("At RandomTree::initStateNodes, bound on MRCAPrior could not be set " + e.getMessage());
                }
            }
            if (prior.isMonophyleticInput.get()) {
                // add any monophyletic constraint; these are kept at the front of the lists
                taxonSets.add(lastMonophyletic, usedTaxa);
                distributions.add(lastMonophyletic, distr);
                m_bounds.add(lastMonophyletic, bounds);
                // NOTE(review): taxonSetIDs is appended while the three lists above insert at
                // lastMonophyletic — if a non-monophyletic calibration was processed earlier,
                // IDs may no longer line up with taxonSets; confirm intended
                taxonSetIDs.add(prior.getID());
                lastMonophyletic++;
            } else {
                // only calibrations with finite bounds are added
                if (!Double.isInfinite(bounds.lower) || !Double.isInfinite(bounds.upper)) {
                    taxonSets.add(usedTaxa);
                    distributions.add(distr);
                    m_bounds.add(bounds);
                    taxonSetIDs.add(prior.getID());
                }
            }
        }
    }

    // assume all calibration constraints are MonoPhyletic
    // TODO: verify that this is a reasonable assumption
    lastMonophyletic = taxonSets.size();

    // sort constraints such that if taxon set i is subset of taxon set j, then i < j
    for (int i = 0; i < lastMonophyletic; i++) {
        for (int j = i + 1; j < lastMonophyletic; j++) {
            Set<String> intersection = new LinkedHashSet<>(taxonSets.get(i));
            intersection.retainAll(taxonSets.get(j));
            if (intersection.size() > 0) {
                final boolean isSubset = taxonSets.get(i).containsAll(taxonSets.get(j));
                final boolean isSubset2 = taxonSets.get(j).containsAll(taxonSets.get(i));
                // o taxonset1 does not intersect taxonset2
                if (!(isSubset || isSubset2)) {
                    throw new IllegalArgumentException("333: Don't know how to generate a Random Tree for taxon sets that intersect, " + "but are not inclusive. Taxonset " + taxonSetIDs.get(i) + " and " + taxonSetIDs.get(j));
                }
                // swap i & j if b1 subset of b2; the four lists are kept in parallel
                if (isSubset) {
                    swap(taxonSets, i, j);
                    swap(distributions, i, j);
                    swap(m_bounds, i, j);
                    swap(taxonSetIDs, i, j);
                }
            }
        }
    }

    // build tree of mono constraints such that j is parent of i if i is a subset of j but i+1,i+2,...,j-1 are not.
    // The last one, standing for the virtual "root" of all monophyletic clades is not associated with an actual clade
    final int[] parent = new int[lastMonophyletic];
    children = new List[lastMonophyletic + 1];
    for (int i = 0; i < lastMonophyletic + 1; i++) {
        children[i] = new ArrayList<>();
    }
    for (int i = 0; i < lastMonophyletic; i++) {
        int j = i + 1;
        while (j < lastMonophyletic && !taxonSets.get(j).containsAll(taxonSets.get(i))) {
            j++;
        }
        parent[i] = j;
        children[j].add(i);
    }

    // make sure upper bounds of a child does not exceed the upper bound of its parent
    for (int i = lastMonophyletic - 1; i >= 0; --i) {
        if (parent[i] < lastMonophyletic) {
            if (m_bounds.get(i).upper > m_bounds.get(parent[i]).upper) {
                // 1e-100 keeps the child strictly below its parent's bound
                m_bounds.get(i).upper = m_bounds.get(parent[i]).upper - 1e-100;
            }
        }
    }

    final PopulationFunction popFunction = populationFunctionInput.get();

    simulateTree(taxa, popFunction);
    if (rootHeightInput.get() != null) {
        scaleToFit(rootHeightInput.get() / root.getHeight(), root);
    }

    nodeCount = 2 * taxa.size() - 1;
    internalNodeCount = taxa.size() - 1;
    leafNodeCount = taxa.size();

    HashMap<String, Integer> taxonToNR = null;
    // preserve node numbers where possible
    if (m_initial.get() != null) {
        if (leafNodeCount == m_initial.get().getLeafNodeCount()) {
            // dont ask me how the initial tree is rubbish (i.e. 0:0.0)
            taxonToNR = new HashMap<>();
            for (Node n : m_initial.get().getExternalNodes()) {
                taxonToNR.put(n.getID(), n.getNr());
            }
        }
    } else {
        taxonToNR = new HashMap<>();
        String[] taxa = getTaxaNames();
        for (int k = 0; k < taxa.length; ++k) {
            taxonToNR.put(taxa[k], k);
        }
    }
    // multiple simulation tries may produce an excess of nodes with invalid nr's. reset those.
    setNodesNrs(root, 0, new int[1], taxonToNR);

    initArrays();

    if (m_initial.get() != null) {
        m_initial.get().assignFromWithoutID(this);
    }
    // NOTE(review): this indexes 'calibrations' by the same k used for the reordered
    // taxonSets, but 'calibrations' itself was never reordered by the subset sort
    // above — confirm the correspondence is intended
    for (int k = 0; k < lastMonophyletic; ++k) {
        final MRCAPrior p = calibrations.get(k);
        if (p.isMonophyleticInput.get()) {
            final TaxonSet taxonSet = p.taxonsetInput.get();
            if (taxonSet == null) {
                throw new IllegalArgumentException("Something is wrong with constraint " + p.getID() + " -- a taxonset must be specified if a monophyletic constraint is enforced.");
            }
            final Set<String> usedTaxa = new LinkedHashSet<>();
            if (taxonSet.asStringList() == null) {
                taxonSet.initAndValidate();
            }
            usedTaxa.addAll(taxonSet.asStringList());
            /* int c = */
            traverse(root, usedTaxa, taxonSet.getTaxonCount(), new int[1]);
            // boolean b = c == nrOfTaxa + 127;
        }
    }
}
Usage of beast.math.distributions.MRCAPrior in the CompEvol beast2 project: class CalibratedYuleModel, method initAndValidate.
@Override
public void initAndValidate() {
    // Gather calibration points (either given directly or derived from monophyletic
    // MRCAPriors found in an enclosing CompoundDistribution), order them so nested
    // clades precede enclosing ones, and precompute the tables / iterators needed
    // by the chosen correction type.
    super.initAndValidate();

    type = correctionTypeInput.get();

    final TreeInterface tree = treeInput.get();

    // shallow copy. we shall change cals later
    final List<CalibrationPoint> cals = new ArrayList<>(calibrationsInput.get());
    int calCount = cals.size();
    final List<TaxonSet> taxaSets = new ArrayList<>(calCount);
    if (cals.size() > 0) {
        xclades = new int[calCount][];

        // convenience
        for (final CalibrationPoint cal : cals) {
            taxaSets.add(cal.taxa());
        }
    } else {
        // find calibration points from prior
        for (final Object beastObject : getOutputs()) {
            if (beastObject instanceof CompoundDistribution) {
                final CompoundDistribution prior = (CompoundDistribution) beastObject;
                for (final Distribution distr : prior.pDistributions.get()) {
                    if (distr instanceof MRCAPrior) {
                        final MRCAPrior _MRCAPrior = (MRCAPrior) distr;
                        // make sure MRCAPrior is monophyletic
                        if (_MRCAPrior.distInput.get() != null) {
                            // make sure MRCAPrior is monophyletic
                            if (!_MRCAPrior.isMonophyleticInput.get()) {
                                throw new IllegalArgumentException("MRCAPriors must be monophyletic for Calibrated Yule prior");
                            }
                            // create CalibrationPoint from MRCAPrior
                            final CalibrationPoint cal = new CalibrationPoint();
                            cal.distInput.setValue(_MRCAPrior.distInput.get(), cal);
                            cal.taxonsetInput.setValue(_MRCAPrior.taxonsetInput.get(), cal);
                            cal.forParentInput.setValue(_MRCAPrior.useOriginateInput.get(), cal);
                            cal.initAndValidate();
                            cals.add(cal);
                            taxaSets.add(cal.taxa());
                            cal.taxa().initAndValidate();
                            calCount++;
                            // densities come from the MRCAPriors, so don't recompute them here
                            calcCalibrations = false;
                        } else {
                            if (_MRCAPrior.isMonophyleticInput.get()) {
                                Log.warning.println("WARNING: MRCAPriors (" + _MRCAPrior.getID() + ") must have a distribution when monophyletic. Ignored for Calibrated Yule prior");
                            } else {
                                Log.warning.println("WARNING: MRCAPriors (" + _MRCAPrior.getID() + ") found that is not monophyletic. Ignored for Calibrated Yule prior");
                            }
                        }
                    }
                }
            }
        }
        xclades = new int[calCount][];
    }
    if (calCount == 0) {
        Log.warning.println("WARNING: Calibrated Yule prior could not find any properly configured calibrations. Expect this to crash in a BEAST run.");
        // assume we are in beauti, back off for now
        return;
    }

    // overlapping calibration clades must be strictly nested
    for (int k = 0; k < calCount; ++k) {
        final TaxonSet tk = taxaSets.get(k);
        for (int i = k + 1; i < calCount; ++i) {
            final TaxonSet ti = taxaSets.get(i);
            if (ti.containsAny(tk)) {
                if (!(ti.containsAll(tk) || tk.containsAll(ti))) {
                    throw new IllegalArgumentException("Overlapping taxaSets??");
                }
            }
        }
    }

    // selection sort: repeatedly move a maximal (not-contained-in-any-other) taxon
    // set to the end of orderedCalibrations, so nested clades end up first
    orderedCalibrations = new CalibrationPoint[calCount];

    {
        int loc = taxaSets.size() - 1;
        while (loc >= 0) {
            assert loc == taxaSets.size() - 1;
            // place maximal taxaSets at end one at a time
            int k = 0;
            for (; /**/
            k < taxaSets.size(); ++k) {
                if (isMaximal(taxaSets, k)) {
                    break;
                }
            }

            // record the tree leaf indices of the clade's taxa
            final List<String> tk = taxaSets.get(k).asStringList();
            final int tkcount = tk.size();
            this.xclades[loc] = new int[tkcount];
            for (int nt = 0; nt < tkcount; ++nt) {
                final int taxonIndex = getTaxonIndex(tree, tk.get(nt));
                this.xclades[loc][nt] = taxonIndex;
                if (taxonIndex < 0) {
                    throw new IllegalArgumentException("Taxon not found in tree: " + tk.get(nt));
                }
            }

            orderedCalibrations[loc] = cals.remove(k);
            taxaSets.remove(k);
            // cals and taxaSets should match
            --loc;
        }
    }

    // tio[i] will contain all taxaSets contained in the i'th clade, in the form of thier index into orderedCalibrations
    @SuppressWarnings("unchecked") final List<Integer>[] tio = new List[orderedCalibrations.length];
    for (int k = 0; k < orderedCalibrations.length; ++k) {
        tio[k] = new ArrayList<>();
    }

    for (int k = 0; k < orderedCalibrations.length; ++k) {
        final TaxonSet txk = orderedCalibrations[k].taxa();
        for (int i = k + 1; i < orderedCalibrations.length; ++i) {
            if (orderedCalibrations[i].taxa().containsAll(txk)) {
                // register k under its nearest enclosing clade only
                tio[i].add(k);
                break;
            }
        }
    }

    this.taxaPartialOrder = new int[orderedCalibrations.length][];
    for (int k = 0; k < orderedCalibrations.length; ++k) {
        final List<Integer> tiok = tio[k];

        this.taxaPartialOrder[k] = new int[tiok.size()];
        for (int j = 0; j < tiok.size(); ++j) {
            this.taxaPartialOrder[k][j] = tiok.get(j);
        }
    }

    // true if clade is not contained in any other clade
    final boolean[] maximal = new boolean[calCount];
    for (int k = 0; k < calCount; ++k) {
        maximal[k] = true;
    }
    for (int k = 0; k < calCount; ++k) {
        for (final int i : this.taxaPartialOrder[k]) {
            maximal[i] = false;
        }
    }

    userPDF = userMarInput.get();
    if (userPDF == null) {
        if (type == Type.OVER_ALL_TOPOS) {
            if (calCount == 1) {
                // closed form formula
            } else {
                boolean anyParent = false;
                for (final CalibrationPoint c : orderedCalibrations) {
                    if (c.forParentInput.get()) {
                        anyParent = true;
                    }
                }
                if (anyParent) {
                    throw new IllegalArgumentException("Sorry, not implemented: calibration on parent for more than one clade.");
                }
                if (calCount == 2 && orderedCalibrations[1].taxa().containsAll(orderedCalibrations[0].taxa())) {
                    // closed form formulas
                } else {
                    // general case: enumerate lineage counts over all topologies
                    setUpTables(tree.getLeafNodeCount() + 1);
                    linsIter = new CalibrationLineagesIterator(this.xclades, this.taxaPartialOrder, maximal, tree.getLeafNodeCount());
                    lastHeights = new double[calCount];
                }
            }
        } else if (type == Type.OVER_RANKED_COUNTS) {
            setUpTables(tree.getLeafNodeCount() + 1);
        }
    }

    // dated tips are not supported: warn if leaf heights differ
    final List<Node> leafs = tree.getExternalNodes();
    final double height = leafs.get(0).getHeight();
    for (final Node leaf : leafs) {
        if (Math.abs(leaf.getHeight() - height) > 1e-8) {
            Log.warning.println("WARNING: Calibrated Yule Model cannot handle dated tips. Use for example a coalescent prior instead.");
            break;
        }
    }
}
End of aggregated MRCAPrior usage examples.