Use of dr.inference.model.Likelihood in project beast-mcmc by beast-dev.
The class CheckPointTreeModifier, method incorporateAdditionalTaxa.
/**
* Add the remaining taxa, which can be identified through the TreeDataLikelihood XML elements.
*/
public ArrayList<NodeRef> incorporateAdditionalTaxa(CheckPointUpdaterApp.UpdateChoice choice, BranchRates rateModel) {
System.out.println("Tree before adding taxa:\n" + treeModel.toString() + "\n");
ArrayList<NodeRef> newTaxaNodes = new ArrayList<NodeRef>();
for (String str : newTaxaNames) {
for (int i = 0; i < treeModel.getExternalNodeCount(); i++) {
if (treeModel.getNodeTaxon(treeModel.getExternalNode(i)).getId().equals(str)) {
newTaxaNodes.add(treeModel.getExternalNode(i));
//always take into account Taxon dates vs. dates set through a TreeModel
System.out.println(treeModel.getNodeTaxon(treeModel.getExternalNode(i)).getId() + " with height " + treeModel.getNodeHeight(treeModel.getExternalNode(i)) + " or " + treeModel.getNodeTaxon(treeModel.getExternalNode(i)).getHeight());
}
}
}
System.out.println("newTaxaNodes length = " + newTaxaNodes.size());
ArrayList<Taxon> currentTaxa = new ArrayList<Taxon>();
for (int i = 0; i < treeModel.getExternalNodeCount(); i++) {
boolean taxonFound = false;
for (String str : newTaxaNames) {
if (str.equals((treeModel.getNodeTaxon(treeModel.getExternalNode(i))).getId())) {
taxonFound = true;
}
}
if (!taxonFound) {
System.out.println("Adding " + treeModel.getNodeTaxon(treeModel.getExternalNode(i)).getId() + " to list of current taxa");
currentTaxa.add(treeModel.getNodeTaxon(treeModel.getExternalNode(i)));
}
}
System.out.println("Current taxa count = " + currentTaxa.size());
//iterate over both current taxa and to be added taxa
boolean originTaxon = true;
for (Taxon taxon : currentTaxa) {
if (taxon.getHeight() == 0.0) {
originTaxon = false;
System.out.println("Current taxon " + taxon.getId() + " has node height 0.0");
}
}
for (NodeRef newTaxon : newTaxaNodes) {
if (treeModel.getNodeTaxon(newTaxon).getHeight() == 0.0) {
originTaxon = false;
System.out.println("New taxon " + treeModel.getNodeTaxon(newTaxon).getId() + " has node height 0.0");
}
}
//check the Tree(Data)Likelihoods in the connected set of likelihoods
//focus on TreeDataLikelihood, which has getTree() to get the tree for each likelihood
//also get the DataLikelihoodDelegate from TreeDataLikelihood
ArrayList<TreeDataLikelihood> likelihoods = new ArrayList<TreeDataLikelihood>();
ArrayList<Tree> trees = new ArrayList<Tree>();
ArrayList<DataLikelihoodDelegate> delegates = new ArrayList<DataLikelihoodDelegate>();
for (Likelihood likelihood : Likelihood.CONNECTED_LIKELIHOOD_SET) {
if (likelihood instanceof TreeDataLikelihood) {
likelihoods.add((TreeDataLikelihood) likelihood);
trees.add(((TreeDataLikelihood) likelihood).getTree());
delegates.add(((TreeDataLikelihood) likelihood).getDataLikelihoodDelegate());
}
}
//suggested to go through TreeDataLikelihoodParser and give it an extra option to create a HashMap
//keyed by the tree; am currently not overly fond of this approach
ArrayList<PatternList> patternLists = new ArrayList<PatternList>();
for (DataLikelihoodDelegate del : delegates) {
if (del instanceof BeagleDataLikelihoodDelegate) {
patternLists.add(((BeagleDataLikelihoodDelegate) del).getPatternList());
} else if (del instanceof MultiPartitionDataLikelihoodDelegate) {
MultiPartitionDataLikelihoodDelegate mpdld = (MultiPartitionDataLikelihoodDelegate) del;
List<PatternList> list = mpdld.getPatternLists();
for (PatternList pList : list) {
patternLists.add(pList);
}
}
}
if (patternLists.size() == 0) {
throw new RuntimeException("No patterns detected. Please make sure the XML file is BEAST 1.9 compatible.");
}
//aggregate all patterns to create distance matrix
//TODO What about different trees for different partitions?
Patterns patterns = new Patterns(patternLists.get(0));
if (patternLists.size() > 1) {
for (int i = 1; i < patternLists.size(); i++) {
patterns.addPatterns(patternLists.get(i));
}
}
//set the patterns for the distance matrix computations
choice.setPatterns(patterns);
//add new taxa one at a time
System.out.println("Adding " + newTaxaNodes.size() + " taxa ...");
for (NodeRef newTaxon : newTaxaNodes) {
treeModel.setNodeHeight(newTaxon, treeModel.getNodeTaxon(newTaxon).getHeight());
System.out.println("\nadding Taxon: " + newTaxon + " (height = " + treeModel.getNodeHeight(newTaxon) + ")");
//check if this taxon has a more recent sampling date than all other nodes in the current TreeModel
double offset = checkCurrentTreeNodes(newTaxon, treeModel.getRoot());
System.out.println("Sampling date offset when adding " + newTaxon + " = " + offset);
//AND set its current node height to 0.0 IF no originTaxon has been found
if (offset < 0.0) {
if (!originTaxon) {
System.out.println("Updating all node heights with offset " + Math.abs(offset));
updateAllTreeNodes(Math.abs(offset), treeModel.getRoot());
treeModel.setNodeHeight(newTaxon, 0.0);
}
} else if (offset == 0.0) {
if (!originTaxon) {
treeModel.setNodeHeight(newTaxon, 0.0);
}
}
//get the closest Taxon to the Taxon that needs to be added
//take into account which taxa can currently be chosen
Taxon closest = choice.getClosestTaxon(treeModel.getNodeTaxon(newTaxon), currentTaxa);
System.out.println("\nclosest Taxon: " + closest + " with original height: " + closest.getHeight());
//get the distance between these two taxa
double distance = choice.getDistance(treeModel.getNodeTaxon(newTaxon), closest);
System.out.println("at distance: " + distance);
//TODO what if distance == 0.0 ??? how to choose closest taxon then (in absence of geo info)?
//find the NodeRef for the closest Taxon (do not rely on node numbering)
NodeRef closestRef = null;
//careful with node numbering and subtract number of new taxa
for (int i = 0; i < treeModel.getExternalNodeCount(); i++) {
if (treeModel.getNodeTaxon(treeModel.getExternalNode(i)) == closest) {
closestRef = treeModel.getExternalNode(i);
}
}
System.out.println(closestRef + " with height " + treeModel.getNodeHeight(closestRef));
//System.out.println("trying to set node height: " + closestRef + " from " + treeModel.getNodeHeight(closestRef) + " to " + closest.getHeight());
//treeModel.setNodeHeight(closestRef, closest.getHeight());
double timeForDistance = distance / rateModel.getBranchRate(treeModel, closestRef);
System.out.println("timeForDistance = " + timeForDistance);
//get parent node of branch that will be split
NodeRef parent = treeModel.getParent(closestRef);
//determine height of new node
double insertHeight;
if (treeModel.getNodeHeight(closestRef) == treeModel.getNodeHeight(newTaxon)) {
insertHeight = treeModel.getNodeHeight(closestRef) + timeForDistance / 2.0;
System.out.println("treeModel.getNodeHeight(closestRef) == treeModel.getNodeHeight(newTaxon): " + insertHeight);
if (insertHeight >= treeModel.getNodeHeight(parent)) {
insertHeight = treeModel.getNodeHeight(closestRef) + EPSILON * (treeModel.getNodeHeight(parent) - treeModel.getNodeHeight(closestRef));
}
} else {
double remainder = (timeForDistance - Math.abs(treeModel.getNodeHeight(closestRef) - treeModel.getNodeHeight(newTaxon))) / 2.0;
if (remainder > 0) {
insertHeight = Math.max(treeModel.getNodeHeight(closestRef), treeModel.getNodeHeight(newTaxon)) + remainder;
System.out.println("remainder > 0: " + insertHeight);
if (insertHeight >= treeModel.getNodeHeight(parent)) {
insertHeight = treeModel.getNodeHeight(closestRef) + EPSILON * (treeModel.getNodeHeight(parent) - treeModel.getNodeHeight(closestRef));
}
} else {
insertHeight = EPSILON * (treeModel.getNodeHeight(parent) - Math.max(treeModel.getNodeHeight(closestRef), treeModel.getNodeHeight(newTaxon)));
insertHeight += Math.max(treeModel.getNodeHeight(closestRef), treeModel.getNodeHeight(newTaxon));
System.out.println("remainder <= 0: " + insertHeight);
}
}
System.out.println("insert at height: " + insertHeight);
//pass on all the necessary variables to a method that adds the new taxon to the tree
addTaxonAlongBranch(newTaxon, parent, closestRef, insertHeight);
//option to print tree after each taxon addition
System.out.println("\nTree after adding taxon " + newTaxon + ":\n" + treeModel.toString());
//add newly added Taxon to list of current taxa
currentTaxa.add(treeModel.getNodeTaxon(newTaxon));
}
return newTaxaNodes;
}
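The core step above converts the genetic distance between the new taxon and its closest existing tip into time via the branch rate, then proposes a height for the new internal node along the branch above that tip, never letting it reach the parent of the branch being split. That rule can be summarised in isolation. The following is a minimal sketch under stated assumptions: the class name, standalone method signature and the EPSILON value are illustrative and not part of CheckPointTreeModifier.

// Minimal sketch of the insert-height rule used in incorporateAdditionalTaxa above.
// Hypothetical helper, not part of beast-mcmc; EPSILON stands in for the constant
// defined in CheckPointTreeModifier and its value here is an assumption.
public final class InsertHeightSketch {

    private static final double EPSILON = 0.10;

    static double proposeInsertHeight(double closestHeight, double newTaxonHeight,
                                      double parentHeight, double distance, double branchRate) {
        // convert the genetic distance to time along the branch above the closest tip
        double timeForDistance = distance / branchRate;
        double maxTipHeight = Math.max(closestHeight, newTaxonHeight);
        double insertHeight;
        if (closestHeight == newTaxonHeight) {
            // contemporaneous tips: split the implied time evenly between the two pendant branches
            insertHeight = closestHeight + timeForDistance / 2.0;
        } else {
            // part of the distance is already explained by the difference in sampling times
            double remainder = (timeForDistance - Math.abs(closestHeight - newTaxonHeight)) / 2.0;
            insertHeight = (remainder > 0.0)
                    ? maxTipHeight + remainder
                    : maxTipHeight + EPSILON * (parentHeight - maxTipHeight);
        }
        // never place the new node at or above the parent of the branch being split
        if (insertHeight >= parentHeight) {
            insertHeight = closestHeight + EPSILON * (parentHeight - closestHeight);
        }
        return insertHeight;
    }
}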
Use of dr.inference.model.Likelihood in project beast-mcmc by beast-dev.
The class FactorRJMCMCOperatorParser, method parseXMLObject.
@Override
public Object parseXMLObject(XMLObject xo) throws XMLParseException {
//attributes
double weight = xo.getDoubleAttribute(WEIGHT);
int chainLength = xo.getIntegerAttribute(CHAIN_LENGTH);
double sizeParameter = xo.getDoubleAttribute(SIZE_PARAMETER);
//declaration
AdaptableSizeFastMatrixParameter factors, loadings, cutoffs, loadingsSparcity;
//parameters
if (xo.hasChildNamed(FACTORS))
factors = (AdaptableSizeFastMatrixParameter) xo.getChild(FACTORS).getChild(AdaptableSizeFastMatrixParameter.class);
else
factors = null;
loadings = (AdaptableSizeFastMatrixParameter) xo.getChild(LOADINGS).getChild(AdaptableSizeFastMatrixParameter.class);
if (xo.hasChildNamed(CUTOFFS)) {
cutoffs = (AdaptableSizeFastMatrixParameter) xo.getChild(CUTOFFS).getChild(AdaptableSizeFastMatrixParameter.class);
} else
cutoffs = null;
if (xo.hasChildNamed(LOADINGS_SPARSITY))
loadingsSparcity = (AdaptableSizeFastMatrixParameter) xo.getChild(LOADINGS_SPARSITY).getChild(AdaptableSizeFastMatrixParameter.class);
else
loadingsSparcity = null;
//models
DeterminentalPointProcessPrior DPP = null;
if (xo.getChild(SPARSITY_PRIOR) != null)
DPP = (DeterminentalPointProcessPrior) xo.getChild(SPARSITY_PRIOR).getChild(DeterminentalPointProcessPrior.class);
SimpleMCMCOperator NOp = null;
if (xo.getChild(NEGATION_OPERATOR) != null) {
NOp = (SimpleMCMCOperator) xo.getChild(NEGATION_OPERATOR).getChild(SimpleMCMCOperator.class);
}
AbstractModelLikelihood LFM = (AbstractModelLikelihood) xo.getChild(AbstractModelLikelihood.class);
RowDimensionPoissonPrior rowPrior = (RowDimensionPoissonPrior) xo.getChild(ROW_PRIOR).getChild(RowDimensionPoissonPrior.class);
Likelihood loadingsPrior = null;
if (xo.hasChildNamed(LOADINGS_PRIOR)) {
loadingsPrior = (Likelihood) xo.getChild(LOADINGS_PRIOR).getChild(Likelihood.class);
}
//operators
BitFlipOperator sparsityOperator = null;
if (xo.getChild(BitFlipOperator.class) != null)
sparsityOperator = (BitFlipOperator) xo.getChild(BitFlipOperator.class);
SimpleMCMCOperator loadingsOperator = (SimpleMCMCOperator) xo.getChild(LOADINGS_OPERATOR).getChild(SimpleMCMCOperator.class);
SimpleMCMCOperator factorOperator = null;
if (xo.getChild(FACTOR_OPERATOR) != null)
factorOperator = (SimpleMCMCOperator) xo.getChild(FACTOR_OPERATOR).getChild(FactorTreeGibbsOperator.class);
return new FactorRJMCMCOperator(weight, sizeParameter, chainLength, factors, loadings, cutoffs, loadingsSparcity, LFM, DPP, loadingsPrior, loadingsOperator, factorOperator, sparsityOperator, NOp, rowPrior);
}
Use of dr.inference.model.Likelihood in project beast-mcmc by beast-dev.
The class SliceOperator, method main.
public static void main(String[] arg) {
// Define normal model
// Starting value
Parameter meanParameter = new Parameter.Default(1.0);
// Fixed value
Variable<Double> stdev = new Variable.D(1.0, 1);
ParametricDistributionModel densityModel = new NormalDistributionModel(meanParameter, stdev);
DistributionLikelihood likelihood = new DistributionLikelihood(densityModel);
// Define prior
// Hyper-priors
DistributionLikelihood prior = new DistributionLikelihood(new NormalDistribution(0.0, 1.0));
prior.addData(meanParameter);
// Define data
likelihood.addData(new Attribute.Default<double[]>("Data", new double[] { 0.0, 1.0, 2.0 }));
List<Likelihood> list = new ArrayList<Likelihood>();
list.add(likelihood);
list.add(prior);
CompoundLikelihood posterior = new CompoundLikelihood(0, list);
SliceOperator sliceSampler = new SliceOperator(meanParameter);
final int length = 10000;
double mean = 0;
double variance = 0;
for (int i = 0; i < length; i++) {
sliceSampler.doOperation(posterior);
double x = meanParameter.getValue(0);
mean += x;
variance += x * x;
}
mean /= length;
variance /= length;
variance -= mean * mean;
System.out.println("E(x) = " + mean);
System.out.println("V(x) = " + variance);
}
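For the model built in this main method the posterior is available in closed form, so the Monte Carlo estimates printed at the end can be checked analytically: with a N(0, 1) prior on the mean, unit-variance data {0, 1, 2} and all variances known, the posterior of the mean is N(0.75, 0.25). A minimal sketch of that check, assuming the slice sampler targets the compound posterior constructed above (the class name SlicePosteriorCheck is illustrative):

// Analytic check (conjugate normal-normal model) for the empirical moments printed
// by main() above: prior mu ~ N(0, 1), data y_i ~ N(mu, 1) with y = {0, 1, 2}.
// The posterior is N(sum(y) / (n + 1), 1 / (n + 1)).
public class SlicePosteriorCheck {
    public static void main(String[] args) {
        double priorMean = 0.0, priorVar = 1.0, dataVar = 1.0;
        double[] y = {0.0, 1.0, 2.0};
        double sum = 0.0;
        for (double v : y) sum += v;
        int n = y.length;
        // posterior precision = prior precision + n * data precision
        double postVar = 1.0 / (1.0 / priorVar + n / dataVar);
        double postMean = postVar * (priorMean / priorVar + sum / dataVar);
        System.out.println("analytic E(x) = " + postMean); // 0.75
        System.out.println("analytic V(x) = " + postVar);  // 0.25
    }
}

With 10000 iterations, the E(x) and V(x) printed by the slice sampler should be expected to lie close to 0.75 and 0.25 respectively.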
Use of dr.inference.model.Likelihood in project beast-mcmc by beast-dev.
The class StrictClockTest, method testStrictClock.
public void testStrictClock() throws Exception {
Parameter popSize = new Parameter.Default(ConstantPopulationModelParser.POPULATION_SIZE, 380.0, 0, 38000.0);
ConstantPopulationModel constantModel = createRandomInitialTree(popSize);
CoalescentLikelihood coalescent = new CoalescentLikelihood(treeModel, null, new ArrayList<TaxonList>(), constantModel);
coalescent.setId("coalescent");
// clock model
Parameter rateParameter = new Parameter.Default(StrictClockBranchRates.RATE, 2.3E-5, 0, 100.0);
StrictClockBranchRates branchRateModel = new StrictClockBranchRates(rateParameter);
// Sub model
Parameter freqs = new Parameter.Default(alignment.getStateFrequencies());
Parameter kappa = new Parameter.Default(HKYParser.KAPPA, 1.0, 0, 100.0);
FrequencyModel f = new FrequencyModel(Nucleotides.INSTANCE, freqs);
HKY hky = new HKY(kappa, f);
//siteModel
GammaSiteModel siteModel = new GammaSiteModel(hky);
Parameter mu = new Parameter.Default(GammaSiteModelParser.MUTATION_RATE, 1.0, 0, Double.POSITIVE_INFINITY);
siteModel.setMutationRateParameter(mu);
//treeLikelihood
SitePatterns patterns = new SitePatterns(alignment, null, 0, -1, 1, true);
TreeLikelihood treeLikelihood = new TreeLikelihood(patterns, treeModel, siteModel, branchRateModel, null, false, false, true, false, false);
treeLikelihood.setId(TreeLikelihoodParser.TREE_LIKELIHOOD);
// Operators
OperatorSchedule schedule = new SimpleOperatorSchedule();
MCMCOperator operator = new ScaleOperator(kappa, 0.75);
operator.setWeight(1.0);
schedule.addOperator(operator);
operator = new ScaleOperator(rateParameter, 0.75);
operator.setWeight(3.0);
schedule.addOperator(operator);
Parameter allInternalHeights = treeModel.createNodeHeightsParameter(true, true, false);
operator = new UpDownOperator(new Scalable[] { new Scalable.Default(rateParameter) }, new Scalable[] { new Scalable.Default(allInternalHeights) }, 0.75, 3.0, CoercionMode.COERCION_ON);
schedule.addOperator(operator);
operator = new ScaleOperator(popSize, 0.75);
operator.setWeight(3.0);
schedule.addOperator(operator);
Parameter rootHeight = treeModel.getRootHeightParameter();
rootHeight.setId(TREE_HEIGHT);
operator = new ScaleOperator(rootHeight, 0.75);
operator.setWeight(3.0);
schedule.addOperator(operator);
Parameter internalHeights = treeModel.createNodeHeightsParameter(false, true, false);
operator = new UniformOperator(internalHeights, 30.0);
schedule.addOperator(operator);
operator = new SubtreeSlideOperator(treeModel, 15.0, 1.0, true, false, false, false, CoercionMode.COERCION_ON);
schedule.addOperator(operator);
operator = new ExchangeOperator(ExchangeOperator.NARROW, treeModel, 15.0);
// operator.doOperation();
schedule.addOperator(operator);
operator = new ExchangeOperator(ExchangeOperator.WIDE, treeModel, 3.0);
// operator.doOperation();
schedule.addOperator(operator);
operator = new WilsonBalding(treeModel, 3.0);
// operator.doOperation();
schedule.addOperator(operator);
//CompoundLikelihood
List<Likelihood> likelihoods = new ArrayList<Likelihood>();
likelihoods.add(coalescent);
Likelihood prior = new CompoundLikelihood(0, likelihoods);
prior.setId(CompoundLikelihoodParser.PRIOR);
likelihoods.clear();
likelihoods.add(treeLikelihood);
Likelihood likelihood = new CompoundLikelihood(-1, likelihoods);
likelihoods.clear();
likelihoods.add(prior);
likelihoods.add(likelihood);
Likelihood posterior = new CompoundLikelihood(0, likelihoods);
posterior.setId(CompoundLikelihoodParser.POSTERIOR);
// Log
ArrayLogFormatter formatter = new ArrayLogFormatter(false);
MCLogger[] loggers = new MCLogger[2];
loggers[0] = new MCLogger(formatter, 500, false);
loggers[0].add(posterior);
loggers[0].add(treeLikelihood);
loggers[0].add(rootHeight);
loggers[0].add(rateParameter);
loggers[0].add(popSize);
loggers[0].add(kappa);
loggers[0].add(coalescent);
loggers[1] = new MCLogger(new TabDelimitedFormatter(System.out), 10000, false);
loggers[1].add(posterior);
loggers[1].add(treeLikelihood);
loggers[1].add(rootHeight);
loggers[1].add(rateParameter);
loggers[1].add(coalescent);
// MCMC
MCMC mcmc = new MCMC("mcmc1");
MCMCOptions options = new MCMCOptions(1000000);
mcmc.setShowOperatorAnalysis(true);
mcmc.init(options, posterior, schedule, loggers);
mcmc.run();
// time
System.out.println(mcmc.getTimer().toString());
// Tracer
List<Trace> traces = formatter.getTraces();
ArrayTraceList traceList = new ArrayTraceList("RandomLocalClockTest", traces, 0);
for (int i = 1; i < traces.size(); i++) {
traceList.analyseTrace(i);
}
// <expectation name="posterior" value="-3928.71"/>
// <expectation name="clock.rate" value="8.04835E-4"/>
// <expectation name="constant.popSize" value="37.3762"/>
// <expectation name="hky.kappa" value="18.2782"/>
// <expectation name="treeModel.rootHeight" value="69.0580"/>
// <expectation name="treeLikelihood" value="-3856.59"/>
// <expectation name="coalescent" value="-72.1285"/>
TraceCorrelation likelihoodStats = traceList.getCorrelationStatistics(traceList.getTraceIndex(CompoundLikelihoodParser.POSTERIOR));
assertExpectation(CompoundLikelihoodParser.POSTERIOR, likelihoodStats, -3928.71);
likelihoodStats = traceList.getCorrelationStatistics(traceList.getTraceIndex(TreeLikelihoodParser.TREE_LIKELIHOOD));
assertExpectation(TreeLikelihoodParser.TREE_LIKELIHOOD, likelihoodStats, -3856.59);
TraceCorrelation treeHeightStats = traceList.getCorrelationStatistics(traceList.getTraceIndex(TREE_HEIGHT));
assertExpectation(TREE_HEIGHT, treeHeightStats, 69.0580);
TraceCorrelation kappaStats = traceList.getCorrelationStatistics(traceList.getTraceIndex(HKYParser.KAPPA));
assertExpectation(HKYParser.KAPPA, kappaStats, 18.2782);
TraceCorrelation rateStats = traceList.getCorrelationStatistics(traceList.getTraceIndex(StrictClockBranchRates.RATE));
assertExpectation(StrictClockBranchRates.RATE, rateStats, 8.04835E-4);
TraceCorrelation popStats = traceList.getCorrelationStatistics(traceList.getTraceIndex(ConstantPopulationModelParser.POPULATION_SIZE));
assertExpectation(ConstantPopulationModelParser.POPULATION_SIZE, popStats, 37.3762);
TraceCorrelation coalescentStats = traceList.getCorrelationStatistics(traceList.getTraceIndex("coalescent"));
assertExpectation("coalescent", coalescentStats, -72.1285);
}
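The assertExpectation calls compare summary statistics of each trace against the reference values quoted in the expectation comments. One typical way such a check can be implemented is sketched below; this is an illustrative stand-in, not the actual assertExpectation from the beast-mcmc test base classes, and the four-standard-error tolerance is an assumption.

// Hypothetical stand-in for an expectation assertion on an MCMC trace; the real
// assertExpectation used by this test may differ.
public class TraceExpectationSketch {

    // Compare the sample mean against a reference value within a multiple of the
    // effective-sample-size-corrected standard error of the mean.
    static void assertTraceExpectation(String name, double traceMean, double traceVariance,
                                       double ess, double expected) {
        double stdErrorOfMean = Math.sqrt(traceVariance / ess);
        double tolerance = 4.0 * stdErrorOfMean; // assumed tolerance, purely illustrative
        if (Math.abs(traceMean - expected) > tolerance) {
            throw new AssertionError(name + ": mean " + traceMean + " differs from expected "
                    + expected + " by more than " + tolerance);
        }
    }
}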
Use of dr.inference.model.Likelihood in project beast-mcmc by beast-dev.
The class RLYModelTest, method randomLocalYuleTester.
private void randomLocalYuleTester(TreeModel treeModel, Parameter I, Parameter b, OperatorSchedule schedule) {
MCMC mcmc = new MCMC("mcmc1");
MCMCOptions options = new MCMCOptions(1000000);
TreeLengthStatistic tls = new TreeLengthStatistic(TL, treeModel);
TreeHeightStatistic rootHeight = new TreeHeightStatistic(TREE_HEIGHT, treeModel);
Parameter m = new Parameter.Default("m", 1.0, 0.0, Double.MAX_VALUE);
SpeciationModel speciationModel = new RandomLocalYuleModel(b, I, m, false, Units.Type.YEARS, 4);
Likelihood likelihood = new SpeciationLikelihood(treeModel, speciationModel, "randomYule.like");
ArrayLogFormatter formatter = new ArrayLogFormatter(false);
MCLogger[] loggers = new MCLogger[2];
loggers[0] = new MCLogger(formatter, 100, false);
loggers[0].add(likelihood);
loggers[0].add(rootHeight);
loggers[0].add(tls);
loggers[0].add(I);
loggers[1] = new MCLogger(new TabDelimitedFormatter(System.out), 100000, false);
loggers[1].add(likelihood);
loggers[1].add(rootHeight);
loggers[1].add(tls);
loggers[1].add(I);
mcmc.setShowOperatorAnalysis(true);
mcmc.init(options, likelihood, schedule, loggers);
mcmc.run();
List<Trace> traces = formatter.getTraces();
ArrayTraceList traceList = new ArrayTraceList("yuleModelTest", traces, 0);
for (int i = 1; i < traces.size(); i++) {
traceList.analyseTrace(i);
}
TraceCorrelation tlStats = traceList.getCorrelationStatistics(traceList.getTraceIndex("root." + birthRateIndicator));
System.out.println("mean = " + tlStats.getMean());
System.out.println("expected mean = 0.5");
assertExpectation("root." + birthRateIndicator, tlStats, 0.5);
}