Use of dr.inference.model.CompoundLikelihood in project beast-mcmc by beast-dev.
In class MCMCParser, method parseXMLObject:
/**
 * @return an mcmc object based on the XML element it was passed.
 */
public Object parseXMLObject(XMLObject xo) throws XMLParseException {
    MCMC mcmc = new MCMC(xo.getAttribute(NAME, "mcmc1"));
    long chainLength = xo.getLongIntegerAttribute(CHAIN_LENGTH);

    // Coercion (operator auto-optimization) settings; the delay defaults to 1% of the chain length.
    boolean useCoercion = xo.getAttribute(COERCION, true);
    long coercionDelay = chainLength / 100;
    if (xo.hasAttribute(PRE_BURNIN)) {
        coercionDelay = xo.getIntegerAttribute(PRE_BURNIN);
    }
    coercionDelay = xo.getAttribute(COERCION_DELAY, coercionDelay);
    double temperature = xo.getAttribute(TEMPERATURE, 1.0);

    // Full-evaluation test settings; the threshold can be overridden by a system property.
    long fullEvaluationCount = xo.getAttribute(FULL_EVALUATION, 2000);
    double evaluationTestThreshold = MarkovChain.EVALUATION_TEST_THRESHOLD;
    if (System.getProperty("mcmc.evaluation.threshold") != null) {
        evaluationTestThreshold = Double.parseDouble(System.getProperty("mcmc.evaluation.threshold"));
    }
    evaluationTestThreshold = xo.getAttribute(EVALUATION_THRESHOLD, evaluationTestThreshold);
    int minOperatorCountForFullEvaluation = xo.getAttribute(MIN_OPS_EVALUATIONS, 1);

    MCMCOptions options = new MCMCOptions(chainLength, fullEvaluationCount, minOperatorCountForFullEvaluation,
            evaluationTestThreshold, useCoercion, coercionDelay, temperature);

    OperatorSchedule opsched = (OperatorSchedule) xo.getChild(OperatorSchedule.class);
    Likelihood likelihood = (Likelihood) xo.getChild(Likelihood.class);
    likelihood.setUsed();

    if (Boolean.valueOf(System.getProperty("show_warnings", "false"))) {
        // check that all models, parameters and likelihoods are being used
        for (Likelihood l : Likelihood.FULL_LIKELIHOOD_SET) {
            if (!l.isUsed()) {
                java.util.logging.Logger.getLogger("dr.inference").warning("Likelihood, " + l.getId() + ", of class " + l.getClass().getName() + " is not being handled by the MCMC.");
            }
        }
        for (Model m : Model.FULL_MODEL_SET) {
            if (!m.isUsed()) {
                java.util.logging.Logger.getLogger("dr.inference").warning("Model, " + m.getId() + ", of class " + m.getClass().getName() + " is not being handled by the MCMC.");
            }
        }
        for (Parameter p : Parameter.FULL_PARAMETER_SET) {
            if (!p.isUsed()) {
                java.util.logging.Logger.getLogger("dr.inference").warning("Parameter, " + p.getId() + ", of class " + p.getClass().getName() + " is not being handled by the MCMC.");
            }
        }
    }

    // collect all Logger children of this element
    ArrayList<Logger> loggers = new ArrayList<Logger>();
    for (int i = 0; i < xo.getChildCount(); i++) {
        Object child = xo.getChild(i);
        if (child instanceof Logger) {
            loggers.add((Logger) child);
        }
    }

    mcmc.setShowOperatorAnalysis(true);
    if (xo.hasAttribute(OPERATOR_ANALYSIS)) {
        mcmc.setOperatorAnalysisFile(XMLParser.getLogFile(xo, OPERATOR_ANALYSIS));
    }

    Logger[] loggerArray = new Logger[loggers.size()];
    loggers.toArray(loggerArray);

    java.util.logging.Logger.getLogger("dr.inference").info(
            "\nCreating the MCMC chain:" +
            "\n chainLength=" + options.getChainLength() +
            "\n autoOptimize=" + options.useCoercion() +
            (options.useCoercion() ? "\n autoOptimize delayed for " + options.getCoercionDelay() + " steps" : "") +
            (options.getFullEvaluationCount() == 0 ? "\n full evaluation test off" : ""));

    mcmc.init(options, likelihood, opsched, loggerArray);

    // Fail early if the starting state has zero posterior probability;
    // a CompoundLikelihood can report which component is responsible.
    MarkovChain mc = mcmc.getMarkovChain();
    double initialScore = mc.getCurrentScore();
    if (initialScore == Double.NEGATIVE_INFINITY) {
        String message = "The initial posterior is zero";
        if (likelihood instanceof CompoundLikelihood) {
            message += ": " + ((CompoundLikelihood) likelihood).getDiagnosis(2);
        } else {
            message += "!";
        }
        throw new IllegalArgumentException(message);
    }

    if (!xo.getAttribute(SPAWN, true))
        mcmc.setSpawnable(false);

    return mcmc;
}
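The only CompoundLikelihood-specific call in this parser is getDiagnosis(int), used when the chain's starting state has zero posterior probability. The sketch below factors that same pattern into a standalone helper; it is illustrative only. The class name PosteriorDiagnosis is hypothetical, and Likelihood.getLogLikelihood() is assumed here as the score in place of the MarkovChain.getCurrentScore() call used by the parser.

// Minimal sketch, assuming the beast-mcmc classes are on the classpath.
import dr.inference.model.CompoundLikelihood;
import dr.inference.model.Likelihood;

public final class PosteriorDiagnosis {

    // Returns a human-readable reason when the supplied likelihood evaluates to log(0);
    // returns null when the starting state has non-zero posterior probability.
    public static String diagnoseZeroPosterior(Likelihood posterior) {
        if (posterior.getLogLikelihood() > Double.NEGATIVE_INFINITY) {
            return null; // starting state is valid, nothing to report
        }
        if (posterior instanceof CompoundLikelihood) {
            // Ask the compound to report which of its components is -Inf,
            // descending two levels into nested compounds (the same call MCMCParser makes).
            return "The initial posterior is zero: "
                    + ((CompoundLikelihood) posterior).getDiagnosis(2);
        }
        return "The initial posterior is zero!";
    }

    private PosteriorDiagnosis() {
    }
}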
Use of dr.inference.model.CompoundLikelihood in project beast-mcmc by beast-dev.
In class PMDTestProblem, method testPMD:
public void testPMD() throws Exception {
    Parameter popSize = new Parameter.Default(ConstantPopulationModelParser.POPULATION_SIZE, 496432.69917113904, 0, Double.POSITIVE_INFINITY);
    ConstantPopulationModel constantModel = createRandomInitialTree(popSize);
    CoalescentLikelihood coalescent = new CoalescentLikelihood(treeModel, null, new ArrayList<TaxonList>(), constantModel);
    coalescent.setId("coalescent");

    // clock model
    Parameter rateParameter = new Parameter.Default(StrictClockBranchRates.RATE, 4.0E-7, 0, 100.0);
    StrictClockBranchRates branchRateModel = new StrictClockBranchRates(rateParameter);

    // substitution model
    Parameter freqs = new Parameter.Default(new double[] { 0.25, 0.25, 0.25, 0.25 });
    Parameter kappa = new Parameter.Default(HKYParser.KAPPA, 1.0, 1.0E-8, Double.POSITIVE_INFINITY);
    FrequencyModel f = new FrequencyModel(Nucleotides.INSTANCE, freqs);
    HKY hky = new HKY(kappa, f);

    // site model
    GammaSiteModel siteModel = new GammaSiteModel(hky);
    Parameter mu = new Parameter.Default(GammaSiteModelParser.MUTATION_RATE, 1.0, 0, Double.POSITIVE_INFINITY);
    siteModel.setMutationRateParameter(mu);

    // SequenceErrorModel
    Parameter ageRelatedRateParameter = new Parameter.Default(SequenceErrorModelParser.AGE_RELATED_RATE, 4.0E-7, 0, 100.0);
    TipStatesModel aDNADamageModel = new SequenceErrorModel(null, null, SequenceErrorModel.ErrorType.TRANSITIONS_ONLY, null, ageRelatedRateParameter, null);

    // treeLikelihood
    SitePatterns patterns = new SitePatterns(alignment, null, 0, -1, 1, true);
    TreeLikelihood treeLikelihood = new TreeLikelihood(patterns, treeModel, siteModel, branchRateModel, aDNADamageModel, false, false, true, false, false);
    treeLikelihood.setId(TreeLikelihoodParser.TREE_LIKELIHOOD);

    // operators
    OperatorSchedule schedule = new SimpleOperatorSchedule();
    MCMCOperator operator = new ScaleOperator(kappa, 0.75);
    operator.setWeight(1.0);
    schedule.addOperator(operator);

    operator = new ScaleOperator(rateParameter, 0.75);
    operator.setWeight(3.0);
    schedule.addOperator(operator);

    Parameter allInternalHeights = treeModel.createNodeHeightsParameter(true, true, false);
    operator = new UpDownOperator(new Scalable[] { new Scalable.Default(rateParameter) },
            new Scalable[] { new Scalable.Default(allInternalHeights) }, 0.75, 3.0, CoercionMode.COERCION_ON);
    schedule.addOperator(operator);

    operator = new ScaleOperator(popSize, 0.75);
    operator.setWeight(3.0);
    schedule.addOperator(operator);

    operator = new ScaleOperator(ageRelatedRateParameter, 0.75);
    operator.setWeight(3.0);
    schedule.addOperator(operator);

    Parameter rootHeight = treeModel.getRootHeightParameter();
    rootHeight.setId(TREE_HEIGHT);
    operator = new ScaleOperator(rootHeight, 0.75);
    operator.setWeight(3.0);
    schedule.addOperator(operator);

    Parameter internalHeights = treeModel.createNodeHeightsParameter(false, true, false);
    operator = new UniformOperator(internalHeights, 30.0);
    schedule.addOperator(operator);

    operator = new SubtreeSlideOperator(treeModel, 15.0, 49643.2699171139, true, false, false, false, CoercionMode.COERCION_ON);
    schedule.addOperator(operator);

    operator = new ExchangeOperator(ExchangeOperator.NARROW, treeModel, 15.0);
    // operator.doOperation();
    schedule.addOperator(operator);

    operator = new ExchangeOperator(ExchangeOperator.WIDE, treeModel, 3.0);
    // operator.doOperation();
    schedule.addOperator(operator);

    operator = new WilsonBalding(treeModel, 3.0);
    // operator.doOperation();
    schedule.addOperator(operator);

    // ??? correct?
    operator = new DeltaExchangeOperator(freqs, new int[] { 1, 1, 1, 1 }, 0.01, 1.0, false, CoercionMode.COERCION_ON);
    schedule.addOperator(operator);

    // CompoundLikelihood: prior, likelihood and posterior are nested compounds
    OneOnXPrior likelihood1 = new OneOnXPrior();
    likelihood1.addData(popSize);
    OneOnXPrior likelihood2 = new OneOnXPrior();
    likelihood2.addData(kappa);

    List<Likelihood> likelihoods = new ArrayList<Likelihood>();
    likelihoods.add(likelihood1);
    likelihoods.add(likelihood2);
    likelihoods.add(coalescent);
    Likelihood prior = new CompoundLikelihood(0, likelihoods);
    prior.setId(CompoundLikelihoodParser.PRIOR);

    likelihoods.clear();
    likelihoods.add(treeLikelihood);
    Likelihood likelihood = new CompoundLikelihood(-1, likelihoods);

    likelihoods.clear();
    likelihoods.add(prior);
    likelihoods.add(likelihood);
    Likelihood posterior = new CompoundLikelihood(0, likelihoods);
    posterior.setId(CompoundLikelihoodParser.POSTERIOR);

    // loggers
    ArrayLogFormatter formatter = new ArrayLogFormatter(false);
    MCLogger[] loggers = new MCLogger[2];
    loggers[0] = new MCLogger(formatter, 1000, false);
    loggers[0].add(posterior);
    loggers[0].add(treeLikelihood);
    loggers[0].add(rootHeight);
    loggers[0].add(rateParameter);
    loggers[0].add(ageRelatedRateParameter);
    loggers[0].add(popSize);
    loggers[0].add(kappa);
    loggers[0].add(coalescent);

    loggers[1] = new MCLogger(new TabDelimitedFormatter(System.out), 10000, false);
    loggers[1].add(posterior);
    loggers[1].add(treeLikelihood);
    loggers[1].add(rootHeight);
    loggers[1].add(rateParameter);

    // MCMC
    MCMC mcmc = new MCMC("mcmc1");
    MCMCOptions options = new MCMCOptions(1000000);
    mcmc.setShowOperatorAnalysis(true);
    mcmc.init(options, posterior, schedule, loggers);
    mcmc.run();

    // time
    System.out.println(mcmc.getTimer().toString());

    // Tracer
    List<Trace> traces = formatter.getTraces();
    ArrayTraceList traceList = new ArrayTraceList("PMDTest", traces, 0);
    for (int i = 1; i < traces.size(); i++) {
        traceList.analyseTrace(i);
    }

    // <expectation name="clock.rate" value="1.5E-7"/>
    // <expectation name="errorModel.ageRate" value="0.7E-7"/>
    // <expectation name="hky.kappa" value="10"/>
    TraceCorrelation kappaStats = traceList.getCorrelationStatistics(traceList.getTraceIndex(HKYParser.KAPPA));
    assertExpectation(HKYParser.KAPPA, kappaStats, 10);

    TraceCorrelation rateStats = traceList.getCorrelationStatistics(traceList.getTraceIndex(StrictClockBranchRates.RATE));
    assertExpectation(StrictClockBranchRates.RATE, rateStats, 1.5E-7);

    TraceCorrelation ageRateStats = traceList.getCorrelationStatistics(traceList.getTraceIndex(SequenceErrorModelParser.AGE_RELATED_RATE));
    assertExpectation(SequenceErrorModelParser.AGE_RELATED_RATE, ageRateStats, 0.7E-7);
}
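The CompoundLikelihood usage in this test is the usual prior/likelihood/posterior nesting: the two OneOnXPrior objects and the coalescent go into one compound (the prior), the tree likelihood into another, and those two compounds are combined into the posterior that drives the chain. The sketch below factors that wiring into a helper for reuse; it is a sketch only. The class and method names are hypothetical, the string ids "prior" and "posterior" stand in for the CompoundLikelihoodParser constants used in the test, and the integer constructor arguments (0 and -1) are copied verbatim from the test without further interpretation.

// Minimal sketch, assuming the beast-mcmc classes are on the classpath.
import java.util.ArrayList;
import java.util.List;

import dr.inference.model.CompoundLikelihood;
import dr.inference.model.Likelihood;

public final class PosteriorBuilder {

    // Wraps the priors and the data likelihoods in their own CompoundLikelihoods,
    // then combines the two into a posterior, mirroring what testPMD does by hand.
    public static Likelihood buildPosterior(List<Likelihood> priors, List<Likelihood> dataLikelihoods) {
        // Defensive copies: testPMD reuses and clears a single list, which this avoids.
        Likelihood prior = new CompoundLikelihood(0, new ArrayList<Likelihood>(priors));
        prior.setId("prior"); // the test uses CompoundLikelihoodParser.PRIOR

        Likelihood likelihood = new CompoundLikelihood(-1, new ArrayList<Likelihood>(dataLikelihoods));

        List<Likelihood> both = new ArrayList<Likelihood>();
        both.add(prior);
        both.add(likelihood);

        Likelihood posterior = new CompoundLikelihood(0, both);
        posterior.setId("posterior"); // the test uses CompoundLikelihoodParser.POSTERIOR
        return posterior;
    }

    private PosteriorBuilder() {
    }
}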