Example usage of dr.evomodel.treelikelihood.MarkovJumpsBeagleTreeLikelihood in the beast-mcmc project by beast-dev, taken from the class MarkovJumpsTest, method testMarkovJumps:
/**
 * Simulates Markov jumps on a fixed two-taxon tree under an HKY model and
 * checks the Monte Carlo average jump counts against reference values
 * precomputed in R ({@code valuesFromR}), to a tolerance of 1E-2.
 */
public void testMarkovJumps() {
    // Fix the RNG seed so the stochastic jump simulation is reproducible.
    MathUtils.setSeed(666);

    createAlignment(sequencesTwo, Nucleotides.INSTANCE);
    try {
        // createSpecifiedTree("((human:1,chimp:1):1,gorilla:2)");
        createSpecifiedTree("(human:1,chimp:1)");
    } catch (Exception e) {
        // Chain the caught exception so the underlying parse failure is not lost.
        throw new RuntimeException("Unable to parse Newick tree", e);
    }

    // Substitution model: HKY with fixed base frequencies and kappa = 10.
    Parameter freqs = new Parameter.Default(new double[]{0.40, 0.25, 0.25, 0.10});
    Parameter kappa = new Parameter.Default(HKYParser.KAPPA, 10.0, 0, 100);
    FrequencyModel f = new FrequencyModel(Nucleotides.INSTANCE, freqs);
    HKY hky = new HKY(kappa, f);

    // Site model: single rate category (no gamma categories, no invariant sites).
    // double alpha = 0.5;
    Parameter mu = new Parameter.Default(GammaSiteModelParser.MUTATION_RATE, 0.5, 0, Double.POSITIVE_INFINITY);
    // Parameter pInv = new Parameter.Default("pInv", 0.5, 0, 1);
    Parameter pInv = null;
    GammaSiteRateModel siteRateModel = new GammaSiteRateModel("gammaModel", mu, null, -1, pInv);
    siteRateModel.setSubstitutionModel(hky);

    // Tree likelihood with Markov-jump reconstruction enabled.
    SitePatterns patterns = new SitePatterns(alignment, null, 0, -1, 1, true);
    BranchModel branchModel = new HomogeneousBranchModel(siteRateModel.getSubstitutionModel());
    BranchRateModel branchRateModel = null;
    MarkovJumpsBeagleTreeLikelihood mjTreeLikelihood = new MarkovJumpsBeagleTreeLikelihood(
            patterns, treeModel, branchModel, siteRateModel, branchRateModel, null,
            false, PartialsRescalingScheme.AUTO, true, null,
            hky.getDataType(), "stateTag",
            false, // use MAP
            true,  // return ML (marginal log likelihood)
            false, // use uniformization
            false, // report unconditioned columns
            1000); // number of simulants

    // Attach one register per counting/reward specification defined on the fixture.
    int nRegisters = registerValues.length;
    int nSim = 10000;
    for (int i = 0; i < nRegisters; i++) {
        Parameter registerParameter = new Parameter.Default(registerValues[i]);
        registerParameter.setId(registerTages[i]);
        mjTreeLikelihood.addRegister(registerParameter, registerTypes[i], registerScales[i]);
    }

    double logLikelihood = mjTreeLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);

    // Monte Carlo estimate: accumulate simulated jump counts over nSim draws,
    // forcing a fresh simulation each iteration via makeDirty().
    double[] averages = new double[nRegisters];
    for (int i = 0; i < nSim; i++) {
        for (int r = 0; r < nRegisters; r++) {
            double[][] values = mjTreeLikelihood.getMarkovJumpsForRegister(treeModel, r);
            for (double[] value : values) {
                averages[r] += value[0];
            }
        }
        mjTreeLikelihood.makeDirty();
    }
    for (int r = 0; r < nRegisters; r++) {
        averages[r] /= (double) nSim;
        System.out.print(" " + averages[r]);
    }
    System.out.println("");

    // Compare simulated averages against the reference expectations from R.
    assertEquals(valuesFromR, averages, 1E-2);
}
Another example usage of dr.evomodel.treelikelihood.MarkovJumpsBeagleTreeLikelihood in the beast-mcmc project by beast-dev, taken from the class MarkovJumpsTreeLikelihoodParser, method createTreeLikelihood:
/**
 * Builds a MarkovJumpsBeagleTreeLikelihood from the XML configuration,
 * wiring up count/reward registers and, optionally, complete-history logging.
 *
 * @throws XMLParseException if the pattern list uses compressed (unique) patterns,
 *                           which Markov jump reconstruction does not support
 */
protected BeagleTreeLikelihood createTreeLikelihood(PatternList patternList, TreeModel treeModel,
                                                    BranchModel branchModel, GammaSiteRateModel siteRateModel,
                                                    BranchRateModel branchRateModel, TipStatesModel tipStatesModel,
                                                    boolean useAmbiguities, PartialsRescalingScheme scalingScheme,
                                                    boolean delayScaling,
                                                    Map<Set<String>, Parameter> partialsRestrictions,
                                                    XMLObject xo) throws XMLParseException {

    DataType dataType = branchModel.getRootSubstitutionModel().getDataType();

    // Reconstruction options read from XML attributes, each with a default.
    String stateTag = xo.getAttribute(RECONSTRUCTION_TAG_NAME, RECONSTRUCTION_TAG);
    String jumpTag = xo.getAttribute(JUMP_TAG_NAME, JUMP_TAG);
    boolean scaleRewards = xo.getAttribute(SCALE_REWARDS, true);
    boolean useMAP = xo.getAttribute(MAP_RECONSTRUCTION, false);
    boolean useMarginalLogLikelihood = xo.getAttribute(MARGINAL_LIKELIHOOD, true);
    boolean useUniformization = xo.getAttribute(USE_UNIFORMIZATION, false);
    boolean reportUnconditionedColumns = xo.getAttribute(REPORT_UNCONDITIONED_COLUMNS, false);
    int nSimulants = xo.getAttribute(NUMBER_OF_SIMULANTS, 1);

    // Jump reconstruction needs per-site information, so compressed patterns are rejected.
    if (patternList.areUnique()) {
        throw new XMLParseException("Markov Jumps reconstruction cannot be used with compressed (unique) patterns.");
    }

    MarkovJumpsBeagleTreeLikelihood likelihood = new MarkovJumpsBeagleTreeLikelihood(
            patternList, treeModel, branchModel, siteRateModel, branchRateModel, tipStatesModel,
            useAmbiguities, scalingScheme, delayScaling, partialsRestrictions, dataType, stateTag,
            useMAP, useMarginalLogLikelihood, useUniformization, reportUnconditionedColumns, nSimulants);

    int stateCount = dataType.getStateCount();

    // Count registers may sit directly under this element (backwards compatibility) ...
    int registerCount = parseAllChildren(xo, likelihood, stateCount, jumpTag, MarkovJumpsType.COUNTS, false);

    // ... or inside dedicated <counts> / <rewards> child elements.
    XMLObject countsChild = xo.getChild(COUNTS);
    if (countsChild != null) {
        registerCount += parseAllChildren(countsChild, likelihood, stateCount, jumpTag, MarkovJumpsType.COUNTS, false);
    }
    XMLObject rewardsChild = xo.getChild(REWARDS);
    if (rewardsChild != null) {
        registerCount += parseAllChildren(rewardsChild, likelihood, stateCount, jumpTag, MarkovJumpsType.REWARDS, scaleRewards);
    }

    if (registerCount == 0) {
        // No registers specified: intentionally do nothing, so the likelihood
        // behaves exactly like an AncestralStateBeagleTreeLikelihood.
    }

    if (xo.getAttribute(SAVE_HISTORY, false)) {
        // Build a register that counts every off-diagonal transition once
        // (diagonal entries contribute nothing, since they are not jumps).
        Parameter allCounts = new Parameter.Default(stateCount * stateCount);
        for (int from = 0; from < stateCount; ++from) {
            for (int to = 0; to < stateCount; ++to) {
                allCounts.setParameterValue(from * stateCount + to, from == to ? 0.0 : 1.0);
            }
        }
        allCounts.setId(MarkovJumpsBeagleTreeLikelihood.TOTAL_COUNTS);
        likelihood.setLogHistories(xo.getAttribute(LOG_HISTORY, false));
        likelihood.setUseCompactHistory(xo.getAttribute(COMPACT_HISTORY, false));
        likelihood.addRegister(allCounts, MarkovJumpsType.HISTORY, false);
    }

    return likelihood;
}
Aggregations