Use of dr.evomodel.tipstatesmodel.TipStatesModel in project beast-mcmc by beast-dev.
Class BalancedBeagleTreeLikelihoodParser, method parseXMLObject:
public Object parseXMLObject(XMLObject xo) throws XMLParseException {
    boolean useAmbiguities = xo.getAttribute(BeagleTreeLikelihoodParser.USE_AMBIGUITIES, false);
    /*int instanceCount = xo.getAttribute(INSTANCE_COUNT, 1);
    if (instanceCount < 1) {
        instanceCount = 1;
    }
    String ic = System.getProperty(BEAGLE_INSTANCE_COUNT);
    if (ic != null && ic.length() > 0) {
        instanceCount = Integer.parseInt(ic);
    }*/
    PatternList patternList = (PatternList) xo.getChild(PatternList.class);
    TreeModel treeModel = (TreeModel) xo.getChild(TreeModel.class);
    GammaSiteRateModel siteRateModel = (GammaSiteRateModel) xo.getChild(GammaSiteRateModel.class);
    FrequencyModel rootFreqModel = (FrequencyModel) xo.getChild(FrequencyModel.class);
    BranchModel branchModel = (BranchModel) xo.getChild(BranchModel.class);
    if (branchModel == null) {
        SubstitutionModel substitutionModel = (SubstitutionModel) xo.getChild(SubstitutionModel.class);
        if (substitutionModel == null) {
            substitutionModel = siteRateModel.getSubstitutionModel();
        }
        if (substitutionModel == null) {
            throw new XMLParseException("No substitution model available for TreeLikelihood: " + xo.getId());
        }
        branchModel = new HomogeneousBranchModel(substitutionModel, rootFreqModel);
    }
    BranchRateModel branchRateModel = (BranchRateModel) xo.getChild(BranchRateModel.class);
    TipStatesModel tipStatesModel = (TipStatesModel) xo.getChild(TipStatesModel.class);
    // if (xo.getChild(TipStatesModel.class) != null) {
    //     throw new XMLParseException("Sequence Error Models are not supported under BEAGLE yet. Please use Native BEAST Likelihood.");
    // }
    PartialsRescalingScheme scalingScheme = PartialsRescalingScheme.DEFAULT;
    if (xo.hasAttribute(BeagleTreeLikelihoodParser.SCALING_SCHEME)) {
        scalingScheme = PartialsRescalingScheme.parseFromString(xo.getStringAttribute(BeagleTreeLikelihoodParser.SCALING_SCHEME));
        if (scalingScheme == null)
            throw new XMLParseException("Unknown scaling scheme '" + xo.getStringAttribute(BeagleTreeLikelihoodParser.SCALING_SCHEME) + "' in " + "OldBeagleTreeLikelihood object '" + xo.getId());
    }
    boolean delayScaling = true;
    Map<Set<String>, Parameter> partialsRestrictions = null;
    if (xo.hasChildNamed(PARTIALS_RESTRICTION)) {
        XMLObject cxo = xo.getChild(PARTIALS_RESTRICTION);
        TaxonList taxonList = (TaxonList) cxo.getChild(TaxonList.class);
        // Parameter parameter = (Parameter) cxo.getChild(Parameter.class);
        try {
            TreeUtils.getLeavesForTaxa(treeModel, taxonList);
        } catch (TreeUtils.MissingTaxonException e) {
            throw new XMLParseException("Unable to parse taxon list: " + e.getMessage());
        }
        throw new XMLParseException("Restricting internal nodes is not yet implemented. Contact Marc");
    }
    /*if (instanceCount == 1 || patternList.getPatternCount() < instanceCount) {
        return createTreeLikelihood(
                patternList,
                treeModel,
                branchModel,
                siteRateModel,
                branchRateModel,
                tipStatesModel,
                useAmbiguities,
                scalingScheme,
                partialsRestrictions,
                xo
        );
    }*/
    // first run a test for instanceCount == 1
    System.err.println("\nTesting instanceCount == 1");
    Likelihood baseLikelihood = createTreeLikelihood(patternList, treeModel, branchModel, siteRateModel, branchRateModel, tipStatesModel, useAmbiguities, scalingScheme, delayScaling, partialsRestrictions, xo);
    double start = System.nanoTime();
    for (int i = 0; i < TEST_RUNS; i++) {
        baseLikelihood.makeDirty();
        baseLikelihood.getLogLikelihood();
    }
    double end = System.nanoTime();
    double baseResult = end - start;
    System.err.println("Evaluation took: " + baseResult);
    if (!(patternList instanceof SitePatterns)) {
        throw new XMLParseException("BEAGLE_INSTANCES option cannot be used with BEAUti-selected codon partitioning.");
    }
    if (tipStatesModel != null) {
        throw new XMLParseException("BEAGLE_INSTANCES option cannot be used with a TipStateModel (i.e., a sequence error model).");
    }
    // List<Likelihood> likelihoods = new ArrayList<Likelihood>();
    List<Likelihood> likelihoods = null;
    CompoundLikelihood compound = null;
    int instanceCount = 2;
    boolean optimal = false;
    while (optimal == false) {
        System.err.println("\nCreating instanceCount == " + instanceCount);
        likelihoods = new ArrayList<Likelihood>();
        for (int i = 0; i < instanceCount; i++) {
            Patterns subPatterns = new Patterns((SitePatterns) patternList, 0, 0, 1, i, instanceCount);
            AbstractTreeLikelihood treeLikelihood = createTreeLikelihood(subPatterns, treeModel, branchModel, siteRateModel, branchRateModel, null, useAmbiguities, scalingScheme, delayScaling, partialsRestrictions, xo);
            treeLikelihood.setId(xo.getId() + "_" + instanceCount);
            likelihoods.add(treeLikelihood);
        }
        // construct compoundLikelihood
        compound = new CompoundLikelihood(instanceCount, likelihoods);
        // test timings
        System.err.println("\nTesting instanceCount == " + instanceCount);
        start = System.nanoTime();
        for (int i = 0; i < TEST_RUNS; i++) {
            compound.makeDirty();
            compound.getLogLikelihood();
        }
        end = System.nanoTime();
        double newResult = end - start;
        System.err.println("Evaluation took: " + newResult);
        if (baseResult / newResult > TEST_CUTOFF) {
            instanceCount++;
            baseResult = newResult;
        } else {
            optimal = true;
            instanceCount--;
            System.err.println("\nCreating final BeagleTreeLikelihood with instanceCount: " + instanceCount);
            likelihoods = new ArrayList<Likelihood>();
            for (int i = 0; i < instanceCount; i++) {
                Patterns subPatterns = new Patterns((SitePatterns) patternList, 0, 0, 1, i, instanceCount);
                AbstractTreeLikelihood treeLikelihood = createTreeLikelihood(subPatterns, treeModel, branchModel, siteRateModel, branchRateModel, null, useAmbiguities, scalingScheme, delayScaling, partialsRestrictions, xo);
                treeLikelihood.setId(xo.getId() + "_" + instanceCount);
                likelihoods.add(treeLikelihood);
            }
            // construct compoundLikelihood
            compound = new CompoundLikelihood(instanceCount, likelihoods);
        }
    }
    return compound;
    /*for (int i = 0; i < instanceCount; i++) {
        Patterns subPatterns = new Patterns((SitePatterns) patternList, 0, 0, 1, i, instanceCount);
        AbstractTreeLikelihood treeLikelihood = createTreeLikelihood(
                subPatterns,
                treeModel,
                branchModel,
                siteRateModel,
                branchRateModel,
                null,
                useAmbiguities,
                scalingScheme,
                partialsRestrictions,
                xo);
        treeLikelihood.setId(xo.getId() + "_" + instanceCount);
        likelihoods.add(treeLikelihood);
    }
    return new CompoundLikelihood(likelihoods);*/
}
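The balancing logic above can be read in isolation: it times a single-instance likelihood, then keeps adding BEAGLE instances while each step still improves the timing by more than a cutoff factor, and finally rebuilds the likelihood with the last count that helped. The sketch below restates that search in plain Java, independent of the BEAST classes; the Evaluator signature and the TEST_RUNS / TEST_CUTOFF values are illustrative stand-ins for the parser's own constants, not the actual BEAST API.

import java.util.function.IntToDoubleFunction;

public final class InstanceCountTuner {

    // Assumed values; the parser defines its own TEST_RUNS / TEST_CUTOFF constants.
    private static final int TEST_RUNS = 10;
    private static final double TEST_CUTOFF = 1.05;

    /**
     * Returns the largest instance count that still gave a worthwhile speedup.
     * evaluate.applyAsDouble(n) is expected to perform one full evaluation pass
     * (the parser's makeDirty() + getLogLikelihood()) of a likelihood split into
     * n instances; caching the built likelihood between calls is up to the caller.
     */
    public static int tune(IntToDoubleFunction evaluate) {
        double baseResult = time(evaluate, 1);
        int instanceCount = 2;
        while (true) {
            double newResult = time(evaluate, instanceCount);
            if (baseResult / newResult > TEST_CUTOFF) {
                baseResult = newResult;   // still worthwhile: keep growing
                instanceCount++;
            } else {
                return instanceCount - 1; // last count that actually helped
            }
        }
    }

    private static double time(IntToDoubleFunction evaluate, int instanceCount) {
        long start = System.nanoTime();
        for (int i = 0; i < TEST_RUNS; i++) {
            evaluate.applyAsDouble(instanceCount);
        }
        return System.nanoTime() - start;
    }
}

In the parser itself the evaluation step is the freshly built CompoundLikelihood's makeDirty() followed by getLogLikelihood(), repeated TEST_RUNS times per candidate instance count.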
Use of dr.evomodel.tipstatesmodel.TipStatesModel in project beast-mcmc by beast-dev.
Class BeagleTreeLikelihoodParser, method parseXMLObject:
public Object parseXMLObject(XMLObject xo) throws XMLParseException {
    boolean useAmbiguities = xo.getAttribute(USE_AMBIGUITIES, false);
    int instanceCount = xo.getAttribute(INSTANCE_COUNT, 1);
    if (instanceCount < 1) {
        instanceCount = 1;
    }
    String ic = System.getProperty(BEAGLE_INSTANCE_COUNT);
    if (ic != null && ic.length() > 0) {
        instanceCount = Integer.parseInt(ic);
    }
    PatternList patternList = (PatternList) xo.getChild(PatternList.class);
    MutableTreeModel treeModel = (MutableTreeModel) xo.getChild(MutableTreeModel.class);
    GammaSiteRateModel siteRateModel = (GammaSiteRateModel) xo.getChild(GammaSiteRateModel.class);
    FrequencyModel rootFreqModel = (FrequencyModel) xo.getChild(FrequencyModel.class);
    BranchModel branchModel = (BranchModel) xo.getChild(BranchModel.class);
    if (branchModel == null) {
        SubstitutionModel substitutionModel = (SubstitutionModel) xo.getChild(SubstitutionModel.class);
        if (substitutionModel == null) {
            substitutionModel = siteRateModel.getSubstitutionModel();
        }
        if (substitutionModel == null) {
            throw new XMLParseException("No substitution model available for TreeLikelihood: " + xo.getId());
        }
        branchModel = new HomogeneousBranchModel(substitutionModel, rootFreqModel);
    }
    BranchRateModel branchRateModel = (BranchRateModel) xo.getChild(BranchRateModel.class);
    if (branchModel instanceof EpochBranchModel && rootFreqModel != null) {
        EpochBranchModel epochBranchModel = (EpochBranchModel) branchModel;
        epochBranchModel.setRootFrequencyModel(rootFreqModel);
    }
    TipStatesModel tipStatesModel = (TipStatesModel) xo.getChild(TipStatesModel.class);
    // if (xo.getChild(TipStatesModel.class) != null) {
    //     throw new XMLParseException("Sequence Error Models are not supported under BEAGLE yet. Please use Native BEAST Likelihood.");
    // }
    PartialsRescalingScheme scalingScheme = PartialsRescalingScheme.DEFAULT;
    boolean delayScaling = true;
    if (xo.hasAttribute(SCALING_SCHEME)) {
        scalingScheme = PartialsRescalingScheme.parseFromString(xo.getStringAttribute(SCALING_SCHEME));
        if (scalingScheme == null)
            throw new XMLParseException("Unknown scaling scheme '" + xo.getStringAttribute(SCALING_SCHEME) + "' in " + "OldBeagleTreeLikelihood object '" + xo.getId());
    }
    if (xo.hasAttribute(DELAY_SCALING)) {
        delayScaling = xo.getBooleanAttribute(DELAY_SCALING);
    }
    Map<Set<String>, Parameter> partialsRestrictions = null;
    if (xo.hasChildNamed(PARTIALS_RESTRICTION)) {
        XMLObject cxo = xo.getChild(PARTIALS_RESTRICTION);
        TaxonList taxonList = (TaxonList) cxo.getChild(TaxonList.class);
        // Parameter parameter = (Parameter) cxo.getChild(Parameter.class);
        try {
            TreeUtils.getLeavesForTaxa(treeModel, taxonList);
        } catch (TreeUtils.MissingTaxonException e) {
            throw new XMLParseException("Unable to parse taxon list: " + e.getMessage());
        }
        throw new XMLParseException("Restricting internal nodes is not yet implemented. Contact Marc");
    }
    int beagleThreadCount = -1;
    if (System.getProperty(BEAGLE_THREAD_COUNT) != null) {
        beagleThreadCount = Integer.parseInt(System.getProperty(BEAGLE_THREAD_COUNT));
    }
    if (beagleThreadCount == -1) {
        // the default is -1 threads (automatic thread pool size) but an XML attribute can override it
        int threadCount = xo.getAttribute(THREADS, -1);
        if (System.getProperty(THREAD_COUNT) != null) {
            threadCount = Integer.parseInt(System.getProperty(THREAD_COUNT));
        }
        // Todo: allow for different number of threads per beagle instance according to pattern counts
        if (threadCount >= 0) {
            System.setProperty(BEAGLE_THREAD_COUNT, Integer.toString(threadCount / instanceCount));
        }
    }
    if (instanceCount == 1 || patternList.getPatternCount() < instanceCount) {
        return createTreeLikelihood(patternList, treeModel, branchModel, siteRateModel, branchRateModel, tipStatesModel, useAmbiguities, scalingScheme, delayScaling, partialsRestrictions, xo);
    }
    if (tipStatesModel != null) {
        throw new XMLParseException("BEAGLE_INSTANCES option cannot be used with a TipStateModel (i.e., a sequence error model).");
    }
    List<Likelihood> likelihoods = new ArrayList<Likelihood>();
    for (int i = 0; i < instanceCount; i++) {
        Patterns subPatterns = new Patterns(patternList, i, instanceCount);
        AbstractTreeLikelihood treeLikelihood = createTreeLikelihood(subPatterns, treeModel, branchModel, siteRateModel, branchRateModel, null, useAmbiguities, scalingScheme, delayScaling, partialsRestrictions, xo);
        treeLikelihood.setId(xo.getId() + "_" + instanceCount);
        likelihoods.add(treeLikelihood);
    }
    return new CompoundLikelihood(likelihoods);
}
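When instanceCount > 1, the parser assigns alignment site patterns to instances via new Patterns(patternList, i, instanceCount) and wraps the resulting per-instance tree likelihoods in a CompoundLikelihood. A minimal sketch of that kind of split, using plain index arrays instead of the Patterns class; the assumption that subset i takes every instanceCount-th pattern starting at offset i is an illustrative reading of the constructor, not a statement of the exact BEAST implementation.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public final class PatternStriding {

    /**
     * Splits pattern indices 0..patternCount-1 into instanceCount disjoint subsets,
     * subset i holding every instanceCount-th pattern starting at i.
     */
    public static List<int[]> stride(int patternCount, int instanceCount) {
        List<int[]> subsets = new ArrayList<>();
        for (int i = 0; i < instanceCount; i++) {
            List<Integer> indices = new ArrayList<>();
            for (int p = i; p < patternCount; p += instanceCount) {
                indices.add(p);
            }
            subsets.add(indices.stream().mapToInt(Integer::intValue).toArray());
        }
        return subsets;
    }

    public static void main(String[] args) {
        // 10 patterns over 3 instances -> [0, 3, 6, 9], [1, 4, 7], [2, 5, 8]
        stride(10, 3).forEach(s -> System.out.println(Arrays.toString(s)));
    }
}

Because site likelihoods are independent given the tree and model, summing the per-subset log likelihoods in the CompoundLikelihood reproduces the single-instance value while letting the subsets be evaluated in parallel.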
Use of dr.evomodel.tipstatesmodel.TipStatesModel in project beast-mcmc by beast-dev.
Class TreeDataLikelihoodParser, method parseXMLObject:
public Object parseXMLObject(XMLObject xo) throws XMLParseException {
    boolean useAmbiguities = xo.getAttribute(USE_AMBIGUITIES, false);
    boolean usePreOrder = xo.getAttribute(USE_PREORDER, false);
    boolean branchRateDerivative = xo.getAttribute(BRANCHRATE_DERIVATIVE, usePreOrder);
    boolean branchInfinitesimalDerivative = xo.getAttribute(BRANCHINFINITESIMAL_DERIVATIVE, false);
    if (usePreOrder != (branchRateDerivative || branchInfinitesimalDerivative)) {
        throw new RuntimeException("Need to specify derivative types.");
    }
    PreOrderSettings settings = new PreOrderSettings(usePreOrder, branchRateDerivative, branchInfinitesimalDerivative);
    // TreeDataLikelihood doesn't currently support Instances defined from the command line
    // int instanceCount = xo.getAttribute(INSTANCE_COUNT, 1);
    // if (instanceCount < 1) {
    //     instanceCount = 1;
    // }
    //
    // String ic = System.getProperty(BEAGLE_INSTANCE_COUNT);
    // if (ic != null && ic.length() > 0) {
    //     instanceCount = Integer.parseInt(ic);
    // }
    List<PatternList> patternLists = new ArrayList<PatternList>();
    List<SiteRateModel> siteRateModels = new ArrayList<SiteRateModel>();
    List<BranchModel> branchModels = new ArrayList<BranchModel>();
    boolean hasSinglePartition = false;
    PatternList patternList = (PatternList) xo.getChild(PatternList.class);
    if (patternList != null) {
        hasSinglePartition = true;
        patternLists.add(patternList);
        GammaSiteRateModel siteRateModel = (GammaSiteRateModel) xo.getChild(GammaSiteRateModel.class);
        siteRateModels.add(siteRateModel);
        FrequencyModel rootFreqModel = (FrequencyModel) xo.getChild(FrequencyModel.class);
        BranchModel branchModel = (BranchModel) xo.getChild(BranchModel.class);
        if (branchModel == null) {
            SubstitutionModel substitutionModel = (SubstitutionModel) xo.getChild(SubstitutionModel.class);
            if (substitutionModel == null) {
                substitutionModel = siteRateModel.getSubstitutionModel();
            }
            if (substitutionModel == null) {
                throw new XMLParseException("No substitution model available for partition in DataTreeLikelihood: " + xo.getId());
            }
            branchModel = new HomogeneousBranchModel(substitutionModel, rootFreqModel);
        }
        branchModels.add(branchModel);
    }
    int k = 0;
    for (int i = 0; i < xo.getChildCount(); i++) {
        if (xo.getChildName(i).equals(PARTITION)) {
            if (hasSinglePartition) {
                throw new XMLParseException("Either a single set of patterns should be given or multiple 'partitions' elements within DataTreeLikelihood: " + xo.getId());
            }
            k += 1;
            XMLObject cxo = (XMLObject) xo.getChild(i);
            patternList = (PatternList) cxo.getChild(PatternList.class);
            patternLists.add(patternList);
            GammaSiteRateModel siteRateModel = (GammaSiteRateModel) cxo.getChild(GammaSiteRateModel.class);
            siteRateModels.add(siteRateModel);
            FrequencyModel rootFreqModel = (FrequencyModel) xo.getChild(FrequencyModel.class);
            BranchModel branchModel = (BranchModel) cxo.getChild(BranchModel.class);
            if (branchModel == null) {
                SubstitutionModel substitutionModel = (SubstitutionModel) xo.getChild(SubstitutionModel.class);
                if (substitutionModel == null) {
                    substitutionModel = siteRateModel.getSubstitutionModel();
                }
                if (substitutionModel == null) {
                    throw new XMLParseException("No substitution model available for partition " + k + " in DataTreeLikelihood: " + xo.getId());
                }
                branchModel = new HomogeneousBranchModel(substitutionModel, rootFreqModel);
            }
            branchModels.add(branchModel);
            BranchRateModel branchRateModel = (BranchRateModel) cxo.getChild(BranchRateModel.class);
            if (branchRateModel != null) {
                throw new XMLParseException("Partitions are not currently allowed their own BranchRateModel in TreeDataLikelihood object '" + xo.getId());
            }
        }
    }
    if (patternLists.size() == 0) {
        throw new XMLParseException("Either a single set of patterns should be given or multiple 'partitions' elements within DataTreeLikelihood: " + xo.getId());
    }
    Tree treeModel = (Tree) xo.getChild(Tree.class);
    BranchRateModel branchRateModel = (BranchRateModel) xo.getChild(BranchRateModel.class);
    if (branchRateModel == null) {
        throw new XMLParseException("BranchRateModel missing from TreeDataLikelihood object '" + xo.getId());
        // branchRateModel = new DefaultBranchRateModel();
    }
    TipStatesModel tipStatesModel = (TipStatesModel) xo.getChild(TipStatesModel.class);
    final boolean preferGPU = xo.getAttribute(PREFER_GPU, false);
    PartialsRescalingScheme scalingScheme = PartialsRescalingScheme.DEFAULT;
    if (xo.hasAttribute(SCALING_SCHEME)) {
        scalingScheme = PartialsRescalingScheme.parseFromString(xo.getStringAttribute(SCALING_SCHEME));
        if (scalingScheme == null)
            throw new XMLParseException("Unknown scaling scheme '" + xo.getStringAttribute(SCALING_SCHEME) + "' in " + "TreeDataLikelihood object '" + xo.getId());
    }
    final boolean delayScaling = xo.getAttribute(DELAY_SCALING, true);
    if (tipStatesModel != null) {
        throw new XMLParseException("BEAGLE_INSTANCES option cannot be used with a TipStateModel (i.e., a sequence error model).");
    }
    return createTreeDataLikelihood(patternLists, branchModels, siteRateModels, treeModel, branchRateModel, null, useAmbiguities, preferGPU, scalingScheme, delayScaling, settings);
}
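The opening of this parser enforces a simple invariant: pre-order traversal is requested if and only if at least one derivative type is requested. A compact, framework-free restatement of that check, with plain booleans standing in for the USE_PREORDER, BRANCHRATE_DERIVATIVE and BRANCHINFINITESIMAL_DERIVATIVE attributes; the class and method names are illustrative, not part of BEAST.

public final class PreOrderFlagCheck {

    /** Mirrors the parser's consistency rule between pre-order traversal and derivative requests. */
    public static void validate(boolean usePreOrder,
                                boolean branchRateDerivative,
                                boolean branchInfinitesimalDerivative) {
        boolean anyDerivative = branchRateDerivative || branchInfinitesimalDerivative;
        if (usePreOrder != anyDerivative) {
            throw new IllegalArgumentException(
                    "Need to specify derivative types: usePreOrder=" + usePreOrder
                            + " but derivatives requested=" + anyDerivative);
        }
    }

    public static void main(String[] args) {
        validate(true, true, false);   // fine: pre-order with a branch-rate derivative
        validate(false, false, false); // fine: neither pre-order nor derivatives requested
        try {
            validate(true, false, false); // inconsistent: pre-order requested with no derivative
        } catch (IllegalArgumentException expected) {
            System.out.println(expected.getMessage());
        }
    }
}

Note that BRANCHRATE_DERIVATIVE defaults to the value of usePreOrder, so the plain usePreOrder="true" case passes the check; the exception is only reachable when the derivative flags are set explicitly and contradict the traversal flag.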
Use of dr.evomodel.tipstatesmodel.TipStatesModel in project beast-mcmc by beast-dev.
Class MultiPartitionDataLikelihoodParser, method parseXMLObject:
public Object parseXMLObject(XMLObject xo) throws XMLParseException {
    boolean useAmbiguities = xo.getAttribute(USE_AMBIGUITIES, false);
    int instanceCount = xo.getAttribute(INSTANCE_COUNT, 1);
    if (instanceCount < 1) {
        instanceCount = 1;
    }
    String ic = System.getProperty(BEAGLE_INSTANCE_COUNT);
    if (ic != null && ic.length() > 0) {
        instanceCount = Integer.parseInt(ic);
    }
    if (DEBUG) {
        System.out.println("instanceCount: " + instanceCount);
    }
    List<PatternList> patternLists = xo.getAllChildren(PatternList.class);
    TreeModel treeModel = (TreeModel) xo.getChild(TreeModel.class);
    List<SiteRateModel> siteRateModels = xo.getAllChildren(SiteRateModel.class);
    FrequencyModel rootFreqModel = (FrequencyModel) xo.getChild(FrequencyModel.class);
    List<BranchModel> branchModels = xo.getAllChildren(BranchModel.class);
    if (branchModels.size() == 0) {
        if (DEBUG) {
            System.out.println("branchModels == null");
        }
        branchModels = new ArrayList<BranchModel>();
        List<SubstitutionModel> substitutionModels = xo.getAllChildren(SubstitutionModel.class);
        if (substitutionModels.size() == 0) {
            if (DEBUG) {
                System.out.println("substitutionModels == null");
            }
            for (SiteRateModel siteRateModel : siteRateModels) {
                SubstitutionModel substitutionModel = ((GammaSiteRateModel) siteRateModel).getSubstitutionModel();
                if (substitutionModel == null) {
                    throw new XMLParseException("No substitution model available for TreeDataLikelihood: " + xo.getId());
                }
                branchModels.add(new HomogeneousBranchModel(substitutionModel, rootFreqModel));
            }
        }
        if (DEBUG) {
            System.out.println("branchModels size: " + branchModels.size());
        }
        for (BranchModel branchModel : branchModels) {
            System.out.println(" " + branchModel.getId() + " " + branchModel.getModelName());
        }
    }
    BranchRateModel branchRateModel = (BranchRateModel) xo.getChild(BranchRateModel.class);
    if (branchRateModel == null) {
        branchRateModel = new DefaultBranchRateModel();
    }
    if (DEBUG) {
        System.out.println("BranchRateModel: " + branchRateModel.getId());
    }
    TipStatesModel tipStatesModel = (TipStatesModel) xo.getChild(TipStatesModel.class);
    PartialsRescalingScheme scalingScheme = PartialsRescalingScheme.DEFAULT;
    boolean delayScaling = true;
    if (xo.hasAttribute(SCALING_SCHEME)) {
        scalingScheme = PartialsRescalingScheme.parseFromString(xo.getStringAttribute(SCALING_SCHEME));
        if (scalingScheme == null)
            throw new XMLParseException("Unknown scaling scheme '" + xo.getStringAttribute(SCALING_SCHEME) + "' in " + "OldBeagleTreeLikelihood object '" + xo.getId());
    }
    if (xo.hasAttribute(DELAY_SCALING)) {
        delayScaling = xo.getBooleanAttribute(DELAY_SCALING);
    }
    if (instanceCount == 1) {
        if (DEBUG) {
            System.out.println("instanceCount == 1");
        }
        return createTreeDataLikelihood(patternLists, treeModel, branchModels, siteRateModels, branchRateModel, tipStatesModel, useAmbiguities, scalingScheme, delayScaling, xo);
    }
    if (tipStatesModel != null) {
        throw new XMLParseException("BEAGLE_INSTANCES option cannot be used with a TipStateModel (i.e., a sequence error model).");
    }
    List<PatternList> patternInstanceLists = new ArrayList<PatternList>();
    for (int j = 0; j < patternLists.size(); j++) {
        for (int i = 0; i < instanceCount; i++) {
            patternInstanceLists.add(new Patterns(patternLists.get(j), i, instanceCount));
        }
    }
    // pass the per-instance pattern lists built above; passing the original patternLists here would silently ignore instanceCount
    return createTreeDataLikelihood(patternInstanceLists, treeModel, branchModels, siteRateModels, branchRateModel, null, useAmbiguities, scalingScheme, delayScaling, xo);
}
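Several of these parsers let a JVM system property override the instanceCount read from the XML attribute, after clamping the attribute value to at least 1. That lookup pattern, isolated into a small runnable sketch; the property key string used here is an assumption standing in for the parser's BEAGLE_INSTANCE_COUNT constant, and the class name is illustrative.

public final class InstanceCountProperty {

    // Assumed key; in BEAST the parsers reference their BEAGLE_INSTANCE_COUNT constant instead.
    private static final String BEAGLE_INSTANCE_COUNT = "beagle.instance.count";

    /**
     * Resolves the instance count: the XML attribute value clamped to >= 1,
     * then overridden by the system property if it is set and non-empty.
     */
    public static int resolve(int attributeValue) {
        int instanceCount = Math.max(1, attributeValue);
        String ic = System.getProperty(BEAGLE_INSTANCE_COUNT);
        if (ic != null && ic.length() > 0) {
            instanceCount = Integer.parseInt(ic);
        }
        return instanceCount;
    }

    public static void main(String[] args) {
        System.setProperty(BEAGLE_INSTANCE_COUNT, "4");
        System.out.println(resolve(1)); // prints 4: the property wins over the attribute
    }
}

The property override is what lets a run be re-tuned from the command line (for example with -D on the java invocation) without editing the XML.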
Use of dr.evomodel.tipstatesmodel.TipStatesModel in project beast-mcmc by beast-dev.
Class PMDTestProblem, method testPMD:
public void testPMD() throws Exception {
    Parameter popSize = new Parameter.Default(ConstantPopulationModelParser.POPULATION_SIZE, 496432.69917113904, 0, Double.POSITIVE_INFINITY);
    ConstantPopulationModel constantModel = createRandomInitialTree(popSize);
    TreeIntervals intervalList = new TreeIntervals(treeModel, null, null);
    CoalescentLikelihood coalescent = new CoalescentLikelihood(intervalList, constantModel);
    coalescent.setId("coalescent");
    // clock model
    Parameter rateParameter = new Parameter.Default(StrictClockBranchRates.RATE, 4.0E-7, 0, 100.0);
    StrictClockBranchRates branchRateModel = new StrictClockBranchRates(rateParameter);
    // Sub model
    Parameter freqs = new Parameter.Default(new double[] { 0.25, 0.25, 0.25, 0.25 });
    Parameter kappa = new Parameter.Default(HKYParser.KAPPA, 1.0, 1.0E-8, Double.POSITIVE_INFINITY);
    FrequencyModel f = new FrequencyModel(Nucleotides.INSTANCE, freqs);
    HKY hky = new HKY(kappa, f);
    // siteModel
    GammaSiteModel siteModel = new GammaSiteModel(hky);
    Parameter mu = new Parameter.Default(GammaSiteModelParser.MUTATION_RATE, 1.0, 0, Double.POSITIVE_INFINITY);
    siteModel.setMutationRateParameter(mu);
    // SequenceErrorModel
    Parameter ageRelatedRateParameter = new Parameter.Default(SequenceErrorModelParser.AGE_RELATED_RATE, 4.0E-7, 0, 100.0);
    TipStatesModel aDNADamageModel = new SequenceErrorModel(null, null, SequenceErrorModel.ErrorType.TRANSITIONS_ONLY, null, ageRelatedRateParameter, null);
    // treeLikelihood
    SitePatterns patterns = new SitePatterns(alignment, null, 0, -1, 1, true);
    TreeLikelihood treeLikelihood = new TreeLikelihood(patterns, treeModel, siteModel, branchRateModel, aDNADamageModel, false, false, true, false, false);
    treeLikelihood.setId(TreeLikelihoodParser.TREE_LIKELIHOOD);
    // Operators
    OperatorSchedule schedule = new SimpleOperatorSchedule();
    MCMCOperator operator = new ScaleOperator(kappa, 0.75);
    operator.setWeight(1.0);
    schedule.addOperator(operator);
    operator = new ScaleOperator(rateParameter, 0.75);
    operator.setWeight(3.0);
    schedule.addOperator(operator);
    Parameter allInternalHeights = ((DefaultTreeModel) treeModel).createNodeHeightsParameter(true, true, false);
    operator = new UpDownOperator(new Scalable[] { new Scalable.Default(rateParameter) }, new Scalable[] { new Scalable.Default(allInternalHeights) }, 0.75, 3.0, AdaptationMode.ADAPTATION_ON);
    schedule.addOperator(operator);
    operator = new ScaleOperator(popSize, 0.75);
    operator.setWeight(3.0);
    schedule.addOperator(operator);
    operator = new ScaleOperator(ageRelatedRateParameter, 0.75);
    operator.setWeight(3.0);
    schedule.addOperator(operator);
    Parameter rootHeight = ((DefaultTreeModel) treeModel).getRootHeightParameter();
    rootHeight.setId(TREE_HEIGHT);
    operator = new ScaleOperator(rootHeight, 0.75);
    operator.setWeight(3.0);
    schedule.addOperator(operator);
    Parameter internalHeights = ((DefaultTreeModel) treeModel).createNodeHeightsParameter(false, true, false);
    operator = new UniformOperator(internalHeights, 30.0);
    schedule.addOperator(operator);
    operator = new SubtreeSlideOperator(((DefaultTreeModel) treeModel), 15.0, 49643.2699171139, true, false, false, false, AdaptationMode.ADAPTATION_ON, AdaptableMCMCOperator.DEFAULT_ADAPTATION_TARGET);
    schedule.addOperator(operator);
    operator = new ExchangeOperator(ExchangeOperator.NARROW, treeModel, 15.0);
    // operator.doOperation();
    schedule.addOperator(operator);
    operator = new ExchangeOperator(ExchangeOperator.WIDE, treeModel, 3.0);
    // operator.doOperation();
    schedule.addOperator(operator);
    operator = new WilsonBalding(treeModel, 3.0);
    // operator.doOperation();
    schedule.addOperator(operator);
    // ??? correct?
    operator = new DeltaExchangeOperator(freqs, new int[] { 1, 1, 1, 1 }, 0.01, 1.0, false, AdaptationMode.ADAPTATION_ON);
    schedule.addOperator(operator);
    // CompoundLikelihood
    OneOnXPrior likelihood1 = new OneOnXPrior();
    likelihood1.addData(popSize);
    OneOnXPrior likelihood2 = new OneOnXPrior();
    likelihood2.addData(kappa);
    List<Likelihood> likelihoods = new ArrayList<Likelihood>();
    likelihoods.add(likelihood1);
    likelihoods.add(likelihood2);
    likelihoods.add(coalescent);
    Likelihood prior = new CompoundLikelihood(0, likelihoods);
    prior.setId(CompoundLikelihoodParser.PRIOR);
    likelihoods.clear();
    likelihoods.add(treeLikelihood);
    Likelihood likelihood = new CompoundLikelihood(-1, likelihoods);
    likelihoods.clear();
    likelihoods.add(prior);
    likelihoods.add(likelihood);
    Likelihood posterior = new CompoundLikelihood(0, likelihoods);
    posterior.setId(CompoundLikelihoodParser.POSTERIOR);
    // Log
    ArrayLogFormatter formatter = new ArrayLogFormatter(false);
    MCLogger[] loggers = new MCLogger[2];
    loggers[0] = new MCLogger(formatter, 1000, false);
    loggers[0].add(posterior);
    loggers[0].add(treeLikelihood);
    loggers[0].add(rootHeight);
    loggers[0].add(rateParameter);
    loggers[0].add(ageRelatedRateParameter);
    loggers[0].add(popSize);
    loggers[0].add(kappa);
    loggers[0].add(coalescent);
    loggers[1] = new MCLogger(new TabDelimitedFormatter(System.out), 10000, false);
    loggers[1].add(posterior);
    loggers[1].add(treeLikelihood);
    loggers[1].add(rootHeight);
    loggers[1].add(rateParameter);
    // MCMC
    MCMC mcmc = new MCMC("mcmc1");
    MCMCOptions options = new MCMCOptions(1000000);
    mcmc.setShowOperatorAnalysis(true);
    mcmc.init(options, posterior, schedule, loggers);
    mcmc.run();
    // time
    System.out.println(mcmc.getTimer().toString());
    // Tracer
    List<Trace> traces = formatter.getTraces();
    ArrayTraceList traceList = new ArrayTraceList("PMDTest", traces, 0);
    for (int i = 1; i < traces.size(); i++) {
        traceList.analyseTrace(i);
    }
    // <expectation name="clock.rate" value="1.5E-7"/>
    // <expectation name="errorModel.ageRate" value="0.7E-7"/>
    // <expectation name="hky.kappa" value="10"/>
    TraceCorrelation kappaStats = traceList.getCorrelationStatistics(traceList.getTraceIndex(HKYParser.KAPPA));
    assertExpectation(HKYParser.KAPPA, kappaStats, 10);
    TraceCorrelation rateStats = traceList.getCorrelationStatistics(traceList.getTraceIndex(StrictClockBranchRates.RATE));
    assertExpectation(StrictClockBranchRates.RATE, rateStats, 1.5E-7);
    TraceCorrelation ageRateStats = traceList.getCorrelationStatistics(traceList.getTraceIndex(SequenceErrorModelParser.AGE_RELATED_RATE));
    assertExpectation(SequenceErrorModelParser.AGE_RELATED_RATE, ageRateStats, 0.7E-7);
}
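The test's final assertions compare summary statistics of the sampled traces against known expectations (kappa near 10, clock rate near 1.5E-7, age-related error rate near 0.7E-7). A minimal, framework-free sketch of that kind of check is given below: it computes a sample mean and asserts it falls within a relative tolerance. The helper name and the fixed tolerance are illustrative only; BEAST's assertExpectation works from the TraceCorrelation statistics rather than a hard-coded tolerance.

public final class TraceExpectationCheck {

    /** Fails if the sample mean deviates from the expected value by more than relTol (relative). */
    public static void assertExpectation(String name, double[] samples, double expected, double relTol) {
        double sum = 0.0;
        for (double v : samples) {
            sum += v;
        }
        double mean = sum / samples.length;
        double relError = Math.abs(mean - expected) / Math.abs(expected);
        if (relError > relTol) {
            throw new AssertionError(name + ": mean " + mean + " not within " + (relTol * 100) + "% of " + expected);
        }
    }

    public static void main(String[] args) {
        double[] kappaSamples = { 9.6, 10.3, 10.1, 9.8, 10.2 };
        assertExpectation("hky.kappa", kappaSamples, 10.0, 0.1); // passes: the mean (10.0) is within 10% of 10
    }
}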