Usage example of dr.app.beauti.types.TreePriorParameterizationType from the beast-mcmc project (beast-dev).
Shown below: the writeTreePriorModel method of the TreePriorGenerator class.
// void writeTreePrior(PartitionTreePrior prior, PartitionTreeModel model, XMLWriter writer) { // for species, partitionName.treeModel
// setModelPrefix(prior.getPrefix()); // only has prefix, if (options.getPartitionTreePriors().size() > 1)
//
// writePriorLikelihood(prior, model, writer);
// }
/**
 * Writes the XML element for a tree prior (coalescent or speciational) model.
 * The demographic / speciation model element emitted is selected by the
 * prior's node-height prior type; for growth models the parameterization
 * (growth rate vs. doubling time) selects which rate socket is written.
 * After the switch, a simple constant-size coalescent ("initialDemo") is
 * written for priors that need one to simulate a random starting tree.
 *
 * @param prior  the partition tree prior supplying the model type,
 *               parameterization and parameter values
 * @param writer the XML writer the model elements are written to
 */
void writeTreePriorModel(PartitionTreePrior prior, XMLWriter writer) {

    // only has prefix, if (options.getPartitionTreePriors().size() > 1)
    setModelPrefix(prior.getPrefix());

    // When non-null, names an already-written popSize parameter that the
    // "initialDemo" model below references via IDREF instead of writing a
    // fresh parameter. Only the LOGISTIC and EXPANSION cases set it.
    String initialPopSize = null;

    TreePriorType nodeHeightPrior = prior.getNodeHeightPrior();
    Units.Type units = options.units;
    TreePriorParameterizationType parameterization = prior.getParameterization();

    switch (nodeHeightPrior) {
        case CONSTANT:
            // Constant-size coalescent: a single popSize parameter.
            writer.writeComment("A prior assumption that the population size has remained constant", "throughout the time spanned by the genealogy.");
            // NOTE(review): some cases pass options.units here and others the
            // local 'units' alias — they are the same value.
            writer.writeOpenTag(ConstantPopulationModelParser.CONSTANT_POPULATION_MODEL, new Attribute[] { new Attribute.Default<String>(XMLParser.ID, modelPrefix + "constant"), new Attribute.Default<String>("units", Units.Utils.getDefaultUnitName(options.units)) });
            writer.writeOpenTag(ConstantPopulationModelParser.POPULATION_SIZE);
            writeParameter("constant.popSize", prior, writer);
            writer.writeCloseTag(ConstantPopulationModelParser.POPULATION_SIZE);
            writer.writeCloseTag(ConstantPopulationModelParser.CONSTANT_POPULATION_MODEL);
            break;

        case EXPONENTIAL:
            // Exponential-growth coalescent: popSize plus either a growth
            // rate or a doubling time, depending on the parameterization.
            writer.writeComment("A prior assumption that the population size has grown exponentially", "throughout the time spanned by the genealogy.");
            writer.writeOpenTag(ExponentialGrowthModelParser.EXPONENTIAL_GROWTH_MODEL, new Attribute[] { new Attribute.Default<String>(XMLParser.ID, modelPrefix + "exponential"), new Attribute.Default<String>("units", Units.Utils.getDefaultUnitName(options.units)) });
            // write pop size socket
            writer.writeOpenTag(ExponentialGrowthModelParser.POPULATION_SIZE);
            writeParameter("exponential.popSize", prior, writer);
            writer.writeCloseTag(ExponentialGrowthModelParser.POPULATION_SIZE);
            if (parameterization == TreePriorParameterizationType.GROWTH_RATE) {
                // write growth rate socket
                writer.writeOpenTag(ExponentialGrowthModelParser.GROWTH_RATE);
                writeParameter("exponential.growthRate", prior, writer);
                writer.writeCloseTag(ExponentialGrowthModelParser.GROWTH_RATE);
            } else {
                // write doubling time socket
                writer.writeOpenTag(ExponentialGrowthModelParser.DOUBLING_TIME);
                writeParameter("exponential.doublingTime", prior, writer);
                writer.writeCloseTag(ExponentialGrowthModelParser.DOUBLING_TIME);
            }
            writer.writeCloseTag(ExponentialGrowthModelParser.EXPONENTIAL_GROWTH_MODEL);
            break;

        case LOGISTIC:
            // Logistic-growth coalescent: popSize, a rate (growth rate or
            // doubling time) and the t50 midpoint parameter.
            // (Original comment said "exponential" — copy-paste artifact.)
            writer.writeComment("A prior assumption that the population size has grown logistically", "throughout the time spanned by the genealogy.");
            writer.writeOpenTag(LogisticGrowthModelParser.LOGISTIC_GROWTH_MODEL, new Attribute[] { new Attribute.Default<String>(XMLParser.ID, modelPrefix + "logistic"), new Attribute.Default<String>("units", Units.Utils.getDefaultUnitName(options.units)) });
            // write pop size socket
            writer.writeOpenTag(LogisticGrowthModelParser.POPULATION_SIZE);
            writeParameter("logistic.popSize", prior, writer);
            writer.writeCloseTag(LogisticGrowthModelParser.POPULATION_SIZE);
            if (parameterization == TreePriorParameterizationType.GROWTH_RATE) {
                // write growth rate socket
                writer.writeOpenTag(LogisticGrowthModelParser.GROWTH_RATE);
                writeParameter("logistic.growthRate", prior, writer);
                writer.writeCloseTag(LogisticGrowthModelParser.GROWTH_RATE);
            } else {
                // write doubling time socket
                writer.writeOpenTag(LogisticGrowthModelParser.DOUBLING_TIME);
                writeParameter("logistic.doublingTime", prior, writer);
                writer.writeCloseTag(LogisticGrowthModelParser.DOUBLING_TIME);
            }

            // write logistic t50 socket
            // The commented-out block below is disabled calibration handling
            // that clamped logistic.t50 below the initial tree root height;
            // retained for reference.
            writer.writeOpenTag(LogisticGrowthModelParser.TIME_50);
            // if (options.clockModelOptions.getRateOptionClockModel() == FixRateType.FIX_MEAN
            // || options.clockModelOptions.getRateOptionClockModel() == FixRateType.RELATIVE_TO) {
            // writer.writeComment("No calibration");
            // writer.writeComment("logistic.t50 initial always has to < treeRootHeight initial");
            // dr.app.beauti.options.Parameter priorPara = prior.getParameter("logistic.t50");
            //
            // double initRootHeight;
            // if (options.isShareSameTreePrior()) {
            // initRootHeight = priorPara.initial;
            // for (PartitionTreeModel tree : options.getPartitionTreeModels()) {
            // double tmpRootHeight = tree.getParameter("treeModel.rootHeight").initial;
            // if (initRootHeight > tmpRootHeight) { // take min
            // initRootHeight = tmpRootHeight;
            // }
            // }
            // } else {
            // initRootHeight = prior.getTreeModel().getParameter("treeModel.rootHeight").initial;
            // }
            // // logistic.t50 initial always has to < treeRootHeight initial
            // if (priorPara.initial >= initRootHeight) {
            // priorPara.initial = initRootHeight / 2; // tree prior.initial has to < treeRootHeight.initial
            // }
            // } else {
            // writer.writeComment("Has calibration");
            //
            // throw new IllegalArgumentException("This function is not available in this release !");
            // }
            writeParameter("logistic.t50", prior, writer);
            writer.writeCloseTag(LogisticGrowthModelParser.TIME_50);

            writer.writeCloseTag(LogisticGrowthModelParser.LOGISTIC_GROWTH_MODEL);

            // Reuse logistic.popSize for the initial-tree demographic below.
            initialPopSize = "logistic.popSize";
            break;

        case EXPANSION:
            // Expansion model: exponential growth from an ancestral
            // population size (popSize, rate, ancestral proportion).
            // (Original comment said "exponential prior tree" — copy-paste.)
            writer.writeComment("A prior assumption that the population size has grown exponentially", "from some ancestral population size in the past.");
            writer.writeOpenTag(ExpansionModelParser.EXPANSION_MODEL, new Attribute[] { new Attribute.Default<String>(XMLParser.ID, modelPrefix + "expansion"), new Attribute.Default<String>("units", Units.Utils.getDefaultUnitName(options.units)) });
            // write pop size socket
            writeParameter(ExpansionModelParser.POPULATION_SIZE, "expansion.popSize", prior, writer);
            if (parameterization == TreePriorParameterizationType.GROWTH_RATE) {
                // write growth rate socket
                writeParameter(ExpansionModelParser.GROWTH_RATE, "expansion.growthRate", prior, writer);
            } else {
                // write doubling time socket
                writeParameter(ExpansionModelParser.DOUBLING_TIME, "expansion.doublingTime", prior, writer);
            }
            // write ancestral proportion socket
            writeParameter(ExpansionModelParser.ANCESTRAL_POPULATION_PROPORTION, "expansion.ancestralProportion", prior, writer);
            writer.writeCloseTag(ExpansionModelParser.EXPANSION_MODEL);

            // Reuse expansion.popSize for the initial-tree demographic below.
            initialPopSize = "expansion.popSize";
            break;

        case YULE:
        case YULE_CALIBRATION:
            // Yule pure-birth speciation model; the calibrated variant only
            // changes the citation comment written into the XML.
            if (nodeHeightPrior == TreePriorType.YULE_CALIBRATION) {
                writer.writeComment("Calibrated Yule: Heled J, Drummond AJ (2011), Syst Biol, doi: " + "10.1093/sysbio/syr087");
            } else {
                writer.writeComment("A prior on the distribution node heights defined given", "a Yule speciation process (a pure birth process).");
            }
            writer.writeOpenTag(YuleModelParser.YULE_MODEL, new Attribute[] { new Attribute.Default<String>(XMLParser.ID, modelPrefix + YuleModelParser.YULE), new Attribute.Default<String>("units", Units.Utils.getDefaultUnitName(units)) });
            writeParameter(YuleModelParser.BIRTH_RATE, "yule.birthRate", prior, writer);
            writer.writeCloseTag(YuleModelParser.YULE_MODEL);
            break;

        case BIRTH_DEATH:
        case BIRTH_DEATH_INCOMPLETE_SAMPLING:
            // Birth-death speciation model; the incomplete-sampling variant
            // additionally writes the sampling-probability parameter.
            writer.writeComment("A prior on the distribution node heights defined given");
            writer.writeComment(nodeHeightPrior == TreePriorType.BIRTH_DEATH_INCOMPLETE_SAMPLING ? BirthDeathModelParser.getCitationRHO() : BirthDeathModelParser.getCitation());
            writer.writeOpenTag(BirthDeathModelParser.BIRTH_DEATH_MODEL, new Attribute[] { new Attribute.Default<String>(XMLParser.ID, modelPrefix + BirthDeathModelParser.BIRTH_DEATH), new Attribute.Default<String>("units", Units.Utils.getDefaultUnitName(units)) });
            writeParameter(BirthDeathModelParser.BIRTHDIFF_RATE, BirthDeathModelParser.MEAN_GROWTH_RATE_PARAM_NAME, prior, writer);
            writeParameter(BirthDeathModelParser.RELATIVE_DEATH_RATE, BirthDeathModelParser.RELATIVE_DEATH_RATE_PARAM_NAME, prior, writer);
            if (nodeHeightPrior == TreePriorType.BIRTH_DEATH_INCOMPLETE_SAMPLING) {
                writeParameter(BirthDeathModelParser.SAMPLE_PROB, BirthDeathModelParser.BIRTH_DEATH + "." + BirthDeathModelParser.SAMPLE_PROB, prior, writer);
            }
            writer.writeCloseTag(BirthDeathModelParser.BIRTH_DEATH_MODEL);
            break;

        case BIRTH_DEATH_SERIAL_SAMPLING:
            // Birth-death model with serially sampled tips (BDSS):
            // lambda, relative mu, psi and origin parameters.
            writer.writeComment(BirthDeathSerialSamplingModelParser.getCitationPsiOrg());
            writer.writeOpenTag(BirthDeathSerialSamplingModelParser.BIRTH_DEATH_SERIAL_MODEL, new Attribute[] { new Attribute.Default<String>(XMLParser.ID, modelPrefix + BirthDeathSerialSamplingModelParser.BDSS), new Attribute.Default<String>("units", Units.Utils.getDefaultUnitName(units)), new Attribute.Default<Boolean>(BirthDeathSerialSamplingModelParser.HAS_FINAL_SAMPLE, false) });
            writeParameter(BirthDeathSerialSamplingModelParser.LAMBDA, BirthDeathSerialSamplingModelParser.BDSS + "." + BirthDeathSerialSamplingModelParser.LAMBDA, prior, writer);
            writeParameter(BirthDeathSerialSamplingModelParser.RELATIVE_MU, BirthDeathSerialSamplingModelParser.BDSS + "." + BirthDeathSerialSamplingModelParser.RELATIVE_MU, prior, writer);
            // The sample-probability socket is intentionally disabled:
            // writeParameter(BirthDeathSerialSamplingModelParser.SAMPLE_PROBABILITY,
            // BirthDeathSerialSamplingModelParser.BDSS + "." + BirthDeathSerialSamplingModelParser.SAMPLE_PROBABILITY, prior, writer);
            writeParameter(BirthDeathSerialSamplingModelParser.PSI, BirthDeathSerialSamplingModelParser.BDSS + "." + BirthDeathSerialSamplingModelParser.PSI, prior, writer);
            writeParameter(BirthDeathSerialSamplingModelParser.ORIGIN, BirthDeathSerialSamplingModelParser.BDSS + "." + BirthDeathSerialSamplingModelParser.ORIGIN, prior, writer);
            writer.writeCloseTag(BirthDeathSerialSamplingModelParser.BIRTH_DEATH_SERIAL_MODEL);
            break;

        case BIRTH_DEATH_BASIC_REPRODUCTIVE_NUMBER:
            // Epidemiological birth-death parameterization:
            // R0, recovery rate, sampling probability and origin.
            writer.writeComment(BirthDeathSerialSamplingModelParser.getCitationRT());
            writer.writeOpenTag(BirthDeathEpidemiologyModelParser.BIRTH_DEATH_EPIDEMIOLOGY, new Attribute[] { new Attribute.Default<String>(XMLParser.ID, modelPrefix + BirthDeathEpidemiologyModelParser.BIRTH_DEATH_EPIDEMIOLOGY), new Attribute.Default<String>("units", Units.Utils.getDefaultUnitName(units)) });
            writeParameter(BirthDeathEpidemiologyModelParser.R0, BirthDeathEpidemiologyModelParser.R0, prior, writer);
            writeParameter(BirthDeathEpidemiologyModelParser.RECOVERY_RATE, BirthDeathEpidemiologyModelParser.RECOVERY_RATE, prior, writer);
            writeParameter(BirthDeathEpidemiologyModelParser.SAMPLING_PROBABILITY, BirthDeathEpidemiologyModelParser.SAMPLING_PROBABILITY, prior, writer);
            writeParameter(BirthDeathEpidemiologyModelParser.ORIGIN, BirthDeathEpidemiologyModelParser.ORIGIN, prior, writer);
            writer.writeCloseTag(BirthDeathEpidemiologyModelParser.BIRTH_DEATH_EPIDEMIOLOGY);
            break;

        case SPECIES_BIRTH_DEATH:
        case SPECIES_YULE:
        case SPECIES_YULE_CALIBRATION:
            // *BEAST species-tree priors: the gene-tree level still gets a
            // constant-size coalescent whose popSize initial value is copied
            // from the species population-mean parameter.
            writer.writeComment("A prior assumption that the population size has remained constant");
            writer.writeComment("throughout the time spanned by the genealogy.");
            if (nodeHeightPrior == TreePriorType.SPECIES_YULE_CALIBRATION)
                writer.writeComment("Calibrated Yule: Heled J, Drummond AJ (2011), Syst Biol, doi: " + "10.1093/sysbio/syr087");
            writer.writeOpenTag(ConstantPopulationModelParser.CONSTANT_POPULATION_MODEL, new Attribute[] { new Attribute.Default<String>(XMLParser.ID, modelPrefix + "constant"), new Attribute.Default<String>("units", Units.Utils.getDefaultUnitName(options.units)) });

            // initial value for pop mean is the same as what used to be the value for the population size
            Parameter para = options.starBEASTOptions.getParameter(TraitData.TRAIT_SPECIES + "." + options.starBEASTOptions.POP_MEAN);
            prior.getParameter("constant.popSize").setInitial(para.getInitial());

            writer.writeOpenTag(ConstantPopulationModelParser.POPULATION_SIZE);
            writeParameter("constant.popSize", prior, writer);
            writer.writeCloseTag(ConstantPopulationModelParser.POPULATION_SIZE);
            writer.writeCloseTag(ConstantPopulationModelParser.CONSTANT_POPULATION_MODEL);
            break;
    }

    if ((!options.useStarBEAST) && nodeHeightPrior != TreePriorType.CONSTANT && nodeHeightPrior != TreePriorType.EXPONENTIAL) {
        // If the node height prior is not one of these two then we need to simulate a
        // random starting tree under a constant size coalescent.
        writer.writeComment("This is a simple constant population size coalescent model", "that is used to generate an initial tree for the chain.");
        writer.writeOpenTag(ConstantPopulationModelParser.CONSTANT_POPULATION_MODEL, new Attribute[] { new Attribute.Default<String>(XMLParser.ID, modelPrefix + "initialDemo"), new Attribute.Default<String>("units", Units.Utils.getDefaultUnitName(units)) });
        writer.writeOpenTag(ConstantPopulationModelParser.POPULATION_SIZE);
        if (initialPopSize != null) {
            // Reference the popSize parameter written by the LOGISTIC or
            // EXPANSION case above rather than creating a new one.
            writer.writeIDref(ParameterParser.PARAMETER, modelPrefix + initialPopSize);
        } else {
            writeParameter(modelPrefix + "initialDemo.popSize", 1, 100.0, Double.NaN, Double.NaN, writer);
        }
        writer.writeCloseTag(ConstantPopulationModelParser.POPULATION_SIZE);
        writer.writeCloseTag(ConstantPopulationModelParser.CONSTANT_POPULATION_MODEL);
    }

    // Disabled R0 RPN-statistic output for the epidemiology model,
    // retained for reference: R0 = b/(b*d+s*r).
    // if (nodeHeightPrior == TreePriorType.BIRTH_DEATH_BASIC_REPRODUCTIVE_NUMBER) {
    // writer.writeComment("R0 = b/(b*d+s*r)");
    // writer.writeOpenTag(RPNcalculatorStatisticParser.RPN_STATISTIC,
    // new Attribute[]{
    // new Attribute.Default<String>(XMLParser.ID, modelPrefix + "R0")
    // });
    //
    // writer.writeOpenTag(RPNcalculatorStatisticParser.VARIABLE,
    // new Attribute[]{
    // new Attribute.Default<String>(Statistic.NAME, modelPrefix + "b")
    // });
    // writeParameterRef(modelPrefix + BirthDeathSerialSamplingModelParser.BDSS + "." + BirthDeathSerialSamplingModelParser.LAMBDA, writer);
    // writer.writeCloseTag(RPNcalculatorStatisticParser.VARIABLE);
    //
    // writer.writeOpenTag(RPNcalculatorStatisticParser.VARIABLE,
    // new Attribute[]{
    // new Attribute.Default<String>(Statistic.NAME, modelPrefix + "d")
    // });
    // writeParameterRef(modelPrefix + BirthDeathSerialSamplingModelParser.BDSS + "." + BirthDeathSerialSamplingModelParser.RELATIVE_MU, writer);
    // writer.writeCloseTag(RPNcalculatorStatisticParser.VARIABLE);
    //
    // writer.writeOpenTag(RPNcalculatorStatisticParser.VARIABLE,
    // new Attribute[]{
    // new Attribute.Default<String>(Statistic.NAME, modelPrefix + "s")
    // });
    // writeParameterRef(modelPrefix + BirthDeathSerialSamplingModelParser.BDSS + "." + BirthDeathSerialSamplingModelParser.PSI, writer);
    // writer.writeCloseTag(RPNcalculatorStatisticParser.VARIABLE);
    //
    // writer.writeOpenTag(RPNcalculatorStatisticParser.VARIABLE,
    // new Attribute[]{
    // new Attribute.Default<String>(Statistic.NAME, modelPrefix + "r")
    // });
    // writeParameterRef(modelPrefix + BirthDeathSerialSamplingModelParser.BDSS + "." + BirthDeathSerialSamplingModelParser.R, writer);
    // writer.writeCloseTag(RPNcalculatorStatisticParser.VARIABLE);
    //
    // writer.writeOpenTag(RPNcalculatorStatisticParser.EXPRESSION,
    // new Attribute[]{
    // new Attribute.Default<String>(Statistic.NAME, modelPrefix + "R0")
    // });
    // writer.writeText(modelPrefix + "b " + modelPrefix + "b " + modelPrefix + "d " + "* " + modelPrefix + "s " + modelPrefix + "r " + "* + /");
    // writer.writeCloseTag(RPNcalculatorStatisticParser.EXPRESSION);
    //
    // writer.writeCloseTag(RPNcalculatorStatisticParser.RPN_STATISTIC);
    // }
}
End of aggregated usage example.