Use of dr.util.Attribute in project beast-mcmc by beast-dev: class InitialTreeGenerator, method writeStartingTree.
/**
 * Generate XML for the starting tree.
 *
 * @param model  the PartitionTreeModel
 * @param writer the writer
 */
public void writeStartingTree(PartitionTreeModel model, XMLWriter writer) {
    // the prefix is only set when options.getPartitionTreeModels().size() > 1
    setModelPrefix(model.getPrefix());

    switch (model.getStartingTreeType()) {
        case USER:
        case UPGMA:
            Parameter rootHeight = model.getParameter("treeModel.rootHeight");

            // generate a rescaled starting tree
            writer.writeComment("Construct a starting tree that is compatible with specified clade heights");
            Attribute[] attributes = (rootHeight.priorType != PriorType.NONE_TREE_PRIOR ?
                    new Attribute[]{
                            new Attribute.Default<String>(XMLParser.ID, modelPrefix + STARTING_TREE),
                            new Attribute.Default<String>(RescaledTreeParser.HEIGHT, "" + rootHeight.getInitial())
                    } :
                    new Attribute[]{
                            new Attribute.Default<String>(XMLParser.ID, modelPrefix + STARTING_TREE)
                    });
            writer.writeOpenTag(RescaledTreeParser.RESCALED_TREE, attributes);
            writeSourceTree(model, writer);

            if (options.taxonSets != null && options.taxonSets.size() > 0 && !options.useStarBEAST) {
                for (Taxa taxa : options.taxonSets) {
                    Double height = options.taxonSetsHeights.get(taxa);
                    if (height != null) {
                        writer.writeOpenTag(RescaledTreeParser.CLADE,
                                new Attribute.Default<String>(RescaledTreeParser.HEIGHT, height.toString()));
                        writer.writeTag("taxa", new Attribute.Default<String>(XMLParser.IDREF, taxa.getId()), true);
                        writer.writeCloseTag(RescaledTreeParser.CLADE);
                    } else if (options.taxonSetsMono.get(taxa)) {
                        // if monophyly is enforced, placing this clade element here forces BEAST to check
                        // that the clade exists in the tree
                        writer.writeOpenTag(RescaledTreeParser.CLADE);
                        writer.writeTag("taxa", new Attribute.Default<String>(XMLParser.IDREF, taxa.getId()), true);
                        writer.writeCloseTag(RescaledTreeParser.CLADE);
                    }
                }
            }
            writer.writeCloseTag(RescaledTreeParser.RESCALED_TREE);
            break;

        case RANDOM:
            // generate a coalescent tree
            String simulatorId = modelPrefix + STARTING_TREE;
            String taxaId = TaxaParser.TAXA;
            AbstractPartitionData partition = options.getDataPartitions(model).get(0);
            if (!options.hasIdenticalTaxa()) {
                taxaId = partition.getPartitionTreeModel().getPrefix() + TaxaParser.TAXA;
            }
            if (partition instanceof PartitionPattern && ((PartitionPattern) partition).getPatterns().hasMask()) {
                taxaId = partition.getPrefix() + TaxaParser.TAXA;
            }

            writer.writeComment("Generate a random starting tree under the coalescent process");
            if (options.taxonSets != null && options.taxonSets.size() > 0 && !options.useStarBEAST) {
                // the !options.useStarBEAST check is required; the *BEAST case is handled elsewhere
                writeSubTree(simulatorId, taxaId, options.taxonList, model, writer);
            } else {
                writer.writeOpenTag(CoalescentSimulatorParser.COALESCENT_SIMULATOR,
                        new Attribute[]{new Attribute.Default<String>(XMLParser.ID, simulatorId)});
                writeTaxaRef(taxaId, model, writer);
                writeInitialDemoModelRef(model, writer);
                writer.writeCloseTag(CoalescentSimulatorParser.COALESCENT_SIMULATOR);
            }
            break;

        default:
            throw new IllegalArgumentException("Unknown StartingTreeType");
    }
}
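Every branch above follows the same dr.util.Attribute idiom: key/value pairs are wrapped in Attribute.Default and handed to XMLWriter.writeOpenTag, which serialises them as XML attributes on the opening tag. The standalone sketch below is not part of beast-mcmc; it only assumes the Attribute.Default(String, T) constructor used above and the getAttributeName/getAttributeValue accessors of the dr.util.Attribute interface, and shows the name/value pairing in isolation.

import dr.util.Attribute;

// Minimal sketch: an Attribute is a typed name/value pair that the XMLWriter
// methods above turn into name="value" attributes on a tag.
public class AttributeSketch {
    public static void main(String[] args) {
        Attribute<String> id = new Attribute.Default<String>("id", "startingTree");
        Attribute<Double> height = new Attribute.Default<Double>("height", 10.0);

        // Print the pairs roughly the way they would appear in the generated XML.
        System.out.println(id.getAttributeName() + "=\"" + id.getAttributeValue() + "\"");
        System.out.println(height.getAttributeName() + "=\"" + height.getAttributeValue() + "\"");
    }
}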
Use of dr.util.Attribute in project beast-mcmc by beast-dev: class InitialTreeGenerator, method writeTaxaRef.
private void writeTaxaRef(String taxaId, PartitionTreeModel model, XMLWriter writer) {
    Attribute[] taxaAttribute = {new Attribute.Default<String>(XMLParser.IDREF, taxaId)};

    if (options.taxonSets != null && options.taxonSets.size() > 0 && !options.useStarBEAST) {
        // the !options.useStarBEAST check is required;
        // the *BEAST case is handled in STARBEASTGenerator.writeStartingTreeForCalibration(XMLWriter writer)
        writer.writeOpenTag(OldCoalescentSimulatorParser.CONSTRAINED_TAXA);
        writer.writeTag(TaxaParser.TAXA, taxaAttribute, true);
        for (Taxa taxa : options.taxonSets) {
            if (options.taxonSetsTreeModel.get(taxa).equals(model)) {
                Parameter statistic = options.getStatistic(taxa);

                Attribute mono = new Attribute.Default<Boolean>(
                        OldCoalescentSimulatorParser.IS_MONOPHYLETIC, options.taxonSetsMono.get(taxa));
                writer.writeOpenTag(OldCoalescentSimulatorParser.TMRCA_CONSTRAINT, mono);

                writer.writeIDref(TaxaParser.TAXA, taxa.getId());
                if (model.getPartitionTreePrior().getNodeHeightPrior() == TreePriorType.YULE_CALIBRATION
                        && statistic.priorType == PriorType.UNIFORM_PRIOR) {
                    writeDistribution(statistic, false, writer);
                }
                writer.writeCloseTag(OldCoalescentSimulatorParser.TMRCA_CONSTRAINT);
            }
        }
        writer.writeCloseTag(OldCoalescentSimulatorParser.CONSTRAINED_TAXA);
    } else {
        writer.writeTag(TaxaParser.TAXA, taxaAttribute, true);
    }
}
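Note that writeOpenTag also accepts a single Attribute, as used above for the monophyly flag. The hypothetical helper below is a sketch, not project code; it isolates that pattern using only the calls and constants that appear in the method above, and assumes the same class context (fields and imports) as writeTaxaRef.

// Hypothetical helper (sketch only): write one tmrcaConstraint element for a
// taxon set, marking it monophyletic or not via a single Boolean Attribute.
private void writeMonophylyConstraint(String taxonSetId, boolean mono, XMLWriter writer) {
    Attribute monoAttr = new Attribute.Default<Boolean>(
            OldCoalescentSimulatorParser.IS_MONOPHYLETIC, mono);
    writer.writeOpenTag(OldCoalescentSimulatorParser.TMRCA_CONSTRAINT, monoAttr);
    writer.writeIDref(TaxaParser.TAXA, taxonSetId);
    writer.writeCloseTag(OldCoalescentSimulatorParser.TMRCA_CONSTRAINT);
}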
Use of dr.util.Attribute in project beast-mcmc by beast-dev: class LogGenerator, method writeDemographicLogToFile.
public void writeDemographicLogToFile(XMLWriter writer,
                                      TreePriorGenerator treePriorGenerator,
                                      ClockModelGenerator clockModelGenerator,
                                      SubstitutionModelGenerator substitutionModelGenerator,
                                      TreeLikelihoodGenerator treeLikelihoodGenerator) {
    writer.writeComment("demographic log file");

    if (options.demographicLogFileName == null) {
        options.demographicLogFileName = options.fileNameStem + ".demo.log";
    }

    String header = "Demographic Model: " + options.demographicModelName;
    writer.writeOpenTag(LoggerParser.LOG, new Attribute[]{
            new Attribute.Default<String>(XMLParser.ID, "fileLog"),
            new Attribute.Default<String>(LoggerParser.HEADER, header + ""),
            new Attribute.Default<String>(LoggerParser.LOG_EVERY, options.logEvery + ""),
            new Attribute.Default<String>(LoggerParser.FILE_NAME, options.logFileName),
            new Attribute.Default<Boolean>(LoggerParser.ALLOW_OVERWRITE_LOG, options.allowOverwriteLog)
    });

    if (options.hasData()) {
        writer.writeIDref(CompoundLikelihoodParser.POSTERIOR, "posterior");
    }
    writer.writeIDref(CompoundLikelihoodParser.PRIOR, "prior");

    for (PartitionTreeModel model : options.getPartitionTreeModels()) {
        writer.writeIDref(ParameterParser.PARAMETER,
                model.getPrefix() + TreeModel.TREE_MODEL + "." + TreeModelParser.ROOT_HEIGHT);
    }

    if (options.useStarBEAST) {
        for (Taxa taxa : options.speciesSets) {
            // make tmrca(tree.name) easy to read in the log for Tracer
            writer.writeIDref(TMRCAStatisticParser.TMRCA_STATISTIC, "tmrca(" + taxa.getId() + ")");
        }
    } else {
        for (Taxa taxa : options.taxonSets) {
            // make tmrca(tree.name) easy to read in the log for Tracer
            PartitionTreeModel treeModel = options.taxonSetsTreeModel.get(taxa);
            writer.writeIDref(TMRCAStatisticParser.TMRCA_STATISTIC,
                    "tmrca(" + treeModel.getPrefix() + taxa.getId() + ")");
        }
    }
    // } else { // no species

    for (PartitionTreePrior prior : options.getPartitionTreePriors()) {
        // treePriorGenerator.setModelPrefix(prior.getPrefix()); // priorName.treeModel
        treePriorGenerator.writeParameterLog(prior, writer);
    }

    for (PartitionSubstitutionModel model : options.getPartitionSubstitutionModels()) {
        substitutionModelGenerator.writeLog(model, writer);
    }

    for (PartitionClockModel model : options.getPartitionClockModels()) {
        // if (model.getRateTypeOption() == FixRateType.FIXED_MEAN) {
        //     writer.writeIDref(ParameterParser.PARAMETER, model.getName());
        //     if (model.getClockType() == ClockType.UNCORRELATED) {
        //         switch (model.getClockDistributionType()) {
        //             case LOGNORMAL:
        //                 writer.writeIDref(ParameterParser.PARAMETER, model.getPrefix() + ClockType.UCLD_STDEV);
        //                 break;
        //             case GAMMA:
        //                 throw new UnsupportedOperationException("Uncorrelated gamma model not implemented yet");
        //             case CAUCHY:
        //                 throw new UnsupportedOperationException("Uncorrelated Cauchy model not implemented yet");
        //             case EXPONENTIAL:
        //                 // nothing required
        //                 break;
        //         }
        //     }
        // }
        clockModelGenerator.writeLog(model, writer);
    }

    for (PartitionClockModel model : options.getPartitionClockModels()) {
        clockModelGenerator.writeLogStatistic(model, writer);
    }

    generateInsertionPoint(ComponentGenerator.InsertionPoint.IN_FILE_LOG_PARAMETERS, writer);

    treeLikelihoodGenerator.writeTreeLikelihoodReferences(writer);
    clockModelGenerator.writeClockLikelihoodReferences(writer);

    generateInsertionPoint(ComponentGenerator.InsertionPoint.IN_FILE_LOG_LIKELIHOODS, writer);

    // coalescentLikelihood
    for (PartitionTreeModel model : options.getPartitionTreeModels()) {
        PartitionTreePrior prior = model.getPartitionTreePrior();
        treePriorGenerator.writePriorLikelihoodReferenceLog(prior, model, writer);
        writer.writeText("");
    }

    for (PartitionTreePrior prior : options.getPartitionTreePriors()) {
        if (prior.getNodeHeightPrior() == TreePriorType.EXTENDED_SKYLINE) {
            // only one coalescent likelihood in this case
            writer.writeIDref(CoalescentLikelihoodParser.COALESCENT_LIKELIHOOD, prior.getPrefix() + COALESCENT);
        } else if (prior.getNodeHeightPrior() == TreePriorType.SKYGRID) {
            writer.writeIDref(GMRFSkyrideLikelihoodParser.SKYGRID_LIKELIHOOD, prior.getPrefix() + "skygrid");
        }
    }

    writer.writeCloseTag(LoggerParser.LOG);

    generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_FILE_LOG, writer);
}
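The five-attribute array passed to writeOpenTag at the top of this method mixes String and Boolean attributes. A hypothetical helper like the one below (a sketch, not part of beast-mcmc) could assemble the same array from its parts; it assumes only the LoggerParser and XMLParser constants already used above.

// Hypothetical helper (sketch only): assemble the attributes for a file-logger tag.
private Attribute[] fileLoggerAttributes(String id, String header, int logEvery,
                                         String fileName, boolean allowOverwrite) {
    return new Attribute[]{
            new Attribute.Default<String>(XMLParser.ID, id),
            new Attribute.Default<String>(LoggerParser.HEADER, header),
            new Attribute.Default<String>(LoggerParser.LOG_EVERY, logEvery + ""),
            new Attribute.Default<String>(LoggerParser.FILE_NAME, fileName),
            new Attribute.Default<Boolean>(LoggerParser.ALLOW_OVERWRITE_LOG, allowOverwrite)
    };
}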
Use of dr.util.Attribute in project beast-mcmc by beast-dev: class OperatorsGenerator, method writeOperatorSchedule.
/**
 * Write the operator schedule XML block.
 *
 * @param operators the list of operators
 * @param writer    the writer
 */
public void writeOperatorSchedule(List<Operator> operators, XMLWriter writer) {
    Attribute[] operatorAttributes;

    // certain models benefit from a log-scale operator optimization schedule
    boolean shouldLogCool = false;
    for (PartitionTreePrior partition : options.getPartitionTreePriors()) {
        if (partition.getNodeHeightPrior() == TreePriorType.SKYGRID
                || partition.getNodeHeightPrior() == TreePriorType.GMRF_SKYRIDE) {
            shouldLogCool = true;
            break;
        }
    }
    for (PartitionSubstitutionModel model : options.getPartitionSubstitutionModels()) {
        if (model.getDataType().getType() == DataType.GENERAL
                || model.getDataType().getType() == DataType.CONTINUOUS) {
            shouldLogCool = true;
            break;
        }
    }

    operatorAttributes = new Attribute[]{
            new Attribute.Default<String>(XMLParser.ID, "operators"),
            new Attribute.Default<String>(SimpleOperatorScheduleParser.OPTIMIZATION_SCHEDULE,
                    (shouldLogCool ?
                            OperatorSchedule.OptimizationTransform.LOG.toString() :
                            OperatorSchedule.OptimizationTransform.DEFAULT.toString()))
    };

    writer.writeComment("Define operators");
    writer.writeOpenTag(SimpleOperatorScheduleParser.OPERATOR_SCHEDULE, operatorAttributes);

    for (Operator operator : operators) {
        if (operator.getWeight() > 0. && operator.isUsed()) {
            setModelPrefix(operator.getPrefix());
            writeOperator(operator, writer);
        }
    }

    // Added for special operators
    generateInsertionPoint(ComponentGenerator.InsertionPoint.IN_OPERATORS, writer);

    writer.writeCloseTag(SimpleOperatorScheduleParser.OPERATOR_SCHEDULE);
}
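The only Attribute usage here is the two-element array that sets the schedule's id and optimization transform. A hypothetical refactoring (sketch only, not project code) could isolate that choice, using just the enum constants and parser fields referenced above.

// Hypothetical helper (sketch only): pick the optimization transform and build
// the operator-schedule attributes from the precomputed shouldLogCool flag.
private Attribute[] operatorScheduleAttributes(boolean shouldLogCool) {
    String transform = shouldLogCool
            ? OperatorSchedule.OptimizationTransform.LOG.toString()
            : OperatorSchedule.OptimizationTransform.DEFAULT.toString();
    return new Attribute[]{
            new Attribute.Default<String>(XMLParser.ID, "operators"),
            new Attribute.Default<String>(SimpleOperatorScheduleParser.OPTIMIZATION_SCHEDULE, transform)
    };
}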
Use of dr.util.Attribute in project beast-mcmc by beast-dev: class PatternListGenerator, method writePatternList.
/**
 * Write the pattern list for a microsatellite (micro-sat) partition.
 *
 * @param partition    the microsatellite partition
 * @param microsatList the microsatellites written so far, so repeated ones are referenced by idref
 * @param writer       the writer
 * @throws GeneratorException if the partition's substitution model has no Microsatellite
 */
public void writePatternList(PartitionPattern partition, List<Microsatellite> microsatList, XMLWriter writer)
        throws GeneratorException {
    PartitionSubstitutionModel model = partition.getPartitionSubstitutionModel();

    if (model.getDataType().getType() == DataType.MICRO_SAT) {
        Patterns patterns = partition.getPatterns();

        writer.writeComment("The patterns for microsatellite");
        writer.writeOpenTag(MicrosatellitePatternParser.MICROSATPATTERN,
                new Attribute[]{new Attribute.Default<String>(XMLParser.ID, partition.getName())});

        if (options.hasIdenticalTaxa() && !patterns.hasMask()) {
            writer.writeIDref(TaxaParser.TAXA, TaxaParser.TAXA);
        } else {
            writer.writeIDref(TaxaParser.TAXA, partition.getName() + "." + TaxaParser.TAXA);
        }

        Microsatellite m = model.getMicrosatellite();
        if (m == null) {
            throw new GeneratorException("Microsatellite is null in partition:\n" + partition.getName());
        }

        if (!microsatList.contains(m)) {
            microsatList.add(m);
            writer.writeTag(MicrosatelliteParser.MICROSAT, new Attribute[]{
                    new Attribute.Default<String>(XMLParser.ID, m.getName()),
                    new Attribute.Default<Integer>(MicrosatelliteParser.MAX, m.getMax()),
                    new Attribute.Default<Integer>(MicrosatelliteParser.MIN, m.getMin()),
                    new Attribute.Default<Integer>(MicrosatelliteParser.UNIT_LENGTH, m.getUnitLength())
            }, true);
        } else {
            writer.writeTag(MicrosatelliteParser.MICROSAT,
                    new Attribute[]{new Attribute.Default<String>(XMLParser.IDREF, m.getName())}, true);
        }

        writer.writeOpenTag(MicrosatellitePatternParser.MICROSAT_SEQ);
        String seq = "";
        int c = 0;
        for (int i = 0; i < patterns.getTaxonCount(); i++) {
            if (!patterns.isMasked(i)) {
                if (c > 0) {
                    seq += ",";
                }
                int state = patterns.getPatternState(i, 0);
                if (state == Microsatellite.UNKNOWN_STATE_LENGTH) {
                    seq += Microsatellite.UNKNOWN_CHARACTER;
                } else {
                    seq += Integer.toString(state);
                }
                c++;
            }
        }
        writer.writeText(seq);
        writer.writeCloseTag(MicrosatellitePatternParser.MICROSAT_SEQ);

        writer.writeCloseTag(MicrosatellitePatternParser.MICROSATPATTERN);
    }
}
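The comma-separated sequence above is assembled with repeated string concatenation. An equivalent StringBuilder-based version (a sketch of an alternative, not the project's code) is shown below; it reuses only the patterns, writer, and Microsatellite calls that appear in the method and assumes the same local context.

// Sketch of an alternative to the concatenation loop above: build the
// comma-separated microsatellite states with a StringBuilder.
StringBuilder seq = new StringBuilder();
for (int i = 0; i < patterns.getTaxonCount(); i++) {
    if (patterns.isMasked(i)) {
        continue;                    // skip masked taxa, as above
    }
    if (seq.length() > 0) {
        seq.append(",");             // separator between states
    }
    int state = patterns.getPatternState(i, 0);
    if (state == Microsatellite.UNKNOWN_STATE_LENGTH) {
        seq.append(Microsatellite.UNKNOWN_CHARACTER);
    } else {
        seq.append(state);
    }
}
writer.writeText(seq.toString());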