Usage of dr.evolution.datatype.DataType in the beast-mcmc project (beast-dev):
class CompleteHistorySimulatorParser, method parseXMLObject.
/**
 * Parses the complete-history simulator XML element: reads the tree, the
 * site/substitution model and (optional) branch-rate model, any
 * branch-specific parameter specification, optional count/reward registers,
 * and then runs the simulation.
 *
 * @param xo the XML element being parsed
 * @return the {@link CompleteHistorySimulator} after {@code simulate()} has run
 * @throws XMLParseException if a required attribute or child element is missing
 */
public Object parseXMLObject(XMLObject xo) throws XMLParseException {
    int nReplications = xo.getIntegerAttribute(REPLICATIONS);
    Tree tree = (Tree) xo.getChild(Tree.class);
    GammaSiteRateModel siteModel = (GammaSiteRateModel) xo.getChild(GammaSiteRateModel.class);
    BranchRateModel rateModel = (BranchRateModel) xo.getChild(BranchRateModel.class);
    if (rateModel == null) {
        // No explicit clock model supplied: fall back to the default branch-rate model.
        rateModel = new DefaultBranchRateModel();
    }
    DataType dataType = siteModel.getSubstitutionModel().getDataType();
    String jumpTag = xo.getAttribute(JUMP_TAG_NAME, JUMP_TAG);
    boolean sumAcrossSites = xo.getAttribute(SUM_SITES, false);
    // Optional branch-specific parameterization (both parameters required together).
    Parameter branchSpecificParameter = null;
    Parameter variableValueParameter = null;
    if (xo.hasChildNamed(BRANCH_SPECIFIC_SPECIFICATION)) {
        XMLObject bxo = xo.getChild(BRANCH_SPECIFIC_SPECIFICATION);
        branchSpecificParameter = (Parameter) bxo.getChild(BRANCH_VARIABLE_PARAMETER).getChild(Parameter.class);
        variableValueParameter = (Parameter) bxo.getChild(VARIABLE_VALUE_PARAMETER).getChild(Parameter.class);
    }
    CompleteHistorySimulator history = new CompleteHistorySimulator(tree, siteModel, rateModel, nReplications,
            sumAcrossSites, branchSpecificParameter, variableValueParameter);
    // Optional user-defined count and reward registers.
    XMLObject cxo = xo.getChild(COUNTS);
    if (cxo != null) {
        MarkovJumpsTreeLikelihoodParser.parseAllChildren(cxo, history, dataType.getStateCount(), jumpTag, MarkovJumpsType.COUNTS, false);
    }
    cxo = xo.getChild(REWARDS);
    if (cxo != null) {
        MarkovJumpsTreeLikelihoodParser.parseAllChildren(cxo, history, dataType.getStateCount(), jumpTag, MarkovJumpsType.REWARDS, false);
    }
    if (dataType instanceof Codons) {
        Codons codons = (Codons) dataType;
        if (xo.getAttribute(SYN_JUMPS, false)) {
            // use base 61
            double[] synRegMatrix = CodonLabeling.getRegisterMatrix(CodonLabeling.SYN, codons, false);
            Parameter registerParameter = new Parameter.Default(synRegMatrix);
            registerParameter.setId("S");
            history.addRegister(registerParameter, MarkovJumpsType.COUNTS, false);
        }
        if (xo.getAttribute(NON_SYN_JUMPS, false)) {
            // use base 61
            double[] nonSynRegMatrix = CodonLabeling.getRegisterMatrix(CodonLabeling.NON_SYN, codons, false);
            Parameter registerParameter = new Parameter.Default(nonSynRegMatrix);
            registerParameter.setId("N");
            history.addRegister(registerParameter, MarkovJumpsType.COUNTS, false);
        }
    }
    if (xo.getAttribute(ANNOTATE_WITH_ALIGNMENT, false)) {
        history.addAlignmentTrait();
    }
    boolean alignmentOnly = xo.getAttribute(ALIGNMENT_ONLY, false);
    if (dataType instanceof Codons && !alignmentOnly) {
        // Fixed typo in the warning message: "supress" -> "suppress".
        System.out.println("Codon models give exception when count statistics are done on them. "
                + "You can suppress this by setting alignmentOnly to true.");
    }
    if (alignmentOnly) {
        history.setAlignmentOnly();
    }
    history.simulate();
    return history;
}
Usage of dr.evolution.datatype.DataType in the beast-mcmc project (beast-dev):
class BeastGenerator, method generateXML.
/**
 * Generate a BEAST XML file from the current BEAUti options.
 *
 * The document is assembled section-by-section (taxa, taxon sets, alignments,
 * pattern lists, tree priors, starting trees, tree models, statistics,
 * likelihoods, clock/substitution models, operators, MCMC, reports); each
 * section wraps its generator call in a try/catch so a failure is reported as
 * a GeneratorException naming the section that failed.
 *
 * @param file File the output file to write the XML document to
 * @throws GeneratorException if any section generator fails
 * @throws java.io.IOException IOException
 * @throws dr.app.util.Arguments.ArgumentException
 * ArgumentException
 */
public void generateXML(File file) throws GeneratorException, IOException, Arguments.ArgumentException {
XMLWriter writer = new XMLWriter(new BufferedWriter(new FileWriter(file)));
writer.writeText("<?xml version=\"1.0\" standalone=\"yes\"?>");
writer.writeComment("Generated by BEAUTi " + VERSION.getVersionString(), " by Alexei J. Drummond, Andrew Rambaut and Marc A. Suchard", " Department of Computer Science, University of Auckland and", " Institute of Evolutionary Biology, University of Edinburgh", " David Geffen School of Medicine, University of California, Los Angeles", " http://beast.community/");
writer.writeOpenTag("beast", new Attribute.Default<String>("version", BeautiApp.VERSION.getVersion()));
writer.writeText("");
// this gives any added implementations of the 'Component' interface a
// chance to generate XML at this point in the BEAST file.
generateInsertionPoint(ComponentGenerator.InsertionPoint.BEFORE_TAXA, writer);
if (options.originDate != null) {
// Create a dummy taxon whose job is to specify the origin date
Taxon originTaxon = new Taxon("originTaxon");
options.originDate.setUnits(options.units);
originTaxon.setDate(options.originDate);
writeTaxon(originTaxon, true, false, writer);
}
// ++++++++++++++++ Taxon List ++++++++++++++++++
try {
// write complete taxon list
writeTaxa(options.taxonList, writer);
writer.writeText("");
if (!options.hasIdenticalTaxa()) {
// write all taxa in each gene tree regarding each data partition,
for (AbstractPartitionData partition : options.dataPartitions) {
if (partition.getTaxonList() != null) {
writeDifferentTaxa(partition, writer);
}
}
} else {
// microsat
for (PartitionPattern partitionPattern : options.getPartitionPattern()) {
if (partitionPattern.getTaxonList() != null && partitionPattern.getPatterns().hasMask()) {
writeDifferentTaxa(partitionPattern, writer);
}
}
}
} catch (Exception e) {
// NOTE(review): getMessage() is reported but the cause is not chained into
// GeneratorException — consider passing e as the cause if the type allows it.
e.printStackTrace(System.err);
throw new GeneratorException("Taxon list generation has failed:\n" + e.getMessage());
}
// ++++++++++++++++ Taxon Sets ++++++++++++++++++
List<Taxa> taxonSets = options.taxonSets;
try {
if (taxonSets != null && taxonSets.size() > 0) {
tmrcaStatisticsGenerator.writeTaxonSets(writer, taxonSets);
}
} catch (Exception e) {
e.printStackTrace();
throw new GeneratorException("Taxon sets generation has failed:\n" + e.getMessage());
}
generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_TAXA, writer);
// ++++++++++++++++ Alignments ++++++++++++++++++
// Collect the distinct alignments across partitions (each written once).
List<Alignment> alignments = new ArrayList<Alignment>();
try {
for (AbstractPartitionData partition : options.dataPartitions) {
Alignment alignment = null;
if (partition instanceof PartitionData) {
// microsat has no alignment
alignment = ((PartitionData) partition).getAlignment();
}
if (alignment != null && !alignments.contains(alignment)) {
alignments.add(alignment);
}
}
if (alignments.size() > 0) {
alignmentGenerator.writeAlignments(alignments, writer);
generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_SEQUENCES, writer);
}
} catch (Exception e) {
e.printStackTrace();
throw new GeneratorException("Alignments generation has failed:\n" + e.getMessage());
}
// ++++++++++++++++ Pattern Lists ++++++++++++++++++
try {
// Construct pattern lists even if sampling from a null alignment
// if (!options.samplePriorOnly) {
List<Microsatellite> microsatList = new ArrayList<Microsatellite>();
for (AbstractPartitionData partition : options.dataPartitions) {
// Each PD has one TreeLikelihood
if (partition.getTaxonList() != null) {
// Dispatch on the partition's data type: sequence-like types get a
// standard pattern list, GENERAL/CONTINUOUS are handled as attribute
// patterns later, microsatellites use their own writer.
switch(partition.getDataType().getType()) {
case DataType.NUCLEOTIDES:
case DataType.AMINO_ACIDS:
case DataType.CODONS:
case DataType.COVARION:
case DataType.TWO_STATES:
patternListGenerator.writePatternList((PartitionData) partition, writer);
break;
case DataType.GENERAL:
case DataType.CONTINUOUS:
// attribute patterns which is generated next bit of this method.
break;
case DataType.MICRO_SAT:
// microsat does not have alignment
patternListGenerator.writePatternList((PartitionPattern) partition, microsatList, writer);
break;
default:
throw new IllegalArgumentException("Unsupported data type");
}
writer.writeText("");
}
}
// }
} catch (Exception e) {
e.printStackTrace();
throw new GeneratorException("Pattern lists generation has failed:\n" + e.getMessage());
}
generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_PATTERNS, writer);
// ++++++++++++++++ Tree Prior Model ++++++++++++++++++
try {
for (PartitionTreePrior prior : options.getPartitionTreePriors()) {
treePriorGenerator.writeTreePriorModel(prior, writer);
writer.writeText("");
}
} catch (Exception e) {
e.printStackTrace();
throw new GeneratorException("Tree prior model generation has failed:\n" + e.getMessage());
}
// ++++++++++++++++ Starting Tree ++++++++++++++++++
try {
for (PartitionTreeModel model : options.getPartitionTreeModels()) {
initialTreeGenerator.writeStartingTree(model, writer);
writer.writeText("");
}
} catch (Exception e) {
e.printStackTrace();
throw new GeneratorException("Starting tree generation has failed:\n" + e.getMessage());
}
// ++++++++++++++++ Tree Model +++++++++++++++++++
try {
for (PartitionTreeModel model : options.getPartitionTreeModels()) {
treeModelGenerator.writeTreeModel(model, writer);
writer.writeText("");
}
generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_TREE_MODEL, writer);
} catch (Exception e) {
e.printStackTrace();
throw new GeneratorException("Tree model generation has failed:\n" + e.getMessage());
}
// ++++++++++++++++ Statistics ++++++++++++++++++
try {
if (taxonSets != null && taxonSets.size() > 0) {
tmrcaStatisticsGenerator.writeTMRCAStatistics(writer);
}
} catch (Exception e) {
e.printStackTrace();
throw new GeneratorException("TMRCA statistics generation has failed:\n" + e.getMessage());
}
// ++++++++++++++++ Tree Prior Likelihood ++++++++++++++++++
try {
for (PartitionTreeModel model : options.getPartitionTreeModels()) {
treePriorGenerator.writePriorLikelihood(model, writer);
writer.writeText("");
}
for (PartitionTreePrior prior : options.getPartitionTreePriors()) {
treePriorGenerator.writeMultiLociTreePriors(prior, writer);
}
generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_TREE_PRIOR, writer);
} catch (Exception e) {
e.printStackTrace();
throw new GeneratorException("Tree prior likelihood generation has failed:\n" + e.getMessage());
}
// ++++++++++++++++ Branch Rates Model ++++++++++++++++++
try {
for (PartitionClockModel model : options.getPartitionClockModels()) {
clockModelGenerator.writeBranchRatesModel(model, writer);
writer.writeText("");
}
} catch (Exception e) {
e.printStackTrace();
throw new GeneratorException("Branch rates model generation has failed:\n" + e.getMessage());
}
// ++++++++++++++++ Substitution Model & Site Model ++++++++++++++++++
try {
for (PartitionSubstitutionModel model : options.getPartitionSubstitutionModels()) {
substitutionModelGenerator.writeSubstitutionSiteModel(model, writer);
writer.writeText("");
}
generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_SUBSTITUTION_MODEL, writer);
} catch (Exception e) {
e.printStackTrace();
throw new GeneratorException("Substitution model or site model generation has failed:\n" + e.getMessage());
}
// ++++++++++++++++ AllMus parameter ++++++++++++++++++
try {
for (PartitionClockModel model : options.getPartitionClockModels()) {
clockModelGenerator.writeAllMus(model, writer);
}
} catch (Exception e) {
e.printStackTrace();
throw new GeneratorException("Clock model generation has failed:\n" + e.getMessage());
}
// ++++++++++++++++ Site Model ++++++++++++++++++
// for (PartitionSubstitutionModel model : options.getPartitionSubstitutionModels()) {
// substitutionModelGenerator.writeSiteModel(model, writer); // site model
// substitutionModelGenerator.writeAllMus(model, writer); // allMus
// writer.writeText("");
// }
generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_SITE_MODEL, writer);
// ++++++++++++++++ Tree Likelihood ++++++++++++++++++
try {
// Group compatible sequence partitions by (tree model, clock model, data type)
// so they can share a single MultipartitionTreeDataLikelihood; everything else
// goes to options.otherPartitions.
Map<Pair<Pair<PartitionTreeModel, PartitionClockModel>, DataType>, List<PartitionData>> partitionLists = new HashMap<Pair<Pair<PartitionTreeModel, PartitionClockModel>, DataType>, List<PartitionData>>();
options.multiPartitionLists.clear();
options.otherPartitions.clear();
for (AbstractPartitionData partition : options.dataPartitions) {
// generate tree likelihoods for alignment data partitions
if (partition.getTaxonList() != null) {
if (treeLikelihoodGenerator.canUseMultiPartition(partition)) {
// all sequence partitions of the same type as the first into the list for use in a
// MultipartitionTreeDataLikelihood. Must also share the same tree, clock model and not be doing
// ancestral reconstruction or counting
Pair<Pair<PartitionTreeModel, PartitionClockModel>, DataType> key = new Pair(new Pair(partition.getPartitionTreeModel(), partition.getPartitionClockModel()), partition.getDataType());
List<PartitionData> partitions = partitionLists.get(key);
if (partitions == null) {
partitions = new ArrayList<PartitionData>();
options.multiPartitionLists.add(partitions);
}
partitions.add((PartitionData) partition);
partitionLists.put(key, partitions);
} else {
options.otherPartitions.add(partition);
}
}
}
treeLikelihoodGenerator.writeAllTreeLikelihoods(writer);
generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_TREE_LIKELIHOOD, writer);
} catch (Exception e) {
e.printStackTrace();
throw new GeneratorException("Tree likelihood generation has failed:\n" + e.getMessage());
}
generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_TRAITS, writer);
// ++++++++++++++++ Operators ++++++++++++++++++
try {
generateInsertionPoint(ComponentGenerator.InsertionPoint.BEFORE_OPERATORS, writer);
List<Operator> operators = options.selectOperators();
operatorsGenerator.writeOperatorSchedule(operators, writer);
writer.writeText("");
generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_OPERATORS, writer);
} catch (Exception e) {
e.printStackTrace();
throw new GeneratorException("Operators generation has failed:\n" + e.getMessage());
}
// ++++++++++++++++ MCMC ++++++++++++++++++
try {
// XMLWriter writer, List<PartitionSubstitutionModel> models,
writeMCMC(writer);
writer.writeText("");
generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_MCMC, writer);
} catch (Exception e) {
e.printStackTrace();
throw new GeneratorException("MCMC or log generation has failed:\n" + e.getMessage());
}
// ++++++++++++++++ Reports / post-run analyses ++++++++++++++++++
try {
writeTimerReport(writer);
writer.writeText("");
if (options.performTraceAnalysis) {
writeTraceAnalysis(writer);
}
if (options.generateCSV) {
for (PartitionTreePrior prior : options.getPartitionTreePriors()) {
treePriorGenerator.writeEBSPAnalysisToCSVfile(prior, writer);
}
}
} catch (Exception e) {
e.printStackTrace();
throw new GeneratorException("The last part of XML generation has failed:\n" + e.getMessage());
}
writer.writeCloseTag("beast");
writer.flush();
writer.close();
}
Usage of dr.evolution.datatype.DataType in the beast-mcmc project (beast-dev):
class SiteModelsPanel, method setCurrentModels.
/**
 * Clears the model editor panel and updates the panel border title and the
 * clone action to reflect a multi-selection of substitution models: the title
 * names the shared data type when all selected models agree on one, and
 * cloning is enabled only in that case.
 */
private void setCurrentModels(List<PartitionSubstitutionModel> models) {
    modelPanelParent.removeAll();
    currentModel = null;
    // Collect the distinct data types across the selected models.
    Set<DataType> distinctTypes = new HashSet<DataType>();
    for (PartitionSubstitutionModel selected : models) {
        distinctTypes.add(selected.getDataType());
    }
    boolean singleType = distinctTypes.size() == 1;
    String title = singleType
            ? "Multiple " + distinctTypes.iterator().next().getName() + " substitution models selected"
            : "Multiple mixed type substitution models selected";
    modelBorder.setTitle(title);
    // Cloning across models only makes sense when they all share one data type.
    cloneModelsAction.setEnabled(singleType);
    repaint();
}
Usage of dr.evolution.datatype.DataType in the beast-mcmc project (beast-dev):
class PartitionData, method createFrequencyModel.
/**
 * Creates the frequency model selected by {@code frequencyModelIndex}:
 * 0 = nucleotide (4 states), 1 = codon (61 states), 2 = amino acid (20 states).
 *
 * The frequencies are sliced out of the flat {@code frequencyParameterValues}
 * array, which packs all three parameter blocks back-to-back:
 * [0,4) nucleotide, [4,65) codon, [65,85) amino acid. The previous version
 * listed every element by hand (61 and 20 element-by-element copies), which
 * was error-prone duplication; Arrays.copyOfRange expresses the same slices.
 *
 * @return the frequency model, or null if the index is not yet implemented
 */
public FrequencyModel createFrequencyModel() {
    final int from;
    final int to;
    if (this.frequencyModelIndex == 0) {
        // Nucleotide frequencies
        from = 0;
        to = 4;
    } else if (this.frequencyModelIndex == 1) {
        // Codon frequencies
        from = 4;
        to = 65;
    } else if (this.frequencyModelIndex == 2) {
        // Amino acid frequencies
        from = 65;
        to = 85;
    } else {
        System.out.println("Not yet implemented");
        return null;
    }
    // Fully qualified to avoid depending on the file's import list.
    double[] values = java.util.Arrays.copyOfRange(frequencyParameterValues, from, to);
    Parameter freqs = new Parameter.Default(values);
    DataType dataType = this.createDataType();
    return new FrequencyModel(dataType, freqs);
}
Usage of dr.evolution.datatype.DataType in the beast-mcmc project (beast-dev):
class XMLGenerator, method writeFrequencyModel.
// END: writeBranchModel
/**
 * Writes a frequencyModel XML element for the partition's selected frequency
 * model. The three cases (nucleotide, codon, amino acid) differed only in the
 * slice of {@code data.frequencyParameterValues} they read and the dataType
 * attribute they wrote, so they are deduplicated into slice bounds plus one
 * shared write path; the frequency string is built with a StringBuilder
 * instead of repeated String concatenation in a loop.
 *
 * @param data   the partition whose frequency model is written
 * @param writer the XML writer receiving the element
 */
private void writeFrequencyModel(PartitionData data, XMLWriter writer) {
    // Slice of data.frequencyParameterValues and the dataType attribute value
    // for each model: [0,4) nucleotide, [4,65) codon, [65,85) amino acid.
    final int from;
    final int to;
    final String dataTypeDescription;
    DataType dataType;
    switch (data.frequencyModelIndex) {
        case 0: // Nucleotide
            dataType = data.createDataType();
            from = 0;
            to = 4;
            dataTypeDescription = dataType.getDescription();
            break;
        case 1: // Codon
            dataType = data.createDataType();
            from = 4;
            to = 65;
            dataTypeDescription = Utils.CODON_UNIVERSAL;
            break;
        case 2: // Amino acid
            dataType = data.createDataType();
            from = 65;
            to = 85;
            dataTypeDescription = AminoAcids.DESCRIPTION;
            break;
        default:
            // No matching case in the original switch: write nothing.
            return;
    }
    // Space-separated frequency values.
    StringBuilder frequencies = new StringBuilder();
    for (int i = from; i < to; i++) {
        if (i > from) {
            frequencies.append(' ');
        }
        frequencies.append(data.frequencyParameterValues[i]);
    }
    writer.writeOpenTag(FrequencyModelParser.FREQUENCY_MODEL, new Attribute[] {
            new Attribute.Default<String>(XMLParser.ID, data.frequencyModelIdref),
            new Attribute.Default<String>(DataType.DATA_TYPE, dataTypeDescription) });
    writeParameter(FrequencyModelParser.FREQUENCIES, null, dataType.getStateCount(), frequencies.toString(), writer);
    writer.writeCloseTag(FrequencyModelParser.FREQUENCY_MODEL);
}
Aggregations