Use of dr.util.Pair in the project beast-mcmc by beast-dev.
From the class BeastParser, method executingRunnable:
@Override
protected void executingRunnable() {
    Logger.getLogger("dr.apps.beast").info("\nCitations for this analysis: ");
    Map<String, Set<Pair<String, String>>> categoryMap = new LinkedHashMap<String, Set<Pair<String, String>>>();
    // force the Framework category to be first...
    categoryMap.put("Framework", new LinkedHashSet<Pair<String, String>>());
    for (Pair<String, String> keyPair : getCitationStore().keySet()) {
        Set<Pair<String, String>> pairSet = categoryMap.get(keyPair.fst);
        if (pairSet == null) {
            pairSet = new LinkedHashSet<Pair<String, String>>();
            categoryMap.put(keyPair.fst, pairSet);
        }
        pairSet.add(keyPair);
    }
    for (String category : categoryMap.keySet()) {
        Logger.getLogger("dr.apps.beast").info("\n" + category.toUpperCase());
        Set<Pair<String, String>> pairSet = categoryMap.get(category);
        for (Pair<String, String> keyPair : pairSet) {
            Logger.getLogger("dr.apps.beast").info(keyPair.snd + ":");
            for (Citation citation : getCitationStore().get(keyPair)) {
                Logger.getLogger("dr.apps.beast").info("\t" + citation.toString());
            }
        }
    }
    // clear the citation store so the same citations are not printed again
    getCitationStore().clear();
    Logger.getLogger("dr.apps.beast").info("\n");
}
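Both excerpts treat Pair instances as plain value objects: the components are read directly as the public fields fst and snd, and pairs are stored in hash-based collections (LinkedHashSet, and as keys of the citation store map), which only behaves correctly if equals and hashCode are defined over both components. Below is a minimal sketch of a class consistent with that usage; it is a hypothetical reconstruction for illustration, not the verbatim dr.util.Pair source.

public class Pair<A, B> {

    public final A fst; // first component, read as keyPair.fst above
    public final B snd; // second component, read as keyPair.snd above

    public Pair(A fst, B snd) {
        this.fst = fst;
        this.snd = snd;
    }

    // equals and hashCode over both components are what make Pair usable
    // as a HashMap/LinkedHashMap key and as a LinkedHashSet element
    @Override
    public boolean equals(Object other) {
        if (this == other) return true;
        if (!(other instanceof Pair)) return false;
        Pair<?, ?> pair = (Pair<?, ?>) other;
        return (fst == null ? pair.fst == null : fst.equals(pair.fst))
                && (snd == null ? pair.snd == null : snd.equals(pair.snd));
    }

    @Override
    public int hashCode() {
        int result = (fst == null ? 0 : fst.hashCode());
        return 31 * result + (snd == null ? 0 : snd.hashCode());
    }

    @Override
    public String toString() {
        return "(" + fst + ", " + snd + ")";
    }
}

Because fst and snd are final, a Pair cannot change after it has been used as a key, which protects the hash-based collections above from key mutation.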
Use of dr.util.Pair in the project beast-mcmc by beast-dev.
From the class BeastGenerator, method generateXML:
/**
 * Generate a BEAST XML file from these options.
 *
 * @param file the output file
 * @throws GeneratorException if any section of the XML cannot be generated
 * @throws java.io.IOException if the file cannot be written
 * @throws dr.app.util.Arguments.ArgumentException if an option argument is invalid
 */
public void generateXML(File file) throws GeneratorException, IOException, Arguments.ArgumentException {
    XMLWriter writer = new XMLWriter(new BufferedWriter(new FileWriter(file)));
    writer.writeText("<?xml version=\"1.0\" standalone=\"yes\"?>");
    writer.writeComment("Generated by BEAUTi " + version.getVersionString(), " by Alexei J. Drummond, Andrew Rambaut and Marc A. Suchard", " Department of Computer Science, University of Auckland and", " Institute of Evolutionary Biology, University of Edinburgh", " David Geffen School of Medicine, University of California, Los Angeles", " http://beast.bio.ed.ac.uk/");
    writer.writeOpenTag("beast");
    writer.writeText("");
    // this gives any added implementations of the 'Component' interface a
    // chance to generate XML at this point in the BEAST file.
    generateInsertionPoint(ComponentGenerator.InsertionPoint.BEFORE_TAXA, writer);
    if (options.originDate != null) {
        // Create a dummy taxon whose job is to specify the origin date
        Taxon originTaxon = new Taxon("originTaxon");
        options.originDate.setUnits(options.units);
        originTaxon.setDate(options.originDate);
        writeTaxon(originTaxon, true, false, writer);
    }
    //++++++++++++++++ Taxon List ++++++++++++++++++
    try {
        // write complete taxon list
        writeTaxa(options.taxonList, writer);
        writer.writeText("");
        if (!options.hasIdenticalTaxa()) {
            // write the taxa for each gene tree, one set per data partition
            for (AbstractPartitionData partition : options.dataPartitions) {
                if (partition.getTaxonList() != null) {
                    writeDifferentTaxa(partition, writer);
                }
            }
        } else {
            // microsat
            for (PartitionPattern partitionPattern : options.getPartitionPattern()) {
                if (partitionPattern.getTaxonList() != null && partitionPattern.getPatterns().hasMask()) {
                    writeDifferentTaxa(partitionPattern, writer);
                }
            }
        }
    } catch (Exception e) {
        e.printStackTrace(System.err);
        throw new GeneratorException("Taxon list generation has failed:\n" + e.getMessage());
    }
    //++++++++++++++++ Taxon Sets ++++++++++++++++++
    List<Taxa> taxonSets = options.taxonSets;
    try {
        if (taxonSets != null && taxonSets.size() > 0 && !options.useStarBEAST) {
            tmrcaStatisticsGenerator.writeTaxonSets(writer, taxonSets);
        }
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("Taxon sets generation has failed:\n" + e.getMessage());
    }
    generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_TAXA, writer);
    //++++++++++++++++ Alignments ++++++++++++++++++
    List<Alignment> alignments = new ArrayList<Alignment>();
    try {
        for (AbstractPartitionData partition : options.dataPartitions) {
            Alignment alignment = null;
            if (partition instanceof PartitionData) {
                // microsat has no alignment
                alignment = ((PartitionData) partition).getAlignment();
            }
            if (alignment != null && !alignments.contains(alignment)) {
                alignments.add(alignment);
            }
        }
        if (alignments.size() > 0) {
            alignmentGenerator.writeAlignments(alignments, writer);
            generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_SEQUENCES, writer);
        }
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("Alignments generation has failed:\n" + e.getMessage());
    }
    //++++++++++++++++ Pattern Lists ++++++++++++++++++
    try {
        if (!options.samplePriorOnly) {
            List<Microsatellite> microsatList = new ArrayList<Microsatellite>();
            for (AbstractPartitionData partition : options.dataPartitions) {
                // each PartitionData has one TreeLikelihood
                if (partition.getTaxonList() != null) {
                    switch (partition.getDataType().getType()) {
                        case DataType.NUCLEOTIDES:
                        case DataType.AMINO_ACIDS:
                        case DataType.CODONS:
                        case DataType.COVARION:
                        case DataType.TWO_STATES:
                            patternListGenerator.writePatternList((PartitionData) partition, writer);
                            break;
                        case DataType.GENERAL:
                        case DataType.CONTINUOUS:
                            // attribute patterns are generated in a later part of this method
                            break;
                        case DataType.MICRO_SAT:
                            // microsat does not have an alignment
                            patternListGenerator.writePatternList((PartitionPattern) partition, microsatList, writer);
                            break;
                        default:
                            throw new IllegalArgumentException("Unsupported data type");
                    }
                    writer.writeText("");
                }
            }
        }
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("Pattern lists generation has failed:\n" + e.getMessage());
    }
    generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_PATTERNS, writer);
    //++++++++++++++++ Tree Prior Model ++++++++++++++++++
    try {
        for (PartitionTreePrior prior : options.getPartitionTreePriors()) {
            treePriorGenerator.writeTreePriorModel(prior, writer);
            writer.writeText("");
        }
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("Tree prior model generation has failed:\n" + e.getMessage());
    }
    //++++++++++++++++ Starting Tree ++++++++++++++++++
    try {
        for (PartitionTreeModel model : options.getPartitionTreeModels()) {
            initialTreeGenerator.writeStartingTree(model, writer);
            writer.writeText("");
        }
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("Starting tree generation has failed:\n" + e.getMessage());
    }
    //++++++++++++++++ Tree Model +++++++++++++++++++
    try {
        for (PartitionTreeModel model : options.getPartitionTreeModels()) {
            treeModelGenerator.writeTreeModel(model, writer);
            writer.writeText("");
        }
        generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_TREE_MODEL, writer);
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("Tree model generation has failed:\n" + e.getMessage());
    }
    //++++++++++++++++ Statistics ++++++++++++++++++
    try {
        if (taxonSets != null && taxonSets.size() > 0 && !options.useStarBEAST) {
            tmrcaStatisticsGenerator.writeTMRCAStatistics(writer);
        }
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("TMRCA statistics generation has failed:\n" + e.getMessage());
    }
    //++++++++++++++++ Tree Prior Likelihood ++++++++++++++++++
    try {
        for (PartitionTreeModel model : options.getPartitionTreeModels()) {
            treePriorGenerator.writePriorLikelihood(model, writer);
            writer.writeText("");
        }
        for (PartitionTreePrior prior : options.getPartitionTreePriors()) {
            treePriorGenerator.writeMultiLociTreePriors(prior, writer);
        }
        generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_TREE_PRIOR, writer);
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("Tree prior likelihood generation has failed:\n" + e.getMessage());
    }
    //++++++++++++++++ Branch Rates Model ++++++++++++++++++
    try {
        for (PartitionClockModel model : options.getPartitionClockModels()) {
            clockModelGenerator.writeBranchRatesModel(model, writer);
            writer.writeText("");
        }
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("Branch rates model generation has failed:\n" + e.getMessage());
    }
    //++++++++++++++++ Substitution Model & Site Model ++++++++++++++++++
    try {
        for (PartitionSubstitutionModel model : options.getPartitionSubstitutionModels()) {
            substitutionModelGenerator.writeSubstitutionSiteModel(model, writer);
            writer.writeText("");
        }
        generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_SUBSTITUTION_MODEL, writer);
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("Substitution model or site model generation has failed:\n" + e.getMessage());
    }
    //++++++++++++++++ AllMus parameter ++++++++++++++++++
    try {
        for (PartitionClockModel model : options.getPartitionClockModels()) {
            clockModelGenerator.writeAllMus(model, writer);
        }
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("Clock model generation has failed:\n" + e.getMessage());
    }
    //++++++++++++++++ Site Model ++++++++++++++++++
    // for (PartitionSubstitutionModel model : options.getPartitionSubstitutionModels()) {
    //     substitutionModelGenerator.writeSiteModel(model, writer); // site model
    //     substitutionModelGenerator.writeAllMus(model, writer); // allMus
    //     writer.writeText("");
    // }
    generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_SITE_MODEL, writer);
    //++++++++++++++++ Tree Likelihood ++++++++++++++++++
    try {
        AncestralStatesComponentOptions ancestralStatesOptions =
                (AncestralStatesComponentOptions) options.getComponentOptions(AncestralStatesComponentOptions.class);
        Map<Pair<PartitionTreeModel, DataType>, List<PartitionData>> partitionLists =
                new HashMap<Pair<PartitionTreeModel, DataType>, List<PartitionData>>();
        List<AbstractPartitionData> otherPartitions = new ArrayList<AbstractPartitionData>();
        for (AbstractPartitionData partition : options.dataPartitions) {
            // generate tree likelihoods for alignment data partitions
            if (partition.getTaxonList() != null) {
                if (partition.getDataType().getType() == DataType.NUCLEOTIDES || partition.getDataType().getType() == DataType.AMINO_ACIDS) {
                    // all sequence partitions of the same type as the first go into the list for use in a
                    // MultipartitionTreeDataLikelihood; they must also share the same tree and not be doing
                    // ancestral reconstruction or counting
                    Pair<PartitionTreeModel, DataType> key =
                            new Pair<PartitionTreeModel, DataType>(partition.getPartitionTreeModel(), partition.getDataType());
                    List<PartitionData> partitions = partitionLists.get(key);
                    if (partitions == null) {
                        partitions = new ArrayList<PartitionData>();
                    }
                    partitions.add((PartitionData) partition);
                    partitionLists.put(key, partitions);
                } else {
                    otherPartitions.add(partition);
                }
            }
        }
        for (List<PartitionData> partitions : partitionLists.values()) {
            treeLikelihoodGenerator.writeTreeDataLikelihood(partitions, writer);
            writer.writeText("");
        }
        for (AbstractPartitionData partition : otherPartitions) {
            // generate tree likelihoods for the other data partitions
            if (partition.getTaxonList() != null) {
                if (partition instanceof PartitionData) {
                    treeLikelihoodGenerator.writeTreeLikelihood((PartitionData) partition, writer);
                    writer.writeText("");
                } else if (partition instanceof PartitionPattern) {
                    // microsat
                    treeLikelihoodGenerator.writeTreeLikelihood((PartitionPattern) partition, writer);
                    writer.writeText("");
                } else {
                    throw new GeneratorException("Found unrecognized partition:\n" + partition.getName());
                }
            }
        }
        generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_TREE_LIKELIHOOD, writer);
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("Tree likelihood generation has failed:\n" + e.getMessage());
    }
    //++++++++++++++++ *BEAST ++++++++++++++++++
    if (options.useStarBEAST) {
        //++++++++++++++++ species ++++++++++++++++++
        try {
            starBeastGenerator.writeSpecies(writer);
        } catch (Exception e) {
            e.printStackTrace();
            throw new GeneratorException("*BEAST species section generation has failed:\n" + e.getMessage());
        }
        //++++++++++++++++ Species Sets ++++++++++++++++++
        List<Taxa> speciesSets = options.speciesSets;
        try {
            if (speciesSets != null && speciesSets.size() > 0) {
                tmrcaStatisticsGenerator.writeTaxonSets(writer, speciesSets);
            }
        } catch (Exception e) {
            e.printStackTrace();
            throw new GeneratorException("Species sets generation has failed:\n" + e.getMessage());
        }
        //++++++++++++++++ trees ++++++++++++++++++
        try {
            if (speciesSets != null && speciesSets.size() > 0) {
                starBeastGenerator.writeStartingTreeForCalibration(writer);
            }
            starBeastGenerator.writeSpeciesTree(writer, speciesSets != null && speciesSets.size() > 0);
        } catch (Exception e) {
            e.printStackTrace();
            throw new GeneratorException("*BEAST trees generation has failed:\n" + e.getMessage());
        }
        //++++++++++++++++ Statistics ++++++++++++++++++
        try {
            if (speciesSets != null && speciesSets.size() > 0) {
                tmrcaStatisticsGenerator.writeTMRCAStatistics(writer);
            }
        } catch (Exception e) {
            e.printStackTrace();
            throw new GeneratorException("*BEAST TMRCA statistics generation has failed:\n" + e.getMessage());
        }
        //++++++++++++++++ prior and likelihood ++++++++++++++++++
        try {
            starBeastGenerator.writeSTARBEAST(writer);
        } catch (Exception e) {
            e.printStackTrace();
            throw new GeneratorException("*BEAST trees section generation has failed:\n" + e.getMessage());
        }
    }
    generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_TRAITS, writer);
    //++++++++++++++++ Operators ++++++++++++++++++
    try {
        generateInsertionPoint(ComponentGenerator.InsertionPoint.BEFORE_OPERATORS, writer);
        List<Operator> operators = options.selectOperators();
        operatorsGenerator.writeOperatorSchedule(operators, writer);
        writer.writeText("");
        generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_OPERATORS, writer);
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("Operators generation has failed:\n" + e.getMessage());
    }
    //++++++++++++++++ MCMC ++++++++++++++++++
    try {
        // XMLWriter writer, List<PartitionSubstitutionModel> models,
        writeMCMC(writer);
        writer.writeText("");
        generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_MCMC, writer);
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("MCMC or log generation has failed:\n" + e.getMessage());
    }
    //++++++++++++++++ ++++++++++++++++++
    try {
        writeTimerReport(writer);
        writer.writeText("");
        if (options.performTraceAnalysis) {
            writeTraceAnalysis(writer);
        }
        if (options.generateCSV) {
            for (PartitionTreePrior prior : options.getPartitionTreePriors()) {
                treePriorGenerator.writeEBSPAnalysisToCSVfile(prior, writer);
            }
        }
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("The last part of XML generation has failed:\n" + e.getMessage());
    }
    writer.writeCloseTag("beast");
    writer.flush();
    writer.close();
}
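generateXML uses the same grouping idiom as the citation code above: a Pair serves as a composite map key, and the per-key list is created on first use. Below is a self-contained sketch of that pattern, reusing the Pair sketch shown earlier; the class and variable names here are illustrative stand-ins, not beast-mcmc classes.

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class PairGroupingExample {

    public static void main(String[] args) {
        // group partition names under a composite (data type, tree id) key,
        // as generateXML does with Pair<PartitionTreeModel, DataType>
        Map<Pair<String, Integer>, List<String>> groups =
                new LinkedHashMap<Pair<String, Integer>, List<String>>();

        String[][] rows = {
                {"nucleotides", "1", "gene1"},
                {"nucleotides", "1", "gene2"},
                {"aminoAcids", "2", "protein1"},
        };

        for (String[] row : rows) {
            Pair<String, Integer> key =
                    new Pair<String, Integer>(row[0], Integer.valueOf(row[1]));

            // get-or-create, exactly as in the BEAST code above;
            // on Java 8+ this block collapses to
            // groups.computeIfAbsent(key, k -> new ArrayList<>()).add(row[2]);
            List<String> partitions = groups.get(key);
            if (partitions == null) {
                partitions = new ArrayList<String>();
                groups.put(key, partitions);
            }
            partitions.add(row[2]);
        }

        // prints {(nucleotides, 1)=[gene1, gene2], (aminoAcids, 2)=[protein1]}
        System.out.println(groups);
    }
}

Two distinct Pair objects built from equal components hash to the same bucket, so gene1 and gene2 land in one list; this is exactly why equals and hashCode on the Pair class matter for both methods shown on this page.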