Example usage of dr.app.beauti.util.XMLWriter in the beast-mcmc project (beast-dev):
method outputBDSSXML of class GetDateFromTree.
/**
 * Writes a complete BEAST XML analysis file that ties one or more fixed input
 * trees to the birth-death serial sampling (BDSS) tree prior of
 * Stadler et al (2011). Trees 2..combiTrees share all BDSS parameters with the
 * first tree except their own origin parameter.
 *
 * @param curD   date label used in the output log file name ("T&lt;curD&gt;_&lt;index&gt;.log")
 * @param index  replicate index used in the output log file name
 * @param tips   tip date table forwarded to writeHeadAndTaxa
 * @param origin starting origin values, one per tree (origin[t - 1] for tree t)
 * @param trees  fixed starting trees; trees[0] is the primary tree
 * @throws IOException if writing the XML fails
 */
private static void outputBDSSXML(String curD, int index, double[][] tips, double[] origin, Tree[] trees) throws IOException {
    XMLWriter w = writeHeadAndTaxa(curD, index, tips);
    w.flush();
    // --- BDSS model for the primary tree: concrete parameters with start values.
    w.writeText("\t<!-- Stadler et al (2011) : Estimating the basic reproductive number from viral sequence data, Submitted.-->\n"
            + "\t<birthDeathSerialSampling id=\"bdss\" units=\"substitutions\" hasFinalSample=\"false\">\n"
            + "\t\t<birthRate>\n"
            + "\t\t\t<parameter id=\"bdss.birthRate\" value=\"8.23E-4\" lower=\"0.0\" upper=\"1000.0\"/>\n"
            + "\t\t</birthRate>\n");
    // Death is parameterised either relative to birth or as an absolute rate.
    if (isRelativeDeath) {
        w.writeText("\t\t<relativeDeathRate>\n"
                + "\t\t\t<parameter id=\"bdss.relativeDeathRate\" value=\"0.107\" lower=\"0.0\" upper=\"100.0\"/>\n"
                + "\t\t</relativeDeathRate>\n");
    } else {
        w.writeText("\t\t<deathRate>\n"
                + "\t\t\t<parameter id=\"bdss.deathRate\" value=\"9.46e-5\" lower=\"0.0\" upper=\"1000.0\"/>\n"
                + "\t\t</deathRate>\n");
    }
    w.writeText("\t\t<sampleProbability>\n"
            + "\t\t\t<parameter id=\"bdss.sampleProbability\" value=\"0.01\" lower=\"0.0\" upper=\"1.0\"/>\n"
            + "\t\t</sampleProbability>\n"
            + "\t\t<psi>\n"
            + "\t\t\t<parameter id=\"bdss.psi\" value=\"2.78E-4\" lower=\"0.0\" upper=\"100.0\"/>\n"
            + "\t\t</psi>\n"
            + "\t\t<origin>\n"
            + "\t\t\t<parameter id=\"bdss.origin\" value=\"" + origin[0] + "\" lower=\"0.0\" upper=\"14000.0\"/>\n"
            + "\t\t</origin>\n"
            + "\t\t<sampleBecomesNonInfectiousProb>\n"
            + "\t\t\t<parameter id=\"bdss.r\" value=\"1.0\"/>\n"
            + "\t\t</sampleBecomesNonInfectiousProb>\n"
            + "\t</birthDeathSerialSampling>\n");
    // --- BDSS models for the extra trees: idref the shared parameters,
    //     but give each its own origin parameter.
    for (int tree = 2; tree <= combiTrees; tree++) {
        w.writeText("\t<!-- Stadler et al (2011) : Estimating the basic reproductive number from viral sequence data, Submitted.-->\n"
                + "\t<birthDeathSerialSampling id=\"bdss" + tree
                + "\" units=\"substitutions\" hasFinalSample=\"false\">\n"
                + "\t\t<birthRate>\n"
                + "\t\t\t<parameter idref=\"bdss.birthRate\"/>\n"
                + "\t\t</birthRate>\n");
        if (isRelativeDeath) {
            w.writeText("\t\t<relativeDeathRate>\n"
                    + "\t\t\t<parameter idref=\"bdss.relativeDeathRate\"/>\n"
                    + "\t\t</relativeDeathRate>\n");
        } else {
            w.writeText("\t\t<deathRate>\n"
                    + "\t\t\t<parameter idref=\"bdss.deathRate\"/>\n"
                    + "\t\t</deathRate>\n");
        }
        w.writeText("\t\t<sampleProbability>\n"
                + "\t\t\t<parameter idref=\"bdss.sampleProbability\"/>\n"
                + "\t\t</sampleProbability>\n"
                + "\t\t<psi>\n"
                + "\t\t\t<parameter idref=\"bdss.psi\"/>\n"
                + "\t\t</psi>\n"
                + "\t\t<origin>\n"
                + "\t\t\t<parameter id=\"bdss" + tree + ".origin\" value=\"" + origin[tree - 1] + "\" lower=\"0.0\" upper=\"14000.0\"/>\n"
                + "\t\t</origin>\n"
                + "\t\t<sampleBecomesNonInfectiousProb>\n"
                + "\t\t\t<parameter idref=\"bdss.r\"/>\n"
                + "\t\t</sampleBecomesNonInfectiousProb>\n"
                + "\t</birthDeathSerialSampling>\n");
    }
    w.flush();
    // --- Fixed starting trees, written verbatim as <newick> elements.
    w.writeText("\n" + "\t<!-- Generate a random starting tree under the coalescent process -->\n" + "\t<newick id=\"startingTree\">\n");
    w.write(trees[0].toString());
    w.writeText("\n" + "\t</newick>\n");
    w.flush();
    for (int tree = 2; tree <= combiTrees; tree++) {
        w.writeText("\n" + "\t<!-- Generate a random starting tree under the coalescent process -->\n" + "\t<newick id=\"startingTree" + tree + "\">\n");
        // insetTreeIndex renames the taxa so each extra tree gets unique labels.
        w.write(insetTreeIndex(tree, trees[tree - 1].toString()));
        w.writeText("\n" + "\t</newick>\n");
        w.flush();
    }
    // --- One treeModel per starting tree.
    w.writeText("\n" + "\t<!-- Generate a tree model -->\n"
            + "\t<treeModel id=\"treeModel\">\n"
            + "\t\t<coalescentTree idref=\"startingTree\"/>\n"
            + "\t\t<rootHeight>\n"
            + "\t\t\t<parameter id=\"treeModel.rootHeight\"/>\n"
            + "\t\t</rootHeight>\n"
            + "\t\t<nodeHeights internalNodes=\"true\">\n"
            + "\t\t\t<parameter id=\"treeModel.internalNodeHeights\"/>\n"
            + "\t\t</nodeHeights>\n"
            + "\t\t<nodeHeights internalNodes=\"true\" rootNode=\"true\">\n"
            + "\t\t\t<parameter id=\"treeModel.allInternalNodeHeights\"/>\n"
            + "\t\t</nodeHeights>\n"
            + "\n" + "\t\t<!-- END Tip date sampling -->\n"
            + "\t</treeModel>\n");
    for (int tree = 2; tree <= combiTrees; tree++) {
        w.writeText("\n" + "\t<!-- Generate a tree model -->\n"
                + "\t<treeModel id=\"treeModel" + tree + "\">\n"
                + "\t\t<coalescentTree idref=\"startingTree" + tree + "\"/>\n"
                + "\t\t<rootHeight>\n"
                + "\t\t\t<parameter id=\"treeModel" + tree + ".rootHeight\"/>\n"
                + "\t\t</rootHeight>\n"
                + "\t\t<nodeHeights internalNodes=\"true\">\n"
                + "\t\t\t<parameter id=\"treeModel" + tree + ".internalNodeHeights\"/>\n"
                + "\t\t</nodeHeights>\n"
                + "\t\t<nodeHeights internalNodes=\"true\" rootNode=\"true\">\n"
                + "\t\t\t<parameter id=\"treeModel" + tree + ".allInternalNodeHeights\"/>\n"
                + "\t\t</nodeHeights>\n"
                + "\n" + "\t\t<!-- END Tip date sampling -->\n"
                + "\t</treeModel>\n");
    }
    w.flush();
    // --- One speciation likelihood per tree model / BDSS model pair.
    w.writeText("\n" + "\t<!-- Generate a speciation likelihood for Yule or Birth Death -->\n"
            + "\t<speciationLikelihood id=\"speciation\">\n"
            + "\t\t<model>\n"
            + "\t\t\t<birthDeathSerialSampling idref=\"bdss\"/>\n"
            + "\t\t</model>\n"
            + "\t\t<speciesTree>\n"
            + "\t\t\t<treeModel idref=\"treeModel\"/>\n"
            + "\t\t</speciesTree>\n"
            + "\t</speciationLikelihood>\n");
    for (int tree = 2; tree <= combiTrees; tree++) {
        w.writeText("\n" + "\t<!-- Generate a speciation likelihood for Yule or Birth Death -->\n"
                + "\t<speciationLikelihood id=\"speciation" + tree + "\">\n"
                + "\t\t<model>\n"
                + "\t\t\t<birthDeathSerialSampling idref=\"bdss" + tree + "\"/>\n"
                + "\t\t</model>\n"
                + "\t\t<speciesTree>\n"
                + "\t\t\t<treeModel idref=\"treeModel" + tree + "\"/>\n"
                + "\t\t</speciesTree>\n"
                + "\t</speciationLikelihood>\n");
    }
    // --- Operators: scale birth, death and all origin parameters.
    w.writeText("\n" + "\t<!-- Define operators -->\n"
            + "\t<operators id=\"operators\">\n"
            + "\t\t<scaleOperator scaleFactor=\"0.75\" weight=\"10\">\n"
            + "\t\t\t<parameter idref=\"bdss.birthRate\"/>\n"
            + "\t\t</scaleOperator>\n"
            + "\t\t<scaleOperator scaleFactor=\"0.75\" weight=\"10\">\n");
    if (isRelativeDeath) {
        w.writeText("\t\t\t<parameter idref=\"bdss.relativeDeathRate\"/>\n");
    } else {
        w.writeText("\t\t\t<parameter idref=\"bdss.deathRate\"/>\n");
    }
    w.writeText("\t\t</scaleOperator>\n");
    if (!isRelativeDeath) {
        // Joint scale of birth and death rates (only meaningful for absolute rates).
        w.writeText("\t\t<upDownOperator scaleFactor=\"0.75\" weight=\"10\">\n"
                + "\t\t\t<up>\n"
                + "\t\t\t\t<parameter idref=\"bdss.birthRate\"/>\n"
                + "\t\t\t\t<parameter idref=\"bdss.deathRate\"/>\n"
                + "\t\t\t</up>\n"
                + "\t\t\t<down/>\n"
                + "\t\t</upDownOperator>\n");
    }
    w.writeText("\t\t<scaleOperator scaleFactor=\"0.75\" weight=\"10\">\n"
            + "\t\t\t<parameter idref=\"bdss.origin\"/>\n"
            + "\t\t</scaleOperator>\n");
    for (int tree = 2; tree <= combiTrees; tree++) {
        w.writeText("\n"
                + "\t\t<scaleOperator scaleFactor=\"0.75\" weight=\"10\">\n"
                + "\t\t\t<parameter idref=\"bdss" + tree + ".origin\"/>\n"
                + "\t\t</scaleOperator>\n");
    }
    w.writeText("\n" + "\t</operators>");
    w.flush();
    // --- MCMC: priors, likelihood, operators.
    w.writeText("\n" + "\t<!-- Define MCMC -->\n"
            + "\t<mcmc id=\"mcmc\" chainLength=\"10000000\" autoOptimize=\"true\">\n"
            + "\t\t<posterior id=\"posterior\">\n"
            + "\t\t\t<prior id=\"prior\">\n"
            + "\t\t\t\t<uniformPrior lower=\"0.0\" upper=\"1000.0\">\n"
            + "\t\t\t\t\t<parameter idref=\"bdss.birthRate\"/>\n"
            + "\t\t\t\t</uniformPrior>\n");
    if (isRelativeDeath) {
        w.writeText("\t\t\t\t<uniformPrior lower=\"0.0\" upper=\"100.0\">\n" + "\t\t\t<parameter idref=\"bdss.relativeDeathRate\"/>\n");
    } else {
        w.writeText("\t\t\t\t<uniformPrior lower=\"0.0\" upper=\"1000.0\">\n" + "\t\t\t<parameter idref=\"bdss.deathRate\"/>\n");
    }
    w.writeText("\t\t\t\t</uniformPrior>\n");
    w.writeText("\n" + "\t\t\t\t<uniformPrior lower=\"0.0\" upper=\"1.7976931348623157E308\">\n"
            + "\t\t\t\t\t<parameter idref=\"bdss.origin\"/>\n"
            + "\t\t\t\t</uniformPrior>\n");
    for (int tree = 2; tree <= combiTrees; tree++) {
        w.writeText("\n" + "\t\t\t\t<uniformPrior lower=\"0.0\" upper=\"1.7976931348623157E308\">\n"
                + "\t\t\t\t\t<parameter idref=\"bdss" + tree + ".origin\"/>\n"
                + "\t\t\t\t</uniformPrior>\n");
    }
    w.writeText("\n" + "\t\t\t</prior>\n"
            + "\t\t\t<likelihood id=\"likelihood\">\n"
            + "\t\t\t\t<speciationLikelihood idref=\"speciation\"/>\n");
    for (int tree = 2; tree <= combiTrees; tree++) {
        w.writeText("\t\t\t\t<speciationLikelihood idref=\"speciation" + tree + "\"/>\n");
    }
    w.writeText("\t\t\t</likelihood>\n" + "\t\t</posterior>\n" + "\t\t<operators idref=\"operators\"/>\n");
    w.flush();
    // --- Screen log (coarse) and file log (fine-grained).
    w.writeText("\n" + "\t\t<!-- write log to screen -->\n"
            + "\t\t<log id=\"screenLog\" logEvery=\"100000\">\n"
            + "\t\t\t<column label=\"Posterior\" dp=\"4\" width=\"12\">\n"
            + "\t\t\t\t<posterior idref=\"posterior\"/>\n"
            + "\t\t\t</column>\n"
            + "\t\t\t<column label=\"Prior\" dp=\"4\" width=\"12\">\n"
            + "\t\t\t\t<prior idref=\"prior\"/>\n"
            + "\t\t\t</column>\n"
            + "\t\t\t<column label=\"speciation\" dp=\"4\" width=\"12\">\n"
            + "\t\t\t\t<likelihood idref=\"speciation\"/>\n"
            + "\t\t\t</column>\n"
            + "\t\t\t<column label=\"rootHeight\" sf=\"6\" width=\"12\">\n"
            + "\t\t\t\t<parameter idref=\"treeModel.rootHeight\"/>\n"
            + "\t\t\t</column>\n"
            + "\t\t\t<parameter idref=\"bdss.birthRate\"/>\n");
    if (isRelativeDeath) {
        w.writeText("\t\t\t<parameter idref=\"bdss.relativeDeathRate\"/>\n");
    } else {
        w.writeText("\t\t\t<parameter idref=\"bdss.deathRate\"/>\n");
    }
    w.writeText("\t\t\t<parameter idref=\"bdss.psi\"/>\n" + "\t\t\t<parameter idref=\"bdss.r\"/>\n");
    for (int tree = 2; tree <= combiTrees; tree++) {
        w.writeText("\t\t\t<parameter idref=\"treeModel" + tree + ".rootHeight\"/>\n"
                + "\t\t\t<parameter idref=\"bdss" + tree + ".origin\"/>\n");
    }
    w.writeText("\t\t</log>\n" + "\n" + "\t\t<!-- write log to file -->\n"
            + "\t\t<log id=\"fileLog\" logEvery=\"1000\" fileName=\"T" + curD + "_" + Integer.toString(index) + ".log\" overwrite=\"false\">\n"
            + "\t\t\t<posterior idref=\"posterior\"/>\n"
            + "\t\t\t<prior idref=\"prior\"/>\n"
            + "\t\t\t<parameter idref=\"treeModel.rootHeight\"/>\n"
            + "\t\t\t<parameter idref=\"bdss.birthRate\"/>\n");
    if (isRelativeDeath) {
        w.writeText("\t\t\t<parameter idref=\"bdss.relativeDeathRate\"/>\n");
    } else {
        w.writeText("\t\t\t<parameter idref=\"bdss.deathRate\"/>\n");
    }
    w.writeText("\t\t\t<parameter idref=\"bdss.sampleProbability\"/>\n"
            + "\t\t\t<parameter idref=\"bdss.psi\"/>\n"
            + "\t\t\t<parameter idref=\"bdss.origin\"/>\n"
            + "\t\t\t<parameter idref=\"bdss.r\"/>\n");
    for (int tree = 2; tree <= combiTrees; tree++) {
        w.writeText("\t\t\t<parameter idref=\"treeModel" + tree + ".rootHeight\"/>\n"
                + "\t\t\t<parameter idref=\"bdss" + tree + ".origin\"/>\n");
    }
    w.writeText("\n" + "\t\t\t<speciationLikelihood idref=\"speciation\"/>\n");
    for (int tree = 2; tree <= combiTrees; tree++) {
        w.writeText("\t\t\t\t<speciationLikelihood idref=\"speciation" + tree + "\"/>\n");
    }
    w.writeText("\t\t</log>\n" + "\n"
            + "\t</mcmc>\n"
            + "\t<report>\n"
            + "\t\t<property name=\"timer\">\n"
            + "\t\t\t<mcmc idref=\"mcmc\"/>\n"
            + "\t\t</property>\n"
            + "\t</report>\n"
            + "</beast>\n");
    w.flush();
    w.close();
}
Example usage of dr.app.beauti.util.XMLWriter in the beast-mcmc project (beast-dev):
method outputExponetialXML of class GetDateFromTree.
/**
 * Writes a complete BEAST XML analysis file that ties one or more fixed input
 * trees to a coalescent prior with exponential population growth. All trees
 * share the growth-rate parameter; each tree gets its own population size.
 *
 * NOTE: the method name keeps the historical spelling "Exponetial" because
 * callers depend on it.
 *
 * @param curD   date label used in the output log file name ("E&lt;curD&gt;_&lt;index&gt;.log")
 * @param index  replicate index used in the output log file name
 * @param tips   tip date table forwarded to writeHeadAndTaxa
 * @param origin unused here; kept for signature symmetry with outputBDSSXML
 * @param trees  fixed starting trees; trees[0] is the primary tree
 * @throws IOException if writing the XML fails
 */
private static void outputExponetialXML(String curD, int index, double[][] tips, double[] origin, Tree[] trees) throws IOException {
    XMLWriter w = writeHeadAndTaxa(curD, index, tips);
    w.flush();
    // --- Demographic model for the primary tree: concrete parameters.
    w.writeText("\n" + "\t<exponentialGrowth id=\"exponential\" units=\"years\">\n"
            + "\t\t<populationSize>\n"
            + "\t\t\t<parameter id=\"exponential.popSize\" value=\"100.0\" lower=\"0.0\" upper=\"Infinity\"/>\n"
            + "\t\t</populationSize>\n"
            + "\t\t<growthRate>\n"
            + "\t\t\t<parameter id=\"exponential.growthRate\" value=\"4.50E-4\" lower=\"-Infinity\" upper=\"Infinity\"/>\n"
            + "\t\t</growthRate>\n"
            + "\t</exponentialGrowth>\n");
    // --- Extra trees: own popSize, shared growth rate via idref.
    for (int tree = 2; tree <= combiTrees; tree++) {
        w.writeText("\n" + "\t<exponentialGrowth id=\"exponential" + tree + "\" units=\"years\">\n"
                + "\t\t<populationSize>\n"
                + "\t\t\t<parameter id=\"exponential" + tree + ".popSize\" value=\"100.0\" lower=\"0.0\" upper=\"Infinity\"/>\n"
                + "\t\t</populationSize>\n"
                + "\t\t<growthRate>\n"
                + "\t\t\t<parameter idref=\"exponential.growthRate\"/>\n"
                + "\t\t</growthRate>\n"
                + "\t</exponentialGrowth>\n");
    }
    w.flush();
    // --- Fixed starting trees, written verbatim as <newick> elements.
    w.writeText("\n" + "\t<!-- Generate a random starting tree under the coalescent process -->\n" + "\t<newick id=\"startingTree\">\n");
    w.write(trees[0].toString());
    w.writeText("\n" + "\t</newick>\n");
    w.flush();
    for (int tree = 2; tree <= combiTrees; tree++) {
        w.writeText("\n" + "\t<!-- Generate a random starting tree under the coalescent process -->\n" + "\t<newick id=\"startingTree" + tree + "\">\n");
        // insetTreeIndex renames the taxa so each extra tree gets unique labels.
        w.write(insetTreeIndex(tree, trees[tree - 1].toString()));
        w.writeText("\n" + "\t</newick>\n");
        w.flush();
    }
    // --- One treeModel per starting tree.
    w.writeText("\n" + "\t<!-- Generate a tree model -->\n"
            + "\t<treeModel id=\"treeModel\">\n"
            + "\t\t<coalescentTree idref=\"startingTree\"/>\n"
            + "\t\t<rootHeight>\n"
            + "\t\t\t<parameter id=\"treeModel.rootHeight\"/>\n"
            + "\t\t</rootHeight>\n"
            + "\t\t<nodeHeights internalNodes=\"true\">\n"
            + "\t\t\t<parameter id=\"treeModel.internalNodeHeights\"/>\n"
            + "\t\t</nodeHeights>\n"
            + "\t\t<nodeHeights internalNodes=\"true\" rootNode=\"true\">\n"
            + "\t\t\t<parameter id=\"treeModel.allInternalNodeHeights\"/>\n"
            + "\t\t</nodeHeights>\n"
            + "\n" + "\t\t<!-- END Tip date sampling -->\n"
            + "\t</treeModel>\n");
    for (int tree = 2; tree <= combiTrees; tree++) {
        w.writeText("\n" + "\t<!-- Generate a tree model -->\n"
                + "\t<treeModel id=\"treeModel" + tree + "\">\n"
                + "\t\t<coalescentTree idref=\"startingTree" + tree + "\"/>\n"
                + "\t\t<rootHeight>\n"
                + "\t\t\t<parameter id=\"treeModel" + tree + ".rootHeight\"/>\n"
                + "\t\t</rootHeight>\n"
                + "\t\t<nodeHeights internalNodes=\"true\">\n"
                + "\t\t\t<parameter id=\"treeModel" + tree + ".internalNodeHeights\"/>\n"
                + "\t\t</nodeHeights>\n"
                + "\t\t<nodeHeights internalNodes=\"true\" rootNode=\"true\">\n"
                + "\t\t\t<parameter id=\"treeModel" + tree + ".allInternalNodeHeights\"/>\n"
                + "\t\t</nodeHeights>\n"
                + "\n" + "\t\t<!-- END Tip date sampling -->\n"
                + "\t</treeModel>\n");
    }
    w.flush();
    // --- One coalescent likelihood per tree model / demographic model pair.
    w.writeText("\n" + "\t<coalescentLikelihood id=\"coalescent\">\n"
            + "\t\t<model>\n"
            + "\t\t\t<exponentialGrowth idref=\"exponential\"/>\n"
            + "\t\t</model>\n"
            + "\t\t<populationTree>\n"
            + "\t\t\t<treeModel idref=\"treeModel\"/>\n"
            + "\t\t</populationTree>\n"
            + "\t</coalescentLikelihood>\n");
    for (int tree = 2; tree <= combiTrees; tree++) {
        w.writeText("\n" + "\t<coalescentLikelihood id=\"coalescent" + tree + "\">\n"
                + "\t\t<model>\n"
                + "\t\t\t<exponentialGrowth idref=\"exponential" + tree + "\"/>\n"
                + "\t\t</model>\n"
                + "\t\t<populationTree>\n"
                + "\t\t\t<treeModel idref=\"treeModel" + tree + "\"/>\n"
                + "\t\t</populationTree>\n"
                + "\t</coalescentLikelihood>\n");
    }
    // --- Operators on growth rate and all population sizes.
    w.writeText("\n" + "\t<!-- Define operators -->\n"
            + "\t<operators id=\"operators\">\n"
            + "\t<randomWalkOperator windowSize=\"1.0\" weight=\"10\">\n"
            + "\t\t<parameter idref=\"exponential.growthRate\"/>\n"
            + "\t</randomWalkOperator>\n"
            + "\t<scaleOperator scaleFactor=\"0.75\" weight=\"10\">\n"
            + "\t\t<parameter idref=\"exponential.popSize\"/>\n"
            + "\t</scaleOperator>\n");
    for (int tree = 2; tree <= combiTrees; tree++) {
        w.writeText("\n" + "\t<scaleOperator scaleFactor=\"0.75\" weight=\"10\">\n"
                + "\t\t<parameter idref=\"exponential" + tree + ".popSize\"/>\n"
                + "\t</scaleOperator>\n");
    }
    w.writeText("\n" + "\t</operators>");
    w.flush();
    // --- MCMC: priors plus coalescent likelihoods. NOTE(review): the
    //     coalescent likelihoods sit inside <prior> (no <likelihood> element),
    //     unlike outputBDSSXML — presumably intentional, treating the
    //     coalescent as part of the prior; confirm before changing.
    w.writeText("\n" + "\t<!-- Define MCMC -->\n"
            + "\t<mcmc id=\"mcmc\" chainLength=\"10000000\" autoOptimize=\"true\">\n"
            + "\t\t<posterior id=\"posterior\">\n"
            + "\t\t\t<prior id=\"prior\">\n"
            + "\t\t\t\t<oneOnXPrior>\n"
            + "\t\t\t\t\t<parameter idref=\"exponential.popSize\"/>\n");
    for (int tree = 2; tree <= combiTrees; tree++) {
        w.writeText("\t\t\t\t\t<parameter idref=\"exponential" + tree + ".popSize\"/>\n");
    }
    // BUGFIX: the newline previously sat inside the empty-element tag
    // ("...\"coalescent\"\n/>"); move it after "/>" like every sibling line.
    w.writeText("\t\t\t\t</oneOnXPrior>\n" + "\n"
            + "\t\t\t\t<laplacePrior mean=\"0.0010\" scale=\"2.0467423048835964E-4\">\n"
            + "\t\t\t\t\t<parameter idref=\"exponential.growthRate\"/>\n"
            + "\t\t\t\t</laplacePrior>\n"
            + "\t\t\t\t<coalescentLikelihood idref=\"coalescent\"/>\n");
    for (int tree = 2; tree <= combiTrees; tree++) {
        w.writeText("\t\t\t\t<coalescentLikelihood idref=\"coalescent" + tree + "\"/>\n");
    }
    w.writeText("\n" + "\t\t\t</prior>\n" + "\t\t</posterior>\n" + "\t\t<operators idref=\"operators\"/>\n");
    w.flush();
    // --- Screen log (coarse) and file log (fine-grained).
    w.writeText("\n" + "\t\t<!-- write log to screen -->\n"
            + "\t\t<log id=\"screenLog\" logEvery=\"100000\">\n"
            + "\t\t\t<column label=\"Posterior\" dp=\"4\" width=\"12\">\n"
            + "\t\t\t\t<posterior idref=\"posterior\"/>\n"
            + "\t\t\t</column>\n"
            + "\t\t\t<column label=\"Prior\" dp=\"4\" width=\"12\">\n"
            + "\t\t\t\t<prior idref=\"prior\"/>\n"
            + "\t\t\t</column>\n"
            + "\t\t\t<column label=\"coalescentLikelihood\" dp=\"4\" width=\"12\">\n"
            + "\t\t\t\t<coalescentLikelihood idref=\"coalescent\"/>\n"
            + "\t\t\t</column>\n"
            + "\t\t\t<column label=\"rootHeight\" sf=\"6\" width=\"12\">\n"
            + "\t\t\t\t<parameter idref=\"treeModel.rootHeight\"/>\n"
            + "\t\t\t</column>\n"
            + "\t\t\t<parameter idref=\"exponential.growthRate\"/>\n"
            + "\t\t\t<parameter idref=\"exponential.popSize\"/>\n");
    for (int tree = 2; tree <= combiTrees; tree++) {
        w.writeText("\t\t\t<parameter idref=\"treeModel" + tree + ".rootHeight\"/>\n"
                + "\t\t\t<parameter idref=\"exponential" + tree + ".popSize\"/>\n");
    }
    w.writeText("\t\t</log>\n" + "\n" + "\t\t<!-- write log to file -->\n"
            + "\t\t<log id=\"fileLog\" logEvery=\"1000\" fileName=\"E" + curD + "_" + Integer.toString(index) + ".log\" overwrite=\"false\">\n"
            + "\t\t\t<posterior idref=\"posterior\"/>\n"
            + "\t\t\t<prior idref=\"prior\"/>\n"
            + "\t\t\t<parameter idref=\"treeModel.rootHeight\"/>\n"
            + "\t\t\t<parameter idref=\"exponential.growthRate\"/>\n"
            + "\t\t\t<parameter idref=\"exponential.popSize\"/>\n");
    for (int tree = 2; tree <= combiTrees; tree++) {
        w.writeText("\t\t\t<parameter idref=\"treeModel" + tree + ".rootHeight\"/>\n"
                + "\t\t\t<parameter idref=\"exponential" + tree + ".popSize\"/>\n");
    }
    w.writeText("\n" + "\t\t\t<coalescentLikelihood idref=\"coalescent\"/>\n");
    for (int tree = 2; tree <= combiTrees; tree++) {
        w.writeText("\t\t\t\t<coalescentLikelihood idref=\"coalescent" + tree + "\"/>\n");
    }
    w.writeText("\t\t</log>\n" + "\n"
            + "\t</mcmc>\n"
            + "\t<report>\n"
            + "\t\t<property name=\"timer\">\n"
            + "\t\t\t<mcmc idref=\"mcmc\"/>\n"
            + "\t\t</property>\n"
            + "\t</report>\n"
            + "</beast>\n");
    w.flush();
    w.close();
}
Example usage of dr.app.beauti.util.XMLWriter in the beast-mcmc project (beast-dev):
method generateXML of class BeastGenerator.
/**
 * Generates a complete BEAST XML input file from the current BEAUti options.
 * Sections are written in BEAST's required order (taxa, alignments, patterns,
 * tree priors, starting trees, tree models, statistics, clock/substitution
 * models, tree likelihoods, operators, MCMC, reports), with component
 * insertion points fired between them. Each section wraps its own exceptions
 * in a GeneratorException naming the failed stage.
 *
 * @param file the destination file to write the XML to
 * @throws GeneratorException if any generation stage fails
 * @throws java.io.IOException if the file cannot be written
 * @throws dr.app.util.Arguments.ArgumentException if an argument is invalid
 */
public void generateXML(File file) throws GeneratorException, IOException, Arguments.ArgumentException {
    XMLWriter writer = new XMLWriter(new BufferedWriter(new FileWriter(file)));
    writer.writeText("<?xml version=\"1.0\" standalone=\"yes\"?>");
    writer.writeComment("Generated by BEAUTi " + VERSION.getVersionString(), " by Alexei J. Drummond, Andrew Rambaut and Marc A. Suchard", " Department of Computer Science, University of Auckland and", " Institute of Evolutionary Biology, University of Edinburgh", " David Geffen School of Medicine, University of California, Los Angeles", " http://beast.community/");
    writer.writeOpenTag("beast", new Attribute.Default<String>("version", BeautiApp.VERSION.getVersion()));
    writer.writeText("");
    // this gives any added implementations of the 'Component' interface a
    // chance to generate XML at this point in the BEAST file.
    generateInsertionPoint(ComponentGenerator.InsertionPoint.BEFORE_TAXA, writer);
    if (options.originDate != null) {
        // Create a dummy taxon whose job is to specify the origin date
        Taxon originTaxon = new Taxon("originTaxon");
        options.originDate.setUnits(options.units);
        originTaxon.setDate(options.originDate);
        writeTaxon(originTaxon, true, false, writer);
    }
    // ++++++++++++++++ Taxon List ++++++++++++++++++
    try {
        // write complete taxon list
        writeTaxa(options.taxonList, writer);
        writer.writeText("");
        if (!options.hasIdenticalTaxa()) {
            // write all taxa in each gene tree regarding each data partition,
            for (AbstractPartitionData partition : options.dataPartitions) {
                if (partition.getTaxonList() != null) {
                    writeDifferentTaxa(partition, writer);
                }
            }
        } else {
            // microsat partitions may still need their own taxa when masked
            for (PartitionPattern partitionPattern : options.getPartitionPattern()) {
                if (partitionPattern.getTaxonList() != null && partitionPattern.getPatterns().hasMask()) {
                    writeDifferentTaxa(partitionPattern, writer);
                }
            }
        }
    } catch (Exception e) {
        e.printStackTrace(System.err);
        throw new GeneratorException("Taxon list generation has failed:\n" + e.getMessage());
    }
    // ++++++++++++++++ Taxon Sets ++++++++++++++++++
    List<Taxa> taxonSets = options.taxonSets;
    try {
        if (taxonSets != null && taxonSets.size() > 0) {
            tmrcaStatisticsGenerator.writeTaxonSets(writer, taxonSets);
        }
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("Taxon sets generation has failed:\n" + e.getMessage());
    }
    generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_TAXA, writer);
    // ++++++++++++++++ Alignments ++++++++++++++++++
    List<Alignment> alignments = new ArrayList<Alignment>();
    try {
        for (AbstractPartitionData partition : options.dataPartitions) {
            Alignment alignment = null;
            if (partition instanceof PartitionData) {
                // microsat has no alignment
                alignment = ((PartitionData) partition).getAlignment();
            }
            // each distinct alignment is written once, however many partitions share it
            if (alignment != null && !alignments.contains(alignment)) {
                alignments.add(alignment);
            }
        }
        if (alignments.size() > 0) {
            alignmentGenerator.writeAlignments(alignments, writer);
            generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_SEQUENCES, writer);
        }
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("Alignments generation has failed:\n" + e.getMessage());
    }
    // ++++++++++++++++ Pattern Lists ++++++++++++++++++
    try {
        // Construct pattern lists even if sampling from a null alignment
        // if (!options.samplePriorOnly) {
        List<Microsatellite> microsatList = new ArrayList<Microsatellite>();
        for (AbstractPartitionData partition : options.dataPartitions) {
            // Each PD has one TreeLikelihood
            if (partition.getTaxonList() != null) {
                switch(partition.getDataType().getType()) {
                    case DataType.NUCLEOTIDES:
                    case DataType.AMINO_ACIDS:
                    case DataType.CODONS:
                    case DataType.COVARION:
                    case DataType.TWO_STATES:
                        patternListGenerator.writePatternList((PartitionData) partition, writer);
                        break;
                    case DataType.GENERAL:
                    case DataType.CONTINUOUS:
                        // attribute patterns which is generated next bit of this method.
                        break;
                    case DataType.MICRO_SAT:
                        // microsat does not have alignment
                        patternListGenerator.writePatternList((PartitionPattern) partition, microsatList, writer);
                        break;
                    default:
                        throw new IllegalArgumentException("Unsupported data type");
                }
                writer.writeText("");
            }
        }
        // }
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("Pattern lists generation has failed:\n" + e.getMessage());
    }
    generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_PATTERNS, writer);
    // ++++++++++++++++ Tree Prior Model ++++++++++++++++++
    try {
        for (PartitionTreePrior prior : options.getPartitionTreePriors()) {
            treePriorGenerator.writeTreePriorModel(prior, writer);
            writer.writeText("");
        }
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("Tree prior model generation has failed:\n" + e.getMessage());
    }
    // ++++++++++++++++ Starting Tree ++++++++++++++++++
    try {
        for (PartitionTreeModel model : options.getPartitionTreeModels()) {
            initialTreeGenerator.writeStartingTree(model, writer);
            writer.writeText("");
        }
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("Starting tree generation has failed:\n" + e.getMessage());
    }
    // ++++++++++++++++ Tree Model +++++++++++++++++++
    try {
        for (PartitionTreeModel model : options.getPartitionTreeModels()) {
            treeModelGenerator.writeTreeModel(model, writer);
            writer.writeText("");
        }
        generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_TREE_MODEL, writer);
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("Tree model generation has failed:\n" + e.getMessage());
    }
    // ++++++++++++++++ Statistics ++++++++++++++++++
    try {
        if (taxonSets != null && taxonSets.size() > 0) {
            tmrcaStatisticsGenerator.writeTMRCAStatistics(writer);
        }
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("TMRCA statistics generation has failed:\n" + e.getMessage());
    }
    // ++++++++++++++++ Tree Prior Likelihood ++++++++++++++++++
    try {
        for (PartitionTreeModel model : options.getPartitionTreeModels()) {
            treePriorGenerator.writePriorLikelihood(model, writer);
            writer.writeText("");
        }
        for (PartitionTreePrior prior : options.getPartitionTreePriors()) {
            treePriorGenerator.writeMultiLociTreePriors(prior, writer);
        }
        generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_TREE_PRIOR, writer);
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("Tree prior likelihood generation has failed:\n" + e.getMessage());
    }
    // ++++++++++++++++ Branch Rates Model ++++++++++++++++++
    try {
        for (PartitionClockModel model : options.getPartitionClockModels()) {
            clockModelGenerator.writeBranchRatesModel(model, writer);
            writer.writeText("");
        }
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("Branch rates model generation has failed:\n" + e.getMessage());
    }
    // ++++++++++++++++ Substitution Model & Site Model ++++++++++++++++++
    try {
        for (PartitionSubstitutionModel model : options.getPartitionSubstitutionModels()) {
            substitutionModelGenerator.writeSubstitutionSiteModel(model, writer);
            writer.writeText("");
        }
        generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_SUBSTITUTION_MODEL, writer);
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("Substitution model or site model generation has failed:\n" + e.getMessage());
    }
    // ++++++++++++++++ AllMus parameter ++++++++++++++++++
    try {
        for (PartitionClockModel model : options.getPartitionClockModels()) {
            clockModelGenerator.writeAllMus(model, writer);
        }
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("Clock model generation has failed:\n" + e.getMessage());
    }
    // ++++++++++++++++ Site Model ++++++++++++++++++
    generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_SITE_MODEL, writer);
    // ++++++++++++++++ Tree Likelihood ++++++++++++++++++
    try {
        // Group sequence partitions that can share a MultipartitionTreeDataLikelihood:
        // same tree model, same clock model and same data type.
        Map<Pair<Pair<PartitionTreeModel, PartitionClockModel>, DataType>, List<PartitionData>> partitionLists = new HashMap<>();
        options.multiPartitionLists.clear();
        options.otherPartitions.clear();
        for (AbstractPartitionData partition : options.dataPartitions) {
            // generate tree likelihoods for alignment data partitions
            if (partition.getTaxonList() != null) {
                if (treeLikelihoodGenerator.canUseMultiPartition(partition)) {
                    // all sequence partitions of the same type as the first into the list for use in a
                    // MultipartitionTreeDataLikelihood. Must also share the same tree, clock model and not be doing
                    // ancestral reconstruction or counting
                    Pair<Pair<PartitionTreeModel, PartitionClockModel>, DataType> key = new Pair<>(new Pair<>(partition.getPartitionTreeModel(), partition.getPartitionClockModel()), partition.getDataType());
                    List<PartitionData> partitions = partitionLists.get(key);
                    if (partitions == null) {
                        partitions = new ArrayList<PartitionData>();
                        options.multiPartitionLists.add(partitions);
                    }
                    partitions.add((PartitionData) partition);
                    partitionLists.put(key, partitions);
                } else {
                    options.otherPartitions.add(partition);
                }
            }
        }
        treeLikelihoodGenerator.writeAllTreeLikelihoods(writer);
        generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_TREE_LIKELIHOOD, writer);
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("Tree likelihood generation has failed:\n" + e.getMessage());
    }
    generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_TRAITS, writer);
    // ++++++++++++++++ Operators ++++++++++++++++++
    try {
        generateInsertionPoint(ComponentGenerator.InsertionPoint.BEFORE_OPERATORS, writer);
        List<Operator> operators = options.selectOperators();
        operatorsGenerator.writeOperatorSchedule(operators, writer);
        writer.writeText("");
        generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_OPERATORS, writer);
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("Operators generation has failed:\n" + e.getMessage());
    }
    // ++++++++++++++++ MCMC ++++++++++++++++++
    try {
        writeMCMC(writer);
        writer.writeText("");
        generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_MCMC, writer);
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("MCMC or log generation has failed:\n" + e.getMessage());
    }
    // ++++++++++++++++ Reports / trailer ++++++++++++++++++
    try {
        writeTimerReport(writer);
        writer.writeText("");
        if (options.performTraceAnalysis) {
            writeTraceAnalysis(writer);
        }
        if (options.generateCSV) {
            for (PartitionTreePrior prior : options.getPartitionTreePriors()) {
                treePriorGenerator.writeEBSPAnalysisToCSVfile(prior, writer);
            }
        }
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("The last part of XML generation has failed:\n" + e.getMessage());
    }
    writer.writeCloseTag("beast");
    writer.flush();
    writer.close();
}
Use of dr.app.beauti.util.XMLWriter in project beast-mcmc (beast-dev):
class XMLExporter, method exportAlignment.
// END: Constructor
/**
 * Serializes the given alignment to a BEAST XML {@code <alignment>} fragment.
 * <p>
 * The fragment contains one {@code <sequence>} element per taxon, each holding
 * an idref to the taxon followed by the raw sequence string.
 *
 * @param alignment the alignment to export; must contain at least zero sequences
 * @return the XML fragment as a string
 * @throws IOException if the underlying writer fails
 */
public String exportAlignment(SimpleAlignment alignment) throws IOException {
    StringWriter sw = new StringWriter();
    // try-with-resources guarantees the writer is closed (flushing any buffered
    // output into sw) even if a write fails part-way through; the original code
    // only closed on the success path.
    try (XMLWriter writer = new XMLWriter(sw)) {
        // TODO: if we keep the taxa element then let's also write dates.
        // (A commented-out <taxa> block writing one <taxon id=.../> per taxon
        // used to live here; reintroduce it if taxa need to be exported.)
        writer.writeOpenTag(// tagname
                AlignmentParser.ALIGNMENT, new Attribute[] { // attributes[]
                        new Attribute.Default<String>(XMLParser.ID, AlignmentParser.ALIGNMENT),
                        new Attribute.Default<String>(DataType.DATA_TYPE, alignment.getDataType().getDescription()) });
        for (int i = 0; i < alignment.getSequenceCount(); i++) {
            Taxon taxon = alignment.getTaxon(i);
            writer.writeOpenTag(SequenceParser.SEQUENCE);
            writer.writeIDref(TaxonParser.TAXON, taxon.getId());
            writer.writeText(alignment.getSequence(i).getSequenceString());
            writer.writeCloseTag(SequenceParser.SEQUENCE);
        }
        // END: sequences loop
        writer.writeCloseTag(AlignmentParser.ALIGNMENT);
    }
    return sw.toString();
}
Use of dr.app.beauti.util.XMLWriter in project beast-mcmc (beast-dev):
class XMLGenerator, method generateXML.
// END: Constructor
/**
 * Generates the complete BEAST XML input for the current partition list
 * ({@code dataList}) and writes it to {@code file}.
 * <p>
 * The document is assembled in stages (taxa, topology, tree model, clock model,
 * frequency model, branch model, site-rate model, simulator element, report),
 * each stage de-duplicating shared elements and wiring up idref suffixes on the
 * partitions. After a successful export all idrefs are reset so the generator
 * can be reused.
 * <p>
 * Fixes over the previous revision: the {@code FileWriter} is now closed via
 * try-with-resources (it previously leaked its OS file handle whenever a stage
 * threw), short-circuit {@code ||} replaces bitwise {@code |} in the list-check
 * conditions, and each rethrown {@code RuntimeException} now carries the
 * original exception as its cause instead of discarding the stack trace.
 *
 * @param file destination file; overwritten if it already exists
 * @throws IOException if the file cannot be opened or written
 */
public void generateXML(File file) throws IOException {
    // NOTE(review): FileWriter uses the platform default charset — confirm
    // whether an explicit UTF-8 writer is wanted for these XML files.
    try (XMLWriter writer = new XMLWriter(new BufferedWriter(new FileWriter(file)))) {

        // ---header---
        writer.writeText("<?xml version=\"1.0\" standalone=\"yes\"?>");
        writer.writeComment("Generated by " + BeagleSequenceSimulatorApp.LONG_NAME + " " + BeagleSequenceSimulatorApp.VERSION);
        writer.writeOpenTag("beast");
        writer.writeBlankLine();

        // ---taxa--- one <taxa> element per distinct taxon set
        try {
            int suffix = 1;
            ArrayList<Taxa> taxaList = new ArrayList<Taxa>();
            for (PartitionData data : dataList) {
                if (data.record == null) {
                    throw new RuntimeException("Set Tree Model in Partitions tab for " + suffix + " partition.");
                } else {
                    // Derive the taxon set from the record, depending on whether a
                    // tree or a taxa set was supplied and which demographic model
                    // is selected. If none of the cases match, taxa stays null —
                    // NOTE(review): writeTaxa(null, ...) would then fail; confirm
                    // the unreachable-else assumption upstream.
                    Taxa taxa = null;
                    if (data.demographicModelIndex == 0 && data.record.isTreeSet()) {
                        taxa = new Taxa(data.record.getTree().asList());
                    } else if (data.demographicModelIndex == 0 && data.record.isTaxaSet()) {
                        throw new RuntimeException("Data and demographic model incompatible for partition " + suffix);
                    } else if ((data.demographicModelIndex > 0 && data.demographicModelIndex <= PartitionData.lastImplementedIndex) && data.record.isTreeSet()) {
                        taxa = new Taxa(data.record.getTree().asList());
                    } else if ((data.demographicModelIndex > 0 && data.demographicModelIndex <= PartitionData.lastImplementedIndex) && data.record.isTaxaSet()) {
                        taxa = data.record.getTaxa();
                    }
                    // Write the taxon set only the first time it is seen;
                    // otherwise point the partition at the identical earlier one.
                    if (taxaList.size() == 0 || !Utils.isTaxaInList(taxa, taxaList)) {
                        data.taxaIdref += suffix;
                        writeTaxa(taxa, writer, String.valueOf(suffix));
                        writer.writeBlankLine();
                        taxaList.add(taxa);
                    } else {
                        int index = Utils.taxaIsIdenticalWith(taxa, taxaList) + 1;
                        data.taxaIdref += index;
                    }
                }
                // END: treeModel set check
                suffix++;
            }
            // END: partition loop
        } catch (Exception e) {
            throw new RuntimeException("Taxa generation has failed:\n" + e.getMessage(), e);
        }

        // ---topology--- either a fixed newick tree or a demographic model per partition
        try {
            int suffix = 1;
            ArrayList<TreeModel> treeModelList = new ArrayList<TreeModel>();
            for (PartitionData data : dataList) {
                if (data.demographicModelIndex == 0) {
                    TreeModel treeModel = data.createTreeModel();
                    if (treeModelList.size() == 0 || !Utils.isTreeModelInList(treeModel, treeModelList)) {
                        data.treeModelIdref += suffix;
                        writeNewick(treeModel, writer, String.valueOf(suffix));
                        writer.writeBlankLine();
                        treeModelList.add(treeModel);
                    } else {
                        int index = Utils.treeModelIsIdenticalWith(treeModel, treeModelList) + 1;
                        data.treeModelIdref += index;
                    }
                    // END: list check
                } else if (data.demographicModelIndex > 0 && data.demographicModelIndex <= PartitionData.lastImplementedIndex) {
                    data.demographicModelIdref += suffix;
                    data.treeModelIdref += suffix;
                    writeDemographicModel(data, writer, String.valueOf(suffix));
                    writer.writeBlankLine();
                }
                // NOTE(review): an out-of-range demographicModelIndex is silently
                // ignored here (original had an empty "throw exception" stub).
                // END: demo model check
                suffix++;
            }
            // END: partition loop
        } catch (Exception e) {
            throw new RuntimeException("Topology generation has failed:\n" + e.getMessage(), e);
        }

        // ---tree model--- one per distinct trees-table record
        try {
            int suffix = 1;
            ArrayList<TreesTableRecord> recordsList = new ArrayList<TreesTableRecord>();
            for (PartitionData data : dataList) {
                TreesTableRecord record = data.record;
                if (recordsList.size() == 0 || !Utils.isRecordInList(record, recordsList)) {
                    writeTreeModel(writer, String.valueOf(suffix));
                    writer.writeBlankLine();
                    recordsList.add(record);
                }
                suffix++;
            }
            // END: partition loop
        } catch (Exception e) {
            throw new RuntimeException("Tree model generation has failed:\n" + e.getMessage(), e);
        }

        // ---branch rates (clock) model--- de-duplicated by BRANCH_RATE_MODEL_ELEMENT
        try {
            int suffix = 1;
            ArrayList<PartitionData> partitionList = new ArrayList<PartitionData>();
            for (PartitionData data : dataList) {
                if (partitionList.size() == 0 || !Utils.isElementInList(data, partitionList, Utils.BRANCH_RATE_MODEL_ELEMENT)) {
                    data.clockModelIdref += suffix;
                    writeBranchRatesModel(data, writer, String.valueOf(suffix));
                    writer.writeBlankLine();
                    partitionList.add(data);
                } else {
                    int index = Utils.isIdenticalWith(data, partitionList, Utils.BRANCH_RATE_MODEL_ELEMENT) + 1;
                    data.clockModelIdref += index;
                }
                suffix++;
            }
            // END: partition loop
        } catch (Exception e) {
            throw new RuntimeException("Clock model generation has failed:\n" + e.getMessage(), e);
        }

        // ---frequency model--- de-duplicated by FREQUENCY_MODEL_ELEMENT
        try {
            int suffix = 1;
            ArrayList<PartitionData> partitionList = new ArrayList<PartitionData>();
            for (PartitionData data : dataList) {
                if (partitionList.size() == 0 || !Utils.isElementInList(data, partitionList, Utils.FREQUENCY_MODEL_ELEMENT)) {
                    data.frequencyModelIdref += suffix;
                    writeFrequencyModel(data, writer);
                    writer.writeBlankLine();
                    partitionList.add(data);
                } else {
                    int index = Utils.isIdenticalWith(data, partitionList, Utils.FREQUENCY_MODEL_ELEMENT) + 1;
                    data.frequencyModelIdref += index;
                }
                suffix++;
            }
            // END: partition loop
        } catch (Exception e) {
            throw new RuntimeException("Frequency model generation has failed:\n" + e.getMessage(), e);
        }

        // ---branch (substitution) model--- de-duplicated by BRANCH_MODEL_ELEMENT
        try {
            int suffix = 1;
            ArrayList<PartitionData> partitionList = new ArrayList<PartitionData>();
            for (PartitionData data : dataList) {
                if (partitionList.size() == 0 || !Utils.isElementInList(data, partitionList, Utils.BRANCH_MODEL_ELEMENT)) {
                    data.substitutionModelIdref += suffix;
                    writeBranchModel(data, writer, String.valueOf(suffix));
                    writer.writeBlankLine();
                    partitionList.add(data);
                } else {
                    int index = Utils.isIdenticalWith(data, partitionList, Utils.BRANCH_MODEL_ELEMENT) + 1;
                    data.substitutionModelIdref += index;
                }
                suffix++;
            }
            // END: partition loop
        } catch (Exception e) {
            throw new RuntimeException("Branch model generation has failed:\n" + e.getMessage(), e);
        }

        // ---site-rate model--- de-duplicated by SITE_RATE_MODEL_ELEMENT
        try {
            int suffix = 1;
            ArrayList<PartitionData> partitionList = new ArrayList<PartitionData>();
            for (PartitionData data : dataList) {
                if (partitionList.size() == 0 || !Utils.isElementInList(data, partitionList, Utils.SITE_RATE_MODEL_ELEMENT)) {
                    data.siteRateModelIdref += suffix;
                    writeSiteRateModel(data, writer, suffix);
                    writer.writeBlankLine();
                    partitionList.add(data);
                } else {
                    int index = Utils.isIdenticalWith(data, partitionList, Utils.SITE_RATE_MODEL_ELEMENT) + 1;
                    data.siteRateModelIdref += index;
                }
                suffix++;
            }
            // END: partition loop
        } catch (Exception e) {
            System.err.println(e);
            throw new RuntimeException("Site rate model generation has failed:\n" + e.getMessage(), e);
        }

        // ---simulator element---
        try {
            writeBeagleSequenceSimulator(writer);
            writer.writeBlankLine();
        } catch (Exception e) {
            throw new RuntimeException("Beagle Sequence Simulator element generation has failed:\n" + e.getMessage(), e);
        }

        // ---report element---
        try {
            writeReport(writer);
            writer.writeBlankLine();
        } catch (Exception e) {
            System.err.println(e);
            throw new RuntimeException("Report element generation has failed:\n" + e.getMessage(), e);
        }
        // END: try-catch block

        writer.writeCloseTag("beast");
        writer.flush();
    }

    // reset all idrefs so the next export starts from a clean state
    for (PartitionData data : dataList) {
        data.resetIdrefs();
    }
}
Aggregations