Example usage of dr.inference.model.Likelihood in the beast-mcmc project (beast-dev): the parseXMLObject method of the MLLoggerParser class.
/**
 * Parses an MLLogger XML element: reads the likelihood, log frequency and output
 * destination, then registers every child element as a logging column.
 *
 * @param xo the XML element being parsed
 * @return the configured MLLogger
 * @throws XMLParseException if a required child element or attribute is missing or malformed
 */
public Object parseXMLObject(XMLObject xo) throws XMLParseException {
    final Likelihood likelihood = (Likelihood) xo.getElementFirstChild(LIKELIHOOD);

    // A logEvery of zero means "only report at the end of the run".
    final int logEvery = xo.getAttribute(LOG_EVERY, 0);

    final PrintWriter writer = getLogFile(xo, getParserName());
    final MLLogger logger = new MLLogger(likelihood, new TabDelimitedFormatter(writer), logEvery);

    if (xo.hasAttribute(TITLE)) {
        logger.setTitle(xo.getStringAttribute(TITLE));
    }

    final int childCount = xo.getChildCount();
    for (int index = 0; index < childCount; index++) {
        final Object entry = xo.getChild(index);
        if (entry instanceof Columns) {
            logger.addColumns(((Columns) entry).getColumns());
        } else if (entry instanceof Loggable) {
            logger.add((Loggable) entry);
        } else if (entry instanceof Identifiable) {
            // Use the element's id as the column label when available.
            logger.addColumn(new LogColumn.Default(((Identifiable) entry).getId(), entry));
        } else {
            // Last resort: label the column with the runtime class description.
            logger.addColumn(new LogColumn.Default(entry.getClass().toString(), entry));
        }
    }

    return logger;
}
Example usage of dr.inference.model.Likelihood in the beast-mcmc project (beast-dev): the parseXMLObject method of the CompoundLikelihoodParser class.
/**
 * Parses a compound likelihood element: collects the child likelihood elements
 * and wraps them in a {@code CompoundLikelihood}, configuring the thread pool
 * size from the XML attribute or the "thread.count" system property.
 *
 * @param xo the XML element being parsed
 * @return the configured CompoundLikelihood
 * @throws XMLParseException if a child is not a likelihood, appears twice, or
 *                           the "thread.count" system property is not an integer
 */
public Object parseXMLObject(XMLObject xo) throws XMLParseException {
    // The default of -1 threads requests an automatically sized thread pool;
    // the XML attribute can override it.
    int threads = xo.getAttribute(THREADS, -1);

    // The "thread.count" system property, if set, overrides the XML attribute.
    // Read the property once (the original read it twice) and report a
    // malformed value as an XMLParseException instead of letting an unchecked
    // NumberFormatException escape.
    final String threadCountProperty = System.getProperty("thread.count");
    if (threadCountProperty != null) {
        try {
            threads = Integer.parseInt(threadCountProperty);
        } catch (NumberFormatException nfe) {
            throw new XMLParseException("The thread.count system property is not an integer: " + threadCountProperty);
        }
        if (threads < -1 || threads > 1000) {
            // Guard against nonsensical values (upper limit may be unnecessary);
            // fall back to automatic pool sizing.
            threads = -1;
        }
    }

    List<Likelihood> likelihoods = new ArrayList<Likelihood>();
    for (int i = 0; i < xo.getChildCount(); i++) {
        final Object child = xo.getChild(i);
        if (child instanceof Likelihood) {
            if (likelihoods.contains(child)) {
                throw new XMLParseException("The likelihood element, '" + ((Likelihood) child).getId() + "', is already present in the likelihood or prior density.");
            }
            likelihoods.add((Likelihood) child);
        } else {
            throw new XMLParseException("An element (" + child + ") which is not a likelihood has been added to a " + COMPOUND_LIKELIHOOD + " element");
        }
    }

    CompoundLikelihood compoundLikelihood;
    if (xo.getName().equalsIgnoreCase(LIKELIHOOD)) {
        // Only the "likelihood" element gets a threaded evaluator; report the
        // chosen threading mode so runs are reproducible/diagnosable from logs.
        compoundLikelihood = new CompoundLikelihood(threads, likelihoods);
        switch (threads) {
            case -1:
                Logger.getLogger("dr.evomodel").info("\nLikelihood computation is using an auto sizing thread pool.");
                break;
            case 0:
                Logger.getLogger("dr.evomodel").info("\nLikelihood computation is using a single thread.");
                break;
            default:
                Logger.getLogger("dr.evomodel").info("\nLikelihood computation is using a pool of " + threads + " threads.");
                break;
        }
    } else {
        compoundLikelihood = new CompoundLikelihood(likelihoods);
    }
    return compoundLikelihood;
}
Example usage of dr.inference.model.Likelihood in the beast-mcmc project (beast-dev): the parseXMLObject method of the BeagleBranchLikelihoodParser class.
@Override
public Object parseXMLObject(XMLObject xo) throws XMLParseException {
TreeModel treeModel = (TreeModel) xo.getChild(TreeModel.class);
// if(xo.hasChildNamed(TreeModel.TREE_MODEL)) {
//
// treeModel = (TreeModel) xo.getChild(TreeModel.class);
// }
Parameter zParameter = (Parameter) xo.getElementFirstChild(DirichletProcessPriorParser.CATEGORIES);
List<Likelihood> likelihoods = new ArrayList<Likelihood>();
XMLObject cxo = (XMLObject) xo.getChild(UNIQUE_LIKELIHOODS);
for (int i = 0; i < cxo.getChildCount(); i++) {
Likelihood likelihood = (Likelihood) cxo.getChild(i);
likelihoods.add(likelihood);
}
return null;
// new BeagleBranchLikelihood(
//// treeModel, likelihoods, zParameter
// );
}
Example usage of dr.inference.model.Likelihood in the beast-mcmc project (beast-dev): the parseXMLObject method of the DirichletProcessOperatorParser class.
@Override
public Object parseXMLObject(XMLObject xo) throws XMLParseException {
DirichletProcessPrior dpp = (DirichletProcessPrior) xo.getChild(DirichletProcessPrior.class);
Likelihood likelihood = (Likelihood) xo.getElementFirstChild(DATA_LOG_LIKELIHOOD);
Parameter categoriesParameter = (Parameter) xo.getElementFirstChild(DirichletProcessPriorParser.CATEGORIES);
CountableRealizationsParameter allParameters = (CountableRealizationsParameter) xo.getChild(CountableRealizationsParameter.class);
CompoundParameter uniquelyRealizedParameters = (CompoundParameter) xo.getChild(CompoundParameter.class);
int M = xo.getIntegerAttribute(MH_STEPS);
final double weight = xo.getDoubleAttribute(MCMCOperator.WEIGHT);
return new //
DirichletProcessOperator(//
dpp, //
categoriesParameter, //
uniquelyRealizedParameters, //
allParameters, //
likelihood, //
M, //
weight);
}
Example usage of dr.inference.model.Likelihood in the beast-mcmc project (beast-dev): the writeStateToFile method of the BeastCheckpointer class.
/**
 * Writes a checkpoint of the full MCMC state to a file: RNG state, chain state
 * and log likelihood, all connected parameters, operator tuning statistics and
 * every connected tree model (as a general node/edge graph with per-branch
 * traits).
 *
 * @param file        destination checkpoint file
 * @param state       current chain state (generation) number
 * @param lnL         current log likelihood of the chain
 * @param markovChain chain whose operator schedule is checkpointed
 * @return true on success, false if an I/O error occurred
 */
private boolean writeStateToFile(File file, long state, double lnL, MarkovChain markovChain) {
    OperatorSchedule operatorSchedule = markovChain.getSchedule();

    // try-with-resources guarantees the streams are closed even if a write
    // fails part-way through (the original closed them only on the success path).
    try (OutputStream fileOut = new FileOutputStream(file);
         PrintStream out = new PrintStream(fileOut)) {

        // RNG state line: "rng" followed by one tab-separated value per word.
        int[] rngState = MathUtils.getRandomState();
        out.print("rng");
        for (int i = 0; i < rngState.length; i++) {
            out.print("\t");
            out.print(rngState[i]);
        }
        out.println();

        out.print("state\t");
        out.println(state);

        out.print("lnL\t");
        out.println(lnL);

        writeParameters(out);
        writeOperators(out, operatorSchedule);

        // Collect any TreeParameterModels up front so their per-branch trait
        // values can be appended to the tree edge records below.
        ArrayList<TreeParameterModel> traitModels = new ArrayList<TreeParameterModel>();
        for (Model model : Model.CONNECTED_MODEL_SET) {
            if (model instanceof TreeParameterModel) {
                traitModels.add((TreeParameterModel) model);
            }
        }

        for (Model model : Model.CONNECTED_MODEL_SET) {
            if (model instanceof TreeModel) {
                writeTreeModel(out, (TreeModel) model, traitModels);
            }
        }
    } catch (IOException ioe) {
        System.err.println("Unable to write file: " + ioe.getMessage());
        return false;
    }

    if (DEBUG) {
        for (Likelihood likelihood : Likelihood.CONNECTED_LIKELIHOOD_SET) {
            System.err.println(likelihood.getId() + ": " + likelihood.getLogLikelihood());
        }
    }
    return true;
}

/** Writes one "parameter" line per connected parameter: name, dimension, then each value. */
private void writeParameters(PrintStream out) {
    for (Parameter parameter : Parameter.CONNECTED_PARAMETER_SET) {
        out.print("parameter");
        out.print("\t");
        out.print(parameter.getParameterName());
        out.print("\t");
        out.print(parameter.getDimension());
        for (int dim = 0; dim < parameter.getDimension(); dim++) {
            out.print("\t");
            out.print(parameter.getParameterValue(dim));
        }
        out.println();
    }
}

/** Writes one "operator" line per scheduled operator: name, accept/reject counts and, when coercible, its tuning parameter. */
private void writeOperators(PrintStream out, OperatorSchedule operatorSchedule) {
    for (int i = 0; i < operatorSchedule.getOperatorCount(); i++) {
        MCMCOperator operator = operatorSchedule.getOperator(i);
        out.print("operator");
        out.print("\t");
        out.print(operator.getOperatorName());
        out.print("\t");
        out.print(operator.getAcceptCount());
        out.print("\t");
        out.print(operator.getRejectCount());
        if (operator instanceof CoercableMCMCOperator) {
            out.print("\t");
            out.print(((CoercableMCMCOperator) operator).getCoercableParameter());
        }
        out.println();
    }
}

/**
 * Writes a tree model as a general graph: a node table (number, height and, for
 * tips, taxon id) followed by an edge table (child, parent, left/right flag and
 * any per-branch trait values). This replaces the earlier Newick output so
 * branch traits can be attached to edges.
 */
private void writeTreeModel(PrintStream out, TreeModel tree, List<TreeParameterModel> traitModels) {
    out.print("tree");
    out.print("\t");
    out.println(tree.getModelName());

    out.println("#node height taxon");
    int nodeCount = tree.getNodeCount();
    out.println(nodeCount);
    for (int i = 0; i < nodeCount; i++) {
        NodeRef node = tree.getNode(i);
        out.print(node.getNumber());
        out.print("\t");
        out.print(tree.getNodeHeight(node));
        if (tree.isExternal(node)) {
            out.print("\t");
            out.print(tree.getNodeTaxon(node).getId());
        }
        out.println();
    }

    out.println("#edges");
    out.println("#child-node parent-node L/R-child traits");
    out.println(nodeCount);
    for (int i = 0; i < nodeCount; i++) {
        NodeRef node = tree.getNode(i);
        NodeRef parent = tree.getParent(node);
        // The root has no parent and therefore no edge record.
        if (parent != null) {
            out.print(node.getNumber());
            out.print("\t");
            out.print(parent.getNumber());
            out.print("\t");
            if (tree.getChild(parent, 0) == node) {
                out.print(0); // left child
            } else if (tree.getChild(parent, 1) == node) {
                out.print(1); // right child
            } else {
                throw new RuntimeException("Operation currently only supported for nodes with 2 children.");
            }
            for (TreeParameterModel tpm : traitModels) {
                out.print("\t");
                out.print(tpm.getNodeValue(tree, node));
            }
            out.println();
        }
    }
}
Aggregations