Use of dr.evolution.coalescent.DemographicFunction in project beast-mcmc by beast-dev.
The class CoalGenFrame, method generateFile.
protected void generateFile(File outFile) throws IOException {
    PrintWriter writer = new PrintWriter(new FileWriter(outFile));
    dr.evolution.coalescent.CoalescentSimulator simulator = new dr.evolution.coalescent.CoalescentSimulator();
    int count = 0;
    while (data.hasNext()) {
        DemographicFunction demo = data.nextDemographic();
        Tree tree = simulator.simulateTree(data.taxonList, demo);
        writer.println(count + "\t" + TreeUtils.newick(tree));
        count += 1;
    }
    writer.close();
}
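As a point of reference, a DemographicFunction can also be constructed directly and passed to the simulator. The sketch below is a minimal, self-contained example (not taken from beast-mcmc) that builds a constant-size function for a hand-made taxon list; the taxon names are placeholders, and the Taxa/Taxon helpers from dr.evolution.util are assumed to behave as elsewhere in the project.

import dr.evolution.coalescent.CoalescentSimulator;
import dr.evolution.coalescent.ConstantPopulation;
import dr.evolution.tree.Tree;
import dr.evolution.util.Taxa;
import dr.evolution.util.Taxon;
import dr.evolution.util.Units;

public class SimulateConstantTree {
    public static void main(String[] args) {
        // Placeholder taxon list; any TaxonList works here.
        Taxa taxa = new Taxa();
        taxa.addTaxon(new Taxon("taxon1"));
        taxa.addTaxon(new Taxon("taxon2"));
        taxa.addTaxon(new Taxon("taxon3"));

        // Constant effective population size, time measured in years.
        ConstantPopulation demo = new ConstantPopulation(Units.Type.YEARS);
        demo.setN0(10.0);

        // Simulate one coalescent tree under this demographic function.
        CoalescentSimulator simulator = new CoalescentSimulator();
        Tree tree = simulator.simulateTree(taxa, demo);
        System.out.println("simulated tree with " + tree.getNodeCount() + " nodes");
    }
}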
Use of dr.evolution.coalescent.DemographicFunction in project beast-mcmc by beast-dev.
The class TransmissionTreeToVirusTree, method main.
public static void main(String[] args) {
    ModelType model = ModelType.CONSTANT;
    double startNe = 1;
    double growthRate = 0;
    double t50 = 0;
    Arguments arguments = new Arguments(new Arguments.Option[] {
            new Arguments.StringOption(DEMOGRAPHIC_MODEL, demographics, false,
                    "The type of within-host demographic function to use, default = constant"),
            new Arguments.RealOption(STARTING_POPULATION_SIZE,
                    "The effective population size at time zero (used in all models), default = 1"),
            new Arguments.RealOption(GROWTH_RATE,
                    "The effective population size growth rate (used in exponential and logistic models), default = 0"),
            new Arguments.RealOption(T50,
                    "The time point, relative to the time of infection in backwards time, at which the population"
                            + " is equal to half its final asymptotic value, in the logistic model, default = 0") });
    try {
        arguments.parseArguments(args);
    } catch (Arguments.ArgumentException ae) {
        System.out.println(ae);
        printUsage(arguments);
        System.exit(1);
    }
    if (arguments.hasOption(HELP)) {
        printUsage(arguments);
        System.exit(0);
    }
    if (arguments.hasOption(DEMOGRAPHIC_MODEL)) {
        String modelString = arguments.getStringOption(DEMOGRAPHIC_MODEL);
        if (modelString.toLowerCase().startsWith("c")) {
            model = ModelType.CONSTANT;
        } else if (modelString.toLowerCase().startsWith("e")) {
            model = ModelType.EXPONENTIAL;
        } else if (modelString.toLowerCase().startsWith("l")) {
            model = ModelType.LOGISTIC;
        } else {
            progressStream.print("Unrecognised demographic model type");
            System.exit(1);
        }
    }
    if (arguments.hasOption(STARTING_POPULATION_SIZE)) {
        startNe = arguments.getRealOption(STARTING_POPULATION_SIZE);
    }
    if (arguments.hasOption(GROWTH_RATE) && model != ModelType.CONSTANT) {
        growthRate = arguments.getRealOption(GROWTH_RATE);
    }
    if (arguments.hasOption(T50) && model == ModelType.LOGISTIC) {
        t50 = arguments.getRealOption(T50);
    }
    DemographicFunction demoFunction = null;
    switch (model) {
        case CONSTANT: {
            demoFunction = new ConstantPopulation(Units.Type.YEARS);
            ((ConstantPopulation) demoFunction).setN0(startNe);
            break;
        }
        case EXPONENTIAL: {
            demoFunction = new ExponentialGrowth(Units.Type.YEARS);
            ((ExponentialGrowth) demoFunction).setN0(startNe);
            ((ExponentialGrowth) demoFunction).setGrowthRate(growthRate);
            break;
        }
        case LOGISTIC: {
            demoFunction = new LogisticGrowthN0(Units.Type.YEARS);
            ((LogisticGrowthN0) demoFunction).setN0(startNe);
            ((LogisticGrowthN0) demoFunction).setGrowthRate(growthRate);
            ((LogisticGrowthN0) demoFunction).setT50(t50);
            break;
        }
    }
    final String[] args2 = arguments.getLeftoverArguments();
    if (args2.length != 3) {
        printUsage(arguments);
        System.exit(1);
    }
    String infectionsFileName = args2[0];
    String samplesFileName = args2[1];
    String outputFileRoot = args2[2];
    TransmissionTreeToVirusTree instance = new TransmissionTreeToVirusTree(samplesFileName, infectionsFileName, demoFunction, outputFileRoot);
    try {
        instance.run();
    } catch (IOException e) {
        e.printStackTrace();
    }
}
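All three within-host models expose the same DemographicFunction interface, with time measured backwards from zero. As a rough illustration only (not part of the tool), the sketch below builds a constant and an exponential function with the same setters used above and prints Ne(t) at a few time points.

import dr.evolution.coalescent.ConstantPopulation;
import dr.evolution.coalescent.ExponentialGrowth;
import dr.evolution.util.Units;

public class CompareWithinHostDemographics {
    public static void main(String[] args) {
        ConstantPopulation constant = new ConstantPopulation(Units.Type.YEARS);
        constant.setN0(1.0);

        ExponentialGrowth exponential = new ExponentialGrowth(Units.Type.YEARS);
        exponential.setN0(1.0);
        exponential.setGrowthRate(2.0);

        // t is measured backwards in time from t = 0, as for all DemographicFunction
        // implementations, so a positive growth rate means a smaller population in the past.
        for (double t = 0.0; t <= 1.0; t += 0.25) {
            System.out.printf("t=%.2f  constant Ne=%.3f  exponential Ne=%.3f%n",
                    t, constant.getDemographic(t), exponential.getDemographic(t));
        }
    }
}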
Use of dr.evolution.coalescent.DemographicFunction in project beast-mcmc by beast-dev.
The class MulMSCoalescent, method treeLogLikelihood.
private double treeLogLikelihood(MulSpeciesBindings.GeneTreeInfo geneTree, NodeRef node, int[] info, double popFactor) {
    // number of lineages remaining at node
    int nLineages;
    // location in coalescent list (optimization)
    int indexInClist = 0;
    // accumulated log-likelihood in the branch from node to its parent
    double like = 0;
    final double t0 = spTree.getNodeHeight(node);
    final MulSpeciesBindings.CoalInfo[] cList = geneTree.getCoalInfo();
    if (verbose) {
        if (spTree.isRoot(node)) {
            System.err.println("gtree:" + geneTree.tree.getId());
            System.err.println("t0 " + t0);
            for (int k = 0; k < cList.length; ++k) {
                System.err.println(k + " " + cList[k].ctime + " " + cList[k].sinfo[0] + " " + cList[k].sinfo[1]);
            }
        }
    }
    if (spTree.isExternal(node)) {
        nLineages = geneTree.nLineages(spTree.speciesIndex(node));
        indexInClist = 0;
    } else {
        // assert spTree.getChildCount(node) == 2;
        nLineages = 0;
        for (int nc = 0; nc < 2; ++nc) {
            final NodeRef child = spTree.getChild(node, nc);
            like += treeLogLikelihood(geneTree, child, info, popFactor);
            nLineages += info[0];
            indexInClist = Math.max(indexInClist, info[1]);
        }
        // root of species tree
        if (indexInClist >= cList.length) {
            System.err.println("ERROR in evomodel.speciation.MulMSCoalescent.treeLogLikelihood()");
        }
        assert indexInClist < cList.length;
        while (cList[indexInClist].ctime < t0) {
            ++indexInClist;
        }
    }
    final boolean isRoot = spTree.isRoot(node);
    // Upper limit
    // use of (t0 + spTree.getBranchLength(node)) caused a problem since there was a tiny difference
    // between those (supposedly equal) values. We should track where the discrepancy comes from.
    final double stopTime = isRoot ? Double.MAX_VALUE : spTree.getNodeHeight(spTree.getParent(node));
    // demographic function is 0 based (relative to node height)
    // time away from node
    double lastTime = 0.0;
    // demographic function across branch
    DemographicFunction demog = spTree.getNodeDemographic(node);
    if (popFactor > 0) {
        demog = new ScaledDemographic(demog, popFactor);
    }
    // if(false) {
    //     final double duration = isRoot ? cList[cList.length - 1].ctime - t0 : stopTime;
    //     double demographicAtCoalPoint = demog.getDemographic(duration);
    //     double intervalArea = demog.getIntegral(0, duration);
    //     if( demographicAtCoalPoint < 1e-12 * (duration/intervalArea) ) {
    //         return Double.NEGATIVE_INFINITY;
    //     }
    // }
    // Species sharing this branch
    FixedBitSet subspeciesSet = spTree.spSet(node);
    if (verbose) {
        System.err.println(TreeUtils.uniqueNewick(spTree, node) + " nl " + nLineages + " " + subspeciesSet + " t0 - st " + t0 + " - " + stopTime);
    }
    while (nLineages > 1) {
        // }
        assert (indexInClist < cList.length);
        final double nextT = cList[indexInClist].ctime;
        // while rare, they can be equal
        if (nextT >= stopTime) {
            break;
        }
        if (nonEmptyIntersection(cList[indexInClist].sinfo, subspeciesSet)) {
            final double time = nextT - t0;
            if (time > 0) {
                final double interval = demog.getIntegral(lastTime, time);
                lastTime = time;
                final int nLineageOver2 = (nLineages * (nLineages - 1)) / 2;
                like -= nLineageOver2 * interval;
                final double pop = demog.getDemographic(time);
                assert (pop > 0);
                like -= Math.log(pop);
            }
            --nLineages;
        }
        ++indexInClist;
    }
    if (nLineages > 1) {
        // add term for no coalescent until root
        final double interval = demog.getIntegral(lastTime, stopTime - t0);
        final int nLineageOver2 = (nLineages * (nLineages - 1)) / 2;
        like -= nLineageOver2 * interval;
    }
    info[0] = nLineages;
    info[1] = indexInClist;
    if (verbose) {
        System.err.println(TreeUtils.uniqueNewick(spTree, node) + " stopTime " + stopTime + " nl " + nLineages + " icl " + indexInClist);
    }
    return like;
}
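Each coalescence inside a branch contributes -k(k-1)/2 times the integral of 1/N(t) over the waiting interval (demog.getIntegral) minus log N at the event time (demog.getDemographic). The sketch below is a minimal version of that single-interval term, checked against a constant population so the numbers can be verified by hand; it assumes getIntegral returns the integral of 1/N(t) as documented for DemographicFunction.

import dr.evolution.coalescent.ConstantPopulation;
import dr.evolution.util.Units;

public class SingleIntervalSketch {
    public static void main(String[] args) {
        // Constant Ne so the result can be checked by hand.
        ConstantPopulation demog = new ConstantPopulation(Units.Type.YEARS);
        demog.setN0(100.0);

        int nLineages = 4;      // lineages entering the interval
        double lastTime = 0.0;  // start of the interval, relative to the branch base
        double coalTime = 2.5;  // time of the next coalescence

        // Same terms as the loop above: -C(k,2) * integral of 1/N(t), then -log N at the event.
        double interval = demog.getIntegral(lastTime, coalTime);
        int nLineageOver2 = (nLineages * (nLineages - 1)) / 2;
        double like = -nLineageOver2 * interval - Math.log(demog.getDemographic(coalTime));

        // For constant Ne, getIntegral(0, 2.5) == 2.5 / 100, so like == -6 * 0.025 - log(100).
        System.out.println("branch interval contribution = " + like);
    }
}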
Use of dr.evolution.coalescent.DemographicFunction in project beast-mcmc by beast-dev.
The class VeryOldCoalescentLikelihood, method calculateLogLikelihood.
/**
 * Calculates the log likelihood of this set of coalescent intervals,
 * given a demographic model.
 */
public double calculateLogLikelihood() {
    if (!intervalsKnown)
        setupIntervals();
    if (demoModel == null)
        return calculateAnalyticalLogLikelihood();
    double logL = 0.0;
    double currentTime = 0.0;
    DemographicFunction demoFunction = demoModel.getDemographicFunction();
    for (int j = 0; j < intervalCount; j++) {
        logL += calculateIntervalLikelihood(demoFunction, intervals[j], currentTime, lineageCounts[j], getIntervalType(j));
        // insert zero-length coalescent intervals
        int diff = getCoalescentEvents(j) - 1;
        for (int k = 0; k < diff; k++) {
            logL += calculateIntervalLikelihood(demoFunction, 0.0, currentTime, lineageCounts[j] - k - 1, COALESCENT);
        }
        currentTime += intervals[j];
    }
    return logL;
}
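The per-interval term itself is computed by calculateIntervalLikelihood, which is not shown here. As a rough guide, the sketch below gives the standard coalescent density of one interval; the method name and the boolean flag are hypothetical stand-ins for the real signature and its IntervalType argument. A zero-width coalescent interval, as produced by the inner loop above, contributes only the -log N(t) event term.

import dr.evolution.coalescent.ConstantPopulation;
import dr.evolution.coalescent.DemographicFunction;
import dr.evolution.util.Units;

public class IntervalDensitySketch {

    // Hypothetical stand-in for calculateIntervalLikelihood: the standard coalescent
    // density of one interval of the given width, starting at 'start' with 'lineages'
    // active lineages. The boolean replaces the IntervalType argument of the real method.
    static double intervalLogLikelihood(DemographicFunction f, double width, double start,
                                        int lineages, boolean endsInCoalescence) {
        double kChoose2 = lineages * (lineages - 1) / 2.0;
        double logL = -kChoose2 * f.getIntegral(start, start + width);
        if (endsInCoalescence) {
            logL -= Math.log(f.getDemographic(start + width));
        }
        return logL;
    }

    public static void main(String[] args) {
        ConstantPopulation f = new ConstantPopulation(Units.Type.YEARS);
        f.setN0(100.0);
        // An ordinary coalescent interval, then a zero-width one as in the inner loop above.
        System.out.println(intervalLogLikelihood(f, 1.5, 0.0, 3, true));
        System.out.println(intervalLogLikelihood(f, 0.0, 1.5, 2, true));
    }
}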
Use of dr.evolution.coalescent.DemographicFunction in project beast-mcmc by beast-dev.
The class BNPRSamplingLikelihood, method calculateLogLikelihood.
public double calculateLogLikelihood() {
    double minSample = samplingTimes[0];
    double maxSample = samplingTimes[numSamples - 1];
    double beta0 = betas.getParameterValue(0);
    double beta1 = betas.getParameterValue(1);
    // Start with beta0 part of likelihood.
    double logLik = numSamples * beta0;
    DemographicFunction f = population.getDemographicFunction();
    // Separated to make cut-paste into a new setupLogPopSizes() function easy.
    for (int i = 0; i < numSamples; i++) {
        logPopSizes[i] = f.getLogDemographic(samplingTimes[i]);
    }
    double eventLogLik = 0.0;
    // Calculate the event component of the inhomogeneous Poisson process log-likelihood
    for (int i = 0; i < numSamples; i++) {
        eventLogLik += beta1 * logPopSizes[i];
        if (this.powerCovariates != null) {
            for (int j = 0; j < powerBetas.getDimension(); j++) {
                eventLogLik += powerBetas.getParameterValue(j) * evaluatePiecewise(this.epochWidths, powerCovariates.getColumnValues(j), samplingTimes[i]) * logPopSizes[i];
            }
        }
        if (this.covariates != null) {
            for (int j = 2; j < betas.getDimension(); j++) {
                eventLogLik += betas.getParameterValue(j) * evaluatePiecewise(this.epochWidths, covariates.getColumnValues(j - 2), samplingTimes[i]);
            }
        }
    }
    logLik += eventLogLik;
    double integralLogLik = 0.0;
    double[] fBeta = new double[this.midpoints.length];
    double fBetaLog;
    for (int i = 0; i < fBeta.length; i++) {
        fBetaLog = beta1 * f.getLogDemographic(this.midpoints[i]);
        if (this.powerCovariates != null) {
            for (int j = 0; j < powerBetas.getDimension(); j++) {
                fBetaLog += powerBetas.getParameterValue(j) * this.powerCovariates.getParameterValue(i, j) * f.getLogDemographic(this.midpoints[i]);
            }
        }
        if (this.covariates != null) {
            for (int j = 2; j < betas.getDimension(); j++) {
                fBetaLog += betas.getParameterValue(j) * this.covariates.getParameterValue(i, j - 2);
            }
        }
        fBeta[i] = Math.exp(fBetaLog);
    }
    integralLogLik -= Math.exp(beta0) * integratePiecewise(this.epochWidths, fBeta, minSample, maxSample);
    logLik += integralLogLik;
    return logLik;
}
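The sampling times are modelled as an inhomogeneous Poisson process with log-intensity beta0 + beta1 * log Ne(t) plus covariate terms: the first loop accumulates the event term at each sampling time, and the final line subtracts the integrated intensity over the sampling window. The sketch below strips out the covariates and replaces integratePiecewise with a plain midpoint rule, so it is a simplification of the model above rather than a drop-in equivalent; the parameter values and sampling times are made up.

import dr.evolution.coalescent.ConstantPopulation;
import dr.evolution.util.Units;

public class PreferentialSamplingSketch {
    public static void main(String[] args) {
        // Toy demographic function and sampling times; no covariates.
        ConstantPopulation f = new ConstantPopulation(Units.Type.YEARS);
        f.setN0(50.0);
        double[] samplingTimes = { 0.0, 0.3, 0.7, 1.2, 2.0 };
        double beta0 = -1.0, beta1 = 0.5;

        // Event term: sum of log lambda(t_i), with lambda(t) = exp(beta0 + beta1 * log Ne(t)).
        double logLik = samplingTimes.length * beta0;
        for (double t : samplingTimes) {
            logLik += beta1 * f.getLogDemographic(t);
        }

        // Integral term: minus the integral of lambda(t) over the sampling window,
        // here a plain midpoint rule standing in for integratePiecewise above.
        double a = samplingTimes[0], b = samplingTimes[samplingTimes.length - 1];
        int nBins = 100;
        double width = (b - a) / nBins, integral = 0.0;
        for (int i = 0; i < nBins; i++) {
            double mid = a + (i + 0.5) * width;
            integral += Math.exp(beta0 + beta1 * f.getLogDemographic(mid)) * width;
        }
        logLik -= integral;
        System.out.println("sampling log-likelihood = " + logLik);
    }
}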