Use of dr.oldevomodel.substmodel.FrequencyModel in project beast-mcmc by beast-dev.
Class HKYTest, method testHKY:
public void testHKY() {
    for (Instance test : all) {
        Parameter kappa = new Parameter.Default(1, test.getKappa());
        double[] pi = test.getPi();
        Parameter freqs = new Parameter.Default(pi);
        FrequencyModel f = new FrequencyModel(Nucleotides.INSTANCE, freqs);
        HKY hky = new HKY(kappa, f);

        double distance = test.getDistance();
        double[] mat = new double[4 * 4];
        hky.getTransitionProbabilities(distance, mat);

        final double[] result = test.getExpectedResult();
        for (int k = 0; k < mat.length; ++k) {
            assertEquals(mat[k], result[k], 1e-10);
            // System.out.print(" " + (mat[k] - result[k]));
        }
    }
}
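For reference, the FrequencyModel construction used here can be isolated into a minimal sketch. The constructor and the getFrequencies() accessor mirror the calls in the test above; the uniform frequencies and the sum-to-one check are illustrative assumptions rather than part of HKYTest, and the snippet assumes the same imports and JUnit context as the test.

// Minimal sketch, assuming the same imports and test context as HKYTest above.
double[] pi = new double[] { 0.25, 0.25, 0.25, 0.25 };    // assumed uniform frequencies
Parameter freqs = new Parameter.Default(pi);
FrequencyModel f = new FrequencyModel(Nucleotides.INSTANCE, freqs);

double sum = 0.0;
for (double frequency : f.getFrequencies()) {
    sum += frequency;
}
assertEquals(1.0, sum, 1e-10);    // the frequencies supplied here already sum to one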
Use of dr.oldevomodel.substmodel.FrequencyModel in project beast-mcmc by beast-dev.
Class CovarionHKYTest, method transitionProbabilitiesTester:
public void transitionProbabilitiesTester(double[] freqs, double[] expectedP) {
    Parameter frequencies = new Parameter.Default(freqs);
    FrequencyModel freqModel = new FrequencyModel(dataType, frequencies);
    model = new CovarionHKY(dataType, kappa, alpha, switchingRate, freqModel);

    alpha.setParameterValue(0, 0.0);
    switchingRate.setParameterValue(0, 1.0);
    kappa.setParameterValue(0, 2.0);
    model.setupMatrix();

    System.out.println(SubstitutionModelUtils.toString(model.getQ(), dataType, 2));

    double[] matrix = new double[64];
    double[] pi = model.getFrequencyModel().getFrequencies();

    int index = 0;
    for (double distance = 0.01; distance <= 1.005; distance += 0.01) {
        model.getTransitionProbabilities(distance, matrix);

        double pChange = 0.0;
        double pNoOrHiddenChange = 0.0;
        int k = 0;
        for (int i = 0; i < 8; i++) {
            for (int j = 0; j < 8; j++) {
                if ((i % 4) != (j % 4)) {
                    pChange += matrix[k] * pi[i];
                } else {
                    pNoOrHiddenChange += matrix[k] * pi[i];
                }
                k += 1;
            }
        }

        double totalP = pChange + pNoOrHiddenChange;
        assertEquals(1.0, totalP, 1e-10);

        System.out.print(distance + "\t" + "\t" + pChange + "\t");
        if (index < 100)
            System.out.print(expectedP[index]);
        System.out.println();
        //assertEquals(expectedP[index], pChange, 1e-14);
        index += 1;
    }
}
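The flat 64-entry matrix above is read in row-major order over the 8 covarion states (4 nucleotides in 2 hidden rate classes), so the condition (i % 4) != (j % 4) counts only changes that are visible at the nucleotide level. A minimal sketch of the same indexing, checking that every row of the transition matrix sums to one at a single distance, is shown below; the chosen distance and the row-sum assertion are illustrative assumptions, not part of CovarionHKYTest.

// Minimal sketch, assuming `model` has been set up exactly as in the test above.
double[] matrix = new double[64];
model.getTransitionProbabilities(0.5, matrix);    // distance 0.5 chosen arbitrarily
for (int i = 0; i < 8; i++) {
    double rowSum = 0.0;
    for (int j = 0; j < 8; j++) {
        rowSum += matrix[i * 8 + j];              // row-major indexing: k == i * 8 + j
    }
    assertEquals(1.0, rowSum, 1e-10);             // each row of P(t) should sum to one
}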
Use of dr.oldevomodel.substmodel.FrequencyModel in project beast-mcmc by beast-dev.
Class GeneralF81Test, method testGF81:
public void testGF81() {
    for (Instance test : all) {
        double[] pi = test.getPi();
        // Parameter freqs = new Parameter.Default(pi);
        GeneralDataType gdt = new GeneralDataType(new String[] { "1", "2", "3", "4" });
        FrequencyModel f = new FrequencyModel(gdt, pi);
        GeneralF81Model generalF81Model = new GeneralF81Model(f);

        double distance = test.getDistance();
        double[] mat = new double[4 * 4];
        generalF81Model.getTransitionProbabilities(distance, mat);

        final double[] result = test.getExpectedResult();
        for (int k = 0; k < mat.length; ++k) {
            assertEquals(mat[k], result[k], 1e-10);
            // System.out.print(" " + (mat[k] - result[k]));
        }
    }
}
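Unlike the HKY test, this test passes the frequencies to FrequencyModel as a plain double[] together with a custom GeneralDataType. A minimal sketch of that constructor variant follows; the example frequencies, and the assumption that FrequencyModel stores the array as given without renormalizing, are illustrative rather than taken from GeneralF81Test.

// Minimal sketch, assuming the same imports and test context as GeneralF81Test above.
double[] pi = new double[] { 0.1, 0.2, 0.3, 0.4 };    // assumed, already normalized
GeneralDataType gdt = new GeneralDataType(new String[] { "1", "2", "3", "4" });
FrequencyModel f = new FrequencyModel(gdt, pi);

double[] stored = f.getFrequencies();
assertEquals(gdt.getStateCount(), stored.length);     // one frequency per state
for (int k = 0; k < pi.length; ++k) {
    assertEquals(pi[k], stored[k], 1e-10);            // assumes no renormalization
}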
Use of dr.oldevomodel.substmodel.FrequencyModel in project beast-mcmc by beast-dev.
Class SequenceLikelihoodTest, method computeSitePatternLikelihoods:
protected double[] computeSitePatternLikelihoods(SitePatterns patterns) {
    // Substitution model
    Parameter freqs = new Parameter.Default(alignment.getStateFrequencies());
    Parameter kappa = new Parameter.Default(HKYParser.KAPPA, 29.739445, 0, 100);
    FrequencyModel f = new FrequencyModel(Nucleotides.INSTANCE, freqs);
    HKY hky = new HKY(kappa, f);

    // Site model
    GammaSiteModel siteModel = new GammaSiteModel(hky);
    Parameter mu = new Parameter.Default(GammaSiteModelParser.MUTATION_RATE, 1.0, 0, Double.POSITIVE_INFINITY);
    siteModel.setMutationRateParameter(mu);

    // Tree likelihood
    TreeLikelihood treeLikelihood = new TreeLikelihood(patterns, treeModel, siteModel, null, null, false, false, true, false, false);
    return treeLikelihood.getPatternLogLikelihoods();
}
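The method returns one log likelihood per site pattern. A hypothetical companion helper that collapses these into a single alignment log-likelihood, by weighting each pattern by its count, might look like the sketch below; the helper name is invented here, and getPatternCount()/getPatternWeight() are assumed to be available through the PatternList interface that SitePatterns implements.

// Hypothetical helper, not part of SequenceLikelihoodTest.
protected double computeTotalLogLikelihood(SitePatterns patterns) {
    double[] patternLogLikelihoods = computeSitePatternLikelihoods(patterns);
    double totalLogLikelihood = 0.0;
    for (int i = 0; i < patterns.getPatternCount(); i++) {
        // weight each pattern's log likelihood by the number of sites showing that pattern
        totalLogLikelihood += patternLogLikelihoods[i] * patterns.getPatternWeight(i);
    }
    return totalLogLikelihood;
}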
Use of dr.oldevomodel.substmodel.FrequencyModel in project beast-mcmc by beast-dev.
Class MCMCTest, method testMCMC:
public void testMCMC() {
    // Substitution model
    // (alternative: uniform frequencies, new double[] { 0.25, 0.25, 0.25, 0.25 })
    Parameter freqs = new Parameter.Default(alignment.getStateFrequencies());
    Parameter kappa = new Parameter.Default(HKYParser.KAPPA, 1.0, 1.0E-8, Double.POSITIVE_INFINITY);
    FrequencyModel f = new FrequencyModel(Nucleotides.INSTANCE, freqs);
    HKY hky = new HKY(kappa, f);

    // Site model
    GammaSiteModel siteModel = new GammaSiteModel(hky);
    Parameter mu = new Parameter.Default(GammaSiteModelParser.MUTATION_RATE, 1.0, 0, Double.POSITIVE_INFINITY);
    siteModel.setMutationRateParameter(mu);

    // Tree likelihood
    SitePatterns patterns = new SitePatterns(alignment, null, 0, -1, 1, true);
    TreeLikelihood treeLikelihood = new TreeLikelihood(patterns, treeModel, siteModel, null, null, false, false, true, false, false);
    treeLikelihood.setId(TreeLikelihoodParser.TREE_LIKELIHOOD);

    // Operators
    OperatorSchedule schedule = new SimpleOperatorSchedule();

    MCMCOperator operator = new ScaleOperator(kappa, 0.5);
    operator.setWeight(1.0);
    schedule.addOperator(operator);

    // Parameter rootParameter = treeModel.createNodeHeightsParameter(true, false, false);
    // ScaleOperator scaleOperator = new ScaleOperator(rootParameter, 0.75, CoercionMode.COERCION_ON, 1.0);
    Parameter rootHeight = treeModel.getRootHeightParameter();
    rootHeight.setId(TREE_HEIGHT);
    operator = new ScaleOperator(rootHeight, 0.5);
    operator.setWeight(1.0);
    schedule.addOperator(operator);

    Parameter internalHeights = treeModel.createNodeHeightsParameter(false, true, false);
    operator = new UniformOperator(internalHeights, 10.0);
    schedule.addOperator(operator);

    operator = new SubtreeSlideOperator(treeModel, 1, 1, true, false, false, false, CoercionMode.COERCION_ON);
    schedule.addOperator(operator);

    operator = new ExchangeOperator(ExchangeOperator.NARROW, treeModel, 1.0);
    // operator.doOperation();
    schedule.addOperator(operator);

    operator = new ExchangeOperator(ExchangeOperator.WIDE, treeModel, 1.0);
    // operator.doOperation();
    schedule.addOperator(operator);

    operator = new WilsonBalding(treeModel, 1.0);
    // operator.doOperation();
    schedule.addOperator(operator);

    // Loggers
    ArrayLogFormatter formatter = new ArrayLogFormatter(false);

    MCLogger[] loggers = new MCLogger[2];
    loggers[0] = new MCLogger(formatter, 1000, false);
    loggers[0].add(treeLikelihood);
    loggers[0].add(rootHeight);
    loggers[0].add(kappa);

    loggers[1] = new MCLogger(new TabDelimitedFormatter(System.out), 100000, false);
    loggers[1].add(treeLikelihood);
    loggers[1].add(rootHeight);
    loggers[1].add(kappa);

    // MCMC
    MCMC mcmc = new MCMC("mcmc1");
    MCMCOptions options = new MCMCOptions(10000000);
    mcmc.setShowOperatorAnalysis(true);
    mcmc.init(options, treeLikelihood, schedule, loggers);
    mcmc.run();

    // Timing
    System.out.println(mcmc.getTimer().toString());

    // Tracer
    List<Trace> traces = formatter.getTraces();
    ArrayTraceList traceList = new ArrayTraceList("MCMCTest", traces, 0);
    for (int i = 1; i < traces.size(); i++) {
        traceList.analyseTrace(i);
    }

    // Expected values from the reference analysis:
    // <expectation name="likelihood" value="-1815.75"/>
    // <expectation name="treeModel.rootHeight" value="6.42048E-2"/>
    // <expectation name="hky.kappa" value="32.8941"/>
    TraceCorrelation likelihoodStats = traceList.getCorrelationStatistics(traceList.getTraceIndex(TreeLikelihoodParser.TREE_LIKELIHOOD));
    assertExpectation(TreeLikelihoodParser.TREE_LIKELIHOOD, likelihoodStats, -1815.75);

    TraceCorrelation treeHeightStats = traceList.getCorrelationStatistics(traceList.getTraceIndex(TREE_HEIGHT));
    assertExpectation(TREE_HEIGHT, treeHeightStats, 6.42048E-2);

    TraceCorrelation kappaStats = traceList.getCorrelationStatistics(traceList.getTraceIndex(HKYParser.KAPPA));
    assertExpectation(HKYParser.KAPPA, kappaStats, 32.8941);
}
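The final assertions rely on an assertExpectation helper defined elsewhere in the test hierarchy, which compares a trace statistic against the reference value. A minimal, hypothetical stand-in is sketched below; the method name, the use of a precomputed sample mean, and the 5% relative tolerance are illustrative assumptions, not the project's implementation.

// Hypothetical stand-in for assertExpectation, shown only to illustrate the check.
private static void assertCloseToExpectation(String name, double observedMean, double expected) {
    double relativeError = Math.abs(observedMean - expected) / Math.abs(expected);
    assertTrue(name + ": sampled mean " + observedMean + " is far from expectation " + expected,
            relativeError < 0.05);    // 5% tolerance, an arbitrary illustrative choice
}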