Use of dr.evomodel.branchmodel.HomogeneousBranchModel in project beast-mcmc by beast-dev.
From the class AncestralStateBeagleTreeLikelihoodTest, method testJointLikelihood:
public void testJointLikelihood() {
    TreeModel treeModel = new TreeModel("treeModel", tree);
    Sequence[] sequence = new Sequence[3];
    sequence[0] = new Sequence(new Taxon("0"), "A");
    sequence[1] = new Sequence(new Taxon("1"), "C");
    sequence[2] = new Sequence(new Taxon("2"), "C");
    Taxa taxa = new Taxa();
    for (Sequence s : sequence) {
        taxa.addTaxon(s.getTaxon());
    }
    SimpleAlignment alignment = new SimpleAlignment();
    for (Sequence s : sequence) {
        alignment.addSequence(s);
    }
    Parameter mu = new Parameter.Default(1, 1.0);
    Parameter kappa = new Parameter.Default(1, 1.0);
    double[] pi = {0.25, 0.25, 0.25, 0.25};
    Parameter freqs = new Parameter.Default(pi);
    FrequencyModel f = new FrequencyModel(Nucleotides.INSTANCE, freqs);
    HKY hky = new HKY(kappa, f);
    GammaSiteRateModel siteRateModel = new GammaSiteRateModel("gammaModel", mu, null, -1, null);
    siteRateModel.setSubstitutionModel(hky);
    BranchModel branchModel = new HomogeneousBranchModel(siteRateModel.getSubstitutionModel());
    BranchRateModel branchRateModel = null;
    AncestralStateBeagleTreeLikelihood treeLikelihood = new AncestralStateBeagleTreeLikelihood(
            alignment, treeModel, branchModel, siteRateModel, branchRateModel,
            null, false, PartialsRescalingScheme.DEFAULT, true, null,
            hky.getDataType(), "stateTag",
            true, // useMap = true
            false);
    double logLike = treeLikelihood.getLogLikelihood();
    StringBuffer buffer = new StringBuffer();
    // Tree.Utils.newick(treeModel, treeModel.getRoot(), false, Tree.BranchLengthType.LENGTHS_AS_TIME,
    //         null, null, new NodeAttributeProvider[]{treeLikelihood}, null, null, buffer);
    TreeUtils.newick(treeModel, treeModel.getRoot(), false, TreeUtils.BranchLengthType.LENGTHS_AS_TIME,
            null, null, new TreeTraitProvider[]{treeLikelihood}, null, buffer);
    System.out.println(buffer);
    System.out.println("t_CA(2) = " + t(false, 2.0));
    System.out.println("t_CC(1) = " + t(true, 1.0));
    double trueValue = 0.25 * t(false, 2.0) * Math.pow(t(true, 1.0), 3.0);
    assertEquals(logLike, Math.log(trueValue), 1e-6);
}
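The expected value is assembled from transition probabilities returned by the t(...) helper, which is defined elsewhere in the test class and not shown in this snippet. For orientation, here is a minimal sketch of what it plausibly computes, assuming that HKY with kappa = 1 and uniform base frequencies reduces to the Jukes-Cantor model; the method body below is a reconstruction, not the project's actual code:

// Hedged reconstruction of the t(...) helper: under JC69 (HKY with kappa = 1
// and uniform frequencies), the probability of ending in the same state after
// branch length d is 0.25 + 0.75*exp(-4d/3), and of ending in a specific
// different state is 0.25 - 0.25*exp(-4d/3).
private static double t(boolean sameState, double d) {
    double e = Math.exp(-4.0 * d / 3.0);
    return sameState ? (0.25 + 0.75 * e) : (0.25 - 0.25 * e);
}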
Use of dr.evomodel.branchmodel.HomogeneousBranchModel in project beast-mcmc by beast-dev.
From the class MarkovJumpsTest, method testMarkovJumps:
public void testMarkovJumps() {
    MathUtils.setSeed(666);
    createAlignment(sequencesTwo, Nucleotides.INSTANCE);
    try {
        // createSpecifiedTree("((human:1,chimp:1):1,gorilla:2)");
        createSpecifiedTree("(human:1,chimp:1)");
    } catch (Exception e) {
        throw new RuntimeException("Unable to parse Newick tree");
    }
    // substitution model
    Parameter freqs = new Parameter.Default(new double[]{0.40, 0.25, 0.25, 0.10});
    Parameter kappa = new Parameter.Default(HKYParser.KAPPA, 10.0, 0, 100);
    FrequencyModel f = new FrequencyModel(Nucleotides.INSTANCE, freqs);
    HKY hky = new HKY(kappa, f);
    // site model
    // double alpha = 0.5;
    Parameter mu = new Parameter.Default(GammaSiteModelParser.MUTATION_RATE, 0.5, 0, Double.POSITIVE_INFINITY);
    // Parameter pInv = new Parameter.Default("pInv", 0.5, 0, 1);
    Parameter pInv = null;
    GammaSiteRateModel siteRateModel = new GammaSiteRateModel("gammaModel", mu, null, -1, pInv);
    siteRateModel.setSubstitutionModel(hky);
    // tree likelihood
    SitePatterns patterns = new SitePatterns(alignment, null, 0, -1, 1, true);
    BranchModel branchModel = new HomogeneousBranchModel(siteRateModel.getSubstitutionModel());
    BranchRateModel branchRateModel = null;
    MarkovJumpsBeagleTreeLikelihood mjTreeLikelihood = new MarkovJumpsBeagleTreeLikelihood(
            patterns, treeModel, branchModel, siteRateModel, branchRateModel,
            null, false, PartialsRescalingScheme.AUTO, true, null,
            hky.getDataType(), "stateTag",
            false, // use MAP
            true,  // return ML
            false, // use uniformization
            false, 1000);
    int nRegisters = registerValues.length;
    int nSim = 10000;
    for (int i = 0; i < nRegisters; i++) {
        Parameter registerParameter = new Parameter.Default(registerValues[i]);
        registerParameter.setId(registerTages[i]);
        mjTreeLikelihood.addRegister(registerParameter, registerTypes[i], registerScales[i]);
    }
    double logLikelihood = mjTreeLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    double[] averages = new double[nRegisters];
    for (int i = 0; i < nSim; i++) {
        for (int r = 0; r < nRegisters; r++) {
            double[][] values = mjTreeLikelihood.getMarkovJumpsForRegister(treeModel, r);
            for (double[] value : values) {
                averages[r] += value[0];
            }
        }
        mjTreeLikelihood.makeDirty();
    }
    for (int r = 0; r < nRegisters; r++) {
        averages[r] /= (double) nSim;
        System.out.print(" " + averages[r]);
    }
    System.out.println("");
    assertEquals(valuesFromR, averages, 1E-2);
}
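The register fixtures used above (registerValues, registerTages, registerTypes, registerScales) are fields of the test class and are not shown in this snippet. As a hedged sketch of what a single register could look like, the following builds a 4x4 mask that counts every off-diagonal nucleotide transition; the concrete values, the "allJumps" tag, and the MarkovJumpsType.COUNTS constant are illustrative assumptions, not the actual fixture:

// Hypothetical single register: count all off-diagonal transitions of the
// 4x4 nucleotide rate matrix (a register is a 16-element mask).
double[] countAllJumps = new double[16];
for (int i = 0; i < 4; i++) {
    for (int j = 0; j < 4; j++) {
        countAllJumps[i * 4 + j] = (i == j) ? 0.0 : 1.0; // 1 = count this transition
    }
}
Parameter register = new Parameter.Default(countAllJumps);
register.setId("allJumps"); // assumed tag, analogous to registerTages[i]
mjTreeLikelihood.addRegister(register, MarkovJumpsType.COUNTS, false); // assumed: do not scale by branch time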
Use of dr.evomodel.branchmodel.HomogeneousBranchModel in project beast-mcmc by beast-dev.
From the class BeagleBranchLikelihood, method main:
// ////////////
// ---TEST---//
// ////////////
public static void main(String[] args) {
    try {
        MathUtils.setSeed(666);
        int sequenceLength = 1000;
        ArrayList<Partition> partitionsList = new ArrayList<Partition>();
        // create tree
        NewickImporter importer = new NewickImporter(
                "((SimSeq1:22.0,SimSeq2:22.0):12.0,(SimSeq3:23.1,SimSeq4:23.1):10.899999999999999);");
        Tree tree = importer.importTree(null);
        TreeModel treeModel = new TreeModel(tree);
        // create Frequency Model
        Parameter freqs = new Parameter.Default(new double[]{0.25, 0.25, 0.25, 0.25});
        FrequencyModel freqModel = new FrequencyModel(Nucleotides.INSTANCE, freqs);
        // create branch model
        Parameter kappa1 = new Parameter.Default(1, 1);
        HKY hky1 = new HKY(kappa1, freqModel);
        BranchModel homogeneousBranchModel = new HomogeneousBranchModel(hky1);
        List<SubstitutionModel> substitutionModels = new ArrayList<SubstitutionModel>();
        substitutionModels.add(hky1);
        List<FrequencyModel> freqModels = new ArrayList<FrequencyModel>();
        freqModels.add(freqModel);
        // create branch rate model
        Parameter rate = new Parameter.Default(1, 1.000);
        BranchRateModel branchRateModel = new StrictClockBranchRates(rate);
        // create site model
        GammaSiteRateModel siteRateModel = new GammaSiteRateModel("siteModel");
        // create partition
        Partition partition1 = new Partition(
                treeModel,
                homogeneousBranchModel,
                siteRateModel,
                branchRateModel,
                freqModel,
                0,                  // from
                sequenceLength - 1, // to
                1);                 // every
        partitionsList.add(partition1);
        // feed to sequence simulator and generate data
        BeagleSequenceSimulator simulator = new BeagleSequenceSimulator(partitionsList);
        Alignment alignment = simulator.simulate(false, false);
        System.out.println(alignment);
        BeagleTreeLikelihood btl = new BeagleTreeLikelihood(alignment, treeModel, homogeneousBranchModel,
                siteRateModel, branchRateModel, null, false, PartialsRescalingScheme.DEFAULT, true);
        System.out.println("BTL(homogeneous) = " + btl.getLogLikelihood());
        BeagleBranchLikelihood bbl = new BeagleBranchLikelihood(alignment, treeModel, homogeneousBranchModel,
                siteRateModel, freqModel, branchRateModel);
        int branchIndex = 4;
        System.out.println(bbl.getBranchLogLikelihood(branchIndex));
        bbl.finalizeBeagle();
    } catch (Exception e) {
        e.printStackTrace();
        System.exit(-1);
    } catch (Throwable e) {
        e.printStackTrace();
        System.exit(-1);
    }
    // END: try-catch block
}
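A pattern worth noting across these examples: HomogeneousBranchModel simply wraps a single SubstitutionModel and applies it to every branch of the tree. Two constructor forms appear in this listing; a minimal side-by-side sketch follows (that the one-argument form takes its root frequencies from the substitution model itself is an assumption based on the usage here):

// One substitution model for all branches; root frequencies presumably
// come from hky1's own FrequencyModel.
BranchModel simple = new HomogeneousBranchModel(hky1);
// Same model, with explicitly supplied root frequencies (the form used in
// the DataLikelihoodTester example further below).
BranchModel withRootFreqs = new HomogeneousBranchModel(hky1, freqModel);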
Use of dr.evomodel.branchmodel.HomogeneousBranchModel in project beast-mcmc by beast-dev.
From the class LineageSpecificBranchModel, method main:
public static void main(String[] args) {
    try {
        // the seed of the BEAST
        MathUtils.setSeed(666);
        // create tree
        NewickImporter importer = new NewickImporter(
                "(SimSeq1:73.7468,(SimSeq2:25.256989999999995,SimSeq3:45.256989999999995):18.48981);");
        TreeModel tree = new TreeModel(importer.importTree(null));
        // create site model
        GammaSiteRateModel siteRateModel = new GammaSiteRateModel("siteModel");
        // create branch rate model
        BranchRateModel branchRateModel = new DefaultBranchRateModel();
        int sequenceLength = 10;
        ArrayList<Partition> partitionsList = new ArrayList<Partition>();
        // create Frequency Model: 61 codon frequencies (universal code), each ~1/61;
        // equivalent to the original 61-element literal array
        double[] codonFreqs = new double[61];
        Arrays.fill(codonFreqs, 0.01639344);
        codonFreqs[0] = 0.0163936;
        Parameter freqs = new Parameter.Default(codonFreqs);
        FrequencyModel freqModel = new FrequencyModel(Codons.UNIVERSAL, freqs);
        // create substitution model
        Parameter alpha = new Parameter.Default(1, 10);
        Parameter beta = new Parameter.Default(1, 5);
        MG94CodonModel mg94 = new MG94CodonModel(Codons.UNIVERSAL, alpha, beta, freqModel);
        HomogeneousBranchModel substitutionModel = new HomogeneousBranchModel(mg94);
        // create partition
        Partition partition1 = new Partition(
                tree,
                substitutionModel,
                siteRateModel,
                branchRateModel,
                freqModel,
                0,                  // from
                sequenceLength - 1, // to
                1);                 // every
        partitionsList.add(partition1);
        // feed to sequence simulator and generate data
        BeagleSequenceSimulator simulator = new BeagleSequenceSimulator(partitionsList);
        Alignment alignment = simulator.simulate(false, false);
        ConvertAlignment convert = new ConvertAlignment(Nucleotides.INSTANCE, GeneticCode.UNIVERSAL, alignment);
        List<SubstitutionModel> substModels = new ArrayList<SubstitutionModel>();
        for (int i = 0; i < 2; i++) {
            // alpha = new Parameter.Default(1, 10);
            // beta = new Parameter.Default(1, 5);
            // mg94 = new MG94CodonModel(Codons.UNIVERSAL, alpha, beta, freqModel);
            substModels.add(mg94);
        }
        Parameter uCategories = new Parameter.Default(2, 0);
        // CountableBranchCategoryProvider provider = new CountableBranchCategoryProvider.IndependentBranchCategoryModel(tree, uCategories);
        LineageSpecificBranchModel branchSpecific = new LineageSpecificBranchModel(
                tree, /* provider, */ freqModel, substModels, uCategories);
        BeagleTreeLikelihood like = new BeagleTreeLikelihood(
                convert, tree, branchSpecific, siteRateModel, branchRateModel,
                null, false, PartialsRescalingScheme.DEFAULT, true);
        BeagleTreeLikelihood gold = new BeagleTreeLikelihood(
                convert, tree, substitutionModel, siteRateModel, branchRateModel,
                null, false, PartialsRescalingScheme.DEFAULT, true);
        System.out.println("likelihood (gold) = " + gold.getLogLikelihood());
        System.out.println("likelihood = " + like.getLogLikelihood());
    } catch (Exception e) {
        e.printStackTrace();
    }
}
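Since both entries of substModels reference the same MG94 instance, the lineage-specific model is effectively homogeneous, so like should reproduce gold. A hedged sanity check one might append just before the catch block (the tolerance is an assumption):

// Both categories share one MG94 model, so the two likelihoods should match.
double delta = Math.abs(gold.getLogLikelihood() - like.getLogLikelihood());
if (delta > 1e-10) { // assumed tolerance
    throw new AssertionError("lineage-specific model with identical categories should match the homogeneous model");
}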
Use of dr.evomodel.branchmodel.HomogeneousBranchModel in project beast-mcmc by beast-dev.
From the class DataLikelihoodTester, method main:
public static void main(String[] args) {
    // turn off logging to avoid screen noise...
    Logger logger = Logger.getLogger("dr");
    logger.setUseParentHandlers(false);
    SimpleAlignment alignment = createAlignment(sequences, Nucleotides.INSTANCE);
    TreeModel treeModel;
    try {
        treeModel = createSpecifiedTree("((human:0.1,chimp:0.1):0.1,gorilla:0.2)");
    } catch (Exception e) {
        throw new RuntimeException("Unable to parse Newick tree");
    }
    System.out.print("\nTest BeagleTreeLikelihood (kappa = 1): ");
    // substitution model
    Parameter freqs = new Parameter.Default(new double[]{0.25, 0.25, 0.25, 0.25});
    Parameter kappa = new Parameter.Default(HKYParser.KAPPA, 1.0, 0, 100);
    FrequencyModel f = new FrequencyModel(Nucleotides.INSTANCE, freqs);
    HKY hky = new HKY(kappa, f);
    // site model
    double alpha = 0.5;
    GammaSiteRateModel siteRateModel = new GammaSiteRateModel("gammaModel", alpha, 4);
    // GammaSiteRateModel siteRateModel = new GammaSiteRateModel("siteRateModel");
    siteRateModel.setSubstitutionModel(hky);
    Parameter mu = new Parameter.Default(GammaSiteModelParser.SUBSTITUTION_RATE, 1.0, 0, Double.POSITIVE_INFINITY);
    siteRateModel.setRelativeRateParameter(mu);
    FrequencyModel f2 = new FrequencyModel(Nucleotides.INSTANCE, freqs);
    Parameter kappa2 = new Parameter.Default(HKYParser.KAPPA, 10.0, 0, 100);
    HKY hky2 = new HKY(kappa2, f2);
    GammaSiteRateModel siteRateModel2 = new GammaSiteRateModel("gammaModel", alpha, 4);
    siteRateModel2.setSubstitutionModel(hky2);
    siteRateModel2.setRelativeRateParameter(mu);
    // tree likelihood
    SitePatterns patterns = new SitePatterns(alignment, null, 0, -1, 1, true);
    BranchModel branchModel = new HomogeneousBranchModel(
            siteRateModel.getSubstitutionModel(), siteRateModel.getSubstitutionModel().getFrequencyModel());
    BranchModel branchModel2 = new HomogeneousBranchModel(
            siteRateModel2.getSubstitutionModel(), siteRateModel2.getSubstitutionModel().getFrequencyModel());
    BranchRateModel branchRateModel = new DefaultBranchRateModel();
    BeagleTreeLikelihood treeLikelihood = new BeagleTreeLikelihood(patterns, treeModel, branchModel,
            siteRateModel, branchRateModel, null, false, PartialsRescalingScheme.AUTO, true);
    double logLikelihood = treeLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    System.out.print("\nTest BeagleDataLikelihoodDelegate (kappa = 1): ");
    BeagleDataLikelihoodDelegate dataLikelihoodDelegate = new BeagleDataLikelihoodDelegate(
            treeModel, patterns, branchModel, siteRateModel, false, PartialsRescalingScheme.NONE, false);
    TreeDataLikelihood treeDataLikelihood = new TreeDataLikelihood(dataLikelihoodDelegate, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    hky.setKappa(5.0);
    System.out.print("\nTest BeagleDataLikelihoodDelegate (kappa = 5): ");
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    System.out.print("\nTest BeagleDataLikelihoodDelegate (kappa = 10): ");
    dataLikelihoodDelegate = new BeagleDataLikelihoodDelegate(
            treeModel, patterns, branchModel2, siteRateModel2, false, PartialsRescalingScheme.NONE, false);
    treeDataLikelihood = new TreeDataLikelihood(dataLikelihoodDelegate, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    hky2.setKappa(11.0);
    System.out.print("\nTest BeagleDataLikelihoodDelegate (kappa = 11): ");
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    hky.setKappa(1.0);
    hky2.setKappa(10.0);
    MultiPartitionDataLikelihoodDelegate multiPartitionDataLikelihoodDelegate;
    System.out.print("\nTest MultiPartitionDataLikelihoodDelegate 1 partition (kappa = 1):");
    multiPartitionDataLikelihoodDelegate = new MultiPartitionDataLikelihoodDelegate(
            treeModel,
            Collections.singletonList((PatternList) patterns),
            Collections.singletonList((BranchModel) branchModel),
            Collections.singletonList((SiteRateModel) siteRateModel),
            true, PartialsRescalingScheme.NONE, false);
    treeDataLikelihood = new TreeDataLikelihood(multiPartitionDataLikelihoodDelegate, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    hky.setKappa(5.0);
    System.out.print("\nTest MultiPartitionDataLikelihoodDelegate 1 partition (kappa = 5):");
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    hky.setKappa(1.0);
    System.out.print("\nTest MultiPartitionDataLikelihoodDelegate 1 partition (kappa = 10):");
    multiPartitionDataLikelihoodDelegate = new MultiPartitionDataLikelihoodDelegate(
            treeModel,
            Collections.singletonList((PatternList) patterns),
            Collections.singletonList((BranchModel) branchModel2),
            Collections.singletonList((SiteRateModel) siteRateModel2),
            true, PartialsRescalingScheme.NONE, false);
    treeDataLikelihood = new TreeDataLikelihood(multiPartitionDataLikelihoodDelegate, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    System.out.print("\nTest MultiPartitionDataLikelihoodDelegate 2 partitions (kappa = 1, 10): ");
    List<PatternList> patternLists = new ArrayList<PatternList>();
    patternLists.add(patterns);
    patternLists.add(patterns);
    List<SiteRateModel> siteRateModels = new ArrayList<SiteRateModel>();
    siteRateModels.add(siteRateModel);
    siteRateModels.add(siteRateModel2);
    List<BranchModel> branchModels = new ArrayList<BranchModel>();
    branchModels.add(branchModel);
    branchModels.add(branchModel2);
    multiPartitionDataLikelihoodDelegate = new MultiPartitionDataLikelihoodDelegate(
            treeModel, patternLists, branchModels, siteRateModels, true, PartialsRescalingScheme.NONE, false);
    treeDataLikelihood = new TreeDataLikelihood(multiPartitionDataLikelihoodDelegate, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + " (NOT OK: this is 2x the logLikelihood of the 2nd partition)\n\n");
    System.exit(0);
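    // NOTE: the System.exit(0) above terminates the JVM, so the additional
    // tests below are unreachable as written; comment it out to run them.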
    // START ADDITIONAL TEST #1 - Guy Baele
    System.out.println("-- Test #1 SiteRateModels -- ");
    // alpha in partition 1 reject followed by alpha in partition 2 reject
    System.out.print("Adjust alpha in partition 1: ");
    siteRateModel.setAlpha(0.4);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    System.out.print("Return alpha in partition 1 to original value: ");
    siteRateModel.setAlpha(0.5);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + " (i.e. reject: OK)\n");
    System.out.print("Adjust alpha in partition 2: ");
    siteRateModel2.setAlpha(0.35);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    System.out.print("Return alpha in partition 2 to original value: ");
    siteRateModel2.setAlpha(0.5);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + " (i.e. reject: OK)\n");
    // alpha in partition 1 accept followed by alpha in partition 2 accept
    System.out.print("Adjust alpha in partition 1: ");
    siteRateModel.setAlpha(0.4);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    System.out.print("Adjust alpha in partition 2: ");
    siteRateModel2.setAlpha(0.35);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + " (NOT OK: same logLikelihood as only setting alpha in partition 2)");
    System.out.print("Return alpha in partition 1 to original value: ");
    siteRateModel.setAlpha(0.5);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + " (NOT OK: alpha in partition 2 has not been returned to original value yet)");
    System.out.print("Return alpha in partition 2 to original value: ");
    siteRateModel2.setAlpha(0.5);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + "\n");
    // adjusting alphas in both partitions without explicitly calling getLogLikelihood() in between
    System.out.print("Adjust both alphas in partitions 1 and 2: ");
    siteRateModel.setAlpha(0.4);
    siteRateModel2.setAlpha(0.35);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    System.out.print("Return alpha in partition 2 to original value: ");
    siteRateModel2.setAlpha(0.5);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + " (NOT OK: alpha in partition 1 has not been returned to original value yet)");
    System.out.print("Return alpha in partition 1 to original value: ");
    siteRateModel.setAlpha(0.5);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + "\n\n");
    // END ADDITIONAL TEST - Guy Baele

    // START ADDITIONAL TEST #2 - Guy Baele
    System.out.println("-- Test #2 SiteRateModels -- ");
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    // 1 siteRateModel shared across 2 partitions
    siteRateModels = new ArrayList<SiteRateModel>();
    siteRateModels.add(siteRateModel);
    multiPartitionDataLikelihoodDelegate = new MultiPartitionDataLikelihoodDelegate(
            treeModel, patternLists, branchModels, siteRateModels, true, PartialsRescalingScheme.NONE, false);
    treeDataLikelihood = new TreeDataLikelihood(multiPartitionDataLikelihoodDelegate, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + "\n");
    System.out.print("Adjust alpha in shared siteRateModel: ");
    siteRateModel.setAlpha(0.4);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + " (NOT OK: same logLikelihood as only adjusted alpha for partition 1)");
    siteRateModel.setAlpha(0.5);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + "\n\n");
    // END ADDITIONAL TEST - Guy Baele
    // START ADDITIONAL TEST #3 - Guy Baele
    System.out.println("-- Test #3 SiteRateModels -- ");
    siteRateModel = new GammaSiteRateModel("gammaModel");
    siteRateModel.setSubstitutionModel(hky);
    siteRateModel.setRelativeRateParameter(mu);
    siteRateModel2 = new GammaSiteRateModel("gammaModel2");
    siteRateModel2.setSubstitutionModel(hky2);
    siteRateModel2.setRelativeRateParameter(mu);
    siteRateModels = new ArrayList<SiteRateModel>();
    siteRateModels.add(siteRateModel);
    siteRateModels.add(siteRateModel2);
    multiPartitionDataLikelihoodDelegate = new MultiPartitionDataLikelihoodDelegate(
            treeModel, patternLists, branchModels, siteRateModels, true, PartialsRescalingScheme.NONE, false);
    treeDataLikelihood = new TreeDataLikelihood(multiPartitionDataLikelihoodDelegate, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + "\n");
    System.out.print("Adjust kappa in partition 1: ");
    hky.setKappa(5.0);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + " (NOT OK: logLikelihood has not changed?)");
    System.out.print("Return kappa in partition 1 to original value: ");
    hky.setKappa(1.0);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + "\n");
    System.out.print("Adjust kappa in partition 2: ");
    hky2.setKappa(11.0);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    System.out.print("Return kappa in partition 2 to original value: ");
    hky2.setKappa(10.0);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + " (i.e. reject: OK)\n\n");
    // END ADDITIONAL TEST - Guy Baele

    // START ADDITIONAL TEST #4 - Guy Baele
    System.out.println("-- Test #4 SiteRateModels -- ");
    SimpleAlignment secondAlignment = createAlignment(moreSequences, Nucleotides.INSTANCE);
    SitePatterns morePatterns = new SitePatterns(secondAlignment, null, 0, -1, 1, true);
    BeagleDataLikelihoodDelegate dataLikelihoodDelegateOne = new BeagleDataLikelihoodDelegate(
            treeModel, patterns, branchModel, siteRateModel, false, PartialsRescalingScheme.NONE, false);
    TreeDataLikelihood treeDataLikelihoodOne = new TreeDataLikelihood(dataLikelihoodDelegateOne, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihoodOne.getLogLikelihood();
    System.out.println("\nBeagleDataLikelihoodDelegate logLikelihood partition 1 (kappa = 1) = " + logLikelihood);
    hky.setKappa(10.0);
    logLikelihood = treeDataLikelihoodOne.getLogLikelihood();
    System.out.println("BeagleDataLikelihoodDelegate logLikelihood partition 1 (kappa = 10) = " + logLikelihood);
    hky.setKappa(1.0);
    BeagleDataLikelihoodDelegate dataLikelihoodDelegateTwo = new BeagleDataLikelihoodDelegate(
            treeModel, morePatterns, branchModel2, siteRateModel2, false, PartialsRescalingScheme.NONE, false);
    TreeDataLikelihood treeDataLikelihoodTwo = new TreeDataLikelihood(dataLikelihoodDelegateTwo, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihoodTwo.getLogLikelihood();
    System.out.println("BeagleDataLikelihoodDelegate logLikelihood partition 2 (kappa = 10) = " + logLikelihood + "\n");
    multiPartitionDataLikelihoodDelegate = new MultiPartitionDataLikelihoodDelegate(
            treeModel,
            Collections.singletonList((PatternList) patterns),
            Collections.singletonList((BranchModel) branchModel),
            Collections.singletonList((SiteRateModel) siteRateModel),
            true, PartialsRescalingScheme.NONE, false);
    treeDataLikelihood = new TreeDataLikelihood(multiPartitionDataLikelihoodDelegate, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.print("Test MultiPartitionDataLikelihoodDelegate 1st partition (kappa = 1):");
    System.out.println("logLikelihood = " + logLikelihood);
    hky.setKappa(10.0);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.print("Test MultiPartitionDataLikelihoodDelegate 1st partition (kappa = 10):");
    System.out.println("logLikelihood = " + logLikelihood);
    hky.setKappa(1.0);
    multiPartitionDataLikelihoodDelegate = new MultiPartitionDataLikelihoodDelegate(
            treeModel,
            Collections.singletonList((PatternList) morePatterns),
            Collections.singletonList((BranchModel) branchModel2),
            Collections.singletonList((SiteRateModel) siteRateModel2),
            true, PartialsRescalingScheme.NONE, false);
    treeDataLikelihood = new TreeDataLikelihood(multiPartitionDataLikelihoodDelegate, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.print("Test MultiPartitionDataLikelihoodDelegate 2nd partition (kappa = 10):");
    System.out.println("logLikelihood = " + logLikelihood + "\n");
    patternLists = new ArrayList<PatternList>();
    patternLists.add(patterns);
    patternLists.add(morePatterns);
    multiPartitionDataLikelihoodDelegate = new MultiPartitionDataLikelihoodDelegate(
            treeModel, patternLists, branchModels, siteRateModels, true, PartialsRescalingScheme.NONE, false);
    treeDataLikelihood = new TreeDataLikelihood(multiPartitionDataLikelihoodDelegate, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.print("Test MultiPartitionDataLikelihoodDelegate 2 partitions (kappa = 1, 10): ");
    System.out.println("logLikelihood = " + logLikelihood
            + " (NOT OK: should be the sum of both separate logLikelihoods)\nKappa value of partition 2 is used to compute logLikelihood for both partitions?");
    // END ADDITIONAL TEST - Guy Baele
}
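The closing comparison states the invariant this tester probes: a two-partition delegate's log likelihood should equal the sum of the two single-partition values. A hedged sketch of that check, reusing the single-partition likelihood objects built in Test #4 (the tolerance is an assumption):

// Expected invariant: multi-partition logL == sum of per-partition logLs.
double expected = treeDataLikelihoodOne.getLogLikelihood() + treeDataLikelihoodTwo.getLogLikelihood();
double actual = treeDataLikelihood.getLogLikelihood();
System.out.println("sum of separate partitions = " + expected + ", multi-partition = " + actual
        + (Math.abs(expected - actual) < 1e-6 ? " (OK)" : " (NOT OK)"));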