use of dr.evomodel.branchratemodel.DefaultBranchRateModel in project beast-mcmc by beast-dev.
the class LineageSpecificBranchModel method main.
public static void main(String[] args) {
try {
// the seed of the BEAST
MathUtils.setSeed(666);
// create tree
NewickImporter importer = new NewickImporter("(SimSeq1:73.7468,(SimSeq2:25.256989999999995,SimSeq3:45.256989999999995):18.48981);");
TreeModel tree = new TreeModel(importer.importTree(null));
// create site model
GammaSiteRateModel siteRateModel = new GammaSiteRateModel("siteModel");
// create branch rate model
BranchRateModel branchRateModel = new DefaultBranchRateModel();
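// DefaultBranchRateModel is the strict-clock default: it reports the same rate (1.0) for every
// branch, so the branch lengths in the tree are used unchanged by the simulator below.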
int sequenceLength = 10;
ArrayList<Partition> partitionsList = new ArrayList<Partition>();
// create Frequency Model
Parameter freqs = new Parameter.Default(new double[] {
0.0163936, 0.01639344, 0.01639344, 0.01639344, 0.01639344, 0.01639344, 0.01639344, 0.01639344,
0.01639344, 0.01639344, 0.01639344, 0.01639344, 0.01639344, 0.01639344, 0.01639344, 0.01639344,
0.01639344, 0.01639344, 0.01639344, 0.01639344, 0.01639344, 0.01639344, 0.01639344, 0.01639344,
0.01639344, 0.01639344, 0.01639344, 0.01639344, 0.01639344, 0.01639344, 0.01639344, 0.01639344,
0.01639344, 0.01639344, 0.01639344, 0.01639344, 0.01639344, 0.01639344, 0.01639344, 0.01639344,
0.01639344, 0.01639344, 0.01639344, 0.01639344, 0.01639344, 0.01639344, 0.01639344, 0.01639344,
0.01639344, 0.01639344, 0.01639344, 0.01639344, 0.01639344, 0.01639344, 0.01639344, 0.01639344,
0.01639344, 0.01639344, 0.01639344, 0.01639344, 0.01639344 });
FrequencyModel freqModel = new FrequencyModel(Codons.UNIVERSAL, freqs);
// create substitution model
Parameter alpha = new Parameter.Default(1, 10);
Parameter beta = new Parameter.Default(1, 5);
MG94CodonModel mg94 = new MG94CodonModel(Codons.UNIVERSAL, alpha, beta, freqModel);
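// In the Muse-Gaut (MG94) codon model, alpha and beta conventionally denote the synonymous and
// non-synonymous rates; that is presumably what the two parameter values (10 and 5) above represent.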
HomogeneousBranchModel substitutionModel = new HomogeneousBranchModel(mg94);
// create partition
Partition partition1 = new Partition(
tree,
substitutionModel,
siteRateModel,
branchRateModel,
freqModel,
0, // from
sequenceLength - 1, // to
1 // every
);
partitionsList.add(partition1);
// feed to sequence simulator and generate data
BeagleSequenceSimulator simulator = new BeagleSequenceSimulator(partitionsList);
Alignment alignment = simulator.simulate(false, false);
ConvertAlignment convert = new ConvertAlignment(Nucleotides.INSTANCE, GeneticCode.UNIVERSAL, alignment);
List<SubstitutionModel> substModels = new ArrayList<SubstitutionModel>();
for (int i = 0; i < 2; i++) {
// alpha = new Parameter.Default(1, 10 );
// beta = new Parameter.Default(1, 5 );
// mg94 = new MG94CodonModel(Codons.UNIVERSAL, alpha, beta,
// freqModel);
substModels.add(mg94);
}
Parameter uCategories = new Parameter.Default(2, 0);
// CountableBranchCategoryProvider provider = new CountableBranchCategoryProvider.IndependentBranchCategoryModel(tree, uCategories);
LineageSpecificBranchModel branchSpecific = new LineageSpecificBranchModel(
tree,
freqModel,
substModels,
// provider,
uCategories);
BeagleTreeLikelihood like = new BeagleTreeLikelihood(
convert,
tree,
branchSpecific,
siteRateModel,
branchRateModel,
null,
false, PartialsRescalingScheme.DEFAULT, true);
BeagleTreeLikelihood gold = new BeagleTreeLikelihood(
convert,
tree,
substitutionModel,
siteRateModel,
branchRateModel,
null,
false, PartialsRescalingScheme.DEFAULT, true);
System.out.println("likelihood (gold) = " + gold.getLogLikelihood());
System.out.println("likelihood = " + like.getLogLikelihood());
} catch (Exception e) {
e.printStackTrace();
}
}
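All of the snippets on this page use DefaultBranchRateModel the same way: as a no-op strict clock that reports a rate of 1.0 for every branch, so branch lengths are consumed unchanged by whatever likelihood or simulator receives it. Below is a minimal sketch of that behaviour, using the same classes as the example above plus dr.evolution.tree.NodeRef (the constant 1.0 is what DefaultBranchRateModel is understood to return):
public static void main(String[] args) {
    try {
        NewickImporter importer = new NewickImporter("(A:1.0,(B:2.0,C:3.0):1.5);");
        TreeModel tree = new TreeModel(importer.importTree(null));
        BranchRateModel clock = new DefaultBranchRateModel();
        for (int i = 0; i < tree.getNodeCount(); i++) {
            NodeRef node = tree.getNode(i);
            if (node != tree.getRoot()) {
                // expected to print "rate = 1.0" for every branch
                System.out.println("node " + node.getNumber() + ": rate = " + clock.getBranchRate(tree, node));
            }
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
}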
use of dr.evomodel.branchratemodel.DefaultBranchRateModel in project beast-mcmc by beast-dev.
the class DataLikelihoodTester method main.
public static void main(String[] args) {
// turn off logging to avoid screen noise...
Logger logger = Logger.getLogger("dr");
logger.setUseParentHandlers(false);
SimpleAlignment alignment = createAlignment(sequences, Nucleotides.INSTANCE);
TreeModel treeModel;
try {
treeModel = createSpecifiedTree("((human:0.1,chimp:0.1):0.1,gorilla:0.2)");
} catch (Exception e) {
throw new RuntimeException("Unable to parse Newick tree");
}
System.out.print("\nTest BeagleTreeLikelihood (kappa = 1): ");
//substitutionModel
Parameter freqs = new Parameter.Default(new double[] { 0.25, 0.25, 0.25, 0.25 });
Parameter kappa = new Parameter.Default(HKYParser.KAPPA, 1.0, 0, 100);
FrequencyModel f = new FrequencyModel(Nucleotides.INSTANCE, freqs);
HKY hky = new HKY(kappa, f);
//siteModel
double alpha = 0.5;
GammaSiteRateModel siteRateModel = new GammaSiteRateModel("gammaModel", alpha, 4);
// GammaSiteRateModel siteRateModel = new GammaSiteRateModel("siteRateModel");
siteRateModel.setSubstitutionModel(hky);
Parameter mu = new Parameter.Default(GammaSiteModelParser.SUBSTITUTION_RATE, 1.0, 0, Double.POSITIVE_INFINITY);
siteRateModel.setRelativeRateParameter(mu);
FrequencyModel f2 = new FrequencyModel(Nucleotides.INSTANCE, freqs);
Parameter kappa2 = new Parameter.Default(HKYParser.KAPPA, 10.0, 0, 100);
HKY hky2 = new HKY(kappa2, f2);
GammaSiteRateModel siteRateModel2 = new GammaSiteRateModel("gammaModel", alpha, 4);
siteRateModel2.setSubstitutionModel(hky2);
siteRateModel2.setRelativeRateParameter(mu);
//treeLikelihood
SitePatterns patterns = new SitePatterns(alignment, null, 0, -1, 1, true);
BranchModel branchModel = new HomogeneousBranchModel(siteRateModel.getSubstitutionModel(), siteRateModel.getSubstitutionModel().getFrequencyModel());
BranchModel branchModel2 = new HomogeneousBranchModel(siteRateModel2.getSubstitutionModel(), siteRateModel2.getSubstitutionModel().getFrequencyModel());
BranchRateModel branchRateModel = new DefaultBranchRateModel();
BeagleTreeLikelihood treeLikelihood = new BeagleTreeLikelihood(patterns, treeModel, branchModel, siteRateModel, branchRateModel, null, false, PartialsRescalingScheme.AUTO, true);
double logLikelihood = treeLikelihood.getLogLikelihood();
System.out.println("logLikelihood = " + logLikelihood);
System.out.print("\nTest BeagleDataLikelihoodDelegate (kappa = 1): ");
BeagleDataLikelihoodDelegate dataLikelihoodDelegate = new BeagleDataLikelihoodDelegate(treeModel, patterns, branchModel, siteRateModel, false, PartialsRescalingScheme.NONE, false);
TreeDataLikelihood treeDataLikelihood = new TreeDataLikelihood(dataLikelihoodDelegate, treeModel, branchRateModel);
logLikelihood = treeDataLikelihood.getLogLikelihood();
System.out.println("logLikelihood = " + logLikelihood);
hky.setKappa(5.0);
System.out.print("\nTest BeagleDataLikelihoodDelegate (kappa = 5): ");
logLikelihood = treeDataLikelihood.getLogLikelihood();
System.out.println("logLikelihood = " + logLikelihood);
System.out.print("\nTest BeagleDataLikelihoodDelegate (kappa = 10): ");
dataLikelihoodDelegate = new BeagleDataLikelihoodDelegate(treeModel, patterns, branchModel2, siteRateModel2, false, PartialsRescalingScheme.NONE, false);
treeDataLikelihood = new TreeDataLikelihood(dataLikelihoodDelegate, treeModel, branchRateModel);
logLikelihood = treeDataLikelihood.getLogLikelihood();
System.out.println("logLikelihood = " + logLikelihood);
hky2.setKappa(11.0);
System.out.print("\nTest BeagleDataLikelihoodDelegate (kappa = 11): ");
logLikelihood = treeDataLikelihood.getLogLikelihood();
System.out.println("logLikelihood = " + logLikelihood);
hky.setKappa(1.0);
hky2.setKappa(10.0);
MultiPartitionDataLikelihoodDelegate multiPartitionDataLikelihoodDelegate;
System.out.print("\nTest MultiPartitionDataLikelihoodDelegate 1 partition (kappa = 1):");
multiPartitionDataLikelihoodDelegate = new MultiPartitionDataLikelihoodDelegate(treeModel, Collections.singletonList((PatternList) patterns), Collections.singletonList((BranchModel) branchModel), Collections.singletonList((SiteRateModel) siteRateModel), true, PartialsRescalingScheme.NONE, false);
treeDataLikelihood = new TreeDataLikelihood(multiPartitionDataLikelihoodDelegate, treeModel, branchRateModel);
logLikelihood = treeDataLikelihood.getLogLikelihood();
System.out.println("logLikelihood = " + logLikelihood);
hky.setKappa(5.0);
System.out.print("\nTest MultiPartitionDataLikelihoodDelegate 1 partition (kappa = 5):");
logLikelihood = treeDataLikelihood.getLogLikelihood();
System.out.println("logLikelihood = " + logLikelihood);
hky.setKappa(1.0);
System.out.print("\nTest MultiPartitionDataLikelihoodDelegate 1 partition (kappa = 10):");
multiPartitionDataLikelihoodDelegate = new MultiPartitionDataLikelihoodDelegate(treeModel, Collections.singletonList((PatternList) patterns), Collections.singletonList((BranchModel) branchModel2), Collections.singletonList((SiteRateModel) siteRateModel2), true, PartialsRescalingScheme.NONE, false);
treeDataLikelihood = new TreeDataLikelihood(multiPartitionDataLikelihoodDelegate, treeModel, branchRateModel);
logLikelihood = treeDataLikelihood.getLogLikelihood();
System.out.println("logLikelihood = " + logLikelihood);
System.out.print("\nTest MultiPartitionDataLikelihoodDelegate 2 partitions (kappa = 1, 10): ");
List<PatternList> patternLists = new ArrayList<PatternList>();
patternLists.add(patterns);
patternLists.add(patterns);
List<SiteRateModel> siteRateModels = new ArrayList<SiteRateModel>();
siteRateModels.add(siteRateModel);
siteRateModels.add(siteRateModel2);
List<BranchModel> branchModels = new ArrayList<BranchModel>();
branchModels.add(branchModel);
branchModels.add(branchModel2);
multiPartitionDataLikelihoodDelegate = new MultiPartitionDataLikelihoodDelegate(treeModel, patternLists, branchModels, siteRateModels, true, PartialsRescalingScheme.NONE, false);
treeDataLikelihood = new TreeDataLikelihood(multiPartitionDataLikelihoodDelegate, treeModel, branchRateModel);
logLikelihood = treeDataLikelihood.getLogLikelihood();
System.out.println("logLikelihood = " + logLikelihood + " (NOT OK: this is 2x the logLikelihood of the 2nd partition)\n\n");
System.exit(0);
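// NOTE: the additional tests below (#1-#4, Guy Baele) are unreachable while this System.exit(0)
// call remains in place; remove it to exercise them.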
//START ADDITIONAL TEST #1 - Guy Baele
System.out.println("-- Test #1 SiteRateModels -- ");
//alpha in partition 1 reject followed by alpha in partition 2 reject
System.out.print("Adjust alpha in partition 1: ");
siteRateModel.setAlpha(0.4);
logLikelihood = treeDataLikelihood.getLogLikelihood();
System.out.println("logLikelihood = " + logLikelihood);
System.out.print("Return alpha in partition 1 to original value: ");
siteRateModel.setAlpha(0.5);
logLikelihood = treeDataLikelihood.getLogLikelihood();
System.out.println("logLikelihood = " + logLikelihood + " (i.e. reject: OK)\n");
System.out.print("Adjust alpha in partition 2: ");
siteRateModel2.setAlpha(0.35);
logLikelihood = treeDataLikelihood.getLogLikelihood();
System.out.println("logLikelihood = " + logLikelihood);
System.out.print("Return alpha in partition 2 to original value: ");
siteRateModel2.setAlpha(0.5);
logLikelihood = treeDataLikelihood.getLogLikelihood();
System.out.println("logLikelihood = " + logLikelihood + " (i.e. reject: OK)\n");
//alpha in partition 1 accept followed by alpha in partition 2 accept
System.out.print("Adjust alpha in partition 1: ");
siteRateModel.setAlpha(0.4);
logLikelihood = treeDataLikelihood.getLogLikelihood();
System.out.println("logLikelihood = " + logLikelihood);
System.out.print("Adjust alpha in partition 2: ");
siteRateModel2.setAlpha(0.35);
logLikelihood = treeDataLikelihood.getLogLikelihood();
System.out.println("logLikelihood = " + logLikelihood + " (NOT OK: same logLikelihood as only setting alpha in partition 2)");
System.out.print("Return alpha in partition 1 to original value: ");
siteRateModel.setAlpha(0.5);
logLikelihood = treeDataLikelihood.getLogLikelihood();
System.out.println("logLikelihood = " + logLikelihood + " (NOT OK: alpha in partition 2 has not been returned to original value yet)");
System.out.print("Return alpha in partition 2 to original value: ");
siteRateModel2.setAlpha(0.5);
logLikelihood = treeDataLikelihood.getLogLikelihood();
System.out.println("logLikelihood = " + logLikelihood + "\n");
//adjusting alphas in both partitions without explicitly calling getLogLikelihood() in between
System.out.print("Adjust both alphas in partitions 1 and 2: ");
siteRateModel.setAlpha(0.4);
siteRateModel2.setAlpha(0.35);
logLikelihood = treeDataLikelihood.getLogLikelihood();
System.out.println("logLikelihood = " + logLikelihood);
System.out.print("Return alpha in partition 2 to original value: ");
siteRateModel2.setAlpha(0.5);
logLikelihood = treeDataLikelihood.getLogLikelihood();
System.out.println("logLikelihood = " + logLikelihood + " (NOT OK: alpha in partition 1 has not been returned to original value yet)");
System.out.print("Return alpha in partition 1 to original value: ");
siteRateModel.setAlpha(0.5);
logLikelihood = treeDataLikelihood.getLogLikelihood();
System.out.println("logLikelihood = " + logLikelihood + "\n\n");
//END ADDITIONAL TEST - Guy Baele
//START ADDITIONAL TEST #2 - Guy Baele
System.out.println("-- Test #2 SiteRateModels -- ");
logLikelihood = treeDataLikelihood.getLogLikelihood();
System.out.println("logLikelihood = " + logLikelihood);
//1 siteRateModel shared across 2 partitions
siteRateModels = new ArrayList<SiteRateModel>();
siteRateModels.add(siteRateModel);
multiPartitionDataLikelihoodDelegate = new MultiPartitionDataLikelihoodDelegate(treeModel, patternLists, branchModels, siteRateModels, true, PartialsRescalingScheme.NONE, false);
treeDataLikelihood = new TreeDataLikelihood(multiPartitionDataLikelihoodDelegate, treeModel, branchRateModel);
logLikelihood = treeDataLikelihood.getLogLikelihood();
System.out.println("logLikelihood = " + logLikelihood + "\n");
System.out.print("Adjust alpha in shared siteRateModel: ");
siteRateModel.setAlpha(0.4);
logLikelihood = treeDataLikelihood.getLogLikelihood();
System.out.println("logLikelihood = " + logLikelihood + " (NOT OK: same logLikelihood as only adjusted alpha for partition 1)");
siteRateModel.setAlpha(0.5);
logLikelihood = treeDataLikelihood.getLogLikelihood();
System.out.println("logLikelihood = " + logLikelihood + "\n\n");
//END ADDITIONAL TEST - Guy Baele
//START ADDITIONAL TEST #3 - Guy Baele
System.out.println("-- Test #3 SiteRateModels -- ");
siteRateModel = new GammaSiteRateModel("gammaModel");
siteRateModel.setSubstitutionModel(hky);
siteRateModel.setRelativeRateParameter(mu);
siteRateModel2 = new GammaSiteRateModel("gammaModel2");
siteRateModel2.setSubstitutionModel(hky2);
siteRateModel2.setRelativeRateParameter(mu);
siteRateModels = new ArrayList<SiteRateModel>();
siteRateModels.add(siteRateModel);
siteRateModels.add(siteRateModel2);
multiPartitionDataLikelihoodDelegate = new MultiPartitionDataLikelihoodDelegate(treeModel, patternLists, branchModels, siteRateModels, true, PartialsRescalingScheme.NONE, false);
treeDataLikelihood = new TreeDataLikelihood(multiPartitionDataLikelihoodDelegate, treeModel, branchRateModel);
logLikelihood = treeDataLikelihood.getLogLikelihood();
System.out.println("logLikelihood = " + logLikelihood + "\n");
System.out.print("Adjust kappa in partition 1: ");
hky.setKappa(5.0);
logLikelihood = treeDataLikelihood.getLogLikelihood();
System.out.println("logLikelihood = " + logLikelihood + " (NOT OK: logLikelihood has not changed?)");
System.out.print("Return kappa in partition 1 to original value: ");
hky.setKappa(1.0);
logLikelihood = treeDataLikelihood.getLogLikelihood();
System.out.println("logLikelihood = " + logLikelihood + "\n");
System.out.print("Adjust kappa in partition 2: ");
hky2.setKappa(11.0);
logLikelihood = treeDataLikelihood.getLogLikelihood();
System.out.println("logLikelihood = " + logLikelihood);
System.out.print("Return kappa in partition 2 to original value: ");
hky2.setKappa(10.0);
logLikelihood = treeDataLikelihood.getLogLikelihood();
System.out.println("logLikelihood = " + logLikelihood + " (i.e. reject: OK)\n\n");
//END ADDITIONAL TEST - Guy Baele
//START ADDITIONAL TEST #4 - Guy Baele
System.out.println("-- Test #4 SiteRateModels -- ");
SimpleAlignment secondAlignment = createAlignment(moreSequences, Nucleotides.INSTANCE);
SitePatterns morePatterns = new SitePatterns(secondAlignment, null, 0, -1, 1, true);
BeagleDataLikelihoodDelegate dataLikelihoodDelegateOne = new BeagleDataLikelihoodDelegate(treeModel, patterns, branchModel, siteRateModel, false, PartialsRescalingScheme.NONE, false);
TreeDataLikelihood treeDataLikelihoodOne = new TreeDataLikelihood(dataLikelihoodDelegateOne, treeModel, branchRateModel);
logLikelihood = treeDataLikelihoodOne.getLogLikelihood();
System.out.println("\nBeagleDataLikelihoodDelegate logLikelihood partition 1 (kappa = 1) = " + logLikelihood);
hky.setKappa(10.0);
logLikelihood = treeDataLikelihoodOne.getLogLikelihood();
System.out.println("BeagleDataLikelihoodDelegate logLikelihood partition 1 (kappa = 10) = " + logLikelihood);
hky.setKappa(1.0);
BeagleDataLikelihoodDelegate dataLikelihoodDelegateTwo = new BeagleDataLikelihoodDelegate(treeModel, morePatterns, branchModel2, siteRateModel2, false, PartialsRescalingScheme.NONE, false);
TreeDataLikelihood treeDataLikelihoodTwo = new TreeDataLikelihood(dataLikelihoodDelegateTwo, treeModel, branchRateModel);
logLikelihood = treeDataLikelihoodTwo.getLogLikelihood();
System.out.println("BeagleDataLikelihoodDelegate logLikelihood partition 2 (kappa = 10) = " + logLikelihood + "\n");
multiPartitionDataLikelihoodDelegate = new MultiPartitionDataLikelihoodDelegate(treeModel, Collections.singletonList((PatternList) patterns), Collections.singletonList((BranchModel) branchModel), Collections.singletonList((SiteRateModel) siteRateModel), true, PartialsRescalingScheme.NONE, false);
treeDataLikelihood = new TreeDataLikelihood(multiPartitionDataLikelihoodDelegate, treeModel, branchRateModel);
logLikelihood = treeDataLikelihood.getLogLikelihood();
System.out.print("Test MultiPartitionDataLikelihoodDelegate 1st partition (kappa = 1):");
System.out.println("logLikelihood = " + logLikelihood);
hky.setKappa(10.0);
logLikelihood = treeDataLikelihood.getLogLikelihood();
System.out.print("Test MultiPartitionDataLikelihoodDelegate 1st partition (kappa = 10):");
System.out.println("logLikelihood = " + logLikelihood);
hky.setKappa(1.0);
multiPartitionDataLikelihoodDelegate = new MultiPartitionDataLikelihoodDelegate(treeModel, Collections.singletonList((PatternList) morePatterns), Collections.singletonList((BranchModel) branchModel2), Collections.singletonList((SiteRateModel) siteRateModel2), true, PartialsRescalingScheme.NONE, false);
treeDataLikelihood = new TreeDataLikelihood(multiPartitionDataLikelihoodDelegate, treeModel, branchRateModel);
logLikelihood = treeDataLikelihood.getLogLikelihood();
System.out.print("Test MultiPartitionDataLikelihoodDelegate 2nd partition (kappa = 10):");
System.out.println("logLikelihood = " + logLikelihood + "\n");
patternLists = new ArrayList<PatternList>();
patternLists.add(patterns);
patternLists.add(morePatterns);
multiPartitionDataLikelihoodDelegate = new MultiPartitionDataLikelihoodDelegate(treeModel, patternLists, branchModels, siteRateModels, true, PartialsRescalingScheme.NONE, false);
treeDataLikelihood = new TreeDataLikelihood(multiPartitionDataLikelihoodDelegate, treeModel, branchRateModel);
logLikelihood = treeDataLikelihood.getLogLikelihood();
System.out.print("Test MultiPartitionDataLikelihoodDelegate 2 partitions (kappa = 1, 10): ");
System.out.println("logLikelihood = " + logLikelihood + " (NOT OK: should be the sum of both separate logLikelihoods)\nKappa value of partition 2 is used to compute logLikelihood for both partitions?");
//END ADDITIONAL TEST - Guy Baele
}
use of dr.evomodel.branchratemodel.DefaultBranchRateModel in project beast-mcmc by beast-dev.
the class SequenceSimulator method main.
public static void main(String[] args) {
try {
int nReplications = 10;
// create tree
NewickImporter importer = new NewickImporter("((A:1.0,B:1.0)AB:1.0,(C:1.0,D:1.0)CD:1.0)ABCD;");
Tree tree = importer.importTree(null);
// create site model
SiteModel siteModel = getDefaultSiteModel();
// create branch rate model
BranchRateModel branchRateModel = new DefaultBranchRateModel();
// feed to sequence simulator and generate leaves
SequenceSimulator treeSimulator = new SequenceSimulator(tree, siteModel, branchRateModel, nReplications);
Sequence ancestralSequence = new Sequence();
ancestralSequence.appendSequenceString("TCAGGTCAAG");
treeSimulator.setAncestralSequence(ancestralSequence);
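// "TCAGGTCAAG" is 10 bases, matching nReplications above; presumably the ancestral sequence
// must supply one character per simulated site.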
System.out.println(treeSimulator.simulate().toString());
} catch (Exception e) {
e.printStackTrace();
}
//END: try-catch block
}
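If no ancestral sequence is set, the root state is presumably drawn from the substitution model's equilibrium frequencies (the usual convention for such simulators, assumed here rather than verified), so the same simulation reduces to this sketch:
SequenceSimulator simulator = new SequenceSimulator(tree, siteModel, branchRateModel, nReplications);
System.out.println(simulator.simulate().toString());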
use of dr.evomodel.branchratemodel.DefaultBranchRateModel in project beast-mcmc by beast-dev.
the class MicrosatelliteSimulatorParser method parseXMLObject.
public Object parseXMLObject(XMLObject xo) throws XMLParseException {
Microsatellite msatDataType = (Microsatellite) xo.getChild(Microsatellite.class);
Taxa taxa = (Taxa) xo.getChild(Taxa.class);
Tree tree = (Tree) xo.getChild(Tree.class);
MicrosatelliteModel msatModel = (MicrosatelliteModel) xo.getChild(MicrosatelliteModel.class);
BranchRateModel brModel = (BranchRateModel) xo.getChild(BranchRateModel.class);
if (brModel == null) {
brModel = new DefaultBranchRateModel();
}
MicrosatelliteSimulator msatSim = new MicrosatelliteSimulator(msatDataType, taxa, tree, new GammaSiteModel(msatModel), brModel);
Patterns patterns = msatSim.simulateMsatPattern();
String msatPatId = xo.getAttribute("id", "simMsatPat");
patterns.setId(msatPatId);
MicrosatellitePatternParser.printDetails(patterns);
MicrosatellitePatternParser.printMicrosatContent(patterns);
return patterns;
}
use of dr.evomodel.branchratemodel.DefaultBranchRateModel in project beast-mcmc by beast-dev.
the class VariableBranchCompleteHistorySimulatorTest method testHKYVariableSimulation.
public void testHKYVariableSimulation() {
System.out.println("Starting HKY variable branch simulation");
Parameter kappa = new Parameter.Default(1, 2.0);
double[] pi = { 0.45, 0.05, 0.25, 0.25 };
Parameter freqs = new Parameter.Default(pi);
FrequencyModel f = new FrequencyModel(Nucleotides.INSTANCE, freqs);
HKY hky = new HKY(kappa, f);
int stateCount = hky.getDataType().getStateCount();
Parameter mu = new Parameter.Default(1, 0.5);
Parameter alpha = new Parameter.Default(1, 0.5);
GammaSiteRateModel siteModel = new GammaSiteRateModel("gammaModel", mu, alpha, 4, null);
siteModel.setSubstitutionModel(hky);
BranchRateModel branchRateModel = new DefaultBranchRateModel();
double analyticResult = TreeUtils.getTreeLength(tree, tree.getRoot()) * mu.getParameterValue(0);
int nSites = 200;
double[] register1 = new double[stateCount * stateCount];
double[] register2 = new double[stateCount * stateCount];
// Count all jumps
MarkovJumpsCore.fillRegistrationMatrix(register1, stateCount);
// Move some jumps from 1 to 2
register1[1 * stateCount + 2] = 0;
register2[1 * stateCount + 2] = 1;
register1[1 * stateCount + 3] = 0;
register2[1 * stateCount + 3] = 1;
register1[2 * stateCount + 3] = 0;
register2[2 * stateCount + 3] = 1;
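// At this point register1 flags every possible state change except the three transitions zeroed
// above, while register2 flags only those three. The registers are laid out row-major, so
// register[i * stateCount + j] == 1 means "count i -> j jumps" (a reading of intent based on the
// fillRegistrationMatrix call and the comments above).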
double[] branchValues = { 10.0, 10.0, 10.0, 10.0, 10.0 };
Parameter branchValuesParam = new Parameter.Default(branchValues);
runSimulation(N, tree, siteModel, branchRateModel, nSites, new double[][] { register1, register2 }, analyticResult, kappa, branchValuesParam);
}