Use of dr.evomodel.branchratemodel.DefaultBranchRateModel in project beast-mcmc by beast-dev.
The class BASTADensityTester, method main.
public static void main(String[] args) {

    MathUtils.setSeed(666);

    // turn off logging to avoid screen noise...
    Logger logger = Logger.getLogger("dr");
    logger.setUseParentHandlers(false);

    System.out.println("EXAMPLE 1: 4 taxa with 2 demes");

    SimpleAlignment alignment = createAlignment(sequences, Nucleotides.INSTANCE);

    TreeModel treeModel;
    try {
        treeModel = createSpecifiedTree("(((AB192965Japan2004:0.42343888910376215,AF189018Indonesia2005:1.4234388891037622)5:9.725099517053918,AF071228Spain1997:3.14853840615768)6:1.7275747971782511,AF105975Portugal1995:2.8761132033359313)");
    } catch (Exception e) {
        throw new RuntimeException("Unable to parse Newick tree");
    }

    List<String> states = new ArrayList<String>();
    states.add("Asia");
    states.add("West_Medit");
    GeneralDataType dataType = new GeneralDataType(states);

    Taxa taxonList = new Taxa();
    Taxon taxonOne = new Taxon(sequences[0][0]);
    taxonOne.setAttribute(DEME, states.get(0));
    taxonList.addTaxon(taxonOne);
    Taxon taxonTwo = new Taxon(sequences[0][1]);
    taxonTwo.setAttribute(DEME, states.get(1));
    taxonList.addTaxon(taxonTwo);
    Taxon taxonThree = new Taxon(sequences[0][2]);
    taxonThree.setAttribute(DEME, states.get(1));
    taxonList.addTaxon(taxonThree);
    Taxon taxonFour = new Taxon(sequences[0][3]);
    taxonFour.setAttribute(DEME, states.get(0));
    taxonList.addTaxon(taxonFour);

    // one deme assignment per taxon: 0 = Asia, 1 = West_Medit
    int[] pattern = new int[sequences[0].length];
    pattern[0] = 0;
    pattern[1] = 1;
    pattern[2] = 1;
    pattern[3] = 0;

    SimpleSiteList patterns = new SimpleSiteList(dataType, taxonList);
    patterns.addPattern(pattern);

    double[] freqValues = new double[2];
    freqValues[0] = 0.5;
    freqValues[1] = 0.5;
    FrequencyModel freqModel = new FrequencyModel(dataType, freqValues);

    Parameter ratesParameter = new Parameter.Default("migration rates", 2);
    ratesParameter.setParameterValue(0, 1.0);
    ratesParameter.setParameterValue(1, 1.0);

    Parameter popSizesParameter = new Parameter.Default("population sizes", 2);
    popSizesParameter.setParameterValue(0, 1.0);
    popSizesParameter.setParameterValue(1, 1.0);

    // GeneralSubstitutionModel migrationModel = new GeneralSubstitutionModel("migrationModel", dataType, null, ratesParameter, 0);
    SVSComplexSubstitutionModel migrationModel = new SVSComplexSubstitutionModel("migrationModel", dataType, freqModel, ratesParameter, null);

    TaxonList includeSubtree = null;
    List<TaxonList> excludeSubtrees = new ArrayList<TaxonList>();
    int subIntervals = 2;

    // DefaultBranchRateModel fixes every branch rate to 1.0, so the Newick branch
    // lengths above are used directly as time
    BranchRateModel branchRateModel = new DefaultBranchRateModel();

    StructuredCoalescentLikelihood structured = null;
    try {
        structured = new StructuredCoalescentLikelihood(treeModel, branchRateModel, popSizesParameter, patterns, null, "", migrationModel, subIntervals, includeSubtree, excludeSubtrees, false);
    } catch (TreeUtils.MissingTaxonException missing) {
        System.out.println("Error thrown in test class dr.evomodel.coalescent.basta.BASTADensityTester: " + missing);
    }

    System.out.println("Structured coalescent lnL = " + structured.calculateLogLikelihood() + "\n");

    System.out.println("EXAMPLE 2: 5 taxa (2 identical sampling dates) with 2 demes");

    try {
        treeModel = createSpecifiedTree("((((AB192965Japan2004:0.48972300366304955,AJ842306France2004:0.48972300366304955):1.2052625264106087,AF189018Indonesia2005:2.6949855300736583):6.86618775860274,AF071228Spain1997:1.5611732886763985):1.4313107562768952,AF105975Portugal1995:0.9924840449532937)");
    } catch (Exception e) {
        throw new RuntimeException("Unable to parse Newick tree");
    }

    Taxon taxonFive = new Taxon(sequencesTwo[0][4]);
    taxonFive.setAttribute(DEME, states.get(1));
    taxonList.addTaxon(taxonFive);

    pattern = new int[sequencesTwo[0].length];
    pattern[0] = 0;
    pattern[1] = 1;
    pattern[2] = 1;
    pattern[3] = 0;
    pattern[4] = 1;

    patterns = new SimpleSiteList(dataType, taxonList);
    patterns.addPattern(pattern);

    try {
        structured = new StructuredCoalescentLikelihood(treeModel, branchRateModel, popSizesParameter, patterns, null, "", migrationModel, subIntervals, includeSubtree, excludeSubtrees, false);
    } catch (TreeUtils.MissingTaxonException missing) {
        System.out.println("Error thrown in test class dr.evomodel.coalescent.basta.BASTADensityTester: " + missing);
    }

    System.out.println("Structured coalescent lnL = " + structured.calculateLogLikelihood() + "\n");

    System.out.println("EXAMPLE 3: 6 taxa (2 identical sampling dates) with 3 demes");

    try {
        treeModel = createSpecifiedTree("(((AB192965Japan2004:4.509764060335954,((AF189018Indonesia2005:1.496797606742874,AJ842306France2004:0.4967976067428741):0.3617175987505199,FM212660Cameroon2007:3.858515205493394):3.6512488548425597):4.957323993357177,AF071228Spain1997:2.4670880536931303):0.9760399779205056,AF105975Portugal1995:1.443128031613636)");
    } catch (Exception e) {
        throw new RuntimeException("Unable to parse Newick tree");
    }

    states = new ArrayList<String>();
    states.add("Asia");
    states.add("West_Medit");
    states.add("Africa");
    dataType = new GeneralDataType(states);

    taxonOne.setAttribute(DEME, states.get(0));
    taxonTwo.setAttribute(DEME, states.get(1));
    taxonThree.setAttribute(DEME, states.get(1));
    taxonFour.setAttribute(DEME, states.get(0));
    taxonFive.setAttribute(DEME, states.get(1));
    Taxon taxonSix = new Taxon(sequencesThree[0][5]);
    taxonSix.setAttribute(DEME, states.get(2));
    taxonList.addTaxon(taxonSix);

    pattern = new int[sequencesThree[0].length];
    pattern[0] = 0;
    pattern[1] = 1;
    pattern[2] = 1;
    pattern[3] = 0;
    pattern[4] = 1;
    pattern[5] = 2;

    patterns = new SimpleSiteList(dataType, taxonList);
    patterns.addPattern(pattern);

    freqValues = new double[3];
    freqValues[0] = 1.0 / 3.0;
    freqValues[1] = 1.0 / 3.0;
    freqValues[2] = 1 - freqValues[0] - freqValues[1];
    freqModel = new FrequencyModel(dataType, freqValues);

    // 3 demes: 3 * (3 - 1) = 6 pairwise migration rates
    ratesParameter = new Parameter.Default("migration rates", 6);
    ratesParameter.setParameterValue(0, 1.0);
    ratesParameter.setParameterValue(1, 1.0);
    ratesParameter.setParameterValue(2, 1.0);
    ratesParameter.setParameterValue(3, 1.0);
    ratesParameter.setParameterValue(4, 1.0);
    ratesParameter.setParameterValue(5, 1.0);

    popSizesParameter = new Parameter.Default("population sizes", 3);
    popSizesParameter.setParameterValue(0, 1.0);
    popSizesParameter.setParameterValue(1, 1.0);
    popSizesParameter.setParameterValue(2, 1.0);

    // GeneralSubstitutionModel migrationModel = new GeneralSubstitutionModel("migrationModel", dataType, null, ratesParameter, 0);
    migrationModel = new SVSComplexSubstitutionModel("migrationModel", dataType, freqModel, ratesParameter, null);

    try {
        structured = new StructuredCoalescentLikelihood(treeModel, branchRateModel, popSizesParameter, patterns, null, "", migrationModel, subIntervals, includeSubtree, excludeSubtrees, false);
    } catch (TreeUtils.MissingTaxonException missing) {
        System.out.println("Error thrown in test class dr.evomodel.coalescent.basta.BASTADensityTester: " + missing);
    }

    System.out.println("Structured coalescent lnL = " + structured.calculateLogLikelihood() + "\n");

    System.out.println("EXAMPLE 4: 6 taxa (2 identical sampling dates) with 3 demes and simple branch lengths");

    try {
        treeModel = createSpecifiedTree("(((AB192965Japan2004:4.5,((AF189018Indonesia2005:1.5,AJ842306France2004:0.5):0.25,FM212660Cameroon2007:3.75):3.75):5.0,AF071228Spain1997:2.5):1.0,AF105975Portugal1995:1.5)");
    } catch (Exception e) {
        throw new RuntimeException("Unable to parse Newick tree");
    }

    try {
        structured = new StructuredCoalescentLikelihood(treeModel, branchRateModel, popSizesParameter, patterns, null, "", migrationModel, subIntervals, includeSubtree, excludeSubtrees, false);
    } catch (TreeUtils.MissingTaxonException missing) {
        System.out.println("Error thrown in test class dr.evomodel.coalescent.basta.BASTADensityTester: " + missing);
    }

    System.out.println("Structured coalescent lnL = " + structured.calculateLogLikelihood() + "\n");

    System.out.println("EXAMPLE 5: 6 taxa (2 identical sampling dates) with 3 demes and simple branch lengths (different from those in EXAMPLE 4)");

    try {
        treeModel = createSpecifiedTree("(((((AF189018Indonesia2005:1.5,AJ842306France2004:0.5):0.75,AB192965Japan2004:1.25):3.25,FM212660Cameroon2007:7.5):5.0,AF071228Spain1997:2.5):1.0,AF105975Portugal1995:1.5)");
    } catch (Exception e) {
        throw new RuntimeException("Unable to parse Newick tree");
    }

    try {
        structured = new StructuredCoalescentLikelihood(treeModel, branchRateModel, popSizesParameter, patterns, null, "", migrationModel, subIntervals, includeSubtree, excludeSubtrees, false);
    } catch (TreeUtils.MissingTaxonException missing) {
        System.out.println("Error thrown in test class dr.evomodel.coalescent.basta.BASTADensityTester: " + missing);
    }

    System.out.println("Structured coalescent lnL = " + structured.calculateLogLikelihood() + "\n");
}
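
What DefaultBranchRateModel contributes above is deliberately trivial: it reports a constant rate of 1.0 for every branch, so the Newick branch lengths are consumed as raw time by the structured coalescent. A minimal standalone sketch of that behavior (the class name DefaultRateSketch and the toy Newick string are ours for illustration; NewickImporter, Tree, and BranchRateModel are the actual beast-mcmc classes):

import java.io.StringReader;

import dr.evolution.io.NewickImporter;
import dr.evolution.tree.NodeRef;
import dr.evolution.tree.Tree;
import dr.evomodel.branchratemodel.BranchRateModel;
import dr.evomodel.branchratemodel.DefaultBranchRateModel;

public class DefaultRateSketch {

    public static void main(String[] args) throws Exception {
        // parse a small tree; branch lengths are in arbitrary time units
        Tree tree = new NewickImporter(new StringReader("((a:1.0,b:1.0):0.5,c:1.5);")).importTree(null);

        BranchRateModel rates = new DefaultBranchRateModel();
        for (int i = 0; i < tree.getNodeCount(); i++) {
            NodeRef node = tree.getNode(i);
            if (!tree.isRoot(node)) {
                // always prints 1.0: effective branch length equals the raw branch length
                System.out.println("node " + i + ": rate = " + rates.getBranchRate(tree, node));
            }
        }
    }
}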
Use of dr.evomodel.branchratemodel.DefaultBranchRateModel in project beast-mcmc by beast-dev.
The class DataLikelihoodTester2, method main.
public static void main(String[] args) {

    // turn off logging to avoid screen noise...
    Logger logger = Logger.getLogger("dr");
    logger.setUseParentHandlers(false);

    SimpleAlignment alignment = createAlignment(sequences, Nucleotides.INSTANCE);

    TreeModel treeModel;
    try {
        treeModel = createSpecifiedTree("((human:0.1,chimp:0.1):0.1,gorilla:0.2)");
    } catch (Exception e) {
        throw new RuntimeException("Unable to parse Newick tree");
    }

    System.out.print("\nTest BeagleTreeLikelihood (kappa = 1): ");

    // substitutionModel
    Parameter freqs = new Parameter.Default(new double[] { 0.25, 0.25, 0.25, 0.25 });
    Parameter kappa = new Parameter.Default(HKYParser.KAPPA, 1.0, 0, 100);
    FrequencyModel f = new FrequencyModel(Nucleotides.INSTANCE, freqs);
    HKY hky = new HKY(kappa, f);

    // siteModel
    double alpha = 0.5;
    GammaSiteRateModel siteRateModel = new GammaSiteRateModel("gammaModel", alpha, 4);
    // GammaSiteRateModel siteRateModel = new GammaSiteRateModel("siteRateModel");
    siteRateModel.setSubstitutionModel(hky);
    Parameter mu = new Parameter.Default(GammaSiteModelParser.SUBSTITUTION_RATE, 1.0, 0, Double.POSITIVE_INFINITY);
    siteRateModel.setRelativeRateParameter(mu);

    FrequencyModel f2 = new FrequencyModel(Nucleotides.INSTANCE, freqs);
    Parameter kappa2 = new Parameter.Default(HKYParser.KAPPA, 10.0, 0, 100);
    HKY hky2 = new HKY(kappa2, f2);
    GammaSiteRateModel siteRateModel2 = new GammaSiteRateModel("gammaModel", alpha, 4);
    siteRateModel2.setSubstitutionModel(hky2);
    siteRateModel2.setRelativeRateParameter(mu);

    // treeLikelihood
    SitePatterns patterns = new SitePatterns(alignment, null, 0, -1, 1, true);
    BranchModel branchModel = new HomogeneousBranchModel(siteRateModel.getSubstitutionModel(), siteRateModel.getSubstitutionModel().getFrequencyModel());
    BranchModel branchModel2 = new HomogeneousBranchModel(siteRateModel2.getSubstitutionModel(), siteRateModel2.getSubstitutionModel().getFrequencyModel());

    // a single DefaultBranchRateModel (constant rate 1.0) is shared by all likelihoods below
    BranchRateModel branchRateModel = new DefaultBranchRateModel();

    BeagleTreeLikelihood treeLikelihood = new BeagleTreeLikelihood(patterns, treeModel, branchModel, siteRateModel, branchRateModel, null, false, PartialsRescalingScheme.AUTO, true);
    double logLikelihood = treeLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);

    System.out.print("\nTest BeagleDataLikelihoodDelegate (kappa = 1): ");
    BeagleDataLikelihoodDelegate dataLikelihoodDelegate = new BeagleDataLikelihoodDelegate(treeModel, patterns, branchModel, siteRateModel, false, false, PartialsRescalingScheme.NONE, false, PreOrderSettings.getDefault());
    TreeDataLikelihood treeDataLikelihood = new TreeDataLikelihood(dataLikelihoodDelegate, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);

    hky.setKappa(5.0);
    System.out.print("\nTest BeagleDataLikelihoodDelegate (kappa = 5): ");
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);

    System.out.print("\nTest BeagleDataLikelihoodDelegate (kappa = 10): ");
    dataLikelihoodDelegate = new BeagleDataLikelihoodDelegate(treeModel, patterns, branchModel2, siteRateModel2, false, false, PartialsRescalingScheme.NONE, false, PreOrderSettings.getDefault());
    treeDataLikelihood = new TreeDataLikelihood(dataLikelihoodDelegate, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);

    hky2.setKappa(11.0);
    System.out.print("\nTest BeagleDataLikelihoodDelegate (kappa = 11): ");
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);

    hky.setKappa(1.0);
    hky2.setKappa(10.0);

    MultiPartitionDataLikelihoodDelegate multiPartitionDataLikelihoodDelegate;

    System.out.print("\nTest MultiPartitionDataLikelihoodDelegate 1 partition (kappa = 1):");
    try {
        multiPartitionDataLikelihoodDelegate = new MultiPartitionDataLikelihoodDelegate(treeModel, Collections.singletonList((PatternList) patterns), Collections.singletonList((BranchModel) branchModel), Collections.singletonList((SiteRateModel) siteRateModel), true, PartialsRescalingScheme.NONE, false);
        treeDataLikelihood = new TreeDataLikelihood(multiPartitionDataLikelihoodDelegate, treeModel, branchRateModel);
        logLikelihood = treeDataLikelihood.getLogLikelihood();
        System.out.println("logLikelihood = " + logLikelihood);
    } catch (DataLikelihoodDelegate.DelegateTypeException dte) {
        System.out.print("Failed to create multiPartitionDataLikelihoodDelegate instance (wrong resource type or no partitions, needs to be CUDA or OpenCL device with multiple partitions)");
    }

    hky.setKappa(5.0);
    System.out.print("\nTest MultiPartitionDataLikelihoodDelegate 1 partition (kappa = 5):");
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);

    hky.setKappa(1.0);
    System.out.print("\nTest MultiPartitionDataLikelihoodDelegate 1 partition (kappa = 10):");
    try {
        multiPartitionDataLikelihoodDelegate = new MultiPartitionDataLikelihoodDelegate(treeModel, Collections.singletonList((PatternList) patterns), Collections.singletonList((BranchModel) branchModel2), Collections.singletonList((SiteRateModel) siteRateModel2), true, PartialsRescalingScheme.NONE, false);
        treeDataLikelihood = new TreeDataLikelihood(multiPartitionDataLikelihoodDelegate, treeModel, branchRateModel);
        logLikelihood = treeDataLikelihood.getLogLikelihood();
        System.out.println("logLikelihood = " + logLikelihood);
    } catch (DataLikelihoodDelegate.DelegateTypeException dte) {
        System.out.print("Failed to create multiPartitionDataLikelihoodDelegate instance (wrong resource type or no partitions, needs to be CUDA or OpenCL device with multiple partitions)");
    }

    System.out.print("\nTest MultiPartitionDataLikelihoodDelegate 2 partitions (kappa = 1, 10): ");
    List<PatternList> patternLists = new ArrayList<PatternList>();
    patternLists.add(patterns);
    patternLists.add(patterns);
    List<SiteRateModel> siteRateModels = new ArrayList<SiteRateModel>();
    siteRateModels.add(siteRateModel);
    siteRateModels.add(siteRateModel2);
    List<BranchModel> branchModels = new ArrayList<BranchModel>();
    branchModels.add(branchModel);
    branchModels.add(branchModel2);
    try {
        multiPartitionDataLikelihoodDelegate = new MultiPartitionDataLikelihoodDelegate(treeModel, patternLists, branchModels, siteRateModels, true, PartialsRescalingScheme.NONE, false);
        treeDataLikelihood = new TreeDataLikelihood(multiPartitionDataLikelihoodDelegate, treeModel, branchRateModel);
        logLikelihood = treeDataLikelihood.getLogLikelihood();
        System.out.println("logLikelihood = " + logLikelihood + " (NOT OK: this is 2x the logLikelihood of the 2nd partition)\n\n");
    } catch (DataLikelihoodDelegate.DelegateTypeException dte) {
        System.out.print("Failed to create multiPartitionDataLikelihoodDelegate instance (wrong resource type or no partitions, needs to be CUDA or OpenCL device with multiple partitions)");
    }

    // NOTE: the additional tests below are unreachable while this exit call remains
    // in place; remove it to run them
    System.exit(0);

    // START ADDITIONAL TEST #1 - Guy Baele
    System.out.println("-- Test #1 SiteRateModels -- ");

    // alpha in partition 1 reject followed by alpha in partition 2 reject
    System.out.print("Adjust alpha in partition 1: ");
    siteRateModel.setAlpha(0.4);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);

    System.out.print("Return alpha in partition 1 to original value: ");
    siteRateModel.setAlpha(0.5);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + " (i.e. reject: OK)\n");

    System.out.print("Adjust alpha in partition 2: ");
    siteRateModel2.setAlpha(0.35);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);

    System.out.print("Return alpha in partition 2 to original value: ");
    siteRateModel2.setAlpha(0.5);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + " (i.e. reject: OK)\n");

    // alpha in partition 1 accept followed by alpha in partition 2 accept
    System.out.print("Adjust alpha in partition 1: ");
    siteRateModel.setAlpha(0.4);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);

    System.out.print("Adjust alpha in partition 2: ");
    siteRateModel2.setAlpha(0.35);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + " (NOT OK: same logLikelihood as only setting alpha in partition 2)");

    System.out.print("Return alpha in partition 1 to original value: ");
    siteRateModel.setAlpha(0.5);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + " (NOT OK: alpha in partition 2 has not been returned to original value yet)");

    System.out.print("Return alpha in partition 2 to original value: ");
    siteRateModel2.setAlpha(0.5);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + "\n");

    // adjusting alphas in both partitions without explicitly calling getLogLikelihood() in between
    System.out.print("Adjust both alphas in partitions 1 and 2: ");
    siteRateModel.setAlpha(0.4);
    siteRateModel2.setAlpha(0.35);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);

    System.out.print("Return alpha in partition 2 to original value: ");
    siteRateModel2.setAlpha(0.5);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + " (NOT OK: alpha in partition 1 has not been returned to original value yet)");

    System.out.print("Return alpha in partition 1 to original value: ");
    siteRateModel.setAlpha(0.5);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + "\n\n");
    // END ADDITIONAL TEST - Guy Baele

    // START ADDITIONAL TEST #2 - Guy Baele
    System.out.println("-- Test #2 SiteRateModels -- ");
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);

    // 1 siteRateModel shared across 2 partitions
    siteRateModels = new ArrayList<SiteRateModel>();
    siteRateModels.add(siteRateModel);
    try {
        multiPartitionDataLikelihoodDelegate = new MultiPartitionDataLikelihoodDelegate(treeModel, patternLists, branchModels, siteRateModels, true, PartialsRescalingScheme.NONE, false);
        treeDataLikelihood = new TreeDataLikelihood(multiPartitionDataLikelihoodDelegate, treeModel, branchRateModel);
        logLikelihood = treeDataLikelihood.getLogLikelihood();
        System.out.println("logLikelihood = " + logLikelihood + "\n");

        System.out.print("Adjust alpha in shared siteRateModel: ");
        siteRateModel.setAlpha(0.4);
        logLikelihood = treeDataLikelihood.getLogLikelihood();
        System.out.println("logLikelihood = " + logLikelihood + " (NOT OK: same logLikelihood as only adjusted alpha for partition 1)");

        siteRateModel.setAlpha(0.5);
        logLikelihood = treeDataLikelihood.getLogLikelihood();
        System.out.println("logLikelihood = " + logLikelihood + "\n\n");
    } catch (DataLikelihoodDelegate.DelegateTypeException dte) {
        System.out.print("Failed to create multiPartitionDataLikelihoodDelegate instance (wrong resource type or no partitions, needs to be CUDA or OpenCL device with multiple partitions)");
    }
    // END ADDITIONAL TEST - Guy Baele

    // START ADDITIONAL TEST #3 - Guy Baele
    System.out.println("-- Test #3 SiteRateModels -- ");
    siteRateModel = new GammaSiteRateModel("gammaModel");
    siteRateModel.setSubstitutionModel(hky);
    siteRateModel.setRelativeRateParameter(mu);
    siteRateModel2 = new GammaSiteRateModel("gammaModel2");
    siteRateModel2.setSubstitutionModel(hky2);
    siteRateModel2.setRelativeRateParameter(mu);
    siteRateModels = new ArrayList<SiteRateModel>();
    siteRateModels.add(siteRateModel);
    siteRateModels.add(siteRateModel2);
    try {
        multiPartitionDataLikelihoodDelegate = new MultiPartitionDataLikelihoodDelegate(treeModel, patternLists, branchModels, siteRateModels, true, PartialsRescalingScheme.NONE, false);
        treeDataLikelihood = new TreeDataLikelihood(multiPartitionDataLikelihoodDelegate, treeModel, branchRateModel);
        logLikelihood = treeDataLikelihood.getLogLikelihood();
        System.out.println("logLikelihood = " + logLikelihood + "\n");

        System.out.print("Adjust kappa in partition 1: ");
        hky.setKappa(5.0);
        logLikelihood = treeDataLikelihood.getLogLikelihood();
        System.out.println("logLikelihood = " + logLikelihood + " (NOT OK: logLikelihood has not changed?)");

        System.out.print("Return kappa in partition 1 to original value: ");
        hky.setKappa(1.0);
        logLikelihood = treeDataLikelihood.getLogLikelihood();
        System.out.println("logLikelihood = " + logLikelihood + "\n");

        System.out.print("Adjust kappa in partition 2: ");
        hky2.setKappa(11.0);
        logLikelihood = treeDataLikelihood.getLogLikelihood();
        System.out.println("logLikelihood = " + logLikelihood);

        System.out.print("Return kappa in partition 2 to original value: ");
        hky2.setKappa(10.0);
        logLikelihood = treeDataLikelihood.getLogLikelihood();
        System.out.println("logLikelihood = " + logLikelihood + " (i.e. reject: OK)\n\n");
    } catch (DataLikelihoodDelegate.DelegateTypeException dte) {
        System.out.print("Failed to create multiPartitionDataLikelihoodDelegate instance (wrong resource type or no partitions, needs to be CUDA or OpenCL device with multiple partitions)");
    }
    // END ADDITIONAL TEST - Guy Baele

    // START ADDITIONAL TEST #4 - Guy Baele
    System.out.println("-- Test #4 SiteRateModels -- ");
    SimpleAlignment secondAlignment = createAlignment(moreSequences, Nucleotides.INSTANCE);
    SitePatterns morePatterns = new SitePatterns(secondAlignment, null, 0, -1, 1, true);

    BeagleDataLikelihoodDelegate dataLikelihoodDelegateOne = new BeagleDataLikelihoodDelegate(treeModel, patterns, branchModel, siteRateModel, false, false, PartialsRescalingScheme.NONE, false, PreOrderSettings.getDefault());
    TreeDataLikelihood treeDataLikelihoodOne = new TreeDataLikelihood(dataLikelihoodDelegateOne, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihoodOne.getLogLikelihood();
    System.out.println("\nBeagleDataLikelihoodDelegate logLikelihood partition 1 (kappa = 1) = " + logLikelihood);

    hky.setKappa(10.0);
    logLikelihood = treeDataLikelihoodOne.getLogLikelihood();
    System.out.println("BeagleDataLikelihoodDelegate logLikelihood partition 1 (kappa = 10) = " + logLikelihood);

    hky.setKappa(1.0);
    BeagleDataLikelihoodDelegate dataLikelihoodDelegateTwo = new BeagleDataLikelihoodDelegate(treeModel, morePatterns, branchModel2, siteRateModel2, false, false, PartialsRescalingScheme.NONE, false, PreOrderSettings.getDefault());
    TreeDataLikelihood treeDataLikelihoodTwo = new TreeDataLikelihood(dataLikelihoodDelegateTwo, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihoodTwo.getLogLikelihood();
    System.out.println("BeagleDataLikelihoodDelegate logLikelihood partition 2 (kappa = 10) = " + logLikelihood + "\n");

    try {
        multiPartitionDataLikelihoodDelegate = new MultiPartitionDataLikelihoodDelegate(treeModel, Collections.singletonList((PatternList) patterns), Collections.singletonList((BranchModel) branchModel), Collections.singletonList((SiteRateModel) siteRateModel), true, PartialsRescalingScheme.NONE, false);
        treeDataLikelihood = new TreeDataLikelihood(multiPartitionDataLikelihoodDelegate, treeModel, branchRateModel);
        logLikelihood = treeDataLikelihood.getLogLikelihood();
        System.out.print("Test MultiPartitionDataLikelihoodDelegate 1st partition (kappa = 1):");
        System.out.println("logLikelihood = " + logLikelihood);

        hky.setKappa(10.0);
        logLikelihood = treeDataLikelihood.getLogLikelihood();
        System.out.print("Test MultiPartitionDataLikelihoodDelegate 1st partition (kappa = 10):");
        System.out.println("logLikelihood = " + logLikelihood);
    } catch (DataLikelihoodDelegate.DelegateTypeException dte) {
        System.out.print("Failed to create multiPartitionDataLikelihoodDelegate instance (wrong resource type or no partitions, needs to be CUDA or OpenCL device with multiple partitions)");
    }

    hky.setKappa(1.0);
    try {
        multiPartitionDataLikelihoodDelegate = new MultiPartitionDataLikelihoodDelegate(treeModel, Collections.singletonList((PatternList) morePatterns), Collections.singletonList((BranchModel) branchModel2), Collections.singletonList((SiteRateModel) siteRateModel2), true, PartialsRescalingScheme.NONE, false);
        treeDataLikelihood = new TreeDataLikelihood(multiPartitionDataLikelihoodDelegate, treeModel, branchRateModel);
        logLikelihood = treeDataLikelihood.getLogLikelihood();
        System.out.print("Test MultiPartitionDataLikelihoodDelegate 2nd partition (kappa = 10):");
        System.out.println("logLikelihood = " + logLikelihood + "\n");
    } catch (DataLikelihoodDelegate.DelegateTypeException dte) {
        System.out.print("Failed to create multiPartitionDataLikelihoodDelegate instance (wrong resource type or no partitions, needs to be CUDA or OpenCL device with multiple partitions)");
    }

    patternLists = new ArrayList<PatternList>();
    patternLists.add(patterns);
    patternLists.add(morePatterns);
    try {
        multiPartitionDataLikelihoodDelegate = new MultiPartitionDataLikelihoodDelegate(treeModel, patternLists, branchModels, siteRateModels, true, PartialsRescalingScheme.NONE, false);
        treeDataLikelihood = new TreeDataLikelihood(multiPartitionDataLikelihoodDelegate, treeModel, branchRateModel);
        logLikelihood = treeDataLikelihood.getLogLikelihood();
        System.out.print("Test MultiPartitionDataLikelihoodDelegate 2 partitions (kappa = 1, 10): ");
        System.out.println("logLikelihood = " + logLikelihood + " (NOT OK: should be the sum of both separate logLikelihoods)\nKappa value of partition 2 is used to compute logLikelihood for both partitions?");
    } catch (DataLikelihoodDelegate.DelegateTypeException dte) {
        System.out.print("Failed to create multiPartitionDataLikelihoodDelegate instance (wrong resource type or no partitions, needs to be CUDA or OpenCL device with multiple partitions)");
    }
    // END ADDITIONAL TEST - Guy Baele
}
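
Every multi-partition test above repeats the same three steps: build a MultiPartitionDataLikelihoodDelegate, wrap it in a TreeDataLikelihood together with the shared DefaultBranchRateModel, and query getLogLikelihood(), guarding against DelegateTypeException. A sketch of that pattern factored into a helper (the method name evaluateMultiPartition is ours, not part of beast-mcmc; the constructor arguments mirror the calls above):

// Returns the log likelihood for the given partitions, or null when the
// multi-partition delegate cannot be created (e.g. no CUDA/OpenCL device
// with multi-partition support is available).
private static Double evaluateMultiPartition(TreeModel treeModel,
                                             BranchRateModel branchRateModel,
                                             List<PatternList> patternLists,
                                             List<BranchModel> branchModels,
                                             List<SiteRateModel> siteRateModels) {
    try {
        MultiPartitionDataLikelihoodDelegate delegate = new MultiPartitionDataLikelihoodDelegate(
                treeModel, patternLists, branchModels, siteRateModels,
                true, PartialsRescalingScheme.NONE, false);
        TreeDataLikelihood likelihood = new TreeDataLikelihood(delegate, treeModel, branchRateModel);
        return likelihood.getLogLikelihood();
    } catch (DataLikelihoodDelegate.DelegateTypeException dte) {
        // wrong resource type or no partitions; see the messages printed above
        return null;
    }
}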
Use of dr.evomodel.branchratemodel.DefaultBranchRateModel in project beast-mcmc by beast-dev.
The class LocationScaleGradientParser, method parseTreeDataLikelihood.
private GradientWrtParameterProvider parseTreeDataLikelihood(XMLObject xo, TreeDataLikelihood treeDataLikelihood, String traitName, boolean useHessian) throws XMLParseException {

    BranchRateModel branchRateModel = treeDataLikelihood.getBranchRateModel();
    DataLikelihoodDelegate delegate = treeDataLikelihood.getDataLikelihoodDelegate();

    if (delegate instanceof ContinuousDataLikelihoodDelegate) {
        throw new XMLParseException("Not yet implemented! ");
    } else if (delegate instanceof BeagleDataLikelihoodDelegate) {
        BeagleDataLikelihoodDelegate beagleData = (BeagleDataLikelihoodDelegate) delegate;
        BranchModel branchModel = beagleData.getBranchModel();

        if (branchRateModel instanceof DefaultBranchRateModel || branchRateModel instanceof ArbitraryBranchRates) {
            if (xo.hasChildNamed(LOCATION)) {
                BranchSpecificFixedEffects location = parseLocation(xo);
                return new LocationGradient(traitName, treeDataLikelihood, beagleData, location, useHessian);
            } else if (xo.hasChildNamed(SCALE)) {
                Parameter scale = (Parameter) xo.getElementFirstChild(SCALE);
                return new ScaleGradient(traitName, treeDataLikelihood, beagleData, scale, useHessian);
            } else {
                throw new XMLParseException("Poorly formed");
            }
        } else if (branchModel instanceof ArbitrarySubstitutionParameterBranchModel) {
            BranchParameter branchParameter = (BranchParameter) xo.getChild(BranchParameter.class);
            if (xo.hasChildNamed(LOCATION)) {
                BranchSpecificFixedEffects location = parseLocation(xo);
                return new BranchSubstitutionParameterLocationGradient(traitName, treeDataLikelihood, beagleData, branchParameter, useHessian, location);
            } else if (xo.hasChildNamed(SCALE)) {
                Parameter scale = (Parameter) xo.getElementFirstChild(SCALE);
                return new BranchSubstitutionParameterScaleGradient(traitName, treeDataLikelihood, beagleData, branchParameter, scale, useHessian);
            } else {
                throw new XMLParseException("Not yet implemented.");
            }
        } else {
            throw new XMLParseException("Only implemented for an arbitrary rates model");
        }
    } else {
        throw new XMLParseException("Unknown likelihood delegate type");
    }
}
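
The dispatch above keys on the concrete type of the likelihood's BranchRateModel before it looks at the LOCATION or SCALE children. A small sketch of that guard pulled out for readability (the method name hasFixedEffectsSupport is ours, not in beast-mcmc):

// Location/scale gradients are only wired up for the strict-clock
// DefaultBranchRateModel and for ArbitraryBranchRates; any other rate
// model falls through to an XMLParseException.
private static boolean hasFixedEffectsSupport(BranchRateModel branchRateModel) {
    return branchRateModel instanceof DefaultBranchRateModel
            || branchRateModel instanceof ArbitraryBranchRates;
}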
Use of dr.evomodel.branchratemodel.DefaultBranchRateModel in project beast-mcmc by beast-dev.
The class NodeHeightGradientParser, method parseXMLObject.
public Object parseXMLObject(XMLObject xo) throws XMLParseException {

    String traitName = xo.getAttribute(TRAIT_NAME, DEFAULT_TRAIT_NAME);
    final TreeDataLikelihood treeDataLikelihood = (TreeDataLikelihood) xo.getChild(TreeDataLikelihood.class);

    BranchRateModel branchRateModel = treeDataLikelihood.getBranchRateModel();
    if (branchRateModel instanceof DefaultBranchRateModel || branchRateModel instanceof ArbitraryBranchRates) {
        Parameter branchRates = null;
        if (branchRateModel instanceof ArbitraryBranchRates) {
            branchRates = ((ArbitraryBranchRates) branchRateModel).getRateParameter();
        }

        DataLikelihoodDelegate delegate = treeDataLikelihood.getDataLikelihoodDelegate();
        if (delegate instanceof ContinuousDataLikelihoodDelegate) {
            throw new XMLParseException("Not yet implemented! ");
        } else if (delegate instanceof BeagleDataLikelihoodDelegate) {
            BeagleDataLikelihoodDelegate beagleData = (BeagleDataLikelihoodDelegate) delegate;
            return new NodeHeightGradientForDiscreteTrait(traitName, treeDataLikelihood, beagleData, branchRates);
        } else {
            throw new XMLParseException("Unknown likelihood delegate type");
        }
    } else {
        throw new XMLParseException("Only implemented for an arbitrary rates model");
    }
}
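
The same type check appears here with one extra step: only ArbitraryBranchRates exposes a rate parameter, so a DefaultBranchRateModel leads to null being handed to NodeHeightGradientForDiscreteTrait. A sketch of that extraction as a helper (the name extractBranchRates is ours):

private static Parameter extractBranchRates(BranchRateModel branchRateModel) {
    if (branchRateModel instanceof ArbitraryBranchRates) {
        return ((ArbitraryBranchRates) branchRateModel).getRateParameter();
    }
    // DefaultBranchRateModel has a fixed rate of 1.0 and no free parameter to report
    return null;
}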
Use of dr.evomodel.branchratemodel.DefaultBranchRateModel in project beast-mcmc by beast-dev.
The class ContinuousDataLikelihoodParser, method parseXMLObject.
public Object parseXMLObject(XMLObject xo) throws XMLParseException {

    Tree treeModel = (Tree) xo.getChild(Tree.class);
    MultivariateDiffusionModel diffusionModel = (MultivariateDiffusionModel) xo.getChild(MultivariateDiffusionModel.class);
    BranchRateModel rateModel = (BranchRateModel) xo.getChild(BranchRateModel.class);

    boolean useTreeLength = xo.getAttribute(USE_TREE_LENGTH, false);
    boolean scaleByTime = xo.getAttribute(SCALE_BY_TIME, false);
    boolean reciprocalRates = xo.getAttribute(RECIPROCAL_RATES, false);
    if (reciprocalRates) {
        throw new XMLParseException("Reciprocal rates are not yet implemented.");
    }

    // fall back to a constant rate of 1.0 when the XML supplies no branch rate model
    if (rateModel == null) {
        rateModel = new DefaultBranchRateModel();
    }

    ContinuousRateTransformation rateTransformation = new ContinuousRateTransformation.Default(treeModel, scaleByTime, useTreeLength);

    final int dim = diffusionModel.getPrecisionmatrix().length;
    String traitName = TreeTraitParserUtilities.DEFAULT_TRAIT_NAME;

    List<Integer> missingIndices;
    // Parameter sampleMissingParameter = null;
    ContinuousTraitPartialsProvider dataModel;
    boolean useMissingIndices = true;
    boolean integratedProcess = xo.getAttribute(INTEGRATED_PROCESS, false);

    if (xo.hasChildNamed(TreeTraitParserUtilities.TRAIT_PARAMETER)) {
        TreeTraitParserUtilities utilities = new TreeTraitParserUtilities();
        TreeTraitParserUtilities.TraitsAndMissingIndices returnValue = utilities.parseTraitsFromTaxonAttributes(xo, traitName, treeModel, true);
        CompoundParameter traitParameter = returnValue.traitParameter;
        missingIndices = returnValue.missingIndices;
        // sampleMissingParameter = returnValue.sampleMissingParameter;
        traitName = returnValue.traitName;
        useMissingIndices = returnValue.useMissingIndices;

        PrecisionType precisionType = PrecisionType.SCALAR;
        if (xo.getAttribute(FORCE_FULL_PRECISION, false) || (useMissingIndices && !xo.getAttribute(FORCE_COMPLETELY_MISSING, false))) {
            precisionType = PrecisionType.FULL;
        }

        if (xo.hasChildNamed(TreeTraitParserUtilities.JITTER)) {
            utilities.jitter(xo, diffusionModel.getPrecisionmatrix().length, missingIndices);
        }

        if (!integratedProcess) {
            dataModel = new ContinuousTraitDataModel(traitName, traitParameter, missingIndices, useMissingIndices, dim, precisionType);
        } else {
            dataModel = new IntegratedProcessTraitDataModel(traitName, traitParameter, missingIndices, useMissingIndices, dim, precisionType);
        }
    } else {
        // has a ContinuousTraitPartialsProvider child instead of a raw trait parameter
        dataModel = (ContinuousTraitPartialsProvider) xo.getChild(ContinuousTraitPartialsProvider.class);
    }

    ConjugateRootTraitPrior rootPrior = ConjugateRootTraitPrior.parseConjugateRootTraitPrior(xo, dataModel.getTraitDimension());

    final boolean allowSingular;
    if (dataModel instanceof IntegratedFactorAnalysisLikelihood) {
        // reference comparison works here because traitName can still point at the
        // constant assigned above, but .equals() would be the more defensive choice
        if (traitName == TreeTraitParserUtilities.DEFAULT_TRAIT_NAME) {
            traitName = FACTOR_NAME;
        }
        if (xo.hasAttribute(ALLOW_SINGULAR)) {
            allowSingular = xo.getAttribute(ALLOW_SINGULAR, false);
        } else {
            allowSingular = true;
        }
    } else if (dataModel instanceof RepeatedMeasuresTraitDataModel) {
        traitName = ((RepeatedMeasuresTraitDataModel) dataModel).getTraitName();
        allowSingular = xo.getAttribute(ALLOW_SINGULAR, false);
    } else {
        allowSingular = xo.getAttribute(ALLOW_SINGULAR, false);
    }

    List<BranchRateModel> driftModels = AbstractMultivariateTraitLikelihood.parseDriftModels(xo, diffusionModel);
    List<BranchRateModel> optimalTraitsModels = AbstractMultivariateTraitLikelihood.parseOptimalValuesModels(xo, diffusionModel);

    MultivariateElasticModel elasticModel = null;
    if (xo.hasChildNamed(STRENGTH_OF_SELECTION_MATRIX)) {
        XMLObject cxo = xo.getChild(STRENGTH_OF_SELECTION_MATRIX);
        MatrixParameterInterface strengthOfSelectionMatrixParam;
        strengthOfSelectionMatrixParam = (MatrixParameterInterface) cxo.getChild(MatrixParameterInterface.class);
        if (strengthOfSelectionMatrixParam != null) {
            elasticModel = new MultivariateElasticModel(strengthOfSelectionMatrixParam);
        }
    }

    DiffusionProcessDelegate diffusionProcessDelegate;
    if ((optimalTraitsModels != null && elasticModel != null) || xo.getAttribute(FORCE_OU, false)) {
        if (!integratedProcess) {
            diffusionProcessDelegate = new OUDiffusionModelDelegate(treeModel, diffusionModel, optimalTraitsModels, elasticModel);
        } else {
            diffusionProcessDelegate = new IntegratedOUDiffusionModelDelegate(treeModel, diffusionModel, optimalTraitsModels, elasticModel);
        }
    } else {
        if (driftModels != null || xo.getAttribute(FORCE_DRIFT, false)) {
            diffusionProcessDelegate = new DriftDiffusionModelDelegate(treeModel, diffusionModel, driftModels);
        } else {
            diffusionProcessDelegate = new HomogeneousDiffusionModelDelegate(treeModel, diffusionModel);
        }
    }

    ContinuousDataLikelihoodDelegate delegate = new ContinuousDataLikelihoodDelegate(treeModel, diffusionProcessDelegate, dataModel, rootPrior, rateTransformation, rateModel, allowSingular);

    if (dataModel instanceof IntegratedFactorAnalysisLikelihood) {
        ((IntegratedFactorAnalysisLikelihood) dataModel).setLikelihoodDelegate(delegate);
    }

    TreeDataLikelihood treeDataLikelihood = new TreeDataLikelihood(delegate, treeModel, rateModel);

    boolean reconstructTraits = xo.getAttribute(RECONSTRUCT_TRAITS, true);
    if (reconstructTraits) {
        // if (missingIndices != null && missingIndices.size() == 0) {
        if (!useMissingIndices) {
            ProcessSimulationDelegate simulationDelegate = delegate.getPrecisionType() == PrecisionType.SCALAR ? new ConditionalOnTipsRealizedDelegate(traitName, treeModel, diffusionModel, dataModel, rootPrior, rateTransformation, delegate) : new MultivariateConditionalOnTipsRealizedDelegate(traitName, treeModel, diffusionModel, dataModel, rootPrior, rateTransformation, delegate);
            TreeTraitProvider traitProvider = new ProcessSimulation(treeDataLikelihood, simulationDelegate);
            treeDataLikelihood.addTraits(traitProvider.getTreeTraits());
        } else {
            ProcessSimulationDelegate simulationDelegate = delegate.getPrecisionType() == PrecisionType.SCALAR ? new ConditionalOnTipsRealizedDelegate(traitName, treeModel, diffusionModel, dataModel, rootPrior, rateTransformation, delegate) : new MultivariateConditionalOnTipsRealizedDelegate(traitName, treeModel, diffusionModel, dataModel, rootPrior, rateTransformation, delegate);
            TreeTraitProvider traitProvider = new ProcessSimulation(treeDataLikelihood, simulationDelegate);
            treeDataLikelihood.addTraits(traitProvider.getTreeTraits());

            ProcessSimulationDelegate fullConditionalDelegate = new TipRealizedValuesViaFullConditionalDelegate(traitName, treeModel, diffusionModel, dataModel, rootPrior, rateTransformation, delegate);
            treeDataLikelihood.addTraits(new ProcessSimulation(treeDataLikelihood, fullConditionalDelegate).getTreeTraits());

            // String partialTraitName = getPartiallyMissingTraitName(traitName);
            //
            // ProcessSimulationDelegate partialSimulationDelegate = new ProcessSimulationDelegate.ConditionalOnPartiallyMissingTipsDelegate(partialTraitName,
            //         treeModel, diffusionModel, dataModel, rootPrior, rateTransformation, rateModel, delegate);
            //
            // TreeTraitProvider partialTraitProvider = new ProcessSimulation(partialTraitName,
            //         treeDataLikelihood, partialSimulationDelegate);
            //
            // treeDataLikelihood.addTraits(partialTraitProvider.getTreeTraits());
        }
    }

    return treeDataLikelihood;
}
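
This parser shows the most common way DefaultBranchRateModel enters a continuous-trait model: as the fallback when the XML supplies no BranchRateModel child, which makes the diffusion process run in plain time units. The null check near the top of the method could be condensed into a helper like the following sketch (the name rateModelOrDefault is ours, not part of beast-mcmc):

private static BranchRateModel rateModelOrDefault(XMLObject xo) {
    BranchRateModel rateModel = (BranchRateModel) xo.getChild(BranchRateModel.class);
    // no rate model supplied: treat every branch as evolving at rate 1.0
    return rateModel != null ? rateModel : new DefaultBranchRateModel();
}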