Use of dr.evomodel.branchmodel.HomogeneousBranchModel in project beast-mcmc by beast-dev.
The class BeagleTreeLikelihood, method main — simulates a one-partition alignment on a fixed three-taxon tree, then compares log likelihoods under a homogeneous and an epoch branch model.
public static void main(String[] args) {
try {
MathUtils.setSeed(666);
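// fixed seed: keeps the simulated alignment, and hence the likelihoods printed below, reproducible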
System.out.println("Test case 1: simulateOnePartition");
int sequenceLength = 1000;
ArrayList<Partition> partitionsList = new ArrayList<Partition>();
// create tree
NewickImporter importer = new NewickImporter("(SimSeq1:73.7468,(SimSeq2:25.256989999999995,SimSeq3:45.256989999999995):18.48981);");
Tree tree = importer.importTree(null);
TreeModel treeModel = new TreeModel(tree);
// create Frequency Model
Parameter freqs = new Parameter.Default(new double[] { 0.25, 0.25, 0.25, 0.25 });
FrequencyModel freqModel = new FrequencyModel(Nucleotides.INSTANCE, freqs);
// create branch model
Parameter kappa1 = new Parameter.Default(1, 1);
Parameter kappa2 = new Parameter.Default(1, 1);
HKY hky1 = new HKY(kappa1, freqModel);
HKY hky2 = new HKY(kappa2, freqModel);
HomogeneousBranchModel homogeneousBranchSubstitutionModel = new HomogeneousBranchModel(hky1);
List<SubstitutionModel> substitutionModels = new ArrayList<SubstitutionModel>();
substitutionModels.add(hky1);
substitutionModels.add(hky2);
List<FrequencyModel> freqModels = new ArrayList<FrequencyModel>();
freqModels.add(freqModel);
Parameter epochTimes = new Parameter.Default(1, 20);
// create branch rate model
Parameter rate = new Parameter.Default(1, 0.001);
BranchRateModel branchRateModel = new StrictClockBranchRates(rate);
// create site model
GammaSiteRateModel siteRateModel = new GammaSiteRateModel("siteModel");
BranchModel homogeneousBranchModel = new HomogeneousBranchModel(hky1);
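// two substitution models with a single transition time (t = 20); presumably hky1 covers the epoch nearest the tips and hky2 the older epoch (the exact orientation is an assumption, not stated on this page)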
BranchModel epochBranchModel = new EpochBranchModel(treeModel, substitutionModels, epochTimes);
// create partition
Partition partition1 = new Partition(treeModel,
homogeneousBranchSubstitutionModel,
siteRateModel,
branchRateModel,
freqModel,
0, // from
sequenceLength - 1, // to
1); // every
partitionsList.add(partition1);
// feed to sequence simulator and generate data
BeagleSequenceSimulator simulator = new BeagleSequenceSimulator(partitionsList);
Alignment alignment = simulator.simulate(false, false);
BeagleTreeLikelihood nbtl = new BeagleTreeLikelihood(alignment, treeModel, homogeneousBranchModel, siteRateModel, branchRateModel, null, false, PartialsRescalingScheme.DEFAULT, false);
System.out.println("nBTL(homogeneous) = " + nbtl.getLogLikelihood());
nbtl = new BeagleTreeLikelihood(alignment, treeModel, epochBranchModel, siteRateModel, branchRateModel, null, false, PartialsRescalingScheme.DEFAULT, false);
System.out.println("nBTL(epoch) = " + nbtl.getLogLikelihood());
} catch (Exception e) {
e.printStackTrace();
System.exit(-1);
}
// END: try-catch block
}
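For orientation, a minimal sketch (not part of the test above) of the two HomogeneousBranchModel constructors that appear throughout this page; all classes are the ones already used above, and the note on a null root model is inferred from how the parsers below pass rootFreqModel.
Parameter uniformFreqs = new Parameter.Default(new double[] { 0.25, 0.25, 0.25, 0.25 });
FrequencyModel uniformFreqModel = new FrequencyModel(Nucleotides.INSTANCE, uniformFreqs);
HKY hkyModel = new HKY(new Parameter.Default(1, 1), uniformFreqModel);
// one-argument form: root frequencies are taken from the substitution model itself
BranchModel simple = new HomogeneousBranchModel(hkyModel);
// two-argument form: an explicit root FrequencyModel (the parsers below pass a possibly-null rootFreqModel here)
BranchModel withRoot = new HomogeneousBranchModel(hkyModel, uniformFreqModel);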
Use of dr.evomodel.branchmodel.HomogeneousBranchModel in project beast-mcmc by beast-dev.
The class DataLikelihoodTester2, method main — compares BeagleTreeLikelihood, BeagleDataLikelihoodDelegate, and MultiPartitionDataLikelihoodDelegate on the same data, and flags several known multi-partition update bugs.
public static void main(String[] args) {
// turn off logging to avoid screen noise...
Logger logger = Logger.getLogger("dr");
logger.setUseParentHandlers(false);
SimpleAlignment alignment = createAlignment(sequences, Nucleotides.INSTANCE);
TreeModel treeModel;
try {
treeModel = createSpecifiedTree("((human:0.1,chimp:0.1):0.1,gorilla:0.2)");
} catch (Exception e) {
throw new RuntimeException("Unable to parse Newick tree");
}
System.out.print("\nTest BeagleTreeLikelihood (kappa = 1): ");
// substitutionModel
Parameter freqs = new Parameter.Default(new double[] { 0.25, 0.25, 0.25, 0.25 });
Parameter kappa = new Parameter.Default(HKYParser.KAPPA, 1.0, 0, 100);
FrequencyModel f = new FrequencyModel(Nucleotides.INSTANCE, freqs);
HKY hky = new HKY(kappa, f);
// siteModel
double alpha = 0.5;
GammaSiteRateModel siteRateModel = new GammaSiteRateModel("gammaModel", alpha, 4);
// GammaSiteRateModel siteRateModel = new GammaSiteRateModel("siteRateModel");
siteRateModel.setSubstitutionModel(hky);
Parameter mu = new Parameter.Default(GammaSiteModelParser.SUBSTITUTION_RATE, 1.0, 0, Double.POSITIVE_INFINITY);
siteRateModel.setRelativeRateParameter(mu);
FrequencyModel f2 = new FrequencyModel(Nucleotides.INSTANCE, freqs);
Parameter kappa2 = new Parameter.Default(HKYParser.KAPPA, 10.0, 0, 100);
HKY hky2 = new HKY(kappa2, f2);
GammaSiteRateModel siteRateModel2 = new GammaSiteRateModel("gammaModel", alpha, 4);
siteRateModel2.setSubstitutionModel(hky2);
siteRateModel2.setRelativeRateParameter(mu);
// treeLikelihood
SitePatterns patterns = new SitePatterns(alignment, null, 0, -1, 1, true);
BranchModel branchModel = new HomogeneousBranchModel(siteRateModel.getSubstitutionModel(), siteRateModel.getSubstitutionModel().getFrequencyModel());
BranchModel branchModel2 = new HomogeneousBranchModel(siteRateModel2.getSubstitutionModel(), siteRateModel2.getSubstitutionModel().getFrequencyModel());
BranchRateModel branchRateModel = new DefaultBranchRateModel();
BeagleTreeLikelihood treeLikelihood = new BeagleTreeLikelihood(patterns, treeModel, branchModel, siteRateModel, branchRateModel, null, false, PartialsRescalingScheme.AUTO, true);
double logLikelihood = treeLikelihood.getLogLikelihood();
System.out.println("logLikelihood = " + logLikelihood);
System.out.print("\nTest BeagleDataLikelihoodDelegate (kappa = 1): ");
BeagleDataLikelihoodDelegate dataLikelihoodDelegate = new BeagleDataLikelihoodDelegate(treeModel, patterns, branchModel, siteRateModel, false, false, PartialsRescalingScheme.NONE, false, PreOrderSettings.getDefault());
TreeDataLikelihood treeDataLikelihood = new TreeDataLikelihood(dataLikelihoodDelegate, treeModel, branchRateModel);
logLikelihood = treeDataLikelihood.getLogLikelihood();
System.out.println("logLikelihood = " + logLikelihood);
hky.setKappa(5.0);
System.out.print("\nTest BeagleDataLikelihoodDelegate (kappa = 5): ");
logLikelihood = treeDataLikelihood.getLogLikelihood();
System.out.println("logLikelihood = " + logLikelihood);
System.out.print("\nTest BeagleDataLikelihoodDelegate (kappa = 10): ");
dataLikelihoodDelegate = new BeagleDataLikelihoodDelegate(treeModel, patterns, branchModel2, siteRateModel2, false, false, PartialsRescalingScheme.NONE, false, PreOrderSettings.getDefault());
treeDataLikelihood = new TreeDataLikelihood(dataLikelihoodDelegate, treeModel, branchRateModel);
logLikelihood = treeDataLikelihood.getLogLikelihood();
System.out.println("logLikelihood = " + logLikelihood);
hky2.setKappa(11.0);
System.out.print("\nTest BeagleDataLikelihoodDelegate (kappa = 11): ");
logLikelihood = treeDataLikelihood.getLogLikelihood();
System.out.println("logLikelihood = " + logLikelihood);
hky.setKappa(1.0);
hky2.setKappa(10.0);
MultiPartitionDataLikelihoodDelegate multiPartitionDataLikelihoodDelegate;
System.out.print("\nTest MultiPartitionDataLikelihoodDelegate 1 partition (kappa = 1):");
try {
multiPartitionDataLikelihoodDelegate = new MultiPartitionDataLikelihoodDelegate(treeModel, Collections.singletonList((PatternList) patterns), Collections.singletonList((BranchModel) branchModel), Collections.singletonList((SiteRateModel) siteRateModel), true, PartialsRescalingScheme.NONE, false);
treeDataLikelihood = new TreeDataLikelihood(multiPartitionDataLikelihoodDelegate, treeModel, branchRateModel);
logLikelihood = treeDataLikelihood.getLogLikelihood();
System.out.println("logLikelihood = " + logLikelihood);
} catch (DataLikelihoodDelegate.DelegateTypeException dte) {
System.out.print("Failed to create multiPartitionDataLikelihoodDelegate instance (wrong resource type or no partitions, needs to be CUDA or OpenCL device with multiple partitions)");
}
hky.setKappa(5.0);
System.out.print("\nTest MultiPartitionDataLikelihoodDelegate 1 partition (kappa = 5):");
logLikelihood = treeDataLikelihood.getLogLikelihood();
System.out.println("logLikelihood = " + logLikelihood);
hky.setKappa(1.0);
System.out.print("\nTest MultiPartitionDataLikelihoodDelegate 1 partition (kappa = 10):");
try {
multiPartitionDataLikelihoodDelegate = new MultiPartitionDataLikelihoodDelegate(treeModel, Collections.singletonList((PatternList) patterns), Collections.singletonList((BranchModel) branchModel2), Collections.singletonList((SiteRateModel) siteRateModel2), true, PartialsRescalingScheme.NONE, false);
treeDataLikelihood = new TreeDataLikelihood(multiPartitionDataLikelihoodDelegate, treeModel, branchRateModel);
logLikelihood = treeDataLikelihood.getLogLikelihood();
System.out.println("logLikelihood = " + logLikelihood);
} catch (DataLikelihoodDelegate.DelegateTypeException dte) {
System.out.print("Failed to create multiPartitionDataLikelihoodDelegate instance (wrong resource type or no partitions, needs to be CUDA or OpenCL device with multiple partitions)");
}
System.out.print("\nTest MultiPartitionDataLikelihoodDelegate 2 partitions (kappa = 1, 10): ");
List<PatternList> patternLists = new ArrayList<PatternList>();
patternLists.add(patterns);
patternLists.add(patterns);
List<SiteRateModel> siteRateModels = new ArrayList<SiteRateModel>();
siteRateModels.add(siteRateModel);
siteRateModels.add(siteRateModel2);
List<BranchModel> branchModels = new ArrayList<BranchModel>();
branchModels.add(branchModel);
branchModels.add(branchModel2);
try {
multiPartitionDataLikelihoodDelegate = new MultiPartitionDataLikelihoodDelegate(treeModel, patternLists, branchModels, siteRateModels, true, PartialsRescalingScheme.NONE, false);
treeDataLikelihood = new TreeDataLikelihood(multiPartitionDataLikelihoodDelegate, treeModel, branchRateModel);
logLikelihood = treeDataLikelihood.getLogLikelihood();
System.out.println("logLikelihood = " + logLikelihood + " (NOT OK: this is 2x the logLikelihood of the 2nd partition)\n\n");
} catch (DataLikelihoodDelegate.DelegateTypeException dte) {
System.out.print("Failed to create multiPartitionDataLikelihoodDelegate instance (wrong resource type or no partitions, needs to be CUDA or OpenCL device with multiple partitions)");
}
System.exit(0);
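// NOTE: the System.exit(0) above makes everything below unreachable; the
// additional tests are kept as documentation of known multi-partition issues.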
// START ADDITIONAL TEST #1 - Guy Baele
System.out.println("-- Test #1 SiteRateModels -- ");
// adjust alpha in partition 1 and reject, then adjust alpha in partition 2 and reject
System.out.print("Adjust alpha in partition 1: ");
siteRateModel.setAlpha(0.4);
logLikelihood = treeDataLikelihood.getLogLikelihood();
System.out.println("logLikelihood = " + logLikelihood);
System.out.print("Return alpha in partition 1 to original value: ");
siteRateModel.setAlpha(0.5);
logLikelihood = treeDataLikelihood.getLogLikelihood();
System.out.println("logLikelihood = " + logLikelihood + " (i.e. reject: OK)\n");
System.out.print("Adjust alpha in partition 2: ");
siteRateModel2.setAlpha(0.35);
logLikelihood = treeDataLikelihood.getLogLikelihood();
System.out.println("logLikelihood = " + logLikelihood);
System.out.print("Return alpha in partition 2 to original value: ");
siteRateModel2.setAlpha(0.5);
logLikelihood = treeDataLikelihood.getLogLikelihood();
System.out.println("logLikelihood = " + logLikelihood + " (i.e. reject: OK)\n");
// adjust alpha in partition 1 and accept, then adjust alpha in partition 2 and accept
System.out.print("Adjust alpha in partition 1: ");
siteRateModel.setAlpha(0.4);
logLikelihood = treeDataLikelihood.getLogLikelihood();
System.out.println("logLikelihood = " + logLikelihood);
System.out.print("Adjust alpha in partition 2: ");
siteRateModel2.setAlpha(0.35);
logLikelihood = treeDataLikelihood.getLogLikelihood();
System.out.println("logLikelihood = " + logLikelihood + " (NOT OK: same logLikelihood as only setting alpha in partition 2)");
System.out.print("Return alpha in partition 1 to original value: ");
siteRateModel.setAlpha(0.5);
logLikelihood = treeDataLikelihood.getLogLikelihood();
System.out.println("logLikelihood = " + logLikelihood + " (NOT OK: alpha in partition 2 has not been returned to original value yet)");
System.out.print("Return alpha in partition 2 to original value: ");
siteRateModel2.setAlpha(0.5);
logLikelihood = treeDataLikelihood.getLogLikelihood();
System.out.println("logLikelihood = " + logLikelihood + "\n");
// adjusting alphas in both partitions without explicitly calling getLogLikelihood() in between
System.out.print("Adjust both alphas in partitions 1 and 2: ");
siteRateModel.setAlpha(0.4);
siteRateModel2.setAlpha(0.35);
logLikelihood = treeDataLikelihood.getLogLikelihood();
System.out.println("logLikelihood = " + logLikelihood);
System.out.print("Return alpha in partition 2 to original value: ");
siteRateModel2.setAlpha(0.5);
logLikelihood = treeDataLikelihood.getLogLikelihood();
System.out.println("logLikelihood = " + logLikelihood + " (NOT OK: alpha in partition 1 has not been returned to original value yet)");
System.out.print("Return alpha in partition 1 to original value: ");
siteRateModel.setAlpha(0.5);
logLikelihood = treeDataLikelihood.getLogLikelihood();
System.out.println("logLikelihood = " + logLikelihood + "\n\n");
// END ADDITIONAL TEST - Guy Baele
// START ADDITIONAL TEST #2 - Guy Baele
System.out.println("-- Test #2 SiteRateModels -- ");
logLikelihood = treeDataLikelihood.getLogLikelihood();
System.out.println("logLikelihood = " + logLikelihood);
// 1 siteRateModel shared across 2 partitions
siteRateModels = new ArrayList<SiteRateModel>();
siteRateModels.add(siteRateModel);
try {
multiPartitionDataLikelihoodDelegate = new MultiPartitionDataLikelihoodDelegate(treeModel, patternLists, branchModels, siteRateModels, true, PartialsRescalingScheme.NONE, false);
treeDataLikelihood = new TreeDataLikelihood(multiPartitionDataLikelihoodDelegate, treeModel, branchRateModel);
logLikelihood = treeDataLikelihood.getLogLikelihood();
System.out.println("logLikelihood = " + logLikelihood + "\n");
System.out.print("Adjust alpha in shared siteRateModel: ");
siteRateModel.setAlpha(0.4);
logLikelihood = treeDataLikelihood.getLogLikelihood();
System.out.println("logLikelihood = " + logLikelihood + " (NOT OK: same logLikelihood as only adjusted alpha for partition 1)");
siteRateModel.setAlpha(0.5);
logLikelihood = treeDataLikelihood.getLogLikelihood();
System.out.println("logLikelihood = " + logLikelihood + "\n\n");
} catch (DataLikelihoodDelegate.DelegateTypeException dte) {
System.out.print("Failed to create multiPartitionDataLikelihoodDelegate instance (wrong resource type or no partitions, needs to be CUDA or OpenCL device with multiple partitions)");
}
// END ADDITIONAL TEST - Guy Baele
// START ADDITIONAL TEST #3 - Guy Baele
System.out.println("-- Test #3 SiteRateModels -- ");
siteRateModel = new GammaSiteRateModel("gammaModel");
siteRateModel.setSubstitutionModel(hky);
siteRateModel.setRelativeRateParameter(mu);
siteRateModel2 = new GammaSiteRateModel("gammaModel2");
siteRateModel2.setSubstitutionModel(hky2);
siteRateModel2.setRelativeRateParameter(mu);
siteRateModels = new ArrayList<SiteRateModel>();
siteRateModels.add(siteRateModel);
siteRateModels.add(siteRateModel2);
try {
multiPartitionDataLikelihoodDelegate = new MultiPartitionDataLikelihoodDelegate(treeModel, patternLists, branchModels, siteRateModels, true, PartialsRescalingScheme.NONE, false);
treeDataLikelihood = new TreeDataLikelihood(multiPartitionDataLikelihoodDelegate, treeModel, branchRateModel);
logLikelihood = treeDataLikelihood.getLogLikelihood();
System.out.println("logLikelihood = " + logLikelihood + "\n");
System.out.print("Adjust kappa in partition 1: ");
hky.setKappa(5.0);
logLikelihood = treeDataLikelihood.getLogLikelihood();
System.out.println("logLikelihood = " + logLikelihood + " (NOT OK: logLikelihood has not changed?)");
System.out.print("Return kappa in partition 1 to original value: ");
hky.setKappa(1.0);
logLikelihood = treeDataLikelihood.getLogLikelihood();
System.out.println("logLikelihood = " + logLikelihood + "\n");
System.out.print("Adjust kappa in partition 2: ");
hky2.setKappa(11.0);
logLikelihood = treeDataLikelihood.getLogLikelihood();
System.out.println("logLikelihood = " + logLikelihood);
System.out.print("Return kappa in partition 2 to original value: ");
hky2.setKappa(10.0);
logLikelihood = treeDataLikelihood.getLogLikelihood();
System.out.println("logLikelihood = " + logLikelihood + " (i.e. reject: OK)\n\n");
} catch (DataLikelihoodDelegate.DelegateTypeException dte) {
System.out.print("Failed to create multiPartitionDataLikelihoodDelegate instance (wrong resource type or no partitions, needs to be CUDA or OpenCL device with multiple partitions)");
}
// END ADDITIONAL TEST - Guy Baele
// START ADDITIONAL TEST #4 - Guy Baele
System.out.println("-- Test #4 SiteRateModels -- ");
SimpleAlignment secondAlignment = createAlignment(moreSequences, Nucleotides.INSTANCE);
SitePatterns morePatterns = new SitePatterns(secondAlignment, null, 0, -1, 1, true);
BeagleDataLikelihoodDelegate dataLikelihoodDelegateOne = new BeagleDataLikelihoodDelegate(treeModel, patterns, branchModel, siteRateModel, false, false, PartialsRescalingScheme.NONE, false, PreOrderSettings.getDefault());
TreeDataLikelihood treeDataLikelihoodOne = new TreeDataLikelihood(dataLikelihoodDelegateOne, treeModel, branchRateModel);
logLikelihood = treeDataLikelihoodOne.getLogLikelihood();
System.out.println("\nBeagleDataLikelihoodDelegate logLikelihood partition 1 (kappa = 1) = " + logLikelihood);
hky.setKappa(10.0);
logLikelihood = treeDataLikelihoodOne.getLogLikelihood();
System.out.println("BeagleDataLikelihoodDelegate logLikelihood partition 1 (kappa = 10) = " + logLikelihood);
hky.setKappa(1.0);
BeagleDataLikelihoodDelegate dataLikelihoodDelegateTwo = new BeagleDataLikelihoodDelegate(treeModel, morePatterns, branchModel2, siteRateModel2, false, false, PartialsRescalingScheme.NONE, false, PreOrderSettings.getDefault());
TreeDataLikelihood treeDataLikelihoodTwo = new TreeDataLikelihood(dataLikelihoodDelegateTwo, treeModel, branchRateModel);
logLikelihood = treeDataLikelihoodTwo.getLogLikelihood();
System.out.println("BeagleDataLikelihoodDelegate logLikelihood partition 2 (kappa = 10) = " + logLikelihood + "\n");
try {
multiPartitionDataLikelihoodDelegate = new MultiPartitionDataLikelihoodDelegate(treeModel, Collections.singletonList((PatternList) patterns), Collections.singletonList((BranchModel) branchModel), Collections.singletonList((SiteRateModel) siteRateModel), true, PartialsRescalingScheme.NONE, false);
treeDataLikelihood = new TreeDataLikelihood(multiPartitionDataLikelihoodDelegate, treeModel, branchRateModel);
logLikelihood = treeDataLikelihood.getLogLikelihood();
System.out.print("Test MultiPartitionDataLikelihoodDelegate 1st partition (kappa = 1):");
System.out.println("logLikelihood = " + logLikelihood);
hky.setKappa(10.0);
logLikelihood = treeDataLikelihood.getLogLikelihood();
System.out.print("Test MultiPartitionDataLikelihoodDelegate 1st partition (kappa = 10):");
System.out.println("logLikelihood = " + logLikelihood);
} catch (DataLikelihoodDelegate.DelegateTypeException dte) {
System.out.print("Failed to create multiPartitionDataLikelihoodDelegate instance (wrong resource type or no partitions, needs to be CUDA or OpenCL device with multiple partitions)");
}
hky.setKappa(1.0);
try {
multiPartitionDataLikelihoodDelegate = new MultiPartitionDataLikelihoodDelegate(treeModel, Collections.singletonList((PatternList) morePatterns), Collections.singletonList((BranchModel) branchModel2), Collections.singletonList((SiteRateModel) siteRateModel2), true, PartialsRescalingScheme.NONE, false);
treeDataLikelihood = new TreeDataLikelihood(multiPartitionDataLikelihoodDelegate, treeModel, branchRateModel);
logLikelihood = treeDataLikelihood.getLogLikelihood();
System.out.print("Test MultiPartitionDataLikelihoodDelegate 2nd partition (kappa = 10):");
System.out.println("logLikelihood = " + logLikelihood + "\n");
} catch (DataLikelihoodDelegate.DelegateTypeException dte) {
System.out.print("Failed to create multiPartitionDataLikelihoodDelegate instance (wrong resource type or no partitions, needs to be CUDA or OpenCL device with multiple partitions)");
}
patternLists = new ArrayList<PatternList>();
patternLists.add(patterns);
patternLists.add(morePatterns);
try {
multiPartitionDataLikelihoodDelegate = new MultiPartitionDataLikelihoodDelegate(treeModel, patternLists, branchModels, siteRateModels, true, PartialsRescalingScheme.NONE, false);
treeDataLikelihood = new TreeDataLikelihood(multiPartitionDataLikelihoodDelegate, treeModel, branchRateModel);
logLikelihood = treeDataLikelihood.getLogLikelihood();
System.out.print("Test MultiPartitionDataLikelihoodDelegate 2 partitions (kappa = 1, 10): ");
System.out.println("logLikelihood = " + logLikelihood + " (NOT OK: should be the sum of both separate logLikelihoods)\nKappa value of partition 2 is used to compute logLikelihood for both partitions?");
} catch (DataLikelihoodDelegate.DelegateTypeException dte) {
System.out.print("Failed to create multiPartitionDataLikelihoodDelegate instance (wrong resource type or no partitions, needs to be CUDA or OpenCL device with multiple partitions)");
}
// END ADDITIONAL TEST - Guy Baele
}
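The additional tests above all apply one verification pattern: perturb a parameter, recompute, restore it, and check that the restored value reproduces the original log likelihood exactly. A condensed sketch of that pattern, assuming hky and treeDataLikelihood are wired as in the code above:
double before = treeDataLikelihood.getLogLikelihood();
hky.setKappa(5.0); // perturb: fires a model-changed event that should invalidate cached partials
double perturbed = treeDataLikelihood.getLogLikelihood();
hky.setKappa(1.0); // restore the original value, as on an MCMC reject
double after = treeDataLikelihood.getLogLikelihood();
// a correct delegate yields after == before (and perturbed != before); the
// "NOT OK" lines above mark where MultiPartitionDataLikelihoodDelegate violates this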
Use of dr.evomodel.branchmodel.HomogeneousBranchModel in project beast-mcmc by beast-dev.
The class BalancedBeagleTreeLikelihoodParser, method parseXMLObject — times likelihood evaluations at increasing BEAGLE instance counts and returns the fastest CompoundLikelihood.
public Object parseXMLObject(XMLObject xo) throws XMLParseException {
boolean useAmbiguities = xo.getAttribute(BeagleTreeLikelihoodParser.USE_AMBIGUITIES, false);
/*int instanceCount = xo.getAttribute(INSTANCE_COUNT, 1);
if (instanceCount < 1) {
instanceCount = 1;
}
String ic = System.getProperty(BEAGLE_INSTANCE_COUNT);
if (ic != null && ic.length() > 0) {
instanceCount = Integer.parseInt(ic);
}*/
PatternList patternList = (PatternList) xo.getChild(PatternList.class);
TreeModel treeModel = (TreeModel) xo.getChild(TreeModel.class);
GammaSiteRateModel siteRateModel = (GammaSiteRateModel) xo.getChild(GammaSiteRateModel.class);
FrequencyModel rootFreqModel = (FrequencyModel) xo.getChild(FrequencyModel.class);
BranchModel branchModel = (BranchModel) xo.getChild(BranchModel.class);
if (branchModel == null) {
SubstitutionModel substitutionModel = (SubstitutionModel) xo.getChild(SubstitutionModel.class);
if (substitutionModel == null) {
substitutionModel = siteRateModel.getSubstitutionModel();
}
if (substitutionModel == null) {
throw new XMLParseException("No substitution model available for TreeLikelihood: " + xo.getId());
}
branchModel = new HomogeneousBranchModel(substitutionModel, rootFreqModel);
}
BranchRateModel branchRateModel = (BranchRateModel) xo.getChild(BranchRateModel.class);
TipStatesModel tipStatesModel = (TipStatesModel) xo.getChild(TipStatesModel.class);
// if (xo.getChild(TipStatesModel.class) != null) {
// throw new XMLParseException("Sequence Error Models are not supported under BEAGLE yet. Please use Native BEAST Likelihood.");
// }
PartialsRescalingScheme scalingScheme = PartialsRescalingScheme.DEFAULT;
if (xo.hasAttribute(BeagleTreeLikelihoodParser.SCALING_SCHEME)) {
// scalingScheme = PartialsRescalingScheme.parseFromString(xo.getStringAttribute(BeagleTreeLikelihoodParser.SCALING_SCHEME));
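// NB: with the parse call above commented out, scalingScheme is still DEFAULT at this point, so the null check below can never fire.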
if (scalingScheme == null)
throw new XMLParseException("Unknown scaling scheme '" + xo.getStringAttribute(BeagleTreeLikelihoodParser.SCALING_SCHEME) + "' in " + "OldBeagleTreeLikelihood object '" + xo.getId());
}
boolean delayScaling = true;
Map<Set<String>, Parameter> partialsRestrictions = null;
if (xo.hasChildNamed(PARTIALS_RESTRICTION)) {
XMLObject cxo = xo.getChild(PARTIALS_RESTRICTION);
TaxonList taxonList = (TaxonList) cxo.getChild(TaxonList.class);
// Parameter parameter = (Parameter) cxo.getChild(Parameter.class);
try {
TreeUtils.getLeavesForTaxa(treeModel, taxonList);
} catch (TreeUtils.MissingTaxonException e) {
throw new XMLParseException("Unable to parse taxon list: " + e.getMessage());
}
throw new XMLParseException("Restricting internal nodes is not yet implemented. Contact Marc");
}
/*if (instanceCount == 1 || patternList.getPatternCount() < instanceCount) {
return createTreeLikelihood(
patternList,
treeModel,
branchModel,
siteRateModel,
branchRateModel,
tipStatesModel,
useAmbiguities,
scalingScheme,
partialsRestrictions,
xo
);
}*/
// first run a test for instanceCount == 1
System.err.println("\nTesting instanceCount == 1");
Likelihood baseLikelihood = createTreeLikelihood(patternList, treeModel, branchModel, siteRateModel, branchRateModel, tipStatesModel, useAmbiguities, scalingScheme, delayScaling, partialsRestrictions, xo);
double start = System.nanoTime();
for (int i = 0; i < TEST_RUNS; i++) {
baseLikelihood.makeDirty();
baseLikelihood.getLogLikelihood();
}
double end = System.nanoTime();
double baseResult = end - start;
System.err.println("Evaluation took: " + baseResult);
if (!(patternList instanceof SitePatterns)) {
throw new XMLParseException("BEAGLE_INSTANCES option cannot be used with BEAUti-selected codon partitioning.");
}
if (tipStatesModel != null) {
throw new XMLParseException("BEAGLE_INSTANCES option cannot be used with a TipStateModel (i.e., a sequence error model).");
}
// List<Likelihood> likelihoods = new ArrayList<Likelihood>();
List<Likelihood> likelihoods = null;
CompoundLikelihood compound = null;
int instanceCount = 2;
boolean optimal = false;
while (!optimal) {
System.err.println("\nCreating instanceCount == " + instanceCount);
likelihoods = new ArrayList<Likelihood>();
for (int i = 0; i < instanceCount; i++) {
Patterns subPatterns = new Patterns((SitePatterns) patternList, 0, 0, 1, i, instanceCount);
AbstractTreeLikelihood treeLikelihood = createTreeLikelihood(subPatterns, treeModel, branchModel, siteRateModel, branchRateModel, null, useAmbiguities, scalingScheme, delayScaling, partialsRestrictions, xo);
treeLikelihood.setId(xo.getId() + "_" + instanceCount);
likelihoods.add(treeLikelihood);
}
// construct compoundLikelihood
compound = new CompoundLikelihood(instanceCount, likelihoods);
// test timings
System.err.println("\nTesting instanceCount == " + instanceCount);
start = System.nanoTime();
for (int i = 0; i < TEST_RUNS; i++) {
compound.makeDirty();
compound.getLogLikelihood();
}
end = System.nanoTime();
double newResult = end - start;
System.err.println("Evaluation took: " + newResult);
if (baseResult / newResult > TEST_CUTOFF) {
instanceCount++;
baseResult = newResult;
} else {
optimal = true;
instanceCount--;
System.err.println("\nCreating final BeagleTreeLikelihood with instanceCount: " + instanceCount);
likelihoods = new ArrayList<Likelihood>();
for (int i = 0; i < instanceCount; i++) {
Patterns subPatterns = new Patterns((SitePatterns) patternList, 0, 0, 1, i, instanceCount);
AbstractTreeLikelihood treeLikelihood = createTreeLikelihood(subPatterns, treeModel, branchModel, siteRateModel, branchRateModel, null, useAmbiguities, scalingScheme, delayScaling, partialsRestrictions, xo);
treeLikelihood.setId(xo.getId() + "_" + instanceCount);
likelihoods.add(treeLikelihood);
}
// construct compoundLikelihood
compound = new CompoundLikelihood(instanceCount, likelihoods);
}
}
return compound;
/*for (int i = 0; i < instanceCount; i++) {
Patterns subPatterns = new Patterns((SitePatterns)patternList, 0, 0, 1, i, instanceCount);
AbstractTreeLikelihood treeLikelihood = createTreeLikelihood(
subPatterns,
treeModel,
branchModel,
siteRateModel,
branchRateModel,
null,
useAmbiguities,
scalingScheme,
partialsRestrictions,
xo);
treeLikelihood.setId(xo.getId() + "_" + instanceCount);
likelihoods.add(treeLikelihood);
}
return new CompoundLikelihood(likelihoods);*/
}
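The timing loop above implements a simple policy: keep adding BEAGLE instances while each additional instance speeds evaluation up by more than TEST_CUTOFF. A generic sketch of that policy, where buildCompound and time are hypothetical helpers standing in for the construction and timing code in the parser:
private CompoundLikelihood tuneInstanceCount() {
    double baseResult = time(buildCompound(1)); // TEST_RUNS iterations of makeDirty() + getLogLikelihood()
    int instanceCount = 2;
    while (true) {
        double newResult = time(buildCompound(instanceCount));
        if (baseResult / newResult > TEST_CUTOFF) {
            // still improving by more than the cutoff factor: try one more instance
            baseResult = newResult;
            instanceCount++;
        } else {
            // the previous count was the last one that paid off
            return buildCompound(instanceCount - 1);
        }
    }
}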
Use of dr.evomodel.branchmodel.HomogeneousBranchModel in project beast-mcmc by beast-dev.
The class BeagleTreeLikelihoodParser, method parseXMLObject — builds a single BeagleTreeLikelihood or, when instanceCount > 1, splits the patterns across several BEAGLE instances wrapped in a CompoundLikelihood.
public Object parseXMLObject(XMLObject xo) throws XMLParseException {
boolean useAmbiguities = xo.getAttribute(USE_AMBIGUITIES, false);
int instanceCount = xo.getAttribute(INSTANCE_COUNT, 1);
if (instanceCount < 1) {
instanceCount = 1;
}
String ic = System.getProperty(BEAGLE_INSTANCE_COUNT);
if (ic != null && ic.length() > 0) {
instanceCount = Integer.parseInt(ic);
}
PatternList patternList = (PatternList) xo.getChild(PatternList.class);
MutableTreeModel treeModel = (MutableTreeModel) xo.getChild(MutableTreeModel.class);
GammaSiteRateModel siteRateModel = (GammaSiteRateModel) xo.getChild(GammaSiteRateModel.class);
FrequencyModel rootFreqModel = (FrequencyModel) xo.getChild(FrequencyModel.class);
BranchModel branchModel = (BranchModel) xo.getChild(BranchModel.class);
if (branchModel == null) {
SubstitutionModel substitutionModel = (SubstitutionModel) xo.getChild(SubstitutionModel.class);
if (substitutionModel == null) {
substitutionModel = siteRateModel.getSubstitutionModel();
}
if (substitutionModel == null) {
throw new XMLParseException("No substitution model available for TreeLikelihood: " + xo.getId());
}
branchModel = new HomogeneousBranchModel(substitutionModel, rootFreqModel);
}
BranchRateModel branchRateModel = (BranchRateModel) xo.getChild(BranchRateModel.class);
if (branchModel instanceof EpochBranchModel && rootFreqModel != null) {
EpochBranchModel epochBranchModel = (EpochBranchModel) branchModel;
epochBranchModel.setRootFrequencyModel(rootFreqModel);
}
TipStatesModel tipStatesModel = (TipStatesModel) xo.getChild(TipStatesModel.class);
// if (xo.getChild(TipStatesModel.class) != null) {
// throw new XMLParseException("Sequence Error Models are not supported under BEAGLE yet. Please use Native BEAST Likelihood.");
// }
PartialsRescalingScheme scalingScheme = PartialsRescalingScheme.DEFAULT;
boolean delayScaling = true;
if (xo.hasAttribute(SCALING_SCHEME)) {
scalingScheme = PartialsRescalingScheme.parseFromString(xo.getStringAttribute(SCALING_SCHEME));
if (scalingScheme == null)
throw new XMLParseException("Unknown scaling scheme '" + xo.getStringAttribute(SCALING_SCHEME) + "' in " + "OldBeagleTreeLikelihood object '" + xo.getId());
}
if (xo.hasAttribute(DELAY_SCALING)) {
delayScaling = xo.getBooleanAttribute(DELAY_SCALING);
}
Map<Set<String>, Parameter> partialsRestrictions = null;
if (xo.hasChildNamed(PARTIALS_RESTRICTION)) {
XMLObject cxo = xo.getChild(PARTIALS_RESTRICTION);
TaxonList taxonList = (TaxonList) cxo.getChild(TaxonList.class);
// Parameter parameter = (Parameter) cxo.getChild(Parameter.class);
try {
TreeUtils.getLeavesForTaxa(treeModel, taxonList);
} catch (TreeUtils.MissingTaxonException e) {
throw new XMLParseException("Unable to parse taxon list: " + e.getMessage());
}
throw new XMLParseException("Restricting internal nodes is not yet implemented. Contact Marc");
}
int beagleThreadCount = -1;
if (System.getProperty(BEAGLE_THREAD_COUNT) != null) {
beagleThreadCount = Integer.parseInt(System.getProperty(BEAGLE_THREAD_COUNT));
}
if (beagleThreadCount == -1) {
// the default is -1 threads (automatic thread pool size) but an XML attribute can override it
int threadCount = xo.getAttribute(THREADS, -1);
if (System.getProperty(THREAD_COUNT) != null) {
threadCount = Integer.parseInt(System.getProperty(THREAD_COUNT));
}
// Todo: allow for different number of threads per beagle instance according to pattern counts
if (threadCount >= 0) {
System.setProperty(BEAGLE_THREAD_COUNT, Integer.toString(threadCount / instanceCount));
}
}
if (instanceCount == 1 || patternList.getPatternCount() < instanceCount) {
return createTreeLikelihood(patternList, treeModel, branchModel, siteRateModel, branchRateModel, tipStatesModel, useAmbiguities, scalingScheme, delayScaling, partialsRestrictions, xo);
}
if (tipStatesModel != null) {
throw new XMLParseException("BEAGLE_INSTANCES option cannot be used with a TipStateModel (i.e., a sequence error model).");
}
List<Likelihood> likelihoods = new ArrayList<Likelihood>();
for (int i = 0; i < instanceCount; i++) {
Patterns subPatterns = new Patterns(patternList, i, instanceCount);
AbstractTreeLikelihood treeLikelihood = createTreeLikelihood(subPatterns, treeModel, branchModel, siteRateModel, branchRateModel, null, useAmbiguities, scalingScheme, delayScaling, partialsRestrictions, xo);
treeLikelihood.setId(xo.getId() + "_" + instanceCount);
likelihoods.add(treeLikelihood);
}
return new CompoundLikelihood(likelihoods);
}
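Unlike the balanced parser above, this parser takes instanceCount from the XML attribute or the BEAGLE_INSTANCE_COUNT system property rather than tuning it, and splits sites with the simpler Patterns(patternList, i, instanceCount) constructor. A sketch of forcing the split from code, assuming the BEAGLE_INSTANCE_COUNT constant is publicly accessible (its property-name value is not shown on this page):
// run before XML parsing so every tree likelihood is split four ways
System.setProperty(BeagleTreeLikelihoodParser.BEAGLE_INSTANCE_COUNT, "4");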
Use of dr.evomodel.branchmodel.HomogeneousBranchModel in project beast-mcmc by beast-dev.
The class TreeDataLikelihoodParser, method parseXMLObject — collects pattern lists, site rate models, and branch models for one or more partitions and delegates to createTreeDataLikelihood.
public Object parseXMLObject(XMLObject xo) throws XMLParseException {
boolean useAmbiguities = xo.getAttribute(USE_AMBIGUITIES, false);
boolean usePreOrder = xo.getAttribute(USE_PREORDER, false);
boolean branchRateDerivative = xo.getAttribute(BRANCHRATE_DERIVATIVE, usePreOrder);
boolean branchInfinitesimalDerivative = xo.getAttribute(BRANCHINFINITESIMAL_DERIVATIVE, false);
if (usePreOrder != (branchRateDerivative || branchInfinitesimalDerivative)) {
throw new RuntimeException("Need to specify derivative types.");
}
PreOrderSettings settings = new PreOrderSettings(usePreOrder, branchRateDerivative, branchInfinitesimalDerivative);
// TreeDataLikelihood doesn't currently support Instances defined from the command line
// int instanceCount = xo.getAttribute(INSTANCE_COUNT, 1);
// if (instanceCount < 1) {
// instanceCount = 1;
// }
//
// String ic = System.getProperty(BEAGLE_INSTANCE_COUNT);
// if (ic != null && ic.length() > 0) {
// instanceCount = Integer.parseInt(ic);
// }
List<PatternList> patternLists = new ArrayList<PatternList>();
List<SiteRateModel> siteRateModels = new ArrayList<SiteRateModel>();
List<BranchModel> branchModels = new ArrayList<BranchModel>();
boolean hasSinglePartition = false;
PatternList patternList = (PatternList) xo.getChild(PatternList.class);
if (patternList != null) {
hasSinglePartition = true;
patternLists.add(patternList);
GammaSiteRateModel siteRateModel = (GammaSiteRateModel) xo.getChild(GammaSiteRateModel.class);
siteRateModels.add(siteRateModel);
FrequencyModel rootFreqModel = (FrequencyModel) xo.getChild(FrequencyModel.class);
BranchModel branchModel = (BranchModel) xo.getChild(BranchModel.class);
if (branchModel == null) {
SubstitutionModel substitutionModel = (SubstitutionModel) xo.getChild(SubstitutionModel.class);
if (substitutionModel == null) {
substitutionModel = siteRateModel.getSubstitutionModel();
}
if (substitutionModel == null) {
throw new XMLParseException("No substitution model available for partition in DataTreeLikelihood: " + xo.getId());
}
branchModel = new HomogeneousBranchModel(substitutionModel, rootFreqModel);
}
branchModels.add(branchModel);
}
int k = 0;
for (int i = 0; i < xo.getChildCount(); i++) {
if (xo.getChildName(i).equals(PARTITION)) {
if (hasSinglePartition) {
throw new XMLParseException("Either a single set of patterns should be given or multiple 'partitions' elements within DataTreeLikelihood: " + xo.getId());
}
k += 1;
XMLObject cxo = (XMLObject) xo.getChild(i);
patternList = (PatternList) cxo.getChild(PatternList.class);
patternLists.add(patternList);
GammaSiteRateModel siteRateModel = (GammaSiteRateModel) cxo.getChild(GammaSiteRateModel.class);
siteRateModels.add(siteRateModel);
FrequencyModel rootFreqModel = (FrequencyModel) xo.getChild(FrequencyModel.class);
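// note: the root FrequencyModel is read from the top-level element (xo), not the partition element (cxo); it appears to be shared across partitions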
BranchModel branchModel = (BranchModel) cxo.getChild(BranchModel.class);
if (branchModel == null) {
SubstitutionModel substitutionModel = (SubstitutionModel) xo.getChild(SubstitutionModel.class);
if (substitutionModel == null) {
substitutionModel = siteRateModel.getSubstitutionModel();
}
if (substitutionModel == null) {
throw new XMLParseException("No substitution model available for partition " + k + " in DataTreeLikelihood: " + xo.getId());
}
branchModel = new HomogeneousBranchModel(substitutionModel, rootFreqModel);
}
branchModels.add(branchModel);
BranchRateModel branchRateModel = (BranchRateModel) cxo.getChild(BranchRateModel.class);
if (branchRateModel != null) {
throw new XMLParseException("Partitions are not currently allowed their own BranchRateModel in TreeDataLikelihood object '" + xo.getId());
}
}
}
if (patternLists.size() == 0) {
throw new XMLParseException("Either a single set of patterns should be given or multiple 'partitions' elements within DataTreeLikelihood: " + xo.getId());
}
Tree treeModel = (Tree) xo.getChild(Tree.class);
BranchRateModel branchRateModel = (BranchRateModel) xo.getChild(BranchRateModel.class);
if (branchRateModel == null) {
throw new XMLParseException("BranchRateModel missing from TreeDataLikelihood object '" + xo.getId());
// branchRateModel = new DefaultBranchRateModel();
}
TipStatesModel tipStatesModel = (TipStatesModel) xo.getChild(TipStatesModel.class);
final boolean preferGPU = xo.getAttribute(PREFER_GPU, false);
PartialsRescalingScheme scalingScheme = PartialsRescalingScheme.DEFAULT;
if (xo.hasAttribute(SCALING_SCHEME)) {
scalingScheme = PartialsRescalingScheme.parseFromString(xo.getStringAttribute(SCALING_SCHEME));
if (scalingScheme == null)
throw new XMLParseException("Unknown scaling scheme '" + xo.getStringAttribute(SCALING_SCHEME) + "' in " + "TreeDataLikelihood object '" + xo.getId());
}
final boolean delayScaling = xo.getAttribute(DELAY_SCALING, true);
if (tipStatesModel != null) {
throw new XMLParseException("BEAGLE_INSTANCES option cannot be used with a TipStateModel (i.e., a sequence error model).");
}
return createTreeDataLikelihood(patternLists, branchModels, siteRateModels, treeModel, branchRateModel, null, useAmbiguities, preferGPU, scalingScheme, delayScaling, settings);
}
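For comparison with DataLikelihoodTester2 above, a sketch of the construction that createTreeDataLikelihood presumably performs for the multi-partition case; the fallback branch is an assumption, mirroring the delegate constructors shown earlier on this page rather than the factory's confirmed behavior:
TreeDataLikelihood likelihood;
try {
    // multi-partition path: requires a CUDA or OpenCL device per the messages above
    MultiPartitionDataLikelihoodDelegate delegate = new MultiPartitionDataLikelihoodDelegate(
            treeModel, patternLists, branchModels, siteRateModels,
            useAmbiguities, scalingScheme, delayScaling);
    likelihood = new TreeDataLikelihood(delegate, treeModel, branchRateModel);
} catch (DataLikelihoodDelegate.DelegateTypeException dte) {
    // fallback: a single-partition delegate (sketch assumes patternLists.size() == 1)
    BeagleDataLikelihoodDelegate delegate = new BeagleDataLikelihoodDelegate(
            treeModel, patternLists.get(0), branchModels.get(0), siteRateModels.get(0),
            useAmbiguities, preferGPU, scalingScheme, delayScaling, settings);
    likelihood = new TreeDataLikelihood(delegate, treeModel, branchRateModel);
}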