Use of dr.evolution.alignment.AscertainedSitePatterns in the beast-dev/beast-mcmc project.
From the class AscertainedSitePatternsParser, method parseXMLObject:
/**
 * Parses an ascertained site-patterns XML element into an
 * {@link AscertainedSitePatterns} built from the child alignment.
 *
 * Recognised attributes: {@code from}/{@code to} (1-based site range,
 * defaulting to the whole alignment) and {@code every} (site stride,
 * default 1). Optional child elements: a taxon list restricting the taxa,
 * and {@code includePatterns}/{@code excludePatterns} elements giving
 * 1-based column ranges used for the ascertainment correction.
 *
 * @param xo the XML element being parsed
 * @return the constructed {@link AscertainedSitePatterns}
 * @throws XMLParseException if any attribute is missing, out of range or
 *         inconsistent (e.g. {@code to < from})
 */
public Object parseXMLObject(XMLObject xo) throws XMLParseException {
    Alignment alignment = (Alignment) xo.getChild(Alignment.class);
    XMLObject xoc;
    TaxonList taxa = null;

    int from = -1;
    int to = -1;
    int every = xo.getAttribute(EVERY, 1);
    if (every <= 0)
        throw new XMLParseException("illegal 'every' attribute in patterns element");

    int startInclude = -1;
    int stopInclude = -1;
    int startExclude = -1;
    int stopExclude = -1;

    // 'from'/'to' are 1-based in the XML; convert to 0-based site indices.
    if (xo.hasAttribute(FROM)) {
        from = xo.getIntegerAttribute(FROM) - 1;
        if (from < 0)
            throw new XMLParseException("illegal 'from' attribute in patterns element");
    }

    if (xo.hasAttribute(TO)) {
        to = xo.getIntegerAttribute(TO) - 1;
        if (to < 0 || to < from)
            throw new XMLParseException("illegal 'to' attribute in patterns element");
    }

    if (xo.hasChildNamed(TAXON_LIST)) {
        taxa = (TaxonList) xo.getElementFirstChild(TAXON_LIST);
    }

    if (from > alignment.getSiteCount())
        throw new XMLParseException("illegal 'from' attribute in patterns element");
    if (to > alignment.getSiteCount())
        throw new XMLParseException("illegal 'to' attribute in patterns element");

    // Default to the full alignment when 'from'/'to' were not supplied.
    if (from < 0)
        from = 0;
    if (to < 0)
        to = alignment.getSiteCount() - 1;

    Logger.getLogger("dr.evoxml").info("Creating ascertained site patterns '" + xo.getId() + "' from positions " + Integer.toString(from + 1) + "-" + Integer.toString(to + 1) + " of alignment '" + alignment.getId() + "'");
    if (every > 1) {
        Logger.getLogger("dr.evoxml").info(" only using every " + every + " site");
    }

    // Parse the includePatterns range: startInclude is converted to a
    // 0-based inclusive index, stopInclude stays 1-based (exclusive bound).
    if (xo.hasChildNamed(INCLUDE)) {
        xoc = xo.getChild(INCLUDE);
        if (xoc.hasAttribute(FROM) && xoc.hasAttribute(TO)) {
            startInclude = xoc.getIntegerAttribute(FROM) - 1;
            stopInclude = xoc.getIntegerAttribute(TO);
        } else {
            throw new XMLParseException("both from and to attributes are required for includePatterns");
        }
        if (startInclude < 0 || stopInclude < startInclude) {
            throw new XMLParseException("invalid 'from' and 'to' attributes in includePatterns");
        }
        Logger.getLogger("dr.evoxml").info("\tAscertainment: Patterns in columns " + (startInclude + 1) + " to " + (stopInclude) + " are only possible. ");
    }

    // Parse the excludePatterns range (same convention as includePatterns).
    if (xo.hasChildNamed(EXCLUDE)) {
        xoc = xo.getChild(EXCLUDE);
        if (xoc.hasAttribute(FROM) && xoc.hasAttribute(TO)) {
            startExclude = xoc.getIntegerAttribute(FROM) - 1;
            stopExclude = xoc.getIntegerAttribute(TO);
        } else {
            throw new XMLParseException("both from and to attributes are required for excludePatterns");
        }
        if (startExclude < 0 || stopExclude < startExclude) {
            // Fixed: this message previously said "includePatterns" (copy-paste
            // error from the include branch above).
            throw new XMLParseException("invalid 'from' and 'to' attributes in excludePatterns");
        }
        Logger.getLogger("dr.evoxml").info("\tAscertainment: Patterns in columns " + (startExclude + 1) + " to " + (stopExclude) + " are not possible. ");
    }

    AscertainedSitePatterns patterns = new AscertainedSitePatterns(alignment, taxa, from, to, every, startInclude, stopInclude, startExclude, stopExclude);

    Logger.getLogger("dr.evoxml").info("\tThere are " + patterns.getPatternCount() + " patterns in total.");
    Logger.getLogger("dr.evoxml").info("\tPlease cite:\n" + Citable.Utils.getCitationString(patterns));
    return patterns;
}
Use of dr.evolution.alignment.AscertainedSitePatterns in the beast-dev/beast-mcmc project.
From the class AscertainmentCorrectedLikelihoodTest, method testCorrectedPatterns:
// Verifies that ascertainment-corrected pattern probabilities sum to 1.0 when
// the first 10 site patterns (columns 1-10) are marked as excluded/ascertained.
// Constructor argument order (see AscertainedSitePatterns):
//   (alignment, taxa, from, to, every, includeFrom, includeTo, excludeFrom, excludeTo)
public void testCorrectedPatterns() {
    AscertainedSitePatterns patterns = new AscertainedSitePatterns(alignment, null, 0, -1, 1, // all sites, every site
    -1, // include from: none
    -1, // include to: none
    0, // exclude from: first column (0-based)
    9); // exclude to: column 9 (so 10 columns are excluded)
    System.out.println("Using " + patterns.getPatternCount() + " patterns, with " + patterns.getExcludePatternCount() + " excluded");
    double total = computeSumOfPatterns(patterns);
    System.out.println("Total of 10 missing (corrected) probabilities = " + total);
    // NOTE(review): the assertion label says "uncorrected" although this is the
    // corrected test — verify whether the message is intentional.
    assertEquals("uncorrected", 1.0, total, tolerance);
}
Use of dr.evolution.alignment.AscertainedSitePatterns in the beast-dev/beast-mcmc project.
From the class BeagleTreeLikelihood, method calculateLogLikelihood:
// int marcCount = 0;
// **************************************************************
// Likelihood IMPLEMENTATION
// **************************************************************
/**
 * Calculate the log likelihood of the current state.
 *
 * Lazily allocates scratch buffers on first use, decides the partials
 * rescaling strategy for this evaluation, pushes any updated tip data and
 * model parameters to BEAGLE, then computes the root log likelihood.
 * If the result under/overflows, a single rescaled re-evaluation is
 * attempted before {@code Double.NEGATIVE_INFINITY} is returned.
 *
 * @return the log likelihood.
 */
protected double calculateLogLikelihood() {

    // Lazily allocate per-pattern and per-node scratch buffers.
    if (patternLogLikelihoods == null) {
        patternLogLikelihoods = new double[patternCount];
    }

    if (branchUpdateIndices == null) {
        branchUpdateIndices = new int[nodeCount];
        branchLengths = new double[nodeCount];
        scaleBufferIndices = new int[internalNodeCount];
        storedScaleBufferIndices = new int[internalNodeCount];
    }

    if (operations == null) {
        operations = new int[numRestrictedPartials + 1][internalNodeCount * Beagle.OPERATION_TUPLE_SIZE];
        operationCount = new int[numRestrictedPartials + 1];
    }

    // Decide whether scale factors are used/recomputed for this evaluation.
    recomputeScaleFactors = false;

    if (!this.delayRescalingUntilUnderflow || everUnderflowed) {
        if (this.rescalingScheme == PartialsRescalingScheme.ALWAYS || this.rescalingScheme == PartialsRescalingScheme.DELAYED) {
            useScaleFactors = true;
            recomputeScaleFactors = true;
        } else if (this.rescalingScheme == PartialsRescalingScheme.DYNAMIC) {
            useScaleFactors = true;

            if (DEBUG) {
                System.out.println("rescalingCount = " + rescalingCount);
                System.out.println("rescalingCountInner = " + rescalingCountInner);
            }

            // Periodically reset the counters so rescaling is re-attempted
            // every rescalingFrequency evaluations.
            if (rescalingCount > rescalingFrequency) {
                if (DEBUG) {
                    System.out.println("rescalingCount > rescalingFrequency");
                }
                rescalingCount = 0;
                rescalingCountInner = 0;
            }

            // Recompute scale factors (forcing a full traversal) for the
            // first RESCALE_TIMES evaluations after a counter reset.
            if (rescalingCountInner < RESCALE_TIMES) {
                recomputeScaleFactors = true;
                updateAllNodes();
                rescalingCountInner++;
            }
            rescalingCount++;
        }
    }

    if (RESCALING_OFF) { // a debugging switch
        useScaleFactors = false;
        recomputeScaleFactors = false;
    }

    // Push any updated tip partials/states into BEAGLE.
    if (tipStatesModel != null) {
        int tipCount = treeModel.getExternalNodeCount();
        for (int index = 0; index < tipCount; index++) {
            if (updateNode[index]) {
                if (tipStatesModel.getModelType() == TipStatesModel.Type.PARTIALS) {
                    tipStatesModel.getTipPartials(index, tipPartials);
                    beagle.setTipPartials(index, tipPartials);
                } else {
                    tipStatesModel.getTipStates(index, tipStates);
                    beagle.setTipStates(index, tipStates);
                }
            }
        }
    }

    // Build the operation lists by traversing the (dirty parts of the) tree.
    branchUpdateCount = 0;
    operationListCount = 0;

    if (hasRestrictedPartials) {
        for (int i = 0; i <= numRestrictedPartials; i++) {
            operationCount[i] = 0;
        }
    } else {
        operationCount[0] = 0;
    }

    final NodeRef root = treeModel.getRoot();
    traverse(treeModel, root, null, true);

    if (DEBUG) {
        System.out.println("operationCount = " + operationCount[operationListCount]);
    }

    if (updateSubstitutionModel) {
        // TODO More efficient to update only the substitution model that changed, instead of all
        substitutionModelDelegate.updateSubstitutionModels(beagle);
        // we are currently assuming a no-category model...
    }

    if (updateSiteModel) {
        double[] categoryRates = this.siteRateModel.getCategoryRates();
        if (categoryRates == null) {
            // The site-rate model failed to produce rates
            // (probably a very small alpha) so reject the move.
            return Double.NEGATIVE_INFINITY;
        }
        beagle.setCategoryRates(categoryRates);
    }

    if (branchUpdateCount > 0) {
        substitutionModelDelegate.updateTransitionMatrices(beagle, branchUpdateIndices, branchLengths, branchUpdateCount);
    }

    if (COUNT_TOTAL_OPERATIONS) {
        totalMatrixUpdateCount += branchUpdateCount;
        for (int i = 0; i <= numRestrictedPartials; i++) {
            totalOperationCount += operationCount[i];
        }
    }

    double logL;
    boolean done;
    boolean firstRescaleAttempt = true;

    do {
        // Update the partials bottom-up, then evaluate at the root.
        if (hasRestrictedPartials) {
            for (int i = 0; i <= numRestrictedPartials; i++) {
                beagle.updatePartials(operations[i], operationCount[i], Beagle.NONE);
                if (i < numRestrictedPartials) {
                    // restrictNodePartials(restrictedIndices[i]);
                }
            }
        } else {
            beagle.updatePartials(operations[0], operationCount[0], Beagle.NONE);
        }

        int rootIndex = partialBufferHelper.getOffsetIndex(root.getNumber());

        double[] categoryWeights = this.siteRateModel.getCategoryProportions();

        // This should probably explicitly be the state frequencies for the root node...
        double[] frequencies = substitutionModelDelegate.getRootStateFrequencies();

        int cumulateScaleBufferIndex = Beagle.NONE;
        if (useScaleFactors) {
            if (recomputeScaleFactors) {
                scaleBufferHelper.flipOffset(internalNodeCount);
                cumulateScaleBufferIndex = scaleBufferHelper.getOffsetIndex(internalNodeCount);
                beagle.resetScaleFactors(cumulateScaleBufferIndex);
                beagle.accumulateScaleFactors(scaleBufferIndices, internalNodeCount, cumulateScaleBufferIndex);
            } else {
                cumulateScaleBufferIndex = scaleBufferHelper.getOffsetIndex(internalNodeCount);
            }
        } else if (useAutoScaling) {
            beagle.accumulateScaleFactors(scaleBufferIndices, internalNodeCount, Beagle.NONE);
        }

        // these could be set only when they change but store/restore would need to be considered
        beagle.setCategoryWeights(0, categoryWeights);
        beagle.setStateFrequencies(0, frequencies);

        double[] sumLogLikelihoods = new double[1];

        if (DEBUG) {
            System.out.println("useScaleFactors=" + useScaleFactors + " recomputeScaleFactors=" + recomputeScaleFactors);
        }

        beagle.calculateRootLogLikelihoods(new int[] { rootIndex }, new int[] { 0 }, new int[] { 0 }, new int[] { cumulateScaleBufferIndex }, 1, sumLogLikelihoods);

        logL = sumLogLikelihoods[0];

        if (DEBUG) {
            System.out.println(logL);
            // if (logL > -90000) {
            // System.exit(0);
            // }
        }

        beagle.getSiteLogLikelihoods(patternLogLikelihoods);

        if (ascertainedSitePatterns) {
            // Need to correct for ascertainedSitePatterns.
            // (The per-site log likelihoods were already fetched just above;
            // a second, redundant getSiteLogLikelihoods call was removed here.)
            logL = getAscertainmentCorrectedLogLikelihood((AscertainedSitePatterns) patternList, patternLogLikelihoods, patternWeights);
        }

        if (Double.isNaN(logL) || Double.isInfinite(logL)) {
            if (DEBUG) {
                System.out.println("Double.isNaN(logL) || Double.isInfinite(logL)");
            }
            everUnderflowed = true;
            logL = Double.NEGATIVE_INFINITY;

            if (firstRescaleAttempt && (delayRescalingUntilUnderflow || rescalingScheme == PartialsRescalingScheme.DELAYED)) {
                // we have had a potential under/over flow so attempt a rescaling
                if (rescalingScheme == PartialsRescalingScheme.DYNAMIC || (rescalingCount == 0)) {
                    // show a message but only every 1000 rescales
                    if (rescalingMessageCount % 1000 == 0) {
                        if (rescalingMessageCount > 0) {
                            Logger.getLogger("dr.evomodel").info("Underflow calculating likelihood (" + rescalingMessageCount + " messages not shown).");
                        } else {
                            Logger.getLogger("dr.evomodel").info("Underflow calculating likelihood. Attempting a rescaling...");
                        }
                    }
                    rescalingMessageCount += 1;
                }

                useScaleFactors = true;
                recomputeScaleFactors = true;

                branchUpdateCount = 0;
                updateAllNodes();

                if (hasRestrictedPartials) {
                    for (int i = 0; i <= numRestrictedPartials; i++) {
                        operationCount[i] = 0;
                    }
                } else {
                    operationCount[0] = 0;
                }

                // traverse again but without flipping partials indices as we
                // just want to overwrite the last attempt. We will flip the
                // scale buffer indices though as we are recomputing them.
                traverse(treeModel, root, null, false);

                // Run through do-while loop again
                done = false;

                // Only try to rescale once
                firstRescaleAttempt = false;
            } else {
                // we have already tried a rescale, not rescaling or always rescaling
                // so just return the likelihood...
                done = true;
            }
        } else {
            // No under-/over-flow, then done
            done = true;
        }
    } while (!done);

    // Everything is now up to date, so change flags to reflect this.
    for (int i = 0; i < nodeCount; i++) {
        updateNode[i] = false;
    }

    updateSubstitutionModel = false;
    updateSiteModel = false;

    return logL;
}
Aggregations