Search in sources:

Example 1 with SiteModel

Use of beast.evolution.sitemodel.SiteModel in project beast2 by CompEvol.

The class BeautiSubTemplate, method createSubNet:

private BEASTInterface createSubNet(PartitionContext context, /*BeautiDoc doc,*/
HashMap<String, BEASTInterface> idMap, boolean init) {
    subNetDepth++;
    if (subNetDepth > 10) {
        // looks like we cannot find what we are looking for
        throw new IllegalArgumentException("Potential programmer error: It looks like there is a required input that was not specified in the template");
    }
    // wrap in a beast element with appropriate namespaces
    String _sXML = "<beast version='2.0' \n" + "namespace='beast.app.beauti:beast.core:beast.evolution.branchratemodel:beast.evolution.speciation:beast.evolution.tree.coalescent:beast.core.util:beast.evolution.nuc:beast.evolution.operators:beast.evolution.sitemodel:beast.evolution.substitutionmodel:beast.evolution.likelihood:beast.evolution:beast.math.distributions'>\n" + xml + "</beast>\n";
    // resolve alignment references
    _sXML = _sXML.replaceAll("idref=[\"']data['\"]", "idref='" + context.partition + "'");
    _sXML = _sXML.replaceAll("[\"']@data['\"]", "'@" + context.partition + "'");
    // ensure uniqueness of IDs
    // _sXML.replaceAll("\\$\\(n\\)", partition);
    _sXML = BeautiDoc.translatePartitionNames(_sXML, context);
    XMLParser parser = new XMLParser();
    parser.setRequiredInputProvider(doc, context);
    List<BEASTInterface> beastObjects = null;
    try {
        beastObjects = parser.parseTemplate(_sXML, idMap, true);
        for (BEASTInterface beastObject : beastObjects) {
            doc.addPlugin(beastObject);
            try {
                Log.warning.println("Adding " + beastObject.getClass().getName() + " " + beastObject);
            } catch (Exception e) {
                Log.err.println("Adding " + beastObject.getClass().getName());
            }
        }
        for (BeautiConnector connector : connectors) {
            if (init && connector.atInitialisationOnly()) {
                // ||
                doc.connect(connector, context);
            }
            // System.out.println(connector.sourceID + " == " + connector.targetID);
            if (connector.targetID != null && connector.targetID.equals("prior")) {
                Log.warning.println(">>> No description for connector " + connector.sourceID + " == " + connector.targetID);
            }
            if (connector.getTipText() != null) {
                String ID = BeautiDoc.translatePartitionNames(connector.sourceID, context);
                String tipText = BeautiDoc.translatePartitionNames(connector.getTipText(), context).trim().replaceAll("\\s+", " ");
                // System.out.println(ID + " -> " + tipText);
                doc.tipTextMap.put(ID, tipText);
            }
        }
        if (suppressedInputs.get() != null) {
            String[] inputs = suppressedInputs.get().split(",");
            for (String input : inputs) {
                input = input.trim();
                doc.beautiConfig.suppressBEASTObjects.add(input);
            }
        }
        if (inlineInput.get() != null) {
            String[] inputs = inlineInput.get().split(",");
            for (String input : inputs) {
                input = input.trim();
                doc.beautiConfig.inlineBEASTObject.add(input);
            }
        }
        if (collapsedInput.get() != null) {
            String[] inputs = collapsedInput.get().split(",");
            for (String input : inputs) {
                input = input.trim();
                doc.beautiConfig.collapsedBEASTObjects.add(input);
            }
        }
    } catch (Exception e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
    if (mainID.equals("[top]")) {
        subNetDepth--;
        return beastObjects.get(0);
    }
    String id = mainID;
    // id.replaceAll("\\$\\(n\\)", partition);
    id = BeautiDoc.translatePartitionNames(id, context);
    BEASTInterface beastObject = doc.pluginmap.get(id);
    if (this == doc.beautiConfig.partitionTemplate.get()) {
        // HACK: need to make sure the subst model is of the correct type
        BEASTInterface treeLikelihood = doc.pluginmap.get("treeLikelihood." + context.partition);
        if (treeLikelihood != null && ((GenericTreeLikelihood) treeLikelihood).siteModelInput.get() instanceof SiteModel.Base) {
            SiteModel.Base siteModel = (SiteModel.Base) ((GenericTreeLikelihood) treeLikelihood).siteModelInput.get();
            SubstitutionModel substModel = siteModel.substModelInput.get();
            try {
                if (!siteModel.canSetSubstModel(substModel)) {
                    setUpSubstModel(siteModel, context);
                }
            } catch (Exception e) {
                setUpSubstModel(siteModel, context);
            }
        }
        // HACK2: rename file name for trace log if it has the default value
        Logger logger = (Logger) doc.pluginmap.get("tracelog");
        if (logger != null) {
            String fileName = logger.fileNameInput.get();
            if (fileName.startsWith("beast.") && treeLikelihood != null) {
                Alignment data = ((GenericTreeLikelihood) treeLikelihood).dataInput.get();
                while (data instanceof FilteredAlignment) {
                    data = ((FilteredAlignment) data).alignmentInput.get();
                }
                fileName = data.getID() + fileName.substring(5);
                try {
                    logger.fileNameInput.setValue(fileName, logger);
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        }
    }
    subNetDepth--;
    // System.err.println(new XMLProducer().toXML(beastObject));
    return beastObject;
}
Also used: GenericTreeLikelihood (beast.evolution.likelihood.GenericTreeLikelihood), SiteModel (beast.evolution.sitemodel.SiteModel), Logger (beast.core.Logger), FilteredAlignment (beast.evolution.alignment.FilteredAlignment), TransformerException (javax.xml.transform.TransformerException), IOException (java.io.IOException), ParserConfigurationException (javax.xml.parsers.ParserConfigurationException), SAXException (org.xml.sax.SAXException), Base (beast.evolution.sitemodel.SiteModelInterface.Base), SubstitutionModel (beast.evolution.substitutionmodel.SubstitutionModel), Alignment (beast.evolution.alignment.Alignment), BEASTInterface (beast.core.BEASTInterface), XMLParser (beast.util.XMLParser)
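
The sub-template XML above uses $(n) placeholders (see the commented-out replaceAll call) that BeautiDoc.translatePartitionNames expands to the current partition name before parsing. Below is a minimal sketch of that expansion step, simplified to the partition name only and not the real BEAUti implementation; the actual method also rewrites clock and tree partition names from the PartitionContext.

public class PartitionNameDemo {

    // Simplified stand-in for BeautiDoc.translatePartitionNames: replace every
    // $(n) placeholder with the partition name. '$', '(' and ')' are regex
    // metacharacters, so they are escaped in the pattern.
    static String translatePartitionName(String templateXml, String partition) {
        return templateXml.replaceAll("\\$\\(n\\)", partition);
    }

    public static void main(String[] args) {
        String fragment = "<siteModel id='SiteModel.s:$(n)'/>";
        // prints: <siteModel id='SiteModel.s:dna'/>
        System.out.println(translatePartitionName(fragment, "dna"));
    }
}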

Example 2 with SiteModel

Use of beast.evolution.sitemodel.SiteModel in project beast2 by CompEvol.

The class UnorderedAlignmentsTest, method getSiteModel:

public static SiteModel getSiteModel() throws Exception {
    Frequencies frequencies = new Frequencies();
    frequencies.initByName("frequencies", new RealParameter("0.25 0.25 0.25 0.25"));
    HKY hky = new HKY();
    hky.initByName("kappa", new RealParameter("1.0"), "frequencies", frequencies);
    SiteModel siteModel = new SiteModel();
    siteModel.initByName("mutationRate", new RealParameter("0.005"), "substModel", hky);
    return siteModel;
}
Also used: HKY (beast.evolution.substitutionmodel.HKY), RealParameter (beast.core.parameter.RealParameter), SiteModel (beast.evolution.sitemodel.SiteModel), Frequencies (beast.evolution.substitutionmodel.Frequencies)
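
The helper above only builds the site model; in a test it is typically wired into a tree likelihood together with an alignment and a tree (the same pattern appears in Example 5 below). A hedged usage sketch, assuming data and tree are an Alignment and a matching Tree built elsewhere in the test, with imports from beast.evolution.alignment, beast.evolution.tree and beast.evolution.likelihood:

public static double logLikelihoodFor(Alignment data, Tree tree) throws Exception {
    SiteModel siteModel = getSiteModel();               // helper shown above
    TreeLikelihood likelihood = new TreeLikelihood();
    likelihood.initByName("data", data, "tree", tree, "siteModel", siteModel);
    return likelihood.calculateLogP();                  // log likelihood of the alignment under the model
}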

Example 3 with SiteModel

Use of beast.evolution.sitemodel.SiteModel in project beast2 by CompEvol.

The class AlignmentListInputEditor, method updateModel:

/**
 * Set the partition of the type indicated by columnNr (site model, clock model
 * or tree) for the alignment in row rowNr to the selected partition model.
 */
void updateModel(int columnNr, int rowNr) {
    Log.warning.println("updateModel: " + rowNr + " " + columnNr + " " + table.getSelectedRow() + " " + table.getSelectedColumn());
    for (int i = 0; i < partitionCount; i++) {
        Log.warning.println(i + " " + tableData[i][0] + " " + tableData[i][SITEMODEL_COLUMN] + " " + tableData[i][CLOCKMODEL_COLUMN] + " " + tableData[i][TREE_COLUMN]);
    }
    getDoc();
    String partition = (String) tableData[rowNr][columnNr];
    // check if partition needs renaming
    String oldName = null;
    boolean isRenaming = false;
    try {
        switch(columnNr) {
            case SITEMODEL_COLUMN:
                if (!doc.pluginmap.containsKey("SiteModel.s:" + partition)) {
                    String id = ((BEASTInterface) likelihoods[rowNr].siteModelInput.get()).getID();
                    oldName = BeautiDoc.parsePartition(id);
                    doc.renamePartition(BeautiDoc.SITEMODEL_PARTITION, oldName, partition);
                    isRenaming = true;
                }
                break;
            case CLOCKMODEL_COLUMN:
                {
                    String id = likelihoods[rowNr].branchRateModelInput.get().getID();
                    String clockModelName = id.substring(0, id.indexOf('.')) + ".c:" + partition;
                    if (!doc.pluginmap.containsKey(clockModelName)) {
                        oldName = BeautiDoc.parsePartition(id);
                        doc.renamePartition(BeautiDoc.CLOCKMODEL_PARTITION, oldName, partition);
                        isRenaming = true;
                    }
                }
                break;
            case TREE_COLUMN:
                if (!doc.pluginmap.containsKey("Tree.t:" + partition)) {
                    String id = likelihoods[rowNr].treeInput.get().getID();
                    oldName = BeautiDoc.parsePartition(id);
                    doc.renamePartition(BeautiDoc.TREEMODEL_PARTITION, oldName, partition);
                    isRenaming = true;
                }
                break;
        }
    } catch (Exception e) {
        JOptionPane.showMessageDialog(this, "Cannot rename item: " + e.getMessage());
        tableData[rowNr][columnNr] = oldName;
        return;
    }
    if (isRenaming) {
        doc.determinePartitions();
        initTableData();
        setUpComboBoxes();
        table.repaint();
        return;
    }
    int partitionID = BeautiDoc.ALIGNMENT_PARTITION;
    switch(columnNr) {
        case SITEMODEL_COLUMN:
            partitionID = BeautiDoc.SITEMODEL_PARTITION;
            break;
        case CLOCKMODEL_COLUMN:
            partitionID = BeautiDoc.CLOCKMODEL_PARTITION;
            break;
        case TREE_COLUMN:
            partitionID = BeautiDoc.TREEMODEL_PARTITION;
            break;
    }
    int partitionNr = doc.getPartitionNr(partition, partitionID);
    GenericTreeLikelihood treeLikelihood = null;
    if (partitionNr >= 0) {
        // we are linking
        treeLikelihood = likelihoods[partitionNr];
    }
    // (TreeLikelihood) doc.pluginmap.get("treeLikelihood." +
    // tableData[rowNr][NAME_COLUMN]);
    boolean needsRePartition = false;
    PartitionContext oldContext = new PartitionContext(this.likelihoods[rowNr]);
    switch(columnNr) {
        case SITEMODEL_COLUMN:
            {
                SiteModelInterface siteModel = null;
                if (treeLikelihood != null) {
                    // getDoc().getPartitionNr(partition,
                    // BeautiDoc.SITEMODEL_PARTITION) !=
                    // rowNr) {
                    siteModel = treeLikelihood.siteModelInput.get();
                } else {
                    siteModel = (SiteModel) doc.pluginmap.get("SiteModel.s:" + partition);
                    if (siteModel != likelihoods[rowNr].siteModelInput.get()) {
                        PartitionContext context = getPartitionContext(rowNr);
                        try {
                            siteModel = (SiteModel.Base) BeautiDoc.deepCopyPlugin((BEASTInterface) likelihoods[rowNr].siteModelInput.get(), likelihoods[rowNr], (MCMC) doc.mcmc.get(), oldContext, context, doc, null);
                        } catch (RuntimeException e) {
                            JOptionPane.showMessageDialog(this, "Could not clone site model: " + e.getMessage());
                            return;
                        }
                    }
                }
                SiteModelInterface target = this.likelihoods[rowNr].siteModelInput.get();
                if (target instanceof SiteModel.Base && siteModel instanceof SiteModel.Base) {
                    if (!((SiteModel.Base) target).substModelInput.canSetValue(((SiteModel.Base) siteModel).substModelInput.get(), (SiteModel.Base) target)) {
                        throw new IllegalArgumentException("Cannot link site model: substitution models (" + ((SiteModel.Base) target).substModelInput.get().getClass().toString() + " and " + ((SiteModel.Base) siteModel).substModelInput.get().getClass().toString() + ") are incompatible");
                    }
                } else {
                    throw new IllegalArgumentException("Don't know how to link this site model");
                }
                needsRePartition = (this.likelihoods[rowNr].siteModelInput.get() != siteModel);
                this.likelihoods[rowNr].siteModelInput.setValue(siteModel, this.likelihoods[rowNr]);
                partition = ((BEASTInterface) likelihoods[rowNr].siteModelInput.get()).getID();
                partition = BeautiDoc.parsePartition(partition);
                getDoc().setCurrentPartition(BeautiDoc.SITEMODEL_PARTITION, rowNr, partition);
            }
            break;
        case CLOCKMODEL_COLUMN:
            {
                BranchRateModel clockModel = null;
                if (treeLikelihood != null) {
                    // getDoc().getPartitionNr(partition,
                    // BeautiDoc.CLOCKMODEL_PARTITION)
                    // != rowNr) {
                    clockModel = treeLikelihood.branchRateModelInput.get();
                } else {
                    clockModel = getDoc().getClockModel(partition);
                    if (clockModel != likelihoods[rowNr].branchRateModelInput.get()) {
                        PartitionContext context = getPartitionContext(rowNr);
                        try {
                            clockModel = (BranchRateModel) BeautiDoc.deepCopyPlugin(likelihoods[rowNr].branchRateModelInput.get(), likelihoods[rowNr], (MCMC) doc.mcmc.get(), oldContext, context, doc, null);
                        } catch (RuntimeException e) {
                            JOptionPane.showMessageDialog(this, "Could not clone clock model: " + e.getMessage());
                            return;
                        }
                    }
                }
                // make sure that *if* the clock model has a tree as input, it is
                // the same as
                // for the likelihood
                TreeInterface tree = null;
                for (Input<?> input : ((BEASTInterface) clockModel).listInputs()) {
                    if (input.getName().equals("tree")) {
                        tree = (TreeInterface) input.get();
                    }
                }
                if (tree != null && tree != this.likelihoods[rowNr].treeInput.get()) {
                    JOptionPane.showMessageDialog(this, "Cannot link clock model with different trees");
                    throw new IllegalArgumentException("Cannot link clock model with different trees");
                }
                needsRePartition = (this.likelihoods[rowNr].branchRateModelInput.get() != clockModel);
                this.likelihoods[rowNr].branchRateModelInput.setValue(clockModel, this.likelihoods[rowNr]);
                partition = likelihoods[rowNr].branchRateModelInput.get().getID();
                partition = BeautiDoc.parsePartition(partition);
                getDoc().setCurrentPartition(BeautiDoc.CLOCKMODEL_PARTITION, rowNr, partition);
            }
            break;
        case TREE_COLUMN:
            {
                TreeInterface tree = null;
                if (treeLikelihood != null) {
                    // getDoc().getPartitionNr(partition,
                    // BeautiDoc.TREEMODEL_PARTITION) !=
                    // rowNr) {
                    tree = treeLikelihood.treeInput.get();
                } else {
                    tree = (TreeInterface) doc.pluginmap.get("Tree.t:" + partition);
                    if (tree != likelihoods[rowNr].treeInput.get()) {
                        PartitionContext context = getPartitionContext(rowNr);
                        try {
                            tree = (TreeInterface) BeautiDoc.deepCopyPlugin((BEASTInterface) likelihoods[rowNr].treeInput.get(), likelihoods[rowNr], (MCMC) doc.mcmc.get(), oldContext, context, doc, null);
                        } catch (RuntimeException e) {
                            JOptionPane.showMessageDialog(this, "Could not clone tree model: " + e.getMessage());
                            return;
                        }
                        State state = ((MCMC) doc.mcmc.get()).startStateInput.get();
                        List<StateNode> stateNodes = new ArrayList<>();
                        stateNodes.addAll(state.stateNodeInput.get());
                        for (StateNode s : stateNodes) {
                            if (s.getID().endsWith(".t:" + oldContext.tree) && !(s instanceof TreeInterface)) {
                                try {
                                    @SuppressWarnings("unused") StateNode copy = (StateNode) BeautiDoc.deepCopyPlugin(s, likelihoods[rowNr], (MCMC) doc.mcmc.get(), oldContext, context, doc, null);
                                } catch (RuntimeException e) {
                                    JOptionPane.showMessageDialog(this, "Could not clone tree model: " + e.getMessage());
                                    return;
                                }
                            }
                        }
                    }
                }
                // sanity check: make sure taxon sets are compatible
                Taxon.assertSameTaxa(tree.getID(), tree.getTaxonset().getTaxaNames(), likelihoods[rowNr].dataInput.get().getID(), likelihoods[rowNr].dataInput.get().getTaxaNames());
                needsRePartition = (this.likelihoods[rowNr].treeInput.get() != tree);
                Log.warning.println("needsRePartition = " + needsRePartition);
                if (needsRePartition) {
                    TreeInterface oldTree = this.likelihoods[rowNr].treeInput.get();
                    List<TreeInterface> tModels = new ArrayList<>();
                    for (GenericTreeLikelihood likelihood : likelihoods) {
                        if (likelihood.treeInput.get() == oldTree) {
                            tModels.add(likelihood.treeInput.get());
                        }
                    }
                    if (tModels.size() == 1) {
                        // remove old tree from model
                        ((BEASTInterface) oldTree).setInputValue("estimate", false);
                        // use toArray to prevent ConcurrentModificationException
                        for (Object beastObject : BEASTInterface.getOutputs(oldTree).toArray()) {
                            // .toArray(new BEASTInterface[0])) {
                            for (Input<?> input : ((BEASTInterface) beastObject).listInputs()) {
                                try {
                                    if (input.get() == oldTree) {
                                        if (input.getRule() != Input.Validate.REQUIRED) {
                                            input.setValue(tree, /*null*/
                                            (BEASTInterface) beastObject);
                                        // } else {
                                        // input.setValue(tree, (BEASTInterface) beastObject);
                                        }
                                    } else if (input.get() instanceof List) {
                                        @SuppressWarnings("unchecked") List<TreeInterface> list = (List<TreeInterface>) input.get();
                                        if (list.contains(oldTree)) {
                                            // && input.getRule() != Validate.REQUIRED) {
                                            list.remove(oldTree);
                                            if (!list.contains(tree)) {
                                                list.add(tree);
                                            }
                                        }
                                    }
                                } catch (Exception e) {
                                    e.printStackTrace();
                                }
                            }
                        }
                    }
                }
                likelihoods[rowNr].treeInput.setValue(tree, likelihoods[rowNr]);
                // TreeDistribution d = getDoc().getTreePrior(partition);
                // CompoundDistribution prior = (CompoundDistribution)
                // doc.pluginmap.get("prior");
                // if (!getDoc().posteriorPredecessors.contains(d)) {
                // prior.pDistributions.setValue(d, prior);
                // }
                partition = likelihoods[rowNr].treeInput.get().getID();
                partition = BeautiDoc.parsePartition(partition);
                getDoc().setCurrentPartition(BeautiDoc.TREEMODEL_PARTITION, rowNr, partition);
            }
    }
    tableData[rowNr][columnNr] = partition;
    if (needsRePartition) {
        List<BeautiSubTemplate> templates = new ArrayList<>();
        templates.add(doc.beautiConfig.partitionTemplate.get());
        templates.addAll(doc.beautiConfig.subTemplates);
        // keep applying rules till model does not change
        doc.setUpActivePlugins();
        int n;
        do {
            n = doc.posteriorPredecessors.size();
            doc.applyBeautiRules(templates, false, oldContext);
            doc.setUpActivePlugins();
        } while (n != doc.posteriorPredecessors.size());
        doc.determinePartitions();
    }
    if (treeLikelihood == null) {
        initTableData();
        setUpComboBoxes();
    }
    updateStatus();
}
Also used: MCMC (beast.core.MCMC), StateNode (beast.core.StateNode), ArrayList (java.util.ArrayList), Input (beast.core.Input), List (java.util.List), GenericTreeLikelihood (beast.evolution.likelihood.GenericTreeLikelihood), SiteModel (beast.evolution.sitemodel.SiteModel), TreeInterface (beast.evolution.tree.TreeInterface), BranchRateModel (beast.evolution.branchratemodel.BranchRateModel), State (beast.core.State), BEASTInterface (beast.core.BEASTInterface), EventObject (java.util.EventObject), SiteModelInterface (beast.evolution.sitemodel.SiteModelInterface)
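
Most of the branching above handles renaming and cloning; the core "link" step reduces to pointing the row's likelihood at a model owned by another partition via Input.setValue and then letting BEAUti re-partition. A minimal hedged sketch of that step for the site model column, reusing the likelihoods array and rowNr from the editor; renaming, compatibility checks and the re-partitioning loop are omitted:

void linkSiteModel(GenericTreeLikelihood[] likelihoods, int rowNr, SiteModel.Base shared) {
    GenericTreeLikelihood target = likelihoods[rowNr];
    // Input.setValue(value, owner): the second argument is the BEASTObject that
    // owns the input, so BEAST can keep its object graph consistent.
    target.siteModelInput.setValue(shared, target);
}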

Example 4 with SiteModel

Use of beast.evolution.sitemodel.SiteModel in project beast2 by CompEvol.

The class BeagleTreeLikelihood, method initialize:

private boolean initialize() {
    m_nNodeCount = treeInput.get().getNodeCount();
    m_bUseAmbiguities = m_useAmbiguities.get();
    m_bUseTipLikelihoods = m_useTipLikelihoods.get();
    if (!(siteModelInput.get() instanceof SiteModel.Base)) {
        throw new IllegalArgumentException("siteModel input should be of type SiteModel.Base");
    }
    m_siteModel = (SiteModel.Base) siteModelInput.get();
    m_siteModel.setDataType(dataInput.get().getDataType());
    substitutionModel = m_siteModel.substModelInput.get();
    branchRateModel = branchRateModelInput.get();
    if (branchRateModel == null) {
        branchRateModel = new StrictClockModel();
    }
    m_branchLengths = new double[m_nNodeCount];
    storedBranchLengths = new double[m_nNodeCount];
    m_nStateCount = dataInput.get().getMaxStateCount();
    patternCount = dataInput.get().getPatternCount();
    // System.err.println("Attempt to load BEAGLE TreeLikelihood");
    // this.branchSubstitutionModel.getEigenCount();
    eigenCount = 1;
    double[] categoryRates = m_siteModel.getCategoryRates(null);
    // check for invariant rates category
    if (m_siteModel.hasPropInvariantCategory) {
        for (int i = 0; i < categoryRates.length; i++) {
            if (categoryRates[i] == 0) {
                proportionInvariant = m_siteModel.getRateForCategory(i, null);
                int stateCount = dataInput.get().getMaxStateCount();
                int patterns = dataInput.get().getPatternCount();
                calcConstantPatternIndices(patterns, stateCount);
                invariantCategory = i;
                double[] tmp = new double[categoryRates.length - 1];
                for (int k = 0; k < invariantCategory; k++) {
                    tmp[k] = categoryRates[k];
                }
                for (int k = invariantCategory + 1; k < categoryRates.length; k++) {
                    tmp[k - 1] = categoryRates[k];
                }
                categoryRates = tmp;
                break;
            }
        }
        if (constantPattern != null && constantPattern.size() > dataInput.get().getPatternCount()) {
            // if there are many more constant patterns than patterns (each pattern can
            // have a number of constant patterns, one for each state) it is less efficient
            // to just calculate the TreeLikelihood for constant sites than optimising
            Log.debug("switch off constant sites optimisiation: calculating through separate TreeLikelihood category (as in the olden days)");
            invariantCategory = -1;
            proportionInvariant = 0;
            constantPattern = null;
            categoryRates = m_siteModel.getCategoryRates(null);
        }
    }
    this.categoryCount = m_siteModel.getCategoryCount() - (invariantCategory >= 0 ? 1 : 0);
    tipCount = treeInput.get().getLeafNodeCount();
    internalNodeCount = m_nNodeCount - tipCount;
    int compactPartialsCount = tipCount;
    if (m_bUseAmbiguities) {
        // if we are using ambiguities then we don't use tip partials
        compactPartialsCount = 0;
    }
    // one partials buffer for each tip and two for each internal node (for store restore)
    partialBufferHelper = new BufferIndexHelper(m_nNodeCount, tipCount);
    // two eigen buffers for each decomposition for store and restore.
    eigenBufferHelper = new BufferIndexHelper(eigenCount, 0);
    // two matrices for each node less the root
    matrixBufferHelper = new BufferIndexHelper(m_nNodeCount, 0);
    // one scaling buffer for each internal node plus an extra for the accumulation, then doubled for store/restore
    scaleBufferHelper = new BufferIndexHelper(getScaleBufferCount(), 0);
    // Attempt to get the resource order from the System Property
    if (resourceOrder == null) {
        resourceOrder = parseSystemPropertyIntegerArray(RESOURCE_ORDER_PROPERTY);
    }
    if (preferredOrder == null) {
        preferredOrder = parseSystemPropertyIntegerArray(PREFERRED_FLAGS_PROPERTY);
    }
    if (requiredOrder == null) {
        requiredOrder = parseSystemPropertyIntegerArray(REQUIRED_FLAGS_PROPERTY);
    }
    if (scalingOrder == null) {
        scalingOrder = parseSystemPropertyStringArray(SCALING_PROPERTY);
    }
    // first set the rescaling scheme to use from the parser
    // = rescalingScheme;
    rescalingScheme = PartialsRescalingScheme.DEFAULT;
    rescalingScheme = DEFAULT_RESCALING_SCHEME;
    int[] resourceList = null;
    long preferenceFlags = 0;
    long requirementFlags = 0;
    if (scalingOrder.size() > 0) {
        this.rescalingScheme = PartialsRescalingScheme.parseFromString(scalingOrder.get(instanceCount % scalingOrder.size()));
    }
    if (resourceOrder.size() > 0) {
        // added the zero on the end so that a CPU is selected if requested resource fails
        resourceList = new int[] { resourceOrder.get(instanceCount % resourceOrder.size()), 0 };
        if (resourceList[0] > 0) {
            // Add preference weight against CPU
            preferenceFlags |= BeagleFlag.PROCESSOR_GPU.getMask();
        }
    }
    if (preferredOrder.size() > 0) {
        preferenceFlags = preferredOrder.get(instanceCount % preferredOrder.size());
    }
    if (requiredOrder.size() > 0) {
        requirementFlags = requiredOrder.get(instanceCount % requiredOrder.size());
    }
    if (scaling.get().equals(Scaling.always)) {
        this.rescalingScheme = PartialsRescalingScheme.ALWAYS;
    }
    if (scaling.get().equals(Scaling.none)) {
        this.rescalingScheme = PartialsRescalingScheme.NONE;
    }
    // Define default behaviour here
    if (this.rescalingScheme == PartialsRescalingScheme.DEFAULT) {
        // if GPU: the default is^H^Hwas dynamic scaling in BEAST, now NONE
        if (resourceList != null && resourceList[0] > 1) {
            // this.rescalingScheme = PartialsRescalingScheme.DYNAMIC;
            this.rescalingScheme = PartialsRescalingScheme.NONE;
        } else {
            // if CPU: just run as fast as possible
            // this.rescalingScheme = PartialsRescalingScheme.NONE;
            // Dynamic should run as fast as none until first underflow
            this.rescalingScheme = PartialsRescalingScheme.DYNAMIC;
        }
    }
    if (this.rescalingScheme == PartialsRescalingScheme.AUTO) {
        preferenceFlags |= BeagleFlag.SCALING_AUTO.getMask();
        useAutoScaling = true;
    } else {
    // preferenceFlags |= BeagleFlag.SCALING_MANUAL.getMask();
    }
    String r = System.getProperty(RESCALE_FREQUENCY_PROPERTY);
    if (r != null) {
        rescalingFrequency = Integer.parseInt(r);
        if (rescalingFrequency < 1) {
            rescalingFrequency = RESCALE_FREQUENCY;
        }
    }
    if (preferenceFlags == 0 && resourceList == null) {
        // else determine dataset characteristics
        // TODO determine good cut-off
        if (m_nStateCount == 4 && patternCount < 10000)
            preferenceFlags |= BeagleFlag.PROCESSOR_CPU.getMask();
    }
    if (substitutionModel.canReturnComplexDiagonalization()) {
        requirementFlags |= BeagleFlag.EIGEN_COMPLEX.getMask();
    }
    instanceCount++;
    try {
        beagle = BeagleFactory.loadBeagleInstance(
                tipCount,
                partialBufferHelper.getBufferCount(),
                compactPartialsCount,
                m_nStateCount,
                patternCount,
                eigenBufferHelper.getBufferCount(),   // eigenBufferCount
                matrixBufferHelper.getBufferCount(),
                categoryCount,
                scaleBufferHelper.getBufferCount(),   // always allocate; they may become necessary
                resourceList,
                preferenceFlags,
                requirementFlags);
    } catch (Exception e) {
        beagle = null;
    }
    if (beagle == null) {
        return false;
    }
    InstanceDetails instanceDetails = beagle.getDetails();
    ResourceDetails resourceDetails = null;
    if (instanceDetails != null) {
        resourceDetails = BeagleFactory.getResourceDetails(instanceDetails.getResourceNumber());
        if (resourceDetails != null) {
            StringBuilder sb = new StringBuilder("  Using BEAGLE version: " + BeagleInfo.getVersion() + " resource ");
            sb.append(resourceDetails.getNumber()).append(": ");
            sb.append(resourceDetails.getName()).append("\n");
            if (resourceDetails.getDescription() != null) {
                String[] description = resourceDetails.getDescription().split("\\|");
                for (String desc : description) {
                    if (desc.trim().length() > 0) {
                        sb.append("    ").append(desc.trim()).append("\n");
                    }
                }
            }
            sb.append("    with instance flags: ").append(instanceDetails.toString());
            Log.info.println(sb.toString());
        } else {
            Log.warning.println("  Error retrieving BEAGLE resource for instance: " + instanceDetails.toString());
            beagle = null;
            return false;
        }
    } else {
        Log.warning.println("  No external BEAGLE resources available, or resource list/requirements not met, using Java implementation");
        beagle = null;
        return false;
    }
    Log.warning.println("  " + (m_bUseAmbiguities ? "Using" : "Ignoring") + " ambiguities in tree likelihood.");
    Log.warning.println("  " + (m_bUseTipLikelihoods ? "Using" : "Ignoring") + " character uncertainty in tree likelihood.");
    Log.warning.println("  With " + patternCount + " unique site patterns.");
    Node[] nodes = treeInput.get().getNodesAsArray();
    for (int i = 0; i < tipCount; i++) {
        int taxon = dataInput.get().getTaxonIndex(nodes[i].getID());
        if (m_bUseAmbiguities || m_bUseTipLikelihoods) {
            setPartials(beagle, i, taxon);
        } else {
            setStates(beagle, i, taxon);
        }
    }
    if (dataInput.get().isAscertained) {
        ascertainedSitePatterns = true;
    }
    double[] patternWeights = new double[patternCount];
    for (int i = 0; i < patternCount; i++) {
        patternWeights[i] = dataInput.get().getPatternWeight(i);
    }
    beagle.setPatternWeights(patternWeights);
    if (this.rescalingScheme == PartialsRescalingScheme.AUTO && resourceDetails != null && (resourceDetails.getFlags() & BeagleFlag.SCALING_AUTO.getMask()) == 0) {
        // If auto scaling in BEAGLE is not supported then do it here
        this.rescalingScheme = PartialsRescalingScheme.DYNAMIC;
        Log.warning.println("  Auto rescaling not supported in BEAGLE, using : " + this.rescalingScheme.getText());
    } else {
        Log.warning.println("  Using rescaling scheme : " + this.rescalingScheme.getText());
    }
    if (this.rescalingScheme == PartialsRescalingScheme.DYNAMIC) {
        // If false, BEAST does not rescale until first under-/over-flow.
        everUnderflowed = false;
    }
    updateSubstitutionModel = true;
    updateSiteModel = true;
    // some subst models (e.g. WAG) never become dirty, so set up subst models right now
    setUpSubstModel();
    // set up sitemodel
    beagle.setCategoryRates(categoryRates);
    currentCategoryRates = categoryRates;
    currentFreqs = new double[m_nStateCount];
    currentCategoryWeights = new double[categoryRates.length];
    return true;
}
Also used: InstanceDetails (beagle.InstanceDetails), CalculationNode (beast.core.CalculationNode), Node (beast.evolution.tree.Node), SiteModel (beast.evolution.sitemodel.SiteModel), ResourceDetails (beagle.ResourceDetails), StrictClockModel (beast.evolution.branchratemodel.StrictClockModel)
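
The resource, flag and scaling orders above are read from system properties through helpers such as parseSystemPropertyIntegerArray. The sketch below shows what such a helper plausibly does (read a comma-separated integer list from a named property, e.g. -Dsome.property=1,0); it is a hypothetical reimplementation, and the real property names and parsing rules are not taken from the BEAST source.

import java.util.ArrayList;
import java.util.List;

final class PropertyListParser {
    /** Read a comma-separated list of integers from a system property. */
    static List<Integer> parse(String propertyName) {
        List<Integer> order = new ArrayList<>();
        String value = System.getProperty(propertyName);
        if (value != null) {
            for (String token : value.split(",")) {
                try {
                    order.add(Integer.parseInt(token.trim()));
                } catch (NumberFormatException e) {
                    // skip malformed entries rather than abort initialisation
                }
            }
        }
        return order;
    }
}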

Example 5 with SiteModel

Use of beast.evolution.sitemodel.SiteModel in project beast2 by CompEvol.

The class TreeLikelihoodTest, method aminoacidModelTestI:

void aminoacidModelTestI(SubstitutionModel substModel, double expectedValue) throws Exception {
    Alignment data = BEASTTestCase.getAminoAcidAlignment();
    Tree tree = BEASTTestCase.getAminoAcidTree(data);
    SiteModel siteModel = new SiteModel();
    siteModel.initByName("mutationRate", "1.0", "gammaCategoryCount", 1, "substModel", substModel, "proportionInvariant", "0.2");
    TreeLikelihood likelihood = newTreeLikelihood();
    likelihood.initByName("data", data, "tree", tree, "siteModel", siteModel);
    double logP = 0;
    logP = likelihood.calculateLogP();
    assertEquals(expectedValue, logP, BEASTTestCase.PRECISION);
}
Also used: Alignment (beast.evolution.alignment.Alignment), BeagleTreeLikelihood (beast.evolution.likelihood.BeagleTreeLikelihood), TreeLikelihood (beast.evolution.likelihood.TreeLikelihood), Tree (beast.evolution.tree.Tree), SiteModel (beast.evolution.sitemodel.SiteModel)
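
A hedged sketch of how a helper like this is typically driven from a JUnit test, here with WAG, one of the empirical amino-acid substitution models shipped with BEAST 2. EXPECTED_WAG_LOGP stands in for the reference log likelihood and is not a value from the test suite; the no-argument initialisation of WAG (relying on its built-in empirical frequencies) is also an assumption.

@Test
public void testWAGLikelihoodI() throws Exception {
    WAG wag = new WAG();           // beast.evolution.substitutionmodel.WAG
    wag.initAndValidate();         // assumed sufficient: empirical frequencies are built in
    aminoacidModelTestI(wag, EXPECTED_WAG_LOGP);   // EXPECTED_WAG_LOGP: hypothetical placeholder
}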

Aggregations

SiteModel (beast.evolution.sitemodel.SiteModel): 40
Alignment (beast.evolution.alignment.Alignment): 27
Test (org.junit.Test): 23
TreeLikelihood (beast.evolution.likelihood.TreeLikelihood): 22
BeagleTreeLikelihood (beast.evolution.likelihood.BeagleTreeLikelihood): 21
Tree (beast.evolution.tree.Tree): 21
UncertainAlignmentTest (test.beast.evolution.alignment.UncertainAlignmentTest): 16
Frequencies (beast.evolution.substitutionmodel.Frequencies): 14
JukesCantor (beast.evolution.substitutionmodel.JukesCantor): 11
RealParameter (beast.core.parameter.RealParameter): 10
HKY (beast.evolution.substitutionmodel.HKY): 7
Sequence (beast.evolution.alignment.Sequence): 6
ConversionGraph (bacter.ConversionGraph): 5
Locus (bacter.Locus): 5
GenericTreeLikelihood (beast.evolution.likelihood.GenericTreeLikelihood): 5
GeneralSubstitutionModel (beast.evolution.substitutionmodel.GeneralSubstitutionModel): 5
Node (beast.evolution.tree.Node): 5
ClusterTree (beast.util.ClusterTree): 5
Conversion (bacter.Conversion): 4
BEASTInterface (beast.core.BEASTInterface): 4