Example 11 with Taxa

Use of dr.evolution.util.Taxa in project beast-mcmc by beast-dev.

In the class MonophylyStatisticParser, the method parseTaxonListOrTaxa:

public static TaxonList parseTaxonListOrTaxa(XMLObject cxo) {
    TaxonList taxa = (TaxonList) cxo.getChild(TaxonList.class);
    if (taxa == null) {
        Taxa taxa1 = new Taxa();
        for (int i = 0; i < cxo.getChildCount(); i++) {
            Object ccxo = cxo.getChild(i);
            if (ccxo instanceof Taxon) {
                taxa1.addTaxon((Taxon) ccxo);
            }
        }
        taxa = taxa1;
    }
    return taxa;
}
Also used: Taxa (dr.evolution.util.Taxa), TaxonList (dr.evolution.util.TaxonList), Taxon (dr.evolution.util.Taxon)
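
A minimal usage sketch, not from the project itself: it assumes the usual dr.xml.XMLObject parser API in which getChild(String) returns the named child element, and the element name "mrca" plus the surrounding parseXMLObject method are purely illustrative. The helper returns a nested TaxonList directly if one is present; otherwise it collects the individual <taxon> children into a fresh Taxa object.

public Object parseXMLObject(XMLObject xo) throws XMLParseException {
    // Either a nested taxon-list element or a flat run of <taxon> children is accepted.
    XMLObject cxo = xo.getChild("mrca");
    TaxonList taxonList = MonophylyStatisticParser.parseTaxonListOrTaxa(cxo);
    // ... use taxonList to build the statistic ...
    return taxonList;
}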

Example 12 with Taxa

Use of dr.evolution.util.Taxa in project beast-mcmc by beast-dev.

In the class AncestralStatesOptionsPanel, the method setupPanel:

/**
 * Lays out the appropriate components in the panel for this partition
 * model.
 */
void setupPanel() {
    isUpdating = true;
    String selectedItem = (String) mrcaReconstructionCombo.getSelectedItem();
    if (mrcaReconstructionCombo.getItemCount() > 0) {
        mrcaReconstructionCombo.removeAllItems();
    }
    mrcaReconstructionCombo.addItem("Tree Root");
    if (options.taxonSets.size() > 0) {
        for (Taxa taxonSet : options.taxonSets) {
            mrcaReconstructionCombo.addItem("MRCA(" + taxonSet.getId() + ")");
        }
        if (selectedItem != null) {
            mrcaReconstructionCombo.setSelectedItem(selectedItem);
        }
    }
    mrcaReconstructionCombo.setEnabled(mrcaReconstructionCheck.isSelected());
    boolean ancestralReconstructionAvailable = true;
    boolean countingAvailable = true;
    boolean dNdSRobustCountingAvailable = false;
    boolean errorModelAvailable = false;
    switch(partition.getDataType().getType()) {
        case DataType.NUCLEOTIDES:
            errorModelAvailable = true;
            // but will be disabled if not codon partitioned
            dNdSRobustCountingAvailable = true;
            break;
        case DataType.AMINO_ACIDS:
        case DataType.GENERAL:
        case DataType.TWO_STATES:
            break;
        case DataType.CONTINUOUS:
            countingAvailable = false;
            break;
        case DataType.MICRO_SAT:
            ancestralReconstructionAvailable = false;
            countingAvailable = false;
            break;
        default:
            throw new IllegalArgumentException("Unsupported data type");
    }
    removeAll();
    if (ancestralReconstructionAvailable) {
        if (partition.getPartitionSubstitutionModel().getCodonPartitionCount() == 2) {
            // mergedPatterns for codon positions 1&2 will always be compressed...
            // so cannot do any of this stuff. Disable it and provide an explanation.
            addSpanningComponent(new JLabel("<html>Unable to provide these options with the 1+2,3 codon<br>" + "position model. Use a 1,2,3 codon position model instead.<br></html>"));
        }
        JLabel label1 = new JLabel("Ancestral State Reconstruction:");
        addSpanningComponent(label1);
        addComponent(ancestralReconstructionCheck);
        FlowLayout layout = new FlowLayout(FlowLayout.LEFT);
        layout.setHgap(0);
        JPanel panel = new JPanel(layout);
        panel.setOpaque(false);
        panel.setBorder(BorderFactory.createEmptyBorder(0, 0, 0, 0));
        panel.add(mrcaReconstructionCheck);
        panel.add(mrcaReconstructionCombo);
        addComponent(panel);
        boolean enabled = true;
        if (partition.getPartitionSubstitutionModel().getCodonPartitionCount() == 2) {
            // mergedPatterns for codon positions 1&2 will always be compressed...
            // so cannot do any of this stuff. Disable it and provide an explanation.
            ancestralReconstructionCheck.setEnabled(false);
            enabled = false;
        }
        label1.setEnabled(enabled);
        panel.setEnabled(enabled);
        ancestralReconstructionCheck.setEnabled(enabled);
        mrcaReconstructionCheck.setEnabled(enabled);
        mrcaReconstructionCombo.setEnabled(enabled);
    }
    if (countingAvailable) {
        if (ancestralReconstructionAvailable) {
            addSeparator();
        }
        JLabel label2 = new JLabel("State Change Count Reconstruction:");
        addSpanningComponent(label2);
        JTextArea text1 = new JTextArea("Select this option to reconstruct counts of state changes using " + "Markov Jumps. This approach is described in Minin & Suchard (2008).");
        text1.setColumns(40);
        PanelUtils.setupComponent(text1);
        addComponent(text1);
        addComponent(countingCheck);
        boolean enableSimpleCounting = true;
        // TODO Simple counting is currently not available for codon partitioned models due to BEAUti limitation
        if (ancestralStatesComponent.dNdSRobustCountingAvailable(partition) || partition.getPartitionSubstitutionModel().getCodonPartitionCount() == 2) {
            enableSimpleCounting = false;
            countingCheck.setSelected(false);
        }
        countingCheck.setEnabled(enableSimpleCounting);
        label2.setEnabled(enableSimpleCounting);
        text1.setEnabled(enableSimpleCounting);
        JTextArea text2 = null;
        if (dNdSRobustCountingAvailable) {
            // addSeparator();
            text2 = new JTextArea("Renaissance counting: select this option to reconstruct counts of synonymous and nonsynonymous " + "changes using Robust Counting. This approach is described in O'Brien, Minin " + "& Suchard (2009) and Lemey, Minin, Bielejec, Kosakovsky-Pond & Suchard " + "(2012):");
            text2.setColumns(40);
            PanelUtils.setupComponent(text2);
            addComponent(text2);
            addComponent(dNdSRobustCountingCheck);
            dNnSText.setColumns(40);
            dNnSText.setBorder(BorderFactory.createEmptyBorder(0, 32, 0, 0));
            PanelUtils.setupComponent(dNnSText);
            addComponent(dNnSText);
            boolean enableRC = ancestralStatesComponent.dNdSRobustCountingAvailable(partition);
            // && !ancestralStatesComponent.isCountingStates(partition);
            dNdSRobustCountingCheck.setEnabled(enableRC);
            ancestralStatesComponent.setDNdSRobustCounting(partition, enableRC && dNdSRobustCountingCheck.isSelected());
            text2.setEnabled(enableRC);
            dNnSText.setEnabled(enableRC);
            if (!enableRC) {
                dNdSRobustCountingCheck.setSelected(false);
            }
        }
        addComponent(completeHistoryLoggingCheck);
        completeHistoryLoggingCheck.setEnabled(countingCheck.isSelected() || dNdSRobustCountingCheck.isSelected());
    }
    if (errorModelAvailable) {
        if (ancestralReconstructionAvailable || countingAvailable) {
            addSeparator();
        }
        JLabel label3 = new JLabel("Sequence error model:");
        addSpanningComponent(label3);
        JLabel label4 = addComponentWithLabel("Error Model:", errorModelCombo);
        boolean enabled = (partition.getPartitionSubstitutionModel().getCodonPartitionCount() != 2);
        label3.setEnabled(enabled);
        label4.setEnabled(enabled);
        errorModelCombo.setEnabled(enabled);
    }
    isUpdating = false;
}
Also used: Taxa (dr.evolution.util.Taxa)

Example 13 with Taxa

Use of dr.evolution.util.Taxa in project beast-mcmc by beast-dev.

In the class DataPanel, the method linkTreeModels:

public void linkTreeModels() {
    // keep previous PartitionTreePrior for reuse
    int[] selRows = dataTable.getSelectedRows();
    List<AbstractPartitionData> selectedPartitionData = new ArrayList<AbstractPartitionData>();
    for (int row : selRows) {
        AbstractPartitionData partition = options.dataPartitions.get(row);
        if (!selectedPartitionData.contains(partition))
            selectedPartitionData.add(partition);
    }
    if (selectedPartitionData.size() > 1) {
        if (!options.hasIdenticalTaxa(selectedPartitionData)) {
            String errMsg = "To share a tree, partitions need to have identical taxa.";
            if (selectedPartitionData.get(0).getDataType().getType() == DataType.MICRO_SAT)
                errMsg += "\nThe data must be all diploid or all haploid when you want to link the tree.";
            JOptionPane.showMessageDialog(this, errMsg, "Unsupported Configuration", JOptionPane.ERROR_MESSAGE);
            return;
        }
    }
    Object[] treeArray = options.getPartitionTreeModels(selectedPartitionData).toArray();
    if (selectTreeDialog == null) {
        selectTreeDialog = new SelectTreeDialog(frame);
    }
    int result = selectTreeDialog.showDialog(treeArray);
    if (result != JOptionPane.CANCEL_OPTION) {
        PartitionTreeModel treeModel = selectTreeDialog.getTree();
        if (selectTreeDialog.getMakeCopy()) {
            treeModel.setName(selectTreeDialog.getName());
        }
        PartitionTreePrior prior = treeModel.getPartitionTreePrior();
        options.linkTreePriors(prior);
        for (AbstractPartitionData partition : selectedPartitionData) {
            partition.setPartitionTreeModel(treeModel);
            // Clock models need to refer to the same tree as the data (many to one relationship).
            // Make sure the clock model for this partition refers to the same tree as the partition.
            PartitionClockModel clockModel = partition.getPartitionClockModel();
            clockModel.setPartitionTreeModel(treeModel);
        }
        for (Taxa taxa : options.taxonSets) {
            // Issue 454: all the taxon sets are deleted when link/unlink tree
            PartitionTreeModel prevModel = options.taxonSetsTreeModel.get(taxa);
            if (prevModel != treeModel)
                options.taxonSetsTreeModel.put(taxa, treeModel);
        }
    }
    modelsChanged();
    fireDataChanged();
    repaint();
}
Also used: ArrayList (java.util.ArrayList), Taxa (dr.evolution.util.Taxa)

Example 14 with Taxa

Use of dr.evolution.util.Taxa in project beast-mcmc by beast-dev.

In the class BeastGenerator, the method generateXML:

/**
 * Generate a beast xml file from these beast options
 *
 * @param file File
 * @throws java.io.IOException IOException
 * @throws dr.app.util.Arguments.ArgumentException
 *                             ArgumentException
 */
public void generateXML(File file) throws GeneratorException, IOException, Arguments.ArgumentException {
    XMLWriter writer = new XMLWriter(new BufferedWriter(new FileWriter(file)));
    writer.writeText("<?xml version=\"1.0\" standalone=\"yes\"?>");
    writer.writeComment("Generated by BEAUTi " + VERSION.getVersionString(), "      by Alexei J. Drummond, Andrew Rambaut and Marc A. Suchard", "      Department of Computer Science, University of Auckland and", "      Institute of Evolutionary Biology, University of Edinburgh", "      David Geffen School of Medicine, University of California, Los Angeles", "      http://beast.community/");
    writer.writeOpenTag("beast", new Attribute.Default<String>("version", BeautiApp.VERSION.getVersion()));
    writer.writeText("");
    // this gives any added implementations of the 'Component' interface a
    // chance to generate XML at this point in the BEAST file.
    generateInsertionPoint(ComponentGenerator.InsertionPoint.BEFORE_TAXA, writer);
    if (options.originDate != null) {
        // Create a dummy taxon whose job is to specify the origin date
        Taxon originTaxon = new Taxon("originTaxon");
        options.originDate.setUnits(options.units);
        originTaxon.setDate(options.originDate);
        writeTaxon(originTaxon, true, false, writer);
    }
    // ++++++++++++++++ Taxon List ++++++++++++++++++
    try {
        // write complete taxon list
        writeTaxa(options.taxonList, writer);
        writer.writeText("");
        if (!options.hasIdenticalTaxa()) {
            // write all taxa in each gene tree, for each data partition
            for (AbstractPartitionData partition : options.dataPartitions) {
                if (partition.getTaxonList() != null) {
                    writeDifferentTaxa(partition, writer);
                }
            }
        } else {
            // microsat
            for (PartitionPattern partitionPattern : options.getPartitionPattern()) {
                if (partitionPattern.getTaxonList() != null && partitionPattern.getPatterns().hasMask()) {
                    writeDifferentTaxa(partitionPattern, writer);
                }
            }
        }
    } catch (Exception e) {
        e.printStackTrace(System.err);
        throw new GeneratorException("Taxon list generation has failed:\n" + e.getMessage());
    }
    // ++++++++++++++++ Taxon Sets ++++++++++++++++++
    List<Taxa> taxonSets = options.taxonSets;
    try {
        if (taxonSets != null && taxonSets.size() > 0) {
            tmrcaStatisticsGenerator.writeTaxonSets(writer, taxonSets);
        }
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("Taxon sets generation has failed:\n" + e.getMessage());
    }
    generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_TAXA, writer);
    // ++++++++++++++++ Alignments ++++++++++++++++++
    List<Alignment> alignments = new ArrayList<Alignment>();
    try {
        for (AbstractPartitionData partition : options.dataPartitions) {
            Alignment alignment = null;
            if (partition instanceof PartitionData) {
                // microsat has no alignment
                alignment = ((PartitionData) partition).getAlignment();
            }
            if (alignment != null && !alignments.contains(alignment)) {
                alignments.add(alignment);
            }
        }
        if (alignments.size() > 0) {
            alignmentGenerator.writeAlignments(alignments, writer);
            generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_SEQUENCES, writer);
        }
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("Alignments generation has failed:\n" + e.getMessage());
    }
    // ++++++++++++++++ Pattern Lists ++++++++++++++++++
    try {
        // Construct pattern lists even if sampling from a null alignment
        // if (!options.samplePriorOnly) {
        List<Microsatellite> microsatList = new ArrayList<Microsatellite>();
        for (AbstractPartitionData partition : options.dataPartitions) {
            // Each PD has one TreeLikelihood
            if (partition.getTaxonList() != null) {
                switch(partition.getDataType().getType()) {
                    case DataType.NUCLEOTIDES:
                    case DataType.AMINO_ACIDS:
                    case DataType.CODONS:
                    case DataType.COVARION:
                    case DataType.TWO_STATES:
                        patternListGenerator.writePatternList((PartitionData) partition, writer);
                        break;
                    case DataType.GENERAL:
                    case DataType.CONTINUOUS:
                        // handled by the attribute patterns generated later in this method
                        break;
                    case DataType.MICRO_SAT:
                        // microsat does not have alignment
                        patternListGenerator.writePatternList((PartitionPattern) partition, microsatList, writer);
                        break;
                    default:
                        throw new IllegalArgumentException("Unsupported data type");
                }
                writer.writeText("");
            }
        }
    // }
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("Pattern lists generation has failed:\n" + e.getMessage());
    }
    generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_PATTERNS, writer);
    // ++++++++++++++++ Tree Prior Model ++++++++++++++++++
    try {
        for (PartitionTreePrior prior : options.getPartitionTreePriors()) {
            treePriorGenerator.writeTreePriorModel(prior, writer);
            writer.writeText("");
        }
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("Tree prior model generation has failed:\n" + e.getMessage());
    }
    // ++++++++++++++++ Starting Tree ++++++++++++++++++
    try {
        for (PartitionTreeModel model : options.getPartitionTreeModels()) {
            initialTreeGenerator.writeStartingTree(model, writer);
            writer.writeText("");
        }
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("Starting tree generation has failed:\n" + e.getMessage());
    }
    // ++++++++++++++++ Tree Model +++++++++++++++++++
    try {
        for (PartitionTreeModel model : options.getPartitionTreeModels()) {
            treeModelGenerator.writeTreeModel(model, writer);
            writer.writeText("");
        }
        generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_TREE_MODEL, writer);
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("Tree model generation has failed:\n" + e.getMessage());
    }
    // ++++++++++++++++ Statistics ++++++++++++++++++
    try {
        if (taxonSets != null && taxonSets.size() > 0) {
            tmrcaStatisticsGenerator.writeTMRCAStatistics(writer);
        }
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("TMRCA statistics generation has failed:\n" + e.getMessage());
    }
    // ++++++++++++++++ Tree Prior Likelihood ++++++++++++++++++
    try {
        for (PartitionTreeModel model : options.getPartitionTreeModels()) {
            treePriorGenerator.writePriorLikelihood(model, writer);
            writer.writeText("");
        }
        for (PartitionTreePrior prior : options.getPartitionTreePriors()) {
            treePriorGenerator.writeMultiLociTreePriors(prior, writer);
        }
        generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_TREE_PRIOR, writer);
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("Tree prior likelihood generation has failed:\n" + e.getMessage());
    }
    // ++++++++++++++++ Branch Rates Model ++++++++++++++++++
    try {
        for (PartitionClockModel model : options.getPartitionClockModels()) {
            clockModelGenerator.writeBranchRatesModel(model, writer);
            writer.writeText("");
        }
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("Branch rates model generation has failed:\n" + e.getMessage());
    }
    // ++++++++++++++++ Substitution Model & Site Model ++++++++++++++++++
    try {
        for (PartitionSubstitutionModel model : options.getPartitionSubstitutionModels()) {
            substitutionModelGenerator.writeSubstitutionSiteModel(model, writer);
            writer.writeText("");
        }
        generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_SUBSTITUTION_MODEL, writer);
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("Substitution model or site model generation has failed:\n" + e.getMessage());
    }
    // ++++++++++++++++ AllMus parameter ++++++++++++++++++
    try {
        for (PartitionClockModel model : options.getPartitionClockModels()) {
            clockModelGenerator.writeAllMus(model, writer);
        }
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("Clock model generation has failed:\n" + e.getMessage());
    }
    // ++++++++++++++++ Site Model ++++++++++++++++++
    // for (PartitionSubstitutionModel model : options.getPartitionSubstitutionModels()) {
    // substitutionModelGenerator.writeSiteModel(model, writer); // site model
    // substitutionModelGenerator.writeAllMus(model, writer); // allMus
    // writer.writeText("");
    // }
    generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_SITE_MODEL, writer);
    // ++++++++++++++++ Tree Likelihood ++++++++++++++++++
    try {
        Map<Pair<Pair<PartitionTreeModel, PartitionClockModel>, DataType>, List<PartitionData>> partitionLists = new HashMap<Pair<Pair<PartitionTreeModel, PartitionClockModel>, DataType>, List<PartitionData>>();
        options.multiPartitionLists.clear();
        options.otherPartitions.clear();
        for (AbstractPartitionData partition : options.dataPartitions) {
            // generate tree likelihoods for alignment data partitions
            if (partition.getTaxonList() != null) {
                if (treeLikelihoodGenerator.canUseMultiPartition(partition)) {
                    // Group sequence partitions that share the same tree, clock model and data type
                    // into one list for use in a MultipartitionTreeDataLikelihood; partitions doing
                    // ancestral reconstruction or counting are not eligible for this.
                    Pair<Pair<PartitionTreeModel, PartitionClockModel>, DataType> key = new Pair(new Pair(partition.getPartitionTreeModel(), partition.getPartitionClockModel()), partition.getDataType());
                    List<PartitionData> partitions = partitionLists.get(key);
                    if (partitions == null) {
                        partitions = new ArrayList<PartitionData>();
                        options.multiPartitionLists.add(partitions);
                    }
                    partitions.add((PartitionData) partition);
                    partitionLists.put(key, partitions);
                } else {
                    options.otherPartitions.add(partition);
                }
            }
        }
        treeLikelihoodGenerator.writeAllTreeLikelihoods(writer);
        generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_TREE_LIKELIHOOD, writer);
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("Tree likelihood generation has failed:\n" + e.getMessage());
    }
    generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_TRAITS, writer);
    // ++++++++++++++++ Operators ++++++++++++++++++
    try {
        generateInsertionPoint(ComponentGenerator.InsertionPoint.BEFORE_OPERATORS, writer);
        List<Operator> operators = options.selectOperators();
        operatorsGenerator.writeOperatorSchedule(operators, writer);
        writer.writeText("");
        generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_OPERATORS, writer);
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("Operators generation has failed:\n" + e.getMessage());
    }
    // ++++++++++++++++ MCMC ++++++++++++++++++
    try {
        // XMLWriter writer, List<PartitionSubstitutionModel> models,
        writeMCMC(writer);
        writer.writeText("");
        generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_MCMC, writer);
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("MCMC or log generation has failed:\n" + e.getMessage());
    }
    // ++++++++++++++++ Timer Report / Trace Analysis / CSV ++++++++++++++++++
    try {
        writeTimerReport(writer);
        writer.writeText("");
        if (options.performTraceAnalysis) {
            writeTraceAnalysis(writer);
        }
        if (options.generateCSV) {
            for (PartitionTreePrior prior : options.getPartitionTreePriors()) {
                treePriorGenerator.writeEBSPAnalysisToCSVfile(prior, writer);
            }
        }
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("The last part of XML generation has failed:\n" + e.getMessage());
    }
    writer.writeCloseTag("beast");
    writer.flush();
    writer.close();
}
Also used: Attribute (dr.util.Attribute), FileWriter (java.io.FileWriter), XMLWriter (dr.app.beauti.util.XMLWriter), BufferedWriter (java.io.BufferedWriter), Taxa (dr.evolution.util.Taxa), Alignment (dr.evolution.alignment.Alignment), DataType (dr.evolution.datatype.DataType), TaxonList (dr.evolution.util.TaxonList), Pair (dr.util.Pair), Microsatellite (dr.evolution.datatype.Microsatellite), Taxon (dr.evolution.util.Taxon), IOException (java.io.IOException)
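
A hedged usage sketch: only generateXML(File) and its declared exceptions are taken from the example above; the helper method below, and how the BeastGenerator instance is obtained from the BEAUti options, are illustrative assumptions.

static void writeBeastFile(BeastGenerator generator, File outFile) {
    try {
        // Each stage of generateXML wraps its own failure into the GeneratorException message.
        generator.generateXML(outFile);
    } catch (GeneratorException | IOException | Arguments.ArgumentException e) {
        System.err.println("BEAST XML generation failed: " + e.getMessage());
    }
}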

Example 15 with Taxa

Use of dr.evolution.util.Taxa in project beast-mcmc by beast-dev.

In the class ClockModelGenerator, the method writeLog:

public void writeLog(PartitionClockModel model, XMLWriter writer) {
    String prefix = model.getPrefix();
    if (options.useNuRelativeRates()) {
        Parameter allNus = model.getParameter("allNus");
        if (allNus.getSubParameters().size() > 1) {
            // The mu's are the more relevant parameter and allow comparisons with the old parameterization
            // May be confusing to log the nus and mus, but necessary for use with generalized stepping-stone sampling
            writer.writeIDref(CompoundParameterParser.COMPOUND_PARAMETER, prefix + "allNus");
            for (Parameter parameter : allNus.getSubParameters()) {
                String name = parameter.getName();
                writer.writeIDref(StatisticParser.STATISTIC, name.substring(0, name.lastIndexOf(".")) + ".mu");
            }
        }
    } else {
        Parameter allMus = model.getParameter("allMus");
        if (allMus.getSubParameters().size() > 1) {
            writer.writeIDref(CompoundParameterParser.COMPOUND_PARAMETER, prefix + "allMus");
        }
    }
    switch(model.getClockType()) {
        case STRICT_CLOCK:
        case RANDOM_LOCAL_CLOCK:
            writer.writeIDref(ParameterParser.PARAMETER, prefix + "clock.rate");
            break;
        case FIXED_LOCAL_CLOCK:
            writer.writeIDref(ParameterParser.PARAMETER, prefix + "clock.rate");
            for (Taxa taxonSet : options.taxonSets) {
                if (options.taxonSetsMono.get(taxonSet)) {
                    String parameterName = taxonSet.getId() + ".rate";
                    writer.writeIDref(ParameterParser.PARAMETER, model.getPrefix() + parameterName);
                }
            }
            break;
        case UNCORRELATED:
            if (model.performModelAveraging()) {
                writer.writeIDref(ParameterParser.PARAMETER, prefix + ClockType.UCLD_MEAN);
                writer.writeIDref(ParameterParser.PARAMETER, prefix + ClockType.UCLD_STDEV);
                writer.writeIDref(ParameterParser.PARAMETER, prefix + ClockType.UCGD_MEAN);
                writer.writeIDref(ParameterParser.PARAMETER, prefix + ClockType.UCGD_SHAPE);
                writer.writeIDref(ParameterParser.PARAMETER, prefix + ClockType.UCED_MEAN);
                writer.writeIDref(ParameterParser.PARAMETER, "branchRates.distributionIndex");
                writer.writeIDref(ParameterParser.PARAMETER, "branchRates.quantiles");
            } else {
                switch(model.getClockDistributionType()) {
                    case LOGNORMAL:
                        writer.writeIDref(ParameterParser.PARAMETER, prefix + ClockType.UCLD_MEAN);
                        writer.writeIDref(ParameterParser.PARAMETER, prefix + ClockType.UCLD_STDEV);
                        break;
                    case GAMMA:
                        writer.writeIDref(ParameterParser.PARAMETER, prefix + ClockType.UCGD_MEAN);
                        writer.writeIDref(ParameterParser.PARAMETER, prefix + ClockType.UCGD_SHAPE);
                        break;
                    case CAUCHY:
                        throw new UnsupportedOperationException("Uncorrelated Cauchy model not supported yet");
                    // break;
                    case EXPONENTIAL:
                        writer.writeIDref(ParameterParser.PARAMETER, prefix + ClockType.UCED_MEAN);
                        break;
                }
            }
            // no break: execution falls through to AUTOCORRELATED, which is currently a no-op
        case AUTOCORRELATED:
            // TODO
            break;
        default:
            throw new IllegalArgumentException("Unknown clock model");
    }
}
Also used: Taxa (dr.evolution.util.Taxa)
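
A minimal sketch for inspecting what writeLog emits, assuming access to the generator's options and a clockModelGenerator field as in Example 14; the XMLWriter-around-a-Writer construction is taken from Example 14, while backing it with a java.io.StringWriter here is purely for illustration.

void dumpClockModelLogColumns() throws IOException {
    StringWriter buffer = new StringWriter();
    XMLWriter writer = new XMLWriter(buffer);
    for (PartitionClockModel model : options.getPartitionClockModels()) {
        // One block of parameter idrefs per clock model, chosen by its clock type as above.
        clockModelGenerator.writeLog(model, writer);
    }
    writer.flush();
    System.out.println(buffer.toString());
}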

Aggregations

Taxa (dr.evolution.util.Taxa): 75
Taxon (dr.evolution.util.Taxon): 34
ArrayList (java.util.ArrayList): 23
Tree (dr.evolution.tree.Tree): 19
TaxonList (dr.evolution.util.TaxonList): 15
Attribute (dr.util.Attribute): 13
DefaultTreeModel (dr.evomodel.tree.DefaultTreeModel): 10
TreeModel (dr.evomodel.tree.TreeModel): 10
Patterns (dr.evolution.alignment.Patterns): 9
Microsatellite (dr.evolution.datatype.Microsatellite): 9
IOException (java.io.IOException): 8
NewickImporter (dr.evolution.io.NewickImporter): 7
Parameter (dr.inference.model.Parameter): 6
Date (dr.evolution.util.Date): 5
GammaSiteModel (dr.oldevomodel.sitemodel.GammaSiteModel): 5
SimpleAlignment (dr.evolution.alignment.SimpleAlignment): 4
ImportException (dr.evolution.io.Importer.ImportException): 4
BranchRateModel (dr.evomodel.branchratemodel.BranchRateModel): 4
CoalescentSimulator (dr.evomodel.coalescent.CoalescentSimulator): 4
PartitionTreeModel (dr.app.beauti.options.PartitionTreeModel): 3