
Example 1 with AncestralStatesComponentOptions

use of dr.app.beauti.components.ancestralstates.AncestralStatesComponentOptions in project beast-mcmc by beast-dev.

In the class DiscreteTraitsComponentGenerator, the method writeTreeLikelihood:

/**
 * Write the tree likelihood for a discrete-trait partition.
 *
 * @param partition the AbstractPartitionData partition
 * @param writer    the XMLWriter
 */
public void writeTreeLikelihood(AbstractPartitionData partition, XMLWriter writer) {
    String prefix = partition.getName() + ".";
    PartitionSubstitutionModel substModel = partition.getPartitionSubstitutionModel();
    PartitionTreeModel treeModel = partition.getPartitionTreeModel();
    PartitionClockModel clockModel = partition.getPartitionClockModel();
    AncestralStatesComponentOptions ancestralStatesOptions = (AncestralStatesComponentOptions) options.getComponentOptions(AncestralStatesComponentOptions.class);
    String treeLikelihoodTag = TreeLikelihoodParser.ANCESTRAL_TREE_LIKELIHOOD;
    if (ancestralStatesOptions.isCountingStates(partition)) {
        treeLikelihoodTag = MarkovJumpsTreeLikelihoodParser.MARKOV_JUMP_TREE_LIKELIHOOD;
    }
    writer.writeOpenTag(treeLikelihoodTag, new Attribute[] {
            new Attribute.Default<String>(XMLParser.ID, prefix + TreeLikelihoodParser.TREE_LIKELIHOOD),
            new Attribute.Default<String>(AncestralStateTreeLikelihoodParser.RECONSTRUCTION_TAG_NAME, prefix + AncestralStateTreeLikelihoodParser.RECONSTRUCTION_TAG)
    });
    writer.writeIDref(AttributePatternsParser.ATTRIBUTE_PATTERNS, prefix + "pattern");
    writer.writeIDref(TreeModel.TREE_MODEL, treeModel.getPrefix() + TreeModel.TREE_MODEL);
    writer.writeIDref(SiteModel.SITE_MODEL, substModel.getName() + "." + SiteModel.SITE_MODEL);
    writer.writeIDref(GeneralSubstitutionModelParser.GENERAL_SUBSTITUTION_MODEL, substModel.getName() + "." + AbstractSubstitutionModel.MODEL);
    ClockModelGenerator.writeBranchRatesModelRef(clockModel, writer);
    if (substModel.getDiscreteSubstType() == DiscreteSubstModelType.ASYM_SUBST) {
        int stateCount = options.getStatesForDiscreteModel(substModel).size();
        writer.writeComment("The root state frequencies");
        writeDiscreteFrequencyModel(partition.getPrefix() + "root.", substModel.getName() + ".", stateCount, true, writer);
    }
    getCallingGenerator().generateInsertionPoint(ComponentGenerator.InsertionPoint.IN_TREE_LIKELIHOOD, partition, writer);
    writer.writeCloseTag(treeLikelihoodTag);
}
Also used : Attribute(dr.util.Attribute) AncestralStatesComponentOptions(dr.app.beauti.components.ancestralstates.AncestralStatesComponentOptions)
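
For orientation, here is a minimal caller sketch (hypothetical, not taken from beast-mcmc): it shows where a method like this would typically be invoked, reusing only calls that already appear in these examples (options.dataPartitions, getDataType().getType() and DataType.GENERAL from Example 2).

// Hypothetical caller: emit one discrete-trait tree likelihood per GENERAL-data partition.
for (AbstractPartitionData partition : options.dataPartitions) {
    if (partition.getTaxonList() != null && partition.getDataType().getType() == DataType.GENERAL) {
        writeTreeLikelihood(partition, writer);
    }
}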

Example 2 with AncestralStatesComponentOptions

use of dr.app.beauti.components.ancestralstates.AncestralStatesComponentOptions in project beast-mcmc by beast-dev.

In the class BeastGenerator, the method generateXML:

/**
 * Generate a BEAST XML file from these options.
 *
 * @param file the output File
 * @throws GeneratorException if any section of the XML fails to generate
 * @throws java.io.IOException IOException
 * @throws dr.app.util.Arguments.ArgumentException ArgumentException
 */
public void generateXML(File file) throws GeneratorException, IOException, Arguments.ArgumentException {
    XMLWriter writer = new XMLWriter(new BufferedWriter(new FileWriter(file)));
    writer.writeText("<?xml version=\"1.0\" standalone=\"yes\"?>");
    writer.writeComment("Generated by BEAUTi " + version.getVersionString(), "      by Alexei J. Drummond, Andrew Rambaut and Marc A. Suchard", "      Department of Computer Science, University of Auckland and", "      Institute of Evolutionary Biology, University of Edinburgh", "      David Geffen School of Medicine, University of California, Los Angeles", "      http://beast.bio.ed.ac.uk/");
    writer.writeOpenTag("beast");
    writer.writeText("");
    // this gives any added implementations of the 'Component' interface a
    // chance to generate XML at this point in the BEAST file.
    generateInsertionPoint(ComponentGenerator.InsertionPoint.BEFORE_TAXA, writer);
    if (options.originDate != null) {
        // Create a dummy taxon whose job is to specify the origin date
        Taxon originTaxon = new Taxon("originTaxon");
        options.originDate.setUnits(options.units);
        originTaxon.setDate(options.originDate);
        writeTaxon(originTaxon, true, false, writer);
    }
    //++++++++++++++++ Taxon List ++++++++++++++++++
    try {
        // write complete taxon list
        writeTaxa(options.taxonList, writer);
        writer.writeText("");
        if (!options.hasIdenticalTaxa()) {
            // write all taxa in each gene tree regarding each data partition,
            for (AbstractPartitionData partition : options.dataPartitions) {
                if (partition.getTaxonList() != null) {
                    writeDifferentTaxa(partition, writer);
                }
            }
        } else {
            // microsat
            for (PartitionPattern partitionPattern : options.getPartitionPattern()) {
                if (partitionPattern.getTaxonList() != null && partitionPattern.getPatterns().hasMask()) {
                    writeDifferentTaxa(partitionPattern, writer);
                }
            }
        }
    } catch (Exception e) {
        e.printStackTrace(System.err);
        throw new GeneratorException("Taxon list generation has failed:\n" + e.getMessage());
    }
    //++++++++++++++++ Taxon Sets ++++++++++++++++++
    List<Taxa> taxonSets = options.taxonSets;
    try {
        if (taxonSets != null && taxonSets.size() > 0 && !options.useStarBEAST) {
            tmrcaStatisticsGenerator.writeTaxonSets(writer, taxonSets);
        }
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("Taxon sets generation has failed:\n" + e.getMessage());
    }
    generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_TAXA, writer);
    //++++++++++++++++ Alignments ++++++++++++++++++
    List<Alignment> alignments = new ArrayList<Alignment>();
    try {
        for (AbstractPartitionData partition : options.dataPartitions) {
            Alignment alignment = null;
            if (partition instanceof PartitionData) {
                // microsat has no alignment
                alignment = ((PartitionData) partition).getAlignment();
            }
            if (alignment != null && !alignments.contains(alignment)) {
                alignments.add(alignment);
            }
        }
        if (alignments.size() > 0) {
            alignmentGenerator.writeAlignments(alignments, writer);
            generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_SEQUENCES, writer);
        }
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("Alignments generation has failed:\n" + e.getMessage());
    }
    //++++++++++++++++ Pattern Lists ++++++++++++++++++
    try {
        if (!options.samplePriorOnly) {
            List<Microsatellite> microsatList = new ArrayList<Microsatellite>();
            for (AbstractPartitionData partition : options.dataPartitions) {
                // Each PD has one TreeLikelihood
                if (partition.getTaxonList() != null) {
                    switch(partition.getDataType().getType()) {
                        case DataType.NUCLEOTIDES:
                        case DataType.AMINO_ACIDS:
                        case DataType.CODONS:
                        case DataType.COVARION:
                        case DataType.TWO_STATES:
                            patternListGenerator.writePatternList((PartitionData) partition, writer);
                            break;
                        case DataType.GENERAL:
                        case DataType.CONTINUOUS:
                            // attribute patterns are generated in the next part of this method
                            break;
                        case DataType.MICRO_SAT:
                            // microsat does not have alignment
                            patternListGenerator.writePatternList((PartitionPattern) partition, microsatList, writer);
                            break;
                        default:
                            throw new IllegalArgumentException("Unsupported data type");
                    }
                    writer.writeText("");
                }
            }
        }
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("Pattern lists generation has failed:\n" + e.getMessage());
    }
    generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_PATTERNS, writer);
    //++++++++++++++++ Tree Prior Model ++++++++++++++++++
    try {
        for (PartitionTreePrior prior : options.getPartitionTreePriors()) {
            treePriorGenerator.writeTreePriorModel(prior, writer);
            writer.writeText("");
        }
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("Tree prior model generation has failed:\n" + e.getMessage());
    }
    //++++++++++++++++ Starting Tree ++++++++++++++++++
    try {
        for (PartitionTreeModel model : options.getPartitionTreeModels()) {
            initialTreeGenerator.writeStartingTree(model, writer);
            writer.writeText("");
        }
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("Starting tree generation has failed:\n" + e.getMessage());
    }
    //++++++++++++++++ Tree Model +++++++++++++++++++
    try {
        for (PartitionTreeModel model : options.getPartitionTreeModels()) {
            treeModelGenerator.writeTreeModel(model, writer);
            writer.writeText("");
        }
        generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_TREE_MODEL, writer);
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("Tree model generation has failed:\n" + e.getMessage());
    }
    //++++++++++++++++ Statistics ++++++++++++++++++
    try {
        if (taxonSets != null && taxonSets.size() > 0 && !options.useStarBEAST) {
            tmrcaStatisticsGenerator.writeTMRCAStatistics(writer);
        }
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("TMRCA statistics generation has failed:\n" + e.getMessage());
    }
    //++++++++++++++++ Tree Prior Likelihood ++++++++++++++++++
    try {
        for (PartitionTreeModel model : options.getPartitionTreeModels()) {
            treePriorGenerator.writePriorLikelihood(model, writer);
            writer.writeText("");
        }
        for (PartitionTreePrior prior : options.getPartitionTreePriors()) {
            treePriorGenerator.writeMultiLociTreePriors(prior, writer);
        }
        generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_TREE_PRIOR, writer);
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("Tree prior likelihood generation has failed:\n" + e.getMessage());
    }
    //++++++++++++++++ Branch Rates Model ++++++++++++++++++
    try {
        for (PartitionClockModel model : options.getPartitionClockModels()) {
            clockModelGenerator.writeBranchRatesModel(model, writer);
            writer.writeText("");
        }
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("Branch rates model generation has failed:\n" + e.getMessage());
    }
    //++++++++++++++++ Substitution Model & Site Model ++++++++++++++++++
    try {
        for (PartitionSubstitutionModel model : options.getPartitionSubstitutionModels()) {
            substitutionModelGenerator.writeSubstitutionSiteModel(model, writer);
            writer.writeText("");
        }
        generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_SUBSTITUTION_MODEL, writer);
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("Substitution model or site model generation has failed:\n" + e.getMessage());
    }
    //++++++++++++++++ AllMus parameter ++++++++++++++++++
    try {
        for (PartitionClockModel model : options.getPartitionClockModels()) {
            clockModelGenerator.writeAllMus(model, writer);
        }
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("Clock model generation has failed:\n" + e.getMessage());
    }
    //++++++++++++++++ Site Model ++++++++++++++++++
    //        for (PartitionSubstitutionModel model : options.getPartitionSubstitutionModels()) {
    //            substitutionModelGenerator.writeSiteModel(model, writer); // site model
    //            substitutionModelGenerator.writeAllMus(model, writer); // allMus
    //            writer.writeText("");
    //        }
    generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_SITE_MODEL, writer);
    //++++++++++++++++ Tree Likelihood ++++++++++++++++++
    try {
        AncestralStatesComponentOptions ancestralStatesOptions = (AncestralStatesComponentOptions) options.getComponentOptions(AncestralStatesComponentOptions.class);
        Map<Pair<PartitionTreeModel, DataType>, List<PartitionData>> partitionLists = new HashMap<Pair<PartitionTreeModel, DataType>, List<PartitionData>>();
        List<AbstractPartitionData> otherPartitions = new ArrayList<AbstractPartitionData>();
        for (AbstractPartitionData partition : options.dataPartitions) {
            // generate tree likelihoods for alignment data partitions
            if (partition.getTaxonList() != null) {
                if (partition.getDataType().getType() == DataType.NUCLEOTIDES || partition.getDataType().getType() == DataType.AMINO_ACIDS) {
                    // Collect all sequence partitions of the same data type that share a tree into one
                    // list, for use in a MultipartitionTreeDataLikelihood. They must also not be doing
                    // ancestral reconstruction or counting.
                    Pair<PartitionTreeModel, DataType> key = new Pair<>(partition.getPartitionTreeModel(), partition.getDataType());
                    List<PartitionData> partitions = partitionLists.get(key);
                    if (partitions == null) {
                        partitions = new ArrayList<PartitionData>();
                    }
                    partitions.add((PartitionData) partition);
                    partitionLists.put(key, partitions);
                } else {
                    otherPartitions.add(partition);
                }
            }
        }
        for (List<PartitionData> partitions : partitionLists.values()) {
            treeLikelihoodGenerator.writeTreeDataLikelihood(partitions, writer);
            writer.writeText("");
        }
        for (AbstractPartitionData partition : otherPartitions) {
            // generate tree likelihoods for the other data partitions
            if (partition.getTaxonList() != null) {
                if (partition instanceof PartitionData) {
                    treeLikelihoodGenerator.writeTreeLikelihood((PartitionData) partition, writer);
                    writer.writeText("");
                } else if (partition instanceof PartitionPattern) {
                    // microsat
                    treeLikelihoodGenerator.writeTreeLikelihood((PartitionPattern) partition, writer);
                    writer.writeText("");
                } else {
                    throw new GeneratorException("Find unrecognized partition:\n" + partition.getName());
                }
            }
        }
        generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_TREE_LIKELIHOOD, writer);
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("Tree likelihood generation has failed:\n" + e.getMessage());
    }
    //++++++++++++++++ *BEAST ++++++++++++++++++
    if (options.useStarBEAST) {
        //++++++++++++++++ species ++++++++++++++++++
        try {
            starBeastGenerator.writeSpecies(writer);
        } catch (Exception e) {
            e.printStackTrace();
            throw new GeneratorException("*BEAST species section generation has failed:\n" + e.getMessage());
        }
        //++++++++++++++++ Species Sets ++++++++++++++++++
        List<Taxa> speciesSets = options.speciesSets;
        try {
            if (speciesSets != null && speciesSets.size() > 0) {
                tmrcaStatisticsGenerator.writeTaxonSets(writer, speciesSets);
            }
        } catch (Exception e) {
            e.printStackTrace();
            throw new GeneratorException("Species sets generation has failed:\n" + e.getMessage());
        }
        //++++++++++++++++ trees ++++++++++++++++++
        try {
            if (speciesSets != null && speciesSets.size() > 0) {
                starBeastGenerator.writeStartingTreeForCalibration(writer);
            }
            starBeastGenerator.writeSpeciesTree(writer, speciesSets != null && speciesSets.size() > 0);
        } catch (Exception e) {
            e.printStackTrace();
            throw new GeneratorException("*BEAST trees generation has failed:\n" + e.getMessage());
        }
        //++++++++++++++++ Statistics ++++++++++++++++++
        try {
            if (speciesSets != null && speciesSets.size() > 0) {
                tmrcaStatisticsGenerator.writeTMRCAStatistics(writer);
            }
        } catch (Exception e) {
            e.printStackTrace();
            throw new GeneratorException("*BEAST TMRCA statistics generation has failed:\n" + e.getMessage());
        }
        //++++++++++++++++ prior and likelihood ++++++++++++++++++
        try {
            starBeastGenerator.writeSTARBEAST(writer);
        } catch (Exception e) {
            e.printStackTrace();
            throw new GeneratorException("*BEAST trees section generation has failed:\n" + e.getMessage());
        }
    }
    generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_TRAITS, writer);
    //++++++++++++++++ Operators ++++++++++++++++++
    try {
        generateInsertionPoint(ComponentGenerator.InsertionPoint.BEFORE_OPERATORS, writer);
        List<Operator> operators = options.selectOperators();
        operatorsGenerator.writeOperatorSchedule(operators, writer);
        writer.writeText("");
        generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_OPERATORS, writer);
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("Operators generation has failed:\n" + e.getMessage());
    }
    //++++++++++++++++ MCMC ++++++++++++++++++
    try {
        // XMLWriter writer, List<PartitionSubstitutionModel> models,
        writeMCMC(writer);
        writer.writeText("");
        generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_MCMC, writer);
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("MCMC or log generation has failed:\n" + e.getMessage());
    }
    //++++++++++++++++ Timer Report, Trace Analysis and CSV ++++++++++++++++++
    try {
        writeTimerReport(writer);
        writer.writeText("");
        if (options.performTraceAnalysis) {
            writeTraceAnalysis(writer);
        }
        if (options.generateCSV) {
            for (PartitionTreePrior prior : options.getPartitionTreePriors()) {
                treePriorGenerator.writeEBSPAnalysisToCSVfile(prior, writer);
            }
        }
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("The last part of XML generation has failed:\n" + e.getMessage());
    }
    writer.writeCloseTag("beast");
    writer.flush();
    writer.close();
}
Also used : FileWriter(java.io.FileWriter) XMLWriter(dr.app.beauti.util.XMLWriter) BufferedWriter(java.io.BufferedWriter) Taxa(dr.evolution.util.Taxa) Alignment(dr.evolution.alignment.Alignment) AncestralStatesComponentOptions(dr.app.beauti.components.ancestralstates.AncestralStatesComponentOptions) DataType(dr.evolution.datatype.DataType) TaxonList(dr.evolution.util.TaxonList) Pair(dr.util.Pair) Microsatellite(dr.evolution.datatype.Microsatellite) Taxon(dr.evolution.util.Taxon) IOException(java.io.IOException)
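
A minimal usage sketch for generateXML (hypothetical driver code, not from beast-mcmc). It assumes a BeastGenerator instance and a populated options object have been constructed elsewhere in BEAUti, and it relies only on the exception types declared in the signature above.

// Hypothetical driver: write the full <beast> document to a file and report any failure.
File xmlFile = new File("analysis.xml");
try {
    beastGenerator.generateXML(xmlFile);
} catch (GeneratorException | IOException | Arguments.ArgumentException e) {
    System.err.println("BEAST XML generation failed: " + e.getMessage());
}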

Example 3 with AncestralStatesComponentOptions

use of dr.app.beauti.components.ancestralstates.AncestralStatesComponentOptions in project beast-mcmc by beast-dev.

In the class TreeLikelihoodGenerator, the private method writeTreeLikelihood:

/**
 * Write the tree likelihood XML block.
 *
 * @param tag       the XML element name to use for the likelihood
 * @param id        the id of the tree likelihood
 * @param num       the codon partition number, or -1 if the partition is not codon-partitioned
 * @param partition the partition to write the likelihood block for
 * @param writer    the writer
 */
private void writeTreeLikelihood(String tag, String id, int num, PartitionData partition, XMLWriter writer) {
    PartitionSubstitutionModel substModel = partition.getPartitionSubstitutionModel();
    PartitionTreeModel treeModel = partition.getPartitionTreeModel();
    PartitionClockModel clockModel = partition.getPartitionClockModel();
    writer.writeComment("Likelihood for tree given sequence data");
    String prefix;
    if (num > 0) {
        prefix = partition.getPrefix() + substModel.getPrefixCodon(num);
    } else {
        prefix = partition.getPrefix();
    }
    String idString = prefix + id;
    Attribute[] attributes;
    if (tag.equals(MarkovJumpsTreeLikelihoodParser.MARKOV_JUMP_TREE_LIKELIHOOD)) {
        AncestralStatesComponentOptions ancestralStatesOptions = (AncestralStatesComponentOptions) options.getComponentOptions(AncestralStatesComponentOptions.class);
        boolean saveCompleteHistory = ancestralStatesOptions.isCompleteHistoryLogging(partition);
        attributes = new Attribute[] {
                new Attribute.Default<String>(XMLParser.ID, idString),
                new Attribute.Default<Boolean>(TreeLikelihoodParser.USE_AMBIGUITIES, substModel.isUseAmbiguitiesTreeLikelihood()),
                new Attribute.Default<Boolean>(MarkovJumpsTreeLikelihoodParser.USE_UNIFORMIZATION, true),
                new Attribute.Default<Integer>(MarkovJumpsTreeLikelihoodParser.NUMBER_OF_SIMULANTS, 1),
                new Attribute.Default<String>(AncestralStateTreeLikelihoodParser.RECONSTRUCTION_TAG_NAME, prefix + AncestralStateTreeLikelihoodParser.RECONSTRUCTION_TAG),
                new Attribute.Default<String>(MarkovJumpsTreeLikelihoodParser.SAVE_HISTORY, saveCompleteHistory ? "true" : "false")
        };
    } else if (tag.equals(TreeLikelihoodParser.ANCESTRAL_TREE_LIKELIHOOD)) {
        attributes = new Attribute[] {
                new Attribute.Default<String>(XMLParser.ID, idString),
                new Attribute.Default<Boolean>(TreeLikelihoodParser.USE_AMBIGUITIES, substModel.isUseAmbiguitiesTreeLikelihood()),
                new Attribute.Default<String>(AncestralStateTreeLikelihoodParser.RECONSTRUCTION_TAG_NAME, prefix + AncestralStateTreeLikelihoodParser.RECONSTRUCTION_TAG)
        };
    } else {
        attributes = new Attribute[] {
                new Attribute.Default<String>(XMLParser.ID, idString),
                new Attribute.Default<Boolean>(TreeLikelihoodParser.USE_AMBIGUITIES, substModel.isUseAmbiguitiesTreeLikelihood())
        };
    }
    writer.writeOpenTag(tag, attributes);
    if (!options.samplePriorOnly) {
        if (num > 0) {
            writeCodonPatternsRef(prefix, num, substModel.getCodonPartitionCount(), writer);
        } else {
            writer.writeIDref(SitePatternsParser.PATTERNS, prefix + SitePatternsParser.PATTERNS);
        }
    } else {
        // We just need to use the dummy alignment
        writer.writeIDref(AlignmentParser.ALIGNMENT, partition.getAlignment().getId());
    }
    writer.writeIDref(TreeModel.TREE_MODEL, treeModel.getPrefix() + TreeModel.TREE_MODEL);
    if (num > 0) {
        writer.writeIDref(GammaSiteModel.SITE_MODEL, substModel.getPrefix(num) + SiteModel.SITE_MODEL);
    } else {
        writer.writeIDref(GammaSiteModel.SITE_MODEL, substModel.getPrefix() + SiteModel.SITE_MODEL);
    }
    ClockModelGenerator.writeBranchRatesModelRef(clockModel, writer);
    generateInsertionPoint(ComponentGenerator.InsertionPoint.IN_TREE_LIKELIHOOD, partition, prefix, writer);
    writer.writeCloseTag(tag);
}
Also used : Attribute(dr.util.Attribute) AncestralStatesComponentOptions(dr.app.beauti.components.ancestralstates.AncestralStatesComponentOptions)
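
To make the prefix handling above concrete, a small illustration (the example names and the "CP2." codon prefix string are assumptions for illustration only, not values taken from beast-mcmc):

// Illustration of the id construction in the method above, with hypothetical example values.
String prefix = (num > 0)
        ? partition.getPrefix() + substModel.getPrefixCodon(num)   // e.g. "gene1." + "CP2."
        : partition.getPrefix();                                    // e.g. "gene1."
String idString = prefix + id;                                      // e.g. "gene1.CP2.treeLikelihood"

So with codon partitioning the likelihood id and reconstruction tag carry the codon-position prefix, while an unpartitioned alignment uses the plain partition prefix.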

Example 4 with AncestralStatesComponentOptions

use of dr.app.beauti.components.ancestralstates.AncestralStatesComponentOptions in project beast-mcmc by beast-dev.

In the class TreeLikelihoodGenerator, the public method writeTreeLikelihood:

/**
 * Write the tree likelihood XML block.
 *
 * @param partition the partition to write the likelihood block for
 * @param writer    the writer
 */
public void writeTreeLikelihood(PartitionData partition, XMLWriter writer) {
    AncestralStatesComponentOptions ancestralStatesOptions = (AncestralStatesComponentOptions) options.getComponentOptions(AncestralStatesComponentOptions.class);
    PartitionSubstitutionModel model = partition.getPartitionSubstitutionModel();
    if (model.isDolloModel()) {
        // DolloComponent will add tree likelihood
        return;
    }
    String treeLikelihoodTag = TreeLikelihoodParser.TREE_LIKELIHOOD;
    if (ancestralStatesOptions.usingAncestralStates(partition)) {
        treeLikelihoodTag = TreeLikelihoodParser.ANCESTRAL_TREE_LIKELIHOOD;
        if (ancestralStatesOptions.isCountingStates(partition)) {
            // dNdS robust counting doesn't use the Markov jumps likelihood as it has its own counting code
            if (!ancestralStatesOptions.dNdSRobustCounting(partition)) {
                treeLikelihoodTag = MarkovJumpsTreeLikelihoodParser.MARKOV_JUMP_TREE_LIKELIHOOD;
            }
        }
    }
    if (model.getDataType().getType() == DataType.NUCLEOTIDES && model.getCodonHeteroPattern() != null) {
        for (int i = 1; i <= model.getCodonPartitionCount(); i++) {
            writeTreeLikelihood(treeLikelihoodTag, TreeLikelihoodParser.TREE_LIKELIHOOD, i, partition, writer);
        }
    } else {
        writeTreeLikelihood(treeLikelihoodTag, TreeLikelihoodParser.TREE_LIKELIHOOD, -1, partition, writer);
    }
}
Also used : AncestralStatesComponentOptions(dr.app.beauti.components.ancestralstates.AncestralStatesComponentOptions)
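
For readability, a flattened restatement of the element-name cascade above (illustration only; the nested form shown is the actual method, and this sketch uses the same accessors and constants):

// Equivalent flattened view of the tag selection.
String tag = TreeLikelihoodParser.TREE_LIKELIHOOD;
if (ancestralStatesOptions.usingAncestralStates(partition)) {
    tag = TreeLikelihoodParser.ANCESTRAL_TREE_LIKELIHOOD;                  // reconstruction only
    if (ancestralStatesOptions.isCountingStates(partition)
            && !ancestralStatesOptions.dNdSRobustCounting(partition)) {
        tag = MarkovJumpsTreeLikelihoodParser.MARKOV_JUMP_TREE_LIKELIHOOD; // state/jump counting
    }
}
// dNdS robust counting keeps the ancestral tree likelihood, since it has its own counting code.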

Example 5 with AncestralStatesComponentOptions

use of dr.app.beauti.components.ancestralstates.AncestralStatesComponentOptions in project beast-mcmc by beast-dev.

In the class PatternListGenerator, the method writePatternList:

/**
 * Writes the pattern lists.
 *
 * @param partition the partition data to write the pattern lists for
 * @param writer    the writer
 */
public void writePatternList(PartitionData partition, XMLWriter writer) {
    writer.writeText("");
    AncestralStatesComponentOptions ancestralStatesOptions = (AncestralStatesComponentOptions) options.getComponentOptions(AncestralStatesComponentOptions.class);
    SequenceErrorModelComponentOptions sequenceErrorOptions = (SequenceErrorModelComponentOptions) options.getComponentOptions(SequenceErrorModelComponentOptions.class);
    PartitionSubstitutionModel model = partition.getPartitionSubstitutionModel();
    String codonHeteroPattern = model.getCodonHeteroPattern();
    int partitionCount = model.getCodonPartitionCount();
    // Note: despite its name, this flag is true only when the partition is NOT using
    // ancestral-state reconstruction or a sequence error model, i.e. when the unique and
    // strip pattern options can safely be applied.
    boolean isAncestralStatesModel = (!ancestralStatesOptions.usingAncestralStates(partition)
            && !sequenceErrorOptions.usingSequenceErrorModel(partition));
    boolean isCovarionModel = model.getDataType().getType() == DataType.COVARION
            && model.getBinarySubstitutionModel() == BinaryModelType.BIN_COVARION;
    boolean unique = isAncestralStatesModel || isCovarionModel;
    boolean strip = isAncestralStatesModel || isCovarionModel;
    if (model.getDataType().getType() == DataType.NUCLEOTIDES && codonHeteroPattern != null && partitionCount > 1) {
        if (codonHeteroPattern.equals("112")) {
            writer.writeComment("The " + (unique ? "unique " : "") + "patterns for codon positions 1 & 2");
            writer.writeOpenTag(MergePatternsParser.MERGE_PATTERNS, new Attribute[] {
                    new Attribute.Default<String>(XMLParser.ID, partition.getPrefix() + model.getPrefixCodon(1) + SitePatternsParser.PATTERNS)
            });
            writePatternList(partition, 0, 3, null, unique, strip, writer);
            writePatternList(partition, 1, 3, null, unique, strip, writer);
            writer.writeCloseTag(MergePatternsParser.MERGE_PATTERNS);
            writer.writeComment("The " + (unique ? "unique " : "") + "patterns for codon position 3");
            writePatternList(partition, 2, 3, model.getPrefixCodon(2), unique, strip, writer);
        } else {
            // pattern is 123
            for (int i = 1; i <= 3; i++) {
                writer.writeComment("The " + (unique ? "unique " : "") + "patterns for codon position " + i);
                writePatternList(partition, i - 1, 3, model.getPrefixCodon(i), unique, strip, writer);
            }
        }
    // END: pattern is 123
    } else {
        writePatternList(partition, 0, 1, "", unique, strip, writer);
    }
}
Also used : Attribute(dr.util.Attribute) AncestralStatesComponentOptions(dr.app.beauti.components.ancestralstates.AncestralStatesComponentOptions) PartitionSubstitutionModel(dr.app.beauti.options.PartitionSubstitutionModel) SequenceErrorModelComponentOptions(dr.app.beauti.components.sequenceerror.SequenceErrorModelComponentOptions)
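
A hedged reading of the unique/strip flags above (an assumption about intent, not verbatim source): patterns are compressed and stripped only when neither ancestral-state reconstruction nor a sequence error model needs the full per-site columns, or when the binary covarion model is in use. An equivalent formulation with a more descriptive intermediate name:

// Equivalent to the flags computed above, renamed for clarity (illustration only).
boolean perSiteInfoNeeded = ancestralStatesOptions.usingAncestralStates(partition)
        || sequenceErrorOptions.usingSequenceErrorModel(partition);
boolean unique = !perSiteInfoNeeded || isCovarionModel;
boolean strip  = !perSiteInfoNeeded || isCovarionModel;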

Aggregations

AncestralStatesComponentOptions (dr.app.beauti.components.ancestralstates.AncestralStatesComponentOptions): 7
Attribute (dr.util.Attribute): 3
ContinuousComponentOptions (dr.app.beauti.components.continuous.ContinuousComponentOptions): 1
DiscreteTraitsComponentOptions (dr.app.beauti.components.discrete.DiscreteTraitsComponentOptions): 1
SequenceErrorModelComponentOptions (dr.app.beauti.components.sequenceerror.SequenceErrorModelComponentOptions): 1
PartitionSubstitutionModel (dr.app.beauti.options.PartitionSubstitutionModel): 1
XMLWriter (dr.app.beauti.util.XMLWriter): 1
Alignment (dr.evolution.alignment.Alignment): 1
DataType (dr.evolution.datatype.DataType): 1
Microsatellite (dr.evolution.datatype.Microsatellite): 1
Taxa (dr.evolution.util.Taxa): 1
Taxon (dr.evolution.util.Taxon): 1
TaxonList (dr.evolution.util.TaxonList): 1
Pair (dr.util.Pair): 1
BufferedWriter (java.io.BufferedWriter): 1
FileWriter (java.io.FileWriter): 1
IOException (java.io.IOException): 1