
Example 31 with Taxon

Use of dr.evolution.util.Taxon in project beast-mcmc by beast-dev.

From the class BeastGenerator, method writeTaxa.

/**
     * Generates a taxa block from these BEAST options.
     *
     * @param taxonList the taxon list to write
     * @param writer    the XML writer
     * @throws dr.app.util.Arguments.ArgumentException
     */
private void writeTaxa(TaxonList taxonList, XMLWriter writer) throws Arguments.ArgumentException {
    // -1 (single taxa), 0 (1st gene of multi-taxa)
    writer.writeComment("The list of taxa to be analysed (can also include dates/ages).", "ntax=" + taxonList.getTaxonCount());
    writer.writeOpenTag(TaxaParser.TAXA, new Attribute[] { new Attribute.Default<String>(XMLParser.ID, TaxaParser.TAXA) });
    boolean hasAttr = options.traits.size() > 0;
    boolean firstDate = true;
    for (int i = 0; i < taxonList.getTaxonCount(); i++) {
        Taxon taxon = taxonList.getTaxon(i);
        boolean hasDate = false;
        if (options.clockModelOptions.isTipCalibrated()) {
            hasDate = TaxonList.Utils.hasAttribute(taxonList, i, dr.evolution.util.Date.DATE);
        }
        if (hasDate) {
            dr.evolution.util.Date date = (dr.evolution.util.Date) taxon.getAttribute(dr.evolution.util.Date.DATE);
            if (firstDate) {
                options.units = date.getUnits();
                firstDate = false;
            } else {
                if (options.units != date.getUnits()) {
                    System.err.println("Error: Units in dates do not match.");
                }
            }
        }
        writeTaxon(taxon, hasDate, hasAttr, writer);
    }
    writer.writeCloseTag(TaxaParser.TAXA);
}
Also used: java.util (java.util), Attribute (dr.util.Attribute), Taxon (dr.evolution.util.Taxon)
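
As a point of comparison, here is a minimal sketch, not taken from the BEAST sources, of the same tip-date scan that writeTaxa performs, kept separate from the XML writing. The method name is made up and the units type is assumed to be dr.evolution.util.Units.Type; only the Taxon, TaxonList and Date calls that appear in the example above are used.

/**
 * Sketch only: scan a TaxonList for dated tips and warn if their units differ,
 * mirroring the unit-consistency check inside writeTaxa.
 */
private static void checkTipDateUnits(TaxonList taxonList) {
    // assumed return type of Date.getUnits()
    dr.evolution.util.Units.Type units = null;
    for (int i = 0; i < taxonList.getTaxonCount(); i++) {
        if (!TaxonList.Utils.hasAttribute(taxonList, i, dr.evolution.util.Date.DATE)) {
            // this taxon carries no date attribute
            continue;
        }
        Taxon taxon = taxonList.getTaxon(i);
        dr.evolution.util.Date date =
                (dr.evolution.util.Date) taxon.getAttribute(dr.evolution.util.Date.DATE);
        if (units == null) {
            // remember the units of the first dated taxon
            units = date.getUnits();
        } else if (units != date.getUnits()) {
            System.err.println("Error: Units in dates do not match.");
        }
    }
}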

Example 32 with Taxon

Use of dr.evolution.util.Taxon in project beast-mcmc by beast-dev.

From the class BeastGenerator, method generateXML.

/**
     * Generates a BEAST XML file from these BEAST options.
     *
     * @param file the output XML file
     * @throws GeneratorException
     * @throws java.io.IOException
     * @throws dr.app.util.Arguments.ArgumentException
     */
public void generateXML(File file) throws GeneratorException, IOException, Arguments.ArgumentException {
    XMLWriter writer = new XMLWriter(new BufferedWriter(new FileWriter(file)));
    writer.writeText("<?xml version=\"1.0\" standalone=\"yes\"?>");
    writer.writeComment("Generated by BEAUTi " + version.getVersionString(), "      by Alexei J. Drummond, Andrew Rambaut and Marc A. Suchard", "      Department of Computer Science, University of Auckland and", "      Institute of Evolutionary Biology, University of Edinburgh", "      David Geffen School of Medicine, University of California, Los Angeles", "      http://beast.bio.ed.ac.uk/");
    writer.writeOpenTag("beast");
    writer.writeText("");
    // this gives any added implementations of the 'Component' interface a
    // chance to generate XML at this point in the BEAST file.
    generateInsertionPoint(ComponentGenerator.InsertionPoint.BEFORE_TAXA, writer);
    if (options.originDate != null) {
        // Create a dummy taxon whose job is to specify the origin date
        Taxon originTaxon = new Taxon("originTaxon");
        options.originDate.setUnits(options.units);
        originTaxon.setDate(options.originDate);
        writeTaxon(originTaxon, true, false, writer);
    }
    //++++++++++++++++ Taxon List ++++++++++++++++++
    try {
        // write complete taxon list
        writeTaxa(options.taxonList, writer);
        writer.writeText("");
        if (!options.hasIdenticalTaxa()) {
            // write the taxa used by each data partition's gene tree
            for (AbstractPartitionData partition : options.dataPartitions) {
                if (partition.getTaxonList() != null) {
                    writeDifferentTaxa(partition, writer);
                }
            }
        } else {
            // microsat
            for (PartitionPattern partitionPattern : options.getPartitionPattern()) {
                if (partitionPattern.getTaxonList() != null && partitionPattern.getPatterns().hasMask()) {
                    writeDifferentTaxa(partitionPattern, writer);
                }
            }
        }
    } catch (Exception e) {
        e.printStackTrace(System.err);
        throw new GeneratorException("Taxon list generation has failed:\n" + e.getMessage());
    }
    //++++++++++++++++ Taxon Sets ++++++++++++++++++
    List<Taxa> taxonSets = options.taxonSets;
    try {
        if (taxonSets != null && taxonSets.size() > 0 && !options.useStarBEAST) {
            tmrcaStatisticsGenerator.writeTaxonSets(writer, taxonSets);
        }
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("Taxon sets generation has failed:\n" + e.getMessage());
    }
    generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_TAXA, writer);
    //++++++++++++++++ Alignments ++++++++++++++++++
    List<Alignment> alignments = new ArrayList<Alignment>();
    try {
        for (AbstractPartitionData partition : options.dataPartitions) {
            Alignment alignment = null;
            if (partition instanceof PartitionData) {
                // microsat has no alignment
                alignment = ((PartitionData) partition).getAlignment();
            }
            if (alignment != null && !alignments.contains(alignment)) {
                alignments.add(alignment);
            }
        }
        if (alignments.size() > 0) {
            alignmentGenerator.writeAlignments(alignments, writer);
            generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_SEQUENCES, writer);
        }
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("Alignments generation has failed:\n" + e.getMessage());
    }
    //++++++++++++++++ Pattern Lists ++++++++++++++++++
    try {
        if (!options.samplePriorOnly) {
            List<Microsatellite> microsatList = new ArrayList<Microsatellite>();
            for (AbstractPartitionData partition : options.dataPartitions) {
                // Each data partition has one TreeLikelihood
                if (partition.getTaxonList() != null) {
                    switch(partition.getDataType().getType()) {
                        case DataType.NUCLEOTIDES:
                        case DataType.AMINO_ACIDS:
                        case DataType.CODONS:
                        case DataType.COVARION:
                        case DataType.TWO_STATES:
                            patternListGenerator.writePatternList((PartitionData) partition, writer);
                            break;
                        case DataType.GENERAL:
                        case DataType.CONTINUOUS:
                            // attribute patterns are generated later in this method
                            break;
                        case DataType.MICRO_SAT:
                            // microsat does not have alignment
                            patternListGenerator.writePatternList((PartitionPattern) partition, microsatList, writer);
                            break;
                        default:
                            throw new IllegalArgumentException("Unsupported data type");
                    }
                    writer.writeText("");
                }
            }
        }
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("Pattern lists generation has failed:\n" + e.getMessage());
    }
    generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_PATTERNS, writer);
    //++++++++++++++++ Tree Prior Model ++++++++++++++++++
    try {
        for (PartitionTreePrior prior : options.getPartitionTreePriors()) {
            treePriorGenerator.writeTreePriorModel(prior, writer);
            writer.writeText("");
        }
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("Tree prior model generation has failed:\n" + e.getMessage());
    }
    //++++++++++++++++ Starting Tree ++++++++++++++++++
    try {
        for (PartitionTreeModel model : options.getPartitionTreeModels()) {
            initialTreeGenerator.writeStartingTree(model, writer);
            writer.writeText("");
        }
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("Starting tree generation has failed:\n" + e.getMessage());
    }
    //++++++++++++++++ Tree Model +++++++++++++++++++
    try {
        for (PartitionTreeModel model : options.getPartitionTreeModels()) {
            treeModelGenerator.writeTreeModel(model, writer);
            writer.writeText("");
        }
        generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_TREE_MODEL, writer);
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("Tree model generation has failed:\n" + e.getMessage());
    }
    //++++++++++++++++ Statistics ++++++++++++++++++
    try {
        if (taxonSets != null && taxonSets.size() > 0 && !options.useStarBEAST) {
            tmrcaStatisticsGenerator.writeTMRCAStatistics(writer);
        }
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("TMRCA statistics generation has failed:\n" + e.getMessage());
    }
    //++++++++++++++++ Tree Prior Likelihood ++++++++++++++++++
    try {
        for (PartitionTreeModel model : options.getPartitionTreeModels()) {
            treePriorGenerator.writePriorLikelihood(model, writer);
            writer.writeText("");
        }
        for (PartitionTreePrior prior : options.getPartitionTreePriors()) {
            treePriorGenerator.writeMultiLociTreePriors(prior, writer);
        }
        generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_TREE_PRIOR, writer);
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("Tree prior likelihood generation has failed:\n" + e.getMessage());
    }
    //++++++++++++++++ Branch Rates Model ++++++++++++++++++
    try {
        for (PartitionClockModel model : options.getPartitionClockModels()) {
            clockModelGenerator.writeBranchRatesModel(model, writer);
            writer.writeText("");
        }
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("Branch rates model generation has failed:\n" + e.getMessage());
    }
    //++++++++++++++++ Substitution Model & Site Model ++++++++++++++++++
    try {
        for (PartitionSubstitutionModel model : options.getPartitionSubstitutionModels()) {
            substitutionModelGenerator.writeSubstitutionSiteModel(model, writer);
            writer.writeText("");
        }
        generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_SUBSTITUTION_MODEL, writer);
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("Substitution model or site model generation has failed:\n" + e.getMessage());
    }
    //++++++++++++++++ AllMus parameter ++++++++++++++++++
    try {
        for (PartitionClockModel model : options.getPartitionClockModels()) {
            clockModelGenerator.writeAllMus(model, writer);
        }
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("Clock model generation has failed:\n" + e.getMessage());
    }
    //++++++++++++++++ Site Model ++++++++++++++++++
    //        for (PartitionSubstitutionModel model : options.getPartitionSubstitutionModels()) {
    //            substitutionModelGenerator.writeSiteModel(model, writer); // site model
    //            substitutionModelGenerator.writeAllMus(model, writer); // allMus
    //            writer.writeText("");
    //        }
    generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_SITE_MODEL, writer);
    //++++++++++++++++ Tree Likelihood ++++++++++++++++++
    try {
        AncestralStatesComponentOptions ancestralStatesOptions = (AncestralStatesComponentOptions) options.getComponentOptions(AncestralStatesComponentOptions.class);
        Map<Pair<PartitionTreeModel, DataType>, List<PartitionData>> partitionLists = new HashMap<Pair<PartitionTreeModel, DataType>, List<PartitionData>>();
        List<AbstractPartitionData> otherPartitions = new ArrayList<AbstractPartitionData>();
        for (AbstractPartitionData partition : options.dataPartitions) {
            // generate tree likelihoods for alignment data partitions
            if (partition.getTaxonList() != null) {
                if (partition.getDataType().getType() == DataType.NUCLEOTIDES || partition.getDataType().getType() == DataType.AMINO_ACIDS) {
                    // all sequence partitions of the same type as the first into the list for use in a
                    // MultipartitionTreeDataLikelihood. Must also share the same tree and not be doing
                    // ancestral reconstruction or counting
                    Pair<PartitionTreeModel, DataType> key = new Pair<PartitionTreeModel, DataType>(partition.getPartitionTreeModel(), partition.getDataType());
                    List<PartitionData> partitions = partitionLists.get(key);
                    if (partitions == null) {
                        partitions = new ArrayList<PartitionData>();
                    }
                    partitions.add((PartitionData) partition);
                    partitionLists.put(key, partitions);
                } else {
                    otherPartitions.add(partition);
                }
            }
        }
        for (List<PartitionData> partitions : partitionLists.values()) {
            treeLikelihoodGenerator.writeTreeDataLikelihood(partitions, writer);
            writer.writeText("");
        }
        for (AbstractPartitionData partition : otherPartitions) {
            // generate tree likelihoods for the other data partitions
            if (partition.getTaxonList() != null) {
                if (partition instanceof PartitionData) {
                    treeLikelihoodGenerator.writeTreeLikelihood((PartitionData) partition, writer);
                    writer.writeText("");
                } else if (partition instanceof PartitionPattern) {
                    // microsat
                    treeLikelihoodGenerator.writeTreeLikelihood((PartitionPattern) partition, writer);
                    writer.writeText("");
                } else {
                    throw new GeneratorException("Find unrecognized partition:\n" + partition.getName());
                }
            }
        }
        generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_TREE_LIKELIHOOD, writer);
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("Tree likelihood generation has failed:\n" + e.getMessage());
    }
    //++++++++++++++++ *BEAST ++++++++++++++++++
    if (options.useStarBEAST) {
        //++++++++++++++++ species ++++++++++++++++++
        try {
            starBeastGenerator.writeSpecies(writer);
        } catch (Exception e) {
            e.printStackTrace();
            throw new GeneratorException("*BEAST species section generation has failed:\n" + e.getMessage());
        }
        //++++++++++++++++ Species Sets ++++++++++++++++++
        List<Taxa> speciesSets = options.speciesSets;
        try {
            if (speciesSets != null && speciesSets.size() > 0) {
                tmrcaStatisticsGenerator.writeTaxonSets(writer, speciesSets);
            }
        } catch (Exception e) {
            e.printStackTrace();
            throw new GeneratorException("Species sets generation has failed:\n" + e.getMessage());
        }
        //++++++++++++++++ trees ++++++++++++++++++
        try {
            if (speciesSets != null && speciesSets.size() > 0) {
                starBeastGenerator.writeStartingTreeForCalibration(writer);
            }
            starBeastGenerator.writeSpeciesTree(writer, speciesSets != null && speciesSets.size() > 0);
        } catch (Exception e) {
            e.printStackTrace();
            throw new GeneratorException("*BEAST trees generation has failed:\n" + e.getMessage());
        }
        //++++++++++++++++ Statistics ++++++++++++++++++
        try {
            if (speciesSets != null && speciesSets.size() > 0) {
                tmrcaStatisticsGenerator.writeTMRCAStatistics(writer);
            }
        } catch (Exception e) {
            e.printStackTrace();
            throw new GeneratorException("*BEAST TMRCA statistics generation has failed:\n" + e.getMessage());
        }
        //++++++++++++++++ prior and likelihood ++++++++++++++++++
        try {
            starBeastGenerator.writeSTARBEAST(writer);
        } catch (Exception e) {
            e.printStackTrace();
            throw new GeneratorException("*BEAST trees section generation has failed:\n" + e.getMessage());
        }
    }
    generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_TRAITS, writer);
    //++++++++++++++++ Operators ++++++++++++++++++
    try {
        generateInsertionPoint(ComponentGenerator.InsertionPoint.BEFORE_OPERATORS, writer);
        List<Operator> operators = options.selectOperators();
        operatorsGenerator.writeOperatorSchedule(operators, writer);
        writer.writeText("");
        generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_OPERATORS, writer);
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("Operators generation has failed:\n" + e.getMessage());
    }
    //++++++++++++++++ MCMC ++++++++++++++++++
    try {
        // XMLWriter writer, List<PartitionSubstitutionModel> models,
        writeMCMC(writer);
        writer.writeText("");
        generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_MCMC, writer);
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("MCMC or log generation has failed:\n" + e.getMessage());
    }
    //++++++++++++++++ Reports (timer, trace analysis, EBSP CSV) ++++++++++++++++++
    try {
        writeTimerReport(writer);
        writer.writeText("");
        if (options.performTraceAnalysis) {
            writeTraceAnalysis(writer);
        }
        if (options.generateCSV) {
            for (PartitionTreePrior prior : options.getPartitionTreePriors()) {
                treePriorGenerator.writeEBSPAnalysisToCSVfile(prior, writer);
            }
        }
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("The last part of XML generation has failed:\n" + e.getMessage());
    }
    writer.writeCloseTag("beast");
    writer.flush();
    writer.close();
}
Also used: FileWriter (java.io.FileWriter), XMLWriter (dr.app.beauti.util.XMLWriter), BufferedWriter (java.io.BufferedWriter), Taxa (dr.evolution.util.Taxa), Alignment (dr.evolution.alignment.Alignment), AncestralStatesComponentOptions (dr.app.beauti.components.ancestralstates.AncestralStatesComponentOptions), DataType (dr.evolution.datatype.DataType), TaxonList (dr.evolution.util.TaxonList), Pair (dr.util.Pair), Microsatellite (dr.evolution.datatype.Microsatellite), Taxon (dr.evolution.util.Taxon), IOException (java.io.IOException)
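
The method above is long, but almost every section follows the same XMLWriter pattern: open a tag, optionally with an id attribute, write child elements or idrefs, then close the tag. Below is a minimal, self-contained sketch of that pattern using only the XMLWriter calls that appear in generateXML; the class name, output file name, element-name literals and taxon ids are made up, and the real generator uses the TaxaParser.TAXA, XMLParser.ID and TaxonParser.TAXON constants rather than string literals.

import dr.app.beauti.util.XMLWriter;
import dr.util.Attribute;
import java.io.BufferedWriter;
import java.io.FileWriter;
import java.io.IOException;

public class XmlWriterSketch {

    public static void main(String[] args) throws IOException {
        XMLWriter writer = new XMLWriter(new BufferedWriter(new FileWriter("sketch.xml")));
        writer.writeText("<?xml version=\"1.0\" standalone=\"yes\"?>");
        writer.writeComment("Hand-rolled taxa block, for illustration only");
        writer.writeOpenTag("beast");

        // open <taxa id="taxa">, as writeTaxa does with TaxaParser.TAXA and XMLParser.ID
        writer.writeOpenTag("taxa",
                new Attribute[] { new Attribute.Default<String>("id", "taxa") });
        // reference taxa declared elsewhere by id, as writeDifferentTaxa (Example 33) does
        writer.writeIDref("taxon", "sample_1");
        writer.writeIDref("taxon", "sample_2");
        writer.writeCloseTag("taxa");

        writer.writeCloseTag("beast");
        writer.flush();
        writer.close();
    }
}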

Example 33 with Taxon

Use of dr.evolution.util.Taxon in project beast-mcmc by beast-dev.

From the class BeastGenerator, method writeDifferentTaxa.

public void writeDifferentTaxa(AbstractPartitionData dataPartition, XMLWriter writer) {
    TaxonList taxonList = dataPartition.getTaxonList();
    String name = dataPartition.getPartitionTreeModel().getName();
    writer.writeComment("gene name = " + name + ", ntax= " + taxonList.getTaxonCount());
    writer.writeOpenTag(TaxaParser.TAXA, new Attribute[] { new Attribute.Default<String>(XMLParser.ID, name + "." + TaxaParser.TAXA) });
    for (int i = 0; i < taxonList.getTaxonCount(); i++) {
        if (!(dataPartition instanceof PartitionPattern && ((PartitionPattern) dataPartition).getPatterns().isMasked(i))) {
            final Taxon taxon = taxonList.getTaxon(i);
            writer.writeIDref(TaxonParser.TAXON, taxon.getId());
        }
    }
    writer.writeCloseTag(TaxaParser.TAXA);
}
Also used: Attribute (dr.util.Attribute), TaxonList (dr.evolution.util.TaxonList), Taxon (dr.evolution.util.Taxon)
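
A free-standing variant of the same idea is sketched below; it is not taken from the sources. The helper name and the element-name literals are made up, and a plain boolean array stands in for the PartitionPattern.getPatterns().isMasked(i) test used above.

/**
 * Sketch only: write a partition-specific taxa block that references every
 * unmasked taxon by id, as writeDifferentTaxa does for masked patterns.
 */
static void writeTaxonSubset(TaxonList taxonList, boolean[] masked, String name, XMLWriter writer) {
    writer.writeComment("gene name = " + name + ", ntax = " + taxonList.getTaxonCount());
    writer.writeOpenTag("taxa",
            new Attribute[] { new Attribute.Default<String>("id", name + ".taxa") });
    for (int i = 0; i < taxonList.getTaxonCount(); i++) {
        if (masked != null && masked[i]) {
            // this taxon is masked out of the partition
            continue;
        }
        writer.writeIDref("taxon", taxonList.getTaxon(i).getId());
    }
    writer.writeCloseTag("taxa");
}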

Example 34 with Taxon

Use of dr.evolution.util.Taxon in project beast-mcmc by beast-dev.

From the class DateGuesser, method guessDates.

public void guessDates(TaxonList taxonList, Map<Taxon, String> taxonDateMap) {
    // To avoid duplicating code, add all the taxa into a list and
    // pass it to guessDates(List<Taxon> taxonList)
    List<Taxon> taxa = new ArrayList<Taxon>();
    for (Taxon taxon : taxonList) {
        taxa.add(taxon);
    }
    guessDates(taxa, taxonDateMap);
}
Also used: Taxon (dr.evolution.util.Taxon)
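
A hedged usage sketch follows; the guesser instance, myTaxonList and the way each date string is obtained are all made up, and only the guessDates(List<Taxon>, Map<Taxon, String>) overload delegated to above is relied on.

// Sketch only: collect the taxa and their raw date strings, then delegate to the
// List-based overload, exactly as the TaxonList-based overload above does.
Map<Taxon, String> taxonDateMap = new HashMap<Taxon, String>();
List<Taxon> taxa = new ArrayList<Taxon>();
for (Taxon taxon : myTaxonList) {            // TaxonList is iterable over Taxon, as above
    taxa.add(taxon);
    taxonDateMap.put(taxon, taxon.getId());  // e.g. the date is embedded in the taxon label
}
guesser.guessDates(taxa, taxonDateMap);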

Example 35 with Taxon

Use of dr.evolution.util.Taxon in project beast-mcmc by beast-dev.

From the class TipDateSamplingComponentOptions, method selectOperators.

public void selectOperators(final ModelOptions modelOptions, final List<Operator> ops) {
    if (tipDateSamplingType == TipDateSamplingType.SAMPLE_INDIVIDUALLY || tipDateSamplingType == TipDateSamplingType.SAMPLE_PRECISION) {
        TaxonList taxa = getTaxonSet();
        String description = "Random walk for the age of this tip";
        //OperatorType type = OperatorType.RANDOM_WALK_ABSORBING;
        OperatorType type = OperatorType.UNIFORM;
        if (tipDateSamplingType == TipDateSamplingType.SAMPLE_PRECISION) {
            description = "Uniform sample from precision of age of this tip";
        //type = OperatorType.UNIFORM;
        }
        for (int i = 0; i < taxa.getTaxonCount(); i++) {
            Taxon taxon = taxa.getTaxon(i);
            Operator operator = tipDateOperators.get(taxon);
            if (operator == null) {
                Parameter parameter = getTipDateParameter(taxon);
                //                    operator = new Operator("age(" + taxon.getId() + ")", "", parameter, OperatorType.SCALE, 0.75, 1.0);
                operator = new Operator.Builder("age(" + taxon.getId() + ")", description, parameter, type, 1.0, 1.0).build();
                if (tipDateSamplingType == TipDateSamplingType.SAMPLE_INDIVIDUALLY) {
                    operator.setWeight(2.0);
                }
                tipDateOperators.put(taxon, operator);
            }
            ops.add(operator);
        }
    } else if (tipDateSamplingType == TipDateSamplingType.SAMPLE_JOINT) {
        ops.add(modelOptions.getOperator("treeModel.tipDates"));
    }
}
Also used: TaxonList (dr.evolution.util.TaxonList), Taxon (dr.evolution.util.Taxon), OperatorType (dr.app.beauti.types.OperatorType)
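
The per-tip operator construction can be pulled out of the loop, as in the sketch below. It is not from the sources: the helper name is made up, the Parameter is assumed to be the tip's age parameter obtained elsewhere (getTipDateParameter above), and the two trailing numeric arguments simply mirror the Operator.Builder call in selectOperators.

/**
 * Sketch only: build one uniform tip-date operator for a taxon using the
 * Operator.Builder call shown above.
 */
static Operator buildTipDateOperator(Taxon taxon, Parameter tipAgeParameter) {
    Operator operator = new Operator.Builder(
            "age(" + taxon.getId() + ")",
            "Random walk for the age of this tip",
            tipAgeParameter,
            OperatorType.UNIFORM,
            1.0, 1.0)                   // same numeric arguments as the call above
            .build();
    operator.setWeight(2.0);            // the loop above only does this for SAMPLE_INDIVIDUALLY
    return operator;
}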

Aggregations

Taxon (dr.evolution.util.Taxon): 151
Taxa (dr.evolution.util.Taxa): 31
ArrayList (java.util.ArrayList): 24
TaxonList (dr.evolution.util.TaxonList): 19
NodeRef (dr.evolution.tree.NodeRef): 18
Date (dr.evolution.util.Date): 18
Tree (dr.evolution.tree.Tree): 15
Sequence (dr.evolution.sequence.Sequence): 12
TreeModel (dr.evomodel.tree.TreeModel): 12
Parameter (dr.inference.model.Parameter): 12
Attribute (dr.util.Attribute): 11
SimpleAlignment (dr.evolution.alignment.SimpleAlignment): 10
Patterns (dr.evolution.alignment.Patterns): 9
NewickImporter (dr.evolution.io.NewickImporter): 7
BranchRateModel (dr.evomodel.branchratemodel.BranchRateModel): 7
Microsatellite (dr.evolution.datatype.Microsatellite): 6
IOException (java.io.IOException): 5
HashSet (java.util.HashSet): 5
PatternList (dr.evolution.alignment.PatternList): 4
DataType (dr.evolution.datatype.DataType): 4