
Example 21 with SimpleAlignment

Use of dr.evolution.alignment.SimpleAlignment in the beast-mcmc project by beast-dev.

The class DataLikelihoodTester, method main:

public static void main(String[] args) {
    // turn off logging to avoid screen noise...
    Logger logger = Logger.getLogger("dr");
    logger.setUseParentHandlers(false);
    SimpleAlignment alignment = createAlignment(sequences, Nucleotides.INSTANCE);
    TreeModel treeModel;
    try {
        treeModel = createSpecifiedTree("((human:0.1,chimp:0.1):0.1,gorilla:0.2)");
    } catch (Exception e) {
        throw new RuntimeException("Unable to parse Newick tree");
    }
    System.out.print("\nTest BeagleTreeLikelihood (kappa = 1): ");
    //substitutionModel
    Parameter freqs = new Parameter.Default(new double[] { 0.25, 0.25, 0.25, 0.25 });
    Parameter kappa = new Parameter.Default(HKYParser.KAPPA, 1.0, 0, 100);
    FrequencyModel f = new FrequencyModel(Nucleotides.INSTANCE, freqs);
    HKY hky = new HKY(kappa, f);
    //siteModel
    double alpha = 0.5;
    GammaSiteRateModel siteRateModel = new GammaSiteRateModel("gammaModel", alpha, 4);
    //        GammaSiteRateModel siteRateModel = new GammaSiteRateModel("siteRateModel");
    siteRateModel.setSubstitutionModel(hky);
    Parameter mu = new Parameter.Default(GammaSiteModelParser.SUBSTITUTION_RATE, 1.0, 0, Double.POSITIVE_INFINITY);
    siteRateModel.setRelativeRateParameter(mu);
    FrequencyModel f2 = new FrequencyModel(Nucleotides.INSTANCE, freqs);
    Parameter kappa2 = new Parameter.Default(HKYParser.KAPPA, 10.0, 0, 100);
    HKY hky2 = new HKY(kappa2, f2);
    GammaSiteRateModel siteRateModel2 = new GammaSiteRateModel("gammaModel", alpha, 4);
    siteRateModel2.setSubstitutionModel(hky2);
    siteRateModel2.setRelativeRateParameter(mu);
    //treeLikelihood
    SitePatterns patterns = new SitePatterns(alignment, null, 0, -1, 1, true);
    BranchModel branchModel = new HomogeneousBranchModel(siteRateModel.getSubstitutionModel(), siteRateModel.getSubstitutionModel().getFrequencyModel());
    BranchModel branchModel2 = new HomogeneousBranchModel(siteRateModel2.getSubstitutionModel(), siteRateModel2.getSubstitutionModel().getFrequencyModel());
    BranchRateModel branchRateModel = new DefaultBranchRateModel();
    BeagleTreeLikelihood treeLikelihood = new BeagleTreeLikelihood(patterns, treeModel, branchModel, siteRateModel, branchRateModel, null, false, PartialsRescalingScheme.AUTO, true);
    double logLikelihood = treeLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    System.out.print("\nTest BeagleDataLikelihoodDelegate (kappa = 1): ");
    BeagleDataLikelihoodDelegate dataLikelihoodDelegate = new BeagleDataLikelihoodDelegate(treeModel, patterns, branchModel, siteRateModel, false, PartialsRescalingScheme.NONE, false);
    TreeDataLikelihood treeDataLikelihood = new TreeDataLikelihood(dataLikelihoodDelegate, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    hky.setKappa(5.0);
    System.out.print("\nTest BeagleDataLikelihoodDelegate (kappa = 5): ");
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    System.out.print("\nTest BeagleDataLikelihoodDelegate (kappa = 10): ");
    dataLikelihoodDelegate = new BeagleDataLikelihoodDelegate(treeModel, patterns, branchModel2, siteRateModel2, false, PartialsRescalingScheme.NONE, false);
    treeDataLikelihood = new TreeDataLikelihood(dataLikelihoodDelegate, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    hky2.setKappa(11.0);
    System.out.print("\nTest BeagleDataLikelihoodDelegate (kappa = 11): ");
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    hky.setKappa(1.0);
    hky2.setKappa(10.0);
    MultiPartitionDataLikelihoodDelegate multiPartitionDataLikelihoodDelegate;
    System.out.print("\nTest MultiPartitionDataLikelihoodDelegate 1 partition (kappa = 1):");
    multiPartitionDataLikelihoodDelegate = new MultiPartitionDataLikelihoodDelegate(treeModel, Collections.singletonList((PatternList) patterns), Collections.singletonList((BranchModel) branchModel), Collections.singletonList((SiteRateModel) siteRateModel), true, PartialsRescalingScheme.NONE, false);
    treeDataLikelihood = new TreeDataLikelihood(multiPartitionDataLikelihoodDelegate, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    hky.setKappa(5.0);
    System.out.print("\nTest MultiPartitionDataLikelihoodDelegate 1 partition (kappa = 5):");
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    hky.setKappa(1.0);
    System.out.print("\nTest MultiPartitionDataLikelihoodDelegate 1 partition (kappa = 10):");
    multiPartitionDataLikelihoodDelegate = new MultiPartitionDataLikelihoodDelegate(treeModel, Collections.singletonList((PatternList) patterns), Collections.singletonList((BranchModel) branchModel2), Collections.singletonList((SiteRateModel) siteRateModel2), true, PartialsRescalingScheme.NONE, false);
    treeDataLikelihood = new TreeDataLikelihood(multiPartitionDataLikelihoodDelegate, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    System.out.print("\nTest MultiPartitionDataLikelihoodDelegate 2 partitions (kappa = 1, 10): ");
    List<PatternList> patternLists = new ArrayList<PatternList>();
    patternLists.add(patterns);
    patternLists.add(patterns);
    List<SiteRateModel> siteRateModels = new ArrayList<SiteRateModel>();
    siteRateModels.add(siteRateModel);
    siteRateModels.add(siteRateModel2);
    List<BranchModel> branchModels = new ArrayList<BranchModel>();
    branchModels.add(branchModel);
    branchModels.add(branchModel2);
    multiPartitionDataLikelihoodDelegate = new MultiPartitionDataLikelihoodDelegate(treeModel, patternLists, branchModels, siteRateModels, true, PartialsRescalingScheme.NONE, false);
    treeDataLikelihood = new TreeDataLikelihood(multiPartitionDataLikelihoodDelegate, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + " (NOT OK: this is 2x the logLikelihood of the 2nd partition)\n\n");
    System.exit(0); // NOTE: as written, everything below this early exit is unreachable
    //START ADDITIONAL TEST #1 - Guy Baele
    System.out.println("-- Test #1 SiteRateModels -- ");
    //alpha in partition 1 reject followed by alpha in partition 2 reject
    System.out.print("Adjust alpha in partition 1: ");
    siteRateModel.setAlpha(0.4);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    System.out.print("Return alpha in partition 1 to original value: ");
    siteRateModel.setAlpha(0.5);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + " (i.e. reject: OK)\n");
    System.out.print("Adjust alpha in partition 2: ");
    siteRateModel2.setAlpha(0.35);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    System.out.print("Return alpha in partition 2 to original value: ");
    siteRateModel2.setAlpha(0.5);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + " (i.e. reject: OK)\n");
    //alpha in partition 1 accept followed by alpha in partition 2 accept
    System.out.print("Adjust alpha in partition 1: ");
    siteRateModel.setAlpha(0.4);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    System.out.print("Adjust alpha in partition 2: ");
    siteRateModel2.setAlpha(0.35);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + " (NOT OK: same logLikelihood as only setting alpha in partition 2)");
    System.out.print("Return alpha in partition 1 to original value: ");
    siteRateModel.setAlpha(0.5);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + " (NOT OK: alpha in partition 2 has not been returned to original value yet)");
    System.out.print("Return alpha in partition 2 to original value: ");
    siteRateModel2.setAlpha(0.5);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + "\n");
    //adjusting alphas in both partitions without explicitly calling getLogLikelihood() in between
    System.out.print("Adjust both alphas in partitions 1 and 2: ");
    siteRateModel.setAlpha(0.4);
    siteRateModel2.setAlpha(0.35);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    System.out.print("Return alpha in partition 2 to original value: ");
    siteRateModel2.setAlpha(0.5);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + " (NOT OK: alpha in partition 1 has not been returned to original value yet)");
    System.out.print("Return alpha in partition 1 to original value: ");
    siteRateModel.setAlpha(0.5);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + "\n\n");
    //END ADDITIONAL TEST - Guy Baele
    //START ADDITIONAL TEST #2 - Guy Baele
    System.out.println("-- Test #2 SiteRateModels -- ");
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    //1 siteRateModel shared across 2 partitions
    siteRateModels = new ArrayList<SiteRateModel>();
    siteRateModels.add(siteRateModel);
    multiPartitionDataLikelihoodDelegate = new MultiPartitionDataLikelihoodDelegate(treeModel, patternLists, branchModels, siteRateModels, true, PartialsRescalingScheme.NONE, false);
    treeDataLikelihood = new TreeDataLikelihood(multiPartitionDataLikelihoodDelegate, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + "\n");
    System.out.print("Adjust alpha in shared siteRateModel: ");
    siteRateModel.setAlpha(0.4);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + " (NOT OK: same logLikelihood as only adjusted alpha for partition 1)");
    siteRateModel.setAlpha(0.5);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + "\n\n");
    //END ADDITIONAL TEST - Guy Baele
    //START ADDITIONAL TEST #3 - Guy Baele
    System.out.println("-- Test #3 SiteRateModels -- ");
    siteRateModel = new GammaSiteRateModel("gammaModel");
    siteRateModel.setSubstitutionModel(hky);
    siteRateModel.setRelativeRateParameter(mu);
    siteRateModel2 = new GammaSiteRateModel("gammaModel2");
    siteRateModel2.setSubstitutionModel(hky2);
    siteRateModel2.setRelativeRateParameter(mu);
    siteRateModels = new ArrayList<SiteRateModel>();
    siteRateModels.add(siteRateModel);
    siteRateModels.add(siteRateModel2);
    multiPartitionDataLikelihoodDelegate = new MultiPartitionDataLikelihoodDelegate(treeModel, patternLists, branchModels, siteRateModels, true, PartialsRescalingScheme.NONE, false);
    treeDataLikelihood = new TreeDataLikelihood(multiPartitionDataLikelihoodDelegate, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + "\n");
    System.out.print("Adjust kappa in partition 1: ");
    hky.setKappa(5.0);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + " (NOT OK: logLikelihood has not changed?)");
    System.out.print("Return kappa in partition 1 to original value: ");
    hky.setKappa(1.0);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + "\n");
    System.out.print("Adjust kappa in partition 2: ");
    hky2.setKappa(11.0);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood);
    System.out.print("Return kappa in partition 2 to original value: ");
    hky2.setKappa(10.0);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.println("logLikelihood = " + logLikelihood + " (i.e. reject: OK)\n\n");
    //END ADDITIONAL TEST - Guy Baele
    //START ADDITIONAL TEST #4 - Guy Baele
    System.out.println("-- Test #4 SiteRateModels -- ");
    SimpleAlignment secondAlignment = createAlignment(moreSequences, Nucleotides.INSTANCE);
    SitePatterns morePatterns = new SitePatterns(secondAlignment, null, 0, -1, 1, true);
    BeagleDataLikelihoodDelegate dataLikelihoodDelegateOne = new BeagleDataLikelihoodDelegate(treeModel, patterns, branchModel, siteRateModel, false, PartialsRescalingScheme.NONE, false);
    TreeDataLikelihood treeDataLikelihoodOne = new TreeDataLikelihood(dataLikelihoodDelegateOne, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihoodOne.getLogLikelihood();
    System.out.println("\nBeagleDataLikelihoodDelegate logLikelihood partition 1 (kappa = 1) = " + logLikelihood);
    hky.setKappa(10.0);
    logLikelihood = treeDataLikelihoodOne.getLogLikelihood();
    System.out.println("BeagleDataLikelihoodDelegate logLikelihood partition 1 (kappa = 10) = " + logLikelihood);
    hky.setKappa(1.0);
    BeagleDataLikelihoodDelegate dataLikelihoodDelegateTwo = new BeagleDataLikelihoodDelegate(treeModel, morePatterns, branchModel2, siteRateModel2, false, PartialsRescalingScheme.NONE, false);
    TreeDataLikelihood treeDataLikelihoodTwo = new TreeDataLikelihood(dataLikelihoodDelegateTwo, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihoodTwo.getLogLikelihood();
    System.out.println("BeagleDataLikelihoodDelegate logLikelihood partition 2 (kappa = 10) = " + logLikelihood + "\n");
    multiPartitionDataLikelihoodDelegate = new MultiPartitionDataLikelihoodDelegate(treeModel, Collections.singletonList((PatternList) patterns), Collections.singletonList((BranchModel) branchModel), Collections.singletonList((SiteRateModel) siteRateModel), true, PartialsRescalingScheme.NONE, false);
    treeDataLikelihood = new TreeDataLikelihood(multiPartitionDataLikelihoodDelegate, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.print("Test MultiPartitionDataLikelihoodDelegate 1st partition (kappa = 1):");
    System.out.println("logLikelihood = " + logLikelihood);
    hky.setKappa(10.0);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.print("Test MultiPartitionDataLikelihoodDelegate 1st partition (kappa = 10):");
    System.out.println("logLikelihood = " + logLikelihood);
    hky.setKappa(1.0);
    multiPartitionDataLikelihoodDelegate = new MultiPartitionDataLikelihoodDelegate(treeModel, Collections.singletonList((PatternList) morePatterns), Collections.singletonList((BranchModel) branchModel2), Collections.singletonList((SiteRateModel) siteRateModel2), true, PartialsRescalingScheme.NONE, false);
    treeDataLikelihood = new TreeDataLikelihood(multiPartitionDataLikelihoodDelegate, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.print("Test MultiPartitionDataLikelihoodDelegate 2nd partition (kappa = 10):");
    System.out.println("logLikelihood = " + logLikelihood + "\n");
    patternLists = new ArrayList<PatternList>();
    patternLists.add(patterns);
    patternLists.add(morePatterns);
    multiPartitionDataLikelihoodDelegate = new MultiPartitionDataLikelihoodDelegate(treeModel, patternLists, branchModels, siteRateModels, true, PartialsRescalingScheme.NONE, false);
    treeDataLikelihood = new TreeDataLikelihood(multiPartitionDataLikelihoodDelegate, treeModel, branchRateModel);
    logLikelihood = treeDataLikelihood.getLogLikelihood();
    System.out.print("Test MultiPartitionDataLikelihoodDelegate 2 partitions (kappa = 1, 10): ");
    System.out.println("logLikelihood = " + logLikelihood + " (NOT OK: should be the sum of both separate logLikelihoods)\nKappa value of partition 2 is used to compute logLikelihood for both partitions?");
//END ADDITIONAL TEST - Guy Baele
}
Also used: FrequencyModel (dr.evomodel.substmodel.FrequencyModel), ArrayList (java.util.ArrayList), PatternList (dr.evolution.alignment.PatternList), HomogeneousBranchModel (dr.evomodel.branchmodel.HomogeneousBranchModel), BranchModel (dr.evomodel.branchmodel.BranchModel), DefaultBranchRateModel (dr.evomodel.branchratemodel.DefaultBranchRateModel), SiteRateModel (dr.evomodel.siteratemodel.SiteRateModel), GammaSiteRateModel (dr.evomodel.siteratemodel.GammaSiteRateModel), TreeModel (dr.evomodel.tree.TreeModel), SimpleAlignment (dr.evolution.alignment.SimpleAlignment), SitePatterns (dr.evolution.alignment.SitePatterns), BeagleTreeLikelihood (dr.evomodel.treelikelihood.BeagleTreeLikelihood), BranchRateModel (dr.evomodel.branchratemodel.BranchRateModel), HKY (dr.evomodel.substmodel.nucleotide.HKY), Parameter (dr.inference.model.Parameter)
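The main method above relies on two helpers that are not shown in this snippet: createAlignment (shown in Example 22) and createSpecifiedTree. A minimal sketch of what createSpecifiedTree might look like, assuming the NewickImporter(String) and TreeModel(Tree) constructors available in beast-mcmc; this is not the project's actual implementation:

import dr.evolution.io.NewickImporter;
import dr.evolution.tree.Tree;
import dr.evomodel.tree.TreeModel;

// Hypothetical reconstruction of the createSpecifiedTree helper used in main().
// Assumes dr.evolution.io.NewickImporter accepts the Newick string directly.
private static TreeModel createSpecifiedTree(String newick) throws Exception {
    NewickImporter importer = new NewickImporter(newick);
    Tree tree = importer.importTree(null); // no taxon list; taxa are read from the tree itself
    return new TreeModel(tree);
}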

Example 22 with SimpleAlignment

Use of dr.evolution.alignment.SimpleAlignment in the beast-mcmc project by beast-dev.

The class DataLikelihoodTester2, method createAlignment:

private static SimpleAlignment createAlignment(Object[][] taxa_sequence, DataType dataType) {
    SimpleAlignment alignment = new SimpleAlignment();
    alignment.setDataType(dataType);
    // taxa_sequence layout: row 0 = taxon names, row 1 = aligned sequences,
    // optional row 2 = sampling dates, row 3 = a single backwards-time flag
    Taxon[] taxa = new Taxon[taxa_sequence[0].length];
    System.out.println("Taxon len = " + taxa_sequence[0].length);
    System.out.println("Alignment len = " + taxa_sequence[1].length);
    if (taxa_sequence.length > 2)
        System.out.println("Date len = " + taxa_sequence[2].length);
    for (int i = 0; i < taxa_sequence[0].length; i++) {
        taxa[i] = new Taxon(taxa_sequence[0][i].toString());
        if (taxa_sequence.length > 2) {
            Date date = new Date((Double) taxa_sequence[2][i], Units.Type.YEARS, (Boolean) taxa_sequence[3][0]);
            taxa[i].setDate(date);
        }
        //taxonList.addTaxon(taxon);
        Sequence sequence = new Sequence(taxa_sequence[1][i].toString());
        sequence.setTaxon(taxa[i]);
        sequence.setDataType(dataType);
        alignment.addSequence(sequence);
    }
    return alignment;
}
Also used: SimpleAlignment (dr.evolution.alignment.SimpleAlignment), Taxon (dr.evolution.util.Taxon), Sequence (dr.evolution.sequence.Sequence), Date (dr.evolution.util.Date)
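The Object[][] argument packs taxon names, sequences, and optional sampling dates into parallel rows. A hedged illustration of the expected layout (taxon names, sequences, and dates below are made up; the trailing Boolean is presumably the "dates measured backwards in time" flag passed to the Date constructor):

// Illustrative test data in the layout createAlignment expects.
Object[][] taxaSequence = {
    { "human", "chimp", "gorilla" },                                   // row 0: taxon names
    { "ACGTACGTACGTACGTA", "ACGTACGTACGTTCGTA", "ACGAACGTACGTACGTA" }, // row 1: aligned sequences
    { 0.0, 0.0, 0.0 },                                                 // row 2: sampling dates (optional)
    { Boolean.FALSE }                                                  // row 3: backwards-time flag (optional)
};
SimpleAlignment alignment = createAlignment(taxaSequence, Nucleotides.INSTANCE);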

Example 23 with SimpleAlignment

Use of dr.evolution.alignment.SimpleAlignment in the beast-mcmc project by beast-dev.

The class BeagleSequenceSimulatorConsoleApp, method simulate:

// END: constructor
public void simulate(String[] args) {
    try {
        // /////////////////////////////////
        // ---SPLIT PARTITION ARGUMENTS---//
        // /////////////////////////////////
        int from = 0;
        int to = 0;
        ArrayList<String[]> argsList = new ArrayList<String[]>();
        for (String arg : args) {
            if (arg.equalsIgnoreCase(SPLIT_PARTITION)) {
                argsList.add(Arrays.copyOfRange(args, from, to));
                from = to + 1;
            }
            // END: split check
            to++;
        }
        if (args[0].contains(HELP)) {
            gracefullyExit(null);
        } else if (argsList.size() == 0) {
            gracefullyExit("Empty or incorrect arguments list.");
        }
        // END: failed split check
        String[] leftoverArguments = Arrays.copyOfRange(args, from, args.length);
        if (leftoverArguments.length > 2) {
            gracefullyExit("Unrecognized option " + leftoverArguments[2]);
        }
        ArrayList<Partition> partitionsList = new ArrayList<Partition>();
        for (String[] partitionArgs : argsList) {
            // /////////////
            // ---PARSE---//
            // /////////////
            arguments.parseArguments(partitionArgs);
            // ///////////////////
            // ---INTERROGATE---//
            // ///////////////////
            PartitionData data = new PartitionData();
            String option = null;
            double[] values = null;
            if (partitionArgs.length == 0 || arguments.hasOption(HELP)) {
                gracefullyExit(null);
            }
            // Tree / Taxa
            if (arguments.hasOption(TREE_FILE)) {
                File treeFile = new File(arguments.getStringOption(TREE_FILE));
                Tree tree = Utils.importTreeFromFile(treeFile);
                data.record = new TreesTableRecord(treeFile.getName(), tree);
            } else if (arguments.hasOption(TAXA_SET)) {
                File taxaFile = new File(arguments.getStringOption(TAXA_SET));
                Taxa taxa = Utils.importTaxaFromFile(taxaFile);
                data.record = new TreesTableRecord(taxaFile.getName(), taxa);
            } else {
                throw new RuntimeException("Tree file / Taxa set not specified.");
            }
            // Demographic Model
            if (arguments.hasOption(DEMOGRAPHIC_MODEL)) {
                option = arguments.getStringOption(DEMOGRAPHIC_MODEL);
                if (option.equalsIgnoreCase(NO_DEMOGRAPHIC_MODEL)) {
                    int index = 0;
                    data.demographicModelIndex = index;
                } else if (option.equalsIgnoreCase(CONSTANT_POPULATION)) {
                    int index = 1;
                    data.demographicModelIndex = index;
                    if (arguments.hasOption(CONSTANT_POPULATION_PARAMETER_VALUES)) {
                        values = arguments.getRealArrayOption(CONSTANT_POPULATION_PARAMETER_VALUES);
                        parseDemographicValues(index, values, data);
                    }
                } else if (option.equalsIgnoreCase(EXPONENTIAL_GROWTH_RATE)) {
                    int index = 2;
                    data.demographicModelIndex = index;
                    if (arguments.hasOption(EXPONENTIAL_GROWTH_RATE_PARAMETER_VALUES)) {
                        values = arguments.getRealArrayOption(EXPONENTIAL_GROWTH_RATE_PARAMETER_VALUES);
                        parseDemographicValues(index, values, data);
                    }
                } else if (option.equalsIgnoreCase(EXPONENTIAL_DOUBLING_TIME)) {
                    int index = 3;
                    data.demographicModelIndex = index;
                    if (arguments.hasOption(EXPONENTIAL_GROWTH_DOUBLING_TIME_PARAMETER_VALUES)) {
                        values = arguments.getRealArrayOption(EXPONENTIAL_GROWTH_DOUBLING_TIME_PARAMETER_VALUES);
                        parseDemographicValues(index, values, data);
                    }
                } else {
                    gracefullyExit("Unrecognized option.");
                }
            }
            // Branch Substitution Model
            if (arguments.hasOption(BRANCH_SUBSTITUTION_MODEL)) {
                option = arguments.getStringOption(BRANCH_SUBSTITUTION_MODEL);
                if (option.equalsIgnoreCase(HKY)) {
                    int index = 0;
                    data.substitutionModelIndex = index;
                    if (arguments.hasOption(HKY_SUBSTITUTION_PARAMETER_VALUES)) {
                        values = arguments.getRealArrayOption(HKY_SUBSTITUTION_PARAMETER_VALUES);
                        parseSubstitutionValues(index, values, data);
                    }
                } else if (option.equalsIgnoreCase(GTR)) {
                    int index = 1;
                    data.substitutionModelIndex = index;
                    if (arguments.hasOption(GTR_SUBSTITUTION_PARAMETER_VALUES)) {
                        values = arguments.getRealArrayOption(GTR_SUBSTITUTION_PARAMETER_VALUES);
                        parseSubstitutionValues(index, values, data);
                    }
                } else if (option.equalsIgnoreCase(TN93)) {
                    int index = 2;
                    data.substitutionModelIndex = index;
                    if (arguments.hasOption(TN93_SUBSTITUTION_PARAMETER_VALUES)) {
                        values = arguments.getRealArrayOption(TN93_SUBSTITUTION_PARAMETER_VALUES);
                        parseSubstitutionValues(index, values, data);
                    }
                } else if (option.equalsIgnoreCase(GY94_CODON_MODEL)) {
                    int index = 3;
                    data.substitutionModelIndex = index;
                    if (arguments.hasOption(GY94_SUBSTITUTION_PARAMETER_VALUES)) {
                        values = arguments.getRealArrayOption(GY94_SUBSTITUTION_PARAMETER_VALUES);
                        parseSubstitutionValues(index, values, data);
                    }
                } else {
                    gracefullyExit("Unrecognized option.");
                }
            }
            // Site Rate Model
            if (arguments.hasOption(SITE_RATE_MODEL)) {
                option = arguments.getStringOption(SITE_RATE_MODEL);
                if (option.equalsIgnoreCase(NO_SITE_RATE_MODEL)) {
                    int index = 0;
                    data.siteRateModelIndex = index;
                } else if (option.equalsIgnoreCase(GAMMA_SITE_RATE_MODEL)) {
                    int index = 1;
                    data.siteRateModelIndex = index;
                    if (arguments.hasOption(GAMMA_SITE_RATE_MODEL_PARAMETER_VALUES)) {
                        values = arguments.getRealArrayOption(GAMMA_SITE_RATE_MODEL_PARAMETER_VALUES);
                        parseSiteRateValues(index, values, data);
                    }
                } else {
                    gracefullyExit("Unrecognized option.");
                }
            }
            // Clock Rate Model
            if (arguments.hasOption(CLOCK_RATE_MODEL)) {
                option = arguments.getStringOption(CLOCK_RATE_MODEL);
                if (option.equalsIgnoreCase(STRICT_CLOCK)) {
                    int index = 0;
                    data.clockModelIndex = index;
                    if (arguments.hasOption(STRICT_CLOCK_PARAMETER_VALUES)) {
                        values = arguments.getRealArrayOption(STRICT_CLOCK_PARAMETER_VALUES);
                        parseClockValues(index, values, data);
                    }
                } else if (option.equalsIgnoreCase(LOGNORMAL_RELAXED_CLOCK)) {
                    int index = 1;
                    data.clockModelIndex = index;
                    if (arguments.hasOption(LOGNORMAL_RELAXED_CLOCK_PARAMETER_VALUES)) {
                        values = arguments.getRealArrayOption(LOGNORMAL_RELAXED_CLOCK_PARAMETER_VALUES);
                        parseClockValues(index, values, data);
                    }
                    // set to true/false
                    if (arguments.hasOption(LRC_PARAMETERS_IN_REAL_SPACE)) {
                        data.lrcParametersInRealSpace = arguments.getStringOption(LRC_PARAMETERS_IN_REAL_SPACE).equalsIgnoreCase(TRUE);
                    }
                } else if (option.equalsIgnoreCase(EXPONENTIAL_RELAXED_CLOCK)) {
                    int index = 2;
                    data.clockModelIndex = index;
                    if (arguments.hasOption(EXPONENTIAL_RELAXED_CLOCK_PARAMETER_VALUES)) {
                        values = arguments.getRealArrayOption(EXPONENTIAL_RELAXED_CLOCK_PARAMETER_VALUES);
                        parseClockValues(index, values, data);
                    }
                } else if (option.equalsIgnoreCase(INVERSE_GAUSSIAN_RELAXED_CLOCK)) {
                    int index = 3;
                    data.clockModelIndex = index;
                    if (arguments.hasOption(INVERSE_GAUSSIAN_RELAXED_CLOCK_PARAMETER_VALUES)) {
                        values = arguments.getRealArrayOption(INVERSE_GAUSSIAN_RELAXED_CLOCK_PARAMETER_VALUES);
                        parseClockValues(index, values, data);
                    }
                } else {
                    gracefullyExit("Unrecognized option.");
                }
            }
            // Frequency Model
            if (arguments.hasOption(BASE_FREQUENCIES)) {
                option = arguments.getStringOption(BASE_FREQUENCIES);
                if (option.equalsIgnoreCase(NUCLEOTIDE_FREQUENCIES)) {
                    int index = 0;
                    data.frequencyModelIndex = index;
                    if (arguments.hasOption(NUCLEOTIDE_FREQUENCY_PARAMETER_VALUES)) {
                        values = arguments.getRealArrayOption(NUCLEOTIDE_FREQUENCY_PARAMETER_VALUES);
                        parseFrequencyValues(index, values, data);
                    }
                } else if (option.equalsIgnoreCase(CODON_FREQUENCIES)) {
                    int index = 1;
                    data.frequencyModelIndex = index;
                    if (arguments.hasOption(CODON_FREQUENCY_PARAMETER_VALUES)) {
                        values = arguments.getRealArrayOption(CODON_FREQUENCY_PARAMETER_VALUES);
                        parseFrequencyValues(index, values, data);
                    }
                } else {
                    gracefullyExit("Unrecognized option.");
                }
            }
            if (arguments.hasOption(FROM)) {
                data.from = arguments.getIntegerOption(FROM);
            }
            if (arguments.hasOption(TO)) {
                data.to = arguments.getIntegerOption(TO);
            }
            if (arguments.hasOption(EVERY)) {
                data.every = arguments.getIntegerOption(EVERY);
            }
            // END: EVERY option check
            // create partition
            Partition partition = new Partition(
                    data.createTreeModel(),
                    data.createBranchModel(),
                    data.createSiteRateModel(),
                    data.createClockRateModel(),
                    data.createFrequencyModel(),
                    data.from - 1,  // from
                    data.to - 1,    // to
                    data.every);    // every
            if (arguments.hasOption(ROOT_SEQUENCE)) {
                data.ancestralSequenceString = arguments.getStringOption(ROOT_SEQUENCE);
                partition.setRootSequence(data.createAncestralSequence());
            }
            // END: ANCESTRAL_SEQUENCE option check
            partitionsList.add(partition);
            //                System.err.println(data.from);
            dataList.add(data);
        }
        if (this.VERBOSE) {
            //        		System.out.println(dataList.get(0).from + " " + dataList.get(1).from);
            Utils.printPartitionDataList(dataList);
            System.out.println();
        }
        SimpleAlignment alignment = new SimpleAlignment();
        String outputFile = null;
        if (leftoverArguments.length > 0) {
            outputFile = leftoverArguments[0];
            String[] file = outputFile.split("\\.", 2);
            if (file.length > 1) {
                String extension = file[1];
                // TODO Delegate here to enum-class; switches are not generic
                if (extension.equalsIgnoreCase(SimpleAlignment.OutputType.FASTA.getText()) || extension.equalsIgnoreCase("fst")) {
                    dataList.outputFormat = SimpleAlignment.OutputType.FASTA;
                } else if (extension.equalsIgnoreCase(SimpleAlignment.OutputType.NEXUS.getText()) || extension.equalsIgnoreCase("nxs")) {
                    dataList.outputFormat = SimpleAlignment.OutputType.NEXUS;
                } else if (extension.equalsIgnoreCase(SimpleAlignment.OutputType.XML.getText())) {
                    dataList.outputFormat = SimpleAlignment.OutputType.XML;
                } else {
                    dataList.outputFormat = SimpleAlignment.OutputType.FASTA;
                }
            //END: extension check
            } else {
                outputFile = file[0] + "." + SimpleAlignment.OutputType.FASTA.toString().toLowerCase();
                dataList.outputFormat = SimpleAlignment.OutputType.FASTA;
            }
        //END: output file name check
        }
        if (leftoverArguments.length > 1) {
            dataList.startingSeed = Long.parseLong(leftoverArguments[1]);
            dataList.setSeed = true;
        }
        if (dataList.setSeed) {
            MathUtils.setSeed(dataList.startingSeed);
        }
        if (leftoverArguments.length > 2) {
            dataList.outputAncestralSequences = Boolean.parseBoolean(leftoverArguments[2]);
        }
        if (leftoverArguments.length > 3) {
            dataList.useParallel = Boolean.parseBoolean(leftoverArguments[3]);
        }
        BeagleSequenceSimulator beagleSequenceSimulator = new BeagleSequenceSimulator(partitionsList);
        alignment = beagleSequenceSimulator.simulate(dataList.useParallel, dataList.outputAncestralSequences);
        alignment.setOutputType(dataList.outputFormat);
        PrintWriter writer = new PrintWriter(new FileWriter(outputFile));
        writer.println(alignment.toString());
        writer.close();
    } catch (ArgumentException | IOException | ImportException e) {
        System.out.println();
        printUsage(arguments);
        System.out.println();
        System.out.println(e.getMessage());
        e.printStackTrace();
        System.exit(1);
    }
// END: try-catch block
}
Also used: Partition (dr.app.beagle.tools.Partition), FileWriter (java.io.FileWriter), ArrayList (java.util.ArrayList), IOException (java.io.IOException), BeagleSequenceSimulator (dr.app.beagle.tools.BeagleSequenceSimulator), ImportException (dr.evolution.io.Importer.ImportException), Taxa (dr.evolution.util.Taxa), SimpleAlignment (dr.evolution.alignment.SimpleAlignment), Tree (dr.evolution.tree.Tree), ArgumentException (dr.app.util.Arguments.ArgumentException), File (java.io.File), PrintWriter (java.io.PrintWriter)
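The argument handling at the top of simulate() cuts args into per-partition chunks on a separator token and keeps everything after the last separator as global options (output file, seed, flags). That splitting step can be sketched in isolation; the marker string here is only a stand-in for the SPLIT_PARTITION constant defined elsewhere in the class:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

// Illustrative only: split an argument array into per-partition chunks on a marker token.
static List<String[]> splitPartitions(String[] args, String marker) {
    List<String[]> chunks = new ArrayList<String[]>();
    int from = 0;
    for (int to = 0; to < args.length; to++) {
        if (args[to].equalsIgnoreCase(marker)) {
            chunks.add(Arrays.copyOfRange(args, from, to)); // the marker itself is excluded
            from = to + 1;
        }
    }
    // as in the original, Arrays.copyOfRange(args, from, args.length) would then give the
    // leftover global arguments (output file name, starting seed, boolean flags)
    return chunks;
}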

Example 24 with SimpleAlignment

Use of dr.evolution.alignment.SimpleAlignment in the beast-mcmc project by beast-dev.

The class RecomboGen, method generate:

public Alignment generate() {
    List<Node> tips = new ArrayList<Node>();
    // add all the tips
    for (int i = 0; i < taxa.getTaxonCount(); i++) {
        tips.add(new Node(taxa.getTaxon(i)));
    }
    Collections.sort(tips);
    List<Node> unusedTips = new ArrayList<Node>(tips);
    double time = 0;
    List<Node> nodes = new ArrayList<Node>();
    // Add any tips with zero sampling time.
    List<Node> nodesToAdd = new ArrayList<Node>();
    for (Node tip : unusedTips) {
        if (tip.getTime() == 0.0) {
            nodesToAdd.add(tip);
            System.out.println("Time: " + time + " adding " + tip.getTaxon());
        }
    }
    nodes.addAll(nodesToAdd);
    unusedTips.removeAll(nodesToAdd);
    do {
        boolean tipsAdded;
        do {
            tipsAdded = false;
            final int lineageCount = nodes.size();
            // get time to next event...
            // create unit uniform random variate
            final double U = MathUtils.nextDouble();
            final double interval = -Math.log(U) / (lineageCount * recombinationRate);
            final double nextTime = time + interval;
            // Add any tips for which we have reached their sampling time.
            nodesToAdd.clear();
            for (Node tip : unusedTips) {
                if (tip.getTime() <= nextTime) {
                    nodesToAdd.add(tip);
                    tipsAdded = true;
                    System.out.println("Time: " + tip.getTime() + " adding " + tip.getTaxon());
                    // reset the current time (the tips are sorted in time order
                    // so this will be the oldest added tip).
                    time = tip.getTime();
                }
            }
            nodes.addAll(nodesToAdd);
            unusedTips.removeAll(nodesToAdd);
            if (!tipsAdded) {
                time = nextTime;
            }
        } while (tipsAdded); // keep looping while a pass added new tips; stop once none are added
        int r = MathUtils.nextInt(nodes.size());
        Node node = nodes.get(r);
        // create two new parent nodes
        Node parent1 = new Node(node, time);
        Node parent2 = new Node(node, time);
        // select a break point in interval [1, length - 2] on
        // a zero-indexed line.
        int breakPoint = MathUtils.nextInt(length - 2) + 1;
        // setup child node with parents and break point
        node.setParent1(parent1);
        node.setParent2(parent2);
        node.setBreakPoint(breakPoint);
        System.out.println("Time: " + time + " recombining " + (node.getTaxon() != null ? node.getTaxon() : r) + " at breakpoint " + breakPoint);
        nodes.add(parent1);
        nodes.add(parent2);
        nodes.remove(node);
    } while (unusedTips.size() > 0);
    // Construct a taxon set for coalescent simulation of the deep (ancestral) tree
    Taxa treeTaxa = new Taxa();
    int i = 0;
    Map<Node, Taxon> nodeMap = new HashMap<Node, Taxon>();
    for (Node node : nodes) {
        Taxon taxon = new Taxon("Taxon" + i);
        treeTaxa.addTaxon(taxon);
        nodeMap.put(node, taxon);
        i++;
    }
    CoalescentSimulator coalSim = new CoalescentSimulator();
    ConstantPopulation demo = new ConstantPopulation(Units.Type.YEARS);
    demo.setN0(ancestralPopulationSize);
    Tree tree = coalSim.simulateTree(treeTaxa, demo);
    System.out.println("Tree MRCA " + tree.getNodeHeight(tree.getRoot()) + time);
    SequenceSimulator seqSim = new SequenceSimulator(tree, siteModel, branchRateModel, length);
    Alignment ancestralAlignment = seqSim.simulate();
    SimpleAlignment alignment = new SimpleAlignment();
    // generate each tip sequence by tracing its recombinant history through the ancestral alignment
    for (Node tip : tips) {
        String seqString = generateRecombinant(tip, nodeMap, ancestralAlignment);
        Sequence sequence = new Sequence(tip.getTaxon(), seqString);
        //            System.out.println(">" + tip.getTaxon() + "\r" + sequence);
        alignment.addSequence(sequence);
    }
    return alignment;
}
Also used: Sequence (dr.evolution.sequence.Sequence), Alignment (dr.evolution.alignment.Alignment), SimpleAlignment (dr.evolution.alignment.SimpleAlignment)
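generate() leans on a private Node helper (two parents per recombination event plus a break point) that is not part of this snippet. A hypothetical reconstruction, mirroring only the calls made above and not the project's actual class:

// Hypothetical reconstruction of the Node helper used by generate(); the real class may differ.
private static class Node implements Comparable<Node> {

    private final Taxon taxon;      // null for internal (parent) nodes
    private final double time;
    private Node parent1, parent2;  // two parents: a recombination splits the lineage
    private int breakPoint = -1;    // assumed: sites before the break follow parent1, the rest parent2

    Node(Taxon taxon) {
        this.taxon = taxon;
        this.time = taxon.getHeight(); // assumes the sampling time comes from the taxon's date
    }

    Node(Node child, double time) {
        this.taxon = null;
        this.time = time;
    }

    Taxon getTaxon() { return taxon; }
    double getTime() { return time; }
    void setParent1(Node parent1) { this.parent1 = parent1; }
    void setParent2(Node parent2) { this.parent2 = parent2; }
    void setBreakPoint(int breakPoint) { this.breakPoint = breakPoint; }

    public int compareTo(Node other) {
        return Double.compare(time, other.time); // Collections.sort(tips) orders tips by sampling time
    }
}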

Example 25 with SimpleAlignment

Use of dr.evolution.alignment.SimpleAlignment in the beast-mcmc project by beast-dev.

The class SequenceSimulator, method simulate:

//END: sequence2intArray
/**
 * Perform the actual sequence generation.
 * @return alignment containing randomly generated sequences for the leaf nodes of the tree
 */
public Alignment simulate() {
    NodeRef root = m_tree.getRoot();
    double[] categoryProbs = m_siteModel.getCategoryProportions();
    int[] category = new int[m_sequenceLength];
    for (int i = 0; i < m_sequenceLength; i++) {
        category[i] = MathUtils.randomChoicePDF(categoryProbs);
    }
    int[] seq = new int[m_sequenceLength];
    if (has_ancestralSequence) {
        seq = sequence2intArray(ancestralSequence);
    } else {
        FrequencyModel frequencyModel = m_siteModel.getFrequencyModel();
        for (int i = 0; i < m_sequenceLength; i++) {
            seq[i] = MathUtils.randomChoicePDF(frequencyModel.getFrequencies());
        }
    }
    if (DEBUG) {
        synchronized (this) {
            System.out.println();
            System.out.println("root Sequence:");
            Utils.printArray(seq);
        }
    }
    SimpleAlignment alignment = new SimpleAlignment();
    alignment.setReportCountStatistics(false);
    alignment.setDataType(m_siteModel.getFrequencyModel().getDataType());
    traverse(root, seq, category, alignment);
    return alignment;
}
Also used : FrequencyModel(dr.oldevomodel.substmodel.FrequencyModel) NodeRef(dr.evolution.tree.NodeRef) SimpleAlignment(dr.evolution.alignment.SimpleAlignment)
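Combined with Example 24, the usage pattern is: build a tree (for example with CoalescentSimulator), construct the simulator with (tree, siteModel, branchRateModel, length), and collect the returned alignment. A sketch, assuming taxa, siteModel, and branchRateModel are already configured elsewhere:

// Sketch of the call pattern seen in RecomboGen.generate() above; 'taxa', 'siteModel'
// and 'branchRateModel' are assumed to be set up elsewhere.
CoalescentSimulator coalSim = new CoalescentSimulator();
ConstantPopulation demo = new ConstantPopulation(Units.Type.YEARS);
demo.setN0(10.0); // arbitrary effective population size, for illustration only
Tree tree = coalSim.simulateTree(taxa, demo);

SequenceSimulator simulator = new SequenceSimulator(tree, siteModel, branchRateModel, 1000);
Alignment simulated = simulator.simulate();
System.out.println(simulated.getSequenceCount() + " sequences of " + simulated.getSiteCount() + " sites");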

Aggregations (usage counts)

SimpleAlignment (dr.evolution.alignment.SimpleAlignment): 28
Sequence (dr.evolution.sequence.Sequence): 15
Taxon (dr.evolution.util.Taxon): 10
ArrayList (java.util.ArrayList): 9
TreeModel (dr.evomodel.tree.TreeModel): 7
Parameter (dr.inference.model.Parameter): 7
Tree (dr.evolution.tree.Tree): 6
BranchRateModel (dr.evomodel.branchratemodel.BranchRateModel): 6
GammaSiteRateModel (dr.evomodel.siteratemodel.GammaSiteRateModel): 6
BeagleSequenceSimulator (dr.app.beagle.tools.BeagleSequenceSimulator): 5
Partition (dr.app.beagle.tools.Partition): 5
ImportException (dr.evolution.io.Importer.ImportException): 5
FrequencyModel (dr.evomodel.substmodel.FrequencyModel): 5
Date (dr.evolution.util.Date): 4
HomogeneousBranchModel (dr.evomodel.branchmodel.HomogeneousBranchModel): 4
DefaultBranchRateModel (dr.evomodel.branchratemodel.DefaultBranchRateModel): 4
HKY (dr.evomodel.substmodel.nucleotide.HKY): 4
IOException (java.io.IOException): 4
Taxa (dr.evolution.util.Taxa): 3
BranchModel (dr.evomodel.branchmodel.BranchModel): 3