Use of dr.evolution.io.NewickImporter in project beast-mcmc by beast-dev.
The class CoalGenFrame, method importFromFile:
protected void importFromFile(File file) throws IOException, Importer.ImportException {
    BufferedReader reader = new BufferedReader(new FileReader(file));
    String line = reader.readLine();
    Tree tree;
    if (line.toUpperCase().startsWith("#NEXUS")) {
        NexusImporter importer = new NexusImporter(reader);
        tree = importer.importTree(null);
    } else {
        NewickImporter importer = new NewickImporter(reader);
        tree = importer.importTree(null);
    }
    data.taxonList = tree;
    statusLabel.setText(Integer.toString(data.taxonList.getTaxonCount()) + " taxa loaded.");
    reader.close();
    fireTaxaChanged();
}
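For context, a minimal, self-contained sketch of the Newick branch of this method, using the String constructor that the SequenceSimulator example further down also uses (the tree string and class name here are placeholders, not part of beast-mcmc):

import dr.evolution.io.Importer;
import dr.evolution.io.NewickImporter;
import dr.evolution.tree.Tree;
import java.io.IOException;

public class NewickImportSketch {

    public static void main(String[] args) throws IOException, Importer.ImportException {
        // Placeholder Newick string; importTree(null) mirrors the call made in importFromFile above.
        NewickImporter importer = new NewickImporter("((A:1.0,B:1.0):1.0,C:2.0);");
        Tree tree = importer.importTree(null);
        System.out.println(tree.getTaxonCount() + " taxa loaded.");
    }
}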
Use of dr.evolution.io.NewickImporter in project beast-mcmc by beast-dev.
The class Utils, method importTreeFromFile:
public static Tree importTreeFromFile(File file) throws IOException, ImportException {
    Tree tree = null;
    BufferedReader reader = new BufferedReader(new FileReader(file));
    String line = reader.readLine();
    if (line.toUpperCase().startsWith("#NEXUS")) {
        NexusImporter importer = new NexusImporter(reader);
        tree = importer.importTree(null);
    } else {
        NewickImporter importer = new NewickImporter(reader);
        tree = importer.importTree(null);
    }
    reader.close();
    return tree;
}
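A possible call site for this helper, assuming the Utils class shown above is on the classpath (the file name is a placeholder):

import dr.evolution.io.Importer.ImportException;
import dr.evolution.tree.Tree;
import java.io.File;
import java.io.IOException;

public class ImportTreeExample {

    public static void main(String[] args) throws IOException, ImportException {
        // Works for both NEXUS and plain Newick files, thanks to the #NEXUS check above.
        Tree tree = Utils.importTreeFromFile(new File("example.trees"));
        System.out.println("Root height: " + tree.getNodeHeight(tree.getRoot()));
    }
}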
Use of dr.evolution.io.NewickImporter in project beast-mcmc by beast-dev.
The class SeqGen, method main:
public static void main(String[] argv) {
    String treeFileName = argv[0];
    String outputFileStem = argv[1];
    int length = 500;
    double[] frequencies = new double[] { 0.25, 0.25, 0.25, 0.25 };
    double kappa = 10.0;
    double alpha = 0.5;
    double substitutionRate = argv.length < 3 ? 1.0E-3 : Double.parseDouble(argv[2]);
    int categoryCount = argv.length < 4 ? 8 : Integer.parseInt(argv[3]);
    // 1.56E-6;
    double damageRate = argv.length < 5 ? 0 : Double.parseDouble(argv[4]);
    System.out.println("substitutionRate = " + substitutionRate + "; categoryCount = " + categoryCount + "; damageRate = " + damageRate);
    FrequencyModel freqModel = new FrequencyModel(dr.evolution.datatype.Nucleotides.INSTANCE, frequencies);
    HKY hkyModel = new HKY(kappa, freqModel);
    SiteModel siteModel = null;
    if (categoryCount > 1) {
        siteModel = new GammaSiteModel(hkyModel, alpha, categoryCount);
    } else {
        // no rate heterogeneity
        siteModel = new GammaSiteModel(hkyModel);
    }
    List<Tree> trees = new ArrayList<Tree>();
    FileReader reader = null;
    try {
        reader = new FileReader(treeFileName);
        // TreeImporter importer = new NexusImporter(reader);
        TreeImporter importer = new NewickImporter(reader);
        while (importer.hasTree()) {
            Tree tree = importer.importNextTree();
            trees.add(tree);
            System.out.println("tree height = " + tree.getNodeHeight(tree.getRoot()) + "; leaf nodes = " + tree.getExternalNodeCount());
        }
    } catch (FileNotFoundException e) {
        e.printStackTrace();
        return;
    } catch (Importer.ImportException e) {
        e.printStackTrace();
        return;
    } catch (IOException e) {
        e.printStackTrace();
        return;
    }
    SeqGen seqGen = new SeqGen(length, substitutionRate, freqModel, hkyModel, siteModel, damageRate);
    int i = 1;
    for (Tree tree : trees) {
        Alignment alignment = seqGen.simulate(tree);
        FileWriter writer = null;
        try {
            // writer = new FileWriter(outputFileStem + (i < 10 ? "00" : (i < 100 ? "0" : "")) + i + ".nex");
            // NexusExporter exporter = new NexusExporter(writer);
            //
            // exporter.exportAlignment(alignment);
            //
            // writer.close();
            String outputFileName = outputFileStem + "-" + substitutionRate + ".fasta";
            writer = new FileWriter(outputFileName);
            BufferedWriter bf = new BufferedWriter(writer);
            FastaExporter exporter = new FastaExporter(bf);
            exporter.exportSequences(alignment.getSequenceList());
            bf.close();
            System.out.println("Wrote sequence file " + i + ": " + outputFileName);
            i++;
        } catch (IOException e) {
            e.printStackTrace();
            return;
        }
    }
}
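The argument handling above implies the following positional interface. This is a hypothetical invocation for illustration only (file names are placeholders, and SeqGen is assumed to be on the classpath):

public class RunSeqGen {

    public static void main(String[] args) {
        // argv[0] = Newick tree file, argv[1] = output file stem,
        // argv[2] = substitution rate (default 1.0E-3),
        // argv[3] = gamma category count (default 8),
        // argv[4] = damage rate (default 0)
        SeqGen.main(new String[] { "trees.newick", "sim", "1.0E-3", "8", "0" });
        // Given the code above, the FASTA output goes to sim-0.001.fasta
        // (the same file name is reused for every tree in the input file).
    }
}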
Use of dr.evolution.io.NewickImporter in project beast-mcmc by beast-dev.
The class SequenceSimulator, method main:
public static void main(String[] args) {
    try {
        int nReplications = 10;
        // create tree
        NewickImporter importer = new NewickImporter("((A:1.0,B:1.0)AB:1.0,(C:1.0,D:1.0)CD:1.0)ABCD;");
        Tree tree = importer.importTree(null);
        // create site model
        SiteModel siteModel = getDefaultSiteModel();
        // create branch rate model
        BranchRateModel branchRateModel = new DefaultBranchRateModel();
        // feed to sequence simulator and generate leaves
        SequenceSimulator treeSimulator = new SequenceSimulator(tree, siteModel, branchRateModel, nReplications);
        Sequence ancestralSequence = new Sequence();
        ancestralSequence.appendSequenceString("TCAGGTCAAG");
        treeSimulator.setAncestralSequence(ancestralSequence);
        System.out.println(treeSimulator.simulate().toString());
    } catch (Exception e) {
        e.printStackTrace();
    }
    // END: try-catch block
}
Use of dr.evolution.io.NewickImporter in project beast-mcmc by beast-dev.
The class TreeStatFrame, method processTreeFile:
protected void processTreeFile(File inFile, File outFile) throws IOException, Importer.ImportException {
    processTreeFileAction.setEnabled(false);
    // peek at the first line to decide between NEXUS and Newick formats
    BufferedReader r = new BufferedReader(new FileReader(inFile));
    String line = r.readLine();
    r.close();
    final ProgressMonitorInputStream in = new ProgressMonitorInputStream(this, "Reading " + inFile.getName(), new FileInputStream(inFile));
    in.getProgressMonitor().setMillisToDecideToPopup(0);
    in.getProgressMonitor().setMillisToPopup(0);
    final Reader reader = new InputStreamReader(new BufferedInputStream(in));
    // final Reader reader = new FileReader(inFile);
    final TreeImporter importer;
    if (line.toUpperCase().startsWith("#NEXUS")) {
        importer = new NexusImporter(reader);
    } else {
        importer = new NewickImporter(reader);
    }
    final Tree firstTree = importer.importNextTree();
    boolean isUltrametric = TreeUtils.isUltrametric(firstTree);
    boolean isBinary = TreeUtils.isBinary(firstTree);
    boolean stop = false;
    // check that the trees conform with the requirements of the selected statistics
    for (int i = 0; i < treeStatData.statistics.size(); i++) {
        TreeSummaryStatistic tss = (TreeSummaryStatistic) treeStatData.statistics.get(i);
        String label = tss.getSummaryStatisticName();
        if (!isUltrametric && !tss.allowsNonultrametricTrees()) {
            if (JOptionPane.showConfirmDialog(this, "Warning: These trees may not be ultrametric and this is\na requirement of the " + label + " statistic. Do you wish to continue?", "Warning", JOptionPane.YES_NO_OPTION) != JOptionPane.YES_OPTION) {
                stop = true;
                break;
            }
            // don't ask the question again...
            isUltrametric = true;
        }
        if (!isBinary && !tss.allowsPolytomies()) {
            if (JOptionPane.showConfirmDialog(this, "Warning: These trees may not be strictly bifurcating and this is\na requirement of the " + label + " statistic. Do you wish to continue?", "Warning", JOptionPane.YES_NO_OPTION) != JOptionPane.YES_OPTION) {
                stop = true;
                break;
            }
            // don't ask the question again...
            isBinary = true;
        }
    }
    if (stop) {
        processTreeFileAction.setEnabled(true);
        return;
    }
    final PrintWriter writer = new PrintWriter(new FileWriter(outFile));
    // Thread readThread = new Thread() {
    //     public void run() {
    Tree tree = firstTree;
    writer.print("state");
    for (int i = 0; i < treeStatData.statistics.size(); i++) {
        TreeSummaryStatistic tss = (TreeSummaryStatistic) treeStatData.statistics.get(i);
        int dim = tss.getStatisticDimensions(tree);
        for (int j = 0; j < dim; j++) {
            writer.print("\t" + tss.getStatisticLabel(tree, j));
        }
    }
    writer.println();
    state = 0;
    do {
        writer.print(state);
        for (int i = 0; i < treeStatData.statistics.size(); i++) {
            TreeSummaryStatistic tss = (TreeSummaryStatistic) treeStatData.statistics.get(i);
            double[] stats = tss.getSummaryStatistic(tree);
            for (int j = 0; j < stats.length; j++) {
                writer.print("\t" + stats[j]);
            }
        }
        writer.println();
        state += 1;
        final int currentState = state;
        in.getProgressMonitor().setNote("Processing Tree " + currentState + "...");
        // EventQueue.invokeLater(
        //     new Runnable() {
        //         public void run() {
        //             progressLabel.setText("Processing Tree " + currentState + "...");
        //         }
        //     });
        // try {
        tree = importer.importNextTree();
        // } catch (final IOException e) {
        //     EventQueue.invokeLater(
        //         new Runnable() {
        //             public void run() {
        //                 JOptionPane.showMessageDialog(TreeStatFrame.this, "File I/O Error: " + e.getMessage(),
        //                         "File I/O Error", JOptionPane.ERROR_MESSAGE);
        //             }
        //         });
        // } catch (final Importer.ImportException e) {
        //     EventQueue.invokeLater(
        //         new Runnable() {
        //             public void run() {
        //                 JOptionPane.showMessageDialog(TreeStatFrame.this, "Error importing tree: " + e.getMessage(),
        //                         "Tree Import Error", JOptionPane.ERROR_MESSAGE);
        //             }
        //         });
        // }
    } while (tree != null);
    //     }
    // };
    //
    // readThread.start();
    // while (readThread.isAlive()) {
    //     Thread.yield();
    // }
    reader.close();
    writer.close();
    progressLabel.setText("" + state + " trees processed.");
    processTreeFileAction.setEnabled(true);
}
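Stripped of the Swing progress machinery, the core streaming loop of this method reduces to the sketch below; the file name is a placeholder, and the loop relies on importNextTree() returning null once the input is exhausted, exactly as the do/while above does:

import dr.evolution.io.NewickImporter;
import dr.evolution.io.TreeImporter;
import dr.evolution.tree.Tree;
import java.io.BufferedReader;
import java.io.FileReader;

public class StreamTrees {

    public static void main(String[] args) throws Exception {
        BufferedReader reader = new BufferedReader(new FileReader("trees.newick"));
        TreeImporter importer = new NewickImporter(reader);
        int state = 0;
        Tree tree = importer.importNextTree();
        while (tree != null) {
            // compute and write per-tree statistics here
            state++;
            tree = importer.importNextTree();
        }
        reader.close();
        System.out.println(state + " trees processed.");
    }
}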