Example use of beast.util.TreeParser, taken from the beast2 project by CompEvol: class TreeParserTest, method testMultifurcations.
/**
 * Parsing a labelled Newick string containing multifurcations (nodes with
 * more than two children) must round-trip: serialising the parsed tree
 * yields the original string (minus the trailing semicolon).
 */
@Test
public void testMultifurcations() throws Exception {
    // Two multifurcating clades: one with three children, one with four.
    final String multifurcatingNewick =
            "((A:1.0,B:1.0,C:1.0):1.0,(D:1.0,E:1.0,F:1.0,G:1.0):1.0):0.0;";
    // Labelled-Newick mode; label offset 1; no tip-height adjustment.
    final TreeParser parser =
            new TreeParser(multifurcatingNewick, false, false, true, 1, false);
    // Expected output is the input without its terminating ';'.
    final String expected =
            multifurcatingNewick.substring(0, multifurcatingNewick.indexOf(';'));
    Assert.assertEquals(expected, parser.getRoot().toNewick());
}
Example use of beast.util.TreeParser, taken from the beast2 project by CompEvol: class TreeParserTest, method testDuplicates.
/**
 * A Newick string in which two leaves carry the same taxon label ("A")
 * must be rejected by the parser with a RuntimeException.
 */
@Test
public void testDuplicates() throws Exception {
    final String newickWithDuplicateTaxon = "((A:1.0,B:1.0):1.0,(C:1.0,A:1.0):1.0):0.0;";
    RuntimeException caught = null;
    try {
        final TreeParser parser =
                new TreeParser(newickWithDuplicateTaxon, false, false, true, 1);
        System.out.println(parser.getRoot().toNewick());
    } catch (RuntimeException e) {
        // Print for diagnostic purposes, then record that parsing failed
        // as expected.
        e.printStackTrace();
        caught = e;
    }
    Assert.assertTrue(caught != null);
}
Example use of beast.util.TreeParser, taken from the MultiTypeTree project by tgvaughan: class MultiTypeTreeFromUntypedNewick, method initAndValidate.
// Builds this multi-type (coloured) tree from an untyped Newick string:
// parses a flat tree, clones its topology into MultiTypeNode objects,
// assigns leaf types from the type trait, then repeatedly simulates
// migration paths along lineages until a valid colouring is produced.
@Override
public void initAndValidate() {
super.initAndValidate();
migrationModel = migrationModelInput.get();
// Read in flat tree
flatTree = new TreeParser();
flatTree.initByName("IsLabelledNewick", true, "adjustTipHeights", adjustTipHeightsInput.get(), "singlechild", true, "newick", newickStringInput.get());
// Create typed tree lacking type information
// First pass: one MultiTypeNode per flat-tree node, copying node number,
// height and ID. Topology is wired up in the second pass below.
typedNodes = new MultiTypeNode[flatTree.getNodeCount()];
for (int i = 0; i < typedNodes.length; i++) {
typedNodes[i] = new MultiTypeNode();
typedNodes[i].setNr(i);
typedNodes[i].setHeight(flatTree.getNode(i).getHeight());
typedNodes[i].setID(flatTree.getNode(i).getID());
}
// Second pass: mirror the flat tree's parent/child relationships onto the
// typed nodes, mapping by node number.
for (int i = 0; i < typedNodes.length; i++) {
MultiTypeNode typedNode = typedNodes[i];
Node node = flatTree.getNode(i);
if (node.isRoot())
typedNode.setParent(null);
else
typedNode.setParent(typedNodes[node.getParent().getNr()]);
// Pad the child list with placeholders so setChild(c, ...) has a slot
// to overwrite for every child index.
while (typedNode.children.size() < node.getChildCount()) typedNode.children.add(null);
for (int c = 0; c < node.getChildCount(); c++) {
typedNode.setChild(c, typedNodes[node.getChild(c).getNr()]);
}
}
// Assign node types
// Leaf types come from the type trait, looked up by leaf ID. The code
// assumes leaves occupy indices [0, leafNodeCount) — consistent with the
// index check in the clearing loop below.
if (getTypeTrait() != null) {
for (int i = 0; i < flatTree.getLeafNodeCount(); i++) {
MultiTypeNode typedNode = typedNodes[i];
typedNode.setNodeType(migrationModel.getTypeSet().getTypeIndex(getTypeTrait().getStringValue(typedNode.getID())));
}
} else {
throw new IllegalArgumentException("Trait set (with name '" + typeLabelInput.get() + "') " + "must be provided.");
}
// Clear any existing type info
// Rejection sampling: reset all type changes (and internal node types),
// then colour lineages in a random leaf order; on failure, retry from a
// clean state with a fresh ordering.
while (true) {
for (int i = 0; i < flatTree.getNodeCount(); i++) {
typedNodes[i].clearChanges();
if (i >= flatTree.getLeafNodeCount())
typedNodes[i].setNodeType(-1);
}
int[] leafNrs = Randomizer.shuffled(flatTree.getLeafNodeCount());
try {
boolean isFirst = true;
for (int leafNr : leafNrs) {
// The first lineage is coloured unconditionally; subsequent lineages
// must be coloured consistently with those already placed.
if (isFirst) {
colourFirstLineage(typedNodes[leafNr]);
isFirst = false;
} else {
colourLineage(typedNodes[leafNr]);
}
}
} catch (NoValidPathException ex) {
// Simulation reached a dead end; discard this attempt and retry.
Log.info.println("Colour simulation failed. Retrying.");
continue;
}
break;
}
// Construct MTT
// Adopt the typed node array as this tree's own structure. The last node
// is taken as the root — TODO confirm this matches the parser's numbering.
root = typedNodes[typedNodes.length - 1];
root.parent = null;
nodeCount = root.getNodeCount();
internalNodeCount = root.getInternalNodeCount();
leafNodeCount = root.getLeafNodeCount();
initArrays();
}
Example use of beast.util.TreeParser, taken from the bacter project by tgvaughan: class MarginalTreeTest, method testOverlapping.
// Verifies MarginalTree construction for a conversion graph whose converted
// regions overlap, comparing each region's marginal tree against a
// hand-checked expected tree.
@Test
public void testOverlapping() throws Exception {
// Conversion graph
// NOTE(review): each "[&locus,...]" prefix appears to encode one conversion
// event attached to the clonal frame that follows — confirm against
// ConversionGraph's "fromString" format.
String str = "[&locus,2,1000,0.3260126313706676,10,5000,0.42839862922656696] " + "[&locus,10,2000,0.3381366423491633,2,8000,0.5683827224649434] " + "[&locus,10,3000,0.2807615297583804,14,7000,0.3415740002783274] " + "[&locus,2,7500,0.1,2,9999,0.4] " + "[&locus,2,7600,0.05,18,7700,2.0] " + "(((0:0.04916909893812008,1:0.04916909893812008)10:0.5465237639426681," + "(4:0.3773111326866937,(((8:0.22180790639747835," + "(3:0.07561592852503513,6:0.07561592852503513)11:0.14619197787244323)" + "13:0.010206467073885589,9:0.23201437347136394)14:0.116542689187905," + "(7:0.10746702934931932,5:0.10746702934931932)12:0.24109003330994963)" + "15:0.02875407002742475)16:0.21838173019409446)17:1.1073878800617445," + "2:1.7030807429425328)18:0.0";
Locus locus = new Locus("locus", 10000);
locus.setID("locus");
ConversionGraph acg = new ConversionGraph();
acg.initByName("fromString", str, "locus", locus);
// System.out.println(acg.getExtendedNewick(true));
// Test all marginals against truth
// (I have eyeballed each of these trees and claim that they are correct.)
// One expected Newick string per region of the locus, in region order.
String[] correctNewickStrings = { "(((0:0.04916909893812016,1:0.04916909893812016)10:0.5465237639426681,(((((3:0.07561592852503529,6:0.07561592852503529)11:0.1461919778724432,8:0.2218079063974785)13:0.010206467073885506,9:0.232014373471364)14:0.11654268918790511,(5:0.1074670293493194,7:0.1074670293493194)12:0.24109003330994971)15:0.028754070027424694,4:0.3773111326866938)16:0.21838173019409446)17:1.1073878800617445,2:1.7030807429425328)18:0.0;", "(((0:0.04916909893812016,1:0.04916909893812016)10:0.3792295302884468,2:0.42839862922656696)17:0.1672942336542213,(((((3:0.07561592852503529,6:0.07561592852503529)11:0.1461919778724432,8:0.2218079063974785)13:0.010206467073885506,9:0.232014373471364)14:0.11654268918790511,(5:0.1074670293493194,7:0.1074670293493194)12:0.24109003330994971)15:0.028754070027424694,4:0.3773111326866938)16:0.21838173019409446)18:0.0;", "((2:0.5956928628807883,(((((3:0.07561592852503529,6:0.07561592852503529)11:0.1461919778724432,8:0.2218079063974785)13:0.010206467073885506,9:0.232014373471364)14:0.11654268918790511,(5:0.1074670293493194,7:0.1074670293493194)12:0.24109003330994971)15:0.028754070027424694,4:0.3773111326866938)16:0.21838173019409446)17:1.1073878800617445,(0:0.04916909893812016,1:0.04916909893812016)10:1.6539116440044126)18:0.0;", "(2:0.5956928628807883,((((((3:0.07561592852503529,6:0.07561592852503529)11:0.1461919778724432,8:0.2218079063974785)13:0.010206467073885506,9:0.232014373471364)14:0.1095596268069634,(0:0.04916909893812016,1:0.04916909893812016)10:0.29240490134020725)15:0.006983062380941707,(5:0.1074670293493194,7:0.1074670293493194)12:0.24109003330994971)16:0.028754070027424694,4:0.3773111326866938)17:0.21838173019409446)18:0.0;", 
"(((((((3:0.07561592852503529,6:0.07561592852503529)11:0.1461919778724432,8:0.2218079063974785)13:0.010206467073885506,9:0.232014373471364)14:0.1095596268069634,(0:0.04916909893812016,1:0.04916909893812016)10:0.29240490134020725)15:0.006983062380941707,(5:0.1074670293493194,7:0.1074670293493194)12:0.24109003330994971)16:0.028754070027424694,4:0.3773111326866938)17:1.325769610255839,2:1.7030807429425328)18:0.0;", "((((((3:0.07561592852503529,6:0.07561592852503529)11:0.1461919778724432,8:0.2218079063974785)13:0.010206467073885506,9:0.232014373471364)14:0.11654268918790511,(5:0.1074670293493194,7:0.1074670293493194)12:0.24109003330994971)15:0.028754070027424694,4:0.3773111326866938)16:1.325769610255839,(2:0.5683827224649434,(0:0.04916909893812016,1:0.04916909893812016)10:0.5192136235268232)17:1.1346980204775894)18:0.0;", "((((((3:0.07561592852503529,6:0.07561592852503529)11:0.1461919778724432,8:0.2218079063974785)13:0.010206467073885506,9:0.232014373471364)14:0.11654268918790511,(5:0.1074670293493194,7:0.1074670293493194)12:0.24109003330994971)15:0.028754070027424694,4:0.3773111326866938)16:1.325769610255839,(2:0.5683827224649434,(0:0.04916909893812016,1:0.04916909893812016)10:0.5192136235268232)17:1.1346980204775894)18:0.0;", "(((((((3:0.07561592852503529,6:0.07561592852503529)11:0.1461919778724432,8:0.2218079063974785)13:0.010206467073885506,9:0.232014373471364)14:0.11654268918790511,(5:0.1074670293493194,7:0.1074670293493194)12:0.24109003330994971)15:0.028754070027424694,4:0.3773111326866938)16:1.325769610255839,(0:0.04916909893812016,1:0.04916909893812016)10:1.6539116440044126)17:0.29691925705746725,2:2.0)18:0.0;", 
"((((((3:0.07561592852503529,6:0.07561592852503529)11:0.1461919778724432,8:0.2218079063974785)13:0.010206467073885506,9:0.232014373471364)14:0.11654268918790511,(5:0.1074670293493194,7:0.1074670293493194)12:0.24109003330994971)15:0.028754070027424694,4:0.3773111326866938)16:1.325769610255839,(2:0.5683827224649434,(0:0.04916909893812016,1:0.04916909893812016)10:0.5192136235268232)17:1.1346980204775894)18:0.0;", "(((0:0.04916909893812016,1:0.04916909893812016)10:0.5465237639426681,(((((3:0.07561592852503529,6:0.07561592852503529)11:0.1461919778724432,8:0.2218079063974785)13:0.010206467073885506,9:0.232014373471364)14:0.11654268918790511,(5:0.1074670293493194,7:0.1074670293493194)12:0.24109003330994971)15:0.028754070027424694,4:0.3773111326866938)16:0.21838173019409446)17:1.1073878800617445,2:1.7030807429425328)18:0.0;" };
// Build and compare the marginal tree for every region of the locus.
for (int r = 0; r < acg.getRegionCount(locus); r++) {
MarginalTree marginalTree = new MarginalTree(acg, acg.getRegions(locus).get(r));
// System.out.println(marginalTree + ";");
// Node heights compared within an absolute tolerance of 1e-15.
assertTrue(treesEquivalent(marginalTree.getRoot(), new TreeParser(correctNewickStrings[r], false, true, false, 0).getRoot(), 1e-15));
}
}
Example use of beast.util.TreeParser, taken from the bacter project by tgvaughan: class ACGLikelihoodApproxTest, method testACGHeightMap.
/**
 * Checks that ACGLikelihoodApprox reports the expected coalescence-height
 * map for a three-taxon conversion graph carrying a single conversion.
 */
@Test
public void testACGHeightMap() throws Exception {
    // Three maximally divergent 20-site sequences.
    //                            01234567890123456789
    final List<Sequence> seqs = new ArrayList<>();
    seqs.add(new Sequence("t1", "GGGGGGGGGGGGGGGGGGGG"));
    seqs.add(new Sequence("t2", "CCCCCCCCCCCCCCCCCCCC"));
    seqs.add(new Sequence("t3", "TTTTTTTTTTTTTTTTTTTT"));
    final Alignment align = new Alignment(seqs, "nucleotide");
    final Locus locus = new Locus("locus", align);

    // Clonal frame: ((t1,t2),t3) with the root at height 2.
    final TreeParser clonalFrame = new TreeParser(align, "((t1:1,t2:1):1,t3:2):0;");
    final ConversionGraph acg = new ConversionGraph();
    acg.assignFrom(clonalFrame);
    acg.initByName("locus", locus);

    // One conversion: departs node 0 at height 0.5, rejoins node 2 at
    // height 1.5, covering sites 0..9.
    final Conversion conv = new Conversion();
    conv.setLocus(locus);
    conv.setNode1(acg.getNode(0));
    conv.setHeight1(0.5);
    conv.setNode2(acg.getNode(2));
    conv.setHeight2(1.5);
    conv.setStartSite(0);
    conv.setEndSite(9);
    acg.addConversion(conv);

    final ACGLikelihoodApprox approx = new ACGLikelihoodApprox();
    approx.initByName("acg", acg, "substitutionRate", "1.0", "alignment", align, "locus", locus);

    // Expect coalescences at heights 1.0 (t1/t2 on the unconverted sites),
    // 1.5 (conversion rejoin) and 2.0 (root).
    final Map<Double, Coalescence> heightMap = approx.getCoalescenceHeights();
    Assert.assertEquals(3, heightMap.size());
    Assert.assertTrue(heightMap.containsKey(1.0));
    Assert.assertTrue(heightMap.containsKey(1.5));
    Assert.assertTrue(heightMap.containsKey(2.0));
    Assert.assertTrue("height map contains incorrect coalescence.", heightMap.get(1.0).equals(new Coalescence("[10,20]{0}{1}")));
    Assert.assertTrue("height map contains incorrect coalescence.", heightMap.get(1.5).equals(new Coalescence("[0,10]{0}{2}")));
    Assert.assertTrue("height map contains incorrect coalescence.", heightMap.get(2.0).equals(new Coalescence("[0,10]{0,2}{1} [10,20]{0,1}{2}")));
}
End of aggregated beast.util.TreeParser usage examples.