Use of it.uniroma1.dis.wsngroup.gexf4j.core.Graph in project gexf4j by francesco-ficarola.
From the class GexfImplFileWritingTest, method testStaticGexfGraph.
/**
 * Test of getGraph method, of class GexfImpl.
 */
@Test
public void testStaticGexfGraph() throws IOException, SAXException {
    System.out.println("getGraph");
    Gexf gexf = new GexfImpl();
    Calendar date = Calendar.getInstance();
    date.set(2012, 4, 02);
    gexf.getMetadata().setLastModified(date.getTime()).setCreator("Gephi.org").setDescription("A Web network");
    Graph graph = gexf.getGraph();
    graph.setDefaultEdgeType(EdgeType.UNDIRECTED).setMode(Mode.STATIC);
    AttributeList attrList = new AttributeListImpl(AttributeClass.NODE);
    graph.getAttributeLists().add(attrList);
    Attribute attUrl = attrList.createAttribute("0", AttributeType.STRING, "url");
    Attribute attIndegree = attrList.createAttribute("1", AttributeType.FLOAT, "indegree");
    Attribute attFrog = attrList.createAttribute("2", AttributeType.BOOLEAN, "frog").setDefaultValue("true");
    Node gephi = graph.createNode("0");
    gephi.setLabel("Gephi").getAttributeValues().addValue(attUrl, "http://gephi.org").addValue(attIndegree, "1");
    Node webatlas = graph.createNode("1");
    webatlas.setLabel("Webatlas").getAttributeValues().addValue(attUrl, "http://webatlas.fr").addValue(attIndegree, "2");
    Node rtgi = graph.createNode("2");
    rtgi.setLabel("RTGI").getAttributeValues().addValue(attUrl, "http://rtgi.fr").addValue(attIndegree, "1");
    Node blab = graph.createNode("3");
    blab.setLabel("BarabasiLab").getAttributeValues().addValue(attUrl, "http://barabasilab.com").addValue(attIndegree, "1").addValue(attFrog, "false");
    gephi.connectTo("0", webatlas);
    gephi.connectTo("1", rtgi);
    webatlas.connectTo("2", gephi);
    rtgi.connectTo("3", webatlas);
    gephi.connectTo("4", blab);
    StaxGraphWriter graphWriter = new StaxGraphWriter();
    StringWriter stringWriter = new StringWriter();
    graphWriter.writeToStream(gexf, new FileWriter("target/testStatic.gexf"), "UTF-8");
    graphWriter.writeToStream(gexf, stringWriter, "UTF-8");
    String found = IOUtils.toString(new FileReader("target/testStatic.gexf"));
    String start = stringWriter.toString();
    Diff myDiff = new Diff(found, start);
    // myDiff.overrideElementQualifier(new ElementNameAndTextQualifier());
    assertTrue("XML similar " + myDiff.toString(), myDiff.similar());
}
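As a quick sanity check of the file this test writes, the document can be parsed with the JDK's DOM parser. The following sketch is not part of gexf4j or its test suite; it assumes the same target/testStatic.gexf path as the test above, and the class name is a hypothetical placeholder. It simply counts the node and edge elements (the test builds four nodes and five edges).

import java.io.File;
import javax.xml.parsers.DocumentBuilderFactory;
import org.w3c.dom.Document;

// Illustrative sketch only: check that the GEXF written by the test above is
// well-formed XML and count its node/edge elements.
public class GexfOutputCheck {
    public static void main(String[] args) throws Exception {
        Document doc = DocumentBuilderFactory.newInstance()
                .newDocumentBuilder()
                .parse(new File("target/testStatic.gexf"));
        int nodes = doc.getElementsByTagName("node").getLength();
        int edges = doc.getElementsByTagName("edge").getLength();
        // The test above creates 4 nodes and 5 edges.
        System.out.println(nodes + " nodes, " + edges + " edges");
    }
}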
Use of it.uniroma1.dis.wsngroup.gexf4j.core.Graph in project gridss by PapenfussLab.
From the class Poc, method exportOverlapGraph.
// OverlapGraph { read, read, offset }
// ReadPairGraph { read1, read2 }
// FragmentSizeDistribution
// CompressedOverlapNode { _seq, (read, contig_offset) }
// Algorithm details to resolve:
// - how to allocate read overlap
// - just allocate reads at contig resolution
// - How to generate haplotypes?
// - efficient overlap calculation
// - how to deal with errors
// - sequencing errors
// - chimeric fragments
// Am I just making an OLC haplotype caller? (would that be problematic anyway?)
/**
 * Exports the raw overlap string graph for the given reads to a GEXF file.
 */
public void exportOverlapGraph(List<SAMRecord> reads, int minOverlap, File file) {
    Gexf gexf = new GexfImpl();
    gexf.getMetadata().setLastModified(new Date()).setCreator("GRIDSS").setDescription("Raw overlap string graph");
    gexf.setVisualization(true);
    Graph graph = gexf.getGraph().setIDType(IDType.STRING).setDefaultEdgeType(EdgeType.DIRECTED).setMode(Mode.STATIC);
    AttributeList nodeAttrList = new AttributeListImpl(AttributeClass.NODE).setMode(Mode.STATIC);
    graph.getAttributeLists().add(nodeAttrList);
    AttributeList edgeAttrList = new AttributeListImpl(AttributeClass.EDGE).setMode(Mode.STATIC);
    Attribute attrSeq = edgeAttrList.createAttribute("seq", AttributeType.STRING, "sequence");
    // Attribute attrOverlap = edgeAttrList.createAttribute("l", AttributeType.INTEGER, "overlap length");
    // Attribute attrMatch = edgeAttrList.createAttribute("m", AttributeType.INTEGER, "base matches");
    // Attribute attrMismatch = edgeAttrList.createAttribute("mm", AttributeType.INTEGER, "base mismatches");
    graph.getAttributeLists().add(edgeAttrList);
    // Create a start node and an end node for every read, and index each read for overlap queries.
    HashMap<Read, Node> startLookup = Maps.newHashMap();
    HashMap<Read, Node> endLookup = Maps.newHashMap();
    OverlapLookup ol = new OverlapLookup(minOverlap);
    for (SAMRecord sam : reads) {
        Read r = Read.create(sam);
        Node startnode = graph.createNode("start_" + sam.getReadName() + "/" + (SAMRecordUtil.getSegmentIndex(sam) + 1));
        Node endnode = graph.createNode("end_" + sam.getReadName() + "/" + (SAMRecordUtil.getSegmentIndex(sam) + 1));
        startLookup.put(r, startnode);
        endLookup.put(r, endnode);
        ol.add(r);
    }
    // For each read, connect the start node of every overlapping successor read to this read's end node,
    // weighting the directed edge by the overlap length and recording the overlapping sequence.
    for (Read r : startLookup.keySet()) {
        Node rend = endLookup.get(r);
        for (Overlap o : ol.successors(r)) {
            Node ostart = startLookup.get(o.read2);
            Edge edge = ostart.connectTo(rend).setEdgeType(EdgeType.DIRECTED);
            edge.setWeight(o.overlap);
            // getReadBases() returns a byte[], so convert it to a String before taking the overlap prefix.
            edge.getAttributeValues().createValue(attrSeq, new String(o.read2.getRead().getReadBases()).substring(0, o.overlap));
        }
    }
    GexfHelper.saveTo(gexf, file);
}
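GexfHelper.saveTo is a GRIDSS utility rather than part of gexf4j. A roughly equivalent save using gexf4j's own StaxGraphWriter (the same writeToStream call used in the test above) might look like the following sketch; the helper class name and the try-with-resources wrapping are illustrative assumptions, not GRIDSS code.

import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import it.uniroma1.dis.wsngroup.gexf4j.core.Gexf;
import it.uniroma1.dis.wsngroup.gexf4j.core.impl.StaxGraphWriter;

// Sketch only: write a Gexf document to a file with gexf4j's StaxGraphWriter,
// approximating what GexfHelper.saveTo does in GRIDSS.
final class GexfSave {
    static void saveTo(Gexf gexf, File file) {
        try (FileWriter out = new FileWriter(file)) {
            new StaxGraphWriter().writeToStream(gexf, out, "UTF-8");
        } catch (IOException e) {
            throw new RuntimeException("Unable to write " + file, e);
        }
    }
}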