use of org.apache.flink.graph.Edge in project flink by splunk.
the class StarGraph method generate.
@Override
public Graph<LongValue, NullValue, NullValue> generate() {
    Preconditions.checkState(vertexCount >= 2);
    // Vertices
    DataSet<Vertex<LongValue, NullValue>> vertices = GraphGeneratorUtils.vertexSequence(env, parallelism, vertexCount);
    // Edges
    LongValueSequenceIterator iterator = new LongValueSequenceIterator(1, this.vertexCount - 1);
    DataSet<Edge<LongValue, NullValue>> edges = env
        .fromParallelCollection(iterator, LongValue.class)
            .setParallelism(parallelism)
            .name("Edge iterators")
        .flatMap(new LinkVertexToCenter())
            .setParallelism(parallelism)
            .name("Star graph edges");
    // Graph
    return Graph.fromDataSet(vertices, edges, env);
}
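For context, a minimal sketch of how this generator is typically invoked. The class name StarGraphExample and the vertex count of 10 are illustrative assumptions, not code from the project.

import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.graph.Graph;
import org.apache.flink.graph.generator.StarGraph;
import org.apache.flink.types.LongValue;
import org.apache.flink.types.NullValue;

public class StarGraphExample {
    public static void main(String[] args) throws Exception {
        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
        // Vertex count chosen arbitrarily: one center vertex (ID 0) plus nine leaves.
        Graph<LongValue, NullValue, NullValue> star = new StarGraph(env, 10).generate();
        // Print the edges produced by the generate() method shown above.
        star.getEdges().print();
    }
}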
use of org.apache.flink.graph.Edge in project flink by splunk.
the class CirculantGraph method generate.
@Override
public Graph<LongValue, NullValue, NullValue> generate() {
    // Vertices
    DataSet<Vertex<LongValue, NullValue>> vertices = GraphGeneratorUtils.vertexSequence(env, parallelism, vertexCount);
    // Edges
    LongValueSequenceIterator iterator = new LongValueSequenceIterator(0, this.vertexCount - 1);
    // Validate ranges
    Collections.sort(offsetRanges);
    Iterator<OffsetRange> iter = offsetRanges.iterator();
    OffsetRange lastRange = iter.next();
    while (iter.hasNext()) {
        OffsetRange nextRange = iter.next();
        if (lastRange.overlaps(nextRange)) {
            throw new IllegalArgumentException("Overlapping ranges " + lastRange + " and " + nextRange);
        }
        lastRange = nextRange;
    }
    DataSet<Edge<LongValue, NullValue>> edges = env
        .fromParallelCollection(iterator, LongValue.class)
            .setParallelism(parallelism)
            .name("Edge iterators")
        .flatMap(new LinkVertexToOffsets(vertexCount, offsetRanges))
            .setParallelism(parallelism)
            .name("Circulant graph edges");
    // Graph
    return Graph.fromDataSet(vertices, edges, env);
}
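A minimal usage sketch for this generator: addRange populates the offsetRanges list that generate() validates above. The class name CirculantGraphExample, the vertex count of 10, and the offset range (1, 2) are illustrative assumptions.

import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.graph.Graph;
import org.apache.flink.graph.generator.CirculantGraph;
import org.apache.flink.types.LongValue;
import org.apache.flink.types.NullValue;

public class CirculantGraphExample {
    public static void main(String[] args) throws Exception {
        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
        // Each of the 10 vertices links to the vertices at offsets 1 and 2
        // (modulo the vertex count); the values are arbitrary.
        Graph<LongValue, NullValue, NullValue> graph = new CirculantGraph(env, 10)
            .addRange(1, 2)
            .generate();
        graph.getEdges().print();
    }
}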
use of org.apache.flink.graph.Edge in project flink by splunk.
the class GridGraph method generate.
@Override
public Graph<LongValue, NullValue, NullValue> generate() {
    Preconditions.checkState(!dimensions.isEmpty(), "No dimensions added to GridGraph");
    // Vertices
    DataSet<Vertex<LongValue, NullValue>> vertices = GraphGeneratorUtils.vertexSequence(env, parallelism, vertexCount);
    // Edges
    LongValueSequenceIterator iterator = new LongValueSequenceIterator(0, this.vertexCount - 1);
    DataSet<Edge<LongValue, NullValue>> edges = env
        .fromParallelCollection(iterator, LongValue.class)
            .setParallelism(parallelism)
            .name("Edge iterators")
        .flatMap(new LinkVertexToNeighbors(vertexCount, dimensions))
            .setParallelism(parallelism)
            .name("Grid graph edges");
    // Graph
    return Graph.fromDataSet(vertices, edges, env);
}
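A minimal usage sketch for this generator: addDimension fills the dimensions list checked by the precondition above. The class name GridGraphExample and the 2x4 grid are illustrative assumptions.

import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.graph.Graph;
import org.apache.flink.graph.generator.GridGraph;
import org.apache.flink.types.LongValue;
import org.apache.flink.types.NullValue;

public class GridGraphExample {
    public static void main(String[] args) throws Exception {
        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
        // A 2x4 grid; the boolean controls whether each dimension's endpoints wrap around.
        Graph<LongValue, NullValue, NullValue> graph = new GridGraph(env)
            .addDimension(2, false)
            .addDimension(4, false)
            .generate();
        graph.getEdges().print();
    }
}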
use of org.apache.flink.graph.Edge in project flink by splunk.
the class PageRank method runInternal.
@Override
public DataSet<Result<K>> runInternal(Graph<K, VV, EV> input) throws Exception {
    // vertex degree
    DataSet<Vertex<K, Degrees>> vertexDegree = input
        .run(new VertexDegrees<K, VV, EV>()
            .setIncludeZeroDegreeVertices(includeZeroDegreeVertices)
            .setParallelism(parallelism));
    // vertex count
    DataSet<LongValue> vertexCount = GraphUtils.count(vertexDegree);
    // s, t, d(s)
    DataSet<Edge<K, LongValue>> edgeSourceDegree = input
        .run(new EdgeSourceDegrees<K, VV, EV>().setParallelism(parallelism))
        .map(new ExtractSourceDegree<>()).setParallelism(parallelism).name("Extract source degree");
    // vertices with zero in-edges
    DataSet<Tuple2<K, DoubleValue>> sourceVertices = vertexDegree
        .flatMap(new InitializeSourceVertices<>()).setParallelism(parallelism).name("Initialize source vertex scores");
    // s, initial pagerank(s)
    DataSet<Tuple2<K, DoubleValue>> initialScores = vertexDegree
        .map(new InitializeVertexScores<>())
        .withBroadcastSet(vertexCount, VERTEX_COUNT)
        .setParallelism(parallelism).name("Initialize scores");
    IterativeDataSet<Tuple2<K, DoubleValue>> iterative = initialScores.iterate(maxIterations).setParallelism(parallelism);
    // s, projected pagerank(s)
    DataSet<Tuple2<K, DoubleValue>> vertexScores = iterative
        .coGroup(edgeSourceDegree).where(0).equalTo(0)
        .with(new SendScore<>()).setParallelism(parallelism).name("Send score")
        .groupBy(0)
        .reduce(new SumScore<>()).setCombineHint(CombineHint.HASH).setParallelism(parallelism).name("Sum");
    // ignored ID, total pagerank
    DataSet<Tuple2<K, DoubleValue>> sumOfScores = vertexScores
        .reduce(new SumVertexScores<>()).setParallelism(parallelism).name("Sum");
    // s, adjusted pagerank(s)
    DataSet<Tuple2<K, DoubleValue>> adjustedScores = vertexScores
        .union(sourceVertices).name("Union with source vertices")
        .map(new AdjustScores<>(dampingFactor))
        .withBroadcastSet(sumOfScores, SUM_OF_SCORES)
        .withBroadcastSet(vertexCount, VERTEX_COUNT)
        .setParallelism(parallelism).name("Adjust scores");
    DataSet<Tuple2<K, DoubleValue>> passThrough;
    if (convergenceThreshold < Double.MAX_VALUE) {
        passThrough = iterative
            .join(adjustedScores).where(0).equalTo(0)
            .with(new ChangeInScores<>()).setParallelism(parallelism).name("Change in scores");
        iterative.registerAggregationConvergenceCriterion(
            CHANGE_IN_SCORES, new DoubleSumAggregator(), new ScoreConvergence(convergenceThreshold));
    } else {
        passThrough = adjustedScores;
    }
    return iterative.closeWith(passThrough)
        .map(new TranslateResult<>()).setParallelism(parallelism).name("Map result");
}
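runInternal is not called directly; the algorithm is driven through Graph.run. A minimal sketch under stated assumptions: the class name PageRankExample, the generated star graph used as input, and the damping factor and iteration count (0.85, 20) are illustrative values, not taken from the project.

import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.graph.Graph;
import org.apache.flink.graph.generator.StarGraph;
import org.apache.flink.graph.library.linkanalysis.PageRank;
import org.apache.flink.graph.library.linkanalysis.PageRank.Result;
import org.apache.flink.types.LongValue;
import org.apache.flink.types.NullValue;

public class PageRankExample {
    public static void main(String[] args) throws Exception {
        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
        // A small generated graph stands in for real input.
        Graph<LongValue, NullValue, NullValue> graph = new StarGraph(env, 10).generate();
        // Damping factor 0.85, at most 20 iterations (arbitrary illustrative values).
        DataSet<Result<LongValue>> ranks =
            graph.run(new PageRank<LongValue, NullValue, NullValue>(0.85, 20));
        ranks.print();
    }
}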
use of org.apache.flink.graph.Edge in project flink by splunk.
the class EdgeDegreesPairTest method testWithRMatGraph.
@Test
public void testWithRMatGraph() throws Exception {
    DataSet<Edge<LongValue, Tuple3<NullValue, Degrees, Degrees>>> degreesPair = directedRMatGraph(10, 16)
        .run(new EdgeDegreesPair<>());
    Checksum checksum = new ChecksumHashCode<Edge<LongValue, Tuple3<NullValue, Degrees, Degrees>>>()
        .run(degreesPair)
        .execute();
    assertEquals(12009, checksum.getCount());
    assertEquals(0x0000176fe94702a3L, checksum.getChecksum());
}
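The test builds its input with the directedRMatGraph helper from the surrounding test base class. Outside the test harness the annotation runs on any directed graph; a minimal sketch, assuming the illustrative class name EdgeDegreesPairExample and a small generated cycle standing in for the RMat input.

import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.graph.Edge;
import org.apache.flink.graph.Graph;
import org.apache.flink.graph.asm.degree.annotate.directed.EdgeDegreesPair;
import org.apache.flink.graph.asm.degree.annotate.directed.VertexDegrees.Degrees;
import org.apache.flink.graph.generator.CycleGraph;
import org.apache.flink.types.LongValue;
import org.apache.flink.types.NullValue;

public class EdgeDegreesPairExample {
    public static void main(String[] args) throws Exception {
        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
        // Any directed Graph works; an 8-vertex cycle is used here for brevity.
        Graph<LongValue, NullValue, NullValue> graph = new CycleGraph(env, 8).generate();
        // Annotate each edge with the Degrees of both its source and target vertex.
        DataSet<Edge<LongValue, Tuple3<NullValue, Degrees, Degrees>>> annotated =
            graph.run(new EdgeDegreesPair<LongValue, NullValue, NullValue>());
        annotated.print();
    }
}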