Use of org.apache.flink.graph.Vertex in project flink by apache.
The class ConnectedComponents, method main:
@SuppressWarnings("serial")
public static void main(String[] args) throws Exception {
if (!parseParameters(args)) {
return;
}
ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
DataSet<Edge<Long, NullValue>> edges = getEdgesDataSet(env);
Graph<Long, Long, NullValue> graph = Graph.fromDataSet(edges, new MapFunction<Long, Long>() {
@Override
public Long map(Long value) throws Exception {
return value;
}
}, env);
DataSet<Vertex<Long, Long>> verticesWithMinIds = graph.run(new GSAConnectedComponents<Long, Long, NullValue>(maxIterations));
// emit result
if (fileOutput) {
verticesWithMinIds.writeAsCsv(outputPath, "\n", ",");
// since file sinks are lazy, we trigger the execution explicitly
env.execute("Connected Components Example");
} else {
verticesWithMinIds.print();
}
}
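The helpers referenced above (parseParameters, getEdgesDataSet, fileOutput, outputPath, maxIterations) are defined elsewhere in the example class and not shown here. A minimal sketch of what the edge source could look like, assuming CSV input of "source,target" long pairs; the edgesInputPath field and the hard-coded fallback are assumptions for illustration, not the example's actual code (imports omitted, as in the surrounding snippets):

// Hypothetical sketch of the getEdgesDataSet helper: read "source,target"
// pairs from CSV and wrap them as edges with NullValue edge values.
private static DataSet<Edge<Long, NullValue>> getEdgesDataSet(ExecutionEnvironment env) {
    if (fileOutput) {
        return env.readCsvFile(edgesInputPath) // edgesInputPath: assumed field
            .fieldDelimiter(",")
            .lineDelimiter("\n")
            .types(Long.class, Long.class)
            .map(new MapFunction<Tuple2<Long, Long>, Edge<Long, NullValue>>() {
                @Override
                public Edge<Long, NullValue> map(Tuple2<Long, Long> tuple) {
                    return new Edge<>(tuple.f0, tuple.f1, NullValue.getInstance());
                }
            });
    } else {
        // fall back to a small hard-coded edge set for local runs
        return env.fromElements(
            new Edge<>(1L, 2L, NullValue.getInstance()),
            new Edge<>(2L, 3L, NullValue.getInstance()),
            new Edge<>(4L, 5L, NullValue.getInstance()));
    }
}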
Use of org.apache.flink.graph.Vertex in project flink by apache.
The class CompleteGraph, method generate:
@Override
public Graph<LongValue, NullValue, NullValue> generate() {
    // Vertices
    DataSet<Vertex<LongValue, NullValue>> vertices =
        GraphGeneratorUtils.vertexSequence(env, parallelism, vertexCount);

    // Edges
    LongValueSequenceIterator iterator = new LongValueSequenceIterator(0, this.vertexCount - 1);

    DataSet<Edge<LongValue, NullValue>> edges = env
        .fromParallelCollection(iterator, LongValue.class)
            .setParallelism(parallelism)
            .name("Edge iterators")
        .flatMap(new LinkVertexToAll(vertexCount))
            .setParallelism(parallelism)
            .name("Complete graph edges");

    // Graph
    return Graph.fromDataSet(vertices, edges, env);
}
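For context, this generate() implementation is typically driven as below. A short sketch assuming Gelly's CompleteGraph(env, vertexCount) constructor; the vertex count and parallelism are placeholder values:

ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

// complete graph on 100 vertices: every pair of distinct vertices is connected
Graph<LongValue, NullValue, NullValue> graph = new CompleteGraph(env, 100)
    .setParallelism(4)
    .generate();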
Use of org.apache.flink.graph.Vertex in project flink by apache.
The class SpargelCompilerTest, method testSpargelCompilerWithBroadcastVariable:
@SuppressWarnings("serial")
@Test
public void testSpargelCompilerWithBroadcastVariable() {
final String broadcastVariableName = "broadcast variable";
ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
env.setParallelism(DEFAULT_PARALLELISM);
// compose test program
DataSet<Long> bcVar = env.fromElements(1L);
DataSet<Vertex<Long, Long>> initialVertices = env.fromElements(new Tuple2<>(1L, 1L), new Tuple2<>(2L, 2L)).map(new Tuple2ToVertexMap<>());
DataSet<Edge<Long, NullValue>> edges = env.fromElements(new Tuple2<>(1L, 2L)).map(new MapFunction<Tuple2<Long, Long>, Edge<Long, NullValue>>() {
public Edge<Long, NullValue> map(Tuple2<Long, Long> edge) {
return new Edge<>(edge.f0, edge.f1, NullValue.getInstance());
}
});
Graph<Long, Long, NullValue> graph = Graph.fromDataSet(initialVertices, edges, env);
ScatterGatherConfiguration parameters = new ScatterGatherConfiguration();
parameters.addBroadcastSetForScatterFunction(broadcastVariableName, bcVar);
parameters.addBroadcastSetForGatherFunction(broadcastVariableName, bcVar);
DataSet<Vertex<Long, Long>> result = graph.runScatterGatherIteration(new ConnectedComponents.CCMessenger<>(BasicTypeInfo.LONG_TYPE_INFO), new ConnectedComponents.CCUpdater<>(), 100).getVertices();
result.output(new DiscardingOutputFormat<>());
Plan p = env.createProgramPlan("Spargel Connected Components");
OptimizedPlan op = compileNoStats(p);
// check the sink
SinkPlanNode sink = op.getDataSinks().iterator().next();
assertEquals(ShipStrategyType.FORWARD, sink.getInput().getShipStrategy());
assertEquals(DEFAULT_PARALLELISM, sink.getParallelism());
// check the iteration
WorksetIterationPlanNode iteration = (WorksetIterationPlanNode) sink.getInput().getSource();
assertEquals(DEFAULT_PARALLELISM, iteration.getParallelism());
// check the solution set join and the delta
PlanNode ssDelta = iteration.getSolutionSetDeltaPlanNode();
assertTrue(ssDelta instanceof // this is only true if the update functions preserves
DualInputPlanNode);
// the partitioning
DualInputPlanNode ssJoin = (DualInputPlanNode) ssDelta;
assertEquals(DEFAULT_PARALLELISM, ssJoin.getParallelism());
assertEquals(ShipStrategyType.PARTITION_HASH, ssJoin.getInput1().getShipStrategy());
assertEquals(new FieldList(0), ssJoin.getInput1().getShipStrategyKeys());
// check the workset set join
DualInputPlanNode edgeJoin = (DualInputPlanNode) ssJoin.getInput1().getSource();
assertEquals(DEFAULT_PARALLELISM, edgeJoin.getParallelism());
assertEquals(ShipStrategyType.PARTITION_HASH, edgeJoin.getInput1().getShipStrategy());
assertEquals(ShipStrategyType.FORWARD, edgeJoin.getInput2().getShipStrategy());
assertTrue(edgeJoin.getInput1().getTempMode().isCached());
assertEquals(new FieldList(0), edgeJoin.getInput1().getShipStrategyKeys());
// check that the initial partitioning is pushed out of the loop
assertEquals(ShipStrategyType.PARTITION_HASH, iteration.getInput1().getShipStrategy());
assertEquals(ShipStrategyType.PARTITION_HASH, iteration.getInput2().getShipStrategy());
assertEquals(new FieldList(0), iteration.getInput1().getShipStrategyKeys());
assertEquals(new FieldList(0), iteration.getInput2().getShipStrategyKeys());
}
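The test above only exercises the optimizer plan; the broadcast variable itself is consumed at runtime inside the scatter/gather functions. A hedged sketch of a scatter function reading the set registered under "broadcast variable" (Gelly's ScatterFunction exposes getBroadcastSet(String); the class name and its logic here are illustrative, not part of the test):

// Illustrative scatter function: reads the broadcast set registered via
// addBroadcastSetForScatterFunction and sends the smaller of the vertex
// value and the broadcast value to all out-neighbors.
public static final class BroadcastMessenger extends ScatterFunction<Long, Long, Long, NullValue> {

    @Override
    public void sendMessages(Vertex<Long, Long> vertex) {
        Collection<Long> bcVar = getBroadcastSet("broadcast variable");
        long broadcastValue = bcVar.iterator().next();
        sendMessageToAllNeighbors(Math.min(vertex.getValue(), broadcastValue));
    }
}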
Use of org.apache.flink.graph.Vertex in project flink by apache.
The class ScatterGatherConfigurationITCase, method testRunWithConfiguration:
@Test
public void testRunWithConfiguration() throws Exception {
    /*
     * Test Graph's runScatterGatherIteration when configuration parameters are provided
     */
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

    Graph<Long, Long, Long> graph = Graph
        .fromCollection(TestGraphUtils.getLongLongVertices(), TestGraphUtils.getLongLongEdges(), env)
        .mapVertices(new AssignOneMapper());

    // create the configuration object
    ScatterGatherConfiguration parameters = new ScatterGatherConfiguration();
    parameters.addBroadcastSetForScatterFunction("messagingBcastSet", env.fromElements(4, 5, 6));
    parameters.addBroadcastSetForGatherFunction("updateBcastSet", env.fromElements(1, 2, 3));
    parameters.registerAggregator("superstepAggregator", new LongSumAggregator());
    parameters.setOptNumVertices(true);

    Graph<Long, Long, Long> res = graph.runScatterGatherIteration(
        new MessageFunction(), new UpdateFunction(), 10, parameters);

    DataSet<Vertex<Long, Long>> data = res.getVertices();
    List<Vertex<Long, Long>> result = data.collect();

    expectedResult = "1,11\n" + "2,11\n" + "3,11\n" + "4,11\n" + "5,11";
    compareResultAsTuples(result, expectedResult);
}
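The MessageFunction and UpdateFunction used above are defined elsewhere in the test class. A hedged sketch of what the update (gather) side could look like when wired to this configuration; the body is illustrative, not the test's actual code:

// Illustrative gather function: consumes the broadcast set, feeds the
// registered aggregator, and relies on setOptNumVertices(true).
public static final class UpdateFunction extends GatherFunction<Long, Long, Long> {

    LongSumAggregator aggregator = new LongSumAggregator();

    @Override
    public void preSuperstep() {
        // the aggregator registered as "superstepAggregator"
        aggregator = getIterationAggregator("superstepAggregator");
        // the broadcast set registered for the gather function
        Collection<Integer> bcSet = getBroadcastSet("updateBcastSet");
    }

    @Override
    public void updateVertex(Vertex<Long, Long> vertex, MessageIterator<Long> messages) {
        // aggregated values become visible in the next superstep
        aggregator.aggregate(getSuperstepNumber());
        // available only because setOptNumVertices(true) was set
        long numVertices = getNumberOfVertices();
        // +1 per superstep: starting from 1 (AssignOneMapper) over 10
        // supersteps yields the expected "x,11" values above
        setNewVertexValue(vertex.getValue() + 1);
    }
}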
Use of org.apache.flink.graph.Vertex in project flink by apache.
The class ScatterGatherConfigurationITCase, method testIterationDefaultDirection:
@Test
public void testIterationDefaultDirection() throws Exception {
    /*
     * Test that if no direction parameter is given, the iteration works as before
     * (i.e. it collects messages from the in-neighbors and sends them to the out-neighbors)
     */
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

    Graph<Long, HashSet<Long>, Long> graph = Graph
        .fromCollection(TestGraphUtils.getLongLongVertices(), TestGraphUtils.getLongLongEdges(), env)
        .mapVertices(new InitialiseHashSetMapper());

    DataSet<Vertex<Long, HashSet<Long>>> resultedVertices = graph
        .runScatterGatherIteration(new IdMessengerTrg(), new VertexUpdateDirection(), 5)
        .getVertices();

    List<Vertex<Long, HashSet<Long>>> result = resultedVertices.collect();

    expectedResult = "1,[5]\n" + "2,[1]\n" + "3,[1, 2]\n" + "4,[3]\n" + "5,[3, 4]";
    compareResultAsTuples(result, expectedResult);
}
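For comparison, the messaging direction can also be set explicitly on the configuration object. A short sketch using Gelly's ScatterGatherConfiguration.setDirection(EdgeDirection), so that messages travel along in-edges rather than the default out-edges; the variable name is illustrative:

// same iteration as above, but with the direction reversed
ScatterGatherConfiguration parameters = new ScatterGatherConfiguration();
parameters.setDirection(EdgeDirection.IN);

DataSet<Vertex<Long, HashSet<Long>>> verticesWithInDirection = graph
    .runScatterGatherIteration(new IdMessengerTrg(), new VertexUpdateDirection(), 5, parameters)
    .getVertices();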