Use of edu.iu.dsc.tws.task.impl.ComputeGraphBuilder in project twister2 by DSC-SPIDAL.
From the class ParallelDataFlowsExample, method generateSecondJob.
/**
 * Builds the second sub-graph job: a ConnectedSource reading the "first_out"
 * data set feeds a single-instance ConnectedSink through a "reduce" edge that
 * applies the Aggregator reduction function. The batch graph is wrapped as a
 * non-iterative DataFlowGraph named "second_graph".
 *
 * @param config           twister2 configuration used to create the graph builder
 * @param parallelismValue parallelism for the source task
 * @param workers          worker count assigned to the sub-graph job
 * @param jobConfig        data-flow job configuration attached to the job
 * @return the assembled non-iterative DataFlowGraph
 */
private static DataFlowGraph generateSecondJob(Config config, int parallelismValue, int workers, DataFlowJobConfig jobConfig) {
ComputeGraphBuilder builder = ComputeGraphBuilder.newBuilder(config);
ConnectedSource source = new ConnectedSource("reduce", "first_out");
ConnectedSink sink = new ConnectedSink();
builder.addSource("source1", source, parallelismValue);
// Sink runs with parallelism 1 so the reduce collapses to a single task.
ComputeConnection connection = builder.addCompute("sink1", sink, 1);
connection.reduce("source1")
    .viaEdge("reduce")
    .withReductionFunction(new Aggregator())
    .withDataType(MessageTypes.OBJECT);
builder.setMode(OperationMode.BATCH);
ComputeGraph batchGraph = builder.build();
return DataFlowGraph.newSubGraphJob("second_graph", batchGraph)
    .setWorkers(workers)
    .addDataFlowJobConfig(jobConfig)
    .setGraphType("non-iterative");
}
Use of edu.iu.dsc.tws.task.impl.ComputeGraphBuilder in project twister2 by DSC-SPIDAL.
From the class ParallelDataFlowsExample, method generateFirstJob.
/**
 * Builds the first sub-graph job: a FirstSourceTask feeds a ConnectedSink
 * (publishing under "first_out") through a "partition" edge. The batch graph
 * is wrapped as a non-iterative DataFlowGraph named "first_graph".
 *
 * @param config           twister2 configuration used to create the graph builder
 * @param parallelismValue parallelism for both the source and the sink task
 * @param workers          worker count assigned to the sub-graph job
 * @param jobConfig        data-flow job configuration attached to the job
 * @return the assembled non-iterative DataFlowGraph
 */
private static DataFlowGraph generateFirstJob(Config config, int parallelismValue, int workers, DataFlowJobConfig jobConfig) {
ComputeGraphBuilder builder = ComputeGraphBuilder.newBuilder(config);
FirstSourceTask sourceTask = new FirstSourceTask();
// Sink is named "first_out"; presumably the second job's source reads this
// name — confirm against generateSecondJob's ConnectedSource.
ConnectedSink sinkTask = new ConnectedSink("first_out");
builder.addSource("source1", sourceTask, parallelismValue);
ComputeConnection connection = builder.addCompute("sink1", sinkTask, parallelismValue);
connection.partition("source1")
    .viaEdge("partition")
    .withDataType(MessageTypes.OBJECT);
builder.setMode(OperationMode.BATCH);
ComputeGraph batchGraph = builder.build();
return DataFlowGraph.newSubGraphJob("first_graph", batchGraph)
    .setWorkers(workers)
    .addDataFlowJobConfig(jobConfig)
    .setGraphType("non-iterative");
}
Use of edu.iu.dsc.tws.task.impl.ComputeGraphBuilder in project twister2 by DSC-SPIDAL.
From the class KMeansComputeJob, method buildCentroidsTG.
/**
 * Constructs the batch task graph "centTG" that loads centroid data. The graph
 * contains a single source task backed by a PointDataSource reading from the
 * given directory; no compute or sink tasks are attached.
 *
 * @param centroidDirectory directory the PointDataSource reads centroids from
 * @param csize             size value passed through to the PointDataSource
 * @param parallelismValue  parallelism for the source task
 * @param dimension         dimensionality passed through to the PointDataSource
 * @param conf              twister2 configuration used to create the builder
 * @param filetype          file type passed through to the PointDataSource
 * @return the built batch ComputeGraph named "centTG"
 */
public static ComputeGraph buildCentroidsTG(String centroidDirectory, int csize, int parallelismValue, int dimension, Config conf, String filetype) {
PointDataSource centroidSource = new PointDataSource(Context.TWISTER2_DIRECT_EDGE, centroidDirectory, "centroids", dimension, csize, filetype);
ComputeGraphBuilder builder = ComputeGraphBuilder.newBuilder(conf);
builder.addSource("centroidsource", centroidSource, parallelismValue);
builder.setMode(OperationMode.BATCH);
builder.setTaskGraphName("centTG");
return builder.build();
}
Use of edu.iu.dsc.tws.task.impl.ComputeGraphBuilder in project twister2 by DSC-SPIDAL.
From the class KMeansComputeJob, method buildKMeansTG.
/**
 * Constructs the batch task graph "kmeansTG": a KMeansSourceTask feeds a
 * KMeansAllReduceTask over an "all-reduce" edge that combines partial results
 * with the CentroidAggregator reduction function.
 *
 * @param parallelismValue parallelism for both the source and the sink task
 * @param conf             twister2 configuration used to create the builder
 * @return the built batch ComputeGraph named "kmeansTG"
 */
public static ComputeGraph buildKMeansTG(int parallelismValue, Config conf) {
ComputeGraphBuilder builder = ComputeGraphBuilder.newBuilder(conf);
KMeansSourceTask sourceTask = new KMeansSourceTask();
KMeansAllReduceTask allReduceTask = new KMeansAllReduceTask();
// Wire up the source and sink tasks for this task graph.
builder.addSource("kmeanssource", sourceTask, parallelismValue);
ComputeConnection connection = builder.addCompute("kmeanssink", allReduceTask, parallelismValue);
// Connect the tasks with an all-reduce edge carrying OBJECT-typed data.
connection.allreduce("kmeanssource")
    .viaEdge("all-reduce")
    .withReductionFunction(new CentroidAggregator())
    .withDataType(MessageTypes.OBJECT);
builder.setMode(OperationMode.BATCH);
builder.setTaskGraphName("kmeansTG");
return builder.build();
}
Use of edu.iu.dsc.tws.task.impl.ComputeGraphBuilder in project twister2 by DSC-SPIDAL.
From the class KMeansConnectedDataflowExample, method generateThirdJob.
/**
 * Builds the third sub-graph job: a KMeansSourceTask (constructed with the
 * given dimension) feeds a KMeansAllReduceTask over an "all-reduce" edge using
 * the CentroidAggregator reduction function. The batch graph is wrapped as an
 * iterative DataFlowGraph named "kmeansTG" with the requested iteration count.
 *
 * @param config           twister2 configuration used to create the builder
 * @param parallelismValue parallelism for both the source and the sink task
 * @param instances        worker count assigned to the sub-graph job
 * @param iterations       number of iterations for the iterative graph
 * @param dimension        dimensionality passed to the KMeansSourceTask
 * @param jobConfig        data-flow job configuration attached to the job
 * @return the assembled iterative DataFlowGraph
 */
private static DataFlowGraph generateThirdJob(Config config, int parallelismValue, int instances, int iterations, int dimension, DataFlowJobConfig jobConfig) {
ComputeGraphBuilder builder = ComputeGraphBuilder.newBuilder(config);
KMeansSourceTask sourceTask = new KMeansSourceTask(dimension);
KMeansAllReduceTask allReduceTask = new KMeansAllReduceTask();
// Wire up the source and sink tasks for this task graph.
builder.addSource("kmeanssource", sourceTask, parallelismValue);
ComputeConnection connection = builder.addCompute("kmeanssink", allReduceTask, parallelismValue);
// Connect the tasks with an all-reduce edge carrying OBJECT-typed data.
connection.allreduce("kmeanssource")
    .viaEdge("all-reduce")
    .withReductionFunction(new CentroidAggregator())
    .withDataType(MessageTypes.OBJECT);
builder.setMode(OperationMode.BATCH);
builder.setTaskGraphName("kmeansTG");
ComputeGraph thirdGraph = builder.build();
return DataFlowGraph.newSubGraphJob("kmeansTG", thirdGraph)
    .setWorkers(instances)
    .addDataFlowJobConfig(jobConfig)
    .setGraphType("iterative")
    .setIterations(iterations);
}
Aggregations