Usage example of edu.iu.dsc.tws.comms.selectors.HashingSelector in project twister2 by DSC-SPIDAL: class WordCountWorker, method execute.
/**
 * Worker entry point: builds the two-stage logical plan, wires up a batch
 * keyed-reduce (word -> summed count) over the communicator, runs the tasks
 * on threads, and drives communication progress until completion.
 */
@Override
public void execute(WorkerEnvironment wEnv) {
  // Keep a handle on the worker environment and this worker's id.
  this.workerEnv = wEnv;
  this.workerId = workerEnv.getWorkerId();

  // Two stages (sources and destinations), each with NO_OF_TASKS tasks.
  taskStages.add(NO_OF_TASKS);
  taskStages.add(NO_OF_TASKS);

  // Map the stage tasks onto the participating workers.
  this.logicalPlan = Utils.createStageLogicalPlan(workerEnv, taskStages);
  setupTasks();

  wordAggregator = new WordAggregator();

  // Integer-sum reducer for the keyed reduce; it carries no state, so init
  // has nothing to do.
  ReduceFunction sumReducer = new ReduceFunction() {
    @Override
    public void init(Config cfg, DataFlowOperation op, Map<Integer, List<Integer>> expectedIds) {
      // no initialization required
    }

    @Override
    public Object reduce(Object t1, Object t2) {
      return (Integer) t1 + (Integer) t2;
    }
  };

  // Batch keyed-reduce: keys routed by hash, values summed per key.
  keyGather = new BKeyedReduce(workerEnv.getCommunicator(), logicalPlan, sources, destinations,
      sumReducer, wordAggregator, MessageTypes.OBJECT, MessageTypes.INTEGER, new HashingSelector());

  // Assign the task ids to the workers and run them using threads.
  scheduleTasks();
  // Drive the communication operation until it completes.
  progress();
  // Release communication resources.
  workerEnv.close();
}
Usage example of edu.iu.dsc.tws.comms.selectors.HashingSelector in project twister2 by DSC-SPIDAL: class WordCountWorker, method execute.
// Worker entry point (older IWorker API): sets up tasks and the network,
// creates a streaming keyed partition with hash-based key routing, then
// schedules the tasks and drives communication progress.
@Override
public void execute(Config cfg, JobAPI.Job job, IWorkerController workerController, IPersistentVolume persistentVolume, IVolatileVolume volatileVolume) {
// Remember the job configuration and this worker's numeric id.
this.config = cfg;
this.id = workerController.getWorkerInfo().getWorkerID();
// Tasks are divided evenly across the participating workers.
this.noOfTasksPerExecutor = NO_OF_TASKS / workerController.getNumberOfWorkers();
setupTasks(workerController);
setupNetwork(workerController);
// create the communication: streaming partition whose destinations are
// chosen by hashing the key (HashingSelector).
keyedPartition = new SPartition(channel, logicalPlan, sources, destinations, MessageTypes.OBJECT, new WordAggregate(), new HashingSelector());
// Run the assigned tasks on threads and loop on communication progress.
scheduleTasks();
progress();
}
Usage example of edu.iu.dsc.tws.comms.selectors.HashingSelector in project twister2 by DSC-SPIDAL: class SKeyedPartitionExample, method compute.
/**
 * Sets up a streaming keyed partition (integer keys hashed to destinations,
 * int[] payloads), a results verifier for the expected tuples, and starts one
 * producer thread per source task placed on this worker.
 */
@Override
protected void compute(WorkerEnvironment workerEnv) {
  // Distribute the configured sources and targets fairly over the workers.
  LogicalPlanBuilder planBuilder = LogicalPlanBuilder
      .plan(jobParameters.getSources(), jobParameters.getTargets(), workerEnv)
      .withFairDistribution();

  // create the communication: key routing is done by hashing the key.
  partition = new SKeyedPartition(workerEnv.getCommunicator(), planBuilder,
      MessageTypes.INTEGER, MessageTypes.INTEGER_ARRAY,
      new PartitionReceiver(), new HashingSelector());

  // Expected output: each tuple carries the input array; the key may be
  // any int, so the key comparator accepts everything.
  this.resultsVerifier = new ResultsVerifier<>(inputDataArray,
      (ints, args) -> new Tuple<>(-1, ints),
      new TupleComparator<>((k1, k2) -> true, IntArrayComparator.getInstance()));

  // Launch one map thread per source task assigned to this worker; each
  // thread produces the data for its task.
  for (int sourceTask : planBuilder.getSourcesOnThisWorker()) {
    new Thread(new MapWorker(sourceTask)).start();
  }
}
Aggregations