Use of edu.iu.dsc.tws.api.compute.schedule.elements.WorkerSchedulePlan in project twister2 by DSC-SPIDAL.
In class DataLocalityBatchTaskSchedulerTest, method testUniqueSchedules1:
@Test
public void testUniqueSchedules1() {
  int parallel = 4;
  int workers = 2;
  ComputeGraph graph = createGraph(parallel);
  DataLocalityBatchTaskScheduler scheduler = new DataLocalityBatchTaskScheduler();
  Config config = getConfig();
  scheduler.initialize(config);
  generateData(config);
  WorkerPlan workerPlan = createWorkPlan(workers);
  TaskSchedulePlan plan1 = scheduler.schedule(graph, workerPlan);
  WorkerPlan workerPlan2 = createWorkPlan2(workers);
  for (int i = 0; i < 10; i++) {
    TaskSchedulePlan plan2 = scheduler.schedule(graph, workerPlan2);
    Assert.assertEquals(plan1.getContainers().size(), plan2.getContainers().size());
    Map<Integer, WorkerSchedulePlan> containersMap = plan2.getContainersMap();
    for (Map.Entry<Integer, WorkerSchedulePlan> entry : containersMap.entrySet()) {
      WorkerSchedulePlan workerSchedulePlan = entry.getValue();
      Set<TaskInstancePlan> containerPlanTaskInstances = workerSchedulePlan.getTaskInstances();
      Assert.assertEquals(containerPlanTaskInstances.size() / graph.getTaskVertexSet().size(),
          TaskSchedulerContext.defaultTaskInstancesPerContainer(config));
    }
  }
}
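The createWorkPlan and createWorkPlan2 helpers are defined elsewhere in the test class and are not shown here. A minimal sketch of what such a helper might look like is given below; it assumes the Worker class from the same elements package with a Worker(int id) constructor, Worker.addProperty(String, Object), and WorkerPlan.addWorker(Worker), so the exact calls should be checked against the twister2 source.

// Hypothetical sketch of a createWorkPlan-style helper (assumed API, not verbatim from the project).
private WorkerPlan createWorkPlan(int numberOfWorkers) {
  WorkerPlan plan = new WorkerPlan();
  for (int workerId = 0; workerId < numberOfWorkers; workerId++) {
    Worker worker = new Worker(workerId);
    // Data-locality schedulers weight candidate workers by network properties such as these.
    worker.addProperty("bandwidth", 1000.0);
    worker.addProperty("latency", 0.1);
    plan.addWorker(worker);
  }
  return plan;
}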
Use of edu.iu.dsc.tws.api.compute.schedule.elements.WorkerSchedulePlan in project twister2 by DSC-SPIDAL.
In class DataLocalityTaskSchedulerTest, method testUniqueSchedules1:
@Test
public void testUniqueSchedules1() {
  int parallel = 2;
  int workers = 2;
  ComputeGraph graph = createGraph(parallel);
  DataLocalityStreamingTaskScheduler scheduler = new DataLocalityStreamingTaskScheduler();
  Config config = getConfig();
  scheduler.initialize(config, 1);
  generateData(config);
  WorkerPlan workerPlan = createWorkPlan(workers);
  TaskSchedulePlan plan1 = scheduler.schedule(graph, workerPlan);
  WorkerPlan workerPlan2 = createWorkPlan2(workers);
  for (int i = 0; i < 10; i++) {
    TaskSchedulePlan plan2 = scheduler.schedule(graph, workerPlan2);
    Assert.assertEquals(plan1.getContainers().size(), plan2.getContainers().size());
    Map<Integer, WorkerSchedulePlan> containersMap = plan2.getContainersMap();
    for (Map.Entry<Integer, WorkerSchedulePlan> entry : containersMap.entrySet()) {
      WorkerSchedulePlan workerSchedulePlan = entry.getValue();
      Set<TaskInstancePlan> containerPlanTaskInstances = workerSchedulePlan.getTaskInstances();
      Assert.assertEquals(containerPlanTaskInstances.size(),
          TaskSchedulerContext.defaultTaskInstancesPerContainer(config));
    }
  }
}
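Beyond counting instances, the same WorkerSchedulePlan objects can be inspected for the individual task assignments. A minimal sketch follows, assuming TaskInstancePlan exposes getTaskName/getTaskId/getTaskIndex accessors (names assumed; verify against the twister2 API):

// Sketch: log which task instances landed on which worker (accessor names assumed).
for (Map.Entry<Integer, WorkerSchedulePlan> entry : plan1.getContainersMap().entrySet()) {
  int workerId = entry.getKey();
  for (TaskInstancePlan instance : entry.getValue().getTaskInstances()) {
    System.out.println("worker " + workerId + " -> task " + instance.getTaskName()
        + " (id=" + instance.getTaskId() + ", index=" + instance.getTaskIndex() + ")");
  }
}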
Use of edu.iu.dsc.tws.api.compute.schedule.elements.WorkerSchedulePlan in project twister2 by DSC-SPIDAL.
In class DataLocalityTaskSchedulerTest, method testUniqueSchedules3:
@Test
public void testUniqueSchedules3() {
  int parallel = 10;
  int workers = 3;
  ComputeGraph graph = createGraphWithComputeTaskAndConstraints(parallel);
  DataLocalityStreamingTaskScheduler scheduler = new DataLocalityStreamingTaskScheduler();
  Config config = getConfig();
  scheduler.initialize(config, 1);
  generateData(config);
  WorkerPlan workerPlan = createWorkPlan(workers);
  TaskSchedulePlan plan1 = scheduler.schedule(graph, workerPlan);
  Assert.assertNotNull(plan1);
  Map<Integer, WorkerSchedulePlan> containersMap = plan1.getContainersMap();
  for (Map.Entry<Integer, WorkerSchedulePlan> entry : containersMap.entrySet()) {
    WorkerSchedulePlan workerSchedulePlan = entry.getValue();
    Set<TaskInstancePlan> containerPlanTaskInstances = workerSchedulePlan.getTaskInstances();
    Assert.assertEquals(containerPlanTaskInstances.size(),
        Integer.parseInt(graph.getGraphConstraints().get(Context.TWISTER2_MAX_TASK_INSTANCES_PER_WORKER)));
  }
}
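The constraint read back by the assertion has to be attached when the graph is built inside createGraphWithComputeTaskAndConstraints. A rough sketch of that step is shown below; both the addGraphConstraints method name and the value "4" are assumptions for illustration, not taken from the project source.

// Hypothetical constraint setup inside the graph-building helper (API and value assumed).
Map<String, String> constraints = new HashMap<>();
constraints.put(Context.TWISTER2_MAX_TASK_INSTANCES_PER_WORKER, "4");
graph.addGraphConstraints(constraints);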
Use of edu.iu.dsc.tws.api.compute.schedule.elements.WorkerSchedulePlan in project twister2 by DSC-SPIDAL.
In class BatchTaskSchedulerTest, method testUniqueSchedules6:
@Test
public void testUniqueSchedules6() {
  int parallel = 200;
  int workers = 400;
  ComputeGraph[] graph = new ComputeGraph[3];
  Arrays.setAll(graph, i -> createGraphWithDifferentParallelism(parallel, "graph" + i));
  BatchTaskScheduler scheduler = new BatchTaskScheduler();
  scheduler.initialize(Config.newBuilder().build());
  WorkerPlan workerPlan = createWorkPlan(workers);
  Map<String, TaskSchedulePlan> plan1 = scheduler.schedule(workerPlan, graph[0], graph[1]);
  for (Map.Entry<String, TaskSchedulePlan> taskSchedulePlanEntry : plan1.entrySet()) {
    TaskSchedulePlan plan2 = taskSchedulePlanEntry.getValue();
    Map<Integer, WorkerSchedulePlan> containersMap = plan2.getContainersMap();
    int index = 0;
    for (Map.Entry<Integer, WorkerSchedulePlan> entry : containersMap.entrySet()) {
      WorkerSchedulePlan workerSchedulePlan = entry.getValue();
      Set<TaskInstancePlan> containerPlanTaskInstances = workerSchedulePlan.getTaskInstances();
      index++;
      if (index <= parallel) {
        Assert.assertEquals(containerPlanTaskInstances.size(), workers / parallel);
      } else {
        Assert.assertEquals(containerPlanTaskInstances.size(), 0);
      }
    }
  }
}
Use of edu.iu.dsc.tws.api.compute.schedule.elements.WorkerSchedulePlan in project twister2 by DSC-SPIDAL.
In class BatchTaskSchedulerTest, method testUniqueSchedules4:
@Test
public void testUniqueSchedules4() {
  int parallel = 400;
  int workers = 200;
  ComputeGraph[] graph = new ComputeGraph[3];
  Arrays.setAll(graph, i -> createGraph(parallel, "graph" + i));
  BatchTaskScheduler scheduler = new BatchTaskScheduler();
  scheduler.initialize(Config.newBuilder().build());
  WorkerPlan workerPlan = createWorkPlan(workers);
  Map<String, TaskSchedulePlan> taskSchedulePlanMap = scheduler.schedule(workerPlan, graph[0], graph[1], graph[2]);
  for (Map.Entry<String, TaskSchedulePlan> taskSchedulePlanEntry : taskSchedulePlanMap.entrySet()) {
    TaskSchedulePlan taskSchedulePlan = taskSchedulePlanEntry.getValue();
    Map<Integer, WorkerSchedulePlan> containersMap = taskSchedulePlan.getContainersMap();
    for (Map.Entry<Integer, WorkerSchedulePlan> entry : containersMap.entrySet()) {
      WorkerSchedulePlan workerSchedulePlan = entry.getValue();
      Set<TaskInstancePlan> containerPlanTaskInstances = workerSchedulePlan.getTaskInstances();
      Assert.assertEquals(containerPlanTaskInstances.size(),
          (parallel / workers) * graph[0].getTaskVertexSet().size());
    }
  }
}
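When several graphs are scheduled together, BatchTaskScheduler returns the plans keyed by graph name. A small sketch for sanity-checking the total number of scheduled instances per graph, using only the accessors already shown in the snippets above:

// Sketch: total scheduled instances per graph should equal parallelism * number of task vertices.
for (Map.Entry<String, TaskSchedulePlan> planEntry : taskSchedulePlanMap.entrySet()) {
  int totalInstances = 0;
  for (WorkerSchedulePlan workerSchedulePlan : planEntry.getValue().getContainersMap().values()) {
    totalInstances += workerSchedulePlan.getTaskInstances().size();
  }
  System.out.println(planEntry.getKey() + ": " + totalInstances + " task instances scheduled");
}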