Uses of edu.iu.dsc.tws.api.compute.schedule.elements.TaskInstancePlan in project twister2 by DSC-SPIDAL.

From the class RoundRobinBatchTaskSchedulerTest, method testUniqueSchedules1:
@Test
public void testUniqueSchedules1() {
  int parallel = 16;
  int workers = 2;
  ComputeGraph graph = createGraph(parallel);
  RoundRobinBatchTaskScheduler scheduler = new RoundRobinBatchTaskScheduler();
  scheduler.initialize(Config.newBuilder().build());
  WorkerPlan workerPlan = createWorkPlan(workers);
  for (int i = 0; i < 1; i++) {
    // Scheduling the same graph twice on the same worker plan should produce
    // plans with the same number of containers.
    TaskSchedulePlan plan1 = scheduler.schedule(graph, workerPlan);
    TaskSchedulePlan plan2 = scheduler.schedule(graph, workerPlan);
    Assert.assertEquals(plan1.getContainers().size(), plan2.getContainers().size());
    Map<Integer, WorkerSchedulePlan> containersMap = plan1.getContainersMap();
    for (Map.Entry<Integer, WorkerSchedulePlan> entry : containersMap.entrySet()) {
      WorkerSchedulePlan workerSchedulePlan = entry.getValue();
      Set<TaskInstancePlan> containerPlanTaskInstances = workerSchedulePlan.getTaskInstances();
      // Each worker receives as many task instances as the source vertex parallelism.
      Assert.assertEquals(containerPlanTaskInstances.size(),
          graph.vertex("source").getParallelism());
    }
  }
}
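
The tests on this page call helper methods such as createWorkPlan that are not part of the scheduler API. For orientation, here is a minimal sketch of what such a helper could look like, assuming only the Worker and WorkerPlan classes from the same edu.iu.dsc.tws.api.compute.schedule.elements package; the helper name and body are illustrative, and the actual test class may also attach worker properties (for example bandwidth and latency):

// Hypothetical helper: builds a WorkerPlan containing 'workers' plain Worker entries.
private WorkerPlan createWorkPlan(int workers) {
  WorkerPlan plan = new WorkerPlan();
  for (int i = 0; i < workers; i++) {
    plan.addWorker(new Worker(i));  // Worker is identified by its integer id
  }
  return plan;
}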

From the class DataLocalityBatchTaskSchedulerTest, method testUniqueSchedules2:
@Test
public void testUniqueSchedules2() {
  int parallel = 4;
  int workers = 2;
  ComputeGraph graph = createGraphWithConstraints(parallel);
  DataLocalityBatchTaskScheduler scheduler = new DataLocalityBatchTaskScheduler();
  Config config = getConfig();
  scheduler.initialize(config, 1);
  generateData(config);
  // Generate sample input files so that the scheduler has data-locality information.
  DataGenerator dataGenerator = new DataGenerator(config);
  dataGenerator.generate(
      new Path(String.valueOf(config.get(DataObjectConstants.DINPUT_DIRECTORY))), 1000, 2);
  WorkerPlan workerPlan = createWorkPlan(workers);
  TaskSchedulePlan plan1 = scheduler.schedule(graph, workerPlan);
  Assert.assertNotNull(plan1);
  Map<Integer, WorkerSchedulePlan> containersMap = plan1.getContainersMap();
  for (Map.Entry<Integer, WorkerSchedulePlan> entry : containersMap.entrySet()) {
    WorkerSchedulePlan workerSchedulePlan = entry.getValue();
    Set<TaskInstancePlan> containerPlanTaskInstances = workerSchedulePlan.getTaskInstances();
    LOG.info("container plan instances and parallel: "
        + containerPlanTaskInstances.size() + "\t" + parallel);
    // Each worker is expected to hold exactly 'parallel' task instances.
    Assert.assertEquals(containerPlanTaskInstances.size(), parallel);
  }
}
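
The data-locality tests additionally rely on a getConfig() helper that supplies, among other things, the input directory read by generateData and DataGenerator above. A hedged sketch, assuming the Config builder from edu.iu.dsc.tws.api.config and a hypothetical /tmp path; the real helper very likely sets further keys (file system, data size, and so on):

// Hypothetical configuration helper; only the input-directory key used above is shown.
private Config getConfig() {
  return Config.newBuilder()
      .put(DataObjectConstants.DINPUT_DIRECTORY, "/tmp/dinput")  // assumed local test path
      .build();
}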

From the class DataLocalityBatchTaskSchedulerTest, method testUniqueSchedules4:
@Test
public void testUniqueSchedules4() {
  int parallel = 4;
  int workers = 2;
  ComputeGraph graph = createGraphWithMultipleComputeTaskAndConstraints(parallel);
  DataLocalityBatchTaskScheduler scheduler = new DataLocalityBatchTaskScheduler();
  Config config = getConfig();
  scheduler.initialize(config, 1);
  generateData(config);
  WorkerPlan workerPlan = createWorkPlan(workers);
  TaskSchedulePlan plan1 = scheduler.schedule(graph, workerPlan);
  Assert.assertNotNull(plan1);
  Map<Integer, WorkerSchedulePlan> containersMap = plan1.getContainersMap();
  for (Map.Entry<Integer, WorkerSchedulePlan> entry : containersMap.entrySet()) {
    WorkerSchedulePlan workerSchedulePlan = entry.getValue();
    Set<TaskInstancePlan> containerPlanTaskInstances = workerSchedulePlan.getTaskInstances();
    // Each worker is expected to hold workers * (number of task vertices) instances.
    Assert.assertEquals(containerPlanTaskInstances.size(),
        workers * graph.getTaskVertexSet().size());
  }
}

From the class DataLocalityBatchTaskSchedulerTest, method testUniqueSchedules3:
@Test
public void testUniqueSchedules3() {
  int parallel = 4;
  int workers = 2;
  ComputeGraph graph = createGraphWithComputeTaskAndConstraints(parallel);
  DataLocalityBatchTaskScheduler scheduler = new DataLocalityBatchTaskScheduler();
  Config config = getConfig();
  scheduler.initialize(config, 1);
  generateData(config);
  // Generate sample input files so that the scheduler has data-locality information.
  DataGenerator dataGenerator = new DataGenerator(config);
  dataGenerator.generate(
      new Path(String.valueOf(config.get(DataObjectConstants.DINPUT_DIRECTORY))), 1000, 2);
  WorkerPlan workerPlan = createWorkPlan(workers);
  TaskSchedulePlan plan1 = scheduler.schedule(graph, workerPlan);
  Assert.assertNotNull(plan1);
  Map<Integer, WorkerSchedulePlan> containersMap = plan1.getContainersMap();
  for (Map.Entry<Integer, WorkerSchedulePlan> entry : containersMap.entrySet()) {
    WorkerSchedulePlan workerSchedulePlan = entry.getValue();
    Set<TaskInstancePlan> containerPlanTaskInstances = workerSchedulePlan.getTaskInstances();
    LOG.info("container plan instances and expected count: "
        + containerPlanTaskInstances.size() + "\t" + workers * graph.getTaskVertexSet().size());
    // Each worker is expected to hold workers * (number of task vertices) instances.
    Assert.assertEquals(containerPlanTaskInstances.size(),
        workers * graph.getTaskVertexSet().size());
  }
}
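
The next snippet asserts against a graph-level constraint rather than a fixed number. As a small illustration of that mechanism, the constraint value can be read back from the graph's constraint map; this sketch assumes only getGraphConstraints() and the Context key used below, and the fallback value is an arbitrary assumption:

// Hypothetical lookup of the per-worker instance cap from the graph constraints,
// with an arbitrary default when the constraint is not set.
String maxInstancesPerWorker = graph.getGraphConstraints()
    .getOrDefault(Context.TWISTER2_MAX_TASK_INSTANCES_PER_WORKER, "32");
int instanceCap = Integer.parseInt(maxInstancesPerWorker);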

From the class DataLocalityTaskSchedulerTest, method testUniqueSchedules2:
@Test
public void testUniqueSchedules2() {
  int parallel = 10;
  int workers = 2;
  ComputeGraph graph = createGraphWithConstraints(parallel);
  DataLocalityStreamingTaskScheduler scheduler = new DataLocalityStreamingTaskScheduler();
  Config config = getConfig();
  scheduler.initialize(config, 1);
  generateData(config);
  WorkerPlan workerPlan = createWorkPlan(workers);
  TaskSchedulePlan plan1 = scheduler.schedule(graph, workerPlan);
  Assert.assertNotNull(plan1);
  Map<Integer, WorkerSchedulePlan> containersMap = plan1.getContainersMap();
  for (Map.Entry<Integer, WorkerSchedulePlan> entry : containersMap.entrySet()) {
    WorkerSchedulePlan workerSchedulePlan = entry.getValue();
    Set<TaskInstancePlan> containerPlanTaskInstances = workerSchedulePlan.getTaskInstances();
    // The per-worker instance count must match the graph-level
    // TWISTER2_MAX_TASK_INSTANCES_PER_WORKER constraint.
    Assert.assertEquals(containerPlanTaskInstances.size(),
        Integer.parseInt(graph.getGraphConstraints()
            .get(Context.TWISTER2_MAX_TASK_INSTANCES_PER_WORKER)));
  }
}
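
All of the tests above only check the size of each instance set. Since this page aggregates usages of TaskInstancePlan, note that individual instances can also be inspected; a brief sketch, assuming the conventional accessor names getTaskName(), getTaskId() and getTaskIndex() on TaskInstancePlan (verify against the class before relying on them):

// Hypothetical inspection of the instances assigned to one worker.
for (TaskInstancePlan instancePlan : workerSchedulePlan.getTaskInstances()) {
  LOG.info("scheduled task " + instancePlan.getTaskName()
      + " (id=" + instancePlan.getTaskId()
      + ", index=" + instancePlan.getTaskIndex() + ")");
}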