Use of edu.iu.dsc.tws.api.data.Path in project twister2 by DSC-SPIDAL.
The class DataLocalityBatchTaskSchedulerTest, method generateData.
/**
 * Writes synthetic input data for the scheduler test.
 *
 * <p>Reads the input directory, point count, and dimension count from the
 * supplied {@link Config} and delegates file creation to {@link DataGenerator}.
 *
 * @param config job configuration expected to carry
 *               {@code DINPUT_DIRECTORY}, {@code DSIZE}, and {@code DIMENSIONS}
 */
private void generateData(Config config) {
  DataGenerator generator = new DataGenerator(config);
  String inputDirectory = String.valueOf(config.get(DataObjectConstants.DINPUT_DIRECTORY));
  int dataSize = Integer.parseInt(String.valueOf(config.get(DataObjectConstants.DSIZE)));
  int dimensions = Integer.parseInt(String.valueOf(config.get(DataObjectConstants.DIMENSIONS)));
  generator.generate(new Path(inputDirectory), dataSize, dimensions);
}
Use of edu.iu.dsc.tws.api.data.Path in project twister2 by DSC-SPIDAL.
The class DataLocalityBatchTaskSchedulerTest, method testUniqueSchedules2.
/**
 * Verifies that the data-locality batch scheduler produces a non-null plan
 * for a constrained graph and assigns {@code parallel} task instances to
 * every worker container.
 */
@Test
public void testUniqueSchedules2() {
  int parallel = 4;
  int workers = 2;
  ComputeGraph graph = createGraphWithConstraints(parallel);
  DataLocalityBatchTaskScheduler scheduler = new DataLocalityBatchTaskScheduler();
  Config config = getConfig();
  scheduler.initialize(config, 1);
  // NOTE(review): generateData(config) already writes input files using the
  // config-driven sizes, and the explicit generate(...) below immediately
  // overwrites them with a fixed 1000 x 2 data set. Both calls are kept to
  // preserve existing behavior, but the first looks redundant — confirm.
  generateData(config);
  DataGenerator dataGenerator = new DataGenerator(config);
  dataGenerator.generate(
      new Path(String.valueOf(config.get(DataObjectConstants.DINPUT_DIRECTORY))), 1000, 2);
  WorkerPlan workerPlan = createWorkPlan(workers);
  TaskSchedulePlan plan1 = scheduler.schedule(graph, workerPlan);
  Assert.assertNotNull(plan1);
  Map<Integer, WorkerSchedulePlan> containersMap = plan1.getContainersMap();
  for (Map.Entry<Integer, WorkerSchedulePlan> entry : containersMap.entrySet()) {
    WorkerSchedulePlan workerSchedulePlan = entry.getValue();
    Set<TaskInstancePlan> containerPlanTaskInstances = workerSchedulePlan.getTaskInstances();
    LOG.info("container plan instances and parallel:"
        + containerPlanTaskInstances.size() + "\t" + parallel);
    // Fixed: JUnit's assertEquals takes (expected, actual); the original had
    // the arguments reversed, which makes failure messages misleading.
    Assert.assertEquals(parallel, containerPlanTaskInstances.size());
  }
}
Use of edu.iu.dsc.tws.api.data.Path in project twister2 by DSC-SPIDAL.
The class DataLocalityBatchTaskSchedulerTest, method testUniqueSchedules3.
/**
 * Verifies that the data-locality batch scheduler produces a non-null plan
 * for a constrained graph with a compute task, and that each worker
 * container holds {@code workers * vertexCount} task instances.
 */
@Test
public void testUniqueSchedules3() {
  int parallel = 4;
  int workers = 2;
  ComputeGraph graph = createGraphWithComputeTaskAndConstraints(parallel);
  DataLocalityBatchTaskScheduler scheduler = new DataLocalityBatchTaskScheduler();
  Config config = getConfig();
  scheduler.initialize(config, 1);
  // NOTE(review): generateData(config) already writes input files using the
  // config-driven sizes, and the explicit generate(...) below immediately
  // overwrites them with a fixed 1000 x 2 data set. Both calls are kept to
  // preserve existing behavior, but the first looks redundant — confirm.
  generateData(config);
  DataGenerator dataGenerator = new DataGenerator(config);
  dataGenerator.generate(
      new Path(String.valueOf(config.get(DataObjectConstants.DINPUT_DIRECTORY))), 1000, 2);
  WorkerPlan workerPlan = createWorkPlan(workers);
  TaskSchedulePlan plan1 = scheduler.schedule(graph, workerPlan);
  Assert.assertNotNull(plan1);
  Map<Integer, WorkerSchedulePlan> containersMap = plan1.getContainersMap();
  for (Map.Entry<Integer, WorkerSchedulePlan> entry : containersMap.entrySet()) {
    WorkerSchedulePlan workerSchedulePlan = entry.getValue();
    Set<TaskInstancePlan> containerPlanTaskInstances = workerSchedulePlan.getTaskInstances();
    LOG.info("container plan instances and parallel:"
        + containerPlanTaskInstances.size() + "\t" + workers * graph.getTaskVertexSet().size());
    // Fixed: JUnit's assertEquals takes (expected, actual); the original had
    // the arguments reversed, which makes failure messages misleading.
    Assert.assertEquals(workers * graph.getTaskVertexSet().size(),
        containerPlanTaskInstances.size());
  }
}
Use of edu.iu.dsc.tws.api.data.Path in project twister2 by DSC-SPIDAL.
The class DataLocalityTaskSchedulerTest, method generateData.
/**
 * Generates the input files this test schedules against.
 *
 * <p>All generation parameters come from the {@link Config}: the target
 * directory, the number of data points, and the number of dimensions.
 *
 * @param config job configuration expected to carry
 *               {@code DINPUT_DIRECTORY}, {@code DSIZE}, and {@code DIMENSIONS}
 */
private void generateData(Config config) {
  DataGenerator dataGen = new DataGenerator(config);
  Path inputPath = new Path(String.valueOf(config.get(DataObjectConstants.DINPUT_DIRECTORY)));
  int size = Integer.parseInt(String.valueOf(config.get(DataObjectConstants.DSIZE)));
  int dim = Integer.parseInt(String.valueOf(config.get(DataObjectConstants.DIMENSIONS)));
  dataGen.generate(inputPath, size, dim);
}
Use of edu.iu.dsc.tws.api.data.Path in project twister2 by DSC-SPIDAL.
The class TextBasedSourceFunction, method prepare.
/**
 * Prepares the source: selects a CSV input partitioner based on the
 * configured partitioner type, builds the {@link DataSource}, and fetches
 * this task's first input split.
 *
 * @param context TSet runtime context supplying the config, the task
 *                parallelism, and this task's index
 */
@Override
public void prepare(TSetContext context) {
  super.prepare(context);
  this.ctx = context;
  Config cfg = ctx.getConfig();
  Path inputPath = new Path(datainputDirectory);
  if ("complete".equals(partitionerType)) {
    // "complete" mode — presumably each task reads the entire data set
    // rather than a partition; confirm against LocalCompleteCSVInputPartitioner.
    LocalCompleteCSVInputPartitioner partitioner =
        new LocalCompleteCSVInputPartitioner(inputPath, context.getParallelism(), dataSize, cfg);
    this.dataSource = new DataSource(cfg, partitioner, parallel);
  } else {
    // Default mode — the input is partitioned across the parallel tasks.
    LocalCSVInputPartitioner partitioner =
        new LocalCSVInputPartitioner(inputPath, parallel, dataSize, cfg);
    this.dataSource = new DataSource(cfg, partitioner, parallel);
  }
  this.dataSplit = this.dataSource.getNextSplit(context.getIndex());
}
Aggregations