
Example 16 with TaskPlan

Use of edu.iu.dsc.tws.comms.core.TaskPlan in project twister2 by DSC-SPIDAL.

The class SimpleTGraphExample, method init.

/**
 * Init method to submit the task to the executor
 */
public void init(Config cfg, int containerId, ResourcePlan plan) {
    LOG.log(Level.INFO, "Starting the example with container id: " + plan.getThisId());
    taskExecutor = new TaskExecutorFixedThread();
    this.status = Status.INIT;
    TaskPlan taskPlan = Utils.createTaskPlan(cfg, plan);
    TWSNetwork network = new TWSNetwork(cfg, taskPlan);
    TWSCommunication channel = network.getDataFlowTWSCommunication();
    Set<Integer> sources = new HashSet<>();
    sources.add(0);
    int destination = 1;
    Map<String, Object> newCfg = new HashMap<>();
    LinkedQueue<Message> pongQueue = new LinkedQueue<Message>();
    taskExecutor.registerQueue(0, pongQueue);
    direct = channel.direct(newCfg, MessageType.OBJECT, 0, sources, destination, new SimpleTGraphExample.PingPongReceive());
    taskExecutor.initCommunication(channel, direct);
    TMapper tMapper = new TMapper("1");
    TReducer tReducer = new TReducer("2");
    TShuffler tShuffler = new TShuffler("3");
    TReducer tMergeFinal = new TReducer("4");
    // Add the real input data files in the array list...
    tMapper.addInputData("mapper1", new ArrayList<>());
    tMapper.addInputData("mapper2", new ArrayList<>());
    // Add the real input data files in the array list...
    tReducer.addInputData("reducer1", new ArrayList<>());
    tReducer.addInputData("reducer2", new ArrayList<>());
    // Add the real input data files in the array list...
    tShuffler.addInputData("shuffler1", new ArrayList<>());
    tShuffler.addInputData("shuffler2", new ArrayList<>());
    // Add the real input data files in the array list...
    tMergeFinal.addInputData("merge1", new ArrayList<>());
    tMergeFinal.addInputData("merge2", new ArrayList<>());
    // Mention the output data files to be generated in the array list...
    tMapper.addOutputData("mapperOut1", new ArrayList<>());
    tMapper.addOutputData("mapperOut2", new ArrayList<>());
    if (taskGraphFlag >= 0) {
        // just for verification (replace with proper value)
        /*dataflowTaskGraphGenerator = new DataflowTaskGraphGenerator()
          .generateTGraph(tMapper, tShuffler, new DataflowOperation("Map"))
          .generateTGraph(tMapper, tReducer, new DataflowOperation("Shuffle"))
          .generateTGraph(tShuffler, tReducerFinal, new DataflowOperation("finalReduce"))
          .generateTGraph(tReducer, tReducerFinal, new DataflowOperation("finalReduce"));*/
        dataflowTaskGraphGenerator = new DataflowTaskGraphGenerator()
                .generateTGraph(tMapper)
                .generateTGraph(tMapper, tReducer, new DataflowOperation("Reduce"))
                .generateTGraph(tMapper, tShuffler, new DataflowOperation("Shuffle"))
                .generateTGraph(tReducer, tMergeFinal, new DataflowOperation("Merge1"))
                .generateTGraph(tShuffler, tMergeFinal, new DataflowOperation("Merge2"));
        LOG.info("Generated Dataflow Task Graph Vertices:" + dataflowTaskGraphGenerator.getTGraph().getTaskVertexSet());
        if (dataflowTaskGraphGenerator != null) {
            dataflowTGraphParser = new DataflowTGraphParser(dataflowTaskGraphGenerator);
            parsedTaskSet = dataflowTGraphParser.dataflowTGraphParseAndSchedule();
            LOG.info("parsed task set:" + parsedTaskSet);
        }
        // parsedTaskSet = executionGraph.parseTaskGraph(dataflowTaskGraphGenerator);
        if (!parsedTaskSet.isEmpty()) {
            // newly added for testing
            executionGraph = new ExecutionGraph(parsedTaskSet);
            String message = executionGraph.generateExecutionGraph(containerId);
            // String message = executionGraph.generateExecutionGraph(containerId, parsedTaskSet);
            /*TaskExecutorFixedThread taskExecutionGraph =
            executionGraph.generateExecutionGraph(containerId, parsedTaskSet);*/
            LOG.info(message);
        }
    }
// It removes only the first task vertex in the parsedTaskSet.
// dataflowTaskGraphGenerator.removeTaskVertex(parsedTaskSet.iterator().next());
// Removing vertices while iterating over parsedTaskSet throws a ConcurrentModificationException:
/*for (TaskGraphMapper processedTask : parsedTaskSet) {
      dataflowTaskGraphGenerator.removeTaskVertex(processedTask);
    }*/
}
Also used : Message(edu.iu.dsc.tws.task.api.Message) TaskExecutorFixedThread(edu.iu.dsc.tws.task.core.TaskExecutorFixedThread) HashMap(java.util.HashMap) TWSCommunication(edu.iu.dsc.tws.comms.core.TWSCommunication) TaskPlan(edu.iu.dsc.tws.comms.core.TaskPlan) TWSNetwork(edu.iu.dsc.tws.comms.core.TWSNetwork) LinkedQueue(edu.iu.dsc.tws.task.api.LinkedQueue) DataflowTaskGraphGenerator(edu.iu.dsc.tws.task.taskgraphbuilder.DataflowTaskGraphGenerator) DataflowTGraphParser(edu.iu.dsc.tws.task.taskgraphbuilder.DataflowTGraphParser) DataflowOperation(edu.iu.dsc.tws.task.taskgraphbuilder.DataflowOperation) ExecutionGraph(edu.iu.dsc.tws.task.executiongraph.ExecutionGraph) HashSet(java.util.HashSet)
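
The chained generateTGraph calls above build a small diamond-shaped graph: the mapper feeds both the reducer and the shuffler, and both of those feed the final merge; dataflowTGraphParseAndSchedule then turns that graph into an executable task set. As a rough illustration of that scheduling step (a hypothetical sketch, not the twister2 API), the following standalone program encodes the same dependencies with plain java.util collections and orders them with a topological sort; the class name, adjacency-map representation, and string task names are all made up for illustration.

import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Deque;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

// Hypothetical sketch, not the twister2 API: the diamond dependency structure from the
// example above (mapper -> reducer/shuffler -> merge), ordered with a plain topological sort.
public class DiamondGraphSketch {
    public static void main(String[] args) {
        Map<String, List<String>> edges = new LinkedHashMap<>();
        edges.put("mapper", Arrays.asList("reducer", "shuffler")); // Reduce, Shuffle
        edges.put("reducer", Collections.singletonList("merge"));  // Merge1
        edges.put("shuffler", Collections.singletonList("merge")); // Merge2
        edges.put("merge", Collections.emptyList());

        // Kahn's algorithm: repeatedly emit vertices whose dependencies are all satisfied.
        Map<String, Integer> inDegree = new HashMap<>();
        edges.keySet().forEach(v -> inDegree.put(v, 0));
        edges.values().forEach(targets -> targets.forEach(t -> inDegree.merge(t, 1, Integer::sum)));

        Deque<String> ready = new ArrayDeque<>();
        inDegree.forEach((v, d) -> { if (d == 0) ready.add(v); });
        List<String> order = new ArrayList<>();
        while (!ready.isEmpty()) {
            String v = ready.poll();
            order.add(v);
            for (String t : edges.get(v)) {
                if (inDegree.merge(t, -1, Integer::sum) == 0) {
                    ready.add(t);
                }
            }
        }
        // Prints: [mapper, reducer, shuffler, merge]
        System.out.println(order);
    }
}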

Example 17 with TaskPlan

Use of edu.iu.dsc.tws.comms.core.TaskPlan in project twister2 by DSC-SPIDAL.

The class SimpleTaskGraph, method init.

/**
 * Init method to submit the task to the executor
 */
public void init(Config cfg, int containerId, ResourcePlan plan) {
    LOG.log(Level.INFO, "Starting the example with container id: " + plan.getThisId());
    taskExecutor = new TaskExecutorFixedThread();
    this.status = Status.INIT;
    TaskPlan taskPlan = Utils.createTaskPlan(cfg, plan);
    TWSNetwork network = new TWSNetwork(cfg, taskPlan);
    TWSCommunication channel = network.getDataFlowTWSCommunication();
    Set<Integer> sources = new HashSet<>();
    sources.add(0);
    int destination = 1;
    Map<String, Object> newCfg = new HashMap<>();
    LinkedQueue<Message> pongQueue = new LinkedQueue<Message>();
    taskExecutor.registerQueue(0, pongQueue);
    direct = channel.direct(newCfg, MessageType.OBJECT, 0, sources, destination, new SimpleTaskGraph.PingPongReceive());
    taskExecutor.initCommunication(channel, direct);
    // For Dataflow Task Graph Generation call the dataflow task graph generator
    MapWorker sourceTask = new MapWorker(0, direct);
    ReceiveWorker sinkTask = new ReceiveWorker();
    dataflowTaskGraphGenerator = new DataflowTaskGraphGenerator().generateDataflowGraph(sourceTask, sinkTask, direct);
    if (dataflowTaskGraphGenerator != null) {
        dataflowTaskGraphParser = new DataflowTaskGraphParser(dataflowTaskGraphGenerator);
        parsedTaskSet = dataflowTaskGraphParser.dataflowTaskGraphParseAndSchedule();
    }
    if (!parsedTaskSet.isEmpty()) {
        if (containerId == 0) {
            LOG.info("Job in if loop is::::::::::::" + parsedTaskSet.iterator().next());
            taskExecutor.registerTask(parsedTaskSet.iterator().next());
            // taskExecutor.registerTask(new MapWorker(0, direct));
            taskExecutor.submitTask(0);
            taskExecutor.progres();
        // /dataflowTaskGraphGenerator.removeTaskVertex(parsedTaskSet.iterator().next());
        } else if (containerId == 1) {
            int index = 0;
            for (Task processedTask : parsedTaskSet) {
                if (index == 0) {
                    ++index;
                } else if (index == 1) {
                    LOG.info("Job in else loop is::::::::::::" + processedTask);
                    ArrayList<Integer> inq = new ArrayList<>();
                    inq.add(0);
                    taskExecutor.setTaskMessageProcessLimit(10000);
                    taskExecutor.registerSinkTask(processedTask, inq);
                    taskExecutor.progres();
                    // /dataflowTaskGraphGenerator.removeTaskVertex(parsedTaskSet.iterator().next());
                    ++index;
                } else if (index > 1) {
                    // Just for verification
                    LOG.info("Task Index is greater than 1");
                    LOG.info("Submit the job to pipeline task");
                    break;
                }
            }
        }
    }
// This scheduling loop will be used in the future, leave it for reference.
/*int index = 0;
    if (!parsedTaskSet.isEmpty()) {
      for (Task processedTask : parsedTaskSet) {
        if (containerId == index) {
          taskExecutor.registerTask(processedTask);
          taskExecutor.submitTask(0);
          taskExecutor.progres();
          index++;
        } else if (index == 1) {
          ArrayList<Integer> inq = new ArrayList<>();
          inq.add(0);
          taskExecutor.setTaskMessageProcessLimit(10000);
          taskExecutor.registerSinkTask(processedTask, inq);
          taskExecutor.progres();
          index++;
        } else if(index > 1){
          List<Task> taskList = new ArrayList<>();
          for (Task processedTasks : parsedTaskSet) {
            taskList.add(processedTasks);
          }
          //This loop should be properly written...!
          taskExecutionOptimizer = new TaskExecutionOptimizer(taskExecutor);
          Map<Integer, List<Task>> taskMap = new HashMap<>();
          taskMap.put(1, taskList);
        }
      }
    }*/
}
Also used : SourceTask(edu.iu.dsc.tws.task.api.SourceTask) Task(edu.iu.dsc.tws.task.api.Task) SinkTask(edu.iu.dsc.tws.task.api.SinkTask) Message(edu.iu.dsc.tws.task.api.Message) TaskExecutorFixedThread(edu.iu.dsc.tws.task.core.TaskExecutorFixedThread) HashMap(java.util.HashMap) TWSCommunication(edu.iu.dsc.tws.comms.core.TWSCommunication) TaskPlan(edu.iu.dsc.tws.comms.core.TaskPlan) TWSNetwork(edu.iu.dsc.tws.comms.core.TWSNetwork) LinkedQueue(edu.iu.dsc.tws.task.api.LinkedQueue) DataflowTaskGraphGenerator(edu.iu.dsc.tws.task.taskgraphbuilder.DataflowTaskGraphGenerator) ArrayList(java.util.ArrayList) DataflowTaskGraphParser(edu.iu.dsc.tws.task.taskgraphbuilder.DataflowTaskGraphParser) HashSet(java.util.HashSet)
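
Here the containerId decides the role: container 0 registers and drives the parsed source task, while container 1 registers the next parsed task as a sink fed by input queue 0 with a message process limit. As a loose analogy (not the twister2 runtime), the sketch below models that source/sink split as two threads exchanging messages over a java.util.concurrent BlockingQueue; the class name and message count are hypothetical.

import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;

// Loose analogy (not the twister2 runtime): the containerId-based source/sink split
// modeled as two threads over a BlockingQueue. The class name and MESSAGE_COUNT are
// hypothetical; the queue stands in for the direct operation plus the registered pongQueue.
public class SourceSinkSketch {
    private static final int MESSAGE_COUNT = 10;

    public static void main(String[] args) throws InterruptedException {
        BlockingQueue<Integer> queue = new ArrayBlockingQueue<>(100);

        // "Container 0": the source task producing messages.
        Thread source = new Thread(() -> {
            for (int i = 0; i < MESSAGE_COUNT; i++) {
                try {
                    queue.put(i);
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt();
                    return;
                }
            }
        });

        // "Container 1": the sink task consuming a fixed number of messages.
        Thread sink = new Thread(() -> {
            for (int received = 0; received < MESSAGE_COUNT; received++) {
                try {
                    System.out.println("sink received " + queue.take());
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt();
                    return;
                }
            }
        });

        source.start();
        sink.start();
        source.join();
        sink.join();
    }
}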

Example 18 with TaskPlan

Use of edu.iu.dsc.tws.comms.core.TaskPlan in project twister2 by DSC-SPIDAL.

The class SimpleTaskQueue, method init.

/**
 * Initialize the container
 */
public void init(Config cfg, int containerId, ResourcePlan plan) {
    LOG.log(Level.INFO, "Starting the example with container id: " + plan.getThisId());
    // Create a task executor instance to be used in this container
    taskExecutor = new TaskExecutorFixedThread();
    this.status = Status.INIT;
    // let's create the task plan
    TaskPlan taskPlan = Utils.createTaskPlan(cfg, plan);
    // first get the communication config file
    TWSNetwork network = new TWSNetwork(cfg, taskPlan);
    TWSCommunication channel = network.getDataFlowTWSCommunication();
    // we are sending messages from 0th task to 1st task
    Set<Integer> sources = new HashSet<>();
    sources.add(0);
    int dests = 1;
    Map<String, Object> newCfg = new HashMap<>();
    LOG.info("-------------------------------------------");
    LOG.info("Setting up reduce dataflow operation");
    LOG.info("-------------------------------------------");
    // this method calls the init method
    // I think this is wrong
    // TODO: Does the task generate the communication, or is it done by a controller? For example,
    // is the direct comm between task 0 and 1 set up by the container or by the task?
    // TODO: if the task creates the dataflow op, does the task progress it or the executor?
    // TODO: for now the dataflow op is created at the container and sent to the task
    LinkedQueue<Message> pongQueue = new LinkedQueue<Message>();
    taskExecutor.registerQueue(0, pongQueue);
    direct = channel.direct(newCfg, MessageType.OBJECT, 0, sources, dests, new PingPongReceive());
    taskExecutor.initCommunication(channel, direct);
    if (containerId == 0) {
        // the map thread where data is produced
        LOG.info("-------------------------------------------");
        LOG.log(Level.INFO, "Starting map thread");
        LOG.info("-------------------------------------------");
        LOG.info("-------------------------------------------");
        LOG.log(Level.INFO, "Container Id 0");
        LOG.info("-------------------------------------------");
        taskExecutor.registerTask(new MapWorker(0, direct));
        taskExecutor.submitTask(0);
        taskExecutor.progres();
    } else if (containerId == 1) {
        LOG.info("-------------------------------------------");
        LOG.log(Level.INFO, "Container Id 1 : Receiving End");
        LOG.info("-------------------------------------------");
        ArrayList<Integer> inq = new ArrayList<>();
        inq.add(0);
        taskExecutor.setTaskMessageProcessLimit(100);
        taskExecutor.registerSinkTask(new RecieveWorker(1), inq);
        taskExecutor.progres();
    }
}
Also used : Message(edu.iu.dsc.tws.task.api.Message) TaskExecutorFixedThread(edu.iu.dsc.tws.task.core.TaskExecutorFixedThread) HashMap(java.util.HashMap) TWSCommunication(edu.iu.dsc.tws.comms.core.TWSCommunication) TaskPlan(edu.iu.dsc.tws.comms.core.TaskPlan) TWSNetwork(edu.iu.dsc.tws.comms.core.TWSNetwork) LinkedQueue(edu.iu.dsc.tws.task.api.LinkedQueue) ArrayList(java.util.ArrayList) HashSet(java.util.HashSet)
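
This example exercises the queue-based executor directly: a queue is registered under id 0, the direct dataflow operation feeds it, and the sink task drains it subject to setTaskMessageProcessLimit. The sketch below is an assumed, simplified model of that registry, not the actual TaskExecutorFixedThread implementation; the class and its method bodies are hypothetical and only show how registerQueue, registerSinkTask, and the process limit plausibly fit together.

import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;

// Assumed, simplified model (not the real TaskExecutorFixedThread): a registry that keeps
// a queue per queue id, lets a named sink task drain its input queues, and stops after a
// configurable per-call message limit, mirroring registerQueue / registerSinkTask /
// setTaskMessageProcessLimit in the example above.
public class QueueExecutorSketch {
    private final Map<Integer, Queue<Object>> queues = new HashMap<>();
    private final Map<String, List<Integer>> sinkInputs = new HashMap<>();
    private int messageProcessLimit = 100;

    public void registerQueue(int queueId, Queue<Object> queue) {
        queues.put(queueId, queue);
    }

    public void setTaskMessageProcessLimit(int limit) {
        this.messageProcessLimit = limit;
    }

    public void registerSinkTask(String taskName, List<Integer> inputQueueIds) {
        sinkInputs.put(taskName, inputQueueIds);
    }

    // Drain up to messageProcessLimit messages for the named sink task.
    public void progress(String taskName) {
        int processed = 0;
        for (int queueId : sinkInputs.getOrDefault(taskName, Collections.emptyList())) {
            Queue<Object> queue = queues.get(queueId);
            Object message;
            while (processed < messageProcessLimit && (message = queue.poll()) != null) {
                System.out.println(taskName + " processed " + message);
                processed++;
            }
        }
    }

    public static void main(String[] args) {
        QueueExecutorSketch executor = new QueueExecutorSketch();
        Queue<Object> pongQueue = new ConcurrentLinkedQueue<>();
        executor.registerQueue(0, pongQueue);
        executor.setTaskMessageProcessLimit(2);
        executor.registerSinkTask("receiver", Collections.singletonList(0));
        pongQueue.addAll(Arrays.asList("ping", "pong", "ping"));
        // Only two messages are drained because of the process limit.
        executor.progress("receiver");
    }
}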

Example 19 with TaskPlan

Use of edu.iu.dsc.tws.comms.core.TaskPlan in project twister2 by DSC-SPIDAL.

The class Utils, method createReduceTaskPlan.

/**
 * Let's assume we have 2 tasks per container and one additional task for the first container,
 * which will be the destination.
 * @param plan the resource plan from scheduler
 * @return task plan
 */
public static TaskPlan createReduceTaskPlan(Config cfg, ResourcePlan plan, int noOfTasks) {
    int noOfProcs = plan.noOfContainers();
    LOG.log(Level.INFO, "No of containers: " + noOfProcs);
    Map<Integer, Set<Integer>> executorToGraphNodes = new HashMap<>();
    Map<Integer, Set<Integer>> groupsToExeuctors = new HashMap<>();
    int thisExecutor = plan.getThisId();
    List<ResourceContainer> containers = plan.getContainers();
    Map<String, List<ResourceContainer>> containersPerNode = new HashMap<>();
    for (ResourceContainer c : containers) {
        String name = (String) c.getProperty(SchedulerContext.WORKER_NAME);
        List<ResourceContainer> containerList;
        if (!containersPerNode.containsKey(name)) {
            containerList = new ArrayList<>();
            containersPerNode.put(name, containerList);
        } else {
            containerList = containersPerNode.get(name);
        }
        containerList.add(c);
    }
    int taskPerExecutor = noOfTasks / noOfProcs;
    for (int i = 0; i < noOfProcs; i++) {
        Set<Integer> nodesOfExecutor = new HashSet<>();
        for (int j = 0; j < taskPerExecutor; j++) {
            nodesOfExecutor.add(i * taskPerExecutor + j);
        }
        if (i == 0) {
            nodesOfExecutor.add(noOfTasks);
        }
        executorToGraphNodes.put(i, nodesOfExecutor);
    }
    int i = 0;
    // each container is treated as an executor; group the executors by the node they run on
    for (Map.Entry<String, List<ResourceContainer>> e : containersPerNode.entrySet()) {
        Set<Integer> executorsOfGroup = new HashSet<>();
        for (ResourceContainer c : e.getValue()) {
            executorsOfGroup.add(c.getId());
        }
        groupsToExeuctors.put(i, executorsOfGroup);
        i++;
    }
    String print = printMap(executorToGraphNodes);
    LOG.fine("Executor To Graph: " + print);
    print = printMap(groupsToExeuctors);
    LOG.fine("Groups to executors: " + print);
    // and reduce task in 0th process
    return new TaskPlan(executorToGraphNodes, groupsToExeuctors, thisExecutor);
}
Also used : Set(java.util.Set) HashSet(java.util.HashSet) HashMap(java.util.HashMap) TaskPlan(edu.iu.dsc.tws.comms.core.TaskPlan) ArrayList(java.util.ArrayList) List(java.util.List) HashMap(java.util.HashMap) Map(java.util.Map) ResourceContainer(edu.iu.dsc.tws.rsched.spi.resource.ResourceContainer) HashSet(java.util.HashSet)
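
To make the mapping concrete, here is a small worked example of what createReduceTaskPlan would build for noOfTasks = 4 with two containers on a single node: two tasks per executor, the extra destination task (id 4) on executor 0, and one node group holding both executors. The concrete ids are illustrative; only the TaskPlan constructor shown above is assumed.

import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

import edu.iu.dsc.tws.comms.core.TaskPlan;

// Worked example (illustrative ids): the maps createReduceTaskPlan builds for
// noOfTasks = 4 and two containers running on the same node. Executor 0 also gets
// the extra task (id 4) that acts as the reduce destination.
public class ReduceTaskPlanSketch {
    public static void main(String[] args) {
        Map<Integer, Set<Integer>> executorToGraphNodes = new HashMap<>();
        executorToGraphNodes.put(0, new HashSet<>(Arrays.asList(0, 1, 4))); // 2 tasks + destination
        executorToGraphNodes.put(1, new HashSet<>(Arrays.asList(2, 3)));

        Map<Integer, Set<Integer>> groupsToExecutors = new HashMap<>();
        groupsToExecutors.put(0, new HashSet<>(Arrays.asList(0, 1))); // both executors share node group 0

        int thisExecutor = 0; // the container constructing the plan
        TaskPlan taskPlan = new TaskPlan(executorToGraphNodes, groupsToExecutors, thisExecutor);
        System.out.println(taskPlan);
    }
}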

Example 20 with TaskPlan

Use of edu.iu.dsc.tws.comms.core.TaskPlan in project twister2 by DSC-SPIDAL.

The class BaseAllReduceCommunication, method init.

@Override
public void init(Config cfg, int containerId, ResourcePlan plan) {
    LOG.log(Level.INFO, "Starting the example with container id: " + plan.getThisId());
    this.config = cfg;
    this.resourcePlan = plan;
    this.id = containerId;
    this.status = Status.INIT;
    this.noOfTasksPerExecutor = NO_OF_TASKS / plan.noOfContainers();
    // let's create the task plan
    TaskPlan taskPlan = Utils.createReduceTaskPlan(cfg, plan, NO_OF_TASKS);
    // first get the communication config file
    TWSNetwork network = new TWSNetwork(cfg, taskPlan);
    TWSCommunication channel = network.getDataFlowTWSCommunication();
    Set<Integer> sources = new HashSet<>();
    for (int i = 0; i < NO_OF_TASKS / 2; i++) {
        sources.add(i);
    }
    Set<Integer> destinations = new HashSet<>();
    for (int i = 0; i < NO_OF_TASKS / 2; i++) {
        destinations.add(NO_OF_TASKS / 2 + i);
    }
    int dest = NO_OF_TASKS;
    Map<String, Object> newCfg = new HashMap<>();
    LOG.info("Setting up reduce dataflow operation");
    try {
        // this method calls the init method
        // I think this is wrong
        allReduce = channel.allReduce(newCfg, MessageType.OBJECT, 0, 1, sources, destinations, dest, new IndentityFunction(), new FinalReduceReceive(), true);
        if (id == 0 || id == 1) {
            for (int i = 0; i < noOfTasksPerExecutor; i++) {
                // the map thread where data is produced
                LOG.info(String.format("%d Starting %d", id, i + id * noOfTasksPerExecutor));
                Thread mapThread = new Thread(new MapWorker(i + id * noOfTasksPerExecutor));
                mapThread.start();
            }
        }
        // we need to progress the communication
        while (true) {
            try {
                // progress the channel
                channel.progress();
                // we should progress the communication directive
                allReduce.progress();
                Thread.yield();
            } catch (Throwable t) {
                t.printStackTrace();
            }
        }
    } catch (Throwable t) {
        t.printStackTrace();
    }
}
Also used : HashMap(java.util.HashMap) TWSCommunication(edu.iu.dsc.tws.comms.core.TWSCommunication) TaskPlan(edu.iu.dsc.tws.comms.core.TaskPlan) TWSNetwork(edu.iu.dsc.tws.comms.core.TWSNetwork) HashSet(java.util.HashSet)
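
For reference, with NO_OF_TASKS = 8 the split above works out to sources 0..3, destinations 4..7, and the extra reduce target id 8 (the additional task that createReduceTaskPlan places on executor 0). Below is a tiny standalone sketch, using only java.util, that prints that partition; the value of NO_OF_TASKS is assumed purely for illustration.

import java.util.HashSet;
import java.util.Set;

// Worked example of the partitioning above for NO_OF_TASKS = 8 (illustrative value):
// sources are tasks 0..3, destinations are tasks 4..7, and dest is the extra task id 8.
public class AllReducePartitionSketch {
    private static final int NO_OF_TASKS = 8;

    public static void main(String[] args) {
        Set<Integer> sources = new HashSet<>();
        for (int i = 0; i < NO_OF_TASKS / 2; i++) {
            sources.add(i);                          // tasks 0..NO_OF_TASKS/2 - 1
        }
        Set<Integer> destinations = new HashSet<>();
        for (int i = 0; i < NO_OF_TASKS / 2; i++) {
            destinations.add(NO_OF_TASKS / 2 + i);   // tasks NO_OF_TASKS/2..NO_OF_TASKS - 1
        }
        int dest = NO_OF_TASKS;                      // the extra reduce target from the task plan
        System.out.println("sources      = " + sources);
        System.out.println("destinations = " + destinations);
        System.out.println("dest         = " + dest);
    }
}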

Aggregations

TaskPlan (edu.iu.dsc.tws.comms.core.TaskPlan): 35
HashMap (java.util.HashMap): 33
HashSet (java.util.HashSet): 33
TWSNetwork (edu.iu.dsc.tws.comms.core.TWSNetwork): 29
TWSCommunication (edu.iu.dsc.tws.comms.core.TWSCommunication): 28
ArrayList (java.util.ArrayList): 12
LinkedQueue (edu.iu.dsc.tws.task.api.LinkedQueue): 10
Message (edu.iu.dsc.tws.task.api.Message): 10
TaskExecutorFixedThread (edu.iu.dsc.tws.task.core.TaskExecutorFixedThread): 10
RandomString (edu.iu.dsc.tws.examples.utils.RandomString): 7
DataflowTaskGraphGenerator (edu.iu.dsc.tws.task.taskgraphbuilder.DataflowTaskGraphGenerator): 7
List (java.util.List): 7
Random (java.util.Random): 7
Map (java.util.Map): 5
GatherBatchFinalReceiver (edu.iu.dsc.tws.comms.mpi.io.gather.GatherBatchFinalReceiver): 4
GatherBatchPartialReceiver (edu.iu.dsc.tws.comms.mpi.io.gather.GatherBatchPartialReceiver): 4
ResourceContainer (edu.iu.dsc.tws.rsched.spi.resource.ResourceContainer): 4
SinkTask (edu.iu.dsc.tws.task.api.SinkTask): 4
SourceTask (edu.iu.dsc.tws.task.api.SourceTask): 4
Task (edu.iu.dsc.tws.task.api.Task): 4