Example 26 with TaskPlan

use of edu.iu.dsc.tws.comms.core.TaskPlan in project twister2 by DSC-SPIDAL.

the class SimpleTGraph method init.

/**
 * Init method to submit the task to the executor
 */
public void init(Config cfg, int containerId, ResourcePlan plan) {
    LOG.log(Level.INFO, "Starting the example with container id: " + plan.getThisId());
    taskExecutor = new TaskExecutorFixedThread();
    this.status = Status.INIT;
    TaskPlan taskPlan = Utils.createTaskPlan(cfg, plan);
    TWSNetwork network = new TWSNetwork(cfg, taskPlan);
    TWSCommunication channel = network.getDataFlowTWSCommunication();
    Set<Integer> sources = new HashSet<>();
    sources.add(0);
    int destination = 1;
    Map<String, Object> newCfg = new HashMap<>();
    LinkedQueue<Message> pongQueue = new LinkedQueue<Message>();
    taskExecutor.registerQueue(0, pongQueue);
    direct = channel.direct(newCfg, MessageType.OBJECT, 0, sources, destination, new SimpleTGraph.PingPongReceive());
    taskExecutor.initCommunication(channel, direct);
    // For Dataflow Task Graph Generation call the dataflow task graph generator
    // MapWorker sourceTask = new MapWorker(0, direct);
    // ReceiveWorker sinkTask = new ReceiveWorker();
    TMapper tMapper = new TMapper("1");
    TReducer tReducer = new TReducer("2");
    TShuffler tShuffler = new TShuffler("3");
    // Add the real input data files in the array list...
    tMapper.addInputData("mapper1", new ArrayList<>());
    tMapper.addInputData("reducer1", new ArrayList<>());
    // Mention the output data files to be generated in the array list...
    tMapper.addOutputData("mapperOut1", new ArrayList<>());
    tMapper.addOutputData("mapperOut2", new ArrayList<>());
    dataflowTaskGraphGenerator = new DataflowTaskGraphGenerator()
            .generateTGraph(tMapper, tShuffler, new DataflowOperation("Map"))
            .generateTGraph(tShuffler, tReducer, new DataflowOperation("Shuffle"));
    LOG.info("Generated Dataflow Task Graph Vertices:" + dataflowTaskGraphGenerator.getTGraph().getTaskVertexSet());
    LOG.info("Generated Dataflow Task Edges:" + dataflowTaskGraphGenerator.getTGraph().getAllTaskEdges(tMapper, tShuffler).toString());
    LOG.info("Generated Dataflow Task Edges:" + dataflowTaskGraphGenerator.getTGraph().getAllTaskEdges(tShuffler, tReducer).toString());
    if (containerId == 0) {
        Thread mapThread = new Thread(new TMapper("1"));
        LOG.log(Level.INFO, "Starting map thread");
        mapThread.start();
        // we need to progress the communication
        while (true) {
            // progress the channel
            channel.progress();
            // we should progress the communication directive
            direct.progress();
            Thread.yield();
        }
    } else if (containerId == 1) {
        while (status != Status.LOAD_RECEIVE_FINISHED) {
            channel.progress();
            direct.progress();
        }
    }
/*if (dataflowTaskGraphGenerator != null) {
      dataflowTaskGraphParser = new DataflowTaskGraphParser(dataflowTaskGraphGenerator);
      parsedTaskSet = dataflowTaskGraphParser.dataflowTaskGraphParseAndSchedule();
    }
    if (!parsedTaskSet.isEmpty()) {
      if (containerId == 0) {
        LOG.info("Job in if loop is::::::::::::" + parsedTaskSet.iterator().next());
        taskExecutor.registerTask(parsedTaskSet.iterator().next());
        //taskExecutor.registerTask(new MapWorker(0, direct));
        taskExecutor.submitTask(0);
        taskExecutor.progres();
        ///dataflowTaskGraphGenerator.removeTaskVertex(parsedTaskSet.iterator().next());
      } else if (containerId == 1) {
        int index = 0;
        for (Task processedTask : parsedTaskSet) {
          if (index == 0) {
            ++index;
          } else if (index == 1) {
            LOG.info("Job in else loop is::::::::::::" + processedTask);
            ArrayList<Integer> inq = new ArrayList<>();
            inq.add(0);
            taskExecutor.setTaskMessageProcessLimit(10000);
            taskExecutor.registerSinkTask(processedTask, inq);
            taskExecutor.progres();
            ///dataflowTaskGraphGenerator.removeTaskVertex(parsedTaskSet.iterator().next());
            ++index;
          } else if (index > 1) { //Just for verification
            LOG.info("Task Index is greater than 1");
            LOG.info("Submit the job to pipeline task");
            break;
          }
        }
      }
    }*/
}
Also used : Message(edu.iu.dsc.tws.task.api.Message) TaskExecutorFixedThread(edu.iu.dsc.tws.task.core.TaskExecutorFixedThread) HashMap(java.util.HashMap) TWSCommunication(edu.iu.dsc.tws.comms.core.TWSCommunication) TaskPlan(edu.iu.dsc.tws.comms.core.TaskPlan) TWSNetwork(edu.iu.dsc.tws.comms.core.TWSNetwork) LinkedQueue(edu.iu.dsc.tws.task.api.LinkedQueue) DataflowTaskGraphGenerator(edu.iu.dsc.tws.task.taskgraphbuilder.DataflowTaskGraphGenerator) DataflowOperation(edu.iu.dsc.tws.task.taskgraphbuilder.DataflowOperation) HashSet(java.util.HashSet)
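
Across these examples the same busy progress loop appears: call channel.progress(), then the specific operation's progress(), then Thread.yield(). Below is a minimal sketch of factoring that loop into a helper; the helper name, the running flag, and passing the operation's progress call as a Runnable are illustrative assumptions, while the progress() calls themselves are the ones used in the examples above.

// Hypothetical helper: drives the channel and one dataflow operation until the flag is cleared.
// Only TWSCommunication.progress() and the operation's progress() (wrapped in a Runnable)
// are taken from the examples above; everything else here is illustrative.
private volatile boolean running = true;

private void progressLoop(TWSCommunication channel, Runnable operationProgress) {
    while (running) {
        // progress the underlying channel
        channel.progress();
        // progress the specific dataflow operation (e.g. the direct or gather operation)
        operationProgress.run();
        Thread.yield();
    }
}

// usage, mirroring the loop in the example above:
// progressLoop(channel, direct::progress);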

Example 27 with TaskPlan

use of edu.iu.dsc.tws.comms.core.TaskPlan in project twister2 by DSC-SPIDAL.

the class SimpleTaskQueueWithMM method init.

/**
 * Initialize the container
 */
public void init(Config cfg, int containerId, ResourcePlan plan) {
    LOG.log(Level.INFO, "Starting the example with container id: " + plan.getThisId());
    // Creates a task executor instance to be used in this container
    taskExecutor = new TaskExecutorFixedThread();
    this.status = Status.INIT;
    // lets create the task plan
    TaskPlan taskPlan = Utils.createTaskPlan(cfg, plan);
    // first get the communication config file
    TWSNetwork network = new TWSNetwork(cfg, taskPlan);
    TWSCommunication channel = network.getDataFlowTWSCommunication();
    // we are sending messages from 0th task to 1st task
    Set<Integer> sources = new HashSet<>();
    sources.add(0);
    int dests = 1;
    Map<String, Object> newCfg = new HashMap<>();
    LOG.info("Setting up reduce dataflow operation");
    Path dataPath = new Path("/home/pulasthi/work/twister2/lmdbdatabase");
    MemoryManager memoryManager = new LMDBMemoryManager(dataPath);
    // this method calls the init method
    // I think this is wrong
    // TODO: Does the task generate the communication, or is it done by a controller? For example,
    // is the direct comm between task 0 and 1 set up by the container or by the task itself?
    // TODO: if the task creates the dataflow op, does the task progress it or the executor?
    // TODO: FOR NOW the dataflow op is created at the container and sent to the task
    LinkedQueue<Message> pongQueue = new LinkedQueue<Message>();
    taskExecutor.registerQueue(0, pongQueue);
    direct = channel.direct(newCfg, MessageType.OBJECT, 0, sources, dests, new PingPongReceive());
    taskExecutor.initCommunication(channel, direct);
    // Memory Manager
    if (containerId == 0) {
        byte[] val = Longs.toByteArray(1231212121213L);
        byte[] val2 = Longs.toByteArray(22222222L);
        ByteBuffer valbuf = ByteBuffer.allocateDirect(8192);
        memoryManager.put(0, "temp", valbuf);
    // memoryManager.put(0, "temp", val);
    // memoryManager.put(0, "temp", val2);
    // the map thread where data is produced
    // LOG.log(Level.INFO, "Starting map thread");
    // SourceTask<Object> mapTask = new MapWorker(0, direct);
    // mapTask.setMemoryManager(memoryManager);
    // taskExecutor.registerTask(mapTask);
    // taskExecutor.submitTask(0);
    // taskExecutor.progres();
    } else if (containerId == 1) {
        byte[] val3 = Longs.toByteArray(3333333L);
        ByteBuffer val3buf = ByteBuffer.wrap(val3);
        try {
            Thread.sleep(2000);
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
        ByteBuffer results = memoryManager.get(0, "temp");
        if (results.limit() == 8192) {
            System.out.println("Correct " + results.limit());
        }
        ByteBuffer valbuf2 = ByteBuffer.allocateDirect(16192);
        memoryManager.put(0, "temp", valbuf2);
        results = memoryManager.get(0, "temp");
        if (results.limit() == 16192) {
            System.out.println("Correct " + results.limit());
        }
        ByteBuffer results2 = memoryManager.get(0, "temp");
        ByteBuffer results3 = memoryManager.get(0, "temp");
        if (results2 == null) {
            System.out.println("Missing key is null");
        }
        if (results3.getLong() == 1231212121213L) {
            System.out.println("Long value is correct");
        }
        memoryManager.append(0, "temp", val3buf);
        ByteBuffer resultsappend = memoryManager.get(0, "temp");
        System.out.println("Long value 1 :" + resultsappend.getLong());
        System.out.println("Long value 1 :" + resultsappend.getLong());
    // ArrayList<Integer> inq = new ArrayList<>();
    // inq.add(0);
    // taskExecutor.setTaskMessageProcessLimit(10000);
    // SinkTask<Object> recTask = new RecieveWorker(1);
    // recTask.setMemoryManager(memoryManager);
    // taskExecutor.registerSinkTask(recTask, inq);
    // taskExecutor.progres();
    }
}
Also used : Path(edu.iu.dsc.tws.data.fs.Path) Message(edu.iu.dsc.tws.task.api.Message) TaskExecutorFixedThread(edu.iu.dsc.tws.task.core.TaskExecutorFixedThread) HashMap(java.util.HashMap) TWSCommunication(edu.iu.dsc.tws.comms.core.TWSCommunication) TaskPlan(edu.iu.dsc.tws.comms.core.TaskPlan) TWSNetwork(edu.iu.dsc.tws.comms.core.TWSNetwork) LinkedQueue(edu.iu.dsc.tws.task.api.LinkedQueue) LMDBMemoryManager(edu.iu.dsc.tws.data.memory.lmdb.LMDBMemoryManager) MemoryManager(edu.iu.dsc.tws.data.memory.MemoryManager) ByteBuffer(java.nio.ByteBuffer) HashSet(java.util.HashSet)
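
Example 27 exercises the memory manager directly: values are stored, overwritten, appended, and read back as ByteBuffers keyed by an operation id and a string key. Here is a minimal sketch of that put/get/append cycle, assuming only the LMDBMemoryManager(Path) constructor and the (int, String, ByteBuffer) signatures used above; the database path and key are placeholders.

// Minimal sketch of the put/get/append cycle from Example 27 (path and key are placeholders).
Path dbPath = new Path("/tmp/twister2-lmdb-example");
MemoryManager mm = new LMDBMemoryManager(dbPath);

// store a long under operation id 0, key "count"
mm.put(0, "count", ByteBuffer.wrap(Longs.toByteArray(42L)));

// read it back
ByteBuffer stored = mm.get(0, "count");
System.out.println("stored long: " + stored.getLong());

// append a second long under the same key and read both back in order
mm.append(0, "count", ByteBuffer.wrap(Longs.toByteArray(7L)));
ByteBuffer appended = mm.get(0, "count");
System.out.println("first: " + appended.getLong() + ", second: " + appended.getLong());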

Example 28 with TaskPlan

use of edu.iu.dsc.tws.comms.core.TaskPlan in project twister2 by DSC-SPIDAL.

the class SimpleTaskgraph method init.

/**
 * Init method to submit the task to the executor
 */
public void init(Config cfg, int containerId, ResourcePlan plan) {
    LOG.log(Level.INFO, "Starting the example with container id: " + plan.getThisId());
    taskExecutor = new TaskExecutorFixedThread();
    this.status = Status.INIT;
    TaskPlan taskPlan = Utils.createTaskPlan(cfg, plan);
    TWSNetwork network = new TWSNetwork(cfg, taskPlan);
    TWSCommunication channel = network.getDataFlowTWSCommunication();
    Set<Integer> sources = new HashSet<>();
    sources.add(0);
    int destination = 1;
    Map<String, Object> newCfg = new HashMap<>();
    LinkedQueue<Message> pongQueue = new LinkedQueue<Message>();
    taskExecutor.registerQueue(0, pongQueue);
    direct = channel.direct(newCfg, MessageType.OBJECT, 0, sources, destination, new SimpleTaskgraph.PingPongReceive());
    taskExecutor.initCommunication(channel, direct);
    // For Dataflow Task Graph Generation call the dataflow task graph generator
    MapWorker sourceTask = new MapWorker(0, direct);
    ReceiveWorker sinkTask = new ReceiveWorker();
    // commented this line for separating the communication component
    // dataflowTaskGraphGenerator = new DataflowTaskGraphGenerator().generateDataflowGraph(
    // sourceTask, sinkTask, direct);
    dataflowTaskGraphGenerator = new DataflowTaskGraphGenerator()
            .generateTaskGraph(sourceTask, sinkTask, new DataflowOperation("Map"));
    if (dataflowTaskGraphGenerator != null) {
        taskGraphParser = new TaskGraphParser(dataflowTaskGraphGenerator);
        parsedTaskSet = taskGraphParser.taskGraphParseAndSchedule();
    }
    if (!parsedTaskSet.isEmpty()) {
        if (containerId == 0) {
            LOG.info("Job in if loop is::::::::::::" + parsedTaskSet.iterator().next());
            taskExecutor.registerTask(parsedTaskSet.iterator().next());
            // taskExecutor.registerTask(new MapWorker(0, direct));
            taskExecutor.submitTask(0);
            taskExecutor.progres();
        // /dataflowTaskGraphGenerator.removeTaskVertex(parsedTaskSet.iterator().next());
        } else if (containerId == 1) {
            int index = 0;
            for (Task processedTask : parsedTaskSet) {
                if (index == 0) {
                    ++index;
                } else if (index == 1) {
                    LOG.info("Job in else loop is::::::::::::" + processedTask);
                    ArrayList<Integer> inq = new ArrayList<>();
                    inq.add(0);
                    // 10000
                    taskExecutor.setTaskMessageProcessLimit(100);
                    taskExecutor.registerSinkTask(processedTask, inq);
                    taskExecutor.progres();
                    // /dataflowTaskGraphGenerator.removeTaskVertex(parsedTaskSet.iterator().next());
                    ++index;
                } else if (index > 1) {
                    // Just for verification
                    LOG.info("Task Index is greater than 1");
                    LOG.info("Submit the job to pipeline task");
                    break;
                }
            }
        }
    }
}
Also used : SourceTask(edu.iu.dsc.tws.task.api.SourceTask) Task(edu.iu.dsc.tws.task.api.Task) SinkTask(edu.iu.dsc.tws.task.api.SinkTask) Message(edu.iu.dsc.tws.task.api.Message) TaskExecutorFixedThread(edu.iu.dsc.tws.task.core.TaskExecutorFixedThread) HashMap(java.util.HashMap) TWSCommunication(edu.iu.dsc.tws.comms.core.TWSCommunication) TaskPlan(edu.iu.dsc.tws.comms.core.TaskPlan) TWSNetwork(edu.iu.dsc.tws.comms.core.TWSNetwork) LinkedQueue(edu.iu.dsc.tws.task.api.LinkedQueue) DataflowTaskGraphGenerator(edu.iu.dsc.tws.task.taskgraphbuilder.DataflowTaskGraphGenerator) ArrayList(java.util.ArrayList) DataflowOperation(edu.iu.dsc.tws.task.taskgraphbuilder.DataflowOperation) TaskGraphParser(edu.iu.dsc.tws.task.taskgraphbuilder.TaskGraphParser) HashSet(java.util.HashSet)

Example 29 with TaskPlan

use of edu.iu.dsc.tws.comms.core.TaskPlan in project twister2 by DSC-SPIDAL.

the class Utils method createTaskPlan.

/**
 * Let's assume we have 1 task per container
 * @param plan the resource plan from scheduler
 * @return task plan
 */
public static TaskPlan createTaskPlan(Config cfg, ResourcePlan plan) {
    int noOfProcs = plan.noOfContainers();
    LOG.log(Level.INFO, "No of containers: " + noOfProcs);
    Map<Integer, Set<Integer>> executorToGraphNodes = new HashMap<>();
    Map<Integer, Set<Integer>> groupsToExeuctors = new HashMap<>();
    int thisExecutor = plan.getThisId();
    List<ResourceContainer> containers = plan.getContainers();
    Map<String, List<ResourceContainer>> containersPerNode = new HashMap<>();
    for (ResourceContainer c : containers) {
        String name = (String) c.getProperty(SchedulerContext.WORKER_NAME);
        List<ResourceContainer> containerList;
        if (!containersPerNode.containsKey(name)) {
            containerList = new ArrayList<>();
            containersPerNode.put(name, containerList);
        } else {
            containerList = containersPerNode.get(name);
        }
        containerList.add(c);
    }
    for (int i = 0; i < noOfProcs; i++) {
        Set<Integer> nodesOfExecutor = new HashSet<>();
        nodesOfExecutor.add(i);
        executorToGraphNodes.put(i, nodesOfExecutor);
    }
    int i = 0;
    // we take each container as an executor
    for (Map.Entry<String, List<ResourceContainer>> e : containersPerNode.entrySet()) {
        Set<Integer> executorsOfGroup = new HashSet<>();
        for (ResourceContainer c : e.getValue()) {
            executorsOfGroup.add(c.getId());
        }
        groupsToExeuctors.put(i, executorsOfGroup);
        i++;
    }
    String print = printMap(executorToGraphNodes);
    LOG.fine("Executor To Graph: " + print);
    print = printMap(groupsToExeuctors);
    LOG.fine("Groups to executors: " + print);
    // and reduce task in 0th process
    return new TaskPlan(executorToGraphNodes, groupsToExeuctors, thisExecutor);
}
Also used : Set(java.util.Set) HashSet(java.util.HashSet) HashMap(java.util.HashMap) TaskPlan(edu.iu.dsc.tws.comms.core.TaskPlan) ArrayList(java.util.ArrayList) List(java.util.List) Map(java.util.Map) ResourceContainer(edu.iu.dsc.tws.rsched.spi.resource.ResourceContainer)
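
The two maps handed to the TaskPlan constructor are easier to see with fixed values. The sketch below builds the same structure by hand for two executors (containers) on a single node, one task each; the layout is made up for illustration, and only the TaskPlan(Map, Map, int) constructor used above is assumed.

// Hand-built TaskPlan: executor id -> task ids it runs; group (node) id -> executor ids on that node.
Map<Integer, Set<Integer>> executorToGraphNodes = new HashMap<>();
Set<Integer> tasksOfExecutor0 = new HashSet<>();
tasksOfExecutor0.add(0);
executorToGraphNodes.put(0, tasksOfExecutor0);
Set<Integer> tasksOfExecutor1 = new HashSet<>();
tasksOfExecutor1.add(1);
executorToGraphNodes.put(1, tasksOfExecutor1);

Map<Integer, Set<Integer>> groupsToExecutors = new HashMap<>();
Set<Integer> executorsOfGroup0 = new HashSet<>();
executorsOfGroup0.add(0);
executorsOfGroup0.add(1);
groupsToExecutors.put(0, executorsOfGroup0);

// this process is executor 0; the other worker would build the same plan with id 1
TaskPlan examplePlan = new TaskPlan(executorToGraphNodes, groupsToExecutors, 0);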

Example 30 with TaskPlan

use of edu.iu.dsc.tws.comms.core.TaskPlan in project twister2 by DSC-SPIDAL.

the class BaseBatchAggregate method init.

@Override
public void init(Config cfg, int containerId, ResourcePlan plan) {
    LOG.log(Level.INFO, "Starting the example with container id: " + plan.getThisId());
    this.config = cfg;
    this.resourcePlan = plan;
    this.id = containerId;
    this.noOfTasksPerExecutor = NO_OF_TASKS / plan.noOfContainers();
    // lets create the task plan
    TaskPlan taskPlan = Utils.createReduceTaskPlan(cfg, plan, NO_OF_TASKS);
    // first get the communication config file
    TWSNetwork network = new TWSNetwork(cfg, taskPlan);
    TWSCommunication channel = network.getDataFlowTWSCommunication();
    Set<Integer> sources = new HashSet<>();
    for (int i = 0; i < NO_OF_TASKS; i++) {
        sources.add(i);
    }
    int dest = NO_OF_TASKS;
    Map<String, Object> newCfg = new HashMap<>();
    LOG.info("Setting up reduce dataflow operation");
    try {
        aggregate = channel.gather(newCfg, MessageType.OBJECT, 0, sources, dest, new FinalReduceReceive());
        for (int i = 0; i < noOfTasksPerExecutor; i++) {
            // the map thread where data is produced
            LOG.info(String.format("%d Starting thread %d", id, i + id * noOfTasksPerExecutor));
            Thread mapThread = new Thread(new MapWorker(i + id * noOfTasksPerExecutor));
            mapThread.start();
        }
        // we need to progress the communication
        while (true) {
            try {
                // progress the channel
                channel.progress();
                // we should progress the communication directive
                aggregate.progress();
                Thread.yield();
            } catch (Throwable t) {
                t.printStackTrace();
            }
        }
    } catch (Throwable t) {
        t.printStackTrace();
    }
}
Also used : HashMap(java.util.HashMap) TWSCommunication(edu.iu.dsc.tws.comms.core.TWSCommunication) TaskPlan(edu.iu.dsc.tws.comms.core.TaskPlan) TWSNetwork(edu.iu.dsc.tws.comms.core.TWSNetwork) HashSet(java.util.HashSet)
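
In Example 30 each container derives its source task ids from its container id: with NO_OF_TASKS tasks split over the available containers, container id starts tasks id * noOfTasksPerExecutor through id * noOfTasksPerExecutor + noOfTasksPerExecutor - 1, and the gather destination is task id NO_OF_TASKS. A small check of that arithmetic with assumed constants (4 tasks, 2 containers):

// Illustrative task-id layout; NO_OF_TASKS = 4 and 2 containers are assumed values.
int noOfTasks = 4;
int noOfContainers = 2;
int noOfTasksPerExecutor = noOfTasks / noOfContainers;  // 2 tasks per container
for (int id = 0; id < noOfContainers; id++) {
    for (int i = 0; i < noOfTasksPerExecutor; i++) {
        // container 0 starts tasks 0 and 1, container 1 starts tasks 2 and 3
        System.out.println("container " + id + " starts source task " + (i + id * noOfTasksPerExecutor));
    }
}
System.out.println("gather destination task id: " + noOfTasks);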

Aggregations

TaskPlan (edu.iu.dsc.tws.comms.core.TaskPlan) 35
HashMap (java.util.HashMap) 33
HashSet (java.util.HashSet) 33
TWSNetwork (edu.iu.dsc.tws.comms.core.TWSNetwork) 29
TWSCommunication (edu.iu.dsc.tws.comms.core.TWSCommunication) 28
ArrayList (java.util.ArrayList) 12
LinkedQueue (edu.iu.dsc.tws.task.api.LinkedQueue) 10
Message (edu.iu.dsc.tws.task.api.Message) 10
TaskExecutorFixedThread (edu.iu.dsc.tws.task.core.TaskExecutorFixedThread) 10
RandomString (edu.iu.dsc.tws.examples.utils.RandomString) 7
DataflowTaskGraphGenerator (edu.iu.dsc.tws.task.taskgraphbuilder.DataflowTaskGraphGenerator) 7
List (java.util.List) 7
Random (java.util.Random) 7
Map (java.util.Map) 5
GatherBatchFinalReceiver (edu.iu.dsc.tws.comms.mpi.io.gather.GatherBatchFinalReceiver) 4
GatherBatchPartialReceiver (edu.iu.dsc.tws.comms.mpi.io.gather.GatherBatchPartialReceiver) 4
ResourceContainer (edu.iu.dsc.tws.rsched.spi.resource.ResourceContainer) 4
SinkTask (edu.iu.dsc.tws.task.api.SinkTask) 4
SourceTask (edu.iu.dsc.tws.task.api.SourceTask) 4
Task (edu.iu.dsc.tws.task.api.Task) 4