Example 71 with TaskId

Use of org.apache.hadoop.mapreduce.v2.api.records.TaskId in project hadoop by apache.

The class DefaultSpeculator, method addSpeculativeAttempt.

// Add attempt to a given Task.
protected void addSpeculativeAttempt(TaskId taskID) {
    LOG.info("DefaultSpeculator.addSpeculativeAttempt -- we are speculating " + taskID);
    eventHandler.handle(new TaskEvent(taskID, TaskEventType.T_ADD_SPEC_ATTEMPT));
    mayHaveSpeculated.add(taskID);
}
Also used : TaskEvent(org.apache.hadoop.mapreduce.v2.app.job.event.TaskEvent)
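For context, here is a minimal, self-contained sketch of building such a TaskId and the speculative-attempt TaskEvent outside the speculator. The class name, application timestamp, and task numbers are hypothetical; MRBuilderUtils is assumed to be the usual factory for the v2 record types.

import org.apache.hadoop.mapreduce.v2.api.records.JobId;
import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
import org.apache.hadoop.mapreduce.v2.app.job.event.TaskEvent;
import org.apache.hadoop.mapreduce.v2.app.job.event.TaskEventType;
import org.apache.hadoop.mapreduce.v2.util.MRBuilderUtils;
import org.apache.hadoop.yarn.api.records.ApplicationId;

public class SpeculativeAttemptSketch {
    public static void main(String[] args) {
        // Build a TaskId from scratch: application -> job -> task.
        ApplicationId appId = ApplicationId.newInstance(System.currentTimeMillis(), 1);
        JobId jobId = MRBuilderUtils.newJobId(appId, 1);
        TaskId taskId = MRBuilderUtils.newTaskId(jobId, 0, TaskType.MAP);

        // The event the speculator dispatches; here we only construct and inspect it
        // instead of handing it to an event handler.
        TaskEvent event = new TaskEvent(taskId, TaskEventType.T_ADD_SPEC_ATTEMPT);
        System.out.println("Would speculate " + event.getTaskID());
    }
}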

Example 72 with TaskId

Use of org.apache.hadoop.mapreduce.v2.api.records.TaskId in project hadoop by apache.

The class TaskAttemptImpl, method createJobCounterUpdateEventTASucceeded.

private static JobCounterUpdateEvent createJobCounterUpdateEventTASucceeded(TaskAttemptImpl taskAttempt) {
    TaskId taskId = taskAttempt.attemptId.getTaskId();
    JobCounterUpdateEvent jce = new JobCounterUpdateEvent(taskId.getJobId());
    updateMillisCounters(jce, taskAttempt);
    return jce;
}
Also used : TaskId(org.apache.hadoop.mapreduce.v2.api.records.TaskId) JobCounterUpdateEvent(org.apache.hadoop.mapreduce.v2.app.job.event.JobCounterUpdateEvent)
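A minimal sketch of the id traversal this helper performs (attempt to task to job) and of populating the resulting JobCounterUpdateEvent. The class name, ids, and the counter incremented are hypothetical; the real helper updates millis counters via updateMillisCounters.

import org.apache.hadoop.mapreduce.JobCounter;
import org.apache.hadoop.mapreduce.v2.api.records.JobId;
import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
import org.apache.hadoop.mapreduce.v2.app.job.event.JobCounterUpdateEvent;
import org.apache.hadoop.mapreduce.v2.util.MRBuilderUtils;
import org.apache.hadoop.yarn.api.records.ApplicationId;

public class CounterUpdateSketch {
    public static void main(String[] args) {
        // Hypothetical ids, built with the MRBuilderUtils factories.
        ApplicationId appId = ApplicationId.newInstance(System.currentTimeMillis(), 1);
        JobId jobId = MRBuilderUtils.newJobId(appId, 1);
        TaskId taskId = MRBuilderUtils.newTaskId(jobId, 3, TaskType.REDUCE);
        TaskAttemptId attemptId = MRBuilderUtils.newTaskAttemptId(taskId, 0);

        // Same traversal as the helper above: the event is keyed by the attempt's parent job.
        JobCounterUpdateEvent jce =
            new JobCounterUpdateEvent(attemptId.getTaskId().getJobId());
        // An illustrative counter increment.
        jce.addCounterUpdate(JobCounter.TOTAL_LAUNCHED_MAPS, 1);
        System.out.println("Counter update targets job " + jce.getJobId());
    }
}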

Example 73 with TaskId

Use of org.apache.hadoop.mapreduce.v2.api.records.TaskId in project hadoop by apache.

The class CountersBlock, method getCounters.

private void getCounters(AppContext ctx) {
    JobId jobID = null;
    TaskId taskID = null;
    String tid = $(TASK_ID);
    if (!tid.isEmpty()) {
        taskID = MRApps.toTaskID(tid);
        jobID = taskID.getJobId();
    } else {
        String jid = $(JOB_ID);
        if (jid != null && !jid.isEmpty()) {
            jobID = MRApps.toJobID(jid);
        }
    }
    if (jobID == null) {
        return;
    }
    job = ctx.getJob(jobID);
    if (job == null) {
        return;
    }
    if (taskID != null) {
        task = job.getTask(taskID);
        if (task == null) {
            return;
        }
        total = task.getCounters();
        return;
    }
    // Get all types of counters
    Map<TaskId, Task> tasks = job.getTasks();
    total = job.getAllCounters();
    boolean needTotalCounters = false;
    if (total == null) {
        total = new Counters();
        needTotalCounters = true;
    }
    map = new Counters();
    reduce = new Counters();
    for (Task t : tasks.values()) {
        Counters counters = t.getCounters();
        if (counters == null) {
            continue;
        }
        switch(t.getType()) {
            case MAP:
                map.incrAllCounters(counters);
                break;
            case REDUCE:
                reduce.incrAllCounters(counters);
                break;
        }
        if (needTotalCounters) {
            total.incrAllCounters(counters);
        }
    }
}
Also used : Task(org.apache.hadoop.mapreduce.v2.app.job.Task) TaskId(org.apache.hadoop.mapreduce.v2.api.records.TaskId) Counters(org.apache.hadoop.mapreduce.Counters) JobId(org.apache.hadoop.mapreduce.v2.api.records.JobId)
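The loop above merges per-task counters into map, reduce, and total aggregates with incrAllCounters. Below is a minimal standalone sketch of that aggregation pattern; the class name and the counter values are hypothetical stand-ins for what Task.getCounters() would return.

import org.apache.hadoop.mapreduce.Counters;
import org.apache.hadoop.mapreduce.TaskCounter;

public class CounterAggregationSketch {
    public static void main(String[] args) {
        // Stand-ins for per-task counters.
        Counters mapTaskCounters = new Counters();
        mapTaskCounters.findCounter(TaskCounter.MAP_INPUT_RECORDS).increment(100);

        Counters reduceTaskCounters = new Counters();
        reduceTaskCounters.findCounter(TaskCounter.REDUCE_INPUT_RECORDS).increment(40);

        // Same aggregation pattern as the loop above: merge each task's counters into a total.
        Counters total = new Counters();
        total.incrAllCounters(mapTaskCounters);
        total.incrAllCounters(reduceTaskCounters);

        System.out.println("map input records    = "
            + total.findCounter(TaskCounter.MAP_INPUT_RECORDS).getValue());
        System.out.println("reduce input records = "
            + total.findCounter(TaskCounter.REDUCE_INPUT_RECORDS).getValue());
    }
}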

Example 74 with TaskId

Use of org.apache.hadoop.mapreduce.v2.api.records.TaskId in project hadoop by apache.

The class SingleCounterBlock, method populateMembers.

private void populateMembers(AppContext ctx) {
    JobId jobID = null;
    TaskId taskID = null;
    String tid = $(TASK_ID);
    if ($(TITLE).contains("MAPS")) {
        counterType = TaskType.MAP;
    } else if ($(TITLE).contains("REDUCES")) {
        counterType = TaskType.REDUCE;
    } else {
        counterType = null;
    }
    if (!tid.isEmpty()) {
        taskID = MRApps.toTaskID(tid);
        jobID = taskID.getJobId();
    } else {
        String jid = $(JOB_ID);
        if (!jid.isEmpty()) {
            jobID = MRApps.toJobID(jid);
        }
    }
    if (jobID == null) {
        return;
    }
    job = ctx.getJob(jobID);
    if (job == null) {
        return;
    }
    if (taskID != null) {
        task = job.getTask(taskID);
        if (task == null) {
            return;
        }
        for (Map.Entry<TaskAttemptId, TaskAttempt> entry : task.getAttempts().entrySet()) {
            long value = 0;
            Counters counters = entry.getValue().getCounters();
            CounterGroup group = (counters != null) ? counters.getGroup($(COUNTER_GROUP)) : null;
            if (group != null) {
                Counter c = group.findCounter($(COUNTER_NAME));
                if (c != null) {
                    value = c.getValue();
                }
            }
            values.put(MRApps.toString(entry.getKey()), value);
        }
        return;
    }
    // Get all types of counters
    Map<TaskId, Task> tasks = job.getTasks();
    for (Map.Entry<TaskId, Task> entry : tasks.entrySet()) {
        long value = 0;
        Counters counters = entry.getValue().getCounters();
        CounterGroup group = (counters != null) ? counters.getGroup($(COUNTER_GROUP)) : null;
        if (group != null) {
            Counter c = group.findCounter($(COUNTER_NAME));
            if (c != null) {
                value = c.getValue();
            }
        }
        if (counterType == null || counterType == entry.getValue().getType()) {
            values.put(MRApps.toString(entry.getKey()), value);
        }
    }
}
Also used : Task(org.apache.hadoop.mapreduce.v2.app.job.Task) TaskId(org.apache.hadoop.mapreduce.v2.api.records.TaskId) TaskAttemptId(org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId) CounterGroup(org.apache.hadoop.mapreduce.CounterGroup) Counter(org.apache.hadoop.mapreduce.Counter) Counters(org.apache.hadoop.mapreduce.Counters) TaskAttempt(org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt) Map(java.util.Map) TreeMap(java.util.TreeMap) JobId(org.apache.hadoop.mapreduce.v2.api.records.JobId)
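Both branches above share the same null-tolerant lookup: resolve the group named by $(COUNTER_GROUP), then the counter named by $(COUNTER_NAME), defaulting to 0. A minimal sketch of that lookup against a hypothetical in-memory Counters instance; the class name and the group/counter strings are illustrative.

import org.apache.hadoop.mapreduce.Counter;
import org.apache.hadoop.mapreduce.CounterGroup;
import org.apache.hadoop.mapreduce.Counters;
import org.apache.hadoop.mapreduce.TaskCounter;

public class SingleCounterLookupSketch {
    public static void main(String[] args) {
        // Stand-in counters; in the block above they come from TaskAttempt.getCounters().
        Counters counters = new Counters();
        counters.findCounter(TaskCounter.SPILLED_RECORDS).increment(7);

        // Framework counters live in a group named after the TaskCounter enum class.
        String groupName = TaskCounter.class.getName();
        String counterName = "SPILLED_RECORDS";

        long value = 0;
        CounterGroup group = (counters != null) ? counters.getGroup(groupName) : null;
        if (group != null) {
            Counter c = group.findCounter(counterName);
            if (c != null) {
                value = c.getValue();
            }
        }
        System.out.println(groupName + "." + counterName + " = " + value);
    }
}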

Example 75 with TaskId

Use of org.apache.hadoop.mapreduce.v2.api.records.TaskId in project hadoop by apache.

The class AMWebServices, method getJobTask.

@GET
@Path("/jobs/{jobid}/tasks/{taskid}")
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8, MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 })
public TaskInfo getJobTask(@Context HttpServletRequest hsr, @PathParam("jobid") String jid, @PathParam("taskid") String tid) {
    init();
    Job job = getJobFromJobIdString(jid, appCtx);
    checkAccess(job, hsr);
    Task task = getTaskFromTaskIdString(tid, job);
    return new TaskInfo(task);
}
Also used : TaskInfo(org.apache.hadoop.mapreduce.v2.app.webapp.dao.TaskInfo) Task(org.apache.hadoop.mapreduce.v2.app.job.Task) Job(org.apache.hadoop.mapreduce.v2.app.job.Job) Path(javax.ws.rs.Path) Produces(javax.ws.rs.Produces) GET(javax.ws.rs.GET)
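The endpoint receives the jobid and taskid path parameters as strings and resolves them to jobs and tasks. A minimal sketch of the string-to-record conversions, using the same MRApps helpers the CountersBlock and SingleCounterBlock examples rely on; the class name and the concrete id strings are hypothetical, chosen only to match the expected formats.

import org.apache.hadoop.mapreduce.v2.api.records.JobId;
import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
import org.apache.hadoop.mapreduce.v2.util.MRApps;

public class TaskIdParsingSketch {
    public static void main(String[] args) {
        // Illustrative path parameters in the job_/task_ id formats.
        String jid = "job_1483228800000_0001";
        String tid = "task_1483228800000_0001_m_000000";

        JobId jobId = MRApps.toJobID(jid);
        TaskId taskId = MRApps.toTaskID(tid);

        // The parsed TaskId carries its parent JobId, which should match the job path parameter.
        System.out.println("job  = " + MRApps.toString(jobId));
        System.out.println("task = " + MRApps.toString(taskId)
            + " (parent job " + MRApps.toString(taskId.getJobId()) + ")");
    }
}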

Aggregations

TaskId (org.apache.hadoop.mapreduce.v2.api.records.TaskId): 113
Test (org.junit.Test): 75
Task (org.apache.hadoop.mapreduce.v2.app.job.Task): 69
Job (org.apache.hadoop.mapreduce.v2.app.job.Job): 60
JobId (org.apache.hadoop.mapreduce.v2.api.records.JobId): 58
TaskAttemptId (org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId): 56
TaskAttempt (org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt): 42
Configuration (org.apache.hadoop.conf.Configuration): 29
AppContext (org.apache.hadoop.mapreduce.v2.app.AppContext): 24
TaskAttemptEvent (org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent): 24
Path (org.apache.hadoop.fs.Path): 23
ApplicationId (org.apache.hadoop.yarn.api.records.ApplicationId): 22
HashMap (java.util.HashMap): 20
JobConf (org.apache.hadoop.mapred.JobConf): 17
ApplicationAttemptId (org.apache.hadoop.yarn.api.records.ApplicationAttemptId): 17
ContainerId (org.apache.hadoop.yarn.api.records.ContainerId): 17
MapTaskAttemptImpl (org.apache.hadoop.mapred.MapTaskAttemptImpl): 16
TaskAttemptListener (org.apache.hadoop.mapreduce.v2.app.TaskAttemptListener): 16
InetSocketAddress (java.net.InetSocketAddress): 15
TaskSplitMetaInfo (org.apache.hadoop.mapreduce.split.JobSplit.TaskSplitMetaInfo): 15