Example 76 with TaskAttempt

use of org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt in project hadoop by apache.

the class MockJobs method newTask.

public static Task newTask(JobId jid, int i, int m, final boolean hasFailedTasks) {
    final TaskId tid = Records.newRecord(TaskId.class);
    tid.setJobId(jid);
    tid.setId(i);
    tid.setTaskType(TASK_TYPES.next());
    final TaskReport report = newTaskReport(tid);
    final Map<TaskAttemptId, TaskAttempt> attempts = newTaskAttempts(tid, m);
    return new Task() {

        @Override
        public TaskId getID() {
            return tid;
        }

        @Override
        public TaskReport getReport() {
            return report;
        }

        @Override
        public Counters getCounters() {
            if (hasFailedTasks) {
                return null;
            }
            return new Counters(TypeConverter.fromYarn(report.getCounters()));
        }

        @Override
        public float getProgress() {
            return report.getProgress();
        }

        @Override
        public TaskType getType() {
            return tid.getTaskType();
        }

        @Override
        public Map<TaskAttemptId, TaskAttempt> getAttempts() {
            return attempts;
        }

        @Override
        public TaskAttempt getAttempt(TaskAttemptId attemptID) {
            return attempts.get(attemptID);
        }

        @Override
        public boolean isFinished() {
            switch(report.getTaskState()) {
                case SUCCEEDED:
                case KILLED:
                case FAILED:
                    return true;
            }
            return false;
        }

        @Override
        public boolean canCommit(TaskAttemptId taskAttemptID) {
            return false;
        }

        @Override
        public TaskState getState() {
            return report.getTaskState();
        }
    };
}
Also used : Task(org.apache.hadoop.mapreduce.v2.app.job.Task) TaskId(org.apache.hadoop.mapreduce.v2.api.records.TaskId) TaskReport(org.apache.hadoop.mapreduce.v2.api.records.TaskReport) TaskAttemptId(org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId) Counters(org.apache.hadoop.mapreduce.Counters) TaskAttempt(org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt)
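
A mock Task like the one above is normally consumed through its map of attempts. The helper below is a hypothetical illustration (not part of MockJobs or the Hadoop sources) that counts the attempts of such a Task in a given state, using only the Task and TaskAttempt methods shown in this and the following examples.

import java.util.Map;

import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptState;
import org.apache.hadoop.mapreduce.v2.app.job.Task;
import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;

public final class TaskAttemptCounter {

    private TaskAttemptCounter() {
    }

    // Counts how many attempts of the given Task are currently in the given state.
    public static int countAttemptsInState(Task task, TaskAttemptState state) {
        int count = 0;
        Map<TaskAttemptId, TaskAttempt> attempts = task.getAttempts();
        for (TaskAttempt attempt : attempts.values()) {
            if (attempt.getState() == state) {
                count++;
            }
        }
        return count;
    }
}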

Example 77 with TaskAttempt

use of org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt in project hadoop by apache.

the class TestAMInfos method testAMInfosWithoutRecoveryEnabled.

@Test
public void testAMInfosWithoutRecoveryEnabled() throws Exception {
    int runCount = 0;
    MRApp app = new MRAppWithHistory(1, 0, false, this.getClass().getName(), true, ++runCount);
    Configuration conf = new Configuration();
    conf.setBoolean(MRJobConfig.JOB_UBERTASK_ENABLE, false);
    Job job = app.submit(conf);
    app.waitForState(job, JobState.RUNNING);
    long am1StartTime = app.getAllAMInfos().get(0).getStartTime();
    Assert.assertEquals("No of tasks not correct", 1, job.getTasks().size());
    Iterator<Task> it = job.getTasks().values().iterator();
    Task mapTask = it.next();
    app.waitForState(mapTask, TaskState.RUNNING);
    TaskAttempt taskAttempt = mapTask.getAttempts().values().iterator().next();
    app.waitForState(taskAttempt, TaskAttemptState.RUNNING);
    // stop the app
    app.stop();
    // rerun
    app = new MRAppWithHistory(1, 0, false, this.getClass().getName(), false, ++runCount);
    conf = new Configuration();
    // in rerun the AMInfo will be recovered from previous run even if recovery
    // is not enabled.
    conf.setBoolean(MRJobConfig.MR_AM_JOB_RECOVERY_ENABLE, false);
    conf.setBoolean(MRJobConfig.JOB_UBERTASK_ENABLE, false);
    job = app.submit(conf);
    app.waitForState(job, JobState.RUNNING);
    Assert.assertEquals("No of tasks not correct", 1, job.getTasks().size());
    it = job.getTasks().values().iterator();
    mapTask = it.next();
    // There should be two AMInfos
    List<AMInfo> amInfos = app.getAllAMInfos();
    Assert.assertEquals(2, amInfos.size());
    AMInfo amInfoOne = amInfos.get(0);
    Assert.assertEquals(am1StartTime, amInfoOne.getStartTime());
    app.stop();
}
Also used : AMInfo(org.apache.hadoop.mapreduce.v2.api.records.AMInfo) MRAppWithHistory(org.apache.hadoop.mapreduce.v2.app.TestRecovery.MRAppWithHistory) Task(org.apache.hadoop.mapreduce.v2.app.job.Task) Configuration(org.apache.hadoop.conf.Configuration) TaskAttempt(org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt) Job(org.apache.hadoop.mapreduce.v2.app.job.Job) Test(org.junit.Test)
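
When a start-time assertion like the one above fails, it can help to see the whole recovered AM history. The helper below is a hypothetical sketch (not part of the Hadoop test sources); it relies on the getStartTime() getter used in the test and assumes the AMInfo record also exposes getAppAttemptId().

import java.util.List;

import org.apache.hadoop.mapreduce.v2.api.records.AMInfo;

public final class AMInfoDump {

    private AMInfoDump() {
    }

    // Prints one line per AM attempt so the recovered AMInfo history is visible in test output.
    public static void dump(List<AMInfo> amInfos) {
        for (AMInfo info : amInfos) {
            System.out.println(info.getAppAttemptId() + " started at " + info.getStartTime());
        }
    }
}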

Example 78 with TaskAttempt

use of org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt in project hadoop by apache.

the class StartEndTimesBase method updateAttempt.

@Override
public void updateAttempt(TaskAttemptStatus status, long timestamp) {
    TaskAttemptId attemptID = status.id;
    TaskId taskID = attemptID.getTaskId();
    JobId jobID = taskID.getJobId();
    Job job = context.getJob(jobID);
    if (job == null) {
        return;
    }
    Task task = job.getTask(taskID);
    if (task == null) {
        return;
    }
    Long boxedStart = startTimes.get(attemptID);
    long start = boxedStart == null ? Long.MIN_VALUE : boxedStart;
    TaskAttempt taskAttempt = task.getAttempt(attemptID);
    if (taskAttempt.getState() == TaskAttemptState.SUCCEEDED) {
        boolean isNew = false;
        // is this  a new success?
        synchronized (doneTasks) {
            if (!doneTasks.contains(task)) {
                doneTasks.add(task);
                isNew = true;
            }
        }
        // If a task ends up completing more than once [for example a speculative
        // attempt also finishes, possibly running on local data] we only count
        // the first one.
        if (isNew) {
            long finish = timestamp;
            if (start > 1L && finish > 1L && start <= finish) {
                long duration = finish - start;
                DataStatistics statistics = dataStatisticsForTask(taskID);
                if (statistics != null) {
                    statistics.add(duration);
                }
            }
        }
    }
}
Also used : Task(org.apache.hadoop.mapreduce.v2.app.job.Task) TaskId(org.apache.hadoop.mapreduce.v2.api.records.TaskId) TaskAttemptId(org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId) TaskAttempt(org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt) Job(org.apache.hadoop.mapreduce.v2.app.job.Job) JobId(org.apache.hadoop.mapreduce.v2.api.records.JobId)
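
The guard start > 1L && finish > 1L && start <= finish above is what keeps attempts with an unrecorded start time (encoded as Long.MIN_VALUE) out of the statistics. Below is a standalone sketch of just that guard, with hypothetical names and no Hadoop dependencies.

public final class DurationGuard {

    private static final long MISSING = Long.MIN_VALUE;

    private DurationGuard() {
    }

    // Returns the attempt duration, or -1 if either timestamp is unusable
    // (start never recorded, a placeholder value, or start after finish).
    public static long usableDuration(Long boxedStart, long finish) {
        long start = boxedStart == null ? MISSING : boxedStart;
        if (start > 1L && finish > 1L && start <= finish) {
            return finish - start;
        }
        return -1L;
    }

    public static void main(String[] args) {
        System.out.println(usableDuration(null, 2000L));   // -1: start was never recorded
        System.out.println(usableDuration(1000L, 2000L));  // 1000: a usable sample
        System.out.println(usableDuration(3000L, 2000L));  // -1: finish precedes start
    }
}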

Example 79 with TaskAttempt

use of org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt in project hadoop by apache.

the class AMWebServices method getJobTaskAttemptIdCounters.

@GET
@Path("/jobs/{jobid}/tasks/{taskid}/attempts/{attemptid}/counters")
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8, MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 })
public JobTaskAttemptCounterInfo getJobTaskAttemptIdCounters(@Context HttpServletRequest hsr, @PathParam("jobid") String jid, @PathParam("taskid") String tid, @PathParam("attemptid") String attId) {
    init();
    Job job = getJobFromJobIdString(jid, appCtx);
    checkAccess(job, hsr);
    Task task = getTaskFromTaskIdString(tid, job);
    TaskAttempt ta = getTaskAttemptFromTaskAttemptString(attId, task);
    return new JobTaskAttemptCounterInfo(ta);
}
Also used : Task(org.apache.hadoop.mapreduce.v2.app.job.Task) JobTaskAttemptCounterInfo(org.apache.hadoop.mapreduce.v2.app.webapp.dao.JobTaskAttemptCounterInfo) TaskAttempt(org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt) Job(org.apache.hadoop.mapreduce.v2.app.job.Job) Path(javax.ws.rs.Path) Produces(javax.ws.rs.Produces) GET(javax.ws.rs.GET)
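
A client-side call to the counters endpoint above might look like the sketch below. The host, port, and job/task/attempt ids are placeholders, and the /ws/v1/mapreduce prefix is assumed to be the web services root in front of the @Path shown; adjust both to the actual AM (or RM proxy) address in your deployment.

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;

public class AttemptCountersClient {

    public static void main(String[] args) throws Exception {
        // Placeholder host, port, and ids; replace with real values for your cluster.
        String url = "http://am-host:8080/ws/v1/mapreduce"
                + "/jobs/job_1234567890123_0001"
                + "/tasks/task_1234567890123_0001_m_000000"
                + "/attempts/attempt_1234567890123_0001_m_000000_0"
                + "/counters";
        HttpURLConnection conn = (HttpURLConnection) new URL(url).openConnection();
        conn.setRequestMethod("GET");
        // Ask for the JSON representation produced by the endpoint above.
        conn.setRequestProperty("Accept", "application/json");
        try (BufferedReader reader = new BufferedReader(
                new InputStreamReader(conn.getInputStream(), StandardCharsets.UTF_8))) {
            String line;
            while ((line = reader.readLine()) != null) {
                System.out.println(line);
            }
        } finally {
            conn.disconnect();
        }
    }
}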

Example 80 with TaskAttempt

use of org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt in project hadoop by apache.

the class AMWebServices method killJobTaskAttempt.

protected Response killJobTaskAttempt(TaskAttempt ta, UserGroupInformation callerUGI, HttpServletRequest hsr) throws IOException, InterruptedException {
    Preconditions.checkNotNull(ta, "ta cannot be null");
    String userName = callerUGI.getUserName();
    final TaskAttemptId attemptId = ta.getID();
    try {
        callerUGI.doAs(new PrivilegedExceptionAction<KillTaskAttemptResponse>() {

            @Override
            public KillTaskAttemptResponse run() throws IOException, YarnException {
                KillTaskAttemptRequest req = new KillTaskAttemptRequestPBImpl();
                req.setTaskAttemptId(attemptId);
                return service.forceKillTaskAttempt(req);
            }
        });
    } catch (UndeclaredThrowableException ue) {
        // bubble that up to the user
        if (ue.getCause() instanceof YarnException) {
            YarnException ye = (YarnException) ue.getCause();
            if (ye.getCause() instanceof AccessControlException) {
                String taId = attemptId.toString();
                String msg = "Unauthorized attempt to kill task attempt " + taId + " by remote user " + userName;
                return Response.status(Status.FORBIDDEN).entity(msg).build();
            } else {
                throw ue;
            }
        } else {
            throw ue;
        }
    }
    JobTaskAttemptState ret = new JobTaskAttemptState();
    ret.setState(TaskAttemptState.KILLED.toString());
    return Response.status(Status.OK).entity(ret).build();
}
Also used : KillTaskAttemptRequest(org.apache.hadoop.mapreduce.v2.api.protocolrecords.KillTaskAttemptRequest) JobTaskAttemptState(org.apache.hadoop.mapreduce.v2.app.webapp.dao.JobTaskAttemptState) KillTaskAttemptRequestPBImpl(org.apache.hadoop.mapreduce.v2.api.protocolrecords.impl.pb.KillTaskAttemptRequestPBImpl) TaskAttemptId(org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId) UndeclaredThrowableException(java.lang.reflect.UndeclaredThrowableException) AccessControlException(java.security.AccessControlException) IOException(java.io.IOException) YarnException(org.apache.hadoop.yarn.exceptions.YarnException) KillTaskAttemptResponse(org.apache.hadoop.mapreduce.v2.api.protocolrecords.KillTaskAttemptResponse)
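
The try/catch above follows the usual doAs pattern: a checked exception that doAs cannot rethrow directly (here the YarnException wrapping an AccessControlException) surfaces as an UndeclaredThrowableException, and the caller inspects getCause() to decide between an HTTP error response and a rethrow. Below is a simplified standalone sketch of that unwrapping, with hypothetical names and a single level of wrapping instead of the nested YarnException/AccessControlException above.

import java.lang.reflect.UndeclaredThrowableException;

public class UnwrapSketch {

    // Hypothetical checked exception standing in for the AccessControlException case above.
    static class NotAuthorizedException extends Exception {
        NotAuthorizedException(String message) {
            super(message);
        }
    }

    // Stand-in for the doAs call: a checked exception it cannot rethrow directly
    // escapes wrapped in UndeclaredThrowableException.
    interface CheckedAction<T> {
        T run() throws Exception;
    }

    static <T> T runWrapped(CheckedAction<T> action) {
        try {
            return action.run();
        } catch (Exception e) {
            throw new UndeclaredThrowableException(e);
        }
    }

    public static void main(String[] args) {
        try {
            runWrapped(() -> {
                throw new NotAuthorizedException("remote user may not kill this attempt");
            });
        } catch (UndeclaredThrowableException ue) {
            // Unwrap and decide: map the known cause to an error response, rethrow the rest.
            if (ue.getCause() instanceof NotAuthorizedException) {
                System.out.println("would return 403 FORBIDDEN: " + ue.getCause().getMessage());
            } else {
                throw ue;
            }
        }
    }
}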

Aggregations

TaskAttempt (org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt) 102
Task (org.apache.hadoop.mapreduce.v2.app.job.Task) 86
Job (org.apache.hadoop.mapreduce.v2.app.job.Job) 76
Test (org.junit.Test) 63
TaskAttemptId (org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId) 60
Configuration (org.apache.hadoop.conf.Configuration) 45
JobId (org.apache.hadoop.mapreduce.v2.api.records.JobId) 32
TaskAttemptEvent (org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent) 32
TaskId (org.apache.hadoop.mapreduce.v2.api.records.TaskId) 29
ClientResponse (com.sun.jersey.api.client.ClientResponse) 18
WebResource (com.sun.jersey.api.client.WebResource) 18
JSONObject (org.codehaus.jettison.json.JSONObject) 12
TaskAttemptReport (org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptReport) 9
IOException (java.io.IOException) 8
Path (javax.ws.rs.Path) 8
Produces (javax.ws.rs.Produces) 8
StringReader (java.io.StringReader) 7
HashMap (java.util.HashMap) 7
GET (javax.ws.rs.GET) 7
DocumentBuilder (javax.xml.parsers.DocumentBuilder) 7