
Example 11 with TaskInfo

Use of org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskInfo in project hadoop by apache.

The class MRAppMaster, method parsePreviousJobHistory.

private void parsePreviousJobHistory() throws IOException {
    FSDataInputStream in = getPreviousJobHistoryStream(getConfig(), appAttemptID);
    JobHistoryParser parser = new JobHistoryParser(in);
    JobInfo jobInfo = parser.parse();
    Exception parseException = parser.getParseException();
    if (parseException != null) {
        LOG.info("Got an error parsing job-history file" + ", ignoring incomplete events.", parseException);
    }
    Map<org.apache.hadoop.mapreduce.TaskID, TaskInfo> taskInfos = jobInfo.getAllTasks();
    for (TaskInfo taskInfo : taskInfos.values()) {
        if (TaskState.SUCCEEDED.toString().equals(taskInfo.getTaskStatus())) {
            // Prune attempts the history file never recorded as completed, so
            // recovery does not try to replay half-written attempt events.
            Iterator<Entry<TaskAttemptID, TaskAttemptInfo>> taskAttemptIterator = taskInfo.getAllTaskAttempts().entrySet().iterator();
            while (taskAttemptIterator.hasNext()) {
                Map.Entry<TaskAttemptID, TaskAttemptInfo> currentEntry = taskAttemptIterator.next();
                if (!jobInfo.getAllCompletedTaskAttempts().containsKey(currentEntry.getKey())) {
                    taskAttemptIterator.remove();
                }
            }
            completedTasksFromPreviousRun.put(TypeConverter.toYarn(taskInfo.getTaskId()), taskInfo);
            LOG.info("Read from history task " + TypeConverter.toYarn(taskInfo.getTaskId()));
        }
    }
    LOG.info("Read completed tasks from history " + completedTasksFromPreviousRun.size());
    recoveredJobStartTime = jobInfo.getLaunchTime();
    // recover AMInfos
    List<JobHistoryParser.AMInfo> jhAmInfoList = jobInfo.getAMInfos();
    if (jhAmInfoList != null) {
        for (JobHistoryParser.AMInfo jhAmInfo : jhAmInfoList) {
            AMInfo amInfo = MRBuilderUtils.newAMInfo(jhAmInfo.getAppAttemptId(), jhAmInfo.getStartTime(), jhAmInfo.getContainerId(), jhAmInfo.getNodeManagerHost(), jhAmInfo.getNodeManagerPort(), jhAmInfo.getNodeManagerHttpPort());
            amInfos.add(amInfo);
        }
    }
}
Also used : TaskAttemptID(org.apache.hadoop.mapreduce.TaskAttemptID) InvocationTargetException(java.lang.reflect.InvocationTargetException) NoSuchAlgorithmException(java.security.NoSuchAlgorithmException) IOException(java.io.IOException) YarnRuntimeException(org.apache.hadoop.yarn.exceptions.YarnRuntimeException) TaskInfo(org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskInfo) AMInfo(org.apache.hadoop.mapreduce.v2.api.records.AMInfo) Entry(java.util.Map.Entry) JobHistoryParser(org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser) JobInfo(org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo) TaskAttemptInfo(org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskAttemptInfo) FSDataInputStream(org.apache.hadoop.fs.FSDataInputStream) Map(java.util.Map) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) HashMap(java.util.HashMap)
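
For context, here is a minimal standalone sketch (not part of the Hadoop source above) that drives JobHistoryParser against a history file and prints per-task status; the class name and the path argument are illustrative placeholders.

import java.io.IOException;
import java.util.Map;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.TaskID;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskInfo;

public class HistoryDump {
    public static void main(String[] args) throws IOException {
        // args[0] is a placeholder path to a .jhist file (local or on HDFS).
        Configuration conf = new Configuration();
        Path historyFile = new Path(args[0]);
        FSDataInputStream in = historyFile.getFileSystem(conf).open(historyFile);
        JobHistoryParser parser = new JobHistoryParser(in);
        JobInfo jobInfo = parser.parse();
        // As in MRAppMaster above: a parse exception signals truncated events,
        // not necessarily an unusable file.
        if (parser.getParseException() != null) {
            System.err.println("Incomplete history: " + parser.getParseException());
        }
        for (Map.Entry<TaskID, TaskInfo> e : jobInfo.getAllTasks().entrySet()) {
            System.out.println(e.getKey() + " -> " + e.getValue().getTaskStatus());
        }
    }
}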

Example 12 with TaskInfo

Use of org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskInfo in project hadoop by apache.

The class TestRecovery, method testRecoveryTaskSuccessAllAttemptsSucceed.

@Test
public void testRecoveryTaskSuccessAllAttemptsSucceed() {
    LOG.info("--- START:  testRecoveryTaskSuccessAllAttemptsFail ---");
    long clusterTimestamp = System.currentTimeMillis();
    EventHandler mockEventHandler = mock(EventHandler.class);
    MapTaskImpl recoverMapTask = getMockMapTask(clusterTimestamp, mockEventHandler);
    TaskId taskId = recoverMapTask.getID();
    JobID jobID = new JobID(Long.toString(clusterTimestamp), 1);
    TaskID taskID = new TaskID(jobID, org.apache.hadoop.mapreduce.TaskType.MAP, taskId.getId());
    //Mock up the TaskAttempts
    Map<TaskAttemptID, TaskAttemptInfo> mockTaskAttempts = new HashMap<TaskAttemptID, TaskAttemptInfo>();
    TaskAttemptID taId1 = new TaskAttemptID(taskID, 2);
    TaskAttemptInfo mockTAinfo1 = getMockTaskAttemptInfo(taId1, TaskAttemptState.SUCCEEDED);
    mockTaskAttempts.put(taId1, mockTAinfo1);
    TaskAttemptID taId2 = new TaskAttemptID(taskID, 1);
    TaskAttemptInfo mockTAinfo2 = getMockTaskAttemptInfo(taId2, TaskAttemptState.SUCCEEDED);
    mockTaskAttempts.put(taId2, mockTAinfo2);
    OutputCommitter mockCommitter = mock(OutputCommitter.class);
    TaskInfo mockTaskInfo = mock(TaskInfo.class);
    when(mockTaskInfo.getTaskStatus()).thenReturn("SUCCEEDED");
    when(mockTaskInfo.getTaskId()).thenReturn(taskID);
    when(mockTaskInfo.getAllTaskAttempts()).thenReturn(mockTaskAttempts);
    recoverMapTask.handle(new TaskRecoverEvent(taskId, mockTaskInfo, mockCommitter, true));
    ArgumentCaptor<Event> arg = ArgumentCaptor.forClass(Event.class);
    verify(mockEventHandler, atLeast(1)).handle((org.apache.hadoop.yarn.event.Event) arg.capture());
    Map<TaskAttemptID, TaskAttemptState> finalAttemptStates = new HashMap<TaskAttemptID, TaskAttemptState>();
    finalAttemptStates.put(taId1, TaskAttemptState.SUCCEEDED);
    finalAttemptStates.put(taId2, TaskAttemptState.SUCCEEDED);
    List<EventType> jobHistoryEvents = new ArrayList<EventType>();
    jobHistoryEvents.add(EventType.TASK_STARTED);
    jobHistoryEvents.add(EventType.MAP_ATTEMPT_STARTED);
    jobHistoryEvents.add(EventType.MAP_ATTEMPT_FINISHED);
    jobHistoryEvents.add(EventType.MAP_ATTEMPT_STARTED);
    jobHistoryEvents.add(EventType.MAP_ATTEMPT_FINISHED);
    jobHistoryEvents.add(EventType.TASK_FINISHED);
    recoveryChecker(recoverMapTask, TaskState.SUCCEEDED, finalAttemptStates, arg, jobHistoryEvents, 2L, 0L);
}
Also used : OutputCommitter(org.apache.hadoop.mapreduce.OutputCommitter) TaskId(org.apache.hadoop.mapreduce.v2.api.records.TaskId) TaskID(org.apache.hadoop.mapreduce.TaskID) HashMap(java.util.HashMap) TaskAttemptEventType(org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEventType) EventType(org.apache.hadoop.mapreduce.jobhistory.EventType) TaskEventType(org.apache.hadoop.mapreduce.v2.app.job.event.TaskEventType) TaskAttemptID(org.apache.hadoop.mapreduce.TaskAttemptID) ArrayList(java.util.ArrayList) EventHandler(org.apache.hadoop.yarn.event.EventHandler) JobHistoryEventHandler(org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler) MapTaskImpl(org.apache.hadoop.mapreduce.v2.app.job.impl.MapTaskImpl) TaskInfo(org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskInfo) TaskAttemptState(org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptState) TaskAttemptInfo(org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskAttemptInfo) TaskAttemptContainerLaunchedEvent(org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptContainerLaunchedEvent) Event(org.apache.hadoop.mapreduce.jobhistory.Event) TaskRecoverEvent(org.apache.hadoop.mapreduce.v2.app.job.event.TaskRecoverEvent) JobTaskEvent(org.apache.hadoop.mapreduce.v2.app.job.event.JobTaskEvent) JobHistoryEvent(org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent) JobCounterUpdateEvent(org.apache.hadoop.mapreduce.v2.app.job.event.JobCounterUpdateEvent) ContainerLauncherEvent(org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherEvent) TaskAttemptEvent(org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent) TaskEvent(org.apache.hadoop.mapreduce.v2.app.job.event.TaskEvent) JobID(org.apache.hadoop.mapreduce.JobID) Test(org.junit.Test)
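
The helper getMockTaskAttemptInfo is referenced but not shown in this excerpt. A plausible sketch follows, assuming Mockito static imports (mock, when) and the JobHistoryParser.TaskAttemptInfo accessors; the stubbed timestamps are illustrative and the real TestRecovery helper may stub additional fields.

// Hypothetical reconstruction of the helper used above, not copied from TestRecovery.
private TaskAttemptInfo getMockTaskAttemptInfo(TaskAttemptID tid,
        TaskAttemptState state) {
    TaskAttemptInfo mockTAinfo = mock(TaskAttemptInfo.class);
    when(mockTAinfo.getAttemptId()).thenReturn(tid);
    // The recovery code compares against the state's string form.
    when(mockTAinfo.getTaskStatus()).thenReturn(state.toString());
    when(mockTAinfo.getStartTime()).thenReturn(0L);
    when(mockTAinfo.getFinishTime()).thenReturn(1L);
    return mockTAinfo;
}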

Example 13 with TaskInfo

Use of org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskInfo in project hadoop by apache.

The class TestRecovery, method testRecoveryAllFailAttempts.

@Test
public void testRecoveryAllFailAttempts() {
    LOG.info("--- START: testRecoveryAllFailAttempts ---");
    long clusterTimestamp = System.currentTimeMillis();
    EventHandler mockEventHandler = mock(EventHandler.class);
    MapTaskImpl recoverMapTask = getMockMapTask(clusterTimestamp, mockEventHandler);
    TaskId taskId = recoverMapTask.getID();
    JobID jobID = new JobID(Long.toString(clusterTimestamp), 1);
    TaskID taskID = new TaskID(jobID, org.apache.hadoop.mapreduce.TaskType.MAP, taskId.getId());
    //Mock up the TaskAttempts
    Map<TaskAttemptID, TaskAttemptInfo> mockTaskAttempts = new HashMap<TaskAttemptID, TaskAttemptInfo>();
    TaskAttemptID taId1 = new TaskAttemptID(taskID, 2);
    TaskAttemptInfo mockTAinfo1 = getMockTaskAttemptInfo(taId1, TaskAttemptState.FAILED);
    mockTaskAttempts.put(taId1, mockTAinfo1);
    TaskAttemptID taId2 = new TaskAttemptID(taskID, 1);
    TaskAttemptInfo mockTAinfo2 = getMockTaskAttemptInfo(taId2, TaskAttemptState.FAILED);
    mockTaskAttempts.put(taId2, mockTAinfo2);
    OutputCommitter mockCommitter = mock(OutputCommitter.class);
    TaskInfo mockTaskInfo = mock(TaskInfo.class);
    when(mockTaskInfo.getTaskStatus()).thenReturn("FAILED");
    when(mockTaskInfo.getTaskId()).thenReturn(taskID);
    when(mockTaskInfo.getAllTaskAttempts()).thenReturn(mockTaskAttempts);
    recoverMapTask.handle(new TaskRecoverEvent(taskId, mockTaskInfo, mockCommitter, true));
    ArgumentCaptor<Event> arg = ArgumentCaptor.forClass(Event.class);
    verify(mockEventHandler, atLeast(1)).handle((org.apache.hadoop.yarn.event.Event) arg.capture());
    Map<TaskAttemptID, TaskAttemptState> finalAttemptStates = new HashMap<TaskAttemptID, TaskAttemptState>();
    finalAttemptStates.put(taId1, TaskAttemptState.FAILED);
    finalAttemptStates.put(taId2, TaskAttemptState.FAILED);
    List<EventType> jobHistoryEvents = new ArrayList<EventType>();
    jobHistoryEvents.add(EventType.TASK_STARTED);
    jobHistoryEvents.add(EventType.MAP_ATTEMPT_STARTED);
    jobHistoryEvents.add(EventType.MAP_ATTEMPT_FAILED);
    jobHistoryEvents.add(EventType.MAP_ATTEMPT_STARTED);
    jobHistoryEvents.add(EventType.MAP_ATTEMPT_FAILED);
    jobHistoryEvents.add(EventType.TASK_FAILED);
    recoveryChecker(recoverMapTask, TaskState.FAILED, finalAttemptStates, arg, jobHistoryEvents, 2L, 2L);
}
Also used : OutputCommitter(org.apache.hadoop.mapreduce.OutputCommitter) TaskId(org.apache.hadoop.mapreduce.v2.api.records.TaskId) TaskID(org.apache.hadoop.mapreduce.TaskID) HashMap(java.util.HashMap) TaskAttemptEventType(org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEventType) EventType(org.apache.hadoop.mapreduce.jobhistory.EventType) TaskEventType(org.apache.hadoop.mapreduce.v2.app.job.event.TaskEventType) TaskAttemptID(org.apache.hadoop.mapreduce.TaskAttemptID) ArrayList(java.util.ArrayList) EventHandler(org.apache.hadoop.yarn.event.EventHandler) JobHistoryEventHandler(org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler) MapTaskImpl(org.apache.hadoop.mapreduce.v2.app.job.impl.MapTaskImpl) TaskInfo(org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskInfo) TaskAttemptState(org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptState) TaskAttemptInfo(org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskAttemptInfo) TaskAttemptContainerLaunchedEvent(org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptContainerLaunchedEvent) Event(org.apache.hadoop.mapreduce.jobhistory.Event) TaskRecoverEvent(org.apache.hadoop.mapreduce.v2.app.job.event.TaskRecoverEvent) JobTaskEvent(org.apache.hadoop.mapreduce.v2.app.job.event.JobTaskEvent) JobHistoryEvent(org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent) JobCounterUpdateEvent(org.apache.hadoop.mapreduce.v2.app.job.event.JobCounterUpdateEvent) ContainerLauncherEvent(org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherEvent) TaskAttemptEvent(org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent) TaskEvent(org.apache.hadoop.mapreduce.v2.app.job.event.TaskEvent) JobID(org.apache.hadoop.mapreduce.JobID) Test(org.junit.Test)

Example 14 with TaskInfo

Use of org.apache.hadoop.mapreduce.v2.app.webapp.dao.TaskInfo in project hadoop by apache.

The class HsTasksBlock, method render.

/*
   * (non-Javadoc)
   * @see org.apache.hadoop.yarn.webapp.view.HtmlBlock#render(org.apache.hadoop.yarn.webapp.view.HtmlBlock.Block)
   */
@Override
protected void render(Block html) {
    if (app.getJob() == null) {
        html.h2($(TITLE));
        return;
    }
    TaskType type = null;
    String symbol = $(TASK_TYPE);
    if (!symbol.isEmpty()) {
        type = MRApps.taskType(symbol);
    }
    THEAD<TABLE<Hamlet>> thead;
    if (type != null)
        thead = html.table("#" + app.getJob().getID() + type).$class("dt-tasks").thead();
    else
        thead = html.table("#tasks").thead();
    //Create the spanning row
    int attemptColSpan = type == TaskType.REDUCE ? 8 : 3;
    thead.tr().th().$colspan(5).$class("ui-state-default")._("Task")._().th().$colspan(attemptColSpan).$class("ui-state-default")._("Successful Attempt")._()._();
    // Columns from the second "Start Time" onward describe the successful attempt.
    TR<THEAD<TABLE<Hamlet>>> theadRow = thead.tr().th("Name").th("State").th("Start Time").th("Finish Time").th("Elapsed Time").th("Start Time");
    if (type == TaskType.REDUCE) {
        theadRow.th("Shuffle Finish Time");
        theadRow.th("Merge Finish Time");
    }
    theadRow.th("Finish Time");
    if (type == TaskType.REDUCE) {
        theadRow.th("Elapsed Time Shuffle");
        theadRow.th("Elapsed Time Merge");
        theadRow.th("Elapsed Time Reduce");
    }
    theadRow.th("Elapsed Time");
    TBODY<TABLE<Hamlet>> tbody = theadRow._()._().tbody();
    // Write all the data into a JavaScript array of arrays for JQuery
    // DataTables to display
    StringBuilder tasksTableData = new StringBuilder("[\n");
    for (Task task : app.getJob().getTasks().values()) {
        if (type != null && task.getType() != type) {
            continue;
        }
        TaskInfo info = new TaskInfo(task);
        String tid = info.getId();
        long startTime = info.getStartTime();
        long finishTime = info.getFinishTime();
        long elapsed = info.getElapsedTime();
        long attemptStartTime = -1;
        long shuffleFinishTime = -1;
        long sortFinishTime = -1;
        long attemptFinishTime = -1;
        long elapsedShuffleTime = -1;
        long elapsedSortTime = -1;
        long elapsedReduceTime = -1;
        long attemptElapsed = -1;
        TaskAttempt successful = info.getSuccessful();
        if (successful != null) {
            TaskAttemptInfo ta;
            if (type == TaskType.REDUCE) {
                ReduceTaskAttemptInfo rta = new ReduceTaskAttemptInfo(successful, type);
                shuffleFinishTime = rta.getShuffleFinishTime();
                sortFinishTime = rta.getMergeFinishTime();
                elapsedShuffleTime = rta.getElapsedShuffleTime();
                elapsedSortTime = rta.getElapsedMergeTime();
                elapsedReduceTime = rta.getElapsedReduceTime();
                ta = rta;
            } else {
                ta = new TaskAttemptInfo(successful, type, false);
            }
            attemptStartTime = ta.getStartTime();
            attemptFinishTime = ta.getFinishTime();
            attemptElapsed = ta.getElapsedTime();
        }
        tasksTableData.append("[\"").append("<a href='" + url("task", tid)).append("'>").append(tid).append("</a>\",\"").append(info.getState()).append("\",\"").append(startTime).append("\",\"").append(finishTime).append("\",\"").append(elapsed).append("\",\"").append(attemptStartTime).append("\",\"");
        if (type == TaskType.REDUCE) {
            tasksTableData.append(shuffleFinishTime).append("\",\"").append(sortFinishTime).append("\",\"");
        }
        tasksTableData.append(attemptFinishTime).append("\",\"");
        if (type == TaskType.REDUCE) {
            tasksTableData.append(elapsedShuffleTime).append("\",\"").append(elapsedSortTime).append("\",\"").append(elapsedReduceTime).append("\",\"");
        }
        tasksTableData.append(attemptElapsed).append("\"],\n");
    }
    //Remove the last comma and close off the array of arrays
    if (tasksTableData.charAt(tasksTableData.length() - 2) == ',') {
        tasksTableData.delete(tasksTableData.length() - 2, tasksTableData.length() - 1);
    }
    tasksTableData.append("]");
    html.script().$type("text/javascript")._("var tasksTableData=" + tasksTableData)._();
    TR<TFOOT<TABLE<Hamlet>>> footRow = tbody._().tfoot().tr();
    footRow.th().input("search_init").$type(InputType.text).$name("task").$value("ID")._()._().th().input("search_init").$type(InputType.text).$name("state").$value("State")._()._().th().input("search_init").$type(InputType.text).$name("start_time").$value("Start Time")._()._().th().input("search_init").$type(InputType.text).$name("finish_time").$value("Finish Time")._()._().th().input("search_init").$type(InputType.text).$name("elapsed_time").$value("Elapsed Time")._()._().th().input("search_init").$type(InputType.text).$name("attempt_start_time").$value("Start Time")._()._();
    if (type == TaskType.REDUCE) {
        footRow.th().input("search_init").$type(InputType.text).$name("shuffle_time").$value("Shuffle Time")._()._();
        footRow.th().input("search_init").$type(InputType.text).$name("merge_time").$value("Merge Time")._()._();
    }
    footRow.th().input("search_init").$type(InputType.text).$name("attempt_finish").$value("Finish Time")._()._();
    if (type == TaskType.REDUCE) {
        footRow.th().input("search_init").$type(InputType.text).$name("elapsed_shuffle_time").$value("Elapsed Shuffle Time")._()._();
        footRow.th().input("search_init").$type(InputType.text).$name("elapsed_merge_time").$value("Elapsed Merge Time")._()._();
        footRow.th().input("search_init").$type(InputType.text).$name("elapsed_reduce_time").$value("Elapsed Reduce Time")._()._();
    }
    footRow.th().input("search_init").$type(InputType.text).$name("attempt_elapsed").$value("Elapsed Time")._()._();
    footRow._()._()._();
}
Also used : Hamlet(org.apache.hadoop.yarn.webapp.hamlet.Hamlet) ReduceTaskAttemptInfo(org.apache.hadoop.mapreduce.v2.app.webapp.dao.ReduceTaskAttemptInfo) Task(org.apache.hadoop.mapreduce.v2.app.job.Task) TFOOT(org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TFOOT) TABLE(org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TABLE) TaskInfo(org.apache.hadoop.mapreduce.v2.app.webapp.dao.TaskInfo) THEAD(org.apache.hadoop.yarn.webapp.hamlet.Hamlet.THEAD) TaskType(org.apache.hadoop.mapreduce.v2.api.records.TaskType) TaskAttemptInfo(org.apache.hadoop.mapreduce.v2.app.webapp.dao.TaskAttemptInfo) TaskAttempt(org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt)
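
The trailing-comma handling in render() is easy to get wrong, so here is a minimal standalone sketch of the same StringBuilder pattern; the class and variable names are illustrative, not from HsTasksBlock.

public class TrailingCommaDemo {
    public static void main(String[] args) {
        // Build a JavaScript array-of-arrays literal the way render() does.
        StringBuilder data = new StringBuilder("[\n");
        for (String row : new String[] { "a", "b" }) {
            data.append("[\"").append(row).append("\"],\n");
        }
        // Each iteration leaves "],\n" at the end; charAt(length - 2) is the
        // comma, and delete(start, end) is end-exclusive, so only the comma
        // is removed while the final newline stays.
        if (data.charAt(data.length() - 2) == ',') {
            data.delete(data.length() - 2, data.length() - 1);
        }
        data.append("]");
        // Prints a four-line literal: [ / ["a"], / ["b"] / ]
        System.out.println(data);
    }
}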

Example 15 with TaskInfo

Use of org.apache.hadoop.mapreduce.v2.app.webapp.dao.TaskInfo in project hadoop by apache.

The class HsWebServices, method getJobTasks.

@GET
@Path("/mapreduce/jobs/{jobid}/tasks")
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8, MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 })
public TasksInfo getJobTasks(@Context HttpServletRequest hsr, @PathParam("jobid") String jid, @QueryParam("type") String type) {
    init();
    Job job = AMWebServices.getJobFromJobIdString(jid, ctx);
    checkAccess(job, hsr);
    TasksInfo allTasks = new TasksInfo();
    for (Task task : job.getTasks().values()) {
        TaskType ttype = null;
        if (type != null && !type.isEmpty()) {
            try {
                ttype = MRApps.taskType(type);
            } catch (YarnRuntimeException e) {
                throw new BadRequestException("tasktype must be either m or r");
            }
        }
        if (ttype != null && task.getType() != ttype) {
            continue;
        }
        allTasks.add(new TaskInfo(task));
    }
    return allTasks;
}
Also used : YarnRuntimeException(org.apache.hadoop.yarn.exceptions.YarnRuntimeException) TaskInfo(org.apache.hadoop.mapreduce.v2.app.webapp.dao.TaskInfo) Task(org.apache.hadoop.mapreduce.v2.app.job.Task) TaskType(org.apache.hadoop.mapreduce.v2.api.records.TaskType) BadRequestException(org.apache.hadoop.yarn.webapp.BadRequestException) Job(org.apache.hadoop.mapreduce.v2.app.job.Job) TasksInfo(org.apache.hadoop.mapreduce.v2.app.webapp.dao.TasksInfo) Path(javax.ws.rs.Path) Produces(javax.ws.rs.Produces) GET(javax.ws.rs.GET)
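
Assuming a JobHistoryServer on its default HTTP port 19888, the endpoint can be exercised with the JDK 11+ HTTP client; the host and job id below are placeholders, and type=m (or type=r) corresponds to the MRApps.taskType() check above.

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class TasksQuery {
    public static void main(String[] args) throws Exception {
        // Placeholder job id; "type=m" selects map tasks, "type=r" reduces.
        String url = "http://localhost:19888/ws/v1/history"
                + "/mapreduce/jobs/job_1234567890123_0001/tasks?type=m";
        HttpRequest req = HttpRequest.newBuilder(URI.create(url))
                .header("Accept", "application/json") // or application/xml
                .build();
        HttpResponse<String> resp = HttpClient.newHttpClient()
                .send(req, HttpResponse.BodyHandlers.ofString());
        System.out.println(resp.statusCode());
        System.out.println(resp.body());
    }
}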

Aggregations

TaskInfo (org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskInfo): 12 usages
TaskAttemptInfo (org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskAttemptInfo): 10 usages
HashMap (java.util.HashMap): 9 usages
TaskID (org.apache.hadoop.mapreduce.TaskID): 9 usages
TaskId (org.apache.hadoop.mapreduce.v2.api.records.TaskId): 9 usages
Task (org.apache.hadoop.mapreduce.v2.app.job.Task): 9 usages
Test (org.junit.Test): 8 usages
TaskAttemptID (org.apache.hadoop.mapreduce.TaskAttemptID): 7 usages
JobHistoryEvent (org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent): 7 usages
Job (org.apache.hadoop.mapreduce.v2.app.job.Job): 7 usages
ArrayList (java.util.ArrayList): 6 usages
TaskAttemptState (org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptState): 6 usages
JobTaskEvent (org.apache.hadoop.mapreduce.v2.app.job.event.JobTaskEvent): 6 usages
TaskInfo (org.apache.hadoop.mapreduce.v2.app.webapp.dao.TaskInfo): 6 usages
JobID (org.apache.hadoop.mapreduce.JobID): 5 usages
OutputCommitter (org.apache.hadoop.mapreduce.OutputCommitter): 5 usages
Event (org.apache.hadoop.mapreduce.jobhistory.Event): 5 usages
EventType (org.apache.hadoop.mapreduce.jobhistory.EventType): 5 usages
JobHistoryEventHandler (org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler): 5 usages
JobCounterUpdateEvent (org.apache.hadoop.mapreduce.v2.app.job.event.JobCounterUpdateEvent): 5 usages