
Example 6 with TaskAttemptState

Use of org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptState in project hadoop by apache.

From the class MockJobs, method newTaskAttempt:

public static TaskAttempt newTaskAttempt(TaskId tid, int i) {
    final TaskAttemptId taid = Records.newRecord(TaskAttemptId.class);
    taid.setTaskId(tid);
    taid.setId(i);
    final TaskAttemptReport report = newTaskAttemptReport(taid);
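    // Anonymous TaskAttempt stub: most getters delegate to the canned report;
    // node and container details are hard-coded, and unsupported ops throw.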
    return new TaskAttempt() {

        @Override
        public NodeId getNodeId() throws UnsupportedOperationException {
            throw new UnsupportedOperationException();
        }

        @Override
        public TaskAttemptId getID() {
            return taid;
        }

        @Override
        public TaskAttemptReport getReport() {
            return report;
        }

        @Override
        public long getLaunchTime() {
            return report.getStartTime();
        }

        @Override
        public long getFinishTime() {
            return report.getFinishTime();
        }

        @Override
        public int getShufflePort() {
            return ShuffleHandler.DEFAULT_SHUFFLE_PORT;
        }

        @Override
        public Counters getCounters() {
            if (report != null && report.getCounters() != null) {
                return new Counters(TypeConverter.fromYarn(report.getCounters()));
            }
            return null;
        }

        @Override
        public float getProgress() {
            return report.getProgress();
        }

        @Override
        public Phase getPhase() {
            return report.getPhase();
        }

        @Override
        public TaskAttemptState getState() {
            return report.getTaskAttemptState();
        }

        @Override
        public boolean isFinished() {
            switch(report.getTaskAttemptState()) {
                case SUCCEEDED:
                case FAILED:
                case KILLED:
                    return true;
            }
            return false;
        }

        @Override
        public ContainerId getAssignedContainerID() {
            ApplicationAttemptId appAttemptId = ApplicationAttemptId.newInstance(taid.getTaskId().getJobId().getAppId(), 0);
            ContainerId id = ContainerId.newContainerId(appAttemptId, 0);
            return id;
        }

        @Override
        public String getNodeHttpAddress() {
            return "localhost:8042";
        }

        @Override
        public List<String> getDiagnostics() {
            return Lists.newArrayList(report.getDiagnosticInfo());
        }

        @Override
        public String getAssignedContainerMgrAddress() {
            return "localhost:9998";
        }

        @Override
        public long getShuffleFinishTime() {
            return report.getShuffleFinishTime();
        }

        @Override
        public long getSortFinishTime() {
            return report.getSortFinishTime();
        }

        @Override
        public String getNodeRackName() {
            return "/default-rack";
        }
    };
}
Also used : TaskAttemptReport(org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptReport) ContainerId(org.apache.hadoop.yarn.api.records.ContainerId) TaskAttemptId(org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId) Counters(org.apache.hadoop.mapreduce.Counters) TaskAttempt(org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt) ApplicationAttemptId(org.apache.hadoop.yarn.api.records.ApplicationAttemptId)
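
The stub above can be exercised directly in a test. A minimal usage sketch (hypothetical, not from MockJobs itself; it assumes the v2 record classes, Records, and JUnit's assertEquals are in scope):

ApplicationId appId = ApplicationId.newInstance(System.currentTimeMillis(), 1);
JobId jobId = Records.newRecord(JobId.class);
// getAssignedContainerID() above walks taid -> TaskId -> JobId -> ApplicationId,
// so the JobId must carry a real ApplicationId
jobId.setAppId(appId);
jobId.setId(1);
TaskId tid = Records.newRecord(TaskId.class);
tid.setJobId(jobId);
tid.setTaskType(TaskType.MAP);
tid.setId(0);
TaskAttempt attempt = MockJobs.newTaskAttempt(tid, 0);
assertEquals(0, attempt.getID().getId());
assertEquals(ShuffleHandler.DEFAULT_SHUFFLE_PORT, attempt.getShufflePort());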

Example 7 with TaskAttemptState

Use of org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptState in project hadoop by apache.

From the class TestRecovery, method testRecoverySuccessAttempt:

@Test
public void testRecoverySuccessAttempt() {
    LOG.info("--- START: testRecoverySuccessAttempt ---");
    long clusterTimestamp = System.currentTimeMillis();
    EventHandler mockEventHandler = mock(EventHandler.class);
    MapTaskImpl recoverMapTask = getMockMapTask(clusterTimestamp, mockEventHandler);
    TaskId taskId = recoverMapTask.getID();
    JobID jobID = new JobID(Long.toString(clusterTimestamp), 1);
    TaskID taskID = new TaskID(jobID, org.apache.hadoop.mapreduce.TaskType.MAP, taskId.getId());
    // Mock up the TaskAttempts
    Map<TaskAttemptID, TaskAttemptInfo> mockTaskAttempts = new HashMap<TaskAttemptID, TaskAttemptInfo>();
    TaskAttemptID taId1 = new TaskAttemptID(taskID, 2);
    TaskAttemptInfo mockTAinfo1 = getMockTaskAttemptInfo(taId1, TaskAttemptState.SUCCEEDED);
    mockTaskAttempts.put(taId1, mockTAinfo1);
    TaskAttemptID taId2 = new TaskAttemptID(taskID, 1);
    TaskAttemptInfo mockTAinfo2 = getMockTaskAttemptInfo(taId2, TaskAttemptState.FAILED);
    mockTaskAttempts.put(taId2, mockTAinfo2);
    OutputCommitter mockCommitter = mock(OutputCommitter.class);
    TaskInfo mockTaskInfo = mock(TaskInfo.class);
    when(mockTaskInfo.getTaskStatus()).thenReturn("SUCCEEDED");
    when(mockTaskInfo.getTaskId()).thenReturn(taskID);
    when(mockTaskInfo.getAllTaskAttempts()).thenReturn(mockTaskAttempts);
    recoverMapTask.handle(new TaskRecoverEvent(taskId, mockTaskInfo, mockCommitter, true));
    ArgumentCaptor<Event> arg = ArgumentCaptor.forClass(Event.class);
    verify(mockEventHandler, atLeast(1)).handle((org.apache.hadoop.yarn.event.Event) arg.capture());
    Map<TaskAttemptID, TaskAttemptState> finalAttemptStates = new HashMap<TaskAttemptID, TaskAttemptState>();
    finalAttemptStates.put(taId1, TaskAttemptState.SUCCEEDED);
    finalAttemptStates.put(taId2, TaskAttemptState.FAILED);
    List<EventType> jobHistoryEvents = new ArrayList<EventType>();
    jobHistoryEvents.add(EventType.TASK_STARTED);
    jobHistoryEvents.add(EventType.MAP_ATTEMPT_STARTED);
    jobHistoryEvents.add(EventType.MAP_ATTEMPT_FINISHED);
    jobHistoryEvents.add(EventType.MAP_ATTEMPT_STARTED);
    jobHistoryEvents.add(EventType.MAP_ATTEMPT_FAILED);
    jobHistoryEvents.add(EventType.TASK_FINISHED);
    recoveryChecker(recoverMapTask, TaskState.SUCCEEDED, finalAttemptStates, arg, jobHistoryEvents, 2L, 1L);
}
Also used : OutputCommitter(org.apache.hadoop.mapreduce.OutputCommitter) TaskId(org.apache.hadoop.mapreduce.v2.api.records.TaskId) TaskID(org.apache.hadoop.mapreduce.TaskID) HashMap(java.util.HashMap) TaskAttemptEventType(org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEventType) EventType(org.apache.hadoop.mapreduce.jobhistory.EventType) TaskEventType(org.apache.hadoop.mapreduce.v2.app.job.event.TaskEventType) TaskAttemptID(org.apache.hadoop.mapreduce.TaskAttemptID) ArrayList(java.util.ArrayList) EventHandler(org.apache.hadoop.yarn.event.EventHandler) JobHistoryEventHandler(org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler) MapTaskImpl(org.apache.hadoop.mapreduce.v2.app.job.impl.MapTaskImpl) TaskInfo(org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskInfo) TaskAttemptState(org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptState) TaskAttemptInfo(org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskAttemptInfo) TaskAttemptContainerLaunchedEvent(org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptContainerLaunchedEvent) Event(org.apache.hadoop.mapreduce.jobhistory.Event) TaskRecoverEvent(org.apache.hadoop.mapreduce.v2.app.job.event.TaskRecoverEvent) JobTaskEvent(org.apache.hadoop.mapreduce.v2.app.job.event.JobTaskEvent) JobHistoryEvent(org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent) JobCounterUpdateEvent(org.apache.hadoop.mapreduce.v2.app.job.event.JobCounterUpdateEvent) ContainerLauncherEvent(org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherEvent) TaskAttemptEvent(org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent) TaskEvent(org.apache.hadoop.mapreduce.v2.app.job.event.TaskEvent) JobID(org.apache.hadoop.mapreduce.JobID) TaskRecoverEvent(org.apache.hadoop.mapreduce.v2.app.job.event.TaskRecoverEvent) Test(org.junit.Test)
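
The helper getMockTaskAttemptInfo is referenced but not shown here. A minimal sketch of what such a helper plausibly looks like, assuming Mockito (the real TestRecovery helper stubs additional fields such as the container id and counters):

private TaskAttemptInfo getMockTaskAttemptInfo(TaskAttemptID attemptId, TaskAttemptState state) {
    TaskAttemptInfo mockInfo = mock(TaskAttemptInfo.class);
    when(mockInfo.getAttemptId()).thenReturn(attemptId);
    // JobHistoryParser.TaskAttemptInfo exposes the status as a String,
    // so the enum value is stringified here
    when(mockInfo.getTaskStatus()).thenReturn(state.toString());
    when(mockInfo.getStartTime()).thenReturn(0L);
    when(mockInfo.getFinishTime()).thenReturn(System.currentTimeMillis());
    return mockInfo;
}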

Example 8 with TaskAttemptState

Use of org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptState in project hadoop by apache.

From the class TestRecovery, method testRecoveryTaskSuccessAllAttemptsFail:

@Test
public void testRecoveryTaskSuccessAllAttemptsFail() {
    LOG.info("--- START:  testRecoveryTaskSuccessAllAttemptsFail ---");
    long clusterTimestamp = System.currentTimeMillis();
    EventHandler mockEventHandler = mock(EventHandler.class);
    MapTaskImpl recoverMapTask = getMockMapTask(clusterTimestamp, mockEventHandler);
    TaskId taskId = recoverMapTask.getID();
    JobID jobID = new JobID(Long.toString(clusterTimestamp), 1);
    TaskID taskID = new TaskID(jobID, org.apache.hadoop.mapreduce.TaskType.MAP, taskId.getId());
    // Mock up the TaskAttempts
    Map<TaskAttemptID, TaskAttemptInfo> mockTaskAttempts = new HashMap<TaskAttemptID, TaskAttemptInfo>();
    TaskAttemptID taId1 = new TaskAttemptID(taskID, 2);
    TaskAttemptInfo mockTAinfo1 = getMockTaskAttemptInfo(taId1, TaskAttemptState.FAILED);
    mockTaskAttempts.put(taId1, mockTAinfo1);
    TaskAttemptID taId2 = new TaskAttemptID(taskID, 1);
    TaskAttemptInfo mockTAinfo2 = getMockTaskAttemptInfo(taId2, TaskAttemptState.FAILED);
    mockTaskAttempts.put(taId2, mockTAinfo2);
    OutputCommitter mockCommitter = mock(OutputCommitter.class);
    TaskInfo mockTaskInfo = mock(TaskInfo.class);
    when(mockTaskInfo.getTaskStatus()).thenReturn("SUCCEEDED");
    when(mockTaskInfo.getTaskId()).thenReturn(taskID);
    when(mockTaskInfo.getAllTaskAttempts()).thenReturn(mockTaskAttempts);
    recoverMapTask.handle(new TaskRecoverEvent(taskId, mockTaskInfo, mockCommitter, true));
    ArgumentCaptor<Event> arg = ArgumentCaptor.forClass(Event.class);
    verify(mockEventHandler, atLeast(1)).handle((org.apache.hadoop.yarn.event.Event) arg.capture());
    Map<TaskAttemptID, TaskAttemptState> finalAttemptStates = new HashMap<TaskAttemptID, TaskAttemptState>();
    finalAttemptStates.put(taId1, TaskAttemptState.FAILED);
    finalAttemptStates.put(taId2, TaskAttemptState.FAILED);
    // check for one new attempt launched since successful attempt not found
    TaskAttemptID taId3 = new TaskAttemptID(taskID, 2000);
    finalAttemptStates.put(taId3, TaskAttemptState.NEW);
    List<EventType> jobHistoryEvents = new ArrayList<EventType>();
    jobHistoryEvents.add(EventType.TASK_STARTED);
    jobHistoryEvents.add(EventType.MAP_ATTEMPT_STARTED);
    jobHistoryEvents.add(EventType.MAP_ATTEMPT_FAILED);
    jobHistoryEvents.add(EventType.MAP_ATTEMPT_STARTED);
    jobHistoryEvents.add(EventType.MAP_ATTEMPT_FAILED);
    recoveryChecker(recoverMapTask, TaskState.RUNNING, finalAttemptStates, arg, jobHistoryEvents, 2L, 2L);
}
Also used : OutputCommitter(org.apache.hadoop.mapreduce.OutputCommitter) TaskId(org.apache.hadoop.mapreduce.v2.api.records.TaskId) TaskID(org.apache.hadoop.mapreduce.TaskID) HashMap(java.util.HashMap) TaskAttemptEventType(org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEventType) EventType(org.apache.hadoop.mapreduce.jobhistory.EventType) TaskEventType(org.apache.hadoop.mapreduce.v2.app.job.event.TaskEventType) TaskAttemptID(org.apache.hadoop.mapreduce.TaskAttemptID) ArrayList(java.util.ArrayList) EventHandler(org.apache.hadoop.yarn.event.EventHandler) JobHistoryEventHandler(org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler) MapTaskImpl(org.apache.hadoop.mapreduce.v2.app.job.impl.MapTaskImpl) TaskInfo(org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskInfo) TaskAttemptState(org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptState) TaskAttemptInfo(org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskAttemptInfo) TaskAttemptContainerLaunchedEvent(org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptContainerLaunchedEvent) Event(org.apache.hadoop.mapreduce.jobhistory.Event) TaskRecoverEvent(org.apache.hadoop.mapreduce.v2.app.job.event.TaskRecoverEvent) JobTaskEvent(org.apache.hadoop.mapreduce.v2.app.job.event.JobTaskEvent) JobHistoryEvent(org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent) JobCounterUpdateEvent(org.apache.hadoop.mapreduce.v2.app.job.event.JobCounterUpdateEvent) ContainerLauncherEvent(org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherEvent) TaskAttemptEvent(org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent) TaskEvent(org.apache.hadoop.mapreduce.v2.app.job.event.TaskEvent) JobID(org.apache.hadoop.mapreduce.JobID) TaskRecoverEvent(org.apache.hadoop.mapreduce.v2.app.job.event.TaskRecoverEvent) Test(org.junit.Test)

Example 9 with TaskAttemptState

Use of org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptState in project hadoop by apache.

From the class TaskImpl, method recover:

/**
   * Recover a completed task from a previous application attempt
   * @param taskInfo recovered info about the task
   * @param committer the committer used when recovering the task output
   * @param recoverTaskOutput whether to recover task outputs
   * @return state of the task after recovery
   */
private TaskStateInternal recover(TaskInfo taskInfo, OutputCommitter committer, boolean recoverTaskOutput) {
    LOG.info("Recovering task " + taskId + " from prior app attempt, status was " + taskInfo.getTaskStatus());
    scheduledTime = taskInfo.getStartTime();
    sendTaskStartedEvent();
    Collection<TaskAttemptInfo> attemptInfos = taskInfo.getAllTaskAttempts().values();
    if (attemptInfos.size() > 0) {
        metrics.launchedTask(this);
    }
    // recover the attempts for this task in the order they finished
    // so task attempt completion events are ordered properly
    int savedNextAttemptNumber = nextAttemptNumber;
    ArrayList<TaskAttemptInfo> taInfos = new ArrayList<TaskAttemptInfo>(taskInfo.getAllTaskAttempts().values());
    Collections.sort(taInfos, TA_INFO_COMPARATOR);
    for (TaskAttemptInfo taInfo : taInfos) {
        nextAttemptNumber = taInfo.getAttemptId().getId();
        TaskAttemptImpl attempt = addAttempt(Avataar.VIRGIN);
        // handle the recovery inline so attempts complete before task does
        attempt.handle(new TaskAttemptRecoverEvent(attempt.getID(), taInfo, committer, recoverTaskOutput));
        finishedAttempts.add(attempt.getID());
        TaskAttemptCompletionEventStatus taces = null;
        TaskAttemptState attemptState = attempt.getState();
        switch(attemptState) {
            case FAILED:
                taces = TaskAttemptCompletionEventStatus.FAILED;
                break;
            case KILLED:
                taces = TaskAttemptCompletionEventStatus.KILLED;
                break;
            case SUCCEEDED:
                taces = TaskAttemptCompletionEventStatus.SUCCEEDED;
                break;
            default:
                throw new IllegalStateException("Unexpected attempt state during recovery: " + attemptState);
        }
        if (attemptState == TaskAttemptState.FAILED) {
            failedAttempts.add(attempt.getID());
            if (failedAttempts.size() >= maxAttempts) {
                taces = TaskAttemptCompletionEventStatus.TIPFAILED;
            }
        }
        // TODO: this shouldn't be necessary after MAPREDUCE-4330
        if (successfulAttempt == null) {
            handleTaskAttemptCompletion(attempt.getID(), taces);
            if (attemptState == TaskAttemptState.SUCCEEDED) {
                successfulAttempt = attempt.getID();
            }
        }
    }
    nextAttemptNumber = savedNextAttemptNumber;
    TaskStateInternal taskState = TaskStateInternal.valueOf(taskInfo.getTaskStatus());
    switch(taskState) {
        case SUCCEEDED:
            if (successfulAttempt != null) {
                sendTaskSucceededEvents();
            } else {
                LOG.info("Missing successful attempt for task " + taskId + ", recovering as RUNNING");
                // there must have been a fetch failure and the retry wasn't complete
                taskState = TaskStateInternal.RUNNING;
                metrics.runningTask(this);
                addAndScheduleAttempt(Avataar.VIRGIN);
            }
            break;
        case FAILED:
        case KILLED:
            {
                if (taskState == TaskStateInternal.KILLED && attemptInfos.size() == 0) {
                    metrics.endWaitingTask(this);
                }
                TaskFailedEvent tfe = new TaskFailedEvent(taskInfo.getTaskId(), taskInfo.getFinishTime(), taskInfo.getTaskType(), taskInfo.getError(), taskInfo.getTaskStatus(), taskInfo.getFailedDueToAttemptId(), taskInfo.getCounters());
                eventHandler.handle(new JobHistoryEvent(taskId.getJobId(), tfe));
                eventHandler.handle(new JobTaskEvent(taskId, getExternalState(taskState)));
                break;
            }
        default:
            throw new java.lang.AssertionError("Unexpected recovered task state: " + taskState);
    }
    return taskState;
}
Also used : TaskStateInternal(org.apache.hadoop.mapreduce.v2.app.job.TaskStateInternal) ArrayList(java.util.ArrayList) TaskAttemptCompletionEventStatus(org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptCompletionEventStatus) JobHistoryEvent(org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent) TaskAttemptState(org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptState) JobTaskEvent(org.apache.hadoop.mapreduce.v2.app.job.event.JobTaskEvent) TaskAttemptInfo(org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskAttemptInfo) TaskFailedEvent(org.apache.hadoop.mapreduce.jobhistory.TaskFailedEvent) TaskAttemptRecoverEvent(org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptRecoverEvent)
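
The TA_INFO_COMPARATOR used for the sort is not shown above. Consistent with the "in the order they finished" comment, a minimal sketch would order the recovered attempts by finish time:

private static final Comparator<TaskAttemptInfo> TA_INFO_COMPARATOR =
    new Comparator<TaskAttemptInfo>() {
        @Override
        public int compare(TaskAttemptInfo a, TaskAttemptInfo b) {
            // earlier finish time sorts first, so task attempt
            // completion events are replayed in the order they occurred
            return Long.compare(a.getFinishTime(), b.getFinishTime());
        }
    };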

Example 10 with TaskAttemptState

Use of org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptState in project hadoop by apache.

From the class TestRecovery, method testRecoveryTaskSuccessAllAttemptsSucceed:

@Test
public void testRecoveryTaskSuccessAllAttemptsSucceed() {
    LOG.info("--- START:  testRecoveryTaskSuccessAllAttemptsFail ---");
    long clusterTimestamp = System.currentTimeMillis();
    EventHandler mockEventHandler = mock(EventHandler.class);
    MapTaskImpl recoverMapTask = getMockMapTask(clusterTimestamp, mockEventHandler);
    TaskId taskId = recoverMapTask.getID();
    JobID jobID = new JobID(Long.toString(clusterTimestamp), 1);
    TaskID taskID = new TaskID(jobID, org.apache.hadoop.mapreduce.TaskType.MAP, taskId.getId());
    // Mock up the TaskAttempts
    Map<TaskAttemptID, TaskAttemptInfo> mockTaskAttempts = new HashMap<TaskAttemptID, TaskAttemptInfo>();
    TaskAttemptID taId1 = new TaskAttemptID(taskID, 2);
    TaskAttemptInfo mockTAinfo1 = getMockTaskAttemptInfo(taId1, TaskAttemptState.SUCCEEDED);
    mockTaskAttempts.put(taId1, mockTAinfo1);
    TaskAttemptID taId2 = new TaskAttemptID(taskID, 1);
    TaskAttemptInfo mockTAinfo2 = getMockTaskAttemptInfo(taId2, TaskAttemptState.SUCCEEDED);
    mockTaskAttempts.put(taId2, mockTAinfo2);
    OutputCommitter mockCommitter = mock(OutputCommitter.class);
    TaskInfo mockTaskInfo = mock(TaskInfo.class);
    when(mockTaskInfo.getTaskStatus()).thenReturn("SUCCEEDED");
    when(mockTaskInfo.getTaskId()).thenReturn(taskID);
    when(mockTaskInfo.getAllTaskAttempts()).thenReturn(mockTaskAttempts);
    recoverMapTask.handle(new TaskRecoverEvent(taskId, mockTaskInfo, mockCommitter, true));
    ArgumentCaptor<Event> arg = ArgumentCaptor.forClass(Event.class);
    verify(mockEventHandler, atLeast(1)).handle((org.apache.hadoop.yarn.event.Event) arg.capture());
    Map<TaskAttemptID, TaskAttemptState> finalAttemptStates = new HashMap<TaskAttemptID, TaskAttemptState>();
    finalAttemptStates.put(taId1, TaskAttemptState.SUCCEEDED);
    finalAttemptStates.put(taId2, TaskAttemptState.SUCCEEDED);
    List<EventType> jobHistoryEvents = new ArrayList<EventType>();
    jobHistoryEvents.add(EventType.TASK_STARTED);
    jobHistoryEvents.add(EventType.MAP_ATTEMPT_STARTED);
    jobHistoryEvents.add(EventType.MAP_ATTEMPT_FINISHED);
    jobHistoryEvents.add(EventType.MAP_ATTEMPT_STARTED);
    jobHistoryEvents.add(EventType.MAP_ATTEMPT_FINISHED);
    jobHistoryEvents.add(EventType.TASK_FINISHED);
    recoveryChecker(recoverMapTask, TaskState.SUCCEEDED, finalAttemptStates, arg, jobHistoryEvents, 2L, 0L);
}
Also used : OutputCommitter(org.apache.hadoop.mapreduce.OutputCommitter) TaskId(org.apache.hadoop.mapreduce.v2.api.records.TaskId) TaskID(org.apache.hadoop.mapreduce.TaskID) HashMap(java.util.HashMap) TaskAttemptEventType(org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEventType) EventType(org.apache.hadoop.mapreduce.jobhistory.EventType) TaskEventType(org.apache.hadoop.mapreduce.v2.app.job.event.TaskEventType) TaskAttemptID(org.apache.hadoop.mapreduce.TaskAttemptID) ArrayList(java.util.ArrayList) EventHandler(org.apache.hadoop.yarn.event.EventHandler) JobHistoryEventHandler(org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler) MapTaskImpl(org.apache.hadoop.mapreduce.v2.app.job.impl.MapTaskImpl) TaskInfo(org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskInfo) TaskAttemptState(org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptState) TaskAttemptInfo(org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskAttemptInfo) TaskAttemptContainerLaunchedEvent(org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptContainerLaunchedEvent) Event(org.apache.hadoop.mapreduce.jobhistory.Event) TaskRecoverEvent(org.apache.hadoop.mapreduce.v2.app.job.event.TaskRecoverEvent) JobTaskEvent(org.apache.hadoop.mapreduce.v2.app.job.event.JobTaskEvent) JobHistoryEvent(org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent) JobCounterUpdateEvent(org.apache.hadoop.mapreduce.v2.app.job.event.JobCounterUpdateEvent) ContainerLauncherEvent(org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherEvent) TaskAttemptEvent(org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent) TaskEvent(org.apache.hadoop.mapreduce.v2.app.job.event.TaskEvent) JobID(org.apache.hadoop.mapreduce.JobID) TaskRecoverEvent(org.apache.hadoop.mapreduce.v2.app.job.event.TaskRecoverEvent) Test(org.junit.Test)
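
Taken together, the three TestRecovery examples exercise the recover() logic from Example 9. In every case the task status recorded in the job history is "SUCCEEDED"; what differs is whether a successful attempt is actually found among the recovered attempts:

recovered attempt states    expected task state after recovery
SUCCEEDED + FAILED          SUCCEEDED (Example 7)
FAILED + FAILED             RUNNING, one new attempt scheduled (Example 8)
SUCCEEDED + SUCCEEDED       SUCCEEDED (Example 10)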

Aggregations

TaskAttemptState (org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptState) 8
HashMap (java.util.HashMap) 7
JobHistoryEvent (org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent) 7
JobTaskEvent (org.apache.hadoop.mapreduce.v2.app.job.event.JobTaskEvent) 7
Test (org.junit.Test) 7
ArrayList (java.util.ArrayList) 6
TaskAttemptID (org.apache.hadoop.mapreduce.TaskAttemptID) 6
Event (org.apache.hadoop.mapreduce.jobhistory.Event) 6
TaskAttemptInfo (org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskAttemptInfo) 6
TaskId (org.apache.hadoop.mapreduce.v2.api.records.TaskId) 6
JobCounterUpdateEvent (org.apache.hadoop.mapreduce.v2.app.job.event.JobCounterUpdateEvent) 6
TaskAttemptContainerLaunchedEvent (org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptContainerLaunchedEvent) 6
TaskAttemptEvent (org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent) 6
TaskEvent (org.apache.hadoop.mapreduce.v2.app.job.event.TaskEvent) 6
TaskRecoverEvent (org.apache.hadoop.mapreduce.v2.app.job.event.TaskRecoverEvent) 6
ContainerLauncherEvent (org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherEvent) 6
JobID (org.apache.hadoop.mapreduce.JobID) 5
OutputCommitter (org.apache.hadoop.mapreduce.OutputCommitter) 5
TaskID (org.apache.hadoop.mapreduce.TaskID) 5
EventType (org.apache.hadoop.mapreduce.jobhistory.EventType) 5