Search in sources :

Example 11 with TaskAttemptInfo

use of org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskAttemptInfo in project hadoop by apache.

the class TestRecovery method getMockTaskAttemptInfo.

/**
 * Builds a fully-stubbed {@link TaskAttemptInfo} mock so recovery code can
 * read every attempt field without hitting unstubbed (null/zero) defaults.
 *
 * @param tai attempt id to report from {@code getAttemptId()}
 * @param tas attempt state whose {@code toString()} becomes the task status
 * @return a mock MAP-type attempt that "finished" at the current wall clock
 */
private TaskAttemptInfo getMockTaskAttemptInfo(TaskAttemptID tai, TaskAttemptState tas) {
    TaskAttemptInfo mockTAinfo = mock(TaskAttemptInfo.class);
    long finishTime = System.currentTimeMillis();
    // Identity, type and status.
    when(mockTAinfo.getAttemptId()).thenReturn(tai);
    when(mockTAinfo.getTaskType()).thenReturn(TaskType.MAP);
    when(mockTAinfo.getTaskStatus()).thenReturn(tas.toString());
    when(mockTAinfo.getState()).thenReturn("task in progress");
    when(mockTAinfo.getError()).thenReturn("");
    when(mockTAinfo.getCounters()).thenReturn(mock(Counters.class));
    // Placement: container, host, rack, tracker and ports.
    when(mockTAinfo.getContainerId()).thenReturn(mock(ContainerId.class));
    when(mockTAinfo.getHostname()).thenReturn("localhost");
    when(mockTAinfo.getRackname()).thenReturn("defaultRack");
    when(mockTAinfo.getTrackerName()).thenReturn("TrackerName");
    when(mockTAinfo.getHttpPort()).thenReturn(23);
    when(mockTAinfo.getPort()).thenReturn(24);
    when(mockTAinfo.getShufflePort()).thenReturn(25);
    // Timeline: started 10s before finish; sort/shuffle/map phases end
    // 3s/2s/1s before the finish time respectively.
    when(mockTAinfo.getStartTime()).thenReturn(finishTime - 10000);
    when(mockTAinfo.getSortFinishTime()).thenReturn(finishTime - 3000L);
    when(mockTAinfo.getShuffleFinishTime()).thenReturn(finishTime - 2000L);
    when(mockTAinfo.getMapFinishTime()).thenReturn(finishTime - 1000L);
    when(mockTAinfo.getFinishTime()).thenReturn(finishTime);
    return mockTAinfo;
}
Also used : ContainerId(org.apache.hadoop.yarn.api.records.ContainerId) TaskType(org.apache.hadoop.mapreduce.TaskType) TaskAttemptInfo(org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskAttemptInfo) Counters(org.apache.hadoop.mapreduce.Counters)

Example 12 with TaskAttemptInfo

use of org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskAttemptInfo in project hadoop by apache.

the class TestCompletedTask method testCompletedTaskAttempt.

/**
 * Verifies that {@code CompletedTaskAttempt} faithfully surfaces the values
 * of the underlying history {@link TaskAttemptInfo}: rack name, phase,
 * finished flag, shuffle/sort finish times and shuffle port.
 */
@Test(timeout = 5000)
public void testCompletedTaskAttempt() {
    // Stub only the fields CompletedTaskAttempt is expected to expose.
    TaskAttemptInfo attemptInfo = mock(TaskAttemptInfo.class);
    when(attemptInfo.getRackname()).thenReturn("Rackname");
    when(attemptInfo.getShuffleFinishTime()).thenReturn(11L);
    when(attemptInfo.getSortFinishTime()).thenReturn(12L);
    when(attemptInfo.getShufflePort()).thenReturn(10);
    TaskAttemptID attemptId =
        new TaskAttemptID(new TaskID(new JobID("12345", 0), TaskType.REDUCE, 0), 0);
    when(attemptInfo.getAttemptId()).thenReturn(attemptId);

    CompletedTaskAttempt completedAttempt = new CompletedTaskAttempt(null, attemptInfo);

    assertEquals("Rackname", completedAttempt.getNodeRackName());
    assertEquals(Phase.CLEANUP, completedAttempt.getPhase());
    assertTrue(completedAttempt.isFinished());
    assertEquals(11L, completedAttempt.getShuffleFinishTime());
    assertEquals(12L, completedAttempt.getSortFinishTime());
    assertEquals(10, completedAttempt.getShufflePort());
}
Also used : TaskID(org.apache.hadoop.mapreduce.TaskID) TaskAttemptID(org.apache.hadoop.mapreduce.TaskAttemptID) TaskAttemptInfo(org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskAttemptInfo) JobID(org.apache.hadoop.mapreduce.JobID) Test(org.junit.Test)

Example 13 with TaskAttemptInfo

use of org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskAttemptInfo in project hadoop by apache.

the class TestCompletedTask method testTaskStartTimes.

/**
 * Verifies that a CompletedTask's report uses the earliest of its attempts'
 * launch times as the task start time.
 */
@Test(timeout = 5000)
public void testTaskStartTimes() {
    TaskId taskId = mock(TaskId.class);
    TaskInfo taskInfo = mock(TaskInfo.class);
    Map<TaskAttemptID, TaskAttemptInfo> taskAttempts =
        new TreeMap<TaskAttemptID, TaskAttemptInfo>();

    // First attempt launched at t=10.
    // Uppercase 'L' suffix: lowercase 'l' is easily misread as the digit '1'.
    TaskAttemptID id = new TaskAttemptID("0", 0, TaskType.MAP, 0, 0);
    TaskAttemptInfo info = mock(TaskAttemptInfo.class);
    when(info.getAttemptId()).thenReturn(id);
    when(info.getStartTime()).thenReturn(10L);
    taskAttempts.put(id, info);

    // Second attempt launched later, at t=20.
    id = new TaskAttemptID("1", 0, TaskType.MAP, 1, 1);
    info = mock(TaskAttemptInfo.class);
    when(info.getAttemptId()).thenReturn(id);
    when(info.getStartTime()).thenReturn(20L);
    taskAttempts.put(id, info);

    when(taskInfo.getAllTaskAttempts()).thenReturn(taskAttempts);
    CompletedTask task = new CompletedTask(taskId, taskInfo);
    TaskReport report = task.getReport();
    // Make sure the startTime returned by the report is the lesser of the
    // attempt launch times (assertEquals reports the actual value on failure,
    // unlike the previous assertTrue(x == 10)).
    assertEquals(10, report.getStartTime());
}
Also used : TaskInfo(org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskInfo) TaskId(org.apache.hadoop.mapreduce.v2.api.records.TaskId) TaskReport(org.apache.hadoop.mapreduce.v2.api.records.TaskReport) TaskAttemptID(org.apache.hadoop.mapreduce.TaskAttemptID) TaskAttemptInfo(org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskAttemptInfo) TreeMap(java.util.TreeMap) CompletedTask(org.apache.hadoop.mapreduce.v2.hs.CompletedTask) Test(org.junit.Test)

Example 14 with TaskAttemptInfo

use of org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskAttemptInfo in project hadoop by apache.

the class TestJobHistoryParsing method testHistoryParsingForFailedAttempts.

@Test(timeout = 30000)
public void testHistoryParsingForFailedAttempts() throws Exception {
    LOG.info("STARTING testHistoryParsingForFailedAttempts");
    try {
        Configuration conf = new Configuration();
        conf.setClass(NET_TOPOLOGY_NODE_SWITCH_MAPPING_IMPL_KEY, MyResolver.class, DNSToSwitchMapping.class);
        RackResolver.init(conf);
        MRApp app = new MRAppWithHistoryWithFailedAttempt(2, 1, true, this.getClass().getName(), true);
        app.submit(conf);
        Job job = app.getContext().getAllJobs().values().iterator().next();
        JobId jobId = job.getID();
        app.waitForState(job, JobState.SUCCEEDED);
        // make sure all events are flushed
        app.waitForState(Service.STATE.STOPPED);
        JobHistory jobHistory = new JobHistory();
        jobHistory.init(conf);
        HistoryFileInfo fileInfo = jobHistory.getJobFileInfo(jobId);
        JobHistoryParser parser;
        JobInfo jobInfo;
        synchronized (fileInfo) {
            Path historyFilePath = fileInfo.getHistoryFile();
            FSDataInputStream in = null;
            FileContext fc = null;
            try {
                fc = FileContext.getFileContext(conf);
                in = fc.open(fc.makeQualified(historyFilePath));
            } catch (IOException ioe) {
                LOG.info("Can not open history file: " + historyFilePath, ioe);
                throw (new Exception("Can not open History File"));
            }
            parser = new JobHistoryParser(in);
            jobInfo = parser.parse();
        }
        Exception parseException = parser.getParseException();
        Assert.assertNull("Caught an expected exception " + parseException, parseException);
        int noOffailedAttempts = 0;
        Map<TaskID, TaskInfo> allTasks = jobInfo.getAllTasks();
        for (Task task : job.getTasks().values()) {
            TaskInfo taskInfo = allTasks.get(TypeConverter.fromYarn(task.getID()));
            for (TaskAttempt taskAttempt : task.getAttempts().values()) {
                TaskAttemptInfo taskAttemptInfo = taskInfo.getAllTaskAttempts().get(TypeConverter.fromYarn((taskAttempt.getID())));
                // Verify rack-name for all task attempts
                Assert.assertEquals("rack-name is incorrect", taskAttemptInfo.getRackname(), RACK_NAME);
                if (taskAttemptInfo.getTaskStatus().equals("FAILED")) {
                    noOffailedAttempts++;
                }
            }
        }
        Assert.assertEquals("No of Failed tasks doesn't match.", 2, noOffailedAttempts);
    } finally {
        LOG.info("FINISHED testHistoryParsingForFailedAttempts");
    }
}
Also used : Path(org.apache.hadoop.fs.Path) HistoryFileInfo(org.apache.hadoop.mapreduce.v2.hs.HistoryFileManager.HistoryFileInfo) Task(org.apache.hadoop.mapreduce.v2.app.job.Task) TaskID(org.apache.hadoop.mapreduce.TaskID) Configuration(org.apache.hadoop.conf.Configuration) IOException(java.io.IOException) IOException(java.io.IOException) TaskInfo(org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskInfo) JobHistoryParser(org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser) JobInfo(org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo) TaskAttemptInfo(org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskAttemptInfo) FSDataInputStream(org.apache.hadoop.fs.FSDataInputStream) TaskAttempt(org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt) Job(org.apache.hadoop.mapreduce.v2.app.job.Job) JobId(org.apache.hadoop.mapreduce.v2.api.records.JobId) FileContext(org.apache.hadoop.fs.FileContext) MRApp(org.apache.hadoop.mapreduce.v2.app.MRApp) Test(org.junit.Test)

Example 15 with TaskAttemptInfo

use of org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskAttemptInfo in project hadoop by apache.

the class TimelineEntityConverterV1 method createTaskAttemptEntities.

/**
 * Converts every task attempt recorded in the given task's history info
 * into a timeline entity.
 *
 * @param taskInfo history info for one task, supplying all of its attempts
 * @return one {@code TimelineEntity} per task attempt
 */
private Set<TimelineEntity> createTaskAttemptEntities(TaskInfo taskInfo) {
    Map<TaskAttemptID, TaskAttemptInfo> attemptsById = taskInfo.getAllTaskAttempts();
    LOG.info("task " + taskInfo.getTaskId() + " has " + attemptsById.size() + " task attempts");
    Set<TimelineEntity> entities = new HashSet<TimelineEntity>();
    for (TaskAttemptInfo attempt : attemptsById.values()) {
        entities.add(createTaskAttemptEntity(attempt));
    }
    return entities;
}
Also used : TaskAttemptInfo(org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskAttemptInfo) TimelineEntity(org.apache.hadoop.yarn.api.records.timeline.TimelineEntity) HashSet(java.util.HashSet)

Aggregations

TaskAttemptInfo (org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskAttemptInfo)15 TaskInfo (org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskInfo)9 TaskAttemptID (org.apache.hadoop.mapreduce.TaskAttemptID)8 TaskID (org.apache.hadoop.mapreduce.TaskID)8 Test (org.junit.Test)8 HashMap (java.util.HashMap)7 ArrayList (java.util.ArrayList)6 JobID (org.apache.hadoop.mapreduce.JobID)6 JobHistoryEvent (org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent)6 TaskAttemptState (org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptState)6 TaskId (org.apache.hadoop.mapreduce.v2.api.records.TaskId)6 JobTaskEvent (org.apache.hadoop.mapreduce.v2.app.job.event.JobTaskEvent)6 OutputCommitter (org.apache.hadoop.mapreduce.OutputCommitter)5 Event (org.apache.hadoop.mapreduce.jobhistory.Event)5 EventType (org.apache.hadoop.mapreduce.jobhistory.EventType)5 JobHistoryEventHandler (org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler)5 JobCounterUpdateEvent (org.apache.hadoop.mapreduce.v2.app.job.event.JobCounterUpdateEvent)5 TaskAttemptContainerLaunchedEvent (org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptContainerLaunchedEvent)5 TaskAttemptEvent (org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent)5 TaskAttemptEventType (org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEventType)5