Example 1 with JobHistory

Use of org.apache.hadoop.mapreduce.v2.hs.JobHistory in project hadoop by apache.

The class TestJobHistoryParsing, method checkHistoryParsing.

private void checkHistoryParsing(final int numMaps, final int numReduces, final int numSuccessfulMaps) throws Exception {
    Configuration conf = new Configuration();
    conf.set(MRJobConfig.USER_NAME, System.getProperty("user.name"));
    long amStartTimeEst = System.currentTimeMillis();
    conf.setClass(NET_TOPOLOGY_NODE_SWITCH_MAPPING_IMPL_KEY, MyResolver.class, DNSToSwitchMapping.class);
    RackResolver.init(conf);
    MRApp app = new MRAppWithHistory(numMaps, numReduces, true, this.getClass().getName(), true);
    app.submit(conf);
    Job job = app.getContext().getAllJobs().values().iterator().next();
    JobId jobId = job.getID();
    LOG.info("JOBID is " + TypeConverter.fromYarn(jobId).toString());
    app.waitForState(job, JobState.SUCCEEDED);
    // make sure all events are flushed
    app.waitForState(Service.STATE.STOPPED);
    String jobhistoryDir = JobHistoryUtils.getHistoryIntermediateDoneDirForUser(conf);
    FileContext fc = null;
    try {
        fc = FileContext.getFileContext(conf);
    } catch (IOException ioe) {
        LOG.info("Cannot get FileContext", ioe);
        throw new Exception("Cannot get FileContext", ioe);
    }
    if (numMaps == numSuccessfulMaps) {
        String summaryFileName = JobHistoryUtils.getIntermediateSummaryFileName(jobId);
        Path summaryFile = new Path(jobhistoryDir, summaryFileName);
        String jobSummaryString = getJobSummary(fc, summaryFile);
        Assert.assertNotNull(jobSummaryString);
        Assert.assertTrue(jobSummaryString.contains("resourcesPerMap=100"));
        Assert.assertTrue(jobSummaryString.contains("resourcesPerReduce=100"));
        Map<String, String> jobSummaryElements = new HashMap<String, String>();
        StringTokenizer strToken = new StringTokenizer(jobSummaryString, ",");
        while (strToken.hasMoreTokens()) {
            String keypair = strToken.nextToken();
            jobSummaryElements.put(keypair.split("=")[0], keypair.split("=")[1]);
        }
        Assert.assertEquals("JobId does not match", jobId.toString(), jobSummaryElements.get("jobId"));
        Assert.assertEquals("JobName does not match", "test", jobSummaryElements.get("jobName"));
        Assert.assertTrue("submitTime should not be 0", Long.parseLong(jobSummaryElements.get("submitTime")) != 0);
        Assert.assertTrue("launchTime should not be 0", Long.parseLong(jobSummaryElements.get("launchTime")) != 0);
        Assert.assertTrue("firstMapTaskLaunchTime should not be 0", Long.parseLong(jobSummaryElements.get("firstMapTaskLaunchTime")) != 0);
        Assert.assertTrue("firstReduceTaskLaunchTime should not be 0", Long.parseLong(jobSummaryElements.get("firstReduceTaskLaunchTime")) != 0);
        Assert.assertTrue("finishTime should not be 0", Long.parseLong(jobSummaryElements.get("finishTime")) != 0);
        Assert.assertEquals("Mismatch in num map slots", numSuccessfulMaps, Integer.parseInt(jobSummaryElements.get("numMaps")));
        Assert.assertEquals("Mismatch in num reduce slots", numReduces, Integer.parseInt(jobSummaryElements.get("numReduces")));
        Assert.assertEquals("User does not match", System.getProperty("user.name"), jobSummaryElements.get("user"));
        Assert.assertEquals("Queue does not match", "default", jobSummaryElements.get("queue"));
        Assert.assertEquals("Status does not match", "SUCCEEDED", jobSummaryElements.get("status"));
    }
    JobHistory jobHistory = new JobHistory();
    jobHistory.init(conf);
    HistoryFileInfo fileInfo = jobHistory.getJobFileInfo(jobId);
    JobInfo jobInfo;
    long numFinishedMaps;
    synchronized (fileInfo) {
        Path historyFilePath = fileInfo.getHistoryFile();
        FSDataInputStream in = null;
        LOG.info("JobHistoryFile is: " + historyFilePath);
        try {
            in = fc.open(fc.makeQualified(historyFilePath));
        } catch (IOException ioe) {
            LOG.info("Cannot open history file: " + historyFilePath, ioe);
            throw new Exception("Cannot open history file", ioe);
        }
        JobHistoryParser parser = new JobHistoryParser(in);
        final EventReader realReader = new EventReader(in);
        EventReader reader = Mockito.mock(EventReader.class);
        if (numMaps == numSuccessfulMaps) {
            reader = realReader;
        } else {
            // Hack!
            final AtomicInteger numFinishedEvents = new AtomicInteger(0);
            Mockito.when(reader.getNextEvent()).thenAnswer(new Answer<HistoryEvent>() {

                public HistoryEvent answer(InvocationOnMock invocation) throws IOException {
                    HistoryEvent event = realReader.getNextEvent();
                    if (event instanceof TaskFinishedEvent) {
                        numFinishedEvents.incrementAndGet();
                    }
                    if (numFinishedEvents.get() <= numSuccessfulMaps) {
                        return event;
                    } else {
                        throw new IOException("test");
                    }
                }
            });
        }
        jobInfo = parser.parse(reader);
        numFinishedMaps = computeFinishedMaps(jobInfo, numMaps, numSuccessfulMaps);
        if (numFinishedMaps != numMaps) {
            Exception parseException = parser.getParseException();
            Assert.assertNotNull("Didn't get expected parse exception", parseException);
        }
    }
    Assert.assertEquals("Incorrect username ", System.getProperty("user.name"), jobInfo.getUsername());
    Assert.assertEquals("Incorrect jobName ", "test", jobInfo.getJobname());
    Assert.assertEquals("Incorrect queuename ", "default", jobInfo.getJobQueueName());
    Assert.assertEquals("incorrect conf path", "test", jobInfo.getJobConfPath());
    Assert.assertEquals("incorrect finishedMap ", numSuccessfulMaps, numFinishedMaps);
    Assert.assertEquals("incorrect finishedReduces ", numReduces, jobInfo.getFinishedReduces());
    Assert.assertEquals("incorrect uberized ", job.isUber(), jobInfo.getUberized());
    Map<TaskID, TaskInfo> allTasks = jobInfo.getAllTasks();
    int totalTasks = allTasks.size();
    Assert.assertEquals("total number of tasks is incorrect  ", (numMaps + numReduces), totalTasks);
    // Verify aminfo
    Assert.assertEquals(1, jobInfo.getAMInfos().size());
    Assert.assertEquals(MRApp.NM_HOST, jobInfo.getAMInfos().get(0).getNodeManagerHost());
    AMInfo amInfo = jobInfo.getAMInfos().get(0);
    Assert.assertEquals(MRApp.NM_PORT, amInfo.getNodeManagerPort());
    Assert.assertEquals(MRApp.NM_HTTP_PORT, amInfo.getNodeManagerHttpPort());
    Assert.assertEquals(1, amInfo.getAppAttemptId().getAttemptId());
    Assert.assertEquals(amInfo.getAppAttemptId(), amInfo.getContainerId().getApplicationAttemptId());
    Assert.assertTrue(amInfo.getStartTime() <= System.currentTimeMillis() && amInfo.getStartTime() >= amStartTimeEst);
    ContainerId fakeCid = MRApp.newContainerId(-1, -1, -1, -1);
    // Assert at taskAttempt level
    for (TaskInfo taskInfo : allTasks.values()) {
        int taskAttemptCount = taskInfo.getAllTaskAttempts().size();
        Assert.assertEquals("total number of task attempts ", 1, taskAttemptCount);
        TaskAttemptInfo taInfo = taskInfo.getAllTaskAttempts().values().iterator().next();
        Assert.assertNotNull(taInfo.getContainerId());
        // Verify the wrong ctor is not being used. Remove after mrv1 is removed.
        Assert.assertFalse(taInfo.getContainerId().equals(fakeCid));
    }
    // Deep compare Job and JobInfo
    for (Task task : job.getTasks().values()) {
        TaskInfo taskInfo = allTasks.get(TypeConverter.fromYarn(task.getID()));
        Assert.assertNotNull("TaskInfo not found", taskInfo);
        for (TaskAttempt taskAttempt : task.getAttempts().values()) {
            TaskAttemptInfo taskAttemptInfo = taskInfo.getAllTaskAttempts().get(TypeConverter.fromYarn((taskAttempt.getID())));
            Assert.assertNotNull("TaskAttemptInfo not found", taskAttemptInfo);
            Assert.assertEquals("Incorrect shuffle port for task attempt", taskAttempt.getShufflePort(), taskAttemptInfo.getShufflePort());
            if (numMaps == numSuccessfulMaps) {
                Assert.assertEquals(MRApp.NM_HOST, taskAttemptInfo.getHostname());
                Assert.assertEquals(MRApp.NM_PORT, taskAttemptInfo.getPort());
                // Verify rack-name
                Assert.assertEquals("rack-name is incorrect", taskAttemptInfo.getRackname(), RACK_NAME);
            }
        }
    }
    // test output for HistoryViewer
    PrintStream stdps = System.out;
    try {
        System.setOut(new PrintStream(outContent));
        HistoryViewer viewer;
        synchronized (fileInfo) {
            viewer = new HistoryViewer(fc.makeQualified(fileInfo.getHistoryFile()).toString(), conf, true);
        }
        viewer.print();
        for (TaskInfo taskInfo : allTasks.values()) {
            String test = (taskInfo.getTaskStatus() == null ? "" : taskInfo.getTaskStatus()) + " " + taskInfo.getTaskType() + " task list for " + taskInfo.getTaskId().getJobID();
            Assert.assertTrue(outContent.toString().indexOf(test) > 0);
            Assert.assertTrue(outContent.toString().indexOf(taskInfo.getTaskId().toString()) > 0);
        }
    } finally {
        System.setOut(stdps);
    }
}
Also used : MRAppWithHistory(org.apache.hadoop.mapreduce.v2.hs.TestJobHistoryEvents.MRAppWithHistory) Task(org.apache.hadoop.mapreduce.v2.app.job.Task) Configuration(org.apache.hadoop.conf.Configuration) HashMap(java.util.HashMap) TaskInfo(org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskInfo) HistoryViewer(org.apache.hadoop.mapreduce.jobhistory.HistoryViewer) JobInfo(org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo) ContainerId(org.apache.hadoop.yarn.api.records.ContainerId) TaskAttempt(org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt) Job(org.apache.hadoop.mapreduce.v2.app.job.Job) JobId(org.apache.hadoop.mapreduce.v2.api.records.JobId) MRApp(org.apache.hadoop.mapreduce.v2.app.MRApp) Path(org.apache.hadoop.fs.Path) EventReader(org.apache.hadoop.mapreduce.jobhistory.EventReader) PrintStream(java.io.PrintStream) HistoryFileInfo(org.apache.hadoop.mapreduce.v2.hs.HistoryFileManager.HistoryFileInfo) TaskID(org.apache.hadoop.mapreduce.TaskID) IOException(java.io.IOException) HistoryEvent(org.apache.hadoop.mapreduce.jobhistory.HistoryEvent) AMInfo(org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.AMInfo) StringTokenizer(java.util.StringTokenizer) TaskFinishedEvent(org.apache.hadoop.mapreduce.jobhistory.TaskFinishedEvent) JobHistoryParser(org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) InvocationOnMock(org.mockito.invocation.InvocationOnMock) TaskAttemptInfo(org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskAttemptInfo) FSDataInputStream(org.apache.hadoop.fs.FSDataInputStream) FileContext(org.apache.hadoop.fs.FileContext)
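
A note on the summary parsing above: keypair.split("=")[1] splits each token twice, throws ArrayIndexOutOfBoundsException on a token without '=', and drops anything after a second '='. A minimal, more defensive sketch of the same key=value parsing; the class and method names are illustrative, not part of the Hadoop test:

import java.util.HashMap;
import java.util.Map;

// Hypothetical helper: parses a job-summary line of comma-separated
// key=value pairs, splitting on the first '=' only so values containing
// '=' survive, and skipping malformed tokens instead of throwing.
public final class JobSummaryParserSketch {

    static Map<String, String> parse(String jobSummaryString) {
        Map<String, String> elements = new HashMap<>();
        for (String keypair : jobSummaryString.split(",")) {
            int eq = keypair.indexOf('=');
            if (eq > 0) {
                elements.put(keypair.substring(0, eq), keypair.substring(eq + 1));
            }
        }
        return elements;
    }

    public static void main(String[] args) {
        Map<String, String> m =
                parse("jobId=job_1372363578825_0015,user=alice,status=SUCCEEDED");
        System.out.println(m.get("status"));  // SUCCEEDED
    }
}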

Example 2 with JobHistory

Use of org.apache.hadoop.mapreduce.v2.hs.JobHistory in project hadoop by apache.

The class TestJobHistoryParsing, method testJobHistoryMethods.

/**
   * Simple test of some JobHistory methods.
   */
@Test(timeout = 20000)
public void testJobHistoryMethods() throws Exception {
    LOG.info("STARTING testJobHistoryMethods");
    try {
        Configuration configuration = new Configuration();
        configuration.setClass(NET_TOPOLOGY_NODE_SWITCH_MAPPING_IMPL_KEY, MyResolver.class, DNSToSwitchMapping.class);
        RackResolver.init(configuration);
        MRApp app = new MRAppWithHistory(1, 1, true, this.getClass().getName(), true);
        app.submit(configuration);
        Job job = app.getContext().getAllJobs().values().iterator().next();
        JobId jobId = job.getID();
        LOG.info("JOBID is " + TypeConverter.fromYarn(jobId).toString());
        app.waitForState(job, JobState.SUCCEEDED);
        // make sure job history events are handled
        app.waitForState(Service.STATE.STOPPED);
        JobHistory jobHistory = new JobHistory();
        jobHistory.init(configuration);
        // Method getAllJobs
        Assert.assertEquals(1, jobHistory.getAllJobs().size());
        // and with ApplicationId
        Assert.assertEquals(1, jobHistory.getAllJobs(app.getAppID()).size());
        JobsInfo jobsinfo = jobHistory.getPartialJobs(0L, 10L, null, "default", 0L, System.currentTimeMillis() + 1, 0L, System.currentTimeMillis() + 1, JobState.SUCCEEDED);
        Assert.assertEquals(1, jobsinfo.getJobs().size());
        Assert.assertNotNull(jobHistory.getApplicationAttemptId());
        // test Application Id
        Assert.assertEquals("application_0_0000", jobHistory.getApplicationID().toString());
        Assert.assertEquals("Job History Server", jobHistory.getApplicationName());
        // method does not work
        Assert.assertNull(jobHistory.getEventHandler());
        // method does not work
        Assert.assertNull(jobHistory.getClock());
        // method does not work
        Assert.assertNull(jobHistory.getClusterInfo());
    } finally {
        LOG.info("FINISHED testJobHistoryMethods");
    }
}
Also used : MRAppWithHistory(org.apache.hadoop.mapreduce.v2.hs.TestJobHistoryEvents.MRAppWithHistory) Configuration(org.apache.hadoop.conf.Configuration) JobsInfo(org.apache.hadoop.mapreduce.v2.hs.webapp.dao.JobsInfo) Job(org.apache.hadoop.mapreduce.v2.app.job.Job) JobId(org.apache.hadoop.mapreduce.v2.api.records.JobId) MRApp(org.apache.hadoop.mapreduce.v2.app.MRApp) Test(org.junit.Test)
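
Both tests above install a MyResolver class for rack resolution, but the excerpt never shows it. A minimal sketch of such a DNSToSwitchMapping stub, assuming it simply maps every host to one fixed rack (the constant value here is an assumption mirroring the RACK_NAME the parsing test asserts against):

import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.net.DNSToSwitchMapping;

// Sketch of the rack-resolver stub the tests register: every host resolves
// to the same fake rack, making rack-name assertions deterministic.
public class MyResolver implements DNSToSwitchMapping {

    static final String RACK_NAME = "/MyRackName";  // assumed value

    @Override
    public List<String> resolve(List<String> names) {
        List<String> racks = new ArrayList<>();
        for (int i = 0; i < names.size(); i++) {
            racks.add(RACK_NAME);
        }
        return racks;
    }

    @Override
    public void reloadCachedMappings() {
        // nothing cached in this stub
    }

    @Override
    public void reloadCachedMappings(List<String> names) {
        // nothing cached in this stub
    }
}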

Example 3 with JobHistory

Use of org.apache.hadoop.mapreduce.v2.hs.JobHistory in project hadoop by apache.

The class TestJobHistory, method testRefreshJobRetentionSettings.

@Test
public void testRefreshJobRetentionSettings() throws IOException, InterruptedException {
    String root = "mockfs://foo/";
    String historyDoneDir = root + "mapred/history/done";
    long now = System.currentTimeMillis();
    long someTimeYesterday = now - (25L * 3600 * 1000);
    long timeBefore200Secs = now - (200L * 1000);
    // Get yesterday's date in YYYY/MM/DD format
    String timestampComponent = JobHistoryUtils.timestampDirectoryComponent(someTimeYesterday);
    // Create a folder under yesterday's done dir
    Path donePathYesterday = new Path(historyDoneDir, timestampComponent + "/" + "000000");
    FileStatus dirCreatedYesterdayStatus = new FileStatus(0, true, 0, 0, someTimeYesterday, donePathYesterday);
    // Get today's date in YYYY/MM/DD format
    timestampComponent = JobHistoryUtils.timestampDirectoryComponent(timeBefore200Secs);
    // Create a folder under today's done dir
    Path donePathToday = new Path(historyDoneDir, timestampComponent + "/" + "000000");
    FileStatus dirCreatedTodayStatus = new FileStatus(0, true, 0, 0, timeBefore200Secs, donePathToday);
    // Create a jhist file with yesterday's timestamp under yesterday's done dir
    Path fileUnderYesterdayDir = new Path(donePathYesterday.toString(), "job_1372363578825_0015-" + someTimeYesterday + "-user-Sleep+job-" + someTimeYesterday + "-1-1-SUCCEEDED-default.jhist");
    FileStatus fileUnderYesterdayDirStatus = new FileStatus(10, false, 0, 0, someTimeYesterday, fileUnderYesterdayDir);
    // Create a jhist file with today's timestamp under today's done dir
    Path fileUnderTodayDir = new Path(donePathToday.toString(), "job_1372363578825_0016-" + timeBefore200Secs + "-user-Sleep+job-" + timeBefore200Secs + "-1-1-SUCCEEDED-default.jhist");
    FileStatus fileUnderTodayDirStatus = new FileStatus(10, false, 0, 0, timeBefore200Secs, fileUnderTodayDir);
    HistoryFileManager historyManager = spy(new HistoryFileManager());
    jobHistory = spy(new JobHistory());
    List<FileStatus> fileStatusList = new LinkedList<FileStatus>();
    fileStatusList.add(dirCreatedYesterdayStatus);
    fileStatusList.add(dirCreatedTodayStatus);
    // Set the history cleaner's initial delay to 4 secs
    doReturn(4).when(jobHistory).getInitDelaySecs();
    doReturn(historyManager).when(jobHistory).createHistoryFileManager();
    List<FileStatus> list1 = new LinkedList<FileStatus>();
    list1.add(fileUnderYesterdayDirStatus);
    doReturn(list1).when(historyManager).scanDirectoryForHistoryFiles(eq(donePathYesterday), any(FileContext.class));
    List<FileStatus> list2 = new LinkedList<FileStatus>();
    list2.add(fileUnderTodayDirStatus);
    doReturn(list2).when(historyManager).scanDirectoryForHistoryFiles(eq(donePathToday), any(FileContext.class));
    doReturn(fileStatusList).when(historyManager).getHistoryDirsForCleaning(Mockito.anyLong());
    doReturn(true).when(historyManager).deleteDir(any(FileStatus.class));
    JobListCache jobListCache = mock(JobListCache.class);
    HistoryFileInfo fileInfo = mock(HistoryFileInfo.class);
    doReturn(jobListCache).when(historyManager).createJobListCache();
    when(jobListCache.get(any(JobId.class))).thenReturn(fileInfo);
    doNothing().when(fileInfo).delete();
    // Set job retention time to 24 hrs and cleaner interval to 2 secs
    Configuration conf = new Configuration();
    conf.setLong(JHAdminConfig.MR_HISTORY_MAX_AGE_MS, 24L * 3600 * 1000);
    conf.setLong(JHAdminConfig.MR_HISTORY_CLEANER_INTERVAL_MS, 2 * 1000);
    jobHistory.init(conf);
    jobHistory.start();
    assertEquals(2 * 1000L, jobHistory.getCleanerInterval());
    // Only yesterday's jhist file should get deleted
    verify(fileInfo, timeout(20000).times(1)).delete();
    fileStatusList.remove(dirCreatedYesterdayStatus);
    // Now reset job retention time to 10 secs
    conf.setLong(JHAdminConfig.MR_HISTORY_MAX_AGE_MS, 10 * 1000);
    // Set cleaner interval to 1 sec
    conf.setLong(JHAdminConfig.MR_HISTORY_CLEANER_INTERVAL_MS, 1 * 1000);
    doReturn(conf).when(jobHistory).createConf();
    // Do refresh job retention settings
    jobHistory.refreshJobRetentionSettings();
    // Cleaner interval should be updated
    assertEquals(1 * 1000L, jobHistory.getCleanerInterval());
    // Today's jhist file will also be deleted now since it falls below the
    // retention threshold
    verify(fileInfo, timeout(20000).times(2)).delete();
}
Also used : Path(org.apache.hadoop.fs.Path) HistoryFileInfo(org.apache.hadoop.mapreduce.v2.hs.HistoryFileManager.HistoryFileInfo) FileStatus(org.apache.hadoop.fs.FileStatus) Configuration(org.apache.hadoop.conf.Configuration) LinkedList(java.util.LinkedList) JobListCache(org.apache.hadoop.mapreduce.v2.hs.HistoryFileManager.JobListCache) FileContext(org.apache.hadoop.fs.FileContext) JobId(org.apache.hadoop.mapreduce.v2.api.records.JobId) Test(org.junit.Test)
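
The timing in this test works because history cleaning reduces to a modification-time cutoff: a directory is eligible for deletion once it is older than now minus MR_HISTORY_MAX_AGE_MS. A sketch of that arithmetic with the test's own numbers; the class and method names are assumptions, not the actual HistoryFileManager internals:

// Illustrative only: the retention decision boils down to a cutoff check.
public final class RetentionCutoffSketch {

    static boolean shouldClean(long modTimeMs, long nowMs, long maxAgeMs) {
        return modTimeMs < nowMs - maxAgeMs;
    }

    public static void main(String[] args) {
        long now = System.currentTimeMillis();
        long yesterday = now - 25L * 3600 * 1000;  // 25 hours old
        long recent = now - 200L * 1000;           // 200 seconds old
        // With the initial 24-hour retention, only yesterday's dir is stale.
        System.out.println(shouldClean(yesterday, now, 24L * 3600 * 1000));  // true
        System.out.println(shouldClean(recent, now, 24L * 3600 * 1000));     // false
        // After retention is refreshed down to 10 seconds, the recent dir is too.
        System.out.println(shouldClean(recent, now, 10L * 1000));            // true
    }
}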

Example 4 with JobHistory

Use of org.apache.hadoop.mapreduce.v2.hs.JobHistory in project hadoop by apache.

The class TestHsJobBlock, method testHsJobBlockForOversizeJobShouldDisplayWarningMessage.

@Test
public void testHsJobBlockForOversizeJobShouldDisplayWarningMessage() {
    int maxAllowedTaskNum = 100;
    Configuration config = new Configuration();
    config.setInt(JHAdminConfig.MR_HS_LOADED_JOBS_TASKS_MAX, maxAllowedTaskNum);
    JobHistory jobHistory = new JobHistoryStubWithAllOversizeJobs(maxAllowedTaskNum);
    jobHistory.init(config);
    HsJobBlock jobBlock = new HsJobBlock(jobHistory) {

        // override this so that job block can fetch a job id.
        @Override
        public Map<String, String> moreParams() {
            Map<String, String> map = new HashMap<>();
            map.put(AMParams.JOB_ID, "job_0000_0001");
            return map;
        }
    };
    // set up the test block to render HsJobBlock to
    OutputStream outputStream = new ByteArrayOutputStream();
    HtmlBlock.Block block = createBlockToCreateTo(outputStream);
    jobBlock.render(block);
    block.getWriter().flush();
    String out = outputStream.toString();
    Assert.assertTrue("Should display warning message for jobs that have too " + "many tasks", out.contains("Any job larger than " + maxAllowedTaskNum + " will not be loaded"));
}
Also used : Configuration(org.apache.hadoop.conf.Configuration) JobHistory(org.apache.hadoop.mapreduce.v2.hs.JobHistory) HashMap(java.util.HashMap) OutputStream(java.io.OutputStream) ByteArrayOutputStream(org.apache.commons.io.output.ByteArrayOutputStream) HtmlBlock(org.apache.hadoop.yarn.webapp.view.HtmlBlock) BlockForTest(org.apache.hadoop.yarn.webapp.view.BlockForTest) HtmlBlockForTest(org.apache.hadoop.yarn.webapp.view.HtmlBlockForTest) Test(org.junit.Test)
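
The createBlockToCreateTo helper is not shown in this excerpt. Judging from the BlockForTest and HtmlBlockForTest types in the import list, it plausibly looks like the following; treat the exact constructor arguments (indent level 10, wasInline false) as assumptions:

import java.io.OutputStream;
import java.io.PrintWriter;
import org.apache.hadoop.yarn.webapp.view.BlockForTest;
import org.apache.hadoop.yarn.webapp.view.HtmlBlock;
import org.apache.hadoop.yarn.webapp.view.HtmlBlockForTest;

// Sketch (a method inside the test class): wraps an OutputStream in an
// HtmlBlock.Block so the rendered HTML can be captured as a plain string.
private HtmlBlock.Block createBlockToCreateTo(OutputStream outputStream) {
    PrintWriter printWriter = new PrintWriter(outputStream);
    HtmlBlock html = new HtmlBlockForTest();
    return new BlockForTest(html, printWriter, 10, false);  // args assumed
}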

Example 5 with JobHistory

Use of org.apache.hadoop.mapreduce.v2.hs.JobHistory in project hadoop by apache.

The class TestBlocks, method testPullTaskLink.

@Test
public void testPullTaskLink() {
    Task task = getTask(0);
    String taskId = task.getID().toString();
    Assert.assertEquals("pull links doesn't work correctly", "Task failed <a href=\"/jobhistory/task/" + taskId + "\">" + taskId + "</a>", HsJobBlock.addTaskLinks("Task failed " + taskId));
    Assert.assertEquals("pull links doesn't work correctly", "Task failed <a href=\"/jobhistory/task/" + taskId + "\">" + taskId + "</a>\n Job failed as tasks failed. failedMaps:1 failedReduces:0", HsJobBlock.addTaskLinks("Task failed " + taskId + "\n " + "Job failed as tasks failed. failedMaps:1 failedReduces:0"));
}
Also used : Task(org.apache.hadoop.mapreduce.v2.app.job.Task) BlockForTest(org.apache.hadoop.yarn.webapp.view.BlockForTest) Test(org.junit.Test) AppForTest(org.apache.hadoop.mapreduce.v2.app.webapp.AppForTest)
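
HsJobBlock.addTaskLinks itself is not part of this excerpt. A regex-based sketch of the same idea, wrapping every MapReduce task ID in a /jobhistory/task/ link; the pattern and class are illustrative assumptions, not the actual Hadoop implementation:

import java.util.regex.Pattern;

// Illustrative only: inserts history-server links around task IDs such as
// task_1372363578825_0015_m_000000 ($0 is the whole regex match).
public final class TaskLinkSketch {

    private static final Pattern TASK_ID =
            Pattern.compile("task_\\d+_\\d+_[mr]_\\d+");

    static String addTaskLinks(String text) {
        return TASK_ID.matcher(text)
                .replaceAll("<a href=\"/jobhistory/task/$0\">$0</a>");
    }

    public static void main(String[] args) {
        System.out.println(addTaskLinks(
                "Task failed task_1372363578825_0015_m_000000"));
    }
}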

Aggregations

Configuration (org.apache.hadoop.conf.Configuration): 20 usages
Test (org.junit.Test): 20 usages
JobId (org.apache.hadoop.mapreduce.v2.api.records.JobId): 18 usages
Job (org.apache.hadoop.mapreduce.v2.app.job.Job): 17 usages
IOException (java.io.IOException): 9 usages
MRApp (org.apache.hadoop.mapreduce.v2.app.MRApp): 9 usages
HistoryFileInfo (org.apache.hadoop.mapreduce.v2.hs.HistoryFileManager.HistoryFileInfo): 9 usages
Task (org.apache.hadoop.mapreduce.v2.app.job.Task): 8 usages
Path (org.apache.hadoop.fs.Path): 6 usages
FileContext (org.apache.hadoop.fs.FileContext): 5 usages
TaskId (org.apache.hadoop.mapreduce.v2.api.records.TaskId): 5 usages
HashMap (java.util.HashMap): 4 usages
FSDataInputStream (org.apache.hadoop.fs.FSDataInputStream): 4 usages
JobHistoryParser (org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser): 4 usages
JobInfo (org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo): 4 usages
TaskAttempt (org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt): 4 usages
BlockForTest (org.apache.hadoop.yarn.webapp.view.BlockForTest): 4 usages
TaskID (org.apache.hadoop.mapreduce.TaskID): 3 usages
TaskInfo (org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskInfo): 3 usages
YarnRuntimeException (org.apache.hadoop.yarn.exceptions.YarnRuntimeException): 3 usages