
Example 16 with JobHistory

Use of org.apache.hadoop.mapreduce.v2.hs.JobHistory in project hadoop by apache.

From the class TestJobHistoryParsing, method testHistoryParsingForFailedAttempts.

@Test(timeout = 30000)
public void testHistoryParsingForFailedAttempts() throws Exception {
    LOG.info("STARTING testHistoryParsingForFailedAttempts");
    try {
        Configuration conf = new Configuration();
        conf.setClass(NET_TOPOLOGY_NODE_SWITCH_MAPPING_IMPL_KEY, MyResolver.class, DNSToSwitchMapping.class);
        RackResolver.init(conf);
        MRApp app = new MRAppWithHistoryWithFailedAttempt(2, 1, true, this.getClass().getName(), true);
        app.submit(conf);
        Job job = app.getContext().getAllJobs().values().iterator().next();
        JobId jobId = job.getID();
        app.waitForState(job, JobState.SUCCEEDED);
        // make sure all events are flushed
        app.waitForState(Service.STATE.STOPPED);
        JobHistory jobHistory = new JobHistory();
        jobHistory.init(conf);
        HistoryFileInfo fileInfo = jobHistory.getJobFileInfo(jobId);
        JobHistoryParser parser;
        JobInfo jobInfo;
        synchronized (fileInfo) {
            Path historyFilePath = fileInfo.getHistoryFile();
            FSDataInputStream in = null;
            FileContext fc = null;
            try {
                fc = FileContext.getFileContext(conf);
                in = fc.open(fc.makeQualified(historyFilePath));
            } catch (IOException ioe) {
                LOG.info("Can not open history file: " + historyFilePath, ioe);
                throw (new Exception("Can not open History File"));
            }
            parser = new JobHistoryParser(in);
            jobInfo = parser.parse();
        }
        Exception parseException = parser.getParseException();
        Assert.assertNull("Caught an expected exception " + parseException, parseException);
        int noOffailedAttempts = 0;
        Map<TaskID, TaskInfo> allTasks = jobInfo.getAllTasks();
        for (Task task : job.getTasks().values()) {
            TaskInfo taskInfo = allTasks.get(TypeConverter.fromYarn(task.getID()));
            for (TaskAttempt taskAttempt : task.getAttempts().values()) {
                TaskAttemptInfo taskAttemptInfo = taskInfo.getAllTaskAttempts().get(TypeConverter.fromYarn((taskAttempt.getID())));
                // Verify rack-name for all task attempts
                Assert.assertEquals("rack-name is incorrect", taskAttemptInfo.getRackname(), RACK_NAME);
                if (taskAttemptInfo.getTaskStatus().equals("FAILED")) {
                    numFailedAttempts++;
                }
            }
        }
        Assert.assertEquals("No of Failed tasks doesn't match.", 2, noOffailedAttempts);
    } finally {
        LOG.info("FINISHED testHistoryParsingForFailedAttempts");
    }
}
Also used: Path (org.apache.hadoop.fs.Path), HistoryFileInfo (org.apache.hadoop.mapreduce.v2.hs.HistoryFileManager.HistoryFileInfo), Task (org.apache.hadoop.mapreduce.v2.app.job.Task), TaskID (org.apache.hadoop.mapreduce.TaskID), Configuration (org.apache.hadoop.conf.Configuration), IOException (java.io.IOException), TaskInfo (org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskInfo), JobHistoryParser (org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser), JobInfo (org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo), TaskAttemptInfo (org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskAttemptInfo), FSDataInputStream (org.apache.hadoop.fs.FSDataInputStream), TaskAttempt (org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt), Job (org.apache.hadoop.mapreduce.v2.app.job.Job), JobId (org.apache.hadoop.mapreduce.v2.api.records.JobId), FileContext (org.apache.hadoop.fs.FileContext), MRApp (org.apache.hadoop.mapreduce.v2.app.MRApp), Test (org.junit.Test)
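
Stripped of the MRApp test harness, the retrieval-and-parse flow above reduces to the following minimal sketch. It assumes conf points at an existing history directory and jobId names a completed job whose history file has already been written; error handling is reduced to rethrowing.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo;
import org.apache.hadoop.mapreduce.v2.api.records.JobId;
import org.apache.hadoop.mapreduce.v2.hs.HistoryFileManager.HistoryFileInfo;
import org.apache.hadoop.mapreduce.v2.hs.JobHistory;

public class HistoryParseSketch {

    static JobInfo parseHistory(Configuration conf, JobId jobId) throws Exception {
        JobHistory jobHistory = new JobHistory();
        // picks up the history directories from conf
        jobHistory.init(conf);
        HistoryFileInfo fileInfo = jobHistory.getJobFileInfo(jobId);
        Path historyFilePath = fileInfo.getHistoryFile();
        FileContext fc = FileContext.getFileContext(conf);
        try (FSDataInputStream in = fc.open(fc.makeQualified(historyFilePath))) {
            JobHistoryParser parser = new JobHistoryParser(in);
            JobInfo jobInfo = parser.parse();
            // parse() records some failures instead of throwing, so check explicitly,
            // just as the test does
            if (parser.getParseException() != null) {
                throw parser.getParseException();
            }
            return jobInfo;
        }
    }
}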

Example 17 with JobHistory

Use of org.apache.hadoop.mapreduce.v2.hs.JobHistory in project hadoop by apache.

From the class TestHsJobBlock, method testHsJobBlockForNormalSizeJobShouldNotDisplayWarningMessage.

@Test
public void testHsJobBlockForNormalSizeJobShouldNotDisplayWarningMessage() {
    Configuration config = new Configuration();
    config.setInt(JHAdminConfig.MR_HS_LOADED_JOBS_TASKS_MAX, -1);
    JobHistory jobHistory = new JobHitoryStubWithAllNormalSizeJobs();
    jobHistory.init(config);
    HsJobBlock jobBlock = new HsJobBlock(jobHistory) {

        // override this so that the job block can fetch a job id.
        @Override
        public Map<String, String> moreParams() {
            Map<String, String> map = new HashMap<>();
            map.put(AMParams.JOB_ID, "job_0000_0001");
            return map;
        }

        // override this to avoid view context lookup in render()
        @Override
        public ResponseInfo info(String about) {
            return new ResponseInfo().about(about);
        }

        // override this to avoid view context lookup in render()
        @Override
        public String url(String... parts) {
            return StringHelper.ujoin("", parts);
        }
    };
    // set up the test block to render the HsJobBlock into
    OutputStream outputStream = new ByteArrayOutputStream();
    HtmlBlock.Block block = createBlockToCreateTo(outputStream);
    jobBlock.render(block);
    block.getWriter().flush();
    String out = outputStream.toString();
    Assert.assertTrue("Should display job overview for the job.", out.contains("ApplicationMaster"));
}
Also used: ResponseInfo (org.apache.hadoop.yarn.webapp.ResponseInfo), Configuration (org.apache.hadoop.conf.Configuration), JobHistory (org.apache.hadoop.mapreduce.v2.hs.JobHistory), HashMap (java.util.HashMap), OutputStream (java.io.OutputStream), ByteArrayOutputStream (org.apache.commons.io.output.ByteArrayOutputStream), HtmlBlock (org.apache.hadoop.yarn.webapp.view.HtmlBlock), BlockForTest (org.apache.hadoop.yarn.webapp.view.BlockForTest), HtmlBlockForTest (org.apache.hadoop.yarn.webapp.view.HtmlBlockForTest), Test (org.junit.Test)
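
For contrast, a minimal sketch of the inverse setup: capping MR_HS_LOADED_JOBS_TASKS_MAX so that an oversized job would take the warning path instead (-1, as used above, disables the cap). The stub class name here is hypothetical, standing in for a JobHistory stub that returns jobs above the cap.

Configuration config = new Configuration();
// Cap how many tasks a loaded job may have before HsJobBlock declines to
// render the full overview and shows a warning instead.
config.setInt(JHAdminConfig.MR_HS_LOADED_JOBS_TASKS_MAX, 100);
// Hypothetical stub, analogous to the one above, returning oversize jobs.
JobHistory jobHistory = new JobHistoryStubWithAllOversizeJobs();
jobHistory.init(config);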

Example 18 with JobHistory

Use of org.apache.hadoop.mapreduce.v2.hs.JobHistory in project hadoop by apache.

From the class TestBlocks, method testHsController.

/**
 * Test HsController.
 */
@Test
public void testHsController() throws Exception {
    AppContext ctx = mock(AppContext.class);
    ApplicationId appId = ApplicationIdPBImpl.newInstance(0, 5);
    when(ctx.getApplicationID()).thenReturn(appId);
    AppForTest app = new AppForTest(ctx);
    Configuration config = new Configuration();
    RequestContext requestCtx = mock(RequestContext.class);
    HsControllerForTest controller = new HsControllerForTest(app, config, requestCtx);
    controller.index();
    assertEquals("JobHistory", controller.get(Params.TITLE, ""));
    assertEquals(HsJobPage.class, controller.jobPage());
    assertEquals(HsCountersPage.class, controller.countersPage());
    assertEquals(HsTasksPage.class, controller.tasksPage());
    assertEquals(HsTaskPage.class, controller.taskPage());
    assertEquals(HsAttemptsPage.class, controller.attemptsPage());
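    // The ids below use a shortened test form; production ids look like
    // job_<clusterTimestamp>_<sequence> and
    // task_<clusterTimestamp>_<sequence>_m_<index> ("m" selects the map type).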
    controller.set(AMParams.JOB_ID, "job_01_01");
    controller.set(AMParams.TASK_ID, "task_01_01_m_01");
    controller.set(AMParams.TASK_TYPE, "m");
    controller.set(AMParams.ATTEMPT_STATE, "State");
    Job job = mock(Job.class);
    Task task = mock(Task.class);
    when(job.getTask(any(TaskId.class))).thenReturn(task);
    JobId jobID = MRApps.toJobID("job_01_01");
    when(ctx.getJob(jobID)).thenReturn(job);
    when(job.checkAccess(any(UserGroupInformation.class), any(JobACL.class))).thenReturn(true);
    controller.job();
    assertEquals(HsJobPage.class, controller.getClazz());
    controller.jobCounters();
    assertEquals(HsCountersPage.class, controller.getClazz());
    controller.taskCounters();
    assertEquals(HsCountersPage.class, controller.getClazz());
    controller.tasks();
    assertEquals(HsTasksPage.class, controller.getClazz());
    controller.task();
    assertEquals(HsTaskPage.class, controller.getClazz());
    controller.attempts();
    assertEquals(HsAttemptsPage.class, controller.getClazz());
    assertEquals(HsConfPage.class, controller.confPage());
    assertEquals(HsAboutPage.class, controller.aboutPage());
    controller.about();
    assertEquals(HsAboutPage.class, controller.getClazz());
    controller.logs();
    assertEquals(HsLogsPage.class, controller.getClazz());
    controller.nmlogs();
    assertEquals(AggregatedLogsPage.class, controller.getClazz());
    assertEquals(HsSingleCounterPage.class, controller.singleCounterPage());
    controller.singleJobCounter();
    assertEquals(HsSingleCounterPage.class, controller.getClazz());
    controller.singleTaskCounter();
    assertEquals(HsSingleCounterPage.class, controller.getClazz());
}
Also used: Task (org.apache.hadoop.mapreduce.v2.app.job.Task), TaskId (org.apache.hadoop.mapreduce.v2.api.records.TaskId), Configuration (org.apache.hadoop.conf.Configuration), AppContext (org.apache.hadoop.mapreduce.v2.app.AppContext), RequestContext (org.apache.hadoop.yarn.webapp.Controller.RequestContext), ApplicationId (org.apache.hadoop.yarn.api.records.ApplicationId), AppForTest (org.apache.hadoop.mapreduce.v2.app.webapp.AppForTest), Job (org.apache.hadoop.mapreduce.v2.app.job.Job), JobACL (org.apache.hadoop.mapreduce.JobACL), JobId (org.apache.hadoop.mapreduce.v2.api.records.JobId), UserGroupInformation (org.apache.hadoop.security.UserGroupInformation), BlockForTest (org.apache.hadoop.yarn.webapp.view.BlockForTest), Test (org.junit.Test)

Example 19 with JobHistory

Use of org.apache.hadoop.mapreduce.v2.hs.JobHistory in project hadoop by apache.

From the class TestJobHistory, method testLoadJobErrorCases.

@Test
public void testLoadJobErrorCases() throws IOException {
    HistoryFileManager historyManager = mock(HistoryFileManager.class);
    jobHistory = spy(new JobHistory());
    doReturn(historyManager).when(jobHistory).createHistoryFileManager();
    Configuration conf = new Configuration();
    // Set the cache threshold to 50 tasks
    conf.setInt(JHAdminConfig.MR_HISTORY_LOADED_TASKS_CACHE_SIZE, 50);
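    // A positive loaded-tasks cache size switches CachedHistoryStorage from
    // counting cached jobs to counting their loaded tasks, hence the
    // getUseLoadedTasksCache() assertion below.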
    jobHistory.init(conf);
    jobHistory.start();
    CachedHistoryStorage storage = spy((CachedHistoryStorage) jobHistory.getHistoryStorage());
    assertTrue(storage.getUseLoadedTasksCache());
    // expected value first
    assertEquals(50, storage.getLoadedTasksCacheSize());
    // Create jobs for bad fileInfo results
    Job[] jobs = new Job[4];
    JobId[] jobIds = new JobId[4];
    for (int i = 0; i < jobs.length; i++) {
        jobs[i] = mock(Job.class);
        jobIds[i] = mock(JobId.class);
        when(jobs[i].getID()).thenReturn(jobIds[i]);
        when(jobs[i].getTotalMaps()).thenReturn(10);
        when(jobs[i].getTotalReduces()).thenReturn(2);
    }
    HistoryFileInfo loadJobException = mock(HistoryFileInfo.class);
    when(loadJobException.loadJob()).thenThrow(new IOException("History file not found"));
    when(historyManager.getFileInfo(jobIds[0])).thenThrow(new IOException(""));
    when(historyManager.getFileInfo(jobIds[1])).thenReturn(null);
    when(historyManager.getFileInfo(jobIds[2])).thenReturn(loadJobException);
    try {
        storage.getFullJob(jobIds[0]);
        fail("Did not get expected YarnRuntimeException for getFileInfo() throwing IOException");
    } catch (YarnRuntimeException e) {
    // Expected
    }
    // fileInfo==null should return null
    Job job = storage.getFullJob(jobIds[1]);
    assertNull(job);
    try {
        storage.getFullJob(jobIds[2]);
        fail("Did not get expected YarnRuntimeException for fileInfo.loadJob() throwing IOException");
    } catch (YarnRuntimeException e) {
    // Expected
    }
}
Also used: YarnRuntimeException (org.apache.hadoop.yarn.exceptions.YarnRuntimeException), HistoryFileInfo (org.apache.hadoop.mapreduce.v2.hs.HistoryFileManager.HistoryFileInfo), Configuration (org.apache.hadoop.conf.Configuration), IOException (java.io.IOException), Job (org.apache.hadoop.mapreduce.v2.app.job.Job), JobId (org.apache.hadoop.mapreduce.v2.api.records.JobId), Test (org.junit.Test)
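
The three failure modes exercised here suggest a caller-side guard around CachedHistoryStorage.getFullJob(). A minimal sketch, assuming the same types as the test above; mapping all failures to null is an assumption of this sketch, not Hadoop's own policy.

// Returns the fully loaded job, or null if no usable history exists.
static Job lookupJob(CachedHistoryStorage storage, JobId jobId) {
    try {
        // getFileInfo() or loadJob() failures surface as YarnRuntimeException;
        // a null fileInfo simply yields a null job.
        return storage.getFullJob(jobId);
    } catch (YarnRuntimeException e) {
        // treat an unreadable history file the same as a missing one
        return null;
    }
}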

Example 20 with JobHistory

Use of org.apache.hadoop.mapreduce.v2.hs.JobHistory in project hadoop by apache.

From the class TestJobHistory, method testRefreshLoadedJobCache.

@Test
public void testRefreshLoadedJobCache() throws Exception {
    HistoryFileManager historyManager = mock(HistoryFileManager.class);
    jobHistory = spy(new JobHistory());
    doReturn(historyManager).when(jobHistory).createHistoryFileManager();
    Configuration conf = new Configuration();
    // Set the cache size to 2
    conf.setInt(JHAdminConfig.MR_HISTORY_LOADED_JOB_CACHE_SIZE, 2);
    jobHistory.init(conf);
    jobHistory.start();
    CachedHistoryStorage storage = spy((CachedHistoryStorage) jobHistory.getHistoryStorage());
    assertFalse(storage.getUseLoadedTasksCache());
    Job[] jobs = new Job[3];
    JobId[] jobIds = new JobId[3];
    for (int i = 0; i < 3; i++) {
        jobs[i] = mock(Job.class);
        jobIds[i] = mock(JobId.class);
        when(jobs[i].getID()).thenReturn(jobIds[i]);
    }
    HistoryFileInfo fileInfo = mock(HistoryFileInfo.class);
    when(historyManager.getFileInfo(any(JobId.class))).thenReturn(fileInfo);
    when(fileInfo.loadJob()).thenReturn(jobs[0]).thenReturn(jobs[1]).thenReturn(jobs[2]);
    // getFullJob will put the job in the cache if it isn't there
    for (int i = 0; i < 3; i++) {
        storage.getFullJob(jobs[i].getID());
    }
    Cache<JobId, Job> jobCache = storage.getLoadedJobCache();
    // Verify some jobs are stored in the cache. Eviction order is hard to
    // predict with the Guava cache, so only check that it is non-empty.
    assertTrue(jobCache.size() > 0);
    // Setting cache size to 3
    conf.setInt(JHAdminConfig.MR_HISTORY_LOADED_JOB_CACHE_SIZE, 3);
    doReturn(conf).when(storage).createConf();
    when(fileInfo.loadJob()).thenReturn(jobs[0]).thenReturn(jobs[1]).thenReturn(jobs[2]);
    jobHistory.refreshLoadedJobCache();
    for (int i = 0; i < 3; i++) {
        storage.getFullJob(jobs[i].getID());
    }
    jobCache = storage.getLoadedJobCache();
    // As above, only verify the cache is non-empty; eviction order is hard
    // to predict with the Guava cache.
    assertTrue(jobCache.size() > 0);
}
Also used: HistoryFileInfo (org.apache.hadoop.mapreduce.v2.hs.HistoryFileManager.HistoryFileInfo), Configuration (org.apache.hadoop.conf.Configuration), Job (org.apache.hadoop.mapreduce.v2.app.job.Job), JobId (org.apache.hadoop.mapreduce.v2.api.records.JobId), Test (org.junit.Test)
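
On a live history server the same resize is driven by configuration rather than a stubbed createConf(): update mapreduce.jobhistory.loadedjobs.cache.size and trigger the refresh, which the admin CLI exposes as mapred hsadmin -refreshLoadedJobCache. A minimal sketch against an already initialized and started JobHistory:

// Hedged sketch: in the test, createConf() is stubbed; on a real server the
// new size comes from the refreshed configuration when the refresh runs.
conf.setInt(JHAdminConfig.MR_HISTORY_LOADED_JOB_CACHE_SIZE, 3);
jobHistory.refreshLoadedJobCache(); // rebuilds the loaded-job cache at the new size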

Aggregations

Types aggregated across the 20 examples, with usage counts:

Configuration (org.apache.hadoop.conf.Configuration): 20
Test (org.junit.Test): 20
JobId (org.apache.hadoop.mapreduce.v2.api.records.JobId): 18
Job (org.apache.hadoop.mapreduce.v2.app.job.Job): 17
IOException (java.io.IOException): 9
MRApp (org.apache.hadoop.mapreduce.v2.app.MRApp): 9
HistoryFileInfo (org.apache.hadoop.mapreduce.v2.hs.HistoryFileManager.HistoryFileInfo): 9
Task (org.apache.hadoop.mapreduce.v2.app.job.Task): 8
Path (org.apache.hadoop.fs.Path): 6
FileContext (org.apache.hadoop.fs.FileContext): 5
TaskId (org.apache.hadoop.mapreduce.v2.api.records.TaskId): 5
HashMap (java.util.HashMap): 4
FSDataInputStream (org.apache.hadoop.fs.FSDataInputStream): 4
JobHistoryParser (org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser): 4
JobInfo (org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo): 4
TaskAttempt (org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt): 4
BlockForTest (org.apache.hadoop.yarn.webapp.view.BlockForTest): 4
TaskID (org.apache.hadoop.mapreduce.TaskID): 3
TaskInfo (org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskInfo): 3
YarnRuntimeException (org.apache.hadoop.yarn.exceptions.YarnRuntimeException): 3