Use of org.apache.hadoop.mapreduce.v2.hs.JobHistory in project hadoop by apache.
The class TestJobHistoryParsing, method testHistoryParsingForFailedAttempts.
@Test(timeout = 30000)
public void testHistoryParsingForFailedAttempts() throws Exception {
  LOG.info("STARTING testHistoryParsingForFailedAttempts");
  try {
    Configuration conf = new Configuration();
    conf.setClass(NET_TOPOLOGY_NODE_SWITCH_MAPPING_IMPL_KEY, MyResolver.class, DNSToSwitchMapping.class);
    RackResolver.init(conf);
    MRApp app = new MRAppWithHistoryWithFailedAttempt(2, 1, true, this.getClass().getName(), true);
    app.submit(conf);
    Job job = app.getContext().getAllJobs().values().iterator().next();
    JobId jobId = job.getID();
    app.waitForState(job, JobState.SUCCEEDED);
    // make sure all events are flushed
    app.waitForState(Service.STATE.STOPPED);
    JobHistory jobHistory = new JobHistory();
    jobHistory.init(conf);
    HistoryFileInfo fileInfo = jobHistory.getJobFileInfo(jobId);
    JobHistoryParser parser;
    JobInfo jobInfo;
    synchronized (fileInfo) {
      Path historyFilePath = fileInfo.getHistoryFile();
      FSDataInputStream in = null;
      FileContext fc = null;
      try {
        fc = FileContext.getFileContext(conf);
        in = fc.open(fc.makeQualified(historyFilePath));
      } catch (IOException ioe) {
        LOG.info("Can not open history file: " + historyFilePath, ioe);
        throw new Exception("Can not open History File");
      }
      parser = new JobHistoryParser(in);
      jobInfo = parser.parse();
    }
    Exception parseException = parser.getParseException();
    Assert.assertNull("Caught an unexpected exception " + parseException, parseException);
    int noOffailedAttempts = 0;
    Map<TaskID, TaskInfo> allTasks = jobInfo.getAllTasks();
    for (Task task : job.getTasks().values()) {
      TaskInfo taskInfo = allTasks.get(TypeConverter.fromYarn(task.getID()));
      for (TaskAttempt taskAttempt : task.getAttempts().values()) {
        TaskAttemptInfo taskAttemptInfo = taskInfo.getAllTaskAttempts().get(TypeConverter.fromYarn(taskAttempt.getID()));
        // Verify rack-name for all task attempts
        Assert.assertEquals("rack-name is incorrect", taskAttemptInfo.getRackname(), RACK_NAME);
        if (taskAttemptInfo.getTaskStatus().equals("FAILED")) {
          noOffailedAttempts++;
        }
      }
    }
    Assert.assertEquals("Number of failed attempts doesn't match.", 2, noOffailedAttempts);
  } finally {
    LOG.info("FINISHED testHistoryParsingForFailedAttempts");
  }
}
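Stripped of the MRApp scaffolding, the history-parsing sequence the test exercises reduces to the sketch below. This is a minimal illustration, not the test itself; conf and jobId are assumed to be an already-populated Configuration and a known JobId.

// Minimal sketch of the parse flow (assumes conf and jobId are in scope).
JobHistory jobHistory = new JobHistory();
jobHistory.init(conf); // wires up the underlying history file manager
HistoryFileInfo fileInfo = jobHistory.getJobFileInfo(jobId);
Path historyFilePath = fileInfo.getHistoryFile();
FileContext fc = FileContext.getFileContext(conf);
FSDataInputStream in = fc.open(fc.makeQualified(historyFilePath));
JobHistoryParser parser = new JobHistoryParser(in);
JobInfo jobInfo = parser.parse();
// As the test's final assertNull suggests, parse() records some errors
// instead of throwing them, so check explicitly:
if (parser.getParseException() != null) {
  throw new IOException("Corrupt history file: " + historyFilePath, parser.getParseException());
}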
Use of org.apache.hadoop.mapreduce.v2.hs.JobHistory in project hadoop by apache.
The class TestHsJobBlock, method testHsJobBlockForNormalSizeJobShouldNotDisplayWarningMessage.
@Test
public void testHsJobBlockForNormalSizeJobShouldNotDisplayWarningMessage() {
  Configuration config = new Configuration();
  config.setInt(JHAdminConfig.MR_HS_LOADED_JOBS_TASKS_MAX, -1);
  JobHistory jobHistory = new JobHitoryStubWithAllNormalSizeJobs();
  jobHistory.init(config);
  HsJobBlock jobBlock = new HsJobBlock(jobHistory) {

    // override this so that the job block can fetch a job id.
    @Override
    public Map<String, String> moreParams() {
      Map<String, String> map = new HashMap<>();
      map.put(AMParams.JOB_ID, "job_0000_0001");
      return map;
    }

    // override this to avoid view context lookup in render()
    @Override
    public ResponseInfo info(String about) {
      return new ResponseInfo().about(about);
    }

    // override this to avoid view context lookup in render()
    @Override
    public String url(String... parts) {
      return StringHelper.ujoin("", parts);
    }
  };
  // set up the test block to render the HsJobBlock to
  OutputStream outputStream = new ByteArrayOutputStream();
  HtmlBlock.Block block = createBlockToCreateTo(outputStream);
  jobBlock.render(block);
  block.getWriter().flush();
  String out = outputStream.toString();
  Assert.assertTrue("Should display job overview for the job.", out.contains("ApplicationMaster"));
}
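The -1 is what makes the job "normal size" here: MR_HS_LOADED_JOBS_TASKS_MAX caps how many tasks a job may have before HsJobBlock declines to render it fully, and a negative value evidently disables the cap. A hedged contrast with the oversized-job case (the positive limit below is illustrative, not taken from this snippet):

Configuration config = new Configuration();
// Negative value: no task limit, every job renders in full.
config.setInt(JHAdminConfig.MR_HS_LOADED_JOBS_TASKS_MAX, -1);
// A positive value would presumably make HsJobBlock show a warning for
// jobs whose task count exceeds it (illustrative value):
// config.setInt(JHAdminConfig.MR_HS_LOADED_JOBS_TASKS_MAX, 100);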
Use of org.apache.hadoop.mapreduce.v2.hs.JobHistory in project hadoop by apache.
The class TestBlocks, method testHsController.
/**
 * Test HsController.
 */
@Test
public void testHsController() throws Exception {
  AppContext ctx = mock(AppContext.class);
  ApplicationId appId = ApplicationIdPBImpl.newInstance(0, 5);
  when(ctx.getApplicationID()).thenReturn(appId);
  AppForTest app = new AppForTest(ctx);
  Configuration config = new Configuration();
  RequestContext requestCtx = mock(RequestContext.class);
  HsControllerForTest controller = new HsControllerForTest(app, config, requestCtx);
  controller.index();
  assertEquals("JobHistory", controller.get(Params.TITLE, ""));
  assertEquals(HsJobPage.class, controller.jobPage());
  assertEquals(HsCountersPage.class, controller.countersPage());
  assertEquals(HsTasksPage.class, controller.tasksPage());
  assertEquals(HsTaskPage.class, controller.taskPage());
  assertEquals(HsAttemptsPage.class, controller.attemptsPage());
  controller.set(AMParams.JOB_ID, "job_01_01");
  controller.set(AMParams.TASK_ID, "task_01_01_m_01");
  controller.set(AMParams.TASK_TYPE, "m");
  controller.set(AMParams.ATTEMPT_STATE, "State");
  Job job = mock(Job.class);
  Task task = mock(Task.class);
  when(job.getTask(any(TaskId.class))).thenReturn(task);
  JobId jobID = MRApps.toJobID("job_01_01");
  when(ctx.getJob(jobID)).thenReturn(job);
  when(job.checkAccess(any(UserGroupInformation.class), any(JobACL.class))).thenReturn(true);
  controller.job();
  assertEquals(HsJobPage.class, controller.getClazz());
  controller.jobCounters();
  assertEquals(HsCountersPage.class, controller.getClazz());
  controller.taskCounters();
  assertEquals(HsCountersPage.class, controller.getClazz());
  controller.tasks();
  assertEquals(HsTasksPage.class, controller.getClazz());
  controller.task();
  assertEquals(HsTaskPage.class, controller.getClazz());
  controller.attempts();
  assertEquals(HsAttemptsPage.class, controller.getClazz());
  assertEquals(HsConfPage.class, controller.confPage());
  assertEquals(HsAboutPage.class, controller.aboutPage());
  controller.about();
  assertEquals(HsAboutPage.class, controller.getClazz());
  controller.logs();
  assertEquals(HsLogsPage.class, controller.getClazz());
  controller.nmlogs();
  assertEquals(AggregatedLogsPage.class, controller.getClazz());
  assertEquals(HsSingleCounterPage.class, controller.singleCounterPage());
  controller.singleJobCounter();
  assertEquals(HsSingleCounterPage.class, controller.getClazz());
  controller.singleTaskCounter();
  assertEquals(HsSingleCounterPage.class, controller.getClazz());
}
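HsControllerForTest itself is not shown in this snippet. A plausible shape for it, assuming it simply captures the page class an action routes to instead of rendering HTML (the getClazz() accessor below is the hook the assertions read back; the real helper presumably also stubs set()/get() with a local parameter map):

// Hypothetical sketch of the test controller, not the actual TestBlocks helper.
private static class HsControllerForTest extends HsController {
  private Class<?> clazz;

  HsControllerForTest(App app, Configuration conf, RequestContext ctx) {
    super(app, conf, ctx);
  }

  @Override
  public void render(Class<? extends View> cls) {
    clazz = cls; // record which page the action selected
  }

  Class<?> getClazz() {
    return clazz;
  }
}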
Use of org.apache.hadoop.mapreduce.v2.hs.JobHistory in project hadoop by apache.
The class TestJobHistory, method testLoadJobErrorCases.
@Test
public void testLoadJobErrorCases() throws IOException {
  HistoryFileManager historyManager = mock(HistoryFileManager.class);
  jobHistory = spy(new JobHistory());
  doReturn(historyManager).when(jobHistory).createHistoryFileManager();
  Configuration conf = new Configuration();
  // Set the cache threshold to 50 tasks
  conf.setInt(JHAdminConfig.MR_HISTORY_LOADED_TASKS_CACHE_SIZE, 50);
  jobHistory.init(conf);
  jobHistory.start();
  CachedHistoryStorage storage = spy((CachedHistoryStorage) jobHistory.getHistoryStorage());
  assertTrue(storage.getUseLoadedTasksCache());
  assertEquals(50, storage.getLoadedTasksCacheSize());
  // Create jobs for bad fileInfo results
  Job[] jobs = new Job[4];
  JobId[] jobIds = new JobId[4];
  for (int i = 0; i < jobs.length; i++) {
    jobs[i] = mock(Job.class);
    jobIds[i] = mock(JobId.class);
    when(jobs[i].getID()).thenReturn(jobIds[i]);
    when(jobs[i].getTotalMaps()).thenReturn(10);
    when(jobs[i].getTotalReduces()).thenReturn(2);
  }
  HistoryFileInfo loadJobException = mock(HistoryFileInfo.class);
  when(loadJobException.loadJob()).thenThrow(new IOException("History file not found"));
  when(historyManager.getFileInfo(jobIds[0])).thenThrow(new IOException(""));
  when(historyManager.getFileInfo(jobIds[1])).thenReturn(null);
  when(historyManager.getFileInfo(jobIds[2])).thenReturn(loadJobException);
  try {
    storage.getFullJob(jobIds[0]);
    fail("Did not get expected YarnRuntimeException for getFileInfo() throwing IOException");
  } catch (YarnRuntimeException e) {
    // Expected
  }
  // fileInfo == null should return null
  Job job = storage.getFullJob(jobIds[1]);
  assertNull(job);
  try {
    storage.getFullJob(jobIds[2]);
    fail("Did not get expected YarnRuntimeException for fileInfo.loadJob() throwing IOException");
  } catch (YarnRuntimeException e) {
    // Expected
  }
}
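Taken together, the three cases pin down getFullJob's error contract: an IOException from either getFileInfo() or loadJob() surfaces as an unchecked YarnRuntimeException, while a null HistoryFileInfo yields a null Job. A caller-side sketch of that contract, using the same names as the test:

Job job;
try {
  job = storage.getFullJob(jobId); // may wrap underlying I/O failures
} catch (YarnRuntimeException e) {
  // getFileInfo() or loadJob() threw an IOException underneath
  job = null;
}
if (job == null) {
  // either the lookup failed or no history file info exists for jobId
}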
Use of org.apache.hadoop.mapreduce.v2.hs.JobHistory in project hadoop by apache.
The class TestJobHistory, method testRefreshLoadedJobCache.
@Test
public void testRefreshLoadedJobCache() throws Exception {
  HistoryFileManager historyManager = mock(HistoryFileManager.class);
  jobHistory = spy(new JobHistory());
  doReturn(historyManager).when(jobHistory).createHistoryFileManager();
  Configuration conf = new Configuration();
  // Set the cache size to 2
  conf.setInt(JHAdminConfig.MR_HISTORY_LOADED_JOB_CACHE_SIZE, 2);
  jobHistory.init(conf);
  jobHistory.start();
  CachedHistoryStorage storage = spy((CachedHistoryStorage) jobHistory.getHistoryStorage());
  assertFalse(storage.getUseLoadedTasksCache());
  Job[] jobs = new Job[3];
  JobId[] jobIds = new JobId[3];
  for (int i = 0; i < 3; i++) {
    jobs[i] = mock(Job.class);
    jobIds[i] = mock(JobId.class);
    when(jobs[i].getID()).thenReturn(jobIds[i]);
  }
  HistoryFileInfo fileInfo = mock(HistoryFileInfo.class);
  when(historyManager.getFileInfo(any(JobId.class))).thenReturn(fileInfo);
  when(fileInfo.loadJob()).thenReturn(jobs[0]).thenReturn(jobs[1]).thenReturn(jobs[2]);
  // getFullJob will put the job in the cache if it isn't there
  for (int i = 0; i < 3; i++) {
    storage.getFullJob(jobs[i].getID());
  }
  Cache<JobId, Job> jobCache = storage.getLoadedJobCache();
  // Verify some jobs are stored in the cache; eviction order is hard to
  // predict across Guava versions.
  assertTrue(jobCache.size() > 0);
  // Set the cache size to 3
  conf.setInt(JHAdminConfig.MR_HISTORY_LOADED_JOB_CACHE_SIZE, 3);
  doReturn(conf).when(storage).createConf();
  when(fileInfo.loadJob()).thenReturn(jobs[0]).thenReturn(jobs[1]).thenReturn(jobs[2]);
  jobHistory.refreshLoadedJobCache();
  for (int i = 0; i < 3; i++) {
    storage.getFullJob(jobs[i].getID());
  }
  jobCache = storage.getLoadedJobCache();
  // Verify some jobs are stored in the cache; eviction order is hard to
  // predict across Guava versions.
  assertTrue(jobCache.size() > 0);
}
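Presumably (an assumption; CachedHistoryStorage's internals are not shown here) the refresh rebuilds the Guava cache from a freshly created Configuration, which is why the test intercepts createConf() and reloads all three jobs afterwards. An illustrative rebuild with Guava's CacheBuilder, not CachedHistoryStorage's actual code:

// Hypothetical sketch: rebuilding a loaded-job cache at the new size.
// Old entries are not carried over, matching the reload in the test.
Cache<JobId, Job> rebuildCache(int maxSize) {
  return CacheBuilder.newBuilder()
      .maximumSize(maxSize) // size read from the refreshed Configuration
      .build();
}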