Usage of org.apache.hadoop.mapreduce.v2.hs.CompletedJob in the Apache Hadoop project: class TestJobHistoryEntities, method testGetTaskAttemptCompletionEvent.
/**
 * Exercises assorted read-only accessors of {@link CompletedJob} against a
 * pre-recorded job history file (name, queue, progress, completion events,
 * diagnostics and ACLs).
 * @throws Exception if the history file cannot be loaded
 */
@Test(timeout = 30000)
public void testGetTaskAttemptCompletionEvent() throws Exception {
  HistoryFileInfo fileInfo = mock(HistoryFileInfo.class);
  when(fileInfo.getConfFile()).thenReturn(fullConfPath);
  completedJob = new CompletedJob(conf, jobId, fullHistoryPath, loadTasks,
      "user", fileInfo, jobAclsManager);

  TaskCompletionEvent[] mapEvents =
      completedJob.getMapAttemptCompletionEvents(0, 1000);
  assertEquals(10, completedJob.getMapAttemptCompletionEvents(0, 10).length);

  // Event ids must be non-decreasing across the returned array.
  int previousEventId = 0;
  for (TaskCompletionEvent event : mapEvents) {
    int id = event.getEventId();
    assertTrue(id >= previousEventId);
    previousEventId = id;
  }

  assertNull(completedJob.loadConfFile());
  // Job name recorded in the history file.
  assertEquals("Sleep job", completedJob.getName());
  // Queue name.
  assertEquals("default", completedJob.getQueueName());
  // A completed job reports full progress.
  assertEquals(1.0, completedJob.getProgress(), 0.001);
  // The history file holds 12 completion events in total.
  assertEquals(12, completedJob.getTaskAttemptCompletionEvents(0, 1000).length);
  // First 10 events.
  assertEquals(10, completedJob.getTaskAttemptCompletionEvents(0, 10).length);
  // Offset 5 with limit 10 leaves the remaining 7 events.
  assertEquals(7, completedJob.getTaskAttemptCompletionEvents(5, 10).length);
  // A single empty diagnostics entry means the job finished without errors.
  assertEquals(1, completedJob.getDiagnostics().size());
  assertEquals("", completedJob.getDiagnostics().get(0));
  assertEquals(0, completedJob.getJobACLs().size());
}
Usage of org.apache.hadoop.mapreduce.v2.hs.CompletedJob in the Apache Hadoop project: class TestJobHistoryEntities, method testCompletedJobWithDiagnostics.
/**
 * Verifies that the error text recorded in the parsed {@code JobInfo} of a
 * FAILED job surfaces as the diagnostics string of the job report.
 * @throws Exception if the mocked history parse fails unexpectedly
 */
@Test(timeout = 30000)
public void testCompletedJobWithDiagnostics() throws Exception {
  final String diagnosticText = "Job Diagnostics";

  // Spy a JobInfo that claims the job FAILED with the diagnostic text above.
  JobInfo failedJobInfo = spy(new JobInfo());
  when(failedJobInfo.getErrorInfo()).thenReturn(diagnosticText);
  when(failedJobInfo.getJobStatus()).thenReturn(JobState.FAILED.toString());
  when(failedJobInfo.getAMInfos())
      .thenReturn(Collections.<JobHistoryParser.AMInfo>emptyList());

  // Parser stub that hands back the canned JobInfo instead of reading a file.
  final JobHistoryParser stubParser = mock(JobHistoryParser.class);
  when(stubParser.parse()).thenReturn(failedJobInfo);

  HistoryFileInfo fileInfo = mock(HistoryFileInfo.class);
  when(fileInfo.getConfFile()).thenReturn(fullConfPath);
  when(fileInfo.getHistoryFile()).thenReturn(fullHistoryPath);

  // Subclass CompletedJob so it uses the stub parser.
  CompletedJob failedJob = new CompletedJob(conf, jobId, fullHistoryPath,
      loadTasks, "user", fileInfo, jobAclsManager) {
    @Override
    protected JobHistoryParser createJobHistoryParser(Path historyFileAbsolute)
        throws IOException {
      return stubParser;
    }
  };

  assertEquals(diagnosticText, failedJob.getReport().getDiagnostics());
}
Usage of org.apache.hadoop.mapreduce.v2.hs.CompletedJob in the Apache Hadoop project: class TestJobHistoryEntities, method testCompletedTaskAttempt.
/**
 * Checks the first map and the first reduce attempt of a completed job: both
 * must be SUCCEEDED and report the expected node-manager addresses. The
 * original in-line assertion blocks for the two attempts were verbatim
 * duplicates, so they are factored into {@link #verifySucceededAttempt}.
 * @throws Exception if the history file cannot be loaded
 */
@Test(timeout = 10000)
public void testCompletedTaskAttempt() throws Exception {
  HistoryFileInfo info = mock(HistoryFileInfo.class);
  when(info.getConfFile()).thenReturn(fullConfPath);
  completedJob = new CompletedJob(conf, jobId, fullHistoryPath, loadTasks,
      "user", info, jobAclsManager);

  TaskId mt1Id = MRBuilderUtils.newTaskId(jobId, 0, TaskType.MAP);
  TaskId rt1Id = MRBuilderUtils.newTaskId(jobId, 0, TaskType.REDUCE);
  TaskAttemptId mta1Id = MRBuilderUtils.newTaskAttemptId(mt1Id, 0);
  TaskAttemptId rta1Id = MRBuilderUtils.newTaskAttemptId(rt1Id, 0);

  // Both attempt kinds ran on the same node; verify each identically.
  verifySucceededAttempt(completedJob.getTask(mt1Id).getAttempt(mta1Id));
  verifySucceededAttempt(completedJob.getTask(rt1Id).getAttempt(rta1Id));
}

/**
 * Asserts that {@code attempt} succeeded and that both the attempt and its
 * report expose the expected container-manager ("localhost:45454") and HTTP
 * ("localhost:9999") addresses from the history fixture.
 */
private static void verifySucceededAttempt(TaskAttempt attempt) {
  assertEquals(TaskAttemptState.SUCCEEDED, attempt.getState());
  assertEquals("localhost:45454", attempt.getAssignedContainerMgrAddress());
  assertEquals("localhost:9999", attempt.getNodeHttpAddress());
  TaskAttemptReport report = attempt.getReport();
  assertEquals(TaskAttemptState.SUCCEEDED, report.getTaskAttemptState());
  assertEquals("localhost", report.getNodeManagerHost());
  assertEquals(45454, report.getNodeManagerPort());
  assertEquals(9999, report.getNodeManagerHttpPort());
}
Usage of org.apache.hadoop.mapreduce.v2.hs.CompletedJob in the Apache Hadoop project: class TestJobHistoryEntities, method testCopmletedJobReportWithZeroTasks (sic — the method name itself contains the typo "Copmleted").
/**
 * Verifies the report of a completed job that had zero reduce tasks: reduce
 * counts are 0 and the reduce progress is 1.0 rather than NaN.
 * NOTE: the method name's "Copmleted" typo is kept — renaming a public test
 * method is out of scope for a behavior-preserving rewrite.
 * @throws Exception if the history file cannot be loaded
 */
@Test(timeout = 100000)
public void testCopmletedJobReportWithZeroTasks() throws Exception {
  HistoryFileInfo fileInfo = mock(HistoryFileInfo.class);
  when(fileInfo.getConfFile()).thenReturn(fullConfPath);
  when(fileInfo.getHistoryFile()).thenReturn(fullHistoryPathZeroReduces);
  completedJob = new CompletedJob(conf, jobId, fullHistoryPathZeroReduces,
      loadTasks, "user", fileInfo, jobAclsManager);

  JobReport report = completedJob.getReport();
  // Both total and completed reduce counts must be zero.
  assertEquals(0, completedJob.getTotalReduces());
  assertEquals(0, completedJob.getCompletedReduces());
  // With no reduces, reduce progress must be reported as 1.0, not NaN.
  assertEquals(1.0, report.getReduceProgress(), 0.001);
  assertEquals(fullHistoryPathZeroReduces.toString(), report.getHistoryFile());
}
Usage of org.apache.hadoop.mapreduce.v2.hs.CompletedJob in the Apache Hadoop project: class TestJobHistoryEntities, method testCompletedTask.
/**
 * Verifies the task maps of a completed job: 10 map and 2 reduce tasks are
 * present, and the first task of each kind is SUCCEEDED with exactly one
 * attempt and a report matching its id.
 * @throws Exception if the history file cannot be loaded
 */
@Test(timeout = 10000)
public void testCompletedTask() throws Exception {
  HistoryFileInfo fileInfo = mock(HistoryFileInfo.class);
  when(fileInfo.getConfFile()).thenReturn(fullConfPath);
  completedJob = new CompletedJob(conf, jobId, fullHistoryPath, loadTasks,
      "user", fileInfo, jobAclsManager);

  TaskId firstMapId = MRBuilderUtils.newTaskId(jobId, 0, TaskType.MAP);
  TaskId firstReduceId = MRBuilderUtils.newTaskId(jobId, 0, TaskType.REDUCE);
  Map<TaskId, Task> mapTasks = completedJob.getTasks(TaskType.MAP);
  Map<TaskId, Task> reduceTasks = completedJob.getTasks(TaskType.REDUCE);

  // The fixture job ran 10 maps and 2 reduces.
  assertEquals(10, mapTasks.size());
  assertEquals(2, reduceTasks.size());

  Task firstMap = mapTasks.get(firstMapId);
  assertEquals(1, firstMap.getAttempts().size());
  assertEquals(TaskState.SUCCEEDED, firstMap.getState());
  TaskReport firstMapReport = firstMap.getReport();
  assertEquals(TaskState.SUCCEEDED, firstMapReport.getTaskState());
  assertEquals(firstMapId, firstMapReport.getTaskId());

  Task firstReduce = reduceTasks.get(firstReduceId);
  assertEquals(1, firstReduce.getAttempts().size());
  assertEquals(TaskState.SUCCEEDED, firstReduce.getState());
  TaskReport firstReduceReport = firstReduce.getReport();
  assertEquals(TaskState.SUCCEEDED, firstReduceReport.getTaskState());
  assertEquals(firstReduceId, firstReduceReport.getTaskId());
}
Aggregations