Search in sources:

Example 1 with CompletedJob

Use of org.apache.hadoop.mapreduce.v2.hs.CompletedJob in the Apache Hadoop project.

From the class TestJobHistoryEntities, method testGetTaskAttemptCompletionEvent:

/**
 * Simple test of some methods of CompletedJob.
 * @throws Exception
 */
@Test(timeout = 30000)
public void testGetTaskAttemptCompletionEvent() throws Exception {
    HistoryFileInfo info = mock(HistoryFileInfo.class);
    when(info.getConfFile()).thenReturn(fullConfPath);
    completedJob = new CompletedJob(conf, jobId, fullHistoryPath, loadTasks, "user", info, jobAclsManager);
    TaskCompletionEvent[] events = completedJob.getMapAttemptCompletionEvents(0, 1000);
    assertEquals(10, completedJob.getMapAttemptCompletionEvents(0, 10).length);
    int currentEventId = 0;
    for (TaskCompletionEvent taskAttemptCompletionEvent : events) {
        int eventId = taskAttemptCompletionEvent.getEventId();
        assertTrue(eventId >= currentEventId);
        currentEventId = eventId;
    }
    assertNull(completedJob.loadConfFile());
    // job name
    assertEquals("Sleep job", completedJob.getName());
    // queue name
    assertEquals("default", completedJob.getQueueName());
    // progress
    assertEquals(1.0, completedJob.getProgress(), 0.001);
    // 12 completion events in total
    assertEquals(12, completedJob.getTaskAttemptCompletionEvents(0, 1000).length);
    // the first 10 events
    assertEquals(10, completedJob.getTaskAttemptCompletionEvents(0, 10).length);
    // up to 10 events starting at index 5: 7 remain
    assertEquals(7, completedJob.getTaskAttemptCompletionEvents(5, 10).length);
    // no errors: a single, empty diagnostics entry
    assertEquals(1, completedJob.getDiagnostics().size());
    assertEquals("", completedJob.getDiagnostics().get(0));
    assertEquals(0, completedJob.getJobACLs().size());
}
Also used: HistoryFileInfo (org.apache.hadoop.mapreduce.v2.hs.HistoryFileManager.HistoryFileInfo), TaskCompletionEvent (org.apache.hadoop.mapred.TaskCompletionEvent), Test (org.junit.Test)
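
The test above relies on fixture fields (conf, jobId, fullConfPath, fullHistoryPath, loadTasks, jobAclsManager) that TestJobHistoryEntities initializes elsewhere. The following is a minimal sketch of what such a fixture could look like; the class name, job-id values, and file paths are placeholders, not the actual resources bundled with the Hadoop tests.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.JobACLsManager;
import org.apache.hadoop.mapreduce.v2.api.records.JobId;
import org.apache.hadoop.mapreduce.v2.util.MRBuilderUtils;

// Hypothetical fixture sketch: field names mirror those used in the examples,
// but the job id and the history/conf file locations are placeholders.
public class JobHistoryFixtureSketch {
    private final Configuration conf = new Configuration();
    private final JobACLsManager jobAclsManager = new JobACLsManager(conf);
    private final JobId jobId = MRBuilderUtils.newJobId(System.currentTimeMillis(), 1, 1);
    private final boolean loadTasks = true;
    // Placeholder locations of a serialized job history (.jhist) file and its job conf.
    private final Path fullHistoryPath = new Path("/tmp/job-history/job_0001.jhist");
    private final Path fullConfPath = new Path("/tmp/job-history/job_0001_conf.xml");
}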

Example 2 with CompletedJob

Use of org.apache.hadoop.mapreduce.v2.hs.CompletedJob in the Apache Hadoop project.

From the class TestJobHistoryEntities, method testCompletedJobWithDiagnostics:

@Test(timeout = 30000)
public void testCompletedJobWithDiagnostics() throws Exception {
    final String jobError = "Job Diagnostics";
    JobInfo jobInfo = spy(new JobInfo());
    when(jobInfo.getErrorInfo()).thenReturn(jobError);
    when(jobInfo.getJobStatus()).thenReturn(JobState.FAILED.toString());
    when(jobInfo.getAMInfos()).thenReturn(Collections.<JobHistoryParser.AMInfo>emptyList());
    final JobHistoryParser mockParser = mock(JobHistoryParser.class);
    when(mockParser.parse()).thenReturn(jobInfo);
    HistoryFileInfo info = mock(HistoryFileInfo.class);
    when(info.getConfFile()).thenReturn(fullConfPath);
    when(info.getHistoryFile()).thenReturn(fullHistoryPath);
    CompletedJob job = new CompletedJob(conf, jobId, fullHistoryPath, loadTasks, "user", info, jobAclsManager) {

        @Override
        protected JobHistoryParser createJobHistoryParser(Path historyFileAbsolute) throws IOException {
            return mockParser;
        }
    };
    assertEquals(jobError, job.getReport().getDiagnostics());
}
Also used: Path (org.apache.hadoop.fs.Path), HistoryFileInfo (org.apache.hadoop.mapreduce.v2.hs.HistoryFileManager.HistoryFileInfo), JobHistoryParser (org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser), JobInfo (org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo), Test (org.junit.Test)
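
Example 2 works because CompletedJob creates its parser through a protected factory method, so the test can subclass it and hand back a mocked JobHistoryParser instead of reading a real .jhist file. Once the job is built, callers read the propagated error through the generic Job interface; the helper below is a hypothetical illustration of that read path, not part of the Hadoop API.

import org.apache.hadoop.mapreduce.v2.api.records.JobReport;
import org.apache.hadoop.mapreduce.v2.app.job.Job;

// Hypothetical helper showing how a caller would surface a failed job's
// diagnostics, assuming the job was loaded the same way as in the example.
final class DiagnosticsSketch {
    static String diagnosticsOf(Job job) {
        JobReport report = job.getReport();
        String diagnostics = report.getDiagnostics();
        return diagnostics == null ? "" : diagnostics;
    }
}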

Example 3 with CompletedJob

Use of org.apache.hadoop.mapreduce.v2.hs.CompletedJob in the Apache Hadoop project.

From the class TestJobHistoryEntities, method testCompletedTaskAttempt:

@Test(timeout = 10000)
public void testCompletedTaskAttempt() throws Exception {
    HistoryFileInfo info = mock(HistoryFileInfo.class);
    when(info.getConfFile()).thenReturn(fullConfPath);
    completedJob = new CompletedJob(conf, jobId, fullHistoryPath, loadTasks, "user", info, jobAclsManager);
    TaskId mt1Id = MRBuilderUtils.newTaskId(jobId, 0, TaskType.MAP);
    TaskId rt1Id = MRBuilderUtils.newTaskId(jobId, 0, TaskType.REDUCE);
    TaskAttemptId mta1Id = MRBuilderUtils.newTaskAttemptId(mt1Id, 0);
    TaskAttemptId rta1Id = MRBuilderUtils.newTaskAttemptId(rt1Id, 0);
    Task mt1 = completedJob.getTask(mt1Id);
    Task rt1 = completedJob.getTask(rt1Id);
    TaskAttempt mta1 = mt1.getAttempt(mta1Id);
    assertEquals(TaskAttemptState.SUCCEEDED, mta1.getState());
    assertEquals("localhost:45454", mta1.getAssignedContainerMgrAddress());
    assertEquals("localhost:9999", mta1.getNodeHttpAddress());
    TaskAttemptReport mta1Report = mta1.getReport();
    assertEquals(TaskAttemptState.SUCCEEDED, mta1Report.getTaskAttemptState());
    assertEquals("localhost", mta1Report.getNodeManagerHost());
    assertEquals(45454, mta1Report.getNodeManagerPort());
    assertEquals(9999, mta1Report.getNodeManagerHttpPort());
    TaskAttempt rta1 = rt1.getAttempt(rta1Id);
    assertEquals(TaskAttemptState.SUCCEEDED, rta1.getState());
    assertEquals("localhost:45454", rta1.getAssignedContainerMgrAddress());
    assertEquals("localhost:9999", rta1.getNodeHttpAddress());
    TaskAttemptReport rta1Report = rta1.getReport();
    assertEquals(TaskAttemptState.SUCCEEDED, rta1Report.getTaskAttemptState());
    assertEquals("localhost", rta1Report.getNodeManagerHost());
    assertEquals(45454, rta1Report.getNodeManagerPort());
    assertEquals(9999, rta1Report.getNodeManagerHttpPort());
}
Also used: HistoryFileInfo (org.apache.hadoop.mapreduce.v2.hs.HistoryFileManager.HistoryFileInfo), Task (org.apache.hadoop.mapreduce.v2.app.job.Task), TaskId (org.apache.hadoop.mapreduce.v2.api.records.TaskId), TaskAttemptReport (org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptReport), TaskAttemptId (org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId), TaskAttempt (org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt), Test (org.junit.Test)
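
The same Task and TaskAttempt accessors used above can be applied generically rather than to hand-built ids. The sketch below is a hypothetical walk over every attempt of a loaded job, printing the state and node address fields the test asserts on; it assumes only the interfaces exercised in the example.

import org.apache.hadoop.mapreduce.v2.app.job.Job;
import org.apache.hadoop.mapreduce.v2.app.job.Task;
import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;

// Hypothetical traversal of a completed job's attempts, printing where each ran.
final class AttemptLocationSketch {
    static void printAttemptLocations(Job job) {
        for (Task task : job.getTasks().values()) {
            for (TaskAttempt attempt : task.getAttempts().values()) {
                System.out.println(attempt.getID() + " state=" + attempt.getState()
                        + " node=" + attempt.getNodeHttpAddress());
            }
        }
    }
}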

Example 4 with CompletedJob

Use of org.apache.hadoop.mapreduce.v2.hs.CompletedJob in the Apache Hadoop project.

From the class TestJobHistoryEntities, method testCopmletedJobReportWithZeroTasks:

@Test(timeout = 100000)
public void testCopmletedJobReportWithZeroTasks() throws Exception {
    HistoryFileInfo info = mock(HistoryFileInfo.class);
    when(info.getConfFile()).thenReturn(fullConfPath);
    when(info.getHistoryFile()).thenReturn(fullHistoryPathZeroReduces);
    completedJob = new CompletedJob(conf, jobId, fullHistoryPathZeroReduces, loadTasks, "user", info, jobAclsManager);
    JobReport jobReport = completedJob.getReport();
    // Make sure that the number of reduces (completed and total) is zero.
    assertEquals(0, completedJob.getTotalReduces());
    assertEquals(0, completedJob.getCompletedReduces());
    // Verify that the reduce progress is 1.0 (not NaN)
    assertEquals(1.0, jobReport.getReduceProgress(), 0.001);
    assertEquals(fullHistoryPathZeroReduces.toString(), jobReport.getHistoryFile());
}
Also used: HistoryFileInfo (org.apache.hadoop.mapreduce.v2.hs.HistoryFileManager.HistoryFileInfo), JobReport (org.apache.hadoop.mapreduce.v2.api.records.JobReport), Test (org.junit.Test)
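
The assertion that reduce progress is 1.0 for a job with zero reduces guards against a naive completed/total division, which would yield 0/0 = NaN. The snippet below sketches that guard in isolation; it illustrates the behaviour the test verifies and is not the actual CompletedJob implementation.

// Illustrative guard for the zero-task case: report an empty task set as
// fully complete instead of dividing by zero.
final class ProgressSketch {
    static float reduceProgress(int completedReduces, int totalReduces) {
        return totalReduces == 0 ? 1.0f : (float) completedReduces / totalReduces;
    }
}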

Example 5 with CompletedJob

Use of org.apache.hadoop.mapreduce.v2.hs.CompletedJob in the Apache Hadoop project.

From the class TestJobHistoryEntities, method testCompletedTask:

@Test(timeout = 10000)
public void testCompletedTask() throws Exception {
    HistoryFileInfo info = mock(HistoryFileInfo.class);
    when(info.getConfFile()).thenReturn(fullConfPath);
    completedJob = new CompletedJob(conf, jobId, fullHistoryPath, loadTasks, "user", info, jobAclsManager);
    TaskId mt1Id = MRBuilderUtils.newTaskId(jobId, 0, TaskType.MAP);
    TaskId rt1Id = MRBuilderUtils.newTaskId(jobId, 0, TaskType.REDUCE);
    Map<TaskId, Task> mapTasks = completedJob.getTasks(TaskType.MAP);
    Map<TaskId, Task> reduceTasks = completedJob.getTasks(TaskType.REDUCE);
    assertEquals(10, mapTasks.size());
    assertEquals(2, reduceTasks.size());
    Task mt1 = mapTasks.get(mt1Id);
    assertEquals(1, mt1.getAttempts().size());
    assertEquals(TaskState.SUCCEEDED, mt1.getState());
    TaskReport mt1Report = mt1.getReport();
    assertEquals(TaskState.SUCCEEDED, mt1Report.getTaskState());
    assertEquals(mt1Id, mt1Report.getTaskId());
    Task rt1 = reduceTasks.get(rt1Id);
    assertEquals(1, rt1.getAttempts().size());
    assertEquals(TaskState.SUCCEEDED, rt1.getState());
    TaskReport rt1Report = rt1.getReport();
    assertEquals(TaskState.SUCCEEDED, rt1Report.getTaskState());
    assertEquals(rt1Id, rt1Report.getTaskId());
}
Also used: HistoryFileInfo (org.apache.hadoop.mapreduce.v2.hs.HistoryFileManager.HistoryFileInfo), Task (org.apache.hadoop.mapreduce.v2.app.job.Task), TaskId (org.apache.hadoop.mapreduce.v2.api.records.TaskId), TaskReport (org.apache.hadoop.mapreduce.v2.api.records.TaskReport), Test (org.junit.Test)
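
Beyond spot-checking individual tasks, the same Job#getTasks(TaskType) and Task#getState() calls can summarize a whole job. The sketch below is a hypothetical helper that counts tasks by state for one task type; only the accessors shown in the example are assumed.

import java.util.EnumMap;
import java.util.Map;
import org.apache.hadoop.mapreduce.v2.api.records.TaskState;
import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
import org.apache.hadoop.mapreduce.v2.app.job.Job;
import org.apache.hadoop.mapreduce.v2.app.job.Task;

// Hypothetical summary: count a completed job's tasks by state for one TaskType.
final class TaskStateSummarySketch {
    static Map<TaskState, Integer> countByState(Job job, TaskType type) {
        Map<TaskState, Integer> counts = new EnumMap<>(TaskState.class);
        for (Task task : job.getTasks(type).values()) {
            counts.merge(task.getState(), 1, Integer::sum);
        }
        return counts;
    }
}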

Aggregations

HistoryFileInfo (org.apache.hadoop.mapreduce.v2.hs.HistoryFileManager.HistoryFileInfo): 9 usages
Test (org.junit.Test): 9 usages
Path (org.apache.hadoop.fs.Path): 4 usages
Configuration (org.apache.hadoop.conf.Configuration): 3 usages
HdfsConfiguration (org.apache.hadoop.hdfs.HdfsConfiguration): 2 usages
JobReport (org.apache.hadoop.mapreduce.v2.api.records.JobReport): 2 usages
TaskId (org.apache.hadoop.mapreduce.v2.api.records.TaskId): 2 usages
Job (org.apache.hadoop.mapreduce.v2.app.job.Job): 2 usages
Task (org.apache.hadoop.mapreduce.v2.app.job.Task): 2 usages
JobIndexInfo (org.apache.hadoop.mapreduce.v2.jobhistory.JobIndexInfo): 2 usages
YarnConfiguration (org.apache.hadoop.yarn.conf.YarnConfiguration): 2 usages
JobACLsManager (org.apache.hadoop.mapred.JobACLsManager): 1 usage
TaskCompletionEvent (org.apache.hadoop.mapred.TaskCompletionEvent): 1 usage
JobHistoryParser (org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser): 1 usage
JobInfo (org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo): 1 usage
JobId (org.apache.hadoop.mapreduce.v2.api.records.JobId): 1 usage
TaskAttemptId (org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId): 1 usage
TaskAttemptReport (org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptReport): 1 usage
TaskReport (org.apache.hadoop.mapreduce.v2.api.records.TaskReport): 1 usage
TaskAttempt (org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt): 1 usage