Example 11 with TaskAttemptReport

Use of org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptReport in project hadoop by apache, in the class MRApp, method waitForInternalState.

public void waitForInternalState(TaskAttemptImpl attempt, TaskAttemptStateInternal finalState) throws Exception {
    int timeoutSecs = 0;
    TaskAttemptReport report = attempt.getReport();
    TaskAttemptStateInternal iState = attempt.getInternalState();
    // poll every 500 ms, for at most 20 iterations (~10 seconds), until the attempt reaches the expected internal state
    while (!finalState.equals(iState) && timeoutSecs++ < 20) {
        System.out.println("TaskAttempt Internal State is : " + iState + " Waiting for Internal state : " + finalState + "   progress : " + report.getProgress());
        Thread.sleep(500);
        report = attempt.getReport();
        iState = attempt.getInternalState();
    }
    System.out.println("TaskAttempt Internal State is : " + iState);
    Assert.assertEquals("TaskAttempt Internal state is not correct (timedout)", finalState, iState);
}
Also used : TaskAttemptStateInternal(org.apache.hadoop.mapreduce.v2.app.job.TaskAttemptStateInternal) TaskAttemptReport(org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptReport)
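
A minimal usage sketch, not taken from the Hadoop sources: it assumes an MRApp-based test harness `app` and a `job` obtained from app.submit(conf), mirroring the other examples on this page, and casts the attempt to TaskAttemptImpl because waitForInternalState is declared against the implementation class.

// illustrative sketch, assuming `app` and `job` as described above
Task task = job.getTasks().values().iterator().next();
TaskAttempt attempt = task.getAttempts().values().iterator().next();
// the internal-state API is only exposed on TaskAttemptImpl, hence the cast
app.waitForInternalState((TaskAttemptImpl) attempt, TaskAttemptStateInternal.RUNNING);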

Example 12 with TaskAttemptReport

Use of org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptReport in project hadoop by apache, in the class TestMRClientService, method test.

@Test
public void test() throws Exception {
    MRAppWithClientService app = new MRAppWithClientService(1, 0, false);
    Configuration conf = new Configuration();
    Job job = app.submit(conf);
    app.waitForState(job, JobState.RUNNING);
    Assert.assertEquals("Num tasks not correct", 1, job.getTasks().size());
    Iterator<Task> it = job.getTasks().values().iterator();
    Task task = it.next();
    app.waitForState(task, TaskState.RUNNING);
    TaskAttempt attempt = task.getAttempts().values().iterator().next();
    app.waitForState(attempt, TaskAttemptState.RUNNING);
    // send the diagnostic
    String diagnostic1 = "Diagnostic1";
    String diagnostic2 = "Diagnostic2";
    app.getContext().getEventHandler().handle(new TaskAttemptDiagnosticsUpdateEvent(attempt.getID(), diagnostic1));
    // build a status update for the running attempt
    TaskAttemptStatus taskAttemptStatus = new TaskAttemptStatus();
    taskAttemptStatus.id = attempt.getID();
    taskAttemptStatus.progress = 0.5f;
    taskAttemptStatus.stateString = "RUNNING";
    taskAttemptStatus.taskState = TaskAttemptState.RUNNING;
    taskAttemptStatus.phase = Phase.MAP;
    // send the status update
    app.getContext().getEventHandler().handle(new TaskAttemptStatusUpdateEvent(attempt.getID(), taskAttemptStatus));
    // verify that all objects are fully populated by invoking RPCs
    YarnRPC rpc = YarnRPC.create(conf);
    MRClientProtocol proxy = (MRClientProtocol) rpc.getProxy(MRClientProtocol.class, app.clientService.getBindAddress(), conf);
    GetCountersRequest gcRequest = recordFactory.newRecordInstance(GetCountersRequest.class);
    gcRequest.setJobId(job.getID());
    Assert.assertNotNull("Counters is null", proxy.getCounters(gcRequest).getCounters());
    GetJobReportRequest gjrRequest = recordFactory.newRecordInstance(GetJobReportRequest.class);
    gjrRequest.setJobId(job.getID());
    JobReport jr = proxy.getJobReport(gjrRequest).getJobReport();
    verifyJobReport(jr);
    GetTaskAttemptCompletionEventsRequest gtaceRequest = recordFactory.newRecordInstance(GetTaskAttemptCompletionEventsRequest.class);
    gtaceRequest.setJobId(job.getID());
    gtaceRequest.setFromEventId(0);
    gtaceRequest.setMaxEvents(10);
    Assert.assertNotNull("TaskCompletionEvents is null", proxy.getTaskAttemptCompletionEvents(gtaceRequest).getCompletionEventList());
    GetDiagnosticsRequest gdRequest = recordFactory.newRecordInstance(GetDiagnosticsRequest.class);
    gdRequest.setTaskAttemptId(attempt.getID());
    Assert.assertNotNull("Diagnostics is null", proxy.getDiagnostics(gdRequest).getDiagnosticsList());
    GetTaskAttemptReportRequest gtarRequest = recordFactory.newRecordInstance(GetTaskAttemptReportRequest.class);
    gtarRequest.setTaskAttemptId(attempt.getID());
    TaskAttemptReport tar = proxy.getTaskAttemptReport(gtarRequest).getTaskAttemptReport();
    verifyTaskAttemptReport(tar);
    GetTaskReportRequest gtrRequest = recordFactory.newRecordInstance(GetTaskReportRequest.class);
    gtrRequest.setTaskId(task.getID());
    Assert.assertNotNull("TaskReport is null", proxy.getTaskReport(gtrRequest).getTaskReport());
    GetTaskReportsRequest gtreportsRequest = recordFactory.newRecordInstance(GetTaskReportsRequest.class);
    gtreportsRequest.setJobId(job.getID());
    gtreportsRequest.setTaskType(TaskType.MAP);
    Assert.assertNotNull("TaskReports for map is null", proxy.getTaskReports(gtreportsRequest).getTaskReportList());
    gtreportsRequest = recordFactory.newRecordInstance(GetTaskReportsRequest.class);
    gtreportsRequest.setJobId(job.getID());
    gtreportsRequest.setTaskType(TaskType.REDUCE);
    Assert.assertNotNull("TaskReports for reduce is null", proxy.getTaskReports(gtreportsRequest).getTaskReportList());
    List<String> diag = proxy.getDiagnostics(gdRequest).getDiagnosticsList();
    Assert.assertEquals("Num diagnostics not correct", 1, diag.size());
    Assert.assertEquals("Diag 1 not correct", diagnostic1, diag.get(0).toString());
    TaskReport taskReport = proxy.getTaskReport(gtrRequest).getTaskReport();
    Assert.assertEquals("Num diagnostics not correct", 1, taskReport.getDiagnosticsCount());
    // send the done signal to the attempt
    app.getContext().getEventHandler().handle(new TaskAttemptEvent(task.getAttempts().values().iterator().next().getID(), TaskAttemptEventType.TA_DONE));
    app.waitForState(job, JobState.SUCCEEDED);
    // an invalid job id should cause getTaskReports to throw an IOException
    gtreportsRequest = recordFactory.newRecordInstance(GetTaskReportsRequest.class);
    gtreportsRequest.setJobId(TypeConverter.toYarn(JobID.forName("job_1415730144495_0001")));
    gtreportsRequest.setTaskType(TaskType.REDUCE);
    try {
        proxy.getTaskReports(gtreportsRequest);
        fail("IOException not thrown for invalid job id");
    } catch (IOException e) {
        // Expected
    }
}
Also used : Task(org.apache.hadoop.mapreduce.v2.app.job.Task) Configuration(org.apache.hadoop.conf.Configuration) GetTaskAttemptReportRequest(org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskAttemptReportRequest) MRClientProtocol(org.apache.hadoop.mapreduce.v2.api.MRClientProtocol) JobReport(org.apache.hadoop.mapreduce.v2.api.records.JobReport) GetTaskAttemptCompletionEventsRequest(org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskAttemptCompletionEventsRequest) GetTaskReportRequest(org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskReportRequest) TaskAttempt(org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt) GetDiagnosticsRequest(org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetDiagnosticsRequest) Job(org.apache.hadoop.mapreduce.v2.app.job.Job) GetCountersRequest(org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetCountersRequest) TaskReport(org.apache.hadoop.mapreduce.v2.api.records.TaskReport) TaskAttemptDiagnosticsUpdateEvent(org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptDiagnosticsUpdateEvent) TaskAttemptStatus(org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptStatusUpdateEvent.TaskAttemptStatus) TaskAttemptEvent(org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent) YarnRPC(org.apache.hadoop.yarn.ipc.YarnRPC) IOException(java.io.IOException) GetJobReportRequest(org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetJobReportRequest) TaskAttemptStatusUpdateEvent(org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptStatusUpdateEvent) TaskAttemptReport(org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptReport) GetTaskReportsRequest(org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskReportsRequest) Test(org.junit.Test)
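
The verifyTaskAttemptReport helper invoked above is not reproduced on this page. As a hedged illustration only, the hypothetical check below shows the kind of assertions such a helper could make, using only accessors that appear elsewhere in these examples and the values set in the status update sent earlier in the test.

// hypothetical helper, not the test's actual verifyTaskAttemptReport
private static void checkAttemptReport(TaskAttemptReport tar) {
    Assert.assertNotNull("TaskAttemptReport is null", tar);
    // the status update set the attempt state to RUNNING
    Assert.assertEquals("Attempt state not correct", TaskAttemptState.RUNNING, tar.getTaskAttemptState());
    // the status update set progress to 0.5f
    Assert.assertEquals("Progress not correct", 0.5, tar.getProgress(), 0.001);
}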

Example 13 with TaskAttemptReport

Use of org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptReport in project hadoop by apache, in the class TestTaskAttempt, method testMRAppHistory.

private void testMRAppHistory(MRApp app) throws Exception {
    Configuration conf = new Configuration();
    Job job = app.submit(conf);
    app.waitForState(job, JobState.FAILED);
    Map<TaskId, Task> tasks = job.getTasks();
    Assert.assertEquals("Num tasks is not correct", 1, tasks.size());
    Task task = tasks.values().iterator().next();
    Assert.assertEquals("Task state not correct", TaskState.FAILED, task.getReport().getTaskState());
    Map<TaskAttemptId, TaskAttempt> attempts = tasks.values().iterator().next().getAttempts();
    Assert.assertEquals("Num attempts is not correct", 4, attempts.size());
    Iterator<TaskAttempt> it = attempts.values().iterator();
    TaskAttemptReport report = it.next().getReport();
    Assert.assertEquals("Attempt state not correct", TaskAttemptState.FAILED, report.getTaskAttemptState());
    Assert.assertEquals("Diagnostic Information is not Correct", "Test Diagnostic Event", report.getDiagnosticInfo());
    report = it.next().getReport();
    Assert.assertEquals("Attempt state not correct", TaskAttemptState.FAILED, report.getTaskAttemptState());
}
Also used : Task(org.apache.hadoop.mapreduce.v2.app.job.Task) TaskId(org.apache.hadoop.mapreduce.v2.api.records.TaskId) TaskAttemptReport(org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptReport) Configuration(org.apache.hadoop.conf.Configuration) YarnConfiguration(org.apache.hadoop.yarn.conf.YarnConfiguration) TaskAttemptId(org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId) TaskAttempt(org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt) Job(org.apache.hadoop.mapreduce.v2.app.job.Job)
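
The "Test Diagnostic Event" string asserted above has to be injected by the MRApp subclass under test before the attempt fails. A hedged sketch of that injection is shown below; it reuses only the event type already seen in the TestMRClientService example, and the `attemptID` variable and surrounding subclass are assumed.

// illustrative sketch inside a hypothetical MRApp subclass; `attemptID` is assumed
// to be the TaskAttemptId of the launched attempt
getContext().getEventHandler().handle(
    new TaskAttemptDiagnosticsUpdateEvent(attemptID, "Test Diagnostic Event"));
// a follow-up failure event (not shown here) then drives the attempt to FAILED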

Aggregations

TaskAttemptReport (org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptReport) 12
TaskAttempt (org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt) 7
Task (org.apache.hadoop.mapreduce.v2.app.job.Task) 6
TaskAttemptId (org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId) 5
Job (org.apache.hadoop.mapreduce.v2.app.job.Job) 5
Test (org.junit.Test) 4
Configuration (org.apache.hadoop.conf.Configuration) 3
JobReport (org.apache.hadoop.mapreduce.v2.api.records.JobReport) 3
TaskId (org.apache.hadoop.mapreduce.v2.api.records.TaskId) 3
TaskReport (org.apache.hadoop.mapreduce.v2.api.records.TaskReport) 3
ApplicationAttemptId (org.apache.hadoop.yarn.api.records.ApplicationAttemptId) 3
ContainerId (org.apache.hadoop.yarn.api.records.ContainerId) 3
IOException (java.io.IOException) 2
PrintWriter (java.io.PrintWriter) 2
HashMap (java.util.HashMap) 2
GetJobReportRequest (org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetJobReportRequest) 2
GetTaskAttemptReportRequest (org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskAttemptReportRequest) 2
TaskAttemptState (org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptState) 2
TaskAttemptIdPBImpl (org.apache.hadoop.mapreduce.v2.api.records.impl.pb.TaskAttemptIdPBImpl) 2
AppContext (org.apache.hadoop.mapreduce.v2.app.AppContext) 2