Search in sources:

Example 1 with GetTaskReportResponse

Use of org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskReportResponse in project hadoop by apache.

The class NotRunningJob, method getTaskReport.

@Override
public GetTaskReportResponse getTaskReport(GetTaskReportRequest request) throws IOException {
    GetTaskReportResponse resp = recordFactory.newRecordInstance(GetTaskReportResponse.class);
    TaskReport report = recordFactory.newRecordInstance(TaskReport.class);
    report.setTaskId(request.getTaskId());
    report.setTaskState(TaskState.NEW);
    Counters counters = recordFactory.newRecordInstance(Counters.class);
    counters.addAllCounterGroups(new HashMap<String, CounterGroup>());
    report.setCounters(counters);
    report.addAllRunningAttempts(new ArrayList<TaskAttemptId>());
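    // Note: the stub report built above is never set on resp; the response is returned without a task report.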
    return resp;
}
Also used: TaskReport (org.apache.hadoop.mapreduce.v2.api.records.TaskReport), TaskAttemptId (org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId), GetTaskReportResponse (org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskReportResponse), CounterGroup (org.apache.hadoop.mapreduce.v2.api.records.CounterGroup), Counters (org.apache.hadoop.mapreduce.v2.api.records.Counters)
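
For context, here is a minimal client-side sketch (not taken from either project) of the request/response pattern these examples rely on: a GetTaskReportRequest is built with the record factory and sent through MRClientProtocol, and the TaskReport is read from the response. The protocol and taskId parameters are assumed to be obtained elsewhere, for example from a history server's client handler as in Example 2 below.

import java.io.IOException;
import org.apache.hadoop.mapreduce.v2.api.MRClientProtocol;
import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskReportRequest;
import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskReportResponse;
import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
import org.apache.hadoop.mapreduce.v2.api.records.TaskReport;
import org.apache.hadoop.yarn.factories.RecordFactory;
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;

public class TaskReportClientSketch {

    // Hypothetical helper: fetch the TaskReport for one task over MRClientProtocol.
    public static TaskReport fetchTaskReport(MRClientProtocol protocol, TaskId taskId)
            throws IOException {
        RecordFactory recordFactory = RecordFactoryProvider.getRecordFactory(null);
        // Build the request record and attach the fully qualified task id.
        GetTaskReportRequest request =
                recordFactory.newRecordInstance(GetTaskReportRequest.class);
        request.setTaskId(taskId);
        // The response wraps a single TaskReport (state, progress, counters, diagnostics).
        GetTaskReportResponse response = protocol.getTaskReport(request);
        return response.getTaskReport();
    }
}

The same recordFactory.newRecordInstance(...) pattern is used for every protocol record in these examples, both on the client side (requests) and the server side (responses).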

Example 2 with GetTaskReportResponse

Use of org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskReportResponse in project hadoop by apache.

The class TestJobHistoryServer, method testReports.

// Test reports of JobHistoryServer. The history server should get log files from MRApp and read them.
@Test(timeout = 50000)
public void testReports() throws Exception {
    Configuration config = new Configuration();
    config.setClass(CommonConfigurationKeysPublic.NET_TOPOLOGY_NODE_SWITCH_MAPPING_IMPL_KEY, MyResolver.class, DNSToSwitchMapping.class);
    RackResolver.init(config);
    MRApp app = new MRAppWithHistory(1, 1, true, this.getClass().getName(), true);
    app.submit(config);
    Job job = app.getContext().getAllJobs().values().iterator().next();
    app.waitForState(job, JobState.SUCCEEDED);
    historyServer = new JobHistoryServer();
    historyServer.init(config);
    historyServer.start();
    // search for the JobHistory service
    JobHistory jobHistory = null;
    for (Service service : historyServer.getServices()) {
        if (service instanceof JobHistory) {
            jobHistory = (JobHistory) service;
        }
    }
    Map<JobId, Job> jobs = jobHistory.getAllJobs();
    assertEquals(1, jobs.size());
    assertEquals("job_0_0000", jobs.keySet().iterator().next().toString());
    Task task = job.getTasks().values().iterator().next();
    TaskAttempt attempt = task.getAttempts().values().iterator().next();
    HistoryClientService historyService = historyServer.getClientService();
    MRClientProtocol protocol = historyService.getClientHandler();
    GetTaskAttemptReportRequest gtarRequest = recordFactory.newRecordInstance(GetTaskAttemptReportRequest.class);
    // test getTaskAttemptReport
    TaskAttemptId taId = attempt.getID();
    taId.setTaskId(task.getID());
    taId.getTaskId().setJobId(job.getID());
    gtarRequest.setTaskAttemptId(taId);
    GetTaskAttemptReportResponse response = protocol.getTaskAttemptReport(gtarRequest);
    assertEquals("container_0_0000_01_000000", response.getTaskAttemptReport().getContainerId().toString());
    assertTrue(response.getTaskAttemptReport().getDiagnosticInfo().isEmpty());
    // counters
    assertNotNull(response.getTaskAttemptReport().getCounters().getCounter(TaskCounter.PHYSICAL_MEMORY_BYTES));
    assertEquals(taId.toString(), response.getTaskAttemptReport().getTaskAttemptId().toString());
    // test getTaskReport
    GetTaskReportRequest request = recordFactory.newRecordInstance(GetTaskReportRequest.class);
    TaskId taskId = task.getID();
    taskId.setJobId(job.getID());
    request.setTaskId(taskId);
    GetTaskReportResponse reportResponse = protocol.getTaskReport(request);
    assertEquals("", reportResponse.getTaskReport().getDiagnosticsList().iterator().next());
    // progress
    assertEquals(1.0f, reportResponse.getTaskReport().getProgress(), 0.01);
    // report has corrected taskId
    assertEquals(taskId.toString(), reportResponse.getTaskReport().getTaskId().toString());
    // Task state should be SUCCEEDED
    assertEquals(TaskState.SUCCEEDED, reportResponse.getTaskReport().getTaskState());
    // For invalid jobid, throw IOException
    GetTaskReportsRequest gtreportsRequest = recordFactory.newRecordInstance(GetTaskReportsRequest.class);
    gtreportsRequest.setJobId(TypeConverter.toYarn(JobID.forName("job_1415730144495_0001")));
    gtreportsRequest.setTaskType(TaskType.REDUCE);
    try {
        protocol.getTaskReports(gtreportsRequest);
        fail("IOException not thrown for invalid job id");
    } catch (IOException e) {
    // Expected
    }
    // test getTaskAttemptCompletionEvents
    GetTaskAttemptCompletionEventsRequest taskAttemptRequest = recordFactory.newRecordInstance(GetTaskAttemptCompletionEventsRequest.class);
    taskAttemptRequest.setJobId(job.getID());
    GetTaskAttemptCompletionEventsResponse taskAttemptCompletionEventsResponse = protocol.getTaskAttemptCompletionEvents(taskAttemptRequest);
    assertEquals(0, taskAttemptCompletionEventsResponse.getCompletionEventCount());
    // test getDiagnostics
    GetDiagnosticsRequest diagnosticRequest = recordFactory.newRecordInstance(GetDiagnosticsRequest.class);
    diagnosticRequest.setTaskAttemptId(taId);
    GetDiagnosticsResponse diagnosticResponse = protocol.getDiagnostics(diagnosticRequest);
    // it is strange: why is there one empty string?
    assertEquals(1, diagnosticResponse.getDiagnosticsCount());
    assertEquals("", diagnosticResponse.getDiagnostics(0));
}
Also used: MRAppWithHistory (org.apache.hadoop.mapreduce.v2.hs.TestJobHistoryEvents.MRAppWithHistory), Task (org.apache.hadoop.mapreduce.v2.app.job.Task), TaskId (org.apache.hadoop.mapreduce.v2.api.records.TaskId), Configuration (org.apache.hadoop.conf.Configuration), GetTaskAttemptReportRequest (org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskAttemptReportRequest), GetTaskReportResponse (org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskReportResponse), GetDiagnosticsResponse (org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetDiagnosticsResponse), MRClientProtocol (org.apache.hadoop.mapreduce.v2.api.MRClientProtocol), GetTaskAttemptReportResponse (org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskAttemptReportResponse), GetTaskReportRequest (org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskReportRequest), GetTaskAttemptCompletionEventsRequest (org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskAttemptCompletionEventsRequest), TaskAttempt (org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt), GetDiagnosticsRequest (org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetDiagnosticsRequest), Job (org.apache.hadoop.mapreduce.v2.app.job.Job), JobId (org.apache.hadoop.mapreduce.v2.api.records.JobId), MRApp (org.apache.hadoop.mapreduce.v2.app.MRApp), TaskAttemptId (org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId), Service (org.apache.hadoop.service.Service), IOException (java.io.IOException), GetTaskAttemptCompletionEventsResponse (org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskAttemptCompletionEventsResponse), GetTaskReportsRequest (org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskReportsRequest), Test (org.junit.Test)
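
As a follow-up to the invalid-job-id branch in the test above, here is a small sketch (not part of the Hadoop sources) of that pattern in isolation: ask the history server for the task reports of a job it does not know about and treat the resulting IOException as "job unknown". The isJobUnknown helper and the jobIdStr parameter are hypothetical; the job id string is assumed to be in the usual form, e.g. "job_1415730144495_0001".

import java.io.IOException;
import org.apache.hadoop.mapreduce.JobID;
import org.apache.hadoop.mapreduce.TypeConverter;
import org.apache.hadoop.mapreduce.v2.api.MRClientProtocol;
import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskReportsRequest;
import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;

public class UnknownJobCheckSketch {

    // Hypothetical helper: returns true if the server rejects the job id with an IOException.
    public static boolean isJobUnknown(MRClientProtocol protocol, String jobIdStr) {
        GetTaskReportsRequest request = RecordFactoryProvider.getRecordFactory(null)
                .newRecordInstance(GetTaskReportsRequest.class);
        // Convert the classic mapreduce JobID string into the v2 record form.
        request.setJobId(TypeConverter.toYarn(JobID.forName(jobIdStr)));
        request.setTaskType(TaskType.REDUCE);
        try {
            protocol.getTaskReports(request);
            return false; // reports were produced, so the job is known
        } catch (IOException e) {
            return true;  // expected for a job id the server has never seen
        }
    }
}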

Example 3 with GetTaskReportResponse

Use of org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskReportResponse in project tez by apache.

The class NotRunningJob, method getTaskReport.

@Override
public GetTaskReportResponse getTaskReport(GetTaskReportRequest request) throws IOException {
    GetTaskReportResponse resp = recordFactory.newRecordInstance(GetTaskReportResponse.class);
    TaskReport report = recordFactory.newRecordInstance(TaskReport.class);
    report.setTaskId(request.getTaskId());
    report.setTaskState(TaskState.NEW);
    Counters counters = recordFactory.newRecordInstance(Counters.class);
    counters.addAllCounterGroups(new HashMap<String, CounterGroup>());
    report.setCounters(counters);
    report.addAllRunningAttempts(new ArrayList<TaskAttemptId>());
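    // Note: the stub report built above is never set on resp; the response is returned without a task report.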
    return resp;
}
Also used: TaskReport (org.apache.hadoop.mapreduce.v2.api.records.TaskReport), TaskAttemptId (org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId), GetTaskReportResponse (org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskReportResponse), CounterGroup (org.apache.hadoop.mapreduce.v2.api.records.CounterGroup), Counters (org.apache.hadoop.mapreduce.v2.api.records.Counters)

Aggregations

GetTaskReportResponse (org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskReportResponse): 3
TaskAttemptId (org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId): 3
CounterGroup (org.apache.hadoop.mapreduce.v2.api.records.CounterGroup): 2
Counters (org.apache.hadoop.mapreduce.v2.api.records.Counters): 2
TaskReport (org.apache.hadoop.mapreduce.v2.api.records.TaskReport): 2
IOException (java.io.IOException): 1
Configuration (org.apache.hadoop.conf.Configuration): 1
MRClientProtocol (org.apache.hadoop.mapreduce.v2.api.MRClientProtocol): 1
GetDiagnosticsRequest (org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetDiagnosticsRequest): 1
GetDiagnosticsResponse (org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetDiagnosticsResponse): 1
GetTaskAttemptCompletionEventsRequest (org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskAttemptCompletionEventsRequest): 1
GetTaskAttemptCompletionEventsResponse (org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskAttemptCompletionEventsResponse): 1
GetTaskAttemptReportRequest (org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskAttemptReportRequest): 1
GetTaskAttemptReportResponse (org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskAttemptReportResponse): 1
GetTaskReportRequest (org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskReportRequest): 1
GetTaskReportsRequest (org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskReportsRequest): 1
JobId (org.apache.hadoop.mapreduce.v2.api.records.JobId): 1
TaskId (org.apache.hadoop.mapreduce.v2.api.records.TaskId): 1
MRApp (org.apache.hadoop.mapreduce.v2.app.MRApp): 1
Job (org.apache.hadoop.mapreduce.v2.app.job.Job): 1