
Example 16 with TaskReport

Use of org.apache.hadoop.mapreduce.v2.api.records.TaskReport in project hadoop by apache.

The class TestAMWebServicesTasks, method verifyTaskGeneric.

public void verifyTaskGeneric(Task task, String id, String state, String type, String successfulAttempt, long startTime, long finishTime, long elapsedTime, float progress, String status) {
    TaskId taskid = task.getID();
    String tid = MRApps.toString(taskid);
    TaskReport report = task.getReport();
    WebServicesTestUtils.checkStringMatch("id", tid, id);
    WebServicesTestUtils.checkStringMatch("type", task.getType().toString(), type);
    WebServicesTestUtils.checkStringMatch("state", report.getTaskState().toString(), state);
    // not easily checked without duplicating logic, just make sure it's here
    assertNotNull("successfulAttempt null", successfulAttempt);
    assertEquals("startTime wrong", report.getStartTime(), startTime);
    assertEquals("finishTime wrong", report.getFinishTime(), finishTime);
    assertEquals("elapsedTime wrong", finishTime - startTime, elapsedTime);
    assertEquals("progress wrong", report.getProgress() * 100, progress, 1e-3f);
    assertEquals("status wrong", report.getStatus(), status);
}
Also used : TaskId(org.apache.hadoop.mapreduce.v2.api.records.TaskId) TaskReport(org.apache.hadoop.mapreduce.v2.api.records.TaskReport)
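For orientation, here is a minimal, hedged sketch (not part of TestAMWebServicesTasks) of how this helper could be exercised from inside the test class against a Mockito-mocked Task, following the mocking pattern of Example 19 below. The taskId is assumed to be a fully populated TaskIdPBImpl as built in Example 19, and all concrete values are made up for the illustration.

// Illustrative only: a mocked Task whose TaskReport matches the expected arguments.
// Note that elapsedTime must equal finishTime - startTime, and the progress argument
// is the report progress scaled by 100.
TaskReport report = mock(TaskReport.class);
when(report.getTaskState()).thenReturn(TaskState.SUCCEEDED);
when(report.getStartTime()).thenReturn(100001L);
when(report.getFinishTime()).thenReturn(100011L);
when(report.getProgress()).thenReturn(1.0f);
when(report.getStatus()).thenReturn("done");
Task task = mock(Task.class);
when(task.getID()).thenReturn(taskId);
when(task.getType()).thenReturn(TaskType.MAP);
when(task.getReport()).thenReturn(report);
verifyTaskGeneric(task, MRApps.toString(taskId), "SUCCEEDED", "MAP",
        "any-non-null-attempt-id", 100001L, 100011L, 10L, 100.0f, "done");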

Example 17 with TaskReport

Use of org.apache.hadoop.mapreduce.v2.api.records.TaskReport in project hadoop by apache.

The class TypeConverter, method fromYarn.

public static TaskReport fromYarn(org.apache.hadoop.mapreduce.v2.api.records.TaskReport report) {
    String[] diagnostics = null;
    if (report.getDiagnosticsList() != null) {
        diagnostics = new String[report.getDiagnosticsCount()];
        int i = 0;
        for (String cs : report.getDiagnosticsList()) {
            diagnostics[i++] = cs.toString();
        }
    } else {
        diagnostics = new String[0];
    }
    TaskReport rep = new TaskReport(fromYarn(report.getTaskId()), report.getProgress(),
            report.getTaskState().toString(), diagnostics, fromYarn(report.getTaskState()),
            report.getStartTime(), report.getFinishTime(), fromYarn(report.getCounters()));
    List<org.apache.hadoop.mapreduce.TaskAttemptID> runningAtts = new ArrayList<org.apache.hadoop.mapreduce.TaskAttemptID>();
    for (org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId id : report.getRunningAttemptsList()) {
        runningAtts.add(fromYarn(id));
    }
    rep.setRunningTaskAttemptIds(runningAtts);
    if (report.getSuccessfulAttempt() != null) {
        rep.setSuccessfulAttemptId(fromYarn(report.getSuccessfulAttempt()));
    }
    return rep;
}
Also used : ArrayList(java.util.ArrayList) TaskAttemptId(org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId)
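As a usage illustration (not taken from the Hadoop sources), a YARN-side TaskReport record could be populated and pushed through this converter roughly as follows. The record-factory call and the setters shown are the standard YARN record pattern and are assumptions here, and the id objects follow the PB-impl construction of Example 19.

// Hedged sketch: build a v2 TaskReport via the YARN record factory and convert it
// to the client-facing org.apache.hadoop.mapreduce.TaskReport.
JobId jobId = new JobIdPBImpl();
jobId.setId(0);
jobId.setAppId(ApplicationIdPBImpl.newInstance(System.currentTimeMillis(), 1));
TaskId taskId = new TaskIdPBImpl();
taskId.setId(0);
taskId.setTaskType(TaskType.MAP);
taskId.setJobId(jobId);
org.apache.hadoop.mapreduce.v2.api.records.TaskReport yarnReport =
        RecordFactoryProvider.getRecordFactory(null)
                .newRecordInstance(org.apache.hadoop.mapreduce.v2.api.records.TaskReport.class);
yarnReport.setTaskId(taskId);
yarnReport.setTaskState(TaskState.SUCCEEDED);
yarnReport.setProgress(1.0f);
yarnReport.setStartTime(100001L);
yarnReport.setFinishTime(100011L);
// The converted report carries over the same timing and progress information.
org.apache.hadoop.mapreduce.TaskReport clientReport = TypeConverter.fromYarn(yarnReport);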

Example 18 with TaskReport

Use of org.apache.hadoop.mapreduce.v2.api.records.TaskReport in project hadoop by apache.

The class TestCompletedTask, method testTaskStartTimes.

@Test(timeout = 5000)
public void testTaskStartTimes() {
    TaskId taskId = mock(TaskId.class);
    TaskInfo taskInfo = mock(TaskInfo.class);
    Map<TaskAttemptID, TaskAttemptInfo> taskAttempts = new TreeMap<TaskAttemptID, TaskAttemptInfo>();
    TaskAttemptID id = new TaskAttemptID("0", 0, TaskType.MAP, 0, 0);
    TaskAttemptInfo info = mock(TaskAttemptInfo.class);
    when(info.getAttemptId()).thenReturn(id);
    when(info.getStartTime()).thenReturn(10L);
    taskAttempts.put(id, info);
    id = new TaskAttemptID("1", 0, TaskType.MAP, 1, 1);
    info = mock(TaskAttemptInfo.class);
    when(info.getAttemptId()).thenReturn(id);
    when(info.getStartTime()).thenReturn(20L);
    taskAttempts.put(id, info);
    when(taskInfo.getAllTaskAttempts()).thenReturn(taskAttempts);
    CompletedTask task = new CompletedTask(taskId, taskInfo);
    TaskReport report = task.getReport();
    // Make sure the startTime returned by the report is the lesser of the
    // attempt launch times.
    assertTrue(report.getStartTime() == 10);
}
Also used : TaskInfo(org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskInfo) TaskId(org.apache.hadoop.mapreduce.v2.api.records.TaskId) TaskReport(org.apache.hadoop.mapreduce.v2.api.records.TaskReport) TaskAttemptID(org.apache.hadoop.mapreduce.TaskAttemptID) TaskAttemptInfo(org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskAttemptInfo) TreeMap(java.util.TreeMap) CompletedTask(org.apache.hadoop.mapreduce.v2.hs.CompletedTask) Test(org.junit.Test)
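As a minor aside, the final check could equivalently be written with assertEquals, which produces a more informative failure message; a hedged alternative under the same JUnit 4 assertions used above:

// Equivalent assertion with a clearer failure message (expected value first).
assertEquals("report start time should be the earliest attempt start time",
        10L, report.getStartTime());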

Example 19 with TaskReport

Use of org.apache.hadoop.mapreduce.v2.api.records.TaskReport in project hadoop by apache.

The class TestBlocks, method getTask.

private Task getTask(long timestamp) {
    JobId jobId = new JobIdPBImpl();
    jobId.setId(0);
    jobId.setAppId(ApplicationIdPBImpl.newInstance(timestamp, 1));
    TaskId taskId = new TaskIdPBImpl();
    taskId.setId(0);
    taskId.setTaskType(TaskType.REDUCE);
    taskId.setJobId(jobId);
    Task task = mock(Task.class);
    when(task.getID()).thenReturn(taskId);
    TaskReport report = mock(TaskReport.class);
    when(report.getProgress()).thenReturn(0.7f);
    when(report.getTaskState()).thenReturn(TaskState.SUCCEEDED);
    when(report.getStartTime()).thenReturn(100001L);
    when(report.getFinishTime()).thenReturn(100011L);
    when(task.getReport()).thenReturn(report);
    when(task.getType()).thenReturn(TaskType.REDUCE);
    return task;
}
Also used : Task(org.apache.hadoop.mapreduce.v2.app.job.Task) TaskId(org.apache.hadoop.mapreduce.v2.api.records.TaskId) TaskReport(org.apache.hadoop.mapreduce.v2.api.records.TaskReport) TaskIdPBImpl(org.apache.hadoop.mapreduce.v2.api.records.impl.pb.TaskIdPBImpl) JobIdPBImpl(org.apache.hadoop.mapreduce.v2.api.records.impl.pb.JobIdPBImpl) JobId(org.apache.hadoop.mapreduce.v2.api.records.JobId)
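The aggregation list mentions Job, AppContext, and HashMap, which suggests the mocked task is registered in a mocked Job and AppContext before a web block is rendered against it. A hedged sketch of that wiring (the exact setup in TestBlocks is not shown here, so treat this as an assumption):

// Illustrative only: register the mocked task in a mocked Job and AppContext.
Task task = getTask(0L);
Map<TaskId, Task> tasks = new HashMap<TaskId, Task>();
tasks.put(task.getID(), task);
Job job = mock(Job.class);
when(job.getTasks()).thenReturn(tasks);
AppContext ctx = mock(AppContext.class);
when(ctx.getJob(task.getID().getJobId())).thenReturn(job);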

Example 20 with TaskReport

Use of org.apache.hadoop.mapreduce.v2.api.records.TaskReport in project hadoop by apache.

The class NotRunningJob, method getTaskReports.

@Override
public GetTaskReportsResponse getTaskReports(GetTaskReportsRequest request) throws IOException {
    GetTaskReportsResponse resp = recordFactory.newRecordInstance(GetTaskReportsResponse.class);
    resp.addAllTaskReports(new ArrayList<TaskReport>());
    return resp;
}
Also used : TaskReport(org.apache.hadoop.mapreduce.v2.api.records.TaskReport) GetTaskReportsResponse(org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskReportsResponse)
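For context, a hedged sketch (not from the Hadoop sources) of the client side of this call: a GetTaskReportsRequest is built through the record factory and sent over the MRClientProtocol interface, and for a job that never ran NotRunningJob answers with an empty report list. The proxy and jobId variables are assumptions standing in for a real protocol handle and a previously built v2 JobId.

// Illustrative only: `proxy` is assumed to be an MRClientProtocol implementation
// (NotRunningJob is one) and `jobId` a previously constructed v2 JobId.
RecordFactory recordFactory = RecordFactoryProvider.getRecordFactory(null);
GetTaskReportsRequest request = recordFactory.newRecordInstance(GetTaskReportsRequest.class);
request.setJobId(jobId);
request.setTaskType(TaskType.MAP);
GetTaskReportsResponse response = proxy.getTaskReports(request);
// NotRunningJob returns an empty list rather than null, so callers can iterate safely.
List<TaskReport> reports = response.getTaskReportList();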

Aggregations

TaskReport (org.apache.hadoop.mapreduce.v2.api.records.TaskReport) 20
TaskId (org.apache.hadoop.mapreduce.v2.api.records.TaskId) 9
Task (org.apache.hadoop.mapreduce.v2.app.job.Task) 8
Test (org.junit.Test) 6
TaskAttemptId (org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId) 5
Job (org.apache.hadoop.mapreduce.v2.app.job.Job) 5
TaskAttempt (org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt) 5
JobId (org.apache.hadoop.mapreduce.v2.api.records.JobId) 4
JobIdPBImpl (org.apache.hadoop.mapreduce.v2.api.records.impl.pb.JobIdPBImpl) 4
TaskIdPBImpl (org.apache.hadoop.mapreduce.v2.api.records.impl.pb.TaskIdPBImpl) 4
PrintWriter (java.io.PrintWriter) 3
HashMap (java.util.HashMap) 3
TaskAttemptReport (org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptReport) 3
AppContext (org.apache.hadoop.mapreduce.v2.app.AppContext) 3
FewAttemptsBlock (org.apache.hadoop.mapreduce.v2.app.webapp.AttemptsPage.FewAttemptsBlock) 3
Configuration (org.apache.hadoop.conf.Configuration) 2
GetTaskReportResponse (org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskReportResponse) 2
GetTaskReportsResponse (org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskReportsResponse) 2
CounterGroup (org.apache.hadoop.mapreduce.v2.api.records.CounterGroup) 2
Counters (org.apache.hadoop.mapreduce.v2.api.records.Counters) 2