
Example 11 with TaskReport

Use of org.apache.hadoop.mapreduce.v2.api.records.TaskReport in project hadoop by apache.

From class TaskImpl, method getReport:

@Override
public TaskReport getReport() {
    TaskReport report = recordFactory.newRecordInstance(TaskReport.class);
    readLock.lock();
    try {
        TaskAttempt bestAttempt = selectBestAttempt();
        report.setTaskId(taskId);
        report.setStartTime(getLaunchTime());
        report.setFinishTime(getFinishTime());
        report.setTaskState(getState());
        report.setProgress(bestAttempt == null ? 0f : bestAttempt.getProgress());
        report.setStatus(bestAttempt == null ? "" : bestAttempt.getReport().getStateString());
        for (TaskAttempt attempt : attempts.values()) {
            if (TaskAttemptState.RUNNING.equals(attempt.getState())) {
                report.addRunningAttempt(attempt.getID());
            }
        }
        report.setSuccessfulAttempt(successfulAttempt);
        for (TaskAttempt att : attempts.values()) {
            String prefix = "AttemptID:" + att.getID() + " Info:";
            for (CharSequence cs : att.getDiagnostics()) {
                report.addDiagnostics(prefix + cs);
            }
        }
        // Add a copy of counters as the last step so that their lifetime on heap
        // is as small as possible.
        report.setCounters(TypeConverter.toYarn(bestAttempt == null ? TaskAttemptImpl.EMPTY_COUNTERS : bestAttempt.getCounters()));
        return report;
    } finally {
        readLock.unlock();
    }
}
Also used : TaskReport(org.apache.hadoop.mapreduce.v2.api.records.TaskReport) TaskAttempt(org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt)
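
The fields are populated under the read lock, so callers get a consistent snapshot. A minimal consumer sketch (hypothetical, not from the Hadoop sources), assuming the same Task and TaskReport imports as the example; getDiagnosticsList() is inferred from the addDiagnostics/getDiagnosticsCount accessors used elsewhere in these examples:

public static void printTaskReport(Task task) {
    TaskReport report = task.getReport();
    System.out.println("Task:     " + report.getTaskId());
    System.out.println("State:    " + report.getTaskState());
    System.out.println("Progress: " + report.getProgress());
    // diagnostics were added above as "AttemptID:<id> Info:<message>" strings
    for (String diagnostic : report.getDiagnosticsList()) {
        System.out.println("Diag:     " + diagnostic);
    }
}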

Example 12 with TaskReport

Use of org.apache.hadoop.mapreduce.v2.api.records.TaskReport in project hadoop by apache.

From class MockJobs, method newTaskReport:

public static TaskReport newTaskReport(TaskId id) {
    TaskReport report = Records.newRecord(TaskReport.class);
    report.setTaskId(id);
    report.setStartTime(System.currentTimeMillis() - (int) (Math.random() * DT));
    report.setFinishTime(System.currentTimeMillis() + (int) (Math.random() * DT) + 1);
    report.setProgress((float) Math.random());
    report.setStatus("Moving average: " + Math.random());
    report.setCounters(TypeConverter.toYarn(newCounters()));
    report.setTaskState(TASK_STATES.next());
    return report;
}
Also used : TaskReport(org.apache.hadoop.mapreduce.v2.api.records.TaskReport)
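
A hypothetical usage sketch for the helper above, assuming a JobId named jobId is in scope; because the times and progress are randomized, only structural assertions are stable:

TaskId tid = Records.newRecord(TaskId.class);
// assumes a JobId record named jobId was built elsewhere
tid.setJobId(jobId);
tid.setId(0);
tid.setTaskType(TaskType.MAP);
TaskReport report = MockJobs.newTaskReport(tid);
// start time is at or before "now", finish time strictly after it
assert report.getTaskId().equals(tid);
assert report.getFinishTime() > report.getStartTime();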

Example 13 with TaskReport

Use of org.apache.hadoop.mapreduce.v2.api.records.TaskReport in project hadoop by apache.

From class MockJobs, method newTask:

public static Task newTask(JobId jid, int i, int m, final boolean hasFailedTasks) {
    final TaskId tid = Records.newRecord(TaskId.class);
    tid.setJobId(jid);
    tid.setId(i);
    tid.setTaskType(TASK_TYPES.next());
    final TaskReport report = newTaskReport(tid);
    final Map<TaskAttemptId, TaskAttempt> attempts = newTaskAttempts(tid, m);
    return new Task() {

        @Override
        public TaskId getID() {
            return tid;
        }

        @Override
        public TaskReport getReport() {
            return report;
        }

        @Override
        public Counters getCounters() {
            if (hasFailedTasks) {
                return null;
            }
            return new Counters(TypeConverter.fromYarn(report.getCounters()));
        }

        @Override
        public float getProgress() {
            return report.getProgress();
        }

        @Override
        public TaskType getType() {
            return tid.getTaskType();
        }

        @Override
        public Map<TaskAttemptId, TaskAttempt> getAttempts() {
            return attempts;
        }

        @Override
        public TaskAttempt getAttempt(TaskAttemptId attemptID) {
            return attempts.get(attemptID);
        }

        @Override
        public boolean isFinished() {
            switch (report.getTaskState()) {
                case SUCCEEDED:
                case KILLED:
                case FAILED:
                    return true;
            }
            return false;
        }

        @Override
        public boolean canCommit(TaskAttemptId taskAttemptID) {
            return false;
        }

        @Override
        public TaskState getState() {
            return report.getTaskState();
        }
    };
}
Also used : Task(org.apache.hadoop.mapreduce.v2.app.job.Task) TaskId(org.apache.hadoop.mapreduce.v2.api.records.TaskId) TaskReport(org.apache.hadoop.mapreduce.v2.api.records.TaskReport) TaskAttemptId(org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId) Counters(org.apache.hadoop.mapreduce.Counters) TaskAttempt(org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt)
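
A hypothetical caller sketch for newTask, assuming a JobId named jid is in scope and that newTaskAttempts(tid, m) yields m attempts:

Task task = MockJobs.newTask(jid, 0, 2, false /* hasFailedTasks */);
// the anonymous Task is a read-only stub backed entirely by the prebuilt report
assert task.getAttempts().size() == 2;
assert !task.canCommit(null);
assert task.getProgress() == task.getReport().getProgress();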

Example 14 with TaskReport

Use of org.apache.hadoop.mapreduce.v2.api.records.TaskReport in project hadoop by apache.

From class TestMRClientService, method test:

@Test
public void test() throws Exception {
    MRAppWithClientService app = new MRAppWithClientService(1, 0, false);
    Configuration conf = new Configuration();
    Job job = app.submit(conf);
    app.waitForState(job, JobState.RUNNING);
    Assert.assertEquals("Num tasks not correct", 1, job.getTasks().size());
    Iterator<Task> it = job.getTasks().values().iterator();
    Task task = it.next();
    app.waitForState(task, TaskState.RUNNING);
    TaskAttempt attempt = task.getAttempts().values().iterator().next();
    app.waitForState(attempt, TaskAttemptState.RUNNING);
    // send the diagnostic
    String diagnostic1 = "Diagnostic1";
    String diagnostic2 = "Diagnostic2";
    app.getContext().getEventHandler().handle(new TaskAttemptDiagnosticsUpdateEvent(attempt.getID(), diagnostic1));
    // build the status update
    TaskAttemptStatus taskAttemptStatus = new TaskAttemptStatus();
    taskAttemptStatus.id = attempt.getID();
    taskAttemptStatus.progress = 0.5f;
    taskAttemptStatus.stateString = "RUNNING";
    taskAttemptStatus.taskState = TaskAttemptState.RUNNING;
    taskAttemptStatus.phase = Phase.MAP;
    // send the status update
    app.getContext().getEventHandler().handle(new TaskAttemptStatusUpdateEvent(attempt.getID(), taskAttemptStatus));
    // verify that all objects are fully populated by invoking RPCs
    YarnRPC rpc = YarnRPC.create(conf);
    MRClientProtocol proxy = (MRClientProtocol) rpc.getProxy(MRClientProtocol.class, app.clientService.getBindAddress(), conf);
    GetCountersRequest gcRequest = recordFactory.newRecordInstance(GetCountersRequest.class);
    gcRequest.setJobId(job.getID());
    Assert.assertNotNull("Counters is null", proxy.getCounters(gcRequest).getCounters());
    GetJobReportRequest gjrRequest = recordFactory.newRecordInstance(GetJobReportRequest.class);
    gjrRequest.setJobId(job.getID());
    JobReport jr = proxy.getJobReport(gjrRequest).getJobReport();
    verifyJobReport(jr);
    GetTaskAttemptCompletionEventsRequest gtaceRequest = recordFactory.newRecordInstance(GetTaskAttemptCompletionEventsRequest.class);
    gtaceRequest.setJobId(job.getID());
    gtaceRequest.setFromEventId(0);
    gtaceRequest.setMaxEvents(10);
    Assert.assertNotNull("TaskCompletionEvents is null", proxy.getTaskAttemptCompletionEvents(gtaceRequest).getCompletionEventList());
    GetDiagnosticsRequest gdRequest = recordFactory.newRecordInstance(GetDiagnosticsRequest.class);
    gdRequest.setTaskAttemptId(attempt.getID());
    Assert.assertNotNull("Diagnostics is null", proxy.getDiagnostics(gdRequest).getDiagnosticsList());
    GetTaskAttemptReportRequest gtarRequest = recordFactory.newRecordInstance(GetTaskAttemptReportRequest.class);
    gtarRequest.setTaskAttemptId(attempt.getID());
    TaskAttemptReport tar = proxy.getTaskAttemptReport(gtarRequest).getTaskAttemptReport();
    verifyTaskAttemptReport(tar);
    GetTaskReportRequest gtrRequest = recordFactory.newRecordInstance(GetTaskReportRequest.class);
    gtrRequest.setTaskId(task.getID());
    Assert.assertNotNull("TaskReport is null", proxy.getTaskReport(gtrRequest).getTaskReport());
    GetTaskReportsRequest gtreportsRequest = recordFactory.newRecordInstance(GetTaskReportsRequest.class);
    gtreportsRequest.setJobId(job.getID());
    gtreportsRequest.setTaskType(TaskType.MAP);
    Assert.assertNotNull("TaskReports for map is null", proxy.getTaskReports(gtreportsRequest).getTaskReportList());
    gtreportsRequest = recordFactory.newRecordInstance(GetTaskReportsRequest.class);
    gtreportsRequest.setJobId(job.getID());
    gtreportsRequest.setTaskType(TaskType.REDUCE);
    Assert.assertNotNull("TaskReports for reduce is null", proxy.getTaskReports(gtreportsRequest).getTaskReportList());
    List<String> diag = proxy.getDiagnostics(gdRequest).getDiagnosticsList();
    Assert.assertEquals("Num diagnostics not correct", 1, diag.size());
    Assert.assertEquals("Diag 1 not correct", diagnostic1, diag.get(0).toString());
    TaskReport taskReport = proxy.getTaskReport(gtrRequest).getTaskReport();
    Assert.assertEquals("Num diagnostics not correct", 1, taskReport.getDiagnosticsCount());
    // send the done signal to the task
    app.getContext().getEventHandler().handle(new TaskAttemptEvent(task.getAttempts().values().iterator().next().getID(), TaskAttemptEventType.TA_DONE));
    app.waitForState(job, JobState.SUCCEEDED);
    // For invalid jobid, throw IOException
    gtreportsRequest = recordFactory.newRecordInstance(GetTaskReportsRequest.class);
    gtreportsRequest.setJobId(TypeConverter.toYarn(JobID.forName("job_1415730144495_0001")));
    gtreportsRequest.setTaskType(TaskType.REDUCE);
    try {
        proxy.getTaskReports(gtreportsRequest);
        fail("IOException not thrown for invalid job id");
    } catch (IOException e) {
        // Expected
    }
}
Also used : Task(org.apache.hadoop.mapreduce.v2.app.job.Task) Configuration(org.apache.hadoop.conf.Configuration) GetTaskAttemptReportRequest(org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskAttemptReportRequest) MRClientProtocol(org.apache.hadoop.mapreduce.v2.api.MRClientProtocol) JobReport(org.apache.hadoop.mapreduce.v2.api.records.JobReport) GetTaskAttemptCompletionEventsRequest(org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskAttemptCompletionEventsRequest) GetTaskReportRequest(org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskReportRequest) TaskAttempt(org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt) GetDiagnosticsRequest(org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetDiagnosticsRequest) Job(org.apache.hadoop.mapreduce.v2.app.job.Job) GetCountersRequest(org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetCountersRequest) TaskReport(org.apache.hadoop.mapreduce.v2.api.records.TaskReport) TaskAttemptDiagnosticsUpdateEvent(org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptDiagnosticsUpdateEvent) TaskAttemptStatus(org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptStatusUpdateEvent.TaskAttemptStatus) TaskAttemptEvent(org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent) YarnRPC(org.apache.hadoop.yarn.ipc.YarnRPC) IOException(java.io.IOException) GetJobReportRequest(org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetJobReportRequest) TaskAttemptStatusUpdateEvent(org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptStatusUpdateEvent) TaskAttemptReport(org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptReport) GetTaskReportsRequest(org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskReportsRequest) Test(org.junit.Test)

Example 15 with TaskReport

Use of org.apache.hadoop.mapreduce.v2.api.records.TaskReport in project hadoop by apache.

From class TestBlocks, method testSingleCounterBlock:

@Test
public void testSingleCounterBlock() {
    AppContext appCtx = mock(AppContext.class);
    View.ViewContext ctx = mock(View.ViewContext.class);
    JobId jobId = new JobIdPBImpl();
    jobId.setId(0);
    jobId.setAppId(ApplicationIdPBImpl.newInstance(0, 1));
    TaskId mapTaskId = new TaskIdPBImpl();
    mapTaskId.setId(0);
    mapTaskId.setTaskType(TaskType.MAP);
    mapTaskId.setJobId(jobId);
    Task mapTask = mock(Task.class);
    when(mapTask.getID()).thenReturn(mapTaskId);
    TaskReport mapReport = mock(TaskReport.class);
    when(mapTask.getReport()).thenReturn(mapReport);
    when(mapTask.getType()).thenReturn(TaskType.MAP);
    TaskId reduceTaskId = new TaskIdPBImpl();
    reduceTaskId.setId(0);
    reduceTaskId.setTaskType(TaskType.REDUCE);
    reduceTaskId.setJobId(jobId);
    Task reduceTask = mock(Task.class);
    when(reduceTask.getID()).thenReturn(reduceTaskId);
    TaskReport reduceReport = mock(TaskReport.class);
    when(reduceTask.getReport()).thenReturn(reduceReport);
    when(reduceTask.getType()).thenReturn(TaskType.REDUCE);
    Map<TaskId, Task> tasks = new HashMap<TaskId, Task>();
    tasks.put(mapTaskId, mapTask);
    tasks.put(reduceTaskId, reduceTask);
    Job job = mock(Job.class);
    when(job.getTasks()).thenReturn(tasks);
    when(appCtx.getJob(any(JobId.class))).thenReturn(job);
    // SingleCounter for map task
    SingleCounterBlockForMapTest blockForMapTest = spy(new SingleCounterBlockForMapTest(appCtx, ctx));
    PrintWriter pWriterForMapTest = new PrintWriter(data);
    Block htmlForMapTest = new BlockForTest(new HtmlBlockForTest(), pWriterForMapTest, 0, false);
    blockForMapTest.render(htmlForMapTest);
    pWriterForMapTest.flush();
    assertTrue(data.toString().contains("task_0_0001_m_000000"));
    assertFalse(data.toString().contains("task_0_0001_r_000000"));
    data.reset();
    // SingleCounter for reduce task
    SingleCounterBlockForReduceTest blockForReduceTest = spy(new SingleCounterBlockForReduceTest(appCtx, ctx));
    PrintWriter pWriterForReduceTest = new PrintWriter(data);
    Block htmlForReduceTest = new BlockForTest(new HtmlBlockForTest(), pWriterForReduceTest, 0, false);
    blockForReduceTest.render(htmlForReduceTest);
    pWriterForReduceTest.flush();
    System.out.println(data.toString());
    assertFalse(data.toString().contains("task_0_0001_m_000000"));
    assertTrue(data.toString().contains("task_0_0001_r_000000"));
}
Also used : Task(org.apache.hadoop.mapreduce.v2.app.job.Task) TaskId(org.apache.hadoop.mapreduce.v2.api.records.TaskId) TaskReport(org.apache.hadoop.mapreduce.v2.api.records.TaskReport) TaskIdPBImpl(org.apache.hadoop.mapreduce.v2.api.records.impl.pb.TaskIdPBImpl) HashMap(java.util.HashMap) AppContext(org.apache.hadoop.mapreduce.v2.app.AppContext) View(org.apache.hadoop.yarn.webapp.View) BlockForTest(org.apache.hadoop.yarn.webapp.view.BlockForTest) JobIdPBImpl(org.apache.hadoop.mapreduce.v2.api.records.impl.pb.JobIdPBImpl) HtmlBlock(org.apache.hadoop.yarn.webapp.view.HtmlBlock) Block(org.apache.hadoop.yarn.webapp.view.HtmlBlock.Block) FewAttemptsBlock(org.apache.hadoop.mapreduce.v2.app.webapp.AttemptsPage.FewAttemptsBlock) Job(org.apache.hadoop.mapreduce.v2.app.job.Job) JobId(org.apache.hadoop.mapreduce.v2.api.records.JobId) PrintWriter(java.io.PrintWriter) BlockForTest(org.apache.hadoop.yarn.webapp.view.BlockForTest) Test(org.junit.Test)
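
A hypothetical extension of the setup above: mapReport and reduceReport are Mockito mocks, so their getters return defaults (0.0f, null) unless stubbed; a variant that pins concrete values for the rendered blocks would also need a TaskState import:

// stub concrete values so the rendered blocks do not show Mockito defaults
when(mapReport.getProgress()).thenReturn(0.5f);
when(mapReport.getTaskState()).thenReturn(TaskState.RUNNING);
when(reduceReport.getProgress()).thenReturn(0.0f);
when(reduceReport.getTaskState()).thenReturn(TaskState.SCHEDULED);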

Aggregations

TaskReport (org.apache.hadoop.mapreduce.v2.api.records.TaskReport): 20 usages
TaskId (org.apache.hadoop.mapreduce.v2.api.records.TaskId): 9 usages
Task (org.apache.hadoop.mapreduce.v2.app.job.Task): 8 usages
Test (org.junit.Test): 6 usages
TaskAttemptId (org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId): 5 usages
Job (org.apache.hadoop.mapreduce.v2.app.job.Job): 5 usages
TaskAttempt (org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt): 5 usages
JobId (org.apache.hadoop.mapreduce.v2.api.records.JobId): 4 usages
JobIdPBImpl (org.apache.hadoop.mapreduce.v2.api.records.impl.pb.JobIdPBImpl): 4 usages
TaskIdPBImpl (org.apache.hadoop.mapreduce.v2.api.records.impl.pb.TaskIdPBImpl): 4 usages
PrintWriter (java.io.PrintWriter): 3 usages
HashMap (java.util.HashMap): 3 usages
TaskAttemptReport (org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptReport): 3 usages
AppContext (org.apache.hadoop.mapreduce.v2.app.AppContext): 3 usages
FewAttemptsBlock (org.apache.hadoop.mapreduce.v2.app.webapp.AttemptsPage.FewAttemptsBlock): 3 usages
Configuration (org.apache.hadoop.conf.Configuration): 2 usages
GetTaskReportResponse (org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskReportResponse): 2 usages
GetTaskReportsResponse (org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskReportsResponse): 2 usages
CounterGroup (org.apache.hadoop.mapreduce.v2.api.records.CounterGroup): 2 usages
Counters (org.apache.hadoop.mapreduce.v2.api.records.Counters): 2 usages