Search in sources :

Example 6 with TaskAttemptReport

use of org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptReport in project hadoop by apache.

From the class MockJobs, the method newTaskAttempt:

/**
 * Builds a mock {@code TaskAttempt} for tests. The attempt id is derived from
 * the given task id and index, and most getters are backed by a report created
 * once via {@code newTaskAttemptReport}. Methods whose data is not available in
 * a mock return fixed placeholder values; {@code getNodeId} is unsupported.
 */
public static TaskAttempt newTaskAttempt(TaskId tid, int i) {
    // Identifier for this attempt: attempt number i of the given task.
    final TaskAttemptId attemptId = Records.newRecord(TaskAttemptId.class);
    attemptId.setTaskId(tid);
    attemptId.setId(i);
    // Pre-built report that backs most of the getters below.
    final TaskAttemptReport attemptReport = newTaskAttemptReport(attemptId);
    return new TaskAttempt() {

        @Override
        public TaskAttemptId getID() {
            return attemptId;
        }

        @Override
        public TaskAttemptReport getReport() {
            return attemptReport;
        }

        @Override
        public NodeId getNodeId() throws UnsupportedOperationException {
            // Mock attempts are not bound to a real node.
            throw new UnsupportedOperationException();
        }

        @Override
        public long getLaunchTime() {
            return attemptReport.getStartTime();
        }

        @Override
        public long getFinishTime() {
            return attemptReport.getFinishTime();
        }

        @Override
        public long getShuffleFinishTime() {
            return attemptReport.getShuffleFinishTime();
        }

        @Override
        public long getSortFinishTime() {
            return attemptReport.getSortFinishTime();
        }

        @Override
        public int getShufflePort() {
            return ShuffleHandler.DEFAULT_SHUFFLE_PORT;
        }

        @Override
        public Counters getCounters() {
            // Guard against a report without counters; callers expect null then.
            if (attemptReport == null || attemptReport.getCounters() == null) {
                return null;
            }
            return new Counters(TypeConverter.fromYarn(attemptReport.getCounters()));
        }

        @Override
        public float getProgress() {
            return attemptReport.getProgress();
        }

        @Override
        public Phase getPhase() {
            return attemptReport.getPhase();
        }

        @Override
        public TaskAttemptState getState() {
            return attemptReport.getTaskAttemptState();
        }

        @Override
        public boolean isFinished() {
            // Terminal states only; everything else is still in flight.
            switch (attemptReport.getTaskAttemptState()) {
                case SUCCEEDED:
                case FAILED:
                case KILLED:
                    return true;
                default:
                    return false;
            }
        }

        @Override
        public ContainerId getAssignedContainerID() {
            // Fabricate a container id rooted at this attempt's application.
            ApplicationAttemptId appAttempt =
                ApplicationAttemptId.newInstance(attemptId.getTaskId().getJobId().getAppId(), 0);
            return ContainerId.newContainerId(appAttempt, 0);
        }

        @Override
        public String getNodeHttpAddress() {
            return "localhost:8042";
        }

        @Override
        public String getAssignedContainerMgrAddress() {
            return "localhost:9998";
        }

        @Override
        public String getNodeRackName() {
            return "/default-rack";
        }

        @Override
        public List<String> getDiagnostics() {
            return Lists.newArrayList(attemptReport.getDiagnosticInfo());
        }
    };
}
Also used : TaskAttemptReport(org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptReport) ContainerId(org.apache.hadoop.yarn.api.records.ContainerId) TaskAttemptId(org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId) Counters(org.apache.hadoop.mapreduce.Counters) TaskAttempt(org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt) ApplicationAttemptId(org.apache.hadoop.yarn.api.records.ApplicationAttemptId)

Example 7 with TaskAttemptReport

use of org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptReport in project hadoop by apache.

From the class GetTaskAttemptReportResponsePBImpl, the method getTaskAttemptReport:

@Override
public TaskAttemptReport getTaskAttemptReport() {
    // Lazily convert the report out of the underlying proto, caching the
    // result so repeated calls do not re-deserialize. Returns null when the
    // proto carries no report at all.
    if (this.taskAttemptReport == null) {
        GetTaskAttemptReportResponseProtoOrBuilder p = viaProto ? proto : builder;
        if (p.hasTaskAttemptReport()) {
            this.taskAttemptReport = convertFromProtoFormat(p.getTaskAttemptReport());
        }
    }
    return this.taskAttemptReport;
}
Also used : GetTaskAttemptReportResponseProtoOrBuilder(org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProtoOrBuilder)

Example 8 with TaskAttemptReport

use of org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptReport in project hadoop by apache.

From the class TaskAttemptImpl, the method getReport:

@Override
public TaskAttemptReport getReport() {
    TaskAttemptReport report = recordFactory.newRecordInstance(TaskAttemptReport.class);
    // Snapshot all fields under the read lock so the report is internally
    // consistent with respect to concurrent state transitions.
    readLock.lock();
    try {
        report.setTaskAttemptId(attemptId);
        // Report the attempt's LOCAL state machine state, deliberately
        // NOT the state carried in reportedStatus.
        report.setTaskAttemptState(getState());
        report.setProgress(reportedStatus.progress);
        report.setPhase(reportedStatus.phase);
        report.setStateString(reportedStatus.stateString);
        report.setStartTime(launchTime);
        report.setFinishTime(finishTime);
        report.setShuffleFinishTime(this.reportedStatus.shuffleFinishTime);
        report.setDiagnosticInfo(StringUtils.join(LINE_SEPARATOR, getDiagnostics()));
        report.setCounters(TypeConverter.toYarn(getCounters()));
        report.setContainerId(this.getAssignedContainerID());
        report.setNodeManagerHost(trackerName);
        report.setNodeManagerHttpPort(httpPort);
        // The NM port is only known once a container has been assigned.
        if (this.container != null) {
            report.setNodeManagerPort(this.container.getNodeId().getPort());
        }
        return report;
    } finally {
        readLock.unlock();
    }
}
Also used : TaskAttemptReport(org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptReport)

Example 9 with TaskAttemptReport

use of org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptReport in project hadoop by apache.

From the class TestBlocks, the method testAttemptsBlock:

/**
   * Verifies that AttemptsBlock renders a link for a successful reduce attempt.
   */
@Test
public void testAttemptsBlock() {
    AppContext context = mock(AppContext.class);
    AppForTest app = new AppForTest(context);
    // Build a job/task identity: job 0 of app (0, 1), reduce task 0.
    JobId jobId = new JobIdPBImpl();
    jobId.setId(0);
    jobId.setAppId(ApplicationIdPBImpl.newInstance(0, 1));
    TaskId taskId = new TaskIdPBImpl();
    taskId.setId(0);
    taskId.setTaskType(TaskType.REDUCE);
    taskId.setJobId(jobId);
    // Mock the task itself and its report.
    Task task = mock(Task.class);
    when(task.getID()).thenReturn(taskId);
    TaskReport taskReport = mock(TaskReport.class);
    when(task.getReport()).thenReturn(taskReport);
    when(task.getType()).thenReturn(TaskType.REDUCE);
    // One SUCCEEDED attempt (attempt 0) attached to the task.
    TaskAttempt attempt = mock(TaskAttempt.class);
    TaskAttemptId attemptId = new TaskAttemptIdPBImpl();
    attemptId.setId(0);
    attemptId.setTaskId(task.getID());
    when(attempt.getID()).thenReturn(attemptId);
    final TaskAttemptState attemptState = TaskAttemptState.SUCCEEDED;
    when(attempt.getState()).thenReturn(attemptState);
    TaskAttemptReport attemptReport = mock(TaskAttemptReport.class);
    when(attemptReport.getTaskAttemptState()).thenReturn(attemptState);
    when(attempt.getReport()).thenReturn(attemptReport);
    Map<TaskAttemptId, TaskAttempt> attempts = new HashMap<TaskAttemptId, TaskAttempt>();
    attempts.put(attemptId, attempt);
    when(task.getAttempts()).thenReturn(attempts);
    Map<TaskId, Task> tasks = new HashMap<TaskId, Task>();
    tasks.put(taskId, task);
    app.setTask(task);
    Job job = mock(Job.class);
    when(job.getTasks(TaskType.REDUCE)).thenReturn(tasks);
    app.setJob(job);
    // Render the block filtered to successful reduce attempts.
    AttemptsBlockForTest block = new AttemptsBlockForTest(app, new Configuration());
    block.addParameter(AMParams.TASK_TYPE, "r");
    block.addParameter(AMParams.ATTEMPT_STATE, "SUCCESSFUL");
    PrintWriter pWriter = new PrintWriter(data);
    Block html = new BlockForTest(new HtmlBlockForTest(), pWriter, 0, false);
    block.render(html);
    pWriter.flush();
    // The output must contain an anchor linking the attempt to its task page.
    assertTrue(data.toString().contains("<a href='" + block.url("task", task.getID().toString()) + "'>" + "attempt_0_0001_r_000000_0</a>"));
}
Also used : Task(org.apache.hadoop.mapreduce.v2.app.job.Task) TaskId(org.apache.hadoop.mapreduce.v2.api.records.TaskId) TaskReport(org.apache.hadoop.mapreduce.v2.api.records.TaskReport) TaskAttemptIdPBImpl(org.apache.hadoop.mapreduce.v2.api.records.impl.pb.TaskAttemptIdPBImpl) Configuration(org.apache.hadoop.conf.Configuration) TaskIdPBImpl(org.apache.hadoop.mapreduce.v2.api.records.impl.pb.TaskIdPBImpl) HashMap(java.util.HashMap) TaskAttemptId(org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId) AppContext(org.apache.hadoop.mapreduce.v2.app.AppContext) BlockForTest(org.apache.hadoop.yarn.webapp.view.BlockForTest) TaskAttemptReport(org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptReport) TaskAttemptState(org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptState) JobIdPBImpl(org.apache.hadoop.mapreduce.v2.api.records.impl.pb.JobIdPBImpl) HtmlBlock(org.apache.hadoop.yarn.webapp.view.HtmlBlock) Block(org.apache.hadoop.yarn.webapp.view.HtmlBlock.Block) FewAttemptsBlock(org.apache.hadoop.mapreduce.v2.app.webapp.AttemptsPage.FewAttemptsBlock) TaskAttempt(org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt) Job(org.apache.hadoop.mapreduce.v2.app.job.Job) JobId(org.apache.hadoop.mapreduce.v2.api.records.JobId) PrintWriter(java.io.PrintWriter) BlockForTest(org.apache.hadoop.yarn.webapp.view.BlockForTest) Test(org.junit.Test)

Example 10 with TaskAttemptReport

use of org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptReport in project hadoop by apache.

From the class MockJobs, the method newTaskAttemptReport:

/**
 * Creates a randomized mock {@code TaskAttemptReport} for the given attempt id.
 * Start time is up to DT ms in the past, finish time up to DT ms in the future;
 * reduce attempts additionally get shuffle/sort milestones between the two.
 */
public static TaskAttemptReport newTaskAttemptReport(TaskAttemptId id) {
    ApplicationAttemptId appAttemptId =
        ApplicationAttemptId.newInstance(id.getTaskId().getJobId().getAppId(), 0);
    ContainerId containerId = ContainerId.newContainerId(appAttemptId, 0);
    TaskAttemptReport report = Records.newRecord(TaskAttemptReport.class);
    report.setTaskAttemptId(id);
    // Start in the past, finish in the future; the trailing +1 guarantees
    // finish is strictly after "now" even when the random offset is 0.
    report.setStartTime(System.currentTimeMillis() - (int) (Math.random() * DT));
    report.setFinishTime(System.currentTimeMillis() + (int) (Math.random() * DT) + 1);
    if (id.getTaskId().getTaskType() == TaskType.REDUCE) {
        // Shuffle finishes at the midpoint of the run; sort at the midpoint
        // between shuffle-finish and overall finish.
        long shuffleDone = (report.getFinishTime() + report.getStartTime()) / 2;
        report.setShuffleFinishTime(shuffleDone);
        report.setSortFinishTime((report.getFinishTime() + shuffleDone) / 2);
    }
    report.setPhase(PHASES.next());
    report.setTaskAttemptState(TASK_ATTEMPT_STATES.next());
    report.setProgress((float) Math.random());
    report.setCounters(TypeConverter.toYarn(newCounters()));
    report.setContainerId(containerId);
    report.setDiagnosticInfo(DIAGS.next());
    report.setStateString("Moving average " + Math.random());
    return report;
}
Also used : TaskAttemptReport(org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptReport) ContainerId(org.apache.hadoop.yarn.api.records.ContainerId) ApplicationAttemptId(org.apache.hadoop.yarn.api.records.ApplicationAttemptId)

Aggregations

TaskAttemptReport (org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptReport)12 TaskAttempt (org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt)7 Task (org.apache.hadoop.mapreduce.v2.app.job.Task)6 TaskAttemptId (org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId)5 Job (org.apache.hadoop.mapreduce.v2.app.job.Job)5 Test (org.junit.Test)4 Configuration (org.apache.hadoop.conf.Configuration)3 JobReport (org.apache.hadoop.mapreduce.v2.api.records.JobReport)3 TaskId (org.apache.hadoop.mapreduce.v2.api.records.TaskId)3 TaskReport (org.apache.hadoop.mapreduce.v2.api.records.TaskReport)3 ApplicationAttemptId (org.apache.hadoop.yarn.api.records.ApplicationAttemptId)3 ContainerId (org.apache.hadoop.yarn.api.records.ContainerId)3 IOException (java.io.IOException)2 PrintWriter (java.io.PrintWriter)2 HashMap (java.util.HashMap)2 GetJobReportRequest (org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetJobReportRequest)2 GetTaskAttemptReportRequest (org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskAttemptReportRequest)2 TaskAttemptState (org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptState)2 TaskAttemptIdPBImpl (org.apache.hadoop.mapreduce.v2.api.records.impl.pb.TaskAttemptIdPBImpl)2 AppContext (org.apache.hadoop.mapreduce.v2.app.AppContext)2