use of org.apache.hadoop.mapreduce.v2.api.records.TaskReport in project hadoop by apache.
the class MRApp method verifyCompleted.
public void verifyCompleted() {
  for (Job job : getContext().getAllJobs().values()) {
    JobReport jobReport = job.getReport();
    System.out.println("Job start time :" + jobReport.getStartTime());
    System.out.println("Job finish time :" + jobReport.getFinishTime());
    Assert.assertTrue("Job start time is not less than finish time",
        jobReport.getStartTime() <= jobReport.getFinishTime());
    Assert.assertTrue("Job finish time is in future",
        jobReport.getFinishTime() <= System.currentTimeMillis());
    for (Task task : job.getTasks().values()) {
      TaskReport taskReport = task.getReport();
      System.out.println("Task start time : " + taskReport.getStartTime());
      System.out.println("Task finish time : " + taskReport.getFinishTime());
      Assert.assertTrue("Task start time is not less than finish time",
          taskReport.getStartTime() <= taskReport.getFinishTime());
      for (TaskAttempt attempt : task.getAttempts().values()) {
        TaskAttemptReport attemptReport = attempt.getReport();
        Assert.assertTrue("Attempt start time is not less than finish time",
            attemptReport.getStartTime() <= attemptReport.getFinishTime());
      }
    }
  }
}
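For orientation, here is a minimal sketch of how a test typically drives MRApp before calling verifyCompleted(); the map/reduce counts, the test name, and the surrounding test method are illustrative assumptions, not part of the snippet above.

// Hedged sketch: run a small in-process MR application to completion,
// then verify the recorded job/task/attempt timestamps.
@Test
public void testTimestampsAreConsistent() throws Exception {
  // 2 maps, 1 reduce, auto-complete containers (illustrative values)
  MRApp app = new MRApp(2, 1, true, "testTimestampsAreConsistent", true);
  Job job = app.submit(new Configuration());
  app.waitForState(job, JobState.SUCCEEDED);
  app.verifyCompleted();
}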
use of org.apache.hadoop.mapreduce.v2.api.records.TaskReport in project hadoop by apache.
the class GetTaskReportResponsePBImpl method getTaskReport.
@Override
public TaskReport getTaskReport() {
  GetTaskReportResponseProtoOrBuilder p = viaProto ? proto : builder;
  // Return the cached record if the proto has already been converted.
  if (this.taskReport != null) {
    return this.taskReport;
  }
  if (!p.hasTaskReport()) {
    return null;
  }
  // Convert lazily on first access and cache the result.
  this.taskReport = convertFromProtoFormat(p.getTaskReport());
  return this.taskReport;
}
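For completeness, the setter that usually accompanies this lazy getter in the PBImpl pattern is sketched below; it is an assumption based on the common protobuf record-wrapper idiom, not part of the snippet above.

// Hedged sketch of the typical companion setter: cache the record and clear the
// proto field; the record is converted back to proto only when the proto is built.
@Override
public void setTaskReport(TaskReport taskReport) {
  maybeInitBuilder();
  if (taskReport == null) {
    builder.clearTaskReport();
  }
  this.taskReport = taskReport;
}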
use of org.apache.hadoop.mapreduce.v2.api.records.TaskReport in project hadoop by apache.
the class GetTaskReportsResponsePBImpl method initTaskReports.
private void initTaskReports() {
  // Already initialized: nothing to do.
  if (this.taskReports != null) {
    return;
  }
  GetTaskReportsResponseProtoOrBuilder p = viaProto ? proto : builder;
  List<TaskReportProto> list = p.getTaskReportsList();
  // Convert each TaskReportProto into a TaskReport record and cache the list.
  this.taskReports = new ArrayList<TaskReport>();
  for (TaskReportProto c : list) {
    this.taskReports.add(convertFromProtoFormat(c));
  }
}
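The public accessors of this response object typically delegate to initTaskReports() so that the proto-to-record conversion happens at most once; the method names below follow the usual GetTaskReportsResponse API but are shown here as assumptions rather than taken from the snippet above.

// Hedged sketch: accessors trigger the lazy conversion, then read the cached list.
@Override
public List<TaskReport> getTaskReportList() {
  initTaskReports();
  return this.taskReports;
}

@Override
public TaskReport getTaskReport(int index) {
  initTaskReports();
  return this.taskReports.get(index);
}

@Override
public int getTaskReportCount() {
  initTaskReports();
  return this.taskReports.size();
}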
use of org.apache.hadoop.mapreduce.v2.api.records.TaskReport in project hadoop by apache.
the class TestBlocks method testAttemptsBlock.
/**
* test AttemptsBlock's rendering.
*/
@Test
public void testAttemptsBlock() {
  AppContext ctx = mock(AppContext.class);
  AppForTest app = new AppForTest(ctx);

  JobId jobId = new JobIdPBImpl();
  jobId.setId(0);
  jobId.setAppId(ApplicationIdPBImpl.newInstance(0, 1));

  TaskId taskId = new TaskIdPBImpl();
  taskId.setId(0);
  taskId.setTaskType(TaskType.REDUCE);
  taskId.setJobId(jobId);

  // Mock a REDUCE task with a single SUCCEEDED attempt.
  Task task = mock(Task.class);
  when(task.getID()).thenReturn(taskId);
  TaskReport report = mock(TaskReport.class);
  when(task.getReport()).thenReturn(report);
  when(task.getType()).thenReturn(TaskType.REDUCE);

  Map<TaskId, Task> tasks = new HashMap<TaskId, Task>();
  Map<TaskAttemptId, TaskAttempt> attempts = new HashMap<TaskAttemptId, TaskAttempt>();
  TaskAttempt attempt = mock(TaskAttempt.class);
  TaskAttemptId taId = new TaskAttemptIdPBImpl();
  taId.setId(0);
  taId.setTaskId(task.getID());
  when(attempt.getID()).thenReturn(taId);
  final TaskAttemptState taState = TaskAttemptState.SUCCEEDED;
  when(attempt.getState()).thenReturn(taState);
  TaskAttemptReport taReport = mock(TaskAttemptReport.class);
  when(taReport.getTaskAttemptState()).thenReturn(taState);
  when(attempt.getReport()).thenReturn(taReport);
  attempts.put(taId, attempt);
  tasks.put(taskId, task);
  when(task.getAttempts()).thenReturn(attempts);
  app.setTask(task);

  Job job = mock(Job.class);
  when(job.getTasks(TaskType.REDUCE)).thenReturn(tasks);
  app.setJob(job);

  // Render the block for reduce attempts in the SUCCESSFUL state.
  AttemptsBlockForTest block = new AttemptsBlockForTest(app, new Configuration());
  block.addParameter(AMParams.TASK_TYPE, "r");
  block.addParameter(AMParams.ATTEMPT_STATE, "SUCCESSFUL");
  PrintWriter pWriter = new PrintWriter(data);
  Block html = new BlockForTest(new HtmlBlockForTest(), pWriter, 0, false);
  block.render(html);
  pWriter.flush();

  // The rendered page must link the attempt back to its task page.
  assertTrue(data.toString().contains("<a href='" + block.url("task", task.getID().toString())
      + "'>" + "attempt_0_0001_r_000000_0</a>"));
}
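The attempt name asserted above follows the standard string form of the v2 task/attempt IDs built from ApplicationIdPBImpl.newInstance(0, 1); the small check below is only an illustration of that relationship (an assumption, not part of the original test).

// Illustrative (assumption): the PBImpl IDs constructed above stringify to the
// names the rendered page is expected to contain.
assertEquals("task_0_0001_r_000000", taskId.toString());
assertEquals("attempt_0_0001_r_000000_0", taId.toString());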
use of org.apache.hadoop.mapreduce.v2.api.records.TaskReport in project hadoop by apache.
the class TestBlocks method testTasksBlock.
/**
* Test rendering for TasksBlock
*/
@Test
public void testTasksBlock() throws Exception {
  ApplicationId appId = ApplicationIdPBImpl.newInstance(0, 1);
  JobId jobId = new JobIdPBImpl();
  jobId.setId(0);
  jobId.setAppId(appId);

  TaskId taskId = new TaskIdPBImpl();
  taskId.setId(0);
  taskId.setTaskType(TaskType.MAP);
  taskId.setJobId(jobId);

  // Mock a MAP task whose report carries progress, state, start/finish times and
  // a status string containing a literal newline.
  Task task = mock(Task.class);
  when(task.getID()).thenReturn(taskId);
  TaskReport report = mock(TaskReport.class);
  when(report.getProgress()).thenReturn(0.7f);
  when(report.getTaskState()).thenReturn(TaskState.SUCCEEDED);
  when(report.getStartTime()).thenReturn(100001L);
  when(report.getFinishTime()).thenReturn(100011L);
  when(report.getStatus()).thenReturn("Dummy Status \n*");
  when(task.getReport()).thenReturn(report);
  when(task.getType()).thenReturn(TaskType.MAP);

  Map<TaskId, Task> tasks = new HashMap<TaskId, Task>();
  tasks.put(taskId, task);
  AppContext ctx = mock(AppContext.class);
  Job job = mock(Job.class);
  when(job.getTasks()).thenReturn(tasks);
  App app = new App(ctx);
  app.setJob(job);

  // Render the map-task table.
  TasksBlockForTest taskBlock = new TasksBlockForTest(app);
  taskBlock.addParameter(AMParams.TASK_TYPE, "m");
  PrintWriter pWriter = new PrintWriter(data);
  Block html = new BlockForTest(new HtmlBlockForTest(), pWriter, 0, false);
  taskBlock.render(html);
  pWriter.flush();

  // The rendered output contains the task name, progress, state and times...
  assertTrue(data.toString().contains("task_0_0001_m_000000"));
  assertTrue(data.toString().contains("70.00"));
  assertTrue(data.toString().contains("SUCCEEDED"));
  assertTrue(data.toString().contains("100001"));
  assertTrue(data.toString().contains("100011"));
  // ...and the status string appears escaped, never with a raw newline.
  assertFalse(data.toString().contains("Dummy Status \n*"));
  assertTrue(data.toString().contains("Dummy Status \\n*"));
}
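The last two assertions check that the status string is escaped before it reaches the page, since the tasks table is emitted as JavaScript data and a raw newline would break it. A minimal illustration of that kind of escaping follows, using commons-lang as an assumption about the actual implementation rather than a confirmed detail of TasksBlock.

// Hypothetical illustration: a literal '\n' in the status becomes the two
// characters '\' and 'n' after JavaScript-string escaping.
String status = "Dummy Status \n*";
String escaped = org.apache.commons.lang.StringEscapeUtils.escapeJavaScript(status);
// escaped.equals("Dummy Status \\n*") is true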