Use of org.apache.hadoop.mapreduce.v2.app.webapp.dao.TaskAttemptInfo in project hadoop by apache.
The class TestRecovery, method testRecoveryTaskSuccessAllAttemptsSucceed.
@Test
public void testRecoveryTaskSuccessAllAttemptsSucceed() {
    LOG.info("--- START: testRecoveryTaskSuccessAllAttemptsSucceed ---");
    long clusterTimestamp = System.currentTimeMillis();
    EventHandler mockEventHandler = mock(EventHandler.class);
    MapTaskImpl recoverMapTask = getMockMapTask(clusterTimestamp, mockEventHandler);

    TaskId taskId = recoverMapTask.getID();
    JobID jobID = new JobID(Long.toString(clusterTimestamp), 1);
    TaskID taskID = new TaskID(jobID, org.apache.hadoop.mapreduce.TaskType.MAP, taskId.getId());

    // Mock up the TaskAttempts
    Map<TaskAttemptID, TaskAttemptInfo> mockTaskAttempts =
        new HashMap<TaskAttemptID, TaskAttemptInfo>();
    TaskAttemptID taId1 = new TaskAttemptID(taskID, 2);
    TaskAttemptInfo mockTAinfo1 = getMockTaskAttemptInfo(taId1, TaskAttemptState.SUCCEEDED);
    mockTaskAttempts.put(taId1, mockTAinfo1);
    TaskAttemptID taId2 = new TaskAttemptID(taskID, 1);
    TaskAttemptInfo mockTAinfo2 = getMockTaskAttemptInfo(taId2, TaskAttemptState.SUCCEEDED);
    mockTaskAttempts.put(taId2, mockTAinfo2);

    OutputCommitter mockCommitter = mock(OutputCommitter.class);
    TaskInfo mockTaskInfo = mock(TaskInfo.class);
    when(mockTaskInfo.getTaskStatus()).thenReturn("SUCCEEDED");
    when(mockTaskInfo.getTaskId()).thenReturn(taskID);
    when(mockTaskInfo.getAllTaskAttempts()).thenReturn(mockTaskAttempts);

    recoverMapTask.handle(new TaskRecoverEvent(taskId, mockTaskInfo, mockCommitter, true));

    ArgumentCaptor<Event> arg = ArgumentCaptor.forClass(Event.class);
    verify(mockEventHandler, atLeast(1)).handle(
        (org.apache.hadoop.yarn.event.Event) arg.capture());

    Map<TaskAttemptID, TaskAttemptState> finalAttemptStates =
        new HashMap<TaskAttemptID, TaskAttemptState>();
    finalAttemptStates.put(taId1, TaskAttemptState.SUCCEEDED);
    finalAttemptStates.put(taId2, TaskAttemptState.SUCCEEDED);

    List<EventType> jobHistoryEvents = new ArrayList<EventType>();
    jobHistoryEvents.add(EventType.TASK_STARTED);
    jobHistoryEvents.add(EventType.MAP_ATTEMPT_STARTED);
    jobHistoryEvents.add(EventType.MAP_ATTEMPT_FINISHED);
    jobHistoryEvents.add(EventType.MAP_ATTEMPT_STARTED);
    jobHistoryEvents.add(EventType.MAP_ATTEMPT_FINISHED);
    jobHistoryEvents.add(EventType.TASK_FINISHED);

    recoveryChecker(recoverMapTask, TaskState.SUCCEEDED, finalAttemptStates, arg,
        jobHistoryEvents, 2L, 0L);
}
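Both this test and testRecoveryAllFailAttempts below rely on a getMockTaskAttemptInfo helper that is defined elsewhere in TestRecovery. A minimal sketch of what such a helper might look like, assuming Mockito stubs for the accessors the recovery path reads; the stubbed timing values here are illustrative assumptions, not the real helper's:

private TaskAttemptInfo getMockTaskAttemptInfo(TaskAttemptID attemptId, TaskAttemptState state) {
    TaskAttemptInfo mockTAinfo = mock(TaskAttemptInfo.class);
    // The recovery path keys attempts by id and maps the status string back
    // to a TaskAttemptState, so these two stubs are the essential ones.
    when(mockTAinfo.getAttemptId()).thenReturn(attemptId);
    when(mockTAinfo.getTaskStatus()).thenReturn(state.toString());
    when(mockTAinfo.getTaskType()).thenReturn(org.apache.hadoop.mapreduce.TaskType.MAP);
    // Illustrative timings so any elapsed-time arithmetic stays non-negative.
    long finishTime = System.currentTimeMillis();
    when(mockTAinfo.getStartTime()).thenReturn(finishTime - 10000L);
    when(mockTAinfo.getFinishTime()).thenReturn(finishTime);
    return mockTAinfo;
}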
Use of org.apache.hadoop.mapreduce.v2.app.webapp.dao.TaskAttemptInfo in project hadoop by apache.
The class TestRecovery, method testRecoveryAllFailAttempts.
@Test
public void testRecoveryAllFailAttempts() {
    LOG.info("--- START: testRecoveryAllFailAttempts ---");
    long clusterTimestamp = System.currentTimeMillis();
    EventHandler mockEventHandler = mock(EventHandler.class);
    MapTaskImpl recoverMapTask = getMockMapTask(clusterTimestamp, mockEventHandler);

    TaskId taskId = recoverMapTask.getID();
    JobID jobID = new JobID(Long.toString(clusterTimestamp), 1);
    TaskID taskID = new TaskID(jobID, org.apache.hadoop.mapreduce.TaskType.MAP, taskId.getId());

    // Mock up the TaskAttempts
    Map<TaskAttemptID, TaskAttemptInfo> mockTaskAttempts =
        new HashMap<TaskAttemptID, TaskAttemptInfo>();
    TaskAttemptID taId1 = new TaskAttemptID(taskID, 2);
    TaskAttemptInfo mockTAinfo1 = getMockTaskAttemptInfo(taId1, TaskAttemptState.FAILED);
    mockTaskAttempts.put(taId1, mockTAinfo1);
    TaskAttemptID taId2 = new TaskAttemptID(taskID, 1);
    TaskAttemptInfo mockTAinfo2 = getMockTaskAttemptInfo(taId2, TaskAttemptState.FAILED);
    mockTaskAttempts.put(taId2, mockTAinfo2);

    OutputCommitter mockCommitter = mock(OutputCommitter.class);
    TaskInfo mockTaskInfo = mock(TaskInfo.class);
    when(mockTaskInfo.getTaskStatus()).thenReturn("FAILED");
    when(mockTaskInfo.getTaskId()).thenReturn(taskID);
    when(mockTaskInfo.getAllTaskAttempts()).thenReturn(mockTaskAttempts);

    recoverMapTask.handle(new TaskRecoverEvent(taskId, mockTaskInfo, mockCommitter, true));

    ArgumentCaptor<Event> arg = ArgumentCaptor.forClass(Event.class);
    verify(mockEventHandler, atLeast(1)).handle(
        (org.apache.hadoop.yarn.event.Event) arg.capture());

    Map<TaskAttemptID, TaskAttemptState> finalAttemptStates =
        new HashMap<TaskAttemptID, TaskAttemptState>();
    finalAttemptStates.put(taId1, TaskAttemptState.FAILED);
    finalAttemptStates.put(taId2, TaskAttemptState.FAILED);

    List<EventType> jobHistoryEvents = new ArrayList<EventType>();
    jobHistoryEvents.add(EventType.TASK_STARTED);
    jobHistoryEvents.add(EventType.MAP_ATTEMPT_STARTED);
    jobHistoryEvents.add(EventType.MAP_ATTEMPT_FAILED);
    jobHistoryEvents.add(EventType.MAP_ATTEMPT_STARTED);
    jobHistoryEvents.add(EventType.MAP_ATTEMPT_FAILED);
    jobHistoryEvents.add(EventType.TASK_FAILED);

    recoveryChecker(recoverMapTask, TaskState.FAILED, finalAttemptStates, arg,
        jobHistoryEvents, 2L, 2L);
}
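A usage note on the two tests above: the trailing long arguments to recoveryChecker (2L, 0L in the all-succeed case; 2L, 2L in the all-fail case) appear to encode the expected attempt counts, presumably the number of launched attempts followed by the number of failed attempts. recoveryChecker itself is defined elsewhere in TestRecovery, so this reading is an assumption based on how the counts line up with the mocked attempts.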
Use of org.apache.hadoop.mapreduce.v2.app.webapp.dao.TaskAttemptInfo in project hadoop by apache.
The class HsTasksBlock, method render.
/*
 * (non-Javadoc)
 * @see org.apache.hadoop.yarn.webapp.view.HtmlBlock#render(org.apache.hadoop.yarn.webapp.view.HtmlBlock.Block)
 */
@Override
protected void render(Block html) {
    if (app.getJob() == null) {
        html.h2($(TITLE));
        return;
    }
    TaskType type = null;
    String symbol = $(TASK_TYPE);
    if (!symbol.isEmpty()) {
        type = MRApps.taskType(symbol);
    }
    THEAD<TABLE<Hamlet>> thead;
    if (type != null) {
        thead = html.table("#" + app.getJob().getID() + type).$class("dt-tasks").thead();
    } else {
        thead = html.table("#tasks").thead();
    }
    // Create the spanning row
    int attemptColSpan = type == TaskType.REDUCE ? 8 : 3;
    thead.tr().
        th().$colspan(5).$class("ui-state-default")._("Task")._().
        th().$colspan(attemptColSpan).$class("ui-state-default")._("Successful Attempt")._().
    _();
    TR<THEAD<TABLE<Hamlet>>> theadRow = thead.tr().
        th("Name").
        th("State").
        th("Start Time").
        th("Finish Time").
        th("Elapsed Time").
        th("Start Time"); // Attempt
    if (type == TaskType.REDUCE) {
        theadRow.th("Shuffle Finish Time"); // Attempt
        theadRow.th("Merge Finish Time"); // Attempt
    }
    theadRow.th("Finish Time"); // Attempt
    if (type == TaskType.REDUCE) {
        theadRow.th("Elapsed Time Shuffle"); // Attempt
        theadRow.th("Elapsed Time Merge"); // Attempt
        theadRow.th("Elapsed Time Reduce"); // Attempt
    }
    theadRow.th("Elapsed Time"); // Attempt
    TBODY<TABLE<Hamlet>> tbody = theadRow._()._().tbody();

    // Write all the data into a JavaScript array of arrays for JQuery
    // DataTables to display
    StringBuilder tasksTableData = new StringBuilder("[\n");
    for (Task task : app.getJob().getTasks().values()) {
        if (type != null && task.getType() != type) {
            continue;
        }
        TaskInfo info = new TaskInfo(task);
        String tid = info.getId();
        long startTime = info.getStartTime();
        long finishTime = info.getFinishTime();
        long elapsed = info.getElapsedTime();
        long attemptStartTime = -1;
        long shuffleFinishTime = -1;
        long sortFinishTime = -1;
        long attemptFinishTime = -1;
        long elapsedShuffleTime = -1;
        long elapsedSortTime = -1;
        long elapsedReduceTime = -1;
        long attemptElapsed = -1;
        TaskAttempt successful = info.getSuccessful();
        if (successful != null) {
            TaskAttemptInfo ta;
            if (type == TaskType.REDUCE) {
                ReduceTaskAttemptInfo rta = new ReduceTaskAttemptInfo(successful, type);
                shuffleFinishTime = rta.getShuffleFinishTime();
                sortFinishTime = rta.getMergeFinishTime();
                elapsedShuffleTime = rta.getElapsedShuffleTime();
                elapsedSortTime = rta.getElapsedMergeTime();
                elapsedReduceTime = rta.getElapsedReduceTime();
                ta = rta;
            } else {
                ta = new TaskAttemptInfo(successful, type, false);
            }
            attemptStartTime = ta.getStartTime();
            attemptFinishTime = ta.getFinishTime();
            attemptElapsed = ta.getElapsedTime();
        }
        tasksTableData.append("[\"")
            .append("<a href='" + url("task", tid)).append("'>")
            .append(tid).append("</a>\",\"")
            .append(info.getState()).append("\",\"")
            .append(startTime).append("\",\"")
            .append(finishTime).append("\",\"")
            .append(elapsed).append("\",\"")
            .append(attemptStartTime).append("\",\"");
        if (type == TaskType.REDUCE) {
            tasksTableData.append(shuffleFinishTime).append("\",\"")
                .append(sortFinishTime).append("\",\"");
        }
        tasksTableData.append(attemptFinishTime).append("\",\"");
        if (type == TaskType.REDUCE) {
            tasksTableData.append(elapsedShuffleTime).append("\",\"")
                .append(elapsedSortTime).append("\",\"")
                .append(elapsedReduceTime).append("\",\"");
        }
        tasksTableData.append(attemptElapsed).append("\"],\n");
    }
    // Remove the last comma and close off the array of arrays
    if (tasksTableData.charAt(tasksTableData.length() - 2) == ',') {
        tasksTableData.delete(tasksTableData.length() - 2, tasksTableData.length() - 1);
    }
    tasksTableData.append("]");
    html.script().$type("text/javascript")._("var tasksTableData=" + tasksTableData)._();

    TR<TFOOT<TABLE<Hamlet>>> footRow = tbody._().tfoot().tr();
    footRow.
        th().input("search_init").$type(InputType.text).$name("task").$value("ID")._()._().
        th().input("search_init").$type(InputType.text).$name("state").$value("State")._()._().
        th().input("search_init").$type(InputType.text).$name("start_time").$value("Start Time")._()._().
        th().input("search_init").$type(InputType.text).$name("finish_time").$value("Finish Time")._()._().
        th().input("search_init").$type(InputType.text).$name("elapsed_time").$value("Elapsed Time")._()._().
        th().input("search_init").$type(InputType.text).$name("attempt_start_time").$value("Start Time")._()._();
    if (type == TaskType.REDUCE) {
        footRow.th().input("search_init").$type(InputType.text).$name("shuffle_time").$value("Shuffle Time")._()._();
        footRow.th().input("search_init").$type(InputType.text).$name("merge_time").$value("Merge Time")._()._();
    }
    footRow.th().input("search_init").$type(InputType.text).$name("attempt_finish").$value("Finish Time")._()._();
    if (type == TaskType.REDUCE) {
        footRow.th().input("search_init").$type(InputType.text).$name("elapsed_shuffle_time").$value("Elapsed Shuffle Time")._()._();
        footRow.th().input("search_init").$type(InputType.text).$name("elapsed_merge_time").$value("Elapsed Merge Time")._()._();
        footRow.th().input("search_init").$type(InputType.text).$name("elapsed_reduce_time").$value("Elapsed Reduce Time")._()._();
    }
    footRow.th().input("search_init").$type(InputType.text).$name("attempt_elapsed").$value("Elapsed Time")._()._();
    footRow._()._()._();
}
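A note on the generated payload: each loop iteration appends one row terminated by "],\n", so the trailing comma always sits at index length - 2, which is why the cleanup step deletes exactly that character before appending the closing "]". For a plain map task (eight columns: five task fields plus attempt start, finish, and elapsed), a row comes out looking roughly like the following, with all values and the link target purely illustrative:

["<a href='/task/task_1234567890123_0001_m_000000'>task_1234567890123_0001_m_000000</a>","SUCCEEDED","1500000000000","1500000060000","60000","1500000001000","1500000059000","58000"],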
Use of org.apache.hadoop.mapreduce.v2.app.webapp.dao.TaskAttemptInfo in project hadoop by apache.
The class HsWebServices, method getJobTaskAttempts.
@GET
@Path("/mapreduce/jobs/{jobid}/tasks/{taskid}/attempts")
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
    MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 })
public TaskAttemptsInfo getJobTaskAttempts(@Context HttpServletRequest hsr,
    @PathParam("jobid") String jid, @PathParam("taskid") String tid) {
    init();
    TaskAttemptsInfo attempts = new TaskAttemptsInfo();
    Job job = AMWebServices.getJobFromJobIdString(jid, ctx);
    checkAccess(job, hsr);
    Task task = AMWebServices.getTaskFromTaskIdString(tid, job);
    for (TaskAttempt ta : task.getAttempts().values()) {
        if (ta != null) {
            if (task.getType() == TaskType.REDUCE) {
                attempts.add(new ReduceTaskAttemptInfo(ta, task.getType()));
            } else {
                attempts.add(new TaskAttemptInfo(ta, task.getType(), false));
            }
        }
    }
    return attempts;
}
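Since HsWebServices is mounted under the JobHistory server's web-services root (/ws/v1/history), the endpoint above can be exercised with any HTTP client. A minimal Java sketch, assuming a history server at the placeholder host historyserver.example.com on the default web port 19888, and placeholder job/task ids:

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;

public class TaskAttemptsClient {
    public static void main(String[] args) throws Exception {
        // Hits the /mapreduce/jobs/{jobid}/tasks/{taskid}/attempts resource
        // defined by getJobTaskAttempts; ids here are placeholders.
        URL url = new URL("http://historyserver.example.com:19888"
            + "/ws/v1/history/mapreduce/jobs/job_1234567890123_0001"
            + "/tasks/task_1234567890123_0001_m_000000/attempts");
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        // Ask for the JSON representation declared in @Produces.
        conn.setRequestProperty("Accept", "application/json");
        try (BufferedReader in = new BufferedReader(
                new InputStreamReader(conn.getInputStream()))) {
            String line;
            while ((line = in.readLine()) != null) {
                System.out.println(line); // JSON taskAttempts payload
            }
        } finally {
            conn.disconnect();
        }
    }
}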
Use of org.apache.hadoop.mapreduce.v2.app.webapp.dao.TaskAttemptInfo in project hadoop by apache.
The class TestCompletedTask, method testTaskStartTimes.
@Test(timeout = 5000)
public void testTaskStartTimes() {
    TaskId taskId = mock(TaskId.class);
    TaskInfo taskInfo = mock(TaskInfo.class);
    Map<TaskAttemptID, TaskAttemptInfo> taskAttempts =
        new TreeMap<TaskAttemptID, TaskAttemptInfo>();

    TaskAttemptID id = new TaskAttemptID("0", 0, TaskType.MAP, 0, 0);
    TaskAttemptInfo info = mock(TaskAttemptInfo.class);
    when(info.getAttemptId()).thenReturn(id);
    when(info.getStartTime()).thenReturn(10L);
    taskAttempts.put(id, info);

    id = new TaskAttemptID("1", 0, TaskType.MAP, 1, 1);
    info = mock(TaskAttemptInfo.class);
    when(info.getAttemptId()).thenReturn(id);
    when(info.getStartTime()).thenReturn(20L);
    taskAttempts.put(id, info);

    when(taskInfo.getAllTaskAttempts()).thenReturn(taskAttempts);
    CompletedTask task = new CompletedTask(taskId, taskInfo);
    TaskReport report = task.getReport();

    // Make sure the startTime returned by the report is the lesser of the
    // attempt launch times
    assertTrue(report.getStartTime() == 10);
}
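The assertion works because CompletedTask derives the report's start time from its attempts, and the expected behavior is that the earliest attempt launch time wins. A minimal sketch of that reduction, written against only the getAllTaskAttempts() map used above; this illustrates the expected semantics and is not CompletedTask's actual code:

// Illustrative reduction: take the earliest start time across all attempts.
long reportStartTime = Long.MAX_VALUE;
for (TaskAttemptInfo attempt : taskInfo.getAllTaskAttempts().values()) {
    reportStartTime = Math.min(reportStartTime, attempt.getStartTime());
}
// With the two mocked attempts above (start times 10 and 20), this yields 10.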