
Example 1 with AppForTest

Use of org.apache.hadoop.mapreduce.v2.app.webapp.AppForTest in project hadoop by apache.

From the class TestBlocks, method testAttemptsBlock.

/**
   * test AttemptsBlock's rendering.
   */
@Test
public void testAttemptsBlock() {
    AppContext ctx = mock(AppContext.class);
    AppForTest app = new AppForTest(ctx);
    JobId jobId = new JobIdPBImpl();
    jobId.setId(0);
    jobId.setAppId(ApplicationIdPBImpl.newInstance(0, 1));
    TaskId taskId = new TaskIdPBImpl();
    taskId.setId(0);
    taskId.setTaskType(TaskType.REDUCE);
    taskId.setJobId(jobId);
    Task task = mock(Task.class);
    when(task.getID()).thenReturn(taskId);
    TaskReport report = mock(TaskReport.class);
    when(task.getReport()).thenReturn(report);
    when(task.getType()).thenReturn(TaskType.REDUCE);
    Map<TaskId, Task> tasks = new HashMap<TaskId, Task>();
    Map<TaskAttemptId, TaskAttempt> attempts = new HashMap<TaskAttemptId, TaskAttempt>();
    TaskAttempt attempt = mock(TaskAttempt.class);
    TaskAttemptId taId = new TaskAttemptIdPBImpl();
    taId.setId(0);
    taId.setTaskId(task.getID());
    when(attempt.getID()).thenReturn(taId);
    final TaskAttemptState taState = TaskAttemptState.SUCCEEDED;
    when(attempt.getState()).thenReturn(taState);
    TaskAttemptReport taReport = mock(TaskAttemptReport.class);
    when(taReport.getTaskAttemptState()).thenReturn(taState);
    when(attempt.getReport()).thenReturn(taReport);
    attempts.put(taId, attempt);
    tasks.put(taskId, task);
    when(task.getAttempts()).thenReturn(attempts);
    app.setTask(task);
    Job job = mock(Job.class);
    when(job.getTasks(TaskType.REDUCE)).thenReturn(tasks);
    app.setJob(job);
    AttemptsBlockForTest block = new AttemptsBlockForTest(app, new Configuration());
    block.addParameter(AMParams.TASK_TYPE, "r");
    block.addParameter(AMParams.ATTEMPT_STATE, "SUCCESSFUL");
    PrintWriter pWriter = new PrintWriter(data);
    Block html = new BlockForTest(new HtmlBlockForTest(), pWriter, 0, false);
    block.render(html);
    pWriter.flush();
    assertTrue(data.toString().contains("<a href='" + block.url("task", task.getID().toString()) + "'>" + "attempt_0_0001_r_000000_0</a>"));
}
Also used : Task(org.apache.hadoop.mapreduce.v2.app.job.Task) TaskId(org.apache.hadoop.mapreduce.v2.api.records.TaskId) TaskReport(org.apache.hadoop.mapreduce.v2.api.records.TaskReport) TaskAttemptIdPBImpl(org.apache.hadoop.mapreduce.v2.api.records.impl.pb.TaskAttemptIdPBImpl) Configuration(org.apache.hadoop.conf.Configuration) TaskIdPBImpl(org.apache.hadoop.mapreduce.v2.api.records.impl.pb.TaskIdPBImpl) HashMap(java.util.HashMap) TaskAttemptId(org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId) AppContext(org.apache.hadoop.mapreduce.v2.app.AppContext) BlockForTest(org.apache.hadoop.yarn.webapp.view.BlockForTest) TaskAttemptReport(org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptReport) TaskAttemptState(org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptState) JobIdPBImpl(org.apache.hadoop.mapreduce.v2.api.records.impl.pb.JobIdPBImpl) HtmlBlock(org.apache.hadoop.yarn.webapp.view.HtmlBlock) Block(org.apache.hadoop.yarn.webapp.view.HtmlBlock.Block) FewAttemptsBlock(org.apache.hadoop.mapreduce.v2.app.webapp.AttemptsPage.FewAttemptsBlock) TaskAttempt(org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt) Job(org.apache.hadoop.mapreduce.v2.app.job.Job) JobId(org.apache.hadoop.mapreduce.v2.api.records.JobId) PrintWriter(java.io.PrintWriter) BlockForTest(org.apache.hadoop.yarn.webapp.view.BlockForTest) Test(org.junit.Test)
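
The snippet leans on fixtures defined elsewhere in TestBlocks: the data buffer the assertion reads back, the HtmlBlockForTest stub, and the AttemptsBlockForTest subclass whose addParameter calls stand in for request parameters such as AMParams.TASK_TYPE. A minimal sketch of how those helpers could look, assuming AttemptsBlockForTest wraps AttemptsPage.FewAttemptsBlock and only overrides parameter lookup and URL generation (the exact shapes below are assumptions, not the verbatim Hadoop sources):

// Sketch of fixtures assumed from context; names match the snippet above.
private final ByteArrayOutputStream data = new ByteArrayOutputStream();

// HtmlBlock stub: render() does nothing here, the surrounding BlockForTest
// supplies the PrintWriter the page content is written into.
private class HtmlBlockForTest extends HtmlBlock {

    @Override
    protected void render(Block html) {
    }
}

// FewAttemptsBlock that resolves request parameters from a local map, so
// addParameter(AMParams.TASK_TYPE, "r") decides which attempts are shown.
private class AttemptsBlockForTest extends AttemptsPage.FewAttemptsBlock {

    private final Map<String, String> params = new HashMap<String, String>();

    AttemptsBlockForTest(App ctx, Configuration conf) {
        super(ctx, conf);
    }

    void addParameter(String name, String value) {
        params.put(name, value);
    }

    @Override
    public String $(String key, String defaultValue) {
        String value = params.get(key);
        return value == null ? defaultValue : value;
    }

    @Override
    public String url(String... parts) {
        // Deterministic URL so the assertion can rebuild the expected href.
        StringBuilder url = new StringBuilder("url://");
        for (String part : parts) {
            url.append(part).append(':');
        }
        return url.toString();
    }
}

With helpers along these lines, data.toString() in the assertion reflects exactly what FewAttemptsBlock wrote through the BlockForTest wrapper.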

Example 2 with AppForTest

Use of org.apache.hadoop.mapreduce.v2.app.webapp.AppForTest in project hadoop by apache.

From the class TestBlocks, method testAttemptsBlock.

/**
   * test AttemptsBlock's rendering.
   */
@Test
public void testAttemptsBlock() {
    AppContext ctx = mock(AppContext.class);
    AppForTest app = new AppForTest(ctx);
    Task task = getTask(0);
    Map<TaskAttemptId, TaskAttempt> attempts = new HashMap<TaskAttemptId, TaskAttempt>();
    TaskAttempt attempt = mock(TaskAttempt.class);
    TaskAttemptId taId = new TaskAttemptIdPBImpl();
    taId.setId(0);
    taId.setTaskId(task.getID());
    when(attempt.getID()).thenReturn(taId);
    when(attempt.getNodeHttpAddress()).thenReturn("Node address");
    ApplicationId appId = ApplicationIdPBImpl.newInstance(0, 5);
    ApplicationAttemptId appAttemptId = ApplicationAttemptIdPBImpl.newInstance(appId, 1);
    ContainerId containerId = ContainerIdPBImpl.newContainerId(appAttemptId, 1);
    when(attempt.getAssignedContainerID()).thenReturn(containerId);
    when(attempt.getAssignedContainerMgrAddress()).thenReturn("assignedContainerMgrAddress");
    when(attempt.getNodeRackName()).thenReturn("nodeRackName");
    final long taStartTime = 100002L;
    final long taFinishTime = 100012L;
    final long taShuffleFinishTime = 100010L;
    final long taSortFinishTime = 100011L;
    final TaskAttemptState taState = TaskAttemptState.SUCCEEDED;
    when(attempt.getLaunchTime()).thenReturn(taStartTime);
    when(attempt.getFinishTime()).thenReturn(taFinishTime);
    when(attempt.getShuffleFinishTime()).thenReturn(taShuffleFinishTime);
    when(attempt.getSortFinishTime()).thenReturn(taSortFinishTime);
    when(attempt.getState()).thenReturn(taState);
    TaskAttemptReport taReport = mock(TaskAttemptReport.class);
    when(taReport.getStartTime()).thenReturn(taStartTime);
    when(taReport.getFinishTime()).thenReturn(taFinishTime);
    when(taReport.getShuffleFinishTime()).thenReturn(taShuffleFinishTime);
    when(taReport.getSortFinishTime()).thenReturn(taSortFinishTime);
    when(taReport.getContainerId()).thenReturn(containerId);
    when(taReport.getProgress()).thenReturn(1.0f);
    when(taReport.getStateString()).thenReturn("Processed 128/128 records <p> \n");
    when(taReport.getTaskAttemptState()).thenReturn(taState);
    when(taReport.getDiagnosticInfo()).thenReturn("");
    when(attempt.getReport()).thenReturn(taReport);
    attempts.put(taId, attempt);
    when(task.getAttempts()).thenReturn(attempts);
    app.setTask(task);
    Job job = mock(Job.class);
    when(job.getUserName()).thenReturn("User");
    app.setJob(job);
    AttemptsBlockForTest block = new AttemptsBlockForTest(app);
    block.addParameter(AMParams.TASK_TYPE, "r");
    PrintWriter pWriter = new PrintWriter(data);
    Block html = new BlockForTest(new HtmlBlockForTest(), pWriter, 0, false);
    block.render(html);
    pWriter.flush();
    // should be printed information about attempts
    assertTrue(data.toString().contains("attempt_0_0001_r_000000_0"));
    assertTrue(data.toString().contains("SUCCEEDED"));
    assertFalse(data.toString().contains("Processed 128/128 records <p> \n"));
    assertTrue(data.toString().contains("Processed 128\\/128 records &lt;p&gt; \\n"));
    assertTrue(data.toString().contains("_0005_01_000001:attempt_0_0001_r_000000_0:User:"));
    assertTrue(data.toString().contains("100002"));
    assertTrue(data.toString().contains("100010"));
    assertTrue(data.toString().contains("100011"));
    assertTrue(data.toString().contains("100012"));
}
Also used : Task(org.apache.hadoop.mapreduce.v2.app.job.Task) TaskAttemptIdPBImpl(org.apache.hadoop.mapreduce.v2.api.records.impl.pb.TaskAttemptIdPBImpl) HashMap(java.util.HashMap) TaskAttemptId(org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId) AppContext(org.apache.hadoop.mapreduce.v2.app.AppContext) ApplicationAttemptId(org.apache.hadoop.yarn.api.records.ApplicationAttemptId) BlockForTest(org.apache.hadoop.yarn.webapp.view.BlockForTest) TaskAttemptReport(org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptReport) ContainerId(org.apache.hadoop.yarn.api.records.ContainerId) TaskAttemptState(org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptState) HtmlBlock(org.apache.hadoop.yarn.webapp.view.HtmlBlock) Block(org.apache.hadoop.yarn.webapp.view.HtmlBlock.Block) AttemptsBlock(org.apache.hadoop.mapreduce.v2.hs.webapp.HsTaskPage.AttemptsBlock) TaskAttempt(org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt) AppForTest(org.apache.hadoop.mapreduce.v2.app.webapp.AppForTest) ApplicationId(org.apache.hadoop.yarn.api.records.ApplicationId) Job(org.apache.hadoop.mapreduce.v2.app.job.Job) PrintWriter(java.io.PrintWriter) BlockForTest(org.apache.hadoop.yarn.webapp.view.BlockForTest) Test(org.junit.Test) AppForTest(org.apache.hadoop.mapreduce.v2.app.webapp.AppForTest)
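
This second example exercises the history-server page, so its AttemptsBlockForTest wraps HsTaskPage.AttemptsBlock (note the single-argument constructor) rather than the AM's FewAttemptsBlock. A hedged sketch of that variant, analogous to the one above; the constructor and override shapes are assumptions inferred from how the test calls it:

// Hypothetical history-server variant; the single-argument constructor
// mirrors new AttemptsBlockForTest(app) in the test above.
private class AttemptsBlockForTest extends HsTaskPage.AttemptsBlock {

    private final Map<String, String> params = new HashMap<String, String>();

    AttemptsBlockForTest(App ctx) {
        super(ctx);
    }

    void addParameter(String name, String value) {
        params.put(name, value);
    }

    @Override
    public String $(String key, String defaultValue) {
        String value = params.get(key);
        return value == null ? defaultValue : value;
    }
}

The paired assertions around the state string check that the raw "Processed 128/128 records <p> \n" never reaches the page, while its escaped form (with &lt;p&gt; and \\n) does, which is why assertFalse and assertTrue test the two variants of the same text.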

Example 3 with AppForTest

Use of org.apache.hadoop.mapreduce.v2.app.webapp.AppForTest in project hadoop by apache.

From the class TestBlocks, method testHsTasksBlock.

/**
   * test HsTasksBlock's rendering.
   */
@Test
public void testHsTasksBlock() {
    Task task = getTask(0);
    Map<TaskId, Task> tasks = new HashMap<TaskId, Task>();
    tasks.put(task.getID(), task);
    AppContext ctx = mock(AppContext.class);
    AppForTest app = new AppForTest(ctx);
    Job job = mock(Job.class);
    when(job.getTasks()).thenReturn(tasks);
    app.setJob(job);
    HsTasksBlockForTest block = new HsTasksBlockForTest(app);
    block.addParameter(AMParams.TASK_TYPE, "r");
    PrintWriter pWriter = new PrintWriter(data);
    Block html = new BlockForTest(new HtmlBlockForTest(), pWriter, 0, false);
    block.render(html);
    pWriter.flush();
    // should be printed information about task
    assertTrue(data.toString().contains("task_0_0001_r_000000"));
    assertTrue(data.toString().contains("SUCCEEDED"));
    assertTrue(data.toString().contains("100001"));
    assertTrue(data.toString().contains("100011"));
    assertTrue(data.toString().contains(""));
}
Also used : Task(org.apache.hadoop.mapreduce.v2.app.job.Task) TaskId(org.apache.hadoop.mapreduce.v2.api.records.TaskId) HashMap(java.util.HashMap) AppContext(org.apache.hadoop.mapreduce.v2.app.AppContext) HtmlBlock(org.apache.hadoop.yarn.webapp.view.HtmlBlock) Block(org.apache.hadoop.yarn.webapp.view.HtmlBlock.Block) AttemptsBlock(org.apache.hadoop.mapreduce.v2.hs.webapp.HsTaskPage.AttemptsBlock) AppForTest(org.apache.hadoop.mapreduce.v2.app.webapp.AppForTest) Job(org.apache.hadoop.mapreduce.v2.app.job.Job) PrintWriter(java.io.PrintWriter) BlockForTest(org.apache.hadoop.yarn.webapp.view.BlockForTest) BlockForTest(org.apache.hadoop.yarn.webapp.view.BlockForTest) Test(org.junit.Test) AppForTest(org.apache.hadoop.mapreduce.v2.app.webapp.AppForTest)
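
Examples 2 and 3 both build their Task through a getTask(...) helper that is not shown on this page. A plausible sketch, assuming it mocks a REDUCE task whose report carries exactly the values the assertions look for (state SUCCEEDED, start time 100001, finish time 100011); the helper's exact body is an assumption:

// Assumed shape of getTask; the mocked report values line up with the
// "100001", "100011" and "SUCCEEDED" checks in testHsTasksBlock.
private Task getTask(long timestamp) {
    JobId jobId = new JobIdPBImpl();
    jobId.setId(0);
    jobId.setAppId(ApplicationIdPBImpl.newInstance(timestamp, 1));

    TaskId taskId = new TaskIdPBImpl();
    taskId.setId(0);
    taskId.setTaskType(TaskType.REDUCE);
    taskId.setJobId(jobId);

    Task task = mock(Task.class);
    when(task.getID()).thenReturn(taskId);
    when(task.getType()).thenReturn(TaskType.REDUCE);

    TaskReport report = mock(TaskReport.class);
    when(report.getTaskState()).thenReturn(TaskState.SUCCEEDED);
    when(report.getStartTime()).thenReturn(100001L);
    when(report.getFinishTime()).thenReturn(100011L);
    when(report.getProgress()).thenReturn(1.0f);
    when(task.getReport()).thenReturn(report);

    return task;
}

Called as getTask(0), this yields IDs whose string form starts with task_0_0001_r_000000, matching the attempt and task IDs asserted in Examples 2 and 3.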

Example 4 with AppForTest

Use of org.apache.hadoop.mapreduce.v2.app.webapp.AppForTest in project hadoop by apache.

From the class TestBlocks, method testHsController.

/**
   * test HsController
   */
@Test
public void testHsController() throws Exception {
    AppContext ctx = mock(AppContext.class);
    ApplicationId appId = ApplicationIdPBImpl.newInstance(0, 5);
    when(ctx.getApplicationID()).thenReturn(appId);
    AppForTest app = new AppForTest(ctx);
    Configuration config = new Configuration();
    RequestContext requestCtx = mock(RequestContext.class);
    HsControllerForTest controller = new HsControllerForTest(app, config, requestCtx);
    controller.index();
    assertEquals("JobHistory", controller.get(Params.TITLE, ""));
    assertEquals(HsJobPage.class, controller.jobPage());
    assertEquals(HsCountersPage.class, controller.countersPage());
    assertEquals(HsTasksPage.class, controller.tasksPage());
    assertEquals(HsTaskPage.class, controller.taskPage());
    assertEquals(HsAttemptsPage.class, controller.attemptsPage());
    controller.set(AMParams.JOB_ID, "job_01_01");
    controller.set(AMParams.TASK_ID, "task_01_01_m_01");
    controller.set(AMParams.TASK_TYPE, "m");
    controller.set(AMParams.ATTEMPT_STATE, "State");
    Job job = mock(Job.class);
    Task task = mock(Task.class);
    when(job.getTask(any(TaskId.class))).thenReturn(task);
    JobId jobID = MRApps.toJobID("job_01_01");
    when(ctx.getJob(jobID)).thenReturn(job);
    when(job.checkAccess(any(UserGroupInformation.class), any(JobACL.class))).thenReturn(true);
    controller.job();
    assertEquals(HsJobPage.class, controller.getClazz());
    controller.jobCounters();
    assertEquals(HsCountersPage.class, controller.getClazz());
    controller.taskCounters();
    assertEquals(HsCountersPage.class, controller.getClazz());
    controller.tasks();
    assertEquals(HsTasksPage.class, controller.getClazz());
    controller.task();
    assertEquals(HsTaskPage.class, controller.getClazz());
    controller.attempts();
    assertEquals(HsAttemptsPage.class, controller.getClazz());
    assertEquals(HsConfPage.class, controller.confPage());
    assertEquals(HsAboutPage.class, controller.aboutPage());
    controller.about();
    assertEquals(HsAboutPage.class, controller.getClazz());
    controller.logs();
    assertEquals(HsLogsPage.class, controller.getClazz());
    controller.nmlogs();
    assertEquals(AggregatedLogsPage.class, controller.getClazz());
    assertEquals(HsSingleCounterPage.class, controller.singleCounterPage());
    controller.singleJobCounter();
    assertEquals(HsSingleCounterPage.class, controller.getClazz());
    controller.singleTaskCounter();
    assertEquals(HsSingleCounterPage.class, controller.getClazz());
}
Also used : Task(org.apache.hadoop.mapreduce.v2.app.job.Task) TaskId(org.apache.hadoop.mapreduce.v2.api.records.TaskId) Configuration(org.apache.hadoop.conf.Configuration) AppContext(org.apache.hadoop.mapreduce.v2.app.AppContext) RequestContext(org.apache.hadoop.yarn.webapp.Controller.RequestContext) ApplicationId(org.apache.hadoop.yarn.api.records.ApplicationId) AppForTest(org.apache.hadoop.mapreduce.v2.app.webapp.AppForTest) Job(org.apache.hadoop.mapreduce.v2.app.job.Job) JobACL(org.apache.hadoop.mapreduce.JobACL) JobId(org.apache.hadoop.mapreduce.v2.api.records.JobId) UserGroupInformation(org.apache.hadoop.security.UserGroupInformation) BlockForTest(org.apache.hadoop.yarn.webapp.view.BlockForTest) Test(org.junit.Test) AppForTest(org.apache.hadoop.mapreduce.v2.app.webapp.AppForTest)
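
testHsController drives HsControllerForTest, a thin HsController subclass that records which page class each handler selected. Its definition is not shown on this page; the sketch below is a hedged reconstruction that assumes a local parameter map backs get/set and that the Controller-side render(Class) hook can be overridden to capture the class instead of rendering a view. The overridden signatures are assumptions inferred from how the test calls set(), get() and getClazz():

// Hypothetical sketch of HsControllerForTest (assumes imports of
// org.apache.hadoop.yarn.webapp.View and the classes listed above).
private static class HsControllerForTest extends HsController {

    private final Map<String, String> params = new HashMap<String, String>();
    private Class<?> clazz;

    HsControllerForTest(App app, Configuration conf, RequestContext ctx) {
        super(app, conf, ctx);
    }

    @Override
    public void set(String key, String value) {
        params.put(key, value);
    }

    @Override
    public String get(String key, String defaultValue) {
        String value = params.get(key);
        return value == null ? defaultValue : value;
    }

    // Assumed hook: record the page class instead of instantiating a view,
    // so assertions like assertEquals(HsJobPage.class, controller.getClazz())
    // can verify which page a handler chose.
    @Override
    protected void render(Class<? extends View> cls) {
        clazz = cls;
    }

    Class<?> getClazz() {
        return clazz;
    }
}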

Aggregations

AppContext (org.apache.hadoop.mapreduce.v2.app.AppContext): 4 uses
Job (org.apache.hadoop.mapreduce.v2.app.job.Job): 4 uses
Task (org.apache.hadoop.mapreduce.v2.app.job.Task): 4 uses
BlockForTest (org.apache.hadoop.yarn.webapp.view.BlockForTest): 4 uses
Test (org.junit.Test): 4 uses
PrintWriter (java.io.PrintWriter): 3 uses
HashMap (java.util.HashMap): 3 uses
TaskId (org.apache.hadoop.mapreduce.v2.api.records.TaskId): 3 uses
AppForTest (org.apache.hadoop.mapreduce.v2.app.webapp.AppForTest): 3 uses
HtmlBlock (org.apache.hadoop.yarn.webapp.view.HtmlBlock): 3 uses
Block (org.apache.hadoop.yarn.webapp.view.HtmlBlock.Block): 3 uses
Configuration (org.apache.hadoop.conf.Configuration): 2 uses
JobId (org.apache.hadoop.mapreduce.v2.api.records.JobId): 2 uses
TaskAttemptId (org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId): 2 uses
TaskAttemptReport (org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptReport): 2 uses
TaskAttemptState (org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptState): 2 uses
TaskAttemptIdPBImpl (org.apache.hadoop.mapreduce.v2.api.records.impl.pb.TaskAttemptIdPBImpl): 2 uses
TaskAttempt (org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt): 2 uses
AttemptsBlock (org.apache.hadoop.mapreduce.v2.hs.webapp.HsTaskPage.AttemptsBlock): 2 uses
ApplicationId (org.apache.hadoop.yarn.api.records.ApplicationId): 2 uses