Use of org.apache.hadoop.mapreduce.v2.app.AppContext in project hadoop by apache, from the class TestBlocks, method testHsJobsBlock.
/**
 * Test HsJobsBlock's rendering.
 */
@Test
public void testHsJobsBlock() {
  AppContext ctx = mock(AppContext.class);
  Map<JobId, Job> jobs = new HashMap<JobId, Job>();
  Job job = getJob();
  jobs.put(job.getID(), job);
  when(ctx.getAllJobs()).thenReturn(jobs);

  HsJobsBlock block = new HsJobsBlockForTest(ctx);
  PrintWriter pWriter = new PrintWriter(data);
  Block html = new BlockForTest(new HtmlBlockForTest(), pWriter, 0, false);
  block.render(html);
  pWriter.flush();

  assertTrue(data.toString().contains("JobName"));
  assertTrue(data.toString().contains("UserName"));
  assertTrue(data.toString().contains("QueueName"));
  assertTrue(data.toString().contains("SUCCEEDED"));
}
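The getJob() helper referenced above is not shown on this page. A minimal sketch consistent with the four assertions (the stubbed name, user, queue, and state produce exactly the strings the test checks for) might look like the following; the JobReport fields and id values are assumptions, not the verified source:

// Assumed shape of the getJob() helper used by testHsJobsBlock.
private Job getJob() {
  Job job = mock(Job.class);

  JobId jobId = new JobIdPBImpl();
  ApplicationId appId = ApplicationIdPBImpl.newInstance(System.currentTimeMillis(), 4);
  jobId.setAppId(appId);
  jobId.setId(1);
  when(job.getID()).thenReturn(jobId);

  JobReport report = mock(JobReport.class);
  when(report.getStartTime()).thenReturn(100010L);
  when(report.getFinishTime()).thenReturn(100015L);
  when(job.getReport()).thenReturn(report);

  // These four stubs yield the strings asserted in testHsJobsBlock.
  when(job.getName()).thenReturn("JobName");
  when(job.getUserName()).thenReturn("UserName");
  when(job.getQueueName()).thenReturn("QueueName");
  when(job.getState()).thenReturn(JobState.SUCCEEDED);
  return job;
}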
Use of org.apache.hadoop.mapreduce.v2.app.AppContext in project hadoop by apache, from the class TestBlocks, method testAttemptsBlock.
/**
 * Test AttemptsBlock's rendering.
 */
@Test
public void testAttemptsBlock() {
  AppContext ctx = mock(AppContext.class);
  AppForTest app = new AppForTest(ctx);
  Task task = getTask(0);
  Map<TaskAttemptId, TaskAttempt> attempts =
      new HashMap<TaskAttemptId, TaskAttempt>();
  TaskAttempt attempt = mock(TaskAttempt.class);
  TaskAttemptId taId = new TaskAttemptIdPBImpl();
  taId.setId(0);
  taId.setTaskId(task.getID());
  when(attempt.getID()).thenReturn(taId);
  when(attempt.getNodeHttpAddress()).thenReturn("Node address");

  ApplicationId appId = ApplicationIdPBImpl.newInstance(0, 5);
  ApplicationAttemptId appAttemptId =
      ApplicationAttemptIdPBImpl.newInstance(appId, 1);
  ContainerId containerId = ContainerIdPBImpl.newContainerId(appAttemptId, 1);
  when(attempt.getAssignedContainerID()).thenReturn(containerId);
  when(attempt.getAssignedContainerMgrAddress())
      .thenReturn("assignedContainerMgrAddress");
  when(attempt.getNodeRackName()).thenReturn("nodeRackName");

  final long taStartTime = 100002L;
  final long taFinishTime = 100012L;
  final long taShuffleFinishTime = 100010L;
  final long taSortFinishTime = 100011L;
  final TaskAttemptState taState = TaskAttemptState.SUCCEEDED;
  when(attempt.getLaunchTime()).thenReturn(taStartTime);
  when(attempt.getFinishTime()).thenReturn(taFinishTime);
  when(attempt.getShuffleFinishTime()).thenReturn(taShuffleFinishTime);
  when(attempt.getSortFinishTime()).thenReturn(taSortFinishTime);
  when(attempt.getState()).thenReturn(taState);

  TaskAttemptReport taReport = mock(TaskAttemptReport.class);
  when(taReport.getStartTime()).thenReturn(taStartTime);
  when(taReport.getFinishTime()).thenReturn(taFinishTime);
  when(taReport.getShuffleFinishTime()).thenReturn(taShuffleFinishTime);
  when(taReport.getSortFinishTime()).thenReturn(taSortFinishTime);
  when(taReport.getContainerId()).thenReturn(containerId);
  when(taReport.getProgress()).thenReturn(1.0f);
  when(taReport.getStateString()).thenReturn("Processed 128/128 records <p> \n");
  when(taReport.getTaskAttemptState()).thenReturn(taState);
  when(taReport.getDiagnosticInfo()).thenReturn("");
  when(attempt.getReport()).thenReturn(taReport);

  attempts.put(taId, attempt);
  when(task.getAttempts()).thenReturn(attempts);
  app.setTask(task);
  Job job = mock(Job.class);
  when(job.getUserName()).thenReturn("User");
  app.setJob(job);

  AttemptsBlockForTest block = new AttemptsBlockForTest(app);
  block.addParameter(AMParams.TASK_TYPE, "r");
  PrintWriter pWriter = new PrintWriter(data);
  Block html = new BlockForTest(new HtmlBlockForTest(), pWriter, 0, false);
  block.render(html);
  pWriter.flush();

  // Information about the attempt should be printed, with the raw state
  // string escaped before being embedded in the page.
  assertTrue(data.toString().contains("attempt_0_0001_r_000000_0"));
  assertTrue(data.toString().contains("SUCCEEDED"));
  assertFalse(data.toString().contains("Processed 128/128 records <p> \n"));
  assertTrue(data.toString().contains("Processed 128\\/128 records <p> \\n"));
  assertTrue(data.toString().contains("_0005_01_000001:attempt_0_0001_r_000000_0:User:"));
  assertTrue(data.toString().contains("100002"));
  assertTrue(data.toString().contains("100010"));
  assertTrue(data.toString().contains("100011"));
  assertTrue(data.toString().contains("100012"));
}
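The assertFalse/assertTrue pair on the state string verifies that the block escapes the raw value before embedding it in the page's JavaScript table data. A minimal illustration of that escaping, assuming commons-lang3's StringEscapeUtils (the escaping helper the block actually uses is not shown on this page):

import org.apache.commons.lang3.StringEscapeUtils;

public class EscapeDemo {
  public static void main(String[] args) {
    String raw = "Processed 128/128 records <p> \n";
    // escapeEcmaScript escapes the forward slash and the newline, printing
    // the literal text the test expects: Processed 128\/128 records <p> \n
    System.out.println(StringEscapeUtils.escapeEcmaScript(raw));
  }
}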
Use of org.apache.hadoop.mapreduce.v2.app.AppContext in project hadoop by apache, from the class TestHSWebApp, method testJobView.
@Test
public void testJobView() {
  LOG.info("HsJobPage");
  AppContext appContext = new MockAppContext(0, 1, 1, 1);
  Map<String, String> params = TestAMWebApp.getJobParams(appContext);
  WebAppTests.testPage(HsJobPage.class, AppContext.class, appContext, params);
}
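TestAMWebApp.getJobParams builds the request parameters from the first job in the mock context. A plausible sketch, offered as an assumption (the real helper lives in TestAMWebApp and is not shown here):

// Assumed shape of TestAMWebApp.getJobParams: take the first job in the
// context and expose its id under the AMParams.JOB_ID request parameter.
public static Map<String, String> getJobParams(AppContext appContext) {
  JobId jobId = appContext.getAllJobs().entrySet().iterator().next().getKey();
  Map<String, String> params = new HashMap<String, String>();
  params.put(AMParams.JOB_ID, MRApps.toString(jobId));
  return params;
}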
Use of org.apache.hadoop.mapreduce.v2.app.AppContext in project hadoop by apache, from the class TestHSWebApp, method testTaskViewNaturalSortType.
@Test
public void testTaskViewNaturalSortType() {
  LOG.info("HsTaskPage");
  AppContext appContext = new MockAppContext(0, 1, 1, 1);
  Map<String, String> params = TestAMWebApp.getTaskParams(appContext);
  Injector testPage =
      WebAppTests.testPage(HsTaskPage.class, AppContext.class, appContext, params);
  View viewInstance = testPage.getInstance(HsTaskPage.class);
  Map<String, String> moreParams =
      viewInstance.context().requestContext().moreParams();
  String appTableColumnsMeta = moreParams.get("ui.dataTables.attempts.init");
  Assert.assertTrue(appTableColumnsMeta.indexOf("natural") != -1);
}
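The assertion reads the DataTables initialization string that the page registers under the ui.dataTables.attempts.init key and checks for the "natural" sort-type token, which marks columns that should sort alphanumerically rather than lexically. A hypothetical fragment of such an init string, for illustration only (the real builder lives in HsTaskPage and may differ):

// Hypothetical DataTables init fragment declaring a natural sort type
// for the attempt-name column.
String attemptsTableInit =
    "{\"aoColumnDefs\": [{\"sType\": \"natural\", \"aTargets\": [0]}]}";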
Use of org.apache.hadoop.mapreduce.v2.app.AppContext in project hadoop by apache, from the class TestHSWebApp, method testAttemptsView.
@Test
public void testAttemptsView() {
  LOG.info("HsAttemptsPage");
  AppContext appContext = new MockAppContext(0, 1, 1, 1);
  Map<String, String> params = TestAMWebApp.getTaskParams(appContext);
  WebAppTests.testPage(HsAttemptsPage.class, AppContext.class, appContext, params);
}
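Both testTaskViewNaturalSortType and testAttemptsView rely on TestAMWebApp.getTaskParams. A plausible sketch, again an assumption rather than the verified source, extending getJobParams with the first task's id and type symbol:

// Assumed shape of TestAMWebApp.getTaskParams: pick the first task of the
// first job in the mock context and expose job id, task id, and task-type
// symbol as request parameters.
public static Map<String, String> getTaskParams(AppContext appContext) {
  JobId jobId = appContext.getAllJobs().entrySet().iterator().next().getKey();
  Map.Entry<TaskId, Task> e =
      appContext.getJob(jobId).getTasks().entrySet().iterator().next();
  Map<String, String> params = new HashMap<String, String>();
  params.put(AMParams.JOB_ID, MRApps.toString(jobId));
  params.put(AMParams.TASK_ID, MRApps.toString(e.getKey()));
  params.put(AMParams.TASK_TYPE, MRApps.taskSymbol(e.getValue().getType()));
  return params;
}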