use of org.apache.hadoop.mapreduce.v2.app.AppContext in project hadoop by apache.
the class TestBlocks method testHsTasksBlock.
/**
 * test HsTasksBlock's rendering.
 */
@Test
public void testHsTasksBlock() {
  Task task = getTask(0);
  Map<TaskId, Task> tasks = new HashMap<TaskId, Task>();
  tasks.put(task.getID(), task);

  // mock an AppContext and wrap it in the test App, backed by a one-task job
  AppContext ctx = mock(AppContext.class);
  AppForTest app = new AppForTest(ctx);
  Job job = mock(Job.class);
  when(job.getTasks()).thenReturn(tasks);
  app.setJob(job);

  // render the reduce-task ("r") table into an in-memory writer
  HsTasksBlockForTest block = new HsTasksBlockForTest(app);
  block.addParameter(AMParams.TASK_TYPE, "r");
  PrintWriter pWriter = new PrintWriter(data);
  Block html = new BlockForTest(new HtmlBlockForTest(), pWriter, 0, false);
  block.render(html);
  pWriter.flush();

  // information about the task should be printed: id, state, start/finish times
  assertTrue(data.toString().contains("task_0_0001_r_000000"));
  assertTrue(data.toString().contains("SUCCEEDED"));
  assertTrue(data.toString().contains("100001"));
  assertTrue(data.toString().contains("100011"));
  assertTrue(data.toString().contains(""));
}
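The getTask(0) helper is not part of this excerpt. A minimal sketch of what such a helper could look like, assuming Mockito mocks and the protobuf record implementations (JobIdPBImpl, TaskIdPBImpl) from the MapReduce v2 API, with values chosen to match the assertions above:

private Task getTask(long timestamp) {
  // ids chosen so the task renders as task_0_0001_r_000000
  JobId jobId = new JobIdPBImpl();
  jobId.setId(0);
  jobId.setAppId(ApplicationIdPBImpl.newInstance(timestamp, 1));
  TaskId taskId = new TaskIdPBImpl();
  taskId.setId(0);
  taskId.setTaskType(TaskType.REDUCE);
  taskId.setJobId(jobId);

  // a mock Task whose report carries the state and timestamps asserted above
  Task task = mock(Task.class);
  when(task.getID()).thenReturn(taskId);
  when(task.getType()).thenReturn(TaskType.REDUCE);
  TaskReport report = mock(TaskReport.class);
  when(report.getTaskState()).thenReturn(TaskState.SUCCEEDED);
  when(report.getStartTime()).thenReturn(100001L);
  when(report.getFinishTime()).thenReturn(100011L);
  when(task.getReport()).thenReturn(report);
  return task;
}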
use of org.apache.hadoop.mapreduce.v2.app.AppContext in project hadoop by apache.
the class TestHsWebServicesJobs method testJobIdXML.
@Test
public void testJobIdXML() throws Exception {
  WebResource r = resource();
  Map<JobId, Job> jobsMap = appContext.getAllJobs();
  for (JobId id : jobsMap.keySet()) {
    String jobId = MRApps.toString(id);

    // GET ws/v1/history/mapreduce/jobs/{jobid} as XML
    ClientResponse response = r.path("ws").path("v1").path("history")
        .path("mapreduce").path("jobs").path(jobId)
        .accept(MediaType.APPLICATION_XML).get(ClientResponse.class);
    assertEquals(MediaType.APPLICATION_XML_TYPE + "; " + JettyUtils.UTF_8,
        response.getType().toString());

    // parse the response body and verify the <job> element
    String xml = response.getEntity(String.class);
    DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
    DocumentBuilder db = dbf.newDocumentBuilder();
    InputSource is = new InputSource();
    is.setCharacterStream(new StringReader(xml));
    Document dom = db.parse(is);
    NodeList job = dom.getElementsByTagName("job");
    verifyHsJobXML(job, appContext);
  }
}
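verifyHsJobXML is defined elsewhere in TestHsWebServicesJobs. A hedged sketch of the kind of check it performs, assuming the <job> element carries <id> and <name> children (the exact set of fields verified is not shown in this excerpt):

private void verifyHsJobXML(NodeList nodes, AppContext appContext) {
  assertEquals("incorrect number of elements", 1, nodes.getLength());
  for (int i = 0; i < nodes.getLength(); i++) {
    Element element = (Element) nodes.item(i);
    // resolve the job in the context from the id in the XML ...
    String id = element.getElementsByTagName("id").item(0).getTextContent();
    Job job = appContext.getJob(MRApps.toJobID(id));
    assertNotNull("job not found in app context", job);
    // ... and compare a representative field against the XML
    String name = element.getElementsByTagName("name").item(0).getTextContent();
    assertEquals("name does not match", job.getName(), name);
  }
}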
use of org.apache.hadoop.mapreduce.v2.app.AppContext in project hadoop by apache.
the class TestBlocks method testHsController.
/**
 * test HsController.
 */
@Test
public void testHsController() throws Exception {
  AppContext ctx = mock(AppContext.class);
  ApplicationId appId = ApplicationIdPBImpl.newInstance(0, 5);
  when(ctx.getApplicationID()).thenReturn(appId);

  AppForTest app = new AppForTest(ctx);
  Configuration config = new Configuration();
  RequestContext requestCtx = mock(RequestContext.class);
  HsControllerForTest controller = new HsControllerForTest(app, config, requestCtx);

  // the index action sets the title, and each accessor returns its page class
  controller.index();
  assertEquals("JobHistory", controller.get(Params.TITLE, ""));
  assertEquals(HsJobPage.class, controller.jobPage());
  assertEquals(HsCountersPage.class, controller.countersPage());
  assertEquals(HsTasksPage.class, controller.tasksPage());
  assertEquals(HsTaskPage.class, controller.taskPage());
  assertEquals(HsAttemptsPage.class, controller.attemptsPage());

  // set request parameters and stub the job lookup so access checks pass
  controller.set(AMParams.JOB_ID, "job_01_01");
  controller.set(AMParams.TASK_ID, "task_01_01_m_01");
  controller.set(AMParams.TASK_TYPE, "m");
  controller.set(AMParams.ATTEMPT_STATE, "State");

  Job job = mock(Job.class);
  Task task = mock(Task.class);
  when(job.getTask(any(TaskId.class))).thenReturn(task);
  JobId jobID = MRApps.toJobID("job_01_01");
  when(ctx.getJob(jobID)).thenReturn(job);
  when(job.checkAccess(any(UserGroupInformation.class), any(JobACL.class)))
      .thenReturn(true);

  // each action should render the matching page class
  controller.job();
  assertEquals(HsJobPage.class, controller.getClazz());
  controller.jobCounters();
  assertEquals(HsCountersPage.class, controller.getClazz());
  controller.taskCounters();
  assertEquals(HsCountersPage.class, controller.getClazz());
  controller.tasks();
  assertEquals(HsTasksPage.class, controller.getClazz());
  controller.task();
  assertEquals(HsTaskPage.class, controller.getClazz());
  controller.attempts();
  assertEquals(HsAttemptsPage.class, controller.getClazz());

  assertEquals(HsConfPage.class, controller.confPage());
  assertEquals(HsAboutPage.class, controller.aboutPage());
  controller.about();
  assertEquals(HsAboutPage.class, controller.getClazz());
  controller.logs();
  assertEquals(HsLogsPage.class, controller.getClazz());
  controller.nmlogs();
  assertEquals(AggregatedLogsPage.class, controller.getClazz());

  assertEquals(HsSingleCounterPage.class, controller.singleCounterPage());
  controller.singleJobCounter();
  assertEquals(HsSingleCounterPage.class, controller.getClazz());
  controller.singleTaskCounter();
  assertEquals(HsSingleCounterPage.class, controller.getClazz());
}
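HsControllerForTest is a private helper in TestBlocks. The assertions above only require that it record the page class passed to render() and back set()/get() with a plain map; a sketch under those assumptions (the real helper, and the exact signatures it overrides on HsController, may differ in detail):

private static class HsControllerForTest extends HsController {
  private final Map<String, String> params = new HashMap<String, String>();
  private Class<? extends View> clazz;

  HsControllerForTest(App app, Configuration conf, RequestContext ctx) {
    super(app, conf, ctx);
  }

  @Override
  public void set(String key, String value) {
    params.put(key, value);
  }

  @Override
  public String get(String key, String defaultValue) {
    String value = params.get(key);
    return value == null ? defaultValue : value;
  }

  @Override
  protected void render(Class<? extends View> cls) {
    clazz = cls; // record the page class instead of rendering it
  }

  Class<? extends View> getClazz() {
    return clazz;
  }
}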
use of org.apache.hadoop.mapreduce.v2.app.AppContext in project hadoop by apache.
the class TestHSWebApp method testTasksViewNaturalSortType.
@Test
public void testTasksViewNaturalSortType() {
  LOG.info("HsTasksPage");
  AppContext appContext = new MockAppContext(0, 1, 1, 1);
  Map<String, String> params = TestAMWebApp.getTaskParams(appContext);
  Injector testPage =
      WebAppTests.testPage(HsTasksPage.class, AppContext.class, appContext, params);

  // the dataTables init emitted for the task table should use natural sorting
  View viewInstance = testPage.getInstance(HsTasksPage.class);
  Map<String, String> moreParams =
      viewInstance.context().requestContext().moreParams();
  String appTableColumnsMeta = moreParams.get("ui.dataTables.selector.init");
  Assert.assertTrue(appTableColumnsMeta.indexOf("natural") != -1);
}
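TestAMWebApp.getTaskParams builds the request parameters from the first job and task in the context. A sketch of what it plausibly does, assuming the MRApps string helpers (toString, taskSymbol); the real helper is defined in TestAMWebApp and is not shown in this excerpt:

public static Map<String, String> getTaskParams(AppContext appContext) {
  // pick the first job and its first task from the mock context
  JobId jobId = appContext.getAllJobs().keySet().iterator().next();
  Entry<TaskId, Task> e =
      appContext.getJob(jobId).getTasks().entrySet().iterator().next();
  Map<String, String> params = new HashMap<String, String>();
  params.put(AMParams.JOB_ID, MRApps.toString(jobId));
  params.put(AMParams.TASK_ID, MRApps.toString(e.getKey()));
  params.put(AMParams.TASK_TYPE, MRApps.taskSymbol(e.getValue().getType()));
  return params;
}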
use of org.apache.hadoop.mapreduce.v2.app.AppContext in project hadoop by apache.
the class TestHSWebApp method testJobCounterViewForKilledJob.
@Test
public void testJobCounterViewForKilledJob() {
  LOG.info("JobCounterViewForKilledJob");
  AppContext appContext = new MockAppContext(0, 1, 1, 1, true);
  Map<String, String> params = TestAMWebApp.getJobParams(appContext);
  WebAppTests.testPage(HsCountersPage.class, AppContext.class, appContext, params);
}