Use of org.apache.hadoop.mapreduce.v2.app.AppContext in project hadoop by apache.
The class TestHsWebServicesJobs, method testJobCountersForKilledJob.
@Test
public void testJobCountersForKilledJob() throws Exception {
  WebResource r = resource();
  // Replace the history context with one that serves the killed job exercised below.
  appContext = new MockHistoryContext(0, 1, 1, 1, true);
  GuiceServletConfig.setInjector(Guice.createInjector(new ServletModule() {

    @Override
    protected void configureServlets() {
      webApp = mock(HsWebApp.class);
      when(webApp.name()).thenReturn("hsmockwebapp");
      bind(JAXBContextResolver.class);
      bind(HsWebServices.class);
      bind(GenericExceptionHandler.class);
      bind(WebApp.class).toInstance(webApp);
      bind(AppContext.class).toInstance(appContext);
      bind(HistoryContext.class).toInstance(appContext);
      bind(Configuration.class).toInstance(conf);
      serve("/*").with(GuiceContainer.class);
    }
  }));
  Map<JobId, Job> jobsMap = appContext.getAllJobs();
  for (JobId id : jobsMap.keySet()) {
    String jobId = MRApps.toString(id);
    ClientResponse response = r.path("ws").path("v1").path("history")
        .path("mapreduce").path("jobs").path(jobId).path("counters/")
        .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
    assertEquals(MediaType.APPLICATION_JSON_TYPE + "; " + JettyUtils.UTF_8,
        response.getType().toString());
    JSONObject json = response.getEntity(JSONObject.class);
    assertEquals("incorrect number of elements", 1, json.length());
    JSONObject info = json.getJSONObject("jobCounters");
    WebServicesTestUtils.checkStringMatch("id", MRApps.toString(id),
        info.getString("id"));
    // Only the "id" field should remain: a killed job reports no counter groups.
    assertTrue("Job shouldn't contain any counters", info.length() == 1);
  }
}
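The request above runs against an in-process Guice harness. For reference, a minimal sketch of issuing the same counters query against a live JobHistory server, assuming its default web port 19888 and the Jersey 1.x client already on the test classpath (the job id argument is a placeholder):

import javax.ws.rs.core.MediaType;
import com.sun.jersey.api.client.Client;
import com.sun.jersey.api.client.ClientResponse;
import com.sun.jersey.api.client.WebResource;

public class CountersQuery {
  public static void main(String[] args) {
    // args[0] is a placeholder job id, e.g. job_1500000000000_0001.
    Client client = Client.create();
    WebResource r = client.resource("http://localhost:19888");
    ClientResponse response = r.path("ws").path("v1").path("history")
        .path("mapreduce").path("jobs").path(args[0]).path("counters")
        .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
    System.out.println(response.getStatus());
    System.out.println(response.getEntity(String.class));
  }
}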
Use of org.apache.hadoop.mapreduce.v2.app.AppContext in project hadoop by apache.
The class TestHsWebServicesJobs, method verifyHsJobPartialXML.
public void verifyHsJobPartialXML(NodeList nodes, MockHistoryContext appContext) {
  assertEquals("incorrect number of elements", 1, nodes.getLength());
  for (int i = 0; i < nodes.getLength(); i++) {
    Element element = (Element) nodes.item(i);
    Job job = appContext.getPartialJob(MRApps.toJobID(
        WebServicesTestUtils.getXmlString(element, "id")));
    assertNotNull("Job not found - output incorrect", job);
    VerifyJobsUtils.verifyHsJobGeneric(job,
        WebServicesTestUtils.getXmlString(element, "id"),
        WebServicesTestUtils.getXmlString(element, "user"),
        WebServicesTestUtils.getXmlString(element, "name"),
        WebServicesTestUtils.getXmlString(element, "state"),
        WebServicesTestUtils.getXmlString(element, "queue"),
        WebServicesTestUtils.getXmlLong(element, "startTime"),
        WebServicesTestUtils.getXmlLong(element, "finishTime"),
        WebServicesTestUtils.getXmlInt(element, "mapsTotal"),
        WebServicesTestUtils.getXmlInt(element, "mapsCompleted"),
        WebServicesTestUtils.getXmlInt(element, "reducesTotal"),
        WebServicesTestUtils.getXmlInt(element, "reducesCompleted"));
  }
}
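The getXml* helpers pull typed fields out of the job element. A minimal sketch of the string case, assuming only the JDK's DOM API (the real helper lives in WebServicesTestUtils and handles a few more corner cases):

import org.w3c.dom.Element;
import org.w3c.dom.NodeList;

final class XmlField {
  // Return the text content of the first child element with the given tag
  // name, or null when the field is absent.
  static String getString(Element element, String name) {
    NodeList fields = element.getElementsByTagName(name);
    if (fields.getLength() == 0) {
      return null;
    }
    return fields.item(0).getTextContent();
  }
}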
Use of org.apache.hadoop.mapreduce.v2.app.AppContext in project hadoop by apache.
The class TestBlocks, method testHsJobsBlock.
/**
 * Test HsJobsBlock's rendering.
 */
@Test
public void testHsJobsBlock() {
  AppContext ctx = mock(AppContext.class);
  Map<JobId, Job> jobs = new HashMap<JobId, Job>();
  Job job = getJob();
  jobs.put(job.getID(), job);
  when(ctx.getAllJobs()).thenReturn(jobs);
  HsJobsBlock block = new HsJobsBlockForTest(ctx);
  // "data" is a byte stream field of the enclosing test class that
  // collects the markup rendered through this writer.
  PrintWriter pWriter = new PrintWriter(data);
  Block html = new BlockForTest(new HtmlBlockForTest(), pWriter, 0, false);
  block.render(html);
  pWriter.flush();
  assertTrue(data.toString().contains("JobName"));
  assertTrue(data.toString().contains("UserName"));
  assertTrue(data.toString().contains("QueueName"));
  assertTrue(data.toString().contains("SUCCEEDED"));
}
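The test hinges on one Mockito idiom: hand the block a context whose getAllJobs() returns a canned map. A self-contained illustration of just that stubbing step, with a hypothetical Context interface standing in for AppContext:

import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import java.util.HashMap;
import java.util.Map;

public class StubDemo {
  // Hypothetical stand-in for AppContext; only the stubbed method matters here.
  interface Context {
    Map<String, String> getAllJobs();
  }

  public static void main(String[] args) {
    Context ctx = mock(Context.class);
    Map<String, String> jobs = new HashMap<String, String>();
    jobs.put("job_0_0001", "SUCCEEDED");
    when(ctx.getAllJobs()).thenReturn(jobs);
    System.out.println(ctx.getAllJobs()); // {job_0_0001=SUCCEEDED}
  }
}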
Use of org.apache.hadoop.mapreduce.v2.app.AppContext in project hadoop by apache.
The class TestBlocks, method testAttemptsBlock.
/**
 * Test AttemptsBlock's rendering.
 */
@Test
public void testAttemptsBlock() {
  AppContext ctx = mock(AppContext.class);
  AppForTest app = new AppForTest(ctx);
  Task task = getTask(0);
  Map<TaskAttemptId, TaskAttempt> attempts = new HashMap<TaskAttemptId, TaskAttempt>();
  // Build a single successful reduce attempt, wired to a concrete container id.
  TaskAttempt attempt = mock(TaskAttempt.class);
  TaskAttemptId taId = new TaskAttemptIdPBImpl();
  taId.setId(0);
  taId.setTaskId(task.getID());
  when(attempt.getID()).thenReturn(taId);
  when(attempt.getNodeHttpAddress()).thenReturn("Node address");
  ApplicationId appId = ApplicationIdPBImpl.newInstance(0, 5);
  ApplicationAttemptId appAttemptId = ApplicationAttemptIdPBImpl.newInstance(appId, 1);
  ContainerId containerId = ContainerIdPBImpl.newContainerId(appAttemptId, 1);
  when(attempt.getAssignedContainerID()).thenReturn(containerId);
  when(attempt.getAssignedContainerMgrAddress()).thenReturn("assignedContainerMgrAddress");
  when(attempt.getNodeRackName()).thenReturn("nodeRackName");
  final long taStartTime = 100002L;
  final long taFinishTime = 100012L;
  final long taShuffleFinishTime = 100010L;
  final long taSortFinishTime = 100011L;
  final TaskAttemptState taState = TaskAttemptState.SUCCEEDED;
  when(attempt.getLaunchTime()).thenReturn(taStartTime);
  when(attempt.getFinishTime()).thenReturn(taFinishTime);
  when(attempt.getShuffleFinishTime()).thenReturn(taShuffleFinishTime);
  when(attempt.getSortFinishTime()).thenReturn(taSortFinishTime);
  when(attempt.getState()).thenReturn(taState);
  // The report mirrors the attempt's timing and carries a state string that
  // the block must escape before embedding it in the page.
  TaskAttemptReport taReport = mock(TaskAttemptReport.class);
  when(taReport.getStartTime()).thenReturn(taStartTime);
  when(taReport.getFinishTime()).thenReturn(taFinishTime);
  when(taReport.getShuffleFinishTime()).thenReturn(taShuffleFinishTime);
  when(taReport.getSortFinishTime()).thenReturn(taSortFinishTime);
  when(taReport.getContainerId()).thenReturn(containerId);
  when(taReport.getProgress()).thenReturn(1.0f);
  when(taReport.getStateString()).thenReturn("Processed 128/128 records <p> \n");
  when(taReport.getTaskAttemptState()).thenReturn(taState);
  when(taReport.getDiagnosticInfo()).thenReturn("");
  when(attempt.getReport()).thenReturn(taReport);
  attempts.put(taId, attempt);
  when(task.getAttempts()).thenReturn(attempts);
  app.setTask(task);
  Job job = mock(Job.class);
  when(job.getUserName()).thenReturn("User");
  app.setJob(job);
  AttemptsBlockForTest block = new AttemptsBlockForTest(app);
  block.addParameter(AMParams.TASK_TYPE, "r");
  PrintWriter pWriter = new PrintWriter(data);
  Block html = new BlockForTest(new HtmlBlockForTest(), pWriter, 0, false);
  block.render(html);
  pWriter.flush();
  // Information about the attempt should be printed; the raw state string
  // must appear only in its JavaScript-escaped form.
  assertTrue(data.toString().contains("attempt_0_0001_r_000000_0"));
  assertTrue(data.toString().contains("SUCCEEDED"));
  assertFalse(data.toString().contains("Processed 128/128 records <p> \n"));
  assertTrue(data.toString().contains("Processed 128\\/128 records <p> \\n"));
  assertTrue(data.toString().contains("_0005_01_000001:attempt_0_0001_r_000000_0:User:"));
  assertTrue(data.toString().contains("100002"));
  assertTrue(data.toString().contains("100010"));
  assertTrue(data.toString().contains("100011"));
  assertTrue(data.toString().contains("100012"));
}
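The assertFalse/assertTrue pair on the state string is the interesting check: the block JavaScript-escapes the value before embedding it, so the raw text must not survive in the output. A hypothetical helper reproducing just the two substitutions those assertions exercise (the production block relies on a library escape routine that covers more characters):

public class JsEscapeDemo {
  // Escape backslashes first so the later substitutions are not double-escaped.
  static String escape(String s) {
    return s.replace("\\", "\\\\").replace("/", "\\/").replace("\n", "\\n");
  }

  public static void main(String[] args) {
    String raw = "Processed 128/128 records <p> \n";
    System.out.println(escape(raw)); // Processed 128\/128 records <p> \n
  }
}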
Use of org.apache.hadoop.mapreduce.v2.app.AppContext in project hadoop by apache.
The class TestHSWebApp, method testJobView.
@Test
public void testJobView() {
  LOG.info("HsJobPage");
  AppContext appContext = new MockAppContext(0, 1, 1, 1);
  Map<String, String> params = TestAMWebApp.getJobParams(appContext);
  WebAppTests.testPage(HsJobPage.class, AppContext.class, appContext, params);
}
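The same three-line pattern extends to the other history pages. A sketch for the tasks page, assuming HsTasksPage and TestAMWebApp.getTaskParams, which sit alongside the classes used above:

AppContext appContext = new MockAppContext(0, 1, 1, 1);
Map<String, String> params = TestAMWebApp.getTaskParams(appContext);
WebAppTests.testPage(HsTasksPage.class, AppContext.class, appContext, params);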