Use of org.apache.hadoop.mapreduce.v2.api.records.JobId in project hadoop by apache.
From the class TestHsWebServicesTasks, method testTaskIdCountersSlash.
@Test
public void testTaskIdCountersSlash() throws JSONException, Exception {
  WebResource r = resource();
  Map<JobId, Job> jobsMap = appContext.getAllJobs();
  for (JobId id : jobsMap.keySet()) {
    String jobId = MRApps.toString(id);
    for (Task task : jobsMap.get(id).getTasks().values()) {
      String tid = MRApps.toString(task.getID());
      // Note the trailing slash on "counters/": this variant verifies
      // the endpoint also resolves with a trailing slash.
      ClientResponse response = r.path("ws").path("v1").path("history")
          .path("mapreduce").path("jobs").path(jobId).path("tasks")
          .path(tid).path("counters/")
          .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
      assertEquals(MediaType.APPLICATION_JSON_TYPE + "; " + JettyUtils.UTF_8,
          response.getType().toString());
      JSONObject json = response.getEntity(JSONObject.class);
      assertEquals("incorrect number of elements", 1, json.length());
      JSONObject info = json.getJSONObject("jobTaskCounters");
      verifyHsJobTaskCounters(info, task);
    }
  }
}
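The jobId and tid path segments above both come from MRApps.toString. As a point of reference, here is a minimal self-contained sketch of how such IDs are built and rendered. MRBuilderUtils, TaskType, and the MRApps overloads are real Hadoop classes (newJobId with these arguments also appears in TestJobInfo below); the timestamp and sequence values are arbitrary placeholders, and the padded string forms in the comments are assumptions rather than captured output.

import org.apache.hadoop.mapreduce.v2.api.records.JobId;
import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
import org.apache.hadoop.mapreduce.v2.util.MRApps;
import org.apache.hadoop.mapreduce.v2.util.MRBuilderUtils;

public class IdStringSketch {
  public static void main(String[] args) {
    // Arbitrary cluster timestamp (1L), application id (1), job number (1).
    JobId jobId = MRBuilderUtils.newJobId(1L, 1, 1);
    TaskId taskId = MRBuilderUtils.newTaskId(jobId, 0, TaskType.MAP);
    // These are the strings the tests plug into the REST path,
    // e.g. "job_1_0001" and "task_1_0001_m_000000" (zero-padding assumed).
    System.out.println(MRApps.toString(jobId));
    System.out.println(MRApps.toString(taskId));
  }
}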
Use of org.apache.hadoop.mapreduce.v2.api.records.JobId in project hadoop by apache.
From the class TestHsWebServicesTasks, method testTasksQueryInvalid.
@Test
public void testTasksQueryInvalid() throws JSONException, Exception {
  WebResource r = resource();
  Map<JobId, Job> jobsMap = appContext.getAllJobs();
  for (JobId id : jobsMap.keySet()) {
    String jobId = MRApps.toString(id);
    // tasktype must be exactly either "m" or "r"
    String tasktype = "reduce";
    try {
      r.path("ws").path("v1").path("history").path("mapreduce")
          .path("jobs").path(jobId).path("tasks")
          .queryParam("type", tasktype)
          .accept(MediaType.APPLICATION_JSON).get(JSONObject.class);
      fail("should have thrown exception on invalid uri");
    } catch (UniformInterfaceException ue) {
      ClientResponse response = ue.getResponse();
      assertResponseStatusCode(Status.BAD_REQUEST, response.getStatusInfo());
      assertEquals(MediaType.APPLICATION_JSON_TYPE + "; " + JettyUtils.UTF_8,
          response.getType().toString());
      JSONObject msg = response.getEntity(JSONObject.class);
      JSONObject exception = msg.getJSONObject("RemoteException");
      assertEquals("incorrect number of elements", 3, exception.length());
      String message = exception.getString("message");
      String type = exception.getString("exception");
      String classname = exception.getString("javaClassName");
      WebServicesTestUtils.checkStringMatch("exception message",
          "java.lang.Exception: tasktype must be either m or r", message);
      WebServicesTestUtils.checkStringMatch("exception type",
          "BadRequestException", type);
      WebServicesTestUtils.checkStringMatch("exception classname",
          "org.apache.hadoop.yarn.webapp.BadRequestException", classname);
    }
  }
}
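For contrast, a hedged sketch of the accepted form of the same query, reusing r and jobId from the snippet above. The single-letter values are confirmed by the error message the test asserts; the "tasks" key on the success path is an assumption drawn from the other tests' response-shape conventions, not something this test checks.

// Sketch only: the request succeeds when "type" is "m" (map) or "r" (reduce).
JSONObject json = r.path("ws").path("v1").path("history")
    .path("mapreduce").path("jobs").path(jobId).path("tasks")
    .queryParam("type", "r")
    .accept(MediaType.APPLICATION_JSON).get(JSONObject.class);
JSONObject tasks = json.getJSONObject("tasks");  // key name assumed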
Use of org.apache.hadoop.mapreduce.v2.api.records.JobId in project hadoop by apache.
From the class TestHsWebServicesTasks, method testTaskIdDefault.
@Test
public void testTaskIdDefault() throws JSONException, Exception {
  WebResource r = resource();
  Map<JobId, Job> jobsMap = appContext.getAllJobs();
  for (JobId id : jobsMap.keySet()) {
    String jobId = MRApps.toString(id);
    for (Task task : jobsMap.get(id).getTasks().values()) {
      String tid = MRApps.toString(task.getID());
      // No accept() call here: this exercises the endpoint's default
      // response type, which the next assertion pins to JSON.
      ClientResponse response = r.path("ws").path("v1").path("history")
          .path("mapreduce").path("jobs").path(jobId).path("tasks")
          .path(tid).get(ClientResponse.class);
      assertEquals(MediaType.APPLICATION_JSON_TYPE + "; " + JettyUtils.UTF_8,
          response.getType().toString());
      JSONObject json = response.getEntity(JSONObject.class);
      assertEquals("incorrect number of elements", 1, json.length());
      JSONObject info = json.getJSONObject("task");
      verifyHsSingleTask(info, task);
    }
  }
}
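Since no Accept header is set, what this test pins down is the endpoint's default response type (JSON). The fields inside the returned "task" object could then be read as sketched below, reusing info from the snippet above; the field names are assumptions based on the MapReduce History Server REST API, not values this test asserts.

// Hedged follow-up, not part of the original test (field names assumed).
String taskId = info.getString("id");    // e.g. "task_..._m_000000"
String state = info.getString("state");  // e.g. "SUCCEEDED"
String type = info.getString("type");    // "MAP" or "REDUCE"
System.out.println(taskId + " " + type + " " + state);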
Use of org.apache.hadoop.mapreduce.v2.api.records.JobId in project hadoop by apache.
From the class TestJobInfo, method testGetFormattedStartTimeStr.
@Test
public void testGetFormattedStartTimeStr() {
  JobReport jobReport = mock(JobReport.class);
  // A start time of -1 is the "not yet started" sentinel.
  when(jobReport.getStartTime()).thenReturn(-1L);
  Job job = mock(Job.class);
  when(job.getReport()).thenReturn(jobReport);
  when(job.getName()).thenReturn("TestJobInfo");
  when(job.getState()).thenReturn(JobState.SUCCEEDED);
  JobId jobId = MRBuilderUtils.newJobId(1L, 1, 1);
  when(job.getID()).thenReturn(jobId);
  DateFormat dateFormat = new SimpleDateFormat();
  JobInfo jobInfo = new JobInfo(job);
  Assert.assertEquals(JobInfo.NA,
      jobInfo.getFormattedStartTimeStr(dateFormat));
  // With a real start time, the formatted date comes back instead.
  Date date = new Date();
  when(jobReport.getStartTime()).thenReturn(date.getTime());
  jobInfo = new JobInfo(job);
  Assert.assertEquals(dateFormat.format(date),
      jobInfo.getFormattedStartTimeStr(dateFormat));
}
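The test pivots on the convention that a start time of -1 means "not yet started", which JobInfo surfaces as its NA constant. A minimal standalone sketch of that guard follows; the helper name, the NA value, and the non-positive check are assumptions modeled on the behavior the test asserts, not JobInfo's actual source (the test only pins down -1 and a real timestamp).

import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Date;

public class StartTimeFormatSketch {
  static final String NA = "N/A";  // stand-in for JobInfo.NA (value assumed)

  // Non-positive start times are treated as the "not set" sentinel
  // (guard condition assumed; the test above only exercises -1).
  static String formattedStartTimeStr(long startTime, DateFormat fmt) {
    return startTime <= 0 ? NA : fmt.format(new Date(startTime));
  }

  public static void main(String[] args) {
    DateFormat fmt = new SimpleDateFormat();
    System.out.println(formattedStartTimeStr(-1L, fmt));                        // N/A
    System.out.println(formattedStartTimeStr(System.currentTimeMillis(), fmt)); // a formatted date
  }
}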
Use of org.apache.hadoop.mapreduce.v2.api.records.JobId in project hadoop by apache.
From the class TestMRApps, method testToJobID.
@Test(timeout = 120000)
public void testToJobID() {
  JobId jid = MRApps.toJobID("job_1_1");
  assertEquals(1, jid.getAppId().getClusterTimestamp());
  assertEquals(1, jid.getAppId().getId());
  // tests against some proto.id and not a job.id field
  assertEquals(1, jid.getId());
}
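MRApps.toString is the inverse direction of the parse above, so a round-trip is a natural companion check. A hedged sketch follows; note the canonical rendering likely zero-pads the job number (format assumed), so the output need not equal the loose "job_1_1" input byte-for-byte.

import org.apache.hadoop.mapreduce.v2.api.records.JobId;
import org.apache.hadoop.mapreduce.v2.util.MRApps;

public class JobIdRoundTripSketch {
  public static void main(String[] args) {
    JobId jid = MRApps.toJobID("job_1_1");
    // Canonical string form, e.g. "job_1_0001" (padding assumed).
    System.out.println(MRApps.toString(jid));
  }
}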