Use of org.apache.hadoop.mapreduce.v2.api.records.JobId in project hadoop by apache.
In the class TestHsWebServicesJobsQuery, method testJobsQueryStartTimeBeginEnd:
@Test
public void testJobsQueryStartTimeBeginEnd() throws JSONException, Exception {
  WebResource r = resource();
  Map<JobId, Job> jobsMap = appContext.getAllJobs();
  int size = jobsMap.size();
  ArrayList<Long> startTime = new ArrayList<Long>(size);
  // figure out the middle start Time
  for (Map.Entry<JobId, Job> entry : jobsMap.entrySet()) {
    startTime.add(entry.getValue().getReport().getStartTime());
  }
  Collections.sort(startTime);
  assertTrue("Error we must have atleast 3 jobs", size >= 3);
  long midStartTime = startTime.get(size - 2);
  ClientResponse response = r.path("ws").path("v1").path("history")
      .path("mapreduce").path("jobs")
      .queryParam("startedTimeBegin", String.valueOf(40000))
      .queryParam("startedTimeEnd", String.valueOf(midStartTime))
      .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
  assertEquals(MediaType.APPLICATION_JSON_TYPE + "; " + JettyUtils.UTF_8,
      response.getType().toString());
  JSONObject json = response.getEntity(JSONObject.class);
  assertEquals("incorrect number of elements", 1, json.length());
  JSONObject jobs = json.getJSONObject("jobs");
  JSONArray arr = jobs.getJSONArray("job");
  assertEquals("incorrect number of elements", size - 1, arr.length());
}
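The expected count of size - 1 follows from the choice of bounds: midStartTime is the second-largest start time and startedTimeEnd is treated as inclusive, so every job except the most recently started one falls in the window (assuming distinct start times, all at or above 40000). A minimal sketch of that expectation, written as a hypothetical helper alongside the test rather than anything the test itself defines:

// Hedged sketch: counts jobs whose report start time lies in the inclusive
// window [begin, end], i.e. the filter the startedTimeBegin/startedTimeEnd
// query parameters are expected to apply on the server side.
private static int countJobsStartedBetween(Map<JobId, Job> jobs, long begin, long end) {
  int count = 0;
  for (Job job : jobs.values()) {
    long startTime = job.getReport().getStartTime();
    if (startTime >= begin && startTime <= end) {
      count++;
    }
  }
  return count;
}

With distinct start times this returns size - 1 for begin = 40000 and end = midStartTime, which is what the final assertion checks.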
Use of org.apache.hadoop.mapreduce.v2.api.records.JobId in project hadoop by apache.
In the class TestHsWebServicesJobsQuery, method testJobsQueryStateNone:
@Test
public void testJobsQueryStateNone() throws JSONException, Exception {
  WebResource r = resource();
  ArrayList<JobState> JOB_STATES =
      new ArrayList<JobState>(Arrays.asList(JobState.values()));
  // find a state that isn't in use
  Map<JobId, Job> jobsMap = appContext.getAllJobs();
  for (Map.Entry<JobId, Job> entry : jobsMap.entrySet()) {
    JOB_STATES.remove(entry.getValue().getState());
  }
  assertTrue("No unused job states", JOB_STATES.size() > 0);
  JobState notInUse = JOB_STATES.get(0);
  ClientResponse response = r.path("ws").path("v1").path("history")
      .path("mapreduce").path("jobs")
      .queryParam("state", notInUse.toString())
      .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
  assertEquals(MediaType.APPLICATION_JSON_TYPE + "; " + JettyUtils.UTF_8,
      response.getType().toString());
  JSONObject json = response.getEntity(JSONObject.class);
  assertEquals("incorrect number of elements", 1, json.length());
  assertEquals("jobs is not empty",
      new JSONObject().toString(), json.get("jobs").toString());
}
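Since JobState is an enum, an EnumSet difference is a slightly tighter way to find a state no job is in; the following is only an illustrative alternative to the ArrayList removal above, not what the test does:

// Hedged sketch (requires java.util.EnumSet): the JobState values that none
// of the known jobs currently hold.
private static EnumSet<JobState> unusedStates(Map<JobId, Job> jobs) {
  EnumSet<JobState> states = EnumSet.allOf(JobState.class);
  for (Job job : jobs.values()) {
    states.remove(job.getState());
  }
  return states;
}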
Use of org.apache.hadoop.mapreduce.v2.api.records.JobId in project hadoop by apache.
In the class TestHsWebServicesTasks, method testTaskId:
@Test
public void testTaskId() throws JSONException, Exception {
  WebResource r = resource();
  Map<JobId, Job> jobsMap = appContext.getAllJobs();
  for (JobId id : jobsMap.keySet()) {
    String jobId = MRApps.toString(id);
    for (Task task : jobsMap.get(id).getTasks().values()) {
      String tid = MRApps.toString(task.getID());
      ClientResponse response = r.path("ws").path("v1").path("history")
          .path("mapreduce").path("jobs").path(jobId)
          .path("tasks").path(tid)
          .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
      assertEquals(MediaType.APPLICATION_JSON_TYPE + "; " + JettyUtils.UTF_8,
          response.getType().toString());
      JSONObject json = response.getEntity(JSONObject.class);
      assertEquals("incorrect number of elements", 1, json.length());
      JSONObject info = json.getJSONObject("task");
      verifyHsSingleTask(info, task);
    }
  }
}
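Outside the embedded Jersey test client, the same endpoint is reachable over plain HTTP. The host, port and IDs below are illustrative assumptions (19888 is the usual JobHistoryServer web port), not values taken from the test:

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;

public class HistoryTaskFetch {
  public static void main(String[] args) throws Exception {
    // Assumed history server address and example IDs; substitute real values.
    String base = "http://localhost:19888/ws/v1/history/mapreduce";
    String jobId = "job_1326821518301_0005";
    String taskId = "task_1326821518301_0005_m_000000";
    URL url = new URL(base + "/jobs/" + jobId + "/tasks/" + taskId);
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.setRequestProperty("Accept", "application/json");
    try (BufferedReader in = new BufferedReader(
        new InputStreamReader(conn.getInputStream()))) {
      String line;
      while ((line = in.readLine()) != null) {
        System.out.println(line); // body of the form {"task": {...}}
      }
    } finally {
      conn.disconnect();
    }
  }
}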
Use of org.apache.hadoop.mapreduce.v2.api.records.JobId in project hadoop by apache.
In the class TestHsWebServicesTasks, method testTaskIdXML:
@Test
public void testTaskIdXML() throws JSONException, Exception {
  WebResource r = resource();
  Map<JobId, Job> jobsMap = appContext.getAllJobs();
  for (JobId id : jobsMap.keySet()) {
    String jobId = MRApps.toString(id);
    for (Task task : jobsMap.get(id).getTasks().values()) {
      String tid = MRApps.toString(task.getID());
      ClientResponse response = r.path("ws").path("v1").path("history")
          .path("mapreduce").path("jobs").path(jobId)
          .path("tasks").path(tid)
          .accept(MediaType.APPLICATION_XML).get(ClientResponse.class);
      assertEquals(MediaType.APPLICATION_XML_TYPE + "; " + JettyUtils.UTF_8,
          response.getType().toString());
      String xml = response.getEntity(String.class);
      DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
      DocumentBuilder db = dbf.newDocumentBuilder();
      InputSource is = new InputSource();
      is.setCharacterStream(new StringReader(xml));
      Document dom = db.parse(is);
      NodeList nodes = dom.getElementsByTagName("task");
      for (int i = 0; i < nodes.getLength(); i++) {
        Element element = (Element) nodes.item(i);
        verifyHsSingleTaskXML(element, task);
      }
    }
  }
}
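The parser above relies on DocumentBuilderFactory defaults, which is fine for XML produced by the test's own server. When the XML comes from a less trusted source, the factory is usually locked down first; a hedged hardening sketch, not present in the test:

// Hedged sketch: a DocumentBuilder that rejects DTDs and entity expansion
// before parsing untrusted XML (uses javax.xml.parsers only).
private static DocumentBuilder hardenedBuilder() throws Exception {
  DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
  dbf.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
  dbf.setXIncludeAware(false);
  dbf.setExpandEntityReferences(false);
  return dbf.newDocumentBuilder();
}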
Use of org.apache.hadoop.mapreduce.v2.api.records.JobId in project hadoop by apache.
In the class TestHsWebServicesTasks, method testTaskIdInvalid3:
@Test
public void testTaskIdInvalid3() throws JSONException, Exception {
  WebResource r = resource();
  Map<JobId, Job> jobsMap = appContext.getAllJobs();
  for (JobId id : jobsMap.keySet()) {
    String jobId = MRApps.toString(id);
    String tid = "task_0_0000_m";
    try {
      r.path("ws").path("v1").path("history").path("mapreduce")
          .path("jobs").path(jobId).path("tasks").path(tid)
          .get(JSONObject.class);
      fail("should have thrown exception on invalid uri");
    } catch (UniformInterfaceException ue) {
      ClientResponse response = ue.getResponse();
      assertResponseStatusCode(Status.NOT_FOUND, response.getStatusInfo());
      assertEquals(MediaType.APPLICATION_JSON_TYPE + "; " + JettyUtils.UTF_8,
          response.getType().toString());
      JSONObject msg = response.getEntity(JSONObject.class);
      JSONObject exception = msg.getJSONObject("RemoteException");
      assertEquals("incorrect number of elements", 3, exception.length());
      String message = exception.getString("message");
      String type = exception.getString("exception");
      String classname = exception.getString("javaClassName");
      WebServicesTestUtils.checkStringEqual("exception message",
          "java.lang.Exception: TaskId string : "
              + "task_0_0000_m is not properly formed"
              + "\nReason: java.util.regex.Matcher[pattern="
              + TaskID.TASK_ID_REGEX + " region=0,13 lastmatch=]",
          message);
      WebServicesTestUtils.checkStringMatch("exception type",
          "NotFoundException", type);
      WebServicesTestUtils.checkStringMatch("exception classname",
          "org.apache.hadoop.yarn.webapp.NotFoundException", classname);
    }
  }
}
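The 404 arises because task_0_0000_m carries a job timestamp, job number and task type but no trailing task number, so it cannot match the TaskID pattern that the error message embeds. A small illustrative check against that same public regex (the well-formed ID below is an assumed example, not taken from the test):

import java.util.regex.Pattern;
import org.apache.hadoop.mapreduce.TaskID;

public class TaskIdRegexCheck {
  public static void main(String[] args) {
    Pattern p = Pattern.compile(TaskID.TASK_ID_REGEX);
    // Well-formed: task_<jobtimestamp>_<jobnumber>_<type>_<tasknumber>
    System.out.println(p.matcher("task_1326821518301_0005_m_000000").matches()); // true
    // Malformed: the trailing task number is missing, so the match fails and
    // the web service maps the parse failure to a NotFoundException (404).
    System.out.println(p.matcher("task_0_0000_m").matches()); // false
  }
}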