Use of org.apache.hadoop.mapreduce.v2.app.job.Task in project hadoop by apache.
The class TestAMWebServicesAttempts, method verifyAMTaskAttemptsXML:
public void verifyAMTaskAttemptsXML(NodeList nodes, Task task) {
  assertEquals("incorrect number of elements", 1, nodes.getLength());
  for (TaskAttempt att : task.getAttempts().values()) {
    TaskAttemptId id = att.getID();
    String attid = MRApps.toString(id);
    Boolean found = false;
    for (int i = 0; i < nodes.getLength(); i++) {
      Element element = (Element) nodes.item(i);
      if (attid.matches(WebServicesTestUtils.getXmlString(element, "id"))) {
        found = true;
        verifyAMTaskAttemptXML(element, att, task.getType());
      }
    }
    assertTrue("task attempt with id: " + attid + " not in web service output",
        found);
  }
}
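As a side note, Task.getAttempts() returns a map keyed by TaskAttemptId, so the set of attempt ids a verifier should expect can be collected up front. A minimal sketch, assuming only the Task, TaskAttempt, and MRApps APIs used above (the AttemptIdCollector class name is illustrative, not part of the Hadoop sources):

// Minimal sketch: collect the MRApps-formatted id of every attempt of a
// task, e.g. to compare against the ids reported by the web service.
import java.util.HashSet;
import java.util.Set;

import org.apache.hadoop.mapreduce.v2.app.job.Task;
import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
import org.apache.hadoop.mapreduce.v2.util.MRApps;

public class AttemptIdCollector {
  // Returns the formatted ids of all attempts of the given task.
  public static Set<String> attemptIds(Task task) {
    Set<String> ids = new HashSet<String>();
    for (TaskAttempt att : task.getAttempts().values()) {
      ids.add(MRApps.toString(att.getID()));
    }
    return ids;
  }
}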
Use of org.apache.hadoop.mapreduce.v2.app.job.Task in project hadoop by apache.
The class TestAMWebApp, method getTaskParams:
public static Map<String, String> getTaskParams(AppContext appContext) {
  JobId jobId = appContext.getAllJobs().entrySet().iterator().next().getKey();
  Entry<TaskId, Task> e = appContext.getJob(jobId).getTasks().entrySet()
      .iterator().next();
  Map<String, String> params = new HashMap<String, String>();
  params.put(AMParams.JOB_ID, MRApps.toString(jobId));
  params.put(AMParams.TASK_ID, MRApps.toString(e.getKey()));
  params.put(AMParams.TASK_TYPE, MRApps.taskSymbol(e.getValue().getType()));
  return params;
}
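A caller would typically feed this map into a web view or request builder. A minimal usage sketch, assuming an AppContext populated with at least one job and task as in these tests:

// Minimal usage sketch; appContext is assumed to be an AppContext set up
// with at least one job and task, as in the tests above.
Map<String, String> params = TestAMWebApp.getTaskParams(appContext);
String jobId = params.get(AMParams.JOB_ID);       // e.g. a "job_..." id string
String taskId = params.get(AMParams.TASK_ID);     // e.g. a "task_..." id string
String taskType = params.get(AMParams.TASK_TYPE); // "m" or "r" from MRApps.taskSymbol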
Use of org.apache.hadoop.mapreduce.v2.app.job.Task in project hadoop by apache.
The class TestAMWebServicesTasks, method testTaskIdNonExist:
@Test
public void testTaskIdNonExist() throws JSONException, Exception {
  WebResource r = resource();
  Map<JobId, Job> jobsMap = appContext.getAllJobs();
  for (JobId id : jobsMap.keySet()) {
    String jobId = MRApps.toString(id);
    String tid = "task_0_0000_m_000000";
    try {
      r.path("ws").path("v1").path("mapreduce").path("jobs").path(jobId)
          .path("tasks").path(tid).get(JSONObject.class);
      fail("should have thrown exception on invalid uri");
    } catch (UniformInterfaceException ue) {
      ClientResponse response = ue.getResponse();
      assertResponseStatusCode(Status.NOT_FOUND, response.getStatusInfo());
      assertEquals(MediaType.APPLICATION_JSON_TYPE + "; " + JettyUtils.UTF_8,
          response.getType().toString());
      JSONObject msg = response.getEntity(JSONObject.class);
      JSONObject exception = msg.getJSONObject("RemoteException");
      assertEquals("incorrect number of elements", 3, exception.length());
      String message = exception.getString("message");
      String type = exception.getString("exception");
      String classname = exception.getString("javaClassName");
      WebServicesTestUtils.checkStringMatch("exception message",
          "java.lang.Exception: task not found with id task_0_0000_m_000000",
          message);
      WebServicesTestUtils.checkStringMatch("exception type",
          "NotFoundException", type);
      WebServicesTestUtils.checkStringMatch("exception classname",
          "org.apache.hadoop.yarn.webapp.NotFoundException", classname);
    }
  }
}
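This message/exception/javaClassName check on the RemoteException body recurs in the other negative-path tests, so a hypothetical helper along the following lines could factor it out (the helper name is illustrative and not part of the Hadoop test code; it uses only the Jettison JSONObject and WebServicesTestUtils calls shown above):

// Hypothetical helper: asserts the three standard fields of a
// RemoteException JSON body returned by the MR AM web services.
private void assertRemoteException(JSONObject msg, String expectedMessage,
    String expectedType, String expectedClassname) throws JSONException {
  JSONObject exception = msg.getJSONObject("RemoteException");
  assertEquals("incorrect number of elements", 3, exception.length());
  WebServicesTestUtils.checkStringMatch("exception message",
      expectedMessage, exception.getString("message"));
  WebServicesTestUtils.checkStringMatch("exception type",
      expectedType, exception.getString("exception"));
  WebServicesTestUtils.checkStringMatch("exception classname",
      expectedClassname, exception.getString("javaClassName"));
}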
Use of org.apache.hadoop.mapreduce.v2.app.job.Task in project hadoop by apache.
The class TestAMWebServicesTasks, method testTaskIdCountersDefault:
@Test
public void testTaskIdCountersDefault() throws JSONException, Exception {
  WebResource r = resource();
  Map<JobId, Job> jobsMap = appContext.getAllJobs();
  for (JobId id : jobsMap.keySet()) {
    String jobId = MRApps.toString(id);
    for (Task task : jobsMap.get(id).getTasks().values()) {
      String tid = MRApps.toString(task.getID());
      ClientResponse response = r.path("ws").path("v1").path("mapreduce")
          .path("jobs").path(jobId).path("tasks").path(tid).path("counters")
          .get(ClientResponse.class);
      assertEquals(MediaType.APPLICATION_JSON_TYPE + "; " + JettyUtils.UTF_8,
          response.getType().toString());
      JSONObject json = response.getEntity(JSONObject.class);
      assertEquals("incorrect number of elements", 1, json.length());
      JSONObject info = json.getJSONObject("jobTaskCounters");
      verifyAMJobTaskCounters(info, task);
    }
  }
}
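Before delegating to verifyAMJobTaskCounters, a test could first sanity-check the wrapper object. A minimal sketch using the same Jettison JSONObject API, under the assumption that the jobTaskCounters payload carries the task id under "id":

// Minimal sketch; json and task come from the loop above. Assumes the
// jobTaskCounters object exposes the task id under the "id" key.
assertTrue("jobTaskCounters object missing", json.has("jobTaskCounters"));
JSONObject info = json.getJSONObject("jobTaskCounters");
WebServicesTestUtils.checkStringMatch("id",
    MRApps.toString(task.getID()), info.getString("id"));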
Use of org.apache.hadoop.mapreduce.v2.app.job.Task in project hadoop by apache.
The class TestAMWebServicesAttempt, method testGetTaskAttemptIdState:
@Test
public void testGetTaskAttemptIdState() throws Exception {
  WebResource r = resource();
  Map<JobId, Job> jobsMap = appContext.getAllJobs();
  for (JobId id : jobsMap.keySet()) {
    String jobId = MRApps.toString(id);
    for (Task task : jobsMap.get(id).getTasks().values()) {
      String tid = MRApps.toString(task.getID());
      for (TaskAttempt att : task.getAttempts().values()) {
        TaskAttemptId attemptid = att.getID();
        String attid = MRApps.toString(attemptid);
        ClientResponse response = r.path("ws").path("v1").path("mapreduce")
            .path("jobs").path(jobId).path("tasks").path(tid)
            .path("attempts").path(attid).path("state")
            .queryParam("user.name", webserviceUserName)
            .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
        assertEquals(MediaType.APPLICATION_JSON_TYPE + "; " + JettyUtils.UTF_8,
            response.getType().toString());
        JSONObject json = response.getEntity(JSONObject.class);
        assertEquals("incorrect number of elements", 1, json.length());
        assertEquals(att.getState().toString(), json.get("state"));
      }
    }
  }
}
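For comparison against the web output, the expected values can also be read straight from the in-memory model. A minimal sketch (assuming only the Task and TaskAttempt interfaces used above; the helper name is illustrative) that maps each formatted attempt id to its reported state:

// Minimal sketch: map each formatted attempt id of a task to its current
// state, which is what the /state endpoint is expected to report.
private Map<String, String> expectedAttemptStates(Task task) {
  Map<String, String> states = new HashMap<String, String>();
  for (TaskAttempt att : task.getAttempts().values()) {
    states.put(MRApps.toString(att.getID()), att.getState().toString());
  }
  return states;
}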