Search in sources:

Example 26 with Job

Use of org.apache.hadoop.mapreduce.v2.app.job.Job in project hadoop by apache.

From the class TestAMWebServicesJobs, method testJobCountersSlash.

@Test
public void testJobCountersSlash() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
        String jobId = MRApps.toString(id);
        // Note the trailing slash on "counters/"; this variant checks that the URI still resolves.
        ClientResponse response = r.path("ws").path("v1").path("mapreduce")
            .path("jobs").path(jobId).path("counters/")
            .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
        assertEquals(MediaType.APPLICATION_JSON_TYPE + "; " + JettyUtils.UTF_8, response.getType().toString());
        JSONObject json = response.getEntity(JSONObject.class);
        assertEquals("incorrect number of elements", 1, json.length());
        JSONObject info = json.getJSONObject("jobCounters");
        verifyAMJobCounters(info, jobsMap.get(id));
    }
}
Also used: ClientResponse (com.sun.jersey.api.client.ClientResponse), JSONObject (org.codehaus.jettison.json.JSONObject), WebResource (com.sun.jersey.api.client.WebResource), Job (org.apache.hadoop.mapreduce.v2.app.job.Job), JobId (org.apache.hadoop.mapreduce.v2.api.records.JobId), Test (org.junit.Test)
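
The same counters endpoint can be exercised outside the test harness with a plain Jersey 1.x client. The sketch below is illustrative only: the base URL and job id are placeholder values (not taken from the test), and it assumes the jersey-json module is on the classpath so the response can be read as a JSONObject.

import javax.ws.rs.core.MediaType;

import org.codehaus.jettison.json.JSONObject;

import com.sun.jersey.api.client.Client;
import com.sun.jersey.api.client.ClientResponse;
import com.sun.jersey.api.client.WebResource;

public class JobCountersFetch {
    public static void main(String[] args) throws Exception {
        // Placeholder AM web address and job id; substitute real values.
        WebResource r = Client.create().resource("http://localhost:8088/proxy/application_1_0001");
        ClientResponse response = r.path("ws").path("v1").path("mapreduce")
            .path("jobs").path("job_1_0001").path("counters")
            .accept(MediaType.APPLICATION_JSON)
            .get(ClientResponse.class);
        // The payload nests everything under a single "jobCounters" object,
        // the element the test above hands to verifyAMJobCounters.
        JSONObject counters = response.getEntity(JSONObject.class).getJSONObject("jobCounters");
        System.out.println(counters);
    }
}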

Example 27 with Job

Use of org.apache.hadoop.mapreduce.v2.app.job.Job in project hadoop by apache.

From the class TestAMWebServicesTasks, method testTasksQueryInvalid.

@Test
public void testTasksQueryInvalid() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
        String jobId = MRApps.toString(id);
        // tasktype must be exactly either "m" or "r"
        String tasktype = "reduce";
        try {
            r.path("ws").path("v1").path("mapreduce").path("jobs").path(jobId)
                .path("tasks").queryParam("type", tasktype)
                .accept(MediaType.APPLICATION_JSON).get(JSONObject.class);
            fail("should have thrown exception on invalid uri");
        } catch (UniformInterfaceException ue) {
            ClientResponse response = ue.getResponse();
            assertResponseStatusCode(Status.BAD_REQUEST, response.getStatusInfo());
            assertEquals(MediaType.APPLICATION_JSON_TYPE + "; " + JettyUtils.UTF_8, response.getType().toString());
            JSONObject msg = response.getEntity(JSONObject.class);
            JSONObject exception = msg.getJSONObject("RemoteException");
            assertEquals("incorrect number of elements", 3, exception.length());
            String message = exception.getString("message");
            String type = exception.getString("exception");
            String classname = exception.getString("javaClassName");
            WebServicesTestUtils.checkStringMatch("exception message", "java.lang.Exception: tasktype must be either m or r", message);
            WebServicesTestUtils.checkStringMatch("exception type", "BadRequestException", type);
            WebServicesTestUtils.checkStringMatch("exception classname", "org.apache.hadoop.yarn.webapp.BadRequestException", classname);
        }
    }
}
Also used: ClientResponse (com.sun.jersey.api.client.ClientResponse), UniformInterfaceException (com.sun.jersey.api.client.UniformInterfaceException), JSONObject (org.codehaus.jettison.json.JSONObject), WebResource (com.sun.jersey.api.client.WebResource), Job (org.apache.hadoop.mapreduce.v2.app.job.Job), JobId (org.apache.hadoop.mapreduce.v2.api.records.JobId), Test (org.junit.Test)
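
The valid counterpart of the request above passes "m" or "r" as the type parameter. A minimal sketch, again with a placeholder base URL and job id rather than values from the test:

import javax.ws.rs.core.MediaType;

import org.codehaus.jettison.json.JSONObject;

import com.sun.jersey.api.client.Client;
import com.sun.jersey.api.client.WebResource;

public class TasksByTypeFetch {
    public static void main(String[] args) throws Exception {
        // Placeholder AM web address and job id; substitute real values.
        WebResource r = Client.create().resource("http://localhost:8088/proxy/application_1_0001");
        // "m" selects map tasks and "r" reduce tasks; any other value triggers
        // the BadRequestException asserted in the test above.
        JSONObject json = r.path("ws").path("v1").path("mapreduce")
            .path("jobs").path("job_1_0001").path("tasks")
            .queryParam("type", "m")
            .accept(MediaType.APPLICATION_JSON)
            .get(JSONObject.class);
        // The task list is wrapped in a "tasks" object, as in the other tests in this class.
        System.out.println(json.getJSONObject("tasks"));
    }
}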

Example 28 with Job

Use of org.apache.hadoop.mapreduce.v2.app.job.Job in project hadoop by apache.

From the class TestAMWebServicesTasks, method testTaskIdNonExist.

@Test
public void testTaskIdNonExist() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
        String jobId = MRApps.toString(id);
        // A syntactically valid task id that does not belong to any job known to this AM.
        String tid = "task_0_0000_m_000000";
        try {
            r.path("ws").path("v1").path("mapreduce").path("jobs").path(jobId)
                .path("tasks").path(tid).get(JSONObject.class);
            fail("should have thrown exception on invalid uri");
        } catch (UniformInterfaceException ue) {
            ClientResponse response = ue.getResponse();
            assertResponseStatusCode(Status.NOT_FOUND, response.getStatusInfo());
            assertEquals(MediaType.APPLICATION_JSON_TYPE + "; " + JettyUtils.UTF_8, response.getType().toString());
            JSONObject msg = response.getEntity(JSONObject.class);
            JSONObject exception = msg.getJSONObject("RemoteException");
            assertEquals("incorrect number of elements", 3, exception.length());
            String message = exception.getString("message");
            String type = exception.getString("exception");
            String classname = exception.getString("javaClassName");
            WebServicesTestUtils.checkStringMatch("exception message", "java.lang.Exception: task not found with id task_0_0000_m_000000", message);
            WebServicesTestUtils.checkStringMatch("exception type", "NotFoundException", type);
            WebServicesTestUtils.checkStringMatch("exception classname", "org.apache.hadoop.yarn.webapp.NotFoundException", classname);
        }
    }
}
Also used: ClientResponse (com.sun.jersey.api.client.ClientResponse), UniformInterfaceException (com.sun.jersey.api.client.UniformInterfaceException), JSONObject (org.codehaus.jettison.json.JSONObject), WebResource (com.sun.jersey.api.client.WebResource), Job (org.apache.hadoop.mapreduce.v2.app.job.Job), JobId (org.apache.hadoop.mapreduce.v2.api.records.JobId), Test (org.junit.Test)
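
Outside JUnit, the same error payload can be surfaced by catching UniformInterfaceException, which is how the Jersey 1.x client reports a non-2xx status when the caller requests an entity type directly. A sketch with placeholder URL and ids:

import org.codehaus.jettison.json.JSONObject;

import com.sun.jersey.api.client.Client;
import com.sun.jersey.api.client.UniformInterfaceException;
import com.sun.jersey.api.client.WebResource;

public class TaskLookup {
    public static void main(String[] args) throws Exception {
        // Placeholder AM web address, job id and task id; substitute real values.
        WebResource r = Client.create().resource("http://localhost:8088/proxy/application_1_0001");
        try {
            JSONObject json = r.path("ws").path("v1").path("mapreduce")
                .path("jobs").path("job_1_0001")
                .path("tasks").path("task_0_0000_m_000000")
                .get(JSONObject.class);
            System.out.println(json);
        } catch (UniformInterfaceException ue) {
            // Error responses carry a "RemoteException" object whose message,
            // exception and javaClassName fields are the ones asserted above.
            JSONObject err = ue.getResponse().getEntity(JSONObject.class)
                .getJSONObject("RemoteException");
            System.err.println(err.getString("exception") + ": " + err.getString("message"));
        }
    }
}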

Example 29 with Job

Use of org.apache.hadoop.mapreduce.v2.app.job.Job in project hadoop by apache.

From the class TestAMWebServicesTasks, method testTaskIdCountersDefault.

@Test
public void testTaskIdCountersDefault() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
        String jobId = MRApps.toString(id);
        for (Task task : jobsMap.get(id).getTasks().values()) {
            String tid = MRApps.toString(task.getID());
            ClientResponse response = r.path("ws").path("v1").path("mapreduce")
                .path("jobs").path(jobId).path("tasks").path(tid).path("counters")
                .get(ClientResponse.class);
            assertEquals(MediaType.APPLICATION_JSON_TYPE + "; " + JettyUtils.UTF_8, response.getType().toString());
            JSONObject json = response.getEntity(JSONObject.class);
            assertEquals("incorrect number of elements", 1, json.length());
            JSONObject info = json.getJSONObject("jobTaskCounters");
            verifyAMJobTaskCounters(info, task);
        }
    }
}
Also used: ClientResponse (com.sun.jersey.api.client.ClientResponse), Task (org.apache.hadoop.mapreduce.v2.app.job.Task), JSONObject (org.codehaus.jettison.json.JSONObject), WebResource (com.sun.jersey.api.client.WebResource), Job (org.apache.hadoop.mapreduce.v2.app.job.Job), JobId (org.apache.hadoop.mapreduce.v2.api.records.JobId), Test (org.junit.Test)
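
The per-task counters endpoint used above can also be hit directly for a single task id. Another illustrative sketch; the base URL, job id and task id are placeholders, not values from the test:

import javax.ws.rs.core.MediaType;

import org.codehaus.jettison.json.JSONObject;

import com.sun.jersey.api.client.Client;
import com.sun.jersey.api.client.ClientResponse;
import com.sun.jersey.api.client.WebResource;

public class TaskCountersFetch {
    public static void main(String[] args) throws Exception {
        // Placeholder AM web address, job id and task id; substitute real values.
        WebResource r = Client.create().resource("http://localhost:8088/proxy/application_1_0001");
        ClientResponse response = r.path("ws").path("v1").path("mapreduce")
            .path("jobs").path("job_1_0001")
            .path("tasks").path("task_1_0001_m_000000").path("counters")
            .accept(MediaType.APPLICATION_JSON)
            .get(ClientResponse.class);
        // Counters for one task come back under "jobTaskCounters",
        // the element the test above passes to verifyAMJobTaskCounters.
        System.out.println(response.getEntity(JSONObject.class).getJSONObject("jobTaskCounters"));
    }
}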

Example 30 with Job

Use of org.apache.hadoop.mapreduce.v2.app.job.Job in project hadoop by apache.

From the class TestAMWebServicesTasks, method testTaskIdInvalid2.

@Test
public void testTaskIdInvalid2() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
        String jobId = MRApps.toString(id);
        // Malformed task id: the job number segment is missing, so it cannot be parsed.
        String tid = "task_0_m_000000";
        try {
            r.path("ws").path("v1").path("mapreduce").path("jobs").path(jobId)
                .path("tasks").path(tid).get(JSONObject.class);
            fail("should have thrown exception on invalid uri");
        } catch (UniformInterfaceException ue) {
            ClientResponse response = ue.getResponse();
            assertResponseStatusCode(Status.NOT_FOUND, response.getStatusInfo());
            assertEquals(MediaType.APPLICATION_JSON_TYPE + "; " + JettyUtils.UTF_8, response.getType().toString());
            JSONObject msg = response.getEntity(JSONObject.class);
            JSONObject exception = msg.getJSONObject("RemoteException");
            assertEquals("incorrect number of elements", 3, exception.length());
            String message = exception.getString("message");
            String type = exception.getString("exception");
            String classname = exception.getString("javaClassName");
            WebServicesTestUtils.checkStringEqual("exception message",
                "java.lang.Exception: TaskId string : "
                    + "task_0_m_000000 is not properly formed"
                    + "\nReason: java.util.regex.Matcher[pattern=" + TaskID.TASK_ID_REGEX
                    + " region=0,15 lastmatch=]", message);
            WebServicesTestUtils.checkStringMatch("exception type", "NotFoundException", type);
            WebServicesTestUtils.checkStringMatch("exception classname", "org.apache.hadoop.yarn.webapp.NotFoundException", classname);
        }
    }
}
Also used: ClientResponse (com.sun.jersey.api.client.ClientResponse), UniformInterfaceException (com.sun.jersey.api.client.UniformInterfaceException), JSONObject (org.codehaus.jettison.json.JSONObject), WebResource (com.sun.jersey.api.client.WebResource), Job (org.apache.hadoop.mapreduce.v2.app.job.Job), JobId (org.apache.hadoop.mapreduce.v2.api.records.JobId), Test (org.junit.Test)
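
A caller can reject malformed task ids before issuing a request by parsing them with org.apache.hadoop.mapreduce.TaskID.forName, which appears to be where the "is not properly formed" message asserted above originates. A small sketch; the ids are the illustrative ones used in these tests:

import org.apache.hadoop.mapreduce.TaskID;

public class TaskIdCheck {
    public static void main(String[] args) {
        // The first id is missing its job number segment; the second is well formed.
        for (String tid : new String[] { "task_0_m_000000", "task_0_0000_m_000000" }) {
            try {
                TaskID parsed = TaskID.forName(tid);
                System.out.println(tid + " parses as " + parsed);
            } catch (IllegalArgumentException e) {
                // forName reports "TaskId string : ... is not properly formed".
                System.out.println(tid + " rejected: " + e.getMessage());
            }
        }
    }
}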

Aggregations

Job (org.apache.hadoop.mapreduce.v2.app.job.Job): 291 usages
Test (org.junit.Test): 266 usages
JobId (org.apache.hadoop.mapreduce.v2.api.records.JobId): 221 usages
Configuration (org.apache.hadoop.conf.Configuration): 145 usages
Task (org.apache.hadoop.mapreduce.v2.app.job.Task): 141 usages
ClientResponse (com.sun.jersey.api.client.ClientResponse): 110 usages
WebResource (com.sun.jersey.api.client.WebResource): 110 usages
JSONObject (org.codehaus.jettison.json.JSONObject): 90 usages
TaskAttempt (org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt): 80 usages
TaskAttemptId (org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId): 49 usages
TaskId (org.apache.hadoop.mapreduce.v2.api.records.TaskId): 49 usages
YarnConfiguration (org.apache.hadoop.yarn.conf.YarnConfiguration): 44 usages
IOException (java.io.IOException): 34 usages
Path (org.apache.hadoop.fs.Path): 31 usages
JobEvent (org.apache.hadoop.mapreduce.v2.app.job.event.JobEvent): 30 usages
ApplicationAttemptId (org.apache.hadoop.yarn.api.records.ApplicationAttemptId): 30 usages
AppContext (org.apache.hadoop.mapreduce.v2.app.AppContext): 28 usages
TaskAttemptEvent (org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent): 28 usages
DrainDispatcher (org.apache.hadoop.yarn.event.DrainDispatcher): 25 usages
Path (javax.ws.rs.Path): 23 usages