
Example 21 with Job

Use of org.apache.hadoop.mapreduce.v2.app.job.Job in project hadoop by apache.

From class TestAMWebServicesAttempts, method testTaskAttemptIdSlash.

@Test
public void testTaskAttemptIdSlash() throws JSONException, Exception {
    WebResource r = resource();
    // Walk every job, task, and task attempt known to the mock AppContext.
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
        String jobId = MRApps.toString(id);
        for (Task task : jobsMap.get(id).getTasks().values()) {
            String tid = MRApps.toString(task.getID());
            for (TaskAttempt att : task.getAttempts().values()) {
                TaskAttemptId attemptid = att.getID();
                String attid = MRApps.toString(attemptid);
                // Same endpoint as the plain testTaskAttemptId variant, but with a
                // trailing slash appended to the attempt id segment.
                ClientResponse response = r.path("ws").path("v1").path("mapreduce").path("jobs").path(jobId).path("tasks").path(tid).path("attempts").path(attid + "/").accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
                assertEquals(MediaType.APPLICATION_JSON_TYPE + "; " + JettyUtils.UTF_8, response.getType().toString());
                JSONObject json = response.getEntity(JSONObject.class);
                assertEquals("incorrect number of elements", 1, json.length());
                // The body must contain exactly one "taskAttempt" object for this attempt.
                JSONObject info = json.getJSONObject("taskAttempt");
                verifyAMTaskAttempt(info, att, task.getType());
            }
        }
    }
}
Also used : ClientResponse(com.sun.jersey.api.client.ClientResponse) Task(org.apache.hadoop.mapreduce.v2.app.job.Task) JSONObject(org.codehaus.jettison.json.JSONObject) TaskAttemptId(org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId) WebResource(com.sun.jersey.api.client.WebResource) TaskAttempt(org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt) Job(org.apache.hadoop.mapreduce.v2.app.job.Job) JobId(org.apache.hadoop.mapreduce.v2.api.records.JobId) Test(org.junit.Test)
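
As a hedged illustration of the URL this test exercises (not code from the Hadoop project): the sketch below assembles the same task-attempt path by hand, including the trailing slash that the ...Slash variant appends. The job, task, and attempt ids are hypothetical placeholders; the real test derives them with MRApps.toString(...) as shown above.

// Minimal standalone sketch; the identifiers are made up for illustration.
public class TaskAttemptPathSketch {
    public static void main(String[] args) {
        String jobId = "job_1234567890123_0001"; // hypothetical
        String taskId = "task_1234567890123_0001_m_000000"; // hypothetical
        String attemptId = "attempt_1234567890123_0001_m_000000_0"; // hypothetical
        // Same path the test builds with chained WebResource.path(...) calls,
        // plus the trailing slash the "...Slash" variant adds to the attempt id.
        String path = String.format("/ws/v1/mapreduce/jobs/%s/tasks/%s/attempts/%s/",
                jobId, taskId, attemptId);
        System.out.println(path);
    }
}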

Example 22 with Job

Use of org.apache.hadoop.mapreduce.v2.app.job.Job in project hadoop by apache.

From class TestAMWebServicesJobConf, method testJobConf.

@Test
public void testJobConf() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
        String jobId = MRApps.toString(id);
        // GET the job configuration resource as JSON.
        ClientResponse response = r.path("ws").path("v1").path("mapreduce").path("jobs").path(jobId).path("conf").accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
        assertEquals(MediaType.APPLICATION_JSON_TYPE + "; " + JettyUtils.UTF_8, response.getType().toString());
        JSONObject json = response.getEntity(JSONObject.class);
        assertEquals("incorrect number of elements", 1, json.length());
        // The single top-level element must be "conf"; its contents are compared
        // against the job's actual configuration by the verification helper.
        JSONObject info = json.getJSONObject("conf");
        verifyAMJobConf(info, jobsMap.get(id));
    }
}
Also used : ClientResponse(com.sun.jersey.api.client.ClientResponse) JSONObject(org.codehaus.jettison.json.JSONObject) WebResource(com.sun.jersey.api.client.WebResource) Job(org.apache.hadoop.mapreduce.v2.app.job.Job) JobId(org.apache.hadoop.mapreduce.v2.api.records.JobId) Test(org.junit.Test)
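
A hedged sketch of reading a "conf" payload like the one verified above, using Jettison (the same JSON library the test uses). The field names "path", "property", "name", and "value" follow the MR AM REST API documentation and are assumptions here; verifyAMJobConf in the real test is what actually pins down the contract.

import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONObject;

// Standalone sketch with a made-up payload; not part of the Hadoop test class.
public class JobConfJsonSketch {
    public static void main(String[] args) throws Exception {
        String body = "{\"conf\":{\"path\":\"hdfs://host/job.xml\","
                + "\"property\":[{\"name\":\"mapreduce.job.name\",\"value\":\"demo\"}]}}";
        JSONObject conf = new JSONObject(body).getJSONObject("conf");
        System.out.println("config path: " + conf.getString("path")); // assumed field name
        JSONArray props = conf.getJSONArray("property");              // assumed field name
        for (int i = 0; i < props.length(); i++) {
            JSONObject p = props.getJSONObject(i);
            System.out.println(p.getString("name") + " = " + p.getString("value"));
        }
    }
}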

Example 23 with Job

Use of org.apache.hadoop.mapreduce.v2.app.job.Job in project hadoop by apache.

From class TestAMWebApp, method testMRWebAppRedirection.

@Test
public void testMRWebAppRedirection() throws Exception {
    // Exercise the redirect for both the HTTP and HTTPS proxy schemes.
    String[] schemePrefix = { WebAppUtils.HTTP_PREFIX, WebAppUtils.HTTPS_PREFIX };
    for (String scheme : schemePrefix) {
        MRApp app = new MRApp(2, 2, true, this.getClass().getName(), true) {

            @Override
            protected ClientService createClientService(AppContext context) {
                return new MRClientService(context);
            }
        };
        Configuration conf = new Configuration();
        conf.set(YarnConfiguration.PROXY_ADDRESS, "9.9.9.9");
        conf.set(YarnConfiguration.YARN_HTTP_POLICY_KEY, scheme.equals(WebAppUtils.HTTPS_PREFIX) ? Policy.HTTPS_ONLY.name() : Policy.HTTP_ONLY.name());
        webProxyBase = "/proxy/" + app.getAppID();
        // Install the AM filter so requests that do not come from the proxy address
        // are redirected to the proxy.
        conf.set("hadoop.http.filter.initializers", TestAMFilterInitializer.class.getName());
        Job job = app.submit(conf);
        String hostPort = NetUtils.getHostPortString(((MRClientService) app.getClientService()).getWebApp().getListenerAddress());
        URL httpUrl = new URL("http://" + hostPort + "/mapreduce");
        HttpURLConnection conn = (HttpURLConnection) httpUrl.openConnection();
        // Keep the 302 and its Location header observable instead of following it.
        conn.setInstanceFollowRedirects(false);
        conn.connect();
        // Because we're not calling from the proxy's address, we'll be redirected
        String expectedURL = scheme + conf.get(YarnConfiguration.PROXY_ADDRESS) + ProxyUriUtils.getPath(app.getAppID(), "/mapreduce", true);
        Assert.assertEquals(expectedURL, conn.getHeaderField(HttpHeaders.LOCATION));
        Assert.assertEquals(HttpStatus.SC_MOVED_TEMPORARILY, conn.getResponseCode());
        app.waitForState(job, JobState.SUCCEEDED);
        app.verifyCompleted();
    }
}
Also used : HttpURLConnection(java.net.HttpURLConnection) YarnConfiguration(org.apache.hadoop.yarn.conf.YarnConfiguration) Configuration(org.apache.hadoop.conf.Configuration) AppContext(org.apache.hadoop.mapreduce.v2.app.AppContext) MockAppContext(org.apache.hadoop.mapreduce.v2.app.MockAppContext) MRClientService(org.apache.hadoop.mapreduce.v2.app.client.MRClientService) Job(org.apache.hadoop.mapreduce.v2.app.job.Job) URL(java.net.URL) MRApp(org.apache.hadoop.mapreduce.v2.app.MRApp) Test(org.junit.Test)
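
A hedged, standalone sketch of the redirect probe the test performs: with automatic redirect-following disabled, the 302 status and the Location header pointing at the proxy stay visible to the client. The URL below is a placeholder (the test derives the real host:port from the MRClientService listener address), and running the sketch requires something listening at that address.

import java.net.HttpURLConnection;
import java.net.URL;

// Not part of the Hadoop test class; a plain java.net illustration.
public class RedirectProbeSketch {
    public static void main(String[] args) throws Exception {
        URL url = new URL("http://localhost:12345/mapreduce"); // hypothetical address
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setInstanceFollowRedirects(false); // keep the 302 visible
        conn.connect();
        System.out.println("status:   " + conn.getResponseCode());
        System.out.println("location: " + conn.getHeaderField("Location"));
        conn.disconnect();
    }
}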

Example 24 with Job

Use of org.apache.hadoop.mapreduce.v2.app.job.Job in project hadoop by apache.

From class TestAMWebServicesJobs, method testJobCountersXML.

@Test
public void testJobCountersXML() throws Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
        String jobId = MRApps.toString(id);
        // Request the counters resource as XML rather than JSON.
        ClientResponse response = r.path("ws").path("v1").path("mapreduce").path("jobs").path(jobId).path("counters").accept(MediaType.APPLICATION_XML).get(ClientResponse.class);
        assertEquals(MediaType.APPLICATION_XML_TYPE + "; " + JettyUtils.UTF_8, response.getType().toString());
        String xml = response.getEntity(String.class);
        // Parse the body into a DOM and hand the <jobCounters> elements to the
        // verification helper.
        DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
        DocumentBuilder db = dbf.newDocumentBuilder();
        InputSource is = new InputSource();
        is.setCharacterStream(new StringReader(xml));
        Document dom = db.parse(is);
        NodeList info = dom.getElementsByTagName("jobCounters");
        verifyAMJobCountersXML(info, jobsMap.get(id));
    }
}
Also used : ClientResponse(com.sun.jersey.api.client.ClientResponse) InputSource(org.xml.sax.InputSource) DocumentBuilderFactory(javax.xml.parsers.DocumentBuilderFactory) DocumentBuilder(javax.xml.parsers.DocumentBuilder) NodeList(org.w3c.dom.NodeList) StringReader(java.io.StringReader) WebResource(com.sun.jersey.api.client.WebResource) Job(org.apache.hadoop.mapreduce.v2.app.job.Job) Document(org.w3c.dom.Document) JobId(org.apache.hadoop.mapreduce.v2.api.records.JobId) Test(org.junit.Test)
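
A hedged sketch of the same DOM-parsing pattern applied to a made-up counters document. The element names (counterGroup, counter, totalCounterValue) follow the MR AM REST API documentation and are assumptions here; verifyAMJobCountersXML in the real test performs the authoritative comparison against the job's counters.

import java.io.StringReader;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import org.w3c.dom.Document;
import org.w3c.dom.NodeList;
import org.xml.sax.InputSource;

// Standalone illustration with a fabricated payload; not part of the Hadoop test class.
public class CountersXmlSketch {
    public static void main(String[] args) throws Exception {
        String xml = "<jobCounters><id>job_1234567890123_0001</id>"
                + "<counterGroup><counterGroupName>demo</counterGroupName>"
                + "<counter><name>RECORDS</name><totalCounterValue>42</totalCounterValue></counter>"
                + "</counterGroup></jobCounters>";
        DocumentBuilder db = DocumentBuilderFactory.newInstance().newDocumentBuilder();
        Document dom = db.parse(new InputSource(new StringReader(xml)));
        NodeList groups = dom.getElementsByTagName("counterGroup");
        NodeList counters = dom.getElementsByTagName("counter");
        System.out.println("groups=" + groups.getLength() + ", counters=" + counters.getLength());
    }
}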

Example 25 with Job

Use of org.apache.hadoop.mapreduce.v2.app.job.Job in project hadoop by apache.

From class TestAMWebServicesJobs, method testJobIdDefault.

@Test
public void testJobIdDefault() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
        String jobId = MRApps.toString(id);
        // No accept(...) call here: the test checks that JSON is returned as the
        // default media type when the client does not ask for one explicitly.
        ClientResponse response = r.path("ws").path("v1").path("mapreduce").path("jobs").path(jobId).get(ClientResponse.class);
        assertEquals(MediaType.APPLICATION_JSON_TYPE + "; " + JettyUtils.UTF_8, response.getType().toString());
        JSONObject json = response.getEntity(JSONObject.class);
        assertEquals("incorrect number of elements", 1, json.length());
        JSONObject info = json.getJSONObject("job");
        verifyAMJob(info, jobsMap.get(id));
    }
}
Also used : ClientResponse(com.sun.jersey.api.client.ClientResponse) JSONObject(org.codehaus.jettison.json.JSONObject) WebResource(com.sun.jersey.api.client.WebResource) Job(org.apache.hadoop.mapreduce.v2.app.job.Job) JobId(org.apache.hadoop.mapreduce.v2.api.records.JobId) Test(org.junit.Test)
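
A hedged sketch of the content-negotiation point this test makes: with no accept(...) call, the response comes back in whatever the service declares as its default media type (JSON here). The base URL and job id are placeholders; the real test talks to the in-process Jersey test resource() rather than a standalone Client.

import com.sun.jersey.api.client.Client;
import com.sun.jersey.api.client.ClientResponse;
import com.sun.jersey.api.client.WebResource;

// Standalone Jersey 1.x illustration; not part of the Hadoop test class.
public class DefaultMediaTypeSketch {
    public static void main(String[] args) {
        Client client = Client.create();
        WebResource r = client.resource("http://localhost:12345"); // hypothetical address
        ClientResponse response = r.path("ws").path("v1").path("mapreduce")
                .path("jobs").path("job_1234567890123_0001")        // hypothetical job id
                .get(ClientResponse.class);                          // note: no accept(...)
        System.out.println("content type: " + response.getType());
        client.destroy();
    }
}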

Aggregations

Job (org.apache.hadoop.mapreduce.v2.app.job.Job): 291
Test (org.junit.Test): 266
JobId (org.apache.hadoop.mapreduce.v2.api.records.JobId): 221
Configuration (org.apache.hadoop.conf.Configuration): 145
Task (org.apache.hadoop.mapreduce.v2.app.job.Task): 141
ClientResponse (com.sun.jersey.api.client.ClientResponse): 110
WebResource (com.sun.jersey.api.client.WebResource): 110
JSONObject (org.codehaus.jettison.json.JSONObject): 90
TaskAttempt (org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt): 80
TaskAttemptId (org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId): 49
TaskId (org.apache.hadoop.mapreduce.v2.api.records.TaskId): 49
YarnConfiguration (org.apache.hadoop.yarn.conf.YarnConfiguration): 44
IOException (java.io.IOException): 34
Path (org.apache.hadoop.fs.Path): 31
JobEvent (org.apache.hadoop.mapreduce.v2.app.job.event.JobEvent): 30
ApplicationAttemptId (org.apache.hadoop.yarn.api.records.ApplicationAttemptId): 30
AppContext (org.apache.hadoop.mapreduce.v2.app.AppContext): 28
TaskAttemptEvent (org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent): 28
DrainDispatcher (org.apache.hadoop.yarn.event.DrainDispatcher): 25
Path (javax.ws.rs.Path): 23