Example 1 with JobTaskAttemptState

Use of org.apache.hadoop.mapreduce.v2.app.webapp.dao.JobTaskAttemptState in project hadoop by apache.

From the class AMWebServices, the method updateJobTaskAttemptState:

@PUT
@Path("/jobs/{jobid}/tasks/{taskid}/attempts/{attemptid}/state")
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8, MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 })
@Consumes({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
public Response updateJobTaskAttemptState(JobTaskAttemptState targetState,
        @Context HttpServletRequest hsr, @PathParam("jobid") String jid,
        @PathParam("taskid") String tid, @PathParam("attemptid") String attId)
        throws IOException, InterruptedException {
    init();
    Job job = getJobFromJobIdString(jid, appCtx);
    checkAccess(job, hsr);
    String remoteUser = hsr.getRemoteUser();
    UserGroupInformation callerUGI = null;
    if (remoteUser != null) {
        callerUGI = UserGroupInformation.createRemoteUser(remoteUser);
    }
    Task task = getTaskFromTaskIdString(tid, job);
    TaskAttempt ta = getTaskAttemptFromTaskAttemptString(attId, task);
    if (!ta.getState().toString().equals(targetState.getState())) {
        // allow users to kill the job task attempt
        if (targetState.getState().equals(TaskAttemptState.KILLED.toString())) {
            return killJobTaskAttempt(ta, callerUGI, hsr);
        }
        throw new BadRequestException("Only '" + TaskAttemptState.KILLED.toString() + "' is allowed as a target state.");
    }
    JobTaskAttemptState ret = new JobTaskAttemptState();
    ret.setState(ta.getState().toString());
    return Response.status(Status.OK).entity(ret).build();
}
Also used: Task(org.apache.hadoop.mapreduce.v2.app.job.Task), JobTaskAttemptState(org.apache.hadoop.mapreduce.v2.app.webapp.dao.JobTaskAttemptState), BadRequestException(org.apache.hadoop.yarn.webapp.BadRequestException), TaskAttempt(org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt), Job(org.apache.hadoop.mapreduce.v2.app.job.Job), UserGroupInformation(org.apache.hadoop.security.UserGroupInformation), Path(javax.ws.rs.Path), Produces(javax.ws.rs.Produces), Consumes(javax.ws.rs.Consumes), PUT(javax.ws.rs.PUT)
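
As a hedged illustration of how a client might drive this endpoint, the sketch below PUTs the JSON body {"state":"KILLED"} using the JDK 11 java.net.http client. The host, port, and job/task/attempt IDs are hypothetical placeholders (in a real cluster the AM web services are typically reached through the RM proxy), and it assumes the calling user is allowed to kill the attempt.

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class KillAttemptClientSketch {
    public static void main(String[] args) throws Exception {
        // Hypothetical AM address and IDs; substitute values from a real cluster.
        String url = "http://am-host:8080/ws/v1/mapreduce"
                + "/jobs/job_1326232085508_0004"
                + "/tasks/task_1326232085508_0004_m_000000"
                + "/attempts/attempt_1326232085508_0004_m_000000_0/state";
        HttpClient client = HttpClient.newHttpClient();
        HttpRequest put = HttpRequest.newBuilder(URI.create(url))
                .header("Content-Type", "application/json")
                .header("Accept", "application/json")
                .PUT(HttpRequest.BodyPublishers.ofString("{\"state\":\"KILLED\"}"))
                .build();
        HttpResponse<String> response =
                client.send(put, HttpResponse.BodyHandlers.ofString());
        // 200 with the new state echoed back on success, 400 for any target state
        // other than KILLED, 403 if the caller may not kill the attempt.
        System.out.println(response.statusCode() + " " + response.body());
    }
}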

Example 2 with JobTaskAttemptState

Use of org.apache.hadoop.mapreduce.v2.app.webapp.dao.JobTaskAttemptState in project hadoop by apache.

From the class AMWebServices, the method getJobTaskAttemptState:

@GET
@Path("/jobs/{jobid}/tasks/{taskid}/attempts/{attemptid}/state")
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8, MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 })
public JobTaskAttemptState getJobTaskAttemptState(@Context HttpServletRequest hsr,
        @PathParam("jobid") String jid, @PathParam("taskid") String tid,
        @PathParam("attemptid") String attId) throws IOException, InterruptedException {
    init();
    Job job = getJobFromJobIdString(jid, appCtx);
    checkAccess(job, hsr);
    Task task = getTaskFromTaskIdString(tid, job);
    TaskAttempt ta = getTaskAttemptFromTaskAttemptString(attId, task);
    return new JobTaskAttemptState(ta.getState().toString());
}
Also used: Task(org.apache.hadoop.mapreduce.v2.app.job.Task), JobTaskAttemptState(org.apache.hadoop.mapreduce.v2.app.webapp.dao.JobTaskAttemptState), TaskAttempt(org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt), Job(org.apache.hadoop.mapreduce.v2.app.job.Job), Path(javax.ws.rs.Path), Produces(javax.ws.rs.Produces), GET(javax.ws.rs.GET)
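
The read side is just a GET against the same URL. A minimal, self-contained sketch with the same hypothetical address and IDs as the PUT sketch above:

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class GetAttemptStateSketch {
    public static void main(String[] args) throws Exception {
        // Same hypothetical AM address and IDs as in the PUT sketch above.
        String url = "http://am-host:8080/ws/v1/mapreduce"
                + "/jobs/job_1326232085508_0004"
                + "/tasks/task_1326232085508_0004_m_000000"
                + "/attempts/attempt_1326232085508_0004_m_000000_0/state";
        HttpRequest get = HttpRequest.newBuilder(URI.create(url))
                .header("Accept", "application/json")
                .GET()
                .build();
        HttpResponse<String> response = HttpClient.newHttpClient()
                .send(get, HttpResponse.BodyHandlers.ofString());
        // Expected body shape: {"state":"RUNNING"} (or the attempt's current state).
        System.out.println(response.body());
    }
}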

Example 3 with JobTaskAttemptState

Use of org.apache.hadoop.mapreduce.v2.app.webapp.dao.JobTaskAttemptState in project hadoop by apache.

From the class AMWebServices, the method killJobTaskAttempt:

protected Response killJobTaskAttempt(TaskAttempt ta, UserGroupInformation callerUGI, HttpServletRequest hsr) throws IOException, InterruptedException {
    Preconditions.checkNotNull(ta, "ta cannot be null");
    String userName = callerUGI.getUserName();
    final TaskAttemptId attemptId = ta.getID();
    try {
        callerUGI.doAs(new PrivilegedExceptionAction<KillTaskAttemptResponse>() {

            @Override
            public KillTaskAttemptResponse run() throws IOException, YarnException {
                KillTaskAttemptRequest req = new KillTaskAttemptRequestPBImpl();
                req.setTaskAttemptId(attemptId);
                return service.forceKillTaskAttempt(req);
            }
        });
    } catch (UndeclaredThrowableException ue) {
        // if the wrapped cause is an access-control failure, surface it as a 403;
        // otherwise bubble the exception up to the caller
        if (ue.getCause() instanceof YarnException) {
            YarnException ye = (YarnException) ue.getCause();
            if (ye.getCause() instanceof AccessControlException) {
                String taId = attemptId.toString();
                String msg = "Unauthorized attempt to kill task attempt " + taId + " by remote user " + userName;
                return Response.status(Status.FORBIDDEN).entity(msg).build();
            } else {
                throw ue;
            }
        } else {
            throw ue;
        }
    }
    JobTaskAttemptState ret = new JobTaskAttemptState();
    ret.setState(TaskAttemptState.KILLED.toString());
    return Response.status(Status.OK).entity(ret).build();
}
Also used: KillTaskAttemptRequest(org.apache.hadoop.mapreduce.v2.api.protocolrecords.KillTaskAttemptRequest), JobTaskAttemptState(org.apache.hadoop.mapreduce.v2.app.webapp.dao.JobTaskAttemptState), KillTaskAttemptRequestPBImpl(org.apache.hadoop.mapreduce.v2.api.protocolrecords.impl.pb.KillTaskAttemptRequestPBImpl), TaskAttemptId(org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId), UndeclaredThrowableException(java.lang.reflect.UndeclaredThrowableException), AccessControlException(java.security.AccessControlException), IOException(java.io.IOException), YarnException(org.apache.hadoop.yarn.exceptions.YarnException), KillTaskAttemptResponse(org.apache.hadoop.mapreduce.v2.api.protocolrecords.KillTaskAttemptResponse)
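
The catch block above exists because UserGroupInformation.doAs only declares IOException and InterruptedException; any other checked exception thrown inside the PrivilegedExceptionAction comes back wrapped in an UndeclaredThrowableException, which then has to be unwrapped to decide between a 403 and a rethrow. A minimal sketch of that behavior, assuming hadoop-common and hadoop-yarn-api on the classpath; the class name and the "alice" user are made up for illustration:

import java.lang.reflect.UndeclaredThrowableException;
import java.security.PrivilegedExceptionAction;

import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.yarn.exceptions.YarnException;

public class DoAsUnwrapSketch {
    public static void main(String[] args) throws Exception {
        UserGroupInformation ugi = UserGroupInformation.createRemoteUser("alice");
        try {
            ugi.doAs(new PrivilegedExceptionAction<Void>() {
                @Override
                public Void run() throws Exception {
                    // doAs only declares IOException and InterruptedException, so a
                    // checked YarnException thrown here is wrapped on the way out.
                    throw new YarnException("simulated failure");
                }
            });
        } catch (UndeclaredThrowableException ute) {
            // Inspect the cause chain, just as killJobTaskAttempt does before
            // deciding between a FORBIDDEN response and a rethrow.
            System.out.println("wrapped cause: " + ute.getCause());
        }
    }
}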

Example 4 with JobTaskAttemptState

Use of org.apache.hadoop.mapreduce.v2.app.webapp.dao.JobTaskAttemptState in project hadoop by apache.

From the class TestAMWebServicesAttempt, the method testGetTaskAttemptIdXMLState:

@Test
public void testGetTaskAttemptIdXMLState() throws Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
        String jobId = MRApps.toString(id);
        for (Task task : jobsMap.get(id).getTasks().values()) {
            String tid = MRApps.toString(task.getID());
            for (TaskAttempt att : task.getAttempts().values()) {
                TaskAttemptId attemptid = att.getID();
                String attid = MRApps.toString(attemptid);
                ClientResponse response = r.path("ws").path("v1").path("mapreduce")
                        .path("jobs").path(jobId).path("tasks").path(tid)
                        .path("attempts").path(attid).path("state")
                        .queryParam("user.name", webserviceUserName)
                        .accept(MediaType.APPLICATION_XML)
                        .get(ClientResponse.class);
                assertEquals(MediaType.APPLICATION_XML_TYPE + "; " + JettyUtils.UTF_8, response.getType().toString());
                String xml = response.getEntity(String.class);
                DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
                DocumentBuilder db = dbf.newDocumentBuilder();
                InputSource is = new InputSource();
                is.setCharacterStream(new StringReader(xml));
                Document dom = db.parse(is);
                NodeList nodes = dom.getElementsByTagName("jobTaskAttemptState");
                assertEquals(1, nodes.getLength());
                String state = WebServicesTestUtils.getXmlString((Element) nodes.item(0), "state");
                assertEquals(att.getState().toString(), state);
            }
        }
    }
}
Also used: ClientResponse(com.sun.jersey.api.client.ClientResponse), Task(org.apache.hadoop.mapreduce.v2.app.job.Task), InputSource(org.xml.sax.InputSource), DocumentBuilderFactory(javax.xml.parsers.DocumentBuilderFactory), TaskAttemptId(org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId), NodeList(org.w3c.dom.NodeList), WebResource(com.sun.jersey.api.client.WebResource), Document(org.w3c.dom.Document), DocumentBuilder(javax.xml.parsers.DocumentBuilder), StringReader(java.io.StringReader), TaskAttempt(org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt), Job(org.apache.hadoop.mapreduce.v2.app.job.Job), JobId(org.apache.hadoop.mapreduce.v2.api.records.JobId), Test(org.junit.Test)

Example 5 with JobTaskAttemptState

Use of org.apache.hadoop.mapreduce.v2.app.webapp.dao.JobTaskAttemptState in project hadoop by apache.

From the class TestAMWebServicesAttempt, the method testPutTaskAttemptIdXMLState:

@Test
public void testPutTaskAttemptIdXMLState() throws Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
        String jobId = MRApps.toString(id);
        for (Task task : jobsMap.get(id).getTasks().values()) {
            String tid = MRApps.toString(task.getID());
            for (TaskAttempt att : task.getAttempts().values()) {
                TaskAttemptId attemptid = att.getID();
                String attid = MRApps.toString(attemptid);
                ClientResponse response = r.path("ws").path("v1").path("mapreduce")
                        .path("jobs").path(jobId).path("tasks").path(tid)
                        .path("attempts").path(attid).path("state")
                        .queryParam("user.name", webserviceUserName)
                        .accept(MediaType.APPLICATION_XML_TYPE)
                        .type(MediaType.APPLICATION_XML_TYPE)
                        .put(ClientResponse.class,
                                "<jobTaskAttemptState><state>KILLED"
                                        + "</state></jobTaskAttemptState>");
                assertEquals(MediaType.APPLICATION_XML_TYPE + "; " + JettyUtils.UTF_8, response.getType().toString());
                String xml = response.getEntity(String.class);
                DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
                DocumentBuilder db = dbf.newDocumentBuilder();
                InputSource is = new InputSource();
                is.setCharacterStream(new StringReader(xml));
                Document dom = db.parse(is);
                NodeList nodes = dom.getElementsByTagName("jobTaskAttemptState");
                assertEquals(1, nodes.getLength());
                String state = WebServicesTestUtils.getXmlString((Element) nodes.item(0), "state");
                assertEquals(TaskAttemptState.KILLED.toString(), state);
            }
        }
    }
}
Also used: ClientResponse(com.sun.jersey.api.client.ClientResponse), Task(org.apache.hadoop.mapreduce.v2.app.job.Task), InputSource(org.xml.sax.InputSource), DocumentBuilderFactory(javax.xml.parsers.DocumentBuilderFactory), TaskAttemptId(org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId), NodeList(org.w3c.dom.NodeList), WebResource(com.sun.jersey.api.client.WebResource), Document(org.w3c.dom.Document), DocumentBuilder(javax.xml.parsers.DocumentBuilder), StringReader(java.io.StringReader), TaskAttempt(org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt), Job(org.apache.hadoop.mapreduce.v2.app.job.Job), JobId(org.apache.hadoop.mapreduce.v2.api.records.JobId), Test(org.junit.Test)
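
For completeness, a JSON counterpart of the PUT test above might look like the sketch below. It is not copied from the Hadoop source; it reuses the same test scaffolding (resource(), appContext, webserviceUserName, MRApps) and additionally assumes org.codehaus.jettison.json.JSONObject is on the classpath and that the JSON response body has the unwrapped {"state": ...} shape shown by the endpoint above.

@Test
public void testPutTaskAttemptIdJSONStateSketch() throws Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
        String jobId = MRApps.toString(id);
        for (Task task : jobsMap.get(id).getTasks().values()) {
            String tid = MRApps.toString(task.getID());
            for (TaskAttempt att : task.getAttempts().values()) {
                String attid = MRApps.toString(att.getID());
                // Same path as the XML test, but speaking JSON on both sides.
                ClientResponse response = r.path("ws").path("v1").path("mapreduce")
                        .path("jobs").path(jobId).path("tasks").path(tid)
                        .path("attempts").path(attid).path("state")
                        .queryParam("user.name", webserviceUserName)
                        .accept(MediaType.APPLICATION_JSON)
                        .type(MediaType.APPLICATION_JSON)
                        .put(ClientResponse.class, "{\"state\":\"KILLED\"}");
                // Parse the JSON body and check the echoed state.
                JSONObject json = response.getEntity(JSONObject.class);
                assertEquals(TaskAttemptState.KILLED.toString(), json.getString("state"));
            }
        }
    }
}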

Aggregations

Job (org.apache.hadoop.mapreduce.v2.app.job.Job): 4
Task (org.apache.hadoop.mapreduce.v2.app.job.Task): 4
TaskAttempt (org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt): 4
TaskAttemptId (org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId): 3
JobTaskAttemptState (org.apache.hadoop.mapreduce.v2.app.webapp.dao.JobTaskAttemptState): 3
ClientResponse (com.sun.jersey.api.client.ClientResponse): 2
WebResource (com.sun.jersey.api.client.WebResource): 2
StringReader (java.io.StringReader): 2
Path (javax.ws.rs.Path): 2
Produces (javax.ws.rs.Produces): 2
DocumentBuilder (javax.xml.parsers.DocumentBuilder): 2
DocumentBuilderFactory (javax.xml.parsers.DocumentBuilderFactory): 2
JobId (org.apache.hadoop.mapreduce.v2.api.records.JobId): 2
Test (org.junit.Test): 2
Document (org.w3c.dom.Document): 2
NodeList (org.w3c.dom.NodeList): 2
InputSource (org.xml.sax.InputSource): 2
IOException (java.io.IOException): 1
UndeclaredThrowableException (java.lang.reflect.UndeclaredThrowableException): 1
AccessControlException (java.security.AccessControlException): 1