Search in sources :

Example 11 with TaskType

Usage of org.apache.hadoop.mapreduce.v2.api.records.TaskType in the Apache Hadoop project.

From class DefaultSpeculator, method containerNeed:

/*   *************************************************************    */
// This section contains the code that gets run for a SpeculatorEvent
/**
 * Returns the container-need counter for the job that owns {@code taskID},
 * selecting the map-side or reduce-side table based on the task's type and
 * lazily creating the counter on first access.
 *
 * @param taskID the task whose job id and task type select the counter
 * @return the shared, never-null {@link AtomicInteger} for that job/type
 */
private AtomicInteger containerNeed(TaskId taskID) {
    JobId jobID = taskID.getJobId();
    TaskType taskType = taskID.getTaskType();
    ConcurrentMap<JobId, AtomicInteger> relevantMap = taskType == TaskType.MAP ? mapContainerNeeds : reduceContainerNeeds;
    // computeIfAbsent performs the lookup-or-insert atomically, replacing
    // the original get / putIfAbsent / get sequence with a single call.
    return relevantMap.computeIfAbsent(jobID, id -> new AtomicInteger(0));
}
Also used : AtomicInteger(java.util.concurrent.atomic.AtomicInteger) TaskType(org.apache.hadoop.mapreduce.v2.api.records.TaskType) JobId(org.apache.hadoop.mapreduce.v2.api.records.JobId)

Example 12 with TaskType

Usage of org.apache.hadoop.mapreduce.v2.api.records.TaskType in the Apache Hadoop project.

From class TaskAttemptImpl, method createJobCounterUpdateEventTAFailed:

/**
 * Builds a JobCounterUpdateEvent recording a failed task attempt: bumps
 * NUM_FAILED_MAPS or NUM_FAILED_REDUCES according to the attempt's task
 * type, and additionally folds in the millis counters unless the task had
 * already completed before this failure.
 *
 * @param taskAttempt the attempt that failed
 * @param taskAlreadyCompleted true if the owning task finished earlier,
 *        in which case the millis counters are not updated again
 * @return the populated counter-update event for the attempt's job
 */
private static JobCounterUpdateEvent createJobCounterUpdateEventTAFailed(TaskAttemptImpl taskAttempt, boolean taskAlreadyCompleted) {
    JobCounterUpdateEvent jce = new JobCounterUpdateEvent(taskAttempt.getID().getTaskId().getJobId());
    boolean isMap = taskAttempt.getID().getTaskId().getTaskType() == TaskType.MAP;
    // Exactly one of the two failure counters is incremented per attempt.
    jce.addCounterUpdate(isMap ? JobCounter.NUM_FAILED_MAPS : JobCounter.NUM_FAILED_REDUCES, 1);
    if (!taskAlreadyCompleted) {
        updateMillisCounters(jce, taskAttempt);
    }
    return jce;
}
Also used : TaskType(org.apache.hadoop.mapreduce.v2.api.records.TaskType) JobCounterUpdateEvent(org.apache.hadoop.mapreduce.v2.app.job.event.JobCounterUpdateEvent)

Example 13 with TaskType

Usage of org.apache.hadoop.mapreduce.v2.api.records.TaskType in the Apache Hadoop project.

From class TasksBlock, method render:

@Override
/**
 * Renders the tasks table for the web UI: emits the table skeleton via
 * Hamlet and a JavaScript array literal (tasksTableData) holding one row
 * per task that passes the optional type and state filters taken from the
 * request parameters TASK_TYPE and TASK_STATE.
 */
protected void render(Block html) {
    // No job yet (e.g. AM still starting): show only the title.
    if (app.getJob() == null) {
        html.h2($(TITLE));
        return;
    }
    TaskType type = null;
    String symbol = $(TASK_TYPE);
    if (!symbol.isEmpty()) {
        type = MRApps.taskType(symbol);
    }
    // The state filter is loop-invariant; normalize and parse it ONCE
    // instead of re-reading $(TASK_STATE) and re-parsing per task.
    String taskStateStr = $(TASK_STATE);
    if (taskStateStr == null || taskStateStr.trim().equals("")) {
        taskStateStr = "ALL";
    }
    MRApps.TaskStateUI stateUI = null;
    boolean unknownState = false;
    if (!taskStateStr.equalsIgnoreCase("ALL")) {
        try {
            // get stateUI enum
            stateUI = MRApps.taskState(taskStateStr);
        } catch (IllegalArgumentException e) {
            // not supported state: matches no task, table stays empty
            unknownState = true;
        }
    }
    TBODY<TABLE<Hamlet>> tbody = html.table("#tasks").thead().tr().th("Task").th("Progress").th("Status").th("State").th("Start Time").th("Finish Time").th("Elapsed Time")._()._().tbody();
    StringBuilder tasksTableData = new StringBuilder("[\n");
    for (Task task : app.getJob().getTasks().values()) {
        if (type != null && task.getType() != type) {
            continue;
        }
        if (unknownState) {
            continue;
        }
        if (stateUI != null && !stateUI.correspondsTo(task.getState())) {
            continue;
        }
        TaskInfo info = new TaskInfo(task);
        String tid = info.getId();
        String pct = StringUtils.format("%.2f", info.getProgress());
        tasksTableData.append("[\"<a href='").append(url("task", tid)).append("'>").append(tid).append("</a>\",\"").append("<br title='").append(pct).append("'> <div class='").append(C_PROGRESSBAR).append("' title='").append(join(pct, '%')).append("'> ").append("<div class='").append(C_PROGRESSBAR_VALUE).append("' style='").append(join("width:", pct, '%')).append("'> </div> </div>\",\"").append(StringEscapeUtils.escapeJavaScript(StringEscapeUtils.escapeHtml(info.getStatus()))).append("\",\"").append(info.getState()).append("\",\"").append(info.getStartTime()).append("\",\"").append(info.getFinishTime()).append("\",\"").append(info.getElapsedTime()).append("\"],\n");
    }
    //Remove the last comma and close off the array of arrays
    if (tasksTableData.charAt(tasksTableData.length() - 2) == ',') {
        tasksTableData.delete(tasksTableData.length() - 2, tasksTableData.length() - 1);
    }
    tasksTableData.append("]");
    html.script().$type("text/javascript")._("var tasksTableData=" + tasksTableData)._();
    tbody._()._();
}
Also used : TABLE(org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TABLE) MRApps(org.apache.hadoop.mapreduce.v2.util.MRApps) TaskInfo(org.apache.hadoop.mapreduce.v2.app.webapp.dao.TaskInfo) Task(org.apache.hadoop.mapreduce.v2.app.job.Task) TaskType(org.apache.hadoop.mapreduce.v2.api.records.TaskType)

Example 14 with TaskType

Usage of org.apache.hadoop.mapreduce.v2.api.records.TaskType in the Apache Hadoop project.

From class AMWebServices, method getJobTasks:

@GET
@Path("/jobs/{jobid}/tasks")
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8, MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 })
/**
 * Returns the tasks of the given job, optionally filtered by task type.
 *
 * @param hsr the request, used for access checking
 * @param jid the job id path parameter
 * @param type optional task-type query parameter ("m" or "r")
 * @return all matching tasks wrapped in a TasksInfo bean
 * @throws BadRequestException if {@code type} is present but not "m"/"r"
 */
public TasksInfo getJobTasks(@Context HttpServletRequest hsr, @PathParam("jobid") String jid, @QueryParam("type") String type) {
    init();
    Job job = getJobFromJobIdString(jid, appCtx);
    checkAccess(job, hsr);
    // Parse/validate the type filter ONCE before iterating. The original
    // parsed it inside the loop, so an invalid type on a job with zero
    // tasks returned 200 with an empty list instead of 400.
    TaskType ttype = null;
    if (type != null && !type.isEmpty()) {
        try {
            ttype = MRApps.taskType(type);
        } catch (YarnRuntimeException e) {
            throw new BadRequestException("tasktype must be either m or r");
        }
    }
    TasksInfo allTasks = new TasksInfo();
    for (Task task : job.getTasks().values()) {
        if (ttype != null && task.getType() != ttype) {
            continue;
        }
        allTasks.add(new TaskInfo(task));
    }
    return allTasks;
}
Also used : YarnRuntimeException(org.apache.hadoop.yarn.exceptions.YarnRuntimeException) TaskInfo(org.apache.hadoop.mapreduce.v2.app.webapp.dao.TaskInfo) Task(org.apache.hadoop.mapreduce.v2.app.job.Task) TaskType(org.apache.hadoop.mapreduce.v2.api.records.TaskType) BadRequestException(org.apache.hadoop.yarn.webapp.BadRequestException) Job(org.apache.hadoop.mapreduce.v2.app.job.Job) TasksInfo(org.apache.hadoop.mapreduce.v2.app.webapp.dao.TasksInfo) Path(javax.ws.rs.Path) Produces(javax.ws.rs.Produces) GET(javax.ws.rs.GET)

Example 15 with TaskType

Usage of org.apache.hadoop.mapreduce.v2.api.records.TaskType in the Apache Hadoop project.

From class TestAMWebServicesTasks, method testTasksQueryInvalid:

@Test
/**
 * Verifies that querying /jobs/{jobid}/tasks with an invalid type value
 * ("reduce" instead of the required "m" or "r") yields HTTP 400 with a
 * well-formed RemoteException JSON body for every known job.
 */
public void testTasksQueryInvalid() throws JSONException, Exception {
    WebResource root = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
        String jobId = MRApps.toString(id);
        // tasktype must be exactly either "m" or "r"
        String badType = "reduce";
        try {
            root.path("ws").path("v1").path("mapreduce").path("jobs").path(jobId).path("tasks").queryParam("type", badType).accept(MediaType.APPLICATION_JSON).get(JSONObject.class);
            fail("should have thrown exception on invalid uri");
        } catch (UniformInterfaceException ue) {
            ClientResponse response = ue.getResponse();
            // Expect a 400 with a JSON (UTF-8) RemoteException payload.
            assertResponseStatusCode(Status.BAD_REQUEST, response.getStatusInfo());
            assertEquals(MediaType.APPLICATION_JSON_TYPE + "; " + JettyUtils.UTF_8, response.getType().toString());
            JSONObject body = response.getEntity(JSONObject.class);
            JSONObject remoteEx = body.getJSONObject("RemoteException");
            assertEquals("incorrect number of elements", 3, remoteEx.length());
            // Check message, exception type, and fully-qualified class name.
            WebServicesTestUtils.checkStringMatch("exception message", "java.lang.Exception: tasktype must be either m or r", remoteEx.getString("message"));
            WebServicesTestUtils.checkStringMatch("exception type", "BadRequestException", remoteEx.getString("exception"));
            WebServicesTestUtils.checkStringMatch("exception classname", "org.apache.hadoop.yarn.webapp.BadRequestException", remoteEx.getString("javaClassName"));
        }
    }
}
Also used : ClientResponse(com.sun.jersey.api.client.ClientResponse) UniformInterfaceException(com.sun.jersey.api.client.UniformInterfaceException) JSONObject(org.codehaus.jettison.json.JSONObject) WebResource(com.sun.jersey.api.client.WebResource) Job(org.apache.hadoop.mapreduce.v2.app.job.Job) JobId(org.apache.hadoop.mapreduce.v2.api.records.JobId) Test(org.junit.Test)

Aggregations

TaskType (org.apache.hadoop.mapreduce.v2.api.records.TaskType)15 JobId (org.apache.hadoop.mapreduce.v2.api.records.JobId)8 TaskId (org.apache.hadoop.mapreduce.v2.api.records.TaskId)8 Task (org.apache.hadoop.mapreduce.v2.app.job.Task)8 Job (org.apache.hadoop.mapreduce.v2.app.job.Job)7 TaskAttemptId (org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId)5 Test (org.junit.Test)5 HashMap (java.util.HashMap)4 TaskInfo (org.apache.hadoop.mapreduce.v2.app.webapp.dao.TaskInfo)4 Map (java.util.Map)3 Counters (org.apache.hadoop.mapreduce.Counters)3 ClientResponse (com.sun.jersey.api.client.ClientResponse)2 UniformInterfaceException (com.sun.jersey.api.client.UniformInterfaceException)2 WebResource (com.sun.jersey.api.client.WebResource)2 ArrayList (java.util.ArrayList)2 AtomicInteger (java.util.concurrent.atomic.AtomicInteger)2 GET (javax.ws.rs.GET)2 Path (javax.ws.rs.Path)2 Produces (javax.ws.rs.Produces)2 Path (org.apache.hadoop.fs.Path)2