Example 6 with JobInfo

Use of org.apache.hadoop.mapreduce.v2.app.webapp.dao.JobInfo in project hadoop by apache.

Class AMWebServices, method getJobs:

@GET
@Path("/jobs")
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8, MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 })
public JobsInfo getJobs(@Context HttpServletRequest hsr) {
    init();
    JobsInfo allJobs = new JobsInfo();
    for (Job job : appCtx.getAllJobs().values()) {
        // getAllJobs only returns partial Job objects; fetch the full Job by ID
        Job fullJob = appCtx.getJob(job.getID());
        if (fullJob == null) {
            continue;
        }
        allJobs.add(new JobInfo(fullJob, hasAccess(fullJob, hsr)));
    }
    return allJobs;
}
Also used: JobInfo (org.apache.hadoop.mapreduce.v2.app.webapp.dao.JobInfo), JobsInfo (org.apache.hadoop.mapreduce.v2.app.webapp.dao.JobsInfo), Job (org.apache.hadoop.mapreduce.v2.app.job.Job), Path (javax.ws.rs.Path), Produces (javax.ws.rs.Produces), GET (javax.ws.rs.GET)
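
For context, a minimal client-side sketch of calling this endpoint follows. It assumes the MapReduce AM web services are rooted at /ws/v1/mapreduce and that the proxy URL below (host, port, and application ID are placeholders) points at a running application; none of this comes from the example itself.

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class ListJobsClient {
    public static void main(String[] args) throws Exception {
        // Hypothetical RM proxy address and application ID.
        String url = "http://rm-host:8088/proxy/application_0000000000000_0001"
                + "/ws/v1/mapreduce/jobs";
        HttpRequest request = HttpRequest.newBuilder(URI.create(url))
                .header("Accept", "application/json")  // XML is also produced
                .GET()
                .build();
        HttpResponse<String> response = HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofString());
        // Expected shape, per the JobsInfo DAO: {"jobs":{"job":[ ... ]}}
        System.out.println(response.body());
    }
}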

Example 7 with JobInfo

Use of org.apache.hadoop.mapreduce.v2.app.webapp.dao.JobInfo in project hadoop by apache.

Class JobBlock, method render:

@Override
protected void render(Block html) {
    String jid = $(JOB_ID);
    if (jid.isEmpty()) {
        html.p()._("Sorry, can't do anything without a JobID.")._();
        return;
    }
    JobId jobID = MRApps.toJobID(jid);
    Job job = appContext.getJob(jobID);
    if (job == null) {
        html.p()._("Sorry, ", jid, " not found.")._();
        return;
    }
    List<AMInfo> amInfos = job.getAMInfos();
    String amString = amInfos.size() == 1 ? "ApplicationMaster" : "ApplicationMasters";
    JobInfo jinfo = new JobInfo(job, true);
    info("Job Overview")._("Job Name:", jinfo.getName())._("User Name:", jinfo.getUserName())._("Queue Name:", jinfo.getQueueName())._("State:", jinfo.getState())._("Uberized:", jinfo.isUberized())._("Started:", new Date(jinfo.getStartTime()))._("Elapsed:", StringUtils.formatTime(jinfo.getElapsedTime()));
    DIV<Hamlet> div = html._(InfoBlock.class).div(_INFO_WRAP);
    // MRAppMasters Table
    TABLE<DIV<Hamlet>> table = div.table("#job");
    table.tr()
        .th(amString)._()
        .tr()
        .th(_TH, "Attempt Number")
        .th(_TH, "Start Time")
        .th(_TH, "Node")
        .th(_TH, "Logs")._();
    for (AMInfo amInfo : amInfos) {
        AMAttemptInfo attempt = new AMAttemptInfo(amInfo, jinfo.getId(), jinfo.getUserName());
        table.tr()
            .td(String.valueOf(attempt.getAttemptId()))
            .td(new Date(attempt.getStartTime()).toString())
            .td().a(".nodelink",
                url(MRWebAppUtil.getYARNWebappScheme(), attempt.getNodeHttpAddress()),
                attempt.getNodeHttpAddress())._()
            .td().a(".logslink", url(attempt.getLogsLink()), "logs")._()._();
    }
    table._();
    div._();
    html.div(_INFO_WRAP)
        // Tasks table
        .table("#job")
        .tr()
            .th(_TH, "Task Type").th(_TH, "Progress").th(_TH, "Total")
            .th(_TH, "Pending").th(_TH, "Running").th(_TH, "Complete")._()
        .tr(_ODD)
            .th("Map")
            .td()
                .div(_PROGRESSBAR)
                    .$title(join(jinfo.getMapProgressPercent(), '%'))  // tooltip
                    .div(_PROGRESSBAR_VALUE)
                        .$style(join("width:", jinfo.getMapProgressPercent(), '%'))._()._()._()
            .td().a(url("tasks", jid, "m", "ALL"), String.valueOf(jinfo.getMapsTotal()))._()
            .td().a(url("tasks", jid, "m", "PENDING"), String.valueOf(jinfo.getMapsPending()))._()
            .td().a(url("tasks", jid, "m", "RUNNING"), String.valueOf(jinfo.getMapsRunning()))._()
            .td().a(url("tasks", jid, "m", "COMPLETED"), String.valueOf(jinfo.getMapsCompleted()))._()._()
        .tr(_EVEN)
            .th("Reduce")
            .td()
                .div(_PROGRESSBAR)
                    .$title(join(jinfo.getReduceProgressPercent(), '%'))  // tooltip
                    .div(_PROGRESSBAR_VALUE)
                        .$style(join("width:", jinfo.getReduceProgressPercent(), '%'))._()._()._()
            .td().a(url("tasks", jid, "r", "ALL"), String.valueOf(jinfo.getReducesTotal()))._()
            .td().a(url("tasks", jid, "r", "PENDING"), String.valueOf(jinfo.getReducesPending()))._()
            .td().a(url("tasks", jid, "r", "RUNNING"), String.valueOf(jinfo.getReducesRunning()))._()
            .td().a(url("tasks", jid, "r", "COMPLETED"), String.valueOf(jinfo.getReducesCompleted()))._()._()._()
        // Attempts table
        .table("#job")
        .tr()
            .th(_TH, "Attempt Type").th(_TH, "New").th(_TH, "Running")
            .th(_TH, "Failed").th(_TH, "Killed").th(_TH, "Successful")._()
        .tr(_ODD)
            .th("Maps")
            .td().a(url("attempts", jid, "m", TaskAttemptStateUI.NEW.toString()),
                String.valueOf(jinfo.getNewMapAttempts()))._()
            .td().a(url("attempts", jid, "m", TaskAttemptStateUI.RUNNING.toString()),
                String.valueOf(jinfo.getRunningMapAttempts()))._()
            .td().a(url("attempts", jid, "m", TaskAttemptStateUI.FAILED.toString()),
                String.valueOf(jinfo.getFailedMapAttempts()))._()
            .td().a(url("attempts", jid, "m", TaskAttemptStateUI.KILLED.toString()),
                String.valueOf(jinfo.getKilledMapAttempts()))._()
            .td().a(url("attempts", jid, "m", TaskAttemptStateUI.SUCCESSFUL.toString()),
                String.valueOf(jinfo.getSuccessfulMapAttempts()))._()._()
        .tr(_EVEN)
            .th("Reduces")
            .td().a(url("attempts", jid, "r", TaskAttemptStateUI.NEW.toString()),
                String.valueOf(jinfo.getNewReduceAttempts()))._()
            .td().a(url("attempts", jid, "r", TaskAttemptStateUI.RUNNING.toString()),
                String.valueOf(jinfo.getRunningReduceAttempts()))._()
            .td().a(url("attempts", jid, "r", TaskAttemptStateUI.FAILED.toString()),
                String.valueOf(jinfo.getFailedReduceAttempts()))._()
            .td().a(url("attempts", jid, "r", TaskAttemptStateUI.KILLED.toString()),
                String.valueOf(jinfo.getKilledReduceAttempts()))._()
            .td().a(url("attempts", jid, "r", TaskAttemptStateUI.SUCCESSFUL.toString()),
                String.valueOf(jinfo.getSuccessfulReduceAttempts()))._()._()._()._();
}
Also used: InfoBlock (org.apache.hadoop.yarn.webapp.view.InfoBlock), Hamlet (org.apache.hadoop.yarn.webapp.hamlet.Hamlet), Date (java.util.Date), AMInfo (org.apache.hadoop.mapreduce.v2.api.records.AMInfo), DIV (org.apache.hadoop.yarn.webapp.hamlet.Hamlet.DIV), JobInfo (org.apache.hadoop.mapreduce.v2.app.webapp.dao.JobInfo), Job (org.apache.hadoop.mapreduce.v2.app.job.Job), JobId (org.apache.hadoop.mapreduce.v2.api.records.JobId), AMAttemptInfo (org.apache.hadoop.mapreduce.v2.app.webapp.dao.AMAttemptInfo)
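
A note on the Hamlet calls above: each element method (div, table, tr, td, ...) opens a tag, _(Object...) emits text, and a bare _() closes the innermost open tag. As a rough, self-contained sketch of that convention (HelloBlock is a made-up class; the packages are the same ones this example imports):

import org.apache.hadoop.yarn.webapp.view.HtmlBlock;

public class HelloBlock extends HtmlBlock {
    @Override
    protected void render(Block html) {
        html.div("#greeting")                     // <div id="greeting">
                .h3("Job Overview")               //   <h3>Job Overview</h3>
                .p()._("Hello from Hamlet.")._()  //   <p>Hello from Hamlet.</p>
            ._();                                 // </div>
    }
}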

Example 8 with JobInfo

Use of org.apache.hadoop.mapreduce.v2.app.webapp.dao.JobInfo in project hadoop by apache.

Class AMWebServices, method getJob:

@GET
@Path("/jobs/{jobid}")
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8, MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 })
public JobInfo getJob(@Context HttpServletRequest hsr, @PathParam("jobid") String jid) {
    init();
    Job job = getJobFromJobIdString(jid, appCtx);
    return new JobInfo(job, hasAccess(job, hsr));
}
Also used: JobInfo (org.apache.hadoop.mapreduce.v2.app.webapp.dao.JobInfo), Job (org.apache.hadoop.mapreduce.v2.app.job.Job), Path (javax.ws.rs.Path), Produces (javax.ws.rs.Produces), GET (javax.ws.rs.GET)
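
Here is a sketch of consuming the JSON this endpoint returns, using Jackson. The response shape ({"job":{"id":...,"name":...,"state":...}}) and the field names are assumptions based on the JobInfo DAO's getters, and the URL placeholders are hypothetical:

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

public class GetJobClient {
    public static void main(String[] args) throws Exception {
        // Hypothetical proxy address, application ID, and job ID.
        String url = "http://rm-host:8088/proxy/application_0000000000000_0001"
                + "/ws/v1/mapreduce/jobs/job_0000000000000_0001";
        HttpResponse<String> resp = HttpClient.newHttpClient().send(
                HttpRequest.newBuilder(URI.create(url))
                        .header("Accept", "application/json").build(),
                HttpResponse.BodyHandlers.ofString());
        JsonNode job = new ObjectMapper().readTree(resp.body()).path("job");
        // Assumed field names; adjust to the actual JobInfo serialization.
        System.out.println(job.path("name").asText() + " is " + job.path("state").asText());
    }
}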

Example 9 with JobInfo

Use of org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo in project hadoop by apache.

Class MRAppMaster, method parsePreviousJobHistory:

private void parsePreviousJobHistory() throws IOException {
    FSDataInputStream in = getPreviousJobHistoryStream(getConfig(), appAttemptID);
    JobHistoryParser parser = new JobHistoryParser(in);
    JobInfo jobInfo = parser.parse();
    Exception parseException = parser.getParseException();
    if (parseException != null) {
        LOG.info("Got an error parsing job-history file" + ", ignoring incomplete events.", parseException);
    }
    Map<org.apache.hadoop.mapreduce.TaskID, TaskInfo> taskInfos = jobInfo.getAllTasks();
    for (TaskInfo taskInfo : taskInfos.values()) {
        if (TaskState.SUCCEEDED.toString().equals(taskInfo.getTaskStatus())) {
            Iterator<Entry<TaskAttemptID, TaskAttemptInfo>> taskAttemptIterator = taskInfo.getAllTaskAttempts().entrySet().iterator();
            while (taskAttemptIterator.hasNext()) {
                Map.Entry<TaskAttemptID, TaskAttemptInfo> currentEntry = taskAttemptIterator.next();
                if (!jobInfo.getAllCompletedTaskAttempts().containsKey(currentEntry.getKey())) {
                    taskAttemptIterator.remove();
                }
            }
            completedTasksFromPreviousRun.put(TypeConverter.toYarn(taskInfo.getTaskId()), taskInfo);
            LOG.info("Read from history task " + TypeConverter.toYarn(taskInfo.getTaskId()));
        }
    }
    LOG.info("Read completed tasks from history " + completedTasksFromPreviousRun.size());
    recoveredJobStartTime = jobInfo.getLaunchTime();
    // recover AMInfos
    List<JobHistoryParser.AMInfo> jhAmInfoList = jobInfo.getAMInfos();
    if (jhAmInfoList != null) {
        for (JobHistoryParser.AMInfo jhAmInfo : jhAmInfoList) {
            AMInfo amInfo = MRBuilderUtils.newAMInfo(jhAmInfo.getAppAttemptId(), jhAmInfo.getStartTime(), jhAmInfo.getContainerId(), jhAmInfo.getNodeManagerHost(), jhAmInfo.getNodeManagerPort(), jhAmInfo.getNodeManagerHttpPort());
            amInfos.add(amInfo);
        }
    }
}
Also used: TaskAttemptID (org.apache.hadoop.mapreduce.TaskAttemptID), InvocationTargetException (java.lang.reflect.InvocationTargetException), NoSuchAlgorithmException (java.security.NoSuchAlgorithmException), IOException (java.io.IOException), YarnRuntimeException (org.apache.hadoop.yarn.exceptions.YarnRuntimeException), TaskInfo (org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskInfo), AMInfo (org.apache.hadoop.mapreduce.v2.api.records.AMInfo), Entry (java.util.Map.Entry), JobHistoryParser (org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser), JobInfo (org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo), TaskAttemptInfo (org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskAttemptInfo), FSDataInputStream (org.apache.hadoop.fs.FSDataInputStream), Map (java.util.Map), ConcurrentHashMap (java.util.concurrent.ConcurrentHashMap), HashMap (java.util.HashMap)
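
The same parser can be driven outside the AM. Below is a minimal standalone sketch, assuming a local .jhist file at a hypothetical path; the calls (parse(), getParseException(), and the JobInfo accessors) are the ones the method above relies on.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo;

public class HistoryDump {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.getLocal(conf);
        // Hypothetical history file path.
        JobHistoryParser parser = new JobHistoryParser(
                fs, new Path("/tmp/job_0000000000000_0001.jhist"));
        JobInfo jobInfo = parser.parse();
        // As in MRAppMaster: a parse exception means incomplete events, not failure.
        if (parser.getParseException() != null) {
            System.err.println("Partial parse: " + parser.getParseException());
        }
        System.out.println("Launch time: " + jobInfo.getLaunchTime()
                + ", tasks parsed: " + jobInfo.getAllTasks().size());
    }
}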

Example 10 with JobInfo

Use of org.apache.hadoop.mapreduce.v2.app.webapp.dao.JobInfo in project hadoop by apache.

Class JobsBlock, method render:

@Override
protected void render(Block html) {
    TBODY<TABLE<Hamlet>> tbody = html.h2("Active Jobs").table("#jobs").thead()
        .tr()
            .th(".id", "Job ID")
            .th(".name", "Name")
            .th(".state", "State")
            .th("Map Progress")
            .th("Maps Total")
            .th("Maps Completed")
            .th("Reduce Progress")
            .th("Reduces Total")
            .th("Reduces Completed")._()._()
        .tbody();
    for (Job j : appContext.getAllJobs().values()) {
        JobInfo job = new JobInfo(j, false);
        tbody.tr()
            .td().span().$title(String.valueOf(job.getId()))._()
                .a(url("job", job.getId()), job.getId())._()
            .td(job.getName())
            .td(job.getState())
            .td()
                .span().$title(job.getMapProgressPercent())._()
                .div(_PROGRESSBAR)
                    .$title(join(job.getMapProgressPercent(), '%'))  // tooltip
                    .div(_PROGRESSBAR_VALUE)
                        .$style(join("width:", job.getMapProgressPercent(), '%'))._()._()._()
            .td(String.valueOf(job.getMapsTotal()))
            .td(String.valueOf(job.getMapsCompleted()))
            .td()
                .span().$title(job.getReduceProgressPercent())._()
                .div(_PROGRESSBAR)
                    .$title(join(job.getReduceProgressPercent(), '%'))  // tooltip
                    .div(_PROGRESSBAR_VALUE)
                        .$style(join("width:", job.getReduceProgressPercent(), '%'))._()._()._()
            .td(String.valueOf(job.getReducesTotal()))
            .td(String.valueOf(job.getReducesCompleted()))._();
    }
    tbody._()._();
}
Also used: TABLE (org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TABLE), JobInfo (org.apache.hadoop.mapreduce.v2.app.webapp.dao.JobInfo), Job (org.apache.hadoop.mapreduce.v2.app.job.Job)

Aggregations

Job (org.apache.hadoop.mapreduce.v2.app.job.Job): 15 usages
JobId (org.apache.hadoop.mapreduce.v2.api.records.JobId): 12 usages
JobInfo (org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo): 8 usages
Test (org.junit.Test): 8 usages
IOException (java.io.IOException): 7 usages
Configuration (org.apache.hadoop.conf.Configuration): 7 usages
Path (org.apache.hadoop.fs.Path): 7 usages
JobHistoryParser (org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser): 6 usages
HistoryFileInfo (org.apache.hadoop.mapreduce.v2.hs.HistoryFileManager.HistoryFileInfo): 6 usages
Date (java.util.Date): 5 usages
FSDataInputStream (org.apache.hadoop.fs.FSDataInputStream): 5 usages
FileContext (org.apache.hadoop.fs.FileContext): 4 usages
TaskInfo (org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskInfo): 4 usages
JobReport (org.apache.hadoop.mapreduce.v2.api.records.JobReport): 4 usages
MRApp (org.apache.hadoop.mapreduce.v2.app.MRApp): 4 usages
JobInfo (org.apache.hadoop.mapreduce.v2.app.webapp.dao.JobInfo): 4 usages
HashMap (java.util.HashMap): 3 usages
GET (javax.ws.rs.GET): 3 usages
Path (javax.ws.rs.Path): 3 usages
Produces (javax.ws.rs.Produces): 3 usages