Example 1 with ConfEntryInfo

Use of org.apache.hadoop.mapreduce.v2.app.webapp.dao.ConfEntryInfo in project hadoop by apache.

The class HsJobBlock, method render.

/*
   * (non-Javadoc)
   * @see org.apache.hadoop.yarn.webapp.view.HtmlBlock#render(org.apache.hadoop.yarn.webapp.view.HtmlBlock.Block)
   */
@Override
protected void render(Block html) {
    String jid = $(JOB_ID);
    if (jid.isEmpty()) {
        html.p()._("Sorry, can't do anything without a JobID.")._();
        return;
    }
    JobId jobID = MRApps.toJobID(jid);
    Job j = appContext.getJob(jobID);
    if (j == null) {
        html.p()._("Sorry, ", jid, " not found.")._();
        return;
    }
    if (j instanceof UnparsedJob) {
        final int taskCount = j.getTotalMaps() + j.getTotalReduces();
        UnparsedJob oversizedJob = (UnparsedJob) j;
        html.p()._("The job has a total of " + taskCount + " tasks. ")._("Any job larger than " + oversizedJob.getMaxTasksAllowed() + " will not be loaded.")._();
        html.p()._("You can either use the CLI tool: 'mapred job -history'" + " to view large jobs or adjust the property " + JHAdminConfig.MR_HS_LOADED_JOBS_TASKS_MAX + ".")._();
        return;
    }
    List<AMInfo> amInfos = j.getAMInfos();
    JobInfo job = new JobInfo(j);
    ResponseInfo infoBlock = info("Job Overview")
        ._("Job Name:", job.getName())
        ._("User Name:", job.getUserName())
        ._("Queue:", job.getQueueName())
        ._("State:", job.getState())
        ._("Uberized:", job.isUber())
        ._("Submitted:", new Date(job.getSubmitTime()))
        ._("Started:", job.getStartTimeStr())
        ._("Finished:", new Date(job.getFinishTime()))
        ._("Elapsed:", StringUtils.formatTime(
            Times.elapsed(job.getStartTime(), job.getFinishTime(), false)));
    String amString = amInfos.size() == 1 ? "ApplicationMaster" : "ApplicationMasters";
    // TODO: switch to use JobInfo
    List<String> diagnostics = j.getDiagnostics();
    if (diagnostics != null && !diagnostics.isEmpty()) {
        StringBuffer b = new StringBuffer();
        for (String diag : diagnostics) {
            b.append(addTaskLinks(diag));
        }
        infoBlock._r("Diagnostics:", b.toString());
    }
    if (job.getNumMaps() > 0) {
        infoBlock._("Average Map Time", StringUtils.formatTime(job.getAvgMapTime()));
    }
    if (job.getNumReduces() > 0) {
        infoBlock._("Average Shuffle Time", StringUtils.formatTime(job.getAvgShuffleTime()));
        infoBlock._("Average Merge Time", StringUtils.formatTime(job.getAvgMergeTime()));
        infoBlock._("Average Reduce Time", StringUtils.formatTime(job.getAvgReduceTime()));
    }
    for (ConfEntryInfo entry : job.getAcls()) {
        infoBlock._("ACL " + entry.getName() + ":", entry.getValue());
    }
    DIV<Hamlet> div = html._(InfoBlock.class).div(_INFO_WRAP);
    // MRAppMasters Table
    TABLE<DIV<Hamlet>> table = div.table("#job");
    table.tr()
        .th(amString)._()
        .tr()
        .th(_TH, "Attempt Number")
        .th(_TH, "Start Time")
        .th(_TH, "Node")
        .th(_TH, "Logs")._();
    boolean odd = false;
    for (AMInfo amInfo : amInfos) {
        AMAttemptInfo attempt = new AMAttemptInfo(amInfo, job.getId(), job.getUserName(), "", "");
        table.tr((odd = !odd) ? _ODD : _EVEN)
            .td(String.valueOf(attempt.getAttemptId()))
            .td(new Date(attempt.getStartTime()).toString())
            .td().a(".nodelink",
                url(MRWebAppUtil.getYARNWebappScheme(),
                    attempt.getNodeHttpAddress()),
                attempt.getNodeHttpAddress())._()
            .td().a(".logslink", url(attempt.getLogsLink()), "logs")._()._();
    }
    table._();
    div._();
    html.div(_INFO_WRAP)
        .table("#job")
            .tr()
                .th(_TH, "Task Type")
                .th(_TH, "Total")
                .th(_TH, "Complete")._()
            .tr(_ODD)
                .th().a(url("tasks", jid, "m"), "Map")._()
                .td(String.valueOf(job.getMapsTotal()))
                .td(String.valueOf(job.getMapsCompleted()))._()
            .tr(_EVEN)
                .th().a(url("tasks", jid, "r"), "Reduce")._()
                .td(String.valueOf(job.getReducesTotal()))
                .td(String.valueOf(job.getReducesCompleted()))._()._()
        .table("#job")
            .tr()
                .th(_TH, "Attempt Type")
                .th(_TH, "Failed")
                .th(_TH, "Killed")
                .th(_TH, "Successful")._()
            .tr(_ODD)
                .th("Maps")
                .td().a(url("attempts", jid, "m",
                        TaskAttemptStateUI.FAILED.toString()),
                    String.valueOf(job.getFailedMapAttempts()))._()
                .td().a(url("attempts", jid, "m",
                        TaskAttemptStateUI.KILLED.toString()),
                    String.valueOf(job.getKilledMapAttempts()))._()
                .td().a(url("attempts", jid, "m",
                        TaskAttemptStateUI.SUCCESSFUL.toString()),
                    String.valueOf(job.getSuccessfulMapAttempts()))._()._()
            .tr(_EVEN)
                .th("Reduces")
                .td().a(url("attempts", jid, "r",
                        TaskAttemptStateUI.FAILED.toString()),
                    String.valueOf(job.getFailedReduceAttempts()))._()
                .td().a(url("attempts", jid, "r",
                        TaskAttemptStateUI.KILLED.toString()),
                    String.valueOf(job.getKilledReduceAttempts()))._()
                .td().a(url("attempts", jid, "r",
                        TaskAttemptStateUI.SUCCESSFUL.toString()),
                    String.valueOf(job.getSuccessfulReduceAttempts()))._()._()._()._();
}
Also used: ResponseInfo (org.apache.hadoop.yarn.webapp.ResponseInfo), InfoBlock (org.apache.hadoop.yarn.webapp.view.InfoBlock), Hamlet (org.apache.hadoop.yarn.webapp.hamlet.Hamlet), Date (java.util.Date), ConfEntryInfo (org.apache.hadoop.mapreduce.v2.app.webapp.dao.ConfEntryInfo), AMInfo (org.apache.hadoop.mapreduce.v2.api.records.AMInfo), DIV (org.apache.hadoop.yarn.webapp.hamlet.Hamlet.DIV), UnparsedJob (org.apache.hadoop.mapreduce.v2.hs.UnparsedJob), JobInfo (org.apache.hadoop.mapreduce.v2.hs.webapp.dao.JobInfo), Job (org.apache.hadoop.mapreduce.v2.app.job.Job), JobId (org.apache.hadoop.mapreduce.v2.api.records.JobId), AMAttemptInfo (org.apache.hadoop.mapreduce.v2.hs.webapp.dao.AMAttemptInfo)
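
For quick orientation, ConfEntryInfo behaves as a plain name/value DAO (with an optional source chain, as Example 2 shows). Below is a minimal, self-contained sketch of the ACL rendering step from HsJobBlock#render; the local ConfEntryInfo class is a hypothetical stand-in for the real DAO, reduced to the two accessors the loop actually uses, and System.out stands in for Hamlet's infoBlock calls.

import java.util.Arrays;
import java.util.List;

public class AclRenderSketch {

    // Hypothetical mirror of
    // org.apache.hadoop.mapreduce.v2.app.webapp.dao.ConfEntryInfo,
    // kept to just getName() and getValue().
    static class ConfEntryInfo {
        private final String name;
        private final String value;

        ConfEntryInfo(String name, String value) {
            this.name = name;
            this.value = value;
        }

        String getName() { return name; }
        String getValue() { return value; }
    }

    public static void main(String[] args) {
        // Sample ACL entries such as job.getAcls() might return.
        List<ConfEntryInfo> acls = Arrays.asList(
            new ConfEntryInfo("mapreduce.job.acl-view-job", "alice,ops"),
            new ConfEntryInfo("mapreduce.job.acl-modify-job", "alice"));

        // Mirrors: infoBlock._("ACL " + entry.getName() + ":", entry.getValue())
        for (ConfEntryInfo entry : acls) {
            System.out.println("ACL " + entry.getName() + ": " + entry.getValue());
        }
    }
}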

Example 2 with ConfEntryInfo

Use of org.apache.hadoop.mapreduce.v2.app.webapp.dao.ConfEntryInfo in project hadoop by apache.

The class ConfBlock, method render.

/*
   * (non-Javadoc)
   * @see org.apache.hadoop.yarn.webapp.view.HtmlBlock#render(org.apache.hadoop.yarn.webapp.view.HtmlBlock.Block)
   */
@Override
protected void render(Block html) {
    String jid = $(JOB_ID);
    if (jid.isEmpty()) {
        html.p()._("Sorry, can't do anything without a JobID.")._();
        return;
    }
    JobId jobID = MRApps.toJobID(jid);
    Job job = appContext.getJob(jobID);
    if (job == null) {
        html.p()._("Sorry, ", jid, " not found.")._();
        return;
    }
    Path confPath = job.getConfFile();
    try {
        ConfInfo info = new ConfInfo(job);
        html.div().a("/jobhistory/downloadconf/" + jid, confPath.toString());
        TBODY<TABLE<Hamlet>> tbody = html.table("#conf")
            .thead()
                .tr()
                    .th(_TH, "key")
                    .th(_TH, "value")
                    .th(_TH, "source chain")._()._()
            .tbody();
        for (ConfEntryInfo entry : info.getProperties()) {
            StringBuffer buffer = new StringBuffer();
            String[] sources = entry.getSource();
            // Skip the last entry, because it is always the same HDFS file,
            // and output the rest in reverse order so the most recent source
            // comes first.
            boolean first = true;
            for (int i = (sources.length - 2); i >= 0; i--) {
                if (!first) {
                    buffer.append(" <- ");
                }
                first = false;
                buffer.append(sources[i]);
            }
            tbody.tr().td(entry.getName()).td(entry.getValue()).td(buffer.toString())._();
        }
        tbody._()
            .tfoot()
                .tr()
                    .th().input("search_init").$type(InputType.text)
                        .$name("key").$value("key")._()._()
                    .th().input("search_init").$type(InputType.text)
                        .$name("value").$value("value")._()._()
                    .th().input("search_init").$type(InputType.text)
                        .$name("source chain").$value("source chain")._()._()._()._()._();
    } catch (IOException e) {
        LOG.error("Error while reading " + confPath, e);
        html.p()._("Sorry got an error while reading conf file. ", confPath);
    }
}
Also used: Path (org.apache.hadoop.fs.Path), ConfInfo (org.apache.hadoop.mapreduce.v2.app.webapp.dao.ConfInfo), IOException (java.io.IOException), ConfEntryInfo (org.apache.hadoop.mapreduce.v2.app.webapp.dao.ConfEntryInfo), TABLE (org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TABLE), Job (org.apache.hadoop.mapreduce.v2.app.job.Job), JobId (org.apache.hadoop.mapreduce.v2.api.records.JobId)
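
The source-chain loop above is the subtlest part of this example: it drops the final array element and walks backwards. Here is a standalone sketch of the same formatting logic; formatSourceChain is a hypothetical helper name, and the sample sources array assumes, per the comment in ConfBlock, that the job's HDFS conf file always sits at the end.

public class SourceChainSketch {

    // Formats a property's source chain the way ConfBlock#render does:
    // skip the last element (always the same HDFS job conf file) and walk
    // backwards so the most recent source is printed first.
    static String formatSourceChain(String[] sources) {
        StringBuilder buffer = new StringBuilder();
        boolean first = true;
        for (int i = sources.length - 2; i >= 0; i--) {
            if (!first) {
                buffer.append(" <- ");
            }
            first = false;
            buffer.append(sources[i]);
        }
        return buffer.toString();
    }

    public static void main(String[] args) {
        String[] sources = {
            "mapred-default.xml",
            "mapred-site.xml",
            "programmatically",
            "hdfs://namenode:8020/tmp/hadoop-yarn/staging/job.xml" // skipped
        };
        // Prints: programmatically <- mapred-site.xml <- mapred-default.xml
        System.out.println(formatSourceChain(sources));
    }
}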

Aggregations

JobId (org.apache.hadoop.mapreduce.v2.api.records.JobId): 2 uses
Job (org.apache.hadoop.mapreduce.v2.app.job.Job): 2 uses
ConfEntryInfo (org.apache.hadoop.mapreduce.v2.app.webapp.dao.ConfEntryInfo): 2 uses
IOException (java.io.IOException): 1 use
Date (java.util.Date): 1 use
Path (org.apache.hadoop.fs.Path): 1 use
AMInfo (org.apache.hadoop.mapreduce.v2.api.records.AMInfo): 1 use
ConfInfo (org.apache.hadoop.mapreduce.v2.app.webapp.dao.ConfInfo): 1 use
UnparsedJob (org.apache.hadoop.mapreduce.v2.hs.UnparsedJob): 1 use
AMAttemptInfo (org.apache.hadoop.mapreduce.v2.hs.webapp.dao.AMAttemptInfo): 1 use
JobInfo (org.apache.hadoop.mapreduce.v2.hs.webapp.dao.JobInfo): 1 use
ResponseInfo (org.apache.hadoop.yarn.webapp.ResponseInfo): 1 use
Hamlet (org.apache.hadoop.yarn.webapp.hamlet.Hamlet): 1 use
DIV (org.apache.hadoop.yarn.webapp.hamlet.Hamlet.DIV): 1 use
TABLE (org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TABLE): 1 use
InfoBlock (org.apache.hadoop.yarn.webapp.view.InfoBlock): 1 use