
Example 1 with ConfInfo

Use of org.apache.hadoop.mapreduce.v2.app.webapp.dao.ConfInfo in project hadoop by apache.

From the ConfBlock class, method render:

/*
   * (non-Javadoc)
   * @see org.apache.hadoop.yarn.webapp.view.HtmlBlock#render(org.apache.hadoop.yarn.webapp.view.HtmlBlock.Block)
   */
@Override
protected void render(Block html) {
    String jid = $(JOB_ID);
    if (jid.isEmpty()) {
        html.p()._("Sorry, can't do anything without a JobID.")._();
        return;
    }
    JobId jobID = MRApps.toJobID(jid);
    Job job = appContext.getJob(jobID);
    if (job == null) {
        html.p()._("Sorry, ", jid, " not found.")._();
        return;
    }
    Path confPath = job.getConfFile();
    try {
        ConfInfo info = new ConfInfo(job);
        html.div().a("/jobhistory/downloadconf/" + jid, confPath.toString());
        TBODY<TABLE<Hamlet>> tbody = html.
            table("#conf").
              thead().
                tr().
                  th(_TH, "key").
                  th(_TH, "value").
                  th(_TH, "source chain").
                _()._().
              tbody();
        for (ConfEntryInfo entry : info.getProperties()) {
            StringBuffer buffer = new StringBuffer();
            String[] sources = entry.getSource();
            //Skip the last entry, because it is always the same HDFS file, and
            // output them in reverse order so most recent is output first
            boolean first = true;
            for (int i = (sources.length - 2); i >= 0; i--) {
                if (!first) {
                    buffer.append(" <- ");
                }
                first = false;
                buffer.append(sources[i]);
            }
            tbody.tr().td(entry.getName()).td(entry.getValue()).td(buffer.toString())._();
        }
        tbody._().
          tfoot().
            tr().
              th().input("search_init").$type(InputType.text).
                  $name("key").$value("key")._()._().
              th().input("search_init").$type(InputType.text).
                  $name("value").$value("value")._()._().
              th().input("search_init").$type(InputType.text).
                  $name("source chain").$value("source chain")._()._().
            _().
          _().
        _();
    } catch (IOException e) {
        LOG.error("Error while reading " + confPath, e);
        html.p()._("Sorry got an error while reading conf file. ", confPath);
    }
}
Also used : Path(org.apache.hadoop.fs.Path) ConfInfo(org.apache.hadoop.mapreduce.v2.app.webapp.dao.ConfInfo) IOException(java.io.IOException) ConfEntryInfo(org.apache.hadoop.mapreduce.v2.app.webapp.dao.ConfEntryInfo) TABLE(org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TABLE) Job(org.apache.hadoop.mapreduce.v2.app.job.Job) JobId(org.apache.hadoop.mapreduce.v2.api.records.JobId)
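
The loop above builds the "source chain" column: it drops the last source (which is always the job's own configuration file on HDFS) and joins the remaining sources newest-first with " <- ". A minimal, self-contained sketch of just that formatting step; the class name and sample data are hypothetical and not part of the Hadoop code:

import java.util.StringJoiner;

public class SourceChainFormatter {

    // Joins a property's sources newest-first, skipping the last entry,
    // mirroring the loop in ConfBlock.render above.
    static String formatSourceChain(String[] sources) {
        StringJoiner chain = new StringJoiner(" <- ");
        for (int i = sources.length - 2; i >= 0; i--) {
            chain.add(sources[i]);
        }
        return chain.toString();
    }

    public static void main(String[] args) {
        // Hypothetical source list in the order ConfEntryInfo.getSource() is
        // assumed to return it: oldest first, job configuration file last.
        String[] sources = { "mapred-default.xml", "mapred-site.xml", "job.xml" };
        // Prints: mapred-site.xml <- mapred-default.xml
        System.out.println(formatSourceChain(sources));
    }
}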

Example 2 with ConfInfo

Use of org.apache.hadoop.mapreduce.v2.app.webapp.dao.ConfInfo in project hadoop by apache.

From the AMWebServices class, method getJobConf:

@GET
@Path("/jobs/{jobid}/conf")
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8, MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 })
public ConfInfo getJobConf(@Context HttpServletRequest hsr, @PathParam("jobid") String jid) {
    init();
    Job job = getJobFromJobIdString(jid, appCtx);
    checkAccess(job, hsr);
    ConfInfo info;
    try {
        info = new ConfInfo(job);
    } catch (IOException e) {
        throw new NotFoundException("unable to load configuration for job: " + jid);
    }
    return info;
}
Also used : ConfInfo(org.apache.hadoop.mapreduce.v2.app.webapp.dao.ConfInfo) NotFoundException(org.apache.hadoop.yarn.webapp.NotFoundException) IOException(java.io.IOException) Job(org.apache.hadoop.mapreduce.v2.app.job.Job) Path(javax.ws.rs.Path) Produces(javax.ws.rs.Produces) GET(javax.ws.rs.GET)
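
AMWebServices declares only the method-level path "/jobs/{jobid}/conf"; the service class is typically mounted under the "/ws/v1/mapreduce" prefix, and the endpoint returns the serialized ConfInfo (the conf file path plus its property list) as JSON or XML. A rough client sketch, assuming that prefix; the host, port, and job id are placeholders:

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;

public class JobConfClient {

    // Usage (placeholders): java JobConfClient http://<am-host>:<port> <job-id>
    public static void main(String[] args) throws IOException {
        String url = args[0] + "/ws/v1/mapreduce/jobs/" + args[1] + "/conf";
        HttpURLConnection conn = (HttpURLConnection) new URL(url).openConnection();
        // Ask for JSON; "application/xml" also works, per the @Produces annotation.
        conn.setRequestProperty("Accept", "application/json");
        try (BufferedReader reader = new BufferedReader(
                new InputStreamReader(conn.getInputStream(), StandardCharsets.UTF_8))) {
            String line;
            while ((line = reader.readLine()) != null) {
                System.out.println(line);
            }
        } finally {
            conn.disconnect();
        }
    }
}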

Example 3 with ConfInfo

Use of org.apache.hadoop.mapreduce.v2.app.webapp.dao.ConfInfo in project hadoop by apache.

From the HsWebServices class, method getJobConf:

@GET
@Path("/mapreduce/jobs/{jobid}/conf")
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8, MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 })
public ConfInfo getJobConf(@Context HttpServletRequest hsr, @PathParam("jobid") String jid) {
    init();
    Job job = AMWebServices.getJobFromJobIdString(jid, ctx);
    checkAccess(job, hsr);
    ConfInfo info;
    try {
        info = new ConfInfo(job);
    } catch (IOException e) {
        throw new NotFoundException("unable to load configuration for job: " + jid);
    }
    return info;
}
Also used : ConfInfo(org.apache.hadoop.mapreduce.v2.app.webapp.dao.ConfInfo) NotFoundException(org.apache.hadoop.yarn.webapp.NotFoundException) IOException(java.io.IOException) Job(org.apache.hadoop.mapreduce.v2.app.job.Job) Path(javax.ws.rs.Path) Produces(javax.ws.rs.Produces) GET(javax.ws.rs.GET)
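
Examples 2 and 3 are nearly identical: HsWebServices serves the same ConfInfo from the job history server, only under a different path and with the Job resolved from the history context. In both, the IOException thrown while reading the job's conf file is translated into a 404 via NotFoundException. A sketch of how that shared step could be factored out; the ConfInfoLoader helper is hypothetical and not part of Hadoop:

import java.io.IOException;

import org.apache.hadoop.mapreduce.v2.app.job.Job;
import org.apache.hadoop.mapreduce.v2.app.webapp.dao.ConfInfo;
import org.apache.hadoop.yarn.webapp.NotFoundException;

final class ConfInfoLoader {

    private ConfInfoLoader() {
    }

    // Builds a ConfInfo for the job, mapping a failed conf-file read to a
    // NotFoundException exactly as both web services above do.
    static ConfInfo loadOrNotFound(Job job, String jid) {
        try {
            return new ConfInfo(job);
        } catch (IOException e) {
            throw new NotFoundException("unable to load configuration for job: " + jid);
        }
    }
}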

Aggregations

IOException (java.io.IOException): 3 uses
Job (org.apache.hadoop.mapreduce.v2.app.job.Job): 3 uses
ConfInfo (org.apache.hadoop.mapreduce.v2.app.webapp.dao.ConfInfo): 3 uses
GET (javax.ws.rs.GET): 2 uses
Path (javax.ws.rs.Path): 2 uses
Produces (javax.ws.rs.Produces): 2 uses
NotFoundException (org.apache.hadoop.yarn.webapp.NotFoundException): 2 uses
Path (org.apache.hadoop.fs.Path): 1 use
JobId (org.apache.hadoop.mapreduce.v2.api.records.JobId): 1 use
ConfEntryInfo (org.apache.hadoop.mapreduce.v2.app.webapp.dao.ConfEntryInfo): 1 use
TABLE (org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TABLE): 1 use