Use of org.apache.hadoop.mapreduce.v2.app.webapp.dao.AMAttemptInfo in project hadoop by apache.
In the class JobBlock, method render:
@Override
protected void render(Block html) {
  String jid = $(JOB_ID);
  if (jid.isEmpty()) {
    html.p()._("Sorry, can't do anything without a JobID.")._();
    return;
  }
  JobId jobID = MRApps.toJobID(jid);
  Job job = appContext.getJob(jobID);
  if (job == null) {
    html.p()._("Sorry, ", jid, " not found.")._();
    return;
  }
  List<AMInfo> amInfos = job.getAMInfos();
  String amString =
      amInfos.size() == 1 ? "ApplicationMaster" : "ApplicationMasters";
  JobInfo jinfo = new JobInfo(job, true);
  info("Job Overview")
      ._("Job Name:", jinfo.getName())
      ._("User Name:", jinfo.getUserName())
      ._("Queue Name:", jinfo.getQueueName())
      ._("State:", jinfo.getState())
      ._("Uberized:", jinfo.isUberized())
      ._("Started:", new Date(jinfo.getStartTime()))
      ._("Elapsed:", StringUtils.formatTime(jinfo.getElapsedTime()));
  DIV<Hamlet> div = html._(InfoBlock.class).div(_INFO_WRAP);
  // MRAppMasters table
  TABLE<DIV<Hamlet>> table = div.table("#job");
  table
      .tr().th(amString)._()
      .tr()
        .th(_TH, "Attempt Number")
        .th(_TH, "Start Time")
        .th(_TH, "Node")
        .th(_TH, "Logs")._();
  for (AMInfo amInfo : amInfos) {
    AMAttemptInfo attempt =
        new AMAttemptInfo(amInfo, jinfo.getId(), jinfo.getUserName());
    table.tr()
        .td(String.valueOf(attempt.getAttemptId()))
        .td(new Date(attempt.getStartTime()).toString())
        .td()
          .a(".nodelink",
              url(MRWebAppUtil.getYARNWebappScheme(),
                  attempt.getNodeHttpAddress()),
              attempt.getNodeHttpAddress())._()
        .td()
          .a(".logslink", url(attempt.getLogsLink()), "logs")._()
        ._();
  }
  table._();
  div._();
  // Tasks and attempts summary tables
  html.div(_INFO_WRAP)
    .table("#job")
      .tr()
        .th(_TH, "Task Type")
        .th(_TH, "Progress")
        .th(_TH, "Total")
        .th(_TH, "Pending")
        .th(_TH, "Running")
        .th(_TH, "Complete")._()
      .tr(_ODD)
        .th("Map")
        .td()
          .div(_PROGRESSBAR)
            .$title(join(jinfo.getMapProgressPercent(), '%')) // tooltip
            .div(_PROGRESSBAR_VALUE)
              .$style(join("width:", jinfo.getMapProgressPercent(), '%'))
            ._()._()._()
        .td().a(url("tasks", jid, "m", "ALL"),
            String.valueOf(jinfo.getMapsTotal()))._()
        .td().a(url("tasks", jid, "m", "PENDING"),
            String.valueOf(jinfo.getMapsPending()))._()
        .td().a(url("tasks", jid, "m", "RUNNING"),
            String.valueOf(jinfo.getMapsRunning()))._()
        .td().a(url("tasks", jid, "m", "COMPLETED"),
            String.valueOf(jinfo.getMapsCompleted()))._()._()
      .tr(_EVEN)
        .th("Reduce")
        .td()
          .div(_PROGRESSBAR)
            .$title(join(jinfo.getReduceProgressPercent(), '%')) // tooltip
            .div(_PROGRESSBAR_VALUE)
              .$style(join("width:", jinfo.getReduceProgressPercent(), '%'))
            ._()._()._()
        .td().a(url("tasks", jid, "r", "ALL"),
            String.valueOf(jinfo.getReducesTotal()))._()
        .td().a(url("tasks", jid, "r", "PENDING"),
            String.valueOf(jinfo.getReducesPending()))._()
        .td().a(url("tasks", jid, "r", "RUNNING"),
            String.valueOf(jinfo.getReducesRunning()))._()
        .td().a(url("tasks", jid, "r", "COMPLETED"),
            String.valueOf(jinfo.getReducesCompleted()))._()._()._()
    .table("#job")
      .tr()
        .th(_TH, "Attempt Type")
        .th(_TH, "New")
        .th(_TH, "Running")
        .th(_TH, "Failed")
        .th(_TH, "Killed")
        .th(_TH, "Successful")._()
      .tr(_ODD)
        .th("Maps")
        .td().a(url("attempts", jid, "m",
            TaskAttemptStateUI.NEW.toString()),
            String.valueOf(jinfo.getNewMapAttempts()))._()
        .td().a(url("attempts", jid, "m",
            TaskAttemptStateUI.RUNNING.toString()),
            String.valueOf(jinfo.getRunningMapAttempts()))._()
        .td().a(url("attempts", jid, "m",
            TaskAttemptStateUI.FAILED.toString()),
            String.valueOf(jinfo.getFailedMapAttempts()))._()
        .td().a(url("attempts", jid, "m",
            TaskAttemptStateUI.KILLED.toString()),
            String.valueOf(jinfo.getKilledMapAttempts()))._()
        .td().a(url("attempts", jid, "m",
            TaskAttemptStateUI.SUCCESSFUL.toString()),
            String.valueOf(jinfo.getSuccessfulMapAttempts()))._()._()
      .tr(_EVEN)
        .th("Reduces")
        .td().a(url("attempts", jid, "r",
            TaskAttemptStateUI.NEW.toString()),
            String.valueOf(jinfo.getNewReduceAttempts()))._()
        .td().a(url("attempts", jid, "r",
            TaskAttemptStateUI.RUNNING.toString()),
            String.valueOf(jinfo.getRunningReduceAttempts()))._()
        .td().a(url("attempts", jid, "r",
            TaskAttemptStateUI.FAILED.toString()),
            String.valueOf(jinfo.getFailedReduceAttempts()))._()
        .td().a(url("attempts", jid, "r",
            TaskAttemptStateUI.KILLED.toString()),
            String.valueOf(jinfo.getKilledReduceAttempts()))._()
        .td().a(url("attempts", jid, "r",
            TaskAttemptStateUI.SUCCESSFUL.toString()),
            String.valueOf(jinfo.getSuccessfulReduceAttempts()))._()._()._()._();
}
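For context, here is a minimal sketch of what the three-argument AMAttemptInfo constructor plausibly derives from an AMInfo record. The class and field layout below are a simplified illustration, not the verbatim Hadoop DAO; the AMInfo getters used are the standard ones from org.apache.hadoop.mapreduce.v2.api.records.AMInfo, but the exact link-building logic is an assumption.

import org.apache.hadoop.mapreduce.v2.api.records.AMInfo;

// Hypothetical, simplified re-implementation of the DAO, showing which
// AMInfo fields plausibly back the getters used in the table rows above.
class AmAttemptInfoSketch {
  private final int attemptId;
  private final long startTime;
  private final String nodeHttpAddress;
  private final String logsLink;

  // jobId kept to mirror the real three-argument signature; unused here.
  AmAttemptInfoSketch(AMInfo amInfo, String jobId, String user) {
    // "Attempt Number" column
    this.attemptId = amInfo.getAppAttemptId().getAttemptId();
    // "Start Time" column
    this.startTime = amInfo.getStartTime();
    // "Node" column: NodeManager host and HTTP port
    this.nodeHttpAddress =
        amInfo.getNodeManagerHost() + ":" + amInfo.getNodeManagerHttpPort();
    // "Logs" column: a NodeManager container-logs path for this attempt
    // (assumed shape; the real class builds this differently)
    this.logsLink = "/node/containerlogs/"
        + amInfo.getContainerId() + "/" + user;
  }

  int getAttemptId() { return attemptId; }
  long getStartTime() { return startTime; }
  String getNodeHttpAddress() { return nodeHttpAddress; }
  String getLogsLink() { return logsLink; }
}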
Use of org.apache.hadoop.mapreduce.v2.app.webapp.dao.AMAttemptInfo in project hadoop by apache.
In the class HsJobBlock, method render:
/*
* (non-Javadoc)
* @see org.apache.hadoop.yarn.webapp.view.HtmlBlock#render(org.apache.hadoop.yarn.webapp.view.HtmlBlock.Block)
*/
@Override
protected void render(Block html) {
  String jid = $(JOB_ID);
  if (jid.isEmpty()) {
    html.p()._("Sorry, can't do anything without a JobID.")._();
    return;
  }
  JobId jobID = MRApps.toJobID(jid);
  Job j = appContext.getJob(jobID);
  if (j == null) {
    html.p()._("Sorry, ", jid, " not found.")._();
    return;
  }
  if (j instanceof UnparsedJob) {
    final int taskCount = j.getTotalMaps() + j.getTotalReduces();
    UnparsedJob oversizedJob = (UnparsedJob) j;
    html.p()._("The job has a total of " + taskCount + " tasks. ")
        ._("Any job larger than " + oversizedJob.getMaxTasksAllowed()
            + " will not be loaded.")._();
    html.p()._("You can either use the CLI tool: 'mapred job -history'"
        + " to view large jobs or adjust the property "
        + JHAdminConfig.MR_HS_LOADED_JOBS_TASKS_MAX + ".")._();
    return;
  }
  List<AMInfo> amInfos = j.getAMInfos();
  JobInfo job = new JobInfo(j);
  ResponseInfo infoBlock = info("Job Overview")
      ._("Job Name:", job.getName())
      ._("User Name:", job.getUserName())
      ._("Queue:", job.getQueueName())
      ._("State:", job.getState())
      ._("Uberized:", job.isUber())
      ._("Submitted:", new Date(job.getSubmitTime()))
      ._("Started:", job.getStartTimeStr())
      ._("Finished:", new Date(job.getFinishTime()))
      ._("Elapsed:", StringUtils.formatTime(
          Times.elapsed(job.getStartTime(), job.getFinishTime(), false)));
  String amString =
      amInfos.size() == 1 ? "ApplicationMaster" : "ApplicationMasters";
  // TODO: switch to use JobInfo
  List<String> diagnostics = j.getDiagnostics();
  if (diagnostics != null && !diagnostics.isEmpty()) {
    StringBuffer b = new StringBuffer();
    for (String diag : diagnostics) {
      b.append(addTaskLinks(diag));
    }
    infoBlock._r("Diagnostics:", b.toString());
  }
  if (job.getNumMaps() > 0) {
    infoBlock._("Average Map Time",
        StringUtils.formatTime(job.getAvgMapTime()));
  }
  if (job.getNumReduces() > 0) {
    infoBlock._("Average Shuffle Time",
        StringUtils.formatTime(job.getAvgShuffleTime()));
    infoBlock._("Average Merge Time",
        StringUtils.formatTime(job.getAvgMergeTime()));
    infoBlock._("Average Reduce Time",
        StringUtils.formatTime(job.getAvgReduceTime()));
  }
  for (ConfEntryInfo entry : job.getAcls()) {
    infoBlock._("ACL " + entry.getName() + ":", entry.getValue());
  }
  DIV<Hamlet> div = html._(InfoBlock.class).div(_INFO_WRAP);
  // MRAppMasters table
  TABLE<DIV<Hamlet>> table = div.table("#job");
  table
      .tr().th(amString)._()
      .tr()
        .th(_TH, "Attempt Number")
        .th(_TH, "Start Time")
        .th(_TH, "Node")
        .th(_TH, "Logs")._();
  boolean odd = false;
  for (AMInfo amInfo : amInfos) {
    AMAttemptInfo attempt =
        new AMAttemptInfo(amInfo, job.getId(), job.getUserName(), "", "");
    table.tr((odd = !odd) ? _ODD : _EVEN)
        .td(String.valueOf(attempt.getAttemptId()))
        .td(new Date(attempt.getStartTime()).toString())
        .td()
          .a(".nodelink",
              url(MRWebAppUtil.getYARNWebappScheme(),
                  attempt.getNodeHttpAddress()),
              attempt.getNodeHttpAddress())._()
        .td()
          .a(".logslink", url(attempt.getLogsLink()), "logs")._()
        ._();
  }
  table._();
  div._();
  // Tasks and attempts summary tables
  html.div(_INFO_WRAP)
    .table("#job")
      .tr()
        .th(_TH, "Task Type")
        .th(_TH, "Total")
        .th(_TH, "Complete")._()
      .tr(_ODD)
        .th().a(url("tasks", jid, "m"), "Map")._()
        .td(String.valueOf(job.getMapsTotal()))
        .td(String.valueOf(job.getMapsCompleted()))._()
      .tr(_EVEN)
        .th().a(url("tasks", jid, "r"), "Reduce")._()
        .td(String.valueOf(job.getReducesTotal()))
        .td(String.valueOf(job.getReducesCompleted()))._()._()
    .table("#job")
      .tr()
        .th(_TH, "Attempt Type")
        .th(_TH, "Failed")
        .th(_TH, "Killed")
        .th(_TH, "Successful")._()
      .tr(_ODD)
        .th("Maps")
        .td().a(url("attempts", jid, "m",
            TaskAttemptStateUI.FAILED.toString()),
            String.valueOf(job.getFailedMapAttempts()))._()
        .td().a(url("attempts", jid, "m",
            TaskAttemptStateUI.KILLED.toString()),
            String.valueOf(job.getKilledMapAttempts()))._()
        .td().a(url("attempts", jid, "m",
            TaskAttemptStateUI.SUCCESSFUL.toString()),
            String.valueOf(job.getSuccessfulMapAttempts()))._()._()
      .tr(_EVEN)
        .th("Reduces")
        .td().a(url("attempts", jid, "r",
            TaskAttemptStateUI.FAILED.toString()),
            String.valueOf(job.getFailedReduceAttempts()))._()
        .td().a(url("attempts", jid, "r",
            TaskAttemptStateUI.KILLED.toString()),
            String.valueOf(job.getKilledReduceAttempts()))._()
        .td().a(url("attempts", jid, "r",
            TaskAttemptStateUI.SUCCESSFUL.toString()),
            String.valueOf(job.getSuccessfulReduceAttempts()))._()._()._()._();
}
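The diagnostics loop above relies on addTaskLinks to turn task IDs embedded in diagnostic text into links. A hedged sketch of such a helper follows; the regular expression and the URL prefix are assumptions for illustration, not the exact Hadoop implementation.

import java.util.regex.Pattern;

// Hypothetical sketch of a task-link helper: wraps anything that looks
// like a MapReduce task ID (e.g. task_1326232085508_0004_m_000000) in an
// anchor tag. Pattern and link prefix are assumptions.
final class TaskLinkSketch {
  private static final Pattern TASK_ID =
      Pattern.compile("task_\\d+_\\d+_[mr]_\\d+");

  static String addTaskLinks(String diagnostic) {
    // $0 is the whole match; each task ID becomes a link to its task page
    return TASK_ID.matcher(diagnostic)
        .replaceAll("<a href=\"/jobhistory/task/$0\">$0</a>");
  }
}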
Use of org.apache.hadoop.mapreduce.v2.app.webapp.dao.AMAttemptInfo in project hadoop by apache.
In the class HsWebServices, method getJobAttempts:
@GET
@Path("/mapreduce/jobs/{jobid}/jobattempts")
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
    MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 })
public AMAttemptsInfo getJobAttempts(@PathParam("jobid") String jid) {
  init();
  Job job = AMWebServices.getJobFromJobIdString(jid, ctx);
  AMAttemptsInfo amAttempts = new AMAttemptsInfo();
  for (AMInfo amInfo : job.getAMInfos()) {
    AMAttemptInfo attempt = new AMAttemptInfo(amInfo,
        MRApps.toString(job.getID()), job.getUserName(),
        uriInfo.getBaseUri().toString(), webapp.name());
    amAttempts.add(attempt);
  }
  return amAttempts;
}
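This endpoint is served under the history server's web-services root, so it can be exercised with a plain HTTP GET. A minimal client sketch, assuming a history server on the default port 19888; the host name and job ID below are placeholders:

import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;

// Minimal client sketch for the endpoint above. Host and job ID are
// placeholders; the body is the serialized AMAttemptsInfo list.
public class JobAttemptsClient {
  public static void main(String[] args) throws Exception {
    URL url = new URL("http://historyserver.example.com:19888"
        + "/ws/v1/history/mapreduce/jobs/job_1326232085508_0004/jobattempts");
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    // Matches the @Produces annotation; application/xml also works
    conn.setRequestProperty("Accept", "application/json");
    try (InputStream in = conn.getInputStream()) {
      System.out.println(new String(in.readAllBytes(), StandardCharsets.UTF_8));
    }
  }
}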
Use of org.apache.hadoop.mapreduce.v2.app.webapp.dao.AMAttemptInfo in project hadoop by apache.
In the class AMWebServices, method getJobAttempts:
@GET
@Path("/jobs/{jobid}/jobattempts")
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
    MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 })
public AMAttemptsInfo getJobAttempts(@PathParam("jobid") String jid) {
  init();
  Job job = getJobFromJobIdString(jid, appCtx);
  AMAttemptsInfo amAttempts = new AMAttemptsInfo();
  for (AMInfo amInfo : job.getAMInfos()) {
    AMAttemptInfo attempt = new AMAttemptInfo(amInfo,
        MRApps.toString(job.getID()), job.getUserName());
    amAttempts.add(attempt);
  }
  return amAttempts;
}
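The only difference from the history-server version above is the constructor variant. Side by side, the two calls as they appear in these snippets; the comments reflect a plausible reading of why the extra arguments exist (presumably so the history server can emit absolute links), not a confirmed design note:

// ApplicationMaster side: three-argument form, links resolved
// relative to the AM web app.
AMAttemptInfo amSide = new AMAttemptInfo(
    amInfo, MRApps.toString(job.getID()), job.getUserName());

// HistoryServer side: base URI and webapp name supplied, presumably so
// the DAO can build absolute log links for the history UI.
AMAttemptInfo hsSide = new AMAttemptInfo(
    amInfo, MRApps.toString(job.getID()), job.getUserName(),
    uriInfo.getBaseUri().toString(), webapp.name());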