
Example 1 with Hamlet

Use of org.apache.hadoop.yarn.webapp.hamlet.Hamlet in project hadoop by apache.

From the class CountersBlock, method render:

@Override
protected void render(Block html) {
    if (job == null) {
        html.p()._("Sorry, no counters for nonexistent", $(JOB_ID, "job"))._();
        return;
    }
    if (!$(TASK_ID).isEmpty() && task == null) {
        html.p()._("Sorry, no counters for nonexistent", $(TASK_ID, "task"))._();
        return;
    }
    if (total == null || total.getGroupNames() == null || total.countCounters() == 0) {
        String type = $(TASK_ID);
        if (type == null || type.isEmpty()) {
            type = $(JOB_ID, "the job");
        }
        html.p()._("Sorry it looks like ", type, " has no counters.")._();
        return;
    }
    String urlBase;
    String urlId;
    if (task != null) {
        urlBase = "singletaskcounter";
        urlId = MRApps.toString(task.getID());
    } else {
        urlBase = "singlejobcounter";
        urlId = MRApps.toString(job.getID());
    }
    int numGroups = 0;
    TBODY<TABLE<DIV<Hamlet>>> tbody = html.
        div(_INFO_WRAP).
        table("#counters").
            thead().
                tr().
                    th(".group.ui-state-default", "Counter Group").
                    th(".ui-state-default", "Counters")._()._().
            tbody();
    for (CounterGroup g : total) {
        CounterGroup mg = map == null ? null : map.getGroup(g.getName());
        CounterGroup rg = reduce == null ? null : reduce.getGroup(g.getName());
        ++numGroups;
        // This is mostly for demonstration :) Typically we'd introduce
        // a CounterGroup block to reduce the verbosity. OTOH, this
        // serves as an indicator of where we are in the tag hierarchy.
        TR<THEAD<TABLE<TD<TR<TBODY<TABLE<DIV<Hamlet>>>>>>>> groupHeadRow = tbody.
            tr().
                th().$title(g.getName()).$class("ui-state-default").
                    _(fixGroupDisplayName(g.getDisplayName()))._().
                td().$class(C_TABLE).
                    table(".dt-counters").$id(job.getID() + "." + g.getName()).
                        thead().
                            tr().th(".name", "Name");
        if (map != null) {
            groupHeadRow.th("Map").th("Reduce");
        }
        // Ditto
        TBODY<TABLE<TD<TR<TBODY<TABLE<DIV<Hamlet>>>>>>> group = groupHeadRow.th(map == null ? "Value" : "Total")._()._().tbody();
        for (Counter counter : g) {
            // Ditto
            TR<TBODY<TABLE<TD<TR<TBODY<TABLE<DIV<Hamlet>>>>>>>> groupRow = group.tr();
            if (task == null && mg == null && rg == null) {
                groupRow.td().$title(counter.getName())._(counter.getDisplayName())._();
            } else {
                groupRow.td().$title(counter.getName()).a(url(urlBase, urlId, g.getName(), counter.getName()), counter.getDisplayName())._();
            }
            if (map != null) {
                Counter mc = mg == null ? null : mg.findCounter(counter.getName());
                Counter rc = rg == null ? null : rg.findCounter(counter.getName());
                groupRow.td(mc == null ? "0" : String.format("%,d", mc.getValue())).td(rc == null ? "0" : String.format("%,d", rc.getValue()));
            }
            groupRow.td(String.format("%,d", counter.getValue()))._();
        }
        group._()._()._()._();
    }
    tbody._()._()._();
}
Also used: Hamlet (org.apache.hadoop.yarn.webapp.hamlet.Hamlet), CounterGroup (org.apache.hadoop.mapreduce.CounterGroup), TBODY (org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TBODY), TABLE (org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TABLE), C_TABLE (org.apache.hadoop.yarn.webapp.view.JQueryUI.C_TABLE), TD (org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TD), DIV (org.apache.hadoop.yarn.webapp.hamlet.Hamlet.DIV), Counter (org.apache.hadoop.mapreduce.Counter), THEAD (org.apache.hadoop.yarn.webapp.hamlet.Hamlet.THEAD), TR (org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TR)
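
The deeply nested generic types above (for example TR<THEAD<TABLE<TD<...>>>>) simply record where the fluent chain currently sits in the tag hierarchy: every tag method opens an element, _(Object...) emits content, and the no-arg _() closes the innermost open element. The following standalone sketch is not part of the Hadoop source; the class name is ours, and constructing Hamlet directly via the public Hamlet(PrintWriter, int, boolean) constructor is an assumption for running it outside the web framework.

import java.io.PrintWriter;
import java.io.StringWriter;

import org.apache.hadoop.yarn.webapp.hamlet.Hamlet;

public class HamletPatternSketch {
    public static void main(String[] args) {
        StringWriter buffer = new StringWriter();
        // Constructing Hamlet directly is an assumption for standalone use; inside
        // the YARN web framework the instance is handed to the block as "html".
        Hamlet h = new Hamlet(new PrintWriter(buffer), 0, false);
        h.
            div("#example").              // opens <div id="example">
                p()._("hello world")._(). // opens <p>, emits text, closes the p
            _();                          // closes the div
        h.getWriter().flush();
        System.out.println(buffer);       // prints the generated markup
    }
}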

Example 2 with Hamlet

Use of org.apache.hadoop.yarn.webapp.hamlet.Hamlet in project hadoop by apache.

From the class JobBlock, method render:

@Override
protected void render(Block html) {
    String jid = $(JOB_ID);
    if (jid.isEmpty()) {
        html.p()._("Sorry, can't do anything without a JobID.")._();
        return;
    }
    JobId jobID = MRApps.toJobID(jid);
    Job job = appContext.getJob(jobID);
    if (job == null) {
        html.p()._("Sorry, ", jid, " not found.")._();
        return;
    }
    List<AMInfo> amInfos = job.getAMInfos();
    String amString = amInfos.size() == 1 ? "ApplicationMaster" : "ApplicationMasters";
    JobInfo jinfo = new JobInfo(job, true);
    info("Job Overview")._("Job Name:", jinfo.getName())._("User Name:", jinfo.getUserName())._("Queue Name:", jinfo.getQueueName())._("State:", jinfo.getState())._("Uberized:", jinfo.isUberized())._("Started:", new Date(jinfo.getStartTime()))._("Elapsed:", StringUtils.formatTime(jinfo.getElapsedTime()));
    DIV<Hamlet> div = html._(InfoBlock.class).div(_INFO_WRAP);
    // MRAppMasters Table
    TABLE<DIV<Hamlet>> table = div.table("#job");
    table.tr().th(amString)._().tr().th(_TH, "Attempt Number").th(_TH, "Start Time").th(_TH, "Node").th(_TH, "Logs")._();
    for (AMInfo amInfo : amInfos) {
        AMAttemptInfo attempt = new AMAttemptInfo(amInfo, jinfo.getId(), jinfo.getUserName());
        table.tr().
            td(String.valueOf(attempt.getAttemptId())).
            td(new Date(attempt.getStartTime()).toString()).
            td().a(".nodelink", url(MRWebAppUtil.getYARNWebappScheme(), attempt.getNodeHttpAddress()),
                attempt.getNodeHttpAddress())._().
            td().a(".logslink", url(attempt.getLogsLink()), "logs")._()._();
    }
    table._();
    div._();
    html.div(_INFO_WRAP).table("#job").tr().th(_TH, "Task Type").th(_TH, "Progress").th(_TH, "Total").th(_TH, "Pending").th(_TH, "Running").th(_TH, "Complete")._().tr(_ODD).th("Map").td().div(_PROGRESSBAR).$title(// tooltip
    join(jinfo.getMapProgressPercent(), '%')).div(_PROGRESSBAR_VALUE).$style(join("width:", jinfo.getMapProgressPercent(), '%'))._()._()._().td().a(url("tasks", jid, "m", "ALL"), String.valueOf(jinfo.getMapsTotal()))._().td().a(url("tasks", jid, "m", "PENDING"), String.valueOf(jinfo.getMapsPending()))._().td().a(url("tasks", jid, "m", "RUNNING"), String.valueOf(jinfo.getMapsRunning()))._().td().a(url("tasks", jid, "m", "COMPLETED"), String.valueOf(jinfo.getMapsCompleted()))._()._().tr(_EVEN).th("Reduce").td().div(_PROGRESSBAR).$title(// tooltip
    join(jinfo.getReduceProgressPercent(), '%')).div(_PROGRESSBAR_VALUE).$style(join("width:", jinfo.getReduceProgressPercent(), '%'))._()._()._().td().a(url("tasks", jid, "r", "ALL"), String.valueOf(jinfo.getReducesTotal()))._().td().a(url("tasks", jid, "r", "PENDING"), String.valueOf(jinfo.getReducesPending()))._().td().a(url("tasks", jid, "r", "RUNNING"), String.valueOf(jinfo.getReducesRunning()))._().td().a(url("tasks", jid, "r", "COMPLETED"), String.valueOf(jinfo.getReducesCompleted()))._()._()._().table("#job").tr().th(_TH, "Attempt Type").th(_TH, "New").th(_TH, "Running").th(_TH, "Failed").th(_TH, "Killed").th(_TH, "Successful")._().tr(_ODD).th("Maps").td().a(url("attempts", jid, "m", TaskAttemptStateUI.NEW.toString()), String.valueOf(jinfo.getNewMapAttempts()))._().td().a(url("attempts", jid, "m", TaskAttemptStateUI.RUNNING.toString()), String.valueOf(jinfo.getRunningMapAttempts()))._().td().a(url("attempts", jid, "m", TaskAttemptStateUI.FAILED.toString()), String.valueOf(jinfo.getFailedMapAttempts()))._().td().a(url("attempts", jid, "m", TaskAttemptStateUI.KILLED.toString()), String.valueOf(jinfo.getKilledMapAttempts()))._().td().a(url("attempts", jid, "m", TaskAttemptStateUI.SUCCESSFUL.toString()), String.valueOf(jinfo.getSuccessfulMapAttempts()))._()._().tr(_EVEN).th("Reduces").td().a(url("attempts", jid, "r", TaskAttemptStateUI.NEW.toString()), String.valueOf(jinfo.getNewReduceAttempts()))._().td().a(url("attempts", jid, "r", TaskAttemptStateUI.RUNNING.toString()), String.valueOf(jinfo.getRunningReduceAttempts()))._().td().a(url("attempts", jid, "r", TaskAttemptStateUI.FAILED.toString()), String.valueOf(jinfo.getFailedReduceAttempts()))._().td().a(url("attempts", jid, "r", TaskAttemptStateUI.KILLED.toString()), String.valueOf(jinfo.getKilledReduceAttempts()))._().td().a(url("attempts", jid, "r", TaskAttemptStateUI.SUCCESSFUL.toString()), String.valueOf(jinfo.getSuccessfulReduceAttempts()))._()._()._()._();
}
Also used: InfoBlock (org.apache.hadoop.yarn.webapp.view.InfoBlock), Hamlet (org.apache.hadoop.yarn.webapp.hamlet.Hamlet), Date (java.util.Date), AMInfo (org.apache.hadoop.mapreduce.v2.api.records.AMInfo), DIV (org.apache.hadoop.yarn.webapp.hamlet.Hamlet.DIV), JobInfo (org.apache.hadoop.mapreduce.v2.app.webapp.dao.JobInfo), Job (org.apache.hadoop.mapreduce.v2.app.job.Job), JobId (org.apache.hadoop.mapreduce.v2.api.records.JobId), AMAttemptInfo (org.apache.hadoop.mapreduce.v2.app.webapp.dao.AMAttemptInfo)
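
Two conventions from JobBlock are worth calling out: methods prefixed with $ (such as $title, $style and $class) set attributes on the element that is currently open, while a(selector, href, text) emits a complete anchor inside the open cell. Below is a trimmed-down sketch of the ApplicationMaster row pattern using the same Block, DIV and TABLE types as above; the "#attempts" id, the attempt number "1" and the node address are illustrative values, not taken from the Hadoop source.

@Override
protected void render(Block html) {
    DIV<Hamlet> div = html.div(_INFO_WRAP);
    TABLE<DIV<Hamlet>> table = div.table("#attempts"); // illustrative table id
    table.
        tr().
            td("1").                                                    // plain text cell
            td().a(".nodelink", "http://node:8042/", "node:8042")._().  // anchor cell; _() closes the td
        _();                                                            // closes the tr
    table._(); // </table>
    div._();   // </div>
}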

Example 3 with Hamlet

Use of org.apache.hadoop.yarn.webapp.hamlet.Hamlet in project hadoop by apache.

From the class TestHamlet, method testEnumAttrs:

@Test
public void testEnumAttrs() {
    Hamlet h = newHamlet().
        meta_http("Content-type", "text/html; charset=utf-8").
        title("test enum attrs").
        link().$rel("stylesheet").$media(EnumSet.of(Media.screen, Media.print)).
            $type("text/css").$href("style.css")._().
        link().$rel(EnumSet.of(LinkType.index, LinkType.start)).
            $href("index.html")._();
    h.div("#content")._("content")._();
    PrintWriter out = h.getWriter();
    out.flush();
    assertEquals(0, h.nestLevel);
    verify(out).print(" media=\"screen, print\"");
    verify(out).print(" rel=\"start index\"");
}
Also used: Hamlet (org.apache.hadoop.yarn.webapp.hamlet.Hamlet), PrintWriter (java.io.PrintWriter), Test (org.junit.Test)

Example 4 with Hamlet

Use of org.apache.hadoop.yarn.webapp.hamlet.Hamlet in project hadoop by apache.

From the class TestHamlet, method testTable:

@Test
public void testTable() {
    Hamlet h = newHamlet().title("test table").link("style.css");
    TABLE t = h.table("#id");
    for (int i = 0; i < 3; ++i) {
        t.tr().td("1").td("2")._();
    }
    t._();
    PrintWriter out = h.getWriter();
    out.flush();
    assertEquals(0, h.nestLevel);
    verify(out).print("<table");
    verify(out).print("</table>");
    verify(out, atLeast(1)).print("</td>");
    verify(out, atLeast(1)).print("</tr>");
}
Also used: Hamlet (org.apache.hadoop.yarn.webapp.hamlet.Hamlet), PrintWriter (java.io.PrintWriter), Test (org.junit.Test)
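
The same tr()/td() pattern scales naturally to data-driven tables. The sketch below is not from the Hadoop tests: the test name, the "#data" id and the rows data are hypothetical, and it assumes java.util.Arrays and java.util.List are imported alongside the test imports shown above.

@Test
public void testDataDrivenTable() {
    // Hypothetical data; a real web block would pull these values from the app context.
    List<String[]> rows = Arrays.asList(new String[] { "maps", "10" },
                                        new String[] { "reduces", "2" });
    Hamlet h = newHamlet().title("data-driven table");
    // Raw TABLE type, as in testTable above.
    TABLE t = h.table("#data");         // "#data" is an illustrative id
    t.tr().th("Name").th("Value")._();  // header row
    for (String[] row : rows) {
        t.tr().td(row[0]).td(row[1])._(); // one body row per entry
    }
    t._();
    PrintWriter out = h.getWriter();
    out.flush();
    assertEquals(0, h.nestLevel);       // every opened element was closed
    verify(out).print("<table");
    verify(out).print("</table>");
}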

Example 5 with Hamlet

Use of org.apache.hadoop.yarn.webapp.hamlet.Hamlet in project hadoop by apache.

From the class TestHamlet, method testHamlet:

@Test
public void testHamlet() {
    Hamlet h = newHamlet().title("test").h1("heading 1").p("#id.class").b("hello").em("world!")._().div("#footer")._("Brought to you by").a("http://hostname/", "Somebody")._();
    PrintWriter out = h.getWriter();
    out.flush();
    assertEquals(0, h.nestLevel);
    verify(out).print("<title");
    verify(out).print("test");
    verify(out).print("</title>");
    verify(out).print("<h1");
    verify(out).print("heading 1");
    verify(out).print("</h1>");
    verify(out).print("<p");
    verify(out).print(" id=\"id\"");
    verify(out).print(" class=\"class\"");
    verify(out).print("<b");
    verify(out).print("hello");
    verify(out).print("</b>");
    verify(out).print("<em");
    verify(out).print("world!");
    verify(out).print("</em>");
    verify(out).print("<div");
    verify(out).print(" id=\"footer\"");
    verify(out).print("Brought to you by");
    verify(out).print("<a");
    verify(out).print(" href=\"http://hostname/\"");
    verify(out).print("Somebody");
    verify(out).print("</a>");
    verify(out).print("</div>");
    verify(out, never()).print("</p>");
}
Also used: Hamlet (org.apache.hadoop.yarn.webapp.hamlet.Hamlet), PrintWriter (java.io.PrintWriter), Test (org.junit.Test)
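
Note the final verification: even though the paragraph is explicitly closed in the chain, "</p>" is never printed, presumably because the end tag of p is optional in HTML, so Hamlet closes the element logically (nestLevel returns to 0) without emitting it. A minimal fragment making the same point by inspecting the rendered string, reusing the imports and the direct-construction assumption from the Example 1 sketch:

    StringWriter buffer = new StringWriter();
    Hamlet h = new Hamlet(new PrintWriter(buffer), 0, false);
    h.p("#id.class").b("hello").em("world!")._().div("#footer")._("Brought to you by")._();
    h.getWriter().flush();
    String markup = buffer.toString();
    System.out.println(markup.contains("</div>")); // expected: true
    System.out.println(markup.contains("</p>"));   // expected: false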

Aggregations

Hamlet (org.apache.hadoop.yarn.webapp.hamlet.Hamlet): 21
PrintWriter (java.io.PrintWriter): 6
Test (org.junit.Test): 6
DIV (org.apache.hadoop.yarn.webapp.hamlet.Hamlet.DIV): 5
InfoBlock (org.apache.hadoop.yarn.webapp.view.InfoBlock): 4
Log (org.apache.commons.logging.Log): 3
Log4JLogger (org.apache.commons.logging.impl.Log4JLogger): 3
AMInfo (org.apache.hadoop.mapreduce.v2.api.records.AMInfo): 3
RMApp (org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp): 3
RMAppAttempt (org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttempt): 3
RMAppAttemptMetrics (org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptMetrics): 3
Log4jWarningErrorMetricsAppender (org.apache.hadoop.yarn.util.Log4jWarningErrorMetricsAppender): 3
TABLE (org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TABLE): 3
Date (java.util.Date): 2
JobId (org.apache.hadoop.mapreduce.v2.api.records.JobId): 2
Job (org.apache.hadoop.mapreduce.v2.app.job.Job): 2
THEAD (org.apache.hadoop.yarn.webapp.hamlet.Hamlet.THEAD): 2
ArrayList (java.util.ArrayList): 1
HashMap (java.util.HashMap): 1
List (java.util.List): 1