Example 11 with AMInfo

Use of org.apache.hadoop.mapreduce.v2.api.records.AMInfo in the Apache Hadoop project.

From the class JobReportPBImpl, the method initAMInfos:

private synchronized void initAMInfos() {
    if (this.amInfos != null) {
        // Already materialized; the list is built lazily on first access.
        return;
    }
    // Read from the immutable proto when this record is proto-backed,
    // otherwise from the mutable builder.
    JobReportProtoOrBuilder p = viaProto ? proto : builder;
    List<AMInfoProto> list = p.getAmInfosList();
    this.amInfos = new ArrayList<AMInfo>();
    for (AMInfoProto amInfoProto : list) {
        this.amInfos.add(convertFromProtoFormat(amInfoProto));
    }
}
Also used: AMInfo (org.apache.hadoop.mapreduce.v2.api.records.AMInfo), AMInfoProto (org.apache.hadoop.mapreduce.v2.proto.MRProtos.AMInfoProto), JobReportProtoOrBuilder (org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProtoOrBuilder)
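
For context, convertFromProtoFormat is not shown above; in Hadoop's PBImpl record classes it is typically a one-line wrapper that backs the record interface with the proto message. A minimal sketch (the exact body may differ between Hadoop versions):

private AMInfoPBImpl convertFromProtoFormat(AMInfoProto p) {
    // AMInfoPBImpl implements the AMInfo record interface on top of the proto.
    return new AMInfoPBImpl(p);
}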

Example 12 with AMInfo

Use of org.apache.hadoop.mapreduce.v2.api.records.AMInfo in the Apache Hadoop project.

From the class MRBuilderUtils, the method newJobReport:

public static JobReport newJobReport(JobId jobId, String jobName, String userName,
        JobState state, long submitTime, long startTime, long finishTime,
        float setupProgress, float mapProgress, float reduceProgress,
        float cleanupProgress, String jobFile, List<AMInfo> amInfos,
        boolean isUber, String diagnostics, Priority priority) {
    JobReport report = Records.newRecord(JobReport.class);
    report.setJobId(jobId);
    report.setJobName(jobName);
    report.setUser(userName);
    report.setJobState(state);
    report.setSubmitTime(submitTime);
    report.setStartTime(startTime);
    report.setFinishTime(finishTime);
    report.setSetupProgress(setupProgress);
    report.setCleanupProgress(cleanupProgress);
    report.setMapProgress(mapProgress);
    report.setReduceProgress(reduceProgress);
    report.setJobFile(jobFile);
    report.setAMInfos(amInfos);
    report.setIsUber(isUber);
    report.setDiagnostics(diagnostics);
    report.setJobPriority(priority);
    return report;
}
Also used: JobReport (org.apache.hadoop.mapreduce.v2.api.records.JobReport)
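
A hedged usage sketch of the factory above: every argument is a plain value or an already-built record, so a caller only assembles them in order. The identifiers jobId, amInfos, jobFile, and the timestamps below are assumed to exist in the caller's scope; the literals are placeholders:

// Hypothetical caller; jobId, amInfos, jobFile and the timestamps are assumed.
JobReport report = MRBuilderUtils.newJobReport(
    jobId, "word-count", "alice", JobState.SUCCEEDED,
    submitTime, startTime, finishTime,
    1.0f, 1.0f, 1.0f, 1.0f,           // setup, map, reduce, cleanup progress
    jobFile, amInfos,
    false,                            // not an uber job
    "",                               // no diagnostics
    Priority.newInstance(0));         // default priority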

Example 13 with AMInfo

Use of org.apache.hadoop.mapreduce.v2.api.records.AMInfo in the Apache Hadoop project.

From the class MRBuilderUtils, the method newAMInfo:

public static AMInfo newAMInfo(ApplicationAttemptId appAttemptId, long startTime, ContainerId containerId, String nmHost, int nmPort, int nmHttpPort) {
    AMInfo amInfo = Records.newRecord(AMInfo.class);
    amInfo.setAppAttemptId(appAttemptId);
    amInfo.setStartTime(startTime);
    amInfo.setContainerId(containerId);
    amInfo.setNodeManagerHost(nmHost);
    amInfo.setNodeManagerPort(nmPort);
    amInfo.setNodeManagerHttpPort(nmHttpPort);
    return amInfo;
}
Also used: AMInfo (org.apache.hadoop.mapreduce.v2.api.records.AMInfo)
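
Again a hedged sketch: newAMInfo is typically called when an ApplicationMaster attempt starts, with the attempt and container IDs taken from the AM's environment. The host and ports below are placeholder values (8042 is a common NodeManager HTTP port, not a guarantee):

// Hypothetical values; appAttemptId and containerId come from the AM's context.
AMInfo amInfo = MRBuilderUtils.newAMInfo(
    appAttemptId,
    System.currentTimeMillis(),    // AM start time
    containerId,                   // container running this AM attempt
    "nm-host.example.com", 45454, 8042);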

Example 14 with AMInfo

Use of org.apache.hadoop.mapreduce.v2.api.records.AMInfo in the Apache Hadoop project.

From the class JobBlock, the method render:

@Override
protected void render(Block html) {
    String jid = $(JOB_ID);
    if (jid.isEmpty()) {
        html.p()._("Sorry, can't do anything without a JobID.")._();
        return;
    }
    JobId jobID = MRApps.toJobID(jid);
    Job job = appContext.getJob(jobID);
    if (job == null) {
        html.p()._("Sorry, ", jid, " not found.")._();
        return;
    }
    List<AMInfo> amInfos = job.getAMInfos();
    String amString = amInfos.size() == 1 ? "ApplicationMaster" : "ApplicationMasters";
    JobInfo jinfo = new JobInfo(job, true);
    info("Job Overview")._("Job Name:", jinfo.getName())._("User Name:", jinfo.getUserName())._("Queue Name:", jinfo.getQueueName())._("State:", jinfo.getState())._("Uberized:", jinfo.isUberized())._("Started:", new Date(jinfo.getStartTime()))._("Elapsed:", StringUtils.formatTime(jinfo.getElapsedTime()));
    DIV<Hamlet> div = html._(InfoBlock.class).div(_INFO_WRAP);
    // MRAppMasters Table
    TABLE<DIV<Hamlet>> table = div.table("#job");
    table.tr().th(amString)._()
        .tr()
            .th(_TH, "Attempt Number")
            .th(_TH, "Start Time")
            .th(_TH, "Node")
            .th(_TH, "Logs")._();
    for (AMInfo amInfo : amInfos) {
        AMAttemptInfo attempt = new AMAttemptInfo(amInfo, jinfo.getId(), jinfo.getUserName());
        table.tr()
            .td(String.valueOf(attempt.getAttemptId()))
            .td(new Date(attempt.getStartTime()).toString())
            .td().a(".nodelink",
                url(MRWebAppUtil.getYARNWebappScheme(), attempt.getNodeHttpAddress()),
                attempt.getNodeHttpAddress())._()
            .td().a(".logslink", url(attempt.getLogsLink()), "logs")._()
        ._();
    }
    table._();
    div._();
    html.div(_INFO_WRAP).table("#job")
        .tr()
            .th(_TH, "Task Type")
            .th(_TH, "Progress")
            .th(_TH, "Total")
            .th(_TH, "Pending")
            .th(_TH, "Running")
            .th(_TH, "Complete")._()
        .tr(_ODD)
            .th("Map")
            .td()
                .div(_PROGRESSBAR)
                    .$title(join(jinfo.getMapProgressPercent(), '%'))  // tooltip
                    .div(_PROGRESSBAR_VALUE)
                        .$style(join("width:", jinfo.getMapProgressPercent(), '%'))._()._()._()
            .td().a(url("tasks", jid, "m", "ALL"), String.valueOf(jinfo.getMapsTotal()))._()
            .td().a(url("tasks", jid, "m", "PENDING"), String.valueOf(jinfo.getMapsPending()))._()
            .td().a(url("tasks", jid, "m", "RUNNING"), String.valueOf(jinfo.getMapsRunning()))._()
            .td().a(url("tasks", jid, "m", "COMPLETED"), String.valueOf(jinfo.getMapsCompleted()))._()._()
        .tr(_EVEN)
            .th("Reduce")
            .td()
                .div(_PROGRESSBAR)
                    .$title(join(jinfo.getReduceProgressPercent(), '%'))  // tooltip
                    .div(_PROGRESSBAR_VALUE)
                        .$style(join("width:", jinfo.getReduceProgressPercent(), '%'))._()._()._()
            .td().a(url("tasks", jid, "r", "ALL"), String.valueOf(jinfo.getReducesTotal()))._()
            .td().a(url("tasks", jid, "r", "PENDING"), String.valueOf(jinfo.getReducesPending()))._()
            .td().a(url("tasks", jid, "r", "RUNNING"), String.valueOf(jinfo.getReducesRunning()))._()
            .td().a(url("tasks", jid, "r", "COMPLETED"), String.valueOf(jinfo.getReducesCompleted()))._()._()._()
    .table("#job")
        .tr()
            .th(_TH, "Attempt Type")
            .th(_TH, "New")
            .th(_TH, "Running")
            .th(_TH, "Failed")
            .th(_TH, "Killed")
            .th(_TH, "Successful")._()
        .tr(_ODD)
            .th("Maps")
            .td().a(url("attempts", jid, "m", TaskAttemptStateUI.NEW.toString()), String.valueOf(jinfo.getNewMapAttempts()))._()
            .td().a(url("attempts", jid, "m", TaskAttemptStateUI.RUNNING.toString()), String.valueOf(jinfo.getRunningMapAttempts()))._()
            .td().a(url("attempts", jid, "m", TaskAttemptStateUI.FAILED.toString()), String.valueOf(jinfo.getFailedMapAttempts()))._()
            .td().a(url("attempts", jid, "m", TaskAttemptStateUI.KILLED.toString()), String.valueOf(jinfo.getKilledMapAttempts()))._()
            .td().a(url("attempts", jid, "m", TaskAttemptStateUI.SUCCESSFUL.toString()), String.valueOf(jinfo.getSuccessfulMapAttempts()))._()._()
        .tr(_EVEN)
            .th("Reduces")
            .td().a(url("attempts", jid, "r", TaskAttemptStateUI.NEW.toString()), String.valueOf(jinfo.getNewReduceAttempts()))._()
            .td().a(url("attempts", jid, "r", TaskAttemptStateUI.RUNNING.toString()), String.valueOf(jinfo.getRunningReduceAttempts()))._()
            .td().a(url("attempts", jid, "r", TaskAttemptStateUI.FAILED.toString()), String.valueOf(jinfo.getFailedReduceAttempts()))._()
            .td().a(url("attempts", jid, "r", TaskAttemptStateUI.KILLED.toString()), String.valueOf(jinfo.getKilledReduceAttempts()))._()
            .td().a(url("attempts", jid, "r", TaskAttemptStateUI.SUCCESSFUL.toString()), String.valueOf(jinfo.getSuccessfulReduceAttempts()))._()._()._()._();
}
Also used: InfoBlock (org.apache.hadoop.yarn.webapp.view.InfoBlock), Hamlet (org.apache.hadoop.yarn.webapp.hamlet.Hamlet), Date (java.util.Date), AMInfo (org.apache.hadoop.mapreduce.v2.api.records.AMInfo), DIV (org.apache.hadoop.yarn.webapp.hamlet.Hamlet.DIV), JobInfo (org.apache.hadoop.mapreduce.v2.app.webapp.dao.JobInfo), Job (org.apache.hadoop.mapreduce.v2.app.job.Job), JobId (org.apache.hadoop.mapreduce.v2.api.records.JobId), AMAttemptInfo (org.apache.hadoop.mapreduce.v2.app.webapp.dao.AMAttemptInfo)
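
The Hamlet chains above are easier to follow once the convention is clear: calls such as tr() or td() open an element, calls like th(_TH, "Node") or a(href, text) emit a complete element, and each _() closes the innermost open element. A minimal sketch of the same pattern, reusing only calls and identifiers that appear in the example:

// Sketch only; assumes jid and url(...) from the surrounding view class.
html.table("#t")
    .tr()
        .th("Tasks")
        .td()
            .a(url("tasks", jid, "m", "ALL"), "all maps")._()  // a(href, text); _() closes the <td>
    ._()                                                       // closes the <tr>
._();                                                          // closes the <table>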

Example 15 with AMInfo

Use of org.apache.hadoop.mapreduce.v2.api.records.AMInfo in the Apache Hadoop project.

From the class MRAppMaster, the method parsePreviousJobHistory:

private void parsePreviousJobHistory() throws IOException {
    FSDataInputStream in = getPreviousJobHistoryStream(getConfig(), appAttemptID);
    JobHistoryParser parser = new JobHistoryParser(in);
    JobInfo jobInfo = parser.parse();
    Exception parseException = parser.getParseException();
    if (parseException != null) {
        LOG.info("Got an error parsing job-history file" + ", ignoring incomplete events.", parseException);
    }
    Map<org.apache.hadoop.mapreduce.TaskID, TaskInfo> taskInfos = jobInfo.getAllTasks();
    for (TaskInfo taskInfo : taskInfos.values()) {
        if (TaskState.SUCCEEDED.toString().equals(taskInfo.getTaskStatus())) {
            // Keep only attempts that actually completed; in-flight attempts
            // from the previous AM cannot be recovered.
            Iterator<Entry<TaskAttemptID, TaskAttemptInfo>> taskAttemptIterator = taskInfo.getAllTaskAttempts().entrySet().iterator();
            while (taskAttemptIterator.hasNext()) {
                Map.Entry<TaskAttemptID, TaskAttemptInfo> currentEntry = taskAttemptIterator.next();
                if (!jobInfo.getAllCompletedTaskAttempts().containsKey(currentEntry.getKey())) {
                    taskAttemptIterator.remove();
                }
            }
            completedTasksFromPreviousRun.put(TypeConverter.toYarn(taskInfo.getTaskId()), taskInfo);
            LOG.info("Read from history task " + TypeConverter.toYarn(taskInfo.getTaskId()));
        }
    }
    LOG.info("Read completed tasks from history " + completedTasksFromPreviousRun.size());
    recoveredJobStartTime = jobInfo.getLaunchTime();
    // recover AMInfos
    List<JobHistoryParser.AMInfo> jhAmInfoList = jobInfo.getAMInfos();
    if (jhAmInfoList != null) {
        for (JobHistoryParser.AMInfo jhAmInfo : jhAmInfoList) {
            AMInfo amInfo = MRBuilderUtils.newAMInfo(jhAmInfo.getAppAttemptId(), jhAmInfo.getStartTime(), jhAmInfo.getContainerId(), jhAmInfo.getNodeManagerHost(), jhAmInfo.getNodeManagerPort(), jhAmInfo.getNodeManagerHttpPort());
            amInfos.add(amInfo);
        }
    }
}
Also used: TaskAttemptID (org.apache.hadoop.mapreduce.TaskAttemptID), InvocationTargetException (java.lang.reflect.InvocationTargetException), NoSuchAlgorithmException (java.security.NoSuchAlgorithmException), IOException (java.io.IOException), YarnRuntimeException (org.apache.hadoop.yarn.exceptions.YarnRuntimeException), TaskInfo (org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskInfo), AMInfo (org.apache.hadoop.mapreduce.v2.api.records.AMInfo), Entry (java.util.Map.Entry), JobHistoryParser (org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser), JobInfo (org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo), TaskAttemptInfo (org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskAttemptInfo), FSDataInputStream (org.apache.hadoop.fs.FSDataInputStream), Map (java.util.Map), ConcurrentHashMap (java.util.concurrent.ConcurrentHashMap), HashMap (java.util.HashMap)
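
One detail worth noting in parsePreviousJobHistory: task attempts are removed through the entry-set iterator rather than with map.remove() inside a for-each loop, which would throw ConcurrentModificationException. A self-contained sketch of the same pruning pattern in plain Java (names and data are illustrative only):

import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;

public class PruneDemo {
    public static void main(String[] args) {
        Map<String, Integer> attempts = new HashMap<>();
        attempts.put("attempt_1", 1);
        attempts.put("attempt_2", 2);
        // Iterator.remove() is the safe way to drop entries mid-iteration.
        Iterator<Map.Entry<String, Integer>> it = attempts.entrySet().iterator();
        while (it.hasNext()) {
            if (it.next().getValue() % 2 != 0) {
                it.remove();  // drops attempt_1
            }
        }
        System.out.println(attempts);  // prints {attempt_2=2}
    }
}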

Aggregations

AMInfo (org.apache.hadoop.mapreduce.v2.api.records.AMInfo): 21 uses
Job (org.apache.hadoop.mapreduce.v2.app.job.Job): 9 uses
Configuration (org.apache.hadoop.conf.Configuration): 5 uses
Task (org.apache.hadoop.mapreduce.v2.app.job.Task): 5 uses
IOException (java.io.IOException): 4 uses
JobId (org.apache.hadoop.mapreduce.v2.api.records.JobId): 4 uses
TaskAttempt (org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt): 4 uses
HashMap (java.util.HashMap): 3 uses
FSDataInputStream (org.apache.hadoop.fs.FSDataInputStream): 3 uses
JobHistoryParser (org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser): 3 uses
TaskInfo (org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskInfo): 3 uses
JobReport (org.apache.hadoop.mapreduce.v2.api.records.JobReport): 3 uses
Date (java.util.Date): 2 uses
LinkedList (java.util.LinkedList): 2 uses
GET (javax.ws.rs.GET): 2 uses
Path (javax.ws.rs.Path): 2 uses
Produces (javax.ws.rs.Produces): 2 uses
FileContext (org.apache.hadoop.fs.FileContext): 2 uses
Path (org.apache.hadoop.fs.Path): 2 uses
AMStartedEvent (org.apache.hadoop.mapreduce.jobhistory.AMStartedEvent): 2 uses