
Example 1 with JobInfo

Use of org.apache.hadoop.mapreduce.v2.hs.webapp.dao.JobInfo in project hadoop by apache.

The class TestJobInfo, method testGetFormattedStartTimeStr:

@Test
public void testGetFormattedStartTimeStr() {
    JobReport jobReport = mock(JobReport.class);
    // -1 marks a start time that was never reported
    when(jobReport.getStartTime()).thenReturn(-1L);
    Job job = mock(Job.class);
    when(job.getReport()).thenReturn(jobReport);
    when(job.getName()).thenReturn("TestJobInfo");
    when(job.getState()).thenReturn(JobState.SUCCEEDED);
    JobId jobId = MRBuilderUtils.newJobId(1L, 1, 1);
    when(job.getID()).thenReturn(jobId);
    DateFormat dateFormat = new SimpleDateFormat();
    JobInfo jobInfo = new JobInfo(job);
    Assert.assertEquals(JobInfo.NA, jobInfo.getFormattedStartTimeStr(dateFormat));
    Date date = new Date();
    when(jobReport.getStartTime()).thenReturn(date.getTime());
    jobInfo = new JobInfo(job);
    Assert.assertEquals(dateFormat.format(date), jobInfo.getFormattedStartTimeStr(dateFormat));
}
Also used: SimpleDateFormat (java.text.SimpleDateFormat), DateFormat (java.text.DateFormat), Date (java.util.Date), CompletedJob (org.apache.hadoop.mapreduce.v2.hs.CompletedJob), Job (org.apache.hadoop.mapreduce.v2.app.job.Job), JobId (org.apache.hadoop.mapreduce.v2.api.records.JobId), JobReport (org.apache.hadoop.mapreduce.v2.api.records.JobReport), Test (org.junit.Test)
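
The behavior under test is a simple guard: JobInfo keeps the report's start time, and a negative value means the ApplicationMaster never reported one. A minimal sketch of that guard, assuming a startTime field alongside the NA constant asserted above (hypothetical; the real method lives in org.apache.hadoop.mapreduce.v2.hs.webapp.dao.JobInfo):

// Hypothetical sketch of the guard testGetFormattedStartTimeStr exercises.
public String getFormattedStartTimeStr(DateFormat dateFormat) {
    if (startTime < 0) {
        // never reported -> "N/A" placeholder
        return NA;
    }
    return dateFormat.format(new Date(startTime));
}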

Example 2 with JobInfo

Use of org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo in project hadoop by apache.

The class TestJobHistoryParsing, method checkHistoryParsing:

private void checkHistoryParsing(final int numMaps, final int numReduces, final int numSuccessfulMaps) throws Exception {
    Configuration conf = new Configuration();
    conf.set(MRJobConfig.USER_NAME, System.getProperty("user.name"));
    long amStartTimeEst = System.currentTimeMillis();
    conf.setClass(NET_TOPOLOGY_NODE_SWITCH_MAPPING_IMPL_KEY, MyResolver.class, DNSToSwitchMapping.class);
    RackResolver.init(conf);
    MRApp app = new MRAppWithHistory(numMaps, numReduces, true, this.getClass().getName(), true);
    app.submit(conf);
    Job job = app.getContext().getAllJobs().values().iterator().next();
    JobId jobId = job.getID();
    LOG.info("JOBID is " + TypeConverter.fromYarn(jobId).toString());
    app.waitForState(job, JobState.SUCCEEDED);
    // make sure all events are flushed
    app.waitForState(Service.STATE.STOPPED);
    String jobhistoryDir = JobHistoryUtils.getHistoryIntermediateDoneDirForUser(conf);
    FileContext fc = null;
    try {
        fc = FileContext.getFileContext(conf);
    } catch (IOException ioe) {
        LOG.info("Cannot get FileContext", ioe);
        throw new Exception("Cannot get FileContext", ioe);
    }
    if (numMaps == numSuccessfulMaps) {
        String summaryFileName = JobHistoryUtils.getIntermediateSummaryFileName(jobId);
        Path summaryFile = new Path(jobhistoryDir, summaryFileName);
        String jobSummaryString = getJobSummary(fc, summaryFile);
        Assert.assertNotNull(jobSummaryString);
        Assert.assertTrue(jobSummaryString.contains("resourcesPerMap=100"));
        Assert.assertTrue(jobSummaryString.contains("resourcesPerReduce=100"));
        // The summary line is a comma-separated list of key=value pairs.
        Map<String, String> jobSummaryElements = new HashMap<String, String>();
        StringTokenizer strToken = new StringTokenizer(jobSummaryString, ",");
        while (strToken.hasMoreTokens()) {
            String[] keypair = strToken.nextToken().split("=");
            jobSummaryElements.put(keypair[0], keypair[1]);
        }
        Assert.assertEquals("JobId does not match", jobId.toString(), jobSummaryElements.get("jobId"));
        Assert.assertEquals("JobName does not match", "test", jobSummaryElements.get("jobName"));
        Assert.assertTrue("submitTime should not be 0", Long.parseLong(jobSummaryElements.get("submitTime")) != 0);
        Assert.assertTrue("launchTime should not be 0", Long.parseLong(jobSummaryElements.get("launchTime")) != 0);
        Assert.assertTrue("firstMapTaskLaunchTime should not be 0", Long.parseLong(jobSummaryElements.get("firstMapTaskLaunchTime")) != 0);
        Assert.assertTrue("firstReduceTaskLaunchTime should not be 0", Long.parseLong(jobSummaryElements.get("firstReduceTaskLaunchTime")) != 0);
        Assert.assertTrue("finishTime should not be 0", Long.parseLong(jobSummaryElements.get("finishTime")) != 0);
        Assert.assertEquals("Mismatch in num map slots", numSuccessfulMaps, Integer.parseInt(jobSummaryElements.get("numMaps")));
        Assert.assertEquals("Mismatch in num reduce slots", numReduces, Integer.parseInt(jobSummaryElements.get("numReduces")));
        Assert.assertEquals("User does not match", System.getProperty("user.name"), jobSummaryElements.get("user"));
        Assert.assertEquals("Queue does not match", "default", jobSummaryElements.get("queue"));
        Assert.assertEquals("Status does not match", "SUCCEEDED", jobSummaryElements.get("status"));
    }
    JobHistory jobHistory = new JobHistory();
    jobHistory.init(conf);
    HistoryFileInfo fileInfo = jobHistory.getJobFileInfo(jobId);
    JobInfo jobInfo;
    long numFinishedMaps;
    synchronized (fileInfo) {
        Path historyFilePath = fileInfo.getHistoryFile();
        FSDataInputStream in = null;
        LOG.info("JobHistoryFile is: " + historyFilePath);
        try {
            in = fc.open(fc.makeQualified(historyFilePath));
        } catch (IOException ioe) {
            LOG.info("Cannot open history file: " + historyFilePath, ioe);
            throw new Exception("Cannot open history file: " + historyFilePath, ioe);
        }
        JobHistoryParser parser = new JobHistoryParser(in);
        final EventReader realReader = new EventReader(in);
        EventReader reader = Mockito.mock(EventReader.class);
        if (numMaps == numSuccessfulMaps) {
            reader = realReader;
        } else {
            // Hack: wrap the real reader so it throws after the expected number
            // of successful task-finished events, simulating a truncated file.
            final AtomicInteger numFinishedEvents = new AtomicInteger(0);
            Mockito.when(reader.getNextEvent()).thenAnswer(new Answer<HistoryEvent>() {

                @Override
                public HistoryEvent answer(InvocationOnMock invocation) throws IOException {
                    HistoryEvent event = realReader.getNextEvent();
                    if (event instanceof TaskFinishedEvent) {
                        numFinishedEvents.incrementAndGet();
                    }
                    if (numFinishedEvents.get() <= numSuccessfulMaps) {
                        return event;
                    } else {
                        throw new IOException("test");
                    }
                }
            });
        }
        jobInfo = parser.parse(reader);
        numFinishedMaps = computeFinishedMaps(jobInfo, numMaps, numSuccessfulMaps);
        if (numFinishedMaps != numMaps) {
            Exception parseException = parser.getParseException();
            Assert.assertNotNull("Didn't get expected parse exception", parseException);
        }
    }
    Assert.assertEquals("Incorrect username ", System.getProperty("user.name"), jobInfo.getUsername());
    Assert.assertEquals("Incorrect jobName ", "test", jobInfo.getJobname());
    Assert.assertEquals("Incorrect queuename ", "default", jobInfo.getJobQueueName());
    Assert.assertEquals("incorrect conf path", "test", jobInfo.getJobConfPath());
    Assert.assertEquals("incorrect finishedMap ", numSuccessfulMaps, numFinishedMaps);
    Assert.assertEquals("incorrect finishedReduces ", numReduces, jobInfo.getFinishedReduces());
    Assert.assertEquals("incorrect uberized ", job.isUber(), jobInfo.getUberized());
    Map<TaskID, TaskInfo> allTasks = jobInfo.getAllTasks();
    int totalTasks = allTasks.size();
    Assert.assertEquals("total number of tasks is incorrect  ", (numMaps + numReduces), totalTasks);
    // Verify aminfo
    Assert.assertEquals(1, jobInfo.getAMInfos().size());
    Assert.assertEquals(MRApp.NM_HOST, jobInfo.getAMInfos().get(0).getNodeManagerHost());
    AMInfo amInfo = jobInfo.getAMInfos().get(0);
    Assert.assertEquals(MRApp.NM_PORT, amInfo.getNodeManagerPort());
    Assert.assertEquals(MRApp.NM_HTTP_PORT, amInfo.getNodeManagerHttpPort());
    Assert.assertEquals(1, amInfo.getAppAttemptId().getAttemptId());
    Assert.assertEquals(amInfo.getAppAttemptId(), amInfo.getContainerId().getApplicationAttemptId());
    Assert.assertTrue(amInfo.getStartTime() <= System.currentTimeMillis() && amInfo.getStartTime() >= amStartTimeEst);
    ContainerId fakeCid = MRApp.newContainerId(-1, -1, -1, -1);
    // Assert at taskAttempt level
    for (TaskInfo taskInfo : allTasks.values()) {
        int taskAttemptCount = taskInfo.getAllTaskAttempts().size();
        Assert.assertEquals("total number of task attempts ", 1, taskAttemptCount);
        TaskAttemptInfo taInfo = taskInfo.getAllTaskAttempts().values().iterator().next();
        Assert.assertNotNull(taInfo.getContainerId());
        // Verify the wrong ctor is not being used. Remove after mrv1 is removed.
        Assert.assertFalse(taInfo.getContainerId().equals(fakeCid));
    }
    // Deep compare Job and JobInfo
    for (Task task : job.getTasks().values()) {
        TaskInfo taskInfo = allTasks.get(TypeConverter.fromYarn(task.getID()));
        Assert.assertNotNull("TaskInfo not found", taskInfo);
        for (TaskAttempt taskAttempt : task.getAttempts().values()) {
            TaskAttemptInfo taskAttemptInfo = taskInfo.getAllTaskAttempts().get(TypeConverter.fromYarn((taskAttempt.getID())));
            Assert.assertNotNull("TaskAttemptInfo not found", taskAttemptInfo);
            Assert.assertEquals("Incorrect shuffle port for task attempt", taskAttempt.getShufflePort(), taskAttemptInfo.getShufflePort());
            if (numMaps == numSuccessfulMaps) {
                Assert.assertEquals(MRApp.NM_HOST, taskAttemptInfo.getHostname());
                Assert.assertEquals(MRApp.NM_PORT, taskAttemptInfo.getPort());
                // Verify rack-name
                Assert.assertEquals("rack-name is incorrect", taskAttemptInfo.getRackname(), RACK_NAME);
            }
        }
    }
    // test output for HistoryViewer
    PrintStream stdps = System.out;
    try {
        System.setOut(new PrintStream(outContent));
        HistoryViewer viewer;
        synchronized (fileInfo) {
            viewer = new HistoryViewer(fc.makeQualified(fileInfo.getHistoryFile()).toString(), conf, true);
        }
        viewer.print();
        for (TaskInfo taskInfo : allTasks.values()) {
            String test = (taskInfo.getTaskStatus() == null ? "" : taskInfo.getTaskStatus()) + " " + taskInfo.getTaskType() + " task list for " + taskInfo.getTaskId().getJobID();
            Assert.assertTrue(outContent.toString().indexOf(test) > 0);
            Assert.assertTrue(outContent.toString().indexOf(taskInfo.getTaskId().toString()) > 0);
        }
    } finally {
        System.setOut(stdps);
    }
}
Also used: MRAppWithHistory (org.apache.hadoop.mapreduce.v2.hs.TestJobHistoryEvents.MRAppWithHistory), Task (org.apache.hadoop.mapreduce.v2.app.job.Task), Configuration (org.apache.hadoop.conf.Configuration), HashMap (java.util.HashMap), TaskInfo (org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskInfo), HistoryViewer (org.apache.hadoop.mapreduce.jobhistory.HistoryViewer), JobInfo (org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo), ContainerId (org.apache.hadoop.yarn.api.records.ContainerId), TaskAttempt (org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt), Job (org.apache.hadoop.mapreduce.v2.app.job.Job), JobId (org.apache.hadoop.mapreduce.v2.api.records.JobId), MRApp (org.apache.hadoop.mapreduce.v2.app.MRApp), Path (org.apache.hadoop.fs.Path), EventReader (org.apache.hadoop.mapreduce.jobhistory.EventReader), PrintStream (java.io.PrintStream), HistoryFileInfo (org.apache.hadoop.mapreduce.v2.hs.HistoryFileManager.HistoryFileInfo), TaskID (org.apache.hadoop.mapreduce.TaskID), IOException (java.io.IOException), HistoryEvent (org.apache.hadoop.mapreduce.jobhistory.HistoryEvent), AMInfo (org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.AMInfo), StringTokenizer (java.util.StringTokenizer), TaskFinishedEvent (org.apache.hadoop.mapreduce.jobhistory.TaskFinishedEvent), JobHistoryParser (org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser), AtomicInteger (java.util.concurrent.atomic.AtomicInteger), InvocationOnMock (org.mockito.invocation.InvocationOnMock), TaskAttemptInfo (org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskAttemptInfo), FSDataInputStream (org.apache.hadoop.fs.FSDataInputStream), FileContext (org.apache.hadoop.fs.FileContext)
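
Stripped of the mock-reader hack, the parsing path the test exercises is short; a minimal sketch of reading a .jhist file directly (the helper's name and the file's location are hypothetical, the calls are the ones used above):

// Minimal sketch: parse a history file and print a few JobInfo fields.
static void printJobSummary(Configuration conf, Path historyFile) throws Exception {
    FileContext fc = FileContext.getFileContext(conf);
    FSDataInputStream in = fc.open(fc.makeQualified(historyFile));
    try {
        JobHistoryParser parser = new JobHistoryParser(in);
        JobHistoryParser.JobInfo jobInfo = parser.parse(); // reads every event
        System.out.println(jobInfo.getJobname() + ": "
            + jobInfo.getFinishedMaps() + " maps, "
            + jobInfo.getFinishedReduces() + " reduces finished");
    } finally {
        in.close();
    }
}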

Example 3 with JobInfo

Use of org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo in project hadoop by apache.

The class TestJobHistoryEntities, method testCompletedJobWithDiagnostics:

@Test(timeout = 30000)
public void testCompletedJobWithDiagnostics() throws Exception {
    final String jobError = "Job Diagnostics";
    JobInfo jobInfo = spy(new JobInfo());
    when(jobInfo.getErrorInfo()).thenReturn(jobError);
    when(jobInfo.getJobStatus()).thenReturn(JobState.FAILED.toString());
    when(jobInfo.getAMInfos()).thenReturn(Collections.<JobHistoryParser.AMInfo>emptyList());
    final JobHistoryParser mockParser = mock(JobHistoryParser.class);
    when(mockParser.parse()).thenReturn(jobInfo);
    HistoryFileInfo info = mock(HistoryFileInfo.class);
    when(info.getConfFile()).thenReturn(fullConfPath);
    when(info.getHistoryFile()).thenReturn(fullHistoryPath);
    CompletedJob job = new CompletedJob(conf, jobId, fullHistoryPath, loadTasks, "user", info, jobAclsManager) {

        @Override
        protected JobHistoryParser createJobHistoryParser(Path historyFileAbsolute) throws IOException {
            return mockParser;
        }
    };
    assertEquals(jobError, job.getReport().getDiagnostics());
}
Also used: Path (org.apache.hadoop.fs.Path), HistoryFileInfo (org.apache.hadoop.mapreduce.v2.hs.HistoryFileManager.HistoryFileInfo), JobHistoryParser (org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser), JobInfo (org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo), Test (org.junit.Test)
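
The test can inject mockParser only because CompletedJob routes parser construction through the protected createJobHistoryParser factory. The production version presumably opens the real file, roughly like this sketch (an approximation, assuming the enclosing class keeps its Configuration in a conf field):

// Hypothetical approximation of the production factory the test overrides.
protected JobHistoryParser createJobHistoryParser(Path historyFileAbsolute)
        throws IOException {
    return new JobHistoryParser(
        historyFileAbsolute.getFileSystem(conf), historyFileAbsolute);
}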

Example 4 with JobInfo

Use of org.apache.hadoop.mapreduce.v2.hs.webapp.dao.JobInfo in project hadoop by apache.

The class HsJobBlock, method render:

/*
   * (non-Javadoc)
   * @see org.apache.hadoop.yarn.webapp.view.HtmlBlock#render(org.apache.hadoop.yarn.webapp.view.HtmlBlock.Block)
   */
@Override
protected void render(Block html) {
    String jid = $(JOB_ID);
    if (jid.isEmpty()) {
        html.p()._("Sorry, can't do anything without a JobID.")._();
        return;
    }
    JobId jobID = MRApps.toJobID(jid);
    Job j = appContext.getJob(jobID);
    if (j == null) {
        html.p()._("Sorry, ", jid, " not found.")._();
        return;
    }
    if (j instanceof UnparsedJob) {
        final int taskCount = j.getTotalMaps() + j.getTotalReduces();
        UnparsedJob oversizedJob = (UnparsedJob) j;
        html.p()._("The job has a total of " + taskCount + " tasks. ")._("Any job larger than " + oversizedJob.getMaxTasksAllowed() + " will not be loaded.")._();
        html.p()._("You can either use the CLI tool: 'mapred job -history'" + " to view large jobs or adjust the property " + JHAdminConfig.MR_HS_LOADED_JOBS_TASKS_MAX + ".")._();
        return;
    }
    List<AMInfo> amInfos = j.getAMInfos();
    JobInfo job = new JobInfo(j);
    ResponseInfo infoBlock = info("Job Overview")
        ._("Job Name:", job.getName())
        ._("User Name:", job.getUserName())
        ._("Queue:", job.getQueueName())
        ._("State:", job.getState())
        ._("Uberized:", job.isUber())
        ._("Submitted:", new Date(job.getSubmitTime()))
        ._("Started:", job.getStartTimeStr())
        ._("Finished:", new Date(job.getFinishTime()))
        ._("Elapsed:", StringUtils.formatTime(
            Times.elapsed(job.getStartTime(), job.getFinishTime(), false)));
    String amString = amInfos.size() == 1 ? "ApplicationMaster" : "ApplicationMasters";
    // todo - switch to use JobInfo
    List<String> diagnostics = j.getDiagnostics();
    if (diagnostics != null && !diagnostics.isEmpty()) {
        StringBuffer b = new StringBuffer();
        for (String diag : diagnostics) {
            b.append(addTaskLinks(diag));
        }
        infoBlock._r("Diagnostics:", b.toString());
    }
    if (job.getNumMaps() > 0) {
        infoBlock._("Average Map Time", StringUtils.formatTime(job.getAvgMapTime()));
    }
    if (job.getNumReduces() > 0) {
        infoBlock._("Average Shuffle Time", StringUtils.formatTime(job.getAvgShuffleTime()));
        infoBlock._("Average Merge Time", StringUtils.formatTime(job.getAvgMergeTime()));
        infoBlock._("Average Reduce Time", StringUtils.formatTime(job.getAvgReduceTime()));
    }
    for (ConfEntryInfo entry : job.getAcls()) {
        infoBlock._("ACL " + entry.getName() + ":", entry.getValue());
    }
    DIV<Hamlet> div = html._(InfoBlock.class).div(_INFO_WRAP);
    // MRAppMasters Table
    TABLE<DIV<Hamlet>> table = div.table("#job");
    table.tr().th(amString)._()
        .tr()
            .th(_TH, "Attempt Number")
            .th(_TH, "Start Time")
            .th(_TH, "Node")
            .th(_TH, "Logs")._();
    boolean odd = false;
    for (AMInfo amInfo : amInfos) {
        AMAttemptInfo attempt = new AMAttemptInfo(amInfo, job.getId(), job.getUserName(), "", "");
        table.tr((odd = !odd) ? _ODD : _EVEN)
            .td(String.valueOf(attempt.getAttemptId()))
            .td(new Date(attempt.getStartTime()).toString())
            .td().a(".nodelink",
                url(MRWebAppUtil.getYARNWebappScheme(), attempt.getNodeHttpAddress()),
                attempt.getNodeHttpAddress())._()
            .td().a(".logslink", url(attempt.getLogsLink()), "logs")._()
            ._();
    }
    table._();
    div._();
    html.div(_INFO_WRAP)
        .table("#job")
            .tr()
                .th(_TH, "Task Type")
                .th(_TH, "Total")
                .th(_TH, "Complete")._()
            .tr(_ODD)
                .th().a(url("tasks", jid, "m"), "Map")._()
                .td(String.valueOf(job.getMapsTotal()))
                .td(String.valueOf(job.getMapsCompleted()))._()
            .tr(_EVEN)
                .th().a(url("tasks", jid, "r"), "Reduce")._()
                .td(String.valueOf(job.getReducesTotal()))
                .td(String.valueOf(job.getReducesCompleted()))._()
            ._()
        .table("#job")
            .tr()
                .th(_TH, "Attempt Type")
                .th(_TH, "Failed")
                .th(_TH, "Killed")
                .th(_TH, "Successful")._()
            .tr(_ODD)
                .th("Maps")
                .td().a(url("attempts", jid, "m", TaskAttemptStateUI.FAILED.toString()),
                    String.valueOf(job.getFailedMapAttempts()))._()
                .td().a(url("attempts", jid, "m", TaskAttemptStateUI.KILLED.toString()),
                    String.valueOf(job.getKilledMapAttempts()))._()
                .td().a(url("attempts", jid, "m", TaskAttemptStateUI.SUCCESSFUL.toString()),
                    String.valueOf(job.getSuccessfulMapAttempts()))._()._()
            .tr(_EVEN)
                .th("Reduces")
                .td().a(url("attempts", jid, "r", TaskAttemptStateUI.FAILED.toString()),
                    String.valueOf(job.getFailedReduceAttempts()))._()
                .td().a(url("attempts", jid, "r", TaskAttemptStateUI.KILLED.toString()),
                    String.valueOf(job.getKilledReduceAttempts()))._()
                .td().a(url("attempts", jid, "r", TaskAttemptStateUI.SUCCESSFUL.toString()),
                    String.valueOf(job.getSuccessfulReduceAttempts()))._()
                ._()._()._();
}
Also used: ResponseInfo (org.apache.hadoop.yarn.webapp.ResponseInfo), InfoBlock (org.apache.hadoop.yarn.webapp.view.InfoBlock), Hamlet (org.apache.hadoop.yarn.webapp.hamlet.Hamlet), Date (java.util.Date), ConfEntryInfo (org.apache.hadoop.mapreduce.v2.app.webapp.dao.ConfEntryInfo), AMInfo (org.apache.hadoop.mapreduce.v2.api.records.AMInfo), DIV (org.apache.hadoop.yarn.webapp.hamlet.Hamlet.DIV), UnparsedJob (org.apache.hadoop.mapreduce.v2.hs.UnparsedJob), JobInfo (org.apache.hadoop.mapreduce.v2.hs.webapp.dao.JobInfo), Job (org.apache.hadoop.mapreduce.v2.app.job.Job), JobId (org.apache.hadoop.mapreduce.v2.api.records.JobId), AMAttemptInfo (org.apache.hadoop.mapreduce.v2.hs.webapp.dao.AMAttemptInfo)
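
Away from the Hamlet markup, the dao JobInfo is just a value object over the Job; a small sketch that uses only getters render() already calls, given a Job j from the history context (a sketch, not part of HsJobBlock):

// Sketch: the dao JobInfo read as a plain value object.
JobInfo job = new JobInfo(j);
System.out.printf("%s [%s] maps %d/%d, reduces %d/%d%n",
    job.getName(), job.getState(),
    job.getMapsCompleted(), job.getMapsTotal(),
    job.getReducesCompleted(), job.getReducesTotal());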

Example 5 with JobInfo

Use of org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo in project hadoop by apache.

The class JobHistoryFileReplayMapperV1, method map:

public void map(IntWritable key, IntWritable val, Context context) throws IOException {
    // collect the apps it needs to process
    TimelineClient tlc = new TimelineClientImpl();
    TimelineEntityConverterV1 converter = new TimelineEntityConverterV1();
    JobHistoryFileReplayHelper helper = new JobHistoryFileReplayHelper(context);
    int replayMode = helper.getReplayMode();
    Collection<JobFiles> jobs = helper.getJobFiles();
    JobHistoryFileParser parser = helper.getParser();
    if (jobs.isEmpty()) {
        LOG.info(context.getTaskAttemptID().getTaskID() + " will process no jobs");
    } else {
        LOG.info(context.getTaskAttemptID().getTaskID() + " will process " + jobs.size() + " jobs");
    }
    for (JobFiles job : jobs) {
        // process each job
        String jobIdStr = job.getJobId();
        LOG.info("processing " + jobIdStr + "...");
        JobId jobId = TypeConverter.toYarn(JobID.forName(jobIdStr));
        ApplicationId appId = jobId.getAppId();
        try {
            // parse the job info and configuration
            Path historyFilePath = job.getJobHistoryFilePath();
            Path confFilePath = job.getJobConfFilePath();
            if ((historyFilePath == null) || (confFilePath == null)) {
                continue;
            }
            JobInfo jobInfo = parser.parseHistoryFile(historyFilePath);
            Configuration jobConf = parser.parseConfiguration(confFilePath);
            LOG.info("parsed the job history file and the configuration file for job " + jobIdStr);
            // create entities from job history and write them
            long totalTime = 0;
            Set<TimelineEntity> entitySet = converter.createTimelineEntities(jobInfo, jobConf);
            LOG.info("converted them into timeline entities for job " + jobIdStr);
            // use the current user for this purpose
            UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
            long startWrite = System.nanoTime();
            try {
                switch(replayMode) {
                    case JobHistoryFileReplayHelper.WRITE_ALL_AT_ONCE:
                        writeAllEntities(tlc, entitySet, ugi);
                        break;
                    case JobHistoryFileReplayHelper.WRITE_PER_ENTITY:
                        writePerEntity(tlc, entitySet, ugi);
                        break;
                    default:
                        break;
                }
            } catch (Exception e) {
                context.getCounter(PerfCounters.TIMELINE_SERVICE_WRITE_FAILURES).increment(1);
                LOG.error("writing to the timeline service failed", e);
            }
            long endWrite = System.nanoTime();
            totalTime += TimeUnit.NANOSECONDS.toMillis(endWrite - startWrite);
            int numEntities = entitySet.size();
            LOG.info("wrote " + numEntities + " entities in " + totalTime + " ms");
            context.getCounter(PerfCounters.TIMELINE_SERVICE_WRITE_TIME).increment(totalTime);
            context.getCounter(PerfCounters.TIMELINE_SERVICE_WRITE_COUNTER).increment(numEntities);
        } finally {
            // move it along
            context.progress();
        }
    }
}
Also used: Path (org.apache.hadoop.fs.Path), Configuration (org.apache.hadoop.conf.Configuration), JobFiles (org.apache.hadoop.mapreduce.JobHistoryFileReplayHelper.JobFiles), TimelineEntity (org.apache.hadoop.yarn.api.records.timeline.TimelineEntity), IOException (java.io.IOException), YarnException (org.apache.hadoop.yarn.exceptions.YarnException), TimelineClient (org.apache.hadoop.yarn.client.api.TimelineClient), JobInfo (org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo), TimelineClientImpl (org.apache.hadoop.yarn.client.api.impl.TimelineClientImpl), ApplicationId (org.apache.hadoop.yarn.api.records.ApplicationId), JobId (org.apache.hadoop.mapreduce.v2.api.records.JobId), UserGroupInformation (org.apache.hadoop.security.UserGroupInformation)
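
The id plumbing at the top of the loop is reusable on its own; a minimal sketch of mapping a classic job-id string to the YARN types used above, with the same TypeConverter the mapper imports (the id literal is hypothetical):

// Sketch: classic "job_..." string -> YARN JobId and ApplicationId.
org.apache.hadoop.mapreduce.JobID oldJobId =
    org.apache.hadoop.mapreduce.JobID.forName("job_1465123456789_0001");
JobId jobId = TypeConverter.toYarn(oldJobId);
ApplicationId appId = jobId.getAppId();
System.out.println(oldJobId + " -> application " + appId);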

Aggregations

Job (org.apache.hadoop.mapreduce.v2.app.job.Job): 15
JobId (org.apache.hadoop.mapreduce.v2.api.records.JobId): 12
JobInfo (org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo): 8
Test (org.junit.Test): 8
IOException (java.io.IOException): 7
Configuration (org.apache.hadoop.conf.Configuration): 7
Path (org.apache.hadoop.fs.Path): 7
JobHistoryParser (org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser): 6
HistoryFileInfo (org.apache.hadoop.mapreduce.v2.hs.HistoryFileManager.HistoryFileInfo): 6
Date (java.util.Date): 5
FSDataInputStream (org.apache.hadoop.fs.FSDataInputStream): 5
FileContext (org.apache.hadoop.fs.FileContext): 4
TaskInfo (org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskInfo): 4
JobReport (org.apache.hadoop.mapreduce.v2.api.records.JobReport): 4
MRApp (org.apache.hadoop.mapreduce.v2.app.MRApp): 4
JobInfo (org.apache.hadoop.mapreduce.v2.app.webapp.dao.JobInfo): 4
HashMap (java.util.HashMap): 3
GET (javax.ws.rs.GET): 3
Path (javax.ws.rs.Path): 3
Produces (javax.ws.rs.Produces): 3