
Example 11 with JobReport

use of org.apache.hadoop.mapreduce.v2.api.records.JobReport in project hadoop by apache.

the class ClientServiceDelegate method getLogFilePath.

public LogParams getLogFilePath(JobID oldJobID, TaskAttemptID oldTaskAttemptID) throws IOException {
    org.apache.hadoop.mapreduce.v2.api.records.JobId jobId = TypeConverter.toYarn(oldJobID);
    GetJobReportRequest request = recordFactory.newRecordInstance(GetJobReportRequest.class);
    request.setJobId(jobId);
    JobReport report = ((GetJobReportResponse) invoke("getJobReport", GetJobReportRequest.class, request)).getJobReport();
    if (EnumSet.of(JobState.SUCCEEDED, JobState.FAILED, JobState.KILLED, JobState.ERROR).contains(report.getJobState())) {
        if (oldTaskAttemptID != null) {
            GetTaskAttemptReportRequest taRequest = recordFactory.newRecordInstance(GetTaskAttemptReportRequest.class);
            taRequest.setTaskAttemptId(TypeConverter.toYarn(oldTaskAttemptID));
            TaskAttemptReport taReport = ((GetTaskAttemptReportResponse) invoke("getTaskAttemptReport", GetTaskAttemptReportRequest.class, taRequest)).getTaskAttemptReport();
            if (taReport.getContainerId() == null || taReport.getNodeManagerHost() == null) {
                throw new IOException("Unable to get log information for task: " + oldTaskAttemptID);
            }
            return new LogParams(taReport.getContainerId().toString(), taReport.getContainerId().getApplicationAttemptId().getApplicationId().toString(), NodeId.newInstance(taReport.getNodeManagerHost(), taReport.getNodeManagerPort()).toString(), report.getUser());
        } else {
            if (report.getAMInfos() == null || report.getAMInfos().size() == 0) {
                throw new IOException("Unable to get log information for job: " + oldJobID);
            }
            AMInfo amInfo = report.getAMInfos().get(report.getAMInfos().size() - 1);
            return new LogParams(amInfo.getContainerId().toString(), amInfo.getAppAttemptId().getApplicationId().toString(), NodeId.newInstance(amInfo.getNodeManagerHost(), amInfo.getNodeManagerPort()).toString(), report.getUser());
        }
    } else {
        throw new IOException("Cannot get log path for a in-progress job");
    }
}
Also used : GetTaskAttemptReportRequest(org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskAttemptReportRequest) IOException(java.io.IOException) GetJobReportRequest(org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetJobReportRequest) JobReport(org.apache.hadoop.mapreduce.v2.api.records.JobReport) GetJobReportResponse(org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetJobReportResponse) LogParams(org.apache.hadoop.mapreduce.v2.LogParams) AMInfo(org.apache.hadoop.mapreduce.v2.api.records.AMInfo) TaskAttemptReport(org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptReport) GetTaskAttemptReportResponse(org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskAttemptReportResponse)
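A hedged usage sketch, not from the Hadoop sources: how a caller might consume the LogParams returned by getLogFilePath above. The helper class and method names are hypothetical, the parameter types are assumed to be the org.apache.hadoop.mapreduce IDs, and the LogParams accessors are assumed to mirror the constructor arguments used in the method.

import java.io.IOException;
import org.apache.hadoop.mapred.ClientServiceDelegate;
import org.apache.hadoop.mapreduce.JobID;
import org.apache.hadoop.mapreduce.TaskAttemptID;
import org.apache.hadoop.mapreduce.v2.LogParams;

public class LogParamsUsageSketch {

    // Hypothetical helper: report where the logs for a finished task attempt live.
    // ClientServiceDelegate is an internal MR client class; access to an instance
    // is assumed here rather than shown.
    static void printLogLocation(ClientServiceDelegate delegate, JobID jobId,
            TaskAttemptID attemptId) throws IOException {
        LogParams params = delegate.getLogFilePath(jobId, attemptId);
        System.out.println("container   : " + params.getContainerId());
        System.out.println("application : " + params.getApplicationId());
        System.out.println("node        : " + params.getNodeId());
        System.out.println("owner       : " + params.getOwner());
    }
}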

Example 12 with JobReport

use of org.apache.hadoop.mapreduce.v2.api.records.JobReport in project hadoop by apache.

the class ClientServiceDelegate method getJobStatus.

public JobStatus getJobStatus(JobID oldJobID) throws IOException {
    org.apache.hadoop.mapreduce.v2.api.records.JobId jobId = TypeConverter.toYarn(oldJobID);
    GetJobReportRequest request = recordFactory.newRecordInstance(GetJobReportRequest.class);
    request.setJobId(jobId);
    JobReport report = ((GetJobReportResponse) invoke("getJobReport", GetJobReportRequest.class, request)).getJobReport();
    JobStatus jobStatus = null;
    if (report != null) {
        if (StringUtils.isEmpty(report.getJobFile())) {
            String jobFile = MRApps.getJobFile(conf, report.getUser(), oldJobID);
            report.setJobFile(jobFile);
        }
        String historyTrackingUrl = report.getTrackingUrl();
        String url = StringUtils.isNotEmpty(historyTrackingUrl) ? historyTrackingUrl : trackingUrl;
        jobStatus = TypeConverter.fromYarn(report, url);
    }
    return jobStatus;
}
Also used : JobStatus(org.apache.hadoop.mapreduce.JobStatus) GetJobReportRequest(org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetJobReportRequest) JobReport(org.apache.hadoop.mapreduce.v2.api.records.JobReport) GetJobReportResponse(org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetJobReportResponse)
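A hedged caller-side sketch, assuming a ClientServiceDelegate instance is already available (it is internal MR client API): poll getJobStatus until the job completes. The helper name waitForCompletion is hypothetical; a null status only means no report was available yet, so the caller retries.

import java.io.IOException;
import org.apache.hadoop.mapred.ClientServiceDelegate;
import org.apache.hadoop.mapreduce.JobID;
import org.apache.hadoop.mapreduce.JobStatus;

public class JobStatusPollingSketch {

    // Hypothetical helper: retry once a second until a completed JobStatus is returned.
    static JobStatus waitForCompletion(ClientServiceDelegate delegate, JobID jobId)
            throws IOException, InterruptedException {
        JobStatus status = delegate.getJobStatus(jobId);
        while (status == null || !status.isJobComplete()) {
            Thread.sleep(1000);
            status = delegate.getJobStatus(jobId);
        }
        return status;
    }
}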

Example 13 with JobReport

use of org.apache.hadoop.mapreduce.v2.api.records.JobReport in project hadoop by apache.

the class TestJobInfo method testGetFormattedStartTimeStr.

@Test
public void testGetFormattedStartTimeStr() {
    JobReport jobReport = mock(JobReport.class);
    when(jobReport.getStartTime()).thenReturn(-1L);
    Job job = mock(Job.class);
    when(job.getReport()).thenReturn(jobReport);
    when(job.getName()).thenReturn("TestJobInfo");
    when(job.getState()).thenReturn(JobState.SUCCEEDED);
    JobId jobId = MRBuilderUtils.newJobId(1L, 1, 1);
    when(job.getID()).thenReturn(jobId);
    DateFormat dateFormat = new SimpleDateFormat();
    JobInfo jobInfo = new JobInfo(job);
    Assert.assertEquals(JobInfo.NA, jobInfo.getFormattedStartTimeStr(dateFormat));
    Date date = new Date();
    when(jobReport.getStartTime()).thenReturn(date.getTime());
    jobInfo = new JobInfo(job);
    Assert.assertEquals(dateFormat.format(date), jobInfo.getFormattedStartTimeStr(dateFormat));
}
Also used : SimpleDateFormat(java.text.SimpleDateFormat) DateFormat(java.text.DateFormat) CompletedJob(org.apache.hadoop.mapreduce.v2.hs.CompletedJob) Job(org.apache.hadoop.mapreduce.v2.app.job.Job) SimpleDateFormat(java.text.SimpleDateFormat) JobId(org.apache.hadoop.mapreduce.v2.api.records.JobId) Date(java.util.Date) JobReport(org.apache.hadoop.mapreduce.v2.api.records.JobReport) Test(org.junit.Test)
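A minimal sketch of the behavior this test exercises, not the actual JobInfo code: format the report's start time, falling back to a placeholder when the start time is unset (-1). The class, method, and NA_PLACEHOLDER constant are hypothetical; the real class exposes its placeholder as JobInfo.NA.

import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Date;

public class StartTimeFormattingSketch {

    // Assumption: mirrors the value of JobInfo.NA used in the assertion above.
    static final String NA_PLACEHOLDER = "N/A";

    static String formatStartTime(long startTime, DateFormat dateFormat) {
        // A negative start time means the job has not started or the value is unknown.
        if (startTime < 0) {
            return NA_PLACEHOLDER;
        }
        return dateFormat.format(new Date(startTime));
    }

    public static void main(String[] args) {
        DateFormat df = new SimpleDateFormat();
        System.out.println(formatStartTime(-1L, df));                        // placeholder
        System.out.println(formatStartTime(System.currentTimeMillis(), df)); // formatted date
    }
}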

Example 14 with JobReport

use of org.apache.hadoop.mapreduce.v2.api.records.JobReport in project hadoop by apache.

the class TestMRJobsWithHistoryService method testJobHistoryData.

@Test(timeout = 90000)
public void testJobHistoryData() throws IOException, InterruptedException, AvroRemoteException, ClassNotFoundException {
    if (!(new File(MiniMRYarnCluster.APPJAR)).exists()) {
        LOG.info("MRAppJar " + MiniMRYarnCluster.APPJAR + " not found. Not running test.");
        return;
    }
    SleepJob sleepJob = new SleepJob();
    sleepJob.setConf(mrCluster.getConfig());
    // Job with 3 maps and 2 reduces
    Job job = sleepJob.createJob(3, 2, 1000, 1, 500, 1);
    job.setJarByClass(SleepJob.class);
    // The AppMaster jar itself.
    job.addFileToClassPath(APP_JAR);
    job.waitForCompletion(true);
    Counters counterMR = job.getCounters();
    JobId jobId = TypeConverter.toYarn(job.getJobID());
    ApplicationId appID = jobId.getAppId();
    int pollElapsed = 0;
    while (true) {
        Thread.sleep(1000);
        pollElapsed += 1000;
        if (TERMINAL_RM_APP_STATES.contains(mrCluster.getResourceManager().getRMContext().getRMApps().get(appID).getState())) {
            break;
        }
        if (pollElapsed >= 60000) {
            LOG.warn("application did not reach terminal state within 60 seconds");
            break;
        }
    }
    Assert.assertEquals(RMAppState.FINISHED, mrCluster.getResourceManager().getRMContext().getRMApps().get(appID).getState());
    Counters counterHS = job.getCounters();
    //TODO the Assert below worked. need to check
    //Should we compare each field or convert to V2 counter and compare
    LOG.info("CounterHS " + counterHS);
    LOG.info("CounterMR " + counterMR);
    Assert.assertEquals(counterHS, counterMR);
    HSClientProtocol historyClient = instantiateHistoryProxy();
    GetJobReportRequest gjReq = Records.newRecord(GetJobReportRequest.class);
    gjReq.setJobId(jobId);
    JobReport jobReport = historyClient.getJobReport(gjReq).getJobReport();
    verifyJobReport(jobReport, jobId);
}
Also used : HSClientProtocol(org.apache.hadoop.mapreduce.v2.api.HSClientProtocol) SleepJob(org.apache.hadoop.mapreduce.SleepJob) Counters(org.apache.hadoop.mapreduce.Counters) SleepJob(org.apache.hadoop.mapreduce.SleepJob) Job(org.apache.hadoop.mapreduce.Job) ApplicationId(org.apache.hadoop.yarn.api.records.ApplicationId) File(java.io.File) JobId(org.apache.hadoop.mapreduce.v2.api.records.JobId) GetJobReportRequest(org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetJobReportRequest) JobReport(org.apache.hadoop.mapreduce.v2.api.records.JobReport) Test(org.junit.Test)
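verifyJobReport is a helper defined elsewhere in the test class; below is a hedged sketch of the kind of checks it could perform on the JobReport fetched from the history server. The method name assertLooksComplete is hypothetical, and the assertions only use getters that already appear in the examples above.

import java.util.List;
import org.apache.hadoop.mapreduce.v2.api.records.AMInfo;
import org.apache.hadoop.mapreduce.v2.api.records.JobId;
import org.apache.hadoop.mapreduce.v2.api.records.JobReport;
import org.apache.hadoop.mapreduce.v2.api.records.JobState;
import org.junit.Assert;

public class JobReportAssertionSketch {

    // Hypothetical checks, not the real verifyJobReport implementation.
    static void assertLooksComplete(JobReport report, JobId expectedJobId) {
        Assert.assertNotNull(report);
        Assert.assertEquals(expectedJobId, report.getJobId());
        Assert.assertEquals(JobState.SUCCEEDED, report.getJobState());
        // A finished job should record at least one application master attempt.
        List<AMInfo> amInfos = report.getAMInfos();
        Assert.assertNotNull(amInfos);
        Assert.assertFalse(amInfos.isEmpty());
        AMInfo lastAm = amInfos.get(amInfos.size() - 1);
        Assert.assertNotNull(lastAm.getContainerId());
        Assert.assertNotNull(lastAm.getNodeManagerHost());
    }
}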

Example 15 with JobReport

use of org.apache.hadoop.mapreduce.v2.api.records.JobReport in project hadoop by apache.

the class TestJobHistoryEntities method testCopmletedJobReportWithZeroTasks.

@Test(timeout = 100000)
public void testCopmletedJobReportWithZeroTasks() throws Exception {
    HistoryFileInfo info = mock(HistoryFileInfo.class);
    when(info.getConfFile()).thenReturn(fullConfPath);
    when(info.getHistoryFile()).thenReturn(fullHistoryPathZeroReduces);
    completedJob = new CompletedJob(conf, jobId, fullHistoryPathZeroReduces, loadTasks, "user", info, jobAclsManager);
    JobReport jobReport = completedJob.getReport();
    // Make sure that the number of reduces (completed and total) is zero.
    assertEquals(0, completedJob.getTotalReduces());
    assertEquals(0, completedJob.getCompletedReduces());
    // Verify that the reduce progress is 1.0 (not NaN)
    assertEquals(1.0, jobReport.getReduceProgress(), 0.001);
    assertEquals(fullHistoryPathZeroReduces.toString(), jobReport.getHistoryFile());
}
Also used : HistoryFileInfo(org.apache.hadoop.mapreduce.v2.hs.HistoryFileManager.HistoryFileInfo) JobReport(org.apache.hadoop.mapreduce.v2.api.records.JobReport) Test(org.junit.Test)
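A minimal sketch of the invariant the test checks, not the actual CompletedJob code: with zero reduce tasks, the reduce side is treated as fully complete, because dividing completed by total reduces would otherwise produce 0/0 = NaN. Class and method names here are hypothetical.

public class ReduceProgressSketch {

    // Hypothetical helper: progress as completed/total, guarded against zero tasks.
    static float reduceProgress(int completedReduces, int totalReduces) {
        if (totalReduces == 0) {
            // No reduce work to do, so the reduce phase counts as done.
            return 1.0f;
        }
        return (float) completedReduces / totalReduces;
    }

    public static void main(String[] args) {
        System.out.println(reduceProgress(0, 0)); // 1.0, not NaN
        System.out.println(reduceProgress(1, 2)); // 0.5
    }
}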

Aggregations

JobReport (org.apache.hadoop.mapreduce.v2.api.records.JobReport): 26 usages
Job (org.apache.hadoop.mapreduce.v2.app.job.Job): 9 usages
Test (org.junit.Test): 9 usages
JobId (org.apache.hadoop.mapreduce.v2.api.records.JobId): 8 usages
GetJobReportResponse (org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetJobReportResponse): 5 usages
GetJobReportRequest (org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetJobReportRequest): 4 usages
AMInfo (org.apache.hadoop.mapreduce.v2.api.records.AMInfo): 4 usages
Task (org.apache.hadoop.mapreduce.v2.app.job.Task): 4 usages
TaskAttemptReport (org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptReport): 3 usages
TaskAttempt (org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt): 3 usages
CompletedJob (org.apache.hadoop.mapreduce.v2.hs.CompletedJob): 3 usages
IOException (java.io.IOException): 2 usages
Date (java.util.Date): 2 usages
Configuration (org.apache.hadoop.conf.Configuration): 2 usages
Counters (org.apache.hadoop.mapreduce.Counters): 2 usages
GetTaskAttemptReportRequest (org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskAttemptReportRequest): 2 usages
JobState (org.apache.hadoop.mapreduce.v2.api.records.JobState): 2 usages
TaskId (org.apache.hadoop.mapreduce.v2.api.records.TaskId): 2 usages
TaskReport (org.apache.hadoop.mapreduce.v2.api.records.TaskReport): 2 usages
ApplicationId (org.apache.hadoop.yarn.api.records.ApplicationId): 2 usages