Search in sources :

Example 1 with JobsInfo

Use of org.apache.hadoop.mapreduce.v2.app.webapp.dao.JobsInfo in the Apache Hadoop project (note: this example uses the application-master DAO variant, not the history-server one).

Source: the getJobs method of the AMWebServices class.

@GET
@Path("/jobs")
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8, MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 })
public JobsInfo getJobs(@Context HttpServletRequest hsr) {
    init();
    JobsInfo result = new JobsInfo();
    for (Job partialJob : appCtx.getAllJobs().values()) {
        // getAllJobs() returns partial job views; look up the full job
        // before building the DAO. Skip jobs that have disappeared in
        // between the two calls.
        Job detailedJob = appCtx.getJob(partialJob.getID());
        if (detailedJob != null) {
            result.add(new JobInfo(detailedJob, hasAccess(detailedJob, hsr)));
        }
    }
    return result;
}
Also used : JobInfo(org.apache.hadoop.mapreduce.v2.app.webapp.dao.JobInfo) JobsInfo(org.apache.hadoop.mapreduce.v2.app.webapp.dao.JobsInfo) Job(org.apache.hadoop.mapreduce.v2.app.job.Job) Path(javax.ws.rs.Path) Produces(javax.ws.rs.Produces) GET(javax.ws.rs.GET)

Example 2 with JobsInfo

use of org.apache.hadoop.mapreduce.v2.hs.webapp.dao.JobsInfo in project hadoop by apache.

Source: the testJobHistoryMethods method of the TestJobHistoryParsing class.

/**
 * Smoke test for several JobHistory accessor methods: runs an MRApp to
 * completion so history events are written, then verifies getAllJobs,
 * getPartialJobs, and the application-identity getters, and asserts that
 * the intentionally unimplemented getters return null.
 */
@Test(timeout = 20000)
public void testJobHistoryMethods() throws Exception {
    LOG.info("STARTING testJobHistoryMethods");
    try {
        Configuration configuration = new Configuration();
        // Plug in a stub rack resolver so the test has no dependency on
        // real DNS/topology resolution.
        configuration.setClass(NET_TOPOLOGY_NODE_SWITCH_MAPPING_IMPL_KEY, MyResolver.class, DNSToSwitchMapping.class);
        RackResolver.init(configuration);
        // Presumably 1 map / 1 reduce per the MRApp constructor convention — TODO confirm.
        MRApp app = new MRAppWithHistory(1, 1, true, this.getClass().getName(), true);
        app.submit(configuration);
        Job job = app.getContext().getAllJobs().values().iterator().next();
        JobId jobId = job.getID();
        LOG.info("JOBID is " + TypeConverter.fromYarn(jobId).toString());
        app.waitForState(job, JobState.SUCCEEDED);
        // make sure job history events are handled; the app must be fully
        // stopped before JobHistory can see the flushed history files
        app.waitForState(Service.STATE.STOPPED);
        JobHistory jobHistory = new JobHistory();
        jobHistory.init(configuration);
        // Method getAllJobs
        Assert.assertEquals(1, jobHistory.getAllJobs().size());
        // and with ApplicationId
        Assert.assertEquals(1, jobHistory.getAllJobs(app.getAppID()).size());
        // getPartialJobs with a time window wide enough to include the
        // just-finished job (0 .. now+1 for both start and finish times)
        JobsInfo jobsinfo = jobHistory.getPartialJobs(0L, 10L, null, "default", 0L, System.currentTimeMillis() + 1, 0L, System.currentTimeMillis() + 1, JobState.SUCCEEDED);
        Assert.assertEquals(1, jobsinfo.getJobs().size());
        Assert.assertNotNull(jobHistory.getApplicationAttemptId());
        // test Application Id
        Assert.assertEquals("application_0_0000", jobHistory.getApplicationID().toString());
        Assert.assertEquals("Job History Server", jobHistory.getApplicationName());
        // method does not work (documented as returning null)
        Assert.assertNull(jobHistory.getEventHandler());
        // method does not work
        Assert.assertNull(jobHistory.getClock());
        // method does not work
        Assert.assertNull(jobHistory.getClusterInfo());
    } finally {
        LOG.info("FINISHED testJobHistoryMethods");
    }
}
Also used : MRAppWithHistory(org.apache.hadoop.mapreduce.v2.hs.TestJobHistoryEvents.MRAppWithHistory) Configuration(org.apache.hadoop.conf.Configuration) JobsInfo(org.apache.hadoop.mapreduce.v2.hs.webapp.dao.JobsInfo) Job(org.apache.hadoop.mapreduce.v2.app.job.Job) JobId(org.apache.hadoop.mapreduce.v2.api.records.JobId) MRApp(org.apache.hadoop.mapreduce.v2.app.MRApp) Test(org.junit.Test)

Example 3 with JobsInfo

use of org.apache.hadoop.mapreduce.v2.hs.webapp.dao.JobsInfo in project hadoop by apache.

Source: the getJobs method of the HsWebServices class.

@GET
@Path("/mapreduce/jobs")
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8, MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 })
public JobsInfo getJobs(@QueryParam("user") String userQuery, @QueryParam("limit") String count, @QueryParam("state") String stateQuery, @QueryParam("queue") String queueQuery, @QueryParam("startedTimeBegin") String startedBegin, @QueryParam("startedTimeEnd") String startedEnd, @QueryParam("finishedTimeBegin") String finishBegin, @QueryParam("finishedTimeEnd") String finishEnd) {
    init();
    // "limit" must be a strictly positive long when supplied.
    Long countParam = null;
    if (count != null && !count.isEmpty()) {
        try {
            countParam = Long.parseLong(count);
        } catch (NumberFormatException e) {
            // Message prefixed for consistency with the other numeric params.
            throw new BadRequestException("Invalid number format: " + e.getMessage());
        }
        if (countParam <= 0) {
            // Fixed typo: "greater then 0" -> "greater than 0".
            throw new BadRequestException("limit value must be greater than 0");
        }
    }
    // The four time-window parameters share identical parse/validate logic.
    Long sBegin = parseNonNegativeLong(startedBegin, "startedTimeBegin");
    Long sEnd = parseNonNegativeLong(startedEnd, "startedTimeEnd");
    if (sBegin != null && sEnd != null && sBegin > sEnd) {
        throw new BadRequestException("startedTimeEnd must be greater than startTimeBegin");
    }
    Long fBegin = parseNonNegativeLong(finishBegin, "finishedTimeBegin");
    Long fEnd = parseNonNegativeLong(finishEnd, "finishedTimeEnd");
    if (fBegin != null && fEnd != null && fBegin > fEnd) {
        throw new BadRequestException("finishedTimeEnd must be greater than finishedTimeBegin");
    }
    JobState jobState = null;
    if (stateQuery != null) {
        try {
            jobState = JobState.valueOf(stateQuery);
        } catch (IllegalArgumentException e) {
            // A bad enum name previously escaped as an unhandled
            // IllegalArgumentException (HTTP 500); report it as a 400 instead.
            throw new BadRequestException("Invalid job state: " + stateQuery);
        }
    }
    return ctx.getPartialJobs(0L, countParam, userQuery, queueQuery, sBegin, sEnd, fBegin, fEnd, jobState);
}

/**
 * Parses an optional non-negative long query parameter.
 *
 * @param raw the raw query-string value; may be null or empty
 * @param paramName the parameter name, used in error messages
 * @return the parsed value, or null when the parameter was absent/empty
 * @throws BadRequestException if the value is not a valid long or is negative
 */
private static Long parseNonNegativeLong(String raw, String paramName) {
    if (raw == null || raw.isEmpty()) {
        return null;
    }
    final long value;
    try {
        value = Long.parseLong(raw);
    } catch (NumberFormatException e) {
        throw new BadRequestException("Invalid number format: " + e.getMessage());
    }
    if (value < 0) {
        // Message text matches the original per-parameter messages exactly.
        throw new BadRequestException(paramName + " must be greater than 0");
    }
    return value;
}
Also used : BadRequestException(org.apache.hadoop.yarn.webapp.BadRequestException) JobState(org.apache.hadoop.mapreduce.v2.api.records.JobState) Path(javax.ws.rs.Path) Produces(javax.ws.rs.Produces) GET(javax.ws.rs.GET)

Example 4 with JobsInfo

use of org.apache.hadoop.mapreduce.v2.hs.webapp.dao.JobsInfo in project hadoop by apache.

Source: the getPartialJobs method of the CachedHistoryStorage class.

/**
 * Filters and paginates a collection of jobs.
 *
 * @param jobs the jobs to filter
 * @param offset zero-based index of the first matching job to include;
 *        null or negative is treated as 0
 * @param count maximum number of jobs to return; null means unlimited
 * @param user if non-empty, only jobs with this exact user name
 * @param queue if non-empty, only jobs with this exact queue name
 * @param sBegin/sEnd inclusive start-time window; null bounds are open
 * @param fBegin/fEnd inclusive finish-time window; null bounds are open
 * @param jobState if non-null, only jobs currently in this state
 * @return the matching page of jobs (possibly empty, never null)
 */
public static JobsInfo getPartialJobs(Collection<Job> jobs, Long offset, Long count, String user, String queue, Long sBegin, Long sEnd, Long fBegin, Long fEnd, JobState jobState) {
    JobsInfo allJobs = new JobsInfo();
    // Normalize null/negative bounds to defaults (idiom fix: 0L not 0l,
    // braces on all ifs).
    if (sBegin == null || sBegin < 0) {
        sBegin = 0L;
    }
    if (sEnd == null) {
        // NOTE(review): a caller-supplied negative sEnd/fEnd is NOT clamped
        // (unlike sBegin/fBegin) and would match nothing — confirm intended.
        sEnd = Long.MAX_VALUE;
    }
    if (fBegin == null || fBegin < 0) {
        fBegin = 0L;
    }
    if (fEnd == null) {
        fEnd = Long.MAX_VALUE;
    }
    if (offset == null || offset < 0) {
        offset = 0L;
    }
    if (count == null) {
        count = Long.MAX_VALUE;
    }
    if (offset > jobs.size()) {
        return allJobs;
    }
    // 'at' counts jobs that pass the filters; 'end' is the last index
    // (inclusive) of the requested page.
    long at = 0;
    long end = offset + count - 1;
    if (end < 0) {
        // due to overflow
        end = Long.MAX_VALUE;
    }
    for (Job job : jobs) {
        if (at > end) {
            break;
        }
        // can't really validate queue is a valid one since queues could change
        if (queue != null && !queue.isEmpty()) {
            if (!job.getQueueName().equals(queue)) {
                continue;
            }
        }
        if (user != null && !user.isEmpty()) {
            if (!job.getUserName().equals(user)) {
                continue;
            }
        }
        JobReport report = job.getReport();
        if (report.getStartTime() < sBegin || report.getStartTime() > sEnd) {
            continue;
        }
        if (report.getFinishTime() < fBegin || report.getFinishTime() > fEnd) {
            continue;
        }
        if (jobState != null && jobState != report.getJobState()) {
            continue;
        }
        // Job passed all filters; include it only once past the offset.
        at++;
        if ((at - 1) < offset) {
            continue;
        }
        JobInfo jobInfo = new JobInfo(job);
        allJobs.add(jobInfo);
    }
    return allJobs;
}
Also used : JobInfo(org.apache.hadoop.mapreduce.v2.hs.webapp.dao.JobInfo) JobsInfo(org.apache.hadoop.mapreduce.v2.hs.webapp.dao.JobsInfo) Job(org.apache.hadoop.mapreduce.v2.app.job.Job) JobReport(org.apache.hadoop.mapreduce.v2.api.records.JobReport)

Example 5 with JobsInfo

use of org.apache.hadoop.mapreduce.v2.hs.webapp.dao.JobsInfo in project hadoop by apache.

Source: the testRefreshLoadedJobCacheUnSupportedOperation method of the TestJobHistory class.

/**
 * Verifies that refreshLoadedJobCache() throws UnsupportedOperationException
 * when the configured HistoryStorage is not a CachedHistoryStorage.
 */
@Test
public void testRefreshLoadedJobCacheUnSupportedOperation() {
    jobHistory = spy(new JobHistory());
    // Minimal HistoryStorage stub: implements the interface but provides no
    // caching, so a cache refresh cannot be supported.
    HistoryStorage storage = new HistoryStorage() {

        @Override
        public void setHistoryFileManager(HistoryFileManager hsManager) {
            // intentionally a no-op for this stub
        }

        @Override
        public JobsInfo getPartialJobs(Long offset, Long count, String user, String queue, Long sBegin, Long sEnd, Long fBegin, Long fEnd, JobState jobState) {
            return null;
        }

        @Override
        public Job getFullJob(JobId jobId) {
            return null;
        }

        @Override
        public Map<JobId, Job> getAllPartialJobs() {
            return null;
        }
    };
    doReturn(storage).when(jobHistory).createHistoryStorage();
    jobHistory.init(new Configuration());
    jobHistory.start();
    try {
        Throwable thrown = null;
        try {
            jobHistory.refreshLoadedJobCache();
        } catch (Exception e) {
            thrown = e;
        }
        // Include a message so a silent non-throw produces a useful failure
        // instead of a bare AssertionError.
        assertTrue("refreshLoadedJobCache() should have thrown UnsupportedOperationException but got: " + thrown, thrown instanceof UnsupportedOperationException);
    } finally {
        // Stop the started service; the original test leaked it.
        jobHistory.stop();
    }
}
Also used : Configuration(org.apache.hadoop.conf.Configuration) JobState(org.apache.hadoop.mapreduce.v2.api.records.JobState) Job(org.apache.hadoop.mapreduce.v2.app.job.Job) JobId(org.apache.hadoop.mapreduce.v2.api.records.JobId) IOException(java.io.IOException) YarnRuntimeException(org.apache.hadoop.yarn.exceptions.YarnRuntimeException) Test(org.junit.Test)

Aggregations

Job (org.apache.hadoop.mapreduce.v2.app.job.Job)4 GET (javax.ws.rs.GET)2 Path (javax.ws.rs.Path)2 Produces (javax.ws.rs.Produces)2 Configuration (org.apache.hadoop.conf.Configuration)2 JobId (org.apache.hadoop.mapreduce.v2.api.records.JobId)2 JobState (org.apache.hadoop.mapreduce.v2.api.records.JobState)2 JobsInfo (org.apache.hadoop.mapreduce.v2.hs.webapp.dao.JobsInfo)2 Test (org.junit.Test)2 IOException (java.io.IOException)1 JobReport (org.apache.hadoop.mapreduce.v2.api.records.JobReport)1 MRApp (org.apache.hadoop.mapreduce.v2.app.MRApp)1 JobInfo (org.apache.hadoop.mapreduce.v2.app.webapp.dao.JobInfo)1 JobsInfo (org.apache.hadoop.mapreduce.v2.app.webapp.dao.JobsInfo)1 MRAppWithHistory (org.apache.hadoop.mapreduce.v2.hs.TestJobHistoryEvents.MRAppWithHistory)1 JobInfo (org.apache.hadoop.mapreduce.v2.hs.webapp.dao.JobInfo)1 YarnRuntimeException (org.apache.hadoop.yarn.exceptions.YarnRuntimeException)1 BadRequestException (org.apache.hadoop.yarn.webapp.BadRequestException)1