Search in sources:

Example 6 with JobState

use of org.apache.hadoop.mapreduce.v2.api.records.JobState in project hadoop by apache.

The class HsWebServices, method getJobs.

@GET
@Path("/mapreduce/jobs")
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8, MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 })
public JobsInfo getJobs(@QueryParam("user") String userQuery, @QueryParam("limit") String count, @QueryParam("state") String stateQuery, @QueryParam("queue") String queueQuery, @QueryParam("startedTimeBegin") String startedBegin, @QueryParam("startedTimeEnd") String startedEnd, @QueryParam("finishedTimeBegin") String finishBegin, @QueryParam("finishedTimeEnd") String finishEnd) {
    Long countParam = null;
    init();
    if (count != null && !count.isEmpty()) {
        try {
            countParam = Long.parseLong(count);
        } catch (NumberFormatException e) {
            throw new BadRequestException(e.getMessage());
        }
        if (countParam <= 0) {
            throw new BadRequestException("limit value must be greater then 0");
        }
    }
    Long sBegin = null;
    if (startedBegin != null && !startedBegin.isEmpty()) {
        try {
            sBegin = Long.parseLong(startedBegin);
        } catch (NumberFormatException e) {
            throw new BadRequestException("Invalid number format: " + e.getMessage());
        }
        if (sBegin < 0) {
            throw new BadRequestException("startedTimeBegin must be greater than 0");
        }
    }
    Long sEnd = null;
    if (startedEnd != null && !startedEnd.isEmpty()) {
        try {
            sEnd = Long.parseLong(startedEnd);
        } catch (NumberFormatException e) {
            throw new BadRequestException("Invalid number format: " + e.getMessage());
        }
        if (sEnd < 0) {
            throw new BadRequestException("startedTimeEnd must be greater than 0");
        }
    }
    if (sBegin != null && sEnd != null && sBegin > sEnd) {
        throw new BadRequestException("startedTimeEnd must be greater than startTimeBegin");
    }
    Long fBegin = null;
    if (finishBegin != null && !finishBegin.isEmpty()) {
        try {
            fBegin = Long.parseLong(finishBegin);
        } catch (NumberFormatException e) {
            throw new BadRequestException("Invalid number format: " + e.getMessage());
        }
        if (fBegin < 0) {
            throw new BadRequestException("finishedTimeBegin must be greater than 0");
        }
    }
    Long fEnd = null;
    if (finishEnd != null && !finishEnd.isEmpty()) {
        try {
            fEnd = Long.parseLong(finishEnd);
        } catch (NumberFormatException e) {
            throw new BadRequestException("Invalid number format: " + e.getMessage());
        }
        if (fEnd < 0) {
            throw new BadRequestException("finishedTimeEnd must be greater than 0");
        }
    }
    if (fBegin != null && fEnd != null && fBegin > fEnd) {
        throw new BadRequestException("finishedTimeEnd must be greater than finishedTimeBegin");
    }
    JobState jobState = null;
    if (stateQuery != null) {
        jobState = JobState.valueOf(stateQuery);
    }
    return ctx.getPartialJobs(0l, countParam, userQuery, queueQuery, sBegin, sEnd, fBegin, fEnd, jobState);
}
Also used : BadRequestException(org.apache.hadoop.yarn.webapp.BadRequestException) JobState(org.apache.hadoop.mapreduce.v2.api.records.JobState) Path(javax.ws.rs.Path) Produces(javax.ws.rs.Produces) GET(javax.ws.rs.GET)

Example 7 with JobState

use of org.apache.hadoop.mapreduce.v2.api.records.JobState in project hadoop by apache.

The class MockJobs, method newJob.

/**
 * Builds a fully populated in-memory mock {@link Job} for tests.
 * <p>
 * The returned job is backed by the synthetic report, tasks, counters and
 * ACLs created by the helper methods below; most accessors simply return
 * those captured values. A few operations are deliberately stubbed out
 * (completion events return {@code null}, {@code getTasks(TaskType)}
 * throws, and the setters are no-ops).
 *
 * @param appID application the mock job belongs to
 * @param i job sequence number used to derive the {@link JobId}
 * @param n number of map tasks to generate
 * @param m number of reduce tasks to generate
 * @param confFile path returned by {@link Job#getConfFile()} and loaded by
 *        {@code loadConfFile()}
 * @param hasFailedTasks whether the generated tasks include failures
 * @return a mock {@link Job} suitable for history-server/webapp tests
 */
public static Job newJob(ApplicationId appID, int i, int n, int m, Path confFile, boolean hasFailedTasks) {
    final JobId id = newJobID(appID, i);
    final String name = newJobName();
    final JobReport report = newJobReport(id);
    final Map<TaskId, Task> tasks = newTasks(id, n, m, hasFailedTasks);
    final TaskCount taskCount = getTaskCount(tasks.values());
    final Counters counters = getCounters(tasks.values());
    final Path configFile = confFile;
    Map<JobACL, AccessControlList> tmpJobACLs = new HashMap<JobACL, AccessControlList>();
    final Configuration conf = new Configuration();
    // Grant view access to "testuser" and enable ACL enforcement so that
    // getJobACLs() returns realistic AccessControlList objects.
    conf.set(JobACL.VIEW_JOB.getAclName(), "testuser");
    conf.setBoolean(MRConfig.MR_ACLS_ENABLED, true);
    JobACLsManager aclsManager = new JobACLsManager(conf);
    tmpJobACLs = aclsManager.constructJobACLs(conf);
    final Map<JobACL, AccessControlList> jobACLs = tmpJobACLs;
    return new Job() {

        @Override
        public JobId getID() {
            return id;
        }

        @Override
        public String getName() {
            return name;
        }

        @Override
        public JobState getState() {
            // State is whatever the synthetic report was created with.
            return report.getJobState();
        }

        @Override
        public JobReport getReport() {
            return report;
        }

        @Override
        public float getProgress() {
            // Progress is not modeled by this mock.
            return 0;
        }

        @Override
        public Counters getAllCounters() {
            return counters;
        }

        @Override
        public Map<TaskId, Task> getTasks() {
            return tasks;
        }

        @Override
        public Task getTask(TaskId taskID) {
            return tasks.get(taskID);
        }

        @Override
        public int getTotalMaps() {
            return taskCount.maps;
        }

        @Override
        public int getTotalReduces() {
            return taskCount.reduces;
        }

        @Override
        public int getCompletedMaps() {
            return taskCount.completedMaps;
        }

        @Override
        public int getCompletedReduces() {
            return taskCount.completedReduces;
        }

        @Override
        public boolean isUber() {
            return false;
        }

        @Override
        public TaskAttemptCompletionEvent[] getTaskAttemptCompletionEvents(int fromEventId, int maxEvents) {
            // Completion events are not modeled by this mock.
            return null;
        }

        @Override
        public TaskCompletionEvent[] getMapAttemptCompletionEvents(int startIndex, int maxEvents) {
            // Completion events are not modeled by this mock.
            return null;
        }

        @Override
        public Map<TaskId, Task> getTasks(TaskType taskType) {
            throw new UnsupportedOperationException("Not supported yet.");
        }

        @Override
        public List<String> getDiagnostics() {
            return Collections.<String>emptyList();
        }

        @Override
        public boolean checkAccess(UserGroupInformation callerUGI, JobACL jobOperation) {
            // All callers are granted access in tests.
            return true;
        }

        @Override
        public String getUserName() {
            return "mock";
        }

        @Override
        public String getQueueName() {
            return "mockqueue";
        }

        @Override
        public Path getConfFile() {
            return configFile;
        }

        @Override
        public Map<JobACL, AccessControlList> getJobACLs() {
            return jobACLs;
        }

        @Override
        public List<AMInfo> getAMInfos() {
            // Two synthetic AM attempts, ids 1 and 2.
            List<AMInfo> amInfoList = new LinkedList<AMInfo>();
            amInfoList.add(createAMInfo(1));
            amInfoList.add(createAMInfo(2));
            return amInfoList;
        }

        @Override
        public Configuration loadConfFile() throws IOException {
            // Reads the job configuration from the confFile path supplied to
            // newJob(); no defaults are loaded (new Configuration(false)).
            FileContext fc = FileContext.getFileContext(configFile.toUri(), conf);
            Configuration jobConf = new Configuration(false);
            jobConf.addResource(fc.open(configFile), configFile.toString());
            return jobConf;
        }

        @Override
        public void setQueueName(String queueName) {
        // do nothing: the mock's queue name is fixed at "mockqueue"
        }

        @Override
        public void setJobPriority(Priority priority) {
        // do nothing: priority is not modeled by this mock
        }
    };
}
Also used : AccessControlList(org.apache.hadoop.security.authorize.AccessControlList) Task(org.apache.hadoop.mapreduce.v2.app.job.Task) TaskId(org.apache.hadoop.mapreduce.v2.api.records.TaskId) Configuration(org.apache.hadoop.conf.Configuration) HashMap(java.util.HashMap) TaskAttemptCompletionEvent(org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptCompletionEvent) JobReport(org.apache.hadoop.mapreduce.v2.api.records.JobReport) TaskCompletionEvent(org.apache.hadoop.mapred.TaskCompletionEvent) JobACLsManager(org.apache.hadoop.mapred.JobACLsManager) TaskType(org.apache.hadoop.mapreduce.v2.api.records.TaskType) Job(org.apache.hadoop.mapreduce.v2.app.job.Job) JobId(org.apache.hadoop.mapreduce.v2.api.records.JobId) UserGroupInformation(org.apache.hadoop.security.UserGroupInformation) Path(org.apache.hadoop.fs.Path) Priority(org.apache.hadoop.yarn.api.records.Priority) LinkedList(java.util.LinkedList) AMInfo(org.apache.hadoop.mapreduce.v2.api.records.AMInfo) Counters(org.apache.hadoop.mapreduce.Counters) JobACL(org.apache.hadoop.mapreduce.JobACL) FileContext(org.apache.hadoop.fs.FileContext)

Example 8 with JobState

use of org.apache.hadoop.mapreduce.v2.api.records.JobState in project hadoop by apache.

The class CachedHistoryStorage, method getPartialJobs.

/**
 * Filters a collection of jobs by queue, user, start/finish-time windows and
 * job state, then returns the page of matches selected by {@code offset} and
 * {@code count}.
 *
 * @param jobs jobs to filter, iterated in their natural order
 * @param offset zero-based index of the first matching job to return;
 *        {@code null} or negative is treated as 0
 * @param count maximum number of jobs to return; {@code null} means no limit
 * @param user if non-empty, only jobs with this exact user name match
 * @param queue if non-empty, only jobs with this exact queue name match
 * @param sBegin/sEnd inclusive start-time window; {@code null}/negative begin
 *        defaults to 0, {@code null} end defaults to {@link Long#MAX_VALUE}
 * @param fBegin/fEnd inclusive finish-time window, same defaulting rules
 * @param jobState if non-null, only jobs currently in this state match
 * @return the selected page of jobs (possibly empty, never {@code null})
 */
public static JobsInfo getPartialJobs(Collection<Job> jobs, Long offset, Long count, String user, String queue, Long sBegin, Long sEnd, Long fBegin, Long fEnd, JobState jobState) {
    JobsInfo allJobs = new JobsInfo();
    // Normalize unset or invalid bounds to their widest values.
    // (Uppercase 'L' suffix: lowercase 'l' is easily misread as the digit 1.)
    if (sBegin == null || sBegin < 0) {
        sBegin = 0L;
    }
    if (sEnd == null) {
        sEnd = Long.MAX_VALUE;
    }
    if (fBegin == null || fBegin < 0) {
        fBegin = 0L;
    }
    if (fEnd == null) {
        fEnd = Long.MAX_VALUE;
    }
    if (offset == null || offset < 0) {
        offset = 0L;
    }
    if (count == null) {
        count = Long.MAX_VALUE;
    }
    if (offset > jobs.size()) {
        return allJobs;
    }
    // 'at' counts jobs that passed all filters; the page is [offset, end].
    long at = 0;
    long end = offset + count - 1;
    if (end < 0) {
        // due to overflow
        end = Long.MAX_VALUE;
    }
    for (Job job : jobs) {
        if (at > end) {
            break;
        }
        // can't really validate queue is a valid one since queues could change
        if (queue != null && !queue.isEmpty()) {
            if (!job.getQueueName().equals(queue)) {
                continue;
            }
        }
        if (user != null && !user.isEmpty()) {
            if (!job.getUserName().equals(user)) {
                continue;
            }
        }
        JobReport report = job.getReport();
        if (report.getStartTime() < sBegin || report.getStartTime() > sEnd) {
            continue;
        }
        if (report.getFinishTime() < fBegin || report.getFinishTime() > fEnd) {
            continue;
        }
        if (jobState != null && jobState != report.getJobState()) {
            continue;
        }
        // Job matched all filters; skip it if it falls before the page start.
        at++;
        if ((at - 1) < offset) {
            continue;
        }
        JobInfo jobInfo = new JobInfo(job);
        allJobs.add(jobInfo);
    }
    return allJobs;
}
Also used : JobInfo(org.apache.hadoop.mapreduce.v2.hs.webapp.dao.JobInfo) JobsInfo(org.apache.hadoop.mapreduce.v2.hs.webapp.dao.JobsInfo) Job(org.apache.hadoop.mapreduce.v2.app.job.Job) JobReport(org.apache.hadoop.mapreduce.v2.api.records.JobReport)

Example 9 with JobState

use of org.apache.hadoop.mapreduce.v2.api.records.JobState in project hadoop by apache.

The class TestJobHistory, method testRefreshLoadedJobCacheUnSupportedOperation.

@Test
public void testRefreshLoadedJobCacheUnSupportedOperation() {
    jobHistory = spy(new JobHistory());
    // A HistoryStorage that is NOT a CachedHistoryStorage: refreshing the
    // loaded-job cache is only meaningful for the cached implementation, so
    // refreshLoadedJobCache() must reject this storage with
    // UnsupportedOperationException.
    HistoryStorage storage = new HistoryStorage() {

        @Override
        public void setHistoryFileManager(HistoryFileManager hsManager) {
        // unused by this test
        }

        @Override
        public JobsInfo getPartialJobs(Long offset, Long count, String user, String queue, Long sBegin, Long sEnd, Long fBegin, Long fEnd, JobState jobState) {
            // unused by this test
            return null;
        }

        @Override
        public Job getFullJob(JobId jobId) {
            // unused by this test
            return null;
        }

        @Override
        public Map<JobId, Job> getAllPartialJobs() {
            // unused by this test
            return null;
        }
    };
    doReturn(storage).when(jobHistory).createHistoryStorage();
    jobHistory.init(new Configuration());
    jobHistory.start();
    Throwable th = null;
    try {
        jobHistory.refreshLoadedJobCache();
    } catch (Exception e) {
        th = e;
    }
    // FIX: include a message so a failure reports what actually happened
    // ("null" when no exception was thrown, or the wrong exception type)
    // instead of a bare AssertionError.
    assertTrue("Expected UnsupportedOperationException from refreshLoadedJobCache(), but got: " + th, th instanceof UnsupportedOperationException);
}
Also used : Configuration(org.apache.hadoop.conf.Configuration) JobState(org.apache.hadoop.mapreduce.v2.api.records.JobState) Job(org.apache.hadoop.mapreduce.v2.app.job.Job) JobId(org.apache.hadoop.mapreduce.v2.api.records.JobId) IOException(java.io.IOException) YarnRuntimeException(org.apache.hadoop.yarn.exceptions.YarnRuntimeException) Test(org.junit.Test)

Example 10 with JobState

use of org.apache.hadoop.mapreduce.v2.api.records.JobState in project hadoop by apache.

The class NotRunningJob, method getJobReport.

@Override
public GetJobReportResponse getJobReport(GetJobReportRequest request) throws IOException {
    // Synthesize a report for an application that never ran, filled from the
    // ApplicationReport the RM handed us plus this instance's fixed jobState.
    JobReport report = recordFactory.newRecordInstance(JobReport.class);
    report.setJobId(request.getJobId());
    report.setJobState(jobState);
    report.setUser(applicationReport.getUser());
    report.setStartTime(applicationReport.getStartTime());
    // Diagnostics are only attached once the application has reached a
    // terminal YARN state.
    YarnApplicationState appState = applicationReport.getYarnApplicationState();
    boolean isTerminal = appState == YarnApplicationState.KILLED
            || appState == YarnApplicationState.FAILED
            || appState == YarnApplicationState.FINISHED;
    if (isTerminal) {
        report.setDiagnostics(applicationReport.getDiagnostics());
    }
    report.setJobName(applicationReport.getName());
    report.setTrackingUrl(applicationReport.getTrackingUrl());
    report.setFinishTime(applicationReport.getFinishTime());
    GetJobReportResponse response = recordFactory.newRecordInstance(GetJobReportResponse.class);
    response.setJobReport(report);
    return response;
}
Also used : YarnApplicationState(org.apache.hadoop.yarn.api.records.YarnApplicationState) JobReport(org.apache.hadoop.mapreduce.v2.api.records.JobReport) GetJobReportResponse(org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetJobReportResponse)

Aggregations

JobReport (org.apache.hadoop.mapreduce.v2.api.records.JobReport)7 JobState (org.apache.hadoop.mapreduce.v2.api.records.JobState)5 JobId (org.apache.hadoop.mapreduce.v2.api.records.JobId)4 Job (org.apache.hadoop.mapreduce.v2.app.job.Job)4 Test (org.junit.Test)3 Configuration (org.apache.hadoop.conf.Configuration)2 ClientResponse (com.sun.jersey.api.client.ClientResponse)1 WebResource (com.sun.jersey.api.client.WebResource)1 IOException (java.io.IOException)1 ArrayList (java.util.ArrayList)1 HashMap (java.util.HashMap)1 LinkedList (java.util.LinkedList)1 Map (java.util.Map)1 GET (javax.ws.rs.GET)1 Path (javax.ws.rs.Path)1 Produces (javax.ws.rs.Produces)1 FileContext (org.apache.hadoop.fs.FileContext)1 Path (org.apache.hadoop.fs.Path)1 JobACLsManager (org.apache.hadoop.mapred.JobACLsManager)1 TaskCompletionEvent (org.apache.hadoop.mapred.TaskCompletionEvent)1