
Example 1 with JobACLsManager

Use of org.apache.hadoop.mapred.JobACLsManager in project hadoop by apache.

From the class HistoryFileManager, method serviceInit:

@Override
protected void serviceInit(Configuration conf) throws Exception {
    this.conf = conf;
    // History directory serial numbers are zero-padded to a fixed width.
    int serialNumberLowDigits = 3;
    serialNumberFormat = ("%0" + (JobHistoryUtils.SERIAL_NUMBER_DIRECTORY_DIGITS + serialNumberLowDigits) + "d");
    // Wait up to maxFSWaitTime for the history filesystem, polling every 10 seconds.
    long maxFSWaitTime = conf.getLong(JHAdminConfig.MR_HISTORY_MAX_START_WAIT_TIME, JHAdminConfig.DEFAULT_MR_HISTORY_MAX_START_WAIT_TIME);
    createHistoryDirs(SystemClock.getInstance(), 10 * 1000, maxFSWaitTime);
    maxTasksForLoadedJob = conf.getInt(JHAdminConfig.MR_HS_LOADED_JOBS_TASKS_MAX, JHAdminConfig.DEFAULT_MR_HS_LOADED_JOBS_TASKS_MAX);
    // The ACLs manager for job-level access checks is built from the service configuration.
    this.aclsMgr = new JobACLsManager(conf);
    maxHistoryAge = conf.getLong(JHAdminConfig.MR_HISTORY_MAX_AGE_MS, JHAdminConfig.DEFAULT_MR_HISTORY_MAX_AGE);
    jobListCache = createJobListCache();
    serialNumberIndex = new SerialNumberIndex(conf.getInt(JHAdminConfig.MR_HISTORY_DATESTRING_CACHE_SIZE, JHAdminConfig.DEFAULT_MR_HISTORY_DATESTRING_CACHE_SIZE));
    int numMoveThreads = conf.getInt(JHAdminConfig.MR_HISTORY_MOVE_THREAD_COUNT, JHAdminConfig.DEFAULT_MR_HISTORY_MOVE_THREAD_COUNT);
    moveToDoneExecutor = createMoveToDoneThreadPool(numMoveThreads);
    super.serviceInit(conf);
}
Also used: JobACLsManager (org.apache.hadoop.mapred.JobACLsManager)
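
JobACLsManager centralizes the job-level ACL checks that services such as the history server perform. As a minimal, hedged sketch of how a manager built this way is typically consulted; the method name, owner name, and ACL string below are illustrative placeholders, not values from the Hadoop source:

import java.io.IOException;
import org.apache.hadoop.mapred.JobACLsManager;
import org.apache.hadoop.mapreduce.JobACL;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authorize.AccessControlList;

// Sketch: ask the manager whether the current caller may view a job.
// "historyuser" and the "historyuser admins" ACL string are made-up examples.
static boolean mayViewJob(JobACLsManager aclsMgr) throws IOException {
    UserGroupInformation caller = UserGroupInformation.getCurrentUser();
    AccessControlList viewAcl = new AccessControlList("historyuser admins");
    return aclsMgr.checkAccess(caller, JobACL.VIEW_JOB, "historyuser", viewAcl);
}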

Example 2 with JobACLsManager

Use of org.apache.hadoop.mapred.JobACLsManager in project hadoop by apache.

From the class MockJobs, method newJob:

public static Job newJob(ApplicationId appID, int i, int n, int m, Path confFile, boolean hasFailedTasks) {
    final JobId id = newJobID(appID, i);
    final String name = newJobName();
    final JobReport report = newJobReport(id);
    final Map<TaskId, Task> tasks = newTasks(id, n, m, hasFailedTasks);
    final TaskCount taskCount = getTaskCount(tasks.values());
    final Counters counters = getCounters(tasks.values());
    final Path configFile = confFile;
    final Configuration conf = new Configuration();
    // Grant view access to "testuser" and enable MR ACLs before building the manager.
    conf.set(JobACL.VIEW_JOB.getAclName(), "testuser");
    conf.setBoolean(MRConfig.MR_ACLS_ENABLED, true);
    JobACLsManager aclsManager = new JobACLsManager(conf);
    final Map<JobACL, AccessControlList> jobACLs = aclsManager.constructJobACLs(conf);
    // Return an anonymous Job whose getters report the canned values computed above.
    return new Job() {

        @Override
        public JobId getID() {
            return id;
        }

        @Override
        public String getName() {
            return name;
        }

        @Override
        public JobState getState() {
            return report.getJobState();
        }

        @Override
        public JobReport getReport() {
            return report;
        }

        @Override
        public float getProgress() {
            return 0;
        }

        @Override
        public Counters getAllCounters() {
            return counters;
        }

        @Override
        public Map<TaskId, Task> getTasks() {
            return tasks;
        }

        @Override
        public Task getTask(TaskId taskID) {
            return tasks.get(taskID);
        }

        @Override
        public int getTotalMaps() {
            return taskCount.maps;
        }

        @Override
        public int getTotalReduces() {
            return taskCount.reduces;
        }

        @Override
        public int getCompletedMaps() {
            return taskCount.completedMaps;
        }

        @Override
        public int getCompletedReduces() {
            return taskCount.completedReduces;
        }

        @Override
        public boolean isUber() {
            return false;
        }

        @Override
        public TaskAttemptCompletionEvent[] getTaskAttemptCompletionEvents(int fromEventId, int maxEvents) {
            return null;
        }

        @Override
        public TaskCompletionEvent[] getMapAttemptCompletionEvents(int startIndex, int maxEvents) {
            return null;
        }

        @Override
        public Map<TaskId, Task> getTasks(TaskType taskType) {
            throw new UnsupportedOperationException("Not supported yet.");
        }

        @Override
        public List<String> getDiagnostics() {
            return Collections.<String>emptyList();
        }

        @Override
        public boolean checkAccess(UserGroupInformation callerUGI, JobACL jobOperation) {
            return true;
        }

        @Override
        public String getUserName() {
            return "mock";
        }

        @Override
        public String getQueueName() {
            return "mockqueue";
        }

        @Override
        public Path getConfFile() {
            return configFile;
        }

        @Override
        public Map<JobACL, AccessControlList> getJobACLs() {
            return jobACLs;
        }

        @Override
        public List<AMInfo> getAMInfos() {
            List<AMInfo> amInfoList = new LinkedList<AMInfo>();
            amInfoList.add(createAMInfo(1));
            amInfoList.add(createAMInfo(2));
            return amInfoList;
        }

        @Override
        public Configuration loadConfFile() throws IOException {
            FileContext fc = FileContext.getFileContext(configFile.toUri(), conf);
            Configuration jobConf = new Configuration(false);
            jobConf.addResource(fc.open(configFile), configFile.toString());
            return jobConf;
        }

        @Override
        public void setQueueName(String queueName) {
        // do nothing
        }

        @Override
        public void setJobPriority(Priority priority) {
        // do nothing
        }
    };
}
Also used:
AccessControlList (org.apache.hadoop.security.authorize.AccessControlList)
Task (org.apache.hadoop.mapreduce.v2.app.job.Task)
TaskId (org.apache.hadoop.mapreduce.v2.api.records.TaskId)
Configuration (org.apache.hadoop.conf.Configuration)
TaskAttemptCompletionEvent (org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptCompletionEvent)
JobReport (org.apache.hadoop.mapreduce.v2.api.records.JobReport)
TaskCompletionEvent (org.apache.hadoop.mapred.TaskCompletionEvent)
JobACLsManager (org.apache.hadoop.mapred.JobACLsManager)
TaskType (org.apache.hadoop.mapreduce.v2.api.records.TaskType)
Job (org.apache.hadoop.mapreduce.v2.app.job.Job)
JobId (org.apache.hadoop.mapreduce.v2.api.records.JobId)
UserGroupInformation (org.apache.hadoop.security.UserGroupInformation)
Path (org.apache.hadoop.fs.Path)
Priority (org.apache.hadoop.yarn.api.records.Priority)
LinkedList (java.util.LinkedList)
AMInfo (org.apache.hadoop.mapreduce.v2.api.records.AMInfo)
Counters (org.apache.hadoop.mapreduce.Counters)
JobACL (org.apache.hadoop.mapreduce.JobACL)
FileContext (org.apache.hadoop.fs.FileContext)
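
Since the mock grants VIEW_JOB to "testuser" before constructing the ACL map, the parsed AccessControlList can be checked directly. A short follow-up sketch, assuming the jobACLs map built above; createRemoteUser stands in for a real authenticated caller:

import org.apache.hadoop.mapreduce.JobACL;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authorize.AccessControlList;

// conf granted VIEW_JOB to "testuser", so the parsed ACL should admit that user.
AccessControlList viewAcl = jobACLs.get(JobACL.VIEW_JOB);
UserGroupInformation tester = UserGroupInformation.createRemoteUser("testuser");
boolean canView = viewAcl.isUserAllowed(tester); // expected: true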

Example 3 with JobACLsManager

Use of org.apache.hadoop.mapred.JobACLsManager in project hadoop by apache.

From the class TestJobInfo, method testAverageMergeTime:

@Test(timeout = 10000)
public void testAverageMergeTime() throws IOException {
    String historyFileName = "job_1329348432655_0001-1329348443227-user-Sleep+job-1329348468601-10-1-SUCCEEDED-default.jhist";
    String confFileName = "job_1329348432655_0001_conf.xml";
    Configuration conf = new Configuration();
    JobACLsManager jobAclsMgr = new JobACLsManager(conf);
    Path fullHistoryPath = new Path(TestJobHistoryEntities.class.getClassLoader().getResource(historyFileName).getFile());
    Path fullConfPath = new Path(TestJobHistoryEntities.class.getClassLoader().getResource(confFileName).getFile());
    HistoryFileInfo info = mock(HistoryFileInfo.class);
    when(info.getConfFile()).thenReturn(fullConfPath);
    when(info.getHistoryFile()).thenReturn(fullHistoryPath);
    JobId jobId = MRBuilderUtils.newJobId(1329348432655L, 1, 1);
    CompletedJob completedJob = new CompletedJob(conf, jobId, fullHistoryPath, true, "user", info, jobAclsMgr);
    JobInfo jobInfo = new JobInfo(completedJob);
    // The history file records two tasks with merge times of 45 and 55, so the
    // average merge time should be 50.
    Assert.assertEquals(50L, jobInfo.getAvgMergeTime().longValue());
}
Also used:
Path (org.apache.hadoop.fs.Path)
TestJobHistoryEntities (org.apache.hadoop.mapreduce.v2.hs.TestJobHistoryEntities)
HistoryFileInfo (org.apache.hadoop.mapreduce.v2.hs.HistoryFileManager.HistoryFileInfo)
CompletedJob (org.apache.hadoop.mapreduce.v2.hs.CompletedJob)
Configuration (org.apache.hadoop.conf.Configuration)
JobACLsManager (org.apache.hadoop.mapred.JobACLsManager)
JobId (org.apache.hadoop.mapreduce.v2.api.records.JobId)
Test (org.junit.Test)
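
CompletedJob keeps the JobACLsManager it is handed and consults it when authorizing callers. A simplified sketch of that interaction, assuming the objects from the test above; this is an illustration, not the actual CompletedJob source:

import org.apache.hadoop.mapreduce.JobACL;
import org.apache.hadoop.security.UserGroupInformation;

// Sketch: the owner (or anyone, while ACLs are disabled in this default conf)
// passes the view check against the job's own ACL map.
boolean mayView = jobAclsMgr.checkAccess(
    UserGroupInformation.createRemoteUser("user"), // "user" is the owner in this test
    JobACL.VIEW_JOB,
    "user",
    completedJob.getJobACLs().get(JobACL.VIEW_JOB));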
