Search in sources :

Example 41 with Counters

Use of org.apache.hadoop.mapreduce.Counters in project hadoop by apache.

The class MockJobs, method newCounters.

/**
 * Builds a mock {@code Counters} instance populated with random values for
 * every framework job counter, task counter, a batch of per-scheme
 * file-system counters, and a handful of user-defined counters.
 */
public static Counters newCounters() {
    final Counters counters = new Counters();
    // Seed every framework-level job counter with a random value in [0, 1000).
    for (JobCounter jobCounter : JobCounter.values()) {
        counters.findCounter(jobCounter).setValue((long) (Math.random() * 1000));
    }
    // Same treatment for the per-task framework counters.
    for (TaskCounter taskCounter : TaskCounter.values()) {
        counters.findCounter(taskCounter).setValue((long) (Math.random() * 1000));
    }
    // Four passes over the file-system counter types, drawing a (possibly
    // repeating) scheme name from the mock generator on each lookup.
    final int rounds = FileSystemCounter.values().length * 4;
    for (int round = 0; round < rounds; ++round) {
        for (FileSystemCounter fsCounter : FileSystemCounter.values()) {
            counters.findCounter(FS_SCHEMES.next(), fsCounter).setValue((long) (Math.random() * DT));
        }
    }
    // Six user counters spread across the mock group/name generators.
    for (int i = 0; i < 2 * 3; ++i) {
        counters.findCounter(USER_COUNTER_GROUPS.next(), USER_COUNTERS.next()).setValue((long) (Math.random() * 100000));
    }
    return counters;
}
Also used : JobCounter(org.apache.hadoop.mapreduce.JobCounter) Counters(org.apache.hadoop.mapreduce.Counters) FileSystemCounter(org.apache.hadoop.mapreduce.FileSystemCounter) TaskCounter(org.apache.hadoop.mapreduce.TaskCounter)

Example 42 with Counters

Use of org.apache.hadoop.mapreduce.Counters in project hadoop by apache.

The class MockJobs, method newTask.

/**
 * Creates a mock {@code Task} backed by a freshly generated report and a map
 * of mock attempts. When {@code hasFailedTasks} is set, {@link Task#getCounters()}
 * returns {@code null} to simulate a task with no counter data.
 */
public static Task newTask(JobId jid, int i, int m, final boolean hasFailedTasks) {
    final TaskId taskId = Records.newRecord(TaskId.class);
    taskId.setJobId(jid);
    taskId.setId(i);
    taskId.setTaskType(TASK_TYPES.next());
    final TaskReport taskReport = newTaskReport(taskId);
    final Map<TaskAttemptId, TaskAttempt> attemptMap = newTaskAttempts(taskId, m);
    return new Task() {

        @Override
        public TaskId getID() {
            return taskId;
        }

        @Override
        public TaskReport getReport() {
            return taskReport;
        }

        @Override
        public Counters getCounters() {
            // Simulate a failed task having no counters available.
            return hasFailedTasks ? null : new Counters(TypeConverter.fromYarn(taskReport.getCounters()));
        }

        @Override
        public float getProgress() {
            return taskReport.getProgress();
        }

        @Override
        public TaskType getType() {
            return taskId.getTaskType();
        }

        @Override
        public Map<TaskAttemptId, TaskAttempt> getAttempts() {
            return attemptMap;
        }

        @Override
        public TaskAttempt getAttempt(TaskAttemptId attemptID) {
            return attemptMap.get(attemptID);
        }

        @Override
        public boolean isFinished() {
            // A task is finished once it reaches any terminal state.
            TaskState state = taskReport.getTaskState();
            return state == TaskState.SUCCEEDED || state == TaskState.KILLED || state == TaskState.FAILED;
        }

        @Override
        public boolean canCommit(TaskAttemptId taskAttemptID) {
            return false;
        }

        @Override
        public TaskState getState() {
            return taskReport.getTaskState();
        }
    };
}
Also used : Task(org.apache.hadoop.mapreduce.v2.app.job.Task) TaskId(org.apache.hadoop.mapreduce.v2.api.records.TaskId) TaskReport(org.apache.hadoop.mapreduce.v2.api.records.TaskReport) TaskAttemptId(org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId) Counters(org.apache.hadoop.mapreduce.Counters) TaskAttempt(org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt)

Example 43 with Counters

Use of org.apache.hadoop.mapreduce.Counters in project hadoop by apache.

The class MockJobs, method newJob.

/**
 * Creates a mock {@code Job} with {@code n} tasks of {@code m} attempts each.
 * The returned job delegates to pre-computed report, task, counter and ACL
 * snapshots captured at construction time.
 *
 * Fix: the original initialized a temporary {@code HashMap} for the job ACLs
 * and then unconditionally overwrote it with the result of
 * {@code aclsManager.constructJobACLs(conf)} — the allocation was dead code.
 * The ACL map is now assigned directly to a final local.
 */
public static Job newJob(ApplicationId appID, int i, int n, int m, Path confFile, boolean hasFailedTasks) {
    final JobId id = newJobID(appID, i);
    final String name = newJobName();
    final JobReport report = newJobReport(id);
    final Map<TaskId, Task> tasks = newTasks(id, n, m, hasFailedTasks);
    final TaskCount taskCount = getTaskCount(tasks.values());
    final Counters counters = getCounters(tasks.values());
    final Path configFile = confFile;
    // Build a configuration granting view access to "testuser" and derive the
    // job ACLs from it; the ACL map is effectively immutable after this point.
    final Configuration conf = new Configuration();
    conf.set(JobACL.VIEW_JOB.getAclName(), "testuser");
    conf.setBoolean(MRConfig.MR_ACLS_ENABLED, true);
    JobACLsManager aclsManager = new JobACLsManager(conf);
    final Map<JobACL, AccessControlList> jobACLs = aclsManager.constructJobACLs(conf);
    return new Job() {

        @Override
        public JobId getID() {
            return id;
        }

        @Override
        public String getName() {
            return name;
        }

        @Override
        public JobState getState() {
            return report.getJobState();
        }

        @Override
        public JobReport getReport() {
            return report;
        }

        @Override
        public float getProgress() {
            // Mock jobs always report zero progress.
            return 0;
        }

        @Override
        public Counters getAllCounters() {
            return counters;
        }

        @Override
        public Map<TaskId, Task> getTasks() {
            return tasks;
        }

        @Override
        public Task getTask(TaskId taskID) {
            return tasks.get(taskID);
        }

        @Override
        public int getTotalMaps() {
            return taskCount.maps;
        }

        @Override
        public int getTotalReduces() {
            return taskCount.reduces;
        }

        @Override
        public int getCompletedMaps() {
            return taskCount.completedMaps;
        }

        @Override
        public int getCompletedReduces() {
            return taskCount.completedReduces;
        }

        @Override
        public boolean isUber() {
            return false;
        }

        @Override
        public TaskAttemptCompletionEvent[] getTaskAttemptCompletionEvents(int fromEventId, int maxEvents) {
            // Completion events are not modeled by this mock.
            return null;
        }

        @Override
        public TaskCompletionEvent[] getMapAttemptCompletionEvents(int startIndex, int maxEvents) {
            return null;
        }

        @Override
        public Map<TaskId, Task> getTasks(TaskType taskType) {
            throw new UnsupportedOperationException("Not supported yet.");
        }

        @Override
        public List<String> getDiagnostics() {
            return Collections.<String>emptyList();
        }

        @Override
        public boolean checkAccess(UserGroupInformation callerUGI, JobACL jobOperation) {
            // Mock jobs grant access to everyone.
            return true;
        }

        @Override
        public String getUserName() {
            return "mock";
        }

        @Override
        public String getQueueName() {
            return "mockqueue";
        }

        @Override
        public Path getConfFile() {
            return configFile;
        }

        @Override
        public Map<JobACL, AccessControlList> getJobACLs() {
            return jobACLs;
        }

        @Override
        public List<AMInfo> getAMInfos() {
            // Two AM attempts, mirroring a job that was restarted once.
            List<AMInfo> amInfoList = new LinkedList<AMInfo>();
            amInfoList.add(createAMInfo(1));
            amInfoList.add(createAMInfo(2));
            return amInfoList;
        }

        @Override
        public Configuration loadConfFile() throws IOException {
            FileContext fc = FileContext.getFileContext(configFile.toUri(), conf);
            Configuration jobConf = new Configuration(false);
            jobConf.addResource(fc.open(configFile), configFile.toString());
            return jobConf;
        }

        @Override
        public void setQueueName(String queueName) {
        // do nothing
        }

        @Override
        public void setJobPriority(Priority priority) {
        // do nothing
        }
    };
}
Also used : AccessControlList(org.apache.hadoop.security.authorize.AccessControlList) Task(org.apache.hadoop.mapreduce.v2.app.job.Task) TaskId(org.apache.hadoop.mapreduce.v2.api.records.TaskId) Configuration(org.apache.hadoop.conf.Configuration) HashMap(java.util.HashMap) TaskAttemptCompletionEvent(org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptCompletionEvent) JobReport(org.apache.hadoop.mapreduce.v2.api.records.JobReport) TaskCompletionEvent(org.apache.hadoop.mapred.TaskCompletionEvent) JobACLsManager(org.apache.hadoop.mapred.JobACLsManager) TaskType(org.apache.hadoop.mapreduce.v2.api.records.TaskType) Job(org.apache.hadoop.mapreduce.v2.app.job.Job) JobId(org.apache.hadoop.mapreduce.v2.api.records.JobId) UserGroupInformation(org.apache.hadoop.security.UserGroupInformation) Path(org.apache.hadoop.fs.Path) Priority(org.apache.hadoop.yarn.api.records.Priority) LinkedList(java.util.LinkedList) AMInfo(org.apache.hadoop.mapreduce.v2.api.records.AMInfo) Counters(org.apache.hadoop.mapreduce.Counters) JobACL(org.apache.hadoop.mapreduce.JobACL) FileContext(org.apache.hadoop.fs.FileContext)

Example 44 with Counters

Use of org.apache.hadoop.mapreduce.Counters in project hadoop by apache.

The class TestEvents, method testTaskAttemptFinishedEvent.

/**
   * Tests the getters of TaskAttemptFinishedEvent: every value passed to the
   * constructor must be returned unchanged by the corresponding accessor.
   *
   * @throws Exception
   */
@Test(timeout = 10000)
public void testTaskAttemptFinishedEvent() throws Exception {
    JobID jid = new JobID("001", 1);
    TaskID tid = new TaskID(jid, TaskType.REDUCE, 2);
    TaskAttemptID taskAttemptId = new TaskAttemptID(tid, 3);
    Counters counters = new Counters();
    TaskAttemptFinishedEvent test = new TaskAttemptFinishedEvent(taskAttemptId, TaskType.REDUCE, "TEST", 123L, "RAKNAME", "HOSTNAME", "STATUS", counters);
    // JUnit's assertEquals takes (expected, actual); the original had the
    // arguments reversed, which produced misleading failure messages.
    assertEquals(taskAttemptId.toString(), test.getAttemptId().toString());
    assertEquals(counters, test.getCounters());
    assertEquals(123L, test.getFinishTime());
    assertEquals("HOSTNAME", test.getHostname());
    assertEquals("RAKNAME", test.getRackName());
    assertEquals("STATUS", test.getState());
    assertEquals(tid, test.getTaskId());
    assertEquals("TEST", test.getTaskStatus());
    assertEquals(TaskType.REDUCE, test.getTaskType());
}
Also used : TaskID(org.apache.hadoop.mapreduce.TaskID) TaskAttemptID(org.apache.hadoop.mapreduce.TaskAttemptID) Counters(org.apache.hadoop.mapreduce.Counters) JobID(org.apache.hadoop.mapreduce.JobID) Test(org.junit.Test)

Example 45 with Counters

Use of org.apache.hadoop.mapreduce.Counters in project hadoop by apache.

The class JobCounterInfo, method getCounters.

/**
 * Populates the {@code total}, {@code map} and {@code reduce} counter fields
 * from the given job. If the job cannot supply aggregate counters, the totals
 * are rebuilt by summing the individual task counters instead.
 */
private void getCounters(AppContext ctx, Job job) {
    if (job == null) {
        return;
    }
    total = job.getAllCounters();
    boolean aggregateTotal = false;
    if (total == null) {
        // No job-level aggregate available; accumulate it from tasks below.
        total = new Counters();
        aggregateTotal = true;
    }
    map = new Counters();
    reduce = new Counters();
    // Fold each task's counters into the per-phase (and, if needed, total) sums.
    for (Task task : job.getTasks().values()) {
        Counters taskCounters = task.getCounters();
        if (taskCounters == null) {
            // Tasks without counter data contribute nothing.
            continue;
        }
        switch(task.getType()) {
            case MAP:
                map.incrAllCounters(taskCounters);
                break;
            case REDUCE:
                reduce.incrAllCounters(taskCounters);
                break;
            default:
                break;
        }
        if (aggregateTotal) {
            total.incrAllCounters(taskCounters);
        }
    }
}
Also used : Task(org.apache.hadoop.mapreduce.v2.app.job.Task) TaskId(org.apache.hadoop.mapreduce.v2.api.records.TaskId) Counters(org.apache.hadoop.mapreduce.Counters)

Aggregations

Counters (org.apache.hadoop.mapreduce.Counters)72 Test (org.junit.Test)24 Job (org.apache.hadoop.mapreduce.Job)21 Path (org.apache.hadoop.fs.Path)14 Configuration (org.apache.hadoop.conf.Configuration)13 Counter (org.apache.hadoop.mapreduce.Counter)11 Task (org.apache.hadoop.mapreduce.v2.app.job.Task)8 TaskId (org.apache.hadoop.mapreduce.v2.api.records.TaskId)7 PhoenixScrutinyJobCounters (org.apache.phoenix.mapreduce.index.PhoenixScrutinyJobCounters)7 BaseTest (org.apache.phoenix.query.BaseTest)7 IOException (java.io.IOException)6 TaskAttemptId (org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId)6 TaskAttempt (org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt)6 YarnConfiguration (org.apache.hadoop.yarn.conf.YarnConfiguration)6 HdfsConfiguration (org.apache.hadoop.hdfs.HdfsConfiguration)5 TableName (org.apache.hadoop.hbase.TableName)4 JobId (org.apache.hadoop.mapreduce.v2.api.records.JobId)4 File (java.io.File)3 URI (java.net.URI)3 FileSystem (org.apache.hadoop.fs.FileSystem)3