
Example 1 with Counter

Use of org.apache.hadoop.mapreduce.v2.api.records.Counter in project hadoop by apache.

From the class TypeConverter, the method toYarn:

public static Counters toYarn(org.apache.hadoop.mapred.Counters counters) {
    if (counters == null) {
        return null;
    }
    Counters yCntrs = recordFactory.newRecordInstance(Counters.class);
    yCntrs.addAllCounterGroups(new HashMap<String, CounterGroup>());
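    // Copy each mapred counter group, and every counter in it, into the YARN record equivalents.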
    for (org.apache.hadoop.mapred.Counters.Group grp : counters) {
        CounterGroup yGrp = recordFactory.newRecordInstance(CounterGroup.class);
        yGrp.setName(grp.getName());
        yGrp.setDisplayName(grp.getDisplayName());
        yGrp.addAllCounters(new HashMap<String, Counter>());
        for (org.apache.hadoop.mapred.Counters.Counter cntr : grp) {
            Counter yCntr = recordFactory.newRecordInstance(Counter.class);
            yCntr.setName(cntr.getName());
            yCntr.setDisplayName(cntr.getDisplayName());
            yCntr.setValue(cntr.getValue());
            yGrp.setCounter(yCntr.getName(), yCntr);
        }
        yCntrs.setCounterGroup(yGrp.getName(), yGrp);
    }
    return yCntrs;
}
Also used: Counter (org.apache.hadoop.mapreduce.v2.api.records.Counter), CounterGroup (org.apache.hadoop.mapreduce.v2.api.records.CounterGroup), Counters (org.apache.hadoop.mapreduce.v2.api.records.Counters)
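
A minimal usage sketch, assuming the converter above is the org.apache.hadoop.mapreduce.TypeConverter utility; the group and counter names are illustrative:

// Build mapred counters, convert them, and read a value back from the YARN record.
org.apache.hadoop.mapred.Counters mCounters = new org.apache.hadoop.mapred.Counters();
mCounters.findCounter("example.group", "RECORDS_SEEN").increment(42);
Counters yCounters = TypeConverter.toYarn(mCounters);
// value is 42
long value = yCounters.getCounterGroup("example.group").getCounter("RECORDS_SEEN").getValue();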

Example 2 with Counter

Use of org.apache.hadoop.mapreduce.v2.api.records.Counter in project hadoop by apache.

From the class SingleCounterBlock, the method populateMembers:

private void populateMembers(AppContext ctx) {
    JobId jobID = null;
    TaskId taskID = null;
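    // $(KEY) reads a parameter from the webapp view context; TASK_ID, TITLE,
    // COUNTER_GROUP and COUNTER_NAME are supplied by the rendering page.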
    String tid = $(TASK_ID);
    if ($(TITLE).contains("MAPS")) {
        counterType = TaskType.MAP;
    } else if ($(TITLE).contains("REDUCES")) {
        counterType = TaskType.REDUCE;
    } else {
        counterType = null;
    }
    if (!tid.isEmpty()) {
        taskID = MRApps.toTaskID(tid);
        jobID = taskID.getJobId();
    } else {
        String jid = $(JOB_ID);
        if (!jid.isEmpty()) {
            jobID = MRApps.toJobID(jid);
        }
    }
    if (jobID == null) {
        return;
    }
    job = ctx.getJob(jobID);
    if (job == null) {
        return;
    }
    if (taskID != null) {
        task = job.getTask(taskID);
        if (task == null) {
            return;
        }
        for (Map.Entry<TaskAttemptId, TaskAttempt> entry : task.getAttempts().entrySet()) {
            long value = 0;
            Counters counters = entry.getValue().getCounters();
            CounterGroup group = (counters != null) ? counters.getGroup($(COUNTER_GROUP)) : null;
            if (group != null) {
                Counter c = group.findCounter($(COUNTER_NAME));
                if (c != null) {
                    value = c.getValue();
                }
            }
            values.put(MRApps.toString(entry.getKey()), value);
        }
        return;
    }
    // Get all types of counters
    Map<TaskId, Task> tasks = job.getTasks();
    for (Map.Entry<TaskId, Task> entry : tasks.entrySet()) {
        long value = 0;
        Counters counters = entry.getValue().getCounters();
        CounterGroup group = (counters != null) ? counters.getGroup($(COUNTER_GROUP)) : null;
        if (group != null) {
            Counter c = group.findCounter($(COUNTER_NAME));
            if (c != null) {
                value = c.getValue();
            }
        }
        if (counterType == null || counterType == entry.getValue().getType()) {
            values.put(MRApps.toString(entry.getKey()), value);
        }
    }
}
Also used: Task (org.apache.hadoop.mapreduce.v2.app.job.Task), TaskId (org.apache.hadoop.mapreduce.v2.api.records.TaskId), TaskAttemptId (org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId), CounterGroup (org.apache.hadoop.mapreduce.CounterGroup), Counter (org.apache.hadoop.mapreduce.Counter), Counters (org.apache.hadoop.mapreduce.Counters), TaskAttempt (org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt), Map (java.util.Map), TreeMap (java.util.TreeMap), JobId (org.apache.hadoop.mapreduce.v2.api.records.JobId)
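
The null-safe group/counter lookup appears verbatim in both loops above; a minimal extraction sketch (the helper name counterValue is hypothetical) that either loop could call:

// Hypothetical helper: the value of the named counter, or 0 when the
// counters, the group, or the counter itself is missing.
private static long counterValue(Counters counters, String groupName, String counterName) {
    if (counters == null) {
        return 0;
    }
    CounterGroup group = counters.getGroup(groupName);
    if (group == null) {
        return 0;
    }
    Counter c = group.findCounter(counterName);
    return (c == null) ? 0 : c.getValue();
}

Each loop body would then reduce to a single values.put(...) call around counterValue(entry.getValue().getCounters(), $(COUNTER_GROUP), $(COUNTER_NAME)).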

Example 3 with Counter

Use of org.apache.hadoop.mapreduce.v2.api.records.Counter in project hadoop by apache.

From the class TestTaskAttemptListenerImpl, the method testCheckpointIDTracking:

@Test
public void testCheckpointIDTracking() throws IOException, InterruptedException {
    SystemClock clock = SystemClock.getInstance();
    org.apache.hadoop.mapreduce.v2.app.job.Task mockTask = mock(org.apache.hadoop.mapreduce.v2.app.job.Task.class);
    when(mockTask.canCommit(any(TaskAttemptId.class))).thenReturn(true);
    Job mockJob = mock(Job.class);
    when(mockJob.getTask(any(TaskId.class))).thenReturn(mockTask);
    Dispatcher dispatcher = mock(Dispatcher.class);
    @SuppressWarnings("unchecked") EventHandler<Event> ea = mock(EventHandler.class);
    when(dispatcher.getEventHandler()).thenReturn(ea);
    RMHeartbeatHandler rmHeartbeatHandler = mock(RMHeartbeatHandler.class);
    AppContext appCtx = mock(AppContext.class);
    when(appCtx.getJob(any(JobId.class))).thenReturn(mockJob);
    when(appCtx.getClock()).thenReturn(clock);
    when(appCtx.getEventHandler()).thenReturn(ea);
    JobTokenSecretManager secret = mock(JobTokenSecretManager.class);
    final TaskHeartbeatHandler hbHandler = mock(TaskHeartbeatHandler.class);
    when(appCtx.getEventHandler()).thenReturn(ea);
    CheckpointAMPreemptionPolicy policy = new CheckpointAMPreemptionPolicy();
    policy.init(appCtx);
    TaskAttemptListenerImpl listener = new MockTaskAttemptListenerImpl(appCtx, secret, rmHeartbeatHandler, policy) {

        @Override
        protected void registerHeartbeatHandler(Configuration conf) {
            taskHeartbeatHandler = hbHandler;
        }
    };
    Configuration conf = new Configuration();
    conf.setBoolean(MRJobConfig.TASK_PREEMPTION, true);
    //conf.setBoolean("preemption.reduce", true);
    listener.init(conf);
    listener.start();
    TaskAttemptID tid = new TaskAttemptID("12345", 1, TaskType.REDUCE, 1, 0);
    List<Path> partialOut = new ArrayList<Path>();
    partialOut.add(new Path("/prev1"));
    partialOut.add(new Path("/prev2"));
    Counters counters = mock(Counters.class);
    final long CBYTES = 64L * 1024 * 1024;
    final long CTIME = 4344L;
    final Path CLOC = new Path("/test/1");
    Counter cbytes = mock(Counter.class);
    when(cbytes.getValue()).thenReturn(CBYTES);
    Counter ctime = mock(Counter.class);
    when(ctime.getValue()).thenReturn(CTIME);
    when(counters.findCounter(eq(EnumCounter.CHECKPOINT_BYTES))).thenReturn(cbytes);
    when(counters.findCounter(eq(EnumCounter.CHECKPOINT_MS))).thenReturn(ctime);
    // propagating a task status that contains a checkpoint id
    TaskCheckpointID incid = new TaskCheckpointID(new FSCheckpointID(CLOC), partialOut, counters);
    listener.setCheckpointID(org.apache.hadoop.mapred.TaskID.downgrade(tid.getTaskID()), incid);
    // and try to get it back
    CheckpointID outcid = listener.getCheckpointID(tid.getTaskID());
    TaskCheckpointID tcid = (TaskCheckpointID) outcid;
    assertEquals(CBYTES, tcid.getCheckpointBytes());
    assertEquals(CTIME, tcid.getCheckpointTime());
    assertTrue(partialOut.containsAll(tcid.getPartialCommittedOutput()));
    assertTrue(tcid.getPartialCommittedOutput().containsAll(partialOut));
    // assert the identical instance came back
    assert outcid == incid;
    listener.stop();
}
Also used: RMHeartbeatHandler (org.apache.hadoop.mapreduce.v2.app.rm.RMHeartbeatHandler), TaskId (org.apache.hadoop.mapreduce.v2.api.records.TaskId), Configuration (org.apache.hadoop.conf.Configuration), ArrayList (java.util.ArrayList), Dispatcher (org.apache.hadoop.yarn.event.Dispatcher), TaskCheckpointID (org.apache.hadoop.mapreduce.checkpoint.TaskCheckpointID), EnumCounter (org.apache.hadoop.mapreduce.checkpoint.EnumCounter), Counter (org.apache.hadoop.mapred.Counters.Counter), JobTokenSecretManager (org.apache.hadoop.mapreduce.security.token.JobTokenSecretManager), FSCheckpointID (org.apache.hadoop.mapreduce.checkpoint.FSCheckpointID), CheckpointID (org.apache.hadoop.mapreduce.checkpoint.CheckpointID), TaskHeartbeatHandler (org.apache.hadoop.mapreduce.v2.app.TaskHeartbeatHandler), Job (org.apache.hadoop.mapreduce.v2.app.job.Job), JobId (org.apache.hadoop.mapreduce.v2.api.records.JobId), Path (org.apache.hadoop.fs.Path), SystemClock (org.apache.hadoop.yarn.util.SystemClock), TaskAttemptId (org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId), AppContext (org.apache.hadoop.mapreduce.v2.app.AppContext), CheckpointAMPreemptionPolicy (org.apache.hadoop.mapreduce.v2.app.rm.preemption.CheckpointAMPreemptionPolicy), TaskAttemptCompletionEvent (org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptCompletionEvent), Event (org.apache.hadoop.yarn.event.Event), Test (org.junit.Test)
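
Isolated from the test, a sketch of the checkpoint-ID round trip, assuming the TaskCheckpointID constructor used above and reusing the mocked counters from the test (java.util.Arrays is assumed imported; paths are illustrative):

// Build a checkpoint ID whose size/time figures are derived from the counters,
// then read the derived fields back.
List<Path> partial = Arrays.asList(new Path("/prev1"), new Path("/prev2"));
TaskCheckpointID cid = new TaskCheckpointID(new FSCheckpointID(new Path("/test/1")), partial, counters);
// CHECKPOINT_BYTES read from the counters
long bytes = cid.getCheckpointBytes();
// CHECKPOINT_MS read from the counters
long millis = cid.getCheckpointTime();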

Example 4 with Counter

Use of org.apache.hadoop.mapreduce.v2.api.records.Counter in project hadoop by apache.

From the class CounterGroupPBImpl, the method initCounters:

private void initCounters() {
    if (this.counters != null) {
        return;
    }
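    // Read through the immutable proto or the mutable builder, whichever currently backs this record.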
    CounterGroupProtoOrBuilder p = viaProto ? proto : builder;
    List<StringCounterMapProto> list = p.getCountersList();
    this.counters = new HashMap<String, Counter>();
    for (StringCounterMapProto c : list) {
        this.counters.put(c.getKey(), convertFromProtoFormat(c.getValue()));
    }
}
Also used: StringCounterMapProto (org.apache.hadoop.mapreduce.v2.proto.MRProtos.StringCounterMapProto), Counter (org.apache.hadoop.mapreduce.v2.api.records.Counter), CounterGroupProtoOrBuilder (org.apache.hadoop.mapreduce.v2.proto.MRProtos.CounterGroupProtoOrBuilder)
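
A sketch of the accessor side of this lazy-load pattern, modeled on the usual PBImpl layout (the exact accessor body in Hadoop may differ):

// Force the one-time copy out of the proto, then serve the read from the map.
public Counter getCounter(String key) {
    initCounters();
    return this.counters.get(key);
}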

Example 5 with Counter

Use of org.apache.hadoop.mapreduce.v2.api.records.Counter in project hadoop by apache.

From the class CLI, the method run:

public int run(String[] argv) throws Exception {
    int exitCode = -1;
    if (argv.length < 1) {
        displayUsage("");
        return exitCode;
    }
    // process arguments
    String cmd = argv[0];
    String submitJobFile = null;
    String jobid = null;
    String taskid = null;
    String historyFileOrJobId = null;
    String historyOutFile = null;
    String historyOutFormat = HistoryViewer.HUMAN_FORMAT;
    String counterGroupName = null;
    String counterName = null;
    JobPriority jp = null;
    String taskType = null;
    String taskState = null;
    int fromEvent = 0;
    int nEvents = 0;
    int jpvalue = 0;
    String configOutFile = null;
    boolean getStatus = false;
    boolean getCounter = false;
    boolean killJob = false;
    boolean listEvents = false;
    boolean viewHistory = false;
    boolean viewAllHistory = false;
    boolean listJobs = false;
    boolean listAllJobs = false;
    boolean listActiveTrackers = false;
    boolean listBlacklistedTrackers = false;
    boolean displayTasks = false;
    boolean killTask = false;
    boolean failTask = false;
    boolean setJobPriority = false;
    boolean logs = false;
    boolean downloadConfig = false;
    if ("-submit".equals(cmd)) {
        if (argv.length != 2) {
            displayUsage(cmd);
            return exitCode;
        }
        submitJobFile = argv[1];
    } else if ("-status".equals(cmd)) {
        if (argv.length != 2) {
            displayUsage(cmd);
            return exitCode;
        }
        jobid = argv[1];
        getStatus = true;
    } else if ("-counter".equals(cmd)) {
        if (argv.length != 4) {
            displayUsage(cmd);
            return exitCode;
        }
        getCounter = true;
        jobid = argv[1];
        counterGroupName = argv[2];
        counterName = argv[3];
    } else if ("-kill".equals(cmd)) {
        if (argv.length != 2) {
            displayUsage(cmd);
            return exitCode;
        }
        jobid = argv[1];
        killJob = true;
    } else if ("-set-priority".equals(cmd)) {
        if (argv.length != 3) {
            displayUsage(cmd);
            return exitCode;
        }
        jobid = argv[1];
        try {
            jp = JobPriority.valueOf(argv[2]);
        } catch (IllegalArgumentException iae) {
            try {
                jpvalue = Integer.parseInt(argv[2]);
            } catch (NumberFormatException ne) {
                LOG.info(ne);
                displayUsage(cmd);
                return exitCode;
            }
        }
        setJobPriority = true;
    } else if ("-events".equals(cmd)) {
        if (argv.length != 4) {
            displayUsage(cmd);
            return exitCode;
        }
        jobid = argv[1];
        fromEvent = Integer.parseInt(argv[2]);
        nEvents = Integer.parseInt(argv[3]);
        listEvents = true;
    } else if ("-history".equals(cmd)) {
        viewHistory = true;
        if (argv.length < 2 || argv.length > 7) {
            displayUsage(cmd);
            return exitCode;
        }
        // Some arguments are optional while others are not, and some require
        // second arguments.  Due to this, the indexing can vary depending on
        // what's specified and what's left out, as summarized in the below table:
        // [all] <jobHistoryFile|jobId> [-outfile <file>] [-format <human|json>]
        //   1                  2            3       4         5         6
        //   1                  2            3       4
        //   1                  2                              3         4
        //   1                  2
        //                      1            2       3         4         5
        //                      1            2       3
        //                      1                              2         3
        //                      1
        // "all" is optional, but comes first if specified
        int index = 1;
        if ("all".equals(argv[index])) {
            index++;
            viewAllHistory = true;
            if (argv.length == 2) {
                displayUsage(cmd);
                return exitCode;
            }
        }
        // Get the job history file or job id argument
        historyFileOrJobId = argv[index++];
        // "-outfile" is optional, but if specified requires a second argument
        if (argv.length > index + 1 && "-outfile".equals(argv[index])) {
            index++;
            historyOutFile = argv[index++];
        }
        // "-format" is optional, but if specified required a second argument
        if (argv.length > index + 1 && "-format".equals(argv[index])) {
            index++;
            historyOutFormat = argv[index++];
        }
        // Check for any extra arguments that don't belong here
        if (argv.length > index) {
            displayUsage(cmd);
            return exitCode;
        }
    } else if ("-list".equals(cmd)) {
        if (argv.length != 1 && !(argv.length == 2 && "all".equals(argv[1]))) {
            displayUsage(cmd);
            return exitCode;
        }
        if (argv.length == 2 && "all".equals(argv[1])) {
            listAllJobs = true;
        } else {
            listJobs = true;
        }
    } else if ("-kill-task".equals(cmd)) {
        if (argv.length != 2) {
            displayUsage(cmd);
            return exitCode;
        }
        killTask = true;
        taskid = argv[1];
    } else if ("-fail-task".equals(cmd)) {
        if (argv.length != 2) {
            displayUsage(cmd);
            return exitCode;
        }
        failTask = true;
        taskid = argv[1];
    } else if ("-list-active-trackers".equals(cmd)) {
        if (argv.length != 1) {
            displayUsage(cmd);
            return exitCode;
        }
        listActiveTrackers = true;
    } else if ("-list-blacklisted-trackers".equals(cmd)) {
        if (argv.length != 1) {
            displayUsage(cmd);
            return exitCode;
        }
        listBlacklistedTrackers = true;
    } else if ("-list-attempt-ids".equals(cmd)) {
        if (argv.length != 4) {
            displayUsage(cmd);
            return exitCode;
        }
        jobid = argv[1];
        taskType = argv[2];
        taskState = argv[3];
        displayTasks = true;
        if (!taskTypes.contains(org.apache.hadoop.util.StringUtils.toUpperCase(taskType))) {
            System.out.println("Error: Invalid task-type: " + taskType);
            displayUsage(cmd);
            return exitCode;
        }
        if (!taskStates.contains(org.apache.hadoop.util.StringUtils.toLowerCase(taskState))) {
            System.out.println("Error: Invalid task-state: " + taskState);
            displayUsage(cmd);
            return exitCode;
        }
    } else if ("-logs".equals(cmd)) {
        if (argv.length == 2 || argv.length == 3) {
            logs = true;
            jobid = argv[1];
            if (argv.length == 3) {
                taskid = argv[2];
            } else {
                taskid = null;
            }
        } else {
            displayUsage(cmd);
            return exitCode;
        }
    } else if ("-config".equals(cmd)) {
        downloadConfig = true;
        if (argv.length != 3) {
            displayUsage(cmd);
            return exitCode;
        }
        jobid = argv[1];
        configOutFile = argv[2];
    } else {
        displayUsage(cmd);
        return exitCode;
    }
    // initialize cluster
    cluster = createCluster();
    // Submit the request
    try {
        if (submitJobFile != null) {
            Job job = Job.getInstance(new JobConf(submitJobFile));
            job.submit();
            System.out.println("Created job " + job.getJobID());
            exitCode = 0;
        } else if (getStatus) {
            Job job = getJob(JobID.forName(jobid));
            if (job == null) {
                System.out.println("Could not find job " + jobid);
            } else {
                Counters counters = job.getCounters();
                System.out.println();
                System.out.println(job);
                if (counters != null) {
                    System.out.println(counters);
                } else {
                    System.out.println("Counters not available. Job is retired.");
                }
                exitCode = 0;
            }
        } else if (getCounter) {
            Job job = getJob(JobID.forName(jobid));
            if (job == null) {
                System.out.println("Could not find job " + jobid);
            } else {
                Counters counters = job.getCounters();
                if (counters == null) {
                    System.out.println("Counters not available for retired job " + jobid);
                    exitCode = -1;
                } else {
                    System.out.println(getCounter(counters, counterGroupName, counterName));
                    exitCode = 0;
                }
            }
        } else if (killJob) {
            Job job = getJob(JobID.forName(jobid));
            if (job == null) {
                System.out.println("Could not find job " + jobid);
            } else {
                JobStatus jobStatus = job.getStatus();
                if (jobStatus.getState() == JobStatus.State.FAILED) {
                    System.out.println("Could not mark the job " + jobid + " as killed, as it has already failed.");
                    exitCode = -1;
                } else if (jobStatus.getState() == JobStatus.State.KILLED) {
                    System.out.println("The job " + jobid + " has already been killed.");
                    exitCode = -1;
                } else if (jobStatus.getState() == JobStatus.State.SUCCEEDED) {
                    System.out.println("Could not kill the job " + jobid + ", as it has already succeeded.");
                    exitCode = -1;
                } else {
                    job.killJob();
                    System.out.println("Killed job " + jobid);
                    exitCode = 0;
                }
            }
        } else if (setJobPriority) {
            Job job = getJob(JobID.forName(jobid));
            if (job == null) {
                System.out.println("Could not find job " + jobid);
            } else {
                if (jp != null) {
                    job.setPriority(jp);
                } else {
                    job.setPriorityAsInteger(jpvalue);
                }
                System.out.println("Changed job priority.");
                exitCode = 0;
            }
        } else if (viewHistory) {
            // a name ending in .jhist is a history file; otherwise treat it as a job ID
            if (historyFileOrJobId.endsWith(".jhist")) {
                viewHistory(historyFileOrJobId, viewAllHistory, historyOutFile, historyOutFormat);
                exitCode = 0;
            } else {
                Job job = getJob(JobID.forName(historyFileOrJobId));
                if (job == null) {
                    System.out.println("Could not find job " + jobid);
                } else {
                    String historyUrl = job.getHistoryUrl();
                    if (historyUrl == null || historyUrl.isEmpty()) {
                        System.out.println("History file for job " + historyFileOrJobId + " is currently unavailable.");
                    } else {
                        viewHistory(historyUrl, viewAllHistory, historyOutFile, historyOutFormat);
                        exitCode = 0;
                    }
                }
            }
        } else if (listEvents) {
            Job job = getJob(JobID.forName(jobid));
            if (job == null) {
                System.out.println("Could not find job " + jobid);
            } else {
                listEvents(job, fromEvent, nEvents);
                exitCode = 0;
            }
        } else if (listJobs) {
            listJobs(cluster);
            exitCode = 0;
        } else if (listAllJobs) {
            listAllJobs(cluster);
            exitCode = 0;
        } else if (listActiveTrackers) {
            listActiveTrackers(cluster);
            exitCode = 0;
        } else if (listBlacklistedTrackers) {
            listBlacklistedTrackers(cluster);
            exitCode = 0;
        } else if (displayTasks) {
            Job job = getJob(JobID.forName(jobid));
            if (job == null) {
                System.out.println("Could not find job " + jobid);
            } else {
                displayTasks(job, taskType, taskState);
                exitCode = 0;
            }
        } else if (killTask) {
            TaskAttemptID taskID = TaskAttemptID.forName(taskid);
            Job job = getJob(taskID.getJobID());
            if (job == null) {
                System.out.println("Could not find job " + jobid);
            } else if (job.killTask(taskID, false)) {
                System.out.println("Killed task " + taskid);
                exitCode = 0;
            } else {
                System.out.println("Could not kill task " + taskid);
                exitCode = -1;
            }
        } else if (failTask) {
            TaskAttemptID taskID = TaskAttemptID.forName(taskid);
            Job job = getJob(taskID.getJobID());
            if (job == null) {
                System.out.println("Could not find job " + jobid);
            } else if (job.killTask(taskID, true)) {
                System.out.println("Killed task " + taskID + " by failing it");
                exitCode = 0;
            } else {
                System.out.println("Could not fail task " + taskid);
                exitCode = -1;
            }
        } else if (logs) {
            JobID jobID = JobID.forName(jobid);
            if (getJob(jobID) == null) {
                System.out.println("Could not find job " + jobid);
            } else {
                try {
                    TaskAttemptID taskAttemptID = TaskAttemptID.forName(taskid);
                    LogParams logParams = cluster.getLogParams(jobID, taskAttemptID);
                    LogCLIHelpers logDumper = new LogCLIHelpers();
                    logDumper.setConf(getConf());
                    exitCode = logDumper.dumpAContainersLogs(logParams.getApplicationId(), logParams.getContainerId(), logParams.getNodeId(), logParams.getOwner());
                } catch (IOException e) {
                    if (e instanceof RemoteException) {
                        throw e;
                    }
                    System.out.println(e.getMessage());
                }
            }
        } else if (downloadConfig) {
            Job job = getJob(JobID.forName(jobid));
            if (job == null) {
                System.out.println("Could not find job " + jobid);
            } else {
                String jobFile = job.getJobFile();
                if (jobFile == null || jobFile.isEmpty()) {
                    System.out.println("Config file for job " + jobFile + " could not be found.");
                } else {
                    Path configPath = new Path(jobFile);
                    FileSystem fs = FileSystem.get(getConf());
                    fs.copyToLocalFile(configPath, new Path(configOutFile));
                    exitCode = 0;
                }
            }
        }
    } catch (RemoteException re) {
        IOException unwrappedException = re.unwrapRemoteException();
        if (unwrappedException instanceof AccessControlException) {
            System.out.println(unwrappedException.getMessage());
        } else {
            throw re;
        }
    } finally {
        cluster.close();
    }
    return exitCode;
}
Also used: Path (org.apache.hadoop.fs.Path), TaskAttemptID (org.apache.hadoop.mapreduce.TaskAttemptID), JobPriority (org.apache.hadoop.mapreduce.JobPriority), AccessControlException (org.apache.hadoop.security.AccessControlException), IOException (java.io.IOException), LogParams (org.apache.hadoop.mapreduce.v2.LogParams), JobStatus (org.apache.hadoop.mapreduce.JobStatus), FileSystem (org.apache.hadoop.fs.FileSystem), LogCLIHelpers (org.apache.hadoop.yarn.logaggregation.LogCLIHelpers), Counters (org.apache.hadoop.mapreduce.Counters), Job (org.apache.hadoop.mapreduce.Job), RemoteException (org.apache.hadoop.ipc.RemoteException), JobConf (org.apache.hadoop.mapred.JobConf), JobID (org.apache.hadoop.mapreduce.JobID)
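
For reference, the -counter branch above is reached by an invocation like the following minimal sketch; the job id is illustrative, and the group/counter names are the standard TaskCounter ones:

// Drive the CLI programmatically, as the mapred job shell command ultimately does.
String[] args = { "-counter", "job_1400000000000_0001",
    "org.apache.hadoop.mapreduce.TaskCounter", "MAP_INPUT_RECORDS" };
int exitCode = org.apache.hadoop.util.ToolRunner.run(new CLI(), args);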

Aggregations

Counter (org.apache.hadoop.mapreduce.v2.api.records.Counter): 6
CounterGroup (org.apache.hadoop.mapreduce.v2.api.records.CounterGroup): 5
Counters (org.apache.hadoop.mapreduce.Counters): 4
Counters (org.apache.hadoop.mapreduce.v2.api.records.Counters): 4
JobId (org.apache.hadoop.mapreduce.v2.api.records.JobId): 3
TaskId (org.apache.hadoop.mapreduce.v2.api.records.TaskId): 3
Test (org.junit.Test): 3
Path (org.apache.hadoop.fs.Path): 2
Counter (org.apache.hadoop.mapreduce.Counter): 2
Job (org.apache.hadoop.mapreduce.Job): 2
TaskAttemptId (org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId): 2
File (java.io.File): 1
IOException (java.io.IOException): 1
ArrayList (java.util.ArrayList): 1
Map (java.util.Map): 1
TreeMap (java.util.TreeMap): 1
Configuration (org.apache.hadoop.conf.Configuration): 1
FileSystem (org.apache.hadoop.fs.FileSystem): 1
RemoteException (org.apache.hadoop.ipc.RemoteException): 1
Counter (org.apache.hadoop.mapred.Counters.Counter): 1