Use of org.apache.hadoop.mapreduce.v2.api.records.Counters in project hadoop by apache.
The class TestAMWebServicesTasks, method testTaskIdCounters.
@Test
public void testTaskIdCounters() throws JSONException, Exception {
  WebResource r = resource();
  Map<JobId, Job> jobsMap = appContext.getAllJobs();
  for (JobId id : jobsMap.keySet()) {
    String jobId = MRApps.toString(id);
    for (Task task : jobsMap.get(id).getTasks().values()) {
      String tid = MRApps.toString(task.getID());
      ClientResponse response = r.path("ws").path("v1").path("mapreduce")
          .path("jobs").path(jobId).path("tasks").path(tid).path("counters")
          .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
      assertEquals(MediaType.APPLICATION_JSON_TYPE + "; " + JettyUtils.UTF_8,
          response.getType().toString());
      JSONObject json = response.getEntity(JSONObject.class);
      assertEquals("incorrect number of elements", 1, json.length());
      JSONObject info = json.getJSONObject("jobTaskCounters");
      verifyAMJobTaskCounters(info, task);
    }
  }
}
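The test only asserts the top-level structure here; the detailed field checks happen in verifyAMJobTaskCounters. As a rough illustration of how a client of the same counters endpoint might walk the response, the sketch below iterates the returned JSON, assuming the usual MR AM REST API shape (a "taskCounterGroup" array of groups, each with a "counter" array of name/value pairs). The field names and the class name are assumptions for this sketch, not part of the Hadoop test.

// Minimal sketch: dump the groups and counters of a task-counters JSON response.
// Field names ("taskCounterGroup", "counterGroupName", "counter", "name",
// "value") are assumed here from the MR AM REST API response shape.
import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;

public final class TaskCountersJsonDump {
  public static void dump(JSONObject jobTaskCounters) throws JSONException {
    JSONArray groups = jobTaskCounters.getJSONArray("taskCounterGroup");
    for (int i = 0; i < groups.length(); i++) {
      JSONObject group = groups.getJSONObject(i);
      System.out.println(group.getString("counterGroupName"));
      JSONArray counters = group.getJSONArray("counter");
      for (int j = 0; j < counters.length(); j++) {
        JSONObject counter = counters.getJSONObject(j);
        System.out.println("  " + counter.getString("name")
            + " = " + counter.getLong("value"));
      }
    }
  }
}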
Use of org.apache.hadoop.mapreduce.v2.api.records.Counters in project hadoop by apache.
The class TaskImpl, method getCounters.
@Override
public Counters getCounters() {
  Counters counters = null;
  readLock.lock();
  try {
    TaskAttempt bestAttempt = selectBestAttempt();
    if (bestAttempt != null) {
      counters = bestAttempt.getCounters();
    } else {
      counters = TaskAttemptImpl.EMPTY_COUNTERS;
      // counters.groups = new HashMap<CharSequence, CounterGroup>();
    }
    return counters;
  } finally {
    readLock.unlock();
  }
}
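Because the method falls back to TaskAttemptImpl.EMPTY_COUNTERS when no attempt is available, callers do not need a null check on the returned Counters object. A minimal sketch of such a caller, assuming the org.apache.hadoop.mapreduce.Counters API (findCounter over a TaskCounter enum key); the helper class and choice of counter are made up for this example.

// Illustrative only: read one value from the Counters returned by Task#getCounters().
import org.apache.hadoop.mapreduce.Counter;
import org.apache.hadoop.mapreduce.Counters;
import org.apache.hadoop.mapreduce.TaskCounter;
import org.apache.hadoop.mapreduce.v2.app.job.Task;

public final class TaskCounterReader {
  public static long mapInputRecords(Task task) {
    // Not null: getCounters() falls back to EMPTY_COUNTERS (see above).
    Counters counters = task.getCounters();
    Counter c = counters.findCounter(TaskCounter.MAP_INPUT_RECORDS);
    return (c != null) ? c.getValue() : 0L;
  }
}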
Use of org.apache.hadoop.mapreduce.v2.api.records.Counters in project hadoop by apache.
The class KillAMPreemptionPolicy, method killContainer.
@SuppressWarnings("unchecked")
private void killContainer(Context ctxt, PreemptionContainer c) {
  ContainerId reqCont = c.getId();
  TaskAttemptId reqTask = ctxt.getTaskAttempt(reqCont);
  LOG.info("Evicting " + reqTask);
  dispatcher.handle(new TaskAttemptEvent(reqTask, TaskAttemptEventType.TA_KILL));
  // add preemption to counters
  JobCounterUpdateEvent jce = new JobCounterUpdateEvent(reqTask.getTaskId().getJobId());
  jce.addCounterUpdate(JobCounter.TASKS_REQ_PREEMPT, 1);
  dispatcher.handle(jce);
}
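In this policy, the helper above is driven from the preemption callback, which receives a YARN PreemptionMessage. A rough sketch of such a driving loop is shown below, written as a method in the same class and assuming the standard PreemptionMessage accessors (getStrictContract()/getContract() and their getContainers() sets); it is not the exact Hadoop implementation.

// Sketch: kill every container named in a preemption message, strict contract first.
// The null checks reflect that either contract may be absent in a given message.
private void preemptAll(Context ctxt, PreemptionMessage msg) {
  if (msg.getStrictContract() != null) {
    for (PreemptionContainer c : msg.getStrictContract().getContainers()) {
      killContainer(ctxt, c);
    }
  }
  if (msg.getContract() != null) {
    for (PreemptionContainer c : msg.getContract().getContainers()) {
      killContainer(ctxt, c);
    }
  }
}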
Use of org.apache.hadoop.mapreduce.v2.api.records.Counters in project hadoop by apache.
The class CountersBlock, method getCounters.
private void getCounters(AppContext ctx) {
  JobId jobID = null;
  TaskId taskID = null;
  String tid = $(TASK_ID);
  if (!tid.isEmpty()) {
    taskID = MRApps.toTaskID(tid);
    jobID = taskID.getJobId();
  } else {
    String jid = $(JOB_ID);
    if (jid != null && !jid.isEmpty()) {
      jobID = MRApps.toJobID(jid);
    }
  }
  if (jobID == null) {
    return;
  }
  job = ctx.getJob(jobID);
  if (job == null) {
    return;
  }
  if (taskID != null) {
    task = job.getTask(taskID);
    if (task == null) {
      return;
    }
    total = task.getCounters();
    return;
  }
  // Get all types of counters
  Map<TaskId, Task> tasks = job.getTasks();
  total = job.getAllCounters();
  boolean needTotalCounters = false;
  if (total == null) {
    total = new Counters();
    needTotalCounters = true;
  }
  map = new Counters();
  reduce = new Counters();
  for (Task t : tasks.values()) {
    Counters counters = t.getCounters();
    if (counters == null) {
      continue;
    }
    switch (t.getType()) {
      case MAP:
        map.incrAllCounters(counters);
        break;
      case REDUCE:
        reduce.incrAllCounters(counters);
        break;
    }
    if (needTotalCounters) {
      total.incrAllCounters(counters);
    }
  }
}
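After this method runs, map, reduce and total each hold aggregated org.apache.hadoop.mapreduce.Counters, which the block then renders as an HTML table. As a plain illustration of the same traversal, the sketch below walks one Counters object group by group; the only API assumed is that Counters and CounterGroup are Iterable, and the printer class is made up for the example.

// Illustrative traversal of an aggregated Counters object, printing
// group and counter display names together with the counter values.
import org.apache.hadoop.mapreduce.Counter;
import org.apache.hadoop.mapreduce.CounterGroup;
import org.apache.hadoop.mapreduce.Counters;

public final class CountersPrinter {
  public static void print(Counters counters) {
    for (CounterGroup group : counters) {
      System.out.println(group.getDisplayName());
      for (Counter counter : group) {
        System.out.println("  " + counter.getDisplayName() + " = " + counter.getValue());
      }
    }
  }
}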
Use of org.apache.hadoop.mapreduce.v2.api.records.Counters in project hadoop by apache.
The class SingleCounterBlock, method populateMembers.
private void populateMembers(AppContext ctx) {
  JobId jobID = null;
  TaskId taskID = null;
  String tid = $(TASK_ID);
  if ($(TITLE).contains("MAPS")) {
    counterType = TaskType.MAP;
  } else if ($(TITLE).contains("REDUCES")) {
    counterType = TaskType.REDUCE;
  } else {
    counterType = null;
  }
  if (!tid.isEmpty()) {
    taskID = MRApps.toTaskID(tid);
    jobID = taskID.getJobId();
  } else {
    String jid = $(JOB_ID);
    if (!jid.isEmpty()) {
      jobID = MRApps.toJobID(jid);
    }
  }
  if (jobID == null) {
    return;
  }
  job = ctx.getJob(jobID);
  if (job == null) {
    return;
  }
  if (taskID != null) {
    task = job.getTask(taskID);
    if (task == null) {
      return;
    }
    for (Map.Entry<TaskAttemptId, TaskAttempt> entry : task.getAttempts().entrySet()) {
      long value = 0;
      Counters counters = entry.getValue().getCounters();
      CounterGroup group = (counters != null) ? counters.getGroup($(COUNTER_GROUP)) : null;
      if (group != null) {
        Counter c = group.findCounter($(COUNTER_NAME));
        if (c != null) {
          value = c.getValue();
        }
      }
      values.put(MRApps.toString(entry.getKey()), value);
    }
    return;
  }
  // Get all types of counters
  Map<TaskId, Task> tasks = job.getTasks();
  for (Map.Entry<TaskId, Task> entry : tasks.entrySet()) {
    long value = 0;
    Counters counters = entry.getValue().getCounters();
    CounterGroup group = (counters != null) ? counters.getGroup($(COUNTER_GROUP)) : null;
    if (group != null) {
      Counter c = group.findCounter($(COUNTER_NAME));
      if (c != null) {
        value = c.getValue();
      }
    }
    if (counterType == null || counterType == entry.getValue().getType()) {
      values.put(MRApps.toString(entry.getKey()), value);
    }
  }
}
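The null-safe group/counter lookup is duplicated for the per-attempt and per-task paths. Purely as an illustration, it could be factored into a helper inside the same class, as in the sketch below; the method name is hypothetical, while the Counters and CounterGroup calls are the same ones used above.

// Hypothetical helper mirroring the lookup above: returns the counter value,
// or 0 when the counters, group, or counter is missing.
private static long counterValue(Counters counters, String groupName, String counterName) {
  CounterGroup group = (counters != null) ? counters.getGroup(groupName) : null;
  if (group == null) {
    return 0;
  }
  Counter c = group.findCounter(counterName);
  return (c != null) ? c.getValue() : 0;
}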