Use of org.apache.hadoop.mapreduce.Counter in the project hadoop by apache,
in the class JSONHistoryViewerPrinter, method printTaskCounters.
/**
 * Serializes a task's counters into the given JSON task object under the
 * key "counters": one entry per counter group, each holding an array of
 * {counterName, value} objects.
 *
 * @param jTask        JSON object representing the task; receives the
 *                     "counters" entry when counters exist
 * @param taskCounters the task's counters; may be null (killed tasks
 *                     might not have counters), in which case nothing is added
 * @throws JSONException if building the JSON structure fails
 */
private void printTaskCounters(JSONObject jTask, Counters taskCounters) throws JSONException {
  // Killed tasks might not have counters
  if (taskCounters == null) {
    return;
  }
  JSONObject jGroups = new JSONObject();
  for (String groupName : taskCounters.getGroupNames()) {
    CounterGroup group = taskCounters.getGroup(groupName);
    JSONArray jGroup = new JSONArray();
    for (Iterator<Counter> it = group.iterator(); it.hasNext(); ) {
      org.apache.hadoop.mapreduce.Counter counter = it.next();
      JSONObject jCounter = new JSONObject();
      jCounter.put("counterName", counter.getName());
      jCounter.put("value", counter.getValue());
      jGroup.put(jCounter);
    }
    // Group key is normalized for shuffle-error groups before insertion.
    jGroups.put(fixGroupNameForShuffleErrors(group.getName()), jGroup);
  }
  jTask.put("counters", jGroups);
}
Use of org.apache.hadoop.mapreduce.Counter in the project hadoop by apache,
in the class AbstractCounterGroup, method incrAllCounters.
/**
 * Merges every counter from {@code rightGroup} into this group, creating
 * missing counters on demand via {@code findCounter}. If the merge trips
 * the counter limit, all counters in this group are cleared and the
 * exception is rethrown.
 */
@Override
public void incrAllCounters(CounterGroupBase<T> rightGroup) {
  try {
    for (Counter source : rightGroup) {
      // findCounter resolves (or creates) the local counterpart by name.
      findCounter(source.getName(), source.getDisplayName())
          .increment(source.getValue());
    }
  } catch (LimitExceededException e) {
    // Limit blown mid-merge: drop partial state, then propagate.
    counters.clear();
    throw e;
  }
}
Use of org.apache.hadoop.mapreduce.Counter in the project hadoop by apache,
in the class JobHistoryEventUtils, method countersToJSON.
/**
 * Converts a {@link Counters} instance into a Jackson JSON array node:
 * one object per counter group (NAME, DISPLAY_NAME, COUNTERS), where
 * COUNTERS is an array of per-counter objects (NAME, DISPLAY_NAME, VALUE).
 *
 * @param counters counters to convert; may be null, yielding an empty array
 * @return JSON array node describing all counter groups
 */
public static JsonNode countersToJSON(Counters counters) {
  ArrayNode nodes = new ObjectMapper().createArrayNode();
  if (counters == null) {
    return nodes;
  }
  for (CounterGroup group : counters) {
    ObjectNode groupNode = nodes.addObject();
    groupNode.put("NAME", group.getName());
    groupNode.put("DISPLAY_NAME", group.getDisplayName());
    ArrayNode groupCounters = groupNode.putArray("COUNTERS");
    for (Counter c : group) {
      ObjectNode counterNode = groupCounters.addObject();
      counterNode.put("NAME", c.getName());
      counterNode.put("DISPLAY_NAME", c.getDisplayName());
      counterNode.put("VALUE", c.getValue());
    }
  }
  return nodes;
}
Use of org.apache.hadoop.mapreduce.Counter in the project hadoop by apache,
in the class OldAPICombinerTest, method testWordCountCombinerWithOldAPI.
/**
 * Runs the combiner word-count job (old MapReduce API) twice — once through
 * the native task engine, once through the normal path — then asserts that
 * both runs produce identical output files and identical
 * REDUCE_INPUT_RECORDS counter values.
 */
@Test
public void testWordCountCombinerWithOldAPI() throws Exception {
  // Native engine run.
  final Configuration nativeConf = ScenarioConfiguration.getNativeConfiguration();
  nativeConf.addResource(TestConstants.COMBINER_CONF_PATH);
  final String nativeoutput = TestConstants.NATIVETASK_OLDAPI_COMBINER_TEST_NATIVE_OUTPUTPATH;
  final JobConf nativeJob =
      getOldAPIJobconf(nativeConf, "nativeCombinerWithOldAPI", inputpath, nativeoutput);
  final RunningJob nativeRunning = JobClient.runJob(nativeJob);
  final Counter nativeReduceRecords =
      nativeRunning.getCounters().findCounter(TaskCounter.REDUCE_INPUT_RECORDS);

  // Reference (normal MapReduce) run.
  final Configuration normalConf = ScenarioConfiguration.getNormalConfiguration();
  normalConf.addResource(TestConstants.COMBINER_CONF_PATH);
  final String normaloutput = TestConstants.NATIVETASK_OLDAPI_COMBINER_TEST_NORMAL_OUTPUTPATH;
  final JobConf normalJob =
      getOldAPIJobconf(normalConf, "normalCombinerWithOldAPI", inputpath, normaloutput);
  final RunningJob normalRunning = JobClient.runJob(normalJob);
  final Counter normalReduceRecords =
      normalRunning.getCounters().findCounter(TaskCounter.REDUCE_INPUT_RECORDS);

  // Both the output contents and the reduce-input record counts must match.
  final boolean compareRet = ResultVerifier.verify(nativeoutput, normaloutput);
  assertEquals("file compare result: if they are the same ,then return true", true, compareRet);
  assertEquals("The input reduce record count must be same", nativeReduceRecords.getValue(), normalReduceRecords.getValue());
}
Use of org.apache.hadoop.mapreduce.Counter in the project ignite by apache,
in the class HadoopMapReduceCounterGroup, method addCounter.
/**
 * {@inheritDoc}
 *
 * <p>Resolves (or lazily creates) the counter named {@code name} within this
 * group and sets it to {@code value}. Note: the {@code displayName} argument
 * is not used by this implementation.
 */
@Override
public Counter addCounter(String name, String displayName, long value) {
  final Counter cntr = cntrs.findCounter(this.name, name);
  cntr.setValue(value);
  return cntr;
}
Aggregations