Use of org.apache.hadoop.mapreduce.CounterGroup in project hadoop by apache.
The class EventWriter, method toAvro.
static JhCounters toAvro(Counters counters, String name) {
  JhCounters result = new JhCounters();
  result.setName(new Utf8(name));
  result.setGroups(new ArrayList<JhCounterGroup>(0));
  if (counters == null)
    return result;
  for (CounterGroup group : counters) {
    JhCounterGroup g = new JhCounterGroup();
    g.setName(new Utf8(group.getName()));
    g.setDisplayName(new Utf8(group.getDisplayName()));
    g.setCounts(new ArrayList<JhCounter>(group.size()));
    for (Counter counter : group) {
      JhCounter c = new JhCounter();
      c.setName(new Utf8(counter.getName()));
      c.setDisplayName(new Utf8(counter.getDisplayName()));
      c.setValue(counter.getValue());
      g.getCounts().add(c);
    }
    result.getGroups().add(g);
  }
  return result;
}
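A minimal sketch of exercising this conversion, assuming the call site lives in the same package as EventWriter (toAvro is package-private) and using a hypothetical group and counter name:

// Hypothetical usage from within org.apache.hadoop.mapreduce.jobhistory:
Counters counters = new Counters();
counters.findCounter("MyGroup", "recordsProcessed").increment(42);
// Produces a JhCounters record named "COUNTERS" holding one group with one counter.
JhCounters avro = EventWriter.toAvro(counters, "COUNTERS");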
Use of org.apache.hadoop.mapreduce.CounterGroup in project hadoop by apache.
The class JSONHistoryViewerPrinter, method printJobCounters.
private void printJobCounters(Counters totalCounters, Counters mapCounters, Counters reduceCounters) throws JSONException {
  // Killed jobs might not have counters
  if (totalCounters != null) {
    JSONObject jGroups = new JSONObject();
    for (String groupName : totalCounters.getGroupNames()) {
      CounterGroup totalGroup = totalCounters.getGroup(groupName);
      CounterGroup mapGroup = mapCounters.getGroup(groupName);
      CounterGroup reduceGroup = reduceCounters.getGroup(groupName);
      Iterator<Counter> ctrItr = totalGroup.iterator();
      JSONArray jGroup = new JSONArray();
      while (ctrItr.hasNext()) {
        JSONObject jCounter = new JSONObject();
        org.apache.hadoop.mapreduce.Counter counter = ctrItr.next();
        String name = counter.getName();
        long mapValue = mapGroup.findCounter(name).getValue();
        long reduceValue = reduceGroup.findCounter(name).getValue();
        long totalValue = counter.getValue();
        jCounter.put("counterName", name);
        jCounter.put("mapValue", mapValue);
        jCounter.put("reduceValue", reduceValue);
        jCounter.put("totalValue", totalValue);
        jGroup.put(jCounter);
      }
      jGroups.put(fixGroupNameForShuffleErrors(totalGroup.getName()), jGroup);
    }
    json.put("counters", jGroups);
  }
}
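The resulting JSON nests one array of counter objects per group under the "counters" key; an illustrative shape, with hypothetical group name and values:

"counters": {
  "org.apache.hadoop.mapreduce.TaskCounter": [
    {"counterName": "MAP_INPUT_RECORDS", "mapValue": 1000, "reduceValue": 0, "totalValue": 1000}
  ]
}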
Use of org.apache.hadoop.mapreduce.CounterGroup in project pinot by linkedin.
The class JoinPhaseJob, method dumpSummary.
private void dumpSummary(Job job, List<String> sourceNames) throws IOException {
  System.out.println("Join Input Matrix.");
  CounterGroup group = job.getCounters().getGroup("DynamicCounter");
  for (String source : sourceNames) {
    System.out.print(String.format("%25s\t", source));
  }
  if (group != null) {
    Iterator<Counter> iterator = group.iterator();
    while (iterator.hasNext()) {
      Counter counter = iterator.next();
      String displayName = counter.getDisplayName();
      // Strip both brackets from the display name, e.g. "[1,0,1]" -> "1,0,1".
      String[] split = displayName.replace("[", "").replace("]", "").split(",");
      for (String str : split) {
        if (str.trim().equals("1")) {
          System.out.print(String.format("%25s\t", "1"));
        } else {
          System.out.print(String.format("%25s\t", "-"));
        }
      }
    }
  }
}
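The parsing above assumes each dynamic counter's display name encodes one presence flag per join source; isolated, the transformation looks like this (the display name is hypothetical):

String displayName = "[1,0,1]";
String[] split = displayName.replace("[", "").replace("]", "").split(",");
// split == {"1", "0", "1"}: a "1" prints in that source's column, "-" otherwise.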
Use of org.apache.hadoop.mapreduce.CounterGroup in project cdap by caskdata.
The class BasicWorkflowToken, method setMapReduceCounters.
public synchronized void setMapReduceCounters(Counters counters) {
  ImmutableMap.Builder<String, Map<String, Long>> countersBuilder = ImmutableMap.builder();
  for (CounterGroup group : counters) {
    ImmutableMap.Builder<String, Long> groupBuilder = ImmutableMap.builder();
    for (Counter counter : group) {
      groupBuilder.put(counter.getName(), counter.getValue());
      // Also put the counter to system scope.
      put(group.getName() + "." + counter.getName(), Value.of(counter.getValue()), WorkflowToken.Scope.SYSTEM);
    }
    countersBuilder.put(group.getName(), groupBuilder.build());
  }
  this.mapReduceCounters = countersBuilder.build();
}
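Assuming CDAP's standard WorkflowToken API, a later node in the workflow could read one of these flattened keys back out of system scope (the counter key here is hypothetical):

// Key format is "<groupName>.<counterName>", as written by setMapReduceCounters above.
Value records = token.get("org.apache.hadoop.mapreduce.TaskCounter.MAP_INPUT_RECORDS",
    WorkflowToken.Scope.SYSTEM);
long value = records.getAsLong();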
Use of org.apache.hadoop.mapreduce.CounterGroup in project hadoop by apache.
The class HumanReadableHistoryViewerPrinter, method printJobCounters.
private void printJobCounters(StringBuilder buff, Counters totalCounters, Counters mapCounters, Counters reduceCounters) {
  // Killed jobs might not have counters
  if (totalCounters != null) {
    buff.append("\nCounters: \n\n");
    buff.append(String.format("|%1$-30s|%2$-30s|%3$-10s|%4$-10s|%5$-10s|", "Group Name", "Counter name", "Map Value", "Reduce Value", "Total Value"));
    buff.append("\n------------------------------------------" + "---------------------------------------------");
    for (String groupName : totalCounters.getGroupNames()) {
      CounterGroup totalGroup = totalCounters.getGroup(groupName);
      CounterGroup mapGroup = mapCounters.getGroup(groupName);
      CounterGroup reduceGroup = reduceCounters.getGroup(groupName);
      Format decimal = new DecimalFormat();
      Iterator<Counter> ctrItr = totalGroup.iterator();
      while (ctrItr.hasNext()) {
        org.apache.hadoop.mapreduce.Counter counter = ctrItr.next();
        String name = counter.getName();
        String mapValue = decimal.format(mapGroup.findCounter(name).getValue());
        String reduceValue = decimal.format(reduceGroup.findCounter(name).getValue());
        String totalValue = decimal.format(counter.getValue());
        buff.append(String.format("%n|%1$-30s|%2$-30s|%3$-10s|%4$-10s|%5$-10s", totalGroup.getDisplayName(), counter.getDisplayName(), mapValue, reduceValue, totalValue));
      }
    }
  }
}
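Given those format strings, the rendered table looks roughly as follows (group, counter names, and values hypothetical; the default DecimalFormat adds thousands separators, and cells longer than their column width simply overflow):

Counters:

|Group Name                    |Counter name                  |Map Value |Reduce Value|Total Value|
---------------------------------------------------------------------------------------
|Map-Reduce Framework          |Map input records             |1,000     |0         |1,000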