Use of org.apache.hadoop.mapreduce.CounterGroup in project pinot by linkedin.
The class JoinPhaseJob, method dumpSummary:
private void dumpSummary(Job job, List<String> sourceNames) throws IOException {
  System.out.println("Join Input Matrix.");
  CounterGroup group = job.getCounters().getGroup("DynamicCounter");
  // Header row: one column per join source.
  for (String source : sourceNames) {
    System.out.print(String.format("%25s\t", source));
  }
  if (group != null) {
    Iterator<Counter> iterator = group.iterator();
    while (iterator.hasNext()) {
      Counter counter = iterator.next();
      String displayName = counter.getDisplayName();
      // Display names are bracketed, comma-separated presence vectors;
      // strip both brackets (the original snippet replaced "[" twice,
      // leaving the "]" behind) and split on commas.
      String[] split = displayName.replace("[", "").replace("]", "").split(",");
      for (String str : split) {
        if (str.trim().equals("1")) {
          System.out.print(String.format("%25s\t", "1"));
        } else {
          System.out.print(String.format("%25s\t", "-"));
        }
      }
    }
  }
}
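The "DynamicCounter" group itself is populated elsewhere in the job. A minimal sketch of the mapper-side increment that would produce display names this method can parse (the vector construction, the recordHasSource flag, and the bracketed naming scheme are assumptions inferred from the parsing logic above, not pinot's actual code):

// Hypothetical mapper fragment: per joined record, bump a counter in the
// "DynamicCounter" group whose name is a bracketed presence vector such as
// "[1,,1]" (one slot per source). dumpSummary above strips the brackets,
// splits on commas, and prints "1" or "-" per slot.
StringBuilder vector = new StringBuilder("[");
for (int i = 0; i < sourceNames.size(); i++) {
  if (i > 0) {
    vector.append(",");
  }
  vector.append(recordHasSource[i] ? "1" : "");  // recordHasSource is assumed
}
vector.append("]");
context.getCounter("DynamicCounter", vector.toString()).increment(1);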
Use of org.apache.hadoop.mapreduce.CounterGroup in project hadoop by apache.
The class TestMRMultipleOutputs, method _testMultipleOutputs:
protected void _testMultipleOutputs(boolean withCounters) throws Exception {
  String input = "a\nb\nc\nd\ne\nc\nd\ne";
  Configuration conf = createJobConf();
  Job job = MapReduceTestUtil.createJob(conf, IN_DIR, OUT_DIR, 2, 1, input);
  job.setJobName("mo");
  MultipleOutputs.addNamedOutput(job, TEXT, TextOutputFormat.class, LongWritable.class, Text.class);
  MultipleOutputs.addNamedOutput(job, SEQUENCE, SequenceFileOutputFormat.class, IntWritable.class, Text.class);
  MultipleOutputs.setCountersEnabled(job, withCounters);
  job.setMapperClass(MOMap.class);
  job.setReducerClass(MOReduce.class);
  job.waitForCompletion(true);
  // assert number of named output part files
  int namedOutputCount = 0;
  int valueBasedOutputCount = 0;
  FileSystem fs = OUT_DIR.getFileSystem(conf);
  FileStatus[] statuses = fs.listStatus(OUT_DIR);
  for (FileStatus status : statuses) {
    String fileName = status.getPath().getName();
    if (fileName.equals("text-m-00000") || fileName.equals("text-m-00001")
        || fileName.equals("text-r-00000") || fileName.equals("sequence_A-m-00000")
        || fileName.equals("sequence_A-m-00001") || fileName.equals("sequence_B-m-00000")
        || fileName.equals("sequence_B-m-00001") || fileName.equals("sequence_B-r-00000")
        || fileName.equals("sequence_C-r-00000")) {
      namedOutputCount++;
    } else if (fileName.equals("a-r-00000") || fileName.equals("b-r-00000")
        || fileName.equals("c-r-00000") || fileName.equals("d-r-00000")
        || fileName.equals("e-r-00000")) {
      valueBasedOutputCount++;
    }
  }
  assertEquals(9, namedOutputCount);
  assertEquals(5, valueBasedOutputCount);
  // assert TextOutputFormat files correctness
  BufferedReader reader = new BufferedReader(new InputStreamReader(
      fs.open(new Path(FileOutputFormat.getOutputPath(job), "text-r-00000"))));
  int count = 0;
  String line = reader.readLine();
  while (line != null) {
    assertTrue(line.endsWith(TEXT));
    line = reader.readLine();
    count++;
  }
  reader.close();
  assertFalse(count == 0);
  // assert SequenceOutputFormat files correctness
  SequenceFile.Reader seqReader = new SequenceFile.Reader(
      fs, new Path(FileOutputFormat.getOutputPath(job), "sequence_B-r-00000"), conf);
  assertEquals(IntWritable.class, seqReader.getKeyClass());
  assertEquals(Text.class, seqReader.getValueClass());
  count = 0;
  IntWritable key = new IntWritable();
  Text value = new Text();
  while (seqReader.next(key, value)) {
    assertEquals(SEQUENCE, value.toString());
    count++;
  }
  seqReader.close();
  assertFalse(count == 0);
  if (withCounters) {
    CounterGroup counters = job.getCounters().getGroup(MultipleOutputs.class.getName());
    assertEquals(9, counters.size());
    assertEquals(4, counters.findCounter(TEXT).getValue());
    assertEquals(2, counters.findCounter(SEQUENCE + "_A").getValue());
    assertEquals(4, counters.findCounter(SEQUENCE + "_B").getValue());
    assertEquals(2, counters.findCounter(SEQUENCE + "_C").getValue());
    assertEquals(2, counters.findCounter("a").getValue());
    assertEquals(2, counters.findCounter("b").getValue());
    assertEquals(4, counters.findCounter("c").getValue());
    assertEquals(4, counters.findCounter("d").getValue());
    assertEquals(4, counters.findCounter("e").getValue());
  }
}
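The test's MOMap and MOReduce classes are not shown here. A minimal sketch of the reducer-side pattern that would produce both the registered named outputs and the value-based "a-r-00000"-style files asserted above (the class body is an assumption, not the test's actual MOReduce):

// Assumed sketch of the reducer-side MultipleOutputs pattern.
public static class MOReduceSketch extends Reducer<LongWritable, Text, LongWritable, Text> {
  private MultipleOutputs<LongWritable, Text> mos;

  @Override
  protected void setup(Context context) {
    mos = new MultipleOutputs<LongWritable, Text>(context);
  }

  @Override
  protected void reduce(LongWritable key, Iterable<Text> values, Context context)
      throws IOException, InterruptedException {
    for (Text value : values) {
      mos.write("text", key, value);            // named output => text-r-00000
      mos.write(key, value, value.toString());  // baseOutputPath => a-r-00000, b-r-00000, ...
    }
  }

  @Override
  protected void cleanup(Context context) throws IOException, InterruptedException {
    mos.close();  // closes the named-output record writers
  }
}

The cleanup-time mos.close() matters: MultipleOutputs manages its own record writers, and without the close the named-output files can be left incomplete.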
Use of org.apache.hadoop.mapreduce.CounterGroup in project hadoop by apache.
The class EventWriter, method toAvro:
static JhCounters toAvro(Counters counters, String name) {
  JhCounters result = new JhCounters();
  result.setName(new Utf8(name));
  result.setGroups(new ArrayList<JhCounterGroup>(0));
  if (counters == null)
    return result;
  for (CounterGroup group : counters) {
    JhCounterGroup g = new JhCounterGroup();
    g.setName(new Utf8(group.getName()));
    g.setDisplayName(new Utf8(group.getDisplayName()));
    g.setCounts(new ArrayList<JhCounter>(group.size()));
    for (Counter counter : group) {
      JhCounter c = new JhCounter();
      c.setName(new Utf8(counter.getName()));
      c.setDisplayName(new Utf8(counter.getDisplayName()));
      c.setValue(counter.getValue());
      g.getCounts().add(c);
    }
    result.getGroups().add(g);
  }
  return result;
}
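Since toAvro is a package-private static helper, a caller in the same package can convert a finished job's counters directly. A hypothetical call site (the jobCounters variable and the "COUNTERS" label are illustrative):

// Hypothetical call site: serialize counters into their Avro job-history form.
JhCounters avroCounters = EventWriter.toAvro(jobCounters, "COUNTERS");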
Use of org.apache.hadoop.mapreduce.CounterGroup in project hadoop by apache.
The class JSONHistoryViewerPrinter, method printJobCounters:
private void printJobCounters(Counters totalCounters, Counters mapCounters, Counters reduceCounters)
    throws JSONException {
  // Killed jobs might not have counters
  if (totalCounters != null) {
    JSONObject jGroups = new JSONObject();
    for (String groupName : totalCounters.getGroupNames()) {
      CounterGroup totalGroup = totalCounters.getGroup(groupName);
      CounterGroup mapGroup = mapCounters.getGroup(groupName);
      CounterGroup reduceGroup = reduceCounters.getGroup(groupName);
      Iterator<Counter> ctrItr = totalGroup.iterator();
      JSONArray jGroup = new JSONArray();
      while (ctrItr.hasNext()) {
        JSONObject jCounter = new JSONObject();
        org.apache.hadoop.mapreduce.Counter counter = ctrItr.next();
        String name = counter.getName();
        long mapValue = mapGroup.findCounter(name).getValue();
        long reduceValue = reduceGroup.findCounter(name).getValue();
        long totalValue = counter.getValue();
        jCounter.put("counterName", name);
        jCounter.put("mapValue", mapValue);
        jCounter.put("reduceValue", reduceValue);
        jCounter.put("totalValue", totalValue);
        jGroup.put(jCounter);
      }
      jGroups.put(fixGroupNameForShuffleErrors(totalGroup.getName()), jGroup);
    }
    json.put("counters", jGroups);
  }
}
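For one group with one counter, the "counters" object assembled above comes out roughly as follows (group, counter, and values are illustrative):

"counters": {
  "org.apache.hadoop.mapreduce.TaskCounter": [
    {"counterName": "MAP_INPUT_RECORDS", "mapValue": 8, "reduceValue": 0, "totalValue": 8}
  ]
}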
Use of org.apache.hadoop.mapreduce.CounterGroup in project hadoop by apache.
The class JHEventHandlerForSigtermTest, method testCountersToJSON:
@Test(timeout = 50000)
public void testCountersToJSON() throws Exception {
  JobHistoryEventHandler jheh = new JobHistoryEventHandler(null, 0);
  Counters counters = new Counters();
  CounterGroup group1 = counters.addGroup("DOCTORS", "Incarnations of the Doctor");
  group1.addCounter("PETER_CAPALDI", "Peter Capaldi", 12);
  group1.addCounter("MATT_SMITH", "Matt Smith", 11);
  group1.addCounter("DAVID_TENNANT", "David Tennant", 10);
  CounterGroup group2 = counters.addGroup("COMPANIONS", "Companions of the Doctor");
  group2.addCounter("CLARA_OSWALD", "Clara Oswald", 6);
  group2.addCounter("RORY_WILLIAMS", "Rory Williams", 5);
  group2.addCounter("AMY_POND", "Amy Pond", 4);
  group2.addCounter("MARTHA_JONES", "Martha Jones", 3);
  group2.addCounter("DONNA_NOBLE", "Donna Noble", 2);
  group2.addCounter("ROSE_TYLER", "Rose Tyler", 1);
  JsonNode jsonNode = JobHistoryEventUtils.countersToJSON(counters);
  String jsonStr = new ObjectMapper().writeValueAsString(jsonNode);
  // Groups and counters are serialized sorted by name, hence COMPANIONS
  // before DOCTORS and alphabetical counters within each group.
  String expected = "[{\"NAME\":\"COMPANIONS\",\"DISPLAY_NAME\":\"Companions "
      + "of the Doctor\",\"COUNTERS\":[{\"NAME\":\"AMY_POND\",\"DISPLAY_NAME\""
      + ":\"Amy Pond\",\"VALUE\":4},{\"NAME\":\"CLARA_OSWALD\","
      + "\"DISPLAY_NAME\":\"Clara Oswald\",\"VALUE\":6},{\"NAME\":"
      + "\"DONNA_NOBLE\",\"DISPLAY_NAME\":\"Donna Noble\",\"VALUE\":2},"
      + "{\"NAME\":\"MARTHA_JONES\",\"DISPLAY_NAME\":\"Martha Jones\","
      + "\"VALUE\":3},{\"NAME\":\"RORY_WILLIAMS\",\"DISPLAY_NAME\":\"Rory "
      + "Williams\",\"VALUE\":5},{\"NAME\":\"ROSE_TYLER\",\"DISPLAY_NAME\":"
      + "\"Rose Tyler\",\"VALUE\":1}]},{\"NAME\":\"DOCTORS\",\"DISPLAY_NAME\""
      + ":\"Incarnations of the Doctor\",\"COUNTERS\":[{\"NAME\":"
      + "\"DAVID_TENNANT\",\"DISPLAY_NAME\":\"David Tennant\",\"VALUE\":10},"
      + "{\"NAME\":\"MATT_SMITH\",\"DISPLAY_NAME\":\"Matt Smith\",\"VALUE\":"
      + "11},{\"NAME\":\"PETER_CAPALDI\",\"DISPLAY_NAME\":\"Peter Capaldi\","
      + "\"VALUE\":12}]}]";
  Assert.assertEquals(expected, jsonStr);
}