Use of org.apache.hadoop.mapreduce.Counter in project hadoop by apache.
From class TestGridMixClasses, method testLoadJobLoadReducer:
/*
 * Test LoadJob.LoadReducer.
 */
@Test(timeout = 3000)
public void testLoadJobLoadReducer() throws Exception {
LoadJob.LoadReducer test = new LoadJob.LoadReducer();
Configuration conf = new Configuration();
conf.setInt(JobContext.NUM_REDUCES, 2);
CompressionEmulationUtil.setCompressionEmulationEnabled(conf, true);
conf.setBoolean(FileOutputFormat.COMPRESS, true);
conf.setBoolean(MRJobConfig.MAP_OUTPUT_COMPRESS, true);
TaskAttemptID taskid = new TaskAttemptID();
RawKeyValueIterator input = new FakeRawKeyValueIterator();
Counter counter = new GenericCounter();
Counter inputValueCounter = new GenericCounter();
LoadRecordWriter output = new LoadRecordWriter();
OutputCommitter committer = new CustomOutputCommitter();
StatusReporter reporter = new DummyReporter();
RawComparator<GridmixKey> comparator = new FakeRawComparator();
ReduceContext<GridmixKey, GridmixRecord, NullWritable, GridmixRecord> reduceContext =
    new ReduceContextImpl<GridmixKey, GridmixRecord, NullWritable, GridmixRecord>(
        conf, taskid, input, counter, inputValueCounter, output, committer,
        reporter, comparator, GridmixKey.class, GridmixRecord.class);
// consume the first record so the context has a "previous" key/value
reduceContext.nextKeyValue();
org.apache.hadoop.mapreduce.Reducer<GridmixKey, GridmixRecord, NullWritable, GridmixRecord>.Context context =
    new WrappedReducer<GridmixKey, GridmixRecord, NullWritable, GridmixRecord>()
        .getReducerContext(reduceContext);
// test.setup(context);
test.run(context);
// 9 records have been read (10 in total, minus the one consumed above)
assertEquals(9, counter.getValue());
assertEquals(10, inputValueCounter.getValue());
assertEquals(1, output.getData().size());
GridmixRecord record = output.getData().values().iterator().next();
assertEquals(1593, record.getSize());
}
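As a point of reference, the Counter asserted on above is just an accumulating long. Below is a minimal standalone sketch of the same GenericCounter usage; the counter names are illustrative, not from the test.

import org.apache.hadoop.mapreduce.Counter;
import org.apache.hadoop.mapreduce.counters.GenericCounter;

public class CounterSketch {
  public static void main(String[] args) {
    // GenericCounter is the simple in-memory Counter implementation
    // that the test above passes to ReduceContextImpl.
    Counter records = new GenericCounter("RECORDS_READ", "Records read");
    for (int i = 0; i < 9; i++) {
      records.increment(1);
    }
    System.out.println(records.getValue()); // prints 9
  }
}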
Use of org.apache.hadoop.mapreduce.Counter in project hadoop by apache.
From class JobHistoryEventHandler, method setSummarySlotSeconds:
private void setSummarySlotSeconds(JobSummary summary, Counters allCounters) {
Counter slotMillisMapCounter = allCounters.findCounter(JobCounter.SLOTS_MILLIS_MAPS);
if (slotMillisMapCounter != null) {
summary.setMapSlotSeconds(slotMillisMapCounter.getValue() / 1000);
}
Counter slotMillisReduceCounter = allCounters.findCounter(JobCounter.SLOTS_MILLIS_REDUCES);
if (slotMillisReduceCounter != null) {
summary.setReduceSlotSeconds(slotMillisReduceCounter.getValue() / 1000);
}
}
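The null checks guard against counters that were never recorded for the job. Below is a minimal sketch of the same findCounter lookup and millis-to-seconds conversion against a freshly built Counters object; the increment value is made up.

import org.apache.hadoop.mapreduce.Counter;
import org.apache.hadoop.mapreduce.Counters;
import org.apache.hadoop.mapreduce.JobCounter;

public class SlotSecondsSketch {
  public static void main(String[] args) {
    Counters allCounters = new Counters();
    // findCounter resolves (and lazily creates) the framework counter
    Counter maps = allCounters.findCounter(JobCounter.SLOTS_MILLIS_MAPS);
    maps.increment(123456);
    // same millis-to-seconds conversion as above; integer division truncates
    System.out.println(maps.getValue() / 1000); // prints 123
  }
}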
Use of org.apache.hadoop.mapreduce.Counter in project hadoop by apache.
From class CountersStrings, method toEscapedCompactString:
/**
* Make the 0.21 counter group string.
* Format: {(actual-name)(display-name)[][][]}
* where each [] is the compact string of one counter in the group.
* @param <G> type of the group
* @param group to stringify
* @return the stringified result
*/
public static <G extends CounterGroupBase<?>> String toEscapedCompactString(G group) {
List<String> escapedStrs = Lists.newArrayList();
int length;
String escapedName, escapedDispName;
synchronized (group) {
// First up, obtain the strings that need escaping. This will help us
// determine the buffer length a priori.
escapedName = escape(group.getName());
escapedDispName = escape(group.getDisplayName());
length = escapedName.length() + escapedDispName.length();
for (Counter counter : group) {
String escapedStr = toEscapedCompactString(counter);
escapedStrs.add(escapedStr);
length += escapedStr.length();
}
}
// for all the delimiting characters below
length += 6;
StringBuilder builder = new StringBuilder(length);
// group start
builder.append(GROUP_OPEN);
// Add the group name
builder.append(UNIT_OPEN);
builder.append(escapedName);
builder.append(UNIT_CLOSE);
// Add the display name
builder.append(UNIT_OPEN);
builder.append(escapedDispName);
builder.append(UNIT_CLOSE);
// append each counter's compact string
for (String escaped : escapedStrs) {
builder.append(escaped);
}
// group end
builder.append(GROUP_CLOSE);
return builder.toString();
}
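For a rough feel of the output, the sketch below serializes a one-counter group. The group and counter names are made up, and the expected shape assumes the conventional delimiters (GROUP_OPEN '{', UNIT_OPEN '(', COUNTER_OPEN '[').

import org.apache.hadoop.mapreduce.CounterGroup;
import org.apache.hadoop.mapreduce.Counters;
import org.apache.hadoop.mapreduce.counters.CountersStrings;

public class CompactStringSketch {
  public static void main(String[] args) {
    Counters counters = new Counters();
    counters.findCounter("my-group", "MY_COUNTER").increment(42);
    CounterGroup group = counters.getGroup("my-group");
    // Expected shape, roughly (display names default to the actual names):
    // {(my-group)(my-group)[(MY_COUNTER)(MY_COUNTER)(42)]}
    System.out.println(CountersStrings.toEscapedCompactString(group));
  }
}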
Use of org.apache.hadoop.mapreduce.Counter in project hadoop by apache.
From class CountersStrings, method parseEscapedCompactString:
/**
* Parse a pre 0.21 counters string into a counter object.
* @param <C> type of the counter
* @param <G> type of the counter group
* @param <T> type of the counters object
* @param compactString to parse
* @param counters an empty counters object to hold the result
* @return the counters object holding the result
* @throws ParseException
*/
@SuppressWarnings("deprecation")
public static <C extends Counter, G extends CounterGroupBase<C>, T extends AbstractCounters<C, G>>
    T parseEscapedCompactString(String compactString, T counters) throws ParseException {
IntWritable index = new IntWritable(0);
// Get the group to work on
String groupString = getBlock(compactString, GROUP_OPEN, GROUP_CLOSE, index);
while (groupString != null) {
IntWritable groupIndex = new IntWritable(0);
// Get the actual name
String groupName = StringInterner.weakIntern(getBlock(groupString, UNIT_OPEN, UNIT_CLOSE, groupIndex));
groupName = StringInterner.weakIntern(unescape(groupName));
// Get the display name
String groupDisplayName = StringInterner.weakIntern(getBlock(groupString, UNIT_OPEN, UNIT_CLOSE, groupIndex));
groupDisplayName = StringInterner.weakIntern(unescape(groupDisplayName));
// Get the counters
G group = counters.getGroup(groupName);
group.setDisplayName(groupDisplayName);
String counterString = getBlock(groupString, COUNTER_OPEN, COUNTER_CLOSE, groupIndex);
while (counterString != null) {
IntWritable counterIndex = new IntWritable(0);
// Get the actual name
String counterName = StringInterner.weakIntern(getBlock(counterString, UNIT_OPEN, UNIT_CLOSE, counterIndex));
counterName = StringInterner.weakIntern(unescape(counterName));
// Get the display name
String counterDisplayName = StringInterner.weakIntern(getBlock(counterString, UNIT_OPEN, UNIT_CLOSE, counterIndex));
counterDisplayName = StringInterner.weakIntern(unescape(counterDisplayName));
// Get the value
long value = Long.parseLong(getBlock(counterString, UNIT_OPEN, UNIT_CLOSE, counterIndex));
// Add the counter
Counter counter = group.findCounter(counterName);
counter.setDisplayName(counterDisplayName);
counter.increment(value);
// Get the next counter
counterString = getBlock(groupString, COUNTER_OPEN, COUNTER_CLOSE, groupIndex);
}
groupString = getBlock(compactString, GROUP_OPEN, GROUP_CLOSE, index);
}
return counters;
}
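Since CountersStrings provides both directions, a serialize-then-parse round trip is an easy sanity check. A minimal sketch, with made-up group and counter names:

import org.apache.hadoop.mapreduce.Counters;
import org.apache.hadoop.mapreduce.counters.CountersStrings;

public class RoundTripSketch {
  public static void main(String[] args) throws Exception {
    Counters original = new Counters();
    original.findCounter("demo-group", "DEMO_COUNTER").increment(7);
    String compact = CountersStrings.toEscapedCompactString(original);
    // Parsing into an empty Counters object rebuilds the groups and values
    Counters parsed = CountersStrings.parseEscapedCompactString(compact, new Counters());
    System.out.println(parsed.findCounter("demo-group", "DEMO_COUNTER").getValue()); // prints 7
  }
}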
Use of org.apache.hadoop.mapreduce.Counter in project hadoop by apache.
From class JobHistoryEventUtils, method countersToTimelineMetric:
public static Set<TimelineMetric> countersToTimelineMetric(Counters counters,
    long timestamp, String groupNamePrefix) {
Set<TimelineMetric> entityMetrics = new HashSet<TimelineMetric>();
for (CounterGroup g : counters) {
String groupName = g.getName();
for (Counter c : g) {
String name = groupNamePrefix + groupName + ":" + c.getName();
TimelineMetric metric = new TimelineMetric();
metric.setId(name);
metric.addValue(timestamp, c.getValue());
entityMetrics.add(metric);
}
}
return entityMetrics;
}
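Each metric id follows the pattern groupNamePrefix + groupName + ":" + counterName. The standalone sketch below mirrors that loop; the "MAP:" prefix is hypothetical.

import java.util.HashSet;
import java.util.Set;
import org.apache.hadoop.mapreduce.Counter;
import org.apache.hadoop.mapreduce.CounterGroup;
import org.apache.hadoop.mapreduce.Counters;
import org.apache.hadoop.mapreduce.TaskCounter;
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineMetric;

public class TimelineMetricSketch {
  public static void main(String[] args) {
    Counters counters = new Counters();
    counters.findCounter(TaskCounter.MAP_INPUT_RECORDS).increment(100);
    long now = System.currentTimeMillis();
    Set<TimelineMetric> metrics = new HashSet<TimelineMetric>();
    for (CounterGroup g : counters) {
      for (Counter c : g) {
        TimelineMetric metric = new TimelineMetric();
        // id convention matches countersToTimelineMetric above, e.g.
        // "MAP:org.apache.hadoop.mapreduce.TaskCounter:MAP_INPUT_RECORDS"
        metric.setId("MAP:" + g.getName() + ":" + c.getName());
        metric.addValue(now, c.getValue());
        metrics.add(metric);
      }
    }
    System.out.println(metrics.size()); // prints 1
  }
}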