Use of org.apache.hadoop.mapreduce.Counter in project parquet-mr by apache.
The class TestInputOutputFormat, method value().
private static long value(Job job, String groupName, String name) throws Exception {
  // getGroup moved to AbstractCounters
  Method getGroup = org.apache.hadoop.mapreduce.Counters.class.getMethod("getGroup", String.class);
  // CounterGroup changed to an interface
  Method findCounter = org.apache.hadoop.mapreduce.CounterGroup.class.getMethod("findCounter", String.class);
  // Counter changed to an interface
  Method getValue = org.apache.hadoop.mapreduce.Counter.class.getMethod("getValue");
  CounterGroup group = (CounterGroup) getGroup.invoke(job.getCounters(), groupName);
  Counter counter = (Counter) findCounter.invoke(group, name);
  return (Long) getValue.invoke(counter);
}
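The reflection above keeps the test compiling and running against both Hadoop 1.x and 2.x, where Counters, CounterGroup and Counter moved between classes and interfaces. When only a single Hadoop 2.x version needs to be supported, the same lookup can be written without reflection; a minimal sketch (the directValue name is illustrative, not part of parquet-mr):

import org.apache.hadoop.mapreduce.Counter;
import org.apache.hadoop.mapreduce.Job;

// Non-reflective equivalent: resolve the group, then the counter, then read its value.
private static long directValue(Job job, String groupName, String name) throws Exception {
  Counter counter = job.getCounters().getGroup(groupName).findCounter(name);
  return counter.getValue();
}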
Use of org.apache.hadoop.mapreduce.Counter in project tez by apache.
The class TezTypeConverters, method fromTez().
public static Counters fromTez(TezCounters tezCounters) {
  if (tezCounters == null) {
    return null;
  }
  Counters counters = new Counters();
  for (CounterGroup xGrp : tezCounters) {
    counters.addGroup(xGrp.getName(), xGrp.getDisplayName());
    for (TezCounter xCounter : xGrp) {
      Counter counter = counters.findCounter(xGrp.getName(), xCounter.getName());
      counter.setValue(xCounter.getValue());
    }
  }
  return counters;
}
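A minimal sketch of how the converter could be exercised, assuming a public no-arg TezCounters constructor; the group and counter names are made up purely for illustration:

import org.apache.hadoop.mapreduce.Counters;
import org.apache.tez.common.counters.TezCounters;

// Populate a Tez counter, convert, and read the value back through the MapReduce API.
TezCounters tezCounters = new TezCounters();
tezCounters.findCounter("example-group", "example-counter").increment(42L);
Counters mrCounters = TezTypeConverters.fromTez(tezCounters);
long value = mrCounters.findCounter("example-group", "example-counter").getValue(); // expected: 42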
Use of org.apache.hadoop.mapreduce.Counter in project cdap by caskdata.
The class BasicWorkflowToken, method setMapReduceCounters().
public synchronized void setMapReduceCounters(Counters counters) {
  ImmutableMap.Builder<String, Map<String, Long>> countersBuilder = ImmutableMap.builder();
  for (CounterGroup group : counters) {
    ImmutableMap.Builder<String, Long> groupBuilder = ImmutableMap.builder();
    for (Counter counter : group) {
      groupBuilder.put(counter.getName(), counter.getValue());
      // Also put the counter to system scope.
      put(group.getName() + "." + counter.getName(), Value.of(counter.getValue()), WorkflowToken.Scope.SYSTEM);
    }
    countersBuilder.put(group.getName(), groupBuilder.build());
  }
  this.mapReduceCounters = countersBuilder.build();
}
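The same counter flattening can be sketched without Guava or the CDAP WorkflowToken, using plain JDK maps; an illustrative helper, not part of cdap:

import java.util.HashMap;
import java.util.Map;
import org.apache.hadoop.mapreduce.Counter;
import org.apache.hadoop.mapreduce.CounterGroup;
import org.apache.hadoop.mapreduce.Counters;

// Flatten Counters into a group name -> (counter name -> value) map.
static Map<String, Map<String, Long>> toMap(Counters counters) {
  Map<String, Map<String, Long>> result = new HashMap<>();
  for (CounterGroup group : counters) {
    Map<String, Long> groupValues = new HashMap<>();
    for (Counter counter : group) {
      groupValues.put(counter.getName(), counter.getValue());
    }
    result.put(group.getName(), groupValues);
  }
  return result;
}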
Use of org.apache.hadoop.mapreduce.Counter in project hbase by apache.
The class TestTableMapReduce, method verifyJobCountersAreEmitted().
/**
 * Verify that scan counters are emitted from the job.
 * @param job the completed map reduce job whose counters are inspected
 * @throws IOException if the job counters cannot be retrieved
 */
private void verifyJobCountersAreEmitted(Job job) throws IOException {
  Counters counters = job.getCounters();
  Counter counter = counters.findCounter(TableRecordReaderImpl.HBASE_COUNTER_GROUP_NAME, "RPC_CALLS");
  assertNotNull("Unable to find Job counter for HBase scan metrics, RPC_CALLS", counter);
  assertTrue("Counter value for RPC_CALLS should be larger than 0", counter.getValue() > 0);
}
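When a counter such as RPC_CALLS is missing, it can help to list everything the scan emitted. A minimal sketch that iterates the whole HBase counter group (the dumpScanMetrics name is illustrative, not part of hbase):

import java.io.IOException;
import org.apache.hadoop.mapreduce.Counter;
import org.apache.hadoop.mapreduce.CounterGroup;
import org.apache.hadoop.mapreduce.Job;

// Print every counter in the HBase scan-metrics group for debugging.
private void dumpScanMetrics(Job job) throws IOException {
  CounterGroup group = job.getCounters().getGroup(TableRecordReaderImpl.HBASE_COUNTER_GROUP_NAME);
  for (Counter counter : group) {
    System.out.println(counter.getName() + " = " + counter.getValue());
  }
}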
Use of org.apache.hadoop.mapreduce.Counter in project hbase by apache.
The class TestRowCounter, method runCreateSubmittableJobWithArgs().
/**
 * Run the RowCounter map reduce job and verify the row count.
 *
 * @param args the command line arguments to be used for rowcounter job.
 * @param expectedCount the expected row count (result of map reduce job).
 * @throws Exception in case of any unexpected error.
 */
private void runCreateSubmittableJobWithArgs(String[] args, int expectedCount) throws Exception {
  Job job = RowCounter.createSubmittableJob(TEST_UTIL.getConfiguration(), args);
  long start = EnvironmentEdgeManager.currentTime();
  job.waitForCompletion(true);
  long duration = EnvironmentEdgeManager.currentTime() - start;
  LOG.debug("row count duration (ms): " + duration);
  assertTrue(job.isSuccessful());
  Counter counter = job.getCounters().findCounter(RowCounter.RowCounterMapper.Counters.ROWS);
  assertEquals(expectedCount, counter.getValue());
}
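A hypothetical call site for the helper; the table name and expected count are made up purely for illustration:

// Run RowCounter against a test table and expect ten rows to be counted.
String[] args = new String[] { "exampleTable" };
runCreateSubmittableJobWithArgs(args, 10);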