Use of org.apache.hadoop.mapreduce.Counters in project hadoop by apache.
From the class TestTaskAttempt, method verifyMillisCounters:
public void verifyMillisCounters(Resource containerResource,
    int minContainerSize) throws Exception {
  // Use a controlled clock so launch/finish times (and thus the *_MILLIS counters)
  // are deterministic.
  Clock actualClock = SystemClock.getInstance();
  ControlledClock clock = new ControlledClock(actualClock);
  clock.setTime(10);
  MRApp app = new MRApp(1, 1, false, "testSlotMillisCounterUpdate", true, clock);
  app.setAllocatedContainerResource(containerResource);
  Configuration conf = new Configuration();
  conf.setInt(YarnConfiguration.RM_SCHEDULER_MINIMUM_ALLOCATION_MB, minContainerSize);
  app.setClusterInfo(new ClusterInfo(Resource.newInstance(10240, 1)));
  Job job = app.submit(conf);
  app.waitForState(job, JobState.RUNNING);
  Map<TaskId, Task> tasks = job.getTasks();
  Assert.assertEquals("Num tasks is not correct", 2, tasks.size());
  Iterator<Task> taskIter = tasks.values().iterator();
  Task mTask = taskIter.next();
  app.waitForState(mTask, TaskState.RUNNING);
  Task rTask = taskIter.next();
  app.waitForState(rTask, TaskState.RUNNING);
  Map<TaskAttemptId, TaskAttempt> mAttempts = mTask.getAttempts();
  Assert.assertEquals("Num attempts is not correct", 1, mAttempts.size());
  Map<TaskAttemptId, TaskAttempt> rAttempts = rTask.getAttempts();
  Assert.assertEquals("Num attempts is not correct", 1, rAttempts.size());
  TaskAttempt mta = mAttempts.values().iterator().next();
  TaskAttempt rta = rAttempts.values().iterator().next();
  app.waitForState(mta, TaskAttemptState.RUNNING);
  app.waitForState(rta, TaskAttemptState.RUNNING);
  // Advance the clock by 1 ms and complete both attempts, so each attempt's
  // elapsed time is exactly 1 ms.
  clock.setTime(11);
  app.getContext().getEventHandler()
      .handle(new TaskAttemptEvent(mta.getID(), TaskAttemptEventType.TA_DONE));
  app.getContext().getEventHandler()
      .handle(new TaskAttemptEvent(rta.getID(), TaskAttemptEventType.TA_DONE));
  app.waitForState(job, JobState.SUCCEEDED);
  Assert.assertEquals(mta.getFinishTime(), 11);
  Assert.assertEquals(mta.getLaunchTime(), 10);
  Assert.assertEquals(rta.getFinishTime(), 11);
  Assert.assertEquals(rta.getLaunchTime(), 10);
  Counters counters = job.getAllCounters();
  // The *_MILLIS counters are the 1 ms of elapsed time scaled by the container's
  // slot count, memory, and vcores respectively.
  int memoryMb = (int) containerResource.getMemorySize();
  int vcores = containerResource.getVirtualCores();
  Assert.assertEquals((int) Math.ceil((float) memoryMb / minContainerSize),
      counters.findCounter(JobCounter.SLOTS_MILLIS_MAPS).getValue());
  Assert.assertEquals((int) Math.ceil((float) memoryMb / minContainerSize),
      counters.findCounter(JobCounter.SLOTS_MILLIS_REDUCES).getValue());
  Assert.assertEquals(1, counters.findCounter(JobCounter.MILLIS_MAPS).getValue());
  Assert.assertEquals(1, counters.findCounter(JobCounter.MILLIS_REDUCES).getValue());
  Assert.assertEquals(memoryMb, counters.findCounter(JobCounter.MB_MILLIS_MAPS).getValue());
  Assert.assertEquals(memoryMb, counters.findCounter(JobCounter.MB_MILLIS_REDUCES).getValue());
  Assert.assertEquals(vcores, counters.findCounter(JobCounter.VCORES_MILLIS_MAPS).getValue());
  Assert.assertEquals(vcores, counters.findCounter(JobCounter.VCORES_MILLIS_REDUCES).getValue());
}
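The assertions above hinge on simple arithmetic: each attempt launches at clock time 10 and finishes at 11, so it runs for 1 ms, and the resource-weighted counters are that elapsed time scaled by the container's memory and vcores. A minimal, self-contained sketch of the same arithmetic (the container and scheduler values below are hypothetical, not taken from the test):

public class MillisCounterArithmeticSketch {
  public static void main(String[] args) {
    long elapsedMillis = 11 - 10;      // finishTime - launchTime, as asserted above
    int memoryMb = 3072;               // hypothetical container memory
    int vcores = 2;                    // hypothetical container vcores
    int minContainerSizeMb = 1024;     // hypothetical scheduler minimum allocation (MB)

    // SLOTS_MILLIS_* scales elapsed time by the number of minimum-size "slots"
    // the container occupies; MB_MILLIS_* and VCORES_MILLIS_* scale by the raw
    // resource dimensions.
    long slotsMillis = (long) Math.ceil((float) memoryMb / minContainerSizeMb) * elapsedMillis;
    long mbMillis = (long) memoryMb * elapsedMillis;
    long vcoresMillis = (long) vcores * elapsedMillis;

    System.out.println("MILLIS        = " + elapsedMillis);   // 1
    System.out.println("SLOTS_MILLIS  = " + slotsMillis);     // 3
    System.out.println("MB_MILLIS     = " + mbMillis);        // 3072
    System.out.println("VCORES_MILLIS = " + vcoresMillis);    // 2
  }
}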
Use of org.apache.hadoop.mapreduce.Counters in project cdap by caskdata.
From the class MapReduceMetricsWriter, method reportMapTaskMetrics:
private void reportMapTaskMetrics(TaskReport taskReport) {
  Counters counters = taskReport.getTaskCounters();
  MetricsContext metricsContext = mapTaskMetricsCollectors.getUnchecked(taskReport.getTaskId());
  metricsContext.gauge(MapReduceMetrics.METRIC_TASK_INPUT_RECORDS,
      getTaskCounter(counters, TaskCounter.MAP_INPUT_RECORDS));
  metricsContext.gauge(MapReduceMetrics.METRIC_TASK_OUTPUT_RECORDS,
      getTaskCounter(counters, TaskCounter.MAP_OUTPUT_RECORDS));
  metricsContext.gauge(MapReduceMetrics.METRIC_TASK_BYTES,
      getTaskCounter(counters, TaskCounter.MAP_OUTPUT_BYTES));
  metricsContext.gauge(MapReduceMetrics.METRIC_TASK_COMPLETION,
      (long) (taskReport.getProgress() * 100));
}
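The getTaskCounter helper used above is not part of this excerpt. A plausible minimal version, assuming it simply reads the named framework TaskCounter out of the Counters object and treats a missing counter as zero:

private static long getTaskCounter(Counters counters, TaskCounter taskCounter) {
  // Counters.findCounter(Enum<?>) resolves the framework counter for this task;
  // the null guard is purely defensive so an absent counter reports as 0.
  org.apache.hadoop.mapreduce.Counter counter = counters.findCounter(taskCounter);
  return counter == null ? 0L : counter.getValue();
}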
Use of org.apache.hadoop.mapreduce.Counters in project hbase by apache.
From the class ScanPerformanceEvaluation, method testSnapshotScanMapReduce:
public void testSnapshotScanMapReduce()
    throws IOException, InterruptedException, ClassNotFoundException {
  Stopwatch scanOpenTimer = Stopwatch.createUnstarted();
  Stopwatch scanTimer = Stopwatch.createUnstarted();
  Scan scan = getScan();
  String jobName = "testSnapshotScanMapReduce";
  Job job = new Job(conf);
  job.setJobName(jobName);
  job.setJarByClass(getClass());
  TableMapReduceUtil.initTableSnapshotMapperJob(this.snapshotName, scan, MyMapper.class,
      NullWritable.class, NullWritable.class, job, true, new Path(restoreDir));
  job.setNumReduceTasks(0);
  job.setOutputKeyClass(NullWritable.class);
  job.setOutputValueClass(NullWritable.class);
  job.setOutputFormatClass(NullOutputFormat.class);
  scanTimer.start();
  job.waitForCompletion(true);
  scanTimer.stop();
  Counters counters = job.getCounters();
  long numRows = counters.findCounter(ScanCounter.NUM_ROWS).getValue();
  long numCells = counters.findCounter(ScanCounter.NUM_CELLS).getValue();
  long totalBytes = counters.findCounter(HBASE_COUNTER_GROUP_NAME, "BYTES_IN_RESULTS").getValue();
  double throughput = (double) totalBytes / scanTimer.elapsed(TimeUnit.SECONDS);
  double throughputRows = (double) numRows / scanTimer.elapsed(TimeUnit.SECONDS);
  double throughputCells = (double) numCells / scanTimer.elapsed(TimeUnit.SECONDS);
  System.out.println("HBase scan mapreduce: ");
  System.out.println("total time to open scanner: "
      + scanOpenTimer.elapsed(TimeUnit.MILLISECONDS) + " ms");
  System.out.println("total time to scan: " + scanTimer.elapsed(TimeUnit.MILLISECONDS) + " ms");
  System.out.println("total bytes: " + totalBytes + " bytes ("
      + StringUtils.humanReadableInt(totalBytes) + ")");
  System.out.println("throughput : " + StringUtils.humanReadableInt((long) throughput) + "B/s");
  System.out.println("total rows : " + numRows);
  System.out.println("throughput : " + StringUtils.humanReadableInt((long) throughputRows) + " rows/s");
  System.out.println("total cells : " + numCells);
  System.out.println("throughput : " + StringUtils.humanReadableInt((long) throughputCells) + " cells/s");
}
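MyMapper is referenced but not included in this excerpt. A minimal sketch of a TableMapper that would produce the ScanCounter values read back through Counters above, assuming the mapper does nothing but count rows and cells per Result:

public static class MyMapper extends TableMapper<NullWritable, NullWritable> {
  @Override
  protected void map(ImmutableBytesWritable key, Result value, Context context)
      throws IOException, InterruptedException {
    // One input row per map() call; rawCells() gives the cells in that row.
    context.getCounter(ScanCounter.NUM_ROWS).increment(1);
    context.getCounter(ScanCounter.NUM_CELLS).increment(value.rawCells().length);
  }
}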
Use of org.apache.hadoop.mapreduce.Counters in project hbase by apache.
From the class ScanPerformanceEvaluation, method testScanMapReduce:
public void testScanMapReduce()
    throws IOException, InterruptedException, ClassNotFoundException {
  Stopwatch scanOpenTimer = Stopwatch.createUnstarted();
  Stopwatch scanTimer = Stopwatch.createUnstarted();
  Scan scan = getScan();
  String jobName = "testScanMapReduce";
  Job job = new Job(conf);
  job.setJobName(jobName);
  job.setJarByClass(getClass());
  TableMapReduceUtil.initTableMapperJob(this.tablename, scan, MyMapper.class,
      NullWritable.class, NullWritable.class, job);
  job.setNumReduceTasks(0);
  job.setOutputKeyClass(NullWritable.class);
  job.setOutputValueClass(NullWritable.class);
  job.setOutputFormatClass(NullOutputFormat.class);
  scanTimer.start();
  job.waitForCompletion(true);
  scanTimer.stop();
  Counters counters = job.getCounters();
  long numRows = counters.findCounter(ScanCounter.NUM_ROWS).getValue();
  long numCells = counters.findCounter(ScanCounter.NUM_CELLS).getValue();
  long totalBytes = counters.findCounter(HBASE_COUNTER_GROUP_NAME, "BYTES_IN_RESULTS").getValue();
  double throughput = (double) totalBytes / scanTimer.elapsed(TimeUnit.SECONDS);
  double throughputRows = (double) numRows / scanTimer.elapsed(TimeUnit.SECONDS);
  double throughputCells = (double) numCells / scanTimer.elapsed(TimeUnit.SECONDS);
  System.out.println("HBase scan mapreduce: ");
  System.out.println("total time to open scanner: "
      + scanOpenTimer.elapsed(TimeUnit.MILLISECONDS) + " ms");
  System.out.println("total time to scan: " + scanTimer.elapsed(TimeUnit.MILLISECONDS) + " ms");
  System.out.println("total bytes: " + totalBytes + " bytes ("
      + StringUtils.humanReadableInt(totalBytes) + ")");
  System.out.println("throughput : " + StringUtils.humanReadableInt((long) throughput) + "B/s");
  System.out.println("total rows : " + numRows);
  System.out.println("throughput : " + StringUtils.humanReadableInt((long) throughputRows) + " rows/s");
  System.out.println("total cells : " + numCells);
  System.out.println("throughput : " + StringUtils.humanReadableInt((long) throughputCells) + " cells/s");
}
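Both lookup styles that Counters supports appear in these two methods: findCounter with an enum constant (ScanCounter.NUM_ROWS) and findCounter with a group/name pair (HBASE_COUNTER_GROUP_NAME, "BYTES_IN_RESULTS"). A small standalone sketch of the two forms against a fresh Counters instance; the group and counter names here are made up for illustration, and in the tests above the object comes from job.getCounters():

import org.apache.hadoop.mapreduce.Counter;
import org.apache.hadoop.mapreduce.Counters;
import org.apache.hadoop.mapreduce.TaskCounter;

public class CounterLookupSketch {
  public static void main(String[] args) {
    Counters counters = new Counters();
    // Lookup by enum constant: the counter is created on first access.
    Counter byEnum = counters.findCounter(TaskCounter.MAP_INPUT_RECORDS);
    // Lookup by group and counter name, as done for "BYTES_IN_RESULTS" above.
    Counter byName = counters.findCounter("example.Group", "EXAMPLE_COUNTER");
    byEnum.increment(3);
    byName.increment(7);
    System.out.println(byEnum.getValue() + ", " + byName.getValue());  // 3, 7
  }
}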
Use of org.apache.hadoop.mapreduce.Counters in project hbase by apache.
From the class TestSyncTable, method testSyncTable:
@Test
public void testSyncTable() throws Exception {
  final TableName sourceTableName = TableName.valueOf(name.getMethodName() + "_source");
  final TableName targetTableName = TableName.valueOf(name.getMethodName() + "_target");
  Path testDir = TEST_UTIL.getDataTestDirOnTestFS("testSyncTable");
  writeTestData(sourceTableName, targetTableName);
  hashSourceTable(sourceTableName, testDir);
  Counters syncCounters = syncTables(sourceTableName, targetTableName, testDir);
  assertEqualTables(90, sourceTableName, targetTableName, false);
  assertEquals(60, syncCounters.findCounter(Counter.ROWSWITHDIFFS).getValue());
  assertEquals(10, syncCounters.findCounter(Counter.SOURCEMISSINGROWS).getValue());
  assertEquals(10, syncCounters.findCounter(Counter.TARGETMISSINGROWS).getValue());
  assertEquals(50, syncCounters.findCounter(Counter.SOURCEMISSINGCELLS).getValue());
  assertEquals(50, syncCounters.findCounter(Counter.TARGETMISSINGCELLS).getValue());
  assertEquals(20, syncCounters.findCounter(Counter.DIFFERENTCELLVALUES).getValue());
  TEST_UTIL.deleteTable(sourceTableName);
  TEST_UTIL.deleteTable(targetTableName);
}
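The syncTables helper that returns the Counters object is not shown in this excerpt. A hedged sketch of the general pattern such a helper could follow: run the already configured SyncTable MapReduce job and hand back its counters. The method name and error handling below are illustrative, not the actual TestSyncTable code:

static Counters runJobAndGetCounters(Job job) throws Exception {
  // Block until the job finishes, then read the aggregated job counters.
  if (!job.waitForCompletion(true)) {
    throw new IllegalStateException("job failed: " + job.getJobName());
  }
  return job.getCounters();
}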