
Example 1 with Counters

Use of org.apache.hadoop.mapreduce.Counters in project hadoop by apache.

Class TestSpeculativeExecution, method testSpeculativeExecution.

@Test
public void testSpeculativeExecution() throws Exception {
    if (!(new File(MiniMRYarnCluster.APPJAR)).exists()) {
        LOG.info("MRAppJar " + MiniMRYarnCluster.APPJAR + " not found. Not running test.");
        return;
    }
    /*------------------------------------------------------------------
     * Test that Map/Red does not speculate if MAP_SPECULATIVE and 
     * REDUCE_SPECULATIVE are both false.
     * -----------------------------------------------------------------
     */
    Job job = runSpecTest(false, false);
    boolean succeeded = job.waitForCompletion(true);
    Assert.assertTrue(succeeded);
    Assert.assertEquals(JobStatus.State.SUCCEEDED, job.getJobState());
    Counters counters = job.getCounters();
    Assert.assertEquals(2, counters.findCounter(JobCounter.TOTAL_LAUNCHED_MAPS).getValue());
    Assert.assertEquals(2, counters.findCounter(JobCounter.TOTAL_LAUNCHED_REDUCES).getValue());
    Assert.assertEquals(0, counters.findCounter(JobCounter.NUM_FAILED_MAPS).getValue());
    /*----------------------------------------------------------------------
     * Test that Mapper speculates if MAP_SPECULATIVE is true and
     * REDUCE_SPECULATIVE is false.
     * ---------------------------------------------------------------------
     */
    job = runSpecTest(true, false);
    succeeded = job.waitForCompletion(true);
    Assert.assertTrue(succeeded);
    Assert.assertEquals(JobStatus.State.SUCCEEDED, job.getJobState());
    counters = job.getCounters();
    // The long-running map will be killed and a new one started.
    Assert.assertEquals(3, counters.findCounter(JobCounter.TOTAL_LAUNCHED_MAPS).getValue());
    Assert.assertEquals(2, counters.findCounter(JobCounter.TOTAL_LAUNCHED_REDUCES).getValue());
    Assert.assertEquals(0, counters.findCounter(JobCounter.NUM_FAILED_MAPS).getValue());
    Assert.assertEquals(1, counters.findCounter(JobCounter.NUM_KILLED_MAPS).getValue());
    /*----------------------------------------------------------------------
     * Test that Reducer speculates if REDUCE_SPECULATIVE is true and
     * MAP_SPECULATIVE is false.
     * ---------------------------------------------------------------------
     */
    job = runSpecTest(false, true);
    succeeded = job.waitForCompletion(true);
    Assert.assertTrue(succeeded);
    Assert.assertEquals(JobStatus.State.SUCCEEDED, job.getJobState());
    counters = job.getCounters();
    // The long-running reduce will be killed and a new one started.
    Assert.assertEquals(2, counters.findCounter(JobCounter.TOTAL_LAUNCHED_MAPS).getValue());
    Assert.assertEquals(3, counters.findCounter(JobCounter.TOTAL_LAUNCHED_REDUCES).getValue());
}
Also used : Counters(org.apache.hadoop.mapreduce.Counters) Job(org.apache.hadoop.mapreduce.Job) File(java.io.File) Test(org.junit.Test)
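The runSpecTest helper called above is not shown on this page. As a rough sketch of what such a helper could look like, assuming the same mrCluster, APP_JAR and SleepJob used in Example 4 (the job sizes and sleep times below are illustrative, not the actual Hadoop test code), the two flags map onto the standard Job speculation setters:

private Job runSpecTest(boolean mapspec, boolean redspec) throws Exception {
    // Hypothetical sketch: build a small sleep job where one map/reduce runs
    // long enough to become a speculation candidate, then toggle speculation.
    SleepJob sleepJob = new SleepJob();
    sleepJob.setConf(mrCluster.getConfig());
    // Two maps and two reduces; timings are illustrative only.
    Job job = sleepJob.createJob(2, 2, 30000, 1, 1000, 1);
    // These setters back the mapreduce.map.speculative and
    // mapreduce.reduce.speculative configuration keys.
    job.setMapSpeculativeExecution(mapspec);
    job.setReduceSpeculativeExecution(redspec);
    job.setJarByClass(SleepJob.class);
    // The AppMaster jar itself, as in Example 4.
    job.addFileToClassPath(APP_JAR);
    job.submit();
    return job;
}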

Example 2 with Counters

Use of org.apache.hadoop.mapreduce.Counters in project hadoop by apache.

Class TestMRJobs, method verifyFailingMapperCounters.

protected void verifyFailingMapperCounters(Job job) throws InterruptedException, IOException {
    Counters counters = job.getCounters();
    Assert.assertEquals(2, counters.findCounter(JobCounter.OTHER_LOCAL_MAPS).getValue());
    Assert.assertEquals(2, counters.findCounter(JobCounter.TOTAL_LAUNCHED_MAPS).getValue());
    Assert.assertEquals(2, counters.findCounter(JobCounter.NUM_FAILED_MAPS).getValue());
    Assert.assertTrue(counters.findCounter(JobCounter.SLOTS_MILLIS_MAPS) != null
            && counters.findCounter(JobCounter.SLOTS_MILLIS_MAPS).getValue() != 0);
}
Also used : Counters(org.apache.hadoop.mapreduce.Counters)
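Besides the JobCounter enum lookups above, findCounter also has a String-based overload for counters that are not backed by an enum. A minimal sketch, assuming a job object as in the method above; the "MyApp" group and "RECORDS_SEEN" counter names are hypothetical:

Counters counters = job.getCounters();
// Look up a counter by group name and counter name instead of an enum key.
// Both names here are hypothetical examples.
long customValue = counters.findCounter("MyApp", "RECORDS_SEEN").getValue();
System.out.println("MyApp.RECORDS_SEEN = " + customValue);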

Example 3 with Counters

Use of org.apache.hadoop.mapreduce.Counters in project hadoop by apache.

Class TestMRJobs, method verifyRandomWriterCounters.

protected void verifyRandomWriterCounters(Job job) throws InterruptedException, IOException {
    Counters counters = job.getCounters();
    Assert.assertEquals(3, counters.findCounter(JobCounter.OTHER_LOCAL_MAPS).getValue());
    Assert.assertEquals(3, counters.findCounter(JobCounter.TOTAL_LAUNCHED_MAPS).getValue());
}
Also used : Counters(org.apache.hadoop.mapreduce.Counters)
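When an assertion like the ones above fails, it can help to dump everything the job reported. A minimal sketch for debugging; dumpCounters is a helper name introduced here, not part of the Hadoop tests. It relies on Counters being Iterable over its counter groups and each group being Iterable over its counters:

protected void dumpCounters(Job job) throws IOException {
    Counters counters = job.getCounters();
    // Counters iterates over CounterGroup instances; each group iterates
    // over its individual Counter entries.
    for (org.apache.hadoop.mapreduce.CounterGroup group : counters) {
        System.out.println("Group: " + group.getDisplayName());
        for (org.apache.hadoop.mapreduce.Counter counter : group) {
            System.out.println("  " + counter.getDisplayName() + " = " + counter.getValue());
        }
    }
}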

Example 4 with Counters

Use of org.apache.hadoop.mapreduce.Counters in project hadoop by apache.

Class TestMRJobsWithHistoryService, method testJobHistoryData.

@Test(timeout = 90000)
public void testJobHistoryData() throws IOException, InterruptedException, AvroRemoteException, ClassNotFoundException {
    if (!(new File(MiniMRYarnCluster.APPJAR)).exists()) {
        LOG.info("MRAppJar " + MiniMRYarnCluster.APPJAR + " not found. Not running test.");
        return;
    }
    SleepJob sleepJob = new SleepJob();
    sleepJob.setConf(mrCluster.getConfig());
    // Job with 3 maps and 2 reduces
    Job job = sleepJob.createJob(3, 2, 1000, 1, 500, 1);
    job.setJarByClass(SleepJob.class);
    // The AppMaster jar itself.
    job.addFileToClassPath(APP_JAR);
    job.waitForCompletion(true);
    Counters counterMR = job.getCounters();
    JobId jobId = TypeConverter.toYarn(job.getJobID());
    ApplicationId appID = jobId.getAppId();
    int pollElapsed = 0;
    while (true) {
        Thread.sleep(1000);
        pollElapsed += 1000;
        if (TERMINAL_RM_APP_STATES.contains(mrCluster.getResourceManager().getRMContext().getRMApps().get(appID).getState())) {
            break;
        }
        if (pollElapsed >= 60000) {
            LOG.warn("application did not reach terminal state within 60 seconds");
            break;
        }
    }
    Assert.assertEquals(RMAppState.FINISHED, mrCluster.getResourceManager().getRMContext().getRMApps().get(appID).getState());
    Counters counterHS = job.getCounters();
    // TODO: the assert below passed; check whether we should compare each
    // field or convert to V2 counters before comparing.
    LOG.info("CounterHS " + counterHS);
    LOG.info("CounterMR " + counterMR);
    Assert.assertEquals(counterHS, counterMR);
    HSClientProtocol historyClient = instantiateHistoryProxy();
    GetJobReportRequest gjReq = Records.newRecord(GetJobReportRequest.class);
    gjReq.setJobId(jobId);
    JobReport jobReport = historyClient.getJobReport(gjReq).getJobReport();
    verifyJobReport(jobReport, jobId);
}
Also used : HSClientProtocol(org.apache.hadoop.mapreduce.v2.api.HSClientProtocol) SleepJob(org.apache.hadoop.mapreduce.SleepJob) Counters(org.apache.hadoop.mapreduce.Counters) Job(org.apache.hadoop.mapreduce.Job) ApplicationId(org.apache.hadoop.yarn.api.records.ApplicationId) File(java.io.File) JobId(org.apache.hadoop.mapreduce.v2.api.records.JobId) GetJobReportRequest(org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetJobReportRequest) JobReport(org.apache.hadoop.mapreduce.v2.api.records.JobReport) Test(org.junit.Test)
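The examples on this page read counters on the client side. The writing side typically happens inside a Mapper or Reducer through the task context; a minimal sketch, where CountingMapper and the MyCounter enum are hypothetical names introduced for illustration:

public static class CountingMapper extends
        org.apache.hadoop.mapreduce.Mapper<org.apache.hadoop.io.LongWritable, org.apache.hadoop.io.Text,
                org.apache.hadoop.io.Text, org.apache.hadoop.io.LongWritable> {

    // Any enum can serve as a counter key; this one is hypothetical.
    public enum MyCounter { RECORDS_SEEN }

    @Override
    protected void map(org.apache.hadoop.io.LongWritable key, org.apache.hadoop.io.Text value, Context context)
            throws java.io.IOException, InterruptedException {
        // Values incremented here are aggregated across all task attempts and
        // show up in job.getCounters() on the client, next to the built-in JobCounters.
        context.getCounter(MyCounter.RECORDS_SEEN).increment(1);
    }
}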

Example 5 with Counters

Use of org.apache.hadoop.mapreduce.Counters in project hadoop by apache.

Class TestUberAM, method verifyFailingMapperCounters.

@Override
protected void verifyFailingMapperCounters(Job job) throws InterruptedException, IOException {
    Counters counters = job.getCounters();
    super.verifyFailingMapperCounters(job);
    Assert.assertEquals(2, counters.findCounter(JobCounter.TOTAL_LAUNCHED_UBERTASKS).getValue());
    Assert.assertEquals(2, counters.findCounter(JobCounter.NUM_UBER_SUBMAPS).getValue());
    Assert.assertEquals(2, counters.findCounter(JobCounter.NUM_FAILED_UBERTASKS).getValue());
}
Also used : Counters(org.apache.hadoop.mapreduce.Counters)
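The uber-task counters asserted above only show up when the MRAppMaster runs the job in uber mode. A minimal sketch of the configuration that permits it, assuming org.apache.hadoop.conf.Configuration and org.apache.hadoop.mapreduce.MRJobConfig are on the classpath; the threshold values are illustrative:

Configuration conf = new Configuration();
// Allow small jobs to run as a single uber task inside the AM. This is what
// produces TOTAL_LAUNCHED_UBERTASKS and NUM_UBER_SUBMAPS in the counters.
conf.setBoolean(MRJobConfig.JOB_UBERTASK_ENABLE, true);
// Jobs only uberize if they are small enough; the limits below are illustrative.
conf.setInt(MRJobConfig.JOB_UBERTASK_MAXMAPS, 9);
conf.setInt(MRJobConfig.JOB_UBERTASK_MAXREDUCES, 1);
Job job = Job.getInstance(conf, "uber-sketch");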

Aggregations

Counters (org.apache.hadoop.mapreduce.Counters): 72
Test (org.junit.Test): 24
Job (org.apache.hadoop.mapreduce.Job): 21
Path (org.apache.hadoop.fs.Path): 14
Configuration (org.apache.hadoop.conf.Configuration): 13
Counter (org.apache.hadoop.mapreduce.Counter): 11
Task (org.apache.hadoop.mapreduce.v2.app.job.Task): 8
TaskId (org.apache.hadoop.mapreduce.v2.api.records.TaskId): 7
PhoenixScrutinyJobCounters (org.apache.phoenix.mapreduce.index.PhoenixScrutinyJobCounters): 7
BaseTest (org.apache.phoenix.query.BaseTest): 7
IOException (java.io.IOException): 6
TaskAttemptId (org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId): 6
TaskAttempt (org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt): 6
YarnConfiguration (org.apache.hadoop.yarn.conf.YarnConfiguration): 6
HdfsConfiguration (org.apache.hadoop.hdfs.HdfsConfiguration): 5
TableName (org.apache.hadoop.hbase.TableName): 4
JobId (org.apache.hadoop.mapreduce.v2.api.records.JobId): 4
File (java.io.File): 3
URI (java.net.URI): 3
FileSystem (org.apache.hadoop.fs.FileSystem): 3