
Example 41 with Counters

Use of org.apache.hadoop.mapreduce.v2.api.records.Counters in project hadoop by apache.

From the class JobCounterInfo, method getCounters.

private void getCounters(AppContext ctx, Job job) {
    if (job == null) {
        return;
    }
    total = job.getAllCounters();
    boolean needTotalCounters = false;
    if (total == null) {
        total = new Counters();
        needTotalCounters = true;
    }
    map = new Counters();
    reduce = new Counters();
    // Get all types of counters
    Map<TaskId, Task> tasks = job.getTasks();
    for (Task t : tasks.values()) {
        Counters counters = t.getCounters();
        if (counters == null) {
            continue;
        }
        switch(t.getType()) {
            case MAP:
                map.incrAllCounters(counters);
                break;
            case REDUCE:
                reduce.incrAllCounters(counters);
                break;
        }
        if (needTotalCounters) {
            total.incrAllCounters(counters);
        }
    }
}
Also used: Task (org.apache.hadoop.mapreduce.v2.app.job.Task), TaskId (org.apache.hadoop.mapreduce.v2.api.records.TaskId), Counters (org.apache.hadoop.mapreduce.Counters)
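
For readers new to the counter API used above, here is a minimal, self-contained sketch (not taken from the Hadoop sources) of the same aggregation pattern: per-task Counters objects are folded into a running total with incrAllCounters, and individual values are read back with findCounter. The specific counters are illustrative.

import org.apache.hadoop.mapreduce.Counters;
import org.apache.hadoop.mapreduce.TaskCounter;

public class CounterAggregationSketch {
    public static void main(String[] args) {
        // Pretend these came from two tasks, as in JobCounterInfo.getCounters above.
        Counters mapTaskCounters = new Counters();
        mapTaskCounters.findCounter(TaskCounter.MAP_INPUT_RECORDS).increment(100);
        Counters reduceTaskCounters = new Counters();
        reduceTaskCounters.findCounter(TaskCounter.REDUCE_INPUT_RECORDS).increment(40);
        // Fold both into a single total, mirroring total.incrAllCounters(counters).
        Counters total = new Counters();
        total.incrAllCounters(mapTaskCounters);
        total.incrAllCounters(reduceTaskCounters);
        // Read the aggregated values back.
        System.out.println(total.findCounter(TaskCounter.MAP_INPUT_RECORDS).getValue());    // 100
        System.out.println(total.findCounter(TaskCounter.REDUCE_INPUT_RECORDS).getValue()); // 40
    }
}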

Example 42 with Counters

Use of org.apache.hadoop.mapreduce.v2.api.records.Counters in project hadoop by apache.

From the class TestTaskAttemptReport, method testBuildImplicitRawCounters.

@Test
public void testBuildImplicitRawCounters() {
    // Create basic class
    TaskAttemptReportPBImpl report = new TaskAttemptReportPBImpl();
    org.apache.hadoop.mapreduce.Counters rCounters = MockJobs.newCounters();
    // Set raw counters
    report.setRawCounters(rCounters);
    // Verify getProto method implicitly converts/sets real counters
    MRProtos.TaskAttemptReportProto protoVal = report.getProto();
    Counters counters = report.getCounters();
    assertTrue(protoVal.hasCounters());
}
Also used: MRProtos (org.apache.hadoop.mapreduce.v2.proto.MRProtos), TaskAttemptReportPBImpl (org.apache.hadoop.mapreduce.v2.api.records.impl.pb.TaskAttemptReportPBImpl), Test (org.junit.Test)

Example 43 with Counters

Use of org.apache.hadoop.mapreduce.v2.api.records.Counters in project hadoop by apache.

From the class TestTaskReport, method testBuildImplicitRawCounters.

@Test
public void testBuildImplicitRawCounters() {
    // Create basic class
    TaskReportPBImpl report = new TaskReportPBImpl();
    org.apache.hadoop.mapreduce.Counters rCounters = MockJobs.newCounters();
    // Set raw counters
    report.setRawCounters(rCounters);
    // Verify getProto method implicitly converts/sets real counters
    MRProtos.TaskReportProto protoVal = report.getProto();
    assertTrue(protoVal.hasCounters());
}
Also used: MRProtos (org.apache.hadoop.mapreduce.v2.proto.MRProtos), TaskReportPBImpl (org.apache.hadoop.mapreduce.v2.api.records.impl.pb.TaskReportPBImpl), Test (org.junit.Test)
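
The two tests above rely on the report's lazy conversion: setRawCounters stores the framework-level org.apache.hadoop.mapreduce.Counters, and getProto (per the test comment) implicitly converts them into the protobuf representation, with getCounters returning the converted v2 record view. Below is a minimal sketch, not part of the test suite, that reads that converted view back. It assumes MockJobs.newCounters() from the MR app test utilities (package given from memory) and that getAllCounterGroups() is the group accessor on the v2 Counters record.

import org.apache.hadoop.mapreduce.v2.api.records.Counters;
import org.apache.hadoop.mapreduce.v2.api.records.impl.pb.TaskReportPBImpl;
import org.apache.hadoop.mapreduce.v2.app.MockJobs;

public class RawCountersRoundTripSketch {
    public static void main(String[] args) {
        TaskReportPBImpl report = new TaskReportPBImpl();
        // Same helper as the tests above: builds a populated framework Counters object.
        report.setRawCounters(MockJobs.newCounters());
        // getCounters() converts the raw counters into the v2.api.records.Counters view.
        Counters converted = report.getCounters();
        // Assumption: getAllCounterGroups() exposes the group map on the record interface.
        System.out.println("counter groups: " + converted.getAllCounterGroups().size());
    }
}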

Example 44 with Counters

Use of org.apache.hadoop.mapreduce.v2.api.records.Counters in project hadoop by apache.

From the class TestMRClientService, method test.

@Test
public void test() throws Exception {
    MRAppWithClientService app = new MRAppWithClientService(1, 0, false);
    Configuration conf = new Configuration();
    Job job = app.submit(conf);
    app.waitForState(job, JobState.RUNNING);
    Assert.assertEquals("Num tasks not correct", 1, job.getTasks().size());
    Iterator<Task> it = job.getTasks().values().iterator();
    Task task = it.next();
    app.waitForState(task, TaskState.RUNNING);
    TaskAttempt attempt = task.getAttempts().values().iterator().next();
    app.waitForState(attempt, TaskAttemptState.RUNNING);
    // send the diagnostic
    String diagnostic1 = "Diagnostic1";
    String diagnostic2 = "Diagnostic2";
    app.getContext().getEventHandler().handle(new TaskAttemptDiagnosticsUpdateEvent(attempt.getID(), diagnostic1));
    // build the status update
    TaskAttemptStatus taskAttemptStatus = new TaskAttemptStatus();
    taskAttemptStatus.id = attempt.getID();
    taskAttemptStatus.progress = 0.5f;
    taskAttemptStatus.stateString = "RUNNING";
    taskAttemptStatus.taskState = TaskAttemptState.RUNNING;
    taskAttemptStatus.phase = Phase.MAP;
    // send the status update
    app.getContext().getEventHandler().handle(new TaskAttemptStatusUpdateEvent(attempt.getID(), taskAttemptStatus));
    // verify that all objects are fully populated by invoking RPCs.
    YarnRPC rpc = YarnRPC.create(conf);
    MRClientProtocol proxy = (MRClientProtocol) rpc.getProxy(MRClientProtocol.class, app.clientService.getBindAddress(), conf);
    GetCountersRequest gcRequest = recordFactory.newRecordInstance(GetCountersRequest.class);
    gcRequest.setJobId(job.getID());
    Assert.assertNotNull("Counters is null", proxy.getCounters(gcRequest).getCounters());
    GetJobReportRequest gjrRequest = recordFactory.newRecordInstance(GetJobReportRequest.class);
    gjrRequest.setJobId(job.getID());
    JobReport jr = proxy.getJobReport(gjrRequest).getJobReport();
    verifyJobReport(jr);
    GetTaskAttemptCompletionEventsRequest gtaceRequest = recordFactory.newRecordInstance(GetTaskAttemptCompletionEventsRequest.class);
    gtaceRequest.setJobId(job.getID());
    gtaceRequest.setFromEventId(0);
    gtaceRequest.setMaxEvents(10);
    Assert.assertNotNull("TaskCompletionEvents is null", proxy.getTaskAttemptCompletionEvents(gtaceRequest).getCompletionEventList());
    GetDiagnosticsRequest gdRequest = recordFactory.newRecordInstance(GetDiagnosticsRequest.class);
    gdRequest.setTaskAttemptId(attempt.getID());
    Assert.assertNotNull("Diagnostics is null", proxy.getDiagnostics(gdRequest).getDiagnosticsList());
    GetTaskAttemptReportRequest gtarRequest = recordFactory.newRecordInstance(GetTaskAttemptReportRequest.class);
    gtarRequest.setTaskAttemptId(attempt.getID());
    TaskAttemptReport tar = proxy.getTaskAttemptReport(gtarRequest).getTaskAttemptReport();
    verifyTaskAttemptReport(tar);
    GetTaskReportRequest gtrRequest = recordFactory.newRecordInstance(GetTaskReportRequest.class);
    gtrRequest.setTaskId(task.getID());
    Assert.assertNotNull("TaskReport is null", proxy.getTaskReport(gtrRequest).getTaskReport());
    GetTaskReportsRequest gtreportsRequest = recordFactory.newRecordInstance(GetTaskReportsRequest.class);
    gtreportsRequest.setJobId(job.getID());
    gtreportsRequest.setTaskType(TaskType.MAP);
    Assert.assertNotNull("TaskReports for map is null", proxy.getTaskReports(gtreportsRequest).getTaskReportList());
    gtreportsRequest = recordFactory.newRecordInstance(GetTaskReportsRequest.class);
    gtreportsRequest.setJobId(job.getID());
    gtreportsRequest.setTaskType(TaskType.REDUCE);
    Assert.assertNotNull("TaskReports for reduce is null", proxy.getTaskReports(gtreportsRequest).getTaskReportList());
    List<String> diag = proxy.getDiagnostics(gdRequest).getDiagnosticsList();
    Assert.assertEquals("Num diagnostics not correct", 1, diag.size());
    Assert.assertEquals("Diag 1 not correct", diagnostic1, diag.get(0).toString());
    TaskReport taskReport = proxy.getTaskReport(gtrRequest).getTaskReport();
    Assert.assertEquals("Num diagnostics not correct", 1, taskReport.getDiagnosticsCount());
    // send the done signal to the task
    app.getContext().getEventHandler().handle(new TaskAttemptEvent(task.getAttempts().values().iterator().next().getID(), TaskAttemptEventType.TA_DONE));
    app.waitForState(job, JobState.SUCCEEDED);
    // For invalid jobid, throw IOException
    gtreportsRequest = recordFactory.newRecordInstance(GetTaskReportsRequest.class);
    gtreportsRequest.setJobId(TypeConverter.toYarn(JobID.forName("job_1415730144495_0001")));
    gtreportsRequest.setTaskType(TaskType.REDUCE);
    try {
        proxy.getTaskReports(gtreportsRequest);
        fail("IOException not thrown for invalid job id");
    } catch (IOException e) {
    // Expected
    }
}
Also used: Task (org.apache.hadoop.mapreduce.v2.app.job.Task), Configuration (org.apache.hadoop.conf.Configuration), GetTaskAttemptReportRequest (org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskAttemptReportRequest), MRClientProtocol (org.apache.hadoop.mapreduce.v2.api.MRClientProtocol), JobReport (org.apache.hadoop.mapreduce.v2.api.records.JobReport), GetTaskAttemptCompletionEventsRequest (org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskAttemptCompletionEventsRequest), GetTaskReportRequest (org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskReportRequest), TaskAttempt (org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt), GetDiagnosticsRequest (org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetDiagnosticsRequest), Job (org.apache.hadoop.mapreduce.v2.app.job.Job), GetCountersRequest (org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetCountersRequest), TaskReport (org.apache.hadoop.mapreduce.v2.api.records.TaskReport), TaskAttemptDiagnosticsUpdateEvent (org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptDiagnosticsUpdateEvent), TaskAttemptStatus (org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptStatusUpdateEvent.TaskAttemptStatus), TaskAttemptEvent (org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent), YarnRPC (org.apache.hadoop.yarn.ipc.YarnRPC), IOException (java.io.IOException), GetJobReportRequest (org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetJobReportRequest), TaskAttemptStatusUpdateEvent (org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptStatusUpdateEvent), TaskAttemptReport (org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptReport), GetTaskReportsRequest (org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskReportsRequest), Test (org.junit.Test)
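
As a complement to the test above, here is a minimal sketch, not from the Hadoop sources, of how a client might consume the counters returned over MRClientProtocol: the yarn-layer record is converted back into the familiar org.apache.hadoop.mapreduce.Counters with TypeConverter.fromYarn. The proxy and JobId are assumed to be obtained exactly as in the test; the chosen counter is illustrative.

import org.apache.hadoop.mapreduce.Counters;
import org.apache.hadoop.mapreduce.TaskCounter;
import org.apache.hadoop.mapreduce.TypeConverter;
import org.apache.hadoop.mapreduce.v2.api.MRClientProtocol;
import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetCountersRequest;
import org.apache.hadoop.mapreduce.v2.api.records.JobId;
import org.apache.hadoop.yarn.factories.RecordFactory;
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;

public class ClientCountersSketch {
    static long mapInputRecords(MRClientProtocol proxy, JobId jobId) throws Exception {
        RecordFactory recordFactory = RecordFactoryProvider.getRecordFactory(null);
        GetCountersRequest request = recordFactory.newRecordInstance(GetCountersRequest.class);
        request.setJobId(jobId);
        // getCounters() on the response yields org.apache.hadoop.mapreduce.v2.api.records.Counters;
        // TypeConverter.fromYarn converts it back to the framework-level Counters.
        Counters counters = TypeConverter.fromYarn(proxy.getCounters(request).getCounters());
        return counters.findCounter(TaskCounter.MAP_INPUT_RECORDS).getValue();
    }
}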

Example 45 with Counters

Use of org.apache.hadoop.mapreduce.v2.api.records.Counters in project hadoop by apache.

From the class TestTaskAttempt, method verifyMillisCounters.

public void verifyMillisCounters(Resource containerResource, int minContainerSize) throws Exception {
    Clock actualClock = SystemClock.getInstance();
    ControlledClock clock = new ControlledClock(actualClock);
    clock.setTime(10);
    MRApp app = new MRApp(1, 1, false, "testSlotMillisCounterUpdate", true, clock);
    app.setAllocatedContainerResource(containerResource);
    Configuration conf = new Configuration();
    conf.setInt(YarnConfiguration.RM_SCHEDULER_MINIMUM_ALLOCATION_MB, minContainerSize);
    app.setClusterInfo(new ClusterInfo(Resource.newInstance(10240, 1)));
    Job job = app.submit(conf);
    app.waitForState(job, JobState.RUNNING);
    Map<TaskId, Task> tasks = job.getTasks();
    Assert.assertEquals("Num tasks is not correct", 2, tasks.size());
    Iterator<Task> taskIter = tasks.values().iterator();
    Task mTask = taskIter.next();
    app.waitForState(mTask, TaskState.RUNNING);
    Task rTask = taskIter.next();
    app.waitForState(rTask, TaskState.RUNNING);
    Map<TaskAttemptId, TaskAttempt> mAttempts = mTask.getAttempts();
    Assert.assertEquals("Num attempts is not correct", 1, mAttempts.size());
    Map<TaskAttemptId, TaskAttempt> rAttempts = rTask.getAttempts();
    Assert.assertEquals("Num attempts is not correct", 1, rAttempts.size());
    TaskAttempt mta = mAttempts.values().iterator().next();
    TaskAttempt rta = rAttempts.values().iterator().next();
    app.waitForState(mta, TaskAttemptState.RUNNING);
    app.waitForState(rta, TaskAttemptState.RUNNING);
    clock.setTime(11);
    app.getContext().getEventHandler().handle(new TaskAttemptEvent(mta.getID(), TaskAttemptEventType.TA_DONE));
    app.getContext().getEventHandler().handle(new TaskAttemptEvent(rta.getID(), TaskAttemptEventType.TA_DONE));
    app.waitForState(job, JobState.SUCCEEDED);
    Assert.assertEquals(mta.getFinishTime(), 11);
    Assert.assertEquals(mta.getLaunchTime(), 10);
    Assert.assertEquals(rta.getFinishTime(), 11);
    Assert.assertEquals(rta.getLaunchTime(), 10);
    Counters counters = job.getAllCounters();
    int memoryMb = (int) containerResource.getMemorySize();
    int vcores = containerResource.getVirtualCores();
    Assert.assertEquals((int) Math.ceil((float) memoryMb / minContainerSize), counters.findCounter(JobCounter.SLOTS_MILLIS_MAPS).getValue());
    Assert.assertEquals((int) Math.ceil((float) memoryMb / minContainerSize), counters.findCounter(JobCounter.SLOTS_MILLIS_REDUCES).getValue());
    Assert.assertEquals(1, counters.findCounter(JobCounter.MILLIS_MAPS).getValue());
    Assert.assertEquals(1, counters.findCounter(JobCounter.MILLIS_REDUCES).getValue());
    Assert.assertEquals(memoryMb, counters.findCounter(JobCounter.MB_MILLIS_MAPS).getValue());
    Assert.assertEquals(memoryMb, counters.findCounter(JobCounter.MB_MILLIS_REDUCES).getValue());
    Assert.assertEquals(vcores, counters.findCounter(JobCounter.VCORES_MILLIS_MAPS).getValue());
    Assert.assertEquals(vcores, counters.findCounter(JobCounter.VCORES_MILLIS_REDUCES).getValue());
}
Also used: Task (org.apache.hadoop.mapreduce.v2.app.job.Task), TaskId (org.apache.hadoop.mapreduce.v2.api.records.TaskId), Configuration (org.apache.hadoop.conf.Configuration), YarnConfiguration (org.apache.hadoop.yarn.conf.YarnConfiguration), TaskAttemptId (org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId), TaskAttemptEvent (org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent), SystemClock (org.apache.hadoop.yarn.util.SystemClock), Clock (org.apache.hadoop.yarn.util.Clock), ControlledClock (org.apache.hadoop.yarn.util.ControlledClock), ClusterInfo (org.apache.hadoop.mapreduce.v2.app.ClusterInfo), Counters (org.apache.hadoop.mapreduce.Counters), TaskAttempt (org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt), Job (org.apache.hadoop.mapreduce.v2.app.job.Job), MRApp (org.apache.hadoop.mapreduce.v2.app.MRApp)
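
The counter assertions at the end encode a small amount of arithmetic: each task runs for 1 ms of controlled-clock time (10 to 11), slot-millis scale by the ceiling of container memory over the scheduler minimum, and the MB and VCORES variants scale by the container's memory and vcores. The standalone sketch below spells that out under assumed values of a 2048 MB, 1 vcore container and a 1024 MB minimum allocation.

public class MillisCountersArithmetic {
    public static void main(String[] args) {
        int memoryMb = 2048;          // assumed container memory (containerResource)
        int vcores = 1;               // assumed container vcores
        int minContainerSize = 1024;  // assumed RM_SCHEDULER_MINIMUM_ALLOCATION_MB
        long taskMillis = 11 - 10;    // the controlled clock advances from 10 to 11

        long slotsMillis  = (long) Math.ceil((float) memoryMb / minContainerSize) * taskMillis; // 2
        long mbMillis     = (long) memoryMb * taskMillis;   // 2048
        long vcoresMillis = (long) vcores * taskMillis;     // 1

        System.out.println("SLOTS_MILLIS_MAPS  = " + slotsMillis);
        System.out.println("MILLIS_MAPS        = " + taskMillis);
        System.out.println("MB_MILLIS_MAPS     = " + mbMillis);
        System.out.println("VCORES_MILLIS_MAPS = " + vcoresMillis);
    }
}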

Aggregations

Types most frequently used alongside Counters in these examples, with their usage counts:

Job (org.apache.hadoop.mapreduce.v2.app.job.Job): 36 usages
Test (org.junit.Test): 34 usages
JobId (org.apache.hadoop.mapreduce.v2.api.records.JobId): 29 usages
Task (org.apache.hadoop.mapreduce.v2.app.job.Task): 28 usages
ClientResponse (com.sun.jersey.api.client.ClientResponse): 21 usages
WebResource (com.sun.jersey.api.client.WebResource): 21 usages
Counters (org.apache.hadoop.mapreduce.Counters): 18 usages
TaskAttempt (org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt): 16 usages
TaskAttemptId (org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId): 15 usages
JSONObject (org.codehaus.jettison.json.JSONObject): 15 usages
TaskId (org.apache.hadoop.mapreduce.v2.api.records.TaskId): 13 usages
Configuration (org.apache.hadoop.conf.Configuration): 9 usages
Counters (org.apache.hadoop.mapreduce.v2.api.records.Counters): 8 usages
StringReader (java.io.StringReader): 6 usages
GET (javax.ws.rs.GET): 6 usages
Path (javax.ws.rs.Path): 6 usages
Produces (javax.ws.rs.Produces): 6 usages
DocumentBuilder (javax.xml.parsers.DocumentBuilder): 6 usages
DocumentBuilderFactory (javax.xml.parsers.DocumentBuilderFactory): 6 usages
CounterGroup (org.apache.hadoop.mapreduce.v2.api.records.CounterGroup): 6 usages