Example 46 with Counters

Use of org.apache.hadoop.mapreduce.v2.api.records.Counters in project hadoop by apache.

The class TestAMWebApp, method testSingleCounterView.

@Test
public void testSingleCounterView() {
    AppContext appContext = new MockAppContext(0, 1, 1, 1);
    Job job = appContext.getAllJobs().values().iterator().next();
    // add a failed task to the job without any counters
    Task failedTask = MockJobs.newTask(job.getID(), 2, 1, true);
    Map<TaskId, Task> tasks = job.getTasks();
    tasks.put(failedTask.getID(), failedTask);
    Map<String, String> params = getJobParams(appContext);
    // point the page at one specific counter of the mock job
    params.put(AMParams.COUNTER_GROUP, "org.apache.hadoop.mapreduce.FileSystemCounter");
    params.put(AMParams.COUNTER_NAME, "HDFS_WRITE_OPS");
    WebAppTests.testPage(SingleCounterPage.class, AppContext.class, appContext, params);
}
Also used: MockAppContext (org.apache.hadoop.mapreduce.v2.app.MockAppContext), Task (org.apache.hadoop.mapreduce.v2.app.job.Task), TaskId (org.apache.hadoop.mapreduce.v2.api.records.TaskId), AppContext (org.apache.hadoop.mapreduce.v2.app.AppContext), Job (org.apache.hadoop.mapreduce.v2.app.job.Job), Test (org.junit.Test)
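
For context, the getJobParams helper used above is defined elsewhere in TestAMWebApp and not shown on this page. A plausible minimal sketch, assuming it only needs to expose the first mock job's id under AMParams.JOB_ID (the key and the body are an assumption, not the actual helper):

import java.util.HashMap;
import java.util.Map;
import org.apache.hadoop.mapreduce.v2.api.records.JobId;
import org.apache.hadoop.mapreduce.v2.app.AppContext;
import org.apache.hadoop.mapreduce.v2.app.webapp.AMParams;
import org.apache.hadoop.mapreduce.v2.util.MRApps;

// Hypothetical reconstruction: resolve the first job in the mock context and
// hand its id to the page under test through the params map.
static Map<String, String> getJobParams(AppContext appContext) {
    JobId jobId = appContext.getAllJobs().keySet().iterator().next();
    Map<String, String> params = new HashMap<String, String>();
    params.put(AMParams.JOB_ID, MRApps.toString(jobId));
    return params;
}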

Example 47 with Counters

Use of org.apache.hadoop.mapreduce.v2.api.records.Counters in project hadoop by apache.

The class TestAMWebApp, method testSingleTaskCounterView.

@Test
public void testSingleTaskCounterView() {
    AppContext appContext = new MockAppContext(0, 1, 1, 2);
    Map<String, String> params = getTaskParams(appContext);
    params.put(AMParams.COUNTER_GROUP, "org.apache.hadoop.mapreduce.FileSystemCounter");
    params.put(AMParams.COUNTER_NAME, "HDFS_WRITE_OPS");
    // remove counters from one task attempt
    // to test handling of missing counters
    TaskId taskID = MRApps.toTaskID(params.get(AMParams.TASK_ID));
    Job job = appContext.getJob(taskID.getJobId());
    Task task = job.getTask(taskID);
    TaskAttempt attempt = task.getAttempts().values().iterator().next();
    attempt.getReport().setCounters(null);
    WebAppTests.testPage(SingleCounterPage.class, AppContext.class, appContext, params);
}
Also used: MockAppContext (org.apache.hadoop.mapreduce.v2.app.MockAppContext), Task (org.apache.hadoop.mapreduce.v2.app.job.Task), TaskId (org.apache.hadoop.mapreduce.v2.api.records.TaskId), AppContext (org.apache.hadoop.mapreduce.v2.app.AppContext), TaskAttempt (org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt), Job (org.apache.hadoop.mapreduce.v2.app.job.Job), Test (org.junit.Test)
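
The detail worth noting is that the page must render even when an attempt's report carries no Counters at all. An illustrative defensive lookup over the v2 records API (a sketch of the pattern being exercised, not the actual SingleCounterPage code):

import org.apache.hadoop.mapreduce.v2.api.records.CounterGroup;
import org.apache.hadoop.mapreduce.v2.api.records.Counters;
import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;

// Resolve one named counter for an attempt, treating a missing Counters
// object, group, or counter as a zero value instead of an NPE.
static long counterValueOrZero(TaskAttempt attempt, String groupName, String counterName) {
    Counters counters = attempt.getReport().getCounters();
    if (counters == null) {
        return 0L;
    }
    CounterGroup group = counters.getCounterGroup(groupName);
    if (group == null || group.getCounter(counterName) == null) {
        return 0L;
    }
    return group.getCounter(counterName).getValue();
}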

Example 48 with Counters

Use of org.apache.hadoop.mapreduce.v2.api.records.Counters in project hadoop by apache.

The class TestTaskImpl, method testCountersWithSpeculation.

@Test
public void testCountersWithSpeculation() {
    mockTask = new MockTaskImpl(jobId, partition, dispatcher.getEventHandler(), remoteJobConfFile, conf, taskAttemptListener, jobToken, credentials, clock, startCount, metrics, appContext, TaskType.MAP) {

        @Override
        protected int getMaxAttempts() {
            return 1;
        }
    };
    TaskId taskId = getNewTaskID();
    scheduleTaskAttempt(taskId);
    launchTaskAttempt(getLastAttempt().getAttemptId());
    updateLastAttemptState(TaskAttemptState.RUNNING);
    MockTaskAttemptImpl baseAttempt = getLastAttempt();
    // add a speculative attempt
    mockTask.handle(new TaskTAttemptEvent(getLastAttempt().getAttemptId(), TaskEventType.T_ADD_SPEC_ATTEMPT));
    launchTaskAttempt(getLastAttempt().getAttemptId());
    updateLastAttemptState(TaskAttemptState.RUNNING);
    MockTaskAttemptImpl specAttempt = getLastAttempt();
    assertEquals(2, taskAttempts.size());
    Counters specAttemptCounters = new Counters();
    Counter cpuCounter = specAttemptCounters.findCounter(TaskCounter.CPU_MILLISECONDS);
    cpuCounter.setValue(1000);
    specAttempt.setCounters(specAttemptCounters);
    // have the speculative attempt succeed; the base attempt also reaches
    // 1.0 progress below, so the task must pick the successful attempt's counters
    commitTaskAttempt(specAttempt.getAttemptId());
    specAttempt.setProgress(1.0f);
    specAttempt.setState(TaskAttemptState.SUCCEEDED);
    mockTask.handle(new TaskTAttemptEvent(specAttempt.getAttemptId(), TaskEventType.T_ATTEMPT_SUCCEEDED));
    assertEquals(TaskState.SUCCEEDED, mockTask.getState());
    baseAttempt.setProgress(1.0f);
    Counters taskCounters = mockTask.getCounters();
    assertEquals("wrong counters for task", specAttemptCounters, taskCounters);
}
Also used: TaskId (org.apache.hadoop.mapreduce.v2.api.records.TaskId), Counter (org.apache.hadoop.mapreduce.Counter), TaskCounter (org.apache.hadoop.mapreduce.TaskCounter), TaskTAttemptEvent (org.apache.hadoop.mapreduce.v2.app.job.event.TaskTAttemptEvent), Counters (org.apache.hadoop.mapreduce.Counters), Test (org.junit.Test)
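
The final assertion pins down the selection rule for task-level counters once speculation resolves: the task reports the successful attempt's counters, and the still-running base attempt's counters do not leak into the total. Conceptually (a sketch of the rule, not TaskImpl's actual implementation):

import java.util.Collection;
import org.apache.hadoop.mapreduce.Counters;
import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptState;
import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;

// Sketch: prefer the counters of the attempt that succeeded; fall back to
// empty counters while nothing has finished yet.
static Counters taskCounters(Collection<TaskAttempt> attempts) {
    for (TaskAttempt attempt : attempts) {
        if (attempt.getState() == TaskAttemptState.SUCCEEDED) {
            return attempt.getCounters();
        }
    }
    return new Counters();
}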

Example 49 with Counters

Use of org.apache.hadoop.mapreduce.v2.api.records.Counters in project hadoop by apache.

The class TestAMWebServicesAttempts, method testTaskAttemptIdCounters.

@Test
public void testTaskAttemptIdCounters() throws Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
        String jobId = MRApps.toString(id);
        for (Task task : jobsMap.get(id).getTasks().values()) {
            String tid = MRApps.toString(task.getID());
            for (TaskAttempt att : task.getAttempts().values()) {
                TaskAttemptId attemptid = att.getID();
                String attid = MRApps.toString(attemptid);
                ClientResponse response = r.path("ws").path("v1").path("mapreduce")
                        .path("jobs").path(jobId).path("tasks").path(tid)
                        .path("attempts").path(attid).path("counters")
                        .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
                assertEquals(MediaType.APPLICATION_JSON_TYPE + "; " + JettyUtils.UTF_8, response.getType().toString());
                JSONObject json = response.getEntity(JSONObject.class);
                assertEquals("incorrect number of elements", 1, json.length());
                JSONObject info = json.getJSONObject("jobTaskAttemptCounters");
                verifyAMJobTaskAttemptCounters(info, att);
            }
        }
    }
}
Also used: ClientResponse (com.sun.jersey.api.client.ClientResponse), Task (org.apache.hadoop.mapreduce.v2.app.job.Task), JSONObject (org.codehaus.jettison.json.JSONObject), TaskAttemptId (org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId), WebResource (com.sun.jersey.api.client.WebResource), TaskAttempt (org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt), Job (org.apache.hadoop.mapreduce.v2.app.job.Job), JobId (org.apache.hadoop.mapreduce.v2.api.records.JobId), Test (org.junit.Test)
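
verifyAMJobTaskAttemptCounters (defined elsewhere in the test class) walks the returned object. Assuming the response shape documented for the MR Application Master REST API (the field names below come from those docs and should be treated as an assumption), drilling into it with Jettison looks roughly like:

import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;

// Assumed shape: {"jobTaskAttemptCounters": {"id": "...",
//   "taskAttemptCounterGroup": [{"counterGroupName": "...",
//     "counter": [{"name": "...", "value": 0}]}]}}
static void printAttemptCounters(JSONObject json) throws JSONException {
    JSONObject info = json.getJSONObject("jobTaskAttemptCounters");
    JSONArray groups = info.getJSONArray("taskAttemptCounterGroup");
    for (int i = 0; i < groups.length(); i++) {
        JSONObject group = groups.getJSONObject(i);
        JSONArray counters = group.getJSONArray("counter");
        for (int j = 0; j < counters.length(); j++) {
            JSONObject counter = counters.getJSONObject(j);
            System.out.println(group.getString("counterGroupName") + ": "
                + counter.getString("name") + " = " + counter.getLong("value"));
        }
    }
}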

Example 50 with Counters

Use of org.apache.hadoop.mapreduce.v2.api.records.Counters in project hadoop by apache.

The class TestAMWebServicesAttempts, method testTaskAttemptIdXMLCounters.

@Test
public void testTaskAttemptIdXMLCounters() throws Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
        String jobId = MRApps.toString(id);
        for (Task task : jobsMap.get(id).getTasks().values()) {
            String tid = MRApps.toString(task.getID());
            for (TaskAttempt att : task.getAttempts().values()) {
                TaskAttemptId attemptid = att.getID();
                String attid = MRApps.toString(attemptid);
                ClientResponse response = r.path("ws").path("v1").path("mapreduce")
                        .path("jobs").path(jobId).path("tasks").path(tid)
                        .path("attempts").path(attid).path("counters")
                        .accept(MediaType.APPLICATION_XML).get(ClientResponse.class);
                assertEquals(MediaType.APPLICATION_XML_TYPE + "; " + JettyUtils.UTF_8, response.getType().toString());
                String xml = response.getEntity(String.class);
                DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
                DocumentBuilder db = dbf.newDocumentBuilder();
                InputSource is = new InputSource();
                is.setCharacterStream(new StringReader(xml));
                Document dom = db.parse(is);
                NodeList nodes = dom.getElementsByTagName("jobTaskAttemptCounters");
                verifyAMTaskCountersXML(nodes, att);
            }
        }
    }
}
Also used: ClientResponse (com.sun.jersey.api.client.ClientResponse), Task (org.apache.hadoop.mapreduce.v2.app.job.Task), InputSource (org.xml.sax.InputSource), DocumentBuilderFactory (javax.xml.parsers.DocumentBuilderFactory), TaskAttemptId (org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId), NodeList (org.w3c.dom.NodeList), WebResource (com.sun.jersey.api.client.WebResource), Document (org.w3c.dom.Document), DocumentBuilder (javax.xml.parsers.DocumentBuilder), StringReader (java.io.StringReader), TaskAttempt (org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt), Job (org.apache.hadoop.mapreduce.v2.app.job.Job), JobId (org.apache.hadoop.mapreduce.v2.api.records.JobId), Test (org.junit.Test)
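
Here verifyAMTaskCountersXML receives the NodeList of jobTaskAttemptCounters elements. A minimal sketch of pulling one counter value back out of the parsed DOM, assuming the XML mirrors the JSON layout with nested counter and value elements (those element names are an assumption):

import org.w3c.dom.Element;
import org.w3c.dom.NodeList;

// Read the first <value> under the first counters element, defaulting to
// zero when the document carries no counter values at all.
static long firstCounterValue(NodeList countersNodes) {
    if (countersNodes.getLength() == 0) {
        return 0L;
    }
    Element counters = (Element) countersNodes.item(0);
    NodeList values = counters.getElementsByTagName("value");
    return values.getLength() == 0
        ? 0L
        : Long.parseLong(values.item(0).getTextContent());
}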

Aggregations

Job (org.apache.hadoop.mapreduce.v2.app.job.Job): 36
Test (org.junit.Test): 34
JobId (org.apache.hadoop.mapreduce.v2.api.records.JobId): 29
Task (org.apache.hadoop.mapreduce.v2.app.job.Task): 28
ClientResponse (com.sun.jersey.api.client.ClientResponse): 21
WebResource (com.sun.jersey.api.client.WebResource): 21
Counters (org.apache.hadoop.mapreduce.Counters): 18
TaskAttempt (org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt): 16
TaskAttemptId (org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId): 15
JSONObject (org.codehaus.jettison.json.JSONObject): 15
TaskId (org.apache.hadoop.mapreduce.v2.api.records.TaskId): 13
Configuration (org.apache.hadoop.conf.Configuration): 9
Counters (org.apache.hadoop.mapreduce.v2.api.records.Counters): 8
StringReader (java.io.StringReader): 6
GET (javax.ws.rs.GET): 6
Path (javax.ws.rs.Path): 6
Produces (javax.ws.rs.Produces): 6
DocumentBuilder (javax.xml.parsers.DocumentBuilder): 6
DocumentBuilderFactory (javax.xml.parsers.DocumentBuilderFactory): 6
CounterGroup (org.apache.hadoop.mapreduce.v2.api.records.CounterGroup): 6