
Example 6 with TaskAttemptId

Use of org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId in project hadoop by apache.

From the class TestHsWebServicesAttempts, method testTaskAttemptId.

@Test
public void testTaskAttemptId() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
        String jobId = MRApps.toString(id);
        for (Task task : jobsMap.get(id).getTasks().values()) {
            String tid = MRApps.toString(task.getID());
            for (TaskAttempt att : task.getAttempts().values()) {
                TaskAttemptId attemptid = att.getID();
                String attid = MRApps.toString(attemptid);
                ClientResponse response = r.path("ws").path("v1").path("history")
                        .path("mapreduce").path("jobs").path(jobId)
                        .path("tasks").path(tid).path("attempts").path(attid)
                        .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
                assertEquals(MediaType.APPLICATION_JSON_TYPE + "; " + JettyUtils.UTF_8, response.getType().toString());
                JSONObject json = response.getEntity(JSONObject.class);
                assertEquals("incorrect number of elements", 1, json.length());
                JSONObject info = json.getJSONObject("taskAttempt");
                verifyHsTaskAttempt(info, att, task.getType());
            }
        }
    }
}
Also used: ClientResponse(com.sun.jersey.api.client.ClientResponse) Task(org.apache.hadoop.mapreduce.v2.app.job.Task) JSONObject(org.codehaus.jettison.json.JSONObject) TaskAttemptId(org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId) WebResource(com.sun.jersey.api.client.WebResource) TaskAttempt(org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt) Job(org.apache.hadoop.mapreduce.v2.app.job.Job) JobId(org.apache.hadoop.mapreduce.v2.api.records.JobId) Test(org.junit.Test)
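
Outside the Jersey test client, the same history-server resource can be fetched directly. Below is a minimal sketch using only JDK classes; it assumes a JobHistoryServer listening on the default web port 19888, and the job, task, and attempt ids are hypothetical placeholders.

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;

public class TaskAttemptRestSketch {
    public static void main(String[] args) throws Exception {
        // Hypothetical ids; substitute real ones from your cluster.
        String jobId = "job_1480000000000_0001";
        String taskId = "task_1480000000000_0001_m_000000";
        String attemptId = "attempt_1480000000000_0001_m_000000_0";

        // Same resource path the test builds with WebResource.path(...):
        // /ws/v1/history/mapreduce/jobs/{jobid}/tasks/{taskid}/attempts/{attemptid}
        URL url = new URL("http://localhost:19888/ws/v1/history/mapreduce/jobs/"
                + jobId + "/tasks/" + taskId + "/attempts/" + attemptId);

        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestProperty("Accept", "application/json");
        try (BufferedReader in = new BufferedReader(
                new InputStreamReader(conn.getInputStream(), StandardCharsets.UTF_8))) {
            String line;
            while ((line = in.readLine()) != null) {
                // The body is the single-element JSON object the test unwraps
                // with json.getJSONObject("taskAttempt").
                System.out.println(line);
            }
        } finally {
            conn.disconnect();
        }
    }
}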

Example 7 with TaskAttemptId

Use of org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId in project hadoop by apache.

From the class TestHsWebServicesAttempts, method verifyTaskAttemptGeneric.

public void verifyTaskAttemptGeneric(TaskAttempt ta, TaskType ttype, String id,
        String state, String type, String rack, String nodeHttpAddress,
        String diagnostics, String assignedContainerId, long startTime,
        long finishTime, long elapsedTime, float progress) {
    TaskAttemptId attid = ta.getID();
    String attemptId = MRApps.toString(attid);
    WebServicesTestUtils.checkStringMatch("id", attemptId, id);
    WebServicesTestUtils.checkStringMatch("type", ttype.toString(), type);
    WebServicesTestUtils.checkStringMatch("state", ta.getState().toString(), state);
    WebServicesTestUtils.checkStringMatch("rack", ta.getNodeRackName(), rack);
    WebServicesTestUtils.checkStringMatch("nodeHttpAddress", ta.getNodeHttpAddress(), nodeHttpAddress);
    String expectDiag = "";
    List<String> diagnosticsList = ta.getDiagnostics();
    if (diagnosticsList != null && !diagnostics.isEmpty()) {
        StringBuffer b = new StringBuffer();
        for (String diag : diagnosticsList) {
            b.append(diag);
        }
        expectDiag = b.toString();
    }
    WebServicesTestUtils.checkStringMatch("diagnostics", expectDiag, diagnostics);
    WebServicesTestUtils.checkStringMatch("assignedContainerId", ta.getAssignedContainerID().toString(), assignedContainerId);
    assertEquals("startTime wrong", ta.getLaunchTime(), startTime);
    assertEquals("finishTime wrong", ta.getFinishTime(), finishTime);
    assertEquals("elapsedTime wrong", finishTime - startTime, elapsedTime);
    assertEquals("progress wrong", ta.getProgress() * 100, progress, 1e-3f);
}
Also used: TaskAttemptId(org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId)
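
The diagnostics check above concatenates the attempt's diagnostics list into a single string before comparing it with the web service's "diagnostics" field (the same method also checks that elapsedTime equals finishTime - startTime and that progress is reported as a percentage). A hypothetical standalone helper mirroring that StringBuffer loop, with made-up diagnostic messages as input:

import java.util.Arrays;
import java.util.List;

public class DiagnosticsFlattenDemo {

    // Mirrors the verifier's loop: join every diagnostic message into one
    // string, with the empty string standing in for "no diagnostics".
    static String flatten(List<String> diagnostics) {
        if (diagnostics == null || diagnostics.isEmpty()) {
            return "";
        }
        return String.join("", diagnostics);
    }

    public static void main(String[] args) {
        List<String> diags = Arrays.asList(
                "Container killed by the ApplicationMaster.",
                " Container exited with a non-zero exit code 143.");
        System.out.println(flatten(diags));
    }
}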

Example 8 with TaskAttemptId

Use of org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId in project hadoop by apache.

From the class TestMRApps, method testToTaskAttemptID.

@Test(timeout = 120000)
public void testToTaskAttemptID() {
    TaskAttemptId taid = MRApps.toTaskAttemptID("attempt_0_1_m_2_3");
    assertEquals(0, taid.getTaskId().getJobId().getAppId().getClusterTimestamp());
    assertEquals(1, taid.getTaskId().getJobId().getAppId().getId());
    assertEquals(1, taid.getTaskId().getJobId().getId());
    assertEquals(2, taid.getTaskId().getId());
    assertEquals(3, taid.getId());
}
Also used: TaskAttemptId(org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId) Test(org.junit.Test)
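
The string parsed above follows the attempt-id layout attempt_<clusterTimestamp>_<appId>_<m|r>_<taskId>_<attemptId>. A minimal round-trip sketch using the same MRApps helpers that appear throughout these examples; note that MRApps.toString renders the canonical (possibly zero-padded) form, so the output string need not match the loosely formatted input.

import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
import org.apache.hadoop.mapreduce.v2.util.MRApps;

public class AttemptIdRoundTrip {

    public static void main(String[] args) {
        // attempt_<clusterTimestamp>_<appId>_<m|r>_<taskId>_<attemptId>
        TaskAttemptId taid = MRApps.toTaskAttemptID("attempt_0_1_m_2_3");

        System.out.println(taid.getTaskId().getJobId().getAppId().getClusterTimestamp()); // 0
        System.out.println(taid.getTaskId().getId()); // 2
        System.out.println(taid.getId());             // 3

        // Back to string form (canonical layout).
        System.out.println(MRApps.toString(taid));
    }
}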

Example 9 with TaskAttemptId

Use of org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId in project hadoop by apache.

From the class TestJobHistoryEvents, method verifyTask.

private void verifyTask(Task task) {
    Assert.assertEquals("Task state not currect", TaskState.SUCCEEDED, task.getState());
    Map<TaskAttemptId, TaskAttempt> attempts = task.getAttempts();
    Assert.assertEquals("No of attempts not correct", 1, attempts.size());
    for (TaskAttempt attempt : attempts.values()) {
        verifyAttempt(attempt);
    }
}
Also used: TaskAttemptId(org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId) TaskAttempt(org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt)
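
Because task.getAttempts() is keyed by TaskAttemptId, a specific attempt can also be looked up directly instead of iterating, by rebuilding its key with MRBuilderUtils (as Example 10 below does). A small sketch with illustrative ids:

import org.apache.hadoop.mapreduce.v2.api.records.JobId;
import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
import org.apache.hadoop.mapreduce.v2.util.MRBuilderUtils;
import org.apache.hadoop.yarn.api.records.ApplicationId;

public class AttemptKeyLookup {

    public static void main(String[] args) {
        // Illustrative ids; a real test would take these from the job under test.
        ApplicationId appId = ApplicationId.newInstance(1480000000000L, 1);
        JobId jobId = MRBuilderUtils.newJobId(appId, 1);
        TaskId taskId = MRBuilderUtils.newTaskId(jobId, 0, TaskType.MAP);
        TaskAttemptId attemptId = MRBuilderUtils.newTaskAttemptId(taskId, 0);

        // With a Task in hand, this key selects a single attempt:
        //   TaskAttempt attempt = task.getAttempts().get(attemptId);
        System.out.println(attemptId);
    }
}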

Example 10 with TaskAttemptId

Use of org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId in project hadoop by apache.

From the class TestJobHistoryEntities, method testCompletedTaskAttempt.

@Test(timeout = 10000)
public void testCompletedTaskAttempt() throws Exception {
    HistoryFileInfo info = mock(HistoryFileInfo.class);
    when(info.getConfFile()).thenReturn(fullConfPath);
    completedJob = new CompletedJob(conf, jobId, fullHistoryPath, loadTasks, "user", info, jobAclsManager);
    TaskId mt1Id = MRBuilderUtils.newTaskId(jobId, 0, TaskType.MAP);
    TaskId rt1Id = MRBuilderUtils.newTaskId(jobId, 0, TaskType.REDUCE);
    TaskAttemptId mta1Id = MRBuilderUtils.newTaskAttemptId(mt1Id, 0);
    TaskAttemptId rta1Id = MRBuilderUtils.newTaskAttemptId(rt1Id, 0);
    Task mt1 = completedJob.getTask(mt1Id);
    Task rt1 = completedJob.getTask(rt1Id);
    TaskAttempt mta1 = mt1.getAttempt(mta1Id);
    assertEquals(TaskAttemptState.SUCCEEDED, mta1.getState());
    assertEquals("localhost:45454", mta1.getAssignedContainerMgrAddress());
    assertEquals("localhost:9999", mta1.getNodeHttpAddress());
    TaskAttemptReport mta1Report = mta1.getReport();
    assertEquals(TaskAttemptState.SUCCEEDED, mta1Report.getTaskAttemptState());
    assertEquals("localhost", mta1Report.getNodeManagerHost());
    assertEquals(45454, mta1Report.getNodeManagerPort());
    assertEquals(9999, mta1Report.getNodeManagerHttpPort());
    TaskAttempt rta1 = rt1.getAttempt(rta1Id);
    assertEquals(TaskAttemptState.SUCCEEDED, rta1.getState());
    assertEquals("localhost:45454", rta1.getAssignedContainerMgrAddress());
    assertEquals("localhost:9999", rta1.getNodeHttpAddress());
    TaskAttemptReport rta1Report = rta1.getReport();
    assertEquals(TaskAttemptState.SUCCEEDED, rta1Report.getTaskAttemptState());
    assertEquals("localhost", rta1Report.getNodeManagerHost());
    assertEquals(45454, rta1Report.getNodeManagerPort());
    assertEquals(9999, rta1Report.getNodeManagerHttpPort());
}
Also used: HistoryFileInfo(org.apache.hadoop.mapreduce.v2.hs.HistoryFileManager.HistoryFileInfo) Task(org.apache.hadoop.mapreduce.v2.app.job.Task) TaskId(org.apache.hadoop.mapreduce.v2.api.records.TaskId) TaskAttemptReport(org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptReport) TaskAttemptId(org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId) TaskAttempt(org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt) Test(org.junit.Test)
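
When history entities like these need to interoperate with the classic org.apache.hadoop.mapreduce id classes, the two id families can be converted back and forth. A hedged sketch, assuming the TypeConverter.toYarn/fromYarn overloads for attempt ids (the same converter MRApps relies on internally); the attempt string is made up.

import org.apache.hadoop.mapreduce.TaskAttemptID;
import org.apache.hadoop.mapreduce.TypeConverter;
import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;

public class AttemptIdConversionSketch {

    public static void main(String[] args) {
        // Classic attempt id parsed from its string form (made-up id).
        TaskAttemptID classic =
                TaskAttemptID.forName("attempt_1480000000000_0001_m_000000_0");

        // Classic <-> v2 record conversion (assumed overloads).
        TaskAttemptId yarnId = TypeConverter.toYarn(classic);
        TaskAttemptID back = TypeConverter.fromYarn(yarnId);

        System.out.println(yarnId.getTaskId().getJobId());
        System.out.println(back);
    }
}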

Aggregations

TaskAttemptId (org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId): 111
Test (org.junit.Test): 72
TaskId (org.apache.hadoop.mapreduce.v2.api.records.TaskId): 61
TaskAttempt (org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt): 57
JobId (org.apache.hadoop.mapreduce.v2.api.records.JobId): 51
Task (org.apache.hadoop.mapreduce.v2.app.job.Task): 48
Job (org.apache.hadoop.mapreduce.v2.app.job.Job): 45
TaskAttemptEvent (org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent): 33
Configuration (org.apache.hadoop.conf.Configuration): 32
ContainerId (org.apache.hadoop.yarn.api.records.ContainerId): 28
AppContext (org.apache.hadoop.mapreduce.v2.app.AppContext): 27
ApplicationAttemptId (org.apache.hadoop.yarn.api.records.ApplicationAttemptId): 21
ApplicationId (org.apache.hadoop.yarn.api.records.ApplicationId): 20
ClientResponse (com.sun.jersey.api.client.ClientResponse): 16
WebResource (com.sun.jersey.api.client.WebResource): 16
HashMap (java.util.HashMap): 16
Container (org.apache.hadoop.yarn.api.records.Container): 16
Path (org.apache.hadoop.fs.Path): 15
TaskAttemptContainerLaunchedEvent (org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptContainerLaunchedEvent): 14
NodeId (org.apache.hadoop.yarn.api.records.NodeId): 14