
Example 1 with Task

Use of org.apache.hadoop.mapreduce.v2.app.job.Task in project hadoop by apache.

From class TestRMContainerAllocator, method testMapReduceAllocationWithNodeLabelExpression:

@Test
public void testMapReduceAllocationWithNodeLabelExpression() throws Exception {
    LOG.info("Running testMapReduceAllocationWithNodeLabelExpression");
    Configuration conf = new Configuration();
    /*
     * final int MAP_LIMIT = 3; final int REDUCE_LIMIT = 1;
     * conf.setInt(MRJobConfig.JOB_RUNNING_MAP_LIMIT, MAP_LIMIT);
     * conf.setInt(MRJobConfig.JOB_RUNNING_REDUCE_LIMIT, REDUCE_LIMIT);
     */
    conf.setFloat(MRJobConfig.COMPLETED_MAPS_FOR_REDUCE_SLOWSTART, 1.0f);
    conf.set(MRJobConfig.MAP_NODE_LABEL_EXP, "MapNodes");
    conf.set(MRJobConfig.REDUCE_NODE_LABEL_EXP, "ReduceNodes");
    ApplicationId appId = ApplicationId.newInstance(1, 1);
    ApplicationAttemptId appAttemptId = ApplicationAttemptId.newInstance(appId, 1);
    JobId jobId = MRBuilderUtils.newJobId(appAttemptId.getApplicationId(), 0);
    Job mockJob = mock(Job.class);
    when(mockJob.getReport()).thenReturn(MRBuilderUtils.newJobReport(jobId, "job", "user", JobState.RUNNING, 0, 0, 0, 0, 0, 0, 0, "jobfile", null, false, ""));
    final MockScheduler mockScheduler = new MockScheduler(appAttemptId);
    MyContainerAllocator allocator = new MyContainerAllocator(null, conf, appAttemptId, mockJob, SystemClock.getInstance()) {

        @Override
        protected void register() {
        }

        @Override
        protected ApplicationMasterProtocol createSchedulerProxy() {
            return mockScheduler;
        }
    };
    // create some map requests
    ContainerRequestEvent reqMapEvents = createReq(jobId, 0, 1024, new String[] { "map" });
    allocator.sendRequests(Arrays.asList(reqMapEvents));
    // create some reduce requests
    ContainerRequestEvent reqReduceEvents = createReq(jobId, 0, 2048, new String[] { "reduce" }, false, true);
    allocator.sendRequests(Arrays.asList(reqReduceEvents));
    allocator.schedule();
    // verify all of the host-specific asks were sent plus one for the
    // default rack and one for the ANY request
    Assert.assertEquals(3, mockScheduler.lastAsk.size());
    // verify ResourceRequest sent for MAP have appropriate node
    // label expression as per the configuration
    validateLabelsRequests(mockScheduler.lastAsk.get(0), false);
    validateLabelsRequests(mockScheduler.lastAsk.get(1), false);
    validateLabelsRequests(mockScheduler.lastAsk.get(2), false);
    // assign a map task and verify we do not ask for any more maps
    ContainerId cid0 = mockScheduler.assignContainer("map", false);
    allocator.schedule();
    // verify all of the host-specific asks were sent plus one for the
    // default rack and one for the ANY request
    Assert.assertEquals(3, mockScheduler.lastAsk.size());
    validateLabelsRequests(mockScheduler.lastAsk.get(0), true);
    validateLabelsRequests(mockScheduler.lastAsk.get(1), true);
    validateLabelsRequests(mockScheduler.lastAsk.get(2), true);
    // clean up
    allocator.close();
}
Also used: Configuration (org.apache.hadoop.conf.Configuration), YarnConfiguration (org.apache.hadoop.yarn.conf.YarnConfiguration), ContainerId (org.apache.hadoop.yarn.api.records.ContainerId), ApplicationAttemptId (org.apache.hadoop.yarn.api.records.ApplicationAttemptId), ApplicationId (org.apache.hadoop.yarn.api.records.ApplicationId), Job (org.apache.hadoop.mapreduce.v2.app.job.Job), JobId (org.apache.hadoop.mapreduce.v2.api.records.JobId), Test (org.junit.Test)
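
The helper validateLabelsRequests is defined elsewhere in TestRMContainerAllocator and is not shown above. A minimal sketch of what it plausibly asserts, inferred from the MAP_NODE_LABEL_EXP and REDUCE_NODE_LABEL_EXP values the test configures (an assumption; the exact body in Hadoop may differ): only the ANY ("*") ResourceRequest should carry the configured label expression, while host- and rack-level requests should not.

// Hypothetical reconstruction of the label-validation helper; request is an
// org.apache.hadoop.yarn.api.records.ResourceRequest taken from lastAsk.
private void validateLabelsRequests(ResourceRequest request, boolean isReduce) {
    switch (request.getResourceName()) {
        case "map":
        case "reduce":
        case "/default-rack":
            // host- and rack-local asks must not carry a label expression
            Assert.assertNull(request.getNodeLabelExpression());
            break;
        case "*":
            // the ANY ask carries the label configured for the task type
            Assert.assertEquals(isReduce ? "ReduceNodes" : "MapNodes",
                request.getNodeLabelExpression());
            break;
        default:
            Assert.fail("Unexpected resource name " + request.getResourceName());
    }
}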

Example 2 with Task

Use of org.apache.hadoop.mapreduce.v2.app.job.Task in project hadoop by apache.

From class TestAppController, method setUp:

@Before
public void setUp() throws IOException {
    AppContext context = mock(AppContext.class);
    when(context.getApplicationID()).thenReturn(ApplicationId.newInstance(0, 0));
    when(context.getApplicationName()).thenReturn("AppName");
    when(context.getUser()).thenReturn("User");
    when(context.getStartTime()).thenReturn(System.currentTimeMillis());
    job = mock(Job.class);
    Task task = mock(Task.class);
    when(job.getTask(any(TaskId.class))).thenReturn(task);
    when(job.loadConfFile()).thenReturn(new Configuration());
    when(job.getConfFile()).thenReturn(new Path("/"));
    JobId jobID = MRApps.toJobID("job_01_01");
    when(context.getJob(jobID)).thenReturn(job);
    when(job.checkAccess(any(UserGroupInformation.class), any(JobACL.class))).thenReturn(true);
    App app = new App(context);
    Configuration configuration = new Configuration();
    ctx = mock(RequestContext.class);
    appController = new AppControllerForTest(app, configuration, ctx);
    appController.getProperty().put(AMParams.JOB_ID, "job_01_01");
    appController.getProperty().put(AMParams.TASK_ID, taskId);
}
Also used: Path (org.apache.hadoop.fs.Path), Task (org.apache.hadoop.mapreduce.v2.app.job.Task), TaskId (org.apache.hadoop.mapreduce.v2.api.records.TaskId), Configuration (org.apache.hadoop.conf.Configuration), AppContext (org.apache.hadoop.mapreduce.v2.app.AppContext), RequestContext (org.apache.hadoop.yarn.webapp.Controller.RequestContext), Job (org.apache.hadoop.mapreduce.v2.app.job.Job), JobACL (org.apache.hadoop.mapreduce.JobACL), JobId (org.apache.hadoop.mapreduce.v2.api.records.JobId), UserGroupInformation (org.apache.hadoop.security.UserGroupInformation), Before (org.junit.Before)
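
The taskId stored under AMParams.TASK_ID is a field initialized elsewhere in TestAppController (a task-ID string such as "task_01_01_m_01"; the exact value is an assumption). With this setUp in place, a follow-on test could exercise the controller against the mocked job, roughly like this (a sketch, assuming the controller's task() action resolves page parameters from the property map populated above, and using Mockito's verify; the real test class has similar cases):

@Test
public void testTaskPageResolvesMockedTask() {
    // render the task page; the controller parses the TASK_ID property
    // and looks the task up through the mocked Job
    appController.task();
    verify(job).getTask(any(TaskId.class));
}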

Example 3 with Task

Use of org.apache.hadoop.mapreduce.v2.app.job.Task in project hadoop by apache.

From class TestBlocks, method testAttemptsBlock:

/**
   * Test AttemptsBlock's rendering.
   */
@Test
public void testAttemptsBlock() {
    AppContext ctx = mock(AppContext.class);
    AppForTest app = new AppForTest(ctx);
    JobId jobId = new JobIdPBImpl();
    jobId.setId(0);
    jobId.setAppId(ApplicationIdPBImpl.newInstance(0, 1));
    TaskId taskId = new TaskIdPBImpl();
    taskId.setId(0);
    taskId.setTaskType(TaskType.REDUCE);
    taskId.setJobId(jobId);
    Task task = mock(Task.class);
    when(task.getID()).thenReturn(taskId);
    TaskReport report = mock(TaskReport.class);
    when(task.getReport()).thenReturn(report);
    when(task.getType()).thenReturn(TaskType.REDUCE);
    Map<TaskId, Task> tasks = new HashMap<TaskId, Task>();
    Map<TaskAttemptId, TaskAttempt> attempts = new HashMap<TaskAttemptId, TaskAttempt>();
    TaskAttempt attempt = mock(TaskAttempt.class);
    TaskAttemptId taId = new TaskAttemptIdPBImpl();
    taId.setId(0);
    taId.setTaskId(task.getID());
    when(attempt.getID()).thenReturn(taId);
    final TaskAttemptState taState = TaskAttemptState.SUCCEEDED;
    when(attempt.getState()).thenReturn(taState);
    TaskAttemptReport taReport = mock(TaskAttemptReport.class);
    when(taReport.getTaskAttemptState()).thenReturn(taState);
    when(attempt.getReport()).thenReturn(taReport);
    attempts.put(taId, attempt);
    tasks.put(taskId, task);
    when(task.getAttempts()).thenReturn(attempts);
    app.setTask(task);
    Job job = mock(Job.class);
    when(job.getTasks(TaskType.REDUCE)).thenReturn(tasks);
    app.setJob(job);
    AttemptsBlockForTest block = new AttemptsBlockForTest(app, new Configuration());
    block.addParameter(AMParams.TASK_TYPE, "r");
    block.addParameter(AMParams.ATTEMPT_STATE, "SUCCESSFUL");
    PrintWriter pWriter = new PrintWriter(data);
    Block html = new BlockForTest(new HtmlBlockForTest(), pWriter, 0, false);
    block.render(html);
    pWriter.flush();
    assertTrue(data.toString().contains("<a href='" + block.url("task", task.getID().toString()) + "'>" + "attempt_0_0001_r_000000_0</a>"));
}
Also used: Task (org.apache.hadoop.mapreduce.v2.app.job.Task), TaskId (org.apache.hadoop.mapreduce.v2.api.records.TaskId), TaskReport (org.apache.hadoop.mapreduce.v2.api.records.TaskReport), TaskAttemptIdPBImpl (org.apache.hadoop.mapreduce.v2.api.records.impl.pb.TaskAttemptIdPBImpl), Configuration (org.apache.hadoop.conf.Configuration), TaskIdPBImpl (org.apache.hadoop.mapreduce.v2.api.records.impl.pb.TaskIdPBImpl), HashMap (java.util.HashMap), TaskAttemptId (org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId), AppContext (org.apache.hadoop.mapreduce.v2.app.AppContext), BlockForTest (org.apache.hadoop.yarn.webapp.view.BlockForTest), TaskAttemptReport (org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptReport), TaskAttemptState (org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptState), JobIdPBImpl (org.apache.hadoop.mapreduce.v2.api.records.impl.pb.JobIdPBImpl), HtmlBlock (org.apache.hadoop.yarn.webapp.view.HtmlBlock), Block (org.apache.hadoop.yarn.webapp.view.HtmlBlock.Block), FewAttemptsBlock (org.apache.hadoop.mapreduce.v2.app.webapp.AttemptsPage.FewAttemptsBlock), TaskAttempt (org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt), Job (org.apache.hadoop.mapreduce.v2.app.job.Job), JobId (org.apache.hadoop.mapreduce.v2.api.records.JobId), PrintWriter (java.io.PrintWriter), Test (org.junit.Test)
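
The data buffer that the PrintWriter wraps, along with AttemptsBlockForTest and HtmlBlockForTest, are helpers defined elsewhere in TestBlocks. The buffer is presumably just a byte sink for the rendered HTML, along these lines (an assumed declaration):

// Collects everything the rendered block writes, so assertions can
// inspect the emitted HTML as a string via data.toString().
private final ByteArrayOutputStream data = new ByteArrayOutputStream();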

Example 4 with Task

Use of org.apache.hadoop.mapreduce.v2.app.job.Task in project hadoop by apache.

From class TestBlocks, method testTasksBlock:

/**
   * Test rendering for TasksBlock
   */
@Test
public void testTasksBlock() throws Exception {
    ApplicationId appId = ApplicationIdPBImpl.newInstance(0, 1);
    JobId jobId = new JobIdPBImpl();
    jobId.setId(0);
    jobId.setAppId(appId);
    TaskId taskId = new TaskIdPBImpl();
    taskId.setId(0);
    taskId.setTaskType(TaskType.MAP);
    taskId.setJobId(jobId);
    Task task = mock(Task.class);
    when(task.getID()).thenReturn(taskId);
    TaskReport report = mock(TaskReport.class);
    when(report.getProgress()).thenReturn(0.7f);
    when(report.getTaskState()).thenReturn(TaskState.SUCCEEDED);
    when(report.getStartTime()).thenReturn(100001L);
    when(report.getFinishTime()).thenReturn(100011L);
    when(report.getStatus()).thenReturn("Dummy Status \n*");
    when(task.getReport()).thenReturn(report);
    when(task.getType()).thenReturn(TaskType.MAP);
    Map<TaskId, Task> tasks = new HashMap<TaskId, Task>();
    tasks.put(taskId, task);
    AppContext ctx = mock(AppContext.class);
    Job job = mock(Job.class);
    when(job.getTasks()).thenReturn(tasks);
    App app = new App(ctx);
    app.setJob(job);
    TasksBlockForTest taskBlock = new TasksBlockForTest(app);
    taskBlock.addParameter(AMParams.TASK_TYPE, "m");
    PrintWriter pWriter = new PrintWriter(data);
    Block html = new BlockForTest(new HtmlBlockForTest(), pWriter, 0, false);
    taskBlock.render(html);
    pWriter.flush();
    assertTrue(data.toString().contains("task_0_0001_m_000000"));
    assertTrue(data.toString().contains("70.00"));
    assertTrue(data.toString().contains("SUCCEEDED"));
    assertTrue(data.toString().contains("100001"));
    assertTrue(data.toString().contains("100011"));
    assertFalse(data.toString().contains("Dummy Status \n*"));
    assertTrue(data.toString().contains("Dummy Status \\n*"));
}
Also used: Task (org.apache.hadoop.mapreduce.v2.app.job.Task), TaskId (org.apache.hadoop.mapreduce.v2.api.records.TaskId), TaskReport (org.apache.hadoop.mapreduce.v2.api.records.TaskReport), TaskIdPBImpl (org.apache.hadoop.mapreduce.v2.api.records.impl.pb.TaskIdPBImpl), HashMap (java.util.HashMap), AppContext (org.apache.hadoop.mapreduce.v2.app.AppContext), BlockForTest (org.apache.hadoop.yarn.webapp.view.BlockForTest), JobIdPBImpl (org.apache.hadoop.mapreduce.v2.api.records.impl.pb.JobIdPBImpl), HtmlBlock (org.apache.hadoop.yarn.webapp.view.HtmlBlock), Block (org.apache.hadoop.yarn.webapp.view.HtmlBlock.Block), FewAttemptsBlock (org.apache.hadoop.mapreduce.v2.app.webapp.AttemptsPage.FewAttemptsBlock), ApplicationId (org.apache.hadoop.yarn.api.records.ApplicationId), Job (org.apache.hadoop.mapreduce.v2.app.job.Job), JobId (org.apache.hadoop.mapreduce.v2.api.records.JobId), PrintWriter (java.io.PrintWriter), Test (org.junit.Test)
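
TasksBlockForTest is another TestBlocks helper; its addParameter method has to feed values into the block's $(key) lookups so the block behaves as if the web request carried those URL parameters. A plausible sketch of that override (an assumption; the real helper may differ in detail):

// Hypothetical helper: overrides the parameter lookup a block normally
// resolves from the web request context, so tests can inject parameters.
private static class TasksBlockForTest extends TasksBlock {
    private final Map<String, String> params = new HashMap<String, String>();

    TasksBlockForTest(App app) {
        super(app);
    }

    public void addParameter(String name, String value) {
        params.put(name, value);
    }

    @Override
    public String $(String key, String defaultValue) {
        String value = params.get(key);
        return value == null ? defaultValue : value;
    }
}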

Example 5 with Task

Use of org.apache.hadoop.mapreduce.v2.app.job.Task in project hadoop by apache.

From class TestAMWebServicesAttempts, method testTaskAttemptsSlash:

@Test
public void testTaskAttemptsSlash() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
        String jobId = MRApps.toString(id);
        for (Task task : jobsMap.get(id).getTasks().values()) {
            String tid = MRApps.toString(task.getID());
            ClientResponse response = r.path("ws").path("v1").path("mapreduce").path("jobs").path(jobId).path("tasks").path(tid).path("attempts/").accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
            assertEquals(MediaType.APPLICATION_JSON_TYPE + "; " + JettyUtils.UTF_8, response.getType().toString());
            JSONObject json = response.getEntity(JSONObject.class);
            verifyAMTaskAttempts(json, task);
        }
    }
}
Also used: ClientResponse (com.sun.jersey.api.client.ClientResponse), Task (org.apache.hadoop.mapreduce.v2.app.job.Task), JSONObject (org.codehaus.jettison.json.JSONObject), WebResource (com.sun.jersey.api.client.WebResource), Job (org.apache.hadoop.mapreduce.v2.app.job.Job), JobId (org.apache.hadoop.mapreduce.v2.api.records.JobId), Test (org.junit.Test)
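
verifyAMTaskAttempts is a helper defined elsewhere in TestAMWebServicesAttempts. Judging from the JSON the attempts endpoint returns, it plausibly unwraps the taskAttempts/taskAttempt structure and matches each mocked attempt by id, roughly as follows (an assumed sketch, not the verbatim helper):

// Hypothetical reconstruction of the JSON verification helper.
public void verifyAMTaskAttempts(JSONObject json, Task task) throws JSONException {
    // the response wraps an array of attempts in a "taskAttempts" object
    JSONObject attempts = json.getJSONObject("taskAttempts");
    JSONArray arr = attempts.getJSONArray("taskAttempt");
    for (TaskAttempt attempt : task.getAttempts().values()) {
        String attemptId = MRApps.toString(attempt.getID());
        boolean found = false;
        for (int i = 0; i < arr.length(); i++) {
            if (attemptId.equals(arr.getJSONObject(i).getString("id"))) {
                found = true;
            }
        }
        assertTrue("attempt " + attemptId + " not found in JSON", found);
    }
}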

Aggregations

Task (org.apache.hadoop.mapreduce.v2.app.job.Task): 157 usages
Test (org.junit.Test): 153 usages
Job (org.apache.hadoop.mapreduce.v2.app.job.Job): 150 usages
JobId (org.apache.hadoop.mapreduce.v2.api.records.JobId): 107 usages
TaskAttempt (org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt): 94 usages
TaskId (org.apache.hadoop.mapreduce.v2.api.records.TaskId): 79 usages
TaskAttemptId (org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId): 73 usages
Configuration (org.apache.hadoop.conf.Configuration): 68 usages
ClientResponse (com.sun.jersey.api.client.ClientResponse): 56 usages
WebResource (com.sun.jersey.api.client.WebResource): 56 usages
TaskAttemptEvent (org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent): 52 usages
JSONObject (org.codehaus.jettison.json.JSONObject): 46 usages
AppContext (org.apache.hadoop.mapreduce.v2.app.AppContext): 25 usages
ApplicationId (org.apache.hadoop.yarn.api.records.ApplicationId): 23 usages
Path (org.apache.hadoop.fs.Path): 22 usages
MapTaskAttemptImpl (org.apache.hadoop.mapred.MapTaskAttemptImpl): 20 usages
ContainerId (org.apache.hadoop.yarn.api.records.ContainerId): 20 usages
HashMap (java.util.HashMap): 19 usages
ApplicationAttemptId (org.apache.hadoop.yarn.api.records.ApplicationAttemptId): 19 usages
JobConf (org.apache.hadoop.mapred.JobConf): 16 usages