Use of org.apache.hadoop.mapreduce.v2.app.job.Task in project hadoop by apache.
From the class TestRMContainerAllocator, the method testMapReduceAllocationWithNodeLabelExpression:
@Test
public void testMapReduceAllocationWithNodeLabelExpression() throws Exception {
  LOG.info("Running testMapReduceAllocationWithNodeLabelExpression");
  Configuration conf = new Configuration();
  /*
   * final int MAP_LIMIT = 3; final int REDUCE_LIMIT = 1;
   * conf.setInt(MRJobConfig.JOB_RUNNING_MAP_LIMIT, MAP_LIMIT);
   * conf.setInt(MRJobConfig.JOB_RUNNING_REDUCE_LIMIT, REDUCE_LIMIT);
   */
  conf.setFloat(MRJobConfig.COMPLETED_MAPS_FOR_REDUCE_SLOWSTART, 1.0f);
  conf.set(MRJobConfig.MAP_NODE_LABEL_EXP, "MapNodes");
  conf.set(MRJobConfig.REDUCE_NODE_LABEL_EXP, "ReduceNodes");
  ApplicationId appId = ApplicationId.newInstance(1, 1);
  ApplicationAttemptId appAttemptId =
      ApplicationAttemptId.newInstance(appId, 1);
  JobId jobId = MRBuilderUtils.newJobId(appAttemptId.getApplicationId(), 0);
  Job mockJob = mock(Job.class);
  when(mockJob.getReport()).thenReturn(
      MRBuilderUtils.newJobReport(jobId, "job", "user", JobState.RUNNING,
          0, 0, 0, 0, 0, 0, 0, "jobfile", null, false, ""));
  final MockScheduler mockScheduler = new MockScheduler(appAttemptId);
  // allocator that skips real RM registration and routes all scheduling
  // calls to the mock scheduler
  MyContainerAllocator allocator = new MyContainerAllocator(null, conf,
      appAttemptId, mockJob, SystemClock.getInstance()) {

    @Override
    protected void register() {
    }

    @Override
    protected ApplicationMasterProtocol createSchedulerProxy() {
      return mockScheduler;
    }
  };
  // create a map request
  ContainerRequestEvent reqMapEvents =
      createReq(jobId, 0, 1024, new String[] { "map" });
  allocator.sendRequests(Arrays.asList(reqMapEvents));
  // create a reduce request
  ContainerRequestEvent reqReduceEvents =
      createReq(jobId, 0, 2048, new String[] { "reduce" }, false, true);
  allocator.sendRequests(Arrays.asList(reqReduceEvents));
  allocator.schedule();
  // verify all of the host-specific asks were sent plus one for the
  // default rack and one for the ANY request
  Assert.assertEquals(3, mockScheduler.lastAsk.size());
  // verify the ResourceRequests sent for the map carry the node label
  // expression configured above
  validateLabelsRequests(mockScheduler.lastAsk.get(0), false);
  validateLabelsRequests(mockScheduler.lastAsk.get(1), false);
  validateLabelsRequests(mockScheduler.lastAsk.get(2), false);
  // assign the map container; the subsequent asks should be the reduce
  // requests, carrying the reduce node-label expression
  ContainerId cid0 = mockScheduler.assignContainer("map", false);
  allocator.schedule();
  // again one host-specific ask plus one for the default rack and one for
  // the ANY request
  Assert.assertEquals(3, mockScheduler.lastAsk.size());
  validateLabelsRequests(mockScheduler.lastAsk.get(0), true);
  validateLabelsRequests(mockScheduler.lastAsk.get(1), true);
  validateLabelsRequests(mockScheduler.lastAsk.get(2), true);
  allocator.close();
}
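The validateLabelsRequests helper is not reproduced on this page. A sketch consistent with the assertions above (the exact switch cases and resource names are assumptions based on the requests the test builds) might look like:

private void validateLabelsRequests(ResourceRequest resourceRequest,
    boolean isReduce) {
  // host- and rack-level asks are expected to carry no label; only the
  // ANY ("*") ask carries the configured node-label expression
  switch (resourceRequest.getResourceName()) {
    case "map":
    case "reduce":
    case NetworkTopology.DEFAULT_RACK:
      Assert.assertNull(resourceRequest.getNodeLabelExpression());
      break;
    case "*":
      Assert.assertEquals(isReduce ? "ReduceNodes" : "MapNodes",
          resourceRequest.getNodeLabelExpression());
      break;
    default:
      Assert.fail("Invalid resource location "
          + resourceRequest.getResourceName());
  }
}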
Use of org.apache.hadoop.mapreduce.v2.app.job.Task in project hadoop by apache.
From the class TestAppController, the method setUp:
@Before
public void setUp() throws IOException {
  // mock the application context the controller reads its metadata from
  AppContext context = mock(AppContext.class);
  when(context.getApplicationID()).thenReturn(ApplicationId.newInstance(0, 0));
  when(context.getApplicationName()).thenReturn("AppName");
  when(context.getUser()).thenReturn("User");
  when(context.getStartTime()).thenReturn(System.currentTimeMillis());
  // mock a Job that resolves any TaskId to a mock Task and grants access
  job = mock(Job.class);
  Task task = mock(Task.class);
  when(job.getTask(any(TaskId.class))).thenReturn(task);
  when(job.loadConfFile()).thenReturn(new Configuration());
  when(job.getConfFile()).thenReturn(new Path("/"));
  JobId jobID = MRApps.toJobID("job_01_01");
  when(context.getJob(jobID)).thenReturn(job);
  when(job.checkAccess(any(UserGroupInformation.class), any(JobACL.class)))
      .thenReturn(true);
  App app = new App(context);
  Configuration configuration = new Configuration();
  ctx = mock(RequestContext.class);
  appController = new AppControllerForTest(app, configuration, ctx);
  appController.getProperty().put(AMParams.JOB_ID, "job_01_01");
  appController.getProperty().put(AMParams.TASK_ID, taskId);
}
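A hypothetical test built on this fixture (the job() action exists on AppController, but the verification below is illustrative, not one of the actual test bodies):

@Test
public void testJobActionUsesMockedJob() {
  // the property map was seeded with AMParams.JOB_ID in setUp(), so the
  // action should look the job up in the mocked context and consult its ACLs
  appController.job();
  // illustrative verification only; the real tests inspect rendered output
  verify(job, atLeastOnce()).checkAccess(any(UserGroupInformation.class),
      any(JobACL.class));
}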
Use of org.apache.hadoop.mapreduce.v2.app.job.Task in project hadoop by apache.
From the class TestBlocks, the method testAttemptsBlock:
/**
 * Test the AttemptsBlock's rendering.
 */
@Test
public void testAttemptsBlock() {
  AppContext ctx = mock(AppContext.class);
  AppForTest app = new AppForTest(ctx);
  JobId jobId = new JobIdPBImpl();
  jobId.setId(0);
  jobId.setAppId(ApplicationIdPBImpl.newInstance(0, 1));
  TaskId taskId = new TaskIdPBImpl();
  taskId.setId(0);
  taskId.setTaskType(TaskType.REDUCE);
  taskId.setJobId(jobId);
  Task task = mock(Task.class);
  when(task.getID()).thenReturn(taskId);
  TaskReport report = mock(TaskReport.class);
  when(task.getReport()).thenReturn(report);
  when(task.getType()).thenReturn(TaskType.REDUCE);
  Map<TaskId, Task> tasks = new HashMap<TaskId, Task>();
  Map<TaskAttemptId, TaskAttempt> attempts =
      new HashMap<TaskAttemptId, TaskAttempt>();
  // a single SUCCEEDED attempt for the reduce task
  TaskAttempt attempt = mock(TaskAttempt.class);
  TaskAttemptId taId = new TaskAttemptIdPBImpl();
  taId.setId(0);
  taId.setTaskId(task.getID());
  when(attempt.getID()).thenReturn(taId);
  final TaskAttemptState taState = TaskAttemptState.SUCCEEDED;
  when(attempt.getState()).thenReturn(taState);
  TaskAttemptReport taReport = mock(TaskAttemptReport.class);
  when(taReport.getTaskAttemptState()).thenReturn(taState);
  when(attempt.getReport()).thenReturn(taReport);
  attempts.put(taId, attempt);
  tasks.put(taskId, task);
  when(task.getAttempts()).thenReturn(attempts);
  app.setTask(task);
  Job job = mock(Job.class);
  when(job.getTasks(TaskType.REDUCE)).thenReturn(tasks);
  app.setJob(job);
  AttemptsBlockForTest block =
      new AttemptsBlockForTest(app, new Configuration());
  block.addParameter(AMParams.TASK_TYPE, "r");
  block.addParameter(AMParams.ATTEMPT_STATE, "SUCCESSFUL");
  PrintWriter pWriter = new PrintWriter(data);
  Block html = new BlockForTest(new HtmlBlockForTest(), pWriter, 0, false);
  block.render(html);
  pWriter.flush();
  // the rendered page should link the attempt to its task page
  assertTrue(data.toString().contains(
      "<a href='" + block.url("task", task.getID().toString()) + "'>"
          + "attempt_0_0001_r_000000_0</a>"));
}
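The expected link text follows the classic MapReduce attempt-ID format, attempt_<clusterTimestamp>_<appId>_<typeChar>_<taskId>_<attemptId>; with the IDs built above (timestamp 0, app 1, reduce, task 0, attempt 0) that is attempt_0_0001_r_000000_0. One way to reproduce the string, assuming the usual org.apache.hadoop.mapreduce.TypeConverter conversion, is:

// converts the YARN TaskAttemptId built above into its classic string form
TaskAttemptID classicId = TypeConverter.fromYarn(taId);
System.out.println(classicId); // attempt_0_0001_r_000000_0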
Use of org.apache.hadoop.mapreduce.v2.app.job.Task in project hadoop by apache.
From the class TestBlocks, the method testTasksBlock:
/**
 * Test rendering of the TasksBlock.
 */
@Test
public void testTasksBlock() throws Exception {
  ApplicationId appId = ApplicationIdPBImpl.newInstance(0, 1);
  JobId jobId = new JobIdPBImpl();
  jobId.setId(0);
  jobId.setAppId(appId);
  TaskId taskId = new TaskIdPBImpl();
  taskId.setId(0);
  taskId.setTaskType(TaskType.MAP);
  taskId.setJobId(jobId);
  Task task = mock(Task.class);
  when(task.getID()).thenReturn(taskId);
  TaskReport report = mock(TaskReport.class);
  when(report.getProgress()).thenReturn(0.7f);
  when(report.getTaskState()).thenReturn(TaskState.SUCCEEDED);
  when(report.getStartTime()).thenReturn(100001L);
  when(report.getFinishTime()).thenReturn(100011L);
  when(report.getStatus()).thenReturn("Dummy Status \n*");
  when(task.getReport()).thenReturn(report);
  when(task.getType()).thenReturn(TaskType.MAP);
  Map<TaskId, Task> tasks = new HashMap<TaskId, Task>();
  tasks.put(taskId, task);
  AppContext ctx = mock(AppContext.class);
  Job job = mock(Job.class);
  when(job.getTasks()).thenReturn(tasks);
  App app = new App(ctx);
  app.setJob(job);
  TasksBlockForTest taskBlock = new TasksBlockForTest(app);
  taskBlock.addParameter(AMParams.TASK_TYPE, "m");
  PrintWriter pWriter = new PrintWriter(data);
  Block html = new BlockForTest(new HtmlBlockForTest(), pWriter, 0, false);
  taskBlock.render(html);
  pWriter.flush();
  assertTrue(data.toString().contains("task_0_0001_m_000000"));
  assertTrue(data.toString().contains("70.00"));
  assertTrue(data.toString().contains("SUCCEEDED"));
  assertTrue(data.toString().contains("100001"));
  assertTrue(data.toString().contains("100011"));
  assertFalse(data.toString().contains("Dummy Status \n*"));
  assertTrue(data.toString().contains("Dummy Status \\n*"));
}
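The progress and status assertions hinge on two rendering details: the progress of 0.7f is printed as a two-decimal percentage, and the literal newline in the status string is escaped before it reaches the page. Illustrative only (this is not the TasksBlock implementation):

String progressCell = String.format("%.2f", 0.7f * 100);  // "70.00"
String escaped = "Dummy Status \n*".replace("\n", "\\n");  // "Dummy Status \\n*"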
Use of org.apache.hadoop.mapreduce.v2.app.job.Task in project hadoop by apache.
From the class TestAMWebServicesAttempts, the method testTaskAttemptsSlash:
@Test
public void testTaskAttemptsSlash() throws Exception {
  WebResource r = resource();
  Map<JobId, Job> jobsMap = appContext.getAllJobs();
  for (JobId id : jobsMap.keySet()) {
    String jobId = MRApps.toString(id);
    for (Task task : jobsMap.get(id).getTasks().values()) {
      String tid = MRApps.toString(task.getID());
      ClientResponse response = r.path("ws").path("v1").path("mapreduce")
          .path("jobs").path(jobId).path("tasks").path(tid)
          .path("attempts/").accept(MediaType.APPLICATION_JSON)
          .get(ClientResponse.class);
      assertEquals(MediaType.APPLICATION_JSON_TYPE + "; " + JettyUtils.UTF_8,
          response.getType().toString());
      JSONObject json = response.getEntity(JSONObject.class);
      verifyAMTaskAttempts(json, task);
    }
  }
}
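verifyAMTaskAttempts is a helper defined elsewhere in TestAMWebServicesAttempts. The resource it checks is the AM's standard attempts endpoint, which wraps the attempt list in a taskAttempts/taskAttempt envelope; a rough sketch of the kind of check involved (assumed, not the actual helper):

public void verifyAMTaskAttempts(JSONObject json, Task task)
    throws JSONException {
  // the REST response nests the attempt array under "taskAttempts"
  JSONObject attempts = json.getJSONObject("taskAttempts");
  JSONArray arr = attempts.getJSONArray("taskAttempt");
  assertEquals("incorrect number of elements",
      task.getAttempts().size(), arr.length());
  // the real helper goes on to compare each element's fields against the
  // corresponding TaskAttempt from the mocked app context
}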