Use of org.apache.hadoop.mapreduce.v2.app.job.Job in project hadoop by apache.
In class TestHsWebServicesAcls, the setup() method.
@Before
public void setup() throws IOException {
  this.conf = new JobConf();
  // Use a group mapping that resolves no groups, and turn MapReduce ACLs on.
  this.conf.set(CommonConfigurationKeys.HADOOP_SECURITY_GROUP_MAPPING,
      NullGroupsProvider.class.getName());
  this.conf.setBoolean(MRConfig.MR_ACLS_ENABLED, true);
  // Initialize the shared group mapping service with this configuration.
  Groups.getUserToGroupsMappingService(conf);
  this.ctx = buildHistoryContext(this.conf);
  WebApp webApp = mock(HsWebApp.class);
  when(webApp.name()).thenReturn("hsmockwebapp");
  this.hsWebServices = new HsWebServices(ctx, conf, webApp);
  this.hsWebServices.setResponse(mock(HttpServletResponse.class));
  // Remember the first job, task and task attempt from the mocked history context.
  Job job = ctx.getAllJobs().values().iterator().next();
  this.jobIdStr = job.getID().toString();
  Task task = job.getTasks().values().iterator().next();
  this.taskIdStr = task.getID().toString();
  this.taskAttemptIdStr =
      task.getAttempts().keySet().iterator().next().toString();
}
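For illustration only, a minimal sketch of how this fixture might be exercised. It assumes that HsWebServices exposes a getJob(HttpServletRequest, String) endpoint method and that the jobs in the mocked history context carry a restrictive view ACL, so a caller other than the job owner is rejected; the request mock, user name and test name below are hypothetical.
@Test
public void sketchNonOwnerIsRejected() throws Exception {
  // Hypothetical request whose remote user is not the job owner.
  HttpServletRequest request = mock(HttpServletRequest.class);
  when(request.getRemoteUser()).thenReturn("unauthorizedUser");
  try {
    // Assumed endpoint signature: getJob(HttpServletRequest, String jobId).
    hsWebServices.getJob(request, this.jobIdStr);
    fail("expected the request to be rejected by the job's view ACL");
  } catch (WebApplicationException e) {
    // Rejected as expected; the exact HTTP status depends on the ACL check.
  }
}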
Use of org.apache.hadoop.mapreduce.v2.app.job.Job in project hadoop by apache.
In class TestHsWebServicesAttempts, the testTaskAttemptIdSlash() method.
@Test
public void testTaskAttemptIdSlash() throws JSONException, Exception {
  WebResource r = resource();
  Map<JobId, Job> jobsMap = appContext.getAllJobs();
  for (JobId id : jobsMap.keySet()) {
    String jobId = MRApps.toString(id);
    for (Task task : jobsMap.get(id).getTasks().values()) {
      String tid = MRApps.toString(task.getID());
      for (TaskAttempt att : task.getAttempts().values()) {
        TaskAttemptId attemptid = att.getID();
        String attid = MRApps.toString(attemptid);
        // Same lookup as the default test, but with a trailing slash on the attempt id.
        ClientResponse response = r.path("ws").path("v1").path("history")
            .path("mapreduce").path("jobs").path(jobId).path("tasks").path(tid)
            .path("attempts").path(attid + "/")
            .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
        assertEquals(MediaType.APPLICATION_JSON_TYPE + "; " + JettyUtils.UTF_8,
            response.getType().toString());
        JSONObject json = response.getEntity(JSONObject.class);
        assertEquals("incorrect number of elements", 1, json.length());
        JSONObject info = json.getJSONObject("taskAttempt");
        verifyHsTaskAttempt(info, att, task.getType());
      }
    }
  }
}
Use of org.apache.hadoop.mapreduce.v2.app.job.Job in project hadoop by apache.
In class TestHsWebServicesAttempts, the testTaskAttemptIdDefault() method.
@Test
public void testTaskAttemptIdDefault() throws JSONException, Exception {
  WebResource r = resource();
  Map<JobId, Job> jobsMap = appContext.getAllJobs();
  for (JobId id : jobsMap.keySet()) {
    String jobId = MRApps.toString(id);
    for (Task task : jobsMap.get(id).getTasks().values()) {
      String tid = MRApps.toString(task.getID());
      for (TaskAttempt att : task.getAttempts().values()) {
        TaskAttemptId attemptid = att.getID();
        String attid = MRApps.toString(attemptid);
        // No Accept header: the history server should default to a JSON response.
        ClientResponse response = r.path("ws").path("v1").path("history")
            .path("mapreduce").path("jobs").path(jobId).path("tasks").path(tid)
            .path("attempts").path(attid).get(ClientResponse.class);
        assertEquals(MediaType.APPLICATION_JSON_TYPE + "; " + JettyUtils.UTF_8,
            response.getType().toString());
        JSONObject json = response.getEntity(JSONObject.class);
        assertEquals("incorrect number of elements", 1, json.length());
        JSONObject info = json.getJSONObject("taskAttempt");
        verifyHsTaskAttempt(info, att, task.getType());
      }
    }
  }
}
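A natural negative-path companion, sketched under the assumption that the history web service answers an unresolvable attempt id with an HTTP error status; the exact code (400 vs. 404) is not asserted because it depends on how the id fails to resolve. The attempt id string and test name below are made up.
@Test
public void sketchTaskAttemptIdInvalid() throws Exception {
  WebResource r = resource();
  Map<JobId, Job> jobsMap = appContext.getAllJobs();
  for (JobId id : jobsMap.keySet()) {
    String jobId = MRApps.toString(id);
    for (Task task : jobsMap.get(id).getTasks().values()) {
      String tid = MRApps.toString(task.getID());
      // "bogus_attempt_id" is a placeholder that cannot resolve to a real attempt.
      ClientResponse response = r.path("ws").path("v1").path("history")
          .path("mapreduce").path("jobs").path(jobId).path("tasks").path(tid)
          .path("attempts").path("bogus_attempt_id")
          .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
      assertTrue("expected an error status for an invalid attempt id",
          response.getStatus() >= 400);
    }
  }
}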
Use of org.apache.hadoop.mapreduce.v2.app.job.Job in project hadoop by apache.
In class TestHistoryFileManager, the testHistoryFileInfoShouldReturnCompletedJobIfMaxNotConfiged() method.
@Test
public void testHistoryFileInfoShouldReturnCompletedJobIfMaxNotConfiged()
    throws Exception {
  HistoryFileManagerTest hmTest = new HistoryFileManagerTest();
  Configuration conf = dfsCluster.getConfiguration(0);
  // A negative value leaves the number of tasks per loaded job uncapped
  // (the "not configured" case this test exercises).
  conf.setInt(JHAdminConfig.MR_HS_LOADED_JOBS_TASKS_MAX, -1);
  hmTest.init(conf);
  final String jobId = "job_1416424547277_0002";
  JobIndexInfo jobIndexInfo = new JobIndexInfo();
  jobIndexInfo.setJobId(TypeConverter.toYarn(JobID.forName(jobId)));
  jobIndexInfo.setNumMaps(100);
  jobIndexInfo.setNumReduces(100);
  final String historyFile = getClass().getClassLoader()
      .getResource("job_2.0.3-alpha-FAILED.jhist").getFile();
  final Path historyFilePath =
      FileSystem.getLocal(conf).makeQualified(new Path(historyFile));
  HistoryFileInfo info =
      hmTest.getHistoryFileInfo(historyFilePath, null, null, jobIndexInfo, false);
  Job job = info.loadJob();
  Assert.assertTrue("Should return an instance of CompletedJob as "
      + "a result of parsing the job history file of the job",
      job instanceof CompletedJob);
}
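The configuration knob can also be sketched in the other direction, under the assumption that when JHAdminConfig.MR_HS_LOADED_JOBS_TASKS_MAX is set below the job's indexed task count the manager avoids fully parsing the history file, so loadJob() yields something other than a CompletedJob (the exact substitute type is not asserted). The limit value and test name below are illustrative.
@Test
public void sketchHistoryFileInfoWithSmallTaskLimit() throws Exception {
  HistoryFileManagerTest hmTest = new HistoryFileManagerTest();
  Configuration conf = dfsCluster.getConfiguration(0);
  // Cap loaded jobs at 10 tasks, far below the 200 tasks indexed below.
  conf.setInt(JHAdminConfig.MR_HS_LOADED_JOBS_TASKS_MAX, 10);
  hmTest.init(conf);
  JobIndexInfo jobIndexInfo = new JobIndexInfo();
  jobIndexInfo.setJobId(
      TypeConverter.toYarn(JobID.forName("job_1416424547277_0002")));
  jobIndexInfo.setNumMaps(100);
  jobIndexInfo.setNumReduces(100);
  final String historyFile = getClass().getClassLoader()
      .getResource("job_2.0.3-alpha-FAILED.jhist").getFile();
  final Path historyFilePath =
      FileSystem.getLocal(conf).makeQualified(new Path(historyFile));
  HistoryFileInfo info =
      hmTest.getHistoryFileInfo(historyFilePath, null, null, jobIndexInfo, false);
  // Assumption: with the limit exceeded, the job is not loaded as a CompletedJob.
  Assert.assertFalse("Should not fully parse a job above the configured task limit",
      info.loadJob() instanceof CompletedJob);
}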
Use of org.apache.hadoop.mapreduce.v2.app.job.Job in project hadoop by apache.
In class JobHistoryFileReplayMapperV1, the map() method.
public void map(IntWritable key, IntWritable val, Context context)
    throws IOException {
  // collect the apps it needs to process
  TimelineClient tlc = new TimelineClientImpl();
  TimelineEntityConverterV1 converter = new TimelineEntityConverterV1();
  JobHistoryFileReplayHelper helper = new JobHistoryFileReplayHelper(context);
  int replayMode = helper.getReplayMode();
  Collection<JobFiles> jobs = helper.getJobFiles();
  JobHistoryFileParser parser = helper.getParser();
  if (jobs.isEmpty()) {
    LOG.info(context.getTaskAttemptID().getTaskID() + " will process no jobs");
  } else {
    LOG.info(context.getTaskAttemptID().getTaskID() + " will process "
        + jobs.size() + " jobs");
  }
  for (JobFiles job : jobs) {
    // process each job
    String jobIdStr = job.getJobId();
    LOG.info("processing " + jobIdStr + "...");
    JobId jobId = TypeConverter.toYarn(JobID.forName(jobIdStr));
    ApplicationId appId = jobId.getAppId();
    try {
      // parse the job info and configuration
      Path historyFilePath = job.getJobHistoryFilePath();
      Path confFilePath = job.getJobConfFilePath();
      if ((historyFilePath == null) || (confFilePath == null)) {
        continue;
      }
      JobInfo jobInfo = parser.parseHistoryFile(historyFilePath);
      Configuration jobConf = parser.parseConfiguration(confFilePath);
      LOG.info("parsed the job history file and the configuration file for job "
          + jobIdStr);
      // create entities from job history and write them
      long totalTime = 0;
      Set<TimelineEntity> entitySet =
          converter.createTimelineEntities(jobInfo, jobConf);
      LOG.info("converted them into timeline entities for job " + jobIdStr);
      // use the current user for this purpose
      UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
      long startWrite = System.nanoTime();
      try {
        switch (replayMode) {
          case JobHistoryFileReplayHelper.WRITE_ALL_AT_ONCE:
            writeAllEntities(tlc, entitySet, ugi);
            break;
          case JobHistoryFileReplayHelper.WRITE_PER_ENTITY:
            writePerEntity(tlc, entitySet, ugi);
            break;
          default:
            break;
        }
      } catch (Exception e) {
        context.getCounter(PerfCounters.TIMELINE_SERVICE_WRITE_FAILURES)
            .increment(1);
        LOG.error("writing to the timeline service failed", e);
      }
      long endWrite = System.nanoTime();
      totalTime += TimeUnit.NANOSECONDS.toMillis(endWrite - startWrite);
      int numEntities = entitySet.size();
      LOG.info("wrote " + numEntities + " entities in " + totalTime + " ms");
      context.getCounter(PerfCounters.TIMELINE_SERVICE_WRITE_TIME)
          .increment(totalTime);
      context.getCounter(PerfCounters.TIMELINE_SERVICE_WRITE_COUNTER)
          .increment(numEntities);
    } finally {
      // move it along
      context.progress();
    }
  }
}