Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEntity in the Apache Hadoop project.
Class TimelineServiceV1Publisher, method createApplicationEntity.
/**
 * Builds a minimal v1 timeline entity representing the given YARN application.
 *
 * @param applicationId the application to represent
 * @return a {@code TimelineEntity} carrying only the application entity type
 *         and the application id as its entity id
 */
private static TimelineEntity createApplicationEntity(ApplicationId applicationId) {
  final TimelineEntity appEntity = new TimelineEntity();
  appEntity.setEntityId(applicationId.toString());
  appEntity.setEntityType(ApplicationMetricsConstants.ENTITY_TYPE);
  return appEntity;
}
Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEntity in the Apache Hadoop project.
Class TimelineEntityConverterV1, method createTaskEntity.
/**
 * Converts a single task's job-history information into a v1 timeline entity.
 *
 * <p>The entity id is the task id, the start time is taken from the history
 * record, and timing/type/status/error details are attached as other-info
 * key/value pairs.
 *
 * @param taskInfo the job-history record for one task
 * @return the populated timeline entity for the task
 */
private TimelineEntity createTaskEntity(TaskInfo taskInfo) {
  final TimelineEntity taskEntity = new TimelineEntity();
  taskEntity.setEntityType(TASK);
  taskEntity.setEntityId(taskInfo.getTaskId().toString());
  taskEntity.setStartTime(taskInfo.getStartTime());
  // Attach the remaining task attributes as free-form other-info entries.
  taskEntity.addOtherInfo("START_TIME", taskInfo.getStartTime());
  taskEntity.addOtherInfo("FINISH_TIME", taskInfo.getFinishTime());
  taskEntity.addOtherInfo("TASK_TYPE", taskInfo.getTaskType());
  taskEntity.addOtherInfo("TASK_STATUS", taskInfo.getTaskStatus());
  taskEntity.addOtherInfo("ERROR_INFO", taskInfo.getError());
  LOG.info("converted task " + taskInfo.getTaskId() + " to a timeline entity");
  return taskEntity;
}
Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEntity in the Apache Hadoop project.
Class TimelineEntityConverterV1, method createTimelineEntities.
/**
 * Creates job, task, and task attempt entities based on the job history info
 * and configuration.
 *
 * <p>Note: currently these are plain timeline entities created for mapreduce
 * types. They are not meant to be the complete and accurate entity set-up for
 * mapreduce jobs, and no hierarchical timeline entities are used. If canonical
 * mapreduce hierarchical timeline entities with a proper parent-child
 * relationship are introduced, this could be modified to use them instead.
 *
 * <p>Note also that no info is added to the YARN application entity, which
 * would be needed for aggregation.
 *
 * @param jobInfo the job-history record for the job
 * @param conf the configuration used when building the job entity
 * @return the set of all entities created for the job and its tasks/attempts
 */
public Set<TimelineEntity> createTimelineEntities(JobInfo jobInfo, Configuration conf) {
  Set<TimelineEntity> result = new HashSet<>();
  // The job entity first, then every task and task-attempt entity.
  result.add(createJobEntity(jobInfo, conf));
  result.addAll(createTaskAndTaskAttemptEntities(jobInfo));
  return result;
}
Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEntity in the Apache Hadoop project.
Class TimelineEntityConverterV1, method createJobEntity.
/**
 * Converts a job's history information into a v1 timeline entity.
 *
 * <p>The entity id is the job id and the start time is the submit time.
 * Job name and user name are added as primary filters (making the entity
 * queryable by either), while the remaining attributes become other-info
 * key/value pairs.
 *
 * @param jobInfo the job-history record for the job
 * @param conf the configuration associated with the job (currently unused
 *             here beyond matching the converter's calling convention)
 * @return the populated timeline entity for the job
 */
private TimelineEntity createJobEntity(JobInfo jobInfo, Configuration conf) {
  final TimelineEntity jobEntity = new TimelineEntity();
  jobEntity.setEntityType(JOB);
  jobEntity.setEntityId(jobInfo.getJobId().toString());
  jobEntity.setStartTime(jobInfo.getSubmitTime());
  // Primary filters allow lookup by job name or submitting user.
  jobEntity.addPrimaryFilter("JOBNAME", jobInfo.getJobname());
  jobEntity.addPrimaryFilter("USERNAME", jobInfo.getUsername());
  // Everything else rides along as free-form other-info entries.
  jobEntity.addOtherInfo("JOB_QUEUE_NAME", jobInfo.getJobQueueName());
  jobEntity.addOtherInfo("SUBMIT_TIME", jobInfo.getSubmitTime());
  jobEntity.addOtherInfo("LAUNCH_TIME", jobInfo.getLaunchTime());
  jobEntity.addOtherInfo("FINISH_TIME", jobInfo.getFinishTime());
  jobEntity.addOtherInfo("JOB_STATUS", jobInfo.getJobStatus());
  jobEntity.addOtherInfo("PRIORITY", jobInfo.getPriority());
  jobEntity.addOtherInfo("TOTAL_MAPS", jobInfo.getTotalMaps());
  jobEntity.addOtherInfo("TOTAL_REDUCES", jobInfo.getTotalReduces());
  jobEntity.addOtherInfo("UBERIZED", jobInfo.getUberized());
  jobEntity.addOtherInfo("ERROR_INFO", jobInfo.getErrorInfo());
  LOG.info("converted job " + jobInfo.getJobId() + " to a timeline entity");
  return jobEntity;
}
Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEntity in the Apache Hadoop project.
Class TestTimelineClientForATS1_5, method testPostEntities.
@Test
public void testPostEntities() throws Exception {
  // Fix: removed the `catch (Exception e) { Assert.fail(...) }` wrapper.
  // The method already declares `throws Exception`, so letting an exception
  // propagate makes JUnit report the full stack trace instead of only
  // e.toString(), which made failures here hard to diagnose.
  ApplicationId appId = ApplicationId.newInstance(System.currentTimeMillis(), 1);
  TimelineEntityGroupId groupId = TimelineEntityGroupId.newInstance(appId, "1");
  TimelineEntityGroupId groupId2 = TimelineEntityGroupId.newInstance(appId, "2");
  // Create two entities: one of a plain entity type and one of a summary type.
  TimelineEntity[] entities = new TimelineEntity[2];
  entities[0] = generateEntity("entity_type");
  entities[1] = generateEntity("summary_type");
  // Case 1: attemptId is null -> falls back to the original putEntities call
  // and saves the entities into the configured levelDB store.
  client.putEntities(null, null, entities);
  verify(spyTimelineWriter, times(1)).putEntities(entities);
  reset(spyTimelineWriter);
  // Case 2: attemptId is given but groupId is null -> the entity-type entity
  // falls back to the original putEntities call, while the summary-type
  // entity is written to the filesystem.
  ApplicationAttemptId attemptId1 = ApplicationAttemptId.newInstance(appId, 1);
  client.putEntities(attemptId1, null, entities);
  TimelineEntity[] entityTDB = new TimelineEntity[1];
  entityTDB[0] = entities[0];
  verify(spyTimelineWriter, times(1)).putEntities(entityTDB);
  Assert.assertTrue(localFS.util().exists(
      new Path(getAppAttemptDir(attemptId1), "summarylog-" + attemptId1.toString())));
  reset(spyTimelineWriter);
  // Case 3: both attemptId and groupId are given -> everything is saved to
  // the filesystem instead of the levelDB store; the writer is never called.
  ApplicationAttemptId attemptId2 = ApplicationAttemptId.newInstance(appId, 2);
  client.putEntities(attemptId2, groupId, entities);
  client.putEntities(attemptId2, groupId2, entities);
  verify(spyTimelineWriter, times(0)).putEntities(any(TimelineEntity[].class));
  Assert.assertTrue(localFS.util().exists(
      new Path(getAppAttemptDir(attemptId2), "summarylog-" + attemptId2.toString())));
  Assert.assertTrue(localFS.util().exists(
      new Path(getAppAttemptDir(attemptId2), "entitylog-" + groupId.toString())));
  Assert.assertTrue(localFS.util().exists(
      new Path(getAppAttemptDir(attemptId2), "entitylog-" + groupId2.toString())));
  reset(spyTimelineWriter);
}
Aggregations