Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity in project hadoop by apache.
The class TimelineServiceV2Publisher, method containerCreated:
@SuppressWarnings("unchecked")
@Override
public void containerCreated(RMContainer container, long createdTime) {
  if (publishContainerEvents) {
    TimelineEntity entity = createContainerEntity(container.getContainerId());
    entity.setCreatedTime(createdTime);
    TimelineEvent tEvent = new TimelineEvent();
    tEvent.setId(ContainerMetricsConstants.CREATED_IN_RM_EVENT_TYPE);
    tEvent.setTimestamp(createdTime);
    entity.addEvent(tEvent);
    // updated as event info instead of entity info, as entity info is updated
    // by NM
    Map<String, Object> entityInfo = new HashMap<String, Object>();
    entityInfo.put(ContainerMetricsConstants.ALLOCATED_MEMORY_INFO,
        container.getAllocatedResource().getMemorySize());
    entityInfo.put(ContainerMetricsConstants.ALLOCATED_VCORE_INFO,
        container.getAllocatedResource().getVirtualCores());
    entityInfo.put(ContainerMetricsConstants.ALLOCATED_HOST_INFO,
        container.getAllocatedNode().getHost());
    entityInfo.put(ContainerMetricsConstants.ALLOCATED_PORT_INFO,
        container.getAllocatedNode().getPort());
    entityInfo.put(ContainerMetricsConstants.ALLOCATED_PRIORITY_INFO,
        container.getAllocatedPriority().getPriority());
    entityInfo.put(ContainerMetricsConstants.ALLOCATED_HOST_HTTP_ADDRESS_INFO,
        container.getNodeHttpAddress());
    entity.setInfo(entityInfo);
    getDispatcher().getEventHandler().handle(
        new TimelineV2PublishEvent(SystemMetricsEventType.PUBLISH_ENTITY, entity,
            container.getContainerId().getApplicationAttemptId().getApplicationId()));
  }
}
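The helper createContainerEntity is not part of this excerpt. A minimal sketch of what such a builder could look like, assuming the entity id is the stringified ContainerId and using an illustrative type label; the real publisher may use a dedicated ContainerEntity class and its own constants:

  // Hedged sketch, not the publisher's actual helper.
  private TimelineEntity createContainerEntity(ContainerId containerId) {
    TimelineEntity entity = new TimelineEntity();
    entity.setId(containerId.toString()); // assumed: container id string as the entity id
    entity.setType("YARN_CONTAINER");     // illustrative type label
    return entity;
  }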
Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity in project hadoop by apache.
The class TimelineEntityConverterV2, method createTaskAttemptEntities:
private Set<TimelineEntity> createTaskAttemptEntities(TaskInfo taskInfo) {
  Set<TimelineEntity> taskAttempts = new HashSet<TimelineEntity>();
  Map<TaskAttemptID, TaskAttemptInfo> taskAttemptInfoMap =
      taskInfo.getAllTaskAttempts();
  LOG.info("task " + taskInfo.getTaskId() + " has " +
      taskAttemptInfoMap.size() + " task attempts");
  for (TaskAttemptInfo taskAttemptInfo : taskAttemptInfoMap.values()) {
    TimelineEntity taskAttempt = createTaskAttemptEntity(taskAttemptInfo);
    taskAttempts.add(taskAttempt);
  }
  return taskAttempts;
}
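The createTaskAttemptEntity helper it calls is not shown here. A hedged sketch of how a single attempt could be converted, assuming TaskAttemptInfo exposes getAttemptId() and getStartTime() and using an illustrative entity type; the converter's real field mapping may differ:

  // Hedged sketch, not the converter's actual implementation.
  private TimelineEntity createTaskAttemptEntitySketch(TaskAttemptInfo taskAttemptInfo) {
    TimelineEntity taskAttempt = new TimelineEntity();
    taskAttempt.setType("MAPREDUCE_TASK_ATTEMPT");                // illustrative type label
    taskAttempt.setId(taskAttemptInfo.getAttemptId().toString()); // assumed accessor
    taskAttempt.setCreatedTime(taskAttemptInfo.getStartTime());   // assumed accessor
    return taskAttempt;
  }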
Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity in project hadoop by apache.
The class TimelineEntityConverterV2, method createTaskAndTaskAttemptEntities:
private List<TimelineEntity> createTaskAndTaskAttemptEntities(JobInfo jobInfo) {
  List<TimelineEntity> entities = new ArrayList<>();
  Map<TaskID, TaskInfo> taskInfoMap = jobInfo.getAllTasks();
  LOG.info("job " + jobInfo.getJobId() + " has " + taskInfoMap.size() + " tasks");
  for (TaskInfo taskInfo : taskInfoMap.values()) {
    TimelineEntity task = createTaskEntity(taskInfo);
    entities.add(task);
    // add the task attempts from this task
    Set<TimelineEntity> taskAttempts = createTaskAttemptEntities(taskInfo);
    entities.addAll(taskAttempts);
  }
  return entities;
}
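The v2 TimelineEntity API also supports explicit links between related entities. Whether this converter sets such links is not shown above; a hedged sketch of tying a task entity to one of its attempts with relatesTo/isRelatedTo edges, using illustrative ids and type labels:

  // Hedged sketch: linking a task entity and one of its attempt entities.
  TimelineEntity task = new TimelineEntity();
  task.setType("MAPREDUCE_TASK");                          // illustrative type label
  task.setId("task_1234567890123_0001_m_000000");          // hypothetical id

  TimelineEntity attempt = new TimelineEntity();
  attempt.setType("MAPREDUCE_TASK_ATTEMPT");               // illustrative type label
  attempt.setId("attempt_1234567890123_0001_m_000000_0");  // hypothetical id

  // parent -> child and child -> parent edges
  task.addRelatesToEntity(attempt.getType(), attempt.getId());
  attempt.addIsRelatedToEntity(task.getType(), task.getId());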
Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity in project hadoop by apache.
The class SimpleEntityWriterV2, method writeEntities:
protected void writeEntities(Configuration tlConf, TimelineCollectorManager manager,
    Context context) throws IOException {
  Configuration conf = context.getConfiguration();
  // simulate the app id with the task id
  int taskId = context.getTaskAttemptID().getTaskID().getId();
  long timestamp = conf.getLong(TIMELINE_SERVICE_PERFORMANCE_RUN_ID, 0);
  ApplicationId appId = ApplicationId.newInstance(timestamp, taskId);
  // create the app level timeline collector
  AppLevelTimelineCollector collector = new AppLevelTimelineCollector(appId);
  manager.putIfAbsent(appId, collector);
  try {
    // set the context
    // flow id: job name, flow run id: timestamp, user id
    TimelineCollectorContext tlContext = collector.getTimelineEntityContext();
    tlContext.setFlowName(context.getJobName());
    tlContext.setFlowRunId(timestamp);
    tlContext.setUserId(context.getUser());
    final int kbs = conf.getInt(KBS_SENT, KBS_SENT_DEFAULT);
    long totalTime = 0;
    final int testtimes = conf.getInt(TEST_TIMES, TEST_TIMES_DEFAULT);
    final Random rand = new Random();
    final TaskAttemptID taskAttemptId = context.getTaskAttemptID();
    final char[] payLoad = new char[kbs * 1024];
    for (int i = 0; i < testtimes; i++) {
      // generate a fixed-length random payload
      for (int xx = 0; xx < kbs * 1024; xx++) {
        int alphaNumIdx = rand.nextInt(ALPHA_NUMS.length);
        payLoad[xx] = ALPHA_NUMS[alphaNumIdx];
      }
      String entId = taskAttemptId + "_" + Integer.toString(i);
      final TimelineEntity entity = new TimelineEntity();
      entity.setId(entId);
      entity.setType("FOO_ATTEMPT");
      entity.addInfo("PERF_TEST", payLoad);
      // add an event
      TimelineEvent event = new TimelineEvent();
      event.setId("foo_event_id");
      event.setTimestamp(System.currentTimeMillis());
      event.addInfo("foo_event", "test");
      entity.addEvent(event);
      // add a metric
      TimelineMetric metric = new TimelineMetric();
      metric.setId("foo_metric");
      metric.addValue(System.currentTimeMillis(), 123456789L);
      entity.addMetric(metric);
      // add a config
      entity.addConfig("foo", "bar");
      TimelineEntities entities = new TimelineEntities();
      entities.addEntity(entity);
      // use the current user for this purpose
      UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
      long startWrite = System.nanoTime();
      try {
        collector.putEntities(entities, ugi);
      } catch (Exception e) {
        context.getCounter(PerfCounters.TIMELINE_SERVICE_WRITE_FAILURES).increment(1);
        LOG.error("writing to the timeline service failed", e);
      }
      long endWrite = System.nanoTime();
      totalTime += TimeUnit.NANOSECONDS.toMillis(endWrite - startWrite);
    }
    LOG.info("wrote " + testtimes + " entities (" + kbs * testtimes + " kB) in " +
        totalTime + " ms");
    context.getCounter(PerfCounters.TIMELINE_SERVICE_WRITE_TIME).increment(totalTime);
    context.getCounter(PerfCounters.TIMELINE_SERVICE_WRITE_COUNTER).increment(testtimes);
    context.getCounter(PerfCounters.TIMELINE_SERVICE_WRITE_KBS).increment(kbs * testtimes);
  } finally {
    // clean up
    manager.remove(appId);
  }
}
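The loop above attaches a single-value metric. TimelineMetric can also carry a time series of values; a hedged sketch, assuming the TimelineMetric.Type enum with TIME_SERIES (the metric id, timestamps, and values here are illustrative):

  // Hedged sketch: a time-series metric instead of the single-value metric used above.
  TimelineEntity tsEntity = new TimelineEntity();
  tsEntity.setId("FOO_ATTEMPT_time_series_example");  // hypothetical id
  tsEntity.setType("FOO_ATTEMPT");

  TimelineMetric cpuMetric = new TimelineMetric();
  cpuMetric.setId("CPU");                              // hypothetical metric id
  cpuMetric.setType(TimelineMetric.Type.TIME_SERIES);  // assumed enum constant
  long now = System.currentTimeMillis();
  cpuMetric.addValue(now - 2000, 40L);
  cpuMetric.addValue(now - 1000, 55L);
  cpuMetric.addValue(now, 60L);
  tsEntity.addMetric(cpuMetric);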
Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity in project hadoop by apache.
The class TestSystemMetricsPublisherForV2, method verifyEntity:
private static void verifyEntity(File entityFile, long expectedEvents,
    String eventForCreatedTime, long expectedMetrics) throws IOException {
  BufferedReader reader = null;
  String strLine;
  long count = 0;
  long metricsCount = 0;
  try {
    reader = new BufferedReader(new FileReader(entityFile));
    while ((strLine = reader.readLine()) != null) {
      if (strLine.trim().length() > 0) {
        TimelineEntity entity = FileSystemTimelineReaderImpl
            .getTimelineRecordFromJSON(strLine.trim(), TimelineEntity.class);
        metricsCount = entity.getMetrics().size();
        for (TimelineEvent event : entity.getEvents()) {
          if (event.getId().equals(eventForCreatedTime)) {
            assertTrue(entity.getCreatedTime() > 0);
            break;
          }
        }
        count++;
      }
    }
  } finally {
    reader.close();
  }
  assertEquals("Expected " + expectedEvents + " events to be published",
      expectedEvents, count);
  assertEquals("Number of published metrics is incorrect", expectedMetrics,
      metricsCount);
}
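One design note on the reader handling: if the FileReader constructor throws, the finally block above calls close() on a null reader and masks the original exception with a NullPointerException. A hedged sketch of the same read loop written with try-with-resources, which closes the reader safely; the parsing call and assertions stay as in verifyEntity above:

  // Hedged sketch: the same line-by-line read using try-with-resources.
  try (BufferedReader reader = new BufferedReader(new FileReader(entityFile))) {
    String strLine;
    while ((strLine = reader.readLine()) != null) {
      if (strLine.trim().length() > 0) {
        TimelineEntity entity = FileSystemTimelineReaderImpl
            .getTimelineRecordFromJSON(strLine.trim(), TimelineEntity.class);
        // ... same event and metric checks as in verifyEntity above ...
      }
    }
  }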