Use of org.apache.hadoop.yarn.api.records.timelineservice.ContainerEntity in project Hadoop by Apache.
The class TestTimelineServiceClientIntegration, method testPutExtendedEntities:
@Test
public void testPutExtendedEntities() throws Exception {
  ApplicationId appId = ApplicationId.newInstance(0, 1);
  TimelineV2Client client = TimelineV2Client.createTimelineClient(appId);
  try {
    // set the timeline service address manually
    client.setTimelineServiceAddress(
        collectorManager.getRestServerBindAddress());
    client.init(conf);
    client.start();
    // Build the full entity hierarchy: cluster -> flow run -> application
    // -> application attempt -> container, plus user and queue entities.
    ClusterEntity cluster = new ClusterEntity();
    cluster.setId(YarnConfiguration.DEFAULT_RM_CLUSTER_ID);
    FlowRunEntity flow = new FlowRunEntity();
    flow.setUser(UserGroupInformation.getCurrentUser().getShortUserName());
    flow.setName("test_flow_name");
    flow.setVersion("test_flow_version");
    flow.setRunId(1L);
    flow.setParent(cluster.getType(), cluster.getId());
    ApplicationEntity app = new ApplicationEntity();
    app.setId(appId.toString());
    flow.addChild(app.getType(), app.getId());
    ApplicationAttemptId attemptId = ApplicationAttemptId.newInstance(appId, 1);
    ApplicationAttemptEntity appAttempt = new ApplicationAttemptEntity();
    appAttempt.setId(attemptId.toString());
    ContainerId containerId = ContainerId.newContainerId(attemptId, 1);
    ContainerEntity container = new ContainerEntity();
    container.setId(containerId.toString());
    UserEntity user = new UserEntity();
    user.setId(UserGroupInformation.getCurrentUser().getShortUserName());
    QueueEntity queue = new QueueEntity();
    queue.setId("default_queue");
    client.putEntities(cluster, flow, app, appAttempt, container, user, queue);
    client.putEntitiesAsync(cluster, flow, app, appAttempt, container, user,
        queue);
  } finally {
    client.stop();
  }
}
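The two calls at the end exercise both publishing paths: putEntities blocks until the collector acknowledges the write, while putEntitiesAsync returns immediately. A minimal hedged sketch of just the attempt-to-container wiring, reusing the started client and appId from the test above (the sketch* variable names are illustrative, not part of the test):

// Minimal sketch: link a container entity to its application attempt and
// publish it through the already-started TimelineV2Client named client.
ApplicationAttemptId sketchAttemptId =
    ApplicationAttemptId.newInstance(appId, 2);
ContainerId sketchContainerId = ContainerId.newContainerId(sketchAttemptId, 1);
ApplicationAttemptEntity attempt = new ApplicationAttemptEntity();
attempt.setId(sketchAttemptId.toString());
ContainerEntity sketchContainer = new ContainerEntity();
sketchContainer.setId(sketchContainerId.toString());
// setParent(type, id) records the attempt as the container's parent entity.
sketchContainer.setParent(attempt.getType(), attempt.getId());
client.putEntities(attempt, sketchContainer); // blocking write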
Use of org.apache.hadoop.yarn.api.records.timelineservice.ContainerEntity in project Hadoop by Apache.
The class NMTimelinePublisher, method publishContainerCreatedEvent:
@SuppressWarnings("unchecked")
private void publishContainerCreatedEvent(ContainerEvent event) {
  ContainerId containerId = event.getContainerID();
  ContainerEntity entity = createContainerEntity(containerId);
  Container container = context.getContainers().get(containerId);
  Resource resource = container.getResource();
  // Record the allocation details (resources, host, priority) as entity info.
  Map<String, Object> entityInfo = new HashMap<String, Object>();
  entityInfo.put(ContainerMetricsConstants.ALLOCATED_MEMORY_INFO,
      resource.getMemorySize());
  entityInfo.put(ContainerMetricsConstants.ALLOCATED_VCORE_INFO,
      resource.getVirtualCores());
  entityInfo.put(ContainerMetricsConstants.ALLOCATED_HOST_INFO,
      nodeId.getHost());
  entityInfo.put(ContainerMetricsConstants.ALLOCATED_PORT_INFO,
      nodeId.getPort());
  entityInfo.put(ContainerMetricsConstants.ALLOCATED_PRIORITY_INFO,
      container.getPriority().toString());
  entityInfo.put(ContainerMetricsConstants.ALLOCATED_HOST_HTTP_ADDRESS_INFO,
      httpAddress);
  entity.setInfo(entityInfo);
  TimelineEvent tEvent = new TimelineEvent();
  tEvent.setId(ContainerMetricsConstants.CREATED_EVENT_TYPE);
  tEvent.setTimestamp(event.getTimestamp());
  entity.addEvent(tEvent);
  entity.setCreatedTime(event.getTimestamp());
  // Hand the entity off to the async dispatcher for publishing.
  dispatcher.getEventHandler().handle(new TimelinePublishEvent(entity,
      containerId.getApplicationAttemptId().getApplicationId()));
}
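Here the allocation details hang off the entity, while the created event carries only an id and timestamp. TimelineEvent also accepts per-event info that travels with the event rather than the entity; a hedged sketch (the info key and value are invented for illustration, not Hadoop constants):

TimelineEvent created = new TimelineEvent();
created.setId(ContainerMetricsConstants.CREATED_EVENT_TYPE);
created.setTimestamp(event.getTimestamp());
// Hypothetical key: event-level detail attached via addInfo(String, Object).
created.addInfo("LAUNCH_CONTEXT_INFO", "container launched by NM");
entity.addEvent(created);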
Use of org.apache.hadoop.yarn.api.records.timelineservice.ContainerEntity in project Hadoop by Apache.
The class TestFileSystemTimelineReaderImpl, method loadEntityData:
private static void loadEntityData(String rootDir) throws Exception {
  File appDir = getAppDir(rootDir, "cluster1", "user1", "flow1", "1", "app1",
      "app");
  // First record for entity "id_1" of type "app": info, an event, two
  // metrics, a config entry and both kinds of relations.
  TimelineEntity entity11 = new TimelineEntity();
  entity11.setId("id_1");
  entity11.setType("app");
  entity11.setCreatedTime(1425016502000L);
  Map<String, Object> info1 = new HashMap<String, Object>();
  info1.put("info1", "val1");
  info1.put("info2", "val5");
  entity11.addInfo(info1);
  TimelineEvent event = new TimelineEvent();
  event.setId("event_1");
  event.setTimestamp(1425016502003L);
  entity11.addEvent(event);
  Set<TimelineMetric> metrics = new HashSet<TimelineMetric>();
  TimelineMetric metric1 = new TimelineMetric();
  metric1.setId("metric1");
  metric1.setType(TimelineMetric.Type.SINGLE_VALUE);
  metric1.addValue(1425016502006L, 113);
  metrics.add(metric1);
  TimelineMetric metric2 = new TimelineMetric();
  metric2.setId("metric2");
  metric2.setType(TimelineMetric.Type.TIME_SERIES);
  metric2.addValue(1425016502016L, 34);
  metrics.add(metric2);
  entity11.setMetrics(metrics);
  Map<String, String> configs = new HashMap<String, String>();
  configs.put("config_1", "127");
  entity11.setConfigs(configs);
  entity11.addRelatesToEntity("flow", "flow1");
  entity11.addIsRelatedToEntity("type1", "tid1_1");
  writeEntityFile(entity11, appDir);
  // Second record for the same entity id ("id_1"); reusing the maps/sets is
  // safe because entity11 has already been written out.
  TimelineEntity entity12 = new TimelineEntity();
  entity12.setId("id_1");
  entity12.setType("app");
  configs.clear();
  configs.put("config_2", "23");
  configs.put("config_3", "abc");
  entity12.addConfigs(configs);
  metrics.clear();
  TimelineMetric metric12 = new TimelineMetric();
  metric12.setId("metric2");
  metric12.setType(TimelineMetric.Type.TIME_SERIES);
  metric12.addValue(1425016502032L, 48);
  metric12.addValue(1425016502054L, 51);
  metrics.add(metric12);
  TimelineMetric metric3 = new TimelineMetric();
  metric3.setId("metric3");
  metric3.setType(TimelineMetric.Type.SINGLE_VALUE);
  metric3.addValue(1425016502060L, 23L);
  metrics.add(metric3);
  entity12.setMetrics(metrics);
  entity12.addIsRelatedToEntity("type1", "tid1_2");
entity12.addIsRelatedToEntity("type2", "tid2_1`");
  TimelineEvent event15 = new TimelineEvent();
  event15.setId("event_5");
  event15.setTimestamp(1425016502017L);
  entity12.addEvent(event15);
  writeEntityFile(entity12, appDir);
  TimelineEntity entity2 = new TimelineEntity();
  entity2.setId("id_2");
  entity2.setType("app");
  entity2.setCreatedTime(1425016501050L);
  Map<String, Object> info2 = new HashMap<String, Object>();
info1.put("info2", 4);
  entity2.addInfo(info2);
  Map<String, String> configs2 = new HashMap<String, String>();
  configs2.put("config_1", "129");
  configs2.put("config_3", "def");
  entity2.setConfigs(configs2);
  TimelineEvent event2 = new TimelineEvent();
  event2.setId("event_2");
  event2.setTimestamp(1425016501003L);
  entity2.addEvent(event2);
  Set<TimelineMetric> metrics2 = new HashSet<TimelineMetric>();
  TimelineMetric metric21 = new TimelineMetric();
  metric21.setId("metric1");
  metric21.setType(TimelineMetric.Type.SINGLE_VALUE);
  metric21.addValue(1425016501006L, 300);
  metrics2.add(metric21);
  TimelineMetric metric22 = new TimelineMetric();
  metric22.setId("metric2");
  metric22.setType(TimelineMetric.Type.TIME_SERIES);
  metric22.addValue(1425016501056L, 31);
  metric22.addValue(1425016501084L, 70);
  metrics2.add(metric22);
  TimelineMetric metric23 = new TimelineMetric();
  metric23.setId("metric3");
  metric23.setType(TimelineMetric.Type.SINGLE_VALUE);
  metric23.addValue(1425016502060L, 23L);
  metrics2.add(metric23);
  entity2.setMetrics(metrics2);
  entity2.addRelatesToEntity("flow", "flow2");
  writeEntityFile(entity2, appDir);
  TimelineEntity entity3 = new TimelineEntity();
  entity3.setId("id_3");
  entity3.setType("app");
  entity3.setCreatedTime(1425016501050L);
  Map<String, Object> info3 = new HashMap<String, Object>();
  info3.put("info2", 3.5);
  info3.put("info4", 20);
  entity3.addInfo(info3);
  Map<String, String> configs3 = new HashMap<String, String>();
  configs3.put("config_1", "123");
  configs3.put("config_3", "abc");
  entity3.setConfigs(configs3);
  TimelineEvent event3 = new TimelineEvent();
  event3.setId("event_2");
  event3.setTimestamp(1425016501003L);
  entity3.addEvent(event3);
  TimelineEvent event4 = new TimelineEvent();
  event4.setId("event_4");
  event4.setTimestamp(1425016502006L);
  entity3.addEvent(event4);
  Set<TimelineMetric> metrics3 = new HashSet<TimelineMetric>();
  TimelineMetric metric31 = new TimelineMetric();
  metric31.setId("metric1");
  metric31.setType(TimelineMetric.Type.SINGLE_VALUE);
  metric31.addValue(1425016501006L, 124);
  metrics3.add(metric31);
  TimelineMetric metric32 = new TimelineMetric();
  metric32.setId("metric2");
  metric32.setType(TimelineMetric.Type.TIME_SERIES);
  metric32.addValue(1425016501056L, 31);
  metric32.addValue(1425016501084L, 74);
  metrics3.add(metric32);
  entity3.setMetrics(metrics3);
  entity3.addIsRelatedToEntity("type1", "tid1_2");
  writeEntityFile(entity3, appDir);
  TimelineEntity entity4 = new TimelineEntity();
  entity4.setId("id_4");
  entity4.setType("app");
  entity4.setCreatedTime(1425016502050L);
  TimelineEvent event44 = new TimelineEvent();
  event44.setId("event_4");
  event44.setTimestamp(1425016502003L);
  entity4.addEvent(event44);
  writeEntityFile(entity4, appDir);
  // Application attempt entities under the same app.
  File attemptDir = getAppDir(rootDir, "cluster1", "user1", "flow1", "1",
      "app1", TimelineEntityType.YARN_APPLICATION_ATTEMPT.toString());
  ApplicationAttemptEntity attempt1 = new ApplicationAttemptEntity();
  attempt1.setId("app-attempt-1");
  attempt1.setCreatedTime(1425017502003L);
  writeEntityFile(attempt1, attemptDir);
  ApplicationAttemptEntity attempt2 = new ApplicationAttemptEntity();
  attempt2.setId("app-attempt-2");
  attempt2.setCreatedTime(1425017502004L);
  writeEntityFile(attempt2, attemptDir);
  // Container entities, each parented to one of the attempts above.
  File entityDir = getAppDir(rootDir, "cluster1", "user1", "flow1", "1",
      "app1", TimelineEntityType.YARN_CONTAINER.toString());
  ContainerEntity containerEntity1 = new ContainerEntity();
  containerEntity1.setId("container_1_1");
  containerEntity1.setParent(attempt1.getIdentifier());
  containerEntity1.setCreatedTime(1425017502003L);
  writeEntityFile(containerEntity1, entityDir);
  ContainerEntity containerEntity2 = new ContainerEntity();
  containerEntity2.setId("container_2_1");
  containerEntity2.setParent(attempt2.getIdentifier());
  containerEntity2.setCreatedTime(1425018502003L);
  writeEntityFile(containerEntity2, entityDir);
  ContainerEntity containerEntity3 = new ContainerEntity();
  containerEntity3.setId("container_2_2");
  containerEntity3.setParent(attempt2.getIdentifier());
  containerEntity3.setCreatedTime(1425018502003L);
  writeEntityFile(containerEntity3, entityDir);
  File appDir2 = getAppDir(rootDir, "cluster1", "user1", "flow1,flow", "1",
      "app2", "app");
  TimelineEntity entity5 = new TimelineEntity();
  entity5.setId("id_5");
  entity5.setType("app");
  entity5.setCreatedTime(1425016502050L);
  writeEntityFile(entity5, appDir2);
}
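The writeEntityFile helper is not shown in the snippet. Below is a plausible sketch, assuming one append-mode JSON file per entity id with the file-system storage extension ".thist"; the file naming and append semantics are assumptions, not the test's verbatim helper:

// Hedged sketch of writeEntityFile: the ".thist" extension and append mode
// are assumptions about how the file-system reader expects its input.
private static void writeEntityFile(TimelineEntity entity, File dir)
    throws Exception {
  if (!dir.exists() && !dir.mkdirs()) {
    throw new IOException("Could not create directory " + dir);
  }
  // Appending lets two records for the same id (entity11/entity12 above)
  // accumulate in one file.
  File entityFile = new File(dir, entity.getId() + ".thist");
  try (PrintWriter writer =
      new PrintWriter(new FileWriter(entityFile, true))) {
    // TimelineUtils serializes YARN timeline records to JSON.
    writer.println(TimelineUtils.dumpTimelineRecordtoJSON(entity));
  }
}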
Use of org.apache.hadoop.yarn.api.records.timelineservice.ContainerEntity in project Hadoop by Apache.
The class NMTimelinePublisher, method reportContainerResourceUsage:
public void reportContainerResourceUsage(Container container, Long pmemUsage,
    Float cpuUsagePercentPerCore) {
  // UNAVAILABLE (-1) means the resource calculator could not sample a value.
  if (pmemUsage != ResourceCalculatorProcessTree.UNAVAILABLE
      || cpuUsagePercentPerCore != ResourceCalculatorProcessTree.UNAVAILABLE) {
    ContainerEntity entity = createContainerEntity(container.getContainerId());
    long currentTimeMillis = System.currentTimeMillis();
    if (pmemUsage != ResourceCalculatorProcessTree.UNAVAILABLE) {
      TimelineMetric memoryMetric = new TimelineMetric();
      memoryMetric.setId(ContainerMetric.MEMORY.toString());
      memoryMetric.setRealtimeAggregationOp(TimelineMetricOperation.SUM);
      memoryMetric.addValue(currentTimeMillis, pmemUsage);
      entity.addMetric(memoryMetric);
    }
    if (cpuUsagePercentPerCore != ResourceCalculatorProcessTree.UNAVAILABLE) {
      TimelineMetric cpuMetric = new TimelineMetric();
      cpuMetric.setId(ContainerMetric.CPU.toString());
      // TODO: support average
      cpuMetric.setRealtimeAggregationOp(TimelineMetricOperation.SUM);
      cpuMetric.addValue(currentTimeMillis,
          Math.round(cpuUsagePercentPerCore));
      entity.addMetric(cpuMetric);
    }
    ApplicationId appId =
        container.getContainerId().getApplicationAttemptId().getApplicationId();
    try {
      // no need to put it as part of publisher as timeline client already
      // has a queuing concept
      TimelineV2Client timelineClient = getTimelineClient(appId);
      if (timelineClient != null) {
        timelineClient.putEntitiesAsync(entity);
      } else {
        LOG.error("Seems like client has been removed before the container"
            + " metric could be published for " + container.getContainerId());
      }
    } catch (IOException | YarnException e) {
      LOG.error("Failed to publish Container metrics for container "
          + container.getContainerId(), e);
    }
  }
}
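A caller such as the node manager's container monitor would feed this method from its sampling loop. A hedged sketch of that shape (the pTree parameter, vcores count, and nmTimelinePublisher field are illustrative wiring, not the actual ContainersMonitorImpl code):

// Hedged sketch: sample one container's process tree and publish the usage.
private void sampleAndPublish(Container container,
    ResourceCalculatorProcessTree pTree, int vcores) {
  pTree.updateProcessTree();
  long pmemBytes = pTree.getRssMemorySize();     // bytes, or UNAVAILABLE (-1)
  float cpuPercent = pTree.getCpuUsagePercent(); // percent, or UNAVAILABLE
  float perCore = cpuPercent == ResourceCalculatorProcessTree.UNAVAILABLE
      ? ResourceCalculatorProcessTree.UNAVAILABLE
      : cpuPercent / vcores;
  // long/float autobox to the Long/Float parameters of the publisher.
  nmTimelinePublisher.reportContainerResourceUsage(container, pmemBytes,
      perCore);
}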
Use of org.apache.hadoop.yarn.api.records.timelineservice.ContainerEntity in project Hadoop by Apache.
The class NMTimelinePublisher, method createContainerEntity:
private static ContainerEntity createContainerEntity(ContainerId containerId) {
  ContainerEntity entity = new ContainerEntity();
  entity.setId(containerId.toString());
  // Parent the container entity under its application attempt so the
  // attempt/container hierarchy is preserved in storage.
  Identifier parentIdentifier = new Identifier();
  parentIdentifier.setType(TimelineEntityType.YARN_APPLICATION_ATTEMPT.name());
  parentIdentifier.setId(containerId.getApplicationAttemptId().toString());
  entity.setParent(parentIdentifier);
  return entity;
}
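From inside the same class, the parent linkage can be checked directly; a small hedged example (the assertion style is illustrative):

ContainerId containerId = ContainerId.newContainerId(
    ApplicationAttemptId.newInstance(ApplicationId.newInstance(0, 1), 1), 1);
ContainerEntity entity = createContainerEntity(containerId);
// The parent identifier points back at the application attempt.
assert TimelineEntityType.YARN_APPLICATION_ATTEMPT.name()
    .equals(entity.getParent().getType());
assert containerId.getApplicationAttemptId().toString()
    .equals(entity.getParent().getId());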