Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity in project hadoop by apache.
The class TestTimelineReaderWebServicesHBaseStorage, method testGetEntitiesEventFilters.
@Test
public void testGetEntitiesEventFilters() throws Exception {
  Client client = createClient();
  try {
    URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
        "timeline/clusters/cluster1/apps/application_1111111111_1111/" +
        "entities/type1?eventfilters=event1,event3");
    ClientResponse resp = getResponse(client, uri);
    Set<TimelineEntity> entities =
        resp.getEntity(new GenericType<Set<TimelineEntity>>() {
        });
    assertNotNull(entities);
    assertEquals(2, entities.size());
    for (TimelineEntity entity : entities) {
      assertTrue(entity.getId().equals("entity1") ||
          entity.getId().equals("entity2"));
    }
    uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
        "timeline/clusters/cluster1/apps/application_1111111111_1111/" +
        "entities/type1?eventfilters=!(event1,event3)");
    resp = getResponse(client, uri);
    entities = resp.getEntity(new GenericType<Set<TimelineEntity>>() {
    });
    assertNotNull(entities);
    assertEquals(0, entities.size());
    // eventfilters=!(event1,event3) OR event5,event6
    uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
        "timeline/clusters/cluster1/apps/application_1111111111_1111/" +
        "entities/type1?eventfilters=!(event1,event3)%20OR%20event5,event6");
    resp = getResponse(client, uri);
    entities = resp.getEntity(new GenericType<Set<TimelineEntity>>() {
    });
    assertNotNull(entities);
    assertEquals(1, entities.size());
    for (TimelineEntity entity : entities) {
      assertTrue(entity.getId().equals("entity2"));
    }
    // eventfilters=(!(event1,event3) OR event5,event6) OR
    // (event1,event2 AND (event3,event4))
    uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
        "timeline/clusters/cluster1/apps/application_1111111111_1111/" +
        "entities/type1?eventfilters=(!(event1,event3)%20OR%20event5," +
        "event6)%20OR%20(event1,event2%20AND%20(event3,event4))");
    resp = getResponse(client, uri);
    entities = resp.getEntity(new GenericType<Set<TimelineEntity>>() {
    });
    assertNotNull(entities);
    assertEquals(2, entities.size());
    for (TimelineEntity entity : entities) {
      assertTrue(entity.getId().equals("entity1") ||
          entity.getId().equals("entity2"));
    }
  } finally {
    client.destroy();
  }
}
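The eventfilters parameter accepts a small expression language: a comma-separated list requires the entity to contain all listed events, "!" negates a parenthesized list, and explicit AND/OR (URL-encoded as %20AND%20 / %20OR%20) combine subexpressions. Rather than hand-encoding the spaces as the test does, the expression can be built with the multi-argument java.net.URI constructor, which percent-encodes the query. A minimal sketch, not part of the test above; the port value and standalone class are illustrative:

import java.net.URI;
import java.net.URISyntaxException;

public class EventFilterUriSketch {
  public static void main(String[] args) throws URISyntaxException {
    int serverPort = 8188; // hypothetical; the test reads it from the running server
    // the same compound expression as the last case in the test above
    String query = "eventfilters=(!(event1,event3) OR event5,event6)"
        + " OR (event1,event2 AND (event3,event4))";
    // this constructor percent-encodes illegal query characters, so the
    // spaces around AND/OR come out as %20 without manual escaping
    URI uri = new URI("http", null, "localhost", serverPort,
        "/ws/v2/timeline/clusters/cluster1/apps/application_1111111111_1111"
            + "/entities/type1", query, null);
    System.out.println(uri.toASCIIString());
  }
}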
Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity in project hadoop by apache.
The class TimelineServiceV2Publisher, method containerCreated.
@SuppressWarnings("unchecked")
@Override
public void containerCreated(RMContainer container, long createdTime) {
  if (publishContainerEvents) {
    TimelineEntity entity = createContainerEntity(container.getContainerId());
    entity.setCreatedTime(createdTime);
    TimelineEvent tEvent = new TimelineEvent();
    tEvent.setId(ContainerMetricsConstants.CREATED_IN_RM_EVENT_TYPE);
    tEvent.setTimestamp(createdTime);
    entity.addEvent(tEvent);
    // updated as event info instead of entity info, as entity info is updated
    // by NM
    Map<String, Object> entityInfo = new HashMap<String, Object>();
    entityInfo.put(ContainerMetricsConstants.ALLOCATED_MEMORY_INFO,
        container.getAllocatedResource().getMemorySize());
    entityInfo.put(ContainerMetricsConstants.ALLOCATED_VCORE_INFO,
        container.getAllocatedResource().getVirtualCores());
    entityInfo.put(ContainerMetricsConstants.ALLOCATED_HOST_INFO,
        container.getAllocatedNode().getHost());
    entityInfo.put(ContainerMetricsConstants.ALLOCATED_PORT_INFO,
        container.getAllocatedNode().getPort());
    entityInfo.put(ContainerMetricsConstants.ALLOCATED_PRIORITY_INFO,
        container.getAllocatedPriority().getPriority());
    entityInfo.put(
        ContainerMetricsConstants.ALLOCATED_HOST_HTTP_ADDRESS_INFO,
        container.getNodeHttpAddress());
    entity.setInfo(entityInfo);
    getDispatcher().getEventHandler().handle(
        new TimelineV2PublishEvent(SystemMetricsEventType.PUBLISH_ENTITY,
            entity, container.getContainerId().getApplicationAttemptId()
                .getApplicationId()));
  }
}
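The createContainerEntity helper is not included in this snippet. A plausible minimal sketch, assuming the ContainerEntity subclass from the same timelineservice records package and its inherited two-argument setParent; the body is an illustration, not the actual RM code:

// Sketch only: the real helper in TimelineServiceV2Publisher may differ.
private static ContainerEntity createContainerEntity(ContainerId containerId) {
  ContainerEntity entity = new ContainerEntity();
  entity.setId(containerId.toString());
  // parent the container under its application attempt so readers can
  // traverse the entity hierarchy
  entity.setParent(TimelineEntityType.YARN_APPLICATION_ATTEMPT.name(),
      containerId.getApplicationAttemptId().toString());
  return entity;
}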
Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity in project hadoop by apache.
The class TimelineEntityConverterV2, method createTaskAttemptEntities.
private Set<TimelineEntity> createTaskAttemptEntities(TaskInfo taskInfo) {
  Set<TimelineEntity> taskAttempts = new HashSet<TimelineEntity>();
  Map<TaskAttemptID, TaskAttemptInfo> taskAttemptInfoMap =
      taskInfo.getAllTaskAttempts();
  LOG.info("task " + taskInfo.getTaskId() + " has " +
      taskAttemptInfoMap.size() + " task attempts");
  for (TaskAttemptInfo taskAttemptInfo : taskAttemptInfoMap.values()) {
    TimelineEntity taskAttempt = createTaskAttemptEntity(taskAttemptInfo);
    taskAttempts.add(taskAttempt);
  }
  return taskAttempts;
}
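createTaskAttemptEntity itself is not shown. A hedged sketch of what the per-attempt conversion might look like; the entity type string and the fields copied are assumptions for illustration:

// Sketch only: the real converter copies more fields (shuffle port,
// counters, finish events, etc.).
private TimelineEntity createTaskAttemptEntity(TaskAttemptInfo attemptInfo) {
  TimelineEntity taskAttempt = new TimelineEntity();
  taskAttempt.setType("MAPREDUCE_TASK_ATTEMPT"); // assumed type string
  taskAttempt.setId(attemptInfo.getAttemptId().toString());
  taskAttempt.setCreatedTime(attemptInfo.getStartTime());
  taskAttempt.addInfo("STATUS", attemptInfo.getTaskStatus());
  return taskAttempt;
}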
Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity in project hadoop by apache.
The class TimelineEntityConverterV2, method createTaskAndTaskAttemptEntities.
private List<TimelineEntity> createTaskAndTaskAttemptEntities(JobInfo jobInfo) {
  List<TimelineEntity> entities = new ArrayList<>();
  Map<TaskID, TaskInfo> taskInfoMap = jobInfo.getAllTasks();
  LOG.info("job " + jobInfo.getJobId() + " has " + taskInfoMap.size() +
      " tasks");
  for (TaskInfo taskInfo : taskInfoMap.values()) {
    TimelineEntity task = createTaskEntity(taskInfo);
    entities.add(task);
    // add the task attempts from this task
    Set<TimelineEntity> taskAttempts = createTaskAttemptEntities(taskInfo);
    entities.addAll(taskAttempts);
  }
  return entities;
}
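A hedged usage sketch for this converter path: parse a job history file with JobHistoryParser and feed the resulting JobInfo through the method above. The file path is hypothetical, and since the method is private this would run inside TimelineEntityConverterV2:

// Hypothetical driver code; fs and historyFilePath are assumptions.
FileSystem fs = FileSystem.get(new Configuration());
Path historyFilePath = new Path("/tmp/job_1111111111_1111.jhist");
JobHistoryParser parser = new JobHistoryParser(fs, historyFilePath);
JobInfo jobInfo = parser.parse();
List<TimelineEntity> entities = createTaskAndTaskAttemptEntities(jobInfo);
LOG.info("converted " + entities.size() + " task/attempt entities");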
Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity in project hadoop by apache.
The class HBaseTimelineWriterImpl, method write.
/**
 * Stores all the information contained in {@link TimelineEntities} to the
 * timeline store.
 */
@Override
public TimelineWriteResponse write(String clusterId, String userId,
    String flowName, String flowVersion, long flowRunId, String appId,
    TimelineEntities data) throws IOException {
  TimelineWriteResponse putStatus = new TimelineWriteResponse();
  // defensive coding to avoid NPE during row key construction
  if ((flowName == null) || (appId == null) || (clusterId == null) ||
      (userId == null)) {
    LOG.warn("Found null for one of: flowName=" + flowName + " appId=" +
        appId + " userId=" + userId + " clusterId=" + clusterId +
        ". Not proceeding with writing to hbase");
    return putStatus;
  }
  for (TimelineEntity te : data.getEntities()) {
    // a set can contain at most one null element
    if (te == null) {
      continue;
    }
    // if the entity is an application, the destination is the application
    // table
    boolean isApplication = isApplicationEntity(te);
    byte[] rowKey;
    if (isApplication) {
      ApplicationRowKey applicationRowKey =
          new ApplicationRowKey(clusterId, userId, flowName, flowRunId, appId);
      rowKey = applicationRowKey.getRowKey();
    } else {
      EntityRowKey entityRowKey = new EntityRowKey(clusterId, userId,
          flowName, flowRunId, appId, te.getType(), te.getId());
      rowKey = entityRowKey.getRowKey();
    }
    storeInfo(rowKey, te, flowVersion, isApplication);
    storeEvents(rowKey, te.getEvents(), isApplication);
    storeConfig(rowKey, te.getConfigs(), isApplication);
    storeMetrics(rowKey, te.getMetrics(), isApplication);
    storeRelations(rowKey, te, isApplication);
    if (isApplication) {
      TimelineEvent event = getApplicationEvent(te,
          ApplicationMetricsConstants.CREATED_EVENT_TYPE);
      FlowRunRowKey flowRunRowKey =
          new FlowRunRowKey(clusterId, userId, flowName, flowRunId);
      if (event != null) {
        AppToFlowRowKey appToFlowRowKey =
            new AppToFlowRowKey(clusterId, appId);
        onApplicationCreated(flowRunRowKey, appToFlowRowKey, appId, userId,
            flowVersion, te, event.getTimestamp());
      }
      // if it's an application entity, store metrics
      storeFlowMetricsAppRunning(flowRunRowKey, appId, te);
      // if the application has finished, store its finish time and write
      // the final values of all metrics
      event = getApplicationEvent(te,
          ApplicationMetricsConstants.FINISHED_EVENT_TYPE);
      if (event != null) {
        onApplicationFinished(flowRunRowKey, flowVersion, appId, te,
            event.getTimestamp());
      }
    }
  }
  return putStatus;
}
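A hedged usage sketch for write(): HBaseTimelineWriterImpl is a YARN Service, so it is initialized and started before writing. The identifiers below (cluster, user, flow name/version, run id) are illustrative values matching the write() signature above, not taken from the source:

// Assumes an HBase cluster reachable via the supplied Configuration.
Configuration conf = new YarnConfiguration();
HBaseTimelineWriterImpl writer = new HBaseTimelineWriterImpl();
writer.init(conf);
writer.start();
try {
  TimelineEntity entity = new TimelineEntity();
  entity.setType("type1");
  entity.setId("entity1");
  entity.setCreatedTime(System.currentTimeMillis());
  TimelineEntities data = new TimelineEntities();
  data.addEntity(entity);
  TimelineWriteResponse response = writer.write("cluster1", "user1",
      "flow_name", "CF7022C10F1354", 1002345678919L,
      "application_1111111111_1111", data);
  System.out.println("write errors: " + response.getErrors());
  writer.flush(); // push buffered mutations to HBase
} finally {
  writer.stop();
}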