use of org.apache.hadoop.yarn.api.records.timeline.TimelineEvent in project hadoop by apache.
the class PluginStoreTestUtils method generateTestEntities.
/**
 * Create sample entities for testing.
 * @return two timeline entities in a {@link TimelineEntities} object
 */
static TimelineEntities generateTestEntities() {
  TimelineEntities entities = new TimelineEntities();
  Map<String, Set<Object>> primaryFilters = new HashMap<String, Set<Object>>();
  Set<Object> l1 = new HashSet<Object>();
  l1.add("username");
  Set<Object> l2 = new HashSet<Object>();
  l2.add(Integer.MAX_VALUE);
  Set<Object> l3 = new HashSet<Object>();
  l3.add("123abc");
  Set<Object> l4 = new HashSet<Object>();
  l4.add((long) Integer.MAX_VALUE + 1L);
  primaryFilters.put("user", l1);
  primaryFilters.put("appname", l2);
  primaryFilters.put("other", l3);
  primaryFilters.put("long", l4);
  Map<String, Object> secondaryFilters = new HashMap<String, Object>();
  secondaryFilters.put("startTime", 123456);
  secondaryFilters.put("status", "RUNNING");
  Map<String, Object> otherInfo1 = new HashMap<String, Object>();
  otherInfo1.put("info1", "val1");
  otherInfo1.putAll(secondaryFilters);
  String entityId1 = "id_1";
  String entityType1 = "type_1";
  String entityId2 = "id_2";
  String entityType2 = "type_2";
  Map<String, Set<String>> relatedEntities = new HashMap<String, Set<String>>();
  relatedEntities.put(entityType2, Collections.singleton(entityId2));
  TimelineEvent ev3 = createEvent(789L, "launch_event", null);
  TimelineEvent ev4 = createEvent(0L, "init_event", null);
  List<TimelineEvent> events = new ArrayList<TimelineEvent>();
  events.add(ev3);
  events.add(ev4);
  entities.addEntity(createEntity(entityId2, entityType2, 456L, events,
      null, null, null, "domain_id_1"));
  TimelineEvent ev1 = createEvent(123L, "start_event", null);
  entities.addEntity(createEntity(entityId1, entityType1, 123L,
      Collections.singletonList(ev1), relatedEntities, primaryFilters,
      otherInfo1, "domain_id_1"));
  return entities;
}
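The method relies on createEvent and createEntity helpers defined elsewhere in PluginStoreTestUtils. A minimal sketch of what createEvent plausibly looks like, reconstructed from its call sites (the body is an assumption, not the verbatim helper):

// Reconstructed helper, inferred from calls like createEvent(789L, "launch_event", null).
static TimelineEvent createEvent(long timestamp, String type,
    Map<String, Object> info) {
  TimelineEvent event = new TimelineEvent();
  event.setTimestamp(timestamp);
  event.setEventType(type);
  if (info != null) {
    // Copy each entry into the event's info map.
    for (Map.Entry<String, Object> entry : info.entrySet()) {
      event.addEventInfo(entry.getKey(), entry.getValue());
    }
  }
  return event;
}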
use of org.apache.hadoop.yarn.api.records.timeline.TimelineEvent in project jstorm by alibaba.
the class JstormMaster method publishApplicationAttemptEvent.
private static void publishApplicationAttemptEvent(
    final TimelineClient timelineClient, String appAttemptId,
    DSEvent appEvent, String domainId, UserGroupInformation ugi) {
  final TimelineEntity entity = new TimelineEntity();
  entity.setEntityId(appAttemptId);
  entity.setEntityType(DSEntity.DS_APP_ATTEMPT.toString());
  entity.setDomainId(domainId);
  entity.addPrimaryFilter(JOYConstants.USER, ugi.getShortUserName());
  TimelineEvent event = new TimelineEvent();
  event.setEventType(appEvent.toString());
  event.setTimestamp(System.currentTimeMillis());
  entity.addEvent(event);
  try {
    timelineClient.putEntities(entity);
  } catch (YarnException | IOException e) {
    LOG.error("App Attempt "
        + (appEvent.equals(DSEvent.DS_APP_ATTEMPT_START)
            ? JOYConstants.START : JOYConstants.END)
        + " event could not be published for " + appAttemptId, e);
  }
}
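For context, a caller must create and start the TimelineClient before this method can publish anything. A minimal setup sketch, assuming ATS v1 is enabled in the cluster configuration (the attempt id, domain id, and user are illustrative, and checked exceptions are elided):

YarnConfiguration conf = new YarnConfiguration();
TimelineClient timelineClient = TimelineClient.createTimelineClient();
timelineClient.init(conf);
timelineClient.start();
// Publish an attempt-start event for an illustrative attempt id.
publishApplicationAttemptEvent(timelineClient, "appattempt_1_0001_000001",
    DSEvent.DS_APP_ATTEMPT_START, "domain_id_1",
    UserGroupInformation.getCurrentUser());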
use of org.apache.hadoop.yarn.api.records.timeline.TimelineEvent in project hadoop by apache.
the class TestTimelineClient method generateEntity.
private static TimelineEntity generateEntity() {
  TimelineEntity entity = new TimelineEntity();
  entity.setEntityId("entity id");
  entity.setEntityType("entity type");
  entity.setStartTime(System.currentTimeMillis());
  for (int i = 0; i < 2; ++i) {
    TimelineEvent event = new TimelineEvent();
    event.setTimestamp(System.currentTimeMillis());
    event.setEventType("test event type " + i);
    event.addEventInfo("key1", "val1");
    event.addEventInfo("key2", "val2");
    entity.addEvent(event);
  }
  entity.addRelatedEntity("test ref type 1", "test ref id 1");
  entity.addRelatedEntity("test ref type 2", "test ref id 2");
  entity.addPrimaryFilter("pkey1", "pval1");
  entity.addPrimaryFilter("pkey2", "pval2");
  entity.addOtherInfo("okey1", "oval1");
  entity.addOtherInfo("okey2", "oval2");
  entity.setDomainId("domain id 1");
  return entity;
}
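In a test, an entity like this would be pushed through the client and the response checked for per-entity errors. A minimal sketch, assuming a started TimelineClient named client:

TimelineEntity entity = generateEntity();
TimelinePutResponse response = client.putEntities(entity);
if (!response.getErrors().isEmpty()) {
  // Each TimelinePutError carries the entity id and an error code.
  TimelinePutResponse.TimelinePutError error = response.getErrors().get(0);
  System.err.println("Put failed for " + error.getEntityId()
      + " with error code " + error.getErrorCode());
}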
use of org.apache.hadoop.yarn.api.records.timeline.TimelineEvent in project hadoop by apache.
the class ApplicationHistoryManagerOnTimelineStore method convertToApplicationReport.
private static ApplicationReportExt convertToApplicationReport(
    TimelineEntity entity, ApplicationReportField field) {
  String user = null;
  String queue = null;
  String name = null;
  String type = null;
  boolean unmanagedApplication = false;
  long createdTime = 0;
  long finishedTime = 0;
  float progress = 0.0f;
  int applicationPriority = 0;
  ApplicationAttemptId latestApplicationAttemptId = null;
  String diagnosticsInfo = null;
  FinalApplicationStatus finalStatus = FinalApplicationStatus.UNDEFINED;
  YarnApplicationState state = YarnApplicationState.ACCEPTED;
  ApplicationResourceUsageReport appResources = null;
  Set<String> appTags = null;
  Map<ApplicationAccessType, String> appViewACLs =
      new HashMap<ApplicationAccessType, String>();
  String appNodeLabelExpression = null;
  String amNodeLabelExpression = null;
  Map<String, Object> entityInfo = entity.getOtherInfo();
  if (entityInfo != null) {
    if (entityInfo.containsKey(ApplicationMetricsConstants.USER_ENTITY_INFO)) {
      user = entityInfo.get(
          ApplicationMetricsConstants.USER_ENTITY_INFO).toString();
    }
    if (entityInfo.containsKey(
        ApplicationMetricsConstants.APP_VIEW_ACLS_ENTITY_INFO)) {
      String appViewACLsStr = entityInfo.get(
          ApplicationMetricsConstants.APP_VIEW_ACLS_ENTITY_INFO).toString();
      if (appViewACLsStr.length() > 0) {
        appViewACLs.put(ApplicationAccessType.VIEW_APP, appViewACLsStr);
      }
    }
    if (field == ApplicationReportField.USER_AND_ACLS) {
      return new ApplicationReportExt(ApplicationReport.newInstance(
          ApplicationId.fromString(entity.getEntityId()),
          latestApplicationAttemptId, user, queue, name, null, -1, null,
          state, diagnosticsInfo, null, createdTime, finishedTime,
          finalStatus, null, null, progress, type, null, appTags,
          unmanagedApplication, Priority.newInstance(applicationPriority),
          appNodeLabelExpression, amNodeLabelExpression), appViewACLs);
    }
    if (entityInfo.containsKey(ApplicationMetricsConstants.QUEUE_ENTITY_INFO)) {
      queue = entityInfo.get(
          ApplicationMetricsConstants.QUEUE_ENTITY_INFO).toString();
    }
    if (entityInfo.containsKey(ApplicationMetricsConstants.NAME_ENTITY_INFO)) {
      name = entityInfo.get(
          ApplicationMetricsConstants.NAME_ENTITY_INFO).toString();
    }
    if (entityInfo.containsKey(ApplicationMetricsConstants.TYPE_ENTITY_INFO)) {
      type = entityInfo.get(
          ApplicationMetricsConstants.TYPE_ENTITY_INFO).toString();
    }
    if (entityInfo.containsKey(
        ApplicationMetricsConstants.UNMANAGED_APPLICATION_ENTITY_INFO)) {
      unmanagedApplication = Boolean.parseBoolean(entityInfo.get(
          ApplicationMetricsConstants.UNMANAGED_APPLICATION_ENTITY_INFO)
          .toString());
    }
    if (entityInfo.containsKey(
        ApplicationMetricsConstants.APPLICATION_PRIORITY_INFO)) {
      applicationPriority = Integer.parseInt(entityInfo.get(
          ApplicationMetricsConstants.APPLICATION_PRIORITY_INFO).toString());
    }
    if (entityInfo.containsKey(
        ApplicationMetricsConstants.APP_NODE_LABEL_EXPRESSION)) {
      appNodeLabelExpression = entityInfo.get(
          ApplicationMetricsConstants.APP_NODE_LABEL_EXPRESSION).toString();
    }
    if (entityInfo.containsKey(
        ApplicationMetricsConstants.AM_NODE_LABEL_EXPRESSION)) {
      amNodeLabelExpression = entityInfo.get(
          ApplicationMetricsConstants.AM_NODE_LABEL_EXPRESSION).toString();
    }
    if (entityInfo.containsKey(ApplicationMetricsConstants.APP_CPU_METRICS)) {
      long vcoreSeconds = Long.parseLong(entityInfo.get(
          ApplicationMetricsConstants.APP_CPU_METRICS).toString());
      long memorySeconds = Long.parseLong(entityInfo.get(
          ApplicationMetricsConstants.APP_MEM_METRICS).toString());
      long preemptedMemorySeconds = Long.parseLong(entityInfo.get(
          ApplicationMetricsConstants.APP_MEM_PREEMPT_METRICS).toString());
      long preemptedVcoreSeconds = Long.parseLong(entityInfo.get(
          ApplicationMetricsConstants.APP_CPU_PREEMPT_METRICS).toString());
      appResources = ApplicationResourceUsageReport.newInstance(0, 0, null,
          null, null, memorySeconds, vcoreSeconds, 0, 0,
          preemptedMemorySeconds, preemptedVcoreSeconds);
    }
    if (entityInfo.containsKey(ApplicationMetricsConstants.APP_TAGS_INFO)) {
      appTags = new HashSet<String>();
      Object obj = entityInfo.get(ApplicationMetricsConstants.APP_TAGS_INFO);
      if (obj != null && obj instanceof Collection<?>) {
        for (Object o : (Collection<?>) obj) {
          if (o != null) {
            appTags.add(o.toString());
          }
        }
      }
    }
  }
  List<TimelineEvent> events = entity.getEvents();
  long updatedTimeStamp = 0L;
  if (events != null) {
    for (TimelineEvent event : events) {
      if (event.getEventType().equals(
          ApplicationMetricsConstants.CREATED_EVENT_TYPE)) {
        createdTime = event.getTimestamp();
      } else if (event.getEventType().equals(
          ApplicationMetricsConstants.UPDATED_EVENT_TYPE)) {
        // Update events are parsed in timestamp-descending order, so an
        // event seen later in the list may be older than one already
        // applied; compare timestamps before overwriting.
        if (event.getTimestamp() > updatedTimeStamp) {
          updatedTimeStamp = event.getTimestamp();
        } else {
          continue;
        }
        Map<String, Object> eventInfo = event.getEventInfo();
        if (eventInfo == null) {
          continue;
        }
        applicationPriority = Integer.parseInt(eventInfo.get(
            ApplicationMetricsConstants.APPLICATION_PRIORITY_INFO).toString());
        queue = eventInfo.get(
            ApplicationMetricsConstants.QUEUE_ENTITY_INFO).toString();
      } else if (event.getEventType().equals(
          ApplicationMetricsConstants.STATE_UPDATED_EVENT_TYPE)) {
        Map<String, Object> eventInfo = event.getEventInfo();
        if (eventInfo == null) {
          continue;
        }
        if (eventInfo.containsKey(
            ApplicationMetricsConstants.STATE_EVENT_INFO)) {
          if (!isFinalState(state)) {
            state = YarnApplicationState.valueOf(eventInfo.get(
                ApplicationMetricsConstants.STATE_EVENT_INFO).toString());
          }
        }
      } else if (event.getEventType().equals(
          ApplicationMetricsConstants.FINISHED_EVENT_TYPE)) {
        progress = 1.0F;
        finishedTime = event.getTimestamp();
        Map<String, Object> eventInfo = event.getEventInfo();
        if (eventInfo == null) {
          continue;
        }
        if (eventInfo.containsKey(
            ApplicationMetricsConstants.LATEST_APP_ATTEMPT_EVENT_INFO)) {
          latestApplicationAttemptId = ApplicationAttemptId.fromString(
              eventInfo.get(
                  ApplicationMetricsConstants.LATEST_APP_ATTEMPT_EVENT_INFO)
                  .toString());
        }
        if (eventInfo.containsKey(
            ApplicationMetricsConstants.DIAGNOSTICS_INFO_EVENT_INFO)) {
          diagnosticsInfo = eventInfo.get(
              ApplicationMetricsConstants.DIAGNOSTICS_INFO_EVENT_INFO)
              .toString();
        }
        if (eventInfo.containsKey(
            ApplicationMetricsConstants.FINAL_STATUS_EVENT_INFO)) {
          finalStatus = FinalApplicationStatus.valueOf(eventInfo.get(
              ApplicationMetricsConstants.FINAL_STATUS_EVENT_INFO).toString());
        }
        if (eventInfo.containsKey(
            ApplicationMetricsConstants.STATE_EVENT_INFO)) {
          state = YarnApplicationState.valueOf(eventInfo.get(
              ApplicationMetricsConstants.STATE_EVENT_INFO).toString());
        }
      }
    }
  }
  return new ApplicationReportExt(ApplicationReport.newInstance(
      ApplicationId.fromString(entity.getEntityId()),
      latestApplicationAttemptId, user, queue, name, null, -1, null, state,
      diagnosticsInfo, null, createdTime, finishedTime, finalStatus,
      appResources, null, progress, type, null, appTags,
      unmanagedApplication, Priority.newInstance(applicationPriority),
      appNodeLabelExpression, amNodeLabelExpression), appViewACLs);
}
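The method above calls an isFinalState helper that is not shown. A plausible sketch of it, assuming the terminal YARN states count as final:

private static boolean isFinalState(YarnApplicationState state) {
  // Once an application has reached a terminal state, later
  // state-update events must not override it.
  return state == YarnApplicationState.FINISHED
      || state == YarnApplicationState.FAILED
      || state == YarnApplicationState.KILLED;
}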
use of org.apache.hadoop.yarn.api.records.timeline.TimelineEvent in project hadoop by apache.
the class KeyValueBasedTimelineStore method getEntityTimelines.
@Override
public synchronized TimelineEvents getEntityTimelines(String entityType,
    SortedSet<String> entityIds, Long limit, Long windowStart,
    Long windowEnd, Set<String> eventTypes) {
  if (getServiceStopped()) {
    LOG.info("Service stopped, return null for the storage");
    return null;
  }
  TimelineEvents allEvents = new TimelineEvents();
  if (entityIds == null) {
    return allEvents;
  }
  if (limit == null) {
    limit = DEFAULT_LIMIT;
  }
  if (windowStart == null) {
    windowStart = Long.MIN_VALUE;
  }
  if (windowEnd == null) {
    windowEnd = Long.MAX_VALUE;
  }
  for (String entityId : entityIds) {
    EntityIdentifier entityID = new EntityIdentifier(entityId, entityType);
    TimelineEntity entity = entities.get(entityID);
    if (entity == null) {
      continue;
    }
    EventsOfOneEntity events = new EventsOfOneEntity();
    events.setEntityId(entityId);
    events.setEntityType(entityType);
    for (TimelineEvent event : entity.getEvents()) {
      if (events.getEvents().size() >= limit) {
        break;
      }
      if (event.getTimestamp() <= windowStart) {
        continue;
      }
      if (event.getTimestamp() > windowEnd) {
        continue;
      }
      if (eventTypes != null && !eventTypes.contains(event.getEventType())) {
        continue;
      }
      events.addEvent(event);
    }
    allEvents.addEvent(events);
  }
  return allEvents;
}
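A caller might query a bounded window of events for a few entities as follows; a minimal sketch, assuming a populated store instance (for example a MemoryTimelineStore, which extends KeyValueBasedTimelineStore) named store:

SortedSet<String> ids = new TreeSet<String>();
ids.add("id_1");
ids.add("id_2");
TimelineEvents result = store.getEntityTimelines("type_1", ids,
    10L,    // at most 10 events per entity
    100L,   // exclusive window start: timestamp must be > 100
    1000L,  // inclusive window end: timestamp must be <= 1000
    Collections.singleton("start_event")); // keep only this event type
for (TimelineEvents.EventsOfOneEntity perEntity : result.getAllEvents()) {
  System.out.println(perEntity.getEntityId() + ": "
      + perEntity.getEvents().size() + " event(s)");
}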