Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEvents in project hadoop by apache.
The class RollingLevelDBTimelineStore, method getEntityTimelines.
@Override
public TimelineEvents getEntityTimelines(String entityType,
    SortedSet<String> entityIds, Long limit, Long windowStart,
    Long windowEnd, Set<String> eventType) throws IOException {
  TimelineEvents events = new TimelineEvents();
  if (entityIds == null || entityIds.isEmpty()) {
    return events;
  }
  // create a lexicographically-ordered map from start time to entities
  Map<byte[], List<EntityIdentifier>> startTimeMap =
      new TreeMap<byte[], List<EntityIdentifier>>(new Comparator<byte[]>() {
        @Override
        public int compare(byte[] o1, byte[] o2) {
          return WritableComparator.compareBytes(o1, 0, o1.length, o2, 0,
              o2.length);
        }
      });
  DBIterator iterator = null;
  try {
    // skip entities with no start time
    for (String entityId : entityIds) {
      byte[] startTime = getStartTime(entityId, entityType);
      if (startTime != null) {
        List<EntityIdentifier> entities = startTimeMap.get(startTime);
        if (entities == null) {
          entities = new ArrayList<EntityIdentifier>();
          startTimeMap.put(startTime, entities);
        }
        entities.add(new EntityIdentifier(entityId, entityType));
      }
    }
    for (Entry<byte[], List<EntityIdentifier>> entry :
        startTimeMap.entrySet()) {
      // look up the events matching the given parameters (limit,
      // start time, end time, event types) for entities whose start times
      // were found and add the entities to the return list
      byte[] revStartTime = entry.getKey();
      for (EntityIdentifier entityIdentifier : entry.getValue()) {
        EventsOfOneEntity entity = new EventsOfOneEntity();
        entity.setEntityId(entityIdentifier.getId());
        entity.setEntityType(entityType);
        events.addEvent(entity);
        KeyBuilder kb = KeyBuilder.newInstance().add(entityType)
            .add(revStartTime).add(entityIdentifier.getId())
            .add(EVENTS_COLUMN);
        byte[] prefix = kb.getBytesForLookup();
        if (windowEnd == null) {
          windowEnd = Long.MAX_VALUE;
        }
        byte[] revts = writeReverseOrderedLong(windowEnd);
        kb.add(revts);
        byte[] first = kb.getBytesForLookup();
        byte[] last = null;
        if (windowStart != null) {
          last = KeyBuilder.newInstance().add(prefix)
              .add(writeReverseOrderedLong(windowStart)).getBytesForLookup();
        }
        if (limit == null) {
          limit = DEFAULT_LIMIT;
        }
        DB db = entitydb.getDBForStartTime(
            readReverseOrderedLong(revStartTime, 0));
        if (db == null) {
          continue;
        }
        iterator = db.iterator();
        for (iterator.seek(first); entity.getEvents().size() < limit
            && iterator.hasNext(); iterator.next()) {
          byte[] key = iterator.peekNext().getKey();
          if (!prefixMatches(prefix, prefix.length, key) || (last != null
              && WritableComparator.compareBytes(key, 0, key.length,
                  last, 0, last.length) > 0)) {
            break;
          }
          TimelineEvent event = getEntityEvent(eventType, key, prefix.length,
              iterator.peekNext().getValue());
          if (event != null) {
            entity.addEvent(event);
          }
        }
      }
    }
  } finally {
    IOUtils.cleanup(LOG, iterator);
  }
  return events;
}
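All of the implementations on this page share the TimelineReader#getEntityTimelines contract, so a caller can drive any of them the same way. A minimal caller sketch (not part of the Hadoop source; TimelineQuerySketch, printRecentEvents, and the window/limit values are illustrative):

import java.io.IOException;
import java.util.Collections;
import java.util.SortedSet;

import org.apache.hadoop.yarn.api.records.timeline.TimelineEvent;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEvents;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEvents.EventsOfOneEntity;
import org.apache.hadoop.yarn.server.timeline.TimelineReader;

public final class TimelineQuerySketch {

  // Prints the most recent matching events for each requested entity.
  // `store` is assumed to be an already-initialized timeline store, e.g. a
  // started RollingLevelDBTimelineStore.
  static void printRecentEvents(TimelineReader store, String entityType,
      SortedSet<String> entityIds, String eventType) throws IOException {
    long now = System.currentTimeMillis();
    // Window covers the last hour; at most 10 events per entity.
    TimelineEvents events = store.getEntityTimelines(entityType, entityIds,
        10L, now - 3600_000L, now, Collections.singleton(eventType));
    for (EventsOfOneEntity perEntity : events.getAllEvents()) {
      for (TimelineEvent event : perEntity.getEvents()) {
        System.out.println(perEntity.getEntityId() + " "
            + event.getEventType() + " @ " + event.getTimestamp());
      }
    }
  }
}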
Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEvents in project hadoop by apache.
The class TimelineDataManager, method doGetEvents.
private TimelineEvents doGetEvents(String entityType,
    SortedSet<String> entityIds, SortedSet<String> eventTypes,
    Long windowStart, Long windowEnd, Long limit,
    UserGroupInformation callerUGI) throws YarnException, IOException {
  TimelineEvents events = store.getEntityTimelines(entityType, entityIds,
      limit, windowStart, windowEnd, eventTypes);
  if (events != null) {
    Iterator<TimelineEvents.EventsOfOneEntity> eventsItr =
        events.getAllEvents().iterator();
    while (eventsItr.hasNext()) {
      TimelineEvents.EventsOfOneEntity eventsOfOneEntity = eventsItr.next();
      try {
        TimelineEntity entity = store.getEntity(
            eventsOfOneEntity.getEntityId(),
            eventsOfOneEntity.getEntityType(),
            EnumSet.of(Field.PRIMARY_FILTERS));
        addDefaultDomainIdIfAbsent(entity);
        // check ACLs
        if (!timelineACLsManager.checkAccess(callerUGI,
            ApplicationAccessType.VIEW_APP, entity)) {
          eventsItr.remove();
        }
      } catch (Exception e) {
        LOG.warn("Error when verifying access for user " + callerUGI
            + " on the events of the timeline entity "
            + new EntityIdentifier(eventsOfOneEntity.getEntityId(),
                eventsOfOneEntity.getEntityType()), e);
        eventsItr.remove();
      }
    }
  }
  if (events == null) {
    return new TimelineEvents();
  }
  return events;
}
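Two details are worth noting in doGetEvents: inaccessible entries are dropped in place via Iterator.remove(), and any exception during the check fails closed, removing the entry rather than exposing it. A standalone sketch of that fail-closed filtering pattern (isAccessible is a hypothetical stand-in for the TimelineACLsManager#checkAccess call):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;

public final class FailClosedFilterSketch {
  public static void main(String[] args) {
    List<String> ids = new ArrayList<>(Arrays.asList("app_1", "app_2"));
    Iterator<String> it = ids.iterator();
    while (it.hasNext()) {
      String id = it.next();
      try {
        if (!isAccessible(id)) {
          it.remove(); // drop entries the caller may not view
        }
      } catch (Exception e) {
        it.remove(); // on any error, fail closed and hide the entry
      }
    }
    System.out.println(ids); // prints [app_1]
  }

  // Hypothetical access check standing in for the real ACL lookup.
  static boolean isAccessible(String id) {
    return id.endsWith("_1");
  }
}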
Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEvents in project hadoop by apache.
The class TimelineDataManager, method getEvents.
/**
 * Get the events whose entities the given user has access to. The meaning of
 * each argument has been documented with
 * {@link TimelineReader#getEntityTimelines}.
 *
 * @see TimelineReader#getEntityTimelines
 */
public TimelineEvents getEvents(String entityType,
    SortedSet<String> entityIds, SortedSet<String> eventTypes,
    Long windowStart, Long windowEnd, Long limit,
    UserGroupInformation callerUGI) throws YarnException, IOException {
  long startTime = Time.monotonicNow();
  metrics.incrGetEventsOps();
  try {
    TimelineEvents events = doGetEvents(entityType, entityIds, eventTypes,
        windowStart, windowEnd, limit, callerUGI);
    metrics.incrGetEventsTotal(events.getAllEvents().size());
    return events;
  } finally {
    metrics.addGetEventsTime(Time.monotonicNow() - startTime);
  }
}
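The try/finally here guarantees the latency metric is recorded whether doGetEvents returns normally or throws, while the success-only counter stays inside the try. The same pattern in isolation (doWork and recordLatency are hypothetical helpers):

import org.apache.hadoop.util.Time;

public final class TimedCallSketch {

  static int timedCall() {
    long start = Time.monotonicNow();
    try {
      return doWork(); // success-only bookkeeping would go here
    } finally {
      // Runs on both success and exception, so elapsed time is always recorded.
      recordLatency(Time.monotonicNow() - start);
    }
  }

  static int doWork() {
    return 42;
  }

  static void recordLatency(long elapsedMillis) {
    System.out.println("took " + elapsedMillis + " ms");
  }
}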
Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEvents in project hadoop by apache.
The class EntityGroupFSTimelineStore, method getEntityTimelines.
@Override
public TimelineEvents getEntityTimelines(String entityType,
    SortedSet<String> entityIds, Long limit, Long windowStart,
    Long windowEnd, Set<String> eventTypes) throws IOException {
  LOG.debug("getEntityTimelines type={} ids={}", entityType, entityIds);
  TimelineEvents returnEvents = new TimelineEvents();
  List<EntityCacheItem> relatedCacheItems = new ArrayList<>();
  for (String entityId : entityIds) {
    LOG.debug("getEntityTimeline type={} id={}", entityType, entityId);
    List<TimelineStore> stores = getTimelineStoresForRead(entityId,
        entityType, relatedCacheItems);
    for (TimelineStore store : stores) {
      LOG.debug("Try timeline store {}:{} for the request", store.getName(),
          store.toString());
      SortedSet<String> entityIdSet = new TreeSet<>();
      entityIdSet.add(entityId);
      TimelineEvents events = store.getEntityTimelines(entityType,
          entityIdSet, limit, windowStart, windowEnd, eventTypes);
      if (events != null) {
        returnEvents.addEvents(events.getAllEvents());
      }
    }
  }
  return returnEvents;
}
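Because an entity id can be served by more than one backing store, the method queries every candidate store per id and concatenates the results with addEvents, which appends per-entity lists without deduplicating. A small demonstration of that consequence (entity id and type are made up):

import org.apache.hadoop.yarn.api.records.timeline.TimelineEvents;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEvents.EventsOfOneEntity;

public final class AddEventsSketch {
  public static void main(String[] args) {
    TimelineEvents merged = new TimelineEvents();
    // Simulate the same entity id coming back from two backing stores.
    for (int store = 0; store < 2; store++) {
      EventsOfOneEntity perEntity = new EventsOfOneEntity();
      perEntity.setEntityId("entity_1");
      perEntity.setEntityType("TEST_TYPE");
      merged.addEvent(perEntity);
    }
    // The response holds two EventsOfOneEntity entries for one id.
    System.out.println(merged.getAllEvents().size()); // prints 2, not 1
  }
}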
Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEvents in project hadoop by apache.
The class KeyValueBasedTimelineStore, method getEntityTimelines.
@Override
public synchronized TimelineEvents getEntityTimelines(String entityType,
    SortedSet<String> entityIds, Long limit, Long windowStart,
    Long windowEnd, Set<String> eventTypes) {
  if (getServiceStopped()) {
    LOG.info("Service stopped, return null for the storage");
    return null;
  }
  TimelineEvents allEvents = new TimelineEvents();
  if (entityIds == null) {
    return allEvents;
  }
  if (limit == null) {
    limit = DEFAULT_LIMIT;
  }
  if (windowStart == null) {
    windowStart = Long.MIN_VALUE;
  }
  if (windowEnd == null) {
    windowEnd = Long.MAX_VALUE;
  }
  for (String entityId : entityIds) {
    EntityIdentifier entityID = new EntityIdentifier(entityId, entityType);
    TimelineEntity entity = entities.get(entityID);
    if (entity == null) {
      continue;
    }
    EventsOfOneEntity events = new EventsOfOneEntity();
    events.setEntityId(entityId);
    events.setEntityType(entityType);
    for (TimelineEvent event : entity.getEvents()) {
      if (events.getEvents().size() >= limit) {
        break;
      }
      if (event.getTimestamp() <= windowStart) {
        continue;
      }
      if (event.getTimestamp() > windowEnd) {
        continue;
      }
      if (eventTypes != null && !eventTypes.contains(event.getEventType())) {
        continue;
      }
      events.addEvent(event);
    }
    allEvents.addEvent(events);
  }
  return allEvents;
}
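The filter above makes the window half-open: an event is kept only when windowStart < timestamp <= windowEnd, its type matches, and the per-entity limit is not yet reached. A standalone illustration of those boundary semantics (entity id, type, and timestamps are made up):

import org.apache.hadoop.yarn.api.records.timeline.TimelineEvent;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEvents.EventsOfOneEntity;

public final class WindowFilterSketch {
  public static void main(String[] args) {
    long windowStart = 100L;
    long windowEnd = 200L;
    EventsOfOneEntity kept = new EventsOfOneEntity();
    kept.setEntityId("entity_1");
    kept.setEntityType("TEST_TYPE");
    for (long ts : new long[] { 100L, 150L, 200L, 250L }) {
      if (ts <= windowStart || ts > windowEnd) {
        // 100 is dropped (start is exclusive), 250 is dropped (past the end)
        continue;
      }
      TimelineEvent event = new TimelineEvent();
      event.setEventType("SAMPLE");
      event.setTimestamp(ts);
      kept.addEvent(event); // keeps 150 and 200 (end is inclusive)
    }
    System.out.println(kept.getEvents().size()); // prints 2
  }
}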