Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEntity in project hadoop by apache.
The class ApplicationHistoryManagerOnTimelineStore, method getApplicationAttempt.
private ApplicationAttemptReport getApplicationAttempt(
    ApplicationAttemptId appAttemptId, boolean checkACLs)
    throws YarnException, IOException {
  if (checkACLs) {
    ApplicationReportExt app = getApplication(
        appAttemptId.getApplicationId(),
        ApplicationReportField.USER_AND_ACLS);
    checkAccess(app);
  }
  TimelineEntity entity = timelineDataManager.getEntity(
      AppAttemptMetricsConstants.ENTITY_TYPE, appAttemptId.toString(),
      EnumSet.allOf(Field.class), UserGroupInformation.getLoginUser());
  if (entity == null) {
    throw new ApplicationAttemptNotFoundException(
        "The entity for application attempt " + appAttemptId +
            " doesn't exist in the timeline store");
  } else {
    return convertToApplicationAttemptReport(entity);
  }
}
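The lookup key here is just the attempt id's string form. A minimal sketch of that convention, with an illustrative class name and made-up ids (not part of Hadoop):

import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;

public class AttemptEntityIdDemo {
  public static void main(String[] args) {
    // The attempt id's string form doubles as the timeline entity id.
    ApplicationId appId = ApplicationId.newInstance(1234567890123L, 42);
    ApplicationAttemptId attemptId = ApplicationAttemptId.newInstance(appId, 1);
    // Prints appattempt_1234567890123_0042_000001, the exact key that
    // getApplicationAttempt passes to timelineDataManager.getEntity().
    System.out.println(attemptId.toString());
  }
}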
Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEntity in project hadoop by apache.
The class KeyValueBasedTimelineStore, method put.
@Override
public synchronized TimelinePutResponse put(TimelineEntities data) {
  TimelinePutResponse response = new TimelinePutResponse();
  if (getServiceStopped()) {
    LOG.info("Service stopped, return null for the storage");
    TimelinePutError error = new TimelinePutError();
    error.setErrorCode(TimelinePutError.IO_EXCEPTION);
    response.addError(error);
    return response;
  }
  for (TimelineEntity entity : data.getEntities()) {
    EntityIdentifier entityId =
        new EntityIdentifier(entity.getEntityId(), entity.getEntityType());
    // store entity info in memory
    TimelineEntity existingEntity = entities.get(entityId);
    boolean needsPut = false;
    if (existingEntity == null) {
      existingEntity = new TimelineEntity();
      existingEntity.setEntityId(entity.getEntityId());
      existingEntity.setEntityType(entity.getEntityType());
      existingEntity.setStartTime(entity.getStartTime());
      if (entity.getDomainId() == null ||
          entity.getDomainId().length() == 0) {
        TimelinePutError error = new TimelinePutError();
        error.setEntityId(entityId.getId());
        error.setEntityType(entityId.getType());
        error.setErrorCode(TimelinePutError.NO_DOMAIN);
        response.addError(error);
        continue;
      }
      existingEntity.setDomainId(entity.getDomainId());
      // insert a new entity to the storage, update insert time map
      entityInsertTimes.put(entityId, System.currentTimeMillis());
      needsPut = true;
    }
    if (entity.getEvents() != null) {
      if (existingEntity.getEvents() == null) {
        existingEntity.setEvents(entity.getEvents());
      } else {
        existingEntity.addEvents(entity.getEvents());
      }
      Collections.sort(existingEntity.getEvents());
      needsPut = true;
    }
    // check startTime
    if (existingEntity.getStartTime() == null) {
      if (existingEntity.getEvents() == null ||
          existingEntity.getEvents().isEmpty()) {
        TimelinePutError error = new TimelinePutError();
        error.setEntityId(entityId.getId());
        error.setEntityType(entityId.getType());
        error.setErrorCode(TimelinePutError.NO_START_TIME);
        response.addError(error);
        entities.remove(entityId);
        entityInsertTimes.remove(entityId);
        continue;
      } else {
        Long min = Long.MAX_VALUE;
        for (TimelineEvent e : entity.getEvents()) {
          if (min > e.getTimestamp()) {
            min = e.getTimestamp();
          }
        }
        existingEntity.setStartTime(min);
        needsPut = true;
      }
    }
    if (entity.getPrimaryFilters() != null) {
      if (existingEntity.getPrimaryFilters() == null) {
        existingEntity.setPrimaryFilters(new HashMap<String, Set<Object>>());
      }
      for (Entry<String, Set<Object>> pf :
          entity.getPrimaryFilters().entrySet()) {
        for (Object pfo : pf.getValue()) {
          existingEntity.addPrimaryFilter(pf.getKey(),
              KeyValueBasedTimelineStoreUtils.compactNumber(pfo));
          needsPut = true;
        }
      }
    }
    if (entity.getOtherInfo() != null) {
      if (existingEntity.getOtherInfo() == null) {
        existingEntity.setOtherInfo(new HashMap<String, Object>());
      }
      for (Entry<String, Object> info : entity.getOtherInfo().entrySet()) {
        existingEntity.addOtherInfo(info.getKey(),
            KeyValueBasedTimelineStoreUtils.compactNumber(info.getValue()));
        needsPut = true;
      }
    }
    if (needsPut) {
      entities.put(entityId, existingEntity);
    }
    // relate it to other entities
    if (entity.getRelatedEntities() == null) {
      continue;
    }
    for (Entry<String, Set<String>> partRelatedEntities :
        entity.getRelatedEntities().entrySet()) {
      if (partRelatedEntities == null) {
        continue;
      }
      for (String idStr : partRelatedEntities.getValue()) {
        EntityIdentifier relatedEntityId =
            new EntityIdentifier(idStr, partRelatedEntities.getKey());
        TimelineEntity relatedEntity = entities.get(relatedEntityId);
        if (relatedEntity != null) {
          if (relatedEntity.getDomainId().equals(
              existingEntity.getDomainId())) {
            relatedEntity.addRelatedEntity(
                existingEntity.getEntityType(), existingEntity.getEntityId());
            entities.put(relatedEntityId, relatedEntity);
          } else {
            // in this case the entity will be put, but the relation will be
            // ignored
            TimelinePutError error = new TimelinePutError();
            error.setEntityType(existingEntity.getEntityType());
            error.setEntityId(existingEntity.getEntityId());
            error.setErrorCode(TimelinePutError.FORBIDDEN_RELATION);
            response.addError(error);
          }
        } else {
          relatedEntity = new TimelineEntity();
          relatedEntity.setEntityId(relatedEntityId.getId());
          relatedEntity.setEntityType(relatedEntityId.getType());
          relatedEntity.setStartTime(existingEntity.getStartTime());
          relatedEntity.addRelatedEntity(
              existingEntity.getEntityType(), existingEntity.getEntityId());
          relatedEntity.setDomainId(existingEntity.getDomainId());
          entities.put(relatedEntityId, relatedEntity);
          entityInsertTimes.put(relatedEntityId, System.currentTimeMillis());
        }
      }
    }
  }
  return response;
}
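To exercise the NO_DOMAIN and start-time rules above, here is a minimal sketch, assuming the concrete in-memory subclass MemoryTimelineStore from the same package; the class name PutDemo and the literal ids are illustrative:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEvent;
import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse;
import org.apache.hadoop.yarn.server.timeline.MemoryTimelineStore;

public class PutDemo {
  public static void main(String[] args) throws Exception {
    MemoryTimelineStore store = new MemoryTimelineStore();
    store.init(new Configuration());
    store.start();

    TimelineEntity entity = new TimelineEntity();
    entity.setEntityId("entity_1");
    entity.setEntityType("TEST_TYPE");
    entity.setDomainId("DEFAULT");  // omitting this triggers a NO_DOMAIN error
    TimelineEvent event = new TimelineEvent();
    event.setEventType("STARTED");
    event.setTimestamp(System.currentTimeMillis());  // becomes the start time
    entity.addEvent(event);

    TimelineEntities batch = new TimelineEntities();
    batch.addEntity(entity);
    TimelinePutResponse response = store.put(batch);
    System.out.println("errors: " + response.getErrors().size());  // expect 0
    store.stop();
  }
}

Since the entity carries an event, put() derives the missing start time from the minimum event timestamp instead of rejecting it with NO_START_TIME.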
Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEntity in project hadoop by apache.
The class KeyValueBasedTimelineStore, method getEntities.
@Override
public synchronized TimelineEntities getEntities(String entityType,
    Long limit, Long windowStart, Long windowEnd, String fromId, Long fromTs,
    NameValuePair primaryFilter, Collection<NameValuePair> secondaryFilters,
    EnumSet<Field> fields, CheckAcl checkAcl) throws IOException {
  if (getServiceStopped()) {
    LOG.info("Service stopped, return null for the storage");
    return null;
  }
  if (limit == null) {
    limit = DEFAULT_LIMIT;
  }
  if (windowStart == null) {
    windowStart = Long.MIN_VALUE;
  }
  if (windowEnd == null) {
    windowEnd = Long.MAX_VALUE;
  }
  if (fields == null) {
    fields = EnumSet.allOf(Field.class);
  }
  Iterator<TimelineEntity> entityIterator = null;
  if (fromId != null) {
    TimelineEntity firstEntity =
        entities.get(new EntityIdentifier(fromId, entityType));
    if (firstEntity == null) {
      return new TimelineEntities();
    } else {
      entityIterator = entities.valueSetIterator(firstEntity);
    }
  }
  if (entityIterator == null) {
    entityIterator = entities.valueSetIterator();
  }
  List<TimelineEntity> entitiesSelected = new ArrayList<TimelineEntity>();
  while (entityIterator.hasNext()) {
    TimelineEntity entity = entityIterator.next();
    if (entitiesSelected.size() >= limit) {
      break;
    }
    if (!entity.getEntityType().equals(entityType)) {
      continue;
    }
    if (entity.getStartTime() <= windowStart) {
      continue;
    }
    if (entity.getStartTime() > windowEnd) {
      continue;
    }
    if (fromTs != null && entityInsertTimes.get(new EntityIdentifier(
        entity.getEntityId(), entity.getEntityType())) > fromTs) {
      continue;
    }
    if (primaryFilter != null && !KeyValueBasedTimelineStoreUtils
        .matchPrimaryFilter(entity.getPrimaryFilters(), primaryFilter)) {
      continue;
    }
    if (secondaryFilters != null) {
      // AND logic
      boolean flag = true;
      for (NameValuePair secondaryFilter : secondaryFilters) {
        if (secondaryFilter != null &&
            !KeyValueBasedTimelineStoreUtils.matchPrimaryFilter(
                entity.getPrimaryFilters(), secondaryFilter) &&
            !KeyValueBasedTimelineStoreUtils.matchFilter(
                entity.getOtherInfo(), secondaryFilter)) {
          flag = false;
          break;
        }
      }
      if (!flag) {
        continue;
      }
    }
    if (entity.getDomainId() == null) {
      entity.setDomainId(DEFAULT_DOMAIN_ID);
    }
    if (checkAcl == null || checkAcl.check(entity)) {
      entitiesSelected.add(entity);
    }
  }
  List<TimelineEntity> entitiesToReturn = new ArrayList<TimelineEntity>();
  for (TimelineEntity entitySelected : entitiesSelected) {
    entitiesToReturn.add(
        KeyValueBasedTimelineStoreUtils.maskFields(entitySelected, fields));
  }
  Collections.sort(entitiesToReturn);
  TimelineEntities entitiesWrapper = new TimelineEntities();
  entitiesWrapper.setEntities(entitiesToReturn);
  return entitiesWrapper;
}
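A sketch of how this reader might be invoked through the TimelineReader interface, with nulls standing in for every optional argument; the helper class and method names are illustrative:

import java.io.IOException;
import java.util.EnumSet;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
import org.apache.hadoop.yarn.server.timeline.TimelineReader;
import org.apache.hadoop.yarn.server.timeline.TimelineReader.Field;

public class QueryDemo {
  // Lists up to ten entities of the given type; the store sorts the result
  // by start time, newest first.
  static void listEntities(TimelineReader store, String type)
      throws IOException {
    TimelineEntities result = store.getEntities(
        type,
        10L,                         // limit
        null, null,                  // windowStart / windowEnd: full range
        null, null,                  // fromId / fromTs: no paging
        null, null,                  // primaryFilter / secondaryFilters
        EnumSet.allOf(Field.class),  // retrieve every field
        null);                       // checkAcl == null admits every entity
    for (TimelineEntity e : result.getEntities()) {
      System.out.println(e.getEntityId() + " started at " + e.getStartTime());
    }
  }
}

Note that, per the guard at the top of the method, getEntities returns null once the service has stopped, so a defensive caller would check for that before iterating.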
Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEntity in project hadoop by apache.
The class RollingLevelDBTimelineStore, method getEntity.
/**
 * Read entity from a db iterator. If no information is found in the specified
 * fields for this entity, return null.
 */
private static TimelineEntity getEntity(String entityId, String entityType,
    Long startTime, EnumSet<Field> fields, DBIterator iterator,
    byte[] prefix, int prefixlen) throws IOException {
  if (fields == null) {
    fields = EnumSet.allOf(Field.class);
  }
  TimelineEntity entity = new TimelineEntity();
  boolean events = false;
  boolean lastEvent = false;
  if (fields.contains(Field.EVENTS)) {
    events = true;
  } else if (fields.contains(Field.LAST_EVENT_ONLY)) {
    lastEvent = true;
  } else {
    entity.setEvents(null);
  }
  boolean relatedEntities = false;
  if (fields.contains(Field.RELATED_ENTITIES)) {
    relatedEntities = true;
  } else {
    entity.setRelatedEntities(null);
  }
  boolean primaryFilters = false;
  if (fields.contains(Field.PRIMARY_FILTERS)) {
    primaryFilters = true;
  } else {
    entity.setPrimaryFilters(null);
  }
  boolean otherInfo = false;
  if (fields.contains(Field.OTHER_INFO)) {
    otherInfo = true;
  } else {
    entity.setOtherInfo(null);
  }
  // iterate through the entity's entries, parsing information if it is part
  // of a requested field
  for (; iterator.hasNext(); iterator.next()) {
    byte[] key = iterator.peekNext().getKey();
    if (!prefixMatches(prefix, prefixlen, key)) {
      break;
    }
    if (key.length == prefixlen) {
      continue;
    }
    if (key[prefixlen] == PRIMARY_FILTERS_COLUMN[0]) {
      if (primaryFilters) {
        addPrimaryFilter(entity, key,
            prefixlen + PRIMARY_FILTERS_COLUMN.length);
      }
    } else if (key[prefixlen] == OTHER_INFO_COLUMN[0]) {
      if (otherInfo) {
        entity.addOtherInfo(
            parseRemainingKey(key, prefixlen + OTHER_INFO_COLUMN.length),
            fstConf.asObject(iterator.peekNext().getValue()));
      }
    } else if (key[prefixlen] == RELATED_ENTITIES_COLUMN[0]) {
      if (relatedEntities) {
        addRelatedEntity(entity, key,
            prefixlen + RELATED_ENTITIES_COLUMN.length);
      }
    } else if (key[prefixlen] == EVENTS_COLUMN[0]) {
      if (events || (lastEvent && entity.getEvents().size() == 0)) {
        TimelineEvent event = getEntityEvent(null, key,
            prefixlen + EVENTS_COLUMN.length, iterator.peekNext().getValue());
        if (event != null) {
          entity.addEvent(event);
        }
      }
    } else if (key[prefixlen] == DOMAIN_ID_COLUMN[0]) {
      byte[] v = iterator.peekNext().getValue();
      String domainId = new String(v, UTF_8);
      entity.setDomainId(domainId);
    } else {
      LOG.warn(String.format("Found unexpected column for entity %s of "
          + "type %s (0x%02x)", entityId, entityType, key[prefixlen]));
    }
  }
  entity.setEntityId(entityId);
  entity.setEntityType(entityType);
  entity.setStartTime(startTime);
  return entity;
}
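The scan above stops at the first key that falls outside the entity's prefix. A hypothetical re-statement of that check, assuming a plain byte-by-byte comparison (the real prefixMatches helper in RollingLevelDBTimelineStore may be implemented differently):

// Hypothetical mirror of the prefix test used above: a LevelDB key belongs
// to the current entity only while its first prefixlen bytes equal the
// entity's key prefix; the first non-matching key ends the scan, which is
// why all columns of one entity are stored contiguously.
static boolean prefixMatches(byte[] prefix, int prefixlen, byte[] key) {
  if (key.length < prefixlen) {
    return false;
  }
  for (int i = 0; i < prefixlen; i++) {
    if (prefix[i] != key[i]) {
      return false;
    }
  }
  return true;
}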
Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEntity in project hadoop by apache.
The class ApplicationHistoryManagerOnTimelineStore, method getApplicationAttempts.
@Override
public Map<ApplicationAttemptId, ApplicationAttemptReport>
    getApplicationAttempts(ApplicationId appId)
    throws YarnException, IOException {
  ApplicationReportExt app =
      getApplication(appId, ApplicationReportField.USER_AND_ACLS);
  checkAccess(app);
  TimelineEntities entities = timelineDataManager.getEntities(
      AppAttemptMetricsConstants.ENTITY_TYPE,
      new NameValuePair(AppAttemptMetricsConstants.PARENT_PRIMARY_FILTER,
          appId.toString()),
      null, null, null, null, null, Long.MAX_VALUE,
      EnumSet.allOf(Field.class), UserGroupInformation.getLoginUser());
  Map<ApplicationAttemptId, ApplicationAttemptReport> appAttempts =
      new LinkedHashMap<ApplicationAttemptId, ApplicationAttemptReport>();
  for (TimelineEntity entity : entities.getEntities()) {
    ApplicationAttemptReport appAttempt =
        convertToApplicationAttemptReport(entity);
    appAttempts.put(appAttempt.getApplicationAttemptId(), appAttempt);
  }
  return appAttempts;
}
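The query works because each attempt entity carries a primary filter naming its parent application. A hypothetical sketch of that entity shape from the writer's side; the class name and ids are illustrative:

import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
import org.apache.hadoop.yarn.server.metrics.AppAttemptMetricsConstants;

public class AttemptEntityShapeDemo {
  public static void main(String[] args) {
    ApplicationId appId = ApplicationId.newInstance(1234567890123L, 42);
    ApplicationAttemptId attemptId = ApplicationAttemptId.newInstance(appId, 1);
    TimelineEntity attemptEntity = new TimelineEntity();
    attemptEntity.setEntityType(AppAttemptMetricsConstants.ENTITY_TYPE);
    attemptEntity.setEntityId(attemptId.toString());
    // The parent-app primary filter is exactly what the NameValuePair in
    // getApplicationAttempts() matches against.
    attemptEntity.addPrimaryFilter(
        AppAttemptMetricsConstants.PARENT_PRIMARY_FILTER, appId.toString());
    System.out.println(attemptEntity.getPrimaryFilters());
  }
}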