Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEntities in project hadoop by apache.
The class ApplicationHistoryManagerOnTimelineStore, method getApplications:
@Override
public Map<ApplicationId, ApplicationReport> getApplications(long appsNum,
    long appStartedTimeBegin, long appStartedTimeEnd)
    throws YarnException, IOException {
  TimelineEntities entities = timelineDataManager.getEntities(
      ApplicationMetricsConstants.ENTITY_TYPE, null, null,
      appStartedTimeBegin, appStartedTimeEnd, null, null,
      appsNum == Long.MAX_VALUE ? this.maxLoadedApplications : appsNum,
      EnumSet.allOf(Field.class), UserGroupInformation.getLoginUser());
  Map<ApplicationId, ApplicationReport> apps =
      new LinkedHashMap<ApplicationId, ApplicationReport>();
  if (entities != null && entities.getEntities() != null) {
    for (TimelineEntity entity : entities.getEntities()) {
      try {
        ApplicationReportExt app =
            generateApplicationReport(entity, ApplicationReportField.ALL);
        apps.put(app.appReport.getApplicationId(), app.appReport);
      } catch (Exception e) {
        LOG.error("Error on generating application report for "
            + entity.getEntityId(), e);
      }
    }
  }
  return apps;
}
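For context, getApplications simply forwards the time window and limit to TimelineDataManager.getEntities with the application entity type and converts each returned TimelineEntity into an ApplicationReport. A minimal caller sketch, assuming an already-initialized history manager; the variable name historyManager is illustrative, not from the source:

  // fetch up to 100 applications that started within the last hour
  long now = System.currentTimeMillis();
  Map<ApplicationId, ApplicationReport> apps =
      historyManager.getApplications(100, now - 3600L * 1000, now);
  for (ApplicationReport report : apps.values()) {
    System.out.println(report.getApplicationId() + " -> "
        + report.getYarnApplicationState());
  }

Because the reports are collected into a LinkedHashMap, iteration preserves the order in which the timeline store returned the entities.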
Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEntities in project hadoop by apache.
The class TestApplicationHistoryManagerOnTimelineStore, method prepareTimelineStore:
private static void prepareTimelineStore(TimelineStore store, int scale)
    throws Exception {
  for (int i = 1; i <= scale; ++i) {
    TimelineEntities entities = new TimelineEntities();
    ApplicationId appId = ApplicationId.newInstance(0, i);
    if (i == 2) {
      entities.addEntity(createApplicationTimelineEntity(
          appId, true, false, false, true, YarnApplicationState.FINISHED));
    } else {
      entities.addEntity(createApplicationTimelineEntity(
          appId, false, false, false, false, YarnApplicationState.FINISHED));
    }
    store.put(entities);
    for (int j = 1; j <= scale; ++j) {
      entities = new TimelineEntities();
      ApplicationAttemptId appAttemptId =
          ApplicationAttemptId.newInstance(appId, j);
      entities.addEntity(createAppAttemptTimelineEntity(appAttemptId));
      store.put(entities);
      for (int k = 1; k <= scale; ++k) {
        entities = new TimelineEntities();
        ContainerId containerId = ContainerId.newContainerId(appAttemptId, k);
        entities.addEntity(createContainerEntity(containerId));
        store.put(entities);
      }
    }
  }
  TimelineEntities entities = new TimelineEntities();
  ApplicationId appId = ApplicationId.newInstance(1234, 1);
  ApplicationAttemptId appAttemptId =
      ApplicationAttemptId.newInstance(appId, 1);
  ContainerId containerId = ContainerId.newContainerId(appAttemptId, 1);
  entities.addEntity(createApplicationTimelineEntity(
      appId, true, false, false, false, YarnApplicationState.RUNNING));
  entities.addEntity(createAppAttemptTimelineEntity(appAttemptId));
  entities.addEntity(createContainerEntity(containerId));
  store.put(entities);
}
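Note the cubic growth of this fixture: a run with the given scale writes scale application entities, scale attempts per application, and scale containers per attempt, followed by one extra RUNNING application with a single attempt and container. A hedged sketch of how a test might sanity-check the application count, assuming the helpers create entities of type ApplicationMetricsConstants.ENTITY_TYPE and the store is a started MemoryTimelineStore (the assertion wiring is illustrative):

  TimelineStore store = new MemoryTimelineStore();
  store.init(new YarnConfiguration());
  store.start();
  prepareTimelineStore(store, 3);
  // 3 applications from the loop plus the extra RUNNING application
  TimelineEntities apps = store.getEntities(
      ApplicationMetricsConstants.ENTITY_TYPE, null, null, null, null, null,
      null, null, EnumSet.allOf(Field.class), null);
  assertEquals(4, apps.getEntities().size());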
Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEntities in project hadoop by apache.
The class RollingLevelDBTimelineStore, method getEntityByTime:
/**
 * Retrieves a list of entities satisfying given parameters.
 *
 * @param base
 *          A byte array prefix for the lookup
 * @param entityType
 *          The type of the entity
 * @param limit
 *          A limit on the number of entities to return
 * @param starttime
 *          The earliest entity start time to retrieve (exclusive)
 * @param endtime
 *          The latest entity start time to retrieve (inclusive)
 * @param fromId
 *          Retrieve entities starting with this entity
 * @param fromTs
 *          Ignore entities with insert timestamp later than this ts
 * @param secondaryFilters
 *          Filter pairs that the entities should match
 * @param fields
 *          The set of fields to retrieve
 * @param checkAcl
 *          The access-control check to apply to each candidate entity
 * @param usingPrimaryFilter
 *          True if this query is using a primary filter
 * @return A list of entities
 * @throws IOException
 *           If the underlying store lookup fails
 */
private TimelineEntities getEntityByTime(byte[] base, String entityType,
    Long limit, Long starttime, Long endtime, String fromId, Long fromTs,
    Collection<NameValuePair> secondaryFilters, EnumSet<Field> fields,
    CheckAcl checkAcl, boolean usingPrimaryFilter) throws IOException {
  DBIterator iterator = null;
  try {
    KeyBuilder kb = KeyBuilder.newInstance().add(base).add(entityType);
    // only db keys matching the prefix (base + entity type) will be parsed
    byte[] prefix = kb.getBytesForLookup();
    if (endtime == null) {
      // if end time is null, place no restriction on end time
      endtime = Long.MAX_VALUE;
    }
    // sanitize the fields parameter
    if (fields == null) {
      fields = EnumSet.allOf(Field.class);
    }
    // construct a first key that will be seeked to using end time or fromId
    long firstStartTime = Long.MAX_VALUE;
    byte[] first = null;
    if (fromId != null) {
      Long fromIdStartTime = getStartTimeLong(fromId, entityType);
      if (fromIdStartTime == null) {
        // no start time for provided id, so return empty entities
        return new TimelineEntities();
      }
      if (fromIdStartTime <= endtime) {
        // if provided id's start time falls before the end of the window,
        // use it to construct the seek key
        firstStartTime = fromIdStartTime;
        first = kb.add(writeReverseOrderedLong(fromIdStartTime)).add(fromId)
            .getBytesForLookup();
      }
    }
    // if seek key wasn't constructed using fromId, construct it using end ts
    if (first == null) {
      firstStartTime = endtime;
      first = kb.add(writeReverseOrderedLong(endtime)).getBytesForLookup();
    }
    byte[] last = null;
    if (starttime != null) {
      // if start time is not null, set a last key that will not be
      // iterated past
      last = KeyBuilder.newInstance().add(base).add(entityType)
          .add(writeReverseOrderedLong(starttime)).getBytesForLookup();
    }
    if (limit == null) {
      // if limit is not specified, use the default
      limit = DEFAULT_LIMIT;
    }
    TimelineEntities entities = new TimelineEntities();
    RollingLevelDB rollingdb = null;
    if (usingPrimaryFilter) {
      rollingdb = indexdb;
    } else {
      rollingdb = entitydb;
    }
    DB db = rollingdb.getDBForStartTime(firstStartTime);
    while (entities.getEntities().size() < limit && db != null) {
      iterator = db.iterator();
      iterator.seek(first);
      // iterate until one of the following conditions is met: the limit is
      // reached, there are no more keys, the key prefix no longer matches,
      // or a start time has been specified and reached/exceeded
      while (entities.getEntities().size() < limit && iterator.hasNext()) {
        byte[] key = iterator.peekNext().getKey();
        if (!prefixMatches(prefix, prefix.length, key)
            || (last != null && WritableComparator.compareBytes(
                key, 0, key.length, last, 0, last.length) > 0)) {
          break;
        }
        // read the start time and entity id from the current key
        KeyParser kp = new KeyParser(key, prefix.length);
        Long startTime = kp.getNextLong();
        String entityId = kp.getNextString();
        if (fromTs != null) {
          long insertTime =
              readReverseOrderedLong(iterator.peekNext().getValue(), 0);
          if (insertTime > fromTs) {
            // the entity was inserted after fromTs, so skip past all of the
            // keys belonging to this entity
            byte[] firstKey = key;
            while (iterator.hasNext()) {
              key = iterator.peekNext().getKey();
              iterator.next();
              if (!prefixMatches(firstKey, kp.getOffset(), key)) {
                break;
              }
            }
            continue;
          }
        }
        // even if other info and primary filter fields are not included, we
        // still need to load them to match secondary filters when they are
        // non-empty
        EnumSet<Field> queryFields = EnumSet.copyOf(fields);
        boolean addPrimaryFilters = false;
        boolean addOtherInfo = false;
        if (secondaryFilters != null && secondaryFilters.size() > 0) {
          if (!queryFields.contains(Field.PRIMARY_FILTERS)) {
            queryFields.add(Field.PRIMARY_FILTERS);
            addPrimaryFilters = true;
          }
          if (!queryFields.contains(Field.OTHER_INFO)) {
            queryFields.add(Field.OTHER_INFO);
            addOtherInfo = true;
          }
        }
        // parse the entity that owns this key, iterating over all keys for
        // the entity
        TimelineEntity entity = null;
        if (usingPrimaryFilter) {
          entity = getEntity(entityId, entityType, queryFields);
          iterator.next();
        } else {
          entity = getEntity(entityId, entityType, startTime, queryFields,
              iterator, key, kp.getOffset());
        }
        // determine if the retrieved entity matches the provided secondary
        // filters, and if so add it to the list of entities to return
        boolean filterPassed = true;
        if (secondaryFilters != null) {
          for (NameValuePair filter : secondaryFilters) {
            Object v = entity.getOtherInfo().get(filter.getName());
            if (v == null) {
              Set<Object> vs = entity.getPrimaryFilters().get(filter.getName());
              if (vs == null || !vs.contains(filter.getValue())) {
                filterPassed = false;
                break;
              }
            } else if (!v.equals(filter.getValue())) {
              filterPassed = false;
              break;
            }
          }
        }
        if (filterPassed) {
          if (entity.getDomainId() == null) {
            entity.setDomainId(DEFAULT_DOMAIN_ID);
          }
          if (checkAcl == null || checkAcl.check(entity)) {
            // remove the primary filters and other info if they were only
            // loaded above to match the secondary filters
            if (addPrimaryFilters) {
              entity.setPrimaryFilters(null);
            }
            if (addOtherInfo) {
              entity.setOtherInfo(null);
            }
            entities.addEntity(entity);
          }
        }
      }
      db = rollingdb.getPreviousDB(db);
    }
    return entities;
  } finally {
    IOUtils.cleanup(LOG, iterator);
  }
}
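The seek logic above relies on one trick: start times are embedded in the keys with writeReverseOrderedLong, so a plain ascending LevelDB scan visits newer start times first. The variable first (built from endtime or fromId) is therefore the newest admissible key, and last (built from starttime) bounds the oldest. A sketch of the encoding idea, assuming only a comparator that orders keys as unsigned bytes; it is intended to be equivalent to, not copied from, GenericObjectMapper.writeReverseOrderedLong:

  // Flip the sign bit so signed order agrees with unsigned byte order, then
  // invert every bit so larger (newer) values sort lexicographically first.
  static byte[] reverseOrderedLong(long l) {
    long flipped = ~(l ^ Long.MIN_VALUE);
    byte[] b = new byte[8];
    for (int i = 0; i < 8; i++) {
      b[i] = (byte) (flipped >>> (56 - 8 * i));
    }
    return b;
  }

Iterating from first and breaking once a key compares greater than last thus walks the window from the newest start time down toward starttime, moving to the previous rolling database via getPreviousDB when one is exhausted.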
Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEntities in project hadoop by apache.
The class TimelineDataManager, method doPostEntities:
private TimelinePutResponse doPostEntities(TimelineEntities entities,
    UserGroupInformation callerUGI) throws YarnException, IOException {
  if (entities == null) {
    return new TimelinePutResponse();
  }
  metrics.incrPostEntitiesTotal(entities.getEntities().size());
  TimelineEntities entitiesToPut = new TimelineEntities();
  List<TimelinePutResponse.TimelinePutError> errors =
      new ArrayList<TimelinePutResponse.TimelinePutError>();
  for (TimelineEntity entity : entities.getEntities()) {
    // if the domain id is not specified, the entity will be put into
    // the default domain
    if (entity.getDomainId() == null || entity.getDomainId().length() == 0) {
      entity.setDomainId(DEFAULT_DOMAIN_ID);
    }
    if (entity.getEntityId() == null || entity.getEntityType() == null) {
      throw new BadRequestException(
          "Incomplete entity without entity id/type");
    }
    // check if there is an existing entity
    TimelineEntity existingEntity = null;
    try {
      existingEntity = store.getEntity(entity.getEntityId(),
          entity.getEntityType(), EnumSet.of(Field.PRIMARY_FILTERS));
      if (existingEntity != null) {
        addDefaultDomainIdIfAbsent(existingEntity);
        if (!existingEntity.getDomainId().equals(entity.getDomainId())) {
          throw new YarnException("The domain of the timeline entity "
              + "{ id: " + entity.getEntityId() + ", type: "
              + entity.getEntityType() + " } is not allowed to be changed from "
              + existingEntity.getDomainId() + " to " + entity.getDomainId());
        }
      }
      if (!timelineACLsManager.checkAccess(
          callerUGI, ApplicationAccessType.MODIFY_APP, entity)) {
        throw new YarnException(callerUGI
            + " is not allowed to put the timeline entity " + "{ id: "
            + entity.getEntityId() + ", type: " + entity.getEntityType()
            + " } into the domain " + entity.getDomainId() + ".");
      }
    } catch (Exception e) {
      // skip the entity if it cannot be validated, e.g. it already exists in
      // another domain or the caller lacks access; record an error instead
      LOG.warn("Skip the timeline entity: { id: " + entity.getEntityId()
          + ", type: " + entity.getEntityType() + " }", e);
      TimelinePutResponse.TimelinePutError error =
          new TimelinePutResponse.TimelinePutError();
      error.setEntityId(entity.getEntityId());
      error.setEntityType(entity.getEntityType());
      error.setErrorCode(TimelinePutResponse.TimelinePutError.ACCESS_DENIED);
      errors.add(error);
      continue;
    }
    entitiesToPut.addEntity(entity);
  }
  TimelinePutResponse response = store.put(entitiesToPut);
  // merge the errors recorded for the skipped entities into the store's
  // put response
  response.addErrors(errors);
  return response;
}
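The usual producer of these requests is the YARN timeline client, which POSTs entities to the server where they eventually land in doPostEntities. A hedged client-side sketch using the public TimelineClient API; the entity id and type values are illustrative:

  TimelineClient client = TimelineClient.createTimelineClient();
  client.init(new YarnConfiguration());
  client.start();
  try {
    TimelineEntity entity = new TimelineEntity();
    entity.setEntityId("entity-1");
    entity.setEntityType("MY_ENTITY_TYPE");
    entity.setStartTime(System.currentTimeMillis());
    // leaving the domain id unset means the server assigns DEFAULT_DOMAIN_ID,
    // as the method above shows
    TimelinePutResponse response = client.putEntities(entity);
    if (!response.getErrors().isEmpty()) {
      System.err.println("Rejected entities: " + response.getErrors());
    }
  } finally {
    client.stop();
  }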
Use of org.apache.hadoop.yarn.api.records.timeline.TimelineEntities in project hadoop by apache.
The class TimelineDataManager, method getEntities:
/**
 * Get the timeline entities that the given user has access to. The meaning
 * of each argument has been documented with
 * {@link TimelineReader#getEntities}.
 *
 * @see TimelineReader#getEntities
 */
public TimelineEntities getEntities(String entityType,
    NameValuePair primaryFilter, Collection<NameValuePair> secondaryFilter,
    Long windowStart, Long windowEnd, String fromId, Long fromTs, Long limit,
    EnumSet<Field> fields, UserGroupInformation callerUGI)
    throws YarnException, IOException {
  long startTime = Time.monotonicNow();
  metrics.incrGetEntitiesOps();
  try {
    TimelineEntities entities = doGetEntities(entityType, primaryFilter,
        secondaryFilter, windowStart, windowEnd, fromId, fromTs, limit,
        fields, callerUGI);
    metrics.incrGetEntitiesTotal(entities.getEntities().size());
    return entities;
  } finally {
    metrics.addGetEntitiesTime(Time.monotonicNow() - startTime);
  }
}
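A hedged usage sketch mirroring the call made by ApplicationHistoryManagerOnTimelineStore in the first example: fetch the 50 most recently started application entities visible to the current user (the limit and the choice of user are illustrative):

  TimelineEntities apps = timelineDataManager.getEntities(
      ApplicationMetricsConstants.ENTITY_TYPE,
      null,                         // no primary filter
      null,                         // no secondary filters
      null, null,                   // no start-time window
      null, null,                   // no fromId / fromTs paging
      50L,                          // limit
      EnumSet.allOf(Field.class),   // retrieve all fields
      UserGroupInformation.getCurrentUser());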