use of org.apache.hadoop.yarn.api.records.timeline.TimelineEntity in project hadoop by apache.
the class RollingLevelDBTimelineStore method getEntityByTime.
/**
 * Retrieves a list of entities satisfying given parameters.
 *
 * @param base
 *          A byte array prefix for the lookup
 * @param entityType
 *          The type of the entity
 * @param limit
 *          A limit on the number of entities to return
 * @param starttime
 *          The earliest entity start time to retrieve (exclusive)
 * @param endtime
 *          The latest entity start time to retrieve (inclusive)
 * @param fromId
 *          Retrieve entities starting with this entity
 * @param fromTs
 *          Ignore entities with insert timestamp later than this ts
 * @param secondaryFilters
 *          Filter pairs that the entities should match
 * @param fields
 *          The set of fields to retrieve
 * @param checkAcl
 *          ACL check applied to each entity before it is added to the result
 * @param usingPrimaryFilter
 *          true if this query is using a primary filter
 * @return A list of entities
 * @throws IOException
 */
private TimelineEntities getEntityByTime(byte[] base, String entityType,
    Long limit, Long starttime, Long endtime, String fromId, Long fromTs,
    Collection<NameValuePair> secondaryFilters, EnumSet<Field> fields,
    CheckAcl checkAcl, boolean usingPrimaryFilter) throws IOException {
  DBIterator iterator = null;
  try {
    KeyBuilder kb = KeyBuilder.newInstance().add(base).add(entityType);
    // only db keys matching the prefix (base + entity type) will be parsed
    byte[] prefix = kb.getBytesForLookup();
    if (endtime == null) {
      // if end time is null, place no restriction on end time
      endtime = Long.MAX_VALUE;
    }
    // Sanitize the fields parameter
    if (fields == null) {
      fields = EnumSet.allOf(Field.class);
    }
    // construct a first key that will be seeked to using end time or fromId
    long firstStartTime = Long.MAX_VALUE;
    byte[] first = null;
    if (fromId != null) {
      Long fromIdStartTime = getStartTimeLong(fromId, entityType);
      if (fromIdStartTime == null) {
        // no start time for provided id, so return empty entities
        return new TimelineEntities();
      }
      if (fromIdStartTime <= endtime) {
        // if provided id's start time falls before the end of the window,
        // use it to construct the seek key
        firstStartTime = fromIdStartTime;
        first = kb.add(writeReverseOrderedLong(fromIdStartTime)).add(fromId)
            .getBytesForLookup();
      }
    }
    // if seek key wasn't constructed using fromId, construct it using end ts
    if (first == null) {
      firstStartTime = endtime;
      first = kb.add(writeReverseOrderedLong(endtime)).getBytesForLookup();
    }
    byte[] last = null;
    if (starttime != null) {
      // if start time is not null, set a last key that will not be
      // iterated past
      last = KeyBuilder.newInstance().add(base).add(entityType)
          .add(writeReverseOrderedLong(starttime)).getBytesForLookup();
    }
    if (limit == null) {
      // if limit is not specified, use the default
      limit = DEFAULT_LIMIT;
    }
    TimelineEntities entities = new TimelineEntities();
    RollingLevelDB rollingdb = null;
    if (usingPrimaryFilter) {
      rollingdb = indexdb;
    } else {
      rollingdb = entitydb;
    }
    DB db = rollingdb.getDBForStartTime(firstStartTime);
    while (entities.getEntities().size() < limit && db != null) {
      iterator = db.iterator();
      iterator.seek(first);
      // iterate until the limit is reached, there are no more keys, the key
      // prefix no longer matches, or a start time has been specified and
      // reached/exceeded
      while (entities.getEntities().size() < limit && iterator.hasNext()) {
        byte[] key = iterator.peekNext().getKey();
        if (!prefixMatches(prefix, prefix.length, key)
            || (last != null && WritableComparator.compareBytes(key, 0,
                key.length, last, 0, last.length) > 0)) {
          break;
        }
        // read the start time and entity id from the current key
        KeyParser kp = new KeyParser(key, prefix.length);
        Long startTime = kp.getNextLong();
        String entityId = kp.getNextString();
        if (fromTs != null) {
          long insertTime =
              readReverseOrderedLong(iterator.peekNext().getValue(), 0);
          if (insertTime > fromTs) {
            byte[] firstKey = key;
            while (iterator.hasNext()) {
              key = iterator.peekNext().getKey();
              iterator.next();
              if (!prefixMatches(firstKey, kp.getOffset(), key)) {
                break;
              }
            }
            continue;
          }
        }
        // Even if other info and primary filter fields are not included, we
        // still need to load them to match secondary filters when they are
        // non-empty
        EnumSet<Field> queryFields = EnumSet.copyOf(fields);
        boolean addPrimaryFilters = false;
        boolean addOtherInfo = false;
        if (secondaryFilters != null && secondaryFilters.size() > 0) {
          if (!queryFields.contains(Field.PRIMARY_FILTERS)) {
            queryFields.add(Field.PRIMARY_FILTERS);
            addPrimaryFilters = true;
          }
          if (!queryFields.contains(Field.OTHER_INFO)) {
            queryFields.add(Field.OTHER_INFO);
            addOtherInfo = true;
          }
        }
        // parse the entity that owns this key, iterating over all keys for
        // the entity
        TimelineEntity entity = null;
        if (usingPrimaryFilter) {
          entity = getEntity(entityId, entityType, queryFields);
          iterator.next();
        } else {
          entity = getEntity(entityId, entityType, startTime, queryFields,
              iterator, key, kp.getOffset());
        }
        // determine if the retrieved entity matches the provided secondary
        // filters, and if so add it to the list of entities to return
        boolean filterPassed = true;
        if (secondaryFilters != null) {
          for (NameValuePair filter : secondaryFilters) {
            Object v = entity.getOtherInfo().get(filter.getName());
            if (v == null) {
              Set<Object> vs =
                  entity.getPrimaryFilters().get(filter.getName());
              if (vs == null || !vs.contains(filter.getValue())) {
                filterPassed = false;
                break;
              }
            } else if (!v.equals(filter.getValue())) {
              filterPassed = false;
              break;
            }
          }
        }
        if (filterPassed) {
          if (entity.getDomainId() == null) {
            entity.setDomainId(DEFAULT_DOMAIN_ID);
          }
          if (checkAcl == null || checkAcl.check(entity)) {
            // remove the primary filters and other info that were only
            // loaded for matching secondary filters
            if (addPrimaryFilters) {
              entity.setPrimaryFilters(null);
            }
            if (addOtherInfo) {
              entity.setOtherInfo(null);
            }
            entities.addEntity(entity);
          }
        }
      }
      db = rollingdb.getPreviousDB(db);
    }
    return entities;
  } finally {
    IOUtils.cleanup(LOG, iterator);
  }
}
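The seek-to-endtime, iterate-toward-starttime pattern above relies on the reverse-ordered encoding of the start-time component of each key. Below is a minimal sketch, not part of the Hadoop sources, assuming the GenericObjectMapper utility that the store imports statically; it shows that later timestamps encode to lexicographically smaller byte arrays, which is why a forward LevelDB iteration from the end-time key walks backwards in time until the window's lower bound is passed.

import static org.apache.hadoop.yarn.server.timeline.GenericObjectMapper.writeReverseOrderedLong;

import org.apache.hadoop.io.WritableComparator;

public class ReverseOrderDemo {
  public static void main(String[] args) {
    byte[] newer = writeReverseOrderedLong(2000L); // later start time
    byte[] older = writeReverseOrderedLong(1000L); // earlier start time
    // Larger longs encode to lexicographically smaller byte arrays, so the
    // newest entities come first in the LevelDB's ascending key order and the
    // loop can break once the reverse-ordered start-time key is exceeded.
    int cmp = WritableComparator.compareBytes(newer, 0, newer.length,
        older, 0, older.length);
    System.out.println(cmp < 0); // prints true
  }
}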
use of org.apache.hadoop.yarn.api.records.timeline.TimelineEntity in project hadoop by apache.
the class TimelineDataManager method doPostEntities.
private TimelinePutResponse doPostEntities(TimelineEntities entities,
    UserGroupInformation callerUGI) throws YarnException, IOException {
  if (entities == null) {
    return new TimelinePutResponse();
  }
  metrics.incrPostEntitiesTotal(entities.getEntities().size());
  TimelineEntities entitiesToPut = new TimelineEntities();
  List<TimelinePutResponse.TimelinePutError> errors =
      new ArrayList<TimelinePutResponse.TimelinePutError>();
  for (TimelineEntity entity : entities.getEntities()) {
    // if the domain id is not specified, the entity will be put into
    // the default domain
    if (entity.getDomainId() == null || entity.getDomainId().length() == 0) {
      entity.setDomainId(DEFAULT_DOMAIN_ID);
    }
    if (entity.getEntityId() == null || entity.getEntityType() == null) {
      throw new BadRequestException("Incomplete entity without entity"
          + " id/type");
    }
    // check if there is an existing entity
    TimelineEntity existingEntity = null;
    try {
      existingEntity = store.getEntity(entity.getEntityId(),
          entity.getEntityType(), EnumSet.of(Field.PRIMARY_FILTERS));
      if (existingEntity != null) {
        addDefaultDomainIdIfAbsent(existingEntity);
        if (!existingEntity.getDomainId().equals(entity.getDomainId())) {
          throw new YarnException("The domain of the timeline entity "
              + "{ id: " + entity.getEntityId() + ", type: "
              + entity.getEntityType()
              + " } is not allowed to be changed from "
              + existingEntity.getDomainId() + " to "
              + entity.getDomainId());
        }
      }
      if (!timelineACLsManager.checkAccess(callerUGI,
          ApplicationAccessType.MODIFY_APP, entity)) {
        throw new YarnException(callerUGI
            + " is not allowed to put the timeline entity " + "{ id: "
            + entity.getEntityId() + ", type: " + entity.getEntityType()
            + " } into the domain " + entity.getDomainId() + ".");
      }
    } catch (Exception e) {
      // Skip the entity which already exists and was put by others
      LOG.warn("Skip the timeline entity: { id: " + entity.getEntityId()
          + ", type: " + entity.getEntityType() + " }", e);
      TimelinePutResponse.TimelinePutError error =
          new TimelinePutResponse.TimelinePutError();
      error.setEntityId(entity.getEntityId());
      error.setEntityType(entity.getEntityType());
      error.setErrorCode(TimelinePutResponse.TimelinePutError.ACCESS_DENIED);
      errors.add(error);
      continue;
    }
    entitiesToPut.addEntity(entity);
  }
  TimelinePutResponse response = store.put(entitiesToPut);
  // add the errors of timeline system filter key conflict
  response.addErrors(errors);
  return response;
}
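For context, here is a hedged sketch of the producer side that ultimately reaches doPostEntities through the REST layer: an application builds a TimelineEntity and publishes it with TimelineClient. The entity id, type, filter and info values are made up for illustration, and a reachable v1 timeline service with default configuration is assumed; note that a missing id/type would trigger the BadRequestException above, while a missing domain id falls back to DEFAULT_DOMAIN_ID.

import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse;
import org.apache.hadoop.yarn.client.api.TimelineClient;
import org.apache.hadoop.yarn.conf.YarnConfiguration;

public class PostEntityExample {
  public static void main(String[] args) throws Exception {
    TimelineClient client = TimelineClient.createTimelineClient();
    client.init(new YarnConfiguration());
    client.start();
    try {
      TimelineEntity entity = new TimelineEntity();
      entity.setEntityId("job_0001");           // hypothetical id
      entity.setEntityType("MY_APP_JOB");       // hypothetical type
      entity.setStartTime(System.currentTimeMillis());
      entity.addPrimaryFilter("user", "alice"); // hypothetical filter
      entity.addOtherInfo("status", "RUNNING"); // hypothetical info
      TimelinePutResponse response = client.putEntities(entity);
      // per-entity failures (e.g. ACCESS_DENIED) come back as error entries
      // rather than exceptions
      System.out.println("errors: " + response.getErrors().size());
    } finally {
      client.stop();
    }
  }
}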
use of org.apache.hadoop.yarn.api.records.timeline.TimelineEntity in project hadoop by apache.
the class TimelineDataManager method doGetEntity.
private TimelineEntity doGetEntity(String entityType, String entityId,
    EnumSet<Field> fields, UserGroupInformation callerUGI)
    throws YarnException, IOException {
  TimelineEntity entity = null;
  entity = store.getEntity(entityId, entityType, fields);
  if (entity != null) {
    addDefaultDomainIdIfAbsent(entity);
    // check ACLs
    if (!timelineACLsManager.checkAccess(callerUGI,
        ApplicationAccessType.VIEW_APP, entity)) {
      entity = null;
    }
  }
  return entity;
}
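doGetEntity backs the public getEntity call on TimelineDataManager. A hedged sketch of a caller follows; the dataManager wiring and the id/type strings are assumptions, and passing null for fields would load every field, mirroring the sanitization in getEntityByTime above.

import java.util.EnumSet;

import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
import org.apache.hadoop.yarn.server.timeline.TimelineDataManager;
import org.apache.hadoop.yarn.server.timeline.TimelineReader.Field;

public class GetEntityExample {
  static TimelineEntity lookup(TimelineDataManager dataManager) throws Exception {
    TimelineEntity entity = dataManager.getEntity(
        "MY_APP_JOB",                                  // entity type (hypothetical)
        "job_0001",                                    // entity id (hypothetical)
        EnumSet.of(Field.PRIMARY_FILTERS, Field.OTHER_INFO),
        UserGroupInformation.getCurrentUser());
    // null means either the entity does not exist or the caller failed the
    // VIEW_APP ACL check performed in doGetEntity
    return entity;
  }
}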
use of org.apache.hadoop.yarn.api.records.timeline.TimelineEntity in project hadoop by apache.
the class TimelineWebServices method getEntity.
/**
* Return a single entity of the given entity type and Id.
*/
@GET
@Path("/{entityType}/{entityId}")
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8 })
public TimelineEntity getEntity(
    @Context HttpServletRequest req,
    @Context HttpServletResponse res,
    @PathParam("entityType") String entityType,
    @PathParam("entityId") String entityId,
    @QueryParam("fields") String fields) {
  init(res);
  TimelineEntity entity = null;
  try {
    entity = timelineDataManager.getEntity(parseStr(entityType),
        parseStr(entityId), parseFieldsStr(fields, ","), getUser(req));
  } catch (IllegalArgumentException e) {
    throw new BadRequestException(e);
  } catch (Exception e) {
    LOG.error("Error getting entity", e);
    throw new WebApplicationException(e,
        Response.Status.INTERNAL_SERVER_ERROR);
  }
  if (entity == null) {
    throw new NotFoundException("Timeline entity "
        + new EntityIdentifier(parseStr(entityId), parseStr(entityType))
        + " is not found");
  }
  return entity;
}
use of org.apache.hadoop.yarn.api.records.timeline.TimelineEntity in project hadoop by apache.
the class RollingLevelDBTimelineStore method put.
@Override
public TimelinePutResponse put(TimelineEntities entities) {
  if (LOG.isDebugEnabled()) {
    LOG.debug("Starting put");
  }
  TimelinePutResponse response = new TimelinePutResponse();
  TreeMap<Long, RollingWriteBatch> entityUpdates =
      new TreeMap<Long, RollingWriteBatch>();
  TreeMap<Long, RollingWriteBatch> indexUpdates =
      new TreeMap<Long, RollingWriteBatch>();
  long entityCount = 0;
  long indexCount = 0;
  try {
    for (TimelineEntity entity : entities.getEntities()) {
      entityCount += putEntities(entityUpdates, indexUpdates, entity, response);
    }
    for (RollingWriteBatch entityUpdate : entityUpdates.values()) {
      entityUpdate.write();
    }
    for (RollingWriteBatch indexUpdate : indexUpdates.values()) {
      indexUpdate.write();
    }
  } finally {
    for (RollingWriteBatch entityRollingWriteBatch : entityUpdates.values()) {
      entityRollingWriteBatch.close();
    }
    for (RollingWriteBatch indexRollingWriteBatch : indexUpdates.values()) {
      indexRollingWriteBatch.close();
    }
  }
  if (LOG.isDebugEnabled()) {
    LOG.debug("Put " + entityCount + " new leveldb entity entries and "
        + indexCount + " new leveldb index entries from "
        + entities.getEntities().size() + " timeline entities");
  }
  return response;
}
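A hedged sketch of driving put() directly against an already-initialized RollingLevelDBTimelineStore follows; the store wiring and the entity names are assumptions. As in the method above, failures are reported per entity in the TimelinePutResponse rather than thrown.

import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse;
import org.apache.hadoop.yarn.server.timeline.RollingLevelDBTimelineStore;

public class StorePutExample {
  static void writeOne(RollingLevelDBTimelineStore store) {
    TimelineEntity entity = new TimelineEntity();
    entity.setEntityType("MY_APP_JOB");           // hypothetical type
    entity.setEntityId("job_0002");               // hypothetical id
    entity.setStartTime(System.currentTimeMillis());
    TimelineEntities batch = new TimelineEntities();
    batch.addEntity(entity);
    TimelinePutResponse response = store.put(batch);
    for (TimelinePutResponse.TimelinePutError error : response.getErrors()) {
      System.err.println("failed entity " + error.getEntityId()
          + ", error code " + error.getErrorCode());
    }
  }
}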