Use of org.apache.hadoop.yarn.server.timeline.util.LeveldbUtils.KeyBuilder in project hadoop by apache.
In the class RollingLevelDBTimelineStore, the method getEntityTimelines:
@Override
public TimelineEvents getEntityTimelines(String entityType,
    SortedSet<String> entityIds, Long limit, Long windowStart,
    Long windowEnd, Set<String> eventType) throws IOException {
  TimelineEvents events = new TimelineEvents();
  if (entityIds == null || entityIds.isEmpty()) {
    return events;
  }
  // create a lexicographically-ordered map from start time to entities
  Map<byte[], List<EntityIdentifier>> startTimeMap =
      new TreeMap<byte[], List<EntityIdentifier>>(new Comparator<byte[]>() {
        @Override
        public int compare(byte[] o1, byte[] o2) {
          return WritableComparator.compareBytes(o1, 0, o1.length,
              o2, 0, o2.length);
        }
      });
  DBIterator iterator = null;
  try {
    // skip entities with no start time
    for (String entityId : entityIds) {
      byte[] startTime = getStartTime(entityId, entityType);
      if (startTime != null) {
        List<EntityIdentifier> entities = startTimeMap.get(startTime);
        if (entities == null) {
          entities = new ArrayList<EntityIdentifier>();
          startTimeMap.put(startTime, entities);
        }
        entities.add(new EntityIdentifier(entityId, entityType));
      }
    }
    for (Entry<byte[], List<EntityIdentifier>> entry :
        startTimeMap.entrySet()) {
      // look up the events matching the given parameters (limit,
      // start time, end time, event types) for entities whose start times
      // were found and add the entities to the return list
      byte[] revStartTime = entry.getKey();
      for (EntityIdentifier entityIdentifier : entry.getValue()) {
        EventsOfOneEntity entity = new EventsOfOneEntity();
        entity.setEntityId(entityIdentifier.getId());
        entity.setEntityType(entityType);
        events.addEvent(entity);
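        // per-entity event key prefix: entity type | reverse-ordered start
        // time | entity id | events column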
        KeyBuilder kb = KeyBuilder.newInstance().add(entityType)
            .add(revStartTime).add(entityIdentifier.getId())
            .add(EVENTS_COLUMN);
        byte[] prefix = kb.getBytesForLookup();
        if (windowEnd == null) {
          windowEnd = Long.MAX_VALUE;
        }
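        // events are keyed by reverse-ordered timestamps, so seeking to the
        // reversed window end positions the iterator at the newest event
        // inside the requested window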
        byte[] revts = writeReverseOrderedLong(windowEnd);
        kb.add(revts);
        byte[] first = kb.getBytesForLookup();
        byte[] last = null;
        if (windowStart != null) {
          last = KeyBuilder.newInstance().add(prefix)
              .add(writeReverseOrderedLong(windowStart)).getBytesForLookup();
        }
        if (limit == null) {
          limit = DEFAULT_LIMIT;
        }
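        // select the rolling leveldb instance that covers this entity's
        // start time; skip the entity if its instance has been rolled away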
        DB db = entitydb.getDBForStartTime(
            readReverseOrderedLong(revStartTime, 0));
        if (db == null) {
          continue;
        }
        iterator = db.iterator();
        for (iterator.seek(first); entity.getEvents().size() < limit
            && iterator.hasNext(); iterator.next()) {
          byte[] key = iterator.peekNext().getKey();
          if (!prefixMatches(prefix, prefix.length, key)
              || (last != null && WritableComparator.compareBytes(key, 0,
                  key.length, last, 0, last.length) > 0)) {
            break;
          }
          TimelineEvent event = getEntityEvent(eventType, key, prefix.length,
              iterator.peekNext().getValue());
          if (event != null) {
            entity.addEvent(event);
          }
        }
      }
    }
  } finally {
    IOUtils.cleanup(LOG, iterator);
  }
  return events;
}
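For reference, a minimal sketch of how the lookup keys seeked over above are composed. This is not taken from the Hadoop sources; the entity type, entity id, and timestamp are made-up values, and the same static imports (writeReverseOrderedLong, EVENTS_COLUMN) as in the method above are assumed.
// prefix shared by every event of one entity:
// entityType | reverse start time | entityId | EVENTS_COLUMN
byte[] revStartTime = writeReverseOrderedLong(1400000000000L);
byte[] prefix = KeyBuilder.newInstance()
    .add("YARN_APPLICATION").add(revStartTime)
    .add("application_0001").add(EVENTS_COLUMN)
    .getBytesForLookup();
// seek key: the prefix plus the reverse-ordered window end, so the scan
// starts at the newest event inside the window and moves backwards in time
byte[] first = KeyBuilder.newInstance()
    .add("YARN_APPLICATION").add(revStartTime)
    .add("application_0001").add(EVENTS_COLUMN)
    .add(writeReverseOrderedLong(Long.MAX_VALUE))
    .getBytesForLookup();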
Use of org.apache.hadoop.yarn.server.timeline.util.LeveldbUtils.KeyBuilder in project hadoop by apache.
In the class LeveldbTimelineStore, the method deleteNextEntity:
@VisibleForTesting
boolean deleteNextEntity(String entityType, byte[] reverseTimestamp,
    LeveldbIterator iterator, LeveldbIterator pfIterator, boolean seeked)
    throws IOException {
  WriteBatch writeBatch = null;
  try {
    KeyBuilder kb = KeyBuilder.newInstance().add(ENTITY_ENTRY_PREFIX)
        .add(entityType);
    byte[] typePrefix = kb.getBytesForLookup();
    kb.add(reverseTimestamp);
    if (!seeked) {
      iterator.seek(kb.getBytesForLookup());
    }
    if (!iterator.hasNext()) {
      return false;
    }
    byte[] entityKey = iterator.peekNext().getKey();
    if (!prefixMatches(typePrefix, typePrefix.length, entityKey)) {
      return false;
    }
    // read the start time and entity id from the current key
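    // the 8 bytes after the type prefix hold the reverse-ordered start time;
    // skip past them and parse the entity id that follows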
    KeyParser kp = new KeyParser(entityKey, typePrefix.length + 8);
    String entityId = kp.getNextString();
    int prefixlen = kp.getOffset();
    byte[] deletePrefix = new byte[prefixlen];
    System.arraycopy(entityKey, 0, deletePrefix, 0, prefixlen);
    writeBatch = db.createWriteBatch();
    if (LOG.isDebugEnabled()) {
      LOG.debug("Deleting entity type:" + entityType + " id:" + entityId);
    }
    // remove start time from cache and db
    writeBatch.delete(createStartTimeLookupKey(entityId, entityType));
    EntityIdentifier entityIdentifier =
        new EntityIdentifier(entityId, entityType);
    startTimeReadCache.remove(entityIdentifier);
    startTimeWriteCache.remove(entityIdentifier);
    // delete current entity
    for (; iterator.hasNext(); iterator.next()) {
      byte[] key = iterator.peekNext().getKey();
      if (!prefixMatches(entityKey, prefixlen, key)) {
        break;
      }
      writeBatch.delete(key);
      if (key.length == prefixlen) {
        continue;
      }
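      // the byte following the entity prefix identifies the column; for
      // primary filter and related entity columns, also remove the index
      // entries that point back at this entity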
      if (key[prefixlen] == PRIMARY_FILTERS_COLUMN[0]) {
        kp = new KeyParser(key, prefixlen + PRIMARY_FILTERS_COLUMN.length);
        String name = kp.getNextString();
        Object value = GenericObjectMapper.read(key, kp.getOffset());
        deleteKeysWithPrefix(writeBatch,
            addPrimaryFilterToKey(name, value, deletePrefix), pfIterator);
        if (LOG.isDebugEnabled()) {
          LOG.debug("Deleting entity type:" + entityType + " id:" + entityId
              + " primary filter entry " + name + " " + value);
        }
      } else if (key[prefixlen] == RELATED_ENTITIES_COLUMN[0]) {
        kp = new KeyParser(key, prefixlen + RELATED_ENTITIES_COLUMN.length);
        String type = kp.getNextString();
        String id = kp.getNextString();
        byte[] relatedEntityStartTime = getStartTime(id, type);
        if (relatedEntityStartTime == null) {
          LOG.warn("Found no start time for " + "related entity " + id
              + " of type " + type + " while " + "deleting " + entityId
              + " of type " + entityType);
          continue;
        }
        writeBatch.delete(createReverseRelatedEntityKey(id, type,
            relatedEntityStartTime, entityId, entityType));
        if (LOG.isDebugEnabled()) {
          LOG.debug("Deleting entity type:" + entityType + " id:" + entityId
              + " from invisible reverse related entity " + "entry of type:"
              + type + " id:" + id);
        }
      } else if (key[prefixlen]
          == INVISIBLE_REVERSE_RELATED_ENTITIES_COLUMN[0]) {
        kp = new KeyParser(key,
            prefixlen + INVISIBLE_REVERSE_RELATED_ENTITIES_COLUMN.length);
        String type = kp.getNextString();
        String id = kp.getNextString();
        byte[] relatedEntityStartTime = getStartTime(id, type);
        if (relatedEntityStartTime == null) {
          LOG.warn("Found no start time for reverse " + "related entity " + id
              + " of type " + type + " while " + "deleting " + entityId
              + " of type " + entityType);
          continue;
        }
        writeBatch.delete(createRelatedEntityKey(id, type,
            relatedEntityStartTime, entityId, entityType));
        if (LOG.isDebugEnabled()) {
          LOG.debug("Deleting entity type:" + entityType + " id:" + entityId
              + " from related entity entry of type:" + type + " id:" + id);
        }
      }
    }
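    // commit every delete in the batch atomically, with a synchronous write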
    WriteOptions writeOptions = new WriteOptions();
    writeOptions.sync(true);
    db.write(writeBatch, writeOptions);
    return true;
  } catch (DBException e) {
    throw new IOException(e);
  } finally {
    IOUtils.cleanup(LOG, writeBatch);
  }
}
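A hypothetical driver loop for this method (the actual caller in LeveldbTimelineStore is not shown here; entityType, db, and retentionThresholdMs are assumed to be in scope), illustrating the seeked flag contract:
// delete all entities of one type whose start time is at or before a
// retention threshold; the reverse-ordered timestamp makes the first call
// seek to the newest entity that is already older than the threshold
byte[] reverseTimestamp = writeReverseOrderedLong(retentionThresholdMs);
LeveldbIterator iterator = new LeveldbIterator(db);
LeveldbIterator pfIterator = new LeveldbIterator(db);
boolean seeked = false;
while (deleteNextEntity(entityType, reverseTimestamp, iterator, pfIterator,
    seeked)) {
  seeked = true; // later calls continue from the iterator's current position
}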
Use of org.apache.hadoop.yarn.server.timeline.util.LeveldbUtils.KeyBuilder in project hadoop by apache.
In the class LeveldbTimelineStore, the method getEntityTimelines:
@Override
public TimelineEvents getEntityTimelines(String entityType,
    SortedSet<String> entityIds, Long limit, Long windowStart,
    Long windowEnd, Set<String> eventType) throws IOException {
  TimelineEvents events = new TimelineEvents();
  if (entityIds == null || entityIds.isEmpty()) {
    return events;
  }
  // create a lexicographically-ordered map from start time to entities
  Map<byte[], List<EntityIdentifier>> startTimeMap =
      new TreeMap<byte[], List<EntityIdentifier>>(new Comparator<byte[]>() {
        @Override
        public int compare(byte[] o1, byte[] o2) {
          return WritableComparator.compareBytes(o1, 0, o1.length,
              o2, 0, o2.length);
        }
      });
  LeveldbIterator iterator = null;
  try {
    // skip entities with no start time
    for (String entityId : entityIds) {
      byte[] startTime = getStartTime(entityId, entityType);
      if (startTime != null) {
        List<EntityIdentifier> entities = startTimeMap.get(startTime);
        if (entities == null) {
          entities = new ArrayList<EntityIdentifier>();
          startTimeMap.put(startTime, entities);
        }
        entities.add(new EntityIdentifier(entityId, entityType));
      }
    }
    for (Entry<byte[], List<EntityIdentifier>> entry :
        startTimeMap.entrySet()) {
      // look up the events matching the given parameters (limit,
      // start time, end time, event types) for entities whose start times
      // were found and add the entities to the return list
      byte[] revStartTime = entry.getKey();
      for (EntityIdentifier entityIdentifier : entry.getValue()) {
        EventsOfOneEntity entity = new EventsOfOneEntity();
        entity.setEntityId(entityIdentifier.getId());
        entity.setEntityType(entityType);
        events.addEvent(entity);
        KeyBuilder kb = KeyBuilder.newInstance().add(ENTITY_ENTRY_PREFIX)
            .add(entityType).add(revStartTime)
            .add(entityIdentifier.getId()).add(EVENTS_COLUMN);
        byte[] prefix = kb.getBytesForLookup();
        if (windowEnd == null) {
          windowEnd = Long.MAX_VALUE;
        }
        byte[] revts = writeReverseOrderedLong(windowEnd);
        kb.add(revts);
        byte[] first = kb.getBytesForLookup();
        byte[] last = null;
        if (windowStart != null) {
          last = KeyBuilder.newInstance().add(prefix)
              .add(writeReverseOrderedLong(windowStart)).getBytesForLookup();
        }
        if (limit == null) {
          limit = DEFAULT_LIMIT;
        }
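        // unlike the rolling store above, all entities live in a single
        // leveldb instance, so one iterator over db is sufficient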
        iterator = new LeveldbIterator(db);
        for (iterator.seek(first); entity.getEvents().size() < limit
            && iterator.hasNext(); iterator.next()) {
          byte[] key = iterator.peekNext().getKey();
          if (!prefixMatches(prefix, prefix.length, key)
              || (last != null && WritableComparator.compareBytes(key, 0,
                  key.length, last, 0, last.length) > 0)) {
            break;
          }
          TimelineEvent event = getEntityEvent(eventType, key, prefix.length,
              iterator.peekNext().getValue());
          if (event != null) {
            entity.addEvent(event);
          }
        }
      }
    }
  } catch (DBException e) {
    throw new IOException(e);
  } finally {
    IOUtils.cleanup(LOG, iterator);
  }
  return events;
}
Use of org.apache.hadoop.yarn.server.timeline.util.LeveldbUtils.KeyBuilder in project hadoop by apache.
In the class LeveldbTimelineStore, the method getEntityByTime:
/**
* Retrieves a list of entities satisfying given parameters.
*
* @param base A byte array prefix for the lookup
* @param entityType The type of the entity
* @param limit A limit on the number of entities to return
* @param starttime The earliest entity start time to retrieve (exclusive)
* @param endtime The latest entity start time to retrieve (inclusive)
* @param fromId Retrieve entities starting with this entity
* @param fromTs Ignore entities with insert timestamp later than this ts
* @param secondaryFilters Filter pairs that the entities should match
* @param fields The set of fields to retrieve
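* @param checkAcl ACL check applied to each matching entity before it is added to the result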
* @return A list of entities
* @throws IOException
*/
private TimelineEntities getEntityByTime(byte[] base, String entityType,
    Long limit, Long starttime, Long endtime, String fromId, Long fromTs,
    Collection<NameValuePair> secondaryFilters, EnumSet<Field> fields,
    CheckAcl checkAcl) throws IOException {
  // even if other info and primary filter fields are not included, we
  // still need to load them to match secondary filters when they are
  // non-empty
  if (fields == null) {
    fields = EnumSet.allOf(Field.class);
  }
  boolean addPrimaryFilters = false;
  boolean addOtherInfo = false;
  if (secondaryFilters != null && secondaryFilters.size() > 0) {
    if (!fields.contains(Field.PRIMARY_FILTERS)) {
      fields.add(Field.PRIMARY_FILTERS);
      addPrimaryFilters = true;
    }
    if (!fields.contains(Field.OTHER_INFO)) {
      fields.add(Field.OTHER_INFO);
      addOtherInfo = true;
    }
  }
  LeveldbIterator iterator = null;
  try {
    KeyBuilder kb = KeyBuilder.newInstance().add(base).add(entityType);
    // only db keys matching the prefix (base + entity type) will be parsed
    byte[] prefix = kb.getBytesForLookup();
    if (endtime == null) {
      // if end time is null, place no restriction on end time
      endtime = Long.MAX_VALUE;
    }
    // construct a first key that will be seeked to using end time or fromId
    byte[] first = null;
    if (fromId != null) {
      Long fromIdStartTime = getStartTimeLong(fromId, entityType);
      if (fromIdStartTime == null) {
        // no start time for provided id, so return empty entities
        return new TimelineEntities();
      }
      if (fromIdStartTime <= endtime) {
        // if provided id's start time falls before the end of the window,
        // use it to construct the seek key
        first = kb.add(writeReverseOrderedLong(fromIdStartTime)).add(fromId)
            .getBytesForLookup();
      }
    }
    // if seek key wasn't constructed using fromId, construct it using end ts
    if (first == null) {
      first = kb.add(writeReverseOrderedLong(endtime)).getBytesForLookup();
    }
    byte[] last = null;
    if (starttime != null) {
      // if start time is not null, set a last key that will not be
      // iterated past
      last = KeyBuilder.newInstance().add(base).add(entityType)
          .add(writeReverseOrderedLong(starttime)).getBytesForLookup();
    }
    if (limit == null) {
      // if limit is not specified, use the default
      limit = DEFAULT_LIMIT;
    }
    TimelineEntities entities = new TimelineEntities();
    iterator = new LeveldbIterator(db);
    iterator.seek(first);
    // iterate until one of the following conditions is satisfied: the limit
    // is reached, there are no more keys, the prefix no longer matches, or a
    // start time has been specified and reached/exceeded
    while (entities.getEntities().size() < limit && iterator.hasNext()) {
      byte[] key = iterator.peekNext().getKey();
      if (!prefixMatches(prefix, prefix.length, key)
          || (last != null && WritableComparator.compareBytes(key, 0,
              key.length, last, 0, last.length) > 0)) {
        break;
      }
      // read the start time and entity id from the current key
      KeyParser kp = new KeyParser(key, prefix.length);
      Long startTime = kp.getNextLong();
      String entityId = kp.getNextString();
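      // the value stored at the entity marker key is the reverse-ordered
      // insert time; entities inserted after fromTs are skipped, and the
      // iterator is fast-forwarded past all of their keys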
      if (fromTs != null) {
        long insertTime =
            readReverseOrderedLong(iterator.peekNext().getValue(), 0);
        if (insertTime > fromTs) {
          byte[] firstKey = key;
          while (iterator.hasNext()
              && prefixMatches(firstKey, kp.getOffset(), key)) {
            iterator.next();
            key = iterator.peekNext().getKey();
          }
          continue;
        }
      }
      // parse the entity that owns this key, iterating over all keys for
      // the entity
      TimelineEntity entity = getEntity(entityId, entityType, startTime,
          fields, iterator, key, kp.getOffset());
      // determine if the retrieved entity matches the provided secondary
      // filters, and if so add it to the list of entities to return
      boolean filterPassed = true;
      if (secondaryFilters != null) {
        for (NameValuePair filter : secondaryFilters) {
          Object v = entity.getOtherInfo().get(filter.getName());
          if (v == null) {
            Set<Object> vs = entity.getPrimaryFilters().get(filter.getName());
            if (vs == null || !vs.contains(filter.getValue())) {
              filterPassed = false;
              break;
            }
          } else if (!v.equals(filter.getValue())) {
            filterPassed = false;
            break;
          }
        }
      }
      if (filterPassed) {
        if (entity.getDomainId() == null) {
          entity.setDomainId(DEFAULT_DOMAIN_ID);
        }
        if (checkAcl == null || checkAcl.check(entity)) {
          // remove the primary filter and other info fields that were added
          // only for matching secondary filters
          if (addPrimaryFilters) {
            entity.setPrimaryFilters(null);
          }
          if (addOtherInfo) {
            entity.setOtherInfo(null);
          }
          entities.addEntity(entity);
        }
      }
    }
    return entities;
  } catch (DBException e) {
    throw new IOException(e);
  } finally {
    IOUtils.cleanup(LOG, iterator);
  }
}
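Both getEntityByTime implementations (this one and the rolling variant below) rely on the same ordering trick. A minimal sketch of that property, assuming the statically imported writeReverseOrderedLong helper used above and made-up timestamp values:
// reverse-ordered longs sort newest-first: a larger timestamp produces a
// byte array that compares as lexicographically smaller, which is why
// seeking to the reversed end time starts the scan at the most recent entry
// in the window and why the reversed start time can serve as the last key
byte[] newer = writeReverseOrderedLong(2000L);
byte[] older = writeReverseOrderedLong(1000L);
boolean newestFirst = WritableComparator.compareBytes(
    newer, 0, newer.length, older, 0, older.length) < 0; // true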
Use of org.apache.hadoop.yarn.server.timeline.util.LeveldbUtils.KeyBuilder in project hadoop by apache.
In the class RollingLevelDBTimelineStore, the method getEntityByTime:
/**
* Retrieves a list of entities satisfying given parameters.
*
* @param base
* A byte array prefix for the lookup
* @param entityType
* The type of the entity
* @param limit
* A limit on the number of entities to return
* @param starttime
* The earliest entity start time to retrieve (exclusive)
* @param endtime
* The latest entity start time to retrieve (inclusive)
* @param fromId
* Retrieve entities starting with this entity
* @param fromTs
* Ignore entities with insert timestamp later than this ts
* @param secondaryFilters
* Filter pairs that the entities should match
* @param fields
* The set of fields to retrieve
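* @param checkAcl
* ACL check applied to each matching entity before it is added to the result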
* @param usingPrimaryFilter
* true if this query is using a primary filter
* @return A list of entities
* @throws IOException
*/
private TimelineEntities getEntityByTime(byte[] base, String entityType,
    Long limit, Long starttime, Long endtime, String fromId, Long fromTs,
    Collection<NameValuePair> secondaryFilters, EnumSet<Field> fields,
    CheckAcl checkAcl, boolean usingPrimaryFilter) throws IOException {
  DBIterator iterator = null;
  try {
    KeyBuilder kb = KeyBuilder.newInstance().add(base).add(entityType);
    // only db keys matching the prefix (base + entity type) will be parsed
    byte[] prefix = kb.getBytesForLookup();
    if (endtime == null) {
      // if end time is null, place no restriction on end time
      endtime = Long.MAX_VALUE;
    }
    // Sanitize the fields parameter
    if (fields == null) {
      fields = EnumSet.allOf(Field.class);
    }
    // construct a first key that will be seeked to using end time or fromId
    long firstStartTime = Long.MAX_VALUE;
    byte[] first = null;
    if (fromId != null) {
      Long fromIdStartTime = getStartTimeLong(fromId, entityType);
      if (fromIdStartTime == null) {
        // no start time for provided id, so return empty entities
        return new TimelineEntities();
      }
      if (fromIdStartTime <= endtime) {
        // if provided id's start time falls before the end of the window,
        // use it to construct the seek key
        firstStartTime = fromIdStartTime;
        first = kb.add(writeReverseOrderedLong(fromIdStartTime)).add(fromId)
            .getBytesForLookup();
      }
    }
    // if seek key wasn't constructed using fromId, construct it using end ts
    if (first == null) {
      firstStartTime = endtime;
      first = kb.add(writeReverseOrderedLong(endtime)).getBytesForLookup();
    }
    byte[] last = null;
    if (starttime != null) {
      // if start time is not null, set a last key that will not be
      // iterated past
      last = KeyBuilder.newInstance().add(base).add(entityType)
          .add(writeReverseOrderedLong(starttime)).getBytesForLookup();
    }
    if (limit == null) {
      // if limit is not specified, use the default
      limit = DEFAULT_LIMIT;
    }
    TimelineEntities entities = new TimelineEntities();
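    // primary-filter queries scan the index db family; plain entity queries
    // scan the entity db family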
    RollingLevelDB rollingdb = null;
    if (usingPrimaryFilter) {
      rollingdb = indexdb;
    } else {
      rollingdb = entitydb;
    }
    DB db = rollingdb.getDBForStartTime(firstStartTime);
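    // walk backwards through successively older rolling db instances until
    // enough entities have been collected or no older instance remains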
    while (entities.getEntities().size() < limit && db != null) {
      iterator = db.iterator();
      iterator.seek(first);
      // iterate until one of the following conditions is satisfied: the
      // limit is reached, there are no more keys, the prefix no longer
      // matches, or a start time has been specified and reached/exceeded
      while (entities.getEntities().size() < limit && iterator.hasNext()) {
        byte[] key = iterator.peekNext().getKey();
        if (!prefixMatches(prefix, prefix.length, key)
            || (last != null && WritableComparator.compareBytes(key, 0,
                key.length, last, 0, last.length) > 0)) {
          break;
        }
        // read the start time and entity id from the current key
        KeyParser kp = new KeyParser(key, prefix.length);
        Long startTime = kp.getNextLong();
        String entityId = kp.getNextString();
        if (fromTs != null) {
          long insertTime =
              readReverseOrderedLong(iterator.peekNext().getValue(), 0);
          if (insertTime > fromTs) {
            byte[] firstKey = key;
            while (iterator.hasNext()) {
              key = iterator.peekNext().getKey();
              iterator.next();
              if (!prefixMatches(firstKey, kp.getOffset(), key)) {
                break;
              }
            }
            continue;
          }
        }
        // Even if other info and primary filter fields are not included, we
        // still need to load them to match secondary filters when they are
        // non-empty
        EnumSet<Field> queryFields = EnumSet.copyOf(fields);
        boolean addPrimaryFilters = false;
        boolean addOtherInfo = false;
        if (secondaryFilters != null && secondaryFilters.size() > 0) {
          if (!queryFields.contains(Field.PRIMARY_FILTERS)) {
            queryFields.add(Field.PRIMARY_FILTERS);
            addPrimaryFilters = true;
          }
          if (!queryFields.contains(Field.OTHER_INFO)) {
            queryFields.add(Field.OTHER_INFO);
            addOtherInfo = true;
          }
        }
        // parse the entity that owns this key, iterating over all keys for
        // the entity
        TimelineEntity entity = null;
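        // index db entries only record that the entity matched the primary
        // filter, so the full entity is loaded separately; entity db keys
        // are parsed in place by the iterating getEntity variant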
        if (usingPrimaryFilter) {
          entity = getEntity(entityId, entityType, queryFields);
          iterator.next();
        } else {
          entity = getEntity(entityId, entityType, startTime, queryFields,
              iterator, key, kp.getOffset());
        }
        // determine if the retrieved entity matches the provided secondary
        // filters, and if so add it to the list of entities to return
        boolean filterPassed = true;
        if (secondaryFilters != null) {
          for (NameValuePair filter : secondaryFilters) {
            Object v = entity.getOtherInfo().get(filter.getName());
            if (v == null) {
              Set<Object> vs =
                  entity.getPrimaryFilters().get(filter.getName());
              if (vs == null || !vs.contains(filter.getValue())) {
                filterPassed = false;
                break;
              }
            } else if (!v.equals(filter.getValue())) {
              filterPassed = false;
              break;
            }
          }
        }
        if (filterPassed) {
          if (entity.getDomainId() == null) {
            entity.setDomainId(DEFAULT_DOMAIN_ID);
          }
          if (checkAcl == null || checkAcl.check(entity)) {
            // remove the primary filter and other info fields that were
            // added only for matching secondary filters
            if (addPrimaryFilters) {
              entity.setPrimaryFilters(null);
            }
            if (addOtherInfo) {
              entity.setOtherInfo(null);
            }
            entities.addEntity(entity);
          }
        }
      }
      db = rollingdb.getPreviousDB(db);
    }
    return entities;
  } finally {
    IOUtils.cleanup(LOG, iterator);
  }
}