
Example 1 with KeyParser

Use of org.apache.hadoop.yarn.server.timeline.util.LeveldbUtils.KeyParser in project hadoop by apache.

From the class RollingLevelDBTimelineStore, method addPrimaryFilter:

/**
   * Parses the primary filter from the given key at the given offset and adds
   * it to the given entity.
   */
private static void addPrimaryFilter(TimelineEntity entity, byte[] key, int offset) throws IOException {
    KeyParser kp = new KeyParser(key, offset);
    String name = kp.getNextString();
    byte[] bytes = kp.getRemainingBytes();
    Object value = fstConf.asObject(bytes);
    entity.addPrimaryFilter(name, value);
}
Also used: KeyParser (org.apache.hadoop.yarn.server.timeline.util.LeveldbUtils.KeyParser)
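
To see the key layout end to end, here is a minimal round-trip sketch. It assumes KeyBuilder/KeyParser behave as in the examples on this page, and it substitutes GenericObjectMapper for the FST serializer (fstConf) that RollingLevelDBTimelineStore actually uses, so the value encoding differs from the real store:

import java.io.IOException;

import org.apache.hadoop.yarn.server.timeline.GenericObjectMapper;
import org.apache.hadoop.yarn.server.timeline.util.LeveldbUtils.KeyBuilder;
import org.apache.hadoop.yarn.server.timeline.util.LeveldbUtils.KeyParser;

public class PrimaryFilterKeyDemo {

    public static void main(String[] args) throws IOException {
        // build a primary-filter-style key: a separator-terminated name
        // followed by the serialized value as raw trailing bytes
        byte[] key = KeyBuilder.newInstance()
            .add("user")                              // filter name
            .add(GenericObjectMapper.write("alice"))  // filter value bytes
            .getBytesForLookup();
        // decode from offset 0, mirroring addPrimaryFilter above
        KeyParser kp = new KeyParser(key, 0);
        String name = kp.getNextString();
        Object value = GenericObjectMapper.read(kp.getRemainingBytes());
        // prints: user = alice
        System.out.println(name + " = " + value);
    }
}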

Example 2 with KeyParser

Use of org.apache.hadoop.yarn.server.timeline.util.LeveldbUtils.KeyParser in project hadoop by apache.

From the class LeveldbTimelineStore, method deleteNextEntity:

@VisibleForTesting
boolean deleteNextEntity(String entityType, byte[] reverseTimestamp, LeveldbIterator iterator, LeveldbIterator pfIterator, boolean seeked) throws IOException {
    WriteBatch writeBatch = null;
    try {
        KeyBuilder kb = KeyBuilder.newInstance().add(ENTITY_ENTRY_PREFIX).add(entityType);
        byte[] typePrefix = kb.getBytesForLookup();
        kb.add(reverseTimestamp);
        if (!seeked) {
            iterator.seek(kb.getBytesForLookup());
        }
        if (!iterator.hasNext()) {
            return false;
        }
        byte[] entityKey = iterator.peekNext().getKey();
        if (!prefixMatches(typePrefix, typePrefix.length, entityKey)) {
            return false;
        }
        // skip the 8-byte reverse-ordered start time and read the entity id
        // from the current key
        KeyParser kp = new KeyParser(entityKey, typePrefix.length + 8);
        String entityId = kp.getNextString();
        int prefixlen = kp.getOffset();
        byte[] deletePrefix = new byte[prefixlen];
        System.arraycopy(entityKey, 0, deletePrefix, 0, prefixlen);
        writeBatch = db.createWriteBatch();
        if (LOG.isDebugEnabled()) {
            LOG.debug("Deleting entity type:" + entityType + " id:" + entityId);
        }
        // remove start time from cache and db
        writeBatch.delete(createStartTimeLookupKey(entityId, entityType));
        EntityIdentifier entityIdentifier = new EntityIdentifier(entityId, entityType);
        startTimeReadCache.remove(entityIdentifier);
        startTimeWriteCache.remove(entityIdentifier);
        // delete current entity
        for (; iterator.hasNext(); iterator.next()) {
            byte[] key = iterator.peekNext().getKey();
            if (!prefixMatches(entityKey, prefixlen, key)) {
                break;
            }
            writeBatch.delete(key);
            if (key.length == prefixlen) {
                continue;
            }
            if (key[prefixlen] == PRIMARY_FILTERS_COLUMN[0]) {
                kp = new KeyParser(key, prefixlen + PRIMARY_FILTERS_COLUMN.length);
                String name = kp.getNextString();
                Object value = GenericObjectMapper.read(key, kp.getOffset());
                deleteKeysWithPrefix(writeBatch, addPrimaryFilterToKey(name, value, deletePrefix), pfIterator);
                if (LOG.isDebugEnabled()) {
                    LOG.debug("Deleting entity type:" + entityType + " id:" + entityId + " primary filter entry " + name + " " + value);
                }
            } else if (key[prefixlen] == RELATED_ENTITIES_COLUMN[0]) {
                kp = new KeyParser(key, prefixlen + RELATED_ENTITIES_COLUMN.length);
                String type = kp.getNextString();
                String id = kp.getNextString();
                byte[] relatedEntityStartTime = getStartTime(id, type);
                if (relatedEntityStartTime == null) {
                    LOG.warn("Found no start time for " + "related entity " + id + " of type " + type + " while " + "deleting " + entityId + " of type " + entityType);
                    continue;
                }
                writeBatch.delete(createReverseRelatedEntityKey(id, type, relatedEntityStartTime, entityId, entityType));
                if (LOG.isDebugEnabled()) {
                    LOG.debug("Deleting entity type:" + entityType + " id:" + entityId + " from invisible reverse related entity " + "entry of type:" + type + " id:" + id);
                }
            } else if (key[prefixlen] == INVISIBLE_REVERSE_RELATED_ENTITIES_COLUMN[0]) {
                kp = new KeyParser(key, prefixlen + INVISIBLE_REVERSE_RELATED_ENTITIES_COLUMN.length);
                String type = kp.getNextString();
                String id = kp.getNextString();
                byte[] relatedEntityStartTime = getStartTime(id, type);
                if (relatedEntityStartTime == null) {
                    LOG.warn("Found no start time for reverse " + "related entity " + id + " of type " + type + " while " + "deleting " + entityId + " of type " + entityType);
                    continue;
                }
                writeBatch.delete(createRelatedEntityKey(id, type, relatedEntityStartTime, entityId, entityType));
                if (LOG.isDebugEnabled()) {
                    LOG.debug("Deleting entity type:" + entityType + " id:" + entityId + " from related entity entry of type:" + type + " id:" + id);
                }
            }
        }
        WriteOptions writeOptions = new WriteOptions();
        writeOptions.sync(true);
        db.write(writeBatch, writeOptions);
        return true;
    } catch (DBException e) {
        throw new IOException(e);
    } finally {
        IOUtils.cleanup(LOG, writeBatch);
    }
}
Also used: KeyBuilder (org.apache.hadoop.yarn.server.timeline.util.LeveldbUtils.KeyBuilder), KeyParser (org.apache.hadoop.yarn.server.timeline.util.LeveldbUtils.KeyParser), IOException (java.io.IOException), VisibleForTesting (com.google.common.annotations.VisibleForTesting)
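
Because deleteNextEntity removes at most one entity per call and returns whether a matching entity was found, callers drive it in a loop. Below is a hypothetical driver, sketched as if it lived inside LeveldbTimelineStore so it can reach the package-private method; the helper's name and counter are invented for illustration, while db, LOG, LeveldbIterator, IOUtils.cleanup, and writeReverseOrderedLong come from the surrounding class:

// hypothetical helper inside LeveldbTimelineStore
long discardEntitiesOfType(String entityType, long retentionCutoffMillis)
        throws IOException {
    long deleted = 0;
    LeveldbIterator iterator = null;
    LeveldbIterator pfIterator = null;
    try {
        iterator = new LeveldbIterator(db);
        pfIterator = new LeveldbIterator(db);
        // entity keys embed reverse-ordered start times, so the cutoff must
        // be encoded the same way before seeking
        byte[] reverseTimestamp = writeReverseOrderedLong(retentionCutoffMillis);
        boolean seeked = false;
        // each successful call deletes one entity and leaves the iterator on
        // the next candidate, so only the first call needs to seek
        while (deleteNextEntity(entityType, reverseTimestamp, iterator,
                pfIterator, seeked)) {
            deleted++;
            seeked = true;
        }
    } finally {
        IOUtils.cleanup(LOG, iterator, pfIterator);
    }
    return deleted;
}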

Example 3 with KeyParser

Use of org.apache.hadoop.yarn.server.timeline.util.LeveldbUtils.KeyParser in project hadoop by apache.

From the class LeveldbTimelineStore, method getDomains:

@Override
public TimelineDomains getDomains(String owner) throws IOException {
    LeveldbIterator iterator = null;
    try {
        byte[] prefix = KeyBuilder.newInstance().add(OWNER_LOOKUP_PREFIX).add(owner).getBytesForLookup();
        List<TimelineDomain> domains = new ArrayList<TimelineDomain>();
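        // the for-init both creates the iterator and seeks it to the owner
        // prefix; getTimelineDomain advances the iterator, so there is no
        // update clause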
        for (iterator = new LeveldbIterator(db), iterator.seek(prefix); iterator.hasNext(); ) {
            byte[] key = iterator.peekNext().getKey();
            if (!prefixMatches(prefix, prefix.length, key)) {
                break;
            }
            // parse the domain id from the current key
            KeyParser kp = new KeyParser(key, prefix.length);
            String domainId = kp.getNextString();
            byte[] prefixExt = KeyBuilder.newInstance().add(OWNER_LOOKUP_PREFIX).add(owner).add(domainId).getBytesForLookup();
            TimelineDomain domainToReturn = getTimelineDomain(iterator, domainId, prefixExt);
            if (domainToReturn != null) {
                domains.add(domainToReturn);
            }
        }
        // Sort the domains to return
        Collections.sort(domains, new Comparator<TimelineDomain>() {

            @Override
            public int compare(TimelineDomain domain1, TimelineDomain domain2) {
                int result = domain2.getCreatedTime().compareTo(domain1.getCreatedTime());
                if (result == 0) {
                    return domain2.getModifiedTime().compareTo(domain1.getModifiedTime());
                } else {
                    return result;
                }
            }
        });
        TimelineDomains domainsToReturn = new TimelineDomains();
        domainsToReturn.addDomains(domains);
        return domainsToReturn;
    } catch (DBException e) {
        throw new IOException(e);
    } finally {
        IOUtils.cleanup(LOG, iterator);
    }
}
Also used: LeveldbIterator (org.apache.hadoop.yarn.server.utils.LeveldbIterator), KeyParser (org.apache.hadoop.yarn.server.timeline.util.LeveldbUtils.KeyParser), IOException (java.io.IOException)
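
A minimal usage sketch follows; the DomainListDemo class, the store parameter, and the owner name "alice" are illustrative, not part of the Hadoop API:

import java.io.IOException;

import org.apache.hadoop.yarn.api.records.timeline.TimelineDomain;
import org.apache.hadoop.yarn.server.timeline.LeveldbTimelineStore;

public class DomainListDemo {

    // prints every domain owned by "alice"; results arrive sorted by created
    // time, newest first, per the comparator in getDomains above
    static void printDomains(LeveldbTimelineStore store) throws IOException {
        for (TimelineDomain d : store.getDomains("alice").getDomains()) {
            System.out.println(d.getId() + " created=" + d.getCreatedTime()
                + " modified=" + d.getModifiedTime());
        }
    }
}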

Example 4 with KeyParser

Use of org.apache.hadoop.yarn.server.timeline.util.LeveldbUtils.KeyParser in project hadoop by apache.

From the class LeveldbTimelineStore, method getEntityByTime:

/**
   * Retrieves a list of entities satisfying given parameters.
   *
   * @param base A byte array prefix for the lookup
   * @param entityType The type of the entity
   * @param limit A limit on the number of entities to return
   * @param starttime The earliest entity start time to retrieve (exclusive)
   * @param endtime The latest entity start time to retrieve (inclusive)
   * @param fromId Retrieve entities starting with this entity
   * @param fromTs Ignore entities with insert timestamp later than this ts
   * @param secondaryFilters Filter pairs that the entities should match
   * @param fields The set of fields to retrieve
   * @param checkAcl Only entities that pass this access control check are
   *          returned; may be null to skip the check
   * @return A list of entities
   * @throws IOException if there is an error reading from the db
   */
private TimelineEntities getEntityByTime(byte[] base, String entityType, Long limit, Long starttime, Long endtime, String fromId, Long fromTs, Collection<NameValuePair> secondaryFilters, EnumSet<Field> fields, CheckAcl checkAcl) throws IOException {
    // if no fields are specified, retrieve all fields
    if (fields == null) {
        fields = EnumSet.allOf(Field.class);
    }
    boolean addPrimaryFilters = false;
    boolean addOtherInfo = false;
    if (secondaryFilters != null && secondaryFilters.size() > 0) {
        if (!fields.contains(Field.PRIMARY_FILTERS)) {
            fields.add(Field.PRIMARY_FILTERS);
            addPrimaryFilters = true;
        }
        if (!fields.contains(Field.OTHER_INFO)) {
            fields.add(Field.OTHER_INFO);
            addOtherInfo = true;
        }
    }
    LeveldbIterator iterator = null;
    try {
        KeyBuilder kb = KeyBuilder.newInstance().add(base).add(entityType);
        // only db keys matching the prefix (base + entity type) will be parsed
        byte[] prefix = kb.getBytesForLookup();
        if (endtime == null) {
            // if end time is null, place no restriction on end time
            endtime = Long.MAX_VALUE;
        }
        // construct a first key that will be seeked to using end time or fromId
        byte[] first = null;
        if (fromId != null) {
            Long fromIdStartTime = getStartTimeLong(fromId, entityType);
            if (fromIdStartTime == null) {
                // no start time for provided id, so return empty entities
                return new TimelineEntities();
            }
            if (fromIdStartTime <= endtime) {
                // if provided id's start time falls before the end of the window,
                // use it to construct the seek key
                first = kb.add(writeReverseOrderedLong(fromIdStartTime)).add(fromId).getBytesForLookup();
            }
        }
        // if seek key wasn't constructed using fromId, construct it using end ts
        if (first == null) {
            first = kb.add(writeReverseOrderedLong(endtime)).getBytesForLookup();
        }
        byte[] last = null;
        if (starttime != null) {
            // if start time is not null, set a last key that will not be
            // iterated past
            last = KeyBuilder.newInstance().add(base).add(entityType).add(writeReverseOrderedLong(starttime)).getBytesForLookup();
        }
        if (limit == null) {
            // if limit is not specified, use the default
            limit = DEFAULT_LIMIT;
        }
        TimelineEntities entities = new TimelineEntities();
        iterator = new LeveldbIterator(db);
        iterator.seek(first);
        // iterate until the limit is reached, there are no more keys, the
        // key prefix no longer matches, or a start time has been specified
        // and has been reached/exceeded
        while (entities.getEntities().size() < limit && iterator.hasNext()) {
            byte[] key = iterator.peekNext().getKey();
            if (!prefixMatches(prefix, prefix.length, key) || (last != null && WritableComparator.compareBytes(key, 0, key.length, last, 0, last.length) > 0)) {
                break;
            }
            // read the start time and entity id from the current key
            KeyParser kp = new KeyParser(key, prefix.length);
            Long startTime = kp.getNextLong();
            String entityId = kp.getNextString();
            if (fromTs != null) {
                long insertTime = readReverseOrderedLong(iterator.peekNext().getValue(), 0);
                if (insertTime > fromTs) {
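                    // skip all remaining keys belonging to this entity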
                    byte[] firstKey = key;
                    while (iterator.hasNext() && prefixMatches(firstKey, kp.getOffset(), key)) {
                        iterator.next();
                        key = iterator.peekNext().getKey();
                    }
                    continue;
                }
            }
            // parse the entity that owns this key, iterating over all keys for
            // the entity
            TimelineEntity entity = getEntity(entityId, entityType, startTime, fields, iterator, key, kp.getOffset());
            // determine if the retrieved entity matches the provided secondary
            // filters, and if so add it to the list of entities to return
            boolean filterPassed = true;
            if (secondaryFilters != null) {
                for (NameValuePair filter : secondaryFilters) {
                    Object v = entity.getOtherInfo().get(filter.getName());
                    if (v == null) {
                        Set<Object> vs = entity.getPrimaryFilters().get(filter.getName());
                        if (vs == null || !vs.contains(filter.getValue())) {
                            filterPassed = false;
                            break;
                        }
                    } else if (!v.equals(filter.getValue())) {
                        filterPassed = false;
                        break;
                    }
                }
            }
            if (filterPassed) {
                if (entity.getDomainId() == null) {
                    entity.setDomainId(DEFAULT_DOMAIN_ID);
                }
                if (checkAcl == null || checkAcl.check(entity)) {
                    // remove the primary filters and other info if they were
                    // added only for matching the secondary filters
                    if (addPrimaryFilters) {
                        entity.setPrimaryFilters(null);
                    }
                    if (addOtherInfo) {
                        entity.setOtherInfo(null);
                    }
                    entities.addEntity(entity);
                }
            }
        }
        return entities;
    } catch (DBException e) {
        throw new IOException(e);
    } finally {
        IOUtils.cleanup(LOG, iterator);
    }
}
Also used: LeveldbIterator (org.apache.hadoop.yarn.server.utils.LeveldbIterator), IOException (java.io.IOException), KeyBuilder (org.apache.hadoop.yarn.server.timeline.util.LeveldbUtils.KeyBuilder), GenericObjectMapper.writeReverseOrderedLong (org.apache.hadoop.yarn.server.timeline.GenericObjectMapper.writeReverseOrderedLong), GenericObjectMapper.readReverseOrderedLong (org.apache.hadoop.yarn.server.timeline.GenericObjectMapper.readReverseOrderedLong), KeyParser (org.apache.hadoop.yarn.server.timeline.util.LeveldbUtils.KeyParser)
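
The seek logic above relies on writeReverseOrderedLong encoding larger longs as lexicographically smaller byte arrays, so a forward LevelDB scan visits newer start times first. A small self-contained check of that property (the class name is illustrative):

import org.apache.hadoop.io.WritableComparator;

import static org.apache.hadoop.yarn.server.timeline.GenericObjectMapper.readReverseOrderedLong;
import static org.apache.hadoop.yarn.server.timeline.GenericObjectMapper.writeReverseOrderedLong;

public class ReverseOrderDemo {

    public static void main(String[] args) {
        byte[] earlier = writeReverseOrderedLong(1000L);
        byte[] later = writeReverseOrderedLong(2000L);
        // the later timestamp encodes to the smaller byte array, so it is
        // reached first when scanning the db in key order
        int cmp = WritableComparator.compareBytes(later, 0, later.length,
            earlier, 0, earlier.length);
        System.out.println(cmp < 0);                          // true
        System.out.println(readReverseOrderedLong(later, 0)); // 2000
    }
}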

Example 5 with KeyParser

Use of org.apache.hadoop.yarn.server.timeline.util.LeveldbUtils.KeyParser in project hadoop by apache.

From the class LeveldbTimelineStore, method getEntityEvent:

/**
   * Creates an event object from the given key, offset, and value.  If the
   * event type is not contained in the specified set of event types,
   * returns null.
   */
private static TimelineEvent getEntityEvent(Set<String> eventTypes, byte[] key, int offset, byte[] value) throws IOException {
    KeyParser kp = new KeyParser(key, offset);
    long ts = kp.getNextLong();
    String tstype = kp.getNextString();
    if (eventTypes == null || eventTypes.contains(tstype)) {
        TimelineEvent event = new TimelineEvent();
        event.setTimestamp(ts);
        event.setEventType(tstype);
        Object o = GenericObjectMapper.read(value);
        if (o == null) {
            event.setEventInfo(null);
        } else if (o instanceof Map) {
            @SuppressWarnings("unchecked") Map<String, Object> m = (Map<String, Object>) o;
            event.setEventInfo(m);
        } else {
            throw new IOException("Couldn't deserialize event info map");
        }
        return event;
    }
    return null;
}
Also used: KeyParser (org.apache.hadoop.yarn.server.timeline.util.LeveldbUtils.KeyParser), IOException (java.io.IOException), LRUMap (org.apache.commons.collections.map.LRUMap)
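
The event key parsed above pairs a reverse-ordered timestamp with a separator-terminated event type string. Here is a round-trip sketch of that layout, assuming KeyBuilder/KeyParser behave as in the examples on this page (the events column prefix is left out by parsing from offset 0, and the event type "APP_SUBMITTED" is just an example value):

import java.io.IOException;

import org.apache.hadoop.yarn.server.timeline.util.LeveldbUtils.KeyBuilder;
import org.apache.hadoop.yarn.server.timeline.util.LeveldbUtils.KeyParser;

import static org.apache.hadoop.yarn.server.timeline.GenericObjectMapper.writeReverseOrderedLong;

public class EventKeyDemo {

    public static void main(String[] args) throws IOException {
        long ts = 1234567890L;
        byte[] key = KeyBuilder.newInstance()
            .add(writeReverseOrderedLong(ts))  // 8-byte reverse-ordered timestamp
            .add("APP_SUBMITTED")              // event type string
            .getBytesForLookup();
        KeyParser kp = new KeyParser(key, 0);
        System.out.println(kp.getNextLong() == ts);  // true
        System.out.println(kp.getNextString());      // APP_SUBMITTED
    }
}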

Aggregations

KeyParser (org.apache.hadoop.yarn.server.timeline.util.LeveldbUtils.KeyParser): 12 usages
IOException (java.io.IOException): 6 usages
KeyBuilder (org.apache.hadoop.yarn.server.timeline.util.LeveldbUtils.KeyBuilder): 3 usages
LeveldbIterator (org.apache.hadoop.yarn.server.utils.LeveldbIterator): 3 usages
VisibleForTesting (com.google.common.annotations.VisibleForTesting): 2 usages
LRUMap (org.apache.commons.collections.map.LRUMap): 2 usages
GenericObjectMapper.readReverseOrderedLong (org.apache.hadoop.yarn.server.timeline.GenericObjectMapper.readReverseOrderedLong): 2 usages
GenericObjectMapper.writeReverseOrderedLong (org.apache.hadoop.yarn.server.timeline.GenericObjectMapper.writeReverseOrderedLong): 2 usages
DBIterator (org.iq80.leveldb.DBIterator): 2 usages
ArrayList (java.util.ArrayList): 1 usage
Map (java.util.Map): 1 usage
TreeMap (java.util.TreeMap): 1 usage
TimelineDomain (org.apache.hadoop.yarn.api.records.timeline.TimelineDomain): 1 usage
TimelineDomains (org.apache.hadoop.yarn.api.records.timeline.TimelineDomains): 1 usage
TimelineEntities (org.apache.hadoop.yarn.api.records.timeline.TimelineEntities): 1 usage
TimelineEntity (org.apache.hadoop.yarn.api.records.timeline.TimelineEntity): 1 usage
TimelineEvent (org.apache.hadoop.yarn.api.records.timeline.TimelineEvent): 1 usage
DB (org.iq80.leveldb.DB): 1 usage