Example 16 with LeveldbIterator

Use of org.apache.hadoop.yarn.server.utils.LeveldbIterator in project hadoop by apache.

The class LeveldbTimelineStore, method getEntityTimelines:

@Override
public TimelineEvents getEntityTimelines(String entityType,
        SortedSet<String> entityIds, Long limit, Long windowStart,
        Long windowEnd, Set<String> eventType) throws IOException {
    TimelineEvents events = new TimelineEvents();
    if (entityIds == null || entityIds.isEmpty()) {
        return events;
    }
    // create a lexicographically-ordered map from start time to entities
    Map<byte[], List<EntityIdentifier>> startTimeMap =
        new TreeMap<byte[], List<EntityIdentifier>>(new Comparator<byte[]>() {

        @Override
        public int compare(byte[] o1, byte[] o2) {
            return WritableComparator.compareBytes(o1, 0, o1.length, o2, 0, o2.length);
        }
    });
    LeveldbIterator iterator = null;
    try {
        // skip entities with no start time
        for (String entityId : entityIds) {
            byte[] startTime = getStartTime(entityId, entityType);
            if (startTime != null) {
                List<EntityIdentifier> entities = startTimeMap.get(startTime);
                if (entities == null) {
                    entities = new ArrayList<EntityIdentifier>();
                    startTimeMap.put(startTime, entities);
                }
                entities.add(new EntityIdentifier(entityId, entityType));
            }
        }
        for (Entry<byte[], List<EntityIdentifier>> entry : startTimeMap.entrySet()) {
            // look up the events matching the given parameters (limit,
            // start time, end time, event types) for entities whose start times
            // were found and add the entities to the return list
            byte[] revStartTime = entry.getKey();
            for (EntityIdentifier entityIdentifier : entry.getValue()) {
                EventsOfOneEntity entity = new EventsOfOneEntity();
                entity.setEntityId(entityIdentifier.getId());
                entity.setEntityType(entityType);
                events.addEvent(entity);
                KeyBuilder kb = KeyBuilder.newInstance().add(ENTITY_ENTRY_PREFIX)
                        .add(entityType).add(revStartTime)
                        .add(entityIdentifier.getId()).add(EVENTS_COLUMN);
                byte[] prefix = kb.getBytesForLookup();
                if (windowEnd == null) {
                    windowEnd = Long.MAX_VALUE;
                }
                byte[] revts = writeReverseOrderedLong(windowEnd);
                kb.add(revts);
                byte[] first = kb.getBytesForLookup();
                byte[] last = null;
                if (windowStart != null) {
                    last = KeyBuilder.newInstance().add(prefix)
                            .add(writeReverseOrderedLong(windowStart))
                            .getBytesForLookup();
                }
                if (limit == null) {
                    limit = DEFAULT_LIMIT;
                }
                iterator = new LeveldbIterator(db);
                for (iterator.seek(first); entity.getEvents().size() < limit
                        && iterator.hasNext(); iterator.next()) {
                    byte[] key = iterator.peekNext().getKey();
                    if (!prefixMatches(prefix, prefix.length, key)
                            || (last != null && WritableComparator.compareBytes(
                                key, 0, key.length, last, 0, last.length) > 0)) {
                        break;
                    }
                    TimelineEvent event = getEntityEvent(eventType, key, prefix.length, iterator.peekNext().getValue());
                    if (event != null) {
                        entity.addEvent(event);
                    }
                }
            }
        }
    } catch (DBException e) {
        throw new IOException(e);
    } finally {
        IOUtils.cleanup(LOG, iterator);
    }
    return events;
}
Also used: LeveldbIterator (org.apache.hadoop.yarn.server.utils.LeveldbIterator), EventsOfOneEntity (org.apache.hadoop.yarn.api.records.timeline.TimelineEvents.EventsOfOneEntity), IOException (java.io.IOException), KeyBuilder (org.apache.hadoop.yarn.server.timeline.util.LeveldbUtils.KeyBuilder)
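
The windowed scan above relies on writeReverseOrderedLong: timestamps are encoded so that a larger long yields a lexicographically smaller byte array, which makes the newest events sort first in leveldb's key order. Below is a minimal, self-contained sketch of how such an encoding behaves; the actual GenericObjectMapper implementation in Hadoop may differ in detail.

import java.util.Arrays;

public class ReverseOrderedLongDemo {

    // Sketch of a reverse-ordered encoding: invert the bits of the
    // big-endian representation, leaving the sign bit uninverted so that
    // negative values still sort after positive ones. Larger longs then
    // produce lexicographically smaller byte arrays.
    static byte[] writeReverseOrderedLong(long l) {
        byte[] b = new byte[8];
        b[0] = (byte) (0x7f ^ ((l >> 56) & 0xff)); // keep the sign bit
        for (int i = 1; i < 8; i++) {
            b[i] = (byte) (0xff ^ ((l >> 8 * (7 - i)) & 0xff));
        }
        return b;
    }

    public static void main(String[] args) {
        byte[] newer = writeReverseOrderedLong(2000L);
        byte[] older = writeReverseOrderedLong(1000L);
        // Unsigned lexicographic comparison (leveldb's default key order):
        // the newer timestamp sorts first.
        System.out.println(Arrays.compareUnsigned(newer, older) < 0); // true
    }
}

This is why the method seeks to prefix + reverse(windowEnd) and iterates forward: the scan starts at the newest event inside the window and walks toward older events until the windowStart bound (last) is passed.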

Example 17 with LeveldbIterator

Use of org.apache.hadoop.yarn.server.utils.LeveldbIterator in project hadoop by apache.

The class LeveldbTimelineStore, method getEntityByTime:

/**
   * Retrieves a list of entities satisfying given parameters.
   *
   * @param base A byte array prefix for the lookup
   * @param entityType The type of the entity
   * @param limit A limit on the number of entities to return
   * @param starttime The earliest entity start time to retrieve (exclusive)
   * @param endtime The latest entity start time to retrieve (inclusive)
   * @param fromId Retrieve entities starting with this entity
   * @param fromTs Ignore entities with insert timestamp later than this ts
   * @param secondaryFilters Filter pairs that the entities should match
   * @param fields The set of fields to retrieve
   * @param checkAcl An access-control callback used to drop entities the
   *          caller is not allowed to view (null skips the check)
   * @return A list of entities
   * @throws IOException if the leveldb lookup fails
   */
private TimelineEntities getEntityByTime(byte[] base, String entityType,
        Long limit, Long starttime, Long endtime, String fromId, Long fromTs,
        Collection<NameValuePair> secondaryFilters, EnumSet<Field> fields,
        CheckAcl checkAcl) throws IOException {
    // even if the primary filter and other info fields were not requested,
    // they still need to be loaded so that non-empty secondary filters can
    // be matched (they are stripped again before returning, see below)
    if (fields == null) {
        fields = EnumSet.allOf(Field.class);
    }
    boolean addPrimaryFilters = false;
    boolean addOtherInfo = false;
    if (secondaryFilters != null && secondaryFilters.size() > 0) {
        if (!fields.contains(Field.PRIMARY_FILTERS)) {
            fields.add(Field.PRIMARY_FILTERS);
            addPrimaryFilters = true;
        }
        if (!fields.contains(Field.OTHER_INFO)) {
            fields.add(Field.OTHER_INFO);
            addOtherInfo = true;
        }
    }
    LeveldbIterator iterator = null;
    try {
        KeyBuilder kb = KeyBuilder.newInstance().add(base).add(entityType);
        // only db keys matching the prefix (base + entity type) will be parsed
        byte[] prefix = kb.getBytesForLookup();
        if (endtime == null) {
            // if end time is null, place no restriction on end time
            endtime = Long.MAX_VALUE;
        }
        // construct a first key that will be seeked to using end time or fromId
        byte[] first = null;
        if (fromId != null) {
            Long fromIdStartTime = getStartTimeLong(fromId, entityType);
            if (fromIdStartTime == null) {
                // no start time for provided id, so return empty entities
                return new TimelineEntities();
            }
            if (fromIdStartTime <= endtime) {
                // if provided id's start time falls before the end of the window,
                // use it to construct the seek key
                first = kb.add(writeReverseOrderedLong(fromIdStartTime)).add(fromId).getBytesForLookup();
            }
        }
        // if seek key wasn't constructed using fromId, construct it using end ts
        if (first == null) {
            first = kb.add(writeReverseOrderedLong(endtime)).getBytesForLookup();
        }
        byte[] last = null;
        if (starttime != null) {
            // if start time is not null, set a last key that will not be
            // iterated past
            last = KeyBuilder.newInstance().add(base).add(entityType)
                    .add(writeReverseOrderedLong(starttime)).getBytesForLookup();
        }
        if (limit == null) {
            // if limit is not specified, use the default
            limit = DEFAULT_LIMIT;
        }
        TimelineEntities entities = new TimelineEntities();
        iterator = new LeveldbIterator(db);
        iterator.seek(first);
    // iterate until the limit is reached, the keys run out, the key prefix
    // no longer matches, or a start time has been specified and
    // reached/exceeded
        while (entities.getEntities().size() < limit && iterator.hasNext()) {
            byte[] key = iterator.peekNext().getKey();
            if (!prefixMatches(prefix, prefix.length, key)
                    || (last != null && WritableComparator.compareBytes(
                        key, 0, key.length, last, 0, last.length) > 0)) {
                break;
            }
            // read the start time and entity id from the current key
            KeyParser kp = new KeyParser(key, prefix.length);
            Long startTime = kp.getNextLong();
            String entityId = kp.getNextString();
            if (fromTs != null) {
                long insertTime = readReverseOrderedLong(iterator.peekNext().getValue(), 0);
                if (insertTime > fromTs) {
                    byte[] firstKey = key;
                    while (iterator.hasNext() && prefixMatches(firstKey, kp.getOffset(), key)) {
                        iterator.next();
                        key = iterator.peekNext().getKey();
                    }
                    continue;
                }
            }
            // parse the entity that owns this key, iterating over all keys for
            // the entity
            TimelineEntity entity = getEntity(entityId, entityType, startTime, fields, iterator, key, kp.getOffset());
            // determine if the retrieved entity matches the provided secondary
            // filters, and if so add it to the list of entities to return
            boolean filterPassed = true;
            if (secondaryFilters != null) {
                for (NameValuePair filter : secondaryFilters) {
                    Object v = entity.getOtherInfo().get(filter.getName());
                    if (v == null) {
                        Set<Object> vs = entity.getPrimaryFilters().get(filter.getName());
                        if (vs == null || !vs.contains(filter.getValue())) {
                            filterPassed = false;
                            break;
                        }
                    } else if (!v.equals(filter.getValue())) {
                        filterPassed = false;
                        break;
                    }
                }
            }
            if (filterPassed) {
                if (entity.getDomainId() == null) {
                    entity.setDomainId(DEFAULT_DOMAIN_ID);
                }
                if (checkAcl == null || checkAcl.check(entity)) {
                    // strip the primary filters and other info if they were
                    // only loaded above for matching secondary filters
                    if (addPrimaryFilters) {
                        entity.setPrimaryFilters(null);
                    }
                    if (addOtherInfo) {
                        entity.setOtherInfo(null);
                    }
                    entities.addEntity(entity);
                }
            }
        }
        return entities;
    } catch (DBException e) {
        throw new IOException(e);
    } finally {
        IOUtils.cleanup(LOG, iterator);
    }
}
Also used: LeveldbIterator (org.apache.hadoop.yarn.server.utils.LeveldbIterator), IOException (java.io.IOException), KeyBuilder (org.apache.hadoop.yarn.server.timeline.util.LeveldbUtils.KeyBuilder), GenericObjectMapper.writeReverseOrderedLong (org.apache.hadoop.yarn.server.timeline.GenericObjectMapper.writeReverseOrderedLong), GenericObjectMapper.readReverseOrderedLong (org.apache.hadoop.yarn.server.timeline.GenericObjectMapper.readReverseOrderedLong), KeyParser (org.apache.hadoop.yarn.server.timeline.util.LeveldbUtils.KeyParser)
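
Both methods bound their scans with prefixMatches from LeveldbUtils, whose body is not shown in these snippets. The sketch below is inferred from its call sites (prefix, prefix length, candidate key); treat the exact implementation as an assumption.

import org.apache.hadoop.io.WritableComparator;

final class PrefixUtil {

    // Sketch of LeveldbUtils.prefixMatches as inferred from its call
    // sites: true iff key begins with the first prefixLen bytes of
    // prefix, compared as unsigned bytes (leveldb's key order).
    static boolean prefixMatches(byte[] prefix, int prefixLen, byte[] key) {
        if (key.length < prefixLen) {
            return false;
        }
        return WritableComparator.compareBytes(prefix, 0, prefixLen,
                key, 0, prefixLen) == 0;
    }
}

Checking only the first prefixLen bytes is what lets each loop break out as soon as the iterator leaves the key range of the entity type (or token prefix) being scanned.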

Example 18 with LeveldbIterator

Use of org.apache.hadoop.yarn.server.utils.LeveldbIterator in project hadoop by apache.

The class LeveldbTimelineStore, method getDomain:

@Override
public TimelineDomain getDomain(String domainId) throws IOException {
    LeveldbIterator iterator = null;
    try {
        byte[] prefix = KeyBuilder.newInstance().add(DOMAIN_ENTRY_PREFIX)
                .add(domainId).getBytesForLookup();
        iterator = new LeveldbIterator(db);
        iterator.seek(prefix);
        return getTimelineDomain(iterator, domainId, prefix);
    } catch (DBException e) {
        throw new IOException(e);
    } finally {
        IOUtils.cleanup(LOG, iterator);
    }
}
Also used: LeveldbIterator (org.apache.hadoop.yarn.server.utils.LeveldbIterator), IOException (java.io.IOException)

Example 19 with LeveldbIterator

Use of org.apache.hadoop.yarn.server.utils.LeveldbIterator in project hadoop by apache.

The class LeveldbTimelineStore, method getEntityTypes:

@VisibleForTesting
List<String> getEntityTypes() throws IOException {
    LeveldbIterator iterator = null;
    try {
        iterator = getDbIterator(false);
        List<String> entityTypes = new ArrayList<String>();
        iterator.seek(ENTITY_ENTRY_PREFIX);
        while (iterator.hasNext()) {
            byte[] key = iterator.peekNext().getKey();
            if (key[0] != ENTITY_ENTRY_PREFIX[0]) {
                break;
            }
            KeyParser kp = new KeyParser(key, ENTITY_ENTRY_PREFIX.length);
            String entityType = kp.getNextString();
            entityTypes.add(entityType);
            byte[] lookupKey = KeyBuilder.newInstance().add(ENTITY_ENTRY_PREFIX)
                    .add(entityType).getBytesForLookup();
            if (lookupKey[lookupKey.length - 1] != 0x0) {
                throw new IOException("Found unexpected end byte in lookup key");
            }
            lookupKey[lookupKey.length - 1] = 0x1;
            iterator.seek(lookupKey);
        }
        return entityTypes;
    } catch (DBException e) {
        throw new IOException(e);
    } finally {
        IOUtils.cleanup(LOG, iterator);
    }
}
Also used: LeveldbIterator (org.apache.hadoop.yarn.server.utils.LeveldbIterator), KeyParser (org.apache.hadoop.yarn.server.timeline.util.LeveldbUtils.KeyParser), IOException (java.io.IOException), VisibleForTesting (com.google.common.annotations.VisibleForTesting)
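
The notable trick in getEntityTypes is that it never iterates through the entities of a type it has already seen: every key for a given type starts with ENTITY_ENTRY_PREFIX + type + a 0x0 separator, and bumping that trailing byte to 0x1 produces the smallest key that sorts after the whole group, so a single seek() jumps straight to the next type. Here is a standalone demonstration of the idea, simulating leveldb's unsigned-lexicographic order with a TreeSet over hypothetical keys (Java 9+ for Arrays.compareUnsigned):

import java.util.Arrays;
import java.util.TreeSet;

public class SeekSkipDemo {

    public static void main(String[] args) {
        // Simulate leveldb's unsigned-lexicographic key order.
        TreeSet<byte[]> keys = new TreeSet<>(Arrays::compareUnsigned);
        keys.add(new byte[] {'e', 0x0, 'A', 0x0, '1'}); // type A, entity 1
        keys.add(new byte[] {'e', 0x0, 'A', 0x0, '2'}); // type A, entity 2
        keys.add(new byte[] {'e', 0x0, 'B', 0x0, '1'}); // type B, entity 1

        // "Seek" past every type-A key: bump the trailing separator of
        // the type-A prefix from 0x0 to 0x1.
        byte[] skipPastA = new byte[] {'e', 0x0, 'A', 0x1};
        byte[] next = keys.ceiling(skipPastA);
        System.out.println((char) next[2]); // prints 'B'
    }
}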

Example 20 with LeveldbIterator

Use of org.apache.hadoop.yarn.server.utils.LeveldbIterator in project hadoop by apache.

The class LeveldbTimelineStateStore, method loadTokens:

private int loadTokens(TimelineServiceState state) throws IOException {
    byte[] base = KeyBuilder.newInstance().add(TOKEN_ENTRY_PREFIX).getBytesForLookup();
    int numTokens = 0;
    LeveldbIterator iterator = null;
    try {
        for (iterator = new LeveldbIterator(db), iterator.seek(base); iterator.hasNext(); iterator.next()) {
            byte[] k = iterator.peekNext().getKey();
            if (!prefixMatches(base, base.length, k)) {
                break;
            }
            byte[] v = iterator.peekNext().getValue();
            loadTokenData(state, v);
            ++numTokens;
        }
    } catch (DBException e) {
        throw new IOException(e);
    } finally {
        IOUtils.cleanup(LOG, iterator);
    }
    return numTokens;
}
Also used: DBException (org.iq80.leveldb.DBException), LeveldbIterator (org.apache.hadoop.yarn.server.utils.LeveldbIterator), IOException (java.io.IOException)
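
All five examples share the same resource pattern: LeveldbIterator wraps the raw leveldb iterator, the unchecked DBException is rewrapped as a checked IOException, and IOUtils.cleanup closes the iterator in the finally block, which works because LeveldbIterator is a Closeable. Under that assumption the scan above could equally be written with try-with-resources; countTokens below is a hypothetical rewrite for illustration, not code from Hadoop:

// Hypothetical rewrite of the loadTokens scan using try-with-resources;
// assumes LeveldbIterator implements Closeable, as the
// IOUtils.cleanup(LOG, iterator) calls in the originals imply.
private int countTokens() throws IOException {
    byte[] base = KeyBuilder.newInstance().add(TOKEN_ENTRY_PREFIX)
            .getBytesForLookup();
    int numTokens = 0;
    try (LeveldbIterator iterator = new LeveldbIterator(db)) {
        for (iterator.seek(base); iterator.hasNext(); iterator.next()) {
            byte[] k = iterator.peekNext().getKey();
            if (!prefixMatches(base, base.length, k)) {
                // left the TOKEN_ENTRY_PREFIX key range
                break;
            }
            ++numTokens;
        }
    } catch (DBException e) {
        // DBException is unchecked; surface it as a checked IOException
        throw new IOException(e);
    }
    return numTokens;
}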

Aggregations

LeveldbIterator (org.apache.hadoop.yarn.server.utils.LeveldbIterator): 26 uses
IOException (java.io.IOException): 25 uses
DBException (org.iq80.leveldb.DBException): 18 uses
JniDBFactory.asString (org.fusesource.leveldbjni.JniDBFactory.asString): 15 uses
VisibleForTesting (com.google.common.annotations.VisibleForTesting): 3 uses
KeyParser (org.apache.hadoop.yarn.server.timeline.util.LeveldbUtils.KeyParser): 3 uses
ContainerId (org.apache.hadoop.yarn.api.records.ContainerId): 2 uses
GenericObjectMapper.readReverseOrderedLong (org.apache.hadoop.yarn.server.timeline.GenericObjectMapper.readReverseOrderedLong): 2 uses
GenericObjectMapper.writeReverseOrderedLong (org.apache.hadoop.yarn.server.timeline.GenericObjectMapper.writeReverseOrderedLong): 2 uses
KeyBuilder (org.apache.hadoop.yarn.server.timeline.util.LeveldbUtils.KeyBuilder): 2 uses
ByteString (com.google.protobuf.ByteString): 1 use
ArrayList (java.util.ArrayList): 1 use
HashMap (java.util.HashMap): 1 use
Map (java.util.Map): 1 use
ConcurrentHashMap (java.util.concurrent.ConcurrentHashMap): 1 use
Pattern (java.util.regex.Pattern): 1 use
Path (org.apache.hadoop.fs.Path): 1 use
DelegationKey (org.apache.hadoop.security.token.delegation.DelegationKey): 1 use
ApplicationAttemptId (org.apache.hadoop.yarn.api.records.ApplicationAttemptId): 1 use
ApplicationId (org.apache.hadoop.yarn.api.records.ApplicationId): 1 use