Example 11 with DBIterator

Use of org.iq80.leveldb.DBIterator in project hadoop by apache.

The class RollingLevelDBTimelineStore, method getEntity.

@Override
public TimelineEntity getEntity(String entityId, String entityType, EnumSet<Field> fields) throws IOException {
    Long revStartTime = getStartTimeLong(entityId, entityType);
    if (revStartTime == null) {
        // no recorded start time means the entity was never stored
        return null;
    }
    // entity rows are keyed by entity type + reverse-ordered start time + entity id
    byte[] prefix = KeyBuilder.newInstance().add(entityType).add(writeReverseOrderedLong(revStartTime)).add(entityId).getBytesForLookup();
    DBIterator iterator = null;
    try {
        // pick the rolling DB instance whose window covers this start time
        DB db = entitydb.getDBForStartTime(revStartTime);
        if (db == null) {
            return null;
        }
        iterator = db.iterator();
        iterator.seek(prefix);
        return getEntity(entityId, entityType, revStartTime, fields, iterator, prefix, prefix.length);
    } finally {
        IOUtils.cleanup(LOG, iterator);
    }
}
Also used: DBIterator (org.iq80.leveldb.DBIterator), GenericObjectMapper.writeReverseOrderedLong (org.apache.hadoop.yarn.server.timeline.GenericObjectMapper.writeReverseOrderedLong), GenericObjectMapper.readReverseOrderedLong (org.apache.hadoop.yarn.server.timeline.GenericObjectMapper.readReverseOrderedLong), DB (org.iq80.leveldb.DB)
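Every example on this page leans on the same LevelDB idiom: build a byte prefix, seek(prefix), then iterate until the first key that no longer starts with the prefix. A minimal, self-contained sketch of that idiom follows; the database path, keys, and class name are invented for illustration and are not part of the Hadoop store.

// Minimal prefix-scan sketch using org.iq80.leveldb.DBIterator.
// The path and key scheme are hypothetical.
import static org.fusesource.leveldbjni.JniDBFactory.asString;
import static org.fusesource.leveldbjni.JniDBFactory.bytes;
import static org.fusesource.leveldbjni.JniDBFactory.factory;

import java.io.File;
import java.util.Map;
import org.iq80.leveldb.DB;
import org.iq80.leveldb.DBIterator;
import org.iq80.leveldb.Options;

public class PrefixScan {
    public static void main(String[] args) throws Exception {
        Options options = new Options().createIfMissing(true);
        try (DB db = factory.open(new File("/tmp/prefix-scan-demo"), options)) {
            db.put(bytes("user:alice"), bytes("1"));
            db.put(bytes("user:bob"), bytes("2"));
            db.put(bytes("zzz:other"), bytes("3"));
            try (DBIterator it = db.iterator()) {
                // position the iterator at the first key >= the prefix
                it.seek(bytes("user:"));
                while (it.hasNext()) {
                    Map.Entry<byte[], byte[]> e = it.peekNext();
                    String key = asString(e.getKey());
                    // keys are sorted, so the first non-matching key ends the scan
                    if (!key.startsWith("user:")) {
                        break;
                    }
                    System.out.println(key + " = " + asString(e.getValue()));
                    it.next();
                }
            }
        }
    }
}

Because LevelDB keys are totally ordered, the scan touches only the matching key range plus one terminating key.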

Example 12 with DBIterator

Use of org.iq80.leveldb.DBIterator in project hadoop by apache.

The class RollingLevelDBTimelineStore, method getEntityByTime.

/**
   * Retrieves a list of entities satisfying given parameters.
   *
   * @param base
   *          A byte array prefix for the lookup
   * @param entityType
   *          The type of the entity
   * @param limit
   *          A limit on the number of entities to return
   * @param starttime
   *          The earliest entity start time to retrieve (exclusive)
   * @param endtime
   *          The latest entity start time to retrieve (inclusive)
   * @param fromId
   *          Retrieve entities starting with this entity
   * @param fromTs
   *          Ignore entities with insert timestamp later than this ts
   * @param secondaryFilters
   *          Filter pairs that the entities should match
   * @param fields
   *          The set of fields to retrieve
   * @param checkAcl
   *          An access control check applied to each matching entity; a
   *          null value skips ACL checking
   * @param usingPrimaryFilter
   *          true if this query is using a primary filter
   * @return A list of entities
   * @throws IOException
   */
private TimelineEntities getEntityByTime(byte[] base, String entityType, Long limit, Long starttime, Long endtime, String fromId, Long fromTs, Collection<NameValuePair> secondaryFilters, EnumSet<Field> fields, CheckAcl checkAcl, boolean usingPrimaryFilter) throws IOException {
    DBIterator iterator = null;
    try {
        KeyBuilder kb = KeyBuilder.newInstance().add(base).add(entityType);
        // only db keys matching the prefix (base + entity type) will be parsed
        byte[] prefix = kb.getBytesForLookup();
        if (endtime == null) {
            // if end time is null, place no restriction on end time
            endtime = Long.MAX_VALUE;
        }
        // Sanitize the fields parameter
        if (fields == null) {
            fields = EnumSet.allOf(Field.class);
        }
        // construct a first key that will be seeked to using end time or fromId
        long firstStartTime = Long.MAX_VALUE;
        byte[] first = null;
        if (fromId != null) {
            Long fromIdStartTime = getStartTimeLong(fromId, entityType);
            if (fromIdStartTime == null) {
                // no start time for provided id, so return empty entities
                return new TimelineEntities();
            }
            if (fromIdStartTime <= endtime) {
                // if provided id's start time falls before the end of the window,
                // use it to construct the seek key
                firstStartTime = fromIdStartTime;
                first = kb.add(writeReverseOrderedLong(fromIdStartTime)).add(fromId).getBytesForLookup();
            }
        }
        // if seek key wasn't constructed using fromId, construct it using end ts
        if (first == null) {
            firstStartTime = endtime;
            first = kb.add(writeReverseOrderedLong(endtime)).getBytesForLookup();
        }
        byte[] last = null;
        if (starttime != null) {
            // if start time is not null, set a last key that will not be
            // iterated past
            last = KeyBuilder.newInstance().add(base).add(entityType).add(writeReverseOrderedLong(starttime)).getBytesForLookup();
        }
        if (limit == null) {
            // if limit is not specified, use the default
            limit = DEFAULT_LIMIT;
        }
        TimelineEntities entities = new TimelineEntities();
        RollingLevelDB rollingdb = null;
        if (usingPrimaryFilter) {
            rollingdb = indexdb;
        } else {
            rollingdb = entitydb;
        }
        DB db = rollingdb.getDBForStartTime(firstStartTime);
        while (entities.getEntities().size() < limit && db != null) {
            iterator = db.iterator();
            iterator.seek(first);
        // iterate until one of the following conditions is met: the limit is
        // reached, there are no more keys, the key prefix no longer matches,
        // or a start time has been specified and reached/exceeded
            while (entities.getEntities().size() < limit && iterator.hasNext()) {
                byte[] key = iterator.peekNext().getKey();
                if (!prefixMatches(prefix, prefix.length, key) || (last != null && WritableComparator.compareBytes(key, 0, key.length, last, 0, last.length) > 0)) {
                    break;
                }
                // read the start time and entity id from the current key
                KeyParser kp = new KeyParser(key, prefix.length);
                Long startTime = kp.getNextLong();
                String entityId = kp.getNextString();
                // skip entities inserted after the fromTs cutoff, advancing
                // past all of the skipped entity's rows
                if (fromTs != null) {
                    long insertTime = readReverseOrderedLong(iterator.peekNext().getValue(), 0);
                    if (insertTime > fromTs) {
                        byte[] firstKey = key;
                        while (iterator.hasNext()) {
                            key = iterator.peekNext().getKey();
                            iterator.next();
                            if (!prefixMatches(firstKey, kp.getOffset(), key)) {
                                break;
                            }
                        }
                        continue;
                    }
                }
                // Even if other info and primary filter fields are not included, we
                // still need to load them to match secondary filters when they are
                // non-empty
                EnumSet<Field> queryFields = EnumSet.copyOf(fields);
                boolean addPrimaryFilters = false;
                boolean addOtherInfo = false;
                if (secondaryFilters != null && secondaryFilters.size() > 0) {
                    if (!queryFields.contains(Field.PRIMARY_FILTERS)) {
                        queryFields.add(Field.PRIMARY_FILTERS);
                        addPrimaryFilters = true;
                    }
                    if (!queryFields.contains(Field.OTHER_INFO)) {
                        queryFields.add(Field.OTHER_INFO);
                        addOtherInfo = true;
                    }
                }
                // parse the entity that owns this key, iterating over all keys for
                // the entity
                TimelineEntity entity = null;
                if (usingPrimaryFilter) {
                    entity = getEntity(entityId, entityType, queryFields);
                    iterator.next();
                } else {
                    entity = getEntity(entityId, entityType, startTime, queryFields, iterator, key, kp.getOffset());
                }
                // determine if the retrieved entity matches the provided secondary
                // filters, and if so add it to the list of entities to return
                boolean filterPassed = true;
                if (secondaryFilters != null) {
                    for (NameValuePair filter : secondaryFilters) {
                        Object v = entity.getOtherInfo().get(filter.getName());
                        if (v == null) {
                            Set<Object> vs = entity.getPrimaryFilters().get(filter.getName());
                            if (vs == null || !vs.contains(filter.getValue())) {
                                filterPassed = false;
                                break;
                            }
                        } else if (!v.equals(filter.getValue())) {
                            filterPassed = false;
                            break;
                        }
                    }
                }
                if (filterPassed) {
                    if (entity.getDomainId() == null) {
                        entity.setDomainId(DEFAULT_DOMAIN_ID);
                    }
                    if (checkAcl == null || checkAcl.check(entity)) {
                        // remove the primary filter and other info fields if
                        // they were added above only for matching secondary
                        // filters
                        if (addPrimaryFilters) {
                            entity.setPrimaryFilters(null);
                        }
                        if (addOtherInfo) {
                            entity.setOtherInfo(null);
                        }
                        entities.addEntity(entity);
                    }
                }
            }
            db = rollingdb.getPreviousDB(db);
        }
        return entities;
    } finally {
        IOUtils.cleanup(LOG, iterator);
    }
}
Also used: TimelineEntity (org.apache.hadoop.yarn.api.records.timeline.TimelineEntity), DBIterator (org.iq80.leveldb.DBIterator), KeyBuilder (org.apache.hadoop.yarn.server.timeline.util.LeveldbUtils.KeyBuilder), TimelineEntities (org.apache.hadoop.yarn.api.records.timeline.TimelineEntities), GenericObjectMapper.writeReverseOrderedLong (org.apache.hadoop.yarn.server.timeline.GenericObjectMapper.writeReverseOrderedLong), GenericObjectMapper.readReverseOrderedLong (org.apache.hadoop.yarn.server.timeline.GenericObjectMapper.readReverseOrderedLong), KeyParser (org.apache.hadoop.yarn.server.timeline.util.LeveldbUtils.KeyParser), DB (org.iq80.leveldb.DB)
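The newest-first iteration in getEntityByTime works because writeReverseOrderedLong encodes timestamps so that LevelDB's ascending byte order corresponds to descending numeric order: seeking to the end-time key and walking forward visits entities from newest to oldest. Below is a sketch of one encoding with that property; it illustrates the idea and is not necessarily GenericObjectMapper's exact byte layout.

// Sketch: encode a non-negative long so that unsigned lexicographic
// comparison of the encoded bytes yields *descending* numeric order.
public class ReverseOrderedLongDemo {
    static byte[] encodeReverseOrdered(long value) {
        // complement the value so larger numbers encode to smaller bytes,
        // then write big-endian so byte-wise order matches numeric order
        long complement = ~value;
        byte[] b = new byte[8];
        for (int i = 0; i < 8; i++) {
            b[i] = (byte) (complement >>> (8 * (7 - i)));
        }
        return b;
    }

    static int compareUnsigned(byte[] a, byte[] b) {
        for (int i = 0; i < 8; i++) {
            int cmp = (a[i] & 0xff) - (b[i] & 0xff);
            if (cmp != 0) {
                return cmp;
            }
        }
        return 0;
    }

    public static void main(String[] args) {
        byte[] newer = encodeReverseOrdered(2000L);
        byte[] older = encodeReverseOrdered(1000L);
        // the newer (larger) timestamp sorts first
        System.out.println(compareUnsigned(newer, older) < 0); // prints true
    }
}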

Example 13 with DBIterator

Use of org.iq80.leveldb.DBIterator in project hadoop by apache.

The class RollingLevelDBTimelineStore, method getDomains.

@Override
public TimelineDomains getDomains(String owner) throws IOException {
    DBIterator iterator = null;
    try {
        byte[] prefix = KeyBuilder.newInstance().add(owner).getBytesForLookup();
        List<TimelineDomain> domains = new ArrayList<TimelineDomain>();
        for (iterator = ownerdb.iterator(), iterator.seek(prefix); iterator.hasNext(); ) {
            byte[] key = iterator.peekNext().getKey();
            if (!prefixMatches(prefix, prefix.length, key)) {
                break;
            }
            // Iterator to parse the rows of an individual domain
            KeyParser kp = new KeyParser(key, prefix.length);
            String domainId = kp.getNextString();
            byte[] prefixExt = KeyBuilder.newInstance().add(owner).add(domainId).getBytesForLookup();
            TimelineDomain domainToReturn = getTimelineDomain(iterator, domainId, prefixExt);
            if (domainToReturn != null) {
                domains.add(domainToReturn);
            }
        }
        // Sort the domains to return
        Collections.sort(domains, new Comparator<TimelineDomain>() {

            @Override
            public int compare(TimelineDomain domain1, TimelineDomain domain2) {
                int result = domain2.getCreatedTime().compareTo(domain1.getCreatedTime());
                if (result == 0) {
                    return domain2.getModifiedTime().compareTo(domain1.getModifiedTime());
                } else {
                    return result;
                }
            }
        });
        TimelineDomains domainsToReturn = new TimelineDomains();
        domainsToReturn.addDomains(domains);
        return domainsToReturn;
    } finally {
        IOUtils.cleanup(LOG, iterator);
    }
}
Also used: DBIterator (org.iq80.leveldb.DBIterator), TimelineDomains (org.apache.hadoop.yarn.api.records.timeline.TimelineDomains), ArrayList (java.util.ArrayList), TimelineDomain (org.apache.hadoop.yarn.api.records.timeline.TimelineDomain), KeyParser (org.apache.hadoop.yarn.server.timeline.util.LeveldbUtils.KeyParser)
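The anonymous Comparator in getDomains orders domains newest-first by created time, breaking ties by modified time. For reference, the same ordering can be expressed with Java 8 comparator combinators; the Domain class below is a stand-in with just the two sort keys, not the real TimelineDomain.

import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;

public class DomainSortDemo {
    // stand-in for TimelineDomain with only the two sort keys
    static class Domain {
        final Long created;
        final Long modified;
        Domain(long created, long modified) {
            this.created = created;
            this.modified = modified;
        }
        Long getCreatedTime() { return created; }
        Long getModifiedTime() { return modified; }
    }

    public static void main(String[] args) {
        List<Domain> domains = new ArrayList<>();
        domains.add(new Domain(1L, 5L));
        domains.add(new Domain(2L, 1L));
        domains.add(new Domain(1L, 9L));
        // newest-first by created time, ties broken by modified time,
        // matching the anonymous Comparator in getDomains above
        domains.sort(Comparator.comparing(Domain::getCreatedTime)
            .thenComparing(Domain::getModifiedTime)
            .reversed());
        for (Domain d : domains) {
            System.out.println(d.getCreatedTime() + " / " + d.getModifiedTime());
        }
        // prints 2/1, then 1/9, then 1/5
    }
}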

Example 14 with DBIterator

Use of org.iq80.leveldb.DBIterator in project hadoop by apache.

The class TestLeveldbIterator, method testExceptionHandling.

@Test
public void testExceptionHandling() throws Exception {
    InvocationHandler rtExcHandler = new InvocationHandler() {

        @Override
        public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
            throw new RuntimeException("forced runtime error");
        }
    };
    DBIterator dbiter = (DBIterator) Proxy.newProxyInstance(DBIterator.class.getClassLoader(), new Class[] { DBIterator.class }, rtExcHandler);
    LeveldbIterator iter = new LeveldbIterator(dbiter);
    for (CallInfo ci : RTEXC_METHODS) {
        Method method = iter.getClass().getMethod(ci.methodName, ci.argTypes);
        assertNotNull("unable to locate method " + ci.methodName, method);
        try {
            method.invoke(iter, ci.args);
            fail("operation should have thrown");
        } catch (InvocationTargetException ite) {
            Throwable exc = ite.getTargetException();
            assertTrue("Method " + ci.methodName + " threw non-DBException: " + exc, exc instanceof DBException);
            assertFalse("Method " + ci.methodName + " double-wrapped DBException", exc.getCause() instanceof DBException);
        }
    }
    // check close() throws IOException
    try {
        iter.close();
        fail("operation should have thrown");
    } catch (IOException e) {
        // expected
    }
}
Also used: DBIterator (org.iq80.leveldb.DBIterator), DBException (org.iq80.leveldb.DBException), Method (java.lang.reflect.Method), IOException (java.io.IOException), InvocationHandler (java.lang.reflect.InvocationHandler), InvocationTargetException (java.lang.reflect.InvocationTargetException), Test (org.junit.Test)
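The proxy-based test asserts two properties of Hadoop's LeveldbIterator wrapper: any RuntimeException escaping the underlying DBIterator surfaces as a DBException, and an incoming DBException is rethrown as-is rather than double-wrapped. A minimal sketch of that wrapping pattern, reduced to two methods (the real class wraps the full DBIterator surface):

import java.util.Map;
import org.iq80.leveldb.DBException;
import org.iq80.leveldb.DBIterator;

// Sketch: guard every delegate call so callers see DBException rather
// than arbitrary RuntimeExceptions, without double-wrapping.
public class GuardedIterator {
    private final DBIterator iter;

    public GuardedIterator(DBIterator iter) {
        this.iter = iter;
    }

    public boolean hasNext() throws DBException {
        try {
            return iter.hasNext();
        } catch (DBException e) {
            throw e; // already the right type; do not wrap again
        } catch (RuntimeException e) {
            throw new DBException(e.getMessage(), e);
        }
    }

    public Map.Entry<byte[], byte[]> peekNext() throws DBException {
        try {
            return iter.peekNext();
        } catch (DBException e) {
            throw e;
        } catch (RuntimeException e) {
            throw new DBException(e.getMessage(), e);
        }
    }
}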

Example 15 with DBIterator

Use of org.iq80.leveldb.DBIterator in project EventHub by Codecademy.

The class DB, method findByPrefix.

public List<String> findByPrefix(String prefix, int substringStartsAt) {
    try (DBIterator iterator = db.iterator()) {
        List<String> keys = Lists.newArrayList();
        for (iterator.seek(bytes(prefix)); iterator.hasNext(); iterator.next()) {
            String key = asString(iterator.peekNext().getKey());
            if (!key.startsWith(prefix)) {
                break;
            }
            keys.add(key.substring(substringStartsAt));
        }
        return keys;
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}
Also used: DBIterator (org.iq80.leveldb.DBIterator), JniDBFactory.asString (org.fusesource.leveldbjni.JniDBFactory.asString), IOException (java.io.IOException)
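A hypothetical call site, showing how substringStartsAt strips a fixed key prefix from the returned strings; the key scheme here is invented for illustration:

// assuming keys like "user_email:42", "user_email:43" were stored,
// and db is an instance of the DB class above
String prefix = "user_email:";
List<String> ids = db.findByPrefix(prefix, prefix.length());
// ids now holds the suffixes, e.g. ["42", "43"], with the prefix removed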

Aggregations

Types most often used together with DBIterator across the indexed examples, with co-occurrence counts:

DBIterator (org.iq80.leveldb.DBIterator): 17
DB (org.iq80.leveldb.DB): 7
WriteBatch (org.iq80.leveldb.WriteBatch): 5
JniDBFactory.asString (org.fusesource.leveldbjni.JniDBFactory.asString): 4
Map (java.util.Map): 3
ImmutableMap (com.google.common.collect.ImmutableMap): 2
ImmutableSortedMap (com.google.common.collect.ImmutableSortedMap): 2
IOException (java.io.IOException): 2
ArrayList (java.util.ArrayList): 2
LinkedHashSet (java.util.LinkedHashSet): 2
NavigableMap (java.util.NavigableMap): 2
TreeMap (java.util.TreeMap): 2
GenericObjectMapper.readReverseOrderedLong (org.apache.hadoop.yarn.server.timeline.GenericObjectMapper.readReverseOrderedLong): 2
GenericObjectMapper.writeReverseOrderedLong (org.apache.hadoop.yarn.server.timeline.GenericObjectMapper.writeReverseOrderedLong): 2
KeyBuilder (org.apache.hadoop.yarn.server.timeline.util.LeveldbUtils.KeyBuilder): 2
KeyParser (org.apache.hadoop.yarn.server.timeline.util.LeveldbUtils.KeyParser): 2
Row (co.cask.cdap.api.dataset.table.Row): 1
Scanner (co.cask.cdap.api.dataset.table.Scanner): 1
VisibleForTesting (com.google.common.annotations.VisibleForTesting): 1
InvocationHandler (java.lang.reflect.InvocationHandler): 1