Example 1 with CacheDirective

Use of org.apache.hadoop.hdfs.protocol.CacheDirective in project hadoop by apache.

From the class CacheManager, the method listCacheDirectives:

public BatchedListEntries<CacheDirectiveEntry> listCacheDirectives(long prevId, CacheDirectiveInfo filter, FSPermissionChecker pc) throws IOException {
    assert namesystem.hasReadLock();
    final int NUM_PRE_ALLOCATED_ENTRIES = 16;
    String filterPath = null;
    if (filter.getPath() != null) {
        filterPath = validatePath(filter);
    }
    if (filter.getReplication() != null) {
        throw new InvalidRequestException("Filtering by replication is unsupported.");
    }
    // Querying for a single ID
    final Long id = filter.getId();
    if (id != null) {
        if (!directivesById.containsKey(id)) {
            throw new InvalidRequestException("Did not find requested id " + id);
        }
        // Since we use a tailMap on directivesById, setting prev to id-1 gets
        // us the directive with the id (if present)
        prevId = id - 1;
    }
    ArrayList<CacheDirectiveEntry> replies = new ArrayList<CacheDirectiveEntry>(NUM_PRE_ALLOCATED_ENTRIES);
    int numReplies = 0;
    SortedMap<Long, CacheDirective> tailMap = directivesById.tailMap(prevId + 1);
    for (Entry<Long, CacheDirective> cur : tailMap.entrySet()) {
        if (numReplies >= maxListCacheDirectivesNumResponses) {
            return new BatchedListEntries<CacheDirectiveEntry>(replies, true);
        }
        CacheDirective curDirective = cur.getValue();
        CacheDirectiveInfo info = curDirective.toInfo();
        // If the requested ID is present, it should be the first item.
        // Hence, if the ID does not match, we've already encountered the
        // item and should break out.
        if (id != null && !(info.getId().equals(id))) {
            break;
        }
        if (filter.getPool() != null && !info.getPool().equals(filter.getPool())) {
            continue;
        }
        if (filterPath != null && !info.getPath().toUri().getPath().equals(filterPath)) {
            continue;
        }
        boolean hasPermission = true;
        if (pc != null) {
            try {
                pc.checkPermission(curDirective.getPool(), FsAction.READ);
            } catch (AccessControlException e) {
                hasPermission = false;
            }
        }
        if (hasPermission) {
            replies.add(new CacheDirectiveEntry(info, curDirective.toStats()));
            numReplies++;
        }
    }
    return new BatchedListEntries<CacheDirectiveEntry>(replies, false);
}
Also used: ArrayList(java.util.ArrayList) AccessControlException(org.apache.hadoop.security.AccessControlException) CacheDirectiveInfo(org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo) CacheDirective(org.apache.hadoop.hdfs.protocol.CacheDirective) BatchedListEntries(org.apache.hadoop.fs.BatchedRemoteIterator.BatchedListEntries) CacheDirectiveEntry(org.apache.hadoop.hdfs.protocol.CacheDirectiveEntry) InvalidRequestException(org.apache.hadoop.fs.InvalidRequestException)
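
For context, the client-facing counterpart of this NameNode-side method is DistributedFileSystem.listCacheDirectives. The following is a minimal, self-contained sketch, not part of the original listing, of how a caller might page through directives filtered by pool; the pool name "testPool" and the default configuration are illustrative assumptions.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.protocol.CacheDirectiveEntry;
import org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo;

public class ListCacheDirectivesSketch {
    public static void main(String[] args) throws Exception {
        // Assumes fs.defaultFS points at an HDFS cluster.
        Configuration conf = new Configuration();
        DistributedFileSystem dfs = (DistributedFileSystem) FileSystem.get(conf);
        // Filter on a pool; fields left unset (path, id) are not filtered on.
        CacheDirectiveInfo filter = new CacheDirectiveInfo.Builder()
            .setPool("testPool")
            .build();
        // Each batch the iterator fetches is produced by the server-side
        // listCacheDirectives shown above.
        RemoteIterator<CacheDirectiveEntry> it = dfs.listCacheDirectives(filter);
        while (it.hasNext()) {
            CacheDirectiveEntry entry = it.next();
            System.out.println(entry.getInfo().getId() + "\t" + entry.getInfo().getPath());
        }
    }
}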

Example 2 with CacheDirective

Use of org.apache.hadoop.hdfs.protocol.CacheDirective in project hadoop by apache.

From the class CacheManager, the method removeDirective:

public void removeDirective(long id, FSPermissionChecker pc) throws IOException {
    assert namesystem.hasWriteLock();
    try {
        CacheDirective directive = getById(id);
        checkWritePermission(pc, directive.getPool());
        removeInternal(directive);
    } catch (IOException e) {
        LOG.warn("removeDirective of " + id + " failed: ", e);
        throw e;
    }
    LOG.info("removeDirective of " + id + " successful.");
}
Also used: CacheDirective(org.apache.hadoop.hdfs.protocol.CacheDirective) IOException(java.io.IOException)
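
The client-side counterpart is DistributedFileSystem.removeCacheDirective(long). A minimal sketch, reusing the dfs handle from the sketch under Example 1; the directive ID 42 is an illustrative assumption:

// Illustrative ID; the NameNode's removeDirective (above) checks write
// permission on the owning pool before removing the directive.
long directiveId = 42L;
dfs.removeCacheDirective(directiveId);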

Example 3 with CacheDirective

Use of org.apache.hadoop.hdfs.protocol.CacheDirective in project hadoop by apache.

From the class CacheManager, the method modifyDirective:

public void modifyDirective(CacheDirectiveInfo info, FSPermissionChecker pc, EnumSet<CacheFlag> flags) throws IOException {
    assert namesystem.hasWriteLock();
    String idString = (info.getId() == null) ? "(null)" : info.getId().toString();
    try {
        // Check for invalid IDs.
        Long id = info.getId();
        if (id == null) {
            throw new InvalidRequestException("Must supply an ID.");
        }
        CacheDirective prevEntry = getById(id);
        checkWritePermission(pc, prevEntry.getPool());
        // Fill in defaults
        CacheDirectiveInfo infoWithDefaults = createFromInfoAndDefaults(info, prevEntry);
        CacheDirectiveInfo.Builder builder = new CacheDirectiveInfo.Builder(infoWithDefaults);
        // Do validation
        validatePath(infoWithDefaults);
        validateReplication(infoWithDefaults, (short) -1);
        // Need to test the pool being set here to avoid rejecting a modify for a
        // directive that's already been forced into a pool
        CachePool srcPool = prevEntry.getPool();
        CachePool destPool = getCachePool(validatePoolName(infoWithDefaults));
        if (!srcPool.getPoolName().equals(destPool.getPoolName())) {
            checkWritePermission(pc, destPool);
            if (!flags.contains(CacheFlag.FORCE)) {
                checkLimit(destPool, infoWithDefaults.getPath().toUri().getPath(), infoWithDefaults.getReplication());
            }
        }
        // Verify the expiration against the destination pool
        validateExpiryTime(infoWithDefaults, destPool.getMaxRelativeExpiryMs());
        // Indicate changes to the CRM
        setNeedsRescan();
        // Validation passed
        removeInternal(prevEntry);
        addInternal(new CacheDirective(builder.build()), destPool);
    } catch (IOException e) {
        LOG.warn("modifyDirective of " + idString + " failed: ", e);
        throw e;
    }
    LOG.info("modifyDirective of {} successfully applied {}.", idString, info);
}
Also used: CacheDirectiveInfo(org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo) CacheDirective(org.apache.hadoop.hdfs.protocol.CacheDirective) InvalidRequestException(org.apache.hadoop.fs.InvalidRequestException) IOException(java.io.IOException)
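
Clients reach this method through DistributedFileSystem.modifyCacheDirective. A minimal sketch, reusing the setup from the Example 1 sketch; the ID and new replication are illustrative. As the createFromInfoAndDefaults call above shows, fields left unset keep their previous values:

// Raise the replication of directive 42 to 3; pool and path are left
// unset, so they are filled in from the existing directive.
CacheDirectiveInfo change = new CacheDirectiveInfo.Builder()
    .setId(42L)
    .setReplication((short) 3)
    .build();
dfs.modifyCacheDirective(change);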

Example 4 with CacheDirective

Use of org.apache.hadoop.hdfs.protocol.CacheDirective in project hadoop by apache.

From the class CacheManager, the method addDirective:

public CacheDirectiveInfo addDirective(CacheDirectiveInfo info, FSPermissionChecker pc, EnumSet<CacheFlag> flags) throws IOException {
    assert namesystem.hasWriteLock();
    CacheDirective directive;
    try {
        CachePool pool = getCachePool(validatePoolName(info));
        checkWritePermission(pc, pool);
        String path = validatePath(info);
        short replication = validateReplication(info, pool.getDefaultReplication());
        long expiryTime = validateExpiryTime(info, pool.getMaxRelativeExpiryMs());
        // Do quota validation if required
        if (!flags.contains(CacheFlag.FORCE)) {
            checkLimit(pool, path, replication);
        }
        // All validation passed
        // Add a new entry with the next available ID.
        long id = getNextDirectiveId();
        directive = new CacheDirective(id, path, replication, expiryTime);
        addInternal(directive, pool);
    } catch (IOException e) {
        LOG.warn("addDirective of " + info + " failed: ", e);
        throw e;
    }
    LOG.info("addDirective of {} successful.", info);
    return directive.toInfo();
}
Also used: CacheDirective(org.apache.hadoop.hdfs.protocol.CacheDirective) IOException(java.io.IOException)
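
The client entry point is DistributedFileSystem.addCacheDirective, which returns the ID assigned by getNextDirectiveId() above. A minimal sketch, assuming the Example 1 setup plus an import for org.apache.hadoop.fs.Path; the path, pool, and replication are illustrative:

// Ask HDFS to cache /data/hot with 2 cached replicas in pool "testPool".
CacheDirectiveInfo info = new CacheDirectiveInfo.Builder()
    .setPath(new Path("/data/hot"))
    .setPool("testPool")
    .setReplication((short) 2)
    .build();
long newId = dfs.addCacheDirective(info);
// To skip the pool-quota check (the CacheFlag.FORCE branch above), pass
// flags as a second argument:
//   dfs.addCacheDirective(info, EnumSet.of(CacheFlag.FORCE));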

Example 5 with CacheDirective

Use of org.apache.hadoop.hdfs.protocol.CacheDirective in project hadoop by apache.

From the class CacheManager, the method removeCachePool:

/**
   * Remove a cache pool.
   * 
   * Only the superuser should be able to call this function.
   *
   * @param poolName
   *          The name for the cache pool to remove.
   */
public void removeCachePool(String poolName) throws IOException {
    assert namesystem.hasWriteLock();
    try {
        CachePoolInfo.validateName(poolName);
        CachePool pool = cachePools.remove(poolName);
        if (pool == null) {
            throw new InvalidRequestException("Cannot remove non-existent cache pool " + poolName);
        }
        // Remove all directives in this pool.
        Iterator<CacheDirective> iter = pool.getDirectiveList().iterator();
        while (iter.hasNext()) {
            CacheDirective directive = iter.next();
            directivesByPath.remove(directive.getPath());
            directivesById.remove(directive.getId());
            iter.remove();
        }
        setNeedsRescan();
    } catch (IOException e) {
        LOG.info("removeCachePool of " + poolName + " failed: ", e);
        throw e;
    }
    LOG.info("removeCachePool of " + poolName + " successful.");
}
Also used: CacheDirective(org.apache.hadoop.hdfs.protocol.CacheDirective) InvalidRequestException(org.apache.hadoop.fs.InvalidRequestException) IOException(java.io.IOException)
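
Pools themselves are managed through DistributedFileSystem.addCachePool and removeCachePool. A minimal sketch, assuming the Example 1 setup, an import for org.apache.hadoop.hdfs.protocol.CachePoolInfo, and superuser privileges; the pool name is illustrative:

// Create a pool, then remove it. As removeCachePool (above) shows,
// removal also drops every directive cached through the pool.
dfs.addCachePool(new CachePoolInfo("testPool"));
dfs.removeCachePool("testPool");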

Aggregations

CacheDirective (org.apache.hadoop.hdfs.protocol.CacheDirective): 11
IOException (java.io.IOException): 5
InvalidRequestException (org.apache.hadoop.fs.InvalidRequestException): 4
CacheDirectiveInfo (org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo): 4
CachePoolInfo (org.apache.hadoop.hdfs.protocol.CachePoolInfo): 2
CacheDirectiveInfoProto (org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.CacheDirectiveInfoProto): 2
CachePoolInfoProto (org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.CachePoolInfoProto): 2
ArrayList (java.util.ArrayList): 1
Date (java.util.Date): 1
BatchedListEntries (org.apache.hadoop.fs.BatchedRemoteIterator.BatchedListEntries): 1
Path (org.apache.hadoop.fs.Path): 1
FsPermission (org.apache.hadoop.fs.permission.FsPermission): 1
CacheDirectiveEntry (org.apache.hadoop.hdfs.protocol.CacheDirectiveEntry): 1
Expiration (org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo.Expiration): 1
CacheDirectiveStats (org.apache.hadoop.hdfs.protocol.CacheDirectiveStats): 1
FSDirectory (org.apache.hadoop.hdfs.server.namenode.FSDirectory): 1
CacheManagerSection (org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection): 1
INode (org.apache.hadoop.hdfs.server.namenode.INode): 1
INodeDirectory (org.apache.hadoop.hdfs.server.namenode.INodeDirectory): 1
ReadOnlyList (org.apache.hadoop.hdfs.util.ReadOnlyList): 1