
Example 6 with CacheDirective

Use of org.apache.hadoop.hdfs.protocol.CacheDirective in project hadoop by apache.

The class CacheManager, method modifyDirectiveFromEditLog.

/**
   * Modifies a directive, skipping most error checking. This is for careful
   * internal use only. modifyDirective can be non-deterministic since its error
   * checking depends on current system time, which poses a problem for edit log
   * replay.
   */
void modifyDirectiveFromEditLog(CacheDirectiveInfo info) throws InvalidRequestException {
    // Check for invalid IDs.
    Long id = info.getId();
    if (id == null) {
        throw new InvalidRequestException("Must supply an ID.");
    }
    CacheDirective prevEntry = getById(id);
    CacheDirectiveInfo newInfo = createFromInfoAndDefaults(info, prevEntry);
    removeInternal(prevEntry);
    addInternal(new CacheDirective(newInfo), getCachePool(newInfo.getPool()));
}
Also used : CacheDirectiveInfo(org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo) CacheDirective(org.apache.hadoop.hdfs.protocol.CacheDirective) InvalidRequestException(org.apache.hadoop.fs.InvalidRequestException)
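
For context, a sketch of how the replay side might invoke this method. The loader method name and the literal ID and replication values are illustrative assumptions, not the actual FSEditLogLoader code; note that modifyDirectiveFromEditLog is package-private, so the real caller lives in the same namenode package.

import java.io.IOException;
import org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo;

// Hypothetical replay helper, assumed to sit inside
// org.apache.hadoop.hdfs.server.namenode alongside CacheManager.
void replayModifyOp(CacheManager cacheManager) throws IOException {
    // Rebuild the CacheDirectiveInfo exactly as recorded in the logged op;
    // nothing here depends on the current system time, so replay stays
    // deterministic.
    CacheDirectiveInfo replayed = new CacheDirectiveInfo.Builder()
        // directive ID from the edit log (illustrative value)
        .setId(42L)
        // updated replication factor (illustrative value)
        .setReplication((short) 3)
        .build();
    cacheManager.modifyDirectiveFromEditLog(replayed);
}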

Example 7 with CacheDirective

Use of org.apache.hadoop.hdfs.protocol.CacheDirective in project hadoop by apache.

The class CacheReplicationMonitor, method rescanCacheDirectives.

/**
   * Scan all CacheDirectives.  Use the information to figure out
   * what cache replication factor each block should have.
   */
private void rescanCacheDirectives() {
    FSDirectory fsDir = namesystem.getFSDirectory();
    final long now = new Date().getTime();
    for (CacheDirective directive : cacheManager.getCacheDirectives()) {
        scannedDirectives++;
        // Skip processing this entry if it has expired
        if (directive.getExpiryTime() > 0 && directive.getExpiryTime() <= now) {
            LOG.debug("Directive {}: the directive expired at {} (now = {})", directive.getId(), directive.getExpiryTime(), now);
            continue;
        }
        String path = directive.getPath();
        INode node;
        try {
            node = fsDir.getINode(path, DirOp.READ);
        } catch (IOException e) {
            // We don't cache through symlinks or invalid paths
            LOG.debug("Directive {}: Failed to resolve path {} ({})", directive.getId(), path, e.getMessage());
            continue;
        }
        if (node == null) {
            LOG.debug("Directive {}: No inode found at {}", directive.getId(), path);
        } else if (node.isDirectory()) {
            INodeDirectory dir = node.asDirectory();
            ReadOnlyList<INode> children = dir.getChildrenList(Snapshot.CURRENT_STATE_ID);
            for (INode child : children) {
                if (child.isFile()) {
                    rescanFile(directive, child.asFile());
                }
            }
        } else if (node.isFile()) {
            rescanFile(directive, node.asFile());
        } else {
            LOG.debug("Directive {}: ignoring non-directive, non-file inode {} ", directive.getId(), node);
        }
    }
}
Also used : INodeDirectory(org.apache.hadoop.hdfs.server.namenode.INodeDirectory) INode(org.apache.hadoop.hdfs.server.namenode.INode) ReadOnlyList(org.apache.hadoop.hdfs.util.ReadOnlyList) CacheDirective(org.apache.hadoop.hdfs.protocol.CacheDirective) FSDirectory(org.apache.hadoop.hdfs.server.namenode.FSDirectory) IOException(java.io.IOException) Date(java.util.Date)
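
The expiry times compared above are set when a directive is created. A minimal sketch of building a directive with a relative expiration, assuming made-up pool and path values:

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo;
import org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo.Expiration;

CacheDirectiveInfo buildExpiringDirective() {
    // "pool1" and the path are illustrative, not values from the code above.
    return new CacheDirectiveInfo.Builder()
        .setPath(new Path("/datasets/hot/part-00000"))
        .setPool("pool1")
        .setReplication((short) 2)
        // A relative expiration is resolved to an absolute timestamp when
        // the directive is added; rescanCacheDirectives() later compares
        // that absolute time against "now" and skips expired entries.
        .setExpiration(Expiration.newRelative(60 * 60 * 1000L))
        .build();
}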

Example 8 with CacheDirective

Use of org.apache.hadoop.hdfs.protocol.CacheDirective in project hadoop by apache.

The class CacheManager, method addDirectiveFromEditLog.

/**
   * Adds a directive, skipping most error checking. This should only be called
   * internally in special scenarios like edit log replay.
   */
CacheDirectiveInfo addDirectiveFromEditLog(CacheDirectiveInfo directive) throws InvalidRequestException {
    long id = directive.getId();
    CacheDirective entry = new CacheDirective(directive);
    CachePool pool = cachePools.get(directive.getPool());
    addInternal(entry, pool);
    if (nextDirectiveId <= id) {
        nextDirectiveId = id + 1;
    }
    return entry.toInfo();
}
Also used : CacheDirective(org.apache.hadoop.hdfs.protocol.CacheDirective)
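
The nextDirectiveId bump is what prevents replayed IDs and freshly allocated IDs from colliding. A standalone sketch of the invariant:

// Simulate replaying two logged add ops whose IDs arrive out of order.
long nextDirectiveId = 1;
for (long replayedId : new long[] { 7, 5 }) {
    if (nextDirectiveId <= replayedId) {
        // Same rule as addDirectiveFromEditLog above.
        nextDirectiveId = replayedId + 1;
    }
}
// nextDirectiveId is now 8, so directives created after replay can never
// reuse an ID that already appears in the edit log.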

Example 9 with CacheDirective

Use of org.apache.hadoop.hdfs.protocol.CacheDirective in project hadoop by apache.

The class CacheManager, method addInternal.

// Shared helper (used by the RPC handlers and by edit-log replay) that
// links a directive into its pool and the by-ID and by-path indexes.
private void addInternal(CacheDirective directive, CachePool pool) {
    boolean addedDirective = pool.getDirectiveList().add(directive);
    assert addedDirective;
    directivesById.put(directive.getId(), directive);
    String path = directive.getPath();
    List<CacheDirective> directives = directivesByPath.get(path);
    if (directives == null) {
        directives = new ArrayList<CacheDirective>(1);
        directivesByPath.put(path, directives);
    }
    directives.add(directive);
    // Fix up pool stats
    CacheDirectiveStats stats = computeNeeded(directive.getPath(), directive.getReplication());
    directive.addBytesNeeded(stats.getBytesNeeded());
    directive.addFilesNeeded(stats.getFilesNeeded());
    setNeedsRescan();
}
Also used : CacheDirective(org.apache.hadoop.hdfs.protocol.CacheDirective) CacheDirectiveStats(org.apache.hadoop.hdfs.protocol.CacheDirectiveStats)
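
addInternal maintains two indexes: directivesById and the per-path list map directivesByPath. For symmetry, here is a sketch of the path-index cleanup a matching removal has to perform; it mirrors the idea behind CacheManager's removeInternal but is not the verbatim code:

import java.util.List;
import java.util.Map;

private void removeFromPathIndex(CacheDirective directive,
        Map<String, List<CacheDirective>> directivesByPath) {
    String path = directive.getPath();
    List<CacheDirective> directives = directivesByPath.get(path);
    if (directives != null && directives.remove(directive)
            && directives.isEmpty()) {
        // Drop empty buckets so the index doesn't accumulate dead paths.
        directivesByPath.remove(path);
    }
}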

Example 10 with CacheDirective

Use of org.apache.hadoop.hdfs.protocol.CacheDirective in project hadoop by apache.

The class CacheManager, method loadState.

public void loadState(PersistState s) throws IOException {
    nextDirectiveId = s.section.getNextDirectiveId();
    for (CachePoolInfoProto p : s.pools) {
        CachePoolInfo info = new CachePoolInfo(p.getPoolName());
        if (p.hasOwnerName())
            info.setOwnerName(p.getOwnerName());
        if (p.hasGroupName())
            info.setGroupName(p.getGroupName());
        if (p.hasMode())
            info.setMode(new FsPermission((short) p.getMode()));
        if (p.hasDefaultReplication()) {
            info.setDefaultReplication((short) p.getDefaultReplication());
        }
        if (p.hasLimit())
            info.setLimit(p.getLimit());
        addCachePool(info);
    }
    for (CacheDirectiveInfoProto p : s.directives) {
        // Get pool reference by looking it up in the map
        final String poolName = p.getPool();
        CacheDirective directive = new CacheDirective(p.getId(),
            new Path(p.getPath()).toUri().getPath(),
            (short) p.getReplication(), p.getExpiration().getMillis());
        addCacheDirective(poolName, directive);
    }
}
Also used : Path(org.apache.hadoop.fs.Path) CachePoolInfoProto(org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.CachePoolInfoProto) CacheDirective(org.apache.hadoop.hdfs.protocol.CacheDirective) FsPermission(org.apache.hadoop.fs.permission.FsPermission) CachePoolInfo(org.apache.hadoop.hdfs.protocol.CachePoolInfo) CacheDirectiveInfoProto(org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.CacheDirectiveInfoProto)
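
loadState delegates to an addCacheDirective(poolName, directive) helper that is not shown in this excerpt. A plausible sketch of its shape, assuming it resolves the pool by name and reuses addInternal; treat it as an illustration rather than the actual CacheManager code:

import java.io.IOException;

private void addCacheDirective(String poolName, CacheDirective directive)
        throws IOException {
    CachePool pool = cachePools.get(poolName);
    if (pool == null) {
        // Every directive persisted in the fsimage must reference a pool
        // that was loaded in the first loop above.
        throw new IOException("Directive refers to pool " + poolName
            + ", which does not exist.");
    }
    addInternal(directive, pool);
}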

Aggregations

CacheDirective (org.apache.hadoop.hdfs.protocol.CacheDirective): 11
IOException (java.io.IOException): 5
InvalidRequestException (org.apache.hadoop.fs.InvalidRequestException): 4
CacheDirectiveInfo (org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo): 4
CachePoolInfo (org.apache.hadoop.hdfs.protocol.CachePoolInfo): 2
CacheDirectiveInfoProto (org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.CacheDirectiveInfoProto): 2
CachePoolInfoProto (org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.CachePoolInfoProto): 2
ArrayList (java.util.ArrayList): 1
Date (java.util.Date): 1
BatchedListEntries (org.apache.hadoop.fs.BatchedRemoteIterator.BatchedListEntries): 1
Path (org.apache.hadoop.fs.Path): 1
FsPermission (org.apache.hadoop.fs.permission.FsPermission): 1
CacheDirectiveEntry (org.apache.hadoop.hdfs.protocol.CacheDirectiveEntry): 1
Expiration (org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo.Expiration): 1
CacheDirectiveStats (org.apache.hadoop.hdfs.protocol.CacheDirectiveStats): 1
FSDirectory (org.apache.hadoop.hdfs.server.namenode.FSDirectory): 1
CacheManagerSection (org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection): 1
INode (org.apache.hadoop.hdfs.server.namenode.INode): 1
INodeDirectory (org.apache.hadoop.hdfs.server.namenode.INodeDirectory): 1
ReadOnlyList (org.apache.hadoop.hdfs.util.ReadOnlyList): 1