Search in sources:

Example 11 with CacheDirective

Use of org.apache.hadoop.hdfs.protocol.CacheDirective in the Apache Hadoop project.

The saveState method of the CacheManager class.

/**
 * Serializes the current cache-management state (all cache pools and all
 * cache directives) into protobuf form for persisting in the FSImage.
 *
 * @return a {@link PersistState} bundling the CacheManagerSection header
 *         (next directive id plus pool/directive counts) with the protobuf
 *         representations of every pool and directive.
 * @throws IOException declared for callers in the FSImage save path;
 *         not thrown directly by this method.
 */
public PersistState saveState() throws IOException {
    ArrayList<CachePoolInfoProto> pools =
        Lists.newArrayListWithCapacity(cachePools.size());
    ArrayList<CacheDirectiveInfoProto> directives =
        Lists.newArrayListWithCapacity(directivesById.size());
    for (CachePool pool : cachePools.values()) {
        pools.add(buildPoolProto(pool));
    }
    for (CacheDirective directive : directivesById.values()) {
        directives.add(buildDirectiveProto(directive));
    }
    CacheManagerSection s = CacheManagerSection.newBuilder()
        .setNextDirectiveId(nextDirectiveId)
        .setNumPools(pools.size())
        .setNumDirectives(directives.size())
        .build();
    return new PersistState(s, pools, directives);
}

/** Converts one CachePool to its protobuf form, writing only fields that are set. */
private static CachePoolInfoProto buildPoolProto(CachePool pool) {
    // getInfo(true) — presumably returns the full pool metadata (owner, group,
    // mode, limit) rather than just the name; TODO confirm against CachePool.
    CachePoolInfo p = pool.getInfo(true);
    CachePoolInfoProto.Builder b =
        CachePoolInfoProto.newBuilder().setPoolName(p.getPoolName());
    // Optional fields are skipped when null so the persisted proto stays sparse.
    if (p.getOwnerName() != null) {
        b.setOwnerName(p.getOwnerName());
    }
    if (p.getGroupName() != null) {
        b.setGroupName(p.getGroupName());
    }
    if (p.getMode() != null) {
        b.setMode(p.getMode().toShort());
    }
    if (p.getLimit() != null) {
        b.setLimit(p.getLimit());
    }
    return b.build();
}

/** Converts one CacheDirective to its protobuf form, writing only fields that are set. */
private static CacheDirectiveInfoProto buildDirectiveProto(CacheDirective directive) {
    CacheDirectiveInfo info = directive.toInfo();
    CacheDirectiveInfoProto.Builder b =
        CacheDirectiveInfoProto.newBuilder().setId(info.getId());
    if (info.getPath() != null) {
        // Only the path component of the URI is persisted.
        b.setPath(info.getPath().toUri().getPath());
    }
    if (info.getReplication() != null) {
        b.setReplication(info.getReplication());
    }
    if (info.getPool() != null) {
        b.setPool(info.getPool());
    }
    Expiration expiry = info.getExpiration();
    if (expiry != null) {
        // The original asserts absoluteness here — relative expirations are
        // presumably resolved before directives reach this state; TODO confirm.
        assert (!expiry.isRelative());
        b.setExpiration(PBHelperClient.convert(expiry));
    }
    return b.build();
}
Also used : CachePoolInfoProto(org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.CachePoolInfoProto) Expiration(org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo.Expiration) CacheDirectiveInfoProto(org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.CacheDirectiveInfoProto) CacheManagerSection(org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection) CacheDirectiveInfo(org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo) CacheDirective(org.apache.hadoop.hdfs.protocol.CacheDirective) CachePoolInfo(org.apache.hadoop.hdfs.protocol.CachePoolInfo)

Aggregations

CacheDirective (org.apache.hadoop.hdfs.protocol.CacheDirective)11 IOException (java.io.IOException)5 InvalidRequestException (org.apache.hadoop.fs.InvalidRequestException)4 CacheDirectiveInfo (org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo)4 CachePoolInfo (org.apache.hadoop.hdfs.protocol.CachePoolInfo)2 CacheDirectiveInfoProto (org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.CacheDirectiveInfoProto)2 CachePoolInfoProto (org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.CachePoolInfoProto)2 ArrayList (java.util.ArrayList)1 Date (java.util.Date)1 BatchedListEntries (org.apache.hadoop.fs.BatchedRemoteIterator.BatchedListEntries)1 Path (org.apache.hadoop.fs.Path)1 FsPermission (org.apache.hadoop.fs.permission.FsPermission)1 CacheDirectiveEntry (org.apache.hadoop.hdfs.protocol.CacheDirectiveEntry)1 Expiration (org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo.Expiration)1 CacheDirectiveStats (org.apache.hadoop.hdfs.protocol.CacheDirectiveStats)1 FSDirectory (org.apache.hadoop.hdfs.server.namenode.FSDirectory)1 CacheManagerSection (org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection)1 INode (org.apache.hadoop.hdfs.server.namenode.INode)1 INodeDirectory (org.apache.hadoop.hdfs.server.namenode.INodeDirectory)1 ReadOnlyList (org.apache.hadoop.hdfs.util.ReadOnlyList)1