Search in sources :

Example 1 with CachePoolInfoProto

use of org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.CachePoolInfoProto in project hadoop by apache.

From the class PBImageXmlWriter, the method dumpCacheManagerSection:

/**
 * Writes the cache-manager section of an fsimage as XML.
 * Reads one delimited {@code CacheManagerSection} header from the stream,
 * then the declared number of pool and directive protobuf records, emitting
 * each as a nested XML element.
 *
 * @param is fsimage input stream positioned at the cache-manager section
 * @throws IOException if the section cannot be parsed from the stream
 */
private void dumpCacheManagerSection(InputStream is) throws IOException {
    out.print("<" + CACHE_MANAGER_SECTION_NAME + ">");
    CacheManagerSection section = CacheManagerSection.parseDelimitedFrom(is);
    // Section-level counters come first; the counts drive the loops below.
    o(CACHE_MANAGER_SECTION_NEXT_DIRECTIVE_ID, section.getNextDirectiveId());
    o(CACHE_MANAGER_SECTION_NUM_DIRECTIVES, section.getNumDirectives());
    o(CACHE_MANAGER_SECTION_NUM_POOLS, section.getNumPools());
    // Pool records are stored back-to-back, delimited, after the header.
    for (int poolIdx = 0; poolIdx < section.getNumPools(); ++poolIdx) {
        CachePoolInfoProto pool = CachePoolInfoProto.parseDelimitedFrom(is);
        out.print("<" + CACHE_MANAGER_SECTION_POOL + ">");
        o(CACHE_MANAGER_SECTION_POOL_NAME, pool.getPoolName());
        o(CACHE_MANAGER_SECTION_OWNER_NAME, pool.getOwnerName());
        o(CACHE_MANAGER_SECTION_GROUP_NAME, pool.getGroupName());
        o(CACHE_MANAGER_SECTION_MODE, pool.getMode());
        o(CACHE_MANAGER_SECTION_LIMIT, pool.getLimit());
        o(CACHE_MANAGER_SECTION_MAX_RELATIVE_EXPIRY, pool.getMaxRelativeExpiry());
        out.print("</" + CACHE_MANAGER_SECTION_POOL + ">\n");
    }
    // Directive records follow the pools in the same delimited layout.
    for (int dirIdx = 0; dirIdx < section.getNumDirectives(); ++dirIdx) {
        CacheDirectiveInfoProto directive = CacheDirectiveInfoProto.parseDelimitedFrom(is);
        out.print("<" + CACHE_MANAGER_SECTION_DIRECTIVE + ">");
        o(SECTION_ID, directive.getId());
        o(SECTION_PATH, directive.getPath());
        o(SECTION_REPLICATION, directive.getReplication());
        o(CACHE_MANAGER_SECTION_POOL, directive.getPool());
        // Expiration is a nested message; emit it as a nested element.
        out.print("<" + CACHE_MANAGER_SECTION_EXPIRATION + ">");
        CacheDirectiveInfoExpirationProto expiration = directive.getExpiration();
        o(CACHE_MANAGER_SECTION_MILLIS, expiration.getMillis());
        o(CACHE_MANAGER_SECTION_RELATIVE, expiration.getIsRelative());
        out.print("</" + CACHE_MANAGER_SECTION_EXPIRATION + ">\n");
        out.print("</" + CACHE_MANAGER_SECTION_DIRECTIVE + ">\n");
    }
    out.print("</" + CACHE_MANAGER_SECTION_NAME + ">\n");
}
Also used : CachePoolInfoProto(org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.CachePoolInfoProto) CacheDirectiveInfoExpirationProto(org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.CacheDirectiveInfoExpirationProto) CacheManagerSection(org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection) CacheDirectiveInfoProto(org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.CacheDirectiveInfoProto)

Example 2 with CachePoolInfoProto

use of org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.CachePoolInfoProto in project hadoop by apache.

From the class CacheManager, the method loadState:

/**
 * Restores the cache manager's state from a previously persisted snapshot.
 * Re-registers every cache pool and cache directive contained in
 * {@code s}, copying only the protobuf fields that were actually set.
 *
 * @param s persisted section, pool, and directive protos to restore
 * @throws IOException if a pool or directive cannot be re-added
 */
public void loadState(PersistState s) throws IOException {
    nextDirectiveId = s.section.getNextDirectiveId();
    // Rebuild each pool, transferring only explicitly-present optional fields.
    for (CachePoolInfoProto poolProto : s.pools) {
        CachePoolInfo info = new CachePoolInfo(poolProto.getPoolName());
        if (poolProto.hasOwnerName()) {
            info.setOwnerName(poolProto.getOwnerName());
        }
        if (poolProto.hasGroupName()) {
            info.setGroupName(poolProto.getGroupName());
        }
        if (poolProto.hasMode()) {
            info.setMode(new FsPermission((short) poolProto.getMode()));
        }
        if (poolProto.hasDefaultReplication()) {
            info.setDefaultReplication((short) poolProto.getDefaultReplication());
        }
        if (poolProto.hasLimit()) {
            info.setLimit(poolProto.getLimit());
        }
        addCachePool(info);
    }
    // Rebuild directives; each is attached to its pool by name lookup.
    for (CacheDirectiveInfoProto directiveProto : s.directives) {
        String poolName = directiveProto.getPool();
        // Normalize the stored path through Path/URI before reconstructing.
        String path = new Path(directiveProto.getPath()).toUri().getPath();
        CacheDirective directive = new CacheDirective(
            directiveProto.getId(),
            path,
            (short) directiveProto.getReplication(),
            directiveProto.getExpiration().getMillis());
        addCacheDirective(poolName, directive);
    }
}
Also used : Path(org.apache.hadoop.fs.Path) CachePoolInfoProto(org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.CachePoolInfoProto) CacheDirective(org.apache.hadoop.hdfs.protocol.CacheDirective) FsPermission(org.apache.hadoop.fs.permission.FsPermission) CachePoolInfo(org.apache.hadoop.hdfs.protocol.CachePoolInfo) CacheDirectiveInfoProto(org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.CacheDirectiveInfoProto)

Example 3 with CachePoolInfoProto

use of org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.CachePoolInfoProto in project hadoop by apache.

From the class CacheManager, the method saveState:

/**
 * Captures the cache manager's current state as a persistable snapshot.
 * Serializes every cache pool and cache directive into protobuf form,
 * setting only the optional fields whose values are present, so that
 * {@link #loadState} can restore an identical state.
 *
 * @return a {@code PersistState} holding the section header, pool protos,
 *         and directive protos
 * @throws IOException declared for symmetry with callers; the visible body
 *         does not itself perform I/O
 */
public PersistState saveState() throws IOException {
    ArrayList<CachePoolInfoProto> pools = Lists.newArrayListWithCapacity(cachePools.size());
    ArrayList<CacheDirectiveInfoProto> directives = Lists.newArrayListWithCapacity(directivesById.size());
    for (CachePool pool : cachePools.values()) {
        CachePoolInfo p = pool.getInfo(true);
        CachePoolInfoProto.Builder b = CachePoolInfoProto.newBuilder().setPoolName(p.getPoolName());
        if (p.getOwnerName() != null) {
            b.setOwnerName(p.getOwnerName());
        }
        if (p.getGroupName() != null) {
            b.setGroupName(p.getGroupName());
        }
        if (p.getMode() != null) {
            b.setMode(p.getMode().toShort());
        }
        // FIX: persist defaultReplication; loadState reads it
        // (hasDefaultReplication), so omitting it here silently dropped the
        // pool's default replication across a save/load round trip.
        if (p.getDefaultReplication() != null) {
            b.setDefaultReplication(p.getDefaultReplication());
        }
        if (p.getLimit() != null) {
            b.setLimit(p.getLimit());
        }
        pools.add(b.build());
    }
    for (CacheDirective directive : directivesById.values()) {
        CacheDirectiveInfo info = directive.toInfo();
        CacheDirectiveInfoProto.Builder b = CacheDirectiveInfoProto.newBuilder().setId(info.getId());
        if (info.getPath() != null) {
            b.setPath(info.getPath().toUri().getPath());
        }
        if (info.getReplication() != null) {
            b.setReplication(info.getReplication());
        }
        if (info.getPool() != null) {
            b.setPool(info.getPool());
        }
        Expiration expiry = info.getExpiration();
        if (expiry != null) {
            // Persisted expirations must be absolute timestamps; relative
            // expirations are resolved before reaching this point.
            assert (!expiry.isRelative());
            b.setExpiration(PBHelperClient.convert(expiry));
        }
        directives.add(b.build());
    }
    CacheManagerSection s = CacheManagerSection.newBuilder()
        .setNextDirectiveId(nextDirectiveId)
        .setNumPools(pools.size())
        .setNumDirectives(directives.size())
        .build();
    return new PersistState(s, pools, directives);
}
Also used : CachePoolInfoProto(org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.CachePoolInfoProto) Expiration(org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo.Expiration) CacheDirectiveInfoProto(org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.CacheDirectiveInfoProto) CacheManagerSection(org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection) CacheDirectiveInfo(org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo) CacheDirective(org.apache.hadoop.hdfs.protocol.CacheDirective) CachePoolInfo(org.apache.hadoop.hdfs.protocol.CachePoolInfo)

Aggregations

CacheDirectiveInfoProto (org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.CacheDirectiveInfoProto)3 CachePoolInfoProto (org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.CachePoolInfoProto)3 CacheDirective (org.apache.hadoop.hdfs.protocol.CacheDirective)2 CachePoolInfo (org.apache.hadoop.hdfs.protocol.CachePoolInfo)2 CacheManagerSection (org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection)2 Path (org.apache.hadoop.fs.Path)1 FsPermission (org.apache.hadoop.fs.permission.FsPermission)1 CacheDirectiveInfo (org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo)1 Expiration (org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo.Expiration)1 CacheDirectiveInfoExpirationProto (org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.CacheDirectiveInfoExpirationProto)1