Search in sources :

Example 11 with CacheDirectiveInfo

use of org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo in project hadoop by apache.

The following snippet is from the class CacheManager, method saveState:

/**
 * Serializes the current cache-pool and cache-directive state into
 * protobuf messages for persistence in the fsimage.
 *
 * @return a PersistState bundling the section header together with the
 *         serialized pool and directive lists.
 * @throws IOException declared by the persistence contract.
 */
public PersistState saveState() throws IOException {
    ArrayList<CachePoolInfoProto> poolProtos = Lists.newArrayListWithCapacity(cachePools.size());
    ArrayList<CacheDirectiveInfoProto> directiveProtos = Lists.newArrayListWithCapacity(directivesById.size());
    for (CachePool pool : cachePools.values()) {
        CachePoolInfo info = pool.getInfo(true);
        CachePoolInfoProto.Builder builder = CachePoolInfoProto.newBuilder().setPoolName(info.getPoolName());
        // Optional pool attributes are serialized only when present.
        if (info.getOwnerName() != null) {
            builder.setOwnerName(info.getOwnerName());
        }
        if (info.getGroupName() != null) {
            builder.setGroupName(info.getGroupName());
        }
        if (info.getMode() != null) {
            builder.setMode(info.getMode().toShort());
        }
        if (info.getLimit() != null) {
            builder.setLimit(info.getLimit());
        }
        poolProtos.add(builder.build());
    }
    for (CacheDirective directive : directivesById.values()) {
        CacheDirectiveInfo info = directive.toInfo();
        CacheDirectiveInfoProto.Builder builder = CacheDirectiveInfoProto.newBuilder().setId(info.getId());
        // Optional directive attributes are serialized only when present.
        if (info.getPath() != null) {
            builder.setPath(info.getPath().toUri().getPath());
        }
        if (info.getReplication() != null) {
            builder.setReplication(info.getReplication());
        }
        if (info.getPool() != null) {
            builder.setPool(info.getPool());
        }
        Expiration expiry = info.getExpiration();
        if (expiry != null) {
            // Persisted expirations are expected to be absolute timestamps.
            assert (!expiry.isRelative());
            builder.setExpiration(PBHelperClient.convert(expiry));
        }
        directiveProtos.add(builder.build());
    }
    CacheManagerSection section = CacheManagerSection.newBuilder()
        .setNextDirectiveId(nextDirectiveId)
        .setNumPools(poolProtos.size())
        .setNumDirectives(directiveProtos.size())
        .build();
    return new PersistState(section, poolProtos, directiveProtos);
}
Also used : CachePoolInfoProto(org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.CachePoolInfoProto) Expiration(org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo.Expiration) CacheDirectiveInfoProto(org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.CacheDirectiveInfoProto) CacheManagerSection(org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection) CacheDirectiveInfo(org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo) CacheDirective(org.apache.hadoop.hdfs.protocol.CacheDirective) CachePoolInfo(org.apache.hadoop.hdfs.protocol.CachePoolInfo)

Example 12 with CacheDirectiveInfo

use of org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo in project hadoop by apache.

The following snippet is from the class FSNDNCacheOp, method addCacheDirective:

/**
 * Adds a new cache directive on behalf of a client and records the
 * resulting (server-completed) directive in the edit log.
 *
 * @param fsn the namesystem whose edit log receives the operation
 * @param cacheManager the cache manager that validates and stores the directive
 * @param directive the client-supplied directive; must not carry an ID
 * @param flags cache flags supplied by the client
 * @param logRetryCache whether to record this call in the retry cache
 * @return the effective directive, including the server-assigned ID
 * @throws IOException if the client supplied an ID, or if the add fails
 */
static CacheDirectiveInfo addCacheDirective(FSNamesystem fsn, CacheManager cacheManager, CacheDirectiveInfo directive, EnumSet<CacheFlag> flags, boolean logRetryCache) throws IOException {
    final FSPermissionChecker pc = getFsPermissionChecker(fsn);
    // Directive IDs are assigned by the server; reject any client-supplied ID.
    if (directive.getId() != null) {
        throw new IOException("addDirective: you cannot specify an ID " + "for this operation.");
    }
    final CacheDirectiveInfo effective = cacheManager.addDirective(directive, pc, flags);
    fsn.getEditLog().logAddCacheDirectiveInfo(effective, logRetryCache);
    return effective;
}
Also used : CacheDirectiveInfo(org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo) IOException(java.io.IOException)

Example 13 with CacheDirectiveInfo

use of org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo in project hadoop by apache.

The following snippet is from the class FSNamesystem, method addCacheDirective:

/**
 * Client-facing entry point for adding a cache directive.
 * Optionally waits for a cache rescan, then performs the add under the
 * namesystem write lock and syncs the edit log on success.
 *
 * @param directive the client-supplied directive (ID must be unset;
 *                  enforced by FSNDNCacheOp.addCacheDirective)
 * @param flags if CacheFlag.FORCE is absent, blocks until a pending
 *              rescan completes before taking the lock
 * @param logRetryCache whether to record this call in the retry cache
 * @return the server-assigned ID of the new directive
 * @throws IOException on validation failure, safe-mode, or access denial
 */
long addCacheDirective(CacheDirectiveInfo directive, EnumSet<CacheFlag> flags, boolean logRetryCache) throws IOException {
    final String operationName = "addCacheDirective";
    CacheDirectiveInfo effectiveDirective = null;
    boolean success = false;
    String effectiveDirectiveStr;
    // Must happen before taking the write lock: waiting under the lock
    // would block all other namesystem writers during the rescan.
    if (!flags.contains(CacheFlag.FORCE)) {
        cacheManager.waitForRescanIfNeeded();
    }
    writeLock();
    try {
        checkOperation(OperationCategory.WRITE);
        checkNameNodeSafeMode("Cannot add cache directive");
        effectiveDirective = FSNDNCacheOp.addCacheDirective(this, cacheManager, directive, flags, logRetryCache);
        success = true;
    } catch (AccessControlException ace) {
        // Audit the denied attempt (success is still false here), then rethrow.
        logAuditEvent(success, operationName, null, null, null);
        throw ace;
    } finally {
        writeUnlock(operationName);
        // logSync must run outside the write lock, after it is released.
        if (success) {
            getEditLog().logSync();
        }
    }
    // Only reached on success; non-ACE failures propagate out of the try above.
    effectiveDirectiveStr = effectiveDirective.toString();
    logAuditEvent(success, operationName, effectiveDirectiveStr, null, null);
    return effectiveDirective.getId();
}
Also used : CacheDirectiveInfo(org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo) AccessControlException(org.apache.hadoop.security.AccessControlException) SnapshotAccessControlException(org.apache.hadoop.hdfs.protocol.SnapshotAccessControlException)

Example 14 with CacheDirectiveInfo

use of org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo in project hadoop by apache.

The following snippet is from the class FSImageSerialization, method readCacheDirectiveInfo (DataInput overload):

/**
 * Deserializes a CacheDirectiveInfo from a DataInput stream.
 * Layout: id (long), flags (int), then each optional field in ascending
 * flag-bit order — 0x1 path, 0x2 replication, 0x4 pool, 0x8 absolute
 * expiration time.
 *
 * @param in the stream to read from
 * @return the reconstructed directive
 * @throws IOException if unknown flag bits are set or the read fails
 */
public static CacheDirectiveInfo readCacheDirectiveInfo(DataInput in) throws IOException {
    CacheDirectiveInfo.Builder builder = new CacheDirectiveInfo.Builder();
    builder.setId(readLong(in));
    int flags = in.readInt();
    // Fail fast on unrecognized flag bits BEFORE consuming any optional
    // fields: reading fields under unknown flag semantics would
    // desynchronize the stream and mask the real corruption.
    if ((flags & ~0xF) != 0) {
        throw new IOException("unknown flags set in " + "ModifyCacheDirectiveInfoOp: " + flags);
    }
    if ((flags & 0x1) != 0) {
        builder.setPath(new Path(readString(in)));
    }
    if ((flags & 0x2) != 0) {
        builder.setReplication(readShort(in));
    }
    if ((flags & 0x4) != 0) {
        builder.setPool(readString(in));
    }
    if ((flags & 0x8) != 0) {
        builder.setExpiration(CacheDirectiveInfo.Expiration.newAbsolute(readLong(in)));
    }
    return builder.build();
}
Also used : Path(org.apache.hadoop.fs.Path) CacheDirectiveInfo(org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo) IOException(java.io.IOException)

Example 15 with CacheDirectiveInfo

use of org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo in project hadoop by apache.

The following snippet is from the class FSImageSerialization, method readCacheDirectiveInfo (Stanza overload):

/**
 * Deserializes a CacheDirectiveInfo from an XML stanza.
 * The ID element is mandatory; PATH, REPLICATION, POOL, and EXPIRATION
 * are each applied only when present in the stanza.
 *
 * @param st the stanza to read from
 * @return the reconstructed directive
 * @throws InvalidXmlException if a required element is missing or malformed
 */
public static CacheDirectiveInfo readCacheDirectiveInfo(Stanza st) throws InvalidXmlException {
    CacheDirectiveInfo.Builder result = new CacheDirectiveInfo.Builder();
    result.setId(Long.parseLong(st.getValue("ID")));
    final String pathValue = st.getValueOrNull("PATH");
    if (pathValue != null) {
        result.setPath(new Path(pathValue));
    }
    final String replicationValue = st.getValueOrNull("REPLICATION");
    if (replicationValue != null) {
        result.setReplication(Short.parseShort(replicationValue));
    }
    final String poolValue = st.getValueOrNull("POOL");
    if (poolValue != null) {
        result.setPool(poolValue);
    }
    final String expirationValue = st.getValueOrNull("EXPIRATION");
    if (expirationValue != null) {
        // EXPIRATION is serialized as an absolute epoch-millis long.
        result.setExpiration(CacheDirectiveInfo.Expiration.newAbsolute(Long.parseLong(expirationValue)));
    }
    return result.build();
}
Also used : Path(org.apache.hadoop.fs.Path) CacheDirectiveInfo(org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo)

Aggregations

CacheDirectiveInfo (org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo)24 Path (org.apache.hadoop.fs.Path)15 CachePoolInfo (org.apache.hadoop.hdfs.protocol.CachePoolInfo)10 IOException (java.io.IOException)9 CacheDirectiveEntry (org.apache.hadoop.hdfs.protocol.CacheDirectiveEntry)9 Test (org.junit.Test)9 InvalidRequestException (org.apache.hadoop.fs.InvalidRequestException)6 AccessControlException (org.apache.hadoop.security.AccessControlException)6 FsPermission (org.apache.hadoop.fs.permission.FsPermission)5 DistributedFileSystem (org.apache.hadoop.hdfs.DistributedFileSystem)4 CacheDirective (org.apache.hadoop.hdfs.protocol.CacheDirective)4 CacheFlag (org.apache.hadoop.fs.CacheFlag)3 CachePoolEntry (org.apache.hadoop.hdfs.protocol.CachePoolEntry)3 ArrayList (java.util.ArrayList)2 Date (java.util.Date)2 ServiceException (com.google.protobuf.ServiceException)1 HashSet (java.util.HashSet)1 LinkedList (java.util.LinkedList)1 List (java.util.List)1 BatchedListEntries (org.apache.hadoop.fs.BatchedRemoteIterator.BatchedListEntries)1