Use of org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo in project hadoop by apache.
The class CacheManager, method saveState().
public PersistState saveState() throws IOException {
  ArrayList<CachePoolInfoProto> pools =
      Lists.newArrayListWithCapacity(cachePools.size());
  ArrayList<CacheDirectiveInfoProto> directives =
      Lists.newArrayListWithCapacity(directivesById.size());
  // Convert each cache pool to its protobuf form, setting only the
  // fields that are actually present.
  for (CachePool pool : cachePools.values()) {
    CachePoolInfo p = pool.getInfo(true);
    CachePoolInfoProto.Builder b = CachePoolInfoProto.newBuilder()
        .setPoolName(p.getPoolName());
    if (p.getOwnerName() != null) {
      b.setOwnerName(p.getOwnerName());
    }
    if (p.getGroupName() != null) {
      b.setGroupName(p.getGroupName());
    }
    if (p.getMode() != null) {
      b.setMode(p.getMode().toShort());
    }
    if (p.getLimit() != null) {
      b.setLimit(p.getLimit());
    }
    pools.add(b.build());
  }
  // Convert each directive; any expiration stored on a directive must
  // already be absolute by the time it is persisted.
  for (CacheDirective directive : directivesById.values()) {
    CacheDirectiveInfo info = directive.toInfo();
    CacheDirectiveInfoProto.Builder b = CacheDirectiveInfoProto.newBuilder()
        .setId(info.getId());
    if (info.getPath() != null) {
      b.setPath(info.getPath().toUri().getPath());
    }
    if (info.getReplication() != null) {
      b.setReplication(info.getReplication());
    }
    if (info.getPool() != null) {
      b.setPool(info.getPool());
    }
    Expiration expiry = info.getExpiration();
    if (expiry != null) {
      assert (!expiry.isRelative());
      b.setExpiration(PBHelperClient.convert(expiry));
    }
    directives.add(b.build());
  }
  // The section header records the next directive ID and entry counts.
  CacheManagerSection s = CacheManagerSection.newBuilder()
      .setNextDirectiveId(nextDirectiveId)
      .setNumPools(pools.size())
      .setNumDirectives(directives.size())
      .build();
  return new PersistState(s, pools, directives);
}
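The assert in the directive loop holds because CacheManager normalizes relative expirations to absolute times when a directive is added, so only absolute instants reach the persisted state. A minimal sketch of the distinction, using the public CacheDirectiveInfo.Expiration API:

import java.util.Date;
import org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo;

public class ExpirationKinds {
  public static void main(String[] args) {
    // Relative: "one hour from now". Convenient for clients, but not
    // what saveState() expects to find on a stored directive.
    CacheDirectiveInfo.Expiration relative =
        CacheDirectiveInfo.Expiration.newRelative(60L * 60 * 1000);
    // Absolute: a fixed wall-clock instant, safe to persist across restarts.
    CacheDirectiveInfo.Expiration absolute =
        CacheDirectiveInfo.Expiration.newAbsolute(
            new Date().getTime() + 60L * 60 * 1000);
    System.out.println(relative.isRelative());  // true
    System.out.println(absolute.isRelative());  // false
  }
}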
Use of org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo in project hadoop by apache.
The class FSNDNCacheOp, method addCacheDirective().
static CacheDirectiveInfo addCacheDirective(
    FSNamesystem fsn, CacheManager cacheManager,
    CacheDirectiveInfo directive, EnumSet<CacheFlag> flags,
    boolean logRetryCache) throws IOException {
  final FSPermissionChecker pc = getFsPermissionChecker(fsn);
  // Directive IDs are assigned by the CacheManager; callers must not supply one.
  if (directive.getId() != null) {
    throw new IOException(
        "addDirective: you cannot specify an ID for this operation.");
  }
  CacheDirectiveInfo effectiveDirective =
      cacheManager.addDirective(directive, pc, flags);
  // Record the effective directive (with its assigned ID) in the edit log.
  fsn.getEditLog().logAddCacheDirectiveInfo(effectiveDirective, logRetryCache);
  return effectiveDirective;
}
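For context, this server-side helper is reached from the public client API on DistributedFileSystem. A sketch of a caller, assuming fs.defaultFS points at a running HDFS cluster; the path /warm/dataset and the pool name hotPool are illustrative, not part of the source above:

import java.util.EnumSet;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CacheFlag;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo;

public class AddDirectiveClient {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    DistributedFileSystem dfs = (DistributedFileSystem) FileSystem.get(conf);
    // Note: no setId() call -- the server rejects caller-supplied IDs;
    // the NameNode assigns the ID and returns it.
    CacheDirectiveInfo directive = new CacheDirectiveInfo.Builder()
        .setPath(new Path("/warm/dataset"))   // hypothetical path
        .setPool("hotPool")                   // hypothetical pool name
        .setReplication((short) 2)
        .build();
    long id = dfs.addCacheDirective(directive, EnumSet.noneOf(CacheFlag.class));
    System.out.println("assigned directive id: " + id);
  }
}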
Use of org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo in project hadoop by apache.
The class FSNamesystem, method addCacheDirective().
long addCacheDirective(CacheDirectiveInfo directive,
    EnumSet<CacheFlag> flags, boolean logRetryCache) throws IOException {
  final String operationName = "addCacheDirective";
  CacheDirectiveInfo effectiveDirective = null;
  boolean success = false;
  String effectiveDirectiveStr;
  // Unless FORCE is set, wait for a cache rescan so that pool limit
  // checks reflect current usage.
  if (!flags.contains(CacheFlag.FORCE)) {
    cacheManager.waitForRescanIfNeeded();
  }
  writeLock();
  try {
    checkOperation(OperationCategory.WRITE);
    checkNameNodeSafeMode("Cannot add cache directive");
    effectiveDirective = FSNDNCacheOp.addCacheDirective(this, cacheManager,
        directive, flags, logRetryCache);
    success = true;
  } catch (AccessControlException ace) {
    logAuditEvent(success, operationName, null, null, null);
    throw ace;
  } finally {
    writeUnlock(operationName);
    if (success) {
      getEditLog().logSync();
    }
  }
  effectiveDirectiveStr = effectiveDirective.toString();
  logAuditEvent(success, operationName, effectiveDirectiveStr, null, null);
  return effectiveDirective.getId();
}
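CacheFlag.FORCE is what lets a caller skip the waitForRescanIfNeeded() call above; per the flag's contract it also tells the CacheManager to ignore pool resource limits. A sketch of a forced add, reusing the client setup from the earlier example (path and pool name are again hypothetical):

import java.io.IOException;
import java.util.EnumSet;
import org.apache.hadoop.fs.CacheFlag;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo;

public class ForceFlagExample {
  // Adds a directive with CacheFlag.FORCE: no rescan wait, and pool
  // resource limits are not enforced for this operation.
  static long addForced(DistributedFileSystem dfs) throws IOException {
    CacheDirectiveInfo directive = new CacheDirectiveInfo.Builder()
        .setPath(new Path("/cold/bulk-load"))  // hypothetical path
        .setPool("hotPool")                    // hypothetical pool name
        .build();
    return dfs.addCacheDirective(directive, EnumSet.of(CacheFlag.FORCE));
  }
}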
Use of org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo in project hadoop by apache.
The class FSImageSerialization, method readCacheDirectiveInfo() (DataInput overload).
public static CacheDirectiveInfo readCacheDirectiveInfo(DataInput in)
    throws IOException {
  CacheDirectiveInfo.Builder builder = new CacheDirectiveInfo.Builder();
  builder.setId(readLong(in));
  // The flags word records which optional fields follow:
  // 0x1 = path, 0x2 = replication, 0x4 = pool, 0x8 = absolute expiration.
  int flags = in.readInt();
  if ((flags & 0x1) != 0) {
    builder.setPath(new Path(readString(in)));
  }
  if ((flags & 0x2) != 0) {
    builder.setReplication(readShort(in));
  }
  if ((flags & 0x4) != 0) {
    builder.setPool(readString(in));
  }
  if ((flags & 0x8) != 0) {
    builder.setExpiration(
        CacheDirectiveInfo.Expiration.newAbsolute(readLong(in)));
  }
  // Any bit outside the low four is unrecognized and rejected.
  if ((flags & ~0xF) != 0) {
    throw new IOException(
        "unknown flags set in ModifyCacheDirectiveInfoOp: " + flags);
  }
  return builder.build();
}
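The bitmask decoded here pairs with a writer that emits the same layout (in Hadoop that role is played by FSImageSerialization's writer for cache directives). The sketch below uses plain DataOutput primitives rather than FSImageSerialization's Writable-based readString/readLong helpers, so it only illustrates the flag layout and is not wire-compatible with the real image format:

import java.io.DataOutput;
import java.io.IOException;
import org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo;

public class DirectiveFlagLayout {
  // Illustrative writer for the 0x1/0x2/0x4/0x8 layout decoded above.
  static void writeDirective(DataOutput out, CacheDirectiveInfo info)
      throws IOException {
    out.writeLong(info.getId());
    int flags = (info.getPath() != null ? 0x1 : 0)
        | (info.getReplication() != null ? 0x2 : 0)
        | (info.getPool() != null ? 0x4 : 0)
        | (info.getExpiration() != null ? 0x8 : 0);
    out.writeInt(flags);
    if (info.getPath() != null) {
      out.writeUTF(info.getPath().toUri().getPath());
    }
    if (info.getReplication() != null) {
      out.writeShort(info.getReplication());
    }
    if (info.getPool() != null) {
      out.writeUTF(info.getPool());
    }
    if (info.getExpiration() != null) {
      out.writeLong(info.getExpiration().getMillis());
    }
  }
}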
Use of org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo in project hadoop by apache.
The class FSImageSerialization, method readCacheDirectiveInfo() (Stanza overload).
public static CacheDirectiveInfo readCacheDirectiveInfo(Stanza st)
    throws InvalidXmlException {
  CacheDirectiveInfo.Builder builder = new CacheDirectiveInfo.Builder();
  // ID is mandatory; every other element is optional.
  builder.setId(Long.parseLong(st.getValue("ID")));
  String path = st.getValueOrNull("PATH");
  if (path != null) {
    builder.setPath(new Path(path));
  }
  String replicationString = st.getValueOrNull("REPLICATION");
  if (replicationString != null) {
    builder.setReplication(Short.parseShort(replicationString));
  }
  String pool = st.getValueOrNull("POOL");
  if (pool != null) {
    builder.setPool(pool);
  }
  String expiryTime = st.getValueOrNull("EXPIRATION");
  if (expiryTime != null) {
    builder.setExpiration(
        CacheDirectiveInfo.Expiration.newAbsolute(Long.parseLong(expiryTime)));
  }
  return builder.build();
}
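The element names looked up above imply the stanza shape this parser accepts. A minimal sketch that emits that shape with a StringBuilder; the real tooling produces the XML through SAX ContentHandler utilities, and escaping of field values is omitted here for brevity:

import org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo;

public class DirectiveStanza {
  // Emits <ID> (mandatory) plus whichever optional elements are present,
  // mirroring the getValue/getValueOrNull calls in the parser above.
  static String toStanzaXml(CacheDirectiveInfo info) {
    StringBuilder sb = new StringBuilder();
    sb.append("<ID>").append(info.getId()).append("</ID>");
    if (info.getPath() != null) {
      sb.append("<PATH>").append(info.getPath().toUri().getPath())
          .append("</PATH>");
    }
    if (info.getReplication() != null) {
      sb.append("<REPLICATION>").append(info.getReplication())
          .append("</REPLICATION>");
    }
    if (info.getPool() != null) {
      sb.append("<POOL>").append(info.getPool()).append("</POOL>");
    }
    if (info.getExpiration() != null) {
      sb.append("<EXPIRATION>").append(info.getExpiration().getMillis())
          .append("</EXPIRATION>");
    }
    return sb.toString();
  }
}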