use of org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo in project hadoop by apache.
the class TestAuditLoggerWithCommands method testModifyCacheDirective.
@Test
public void testModifyCacheDirective() throws Exception {
  removeExistingCachePools(null);
  // Create pool1 with mode 000 so that non-superusers have no access to it.
  proto.addCachePool(new CachePoolInfo("pool1")
      .setMode(new FsPermission((short) 0)));
  CacheDirectiveInfo alpha = new CacheDirectiveInfo.Builder()
      .setPath(new Path("/alpha"))
      .setPool("pool1")
      .build();
  fileSys = DFSTestUtil.getFileSystemAs(user1, conf);
  // 'fs' is the test's cluster (superuser) FileSystem, so adding the directive succeeds.
  Long id = ((DistributedFileSystem) fs).addCacheDirective(alpha);
  try {
    // user1 has no access to pool1, so modifying the directive must be rejected.
    ((DistributedFileSystem) fileSys).modifyCacheDirective(
        new CacheDirectiveInfo.Builder().setId(id).setReplication((short) 1).build());
    fail("The operation should have failed with AccessControlException");
  } catch (AccessControlException ace) {
    // expected
  }
  // The denied attempt must show up in the audit log.
  String aceModifyCachePattern =
      ".*allowed=false.*ugi=theDoctor.*cmd=modifyCache.*";
  verifyAuditLogs(aceModifyCachePattern);
  fileSys.close();
  try {
    // Any call on a closed FileSystem should fail with an IOException.
    ((DistributedFileSystem) fileSys).modifyCacheDirective(
        new CacheDirectiveInfo.Builder().setId(id).setReplication((short) 1).build());
    fail("The operation should have failed with IOException");
  } catch (IOException e) {
    // expected
  }
}
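For comparison, a minimal sketch of the permitted path: creating, modifying, and removing a directive through a handle that does have access to the pool. The class name, pool mode, path, and the dfs parameter are illustrative assumptions, not part of the test above.

import java.util.EnumSet;

import org.apache.hadoop.fs.CacheFlag;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo;
import org.apache.hadoop.hdfs.protocol.CachePoolInfo;

public class CacheDirectiveSketch {
  // 'dfs' is assumed to be a DistributedFileSystem opened as a user with
  // access to the pool (for example, the superuser in a test cluster).
  static void modifyAndRemove(DistributedFileSystem dfs) throws Exception {
    dfs.addCachePool(new CachePoolInfo("pool1")
        .setMode(new FsPermission((short) 0755)));
    long id = dfs.addCacheDirective(
        new CacheDirectiveInfo.Builder()
            .setPath(new Path("/alpha"))
            .setPool("pool1")
            .build(),
        EnumSet.noneOf(CacheFlag.class));
    // With sufficient permissions the same modifyCacheDirective call succeeds.
    dfs.modifyCacheDirective(new CacheDirectiveInfo.Builder()
        .setId(id)
        .setReplication((short) 2)
        .build());
    // Clean up: remove the directive first, then the pool.
    dfs.removeCacheDirective(id);
    dfs.removeCachePool("pool1");
  }
}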
use of org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo in project hadoop by apache.
the class TestRetryCacheWithHA method testListCacheDirectives.
/**
* Add a list of cache directives, list cache directives,
* switch active NN, and list cache directives again.
*/
@Test(timeout = 60000)
public void testListCacheDirectives() throws Exception {
  final int poolCount = 7;
  HashSet<String> poolNames = new HashSet<String>(poolCount);
  Path path = new Path("/p");
  // Create one pool per iteration and add a forced directive for the same path.
  for (int i = 0; i < poolCount; i++) {
    String poolName = "testListCacheDirectives-" + i;
    CacheDirectiveInfo directiveInfo = new CacheDirectiveInfo.Builder()
        .setPool(poolName)
        .setPath(path)
        .build();
    dfs.addCachePool(new CachePoolInfo(poolName));
    dfs.addCacheDirective(directiveInfo, EnumSet.of(CacheFlag.FORCE));
    poolNames.add(poolName);
  }
  // Verify the directives are visible on the current active NameNode (index 0).
  listCacheDirectives(poolNames, 0);
  // Fail over to the other NameNode and verify the listing again.
  cluster.transitionToStandby(0);
  cluster.transitionToActive(1);
  cluster.waitActive(1);
  listCacheDirectives(poolNames, 1);
}
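The listCacheDirectives(poolNames, nnIndex) call above is a helper of the test class. A minimal sketch of what listing through the public API might look like, assuming a DistributedFileSystem handle and the same imports as the test, plus org.apache.hadoop.fs.RemoteIterator and org.apache.hadoop.hdfs.protocol.CacheDirectiveEntry; the method name is illustrative only.

// Sketch (not the test helper itself): list directives per pool through the
// public DistributedFileSystem API and check each pool holds exactly one.
static void checkDirectives(DistributedFileSystem dfs, Set<String> poolNames)
    throws IOException {
  for (String poolName : poolNames) {
    CacheDirectiveInfo filter = new CacheDirectiveInfo.Builder()
        .setPool(poolName)
        .build();
    RemoteIterator<CacheDirectiveEntry> it = dfs.listCacheDirectives(filter);
    int count = 0;
    while (it.hasNext()) {
      it.next();
      count++;
    }
    if (count != 1) {
      throw new IllegalStateException("Expected one directive in " + poolName);
    }
  }
}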
use of org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo in project SSM by Intel-bigdata.
the class CacheFileAction method isCached.
public boolean isCached(String fileName) throws Exception {
  // Build a filter that matches cache directives for this path only.
  CacheDirectiveInfo.Builder filterBuilder = new CacheDirectiveInfo.Builder();
  filterBuilder.setPath(new Path(fileName));
  CacheDirectiveInfo filter = filterBuilder.build();
  // The file counts as cached if at least one directive matches the filter.
  RemoteIterator<CacheDirectiveEntry> directiveEntries =
      dfsClient.listCacheDirectives(filter);
  return directiveEntries.hasNext();
}
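Note that isCached filters on path only, so a directive from any pool will match. A hedged sketch of a variant that also narrows by pool and returns the directive id; the method name and pool parameter are assumptions, and it reuses the dfsClient field of the class above.

// Sketch: find the id of the directive caching 'fileName' in a given pool,
// or null if the file is not cached there.
private Long findDirectiveId(String fileName, String poolName) throws IOException {
  CacheDirectiveInfo filter = new CacheDirectiveInfo.Builder()
      .setPath(new Path(fileName))
      .setPool(poolName)
      .build();
  RemoteIterator<CacheDirectiveEntry> entries =
      dfsClient.listCacheDirectives(filter);
  if (entries.hasNext()) {
    return entries.next().getInfo().getId();
  }
  return null;
}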
use of org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo in project SSM by Intel-bigdata.
the class CacheFileAction method addDirective.
private void addDirective(String fileName) throws Exception {
  // Build a directive that caches the given file in the SSM cache pool.
  CacheDirectiveInfo.Builder filterBuilder = new CacheDirectiveInfo.Builder();
  filterBuilder.setPath(new Path(fileName));
  filterBuilder.setPool(SSMPOOL);
  CacheDirectiveInfo filter = filterBuilder.build();
  // No flags: the pool's resource limits are enforced when the directive is added.
  EnumSet<CacheFlag> flags = EnumSet.noneOf(CacheFlag.class);
  dfsClient.addCacheDirective(filter, flags);
}
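Because addDirective passes an empty flag set, the NameNode checks the pool's limits before accepting the directive. A hedged variant that bypasses those checks with CacheFlag.FORCE and pins a cache replication factor; the method name and replication parameter are illustrative, and it assumes the same SSMPOOL and dfsClient fields as above.

// Sketch: same idea as addDirective, but bypass pool limit checks with
// CacheFlag.FORCE and request a specific cache replication factor.
private void addDirectiveForced(String fileName, short replication) throws Exception {
  CacheDirectiveInfo directive = new CacheDirectiveInfo.Builder()
      .setPath(new Path(fileName))
      .setPool(SSMPOOL)
      .setReplication(replication)
      .build();
  dfsClient.addCacheDirective(directive, EnumSet.of(CacheFlag.FORCE));
}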