Use of org.apache.hadoop.hdfs.protocol.CachePoolInfo in project hadoop by apache.
From the class TestAuditLoggerWithCommands, method testRemoveCachePool.
@Test
public void testRemoveCachePool() throws Exception {
  removeExistingCachePools(null);
  CachePoolInfo cacheInfo =
      new CachePoolInfo("pool1").setMode(new FsPermission((short) 0));
  ((DistributedFileSystem) fs).addCachePool(cacheInfo);
  fileSys = DFSTestUtil.getFileSystemAs(user1, conf);
  try {
    ((DistributedFileSystem) fileSys).removeCachePool("pool1");
    fail("The operation should have failed with AccessControlException");
  } catch (AccessControlException ace) {
    // expected: pool1 has mode 0, so user1 may not remove it
  }
  String aceRemoveCachePoolPattern =
      ".*allowed=false.*ugi=theDoctor.*cmd=removeCachePool.*";
  int length = verifyAuditLogs(aceRemoveCachePoolPattern);
  assertTrue("Unexpected log!",
      length == auditlog.getOutput().split("\n").length);
  try {
    fileSys.close();
    ((DistributedFileSystem) fileSys).removeCachePool("pool1");
    fail("The operation should have failed with IOException");
  } catch (IOException e) {
    // expected: the file system has already been closed,
    // and no further audit entry should be written
  }
  assertTrue("Unexpected log!",
      length == auditlog.getOutput().split("\n").length);
}
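CachePoolInfo follows a chained-setter style, as the new CachePoolInfo("pool1").setMode(...) expression above shows. A minimal sketch of a fully specified pool; the pool name and all values here are illustrative, not taken from the test:

import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hdfs.protocol.CachePoolInfo;

CachePoolInfo info = new CachePoolInfo("examplePool")  // illustrative name
    .setOwnerName("hdfs")
    .setGroupName("hadoop")
    .setMode(new FsPermission((short) 0755))  // rwxr-xr-x
    .setLimit(64L * 1024 * 1024);             // cap the pool at 64 MB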
Use of org.apache.hadoop.hdfs.protocol.CachePoolInfo in project hadoop by apache.
From the class TestAuditLoggerWithCommands, method testAddCachePool.
@Test
public void testAddCachePool() throws Exception {
  removeExistingCachePools(null);
  CachePoolInfo cacheInfo =
      new CachePoolInfo("pool1").setMode(new FsPermission((short) 0));
  fileSys = DFSTestUtil.getFileSystemAs(user1, conf);
  try {
    ((DistributedFileSystem) fileSys).addCachePool(cacheInfo);
    fail("The operation should have failed with AccessControlException");
  } catch (AccessControlException ace) {
    // expected: user1 is not a superuser, so addCachePool is denied
  }
  String aceAddCachePoolPattern =
      ".*allowed=false.*ugi=theDoctor.*cmd=addCachePool.*";
  int length = verifyAuditLogs(aceAddCachePoolPattern);
  try {
    fileSys.close();
    ((DistributedFileSystem) fileSys).addCachePool(cacheInfo);
    fail("The operation should have failed with IOException");
  } catch (IOException e) {
    // expected: the file system has already been closed
  }
  assertTrue("Unexpected log!",
      length == auditlog.getOutput().split("\n").length);
}
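The aceAddCachePoolPattern regex above is matched against the NameNode audit log. In the default format, a matching entry looks roughly like the following; the timestamp, IP, and field values are illustrative:

2017-03-01 12:00:00,000 INFO FSNamesystem.audit: allowed=false ugi=theDoctor (auth:SIMPLE) ip=/127.0.0.1 cmd=addCachePool src=pool1 dst=null perm=null proto=rpc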
Use of org.apache.hadoop.hdfs.protocol.CachePoolInfo in project hadoop by apache.
From the class TestAuditLoggerWithCommands, method testRemoveCacheDirective.
@Test
public void testRemoveCacheDirective() throws Exception {
  removeExistingCachePools(null);
  proto.addCachePool(
      new CachePoolInfo("pool1").setMode(new FsPermission((short) 0)));
  CacheDirectiveInfo alpha = new CacheDirectiveInfo.Builder()
      .setPath(new Path("/alpha"))
      .setPool("pool1")
      .build();
  String aceRemoveCachePattern =
      ".*allowed=false.*ugi=theDoctor.*cmd=removeCache.*";
  int length = -1;
  Long id = ((DistributedFileSystem) fs).addCacheDirective(alpha);
  fileSys = DFSTestUtil.getFileSystemAs(user1, conf);
  try {
    ((DistributedFileSystem) fileSys).removeCacheDirective(id);
    fail("It should have failed with an AccessControlException");
  } catch (AccessControlException ace) {
    length = verifyAuditLogs(aceRemoveCachePattern);
  }
  try {
    fileSys.close();
    ((DistributedFileSystem) fileSys).removeCacheDirective(id);
    fail("The operation should have failed with IOException");
  } catch (IOException e) {
    // expected: the file system has already been closed
  }
  assertTrue("Unexpected log!",
      length == auditlog.getOutput().split("\n").length);
}
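To confirm which directives remain after such a removal, the public API also supports filtered listing. A short sketch, not part of the test, that prints every directive still registered in pool1 (imports assumed: org.apache.hadoop.fs.RemoteIterator, org.apache.hadoop.hdfs.protocol.CacheDirectiveEntry):

// Build a filter that matches only directives in pool1.
CacheDirectiveInfo filter = new CacheDirectiveInfo.Builder()
    .setPool("pool1")
    .build();
RemoteIterator<CacheDirectiveEntry> it =
    ((DistributedFileSystem) fs).listCacheDirectives(filter);
while (it.hasNext()) {
  System.out.println(it.next().getInfo());
}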
Use of org.apache.hadoop.hdfs.protocol.CachePoolInfo in project hadoop by apache.
From the class TestAuditLoggerWithCommands, method testModifyCachePool.
@Test
public void testModifyCachePool() throws Exception {
  removeExistingCachePools(null);
  CachePoolInfo cacheInfo =
      new CachePoolInfo("pool1").setMode(new FsPermission((short) 0));
  ((DistributedFileSystem) fs).addCachePool(cacheInfo);
  fileSys = DFSTestUtil.getFileSystemAs(user1, conf);
  try {
    ((DistributedFileSystem) fileSys).modifyCachePool(cacheInfo);
    fail("The operation should have failed with AccessControlException");
  } catch (AccessControlException ace) {
    // expected: user1 is not a superuser, so modifyCachePool is denied
  }
  String aceModifyCachePoolPattern =
      ".*allowed=false.*ugi=theDoctor.*cmd=modifyCachePool.*";
  int length = verifyAuditLogs(aceModifyCachePoolPattern);
  try {
    fileSys.close();
    ((DistributedFileSystem) fileSys).modifyCachePool(cacheInfo);
    fail("The operation should have failed with IOException");
  } catch (IOException e) {
    // expected: the file system has already been closed
  }
  assertTrue("Unexpected log!",
      length == auditlog.getOutput().split("\n").length);
}
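modifyCachePool applies only the non-null fields of the CachePoolInfo it receives, so a single attribute can be changed without restating the rest. A sketch, with an illustrative mode value, that opens up the pool created above:

// Only the mode is non-null here, so owner, group, and limit are untouched.
((DistributedFileSystem) fs).modifyCachePool(
    new CachePoolInfo("pool1").setMode(new FsPermission((short) 0755)));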
Use of org.apache.hadoop.hdfs.protocol.CachePoolInfo in project SSM by Intel-bigdata.
From the class CacheFileAction, method createCachePool.
private void createCachePool() throws Exception {
  // Do nothing if the SSM cache pool already exists.
  RemoteIterator<CachePoolEntry> poolEntries = dfsClient.listCachePools();
  while (poolEntries.hasNext()) {
    CachePoolEntry poolEntry = poolEntries.next();
    if (poolEntry.getInfo().getPoolName().equals(SSMPOOL)) {
      return;
    }
  }
  dfsClient.addCachePool(new CachePoolInfo(SSMPOOL));
}
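Once createCachePool has ensured the pool exists, caching a file comes down to adding a directive through the same DFSClient. An illustrative sketch; the path is an assumption, not taken from the SSM source (imports assumed: java.util.EnumSet, org.apache.hadoop.fs.CacheFlag, org.apache.hadoop.fs.Path, org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo):

CacheDirectiveInfo directive = new CacheDirectiveInfo.Builder()
    .setPath(new Path("/example/file"))  // illustrative path
    .setPool(SSMPOOL)
    .build();
// No cache flags; returns the id of the newly created directive.
long id = dfsClient.addCacheDirective(directive, EnumSet.noneOf(CacheFlag.class));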