Search in sources :

Example 11 with CachePoolInfo

use of org.apache.hadoop.hdfs.protocol.CachePoolInfo in project hadoop by apache.

From the class TestAuditLoggerWithCommands, method testRemoveCachePool.

@Test
public void testRemoveCachePool() throws Exception {
    removeExistingCachePools(null);
    // Create a pool with mode 0000 so that user1 is denied any access to it.
    CachePoolInfo poolInfo = new CachePoolInfo("pool1").setMode(new FsPermission((short) 0));
    ((DistributedFileSystem) fs).addCachePool(poolInfo);
    fileSys = DFSTestUtil.getFileSystemAs(user1, conf);
    DistributedFileSystem userDfs = (DistributedFileSystem) fileSys;
    try {
        userDfs.removeCachePool("pool1");
        fail("The operation should have failed with AccessControlException");
    } catch (AccessControlException ignored) {
        // Expected: user1 lacks permission on the pool.
    }
    String aceRemoveCachePoolPattern = ".*allowed=false.*ugi=theDoctor.*cmd=removeCachePool.*";
    // Record the audit-log line count after the denied attempt was logged.
    int length = verifyAuditLogs(aceRemoveCachePoolPattern);
    assertTrue("Unexpected log!", auditlog.getOutput().split("\n").length == length);
    try {
        fileSys.close();
        // A call on a closed filesystem must fail before reaching the NameNode.
        userDfs.removeCachePool("pool1");
        fail("The operation should have failed with IOException");
    } catch (IOException ignored) {
        // Expected: the filesystem is closed.
    }
    // The failed call must not have appended any new audit entries.
    assertTrue("Unexpected log!", auditlog.getOutput().split("\n").length == length);
}
Also used : AccessControlException(org.apache.hadoop.security.AccessControlException) FsPermission(org.apache.hadoop.fs.permission.FsPermission) IOException(java.io.IOException) DistributedFileSystem(org.apache.hadoop.hdfs.DistributedFileSystem) CachePoolInfo(org.apache.hadoop.hdfs.protocol.CachePoolInfo) Test(org.junit.Test)

Example 12 with CachePoolInfo

use of org.apache.hadoop.hdfs.protocol.CachePoolInfo in project hadoop by apache.

From the class TestAuditLoggerWithCommands, method testAddCachePool.

@Test
public void testAddCachePool() throws Exception {
    removeExistingCachePools(null);
    // Pool definition with mode 0000; user1 is not allowed to create pools.
    CachePoolInfo poolInfo = new CachePoolInfo("pool1").setMode(new FsPermission((short) 0));
    fileSys = DFSTestUtil.getFileSystemAs(user1, conf);
    DistributedFileSystem userDfs = (DistributedFileSystem) fileSys;
    try {
        userDfs.addCachePool(poolInfo);
        fail("The operation should have failed with AccessControlException");
    } catch (AccessControlException ignored) {
        // Expected: addCachePool is a superuser-only operation.
    }
    String aceAddCachePoolPattern = ".*allowed=false.*ugi=theDoctor.*cmd=addCachePool.*";
    // Capture the audit-log line count after the denial was logged.
    int length = verifyAuditLogs(aceAddCachePoolPattern);
    try {
        fileSys.close();
        // Calls on a closed filesystem must fail client-side.
        userDfs.addCachePool(poolInfo);
        fail("The operation should have failed with IOException");
    } catch (IOException ignored) {
        // Expected: the filesystem is closed.
    }
    // No additional audit entries may appear for the failed call.
    assertTrue("Unexpected log!", auditlog.getOutput().split("\n").length == length);
}
Also used : AccessControlException(org.apache.hadoop.security.AccessControlException) FsPermission(org.apache.hadoop.fs.permission.FsPermission) IOException(java.io.IOException) DistributedFileSystem(org.apache.hadoop.hdfs.DistributedFileSystem) CachePoolInfo(org.apache.hadoop.hdfs.protocol.CachePoolInfo) Test(org.junit.Test)

Example 13 with CachePoolInfo

use of org.apache.hadoop.hdfs.protocol.CachePoolInfo in project hadoop by apache.

From the class TestAuditLoggerWithCommands, method testRemoveCacheDirective.

@Test
public void testRemoveCacheDirective() throws Exception {
    removeExistingCachePools(null);
    // Pool with mode 0000: user1 cannot remove directives inside it.
    proto.addCachePool(new CachePoolInfo("pool1").setMode(new FsPermission((short) 0)));
    CacheDirectiveInfo directive = new CacheDirectiveInfo.Builder()
        .setPath(new Path("/alpha"))
        .setPool("pool1")
        .build();
    String aceRemoveCachePattern = ".*allowed=false.*ugi=theDoctor.*cmd=removeCache.*";
    int length = -1;
    Long directiveId = ((DistributedFileSystem) fs).addCacheDirective(directive);
    fileSys = DFSTestUtil.getFileSystemAs(user1, conf);
    DistributedFileSystem userDfs = (DistributedFileSystem) fileSys;
    try {
        userDfs.removeCacheDirective(directiveId);
        fail("It should have failed with an AccessControlException");
    } catch (AccessControlException ace) {
        // Expected denial; record the audit-log line count at this point.
        length = verifyAuditLogs(aceRemoveCachePattern);
    }
    try {
        fileSys.close();
        // After close, the call must fail before being audited.
        userDfs.removeCacheDirective(directiveId);
        fail("The operation should have failed with IOException");
    } catch (IOException ignored) {
        // Expected: the filesystem is closed.
    }
    // The failed call must not have produced extra audit entries.
    assertTrue("Unexpected log!", auditlog.getOutput().split("\n").length == length);
}
Also used : Path(org.apache.hadoop.fs.Path) CacheDirectiveInfo(org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo) AccessControlException(org.apache.hadoop.security.AccessControlException) FsPermission(org.apache.hadoop.fs.permission.FsPermission) IOException(java.io.IOException) DistributedFileSystem(org.apache.hadoop.hdfs.DistributedFileSystem) CachePoolInfo(org.apache.hadoop.hdfs.protocol.CachePoolInfo) Test(org.junit.Test)

Example 14 with CachePoolInfo

use of org.apache.hadoop.hdfs.protocol.CachePoolInfo in project hadoop by apache.

From the class TestAuditLoggerWithCommands, method testModifyCachePool.

@Test
public void testModifyCachePool() throws Exception {
    removeExistingCachePools(null);
    // Pool with mode 0000; user1 may not modify it.
    CachePoolInfo poolInfo = new CachePoolInfo("pool1").setMode(new FsPermission((short) 0));
    ((DistributedFileSystem) fs).addCachePool(poolInfo);
    fileSys = DFSTestUtil.getFileSystemAs(user1, conf);
    DistributedFileSystem userDfs = (DistributedFileSystem) fileSys;
    try {
        userDfs.modifyCachePool(poolInfo);
        fail("The operation should have failed with AccessControlException");
    } catch (AccessControlException ignored) {
        // Expected: modifyCachePool requires superuser privileges.
    }
    String aceModifyCachePoolPattern = ".*allowed=false.*ugi=theDoctor.*cmd=modifyCachePool.*";
    // Capture the audit-log line count after the denial was logged.
    int length = verifyAuditLogs(aceModifyCachePoolPattern);
    try {
        fileSys.close();
        // Operations on a closed filesystem fail client-side.
        userDfs.modifyCachePool(poolInfo);
        fail("The operation should have failed with IOException");
    } catch (IOException ignored) {
        // Expected: the filesystem is closed.
    }
    // No additional audit entries may appear for the failed call.
    assertTrue("Unexpected log!", auditlog.getOutput().split("\n").length == length);
}
Also used : AccessControlException(org.apache.hadoop.security.AccessControlException) FsPermission(org.apache.hadoop.fs.permission.FsPermission) IOException(java.io.IOException) DistributedFileSystem(org.apache.hadoop.hdfs.DistributedFileSystem) CachePoolInfo(org.apache.hadoop.hdfs.protocol.CachePoolInfo) Test(org.junit.Test)

Example 15 with CachePoolInfo

use of org.apache.hadoop.hdfs.protocol.CachePoolInfo in project SSM by Intel-bigdata.

From the class CacheFileAction, method createCachePool.

// Ensures the SSM cache pool exists, creating it only when no pool with
// that name is already registered on the NameNode.
private void createCachePool() throws Exception {
    RemoteIterator<CachePoolEntry> it = dfsClient.listCachePools();
    while (it.hasNext()) {
        CachePoolEntry entry = it.next();
        // Pool already present — nothing to do.
        if (entry.getInfo().getPoolName().equals(SSMPOOL)) {
            return;
        }
    }
    dfsClient.addCachePool(new CachePoolInfo(SSMPOOL));
}
Also used : CachePoolInfo(org.apache.hadoop.hdfs.protocol.CachePoolInfo) CachePoolEntry(org.apache.hadoop.hdfs.protocol.CachePoolEntry)

Aggregations

CachePoolInfo (org.apache.hadoop.hdfs.protocol.CachePoolInfo)36 Test (org.junit.Test)26 Path (org.apache.hadoop.fs.Path)20 CacheDirectiveInfo (org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo)20 FsPermission (org.apache.hadoop.fs.permission.FsPermission)14 DistributedFileSystem (org.apache.hadoop.hdfs.DistributedFileSystem)14 IOException (java.io.IOException)11 AccessControlException (org.apache.hadoop.security.AccessControlException)8 CachePoolEntry (org.apache.hadoop.hdfs.protocol.CachePoolEntry)7 CacheDirectiveEntry (org.apache.hadoop.hdfs.protocol.CacheDirectiveEntry)6 InvalidRequestException (org.apache.hadoop.fs.InvalidRequestException)4 ByteBuffer (java.nio.ByteBuffer)3 Date (java.util.Date)3 LinkedList (java.util.LinkedList)3 HdfsConfiguration (org.apache.hadoop.hdfs.HdfsConfiguration)3 MiniDFSCluster (org.apache.hadoop.hdfs.MiniDFSCluster)3 HashSet (java.util.HashSet)2 Configuration (org.apache.hadoop.conf.Configuration)2 CacheFlag (org.apache.hadoop.fs.CacheFlag)2 FSDataInputStream (org.apache.hadoop.fs.FSDataInputStream)2