Search in sources :

Example 41 with AccessControlException

Use of org.apache.hadoop.security.AccessControlException in the Apache Hadoop project.

From the class TestStickyBit, method testStickyBitRecursiveDeleteDir.

/**
 * Verifies sticky-bit semantics for recursive deletion: a non-owner is
 * rejected with AccessControlException, while the owner succeeds.
 */
@Test
public void testStickyBitRecursiveDeleteDir() throws Exception {
    final Path root = new Path("/" + GenericTestUtils.getMethodName());
    final Path stickyTmp = new Path(root, "tmp");
    final Path protectedDir = new Path(stickyTmp, "dir");
    final Path childFile = new Path(protectedDir, "file");
    // Wide-open root, and a world-writable tmp with the sticky bit set (01777).
    hdfs.mkdirs(stickyTmp);
    hdfs.setPermission(root, new FsPermission((short) 0777));
    hdfs.setPermission(stickyTmp, new FsPermission((short) 01777));
    // user1 owns a subdirectory and a file underneath the sticky tmp.
    hdfsAsUser1.mkdirs(protectedDir);
    hdfsAsUser1.setPermission(protectedDir, new FsPermission((short) 0777));
    writeFile(hdfsAsUser1, childFile);
    hdfs.setPermission(childFile, new FsPermission((short) 0666));
    // A different user must be denied when deleting the tree recursively.
    try {
        hdfsAsUser2.delete(stickyTmp, true);
        fail("Non-owner can not delete a directory protected by sticky bit recursively");
    } catch (AccessControlException e) {
        GenericTestUtils.assertExceptionContains(FSExceptionMessages.PERMISSION_DENIED_BY_STICKY_BIT, e);
    }
    // The owner, however, may delete the protected tree recursively.
    hdfsAsUser1.delete(stickyTmp, true);
}
Also used : Path(org.apache.hadoop.fs.Path) AccessControlException(org.apache.hadoop.security.AccessControlException) Test(org.junit.Test)

Example 42 with AccessControlException

use of org.apache.hadoop.security.AccessControlException in project hadoop by apache.

the class TestAuditLogs method testAuditDenied.

/** Test that a denied operation puts the proper entry in the audit log. */
@Test
public void testAuditDenied() throws Exception {
    final Path target = new Path(fnames[0]);
    final FileSystem userfs = DFSTestUtil.getFileSystemAs(userGroupInfo, conf);
    // Restrict the file to owner "root" so the test user cannot read it.
    fs.setPermission(target, new FsPermission((short) 0600));
    fs.setOwner(target, "root", null);
    setupAuditLogs();
    // Opening as the unprivileged user must be denied.
    try {
        userfs.open(target);
        fail("open must not succeed");
    } catch (AccessControlException expected) {
        System.out.println("got access denied, as expected.");
    }
    // Confirm the audit log recorded the denied attempt.
    verifyAuditLogs(false);
}
Also used : Path(org.apache.hadoop.fs.Path) FileSystem(org.apache.hadoop.fs.FileSystem) WebHdfsFileSystem(org.apache.hadoop.hdfs.web.WebHdfsFileSystem) AccessControlException(org.apache.hadoop.security.AccessControlException) FsPermission(org.apache.hadoop.fs.permission.FsPermission) Test(org.junit.Test)

Example 43 with AccessControlException

use of org.apache.hadoop.security.AccessControlException in project hadoop by apache.

the class TestAuditLogs method testAuditWebHdfsDenied.

/**
 * Test that denied access via webhdfs puts the proper entry in the audit log.
 * The open itself may succeed lazily over webhdfs, so the read is attempted
 * too; either step is expected to raise AccessControlException.
 */
@Test
public void testAuditWebHdfsDenied() throws Exception {
    final Path file = new Path(fnames[0]);
    fs.setPermission(file, new FsPermission((short) 0600));
    fs.setOwner(file, "root", null);
    setupAuditLogs();
    try {
        WebHdfsFileSystem webfs = WebHdfsTestUtil.getWebHdfsFileSystemAs(userGroupInfo, conf, WebHdfsConstants.WEBHDFS_SCHEME);
        // try-with-resources so the stream is not leaked if open/read
        // unexpectedly succeed and fail() aborts the block.
        try (InputStream istream = webfs.open(file)) {
            int val = istream.read();
            fail("open+read must not succeed, got " + val);
        }
    } catch (AccessControlException e) {
        // was 'E' — renamed to follow lowerCamelCase variable naming
        System.out.println("got access denied, as expected.");
    }
    verifyAuditLogsRepeat(false, 2);
}
Also used : Path(org.apache.hadoop.fs.Path) InputStream(java.io.InputStream) AccessControlException(org.apache.hadoop.security.AccessControlException) FsPermission(org.apache.hadoop.fs.permission.FsPermission) WebHdfsFileSystem(org.apache.hadoop.hdfs.web.WebHdfsFileSystem) Test(org.junit.Test)

Example 44 with AccessControlException

use of org.apache.hadoop.security.AccessControlException in project hadoop by apache.

the class TestAuditLoggerWithCommands method testSetQuota.

/**
 * A denied setQuota must produce an allowed=false audit entry, and a
 * setQuota attempted on a closed FileSystem must not add a new entry.
 */
@Test
public void testSetQuota() throws Exception {
    Path path = new Path("/testdir/testdir1");
    fs.mkdirs(path);
    fileSys = DFSTestUtil.getFileSystemAs(user1, conf);
    try {
        // user1 is unprivileged, so this must be denied.
        ((DistributedFileSystem) fileSys).setQuota(path, 10L, 10L);
        fail("The operation should have failed with AccessControlException");
    } catch (AccessControlException ace) {
        // expected: unprivileged user cannot set quotas
    }
    String acePattern = ".*allowed=false.*ugi=theDoctor.*cmd=setQuota.*";
    int length = verifyAuditLogs(acePattern);
    fileSys.close();
    try {
        // The filesystem is closed, so this fails client-side...
        ((DistributedFileSystem) fileSys).setQuota(path, 10L, 10L);
        fail("The operation should have failed with IOException");
    } catch (IOException ace) {
        // expected: operations on a closed FileSystem throw IOException
    }
    // ...and therefore must not have added another audit entry.
    // (message previously said "getContentSummary" — copy-paste error)
    assertTrue("Unexpected log from setQuota", length == auditlog.getOutput().split("\n").length);
}
Also used : Path(org.apache.hadoop.fs.Path) AccessControlException(org.apache.hadoop.security.AccessControlException) IOException(java.io.IOException) DistributedFileSystem(org.apache.hadoop.hdfs.DistributedFileSystem) Test(org.junit.Test)

Example 45 with AccessControlException

use of org.apache.hadoop.security.AccessControlException in project hadoop by apache.

the class TestAuditLoggerWithCommands method testRemoveCachePool.

/**
 * A denied removeCachePool must produce an allowed=false audit entry, and
 * a removeCachePool attempted on a closed FileSystem must not add one.
 */
@Test
public void testRemoveCachePool() throws Exception {
    removeExistingCachePools(null);
    // Pool mode 0 denies everyone except a superuser.
    CachePoolInfo cacheInfo = new CachePoolInfo("pool1").setMode(new FsPermission((short) 0));
    ((DistributedFileSystem) fs).addCachePool(cacheInfo);
    fileSys = DFSTestUtil.getFileSystemAs(user1, conf);
    try {
        ((DistributedFileSystem) fileSys).removeCachePool("pool1");
        fail("The operation should have failed with AccessControlException");
    } catch (AccessControlException ace) {
        // expected: pool mode 0 denies user1
    }
    String aceRemoveCachePoolPattern = ".*allowed=false.*ugi=theDoctor.*cmd=removeCachePool.*";
    int length = verifyAuditLogs(aceRemoveCachePoolPattern);
    assertTrue("Unexpected log!", length == auditlog.getOutput().split("\n").length);
    // close() moved outside the try: previously a close() failure would be
    // swallowed by the IOException catch and removeCachePool never attempted,
    // letting the test pass vacuously. This also matches testSetQuota.
    fileSys.close();
    try {
        ((DistributedFileSystem) fileSys).removeCachePool("pool1");
        fail("The operation should have failed with IOException");
    } catch (IOException e) {
        // expected: FileSystem is already closed
    }
    assertTrue("Unexpected log!", length == auditlog.getOutput().split("\n").length);
}
Also used : AccessControlException(org.apache.hadoop.security.AccessControlException) FsPermission(org.apache.hadoop.fs.permission.FsPermission) IOException(java.io.IOException) DistributedFileSystem(org.apache.hadoop.hdfs.DistributedFileSystem) CachePoolInfo(org.apache.hadoop.hdfs.protocol.CachePoolInfo) Test(org.junit.Test)

Aggregations

AccessControlException (org.apache.hadoop.security.AccessControlException)129 Test (org.junit.Test)59 Path (org.apache.hadoop.fs.Path)53 IOException (java.io.IOException)52 SnapshotAccessControlException (org.apache.hadoop.hdfs.protocol.SnapshotAccessControlException)35 FsPermission (org.apache.hadoop.fs.permission.FsPermission)33 UserGroupInformation (org.apache.hadoop.security.UserGroupInformation)22 HdfsFileStatus (org.apache.hadoop.hdfs.protocol.HdfsFileStatus)21 FileSystem (org.apache.hadoop.fs.FileSystem)19 DistributedFileSystem (org.apache.hadoop.hdfs.DistributedFileSystem)14 Configuration (org.apache.hadoop.conf.Configuration)11 FileNotFoundException (java.io.FileNotFoundException)10 CachePoolInfo (org.apache.hadoop.hdfs.protocol.CachePoolInfo)8 PrivilegedExceptionAction (java.security.PrivilegedExceptionAction)7 FileStatus (org.apache.hadoop.fs.FileStatus)6 CacheDirectiveInfo (org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo)6 Text (org.apache.hadoop.io.Text)5 InvalidToken (org.apache.hadoop.security.token.SecretManager.InvalidToken)5 YarnException (org.apache.hadoop.yarn.exceptions.YarnException)5 ArrayList (java.util.ArrayList)4