Example 76 with AclEntry

Use of org.apache.hadoop.fs.permission.AclEntry in the Apache Hadoop project.

From the class TestAclWithSnapshot, the doSnapshotRootRemovalAssertions method:

private static void doSnapshotRootRemovalAssertions(Path path, Path snapshotPath) throws Exception {
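    // After removeAcl, the original directory should report no extended ACL entries.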
    AclStatus s = hdfs.getAclStatus(path);
    AclEntry[] returned = s.getEntries().toArray(new AclEntry[0]);
    assertArrayEquals(new AclEntry[] {}, returned);
    assertPermission((short) 0700, path);
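    // The snapshot root should still carry the ACL that was in effect when the snapshot was taken.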
    s = hdfs.getAclStatus(snapshotPath);
    returned = s.getEntries().toArray(new AclEntry[0]);
    assertArrayEquals(new AclEntry[] { aclEntry(ACCESS, USER, "bruce", READ_EXECUTE), aclEntry(ACCESS, GROUP, NONE) }, returned);
    assertPermission((short) 010750, snapshotPath);
    assertDirPermissionDenied(fsAsBruce, BRUCE, path);
    assertDirPermissionDenied(fsAsDiana, DIANA, path);
    assertDirPermissionGranted(fsAsBruce, BRUCE, snapshotPath);
    assertDirPermissionDenied(fsAsDiana, DIANA, snapshotPath);
}
Also used: AclStatus (org.apache.hadoop.fs.permission.AclStatus), AclEntry (org.apache.hadoop.fs.permission.AclEntry)
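
Note: the aclEntry(...) shorthand used throughout these tests is a static helper from the Hadoop test utility class AclTestHelpers. As a minimal sketch (not the helper's actual source), the same entries can be built with the public AclEntry.Builder API:

import org.apache.hadoop.fs.permission.AclEntry;
import org.apache.hadoop.fs.permission.AclEntryScope;
import org.apache.hadoop.fs.permission.AclEntryType;
import org.apache.hadoop.fs.permission.FsAction;

// Roughly equivalent to aclEntry(ACCESS, USER, "bruce", READ_EXECUTE):
AclEntry bruceReadExecute = new AclEntry.Builder()
    .setScope(AclEntryScope.ACCESS)
    .setType(AclEntryType.USER)
    .setName("bruce")
    .setPermission(FsAction.READ_EXECUTE)
    .build();

// Roughly equivalent to the unnamed aclEntry(ACCESS, GROUP, NONE): omit setName().
AclEntry groupNone = new AclEntry.Builder()
    .setScope(AclEntryScope.ACCESS)
    .setType(AclEntryType.GROUP)
    .setPermission(FsAction.NONE)
    .build();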

Example 77 with AclEntry

Use of org.apache.hadoop.fs.permission.AclEntry in the Apache Hadoop project.

From the class TestAclWithSnapshot, the testChangeAclExceedsQuota method:

@Test
public void testChangeAclExceedsQuota() throws Exception {
    Path filePath = new Path(path, "file1");
    Path fileSnapshotPath = new Path(snapshotPath, "file1");
    FileSystem.mkdirs(hdfs, path, FsPermission.createImmutable((short) 0755));
    hdfs.allowSnapshot(path);
    hdfs.setQuota(path, 3, HdfsConstants.QUOTA_DONT_SET);
    FileSystem.create(hdfs, filePath, FsPermission.createImmutable((short) 0600)).close();
    hdfs.setPermission(filePath, FsPermission.createImmutable((short) 0600));
    List<AclEntry> aclSpec = Lists.newArrayList(aclEntry(ACCESS, USER, "bruce", READ_WRITE));
    hdfs.modifyAclEntries(filePath, aclSpec);
    hdfs.createSnapshot(path, snapshotName);
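    // Both the live file and its copy in the new snapshot should report the modified ACL.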
    AclStatus s = hdfs.getAclStatus(filePath);
    AclEntry[] returned = s.getEntries().toArray(new AclEntry[0]);
    assertArrayEquals(new AclEntry[] { aclEntry(ACCESS, USER, "bruce", READ_WRITE), aclEntry(ACCESS, GROUP, NONE) }, returned);
    assertPermission((short) 010660, filePath);
    s = hdfs.getAclStatus(fileSnapshotPath);
    returned = s.getEntries().toArray(new AclEntry[0]);
    assertArrayEquals(new AclEntry[] { aclEntry(ACCESS, USER, "bruce", READ_WRITE), aclEntry(ACCESS, GROUP, NONE) }, returned);
    assertPermission((short) 010660, filePath);
    aclSpec = Lists.newArrayList(aclEntry(ACCESS, USER, "bruce", READ));
    hdfs.modifyAclEntries(filePath, aclSpec);
}
Also used: Path (org.apache.hadoop.fs.Path), AclStatus (org.apache.hadoop.fs.permission.AclStatus), AclEntry (org.apache.hadoop.fs.permission.AclEntry), FSAclBaseTest (org.apache.hadoop.hdfs.server.namenode.FSAclBaseTest), Test (org.junit.Test)
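
Note: the path, snapshotPath, and snapshotName fields referenced by these tests are initialized in the test class setup. The relevant convention is that a snapshot of a directory is readable under its ".snapshot" subdirectory; a minimal sketch of that wiring, with hypothetical names:

// Hypothetical directory and snapshot names; the real test derives its own values.
Path path = new Path("/dirForAclTest");
String snapshotName = "snapshot1";
// A snapshot of "path" named snapshotName is visible at path/.snapshot/snapshotName.
Path snapshotPath = new Path(path, ".snapshot/" + snapshotName);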

Example 78 with AclEntry

Use of org.apache.hadoop.fs.permission.AclEntry in the Apache Hadoop project.

From the class TestAclWithSnapshot, the testDefaultAclNotCopiedToAccessAclOfNewSnapshot method:

@Test
public void testDefaultAclNotCopiedToAccessAclOfNewSnapshot() throws Exception {
    FileSystem.mkdirs(hdfs, path, FsPermission.createImmutable((short) 0700));
    List<AclEntry> aclSpec = Lists.newArrayList(aclEntry(DEFAULT, USER, "bruce", READ_EXECUTE));
    hdfs.modifyAclEntries(path, aclSpec);
    SnapshotTestHelper.createSnapshot(hdfs, path, snapshotName);
    AclStatus s = hdfs.getAclStatus(path);
    AclEntry[] returned = s.getEntries().toArray(new AclEntry[0]);
    assertArrayEquals(new AclEntry[] {
        aclEntry(DEFAULT, USER, ALL),
        aclEntry(DEFAULT, USER, "bruce", READ_EXECUTE),
        aclEntry(DEFAULT, GROUP, NONE),
        aclEntry(DEFAULT, MASK, READ_EXECUTE),
        aclEntry(DEFAULT, OTHER, NONE) }, returned);
    assertPermission((short) 010700, path);
    s = hdfs.getAclStatus(snapshotPath);
    returned = s.getEntries().toArray(new AclEntry[0]);
    assertArrayEquals(new AclEntry[] {
        aclEntry(DEFAULT, USER, ALL),
        aclEntry(DEFAULT, USER, "bruce", READ_EXECUTE),
        aclEntry(DEFAULT, GROUP, NONE),
        aclEntry(DEFAULT, MASK, READ_EXECUTE),
        aclEntry(DEFAULT, OTHER, NONE) }, returned);
    assertPermission((short) 010700, snapshotPath);
    assertDirPermissionDenied(fsAsBruce, BRUCE, snapshotPath);
}
Also used: AclStatus (org.apache.hadoop.fs.permission.AclStatus), AclEntry (org.apache.hadoop.fs.permission.AclEntry), FSAclBaseTest (org.apache.hadoop.hdfs.server.namenode.FSAclBaseTest), Test (org.junit.Test)
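
Note: besides the extended entries, AclStatus also reports the owner and group of the path. A minimal sketch of inspecting it, using the same hdfs and path as above:

AclStatus status = hdfs.getAclStatus(path);
System.out.println("owner=" + status.getOwner() + ", group=" + status.getGroup());
for (AclEntry entry : status.getEntries()) {
    // AclEntry#toString renders the aclspec form, e.g. "default:user:bruce:r-x".
    System.out.println(entry);
}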

Example 79 with AclEntry

Use of org.apache.hadoop.fs.permission.AclEntry in the Apache Hadoop project.

From the class TestAclWithSnapshot, the testOriginalAclEnforcedForSnapshotRootAfterChange method:

@Test
public void testOriginalAclEnforcedForSnapshotRootAfterChange() throws Exception {
    FileSystem.mkdirs(hdfs, path, FsPermission.createImmutable((short) 0700));
    List<AclEntry> aclSpec = Lists.newArrayList(
        aclEntry(ACCESS, USER, ALL),
        aclEntry(ACCESS, USER, "bruce", READ_EXECUTE),
        aclEntry(ACCESS, GROUP, NONE),
        aclEntry(ACCESS, OTHER, NONE));
    hdfs.setAcl(path, aclSpec);
    assertDirPermissionGranted(fsAsBruce, BRUCE, path);
    assertDirPermissionDenied(fsAsDiana, DIANA, path);
    SnapshotTestHelper.createSnapshot(hdfs, path, snapshotName);
    // Both original and snapshot still have same ACL.
    AclStatus s = hdfs.getAclStatus(path);
    AclEntry[] returned = s.getEntries().toArray(new AclEntry[0]);
    assertArrayEquals(new AclEntry[] { aclEntry(ACCESS, USER, "bruce", READ_EXECUTE), aclEntry(ACCESS, GROUP, NONE) }, returned);
    assertPermission((short) 010750, path);
    s = hdfs.getAclStatus(snapshotPath);
    returned = s.getEntries().toArray(new AclEntry[0]);
    assertArrayEquals(new AclEntry[] { aclEntry(ACCESS, USER, "bruce", READ_EXECUTE), aclEntry(ACCESS, GROUP, NONE) }, returned);
    assertPermission((short) 010750, snapshotPath);
    assertDirPermissionGranted(fsAsBruce, BRUCE, snapshotPath);
    assertDirPermissionDenied(fsAsDiana, DIANA, snapshotPath);
    aclSpec = Lists.newArrayList(
        aclEntry(ACCESS, USER, READ_EXECUTE),
        aclEntry(ACCESS, USER, "diana", READ_EXECUTE),
        aclEntry(ACCESS, GROUP, NONE),
        aclEntry(ACCESS, OTHER, NONE));
    hdfs.setAcl(path, aclSpec);
    // Original has changed, but snapshot still has old ACL.
    doSnapshotRootChangeAssertions(path, snapshotPath);
    restart(false);
    doSnapshotRootChangeAssertions(path, snapshotPath);
    restart(true);
    doSnapshotRootChangeAssertions(path, snapshotPath);
}
Also used: AclStatus (org.apache.hadoop.fs.permission.AclStatus), AclEntry (org.apache.hadoop.fs.permission.AclEntry), FSAclBaseTest (org.apache.hadoop.hdfs.server.namenode.FSAclBaseTest), Test (org.junit.Test)
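
Note: assertDirPermissionGranted and assertDirPermissionDenied are helpers defined by the test class; conceptually they perform an operation as another user and check whether AccessControlException is thrown. A rough sketch of that idea (the user name, group, and conf variable are assumptions, not the helper's actual code):

UserGroupInformation bruce =
    UserGroupInformation.createUserForTesting("bruce", new String[] { "testgroup" });
FileSystem fsAsBruce = DFSTestUtil.getFileSystemAs(bruce, conf);
try {
    // Should succeed while the snapshot's ACL grants bruce read/execute access.
    fsAsBruce.listStatus(snapshotPath);
} catch (AccessControlException e) {
    fail("Expected bruce to be granted access to " + snapshotPath);
}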

Example 80 with AclEntry

Use of org.apache.hadoop.fs.permission.AclEntry in the Apache Hadoop project.

From the class TestAclWithSnapshot, the testOriginalAclEnforcedForSnapshotRootAfterRemoval method:

@Test
public void testOriginalAclEnforcedForSnapshotRootAfterRemoval() throws Exception {
    FileSystem.mkdirs(hdfs, path, FsPermission.createImmutable((short) 0700));
    List<AclEntry> aclSpec = Lists.newArrayList(
        aclEntry(ACCESS, USER, ALL),
        aclEntry(ACCESS, USER, "bruce", READ_EXECUTE),
        aclEntry(ACCESS, GROUP, NONE),
        aclEntry(ACCESS, OTHER, NONE));
    hdfs.setAcl(path, aclSpec);
    assertDirPermissionGranted(fsAsBruce, BRUCE, path);
    assertDirPermissionDenied(fsAsDiana, DIANA, path);
    SnapshotTestHelper.createSnapshot(hdfs, path, snapshotName);
    // Both original and snapshot still have same ACL.
    AclStatus s = hdfs.getAclStatus(path);
    AclEntry[] returned = s.getEntries().toArray(new AclEntry[0]);
    assertArrayEquals(new AclEntry[] { aclEntry(ACCESS, USER, "bruce", READ_EXECUTE), aclEntry(ACCESS, GROUP, NONE) }, returned);
    assertPermission((short) 010750, path);
    s = hdfs.getAclStatus(snapshotPath);
    returned = s.getEntries().toArray(new AclEntry[0]);
    assertArrayEquals(new AclEntry[] { aclEntry(ACCESS, USER, "bruce", READ_EXECUTE), aclEntry(ACCESS, GROUP, NONE) }, returned);
    assertPermission((short) 010750, snapshotPath);
    assertDirPermissionGranted(fsAsBruce, BRUCE, snapshotPath);
    assertDirPermissionDenied(fsAsDiana, DIANA, snapshotPath);
    hdfs.removeAcl(path);
    // Original has changed, but snapshot still has old ACL.
    doSnapshotRootRemovalAssertions(path, snapshotPath);
    restart(false);
    doSnapshotRootRemovalAssertions(path, snapshotPath);
    restart(true);
    doSnapshotRootRemovalAssertions(path, snapshotPath);
}
Also used: AclStatus (org.apache.hadoop.fs.permission.AclStatus), AclEntry (org.apache.hadoop.fs.permission.AclEntry), FSAclBaseTest (org.apache.hadoop.hdfs.server.namenode.FSAclBaseTest), Test (org.junit.Test)
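
Note: removeAcl(path), as used above, drops the entire extended ACL and leaves only the base permission bits. FileSystem also offers narrower removal methods; a short sketch of the three variants, assuming the same hdfs, path, and aclEntry helper as in these examples:

// Remove only the listed entries (entries are matched by scope, type, and name):
hdfs.removeAclEntries(path, Lists.newArrayList(aclEntry(ACCESS, USER, "bruce")));

// Remove only the default ACL inherited by new children, keeping the access ACL:
hdfs.removeDefaultAcl(path);

// Remove the entire extended ACL, keeping only the base permission bits:
hdfs.removeAcl(path);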

Aggregations

AclEntry (org.apache.hadoop.fs.permission.AclEntry): 137
Test (org.junit.Test): 90
AclStatus (org.apache.hadoop.fs.permission.AclStatus): 81
Path (org.apache.hadoop.fs.Path): 52
FsPermission (org.apache.hadoop.fs.permission.FsPermission): 25
ArrayList (java.util.ArrayList): 11
FSAclBaseTest (org.apache.hadoop.hdfs.server.namenode.FSAclBaseTest): 11
FileSystem (org.apache.hadoop.fs.FileSystem): 10
Configuration (org.apache.hadoop.conf.Configuration): 7
FileStatus (org.apache.hadoop.fs.FileStatus): 6
MockResponse (com.squareup.okhttp.mockwebserver.MockResponse): 5
ScopedAclEntries (org.apache.hadoop.fs.permission.ScopedAclEntries): 5
DistributedFileSystem (org.apache.hadoop.hdfs.DistributedFileSystem): 5
DatanodeInfoBuilder (org.apache.hadoop.hdfs.protocol.DatanodeInfo.DatanodeInfoBuilder): 5
IOException (java.io.IOException): 4
List (java.util.List): 4
AclEntryScope (org.apache.hadoop.fs.permission.AclEntryScope): 4
AclEntryProto (org.apache.hadoop.hdfs.protocol.proto.AclProtos.AclEntryProto): 4
URI (java.net.URI): 3
AclEntryType (org.apache.hadoop.fs.permission.AclEntryType): 3