Use of org.apache.hadoop.fs.permission.AclEntry in project hadoop by apache.
The class TestAclWithSnapshot, method testRemoveReadsCurrentState.
@Test
public void testRemoveReadsCurrentState() throws Exception {
  FileSystem.mkdirs(hdfs, path, FsPermission.createImmutable((short) 0700));
  SnapshotTestHelper.createSnapshot(hdfs, path, snapshotName);
  // Modify the ACL only after the snapshot has been taken.
  List<AclEntry> aclSpec = Lists.newArrayList(
      aclEntry(ACCESS, USER, "bruce", ALL));
  hdfs.modifyAclEntries(path, aclSpec);
  hdfs.removeAcl(path);
  // The removal must operate on the current state: no ACL entries remain
  // on the path and its original permissions are back in effect.
  AclEntry[] expected = new AclEntry[] {};
  AclStatus s = hdfs.getAclStatus(path);
  AclEntry[] returned = s.getEntries().toArray(new AclEntry[0]);
  assertArrayEquals(expected, returned);
  assertPermission((short) 0700, path);
  assertDirPermissionDenied(fsAsBruce, BRUCE, path);
  assertDirPermissionDenied(fsAsDiana, DIANA, path);
}
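For readers unfamiliar with the aclEntry(...) shorthand used above (a static test helper, presumably from AclTestHelpers), the same entry can be built with the public AclEntry.Builder API. A minimal, self-contained sketch:

import org.apache.hadoop.fs.permission.AclEntry;
import org.apache.hadoop.fs.permission.AclEntryScope;
import org.apache.hadoop.fs.permission.AclEntryType;
import org.apache.hadoop.fs.permission.FsAction;

public class AclEntryBuilderExample {
  public static void main(String[] args) {
    // Equivalent of the helper call aclEntry(ACCESS, USER, "bruce", ALL).
    AclEntry bruceAll = new AclEntry.Builder()
        .setScope(AclEntryScope.ACCESS)
        .setType(AclEntryType.USER)
        .setName("bruce")
        .setPermission(FsAction.ALL)
        .build();
    System.out.println(bruceAll);  // spec-style string, e.g. user:bruce:rwx
  }
}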
The class TestAclWithSnapshot, method doSnapshotContentsRemovalAssertions.
private static void doSnapshotContentsRemovalAssertions(Path filePath,
    Path fileSnapshotPath, Path subdirPath, Path subdirSnapshotPath)
    throws Exception {
  // Current file and subdirectory: the ACL has been removed.
  AclEntry[] expected = new AclEntry[] {};
  AclStatus s = hdfs.getAclStatus(filePath);
  AclEntry[] returned = s.getEntries().toArray(new AclEntry[0]);
  assertArrayEquals(expected, returned);
  assertPermission((short) 0500, filePath);
  assertFilePermissionDenied(fsAsBruce, BRUCE, filePath);
  assertFilePermissionDenied(fsAsDiana, DIANA, filePath);

  s = hdfs.getAclStatus(subdirPath);
  returned = s.getEntries().toArray(new AclEntry[0]);
  assertArrayEquals(expected, returned);
  assertPermission((short) 0500, subdirPath);
  assertDirPermissionDenied(fsAsBruce, BRUCE, subdirPath);
  assertDirPermissionDenied(fsAsDiana, DIANA, subdirPath);

  // Snapshot copies: the original ACL entries are still visible.
  expected = new AclEntry[] {
      aclEntry(ACCESS, USER, "bruce", READ_EXECUTE),
      aclEntry(ACCESS, GROUP, NONE) };
  s = hdfs.getAclStatus(fileSnapshotPath);
  returned = s.getEntries().toArray(new AclEntry[0]);
  assertArrayEquals(expected, returned);
  assertPermission((short) 010550, fileSnapshotPath);
  assertFilePermissionGranted(fsAsBruce, BRUCE, fileSnapshotPath);
  assertFilePermissionDenied(fsAsDiana, DIANA, fileSnapshotPath);

  s = hdfs.getAclStatus(subdirSnapshotPath);
  returned = s.getEntries().toArray(new AclEntry[0]);
  assertArrayEquals(expected, returned);
  assertPermission((short) 010550, subdirSnapshotPath);
  assertDirPermissionGranted(fsAsBruce, BRUCE, subdirSnapshotPath);
  assertDirPermissionDenied(fsAsDiana, DIANA, subdirSnapshotPath);
}
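The 010550 values asserted for the snapshot paths are octal shorts. A hedged reading, not stated in the snippet itself: the low twelve bits are the ordinary mode (0550, r-xr-x---), and the extra 010000 bit is the marker the test's assertPermission helper folds in when an inode also carries an ACL. A small decomposition sketch:

public class AclPermissionBits {
  public static void main(String[] args) {
    short expected = (short) 010550;
    // Low twelve bits: the classic mode, 0550 (r-xr-x---).
    int modeBits = expected & 07777;
    // Remaining high bit, 010000 (1 << 12): assumed here to be the ACL marker.
    boolean aclMarker = (expected & (1 << 12)) != 0;
    System.out.printf("mode=%o aclMarker=%b%n", modeBits, aclMarker);
  }
}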
The class TestLs, method resetMock.
@Before
public void resetMock() throws IOException {
  reset(mockFs);
  // By default, every path on the mocked FileSystem reports an empty ACL.
  AclStatus mockAclStatus = mock(AclStatus.class);
  when(mockAclStatus.getEntries()).thenReturn(new ArrayList<AclEntry>());
  when(mockFs.getAclStatus(any(Path.class))).thenReturn(mockAclStatus);
}
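As an illustrative variant (not part of TestLs), the same mock can be made to report a non-empty ACL for a specific path, reusing the test's mockFs field; the helper name, path argument, and entry below are hypothetical:

// Hypothetical helper: stub a single named ACL entry for one path.
private void stubAclForPath(Path path) throws IOException {
  AclStatus aclStatus = mock(AclStatus.class);
  when(aclStatus.getEntries()).thenReturn(Collections.singletonList(
      new AclEntry.Builder()
          .setScope(AclEntryScope.ACCESS)
          .setType(AclEntryType.USER)
          .setName("bruce")
          .setPermission(FsAction.READ)
          .build()));
  when(mockFs.getAclStatus(eq(path))).thenReturn(aclStatus);
}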
The class TestViewFileSystemDelegation, method testAclMethods.
/**
 * Tests that ViewFileSystem dispatches calls for every ACL method through
 * the mount table to the correct underlying FileSystem, with all Path
 * arguments translated as required.
 */
@Test
public void testAclMethods() throws Exception {
  Configuration conf = ViewFileSystemTestSetup.createConfig();
  FileSystem mockFs1 = setupMockFileSystem(conf, new URI("mockfs1:/"));
  FileSystem mockFs2 = setupMockFileSystem(conf, new URI("mockfs2:/"));
  FileSystem viewFs = FileSystem.get(FsConstants.VIEWFS_URI, conf);
  Path viewFsPath1 = new Path("/mounts/mockfs1/a/b/c");
  Path mockFsPath1 = new Path("/a/b/c");
  Path viewFsPath2 = new Path("/mounts/mockfs2/d/e/f");
  Path mockFsPath2 = new Path("/d/e/f");
  List<AclEntry> entries = Collections.emptyList();

  viewFs.modifyAclEntries(viewFsPath1, entries);
  verify(mockFs1).modifyAclEntries(mockFsPath1, entries);
  viewFs.modifyAclEntries(viewFsPath2, entries);
  verify(mockFs2).modifyAclEntries(mockFsPath2, entries);

  viewFs.removeAclEntries(viewFsPath1, entries);
  verify(mockFs1).removeAclEntries(mockFsPath1, entries);
  viewFs.removeAclEntries(viewFsPath2, entries);
  verify(mockFs2).removeAclEntries(mockFsPath2, entries);

  viewFs.removeDefaultAcl(viewFsPath1);
  verify(mockFs1).removeDefaultAcl(mockFsPath1);
  viewFs.removeDefaultAcl(viewFsPath2);
  verify(mockFs2).removeDefaultAcl(mockFsPath2);

  viewFs.removeAcl(viewFsPath1);
  verify(mockFs1).removeAcl(mockFsPath1);
  viewFs.removeAcl(viewFsPath2);
  verify(mockFs2).removeAcl(mockFsPath2);

  viewFs.setAcl(viewFsPath1, entries);
  verify(mockFs1).setAcl(mockFsPath1, entries);
  viewFs.setAcl(viewFsPath2, entries);
  verify(mockFs2).setAcl(mockFsPath2, entries);

  viewFs.getAclStatus(viewFsPath1);
  verify(mockFs1).getAclStatus(mockFsPath1);
  viewFs.getAclStatus(viewFsPath2);
  verify(mockFs2).getAclStatus(mockFsPath2);
}
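The delegation checks above use an empty entry list, which is all the verification needs. As a hedged aside, a populated spec can be parsed from the same string form the CLI uses via AclEntry.parseAclSpec and pushed through the same calls; the spec string and the reuse of the test's variables below are illustrative only:

// Sketch only: reuses viewFs, mockFs1, viewFsPath1 and mockFsPath1 from above.
List<AclEntry> spec = AclEntry.parseAclSpec(
    "user::rwx,user:bruce:rwx,group::r-x,other::---", true);
viewFs.setAcl(viewFsPath1, spec);
verify(mockFs1).setAcl(mockFsPath1, spec);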
The class OfflineImageReconstructor, method aclXmlToProto.
private INodeSection.AclFeatureProto.Builder aclXmlToProto(Node acls)
    throws IOException {
  AclFeatureProto.Builder b = AclFeatureProto.newBuilder();
  while (true) {
    Node acl = acls.removeChild(INODE_SECTION_ACL);
    if (acl == null) {
      break;
    }
    // Each XML child holds one ACL entry in its string form.
    String val = acl.getVal();
    AclEntry entry = AclEntry.parseAclEntry(val, true);
    // Pack the name-string id plus the type, scope and permission ordinals
    // into the single int representation used by the fsimage protobuf.
    int nameId = registerStringId(entry.getName() == null ?
        EMPTY_STRING : entry.getName());
    int v = ((nameId & ACL_ENTRY_NAME_MASK) << ACL_ENTRY_NAME_OFFSET)
        | (entry.getType().ordinal() << ACL_ENTRY_TYPE_OFFSET)
        | (entry.getScope().ordinal() << ACL_ENTRY_SCOPE_OFFSET)
        | (entry.getPermission().ordinal());
    b.addEntries(v);
  }
  return b;
}
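Reading the packed format back can be handy when inspecting a reconstructed image. The sketch below is a hedged inverse of the transform above: it derives the type/scope/permission masks from the adjacent offsets instead of copying the loader's own constants, assumes the same imports and constants as the surrounding class, and uses a hypothetical method name.

// Hypothetical inverse of the packing above; field widths are inferred from
// the offsets, not taken from the real fsimage loader.
private static AclEntry aclEntryFromPackedInt(int v, String resolvedName) {
  final int TYPE_MASK = (1 << (ACL_ENTRY_SCOPE_OFFSET - ACL_ENTRY_TYPE_OFFSET)) - 1;
  final int SCOPE_MASK = (1 << (ACL_ENTRY_NAME_OFFSET - ACL_ENTRY_SCOPE_OFFSET)) - 1;
  final int PERM_MASK = (1 << ACL_ENTRY_TYPE_OFFSET) - 1;
  AclEntry.Builder builder = new AclEntry.Builder()
      .setType(AclEntryType.values()[(v >>> ACL_ENTRY_TYPE_OFFSET) & TYPE_MASK])
      .setScope(AclEntryScope.values()[(v >>> ACL_ENTRY_SCOPE_OFFSET) & SCOPE_MASK])
      .setPermission(FsAction.values()[v & PERM_MASK]);
  if (resolvedName != null && !resolvedName.isEmpty()) {
    // The packed int stores only the string-table id; the caller resolves it.
    builder.setName(resolvedName);
  }
  return builder.build();
}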