Use of org.apache.hadoop.fs.permission.AclStatus in project hive by apache.
From the class TestHdfsUtils, method testSetFullFileStatusFailInheritAclsRecursive.
/**
* Tests that HdfsUtils#setFullFileStatus
* does not throw an exception when setting ACLs and with recursion.
*/
@Test
public void testSetFullFileStatusFailInheritAclsRecursive() throws Exception {
Configuration conf = new Configuration();
conf.set("dfs.namenode.acls.enabled", "true");
Path fakeTarget = new Path("fakePath");
HdfsUtils.HadoopFileStatus mockHadoopFileStatus = mock(HdfsUtils.HadoopFileStatus.class);
FileStatus mockSourceStatus = mock(FileStatus.class);
FsShell mockFsShell = mock(FsShell.class);
AclStatus mockAclStatus = mock(AclStatus.class);
when(mockSourceStatus.getPermission()).thenReturn(new FsPermission((short) 777));
when(mockAclStatus.toString()).thenReturn("");
when(mockHadoopFileStatus.getFileStatus()).thenReturn(mockSourceStatus);
when(mockHadoopFileStatus.getAclEntries()).thenReturn(new ArrayList<>());
when(mockHadoopFileStatus.getAclStatus()).thenReturn(mockAclStatus);
doThrow(RuntimeException.class).when(mockFsShell).run(any(String[].class));
HdfsUtils.setFullFileStatus(conf, mockHadoopFileStatus, "", mock(FileSystem.class), fakeTarget, true, mockFsShell);
verify(mockFsShell).run(new String[] { "-setfacl", "-R", "--set", any(), fakeTarget.toString() });
}
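These TestHdfsUtils excerpts omit their import section. A minimal set that would let them compile, assuming JUnit 4 and Mockito (suggested by the @Test annotation and the when/doThrow/verify stubbing style), would be roughly:
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FsShell;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.AclEntry;
import org.apache.hadoop.fs.permission.AclStatus;
import org.apache.hadoop.fs.permission.FsPermission;

import org.junit.Test;

// Mockito exposes the matchers as static members, so one class covers mock/when/verify/doThrow/any
import static org.mockito.Mockito.any;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

// The HdfsUtils class under test is not shown here; its package depends on the Hive module.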
Use of org.apache.hadoop.fs.permission.AclStatus in project hive by apache.
From the class TestHdfsUtils, method testSetFullFileStatusFailInheritAcls.
/**
* Tests that {@link HdfsUtils#setFullFileStatus}
* does not throw an exception when setting ACLs and without recursion.
*/
@Test
public void testSetFullFileStatusFailInheritAcls() throws IOException {
Configuration conf = new Configuration();
conf.set("dfs.namenode.acls.enabled", "true");
HdfsUtils.HadoopFileStatus mockHadoopFileStatus = mock(HdfsUtils.HadoopFileStatus.class);
FileStatus mockSourceStatus = mock(FileStatus.class);
AclStatus mockAclStatus = mock(AclStatus.class);
FileSystem mockFs = mock(FileSystem.class);
when(mockSourceStatus.getPermission()).thenReturn(new FsPermission((short) 777));
when(mockAclStatus.toString()).thenReturn("");
when(mockHadoopFileStatus.getFileStatus()).thenReturn(mockSourceStatus);
when(mockHadoopFileStatus.getAclEntries()).thenReturn(new ArrayList<>());
when(mockHadoopFileStatus.getAclStatus()).thenReturn(mockAclStatus);
doThrow(RuntimeException.class).when(mockFs).setAcl(any(Path.class), any(List.class));
HdfsUtils.setFullFileStatus(conf, mockHadoopFileStatus, null, mockFs, new Path("fakePath"), false);
verify(mockFs).setAcl(any(Path.class), any(List.class));
}
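For context, the behaviour this test pins down is the non-recursive ACL branch: the source status's ACL entries are applied to the target with FileSystem#setAcl, and a failure is swallowed rather than rethrown. A rough, self-contained sketch of that shape (the class and method names here are illustrative, not Hive's actual implementation):
import java.util.List;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.AclEntry;

class AclInheritSketch {
  // Illustrative only: applies inherited ACL entries and swallows failures,
  // which is the behaviour testSetFullFileStatusFailInheritAcls asserts.
  static void setAclsBestEffort(FileSystem fs, Path target, List<AclEntry> aclEntries) {
    try {
      // FileSystem#setAcl(Path, List<AclEntry>) is the call the test stubs to throw
      fs.setAcl(target, aclEntries);
    } catch (Exception e) {
      // do not propagate: ACL inheritance is best-effort here
      System.err.println("Unable to set ACLs on " + target + ": " + e);
    }
  }
}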
Use of org.apache.hadoop.fs.permission.AclStatus in project hive by apache.
From the class TestHdfsUtils, method testSetFullFileStatusFailInheritAclsRecursive.
/**
* Tests that {@link HdfsUtils#setFullFileStatus(Configuration, HdfsUtils.HadoopFileStatus, FileSystem, Path, boolean)}
* does not throw an exception when setting ACLs and with recursion.
*/
@Test
public void testSetFullFileStatusFailInheritAclsRecursive() throws Exception {
Configuration conf = new Configuration();
conf.set("dfs.namenode.acls.enabled", "true");
Path fakeTarget = new Path("fakePath");
HdfsUtils.HadoopFileStatus mockHadoopFileStatus = mock(HdfsUtils.HadoopFileStatus.class);
FileStatus mockSourceStatus = mock(FileStatus.class);
FsShell mockFsShell = mock(FsShell.class);
AclStatus mockAclStatus = mock(AclStatus.class);
when(mockSourceStatus.getPermission()).thenReturn(new FsPermission((short) 777));
when(mockAclStatus.toString()).thenReturn("");
when(mockHadoopFileStatus.getFileStatus()).thenReturn(mockSourceStatus);
when(mockHadoopFileStatus.getAclEntries()).thenReturn(new ArrayList<AclEntry>());
when(mockHadoopFileStatus.getAclStatus()).thenReturn(mockAclStatus);
doThrow(RuntimeException.class).when(mockFsShell).run(any(String[].class));
HdfsUtils.setFullFileStatus(conf, mockHadoopFileStatus, "", mock(FileSystem.class), fakeTarget, true, mockFsShell);
verify(mockFsShell).run(new String[] { "-setfacl", "-R", "--set", any(), fakeTarget.toString() });
}
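The recursive variant is verified through FsShell rather than the FileSystem API: the test expects a "-setfacl -R --set <aclSpec> <path>" invocation. A hedged sketch of that branch (illustrative only; the class name and the way the acl-spec string is assembled are assumptions, not Hive's code):
import java.util.List;
import java.util.stream.Collectors;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FsShell;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.AclEntry;

class RecursiveAclSketch {
  static void setAclsRecursively(Configuration conf, Path target, List<AclEntry> aclEntries)
      throws Exception {
    // Assumption: AclEntry#toString renders entries in acl-spec form (e.g. "user:foo:r-x");
    // the real code may build the spec string differently.
    String aclSpec = aclEntries.stream()
        .map(AclEntry::toString)
        .collect(Collectors.joining(","));
    FsShell shell = new FsShell(conf);
    // FsShell#run(String[]) is the Tool entry point the test stubs with doThrow(...)
    shell.run(new String[] { "-setfacl", "-R", "--set", aclSpec, target.toString() });
  }
}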
Use of org.apache.hadoop.fs.permission.AclStatus in project hadoop by apache.
From the class TestFSImageWithAcl, method testAcl.
private void testAcl(boolean persistNamespace) throws IOException {
Path p = new Path("/p");
DistributedFileSystem fs = cluster.getFileSystem();
fs.create(p).close();
fs.mkdirs(new Path("/23"));
AclEntry e = new AclEntry.Builder().setName("foo").setPermission(READ_EXECUTE).setScope(ACCESS).setType(USER).build();
fs.modifyAclEntries(p, Lists.newArrayList(e));
restart(fs, persistNamespace);
AclStatus s = cluster.getNamesystem().getAclStatus(p.toString());
AclEntry[] returned = Lists.newArrayList(s.getEntries()).toArray(new AclEntry[0]);
Assert.assertArrayEquals(new AclEntry[] { aclEntry(ACCESS, USER, "foo", READ_EXECUTE), aclEntry(ACCESS, GROUP, READ) }, returned);
fs.removeAcl(p);
if (persistNamespace) {
fs.setSafeMode(SafeModeAction.SAFEMODE_ENTER);
fs.saveNamespace();
fs.setSafeMode(SafeModeAction.SAFEMODE_LEAVE);
}
cluster.restartNameNode();
cluster.waitActive();
s = cluster.getNamesystem().getAclStatus(p.toString());
returned = Lists.newArrayList(s.getEntries()).toArray(new AclEntry[0]);
Assert.assertArrayEquals(new AclEntry[] {}, returned);
fs.modifyAclEntries(p, Lists.newArrayList(e));
s = cluster.getNamesystem().getAclStatus(p.toString());
returned = Lists.newArrayList(s.getEntries()).toArray(new AclEntry[0]);
Assert.assertArrayEquals(new AclEntry[] { aclEntry(ACCESS, USER, "foo", READ_EXECUTE), aclEntry(ACCESS, GROUP, READ) }, returned);
}
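The aclEntry(ACCESS, USER, "foo", READ_EXECUTE) calls used in the assertions come from Hadoop's ACL test helpers; they are thin wrappers around AclEntry.Builder. A minimal equivalent (a sketch, with an assumed class name) looks like:
import org.apache.hadoop.fs.permission.AclEntry;
import org.apache.hadoop.fs.permission.AclEntryScope;
import org.apache.hadoop.fs.permission.AclEntryType;
import org.apache.hadoop.fs.permission.FsAction;

class AclEntrySketch {
  // Named entry, e.g. aclEntry(ACCESS, USER, "foo", READ_EXECUTE)
  static AclEntry aclEntry(AclEntryScope scope, AclEntryType type, String name, FsAction perm) {
    return new AclEntry.Builder()
        .setScope(scope).setType(type).setName(name).setPermission(perm).build();
  }

  // Unnamed entry, e.g. aclEntry(ACCESS, GROUP, READ)
  static AclEntry aclEntry(AclEntryScope scope, AclEntryType type, FsAction perm) {
    return new AclEntry.Builder()
        .setScope(scope).setType(type).setPermission(perm).build();
  }
}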
Use of org.apache.hadoop.fs.permission.AclStatus in project hadoop by apache.
From the class TestAclWithSnapshot, method doSnapshotContentsChangeAssertions.
private static void doSnapshotContentsChangeAssertions(Path filePath, Path fileSnapshotPath, Path subdirPath, Path subdirSnapshotPath) throws Exception {
AclEntry[] expected = new AclEntry[] { aclEntry(ACCESS, USER, "diana", ALL), aclEntry(ACCESS, GROUP, NONE) };
AclStatus s = hdfs.getAclStatus(filePath);
AclEntry[] returned = s.getEntries().toArray(new AclEntry[0]);
assertArrayEquals(expected, returned);
assertPermission((short) 010570, filePath);
assertFilePermissionDenied(fsAsBruce, BRUCE, filePath);
assertFilePermissionGranted(fsAsDiana, DIANA, filePath);
s = hdfs.getAclStatus(subdirPath);
returned = s.getEntries().toArray(new AclEntry[0]);
assertArrayEquals(expected, returned);
assertPermission((short) 010570, subdirPath);
assertDirPermissionDenied(fsAsBruce, BRUCE, subdirPath);
assertDirPermissionGranted(fsAsDiana, DIANA, subdirPath);
expected = new AclEntry[] { aclEntry(ACCESS, USER, "bruce", READ_EXECUTE), aclEntry(ACCESS, GROUP, NONE) };
s = hdfs.getAclStatus(fileSnapshotPath);
returned = s.getEntries().toArray(new AclEntry[0]);
assertArrayEquals(expected, returned);
assertPermission((short) 010550, fileSnapshotPath);
assertFilePermissionGranted(fsAsBruce, BRUCE, fileSnapshotPath);
assertFilePermissionDenied(fsAsDiana, DIANA, fileSnapshotPath);
s = hdfs.getAclStatus(subdirSnapshotPath);
returned = s.getEntries().toArray(new AclEntry[0]);
assertArrayEquals(expected, returned);
assertPermission((short) 010550, subdirSnapshotPath);
assertDirPermissionGranted(fsAsBruce, BRUCE, subdirSnapshotPath);
assertDirPermissionDenied(fsAsDiana, DIANA, subdirSnapshotPath);
}
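assertPermission compares the path's FsPermission against a five-digit octal such as 010570: in these Hadoop tests the leading 01 marks that the path carries an extended ACL, and the low bits are the ordinary mode. A simplified sketch of such a helper (not the exact Hadoop test utility) is:
import static org.junit.Assert.assertEquals;

import java.io.IOException;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;

class PermissionAssertSketch {
  static void assertPermission(FileSystem fs, Path path, short expected) throws IOException {
    FsPermission actual = fs.getFileStatus(path).getPermission();
    // compare only the standard permission bits; mask off the ACL marker in the expected value
    assertEquals((short) (expected & 01777), actual.toShort());
  }
}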