Use of org.apache.hadoop.fs.permission.FsPermission in project hadoop by apache.
From the class TestLocalFSFileContextMainOperations, method testDefaultFilePermission.
@Test
public void testDefaultFilePermission() throws IOException {
  Path file = fileContextTestHelper.getTestRootPath(fc,
      "testDefaultFilePermission");
  FileContextTestHelper.createFile(fc, file);
  FsPermission expect =
      FileContext.FILE_DEFAULT_PERM.applyUMask(fc.getUMask());
  Assert.assertEquals(expect, fc.getFileStatus(file).getPermission());
}
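The expected permission here is the default file permission with the umask bits cleared. A minimal standalone sketch of that arithmetic, assuming Hadoop's default file permission of 666 and an illustrative umask of 022:

import org.apache.hadoop.fs.permission.FsPermission;

public class UmaskDemo {
  public static void main(String[] args) {
    // Hadoop's default file permission is 666 (rw-rw-rw-).
    FsPermission fileDefault = FsPermission.getFileDefault();
    // applyUMask clears the masked bits: 666 & ~022 = 644.
    FsPermission umask = new FsPermission("022");
    FsPermission effective = fileDefault.applyUMask(umask);
    System.out.println(effective);  // prints rw-r--r--
  }
}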
Use of org.apache.hadoop.fs.permission.FsPermission in project hadoop by apache.
From the class TestLocalFileSystemPermission, method testSetUmaskInRealTime.
/**
 * Steps:
 * 1. Create a directory with default permissions (777) and umask 022.
 * 2. Check that the directory has the expected permissions: 755.
 * 3. Set the umask to 062.
 * 4. Create a new directory with default permissions.
 * 5. For this directory we expect 715 as the permission, not 755.
 * @throws Exception all exceptions are simply propagated.
 */
@Test
public void testSetUmaskInRealTime() throws Exception {
  assumeNotWindows();
  LocalFileSystem localfs = FileSystem.getLocal(new Configuration());
  Configuration conf = localfs.getConf();
  conf.set(CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY, "022");
  LOGGER.info("Current umask is {}",
      conf.get(CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY));
  Path dir = new Path(TEST_PATH_PREFIX + "dir");
  Path dir2 = new Path(TEST_PATH_PREFIX + "dir2");
  try {
    assertTrue(localfs.mkdirs(dir));
    FsPermission initialPermission = getPermission(localfs, dir);
    assertEquals("With umask 022 permission should be 755 since the default permission is 777",
        new FsPermission("755"), initialPermission);
    // Modify the umask, create a new directory,
    // and check that the new umask is applied
    conf.set(CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY, "062");
    assertTrue(localfs.mkdirs(dir2));
    FsPermission finalPermission = localfs.getFileStatus(dir2).getPermission();
    assertThat("With umask 062 permission should not be 755 since the default permission is 777",
        new FsPermission("755"), is(not(finalPermission)));
    assertEquals("With umask 062 we expect 715 since the default permission is 777",
        new FsPermission("715"), finalPermission);
  } finally {
    conf.set(CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY, "022");
    cleanup(localfs, dir);
    cleanup(localfs, dir2);
  }
}
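The 715 expectation follows from clearing the umask bits from the directory default: 777 & ~062 = 715 (rwx--xr-x). A minimal sketch of how a configured umask is read back and applied, using a throwaway Configuration purely for illustration:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.fs.permission.FsPermission;

public class UmaskMathDemo {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    conf.set(CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY, "062");
    // FsPermission.getUMask reads fs.permissions.umask-mode from the conf.
    FsPermission umask = FsPermission.getUMask(conf);
    // Directory default is 777; 777 & ~062 = 715.
    FsPermission effective = FsPermission.getDirDefault().applyUMask(umask);
    System.out.println(effective);  // prints rwx--xr-x
  }
}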
Use of org.apache.hadoop.fs.permission.FsPermission in project hadoop by apache.
From the class TestFsShellCopy, method testPutSrcFileNoPerm.
@Test
public void testPutSrcFileNoPerm() throws Exception {
  final Path src = new Path(testRootDir, "srcNoPerm");
  final Path dst = new Path(testRootDir, "dst");
  lfs.delete(src, true);
  // Create an empty source file and strip all of its permission bits
  lfs.create(src).close();
  lfs.setPermission(src, new FsPermission((short) 0));
  lfs.delete(dst, true);
  try {
    final ByteArrayOutputStream err = new ByteArrayOutputStream();
    PrintStream oldErr = System.err;
    System.setErr(new PrintStream(err));
    shellRun(1, "-put", src.toString(), dst.toString());
    System.setErr(oldErr);
    System.err.print(err.toString());
    assertTrue(err.toString().contains("(Permission denied)"));
  } finally {
    // make sure the test file can be deleted
    lfs.setPermission(src, new FsPermission((short) 0755));
  }
}
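The pattern of swapping System.err for a ByteArrayOutputStream to assert on error output is reusable on its own. A minimal standalone sketch; note the finally block restores the stream even if the body throws, a hardening the test above handles with explicit setErr calls:

import java.io.ByteArrayOutputStream;
import java.io.PrintStream;

public class StderrCapture {
  public static void main(String[] args) {
    ByteArrayOutputStream err = new ByteArrayOutputStream();
    PrintStream oldErr = System.err;
    System.setErr(new PrintStream(err));
    try {
      System.err.println("something failed: (Permission denied)");
    } finally {
      System.setErr(oldErr);  // always restore, even if the body throws
    }
    System.out.println("captured: " + err.toString());
  }
}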
Use of org.apache.hadoop.fs.permission.FsPermission in project hadoop by apache.
From the class FileSystem, method checkAccessPermissions.
/**
 * This method provides the default implementation of
 * {@link #access(Path, FsAction)}.
 *
 * @param stat FileStatus to check
 * @param mode type of access to check
 * @throws AccessControlException if access is denied
 * @throws IOException for any error
 */
@InterfaceAudience.Private
static void checkAccessPermissions(FileStatus stat, FsAction mode)
    throws AccessControlException, IOException {
  FsPermission perm = stat.getPermission();
  UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
  String user = ugi.getShortUserName();
  if (user.equals(stat.getOwner())) {
    if (perm.getUserAction().implies(mode)) {
      return;
    }
  } else if (ugi.getGroups().contains(stat.getGroup())) {
    if (perm.getGroupAction().implies(mode)) {
      return;
    }
  } else {
    if (perm.getOtherAction().implies(mode)) {
      return;
    }
  }
  throw new AccessControlException(String.format(
      "Permission denied: user=%s, path=\"%s\":%s:%s:%s%s",
      user, stat.getPath(), stat.getOwner(), stat.getGroup(),
      stat.isDirectory() ? "d" : "-", perm));
}
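The owner/group/other fall-through above mirrors POSIX: exactly one class of permission bits is consulted, chosen by the caller's identity, and FsAction.implies performs the bit test. A standalone sketch of implies, with the permission value 750 chosen purely for illustration:

import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.fs.permission.FsPermission;

public class ImpliesDemo {
  public static void main(String[] args) {
    FsPermission perm = new FsPermission("750");  // rwxr-x---
    // Owner bits (rwx) imply WRITE; group bits (r-x) do not.
    System.out.println(perm.getUserAction().implies(FsAction.WRITE));   // true
    System.out.println(perm.getGroupAction().implies(FsAction.WRITE));  // false
    // Other bits (---) imply nothing except NONE.
    System.out.println(perm.getOtherAction().implies(FsAction.NONE));   // true
  }
}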
Use of org.apache.hadoop.fs.permission.FsPermission in project hadoop by apache.
From the class JavaKeyStoreProvider, method tryLoadFromPath.
/**
 * Try loading from the user-specified path; fall back to the backup
 * path when the exception is not due to a bad/wrong password.
 * @param path actual path to load from
 * @param backupPath backup path (_OLD)
 * @return the permissions of the loaded file
 * @throws NoSuchAlgorithmException
 * @throws CertificateException
 * @throws IOException
 */
private FsPermission tryLoadFromPath(Path path, Path backupPath)
    throws NoSuchAlgorithmException, CertificateException, IOException {
  FsPermission perm = null;
  try {
    perm = loadFromPath(path, password);
    // Remove _OLD if it exists
    fs.delete(backupPath, true);
    LOG.debug("KeyStore loaded successfully !!");
  } catch (IOException ioe) {
    // If the load failed for a reason other than a bad/wrong
    // password, try the _OLD file if it exists
    if (!isBadorWrongPassword(ioe)) {
      perm = loadFromPath(backupPath, password);
      // Rename CURRENT to CORRUPTED
      renameOrFail(path, new Path(path.toString()
          + "_CORRUPTED_" + System.currentTimeMillis()));
      renameOrFail(backupPath, path);
      if (LOG.isDebugEnabled()) {
        LOG.debug(String.format(
            "KeyStore loaded successfully from '%s' since '%s' was corrupted !!",
            backupPath, path));
      }
    } else {
      throw ioe;
    }
  }
  return perm;
}
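The recovery protocol here is a classic two-file swap: the primary is loaded first, the backup only on corruption that is not a credential problem, and the corrupt primary is renamed aside rather than deleted. A generic sketch of the same shape, with the loader stubbed out; every name here (Loader, tryLoad, isCredentialError) is hypothetical, not part of the Hadoop API:

import java.io.IOException;

public class TwoFileRecovery {
  interface Loader { String load(String path) throws IOException; }

  // Hypothetical helper mirroring tryLoadFromPath's shape: primary first,
  // backup only when the primary is corrupt (not a credential problem).
  static String tryLoad(Loader loader, String primary, String backup)
      throws IOException {
    try {
      return loader.load(primary);
    } catch (IOException ioe) {
      if (isCredentialError(ioe)) {
        throw ioe;  // a bad password won't be fixed by the backup copy
      }
      String result = loader.load(backup);
      // Real code would also rename primary -> primary_CORRUPTED_<ts>
      // and backup -> primary here, so the next load sees a clean state.
      return result;
    }
  }

  static boolean isCredentialError(IOException ioe) {
    return ioe.getMessage() != null && ioe.getMessage().contains("password");
  }

  public static void main(String[] args) throws IOException {
    Loader loader = path -> {
      if (path.equals("store")) throw new IOException("corrupt header");
      return "keystore-from-" + path;
    };
    System.out.println(tryLoad(loader, "store", "store_OLD"));
  }
}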