
Example 76 with FsPermission

Use of org.apache.hadoop.fs.permission.FsPermission in project hadoop by apache.

Class JobHistoryEventHandler, method moveToDoneNow.

// TODO If the FS objects are the same, this should be a rename instead of a
// copy.
private void moveToDoneNow(Path fromPath, Path toPath) throws IOException {
    // check if path exists, in case of retries it may not exist
    if (stagingDirFS.exists(fromPath)) {
        LOG.info("Copying " + fromPath.toString() + " to " + toPath.toString());
        // TODO temporarily removing the existing dst
        doneDirFS.delete(toPath, true);
        boolean copied = FileUtil.copy(stagingDirFS, fromPath, doneDirFS, toPath, false, getConfig());
        if (copied)
            LOG.info("Copied to done location: " + toPath);
        else
            LOG.info("copy failed");
        doneDirFS.setPermission(toPath, new FsPermission(JobHistoryUtils.HISTORY_INTERMEDIATE_FILE_PERMISSIONS));
    }
}
Also used: FsPermission (org.apache.hadoop.fs.permission.FsPermission)
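The snippet above copies a finished job history file to the done directory and then stamps the destination with an explicit FsPermission. A minimal standalone sketch of the same copy-then-setPermission pattern is shown below; the local paths and the octal mode 0770 are my own assumptions for illustration, not values taken from JobHistoryUtils.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;

public class CopyWithPermission {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Both ends are the local file system here purely to keep the sketch runnable.
        FileSystem srcFs = FileSystem.getLocal(conf);
        FileSystem dstFs = FileSystem.getLocal(conf);
        Path from = new Path("/tmp/staging/job.jhist"); // hypothetical source
        Path to = new Path("/tmp/done/job.jhist");      // hypothetical destination

        // Remove any stale destination, then copy without deleting the source.
        dstFs.delete(to, true);
        boolean copied = FileUtil.copy(srcFs, from, dstFs, to, false, conf);

        if (copied) {
            // Octal 0770: owner and group get rwx, others get nothing (assumed mode).
            dstFs.setPermission(to, new FsPermission((short) 0770));
        }
    }
}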

Example 77 with FsPermission

Use of org.apache.hadoop.fs.permission.FsPermission in project hadoop by apache.

Class TestPermission, method testBackwardCompatibility.

/**
   * Tests backward compatibility. Configuration can be
   * either set with old param dfs.umask that takes decimal umasks
   * or dfs.umaskmode that takes symbolic or octal umask.
   */
@Test
public void testBackwardCompatibility() {
    // Test 1 - old configuration key with decimal umask value
    // should be handled when set using the FsPermission.setUMask() API
    FsPermission perm = new FsPermission((short) 18);
    Configuration conf = new Configuration();
    FsPermission.setUMask(conf, perm);
    assertEquals(18, FsPermission.getUMask(conf).toShort());
    // Test 2 - new configuration key is handled
    conf = new Configuration();
    conf.set(FsPermission.UMASK_LABEL, "022");
    assertEquals(18, FsPermission.getUMask(conf).toShort());
    // Test 3 - equivalent valid umask
    conf = new Configuration();
    conf.set(FsPermission.UMASK_LABEL, "0022");
    assertEquals(18, FsPermission.getUMask(conf).toShort());
    // Test 4 - invalid umask
    conf = new Configuration();
    conf.set(FsPermission.UMASK_LABEL, "1222");
    try {
        FsPermission.getUMask(conf);
        fail("expect IllegalArgumentException happen");
    } catch (IllegalArgumentException e) {
    // pass: exception triggered as expected
    }
    // Test 5 - invalid umask
    conf = new Configuration();
    conf.set(FsPermission.UMASK_LABEL, "01222");
    try {
        FsPermission.getUMask(conf);
        fail("expect IllegalArgumentException happen");
    } catch (IllegalArgumentException e) {
    // pass: exception triggered as expected
    }
}
Also used: Configuration (org.apache.hadoop.conf.Configuration), HdfsConfiguration (org.apache.hadoop.hdfs.HdfsConfiguration), FsPermission (org.apache.hadoop.fs.permission.FsPermission), Test (org.junit.Test)
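Two notes on the assertions above: the decimal value 18 is simply octal 022, which is why both configuration styles resolve to the same umask, and the practical effect of a umask is to mask bits out of a requested mode. The following is a small sketch of that arithmetic using FsPermission.applyUMask; it is an illustration added here, not part of TestPermission.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.permission.FsPermission;

public class UmaskDemo {
    public static void main(String[] args) {
        Configuration conf = new Configuration();
        conf.set(FsPermission.UMASK_LABEL, "022"); // the new-style umask key

        FsPermission umask = FsPermission.getUMask(conf);
        FsPermission requested = new FsPermission((short) 0666);

        // applyUMask clears every bit that is set in the umask,
        // so 0666 with umask 022 becomes 0644 ("rw-r--r--").
        System.out.println(requested.applyUMask(umask));
    }
}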

Example 78 with FsPermission

Use of org.apache.hadoop.fs.permission.FsPermission in project hadoop by apache.

Class TestPermission, method testCreate.

@Test
public void testCreate() throws Exception {
    Configuration conf = new HdfsConfiguration();
    conf.setBoolean(DFSConfigKeys.DFS_PERMISSIONS_ENABLED_KEY, true);
    conf.set(FsPermission.UMASK_LABEL, "000");
    MiniDFSCluster cluster = null;
    FileSystem fs = null;
    try {
        cluster = new MiniDFSCluster.Builder(conf).numDataNodes(3).build();
        cluster.waitActive();
        fs = FileSystem.get(conf);
        FsPermission rootPerm = checkPermission(fs, "/", null);
        FsPermission inheritPerm = FsPermission.createImmutable((short) (rootPerm.toShort() | 0300));
        FsPermission dirPerm = new FsPermission((short) 0777);
        fs.mkdirs(new Path("/a1/a2/a3"), dirPerm);
        checkPermission(fs, "/a1", dirPerm);
        checkPermission(fs, "/a1/a2", dirPerm);
        checkPermission(fs, "/a1/a2/a3", dirPerm);
        dirPerm = new FsPermission((short) 0123);
        FsPermission permission = FsPermission.createImmutable((short) (dirPerm.toShort() | 0300));
        fs.mkdirs(new Path("/aa/1/aa/2/aa/3"), dirPerm);
        checkPermission(fs, "/aa/1", permission);
        checkPermission(fs, "/aa/1/aa/2", permission);
        checkPermission(fs, "/aa/1/aa/2/aa/3", dirPerm);
        FsPermission filePerm = new FsPermission((short) 0444);
        Path p = new Path("/b1/b2/b3.txt");
        FSDataOutputStream out = fs.create(p, filePerm, true, conf.getInt(CommonConfigurationKeys.IO_FILE_BUFFER_SIZE_KEY, 4096), fs.getDefaultReplication(p), fs.getDefaultBlockSize(p), null);
        out.write(123);
        out.close();
        checkPermission(fs, "/b1", inheritPerm);
        checkPermission(fs, "/b1/b2", inheritPerm);
        checkPermission(fs, "/b1/b2/b3.txt", filePerm);
        conf.set(FsPermission.UMASK_LABEL, "022");
        permission = FsPermission.createImmutable((short) 0666);
        FileSystem.mkdirs(fs, new Path("/c1"), new FsPermission(permission));
        FileSystem.create(fs, new Path("/c1/c2.txt"), new FsPermission(permission));
        checkPermission(fs, "/c1", permission);
        checkPermission(fs, "/c1/c2.txt", permission);
    } finally {
        try {
            if (fs != null)
                fs.close();
        } catch (Exception e) {
            LOG.error(StringUtils.stringifyException(e));
        }
        try {
            if (cluster != null)
                cluster.shutdown();
        } catch (Exception e) {
            LOG.error(StringUtils.stringifyException(e));
        }
    }
}
Also used: Path (org.apache.hadoop.fs.Path), MiniDFSCluster (org.apache.hadoop.hdfs.MiniDFSCluster), Configuration (org.apache.hadoop.conf.Configuration), HdfsConfiguration (org.apache.hadoop.hdfs.HdfsConfiguration), FileSystem (org.apache.hadoop.fs.FileSystem), FsPermission (org.apache.hadoop.fs.permission.FsPermission), FSDataOutputStream (org.apache.hadoop.fs.FSDataOutputStream), IOException (java.io.IOException), FileNotFoundException (java.io.FileNotFoundException), Test (org.junit.Test)
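One detail in this test that is easy to miss: mkdirs applies the requested FsPermission to the leaf directory, while the test expects parent directories created along the way to additionally carry owner write and execute bits, which is what the `| 0300` computations above encode. A short sketch of that bit arithmetic (my own illustration, not part of the test):

import org.apache.hadoop.fs.permission.FsPermission;

public class ParentDirPermDemo {
    public static void main(String[] args) {
        // The leaf directory /aa/1/aa/2/aa/3 is requested with mode 0123.
        FsPermission requested = new FsPermission((short) 0123);

        // Implicitly created parents (/aa/1, /aa/1/aa/2) are checked against
        // the requested mode OR'd with 0300 (owner write + execute).
        FsPermission intermediate =
                FsPermission.createImmutable((short) (requested.toShort() | 0300));

        System.out.println(requested);     // "--x-w--wx" (0123)
        System.out.println(intermediate);  // "-wx-w--wx" (0323)
    }
}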

Example 79 with FsPermission

Use of org.apache.hadoop.fs.permission.FsPermission in project hadoop by apache.

Class TestPermission, method testNonSuperCannotChangeOwnerForOtherFile.

private void testNonSuperCannotChangeOwnerForOtherFile() throws Exception {
    Path file = createFile(nnfs, "testNonSuperCannotChangeOwnerForOtherFile");
    nnfs.setPermission(file, new FsPermission("777"));
    try {
        userfs.setOwner(file, USER_NAME, null);
        fail("Expect ACE when a non-super user tries to own a file");
    } catch (AccessControlException e) {
        assertThat(e.getMessage(), startsWith("Permission denied"));
    }
}
Also used: Path (org.apache.hadoop.fs.Path), FsPermission (org.apache.hadoop.fs.permission.FsPermission)
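The test relies on two FileSystem handles: nnfs, bound to the HDFS superuser, and userfs, bound to an ordinary user against whom permission checks are enforced. One common way to obtain such a non-superuser handle is sketched below; the user name, group, and helper method are assumptions for illustration, not the actual fixture code from TestPermission.

import java.net.URI;
import java.security.PrivilegedExceptionAction;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.security.UserGroupInformation;

public class UserFsSketch {
    // Returns a FileSystem that performs all operations as the given test user,
    // so namenode permission checks (e.g. on setOwner) actually apply.
    public static FileSystem getFsAs(final Configuration conf, final URI fsUri)
            throws Exception {
        UserGroupInformation ugi =
                UserGroupInformation.createUserForTesting("alice", new String[] { "users" });
        return ugi.doAs(new PrivilegedExceptionAction<FileSystem>() {
            @Override
            public FileSystem run() throws Exception {
                return FileSystem.get(fsUri, conf);
            }
        });
    }
}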

Example 80 with FsPermission

Use of org.apache.hadoop.fs.permission.FsPermission in project hadoop by apache.

Class TestPermissionSymlinks, method testRenameSrcNotWritableFS.

@Test(timeout = 5000)
public void testRenameSrcNotWritableFS() throws Exception {
    fs.setPermission(linkParent, new FsPermission((short) 0555));
    doRenameSrcNotWritableFS();
}
Also used: FsPermission (org.apache.hadoop.fs.permission.FsPermission), Test (org.junit.Test)
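Mode 0555 (r-xr-xr-x) leaves the parent directory readable and traversable but not writable, so an attempt to rename an entry out of it should be rejected, which is presumably what doRenameSrcNotWritableFS asserts. A tiny sketch of how that mode decomposes (my own illustration):

import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.fs.permission.FsPermission;

public class ReadOnlyDirPermDemo {
    public static void main(String[] args) {
        FsPermission p = new FsPermission((short) 0555);

        System.out.println(p);  // "r-xr-xr-x"
        // The owner action lacks WRITE, so creating, deleting, or renaming
        // children under a directory with this mode is denied even for its
        // owner (unless the caller is the superuser).
        System.out.println(p.getUserAction().implies(FsAction.WRITE));  // false
    }
}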

Aggregations

FsPermission (org.apache.hadoop.fs.permission.FsPermission): 427
Path (org.apache.hadoop.fs.Path): 267
Test (org.junit.Test): 180
IOException (java.io.IOException): 120
FileSystem (org.apache.hadoop.fs.FileSystem): 93
Configuration (org.apache.hadoop.conf.Configuration): 89
FileStatus (org.apache.hadoop.fs.FileStatus): 87
FSDataOutputStream (org.apache.hadoop.fs.FSDataOutputStream): 52
AccessControlException (org.apache.hadoop.security.AccessControlException): 43
UserGroupInformation (org.apache.hadoop.security.UserGroupInformation): 36
FileNotFoundException (java.io.FileNotFoundException): 33
MiniDFSCluster (org.apache.hadoop.hdfs.MiniDFSCluster): 29
File (java.io.File): 26
DistributedFileSystem (org.apache.hadoop.hdfs.DistributedFileSystem): 26
HdfsConfiguration (org.apache.hadoop.hdfs.HdfsConfiguration): 26
AclEntry (org.apache.hadoop.fs.permission.AclEntry): 25
ArrayList (java.util.ArrayList): 22
HashMap (java.util.HashMap): 19
YarnConfiguration (org.apache.hadoop.yarn.conf.YarnConfiguration): 16
URI (java.net.URI): 15