
Example 81 with AccessControlException

use of org.apache.hadoop.security.AccessControlException in project ranger by apache.

the class HDFSRangerTest method HDFSReadFailTest.

void HDFSReadFailTest(String fileName) throws Exception {
    FileSystem fileSystem = hdfsCluster.getFileSystem();
    // Write a file - the AccessControlEnforcer won't be invoked as we are the "superuser"
    final Path file = new Path(fileName);
    FSDataOutputStream out = fileSystem.create(file);
    for (int i = 0; i < 1024; ++i) {
        out.write(("data" + i + "\n").getBytes("UTF-8"));
        out.flush();
    }
    out.close();
    // Change permissions to read-only
    fileSystem.setPermission(file, new FsPermission(FsAction.READ, FsAction.NONE, FsAction.NONE));
    // Now try to read the file as "bob" - this should NOT be allowed
    UserGroupInformation ugi = UserGroupInformation.createUserForTesting("bob", new String[] {});
    ugi.doAs(new PrivilegedExceptionAction<Void>() {

        public Void run() throws Exception {
            Configuration conf = new Configuration();
            conf.set("fs.defaultFS", defaultFs);
            FileSystem fs = FileSystem.get(conf);
            // Read the file
            try {
                fs.open(file);
                Assert.fail("Failure expected on an incorrect permission");
            } catch (AccessControlException ex) {
                // expected
                Assert.assertTrue(AccessControlException.class.getName().equals(ex.getClass().getName()));
            }
            fs.close();
            return null;
        }
    });
    // Now try to read the file as "alice" - this should NOT be allowed
    ugi = UserGroupInformation.createUserForTesting("alice", new String[] { "IT" });
    ugi.doAs(new PrivilegedExceptionAction<Void>() {

        public Void run() throws Exception {
            Configuration conf = new Configuration();
            conf.set("fs.defaultFS", defaultFs);
            FileSystem fs = FileSystem.get(conf);
            // Read the file
            try {
                fs.open(file);
                Assert.fail("Failure expected on an incorrect permission");
            } catch (AccessControlException ex) {
                // expected
                Assert.assertTrue(AccessControlException.class.getName().equals(ex.getClass().getName()));
            }
            fs.close();
            return null;
        }
    });
    // Now try to read the file as unknown user "eve" - this should not be allowed
    ugi = UserGroupInformation.createUserForTesting("eve", new String[] {});
    ugi.doAs(new PrivilegedExceptionAction<Void>() {

        public Void run() throws Exception {
            Configuration conf = new Configuration();
            conf.set("fs.defaultFS", defaultFs);
            FileSystem fs = FileSystem.get(conf);
            // Read the file
            try {
                fs.open(file);
                Assert.fail("Failure expected on an incorrect permission");
            } catch (AccessControlException ex) {
                // expected
                Assert.assertTrue(AccessControlException.class.getName().equals(ex.getClass().getName()));
            }
            fs.close();
            return null;
        }
    });
}
Also used: Path (org.apache.hadoop.fs.Path), Configuration (org.apache.hadoop.conf.Configuration), FileSystem (org.apache.hadoop.fs.FileSystem), AccessControlException (org.apache.hadoop.security.AccessControlException), FSDataOutputStream (org.apache.hadoop.fs.FSDataOutputStream), FsPermission (org.apache.hadoop.fs.permission.FsPermission), UserGroupInformation (org.apache.hadoop.security.UserGroupInformation)
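
Since HDFSReadFailTest is a parameterized helper rather than a test method itself, a caller invokes it with the path of a file that a Ranger policy puts off-limits. A minimal sketch of such a caller (the test name and path here are hypothetical, not taken from the Ranger source):

@org.junit.Test
public void readFailTest() throws Exception {
    // Hypothetical path; the real tests pass a path matched by a Ranger
    // policy that denies "bob", "alice" and "eve" read access.
    HDFSReadFailTest("/tmp/tmpdir/data-file");
}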

Example 82 with AccessControlException

use of org.apache.hadoop.security.AccessControlException in project ranger by apache.

the class HDFSRangerTest method readTestUsingTagPolicy.

@org.junit.Test
public void readTestUsingTagPolicy() throws Exception {
    FileSystem fileSystem = hdfsCluster.getFileSystem();
    // Write a file - the AccessControlEnforcer won't be invoked as we are the "superuser"
    final Path file = new Path("/tmp/tmpdir6/data-file2");
    FSDataOutputStream out = fileSystem.create(file);
    for (int i = 0; i < 1024; ++i) {
        out.write(("data" + i + "\n").getBytes("UTF-8"));
        out.flush();
    }
    out.close();
    // Change permissions to read-only
    fileSystem.setPermission(file, new FsPermission(FsAction.READ, FsAction.NONE, FsAction.NONE));
    // Now try to read the file as "bob" - this should be allowed (by the policy - user)
    UserGroupInformation ugi = UserGroupInformation.createUserForTesting("bob", new String[] {});
    ugi.doAs(new PrivilegedExceptionAction<Void>() {

        public Void run() throws Exception {
            Configuration conf = new Configuration();
            conf.set("fs.defaultFS", defaultFs);
            FileSystem fs = FileSystem.get(conf);
            // Read the file
            FSDataInputStream in = fs.open(file);
            ByteArrayOutputStream output = new ByteArrayOutputStream();
            IOUtils.copy(in, output);
            String content = new String(output.toByteArray(), "UTF-8");
            Assert.assertTrue(content.startsWith("data0"));
            fs.close();
            return null;
        }
    });
    // Now try to read the file as "alice" - this should be allowed (by the policy - group)
    ugi = UserGroupInformation.createUserForTesting("alice", new String[] { "IT" });
    ugi.doAs(new PrivilegedExceptionAction<Void>() {

        public Void run() throws Exception {
            Configuration conf = new Configuration();
            conf.set("fs.defaultFS", defaultFs);
            FileSystem fs = FileSystem.get(conf);
            // Read the file
            FSDataInputStream in = fs.open(file);
            ByteArrayOutputStream output = new ByteArrayOutputStream();
            IOUtils.copy(in, output);
            String content = new String(output.toByteArray(), "UTF-8");
            Assert.assertTrue(content.startsWith("data0"));
            fs.close();
            return null;
        }
    });
    // Now try to read the file as unknown user "eve" - this should not be allowed
    ugi = UserGroupInformation.createUserForTesting("eve", new String[] {});
    ugi.doAs(new PrivilegedExceptionAction<Void>() {

        public Void run() throws Exception {
            Configuration conf = new Configuration();
            conf.set("fs.defaultFS", defaultFs);
            FileSystem fs = FileSystem.get(conf);
            // Read the file
            try {
                fs.open(file);
                Assert.fail("Failure expected on an incorrect permission");
            } catch (AccessControlException ex) {
                // expected
                Assert.assertTrue(AccessControlException.class.getName().equals(ex.getClass().getName()));
            }
            fs.close();
            return null;
        }
    });
    // Now try to read the file as known user "dave" - this should not be allowed, as he doesn't have the correct permissions
    ugi = UserGroupInformation.createUserForTesting("dave", new String[] {});
    ugi.doAs(new PrivilegedExceptionAction<Void>() {

        public Void run() throws Exception {
            Configuration conf = new Configuration();
            conf.set("fs.defaultFS", defaultFs);
            FileSystem fs = FileSystem.get(conf);
            // Read the file
            try {
                fs.open(file);
                Assert.fail("Failure expected on an incorrect permission");
            } catch (AccessControlException ex) {
                // expected
                Assert.assertTrue(AccessControlException.class.getName().equals(ex.getClass().getName()));
            }
            fs.close();
            return null;
        }
    });
}
Also used: Path (org.apache.hadoop.fs.Path), Configuration (org.apache.hadoop.conf.Configuration), AccessControlException (org.apache.hadoop.security.AccessControlException), ByteArrayOutputStream (java.io.ByteArrayOutputStream), FileSystem (org.apache.hadoop.fs.FileSystem), FSDataInputStream (org.apache.hadoop.fs.FSDataInputStream), FSDataOutputStream (org.apache.hadoop.fs.FSDataOutputStream), FsPermission (org.apache.hadoop.fs.permission.FsPermission), UserGroupInformation (org.apache.hadoop.security.UserGroupInformation)
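
The doAs blocks above differ only in the user, the groups, and the expected outcome. As a sketch of how that duplication could be factored out (assuming Java 8+ and the same hdfsCluster/defaultFs fixture; this helper is not part of the Ranger source):

private void readAs(String user, String[] groups, final Path file, final boolean expectSuccess) throws Exception {
    UserGroupInformation ugi = UserGroupInformation.createUserForTesting(user, groups);
    // The cast selects the PrivilegedExceptionAction overload of doAs
    ugi.doAs((PrivilegedExceptionAction<Void>) () -> {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", defaultFs);
        FileSystem fs = FileSystem.get(conf);
        try {
            // Opening the file triggers the permission check
            fs.open(file).close();
            Assert.assertTrue("Expected AccessControlException for " + user, expectSuccess);
        } catch (AccessControlException ex) {
            Assert.assertFalse("Unexpected AccessControlException for " + user, expectSuccess);
        } finally {
            fs.close();
        }
        return null;
    });
}

Each block then collapses to a single call, e.g. readAs("bob", new String[] {}, file, true) or readAs("eve", new String[] {}, file, false).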

Example 83 with AccessControlException

use of org.apache.hadoop.security.AccessControlException in project ranger by apache.

the class HDFSRangerTest method writeTest.

@org.junit.Test
public void writeTest() throws Exception {
    FileSystem fileSystem = hdfsCluster.getFileSystem();
    // Write a file - the AccessControlEnforcer won't be invoked as we are the "superuser"
    final Path file = new Path("/tmp/tmpdir2/data-file3");
    FSDataOutputStream out = fileSystem.create(file);
    for (int i = 0; i < 1024; ++i) {
        out.write(("data" + i + "\n").getBytes("UTF-8"));
        out.flush();
    }
    out.close();
    // Now try to write to the file as "bob" - this should be allowed (by the policy - user)
    UserGroupInformation ugi = UserGroupInformation.createUserForTesting("bob", new String[] {});
    ugi.doAs(new PrivilegedExceptionAction<Void>() {

        public Void run() throws Exception {
            Configuration conf = new Configuration();
            conf.set("fs.defaultFS", defaultFs);
            FileSystem fs = FileSystem.get(conf);
            // Write to the file
            fs.append(file);
            fs.close();
            return null;
        }
    });
    // Now try to write to the file as "alice" - this should be allowed (by the policy - group)
    ugi = UserGroupInformation.createUserForTesting("alice", new String[] { "IT" });
    ugi.doAs(new PrivilegedExceptionAction<Void>() {

        public Void run() throws Exception {
            Configuration conf = new Configuration();
            conf.set("fs.defaultFS", defaultFs);
            FileSystem fs = FileSystem.get(conf);
            // Write to the file
            fs.append(file);
            fs.close();
            return null;
        }
    });
    // Now try to write to the file as unknown user "eve" - this should not be allowed
    ugi = UserGroupInformation.createUserForTesting("eve", new String[] {});
    ugi.doAs(new PrivilegedExceptionAction<Void>() {

        public Void run() throws Exception {
            Configuration conf = new Configuration();
            conf.set("fs.defaultFS", defaultFs);
            FileSystem fs = FileSystem.get(conf);
            // Write to the file
            try {
                fs.append(file);
                Assert.fail("Failure expected on an incorrect permission");
            } catch (AccessControlException ex) {
                // expected
                Assert.assertTrue(AccessControlException.class.getName().equals(ex.getClass().getName()));
            }
            fs.close();
            return null;
        }
    });
}
Also used: Path (org.apache.hadoop.fs.Path), Configuration (org.apache.hadoop.conf.Configuration), FileSystem (org.apache.hadoop.fs.FileSystem), AccessControlException (org.apache.hadoop.security.AccessControlException), FSDataOutputStream (org.apache.hadoop.fs.FSDataOutputStream), UserGroupInformation (org.apache.hadoop.security.UserGroupInformation)
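
Note that writeTest only checks that fs.append is permitted; the stream it returns is never written to or closed explicitly (the fs.close() that follows cleans it up). A complete append would look roughly like this sketch, which is not part of the original test:

FSDataOutputStream appendOut = fs.append(file);
appendOut.write("more data\n".getBytes("UTF-8"));
appendOut.close();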

Example 84 with AccessControlException

use of org.apache.hadoop.security.AccessControlException in project ranger by apache.

the class HDFSRangerTest method executeTest.

@org.junit.Test
public void executeTest() throws Exception {
    FileSystem fileSystem = hdfsCluster.getFileSystem();
    // Write a file - the AccessControlEnforcer won't be invoked as we are the "superuser"
    final Path file = new Path("/tmp/tmpdir3/data-file2");
    FSDataOutputStream out = fileSystem.create(file);
    for (int i = 0; i < 1024; ++i) {
        out.write(("data" + i + "\n").getBytes("UTF-8"));
        out.flush();
    }
    out.close();
    // Change permissions to read-only
    fileSystem.setPermission(file, new FsPermission(FsAction.READ, FsAction.NONE, FsAction.NONE));
    // Change the parent directory permissions to be execute only for the owner
    Path parentDir = new Path("/tmp/tmpdir3");
    fileSystem.setPermission(parentDir, new FsPermission(FsAction.EXECUTE, FsAction.NONE, FsAction.NONE));
    // Try to read the directory as "bob" - this should be allowed (by the policy - user)
    UserGroupInformation ugi = UserGroupInformation.createUserForTesting("bob", new String[] {});
    ugi.doAs(new PrivilegedExceptionAction<Void>() {

        public Void run() throws Exception {
            Configuration conf = new Configuration();
            conf.set("fs.defaultFS", defaultFs);
            FileSystem fs = FileSystem.get(conf);
            RemoteIterator<LocatedFileStatus> iter = fs.listFiles(file.getParent(), false);
            Assert.assertTrue(iter.hasNext());
            fs.close();
            return null;
        }
    });
    // Try to read the directory as "alice" - this should be allowed (by the policy - group)
    ugi = UserGroupInformation.createUserForTesting("alice", new String[] { "IT" });
    ugi.doAs(new PrivilegedExceptionAction<Void>() {

        public Void run() throws Exception {
            Configuration conf = new Configuration();
            conf.set("fs.defaultFS", defaultFs);
            FileSystem fs = FileSystem.get(conf);
            RemoteIterator<LocatedFileStatus> iter = fs.listFiles(file.getParent(), false);
            Assert.assertTrue(iter.hasNext());
            fs.close();
            return null;
        }
    });
    // Now try to read the directory as unknown user "eve" - this should not be allowed
    ugi = UserGroupInformation.createUserForTesting("eve", new String[] {});
    ugi.doAs(new PrivilegedExceptionAction<Void>() {

        public Void run() throws Exception {
            Configuration conf = new Configuration();
            conf.set("fs.defaultFS", defaultFs);
            FileSystem fs = FileSystem.get(conf);
            // List the files in the parent directory
            try {
                RemoteIterator<LocatedFileStatus> iter = fs.listFiles(file.getParent(), false);
                Assert.assertTrue(iter.hasNext());
                Assert.fail("Failure expected on an incorrect permission");
            } catch (AccessControlException ex) {
                // expected
                Assert.assertTrue(AccessControlException.class.getName().equals(ex.getClass().getName()));
            }
            fs.close();
            return null;
        }
    });
}
Also used: Path (org.apache.hadoop.fs.Path), Configuration (org.apache.hadoop.conf.Configuration), AccessControlException (org.apache.hadoop.security.AccessControlException), RemoteIterator (org.apache.hadoop.fs.RemoteIterator), FileSystem (org.apache.hadoop.fs.FileSystem), FSDataOutputStream (org.apache.hadoop.fs.FSDataOutputStream), FsPermission (org.apache.hadoop.fs.permission.FsPermission), UserGroupInformation (org.apache.hadoop.security.UserGroupInformation)
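
The setup that makes executeTest interesting is the parent directory permission: FsAction.EXECUTE for the owner and nothing for group or others, i.e. mode --x------. Under plain HDFS semantics, listing a directory requires READ on it (plus EXECUTE to traverse into it), so "bob" and "alice" can only list it because the Ranger policy grants them access. A sketch of an equivalent way to set that permission, using FsPermission's octal short constructor:

// --x------ : execute-only for the owner, no access for group or others
FsPermission execOnly = new FsPermission((short) 0100);
fileSystem.setPermission(parentDir, execOnly);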

Example 85 with AccessControlException

use of org.apache.hadoop.security.AccessControlException in project oozie by apache.

the class AuthorizationService method authorizeForApp.

/**
 * Check if the user+group is authorized to use the specified application. <p> The check is done by verifying the
 * file system permissions on the workflow application.
 *
 * @param user user name.
 * @param group group name.
 * @param appPath application path.
 * @param fileName name of the application definition file (workflow.xml or coordinator.xml).
 * @param conf job configuration, used to detect proxy submissions.
 * @throws AuthorizationException thrown if the user is not authorized for the app.
 */
public void authorizeForApp(String user, String group, String appPath, String fileName, Configuration conf) throws AuthorizationException {
    try {
        HadoopAccessorService has = Services.get().get(HadoopAccessorService.class);
        URI uri = new Path(appPath).toUri();
        Configuration fsConf = has.createConfiguration(uri.getAuthority());
        FileSystem fs = has.createFileSystem(user, uri, fsConf);
        Path path = new Path(appPath);
        try {
            if (!fs.exists(path)) {
                incrCounter(INSTR_FAILED_AUTH_COUNTER, 1);
                throw new AuthorizationException(ErrorCode.E0504, appPath);
            }
            if (conf.get(XOozieClient.IS_PROXY_SUBMISSION) == null) {
                // for non-proxy submission jobs, verify the application definition file exists and is readable
                if (!fs.isFile(path)) {
                    Path appXml = new Path(path, fileName);
                    if (!fs.exists(appXml)) {
                        incrCounter(INSTR_FAILED_AUTH_COUNTER, 1);
                        throw new AuthorizationException(ErrorCode.E0505, appPath);
                    }
                    if (!fs.isFile(appXml)) {
                        incrCounter(INSTR_FAILED_AUTH_COUNTER, 1);
                        throw new AuthorizationException(ErrorCode.E0506, appPath);
                    }
                    fs.open(appXml).close();
                }
            }
        } catch (AccessControlException ex) {
            incrCounter(INSTR_FAILED_AUTH_COUNTER, 1);
            throw new AuthorizationException(ErrorCode.E0507, appPath, ex.getMessage(), ex);
        }
    } catch (IOException ex) {
        incrCounter(INSTR_FAILED_AUTH_COUNTER, 1);
        throw new AuthorizationException(ErrorCode.E0501, ex.getMessage(), ex);
    } catch (HadoopAccessorException e) {
        throw new AuthorizationException(e);
    }
}
Also used: Path (org.apache.hadoop.fs.Path), Configuration (org.apache.hadoop.conf.Configuration), FileSystem (org.apache.hadoop.fs.FileSystem), AccessControlException (org.apache.hadoop.security.AccessControlException), IOException (java.io.IOException), URI (java.net.URI)
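
A hedged usage sketch of authorizeForApp (the service lookup mirrors the Services.get() pattern used inside the method itself; the user, group, path, and jobConf values are illustrative, not from the Oozie source):

AuthorizationService auth = Services.get().get(AuthorizationService.class);
// Throws AuthorizationException if "alice" cannot read the workflow definition
auth.authorizeForApp("alice", "users", "hdfs://namenode:8020/user/alice/my-wf-app",
        "workflow.xml", jobConf);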

Aggregations

Classes most often used alongside AccessControlException in the indexed sources (usage counts):

AccessControlException (org.apache.hadoop.security.AccessControlException): 165
Path (org.apache.hadoop.fs.Path): 72
IOException (java.io.IOException): 69
Test (org.junit.Test): 60
UserGroupInformation (org.apache.hadoop.security.UserGroupInformation): 44
FsPermission (org.apache.hadoop.fs.permission.FsPermission): 41
SnapshotAccessControlException (org.apache.hadoop.hdfs.protocol.SnapshotAccessControlException): 35
FileSystem (org.apache.hadoop.fs.FileSystem): 33
Configuration (org.apache.hadoop.conf.Configuration): 25
HdfsFileStatus (org.apache.hadoop.hdfs.protocol.HdfsFileStatus): 21
FileNotFoundException (java.io.FileNotFoundException): 19
DistributedFileSystem (org.apache.hadoop.hdfs.DistributedFileSystem): 14
FSDataOutputStream (org.apache.hadoop.fs.FSDataOutputStream): 13
UnsupportedEncodingException (java.io.UnsupportedEncodingException): 11
HashMap (java.util.HashMap): 10
FileStatus (org.apache.hadoop.fs.FileStatus): 10
ClientResponse (com.sun.jersey.api.client.ClientResponse): 9
PrivilegedAction (java.security.PrivilegedAction): 9
ParentNotDirectoryException (org.apache.hadoop.fs.ParentNotDirectoryException): 9
RESTResponse (org.apache.ranger.admin.client.datatype.RESTResponse): 9