Use of org.apache.hadoop.security.AccessControlException in project Ranger (Apache).
From class HDFSRangerTest, method HDFSReadFailTest:
/**
 * Verifies that reading {@code fileName} is denied for "bob", "alice" (group "IT")
 * and the unknown user "eve" after the file is made read-only for its owner.
 *
 * @param fileName HDFS path of the file to create and then attempt to read
 * @throws Exception on any unexpected cluster or privileged-action failure
 */
void HDFSReadFailTest(String fileName) throws Exception {
    FileSystem fileSystem = hdfsCluster.getFileSystem();

    // Write a file - the AccessControlEnforcer won't be invoked as we are the "superuser"
    final Path file = new Path(fileName);
    FSDataOutputStream out = fileSystem.create(file);
    try {
        for (int i = 0; i < 1024; ++i) {
            out.write(("data" + i + "\n").getBytes("UTF-8"));
            out.flush();
        }
    } finally {
        // Close even if a write fails so the file's lease is released.
        out.close();
    }

    // Change permissions to read-only
    fileSystem.setPermission(file, new FsPermission(FsAction.READ, FsAction.NONE, FsAction.NONE));

    // Shared action: opening the file must be rejected with an AccessControlException.
    final PrivilegedExceptionAction<Void> readShouldFail = new PrivilegedExceptionAction<Void>() {
        public Void run() throws Exception {
            Configuration conf = new Configuration();
            conf.set("fs.defaultFS", defaultFs);
            FileSystem fs = FileSystem.get(conf);
            try {
                fs.open(file);
                Assert.fail("Failure expected on an incorrect permission");
            } catch (AccessControlException ex) {
                // expected
                Assert.assertEquals(AccessControlException.class.getName(), ex.getClass().getName());
            } finally {
                // Release the per-user FileSystem handle even when the assertion fails.
                fs.close();
            }
            return null;
        }
    };

    // Now try to read the file as "bob" - this should NOT be allowed
    UserGroupInformation ugi = UserGroupInformation.createUserForTesting("bob", new String[] {});
    ugi.doAs(readShouldFail);

    // Now try to read the file as "alice" (group "IT") - this should NOT be allowed
    ugi = UserGroupInformation.createUserForTesting("alice", new String[] { "IT" });
    ugi.doAs(readShouldFail);

    // Now try to read the file as unknown user "eve" - this should NOT be allowed
    ugi = UserGroupInformation.createUserForTesting("eve", new String[] {});
    ugi.doAs(readShouldFail);
}
Use of org.apache.hadoop.security.AccessControlException in project Ranger (Apache).
From class HDFSRangerTest, method readTestUsingTagPolicy:
@org.junit.Test
public void readTestUsingTagPolicy() throws Exception {
    FileSystem fileSystem = hdfsCluster.getFileSystem();

    // Write a file - the AccessControlEnforcer won't be invoked as we are the "superuser"
    final Path file = new Path("/tmp/tmpdir6/data-file2");
    FSDataOutputStream out = fileSystem.create(file);
    try {
        for (int i = 0; i < 1024; ++i) {
            out.write(("data" + i + "\n").getBytes("UTF-8"));
            out.flush();
        }
    } finally {
        // Close even if a write fails so the file's lease is released.
        out.close();
    }

    // Change permissions to read-only
    fileSystem.setPermission(file, new FsPermission(FsAction.READ, FsAction.NONE, FsAction.NONE));

    // Shared action: reads the file and verifies its content starts with "data0".
    final PrivilegedExceptionAction<Void> readShouldSucceed = new PrivilegedExceptionAction<Void>() {
        public Void run() throws Exception {
            Configuration conf = new Configuration();
            conf.set("fs.defaultFS", defaultFs);
            FileSystem fs = FileSystem.get(conf);
            try {
                FSDataInputStream in = fs.open(file);
                try {
                    ByteArrayOutputStream output = new ByteArrayOutputStream();
                    IOUtils.copy(in, output);
                    String content = new String(output.toByteArray());
                    Assert.assertTrue(content.startsWith("data0"));
                } finally {
                    // Always close the input stream, not just on the happy path.
                    in.close();
                }
            } finally {
                fs.close();
            }
            return null;
        }
    };

    // Shared action: opening the file must be rejected with an AccessControlException.
    final PrivilegedExceptionAction<Void> readShouldFail = new PrivilegedExceptionAction<Void>() {
        public Void run() throws Exception {
            Configuration conf = new Configuration();
            conf.set("fs.defaultFS", defaultFs);
            FileSystem fs = FileSystem.get(conf);
            try {
                fs.open(file);
                Assert.fail("Failure expected on an incorrect permission");
            } catch (AccessControlException ex) {
                // expected
                Assert.assertEquals(AccessControlException.class.getName(), ex.getClass().getName());
            } finally {
                fs.close();
            }
            return null;
        }
    };

    // Now try to read the file as "bob" - this should be allowed (by the policy - user)
    UserGroupInformation ugi = UserGroupInformation.createUserForTesting("bob", new String[] {});
    ugi.doAs(readShouldSucceed);

    // Now try to read the file as "alice" - this should be allowed (by the policy - group)
    ugi = UserGroupInformation.createUserForTesting("alice", new String[] { "IT" });
    ugi.doAs(readShouldSucceed);

    // Now try to read the file as unknown user "eve" - this should not be allowed
    ugi = UserGroupInformation.createUserForTesting("eve", new String[] {});
    ugi.doAs(readShouldFail);

    // Now try to read the file as known user "dave" - this should not be allowed,
    // as he doesn't have the correct permissions
    ugi = UserGroupInformation.createUserForTesting("dave", new String[] {});
    ugi.doAs(readShouldFail);
}
Use of org.apache.hadoop.security.AccessControlException in project Ranger (Apache).
From class HDFSRangerTest, method writeTest:
@org.junit.Test
public void writeTest() throws Exception {
    FileSystem fileSystem = hdfsCluster.getFileSystem();

    // Write a file - the AccessControlEnforcer won't be invoked as we are the "superuser"
    final Path file = new Path("/tmp/tmpdir2/data-file3");
    FSDataOutputStream out = fileSystem.create(file);
    try {
        for (int i = 0; i < 1024; ++i) {
            out.write(("data" + i + "\n").getBytes("UTF-8"));
            out.flush();
        }
    } finally {
        // Close even if a write fails so the file's lease is released.
        out.close();
    }

    // Shared action: appending to the file must be permitted.
    final PrivilegedExceptionAction<Void> appendShouldSucceed = new PrivilegedExceptionAction<Void>() {
        public Void run() throws Exception {
            Configuration conf = new Configuration();
            conf.set("fs.defaultFS", defaultFs);
            FileSystem fs = FileSystem.get(conf);
            try {
                // Close the append stream immediately; leaving it open would hold the
                // HDFS lease and can break subsequent appends by other users.
                fs.append(file).close();
            } finally {
                fs.close();
            }
            return null;
        }
    };

    // Shared action: appending must be rejected with an AccessControlException.
    final PrivilegedExceptionAction<Void> appendShouldFail = new PrivilegedExceptionAction<Void>() {
        public Void run() throws Exception {
            Configuration conf = new Configuration();
            conf.set("fs.defaultFS", defaultFs);
            FileSystem fs = FileSystem.get(conf);
            try {
                fs.append(file);
                Assert.fail("Failure expected on an incorrect permission");
            } catch (AccessControlException ex) {
                // expected
                Assert.assertEquals(AccessControlException.class.getName(), ex.getClass().getName());
            } finally {
                fs.close();
            }
            return null;
        }
    };

    // Now try to write to the file as "bob" - this should be allowed (by the policy - user)
    UserGroupInformation ugi = UserGroupInformation.createUserForTesting("bob", new String[] {});
    ugi.doAs(appendShouldSucceed);

    // Now try to write to the file as "alice" - this should be allowed (by the policy - group)
    ugi = UserGroupInformation.createUserForTesting("alice", new String[] { "IT" });
    ugi.doAs(appendShouldSucceed);

    // Now try to write to the file as unknown user "eve" - this should not be allowed
    ugi = UserGroupInformation.createUserForTesting("eve", new String[] {});
    ugi.doAs(appendShouldFail);
}
Use of org.apache.hadoop.security.AccessControlException in project Ranger (Apache).
From class HDFSRangerTest, method executeTest:
@org.junit.Test
public void executeTest() throws Exception {
    FileSystem fileSystem = hdfsCluster.getFileSystem();

    // Write a file - the AccessControlEnforcer won't be invoked as we are the "superuser"
    final Path file = new Path("/tmp/tmpdir3/data-file2");
    FSDataOutputStream out = fileSystem.create(file);
    try {
        for (int i = 0; i < 1024; ++i) {
            out.write(("data" + i + "\n").getBytes("UTF-8"));
            out.flush();
        }
    } finally {
        // Close even if a write fails so the file's lease is released.
        out.close();
    }

    // Change permissions to read-only
    fileSystem.setPermission(file, new FsPermission(FsAction.READ, FsAction.NONE, FsAction.NONE));

    // Change the parent directory permissions to be execute only for the owner
    Path parentDir = new Path("/tmp/tmpdir3");
    fileSystem.setPermission(parentDir, new FsPermission(FsAction.EXECUTE, FsAction.NONE, FsAction.NONE));

    // Shared action: listing the parent directory must succeed and see at least one entry.
    final PrivilegedExceptionAction<Void> listShouldSucceed = new PrivilegedExceptionAction<Void>() {
        public Void run() throws Exception {
            Configuration conf = new Configuration();
            conf.set("fs.defaultFS", defaultFs);
            FileSystem fs = FileSystem.get(conf);
            try {
                RemoteIterator<LocatedFileStatus> iter = fs.listFiles(file.getParent(), false);
                Assert.assertTrue(iter.hasNext());
            } finally {
                fs.close();
            }
            return null;
        }
    };

    // Shared action: listing must be rejected with an AccessControlException.
    final PrivilegedExceptionAction<Void> listShouldFail = new PrivilegedExceptionAction<Void>() {
        public Void run() throws Exception {
            Configuration conf = new Configuration();
            conf.set("fs.defaultFS", defaultFs);
            FileSystem fs = FileSystem.get(conf);
            // List the directory (NOT a write - the check exercises EXECUTE on the dir)
            try {
                RemoteIterator<LocatedFileStatus> iter = fs.listFiles(file.getParent(), false);
                Assert.assertTrue(iter.hasNext());
                Assert.fail("Failure expected on an incorrect permission");
            } catch (AccessControlException ex) {
                // expected
                Assert.assertEquals(AccessControlException.class.getName(), ex.getClass().getName());
            } finally {
                fs.close();
            }
            return null;
        }
    };

    // Try to read the directory as "bob" - this should be allowed (by the policy - user)
    UserGroupInformation ugi = UserGroupInformation.createUserForTesting("bob", new String[] {});
    ugi.doAs(listShouldSucceed);

    // Try to read the directory as "alice" - this should be allowed (by the policy - group)
    ugi = UserGroupInformation.createUserForTesting("alice", new String[] { "IT" });
    ugi.doAs(listShouldSucceed);

    // Now try to read the directory as unknown user "eve" - this should not be allowed
    ugi = UserGroupInformation.createUserForTesting("eve", new String[] {});
    ugi.doAs(listShouldFail);
}
Use of org.apache.hadoop.security.AccessControlException in project Oozie (Apache).
From class AuthorizationService, method authorizeForApp:
/**
 * Check if the user+group is authorized to use the specified application. <p> The check is done by checking the
 * file system permissions on the workflow application.
 *
 * @param user user name.
 * @param group group name (not consulted by this check; access is evaluated for {@code user} only).
 * @param appPath application path.
 * @param fileName workflow or coordinator.xml
 * @param conf job configuration; when {@link XOozieClient#IS_PROXY_SUBMISSION} is set, the
 *        application-XML existence/readability checks are skipped.
 * @throws AuthorizationException thrown if the user is not authorized for the app.
 */
public void authorizeForApp(String user, String group, String appPath, String fileName, Configuration conf) throws AuthorizationException {
    try {
        HadoopAccessorService has = Services.get().get(HadoopAccessorService.class);
        URI uri = new Path(appPath).toUri();
        // Build a cluster-specific configuration keyed by the URI authority, and obtain the
        // filesystem as the requesting user so permission checks run under their identity.
        Configuration fsConf = has.createConfiguration(uri.getAuthority());
        FileSystem fs = has.createFileSystem(user, uri, fsConf);
        Path path = new Path(appPath);
        try {
            // E0504: the application path itself does not exist.
            if (!fs.exists(path)) {
                incrCounter(INSTR_FAILED_AUTH_COUNTER, 1);
                throw new AuthorizationException(ErrorCode.E0504, appPath);
            }
            if (conf.get(XOozieClient.IS_PROXY_SUBMISSION) == null) {
                // files for non proxy submission jobs;
                if (!fs.isFile(path)) {
                    // appPath is a directory: it must contain a readable application XML
                    // (E0505: missing, E0506: present but not a regular file).
                    Path appXml = new Path(path, fileName);
                    if (!fs.exists(appXml)) {
                        incrCounter(INSTR_FAILED_AUTH_COUNTER, 1);
                        throw new AuthorizationException(ErrorCode.E0505, appPath);
                    }
                    if (!fs.isFile(appXml)) {
                        incrCounter(INSTR_FAILED_AUTH_COUNTER, 1);
                        throw new AuthorizationException(ErrorCode.E0506, appPath);
                    }
                    // Opening (and immediately closing) the XML exercises the filesystem's
                    // read-permission check for this user.
                    fs.open(appXml).close();
                }
            }
        } catch (AccessControlException ex) {
            // Permission denied by the filesystem -> authorization failure (E0507).
            incrCounter(INSTR_FAILED_AUTH_COUNTER, 1);
            throw new AuthorizationException(ErrorCode.E0507, appPath, ex.getMessage(), ex);
        }
    } catch (IOException ex) {
        // Any other filesystem error is surfaced as a generic internal error (E0501).
        incrCounter(INSTR_FAILED_AUTH_COUNTER, 1);
        throw new AuthorizationException(ErrorCode.E0501, ex.getMessage(), ex);
    } catch (HadoopAccessorException e) {
        throw new AuthorizationException(e);
    }
}
Aggregations