Use of org.apache.hadoop.security.authorize.AuthorizationException in project hadoop by apache.
The class RpcProgramNfs3, method fsstat.
@VisibleForTesting
FSSTAT3Response fsstat(XDR xdr, SecurityHandler securityHandler,
    SocketAddress remoteAddress) {
  FSSTAT3Response response = new FSSTAT3Response(Nfs3Status.NFS3_OK);
  if (!checkAccessPrivilege(remoteAddress, AccessPrivilege.READ_ONLY)) {
    response.setStatus(Nfs3Status.NFS3ERR_ACCES);
    return response;
  }
  DFSClient dfsClient = clientCache.getDfsClient(securityHandler.getUser());
  if (dfsClient == null) {
    response.setStatus(Nfs3Status.NFS3ERR_SERVERFAULT);
    return response;
  }
  FSSTAT3Request request;
  try {
    request = FSSTAT3Request.deserialize(xdr);
  } catch (IOException e) {
    LOG.error("Invalid FSSTAT request");
    return new FSSTAT3Response(Nfs3Status.NFS3ERR_INVAL);
  }
  FileHandle handle = request.getHandle();
  if (LOG.isDebugEnabled()) {
    LOG.debug("NFS FSSTAT fileId: " + handle.getFileId()
        + " client: " + remoteAddress);
  }
  try {
    FsStatus fsStatus = dfsClient.getDiskStatus();
    long totalBytes = fsStatus.getCapacity();
    long freeBytes = fsStatus.getRemaining();
    Nfs3FileAttributes attrs = writeManager.getFileAttr(dfsClient, handle, iug);
    if (attrs == null) {
      LOG.info("Can't get path for fileId: " + handle.getFileId());
      return new FSSTAT3Response(Nfs3Status.NFS3ERR_STALE);
    }
    long maxFsObjects = config.getLong("dfs.max.objects", 0);
    if (maxFsObjects == 0) {
      // A value of zero in HDFS indicates no limit to the number
      // of objects that dfs supports. Using Integer.MAX_VALUE instead of
      // Long.MAX_VALUE so 32-bit clients won't complain.
      maxFsObjects = Integer.MAX_VALUE;
    }
    return new FSSTAT3Response(Nfs3Status.NFS3_OK, attrs, totalBytes,
        freeBytes, freeBytes, maxFsObjects, maxFsObjects, maxFsObjects, 0);
  } catch (RemoteException r) {
    LOG.warn("Exception ", r);
    IOException io = r.unwrapRemoteException();
    // AuthorizationException can be thrown if the user can't be proxied.
    if (io instanceof AuthorizationException) {
      return new FSSTAT3Response(Nfs3Status.NFS3ERR_ACCES);
    } else {
      return new FSSTAT3Response(Nfs3Status.NFS3ERR_IO);
    }
  } catch (IOException e) {
    LOG.warn("Exception ", e);
    int status = mapErrorStatus(e);
    return new FSSTAT3Response(status);
  }
}
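The catch block above shows the standard pattern for detecting proxy-user failures over Hadoop RPC: the server-side AuthorizationException travels inside a RemoteException and must be unwrapped before the instanceof check. A minimal self-contained sketch of just that pattern (the helper class and method names below are illustrative, not part of RpcProgramNfs3):

import java.io.IOException;
import org.apache.hadoop.ipc.RemoteException;
import org.apache.hadoop.nfs.nfs3.Nfs3Status;
import org.apache.hadoop.security.authorize.AuthorizationException;

// Hypothetical helper mirroring the fsstat catch block: translate a
// RemoteException from an RPC call into an NFS3 status code.
class RemoteExceptionMapper {
  static int toNfs3Status(RemoteException r) {
    // unwrapRemoteException() re-instantiates the original exception class
    // carried inside the RemoteException, when that class is known locally.
    IOException io = r.unwrapRemoteException();
    if (io instanceof AuthorizationException) {
      // Raised when the gateway user is not allowed to proxy the caller.
      return Nfs3Status.NFS3ERR_ACCES;
    }
    return Nfs3Status.NFS3ERR_IO;
  }
}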
Use of org.apache.hadoop.security.authorize.AuthorizationException in project hadoop by apache.
The class TestKMS, method testACLs.
@Test
@SuppressWarnings("checkstyle:methodlength")
public void testACLs() throws Exception {
  Configuration conf = new Configuration();
  conf.set("hadoop.security.authentication", "kerberos");
  final File testDir = getTestDir();
  conf = createBaseKMSConf(testDir, conf);
  conf.set("hadoop.kms.authentication.type", "kerberos");
  conf.set("hadoop.kms.authentication.kerberos.keytab",
      keytab.getAbsolutePath());
  conf.set("hadoop.kms.authentication.kerberos.principal", "HTTP/localhost");
  conf.set("hadoop.kms.authentication.kerberos.name.rules", "DEFAULT");
  for (KMSACLs.Type type : KMSACLs.Type.values()) {
    conf.set(type.getAclConfigKey(), type.toString());
  }
  conf.set(KMSACLs.Type.CREATE.getAclConfigKey(),
      KMSACLs.Type.CREATE.toString() + ",SET_KEY_MATERIAL");
  conf.set(KMSACLs.Type.ROLLOVER.getAclConfigKey(),
      KMSACLs.Type.ROLLOVER.toString() + ",SET_KEY_MATERIAL");
  conf.set(KeyAuthorizationKeyProvider.KEY_ACL + "k0.ALL", "*");
  conf.set(KeyAuthorizationKeyProvider.KEY_ACL + "k1.ALL", "*");
  writeConf(testDir, conf);
  runServer(null, null, testDir, new KMSCallable<Void>() {
    @Override
    public Void call() throws Exception {
      final Configuration conf = new Configuration();
      conf.setInt(KeyProvider.DEFAULT_BITLENGTH_NAME, 128);
      final URI uri = createKMSUri(getKMSUrl());
      // nothing allowed
      doAs("client", new PrivilegedExceptionAction<Void>() {
        @Override
        public Void run() throws Exception {
          KeyProvider kp = createProvider(uri, conf);
          try {
            kp.createKey("k", new KeyProvider.Options(conf));
            Assert.fail();
          } catch (AuthorizationException ex) {
            // NOP
          } catch (Exception ex) {
            Assert.fail(ex.getMessage());
          }
          try {
            kp.createKey("k", new byte[16], new KeyProvider.Options(conf));
            Assert.fail();
          } catch (AuthorizationException ex) {
            // NOP
          } catch (Exception ex) {
            Assert.fail(ex.getMessage());
          }
          try {
            kp.rollNewVersion("k");
            Assert.fail();
          } catch (AuthorizationException ex) {
            // NOP
          } catch (Exception ex) {
            Assert.fail(ex.getMessage());
          }
          try {
            kp.rollNewVersion("k", new byte[16]);
            Assert.fail();
          } catch (AuthorizationException ex) {
            // NOP
          } catch (Exception ex) {
            Assert.fail(ex.getMessage());
          }
          try {
            kp.getKeys();
            Assert.fail();
          } catch (AuthorizationException ex) {
            // NOP
          } catch (Exception ex) {
            Assert.fail(ex.getMessage());
          }
          try {
            kp.getKeysMetadata("k");
            Assert.fail();
          } catch (AuthorizationException ex) {
            // NOP
          } catch (Exception ex) {
            Assert.fail(ex.getMessage());
          }
          try {
            // we are using JavaKeyStoreProvider for testing, so we know how
            // the keyversion is created.
            kp.getKeyVersion("k@0");
            Assert.fail();
          } catch (AuthorizationException ex) {
            // NOP
          } catch (Exception ex) {
            Assert.fail(ex.getMessage());
          }
          try {
            kp.getCurrentKey("k");
            Assert.fail();
          } catch (AuthorizationException ex) {
            // NOP
          } catch (Exception ex) {
            Assert.fail(ex.getMessage());
          }
          try {
            kp.getMetadata("k");
            Assert.fail();
          } catch (AuthorizationException ex) {
            // NOP
          } catch (Exception ex) {
            Assert.fail(ex.getMessage());
          }
          try {
            kp.getKeyVersions("k");
            Assert.fail();
          } catch (AuthorizationException ex) {
            // NOP
          } catch (Exception ex) {
            Assert.fail(ex.getMessage());
          }
          return null;
        }
      });
      doAs("CREATE", new PrivilegedExceptionAction<Void>() {
        @Override
        public Void run() throws Exception {
          KeyProvider kp = createProvider(uri, conf);
          try {
            KeyProvider.KeyVersion kv = kp.createKey("k0",
                new KeyProvider.Options(conf));
            Assert.assertNull(kv.getMaterial());
          } catch (Exception ex) {
            Assert.fail(ex.getMessage());
          }
          return null;
        }
      });
      doAs("DELETE", new PrivilegedExceptionAction<Void>() {
        @Override
        public Void run() throws Exception {
          KeyProvider kp = createProvider(uri, conf);
          try {
            kp.deleteKey("k0");
          } catch (Exception ex) {
            Assert.fail(ex.getMessage());
          }
          return null;
        }
      });
      doAs("SET_KEY_MATERIAL", new PrivilegedExceptionAction<Void>() {
        @Override
        public Void run() throws Exception {
          KeyProvider kp = createProvider(uri, conf);
          try {
            KeyProvider.KeyVersion kv = kp.createKey("k1", new byte[16],
                new KeyProvider.Options(conf));
            Assert.assertNull(kv.getMaterial());
          } catch (Exception ex) {
            Assert.fail(ex.getMessage());
          }
          return null;
        }
      });
      doAs("ROLLOVER", new PrivilegedExceptionAction<Void>() {
        @Override
        public Void run() throws Exception {
          KeyProvider kp = createProvider(uri, conf);
          try {
            KeyProvider.KeyVersion kv = kp.rollNewVersion("k1");
            Assert.assertNull(kv.getMaterial());
          } catch (Exception ex) {
            Assert.fail(ex.getMessage());
          }
          return null;
        }
      });
      doAs("SET_KEY_MATERIAL", new PrivilegedExceptionAction<Void>() {
        @Override
        public Void run() throws Exception {
          KeyProvider kp = createProvider(uri, conf);
          try {
            KeyProvider.KeyVersion kv = kp.rollNewVersion("k1", new byte[16]);
            Assert.assertNull(kv.getMaterial());
          } catch (Exception ex) {
            Assert.fail(ex.getMessage());
          }
          return null;
        }
      });
      final KeyVersion currKv = doAs("GET", new PrivilegedExceptionAction<KeyVersion>() {
        @Override
        public KeyVersion run() throws Exception {
          KeyProvider kp = createProvider(uri, conf);
          try {
            kp.getKeyVersion("k1@0");
            KeyVersion kv = kp.getCurrentKey("k1");
            return kv;
          } catch (Exception ex) {
            Assert.fail(ex.toString());
          }
          return null;
        }
      });
      final EncryptedKeyVersion encKv = doAs("GENERATE_EEK", new PrivilegedExceptionAction<EncryptedKeyVersion>() {
        @Override
        public EncryptedKeyVersion run() throws Exception {
          KeyProvider kp = createProvider(uri, conf);
          try {
            KeyProviderCryptoExtension kpCE = KeyProviderCryptoExtension
                .createKeyProviderCryptoExtension(kp);
            EncryptedKeyVersion ek1 = kpCE.generateEncryptedKey(currKv.getName());
            return ek1;
          } catch (Exception ex) {
            Assert.fail(ex.toString());
          }
          return null;
        }
      });
      doAs("GENERATE_EEK", new PrivilegedExceptionAction<Void>() {
        @Override
        public Void run() throws Exception {
          KeyProvider kp = createProvider(uri, conf);
          KeyProviderCryptoExtension kpCE = KeyProviderCryptoExtension
              .createKeyProviderCryptoExtension(kp);
          kpCE.reencryptEncryptedKey(encKv);
          return null;
        }
      });
      doAs("DECRYPT_EEK", new PrivilegedExceptionAction<Void>() {
        @Override
        public Void run() throws Exception {
          KeyProvider kp = createProvider(uri, conf);
          try {
            KeyProviderCryptoExtension kpCE = KeyProviderCryptoExtension
                .createKeyProviderCryptoExtension(kp);
            kpCE.decryptEncryptedKey(encKv);
          } catch (Exception ex) {
            Assert.fail(ex.getMessage());
          }
          return null;
        }
      });
      doAs("GET_KEYS", new PrivilegedExceptionAction<Void>() {
        @Override
        public Void run() throws Exception {
          KeyProvider kp = createProvider(uri, conf);
          try {
            kp.getKeys();
          } catch (Exception ex) {
            Assert.fail(ex.getMessage());
          }
          return null;
        }
      });
      doAs("GET_METADATA", new PrivilegedExceptionAction<Void>() {
        @Override
        public Void run() throws Exception {
          KeyProvider kp = createProvider(uri, conf);
          try {
            kp.getMetadata("k1");
            kp.getKeysMetadata("k1");
          } catch (Exception ex) {
            Assert.fail(ex.getMessage());
          }
          return null;
        }
      });
      // stop the reloader, to avoid it running while we are writing
      // the new file
      KMSWebApp.getACLs().stopReloader();
      // test ACL reloading
      // sleep to ensure the ACLs file modifiedTime is newer
      Thread.sleep(10);
      conf.set(KMSACLs.Type.CREATE.getAclConfigKey(), "foo");
      conf.set(KMSACLs.Type.GENERATE_EEK.getAclConfigKey(), "foo");
      writeConf(testDir, conf);
      Thread.sleep(1000);
      // force a reload by hand
      KMSWebApp.getACLs().run();
      // should not be able to create a key now
      doAs("CREATE", new PrivilegedExceptionAction<Void>() {
        @Override
        public Void run() throws Exception {
          try {
            KeyProvider kp = createProvider(uri, conf);
            KeyProvider.KeyVersion kv = kp.createKey("k2",
                new KeyProvider.Options(conf));
            Assert.fail();
          } catch (AuthorizationException ex) {
            // NOP
          } catch (Exception ex) {
            Assert.fail(ex.getMessage());
          }
          return null;
        }
      });
      doAs("GENERATE_EEK", new PrivilegedExceptionAction<Void>() {
        @Override
        public Void run() throws Exception {
          KeyProvider kp = createProvider(uri, conf);
          try {
            KeyProviderCryptoExtension kpCE = KeyProviderCryptoExtension
                .createKeyProviderCryptoExtension(kp);
            kpCE.generateEncryptedKey("k1");
          } catch (IOException ex) {
            // This is not an AuthorizationException because the generate call
            // goes through the ValueQueue. See KMSCP#generateEncryptedKey.
            if (ex.getCause().getCause() instanceof AuthorizationException) {
              LOG.info("Caught expected exception.", ex);
            } else {
              throw ex;
            }
          }
          return null;
        }
      });
      doAs("GENERATE_EEK", new PrivilegedExceptionAction<Void>() {
        @Override
        public Void run() throws Exception {
          KeyProvider kp = createProvider(uri, conf);
          try {
            KeyProviderCryptoExtension kpCE = KeyProviderCryptoExtension
                .createKeyProviderCryptoExtension(kp);
            kpCE.reencryptEncryptedKey(encKv);
          } catch (AuthorizationException ex) {
            LOG.info("Caught expected exception.", ex);
          }
          return null;
        }
      });
      return null;
    }
  });
}
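Every operation above runs under a different principal through the doAs(...) helper, which is what lets a single test walk the whole ACL matrix. A simplified sketch of such a helper on top of UserGroupInformation (an assumption for illustration: the real TestKMS variant logs each principal in from the MiniKdc keytab rather than using createRemoteUser):

import java.security.PrivilegedExceptionAction;
import org.apache.hadoop.security.UserGroupInformation;

// Hypothetical simplified doAs(): run `action` with `user` as the
// effective (simple-auth) identity.
static <T> T doAs(String user, PrivilegedExceptionAction<T> action)
    throws Exception {
  UserGroupInformation ugi = UserGroupInformation.createRemoteUser(user);
  // doAs() installs the UGI's subject for the duration of the action,
  // so the KMS client picks up that identity when building the request.
  return ugi.doAs(action);
}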
Use of org.apache.hadoop.security.authorize.AuthorizationException in project hadoop by apache.
The class ExceptionHandler, method exceptionCaught.
static DefaultFullHttpResponse exceptionCaught(Throwable cause) {
  Exception e = cause instanceof Exception ? (Exception) cause : new Exception(cause);
  if (LOG.isTraceEnabled()) {
    LOG.trace("GOT EXCEPTION", e);
  }
  // Convert exception
  if (e instanceof ParamException) {
    final ParamException paramexception = (ParamException) e;
    e = new IllegalArgumentException("Invalid value for webhdfs parameter \""
        + paramexception.getParameterName() + "\": "
        + e.getCause().getMessage(), e);
  } else if (e instanceof ContainerException || e instanceof SecurityException) {
    e = toCause(e);
  } else if (e instanceof RemoteException) {
    e = ((RemoteException) e).unwrapRemoteException();
  }
  // Map response status
  final HttpResponseStatus s;
  if (e instanceof SecurityException) {
    s = FORBIDDEN;
  } else if (e instanceof AuthorizationException) {
    s = FORBIDDEN;
  } else if (e instanceof FileNotFoundException) {
    s = NOT_FOUND;
  } else if (e instanceof IOException) {
    s = FORBIDDEN;
  } else if (e instanceof UnsupportedOperationException) {
    s = BAD_REQUEST;
  } else if (e instanceof IllegalArgumentException) {
    s = BAD_REQUEST;
  } else {
    LOG.warn("INTERNAL_SERVER_ERROR", e);
    s = INTERNAL_SERVER_ERROR;
  }
  final byte[] js = JsonUtil.toJsonString(e).getBytes(Charsets.UTF_8);
  DefaultFullHttpResponse resp =
      new DefaultFullHttpResponse(HTTP_1_1, s, Unpooled.wrappedBuffer(js));
  resp.headers().set(CONTENT_TYPE, APPLICATION_JSON_UTF8);
  resp.headers().set(CONTENT_LENGTH, js.length);
  return resp;
}
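Because exceptionCaught returns a fully built DefaultFullHttpResponse, a Netty channel handler can write it out directly. A sketch of how a pipeline handler might consume it (the handler class here is illustrative, not the actual webhdfs DataNode handler):

import io.netty.channel.ChannelFutureListener;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.SimpleChannelInboundHandler;
import io.netty.handler.codec.http.DefaultFullHttpResponse;
import io.netty.handler.codec.http.HttpRequest;

// Hypothetical inbound handler: any exception raised while serving the
// request is converted to a JSON error response and the channel is closed.
class WebHdfsErrorHandler extends SimpleChannelInboundHandler<HttpRequest> {
  @Override
  protected void channelRead0(ChannelHandlerContext ctx, HttpRequest req) {
    // request handling elided in this sketch
  }

  @Override
  public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
    DefaultFullHttpResponse resp = ExceptionHandler.exceptionCaught(cause);
    // Write the mapped response, then close the connection.
    ctx.writeAndFlush(resp).addListener(ChannelFutureListener.CLOSE);
  }
}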
Use of org.apache.hadoop.security.authorize.AuthorizationException in project hadoop by apache.
The class ExceptionHandler, method toResponse.
@Override
public Response toResponse(Exception e) {
  if (LOG.isTraceEnabled()) {
    LOG.trace("GOT EXCEPTION", e);
  }
  // clear the content type
  response.setContentType(null);
  // Convert exception
  if (e instanceof ParamException) {
    final ParamException paramexception = (ParamException) e;
    e = new IllegalArgumentException("Invalid value for webhdfs parameter \""
        + paramexception.getParameterName() + "\": "
        + e.getCause().getMessage(), e);
  }
  if (e instanceof ContainerException) {
    e = toCause(e);
  }
  if (e instanceof RemoteException) {
    e = ((RemoteException) e).unwrapRemoteException();
  }
  if (e instanceof SecurityException) {
    e = toCause(e);
  }
  // Map response status
  final Response.Status s;
  if (e instanceof SecurityException) {
    s = Response.Status.FORBIDDEN;
  } else if (e instanceof AuthorizationException) {
    s = Response.Status.FORBIDDEN;
  } else if (e instanceof FileNotFoundException) {
    s = Response.Status.NOT_FOUND;
  } else if (e instanceof IOException) {
    s = Response.Status.FORBIDDEN;
  } else if (e instanceof UnsupportedOperationException) {
    s = Response.Status.BAD_REQUEST;
  } else if (e instanceof IllegalArgumentException) {
    s = Response.Status.BAD_REQUEST;
  } else {
    LOG.warn("INTERNAL_SERVER_ERROR", e);
    s = Response.Status.INTERNAL_SERVER_ERROR;
  }
  final String js = JsonUtil.toJsonString(e);
  return Response.status(s).type(MediaType.APPLICATION_JSON).entity(js).build();
}
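toResponse is the entry point of a JAX-RS ExceptionMapper, so the enclosing class is roughly shaped as below (a sketch: the @Provider annotation and the @Context-injected response field follow the usual JAX-RS idiom, and the real webhdfs class may carry additional members):

import javax.servlet.http.HttpServletResponse;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.Response;
import javax.ws.rs.ext.ExceptionMapper;
import javax.ws.rs.ext.Provider;

// @Provider lets the JAX-RS runtime discover the mapper; the injected
// HttpServletResponse is the `response` field cleared in toResponse above.
@Provider
public class ExceptionHandler implements ExceptionMapper<Exception> {
  @Context
  private HttpServletResponse response;

  @Override
  public Response toResponse(Exception e) {
    // mapping logic as shown above; placeholder return for compilability
    return Response.status(Response.Status.INTERNAL_SERVER_ERROR).build();
  }
}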