Use of org.apache.hadoop.security.authorize.AuthorizationException in project hadoop by apache.
The class DelegationTokenAuthenticationHandler, method managementOperation.
@Override
@SuppressWarnings("unchecked")
public boolean managementOperation(AuthenticationToken token,
    HttpServletRequest request, HttpServletResponse response)
    throws IOException, AuthenticationException {
  boolean requestContinues = true;
  String op = ServletUtils.getParameter(request,
      KerberosDelegationTokenAuthenticator.OP_PARAM);
  op = (op != null) ? StringUtils.toUpperCase(op) : null;
  if (isManagementOperation(request)) {
    KerberosDelegationTokenAuthenticator.DelegationTokenOperation dtOp =
        KerberosDelegationTokenAuthenticator.DelegationTokenOperation.valueOf(op);
    if (dtOp.getHttpMethod().equals(request.getMethod())) {
      boolean doManagement;
      if (dtOp.requiresKerberosCredentials() && token == null) {
        // Don't authenticate via DT for DT ops.
        token = authHandler.authenticate(request, response);
        if (token == null) {
          requestContinues = false;
          doManagement = false;
        } else {
          doManagement = true;
        }
      } else {
        doManagement = true;
      }
      if (doManagement) {
        UserGroupInformation requestUgi = (token != null)
            ? UserGroupInformation.createRemoteUser(token.getUserName())
            : null;
        // Create the proxy user if doAsUser exists
        String doAsUser = DelegationTokenAuthenticationFilter.getDoAs(request);
        if (requestUgi != null && doAsUser != null) {
          requestUgi = UserGroupInformation.createProxyUser(doAsUser, requestUgi);
          try {
            ProxyUsers.authorize(requestUgi, request.getRemoteAddr());
          } catch (AuthorizationException ex) {
            HttpExceptionUtils.createServletExceptionResponse(response,
                HttpServletResponse.SC_FORBIDDEN, ex);
            return false;
          }
        }
        Map map = null;
        switch (dtOp) {
          case GETDELEGATIONTOKEN:
            if (requestUgi == null) {
              throw new IllegalStateException("request UGI cannot be NULL");
            }
            String renewer = ServletUtils.getParameter(request,
                KerberosDelegationTokenAuthenticator.RENEWER_PARAM);
            try {
              Token<?> dToken = tokenManager.createToken(requestUgi, renewer);
              map = delegationTokenToJSON(dToken);
            } catch (IOException ex) {
              throw new AuthenticationException(ex.toString(), ex);
            }
            break;
          case RENEWDELEGATIONTOKEN:
            if (requestUgi == null) {
              throw new IllegalStateException("request UGI cannot be NULL");
            }
            String tokenToRenew = ServletUtils.getParameter(request,
                KerberosDelegationTokenAuthenticator.TOKEN_PARAM);
            if (tokenToRenew == null) {
              response.sendError(HttpServletResponse.SC_BAD_REQUEST,
                  MessageFormat.format(
                      "Operation [{0}] requires the parameter [{1}]", dtOp,
                      KerberosDelegationTokenAuthenticator.TOKEN_PARAM));
              requestContinues = false;
            } else {
              Token<AbstractDelegationTokenIdentifier> dt = new Token();
              try {
                dt.decodeFromUrlString(tokenToRenew);
                long expirationTime = tokenManager.renewToken(dt,
                    requestUgi.getShortUserName());
                map = new HashMap();
                map.put("long", expirationTime);
              } catch (IOException ex) {
                throw new AuthenticationException(ex.toString(), ex);
              }
            }
            break;
          case CANCELDELEGATIONTOKEN:
            String tokenToCancel = ServletUtils.getParameter(request,
                KerberosDelegationTokenAuthenticator.TOKEN_PARAM);
            if (tokenToCancel == null) {
              response.sendError(HttpServletResponse.SC_BAD_REQUEST,
                  MessageFormat.format(
                      "Operation [{0}] requires the parameter [{1}]", dtOp,
                      KerberosDelegationTokenAuthenticator.TOKEN_PARAM));
              requestContinues = false;
            } else {
              Token<AbstractDelegationTokenIdentifier> dt = new Token();
              try {
                dt.decodeFromUrlString(tokenToCancel);
                tokenManager.cancelToken(dt,
                    (requestUgi != null) ? requestUgi.getShortUserName() : null);
              } catch (IOException ex) {
                response.sendError(HttpServletResponse.SC_NOT_FOUND,
                    "Invalid delegation token, cannot cancel");
                requestContinues = false;
              }
            }
            break;
        }
        if (requestContinues) {
          response.setStatus(HttpServletResponse.SC_OK);
          if (map != null) {
            response.setContentType(MediaType.APPLICATION_JSON);
            Writer writer = response.getWriter();
            ObjectMapper jsonMapper = new ObjectMapper(jsonFactory);
            jsonMapper.writeValue(writer, map);
            writer.write(ENTER);
            writer.flush();
          }
          requestContinues = false;
        }
      }
    } else {
      response.sendError(HttpServletResponse.SC_BAD_REQUEST,
          MessageFormat.format(
              "Wrong HTTP method [{0}] for operation [{1}], it should be [{2}]",
              request.getMethod(), dtOp, dtOp.getHttpMethod()));
      requestContinues = false;
    }
  }
  return requestContinues;
}
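For context, a client would normally drive the GETDELEGATIONTOKEN branch above through DelegationTokenAuthenticatedURL (from the same org.apache.hadoop.security.token.delegation.web package) instead of hand-building the query string. A minimal sketch, assuming a placeholder endpoint URL protected by DelegationTokenAuthenticationFilter and "yarn" as the renewer:

import java.net.URL;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenIdentifier;
import org.apache.hadoop.security.token.delegation.web.DelegationTokenAuthenticatedURL;

public class GetDtExample {
  public static void main(String[] args) throws Exception {
    // Placeholder endpoint; any servlet behind DelegationTokenAuthenticationFilter
    // exposes these management operations.
    URL url = new URL("http://localhost:14000/webhdfs/v1/");
    DelegationTokenAuthenticatedURL.Token token =
        new DelegationTokenAuthenticatedURL.Token();
    DelegationTokenAuthenticatedURL aUrl = new DelegationTokenAuthenticatedURL();
    // Issues GET <url>?op=GETDELEGATIONTOKEN&renewer=yarn, which the handler
    // above serves in its GETDELEGATIONTOKEN branch.
    Token<AbstractDelegationTokenIdentifier> dt =
        aUrl.getDelegationToken(url, token, "yarn");
    System.out.println("Token kind: " + dt.getKind());
  }
}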
Use of org.apache.hadoop.security.authorize.AuthorizationException in project hadoop by apache.
The class TestJspHelper, method testGetProxyUgi.
@Test
public void testGetProxyUgi() throws IOException {
  conf.set(DFSConfigKeys.FS_DEFAULT_NAME_KEY, "hdfs://localhost:4321/");
  ServletContext context = mock(ServletContext.class);
  String realUser = "TheDoctor";
  String user = "TheNurse";
  conf.set(DFSConfigKeys.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
  conf.set(DefaultImpersonationProvider.getTestProvider()
      .getProxySuperuserGroupConfKey(realUser), "*");
  conf.set(DefaultImpersonationProvider.getTestProvider()
      .getProxySuperuserIpConfKey(realUser), "*");
  ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
  UserGroupInformation.setConfiguration(conf);
  UserGroupInformation ugi;
  HttpServletRequest request;
  // have to be auth-ed with remote user
  request = getMockRequest(null, null, user);
  try {
    JspHelper.getUGI(context, request, conf);
    Assert.fail("bad request allowed");
  } catch (IOException ioe) {
    Assert.assertEquals(
        "Security enabled but user not authenticated by filter",
        ioe.getMessage());
  }
  request = getMockRequest(null, realUser, user);
  try {
    JspHelper.getUGI(context, request, conf);
    Assert.fail("bad request allowed");
  } catch (IOException ioe) {
    Assert.assertEquals(
        "Security enabled but user not authenticated by filter",
        ioe.getMessage());
  }
  // proxy ugi for user via remote user
  request = getMockRequest(realUser, null, user);
  ugi = JspHelper.getUGI(context, request, conf);
  Assert.assertNotNull(ugi.getRealUser());
  Assert.assertEquals(ugi.getRealUser().getShortUserName(), realUser);
  Assert.assertEquals(ugi.getShortUserName(), user);
  checkUgiFromAuth(ugi);
  // proxy ugi for user via remote user == real user
  request = getMockRequest(realUser, realUser, user);
  ugi = JspHelper.getUGI(context, request, conf);
  Assert.assertNotNull(ugi.getRealUser());
  Assert.assertEquals(ugi.getRealUser().getShortUserName(), realUser);
  Assert.assertEquals(ugi.getShortUserName(), user);
  checkUgiFromAuth(ugi);
  // proxy ugi for user via remote user != real user
  request = getMockRequest(realUser, user, user);
  try {
    JspHelper.getUGI(context, request, conf);
    Assert.fail("bad request allowed");
  } catch (IOException ioe) {
    Assert.assertEquals(
        "Usernames not matched: name=" + user + " != expected=" + realUser,
        ioe.getMessage());
  }
  // try to get a proxy user with an unauthorized user
  try {
    request = getMockRequest(user, null, realUser);
    JspHelper.getUGI(context, request, conf);
    Assert.fail("bad proxy request allowed");
  } catch (AuthorizationException ae) {
    Assert.assertEquals(
        "User: " + user + " is not allowed to impersonate " + realUser,
        ae.getMessage());
  }
  try {
    request = getMockRequest(user, user, realUser);
    JspHelper.getUGI(context, request, conf);
    Assert.fail("bad proxy request allowed");
  } catch (AuthorizationException ae) {
    Assert.assertEquals(
        "User: " + user + " is not allowed to impersonate " + realUser,
        ae.getMessage());
  }
}
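The getMockRequest(remoteUser, user, doAs) helper is a private method of TestJspHelper and is not shown in this excerpt. A plausible Mockito sketch of its shape; the parameter names "user.name" and "doas" are assumptions based on WebHDFS conventions, not confirmed by this excerpt:

private HttpServletRequest getMockRequest(String remoteUser, String user,
    String doAs) {
  HttpServletRequest request = mock(HttpServletRequest.class);
  // Username supplied via the auth filter's query parameter (assumed name).
  when(request.getParameter("user.name")).thenReturn(user);
  if (doAs != null) {
    // Impersonation target (assumed parameter name).
    when(request.getParameter("doas")).thenReturn(doAs);
  }
  // Username established by the servlet container / authentication filter.
  when(request.getRemoteUser()).thenReturn(remoteUser);
  return request;
}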
Use of org.apache.hadoop.security.authorize.AuthorizationException in project hadoop by apache.
The class RpcProgramNfs3, method getattr.
@VisibleForTesting
GETATTR3Response getattr(XDR xdr, SecurityHandler securityHandler,
    SocketAddress remoteAddress) {
  GETATTR3Response response = new GETATTR3Response(Nfs3Status.NFS3_OK);
  if (!checkAccessPrivilege(remoteAddress, AccessPrivilege.READ_ONLY)) {
    response.setStatus(Nfs3Status.NFS3ERR_ACCES);
    return response;
  }
  DFSClient dfsClient = clientCache.getDfsClient(securityHandler.getUser());
  if (dfsClient == null) {
    response.setStatus(Nfs3Status.NFS3ERR_SERVERFAULT);
    return response;
  }
  GETATTR3Request request;
  try {
    request = GETATTR3Request.deserialize(xdr);
  } catch (IOException e) {
    LOG.error("Invalid GETATTR request");
    response.setStatus(Nfs3Status.NFS3ERR_INVAL);
    return response;
  }
  FileHandle handle = request.getHandle();
  if (LOG.isDebugEnabled()) {
    LOG.debug("GETATTR for fileId: " + handle.getFileId() + " client: "
        + remoteAddress);
  }
  Nfs3FileAttributes attrs = null;
  try {
    attrs = writeManager.getFileAttr(dfsClient, handle, iug);
  } catch (RemoteException r) {
    LOG.warn("Exception ", r);
    IOException io = r.unwrapRemoteException();
    // AuthorizationException can be thrown if the user can't be proxy'ed.
    if (io instanceof AuthorizationException) {
      return new GETATTR3Response(Nfs3Status.NFS3ERR_ACCES);
    } else {
      return new GETATTR3Response(Nfs3Status.NFS3ERR_IO);
    }
  } catch (IOException e) {
    LOG.info("Can't get file attribute, fileId=" + handle.getFileId(), e);
    int status = mapErrorStatus(e);
    response.setStatus(status);
    return response;
  }
  if (attrs == null) {
    LOG.error("Can't get path for fileId: " + handle.getFileId());
    response.setStatus(Nfs3Status.NFS3ERR_STALE);
    return response;
  }
  response.setPostOpAttr(attrs);
  return response;
}
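The RemoteException catch block above follows a common Hadoop RPC pattern: unwrap the wire exception to recover the original server-side type, then branch on it. A sketch of that pattern factored into a standalone helper (illustrative only; RpcProgramNfs3 inlines this logic per operation):

import java.io.IOException;
import org.apache.hadoop.ipc.RemoteException;
import org.apache.hadoop.nfs.nfs3.Nfs3Status;
import org.apache.hadoop.security.authorize.AuthorizationException;

final class NfsErrorMapping {
  // Maps a RemoteException from the NameNode to an NFS3 status code.
  static int statusForRemoteException(RemoteException r) {
    // RemoteException carries the server-side class name; unwrapping
    // reconstructs the original exception type locally.
    IOException io = r.unwrapRemoteException();
    if (io instanceof AuthorizationException) {
      // The proxy user was rejected: report a permission error.
      return Nfs3Status.NFS3ERR_ACCES;
    }
    return Nfs3Status.NFS3ERR_IO;
  }
}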
Use of org.apache.hadoop.security.authorize.AuthorizationException in project hadoop by apache.
The class KeyAuthorizationKeyProvider, method authorizeCreateKey.
// This method first checks if the "key.acl.name" attribute is present as an
// attribute in the provider Options. If yes, use the aclName for any
// subsequent access checks, else use the keyName as the aclName and set it
// as the value of "key.acl.name" in the key's metadata.
private void authorizeCreateKey(String keyName, Options options,
    UserGroupInformation ugi) throws IOException {
  Preconditions.checkNotNull(ugi, "UserGroupInformation cannot be null");
  Map<String, String> attributes = options.getAttributes();
  String aclName = attributes.get(KEY_ACL_NAME);
  boolean success = false;
  if (Strings.isNullOrEmpty(aclName)) {
    if (acls.isACLPresent(keyName, KeyOpType.MANAGEMENT)) {
      options.setAttributes(ImmutableMap.<String, String>builder()
          .putAll(attributes).put(KEY_ACL_NAME, keyName).build());
      success = acls.hasAccessToKey(keyName, ugi, KeyOpType.MANAGEMENT)
          || acls.hasAccessToKey(keyName, ugi, KeyOpType.ALL);
    } else {
      success = false;
    }
  } else {
    success = acls.isACLPresent(aclName, KeyOpType.MANAGEMENT)
        && (acls.hasAccessToKey(aclName, ugi, KeyOpType.MANAGEMENT)
            || acls.hasAccessToKey(aclName, ugi, KeyOpType.ALL));
  }
  if (!success) {
    throw new AuthorizationException(String.format(
        "User [%s] is not authorized to create key !!",
        ugi.getShortUserName()));
  }
}
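To exercise the non-empty aclName branch, a caller can pin a key to a shared ACL name through the provider Options. A minimal sketch; "key.acl.name" is the KEY_ACL_NAME attribute checked above, while "teamAcl" and "reports-key" are made-up illustrative names:

// Attach a shared ACL name so authorization resolves against "teamAcl"
// rather than the key's own name.
KeyProvider.Options options = new KeyProvider.Options(conf);
options.setAttributes(ImmutableMap.of("key.acl.name", "teamAcl"));
// authorizeCreateKey() takes the else branch and checks MANAGEMENT/ALL
// access against "teamAcl"; on failure it throws AuthorizationException.
provider.createKey("reports-key", options);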
Use of org.apache.hadoop.security.authorize.AuthorizationException in project hadoop by apache.
The class TestKMS, method doProxyUserTest.
public void doProxyUserTest(final boolean kerberos) throws Exception {
  Configuration conf = new Configuration();
  conf.set("hadoop.security.authentication", "kerberos");
  final File testDir = getTestDir();
  conf = createBaseKMSConf(testDir, conf);
  if (kerberos) {
    conf.set("hadoop.kms.authentication.type", "kerberos");
  }
  conf.set("hadoop.kms.authentication.kerberos.keytab",
      keytab.getAbsolutePath());
  conf.set("hadoop.kms.authentication.kerberos.principal", "HTTP/localhost");
  conf.set("hadoop.kms.authentication.kerberos.name.rules", "DEFAULT");
  conf.set("hadoop.kms.proxyuser.client.users", "foo,bar");
  conf.set("hadoop.kms.proxyuser.client.hosts", "*");
  conf.set(KeyAuthorizationKeyProvider.KEY_ACL + "kaa.ALL", "client");
  conf.set(KeyAuthorizationKeyProvider.KEY_ACL + "kbb.ALL", "foo");
  conf.set(KeyAuthorizationKeyProvider.KEY_ACL + "kcc.ALL", "foo1");
  conf.set(KeyAuthorizationKeyProvider.KEY_ACL + "kdd.ALL", "bar");
  writeConf(testDir, conf);
  runServer(null, null, testDir, new KMSCallable<Void>() {
    @Override
    public Void call() throws Exception {
      final Configuration conf = new Configuration();
      conf.setInt(KeyProvider.DEFAULT_BITLENGTH_NAME, 128);
      final URI uri = createKMSUri(getKMSUrl());
      UserGroupInformation proxyUgi = null;
      if (kerberos) {
        // proxyuser client using kerberos credentials
        proxyUgi = UserGroupInformation.loginUserFromKeytabAndReturnUGI(
            "client", keytab.getAbsolutePath());
      } else {
        proxyUgi = UserGroupInformation.createRemoteUser("client");
        UserGroupInformation.setLoginUser(proxyUgi);
      }
      final UserGroupInformation clientUgi = proxyUgi;
      clientUgi.doAs(new PrivilegedExceptionAction<Void>() {
        @Override
        public Void run() throws Exception {
          final KeyProvider kp = createProvider(uri, conf);
          kp.createKey("kaa", new KeyProvider.Options(conf));
          // authorized proxyuser
          UserGroupInformation fooUgi =
              UserGroupInformation.createProxyUser("foo", clientUgi);
          fooUgi.doAs(new PrivilegedExceptionAction<Void>() {
            @Override
            public Void run() throws Exception {
              Assert.assertNotNull(
                  kp.createKey("kbb", new KeyProvider.Options(conf)));
              return null;
            }
          });
          // unauthorized proxyuser
          UserGroupInformation foo1Ugi =
              UserGroupInformation.createProxyUser("foo1", clientUgi);
          foo1Ugi.doAs(new PrivilegedExceptionAction<Void>() {
            @Override
            public Void run() throws Exception {
              try {
                kp.createKey("kcc", new KeyProvider.Options(conf));
                Assert.fail();
              } catch (AuthorizationException ex) {
                // OK
              } catch (Exception ex) {
                Assert.fail(ex.getMessage());
              }
              return null;
            }
          });
          // authorized proxyuser
          UserGroupInformation barUgi =
              UserGroupInformation.createProxyUser("bar", clientUgi);
          barUgi.doAs(new PrivilegedExceptionAction<Void>() {
            @Override
            public Void run() throws Exception {
              Assert.assertNotNull(
                  kp.createKey("kdd", new KeyProvider.Options(conf)));
              return null;
            }
          });
          return null;
        }
      });
      return null;
    }
  });
}
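A standalone sketch of the server-side check that makes the "foo1" case above fail: only users listed under hadoop.kms.proxyuser.client.users may be impersonated by "client". The prefix argument to refreshSuperUserGroupsConfiguration is an assumption about how KMS wires up ProxyUsers, and the remote address "127.0.0.1" is arbitrary:

Configuration conf = new Configuration(false);
conf.set("hadoop.kms.proxyuser.client.users", "foo,bar");
conf.set("hadoop.kms.proxyuser.client.hosts", "*");
// Load proxy-user rules under the assumed KMS config prefix.
ProxyUsers.refreshSuperUserGroupsConfiguration(conf, "hadoop.kms.proxyuser");
UserGroupInformation client = UserGroupInformation.createRemoteUser("client");
UserGroupInformation foo1 =
    UserGroupInformation.createProxyUser("foo1", client);
try {
  // Throws AuthorizationException: "foo1" is not in the allowed users list.
  ProxyUsers.authorize(foo1, "127.0.0.1");
} catch (AuthorizationException expected) {
  System.out.println(expected.getMessage());
}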