Example usage of org.apache.hadoop.security.authorize.AuthorizationException in the Apache Hadoop project: class TestApplicationHistoryManagerOnTimelineStore, method testGetApplicationAttemptReport.
@Test
public void testGetApplicationAttemptReport() throws Exception {
    final ApplicationAttemptId appAttemptId =
        ApplicationAttemptId.newInstance(ApplicationId.newInstance(0, 1), 1);
    ApplicationAttemptReport appAttempt;
    if (callerUGI == null) {
        appAttempt = historyManager.getApplicationAttempt(appAttemptId);
    } else {
        // "user3" is the caller without access; for that user the fetch
        // must fail with an AuthorizationException. Compute the flag once
        // instead of re-checking callerUGI (already known non-null here).
        final boolean accessDenied = callerUGI.getShortUserName().equals("user3");
        try {
            appAttempt = callerUGI.doAs(
                new PrivilegedExceptionAction<ApplicationAttemptReport>() {
                    @Override
                    public ApplicationAttemptReport run() throws Exception {
                        return historyManager.getApplicationAttempt(appAttemptId);
                    }
                });
            if (accessDenied) {
                // An AuthorizationException was expected but not thrown.
                Assert.fail();
            }
        } catch (AuthorizationException e) {
            if (accessDenied) {
                // The exception is expected; nothing further to verify.
                return;
            }
            throw e;
        }
    }
    // Authorized callers get a fully populated attempt report.
    Assert.assertNotNull(appAttempt);
    Assert.assertEquals(appAttemptId, appAttempt.getApplicationAttemptId());
    Assert.assertEquals(ContainerId.newContainerId(appAttemptId, 1),
        appAttempt.getAMContainerId());
    Assert.assertEquals("test host", appAttempt.getHost());
    Assert.assertEquals(100, appAttempt.getRpcPort());
    Assert.assertEquals("test tracking url", appAttempt.getTrackingUrl());
    Assert.assertEquals("test original tracking url",
        appAttempt.getOriginalTrackingUrl());
    Assert.assertEquals("test diagnostics info", appAttempt.getDiagnostics());
    Assert.assertEquals(YarnApplicationAttemptState.FINISHED,
        appAttempt.getYarnApplicationAttemptState());
}
Example usage of org.apache.hadoop.security.authorize.AuthorizationException in the Apache Hadoop project: class TestApplicationHistoryManagerOnTimelineStore, method testGetContainers.
@Test
public void testGetContainers() throws Exception {
    final ApplicationAttemptId appAttemptId =
        ApplicationAttemptId.newInstance(ApplicationId.newInstance(0, 1), 1);
    Collection<ContainerReport> containers;
    if (callerUGI == null) {
        containers = historyManager.getContainers(appAttemptId).values();
    } else {
        // "user3" is the caller without access; for that user the fetch
        // must fail with an AuthorizationException. Compute the flag once
        // instead of re-checking callerUGI (already known non-null here).
        final boolean accessDenied = callerUGI.getShortUserName().equals("user3");
        try {
            containers = callerUGI.doAs(
                new PrivilegedExceptionAction<Collection<ContainerReport>>() {
                    @Override
                    public Collection<ContainerReport> run() throws Exception {
                        return historyManager.getContainers(appAttemptId).values();
                    }
                });
            if (accessDenied) {
                // An AuthorizationException was expected but not thrown.
                Assert.fail();
            }
        } catch (AuthorizationException e) {
            if (accessDenied) {
                // The exception is expected; nothing further to verify.
                return;
            }
            throw e;
        }
    }
    // Authorized callers see one container report per generated container.
    Assert.assertNotNull(containers);
    Assert.assertEquals(SCALE, containers.size());
}
Example usage of org.apache.hadoop.security.authorize.AuthorizationException in the Apache Hadoop project: class Server, method authorize.
/**
 * Authorize the incoming client connection.
 *
 * @param user client user
 * @param protocolName - the protocol
 * @param addr InetAddress of incoming connection
 * @throws AuthorizationException when the client isn't authorized to talk the protocol
 */
private void authorize(UserGroupInformation user, String protocolName,
    InetAddress addr) throws AuthorizationException {
    // Authorization may be globally disabled via configuration; the
    // 'authorize' field gates the whole check.
    if (authorize) {
        if (protocolName == null) {
            throw new AuthorizationException("Null protocol not authorized");
        }
        Class<?> protocol = null;
        try {
            protocol = getProtocolClass(protocolName, getConf());
        } catch (ClassNotFoundException cfne) {
            // Preserve the original failure as the cause instead of
            // dropping it (AuthorizationException has no
            // (String, Throwable) constructor, so attach via initCause).
            AuthorizationException ae =
                new AuthorizationException("Unknown protocol: " + protocolName);
            ae.initCause(cfne);
            throw ae;
        }
        serviceAuthorizationManager.authorize(user, protocol, getConf(), addr);
    }
}
Example usage of org.apache.hadoop.security.authorize.AuthorizationException in the Apache Hadoop project: class AuthenticationWithProxyUserFilter, method doFilter.
/**
 * Performs pre/post processing in the filter chain. Overridden here so that
 * the proxy (doAs) user can be authorized between the AuthenticationFilter
 * and the next filter in the chain.
 *
 * @param filterChain the filter chain object.
 * @param request the request object.
 * @param response the response object.
 *
 * @throws IOException
 * @throws ServletException
 */
@Override
protected void doFilter(FilterChain filterChain, HttpServletRequest request,
    HttpServletResponse response) throws IOException, ServletException {
    final String proxyUser = getDoAs(request);
    if (proxyUser != null) {
        final HttpServletRequest authenticatedRequest = request;
        // Wrap the request so that downstream filters see the authorized
        // proxy user as the remote user instead of the real user.
        request = new HttpServletRequestWrapper(authenticatedRequest) {
            @Override
            public String getRemoteUser() {
                try {
                    return authorizedProxyUser();
                } catch (AuthorizationException ex) {
                    // Authorization failed: log and report no remote user.
                    LOG.error("Unable to verify proxy user: " + ex.getMessage(), ex);
                    return null;
                }
            }

            // Builds the proxy UGI on top of the authenticated real user and
            // checks it against the proxy-user ACLs for the client address.
            private String authorizedProxyUser() throws AuthorizationException {
                UserGroupInformation realUser = UserGroupInformation
                    .createRemoteUser(authenticatedRequest.getRemoteUser());
                UserGroupInformation proxyUgi = UserGroupInformation
                    .createProxyUser(proxyUser, realUser);
                ProxyUsers.authorize(proxyUgi, authenticatedRequest.getRemoteAddr());
                return proxyUgi.getUserName();
            }
        };
    }
    filterChain.doFilter(request, response);
}
Example usage of org.apache.hadoop.security.authorize.AuthorizationException in the Apache Hadoop project: class TestHSAdminServer, method testRefreshSuperUserGroups.
@Test
public void testRefreshSuperUserGroups() throws Exception {
    UserGroupInformation ugi = mock(UserGroupInformation.class);
    UserGroupInformation superUser = mock(UserGroupInformation.class);
    when(ugi.getRealUser()).thenReturn(superUser);
    when(superUser.getShortUserName()).thenReturn("superuser");
    when(superUser.getUserName()).thenReturn("superuser");
    when(ugi.getGroups()).thenReturn(Arrays.asList(new String[] { "group3" }));
    when(ugi.getUserName()).thenReturn("regularUser");
    // Set super user groups not to include groups of regularUser
    conf.set("hadoop.proxyuser.superuser.groups", "group1,group2");
    conf.set("hadoop.proxyuser.superuser.hosts", "127.0.0.1");
    String[] args = new String[] { "-refreshSuperUserGroupsConfiguration" };
    hsAdminClient.run(args);
    // regularUser's group (group3) is not in the proxy config, so
    // impersonation must be rejected.
    Throwable th = tryProxyAuthorize(ugi);
    assertTrue(th instanceof AuthorizationException);
    // Add regularUser's group to the superuser proxy groups, but do NOT
    // run refreshSuperUserGroupsConfiguration yet: the old (denying)
    // configuration should still be in effect.
    conf.set("hadoop.proxyuser.superuser.groups", "group1,group2,group3");
    th = tryProxyAuthorize(ugi);
    // Exception should be thrown again since we didn't refresh the configs.
    assertTrue(th instanceof AuthorizationException);
    // Refresh the config; regularUser can now be impersonated.
    hsAdminClient.run(args);
    th = tryProxyAuthorize(ugi);
    // No exception thrown since regularUser can be impersonated.
    assertNull("Unexpected exception thrown: " + th, th);
}

/**
 * Attempts proxy-user authorization from 127.0.0.1 and returns the thrown
 * exception, or null when authorization succeeds.
 */
private static Throwable tryProxyAuthorize(UserGroupInformation ugi) {
    try {
        ProxyUsers.authorize(ugi, "127.0.0.1");
        return null;
    } catch (Exception e) {
        return e;
    }
}
Aggregations