Use of org.apache.hadoop.security.Groups in project hadoop by apache.
The class TestGroupsCaching, method testThreadBlockedWhenExpiredEntryExistsWithoutBackgroundRefresh.
@Test
public void testThreadBlockedWhenExpiredEntryExistsWithoutBackgroundRefresh() throws Exception {
  conf.setLong(CommonConfigurationKeys.HADOOP_SECURITY_GROUPS_CACHE_SECS, 1);
  conf.setBoolean(CommonConfigurationKeys.HADOOP_SECURITY_GROUPS_CACHE_BACKGROUND_RELOAD, false);
  FakeTimer timer = new FakeTimer();
  final Groups groups = new Groups(conf, timer);
  groups.cacheGroupsAdd(Arrays.asList(myGroups));
  groups.refresh();
  FakeGroupMapping.clearBlackList();
  // We make an initial request to populate the cache
  groups.getGroups("me");
  // Further lookups will have a delay
  FakeGroupMapping.setGetGroupsDelayMs(100);
  // Add another group
  groups.cacheGroupsAdd(Arrays.asList("grp3"));
  int startingRequestCount = FakeGroupMapping.getRequestCount();
  // Then expire that entry
  timer.advance(4 * 1000);
  // Now get the cache entry - it should block and return the new
  // three-group value
  assertEquals(3, groups.getGroups("me").size());
  assertEquals(startingRequestCount + 1, FakeGroupMapping.getRequestCount());
}
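For contrast, here is a minimal sketch (not taken from the Hadoop test suite) of the opposite configuration: with background reload enabled, an expired entry is served from the cache while the refresh happens asynchronously. Only the configuration keys and the Groups API used above are real; the user name and surrounding scenario are illustrative.

// Illustrative sketch: serve possibly-stale entries and reload in the background.
Configuration conf = new Configuration();
conf.setLong(CommonConfigurationKeys.HADOOP_SECURITY_GROUPS_CACHE_SECS, 1);
conf.setBoolean(CommonConfigurationKeys.HADOOP_SECURITY_GROUPS_CACHE_BACKGROUND_RELOAD, true);
Groups groups = new Groups(conf);
try {
  // Once the 1-second TTL has passed, this lookup is expected to return the
  // cached (possibly stale) value immediately and trigger an asynchronous
  // reload, rather than blocking the calling thread as in the test above.
  List<String> cachedGroups = groups.getGroups("someUser");
} catch (IOException e) {
  // thrown if the user resolves to no groups at all
}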
Use of org.apache.hadoop.security.Groups in project hadoop by apache.
The class TestProxyUsers, method testNetgroups.
/**
 * Test the netgroups (groups in ACL rules that start with @).
 *
 * This is a manual test because it requires:
 *   - host setup
 *   - native code compiled
 *   - a group mapping class to be specified
 *
 * Host setup:
 *
 * /etc/nsswitch.conf should have a line like this:
 * netgroup: files
 *
 * /etc/netgroup should be (the whole file):
 * foo_group (,proxied_user,)
 *
 * To run this test:
 *
 * export JAVA_HOME='path/to/java'
 * mvn test \
 *   -Dtest=TestProxyUsers \
 *   -DTestProxyUsersGroupMapping=$className
 *
 * where $className is one of the classes that provide group
 * mapping services, i.e. classes that implement the
 * GroupMappingServiceProvider interface; at this time these are:
 *   - org.apache.hadoop.security.JniBasedUnixGroupsNetgroupMapping
 *   - org.apache.hadoop.security.ShellBasedUnixGroupsNetgroupMapping
 */
@Test
public void testNetgroups() throws IOException {
  if (!NativeCodeLoader.isNativeCodeLoaded()) {
    LOG.info("Not testing netgroups, "
        + "this test only runs when native code is compiled");
    return;
  }
  String groupMappingClassName = System.getProperty("TestProxyUsersGroupMapping");
  if (groupMappingClassName == null) {
    LOG.info("Not testing netgroups, no group mapping class specified, "
        + "use -DTestProxyUsersGroupMapping=$className to specify "
        + "group mapping class (must implement GroupMappingServiceProvider "
        + "interface and support netgroups)");
    return;
  }
  LOG.info("Testing netgroups using: " + groupMappingClassName);
  Configuration conf = new Configuration();
  conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_GROUP_MAPPING,
      groupMappingClassName);
  conf.set(
      DefaultImpersonationProvider.getTestProvider()
          .getProxySuperuserGroupConfKey(REAL_USER_NAME),
      StringUtils.join(",", Arrays.asList(NETGROUP_NAMES)));
  conf.set(
      DefaultImpersonationProvider.getTestProvider()
          .getProxySuperuserIpConfKey(REAL_USER_NAME),
      PROXY_IP);
  ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
  Groups groups = Groups.getUserToGroupsMappingService(conf);
  // Try proxying a group that's allowed
  UserGroupInformation realUserUgi =
      UserGroupInformation.createRemoteUser(REAL_USER_NAME);
  UserGroupInformation proxyUserUgi = UserGroupInformation.createProxyUserForTesting(
      PROXY_USER_NAME, realUserUgi,
      groups.getGroups(PROXY_USER_NAME).toArray(
          new String[groups.getGroups(PROXY_USER_NAME).size()]));
  assertAuthorized(proxyUserUgi, PROXY_IP);
}
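For readers wondering what a class that "provides group mapping services" has to implement, here is a rough sketch of a trivial provider. The method signatures are the core GroupMappingServiceProvider methods; the class name and the hard-coded group are invented for illustration, and a real netgroup-aware provider would resolve @netgroup names against the system netgroup database instead of returning a constant.

// Illustrative only: maps every user to a single fixed group.
public class StaticGroupMapping implements GroupMappingServiceProvider {

  @Override
  public List<String> getGroups(String user) throws IOException {
    return Arrays.asList("foo_group");
  }

  @Override
  public void cacheGroupsRefresh() throws IOException {
    // nothing is cached here, so there is nothing to refresh
  }

  @Override
  public void cacheGroupsAdd(List<String> groups) throws IOException {
    // caching is not supported in this sketch
  }
}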
Use of org.apache.hadoop.security.Groups in project hadoop by apache.
The class TestGroupsCaching, method testNegativeCacheEntriesExpire.
@Test
public void testNegativeCacheEntriesExpire() throws Exception {
  conf.setLong(CommonConfigurationKeys.HADOOP_SECURITY_GROUPS_NEGATIVE_CACHE_SECS, 2);
  FakeTimer timer = new FakeTimer();
  // Negative cache entries expire after 2 seconds
  Groups groups = new Groups(conf, timer);
  groups.cacheGroupsAdd(Arrays.asList(myGroups));
  groups.refresh();
  // Add both these users to the blacklist so that they
  // can be added to the negative cache
  FakeGroupMapping.addToBlackList("user1");
  FakeGroupMapping.addToBlackList("user2");
  // Put user1 in the negative cache.
  try {
    groups.getGroups("user1");
    fail("Did not throw IOException : Failed to obtain groups"
        + " from FakeGroupMapping.");
  } catch (IOException e) {
    GenericTestUtils.assertExceptionContains("No groups found for user", e);
  }
  // Check that user1 is now in the negative cache
  assertTrue(groups.getNegativeCache().contains("user1"));
  // Advance the fake timer
  timer.advance(1000);
  // Put user2 in the negative cache
  try {
    groups.getGroups("user2");
    fail("Did not throw IOException : Failed to obtain groups"
        + " from FakeGroupMapping.");
  } catch (IOException e) {
    GenericTestUtils.assertExceptionContains("No groups found for user", e);
  }
  // Check that user2 is now in the negative cache
  assertTrue(groups.getNegativeCache().contains("user2"));
  // Advance the timer. Only user2 should still be in the negative cache.
  timer.advance(1100);
  assertFalse(groups.getNegativeCache().contains("user1"));
  assertTrue(groups.getNegativeCache().contains("user2"));
  // Advance the timer. Now even user2 should be gone from the negative cache.
  timer.advance(1000);
  assertFalse(groups.getNegativeCache().contains("user2"));
}
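Outside of tests, the same knob is just a configuration value. A minimal sketch, assuming only the key and the Groups behaviour exercised above; the user name and the 30-second TTL are arbitrary choices for illustration.

// Illustrative: remember users with no resolvable groups for 30 seconds,
// so repeated lookups do not keep hitting the underlying mapping service.
Configuration conf = new Configuration();
conf.setLong(CommonConfigurationKeys.HADOOP_SECURITY_GROUPS_NEGATIVE_CACHE_SECS, 30);
Groups groups = new Groups(conf);
try {
  groups.getGroups("unknown-user");
} catch (IOException e) {
  // "No groups found for user" is expected here; the user is now held in the
  // negative cache until the TTL expires.
}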
Use of org.apache.hadoop.security.Groups in project hadoop by apache.
The class TestGroupsCaching, method testNegativeCacheClearedOnRefresh.
@Test
public void testNegativeCacheClearedOnRefresh() throws Exception {
  conf.setLong(CommonConfigurationKeys.HADOOP_SECURITY_GROUPS_NEGATIVE_CACHE_SECS, 100);
  final Groups groups = new Groups(conf);
  groups.cacheGroupsAdd(Arrays.asList(myGroups));
  groups.refresh();
  FakeGroupMapping.clearBlackList();
  FakeGroupMapping.addToBlackList("dne");
  try {
    groups.getGroups("dne");
    fail("Should have failed to find this group");
  } catch (IOException e) {
    // pass
  }
  int startingRequestCount = FakeGroupMapping.getRequestCount();
  groups.refresh();
  FakeGroupMapping.addToBlackList("dne");
  try {
    groups.getGroups("dne");
    fail("Should have failed to find this group");
  } catch (IOException e) {
    // pass
  }
  assertEquals(startingRequestCount + 1, FakeGroupMapping.getRequestCount());
}
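The final assertion works because refresh() drops both the regular group cache and the negative cache. A minimal sketch of that contract, reusing only the methods already exercised above:

// After refresh(), "dne" is no longer negatively cached, so the next lookup
// goes back to the (still blacklisted) FakeGroupMapping -- which is why the
// test sees exactly one additional request.
groups.refresh();
assertFalse(groups.getNegativeCache().contains("dne"));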
Use of org.apache.hadoop.security.Groups in project hadoop by apache.
The class TestHSAdminServer, method testRefreshUserToGroupsMappings.
@Test
public void testRefreshUserToGroupsMappings() throws Exception {
  String[] args = new String[] { "-refreshUserToGroupsMappings" };
  Groups groups = Groups.getUserToGroupsMappingService(conf);
  String user = UserGroupInformation.getCurrentUser().getUserName();
  System.out.println("first attempt:");
  List<String> g1 = groups.getGroups(user);
  String[] str_groups = new String[g1.size()];
  g1.toArray(str_groups);
  System.out.println(Arrays.toString(str_groups));
  // The groups of this user have changed by now, but getGroups() answers from
  // the cache, so we should see the same groups as before
  System.out.println("second attempt, should be same:");
  List<String> g2 = groups.getGroups(user);
  g2.toArray(str_groups);
  System.out.println(Arrays.toString(str_groups));
  for (int i = 0; i < g2.size(); i++) {
    assertEquals("Should be same group", g1.get(i), g2.get(i));
  }
  // Run the admin command, which clears the cache
  hsAdminClient.run(args);
  System.out.println("third attempt (after refresh command), should be different:");
  // Now getGroups() should return the new groups
  List<String> g3 = groups.getGroups(user);
  g3.toArray(str_groups);
  System.out.println(Arrays.toString(str_groups));
  for (int i = 0; i < g3.size(); i++) {
    assertFalse("Should be different group: " + g1.get(i) + " and " + g3.get(i),
        g1.get(i).equals(g3.get(i)));
  }
}
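On the server side, the -refreshUserToGroupsMappings command ultimately asks the shared Groups service to drop its cache. A hedged sketch of that effect (not the actual HSAdminServer code):

// Roughly what the admin refresh boils down to: clear the process-wide
// user-to-groups cache so the next getGroups() call re-resolves the groups.
Groups.getUserToGroupsMappingService(conf).refresh();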