Search in sources :

Example 41 with DFSAdmin

Use of org.apache.hadoop.hdfs.tools.DFSAdmin in the Apache Hadoop project.

From the class TestRefreshUserMappings, method testRefreshSuperUserGroupsConfiguration.

@Test
public void testRefreshSuperUserGroupsConfiguration() throws Exception {
    final String SUPER_USER = "super_user";
    // Two disjoint group sets: user1 belongs to gr1/gr2, user2 to gr3/gr4.
    final List<String> groupNames1 = new ArrayList<>();
    groupNames1.add("gr1");
    groupNames1.add("gr2");
    final List<String> groupNames2 = new ArrayList<>();
    groupNames2.add("gr3");
    groupNames2.add("gr4");
    // Configuration keys that control which groups/hosts the super user may
    // impersonate (proxy-user settings keyed by the super user's name).
    String userKeyGroups = DefaultImpersonationProvider.getTestProvider().getProxySuperuserGroupConfKey(SUPER_USER);
    String userKeyHosts = DefaultImpersonationProvider.getTestProvider().getProxySuperuserIpConfKey(SUPER_USER);
    // Initially the superuser can proxy only for gr3/gr4/gr5 (i.e. for user2,
    // not user1) from 127.0.0.1.
    config.set(userKeyGroups, "gr3,gr4,gr5");
    config.set(userKeyHosts, "127.0.0.1");
    ProxyUsers.refreshSuperUserGroupsConfiguration(config);
    // Mocked UGIs: ugi1/ugi2 are proxy users whose real (authenticating) user
    // is the super user.
    UserGroupInformation ugi1 = mock(UserGroupInformation.class);
    UserGroupInformation ugi2 = mock(UserGroupInformation.class);
    UserGroupInformation suUgi = mock(UserGroupInformation.class);
    when(ugi1.getRealUser()).thenReturn(suUgi);
    when(ugi2.getRealUser()).thenReturn(suUgi);
    // Short name must match SUPER_USER because authorization keys off the
    // real user's short name; the full name intentionally differs.
    when(suUgi.getShortUserName()).thenReturn(SUPER_USER);
    when(suUgi.getUserName()).thenReturn(SUPER_USER + "L");
    when(ugi1.getShortUserName()).thenReturn("user1");
    when(ugi2.getShortUserName()).thenReturn("user2");
    when(ugi1.getUserName()).thenReturn("userL1");
    when(ugi2.getUserName()).thenReturn("userL2");
    // Group membership drives the proxy-group check above.
    when(ugi1.getGroups()).thenReturn(groupNames1);
    when(ugi2.getGroups()).thenReturn(groupNames2);
    // Before the refresh: user1 (gr1/gr2) must be rejected, user2 (gr3/gr4)
    // must be authorized.
    try {
        ProxyUsers.authorize(ugi1, "127.0.0.1");
        fail("first auth for " + ugi1.getShortUserName() + " should've failed ");
    } catch (AuthorizationException e) {
        // expected
        System.err.println("auth for " + ugi1.getUserName() + " failed");
    }
    try {
        ProxyUsers.authorize(ugi2, "127.0.0.1");
        System.err.println("auth for " + ugi2.getUserName() + " succeeded");
    // expected
    } catch (AuthorizationException e) {
        fail("first auth for " + ugi2.getShortUserName() + " should've succeeded: " + e.getLocalizedMessage());
    }
    // Flip the allowed proxy group to gr2 via a new config resource so the
    // server-side refresh (triggered through DFSAdmin) picks it up.
    String rsrc = "testGroupMappingRefresh_rsrc.xml";
    addNewConfigResource(rsrc, userKeyGroups, "gr2", userKeyHosts, "127.0.0.1");
    DFSAdmin admin = new DFSAdmin(config);
    String[] args = new String[] { "-refreshSuperUserGroupsConfiguration" };
    // The refresh command must succeed (exit code 0); otherwise the
    // assertions below would fail for a misleading reason.
    int exitCode = admin.run(args);
    if (exitCode != 0) {
        fail("-refreshSuperUserGroupsConfiguration command failed with exit code " + exitCode);
    }
    // After the refresh the authorizations must be inverted: user2 (gr3/gr4)
    // is now rejected, user1 (gr2) is now authorized.
    try {
        ProxyUsers.authorize(ugi2, "127.0.0.1");
        fail("second auth for " + ugi2.getShortUserName() + " should've failed ");
    } catch (AuthorizationException e) {
        // expected
        System.err.println("auth for " + ugi2.getUserName() + " failed");
    }
    try {
        ProxyUsers.authorize(ugi1, "127.0.0.1");
        System.err.println("auth for " + ugi1.getUserName() + " succeeded");
    // expected
    } catch (AuthorizationException e) {
        fail("second auth for " + ugi1.getShortUserName() + " should've succeeded: " + e.getLocalizedMessage());
    }
}
Also used : AuthorizationException(org.apache.hadoop.security.authorize.AuthorizationException) DFSAdmin(org.apache.hadoop.hdfs.tools.DFSAdmin) ArrayList(java.util.ArrayList) Test(org.junit.Test)

Aggregations

DFSAdmin (org.apache.hadoop.hdfs.tools.DFSAdmin)41 Test (org.junit.Test)31 Configuration (org.apache.hadoop.conf.Configuration)15 Path (org.apache.hadoop.fs.Path)14 CoreMatchers.containsString (org.hamcrest.CoreMatchers.containsString)9 IOException (java.io.IOException)6 FileSystem (org.apache.hadoop.fs.FileSystem)6 DataNode (org.apache.hadoop.hdfs.server.datanode.DataNode)5 DSQuotaExceededException (org.apache.hadoop.hdfs.protocol.DSQuotaExceededException)4 NSQuotaExceededException (org.apache.hadoop.hdfs.protocol.NSQuotaExceededException)4 QuotaExceededException (org.apache.hadoop.hdfs.protocol.QuotaExceededException)4 ContentSummary (org.apache.hadoop.fs.ContentSummary)3 FSDataOutputStream (org.apache.hadoop.fs.FSDataOutputStream)3 DistributedFileSystem (org.apache.hadoop.hdfs.DistributedFileSystem)3 MiniDFSCluster (org.apache.hadoop.hdfs.MiniDFSCluster)3 ByteArrayOutputStream (java.io.ByteArrayOutputStream)2 File (java.io.File)2 QuotaUsage (org.apache.hadoop.fs.QuotaUsage)2 HdfsConfiguration (org.apache.hadoop.hdfs.HdfsConfiguration)2 DatanodeInfo (org.apache.hadoop.hdfs.protocol.DatanodeInfo)2