Search in sources:

Example 1 with AuthenticationTokenIdentifier

use of org.apache.hadoop.hbase.security.token.AuthenticationTokenIdentifier in project hbase by apache.

From the class TestTableMapReduceUtil, method testInitCredentialsForCluster4:

@Test
@SuppressWarnings("unchecked")
public void testInitCredentialsForCluster4() throws Exception {
    // The source cluster stays insecure and is intentionally never started:
    // a secure and an insecure mini cluster cannot be booted at the same time.
    HBaseTestingUtil insecureUtil = new HBaseTestingUtil();
    HBaseTestingUtil secureUtil = new HBaseTestingUtil();
    File keytabFile = new File(secureUtil.getDataTestDir("keytab").toUri().getPath());
    MiniKdc miniKdc = secureUtil.setupMiniKdc(keytabFile);
    try {
        String shortName = UserGroupInformation.getLoginUser().getShortUserName();
        String principal = shortName + "/localhost";
        miniKdc.createPrincipal(keytabFile, principal, HTTP_PRINCIPAL);
        loginUserFromKeytab(principal + '@' + miniKdc.getRealm(), keytabFile.getAbsolutePath());
        try (Closeable secureCluster = startSecureMiniCluster(secureUtil, miniKdc, principal)) {
            // Submit from the insecure cluster's configuration, acquiring
            // credentials for the secure peer cluster.
            Job job = Job.getInstance(insecureUtil.getConfiguration());
            TableMapReduceUtil.initCredentialsForCluster(job, secureUtil.getConfiguration());
            Credentials creds = job.getCredentials();
            // Exactly one token is expected: the secure cluster's auth token.
            assertEquals(1, creds.getAllTokens().size());
            String clusterId = ZKClusterId.readClusterIdZNode(secureUtil.getZooKeeperWatcher());
            Token<AuthenticationTokenIdentifier> clusterToken =
                (Token<AuthenticationTokenIdentifier>) creds.getToken(new Text(clusterId));
            assertEquals(principal + '@' + miniKdc.getRealm(),
                clusterToken.decodeIdentifier().getUsername());
        }
    } finally {
        miniKdc.stop();
    }
}
Also used : TokenIdentifier(org.apache.hadoop.security.token.TokenIdentifier) AuthenticationTokenIdentifier(org.apache.hadoop.hbase.security.token.AuthenticationTokenIdentifier) Configuration(org.apache.hadoop.conf.Configuration) Closeable(java.io.Closeable) Token(org.apache.hadoop.security.token.Token) Text(org.apache.hadoop.io.Text) HBaseTestingUtil(org.apache.hadoop.hbase.HBaseTestingUtil) AuthenticationTokenIdentifier(org.apache.hadoop.hbase.security.token.AuthenticationTokenIdentifier) MiniKdc(org.apache.hadoop.minikdc.MiniKdc) Job(org.apache.hadoop.mapreduce.Job) File(java.io.File) Credentials(org.apache.hadoop.security.Credentials) Test(org.junit.Test)

Example 2 with AuthenticationTokenIdentifier

use of org.apache.hadoop.hbase.security.token.AuthenticationTokenIdentifier in project oozie by apache.

From the class HbaseCredentials, method obtainToken:

/**
 * Obtains an HBase authentication token on behalf of the workflow user and
 * adds it to the supplied job credentials under a unique alias.
 *
 * @param credentials the credentials collection to receive the token
 * @param jobConf the configuration used to open the HBase connection
 * @param context workflow context supplying the user to impersonate
 * @throws IOException if the proxy user cannot be created or the connection fails
 * @throws InterruptedException if the privileged action is interrupted
 */
private void obtainToken(Credentials credentials, final Configuration jobConf, Context context) throws IOException, InterruptedException {
    String user = context.getWorkflow().getUser();
    UserGroupInformation ugi = UserGroupInformation.createProxyUser(user, UserGroupInformation.getLoginUser());
    User u = User.create(ugi);
    // A direct doAs is required here vs. User#obtainAuthTokenForJob(...)
    // See OOZIE-2419 for more
    XLog.getLog(getClass()).debug("Getting Hbase token for user {0}", user);
    Token<AuthenticationTokenIdentifier> token = u.runAs(new PrivilegedExceptionAction<Token<AuthenticationTokenIdentifier>>() {

        @Override
        public Token<AuthenticationTokenIdentifier> run() throws Exception {
            Token<AuthenticationTokenIdentifier> newToken = null;
            // try-with-resources guarantees the connection is closed even if
            // token acquisition throws.
            try (Connection connection = ConnectionFactory.createConnection(jobConf)) {
                newToken = TokenUtil.obtainToken(connection);
            }
            return newToken;
        }
    });
    XLog.getLog(getClass()).debug("Got token, adding it to credentials.");
    credentials.addToken(CredentialsProviderFactory.getUniqueAlias(token), token);
}
Also used : User(org.apache.hadoop.hbase.security.User) AuthenticationTokenIdentifier(org.apache.hadoop.hbase.security.token.AuthenticationTokenIdentifier) Connection(org.apache.hadoop.hbase.client.Connection) Token(org.apache.hadoop.security.token.Token) IOException(java.io.IOException) UserGroupInformation(org.apache.hadoop.security.UserGroupInformation)

Example 3 with AuthenticationTokenIdentifier

use of org.apache.hadoop.hbase.security.token.AuthenticationTokenIdentifier in project hbase by apache.

From the class TestVerifyReplicationSecureClusterCredentials, method testJobCredentials:

@Test
@SuppressWarnings("unchecked")
public void testJobCredentials() throws Exception {
    // Creating a VerifyReplication job against two secure clusters must
    // populate the job credentials with one auth token per cluster.
    Job job = new VerifyReplication().createSubmittableJob(
        new Configuration(UTIL1.getConfiguration()), new String[] { peer.get(), "table" });
    Credentials creds = job.getCredentials();
    assertEquals(2, creds.getAllTokens().size());
    // Each token is keyed by its cluster id and carries the login principal.
    String firstClusterId = ZKClusterId.readClusterIdZNode(UTIL1.getZooKeeperWatcher());
    Token<AuthenticationTokenIdentifier> firstToken =
        (Token<AuthenticationTokenIdentifier>) creds.getToken(new Text(firstClusterId));
    assertEquals(FULL_USER_PRINCIPAL, firstToken.decodeIdentifier().getUsername());
    String secondClusterId = ZKClusterId.readClusterIdZNode(UTIL2.getZooKeeperWatcher());
    Token<AuthenticationTokenIdentifier> secondToken =
        (Token<AuthenticationTokenIdentifier>) creds.getToken(new Text(secondClusterId));
    assertEquals(FULL_USER_PRINCIPAL, secondToken.decodeIdentifier().getUsername());
}
Also used : TokenIdentifier(org.apache.hadoop.security.token.TokenIdentifier) AuthenticationTokenIdentifier(org.apache.hadoop.hbase.security.token.AuthenticationTokenIdentifier) Configuration(org.apache.hadoop.conf.Configuration) AuthenticationTokenIdentifier(org.apache.hadoop.hbase.security.token.AuthenticationTokenIdentifier) VerifyReplication(org.apache.hadoop.hbase.mapreduce.replication.VerifyReplication) Token(org.apache.hadoop.security.token.Token) Text(org.apache.hadoop.io.Text) Job(org.apache.hadoop.mapreduce.Job) Credentials(org.apache.hadoop.security.Credentials) Test(org.junit.Test)

Example 4 with AuthenticationTokenIdentifier

use of org.apache.hadoop.hbase.security.token.AuthenticationTokenIdentifier in project hbase by apache.

From the class TestTableMapReduceUtil, method testInitCredentialsForCluster2:

@Test
@SuppressWarnings("unchecked")
public void testInitCredentialsForCluster2() throws Exception {
    // Two secure clusters share one KDC; credentials are initialized on a job
    // created from cluster one for access to cluster two.
    HBaseTestingUtil sourceUtil = new HBaseTestingUtil();
    HBaseTestingUtil peerUtil = new HBaseTestingUtil();
    File keytabFile = new File(sourceUtil.getDataTestDir("keytab").toUri().getPath());
    MiniKdc miniKdc = sourceUtil.setupMiniKdc(keytabFile);
    try {
        String shortName = UserGroupInformation.getLoginUser().getShortUserName();
        String principal = shortName + "/localhost";
        miniKdc.createPrincipal(keytabFile, principal, HTTP_PRINCIPAL);
        loginUserFromKeytab(principal + '@' + miniKdc.getRealm(), keytabFile.getAbsolutePath());
        try (Closeable sourceCluster = startSecureMiniCluster(sourceUtil, miniKdc, principal);
            Closeable peerCluster = startSecureMiniCluster(peerUtil, miniKdc, principal)) {
            Job job = Job.getInstance(sourceUtil.getConfiguration());
            TableMapReduceUtil.initCredentialsForCluster(job, peerUtil.getConfiguration());
            Credentials creds = job.getCredentials();
            // Exactly one token is expected: the peer cluster's auth token.
            assertEquals(1, creds.getAllTokens().size());
            String peerClusterId = ZKClusterId.readClusterIdZNode(peerUtil.getZooKeeperWatcher());
            Token<AuthenticationTokenIdentifier> peerToken =
                (Token<AuthenticationTokenIdentifier>) creds.getToken(new Text(peerClusterId));
            assertEquals(principal + '@' + miniKdc.getRealm(),
                peerToken.decodeIdentifier().getUsername());
        }
    } finally {
        miniKdc.stop();
    }
}
Also used : TokenIdentifier(org.apache.hadoop.security.token.TokenIdentifier) AuthenticationTokenIdentifier(org.apache.hadoop.hbase.security.token.AuthenticationTokenIdentifier) Configuration(org.apache.hadoop.conf.Configuration) Closeable(java.io.Closeable) Token(org.apache.hadoop.security.token.Token) Text(org.apache.hadoop.io.Text) HBaseTestingUtil(org.apache.hadoop.hbase.HBaseTestingUtil) AuthenticationTokenIdentifier(org.apache.hadoop.hbase.security.token.AuthenticationTokenIdentifier) MiniKdc(org.apache.hadoop.minikdc.MiniKdc) Job(org.apache.hadoop.mapreduce.Job) File(java.io.File) Credentials(org.apache.hadoop.security.Credentials) Test(org.junit.Test)

Example 5 with AuthenticationTokenIdentifier

use of org.apache.hadoop.hbase.security.token.AuthenticationTokenIdentifier in project hbase by apache.

From the class SecureBulkLoadManager, method secureBulkLoadHFiles:

/**
 * Performs a secure bulk load of HFiles into the given region on behalf of the
 * requesting user, running the load inside that user's UGI via doAs.
 *
 * @param region the target region receiving the HFiles
 * @param request the bulk load request carrying family/path pairs, the
 *     requester's filesystem token, and the staging bulk token
 * @param clusterIds cluster ids forwarded to the region's bulkLoadHFiles call
 * @return map of family bytes to loaded paths, or null if the privileged load
 *     failed (the failure is logged, not rethrown)
 * @throws IOException if security is enabled but no user token was supplied
 */
public Map<byte[], List<Path>> secureBulkLoadHFiles(final HRegion region, final BulkLoadHFileRequest request, List<String> clusterIds) throws IOException {
    // Unpack the protobuf family/path pairs into the form the region expects.
    final List<Pair<byte[], String>> familyPaths = new ArrayList<>(request.getFamilyPathCount());
    for (ClientProtos.BulkLoadHFileRequest.FamilyPath el : request.getFamilyPathList()) {
        familyPaths.add(new Pair<>(el.getFamily().toByteArray(), el.getPath()));
    }
    // Rebuild the requester's filesystem delegation token from the request,
    // but only when Hadoop security is on.
    Token<AuthenticationTokenIdentifier> userToken = null;
    if (userProvider.isHadoopSecurityEnabled()) {
        userToken = new Token<>(request.getFsToken().getIdentifier().toByteArray(), request.getFsToken().getPassword().toByteArray(), new Text(request.getFsToken().getKind()), new Text(request.getFsToken().getService()));
    }
    final String bulkToken = request.getBulkToken();
    User user = getActiveUser();
    final UserGroupInformation ugi = user.getUGI();
    if (userProvider.isHadoopSecurityEnabled()) {
        // Best-effort: also attach an HBase auth token to the UGI so the doAs
        // below can talk to HBase; failure here is logged, not fatal.
        try {
            Token<AuthenticationTokenIdentifier> tok = ClientTokenUtil.obtainToken(conn).get();
            if (tok != null) {
                boolean b = ugi.addToken(tok);
                LOG.debug("token added " + tok + " for user " + ugi + " return=" + b);
            }
        } catch (Exception ioe) {
            LOG.warn("unable to add token", ioe);
        }
    }
    if (userToken != null) {
        ugi.addToken(userToken);
    } else if (userProvider.isHadoopSecurityEnabled()) {
        // for mini cluster testing
        throw new DoNotRetryIOException("User token cannot be null");
    }
    if (region.getCoprocessorHost() != null) {
        region.getCoprocessorHost().preBulkLoadHFile(familyPaths);
    }
    Map<byte[], List<Path>> map = null;
    try {
        // Keep this UGI's cached FileSystems alive while the load is running.
        incrementUgiReference(ugi);
        // ('request user'), another for the target fs (HBase region server principal).
        if (userProvider.isHadoopSecurityEnabled()) {
            // Acquire a delegation token for the target filesystem as well, and
            // add it unless it duplicates the service of the user's own token.
            FsDelegationToken targetfsDelegationToken = new FsDelegationToken(userProvider, "renewer");
            targetfsDelegationToken.acquireDelegationToken(fs);
            Token<?> targetFsToken = targetfsDelegationToken.getUserToken();
            if (targetFsToken != null && (userToken == null || !targetFsToken.getService().equals(userToken.getService()))) {
                ugi.addToken(targetFsToken);
            }
        }
        // Run the actual load as the requesting user so filesystem permission
        // checks apply to them, not to the region server principal.
        map = ugi.doAs(new PrivilegedAction<Map<byte[], List<Path>>>() {

            @Override
            public Map<byte[], List<Path>> run() {
                FileSystem fs = null;
                try {
                    /*
             * This is creating and caching a new FileSystem instance. Other code called
             * "beneath" this method will rely on this FileSystem instance being in the
             * cache. This is important as those methods make _no_ attempt to close this
             * FileSystem instance. It is critical that here, in SecureBulkLoadManager,
             * we are tracking the lifecycle and closing the FS when safe to do so.
             */
                    fs = FileSystem.get(conf);
                    // Pre-create the per-family staging directories with open
                    // permissions so the load can move files through them.
                    for (Pair<byte[], String> el : familyPaths) {
                        Path stageFamily = new Path(bulkToken, Bytes.toString(el.getFirst()));
                        if (!fs.exists(stageFamily)) {
                            fs.mkdirs(stageFamily);
                            fs.setPermission(stageFamily, PERM_ALL_ACCESS);
                        }
                    }
                    // NOTE(review): presumably a test hook — confirm against callers.
                    if (fsCreatedListener != null) {
                        fsCreatedListener.accept(region);
                    }
                    // To enable access prior to staging
                    return region.bulkLoadHFiles(familyPaths, true, new SecureBulkLoadListener(fs, bulkToken, conf), request.getCopyFile(), clusterIds, request.getReplicate());
                } catch (Exception e) {
                    // Deliberately swallowed: the caller receives null and the
                    // failure is surfaced through the log only.
                    LOG.error("Failed to complete bulk load", e);
                }
                return null;
            }
        });
    } finally {
        decrementUgiReference(ugi);
        try {
            // Close the cached FileSystems for this UGI once no other bulk load
            // references it — but never for the region server's own login user.
            if (!UserGroupInformation.getLoginUser().equals(ugi) && !isUserReferenced(ugi)) {
                FileSystem.closeAllForUGI(ugi);
            }
        } catch (IOException e) {
            LOG.error("Failed to close FileSystem for: {}", ugi, e);
        }
        if (region.getCoprocessorHost() != null) {
            region.getCoprocessorHost().postBulkLoadHFile(familyPaths, map);
        }
    }
    return map;
}
Also used : User(org.apache.hadoop.hbase.security.User) DoNotRetryIOException(org.apache.hadoop.hbase.DoNotRetryIOException) ArrayList(java.util.ArrayList) PrivilegedAction(java.security.PrivilegedAction) AuthenticationTokenIdentifier(org.apache.hadoop.hbase.security.token.AuthenticationTokenIdentifier) FileSystem(org.apache.hadoop.fs.FileSystem) ArrayList(java.util.ArrayList) List(java.util.List) Pair(org.apache.hadoop.hbase.util.Pair) UserGroupInformation(org.apache.hadoop.security.UserGroupInformation) Path(org.apache.hadoop.fs.Path) BulkLoadHFileRequest(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest) Text(org.apache.hadoop.io.Text) DoNotRetryIOException(org.apache.hadoop.hbase.DoNotRetryIOException) IOException(java.io.IOException) DoNotRetryIOException(org.apache.hadoop.hbase.DoNotRetryIOException) IOException(java.io.IOException) FsDelegationToken(org.apache.hadoop.hbase.security.token.FsDelegationToken)

Aggregations

AuthenticationTokenIdentifier (org.apache.hadoop.hbase.security.token.AuthenticationTokenIdentifier)5 Text (org.apache.hadoop.io.Text)4 Token (org.apache.hadoop.security.token.Token)4 Configuration (org.apache.hadoop.conf.Configuration)3 Job (org.apache.hadoop.mapreduce.Job)3 Credentials (org.apache.hadoop.security.Credentials)3 TokenIdentifier (org.apache.hadoop.security.token.TokenIdentifier)3 Test (org.junit.Test)3 Closeable (java.io.Closeable)2 File (java.io.File)2 IOException (java.io.IOException)2 HBaseTestingUtil (org.apache.hadoop.hbase.HBaseTestingUtil)2 User (org.apache.hadoop.hbase.security.User)2 MiniKdc (org.apache.hadoop.minikdc.MiniKdc)2 UserGroupInformation (org.apache.hadoop.security.UserGroupInformation)2 PrivilegedAction (java.security.PrivilegedAction)1 ArrayList (java.util.ArrayList)1 List (java.util.List)1 FileSystem (org.apache.hadoop.fs.FileSystem)1 Path (org.apache.hadoop.fs.Path)1