
Example 71 with Credentials

use of org.apache.hadoop.security.Credentials in project hadoop by apache.

the class TestRMRestart method testDelegationTokenRestoredInDelegationTokenRenewer.

@Test(timeout = 60000)
public void testDelegationTokenRestoredInDelegationTokenRenewer() throws Exception {
    conf.setInt(YarnConfiguration.RM_AM_MAX_ATTEMPTS, 2);
    conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
    UserGroupInformation.setConfiguration(conf);
    MemoryRMStateStore memStore = new MemoryRMStateStore();
    memStore.init(conf);
    RMState rmState = memStore.getState();
    Map<ApplicationId, ApplicationStateData> rmAppState = rmState.getApplicationState();
    MockRM rm1 = new TestSecurityMockRM(conf, memStore);
    rm1.start();
    HashSet<Token<RMDelegationTokenIdentifier>> tokenSet = new HashSet<Token<RMDelegationTokenIdentifier>>();
    // create an empty credential
    Credentials ts = new Credentials();
    // create tokens and add into credential
    Text userText1 = new Text("user1");
    RMDelegationTokenIdentifier dtId1 = new RMDelegationTokenIdentifier(userText1, new Text("renewer1"), userText1);
    Token<RMDelegationTokenIdentifier> token1 = new Token<RMDelegationTokenIdentifier>(dtId1, rm1.getRMContext().getRMDelegationTokenSecretManager());
    SecurityUtil.setTokenService(token1, rmAddr);
    ts.addToken(userText1, token1);
    tokenSet.add(token1);
    Text userText2 = new Text("user2");
    RMDelegationTokenIdentifier dtId2 = new RMDelegationTokenIdentifier(userText2, new Text("renewer2"), userText2);
    Token<RMDelegationTokenIdentifier> token2 = new Token<RMDelegationTokenIdentifier>(dtId2, rm1.getRMContext().getRMDelegationTokenSecretManager());
    SecurityUtil.setTokenService(token2, rmAddr);
    ts.addToken(userText2, token2);
    tokenSet.add(token2);
    // submit an app with customized credential
    RMApp app = rm1.submitApp(200, "name", "user", new HashMap<ApplicationAccessType, String>(), false, "default", 1, ts);
    // assert app info is saved
    ApplicationStateData appState = rmAppState.get(app.getApplicationId());
    Assert.assertNotNull(appState);
    // assert delegation tokens exist in rm1 DelegationTokenRenewer
    Assert.assertEquals(tokenSet, rm1.getRMContext().getDelegationTokenRenewer().getDelegationTokens());
    // assert delegation tokens are saved
    DataOutputBuffer dob = new DataOutputBuffer();
    ts.writeTokenStorageToStream(dob);
    ByteBuffer securityTokens = ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
    securityTokens.rewind();
    Assert.assertEquals(securityTokens, appState.getApplicationSubmissionContext().getAMContainerSpec().getTokens());
    // start new RM
    MockRM rm2 = new TestSecurityMockRM(conf, memStore);
    rm2.start();
    // Need to wait for a while as now token renewal happens on another thread
    // and is asynchronous in nature.
    waitForTokensToBeRenewed(rm2, tokenSet);
    // verify tokens are properly populated back to rm2 DelegationTokenRenewer
    Assert.assertEquals(tokenSet, rm2.getRMContext().getDelegationTokenRenewer().getDelegationTokens());
}
Also used : RMApp(org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp) Token(org.apache.hadoop.security.token.Token) Text(org.apache.hadoop.io.Text) ApplicationStateData(org.apache.hadoop.yarn.server.resourcemanager.recovery.records.ApplicationStateData) RMDelegationTokenIdentifier(org.apache.hadoop.yarn.security.client.RMDelegationTokenIdentifier) ByteBuffer(java.nio.ByteBuffer) MemoryRMStateStore(org.apache.hadoop.yarn.server.resourcemanager.recovery.MemoryRMStateStore) ApplicationAccessType(org.apache.hadoop.yarn.api.records.ApplicationAccessType) DataOutputBuffer(org.apache.hadoop.io.DataOutputBuffer) ApplicationId(org.apache.hadoop.yarn.api.records.ApplicationId) RMState(org.apache.hadoop.yarn.server.resourcemanager.recovery.RMStateStore.RMState) Credentials(org.apache.hadoop.security.Credentials) HashSet(java.util.HashSet) Test(org.junit.Test)
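
The test compares the submitted Credentials against the ByteBuffer stored in the application's AM container spec. A minimal sketch of the same serialize/deserialize round trip, assuming Hadoop is on the classpath (the class name and standalone main are illustrative; real code would start from an already populated Credentials object):

import java.nio.ByteBuffer;
import org.apache.hadoop.io.DataInputByteBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.security.Credentials;

public class CredentialsByteBufferRoundTrip {
    public static void main(String[] args) throws Exception {
        // in real code this would already contain delegation tokens
        Credentials ts = new Credentials();

        // serialize: Credentials -> DataOutputBuffer -> ByteBuffer
        DataOutputBuffer dob = new DataOutputBuffer();
        ts.writeTokenStorageToStream(dob);
        ByteBuffer securityTokens = ByteBuffer.wrap(dob.getData(), 0, dob.getLength());

        // deserialize: ByteBuffer -> DataInputByteBuffer -> Credentials
        DataInputByteBuffer dib = new DataInputByteBuffer();
        dib.reset(securityTokens);
        Credentials restored = new Credentials();
        restored.readTokenStorageStream(dib);

        System.out.println("tokens restored: " + restored.numberOfTokens());
    }
}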

Example 72 with Credentials

use of org.apache.hadoop.security.Credentials in project hadoop by apache.

the class TestRMRestart method testAppAttemptTokensRestoredOnRMRestart.

@Test(timeout = 60000)
public void testAppAttemptTokensRestoredOnRMRestart() throws Exception {
    conf.setInt(YarnConfiguration.RM_AM_MAX_ATTEMPTS, 2);
    conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
    UserGroupInformation.setConfiguration(conf);
    MemoryRMStateStore memStore = new MemoryRMStateStore();
    memStore.init(conf);
    RMState rmState = memStore.getState();
    Map<ApplicationId, ApplicationStateData> rmAppState = rmState.getApplicationState();
    MockRM rm1 = new TestSecurityMockRM(conf, memStore);
    rm1.start();
    MockNM nm1 = new MockNM("0.0.0.0:4321", 15120, rm1.getResourceTrackerService());
    nm1.registerNode();
    // submit an app
    RMApp app1 = rm1.submitApp(200, "name", "user", new HashMap<ApplicationAccessType, String>(), "default");
    // assert app info is saved
    ApplicationStateData appState = rmAppState.get(app1.getApplicationId());
    Assert.assertNotNull(appState);
    // Allocate the AM
    nm1.nodeHeartbeat(true);
    RMAppAttempt attempt1 = app1.getCurrentAppAttempt();
    ApplicationAttemptId attemptId1 = attempt1.getAppAttemptId();
    rm1.waitForState(attemptId1, RMAppAttemptState.ALLOCATED);
    // assert attempt info is saved
    ApplicationAttemptStateData attemptState = appState.getAttempt(attemptId1);
    Assert.assertNotNull(attemptState);
    Assert.assertEquals(BuilderUtils.newContainerId(attemptId1, 1), attemptState.getMasterContainer().getId());
    // the clientTokenMasterKey that is generated when the
    // RMAppAttempt is created
    byte[] clientTokenMasterKey = attempt1.getClientTokenMasterKey().getEncoded();
    // assert application credentials are saved
    Credentials savedCredentials = attemptState.getAppAttemptTokens();
    Assert.assertArrayEquals("client token master key not saved", clientTokenMasterKey, savedCredentials.getSecretKey(RMStateStore.AM_CLIENT_TOKEN_MASTER_KEY_NAME));
    // start new RM
    MockRM rm2 = new TestSecurityMockRM(conf, memStore);
    rm2.start();
    RMApp loadedApp1 = rm2.getRMContext().getRMApps().get(app1.getApplicationId());
    RMAppAttempt loadedAttempt1 = loadedApp1.getRMAppAttempt(attemptId1);
    // assert loaded attempt recovered
    Assert.assertNotNull(loadedAttempt1);
    // assert the client token master key is restored to the same value
    // after RM restart
    Assert.assertEquals("client token master key not restored", attempt1.getClientTokenMasterKey(), loadedAttempt1.getClientTokenMasterKey());
    // assert ClientTokenSecretManager also knows about the key
    Assert.assertArrayEquals(clientTokenMasterKey, rm2.getClientToAMTokenSecretManager().getMasterKey(attemptId1).getEncoded());
    // assert AMRMTokenSecretManager also knows about the AMRMToken password
    Token<AMRMTokenIdentifier> amrmToken = loadedAttempt1.getAMRMToken();
    Assert.assertArrayEquals(amrmToken.getPassword(), rm2.getRMContext().getAMRMTokenSecretManager().retrievePassword(amrmToken.decodeIdentifier()));
}
Also used : RMApp(org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp) RMAppAttempt(org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttempt) ApplicationStateData(org.apache.hadoop.yarn.server.resourcemanager.recovery.records.ApplicationStateData) ApplicationAttemptId(org.apache.hadoop.yarn.api.records.ApplicationAttemptId) MemoryRMStateStore(org.apache.hadoop.yarn.server.resourcemanager.recovery.MemoryRMStateStore) AMRMTokenIdentifier(org.apache.hadoop.yarn.security.AMRMTokenIdentifier) ApplicationAccessType(org.apache.hadoop.yarn.api.records.ApplicationAccessType) ApplicationId(org.apache.hadoop.yarn.api.records.ApplicationId) RMState(org.apache.hadoop.yarn.server.resourcemanager.recovery.RMStateStore.RMState) ApplicationAttemptStateData(org.apache.hadoop.yarn.server.resourcemanager.recovery.records.ApplicationAttemptStateData) Credentials(org.apache.hadoop.security.Credentials) Test(org.junit.Test)
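
The recovered attempt state is checked with getSecretKey(...), which looks up raw bytes stored under a Text alias. A minimal sketch of storing and reading back a secret key by alias (the alias and key bytes below are made up for illustration):

import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.Credentials;

public class SecretKeyExample {
    public static void main(String[] args) {
        Credentials creds = new Credentials();

        // store an opaque byte[] under an alias, much like the RM stores the
        // AM client token master key in the attempt's credentials
        Text alias = new Text("example.secret.alias");  // hypothetical alias
        byte[] key = "not-a-real-key".getBytes(StandardCharsets.UTF_8);
        creds.addSecretKey(alias, key);

        // retrieve it later under the same alias; getSecretKey returns null if absent
        byte[] restored = creds.getSecretKey(alias);
        System.out.println("key recovered: " + Arrays.equals(key, restored));
    }
}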

Example 73 with Credentials

use of org.apache.hadoop.security.Credentials in project flink by apache.

the class HadoopModule method install.

@Override
public void install(SecurityUtils.SecurityConfiguration securityConfig) throws SecurityInstallException {
    UserGroupInformation.setConfiguration(securityConfig.getHadoopConfiguration());
    try {
        if (UserGroupInformation.isSecurityEnabled() && !StringUtils.isBlank(securityConfig.getKeytab()) && !StringUtils.isBlank(securityConfig.getPrincipal())) {
            String keytabPath = (new File(securityConfig.getKeytab())).getAbsolutePath();
            UserGroupInformation.loginUserFromKeytab(securityConfig.getPrincipal(), keytabPath);
            loginUser = UserGroupInformation.getLoginUser();
            // supplement with any available tokens
            String fileLocation = System.getenv(UserGroupInformation.HADOOP_TOKEN_FILE_LOCATION);
            if (fileLocation != null) {
                /*
                 * Use the reflection API since these methods are not available in the Hadoop 1 profile.
                 * The calls below read the stored tokens and add them to the UGI, i.e.:
                 *   Credentials cred = Credentials.readTokenStorageFile(new File(fileLocation), config.hadoopConf);
                 *   loginUser.addCredentials(cred);
                 */
                try {
                    Method readTokenStorageFileMethod = Credentials.class.getMethod("readTokenStorageFile", File.class, org.apache.hadoop.conf.Configuration.class);
                    Credentials cred = (Credentials) readTokenStorageFileMethod.invoke(null, new File(fileLocation), securityConfig.getHadoopConfiguration());
                    Method addCredentialsMethod = UserGroupInformation.class.getMethod("addCredentials", Credentials.class);
                    addCredentialsMethod.invoke(loginUser, cred);
                } catch (NoSuchMethodException e) {
                    LOG.warn("Could not find method implementations in the shaded jar. Exception: {}", e);
                } catch (InvocationTargetException e) {
                    throw e.getTargetException();
                }
            }
        } else {
            // note that the stored tokens are read automatically
            try {
                // Use the reflection API to perform the login; equivalent to:
                // UserGroupInformation.loginUserFromSubject(null);
                Method loginUserFromSubjectMethod = UserGroupInformation.class.getMethod("loginUserFromSubject", Subject.class);
                loginUserFromSubjectMethod.invoke(null, (Subject) null);
            } catch (NoSuchMethodException e) {
                LOG.warn("Could not find method implementations in the shaded jar. Exception: {}", e);
            } catch (InvocationTargetException e) {
                throw e.getTargetException();
            }
            loginUser = UserGroupInformation.getLoginUser();
        }
        if (UserGroupInformation.isSecurityEnabled()) {
            // hasKerberosCredentials() can be inaccurate for keytab-based logins,
            // so we check only in the ticket cache scenario.
            if (securityConfig.useTicketCache() && !loginUser.hasKerberosCredentials()) {
                // a delegation token is an adequate substitute in most cases
                if (!HadoopUtils.hasHDFSDelegationToken()) {
                    LOG.warn("Hadoop security is enabled but current login user does not have Kerberos credentials");
                }
            }
        }
        LOG.info("Hadoop user set to {}", loginUser);
    } catch (Throwable ex) {
        throw new SecurityInstallException("Unable to set the Hadoop login user", ex);
    }
}
Also used : Method(java.lang.reflect.Method) File(java.io.File) Credentials(org.apache.hadoop.security.Credentials) InvocationTargetException(java.lang.reflect.InvocationTargetException)
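
On Hadoop 2.x and later, where Credentials.readTokenStorageFile and UserGroupInformation.addCredentials exist directly, the reflective calls above can be written as plain method calls. A minimal sketch of that equivalent (the class and method names here are illustrative):

import java.io.File;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.UserGroupInformation;

public class TokenFileSupplement {
    /** Attach tokens from HADOOP_TOKEN_FILE_LOCATION to the current login user. */
    public static void supplementLoginUser(Configuration hadoopConf) throws IOException {
        String fileLocation = System.getenv(UserGroupInformation.HADOOP_TOKEN_FILE_LOCATION);
        if (fileLocation != null) {
            // read the externally provided token file and add its tokens to the UGI
            Credentials cred = Credentials.readTokenStorageFile(new File(fileLocation), hadoopConf);
            UserGroupInformation.getLoginUser().addCredentials(cred);
        }
    }
}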

Example 74 with Credentials

use of org.apache.hadoop.security.Credentials in project hadoop by apache.

the class DtFileOperations method getTokenFile.

/** Fetch a token from a service and save to file in the local filesystem.
   *  @param tokenFile a local File object to hold the output.
   *  @param fileFormat a string equal to FORMAT_PB or FORMAT_JAVA, for output
   *  @param alias overwrite service field of fetched token with this text.
   *  @param service use a DtFetcher implementation matching this service text.
   *  @param url pass this URL to fetcher after stripping any http/s prefix.
   *  @param renewer pass this renewer to the fetcher.
   *  @param conf Configuration object passed along.
   *  @throws IOException
   */
public static void getTokenFile(File tokenFile, String fileFormat, Text alias, Text service, String url, String renewer, Configuration conf) throws Exception {
    Token<?> token = null;
    Credentials creds = tokenFile.exists() ? Credentials.readTokenStorageFile(tokenFile, conf) : new Credentials();
    ServiceLoader<DtFetcher> loader = ServiceLoader.load(DtFetcher.class);
    for (DtFetcher fetcher : loader) {
        if (matchService(fetcher, service, url)) {
            if (!fetcher.isTokenRequired()) {
                String message = "DtFetcher for service '" + service + "' does not require a token.  Check your configuration.  " + "Note: security may be disabled or there may be two DtFetcher " + "providers for the same service designation.";
                LOG.error(message);
                throw new IllegalArgumentException(message);
            }
            token = fetcher.addDelegationTokens(conf, creds, renewer, stripPrefix(url));
        }
    }
    if (alias != null) {
        if (token == null) {
            String message = "DtFetcher for service '" + service + "'" + " does not allow aliasing.  Cannot apply alias '" + alias + "'." + "  Drop alias flag to get token for this service.";
            LOG.error(message);
            throw new IOException(message);
        }
        Token<?> aliasedToken = token.copyToken();
        aliasedToken.setService(alias);
        creds.addToken(alias, aliasedToken);
        LOG.info("Add token with service " + alias);
    }
    doFormattedWrite(tokenFile, fileFormat, creds, conf);
}
Also used : IOException(java.io.IOException) Credentials(org.apache.hadoop.security.Credentials)
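
A token file produced by getTokenFile can be loaded again with Credentials.readTokenStorageFile, which detects the storage format from the file header. A small sketch that lists the stored tokens (the file path is illustrative):

import java.io.File;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.token.Token;

public class ListTokenFile {
    public static void main(String[] args) throws Exception {
        // path is illustrative; point it at a file written by getTokenFile
        File tokenFile = new File("/tmp/example.dt");
        Configuration conf = new Configuration();

        Credentials creds = Credentials.readTokenStorageFile(tokenFile, conf);
        for (Token<?> token : creds.getAllTokens()) {
            // kind identifies the token type, service identifies the issuing endpoint
            System.out.println(token.getKind() + " @ " + token.getService());
        }
    }
}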

Example 75 with Credentials

use of org.apache.hadoop.security.Credentials in project hadoop by apache.

the class DtFileOperations method aliasTokenFile.

/** Alias a token from a file and save back to file in the local filesystem.
   *  @param tokenFile a local File object to hold the input and output.
   *  @param fileFormat a string equal to FORMAT_PB or FORMAT_JAVA, for output
   *  @param alias overwrite service field of fetched token with this text.
   *  @param service only apply alias to tokens matching this service text.
   *  @param conf Configuration object passed along.
   *  @throws IOException
   */
public static void aliasTokenFile(File tokenFile, String fileFormat, Text alias, Text service, Configuration conf) throws Exception {
    Credentials newCreds = new Credentials();
    Credentials creds = Credentials.readTokenStorageFile(tokenFile, conf);
    for (Token<?> token : creds.getAllTokens()) {
        newCreds.addToken(token.getService(), token);
        if (token.getService().equals(service)) {
            Token<?> aliasedToken = token.copyToken();
            aliasedToken.setService(alias);
            newCreds.addToken(alias, aliasedToken);
        }
    }
    doFormattedWrite(tokenFile, fileFormat, newCreds, conf);
}
Also used : Credentials(org.apache.hadoop.security.Credentials)
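
To confirm the alias was applied, the rewritten file can be reloaded and queried by alias. A minimal sketch, with an illustrative file path and alias:

import java.io.File;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.token.Token;

public class CheckAliasedToken {
    public static void main(String[] args) throws Exception {
        File tokenFile = new File("/tmp/example.dt");  // illustrative path
        Text alias = new Text("my-alias");             // illustrative alias

        Credentials creds = Credentials.readTokenStorageFile(tokenFile, new Configuration());
        // getToken returns null when nothing is stored under that alias
        Token<?> aliased = creds.getToken(alias);
        if (aliased != null) {
            System.out.println("alias present, service = " + aliased.getService());
        } else {
            System.out.println("no token stored under alias " + alias);
        }
    }
}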

Aggregations

Credentials (org.apache.hadoop.security.Credentials): 238
Test (org.junit.Test): 105
Token (org.apache.hadoop.security.token.Token): 76
Text (org.apache.hadoop.io.Text): 64
IOException (java.io.IOException): 63
Path (org.apache.hadoop.fs.Path): 50
ApplicationId (org.apache.hadoop.yarn.api.records.ApplicationId): 48
ByteBuffer (java.nio.ByteBuffer): 42
Configuration (org.apache.hadoop.conf.Configuration): 41
DataOutputBuffer (org.apache.hadoop.io.DataOutputBuffer): 37
HashMap (java.util.HashMap): 34
InetSocketAddress (java.net.InetSocketAddress): 30
UserGroupInformation (org.apache.hadoop.security.UserGroupInformation): 30
ContainerId (org.apache.hadoop.yarn.api.records.ContainerId): 28
YarnConfiguration (org.apache.hadoop.yarn.conf.YarnConfiguration): 27
File (java.io.File): 25
ApplicationAttemptId (org.apache.hadoop.yarn.api.records.ApplicationAttemptId): 24
ContainerLaunchContext (org.apache.hadoop.yarn.api.records.ContainerLaunchContext): 23
JobConf (org.apache.hadoop.mapred.JobConf): 20
LocalResource (org.apache.hadoop.yarn.api.records.LocalResource): 19