Use of org.apache.hadoop.security.token.TokenIdentifier in project hive by apache.
In the class LlapTokenChecker, the method getLlapTokens:
static List<LlapTokenIdentifier> getLlapTokens(UserGroupInformation ugi, String clusterId) {
  List<LlapTokenIdentifier> tokens = null;
  for (TokenIdentifier id : ugi.getTokenIdentifiers()) {
    // Skip anything that is not an LLAP token.
    if (!LlapTokenIdentifier.KIND_NAME.equals(id.getKind())) continue;
    if (LOG.isDebugEnabled()) {
      LOG.debug("Token {}", id);
    }
    LlapTokenIdentifier llapId = (LlapTokenIdentifier) id;
    // If a cluster id was requested, keep only tokens for that cluster.
    if (clusterId != null && !clusterId.equals(llapId.getClusterId())) continue;
    if (tokens == null) {
      tokens = new ArrayList<>();
    }
    tokens.add(llapId);
  }
  return tokens;
}
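A minimal hypothetical caller sketch for the method above; the helper name is invented for illustration, and the call assumes the caller can see the package-private method. UserGroupInformation.getCurrentUser() is the standard Hadoop call for obtaining the current user's UGI.

// Hypothetical caller: collect the LLAP tokens attached to the current user, if any.
static List<LlapTokenIdentifier> currentUserLlapTokens(String clusterId) throws IOException {
  UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
  // Note: getLlapTokens returns null rather than an empty list when nothing matches.
  return LlapTokenChecker.getLlapTokens(ugi, clusterId);
}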
Use of org.apache.hadoop.security.token.TokenIdentifier in project hive by apache.
In the class HiveAccumuloHelper, the method getHadoopToken:
public Token<? extends TokenIdentifier> getHadoopToken(AuthenticationToken delegationToken) throws IOException {
  try {
    // DelegationTokenImpl class
    Class<?> delegationTokenClass = JavaUtils.loadClass(DELEGATION_TOKEN_IMPL_CLASS_NAME);
    // Methods on DelegationTokenImpl
    Method getIdentifierMethod = delegationTokenClass.getMethod(GET_IDENTIFIER_METHOD_NAME);
    Method getPasswordMethod = delegationTokenClass.getMethod(GET_PASSWORD_METHOD_NAME);
    Method getServiceNameMethod = delegationTokenClass.getMethod(GET_SERVICE_NAME_METHOD_NAME);
    // Treat the concrete TokenIdentifier implementation as its abstract parent class to avoid
    // a compile-time dependency on Accumulo:
    // AuthenticationTokenIdentifier identifier = delegationToken.getIdentifier();
    TokenIdentifier identifier = (TokenIdentifier) getIdentifierMethod.invoke(delegationToken);
    // Equivalent direct call: new Token<>(identifier.getBytes(), delegationToken.getPassword(),
    //     identifier.getKind(), delegationToken.getServiceName())
    return new Token<TokenIdentifier>(identifier.getBytes(),
        (byte[]) getPasswordMethod.invoke(delegationToken),
        identifier.getKind(),
        (Text) getServiceNameMethod.invoke(delegationToken));
  } catch (Exception e) {
    throw new IOException("Failed to create Hadoop token from Accumulo DelegationToken", e);
  }
}
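A hypothetical usage sketch: one plausible way to make the converted token visible to MapReduce tasks is to add it to the job's credential set. The helper method name is invented; JobConf.getCredentials() and Credentials.addToken() are standard Hadoop APIs.

// Hypothetical helper: attach the converted Accumulo delegation token to a job's credentials.
static void addAccumuloToken(JobConf jobConf, HiveAccumuloHelper helper,
    AuthenticationToken delegationToken) throws IOException {
  Token<? extends TokenIdentifier> hadoopToken = helper.getHadoopToken(delegationToken);
  jobConf.getCredentials().addToken(hadoopToken.getService(), hadoopToken);
}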
Use of org.apache.hadoop.security.token.TokenIdentifier in project storm by apache.
In the class AutoHDFS, the method renew:
/**
* {@inheritDoc}
*/
@Override
@SuppressWarnings("unchecked")
public void renew(Map<String, String> credentials, Map topologyConf) {
  try {
    Credentials credential = getCredentials(credentials);
    if (credential != null) {
      Configuration configuration = new Configuration();
      Collection<Token<? extends TokenIdentifier>> tokens = credential.getAllTokens();
      if (tokens != null && !tokens.isEmpty()) {
        for (Token<? extends TokenIdentifier> token : tokens) {
          // We need to re-login because some other thread might have logged into Hadoop using
          // its own credentials (e.g. AutoHBase might also be part of the nimbus auto creds).
          login(configuration);
          long expiration = token.renew(configuration);
          LOG.info("HDFS delegation token renewed, new expiration time {}", expiration);
        }
      } else {
        LOG.debug("No tokens found for credentials, skipping renewal.");
      }
    }
  } catch (Exception e) {
    LOG.warn("Could not renew the credentials; one possible reason is that the tokens are beyond "
        + "their renewal period, so attempting to get new tokens.", e);
    populateCredentials(credentials, topologyConf);
  }
}
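The renewal idea above can be sketched in isolation: iterate the tokens held in a Credentials object and renew each one against a Configuration. The helper name is invented, and Token.renew() returns the new expiration time.

// Hypothetical standalone sketch: renew every delegation token held in a Credentials object.
static void renewAll(Credentials credentials, Configuration configuration)
    throws IOException, InterruptedException {
  for (Token<? extends TokenIdentifier> token : credentials.getAllTokens()) {
    long expiration = token.renew(configuration);
    LOG.info("Token renewed, new expiration time {}", expiration);
  }
}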
Use of org.apache.hadoop.security.token.TokenIdentifier in project weave by continuuity.
In the class Hadoop20YarnAppClient, the method addRMToken:
private void addRMToken(ContainerLaunchContext context) {
  if (!UserGroupInformation.isSecurityEnabled()) {
    return;
  }
  try {
    Credentials credentials = YarnUtils.decodeCredentials(context.getContainerTokens());
    Configuration config = yarnClient.getConfig();
    Token<TokenIdentifier> token = convertToken(
        yarnClient.getRMDelegationToken(new Text(YarnUtils.getYarnTokenRenewer(config))),
        YarnUtils.getRMAddress(config));
    LOG.info("Added RM delegation token {}", token);
    credentials.addToken(token.getService(), token);
    context.setContainerTokens(YarnUtils.encodeCredentials(credentials));
  } catch (Exception e) {
    LOG.error("Failed to create credentials.", e);
    throw Throwables.propagate(e);
  }
}
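YarnUtils.encodeCredentials is internal to Weave; the sketch below shows what such an encode step could look like with plain Hadoop classes. It is an assumption about the general pattern, not Weave's actual implementation.

// Sketch of a credentials-to-ByteBuffer encode step using standard Hadoop classes.
static ByteBuffer encodeCredentials(Credentials credentials) throws IOException {
  DataOutputBuffer out = new DataOutputBuffer();
  credentials.writeTokenStorageToStream(out);
  return ByteBuffer.wrap(out.getData(), 0, out.getLength());
}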
Use of org.apache.hadoop.security.token.TokenIdentifier in project weave by continuuity.
In the class Hadoop21YarnAppClient, the method addRMToken:
private void addRMToken(ContainerLaunchContext context) {
  if (!UserGroupInformation.isSecurityEnabled()) {
    return;
  }
  try {
    Credentials credentials = YarnUtils.decodeCredentials(context.getTokens());
    Configuration config = yarnClient.getConfig();
    Token<TokenIdentifier> token = ConverterUtils.convertFromYarn(
        yarnClient.getRMDelegationToken(new Text(YarnUtils.getYarnTokenRenewer(config))),
        YarnUtils.getRMAddress(config));
    LOG.info("Added RM delegation token {}", token);
    credentials.addToken(token.getService(), token);
    context.setTokens(YarnUtils.encodeCredentials(credentials));
  } catch (Exception e) {
    LOG.error("Failed to create credentials.", e);
    throw Throwables.propagate(e);
  }
}
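The decode step used at the start of addRMToken can be sketched the same way; again, this is an assumption about the general pattern behind YarnUtils.decodeCredentials, not the Weave source.

// Sketch of a ByteBuffer-to-Credentials decode step using standard Hadoop classes.
static Credentials decodeCredentials(ByteBuffer buffer) throws IOException {
  Credentials credentials = new Credentials();
  DataInputByteBuffer in = new DataInputByteBuffer();
  in.reset(buffer.duplicate());
  credentials.readTokenStorageStream(in);
  return credentials;
}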