Use of org.apache.hadoop.security.token.TokenIdentifier in the Apache Hadoop project: the testEmptyToken method of the TestDelegationToken class.
@Test
public void testEmptyToken() throws IOException {
  // A default-constructed token must be equivalent to one built from
  // zero-length identifier/password arrays and empty kind/service fields.
  Token<?> defaultToken = new Token<TokenIdentifier>();
  Token<?> emptyFieldsToken =
      new Token<TokenIdentifier>(new byte[0], new byte[0], new Text(), new Text());
  assertEquals(defaultToken, emptyFieldsToken);
  assertEquals(defaultToken.encodeToUrlString(), emptyFieldsToken.encodeToUrlString());
  // Null constructor arguments should normalize to the same empty token.
  emptyFieldsToken = new Token<TokenIdentifier>(null, null, null, null);
  assertEquals(defaultToken, emptyFieldsToken);
  assertEquals(defaultToken.encodeToUrlString(), emptyFieldsToken.encodeToUrlString());
}
Use of org.apache.hadoop.security.token.TokenIdentifier in the Apache Hadoop project: the createFileSystemForServiceName method of the TestTokenCache class.
/**
 * Builds a mock file system whose delegation tokens carry the given service
 * name and a distinct kind on every call, so tests can tell issued tokens apart.
 */
private MockFileSystem createFileSystemForServiceName(final String service) throws IOException {
  MockFileSystem fs = new MockFileSystem();
  when(fs.getCanonicalServiceName()).thenReturn(service);
  when(fs.getDelegationToken(any(String.class))).thenAnswer(new Answer<Token<?>>() {
    // Bumped on every issuance so each token's kind is unique.
    int issueCount = 0;

    @Override
    public Token<?> answer(InvocationOnMock invocation) throws Throwable {
      Token<?> issued = new Token<TokenIdentifier>();
      issued.setService(new Text(service));
      // Unique kind value: after a round-trip through token storage we can
      // check whether the restored token is really the one issued here.
      issued.setKind(new Text("token" + issueCount++));
      return issued;
    }
  });
  return fs;
}
Use of org.apache.hadoop.security.token.TokenIdentifier in the Apache Hadoop project: the redirectURI method of the NamenodeWebHdfsMethods class.
/**
 * Builds the URI that redirects a WebHDFS client to a datanode for the given
 * operation, appending the delegation-token query parameter appropriate for
 * the cluster's security mode.
 *
 * @throws IOException if no suitable datanode can be chosen
 * @throws URISyntaxException if the assembled URI is malformed
 */
private URI redirectURI(final NameNode namenode, final UserGroupInformation ugi, final DelegationParam delegation, final UserParam username, final DoAsParam doAsUser, final String path, final HttpOpParam.Op op, final long openOffset, final long blocksize, final String excludeDatanodes, final Param<?, ?>... parameters) throws URISyntaxException, IOException {
  final DatanodeInfo chosen =
      chooseDatanode(namenode, path, op, openOffset, blocksize, excludeDatanodes, remoteAddr);
  if (chosen == null) {
    throw new IOException("Failed to find datanode, suggest to check cluster health. excludeDatanodes=" + excludeDatanodes);
  }
  final String tokenQuery;
  if (!UserGroupInformation.isSecurityEnabled()) {
    // Security disabled: only the user identity parameters are needed.
    tokenQuery = Param.toSortedString("&", doAsUser, username);
  } else if (delegation.getValue() != null) {
    // The client already supplied a delegation token; forward it untouched.
    tokenQuery = "&" + delegation;
  } else {
    // No token supplied: mint one for this user and embed it in the query.
    final Token<? extends TokenIdentifier> issued = generateDelegationToken(namenode, ugi, null);
    tokenQuery = "&" + new DelegationParam(issued.encodeToUrlString());
  }
  final String query = op.toQueryString() + tokenQuery + "&" + new NamenodeAddressParam(namenode) + Param.toSortedString("&", parameters);
  final String redirectPath = WebHdfsFileSystem.PATH_PREFIX + path;
  // Plain HTTP uses the info port; anything else goes to the secure info port.
  final int port = "http".equals(scheme) ? chosen.getInfoPort() : chosen.getInfoSecurePort();
  final URI uri = new URI(scheme, null, chosen.getHostName(), port, redirectPath, query, null);
  if (LOG.isTraceEnabled()) {
    LOG.trace("redirectURI=" + uri);
  }
  return uri;
}
Use of org.apache.hadoop.security.token.TokenIdentifier in the CDAP project by caskdata: the obtainToken method of the JobHistoryServerTokenUtils class.
/**
 * Obtains a JobHistoryServer delegation token and adds it to the given
 * {@link Credentials}. Does nothing when Hadoop security is disabled.
 *
 * @param configuration cluster configuration; {@code mapreduce.jobhistory.address}
 *                      is read to locate the history server
 * @param credentials the credentials to store the token into
 * @return the same Credentials instance as the one given in parameter.
 */
public static Credentials obtainToken(Configuration configuration, Credentials credentials) {
  if (!UserGroupInformation.isSecurityEnabled()) {
    // Without Kerberos there is no delegation token to fetch.
    return credentials;
  }
  String jhsAddress = configuration.get("mapreduce.jobhistory.address");
  HostAndPort hostAndPort = HostAndPort.fromString(jhsAddress);
  try {
    ResourceMgrDelegate rmDelegate = new ResourceMgrDelegate(new YarnConfiguration(configuration));
    MRClientCache clientCache = new MRClientCache(configuration, rmDelegate);
    MRClientProtocol historyProxy = clientCache.getInitializedHSProxy();
    GetDelegationTokenRequest tokenRequest = new GetDelegationTokenRequestPBImpl();
    tokenRequest.setRenewer(YarnUtils.getYarnTokenRenewer(configuration));
    InetSocketAddress jhsSocketAddress =
        new InetSocketAddress(hostAndPort.getHostText(), hostAndPort.getPort());
    // Convert the YARN-level token into a Hadoop security token bound to the JHS address.
    Token<TokenIdentifier> token =
        ConverterUtils.convertFromYarn(historyProxy.getDelegationToken(tokenRequest).getDelegationToken(), jhsSocketAddress);
    credentials.addToken(new Text(token.getService()), token);
    LOG.debug("Adding JobHistoryServer delegation token {}.", token);
    return credentials;
  } catch (Exception e) {
    throw Throwables.propagate(e);
  }
}
Use of org.apache.hadoop.security.token.TokenIdentifier in the Apache Hive project: the testInputFormatWithKerberosToken method of the TestHiveAccumuloHelper class.
@Test
public void testInputFormatWithKerberosToken() throws Exception {
  final JobConf conf = new JobConf();
  final HiveAccumuloHelper accumuloHelper = Mockito.mock(HiveAccumuloHelper.class);
  final AuthenticationToken accumuloToken = Mockito.mock(AuthenticationToken.class);
  final Token wrappedToken = Mockito.mock(Token.class);
  final AccumuloConnectionParameters connParams = Mockito.mock(AccumuloConnectionParameters.class);
  final Connector conn = Mockito.mock(Connector.class);
  final String user = "bob";
  final String instanceName = "accumulo";
  final String zookeepers = "host1:2181,host2:2181,host3:2181";
  UserGroupInformation testUgi = UserGroupInformation.createUserForTesting(user, new String[0]);
  // Use real implementations for the methods under test.
  Mockito.doCallRealMethod().when(accumuloHelper).updateOutputFormatConfWithAccumuloToken(conf, testUgi, connParams);
  Mockito.doCallRealMethod().when(accumuloHelper).updateInputFormatConfWithAccumuloToken(conf, testUgi, connParams);
  Mockito.doCallRealMethod().when(accumuloHelper).updateConfWithAccumuloToken(conf, testUgi, connParams, true);
  // Stub out the collaborators the real methods will touch.
  Mockito.when(connParams.getConnector()).thenReturn(conn);
  Mockito.when(accumuloHelper.getDelegationToken(conn)).thenReturn(accumuloToken);
  Mockito.when(accumuloHelper.getHadoopToken(accumuloToken)).thenReturn(wrappedToken);
  // Stub the connection-parameter accessors for a SASL-enabled cluster.
  Mockito.when(connParams.useSasl()).thenReturn(true);
  Mockito.when(connParams.getAccumuloUserName()).thenReturn(user);
  Mockito.when(connParams.getAccumuloInstanceName()).thenReturn(instanceName);
  Mockito.when(connParams.getZooKeepers()).thenReturn(zookeepers);
  // Exercise the InputFormat path with Kerberos credentials present.
  Mockito.when(accumuloHelper.hasKerberosCredentials(testUgi)).thenReturn(true);
  accumuloHelper.updateInputFormatConfWithAccumuloToken(conf, testUgi, connParams);
  // The helper must wire connector info and merge the token into the JobConf.
  Mockito.verify(accumuloHelper).setInputFormatConnectorInfo(conf, user, accumuloToken);
  Mockito.verify(accumuloHelper).mergeTokenIntoJobConf(conf, wrappedToken);
  Mockito.verify(accumuloHelper).addTokenFromUserToJobConf(testUgi, conf);
  // The wrapped Hadoop token must end up in the UGI's credential set.
  Collection<Token<? extends TokenIdentifier>> tokens = testUgi.getTokens();
  Assert.assertEquals(1, tokens.size());
  Assert.assertEquals(wrappedToken, tokens.iterator().next());
}
Aggregations