Use of org.apache.hadoop.security.Credentials in project hadoop by apache.
In class TestProtocolRecords, method testNodeHeartBeatResponse:
@Test
public void testNodeHeartBeatResponse() throws IOException {
  NodeHeartbeatResponse record = Records.newRecord(NodeHeartbeatResponse.class);
  Map<ApplicationId, ByteBuffer> appCredentials = new HashMap<ApplicationId, ByteBuffer>();

  // Build a Credentials object holding two tokens for one application.
  Credentials app1Cred = new Credentials();
  Token<DelegationTokenIdentifier> token1 = new Token<DelegationTokenIdentifier>();
  token1.setKind(new Text("kind1"));
  app1Cred.addToken(new Text("token1"), token1);
  Token<DelegationTokenIdentifier> token2 = new Token<DelegationTokenIdentifier>();
  token2.setKind(new Text("kind2"));
  app1Cred.addToken(new Text("token2"), token2);

  // Serialize the tokens into a ByteBuffer, keyed by application id.
  DataOutputBuffer dob = new DataOutputBuffer();
  app1Cred.writeTokenStorageToStream(dob);
  ByteBuffer byteBuffer1 = ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
  appCredentials.put(ApplicationId.newInstance(1234, 1), byteBuffer1);
  record.setSystemCredentialsForApps(appCredentials);

  // Round-trip through the protobuf form and verify the credentials survive.
  NodeHeartbeatResponse proto =
      new NodeHeartbeatResponsePBImpl(((NodeHeartbeatResponsePBImpl) record).getProto());
  Assert.assertEquals(appCredentials, proto.getSystemCredentialsForApps());
}
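On the receiving side the per-application buffer is decoded back into a Credentials object. A minimal sketch of that decoding, assuming a buffer produced by writeTokenStorageToStream as above (the helper name decodeCredentials is illustrative):

import java.io.IOException;
import java.nio.ByteBuffer;
import org.apache.hadoop.io.DataInputByteBuffer;
import org.apache.hadoop.security.Credentials;

// Decode a token buffer written by writeTokenStorageToStream back into Credentials.
static Credentials decodeCredentials(ByteBuffer buffer) throws IOException {
  buffer.rewind();
  DataInputByteBuffer in = new DataInputByteBuffer();
  in.reset(buffer); // DataInputByteBuffer extends DataInputStream
  Credentials credentials = new Credentials();
  credentials.readTokenStorageStream(in); // inverse of writeTokenStorageToStream
  return credentials;
}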
Use of org.apache.hadoop.security.Credentials in project hadoop by apache.
In class TestMerger, method testEncryptedMerger:
@Test
public void testEncryptedMerger() throws Throwable {
  jobConf.setBoolean(MRJobConfig.MR_ENCRYPTED_INTERMEDIATE_DATA, true);
  conf.setBoolean(MRJobConfig.MR_ENCRYPTED_INTERMEDIATE_DATA, true);
  Credentials credentials = UserGroupInformation.getCurrentUser().getCredentials();
  TokenCache.setEncryptedSpillKey(new byte[16], credentials);
  UserGroupInformation.getCurrentUser().addCredentials(credentials);
  testInMemoryAndOnDiskMerger();
}
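The key stored above can be read back through the same TokenCache API. A minimal sketch, assuming the current user's credentials are the ones the test populated (the helper name currentSpillKey is illustrative):

import java.io.IOException;
import org.apache.hadoop.mapreduce.security.TokenCache;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.UserGroupInformation;

// Retrieve the encrypted-spill key that setEncryptedSpillKey stored as a secret key.
static byte[] currentSpillKey() throws IOException {
  Credentials creds = UserGroupInformation.getCurrentUser().getCredentials();
  return TokenCache.getEncryptedSpillKey(creds);
}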
Use of org.apache.hadoop.security.Credentials in project hadoop by apache.
In class TestTokenCache, method testSingleTokenFetch:
@Test
public void testSingleTokenFetch() throws Exception {
  Configuration conf = new Configuration();
  conf.set(YarnConfiguration.RM_PRINCIPAL, "mapred/host@REALM");
  String renewer = Master.getMasterPrincipal(conf);
  Credentials credentials = new Credentials();

  // Mock a filesystem so the token fetch can be observed without a real cluster.
  final MockFileSystem fs = new MockFileSystem();
  final MockFileSystem mockFs = (MockFileSystem) fs.getRawFileSystem();
  when(mockFs.getCanonicalServiceName()).thenReturn("host:0");
  when(mockFs.getUri()).thenReturn(new URI("mockfs://host:0"));
  Path mockPath = mock(Path.class);
  when(mockPath.getFileSystem(conf)).thenReturn(mockFs);

  // Two paths on the same filesystem must trigger only one token fetch.
  Path[] paths = new Path[] { mockPath, mockPath };
  when(mockFs.addDelegationTokens("me", credentials)).thenReturn(null);
  TokenCache.obtainTokensForNamenodesInternal(credentials, paths, conf);
  verify(mockFs, times(1)).addDelegationTokens(renewer, credentials);
}
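Non-test callers reach the same logic through the public wrapper rather than obtainTokensForNamenodesInternal. A hedged sketch, with a hypothetical HDFS path and helper name:

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.security.TokenCache;
import org.apache.hadoop.security.Credentials;

// Obtain delegation tokens for every distinct filesystem behind the given paths.
static void fetchTokens() throws IOException {
  Configuration conf = new Configuration();
  Credentials creds = new Credentials();
  Path[] inputs = { new Path("hdfs://namenode:8020/data") }; // hypothetical path
  TokenCache.obtainTokensForNamenodes(creds, inputs, conf);
}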
Use of org.apache.hadoop.security.Credentials in project storm by apache.
In class AutoHBase, method addTokensToUGI:
public void addTokensToUGI(Subject subject) {
  if (subject != null) {
    Set<Credentials> privateCredentials = subject.getPrivateCredentials(Credentials.class);
    if (privateCredentials != null) {
      for (Credentials cred : privateCredentials) {
        Collection<Token<? extends TokenIdentifier>> allTokens = cred.getAllTokens();
        if (allTokens != null) {
          // Copy every token from the Subject's credentials into the current UGI.
          for (Token<? extends TokenIdentifier> token : allTokens) {
            try {
              UserGroupInformation.getCurrentUser().addToken(token);
              LOG.info("Added delegation tokens to UGI.");
            } catch (IOException e) {
              LOG.error("Exception while trying to add tokens to ugi", e);
            }
          }
        }
      }
    }
  }
}
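A minimal sketch of a caller that hands addTokensToUGI a suitable Subject, assuming a token obtained elsewhere (the alias and helper name are illustrative):

import javax.security.auth.Subject;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.token.Token;

// Wrap a token in a Credentials object and expose it as a private credential
// of a JAAS Subject, the shape addTokensToUGI iterates over.
static Subject subjectWithToken(Token<?> hbaseToken) {
  Credentials creds = new Credentials();
  creds.addToken(new Text("hbase-token"), hbaseToken); // alias is illustrative
  Subject subject = new Subject();
  subject.getPrivateCredentials().add(creds);
  return subject;
}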
Use of org.apache.hadoop.security.Credentials in project storm by apache.
In class AutoHBase, method getCredentials:
/**
 * @param credentials map with creds.
 * @return instance of org.apache.hadoop.security.Credentials.
 * This class's populateCredentials must have been called before.
 */
@SuppressWarnings("unchecked")
protected Object getCredentials(Map<String, String> credentials) {
  Credentials credential = null;
  if (credentials != null && credentials.containsKey(getCredentialKey())) {
    try {
      // Base64-decode the serialized credentials and rebuild them via readFields.
      byte[] credBytes = DatatypeConverter.parseBase64Binary(credentials.get(getCredentialKey()));
      try (ObjectInputStream in = new ObjectInputStream(new ByteArrayInputStream(credBytes))) {
        credential = new Credentials();
        credential.readFields(in);
      }
      LOG.info("Got hbase credentials from credentials Map.");
    } catch (Exception e) {
      LOG.error("Could not obtain credentials from credentials map.", e);
    }
  }
  return credential;
}
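The write side that getCredentials assumes, roughly what populateCredentials would produce: serialize the Credentials through its Writable interface and Base64-encode the bytes. A hedged sketch (the helper name encodeCredentials is illustrative):

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectOutputStream;
import javax.xml.bind.DatatypeConverter;
import org.apache.hadoop.security.Credentials;

// Serialize Credentials and Base64-encode the bytes, mirroring the
// ObjectInputStream/readFields decoding in getCredentials above.
static String encodeCredentials(Credentials credentials) throws IOException {
  ByteArrayOutputStream bytes = new ByteArrayOutputStream();
  try (ObjectOutputStream out = new ObjectOutputStream(bytes)) {
    credentials.write(out); // ObjectOutputStream implements DataOutput
  }
  return DatatypeConverter.printBase64Binary(bytes.toByteArray());
}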