Use of org.apache.hadoop.io.DataInputByteBuffer in project hadoop by apache.
The class TestDelegationTokenRenewer, method testReplaceExpiringDelegationToken.
@Test(timeout = 20000)
public void testReplaceExpiringDelegationToken() throws Exception {
  conf.setBoolean(YarnConfiguration.RM_PROXY_USER_PRIVILEGES_ENABLED, true);
  conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION,
      "kerberos");
  UserGroupInformation.setConfiguration(conf);
  // create token1
  Text userText1 = new Text("user1");
  DelegationTokenIdentifier dtId1 =
      new DelegationTokenIdentifier(userText1, new Text("renewer1"), userText1);
  // set max date to 0 to simulate an expiring token
  dtId1.setMaxDate(0);
  final Token<DelegationTokenIdentifier> token1 =
      new Token<DelegationTokenIdentifier>(dtId1.getBytes(),
          "password1".getBytes(), dtId1.getKind(), new Text("service1"));
  // create token2
  Text userText2 = new Text("user2");
  DelegationTokenIdentifier dtId2 =
      new DelegationTokenIdentifier(userText1, new Text("renewer2"), userText2);
  final Token<DelegationTokenIdentifier> expectedToken =
      new Token<DelegationTokenIdentifier>(dtId2.getBytes(),
          "password2".getBytes(), dtId2.getKind(), new Text("service2"));
  final MockRM rm = new TestSecurityMockRM(conf, null) {
    @Override
    protected DelegationTokenRenewer createDelegationTokenRenewer() {
      return new DelegationTokenRenewer() {
        @Override
        protected Token<?>[] obtainSystemTokensForUser(String user,
            final Credentials credentials) throws IOException {
          credentials.addToken(expectedToken.getService(), expectedToken);
          return new Token<?>[] { expectedToken };
        }
      };
    }
  };
  rm.start();
  Credentials credentials = new Credentials();
  credentials.addToken(userText1, token1);
  RMApp app = rm.submitApp(200, "name", "user",
      new HashMap<ApplicationAccessType, String>(), false, "default", 1,
      credentials);
  // wait for the initial expiring hdfs token to be removed from allTokens
  GenericTestUtils.waitFor(new Supplier<Boolean>() {
    public Boolean get() {
      return rm.getRMContext().getDelegationTokenRenewer()
          .getAllTokens().get(token1) == null;
    }
  }, 1000, 20000);
  // wait for the initial expiring hdfs token to be removed from appTokens
  GenericTestUtils.waitFor(new Supplier<Boolean>() {
    public Boolean get() {
      return !rm.getRMContext().getDelegationTokenRenewer()
          .getDelegationTokens().contains(token1);
    }
  }, 1000, 20000);
  // wait for the newly retrieved hdfs token
  GenericTestUtils.waitFor(new Supplier<Boolean>() {
    public Boolean get() {
      return rm.getRMContext().getDelegationTokenRenewer()
          .getDelegationTokens().contains(expectedToken);
    }
  }, 1000, 20000);
  // check that the NM can retrieve the token via the heartbeat response
  final MockNM nm1 =
      new MockNM("127.0.0.1:1234", 15120, rm.getResourceTrackerService());
  nm1.registerNode();
  NodeHeartbeatResponse response = nm1.nodeHeartbeat(true);
  ByteBuffer tokenBuffer =
      response.getSystemCredentialsForApps().get(app.getApplicationId());
  Assert.assertNotNull(tokenBuffer);
  Credentials appCredentials = new Credentials();
  DataInputByteBuffer buf = new DataInputByteBuffer();
  tokenBuffer.rewind();
  buf.reset(tokenBuffer);
  appCredentials.readTokenStorageStream(buf);
  Assert.assertTrue(appCredentials.getAllTokens().contains(expectedToken));
}
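The tail of this test (rewind, reset, readTokenStorageStream) is the standard way to decode a Credentials payload that arrives as a ByteBuffer. The following is a minimal, self-contained sketch of that round trip, not part of the test above; the token id, password, kind, and service strings are made-up placeholders. The rewind() before reset() matters because a buffer handed over from elsewhere may not be positioned at zero.

import java.nio.ByteBuffer;
import org.apache.hadoop.io.DataInputByteBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;

public class CredentialsRoundTrip {
  public static void main(String[] args) throws Exception {
    // Write side: serialize a Credentials object into a ByteBuffer, as the
    // RM does before shipping system credentials to node managers.
    Credentials written = new Credentials();
    written.addToken(new Text("service1"),
        new Token<TokenIdentifier>("id".getBytes(), "pw".getBytes(),
            new Text("kind"), new Text("service1")));
    DataOutputBuffer dob = new DataOutputBuffer();
    written.writeTokenStorageToStream(dob);
    ByteBuffer buffer = ByteBuffer.wrap(dob.getData(), 0, dob.getLength());

    // Read side: DataInputByteBuffer adapts the ByteBuffer to the stream
    // interface that readTokenStorageStream expects.
    buffer.rewind();
    DataInputByteBuffer dibb = new DataInputByteBuffer();
    dibb.reset(buffer);
    Credentials read = new Credentials();
    read.readTokenStorageStream(dibb);
    System.out.println("tokens decoded: " + read.getAllTokens().size());
  }
}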
Use of org.apache.hadoop.io.DataInputByteBuffer in project hadoop by apache.
The class TestTaskAttemptContainerRequest, method testAttemptContainerRequest.
// WARNING: This test must be the only test in this file. This is because
// there is an optimization where the credentials passed in are cached
// statically so they do not need to be recomputed when creating a new
// ContainerLaunchContext. If other tests run first, this code will cache
// their credentials and this test will fail when it looks for the
// credentials it inserted.
@Test
public void testAttemptContainerRequest() throws Exception {
  final Text SECRET_KEY_ALIAS = new Text("secretkeyalias");
  final byte[] SECRET_KEY = "secretkey".getBytes();
  Map<ApplicationAccessType, String> acls =
      new HashMap<ApplicationAccessType, String>(1);
  acls.put(ApplicationAccessType.VIEW_APP, "otheruser");
  ApplicationId appId = ApplicationId.newInstance(1, 1);
  JobId jobId = MRBuilderUtils.newJobId(appId, 1);
  TaskId taskId = MRBuilderUtils.newTaskId(jobId, 1, TaskType.MAP);
  Path jobFile = mock(Path.class);
  EventHandler eventHandler = mock(EventHandler.class);
  TaskAttemptListener taListener = mock(TaskAttemptListener.class);
  when(taListener.getAddress())
      .thenReturn(new InetSocketAddress("localhost", 0));
  JobConf jobConf = new JobConf();
  jobConf.setClass("fs.file.impl", StubbedFS.class, FileSystem.class);
  jobConf.setBoolean("fs.file.impl.disable.cache", true);
  jobConf.set(JobConf.MAPRED_MAP_TASK_ENV, "");
  // set up UGI for security so tokens and keys are preserved
  jobConf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION,
      "kerberos");
  UserGroupInformation.setConfiguration(jobConf);
  Credentials credentials = new Credentials();
  credentials.addSecretKey(SECRET_KEY_ALIAS, SECRET_KEY);
  Token<JobTokenIdentifier> jobToken = new Token<JobTokenIdentifier>(
      "tokenid".getBytes(), "tokenpw".getBytes(),
      new Text("tokenkind"), new Text("tokenservice"));
  TaskAttemptImpl taImpl = new MapTaskAttemptImpl(taskId, 1, eventHandler,
      jobFile, 1, mock(TaskSplitMetaInfo.class), jobConf, taListener,
      jobToken, credentials, new SystemClock(), null);
  jobConf.set(MRJobConfig.APPLICATION_ATTEMPT_ID, taImpl.getID().toString());
  ContainerLaunchContext launchCtx =
      TaskAttemptImpl.createContainerLaunchContext(acls, jobConf, jobToken,
          taImpl.createRemoteTask(), TypeConverter.fromYarn(jobId),
          mock(WrappedJvmID.class), taListener, credentials);
  Assert.assertEquals("ACLs mismatch", acls, launchCtx.getApplicationACLs());
  Credentials launchCredentials = new Credentials();
  DataInputByteBuffer dibb = new DataInputByteBuffer();
  dibb.reset(launchCtx.getTokens());
  launchCredentials.readTokenStorageStream(dibb);
  // verify that all tokens specified for the task attempt are in the
  // launch context
  for (Token<? extends TokenIdentifier> token : credentials.getAllTokens()) {
    Token<? extends TokenIdentifier> launchToken =
        launchCredentials.getToken(token.getService());
    Assert.assertNotNull("Token " + token.getService() + " is missing",
        launchToken);
    Assert.assertEquals("Token " + token.getService() + " mismatch",
        token, launchToken);
  }
  // verify that the secret key is in the launch context
  Assert.assertNotNull("Secret key missing",
      launchCredentials.getSecretKey(SECRET_KEY_ALIAS));
  Assert.assertTrue("Secret key mismatch",
      Arrays.equals(SECRET_KEY, launchCredentials.getSecretKey(SECRET_KEY_ALIAS)));
}
Use of org.apache.hadoop.io.DataInputByteBuffer in project hadoop by apache.
The class ShuffleHandler, method deserializeMetaData.
/**
 * A helper function to deserialize the metadata returned by ShuffleHandler.
 * @param meta the metadata returned by the ShuffleHandler
 * @return the port the Shuffle Handler is listening on to serve shuffle data.
 */
public static int deserializeMetaData(ByteBuffer meta) throws IOException {
  // TODO this should be returning a class not just an int
  DataInputByteBuffer in = new DataInputByteBuffer();
  in.reset(meta);
  int port = in.readInt();
  return port;
}
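The producer side in the same class is serializeMetaData, which writes the port as a single int into a DataOutputBuffer and wraps the result as a ByteBuffer. Assuming that counterpart method (this is a sketch of the pairing, and 13562 is just an example port value), the round trip looks like this:

// Round trip of the shuffle metadata; assumes the serializeMetaData(int)
// counterpart in ShuffleHandler, with 13562 as an arbitrary example port.
ByteBuffer meta = ShuffleHandler.serializeMetaData(13562);
int port = ShuffleHandler.deserializeMetaData(meta);
assert port == 13562;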
Use of org.apache.hadoop.io.DataInputByteBuffer in project hadoop by apache.
The class YarnClientImpl, method addTimelineDelegationToken.
private void addTimelineDelegationToken(ContainerLaunchContext clc)
    throws YarnException, IOException {
  Credentials credentials = new Credentials();
  DataInputByteBuffer dibb = new DataInputByteBuffer();
  ByteBuffer tokens = clc.getTokens();
  if (tokens != null) {
    dibb.reset(tokens);
    credentials.readTokenStorageStream(dibb);
    tokens.rewind();
  }
  // if a timeline delegation token is already present, there is nothing to do
  for (org.apache.hadoop.security.token.Token<? extends TokenIdentifier> token
      : credentials.getAllTokens()) {
    if (token.getKind().equals(TimelineDelegationTokenIdentifier.KIND_NAME)) {
      return;
    }
  }
  org.apache.hadoop.security.token.Token<TimelineDelegationTokenIdentifier>
      timelineDelegationToken = getTimelineDelegationToken();
  if (timelineDelegationToken == null) {
    return;
  }
  credentials.addToken(timelineService, timelineDelegationToken);
  if (LOG.isDebugEnabled()) {
    LOG.debug("Add timeline delegation token into credentials: "
        + timelineDelegationToken);
  }
  DataOutputBuffer dob = new DataOutputBuffer();
  credentials.writeTokenStorageToStream(dob);
  tokens = ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
  clc.setTokens(tokens);
}
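The same read-modify-write sequence generalizes to appending any token to a ContainerLaunchContext. Below is a hedged sketch of that pattern; appendToken is a hypothetical helper name of ours, not a YARN API:

// Sketch: append one token to a ContainerLaunchContext's credentials.
// appendToken is a hypothetical helper, not part of the YARN API.
static void appendToken(ContainerLaunchContext clc, Text service,
    org.apache.hadoop.security.token.Token<? extends TokenIdentifier> token)
    throws IOException {
  Credentials credentials = new Credentials();
  ByteBuffer tokens = clc.getTokens();
  if (tokens != null) {
    DataInputByteBuffer dibb = new DataInputByteBuffer();
    dibb.reset(tokens);
    credentials.readTokenStorageStream(dibb);
    // leave the shared buffer readable for any other consumer
    tokens.rewind();
  }
  credentials.addToken(service, token);
  DataOutputBuffer dob = new DataOutputBuffer();
  credentials.writeTokenStorageToStream(dob);
  clc.setTokens(ByteBuffer.wrap(dob.getData(), 0, dob.getLength()));
}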
Use of org.apache.hadoop.io.DataInputByteBuffer in project geowave by locationtech.
The class NNMapReduceTest, method testWritable.
@Test
public void testWritable() throws IOException {
  final PartitionDataWritable writable1 = new PartitionDataWritable();
  final PartitionDataWritable writable2 = new PartitionDataWritable();
  writable1.setPartitionData(new PartitionData(
      new ByteArray(new byte[] {}), new ByteArray("abc"), true));
  writable2.setPartitionData(new PartitionData(
      new ByteArray(new byte[] {}), new ByteArray("abc"), false));
  assertTrue(writable1.compareTo(writable2) == 0);
  writable2.setPartitionData(new PartitionData(
      new ByteArray(new byte[] {}), new ByteArray("abd"), false));
  assertTrue(writable1.compareTo(writable2) < 0);
  writable2.setPartitionData(new PartitionData(
      new ByteArray(new byte[] {}), new ByteArray("abd"), true));
  assertTrue(writable1.compareTo(writable2) < 0);
  final DataOutputBuffer output = new DataOutputBuffer();
  writable1.write(output);
  output.flush();
  final DataInputByteBuffer input = new DataInputByteBuffer();
  input.reset(ByteBuffer.wrap(output.getData()));
  writable2.readFields(input);
  assertTrue(writable1.compareTo(writable2) == 0);
}
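One subtlety in the test above: output.getData() returns the whole backing array, which can be longer than the valid data, so the wrapped buffer may carry trailing unused bytes. That is harmless here because readFields only consumes what it needs, but wrapping with getLength() is the safer habit. A minimal round trip with Hadoop's own Text writable shows the pattern:

// Generic Writable round trip through DataOutputBuffer/DataInputByteBuffer.
// Wrapping with getLength() excludes unused bytes in the backing array.
Text original = new Text("hello");
DataOutputBuffer out = new DataOutputBuffer();
original.write(out);
DataInputByteBuffer in = new DataInputByteBuffer();
in.reset(ByteBuffer.wrap(out.getData(), 0, out.getLength()));
Text copy = new Text();
copy.readFields(in);
assert original.equals(copy);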