use of org.apache.hadoop.io.DataInputByteBuffer in project weave by continuuity.
the class YarnUtils method decodeCredentials.
/**
 * Decodes {@link Credentials} from the given buffer.
 * If the buffer is null or empty, it returns an empty Credentials.
 */
public static Credentials decodeCredentials(ByteBuffer buffer) throws IOException {
  Credentials credentials = new Credentials();
  if (buffer != null && buffer.hasRemaining()) {
    DataInputByteBuffer in = new DataInputByteBuffer();
    in.reset(buffer);
    credentials.readTokenStorageStream(in);
  }
  return credentials;
}
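For orientation, the encoding direction is the mirror image, using the same Credentials write API that appears later on this page (DataOutputBuffer plus writeTokenStorageToStream). A minimal sketch, not taken from the weave source, with the helper name encodeCredentials chosen only for illustration:

public static ByteBuffer encodeCredentials(Credentials credentials) throws IOException {
  // illustrative counterpart to decodeCredentials: serialize the credentials
  // into a ByteBuffer that decodeCredentials can read back
  DataOutputBuffer out = new DataOutputBuffer();
  credentials.writeTokenStorageToStream(out);
  return ByteBuffer.wrap(out.getData(), 0, out.getLength());
}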
use of org.apache.hadoop.io.DataInputByteBuffer in project hadoop by apache.
the class TestTaskAttemptContainerRequest method testAttemptContainerRequest.
// WARNING: This test must be the only test in this file. This is because
// there is an optimization where the credentials passed in are cached
// statically, so they do not need to be recomputed when creating a new
// ContainerLaunchContext. If other tests run first, this code will cache
// their credentials and this test will fail looking for the credentials
// it inserted.
@Test
public void testAttemptContainerRequest() throws Exception {
  final Text SECRET_KEY_ALIAS = new Text("secretkeyalias");
  final byte[] SECRET_KEY = ("secretkey").getBytes();
  Map<ApplicationAccessType, String> acls =
      new HashMap<ApplicationAccessType, String>(1);
  acls.put(ApplicationAccessType.VIEW_APP, "otheruser");
  ApplicationId appId = ApplicationId.newInstance(1, 1);
  JobId jobId = MRBuilderUtils.newJobId(appId, 1);
  TaskId taskId = MRBuilderUtils.newTaskId(jobId, 1, TaskType.MAP);
  Path jobFile = mock(Path.class);
  EventHandler eventHandler = mock(EventHandler.class);
  TaskAttemptListener taListener = mock(TaskAttemptListener.class);
  when(taListener.getAddress()).thenReturn(new InetSocketAddress("localhost", 0));
  JobConf jobConf = new JobConf();
  jobConf.setClass("fs.file.impl", StubbedFS.class, FileSystem.class);
  jobConf.setBoolean("fs.file.impl.disable.cache", true);
  jobConf.set(JobConf.MAPRED_MAP_TASK_ENV, "");
  // setup UGI for security so tokens and keys are preserved
  jobConf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
  UserGroupInformation.setConfiguration(jobConf);
  Credentials credentials = new Credentials();
  credentials.addSecretKey(SECRET_KEY_ALIAS, SECRET_KEY);
  Token<JobTokenIdentifier> jobToken = new Token<JobTokenIdentifier>(
      ("tokenid").getBytes(), ("tokenpw").getBytes(),
      new Text("tokenkind"), new Text("tokenservice"));
  TaskAttemptImpl taImpl = new MapTaskAttemptImpl(taskId, 1, eventHandler, jobFile, 1,
      mock(TaskSplitMetaInfo.class), jobConf, taListener, jobToken, credentials,
      SystemClock.getInstance(), null);
  jobConf.set(MRJobConfig.APPLICATION_ATTEMPT_ID, taImpl.getID().toString());
  ContainerLaunchContext launchCtx = TaskAttemptImpl.createContainerLaunchContext(
      acls, jobConf, jobToken, taImpl.createRemoteTask(), TypeConverter.fromYarn(jobId),
      mock(WrappedJvmID.class), taListener, credentials);
  Assert.assertEquals("ACLs mismatch", acls, launchCtx.getApplicationACLs());
  Credentials launchCredentials = new Credentials();
  DataInputByteBuffer dibb = new DataInputByteBuffer();
  dibb.reset(launchCtx.getTokens());
  launchCredentials.readTokenStorageStream(dibb);
  // verify all tokens specified for the task attempt are in the launch context
  for (Token<? extends TokenIdentifier> token : credentials.getAllTokens()) {
    Token<? extends TokenIdentifier> launchToken =
        launchCredentials.getToken(token.getService());
    Assert.assertNotNull("Token " + token.getService() + " is missing", launchToken);
    Assert.assertEquals("Token " + token.getService() + " mismatch", token, launchToken);
  }
  // verify the secret key is in the launch context
  Assert.assertNotNull("Secret key missing",
      launchCredentials.getSecretKey(SECRET_KEY_ALIAS));
  Assert.assertTrue("Secret key mismatch",
      Arrays.equals(SECRET_KEY, launchCredentials.getSecretKey(SECRET_KEY_ALIAS)));
}
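Stripped of the MapReduce scaffolding, the round trip this test depends on is plain Credentials serialization into a ByteBuffer and back. A minimal sketch using only APIs already shown on this page (the alias and key bytes are illustrative):

Credentials original = new Credentials();
original.addSecretKey(new Text("secretkeyalias"), "secretkey".getBytes());
// serialize into a ByteBuffer, the form a ContainerLaunchContext carries
DataOutputBuffer dob = new DataOutputBuffer();
original.writeTokenStorageToStream(dob);
ByteBuffer tokens = ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
// read it back through DataInputByteBuffer
Credentials restored = new Credentials();
DataInputByteBuffer dibb = new DataInputByteBuffer();
dibb.reset(tokens);
restored.readTokenStorageStream(dibb);
Assert.assertArrayEquals("secretkey".getBytes(),
    restored.getSecretKey(new Text("secretkeyalias")));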
use of org.apache.hadoop.io.DataInputByteBuffer in project hadoop by apache.
the class TestTaskID method testReadFields.
/**
 * Test of readFields method, of class TaskID.
 */
@Test
public void testReadFields() throws Exception {
  DataOutputByteBuffer out = new DataOutputByteBuffer();
  out.writeInt(0);                                  // task number within the job
  out.writeInt(1);                                  // job number
  WritableUtils.writeVInt(out, 4);                  // length of the job's jtIdentifier Text
  out.write(new byte[] { 0x31, 0x32, 0x33, 0x34 }); // the jtIdentifier bytes: "1234"
  WritableUtils.writeEnum(out, TaskType.REDUCE);    // task type
  DataInputByteBuffer in = new DataInputByteBuffer();
  in.reset(out.getData());
  TaskID instance = new TaskID();
  instance.readFields(in);
  assertEquals("The readFields() method did not produce the expected task ID",
      "task_1234_0001_r_000000", instance.toString());
}
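The DataOutputByteBuffer/DataInputByteBuffer pairing used here works for any Writable, not only TaskID. A minimal sketch (the Text value is only an example):

Text written = new Text("hello");
DataOutputByteBuffer out = new DataOutputByteBuffer();
written.write(out);           // a Writable serializes itself to the buffer
DataInputByteBuffer in = new DataInputByteBuffer();
in.reset(out.getData());      // getData() exposes the written bytes as ByteBuffers
Text read = new Text();
read.readFields(in);          // the Writable deserializes from the same bytes
assertEquals("hello", read.toString());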
use of org.apache.hadoop.io.DataInputByteBuffer in project hadoop by apache.
the class ShuffleHandler method deserializeMetaData.
/**
 * A helper function to deserialize the metadata returned by ShuffleHandler.
 * @param meta the metadata returned by the ShuffleHandler
 * @return the port the Shuffle Handler is listening on to serve shuffle data.
 */
public static int deserializeMetaData(ByteBuffer meta) throws IOException {
  //TODO this should be returning a class not just an int
  DataInputByteBuffer in = new DataInputByteBuffer();
  in.reset(meta);
  int port = in.readInt();
  return port;
}
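The serialization side that produces this metadata is the mirror image: write the port into a DataOutputBuffer and wrap the bytes. A sketch of that counterpart; the real ShuffleHandler helper may differ in details such as versioning:

public static ByteBuffer serializeMetaData(int port) throws IOException {
  // illustrative counterpart to deserializeMetaData: pack the shuffle port
  // into a ByteBuffer that deserializeMetaData can read back
  DataOutputBuffer out = new DataOutputBuffer();
  out.writeInt(port);
  return ByteBuffer.wrap(out.getData(), 0, out.getLength());
}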
use of org.apache.hadoop.io.DataInputByteBuffer in project hadoop by apache.
the class YarnClientImpl method addTimelineDelegationToken.
private void addTimelineDelegationToken(ContainerLaunchContext clc)
    throws YarnException, IOException {
  Credentials credentials = new Credentials();
  DataInputByteBuffer dibb = new DataInputByteBuffer();
  ByteBuffer tokens = clc.getTokens();
  if (tokens != null) {
    dibb.reset(tokens);
    credentials.readTokenStorageStream(dibb);
    tokens.rewind();
  }
  // if a timeline delegation token is already in the launch context, no need to add one more
  for (org.apache.hadoop.security.token.Token<? extends TokenIdentifier> token
      : credentials.getAllTokens()) {
    if (token.getKind().equals(TimelineDelegationTokenIdentifier.KIND_NAME)) {
      return;
    }
  }
  org.apache.hadoop.security.token.Token<TimelineDelegationTokenIdentifier> timelineDelegationToken =
      getTimelineDelegationToken();
  if (timelineDelegationToken == null) {
    return;
  }
  credentials.addToken(timelineService, timelineDelegationToken);
  if (LOG.isDebugEnabled()) {
    LOG.debug("Add timeline delegation token into credentials: " + timelineDelegationToken);
  }
  DataOutputBuffer dob = new DataOutputBuffer();
  credentials.writeTokenStorageToStream(dob);
  tokens = ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
  clc.setTokens(tokens);
}
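Two details of the buffer handling are easy to miss: readTokenStorageStream advances the position of the underlying ByteBuffer, so tokens.rewind() restores it for any later reader, and the early return keeps the method a no-op when a timeline token is already present. The same read-modify-write pattern applies to adding any token to a launch context; a minimal sketch, with the helper name and parameters chosen for illustration:

private static void addTokenToLaunchContext(ContainerLaunchContext clc, Text service,
    org.apache.hadoop.security.token.Token<? extends TokenIdentifier> token) throws IOException {
  // read whatever tokens the launch context already carries
  Credentials credentials = new Credentials();
  ByteBuffer tokens = clc.getTokens();
  if (tokens != null) {
    DataInputByteBuffer dibb = new DataInputByteBuffer();
    dibb.reset(tokens);
    credentials.readTokenStorageStream(dibb);
    tokens.rewind();
  }
  // add the new token and write the combined set back into the launch context
  credentials.addToken(service, token);
  DataOutputBuffer dob = new DataOutputBuffer();
  credentials.writeTokenStorageToStream(dob);
  clc.setTokens(ByteBuffer.wrap(dob.getData(), 0, dob.getLength()));
}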