Use of org.apache.hadoop.io.DataInputByteBuffer in project hadoop by apache.
The class TestTaskID, method testWrite.
/**
 * Test of the write method of class TaskID.
 */
@Test
public void testWrite() throws Exception {
  JobID jobId = new JobID("1234", 1);
  TaskID taskId = new TaskID(jobId, TaskType.JOB_SETUP, 0);
  DataOutputByteBuffer out = new DataOutputByteBuffer();

  taskId.write(out);

  DataInputByteBuffer in = new DataInputByteBuffer();
  byte[] buffer = new byte[4];

  in.reset(out.getData());

  assertEquals("The write() method did not write the expected task ID",
      0, in.readInt());
  assertEquals("The write() method did not write the expected job ID",
      1, in.readInt());
  assertEquals("The write() method did not write the expected job "
      + "identifier length", 4, WritableUtils.readVInt(in));

  in.readFully(buffer, 0, 4);

  assertEquals("The write() method did not write the expected job "
      + "identifier", "1234", new String(buffer));
  assertEquals("The write() method did not write the expected task type",
      TaskType.JOB_SETUP, WritableUtils.readEnum(in, TaskType.class));
}
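The test leans on one property of this pair of classes: DataOutputByteBuffer.getData() returns a ByteBuffer[], and DataInputByteBuffer.reset(ByteBuffer...) accepts it directly, so anything written through the output side can be read straight back through the input side. A minimal sketch of that round trip, using Text as a stand-in for any Writable (the class name RoundTripSketch is ours, not Hadoop's):

import org.apache.hadoop.io.DataInputByteBuffer;
import org.apache.hadoop.io.DataOutputByteBuffer;
import org.apache.hadoop.io.Text;

public class RoundTripSketch {
  public static void main(String[] args) throws Exception {
    DataOutputByteBuffer out = new DataOutputByteBuffer();
    new Text("hello").write(out);   // any Writable can serialize itself here
    DataInputByteBuffer in = new DataInputByteBuffer();
    in.reset(out.getData());        // getData() returns ByteBuffer[]; reset(...) is varargs
    Text copy = new Text();
    copy.readFields(in);            // copy now holds "hello"
    System.out.println(copy);
  }
}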
Use of org.apache.hadoop.io.DataInputByteBuffer in project hadoop by apache.
The class TestYarnClient, method testAutomaticTimelineDelegationTokenLoading.
@Test
public void testAutomaticTimelineDelegationTokenLoading() throws Exception {
  Configuration conf = new YarnConfiguration();
  conf.setBoolean(YarnConfiguration.TIMELINE_SERVICE_ENABLED, true);
  SecurityUtil.setAuthenticationMethod(AuthenticationMethod.KERBEROS, conf);
  TimelineDelegationTokenIdentifier timelineDT =
      new TimelineDelegationTokenIdentifier();
  final Token<TimelineDelegationTokenIdentifier> dToken =
      new Token<TimelineDelegationTokenIdentifier>(
          timelineDT.getBytes(), new byte[0], timelineDT.getKind(), new Text());

  // Create a mock client
  YarnClientImpl client = spy(new YarnClientImpl() {

    @Override
    TimelineClient createTimelineClient() throws IOException, YarnException {
      timelineClient = mock(TimelineClient.class);
      when(timelineClient.getDelegationToken(any(String.class)))
          .thenReturn(dToken);
      return timelineClient;
    }

    @Override
    protected void serviceStart() throws Exception {
      rmClient = mock(ApplicationClientProtocol.class);
    }

    @Override
    protected void serviceStop() throws Exception {
    }

    @Override
    public ApplicationReport getApplicationReport(ApplicationId appId) {
      ApplicationReport report = mock(ApplicationReport.class);
      when(report.getYarnApplicationState())
          .thenReturn(YarnApplicationState.RUNNING);
      return report;
    }

    @Override
    public boolean isSecurityEnabled() {
      return true;
    }
  });
  client.init(conf);
  client.start();
  try {
    // When i == 1, the timeline DT is not in the credentials yet, so the
    // client has to fetch one itself.
    for (int i = 0; i < 2; ++i) {
      ApplicationSubmissionContext context =
          mock(ApplicationSubmissionContext.class);
      ApplicationId applicationId = ApplicationId.newInstance(0, i + 1);
      when(context.getApplicationId()).thenReturn(applicationId);
      DataOutputBuffer dob = new DataOutputBuffer();
      Credentials credentials = new Credentials();
      if (i == 0) {
        credentials.addToken(client.timelineService, dToken);
      }
      credentials.writeTokenStorageToStream(dob);
      ByteBuffer tokens = ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
      ContainerLaunchContext clc = ContainerLaunchContext.newInstance(
          null, null, null, null, tokens, null);
      when(context.getAMContainerSpec()).thenReturn(clc);
      client.submitApplication(context);
      if (i == 0) {
        // The token was already present, so getTimelineDelegationToken()
        // should not have been called.
        verify(client, never()).getTimelineDelegationToken();
      }
      // Either way, the token should now be in the launch context.
      credentials = new Credentials();
      DataInputByteBuffer dibb = new DataInputByteBuffer();
      tokens = clc.getTokens();
      if (tokens != null) {
        dibb.reset(tokens);
        credentials.readTokenStorageStream(dibb);
        tokens.rewind();
      }
      Collection<Token<? extends TokenIdentifier>> dTokens =
          credentials.getAllTokens();
      Assert.assertEquals(1, dTokens.size());
      Assert.assertEquals(dToken, dTokens.iterator().next());
    }
  } finally {
    client.stop();
  }
}
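The serialization dance in the middle of this test (Credentials to DataOutputBuffer to ByteBuffer, then ByteBuffer to DataInputByteBuffer back to Credentials) is the same pattern the BuilderUtils and state-store snippets below rely on. Factored out as a pair of helpers, it looks roughly like this; CredentialsBufferSketch is a hypothetical name, not a Hadoop class:

import java.io.IOException;
import java.nio.ByteBuffer;
import org.apache.hadoop.io.DataInputByteBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.security.Credentials;

public final class CredentialsBufferSketch {

  // Credentials -> ByteBuffer, as done before submitApplication() above.
  public static ByteBuffer write(Credentials credentials) throws IOException {
    DataOutputBuffer dob = new DataOutputBuffer();
    credentials.writeTokenStorageToStream(dob);
    // Wrap only the valid region: getData() returns the whole backing array.
    return ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
  }

  // ByteBuffer -> Credentials, as done after submitApplication() above.
  public static Credentials read(ByteBuffer tokens) throws IOException {
    Credentials credentials = new Credentials();
    DataInputByteBuffer dibb = new DataInputByteBuffer();
    dibb.reset(tokens);
    credentials.readTokenStorageStream(dibb);
    // Reading advances the buffer's position; rewind so later readers
    // still see the data -- every snippet on this page does the same.
    tokens.rewind();
    return credentials;
  }
}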
Use of org.apache.hadoop.io.DataInputByteBuffer in project hadoop by apache.
The class BuilderUtils, method parseTokensConf.
public static Configuration parseTokensConf(ApplicationSubmissionContext context)
    throws IOException {
  ByteBuffer tokensConf = context.getAMContainerSpec().getTokensConf();
  if (tokensConf == null) {
    return null;
  }
  DataInputByteBuffer dibb = new DataInputByteBuffer();
  dibb.reset(tokensConf);
  Configuration appConf = new Configuration(false);
  appConf.readFields(dibb);
  tokensConf.rewind();
  return appConf;
}
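parseTokensConf reads back a Configuration that some client serialized into the container spec. Configuration itself is a Writable, so the writer side is the mirror image. A hedged sketch of that writer side, assuming the matching setter on the spec (commented out below) and using a purely illustrative configuration key:

import java.io.IOException;
import java.nio.ByteBuffer;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.DataOutputBuffer;

public final class TokensConfSketch {

  // Serialize a Configuration into the ByteBuffer that
  // parseTokensConf() above will read back.
  public static ByteBuffer write(Configuration tokensConf) throws IOException {
    DataOutputBuffer dob = new DataOutputBuffer();
    tokensConf.write(dob); // Configuration implements Writable
    return ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
  }

  public static void main(String[] args) throws IOException {
    Configuration tokensConf = new Configuration(false);
    tokensConf.set("dfs.nameservices", "ns1"); // illustrative key only
    ByteBuffer buf = write(tokensConf);
    // amContainerSpec.setTokensConf(buf);     // assumed setter on the spec
    System.out.println("serialized " + buf.remaining() + " bytes");
  }
}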
Use of org.apache.hadoop.io.DataInputByteBuffer in project hadoop by apache.
The class BuilderUtils, method parseCredentials.
public static Credentials parseCredentials(ApplicationSubmissionContext application)
    throws IOException {
  Credentials credentials = new Credentials();
  DataInputByteBuffer dibb = new DataInputByteBuffer();
  ByteBuffer tokens = application.getAMContainerSpec().getTokens();
  if (tokens != null) {
    dibb.reset(tokens);
    credentials.readTokenStorageStream(dibb);
    tokens.rewind();
  }
  return credentials;
}
Use of org.apache.hadoop.io.DataInputByteBuffer in project hadoop by apache.
The class ApplicationAttemptStateDataPBImpl, method convertCredentialsFromByteBuffer.
private static Credentials convertCredentialsFromByteBuffer(
    ByteBuffer appAttemptTokens) {
  DataInputByteBuffer dibb = new DataInputByteBuffer();
  try {
    Credentials credentials = null;
    if (appAttemptTokens != null) {
      credentials = new Credentials();
      appAttemptTokens.rewind();
      dibb.reset(appAttemptTokens);
      credentials.readTokenStorageStream(dibb);
    }
    return credentials;
  } catch (IOException e) {
    LOG.error("Failed to convert Credentials from ByteBuffer.", e);
    assert false;
    return null;
  } finally {
    IOUtils.closeStream(dibb);
  }
}