Use of org.apache.hadoop.io.DataOutputBuffer in project hadoop by apache.
From class TestPBRecordImpl, method testLocalizerStatusSerDe.
@Test(timeout = 10000)
public void testLocalizerStatusSerDe() throws Exception {
  LocalizerStatus rsrcS = createLocalizerStatus();
  assertTrue(rsrcS instanceof LocalizerStatusPBImpl);
  LocalizerStatusPBImpl rsrcPb = (LocalizerStatusPBImpl) rsrcS;
  DataOutputBuffer out = new DataOutputBuffer();
  rsrcPb.getProto().writeDelimitedTo(out);
  DataInputBuffer in = new DataInputBuffer();
  in.reset(out.getData(), 0, out.getLength());
  LocalizerStatusProto rsrcPbD = LocalizerStatusProto.parseDelimitedFrom(in);
  assertNotNull(rsrcPbD);
  LocalizerStatus rsrcD = new LocalizerStatusPBImpl(rsrcPbD);
  assertEquals(rsrcS, rsrcD);
  assertEquals("localizer0", rsrcS.getLocalizerId());
  assertEquals("localizer0", rsrcD.getLocalizerId());
  assertEquals(createLocalResourceStatus(), rsrcS.getResourceStatus(0));
  assertEquals(createLocalResourceStatus(), rsrcD.getResourceStatus(0));
}
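Both SerDe tests in this class follow the same round trip: write the record's underlying proto into a DataOutputBuffer with writeDelimitedTo, wrap the buffer's backing array in a DataInputBuffer, and parse the message back. A minimal generic sketch of that pattern, assuming protobuf-java and hadoop-common are on the classpath (ProtoRoundTrip and its roundTrip helper are illustrative names, not part of the Hadoop test code):

import java.io.IOException;

import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;

import com.google.protobuf.Message;
import com.google.protobuf.Parser;

public final class ProtoRoundTrip {

  /** Serializes a message through a DataOutputBuffer and parses it back from a DataInputBuffer. */
  public static <T extends Message> T roundTrip(T msg, Parser<T> parser) throws IOException {
    DataOutputBuffer out = new DataOutputBuffer();
    msg.writeDelimitedTo(out);                    // length-prefixed write into the growable buffer
    DataInputBuffer in = new DataInputBuffer();
    in.reset(out.getData(), 0, out.getLength());  // wrap the backing array; only getLength() bytes are valid
    return parser.parseDelimitedFrom(in);         // read the single delimited message back
  }
}

With a generated type this would be called as, for example, roundTrip(rsrcPb.getProto(), LocalizerStatusProto.PARSER), assuming the generated class exposes a public PARSER field as protobuf 2.5-style code does.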
Use of org.apache.hadoop.io.DataOutputBuffer in project hadoop by apache.
From class TestPBRecordImpl, method testLocalizerHeartbeatResponseSerDe.
@Test(timeout = 10000)
public void testLocalizerHeartbeatResponseSerDe() throws Exception {
  LocalizerHeartbeatResponse rsrcS = createLocalizerHeartbeatResponse();
  assertTrue(rsrcS instanceof LocalizerHeartbeatResponsePBImpl);
  LocalizerHeartbeatResponsePBImpl rsrcPb = (LocalizerHeartbeatResponsePBImpl) rsrcS;
  DataOutputBuffer out = new DataOutputBuffer();
  rsrcPb.getProto().writeDelimitedTo(out);
  DataInputBuffer in = new DataInputBuffer();
  in.reset(out.getData(), 0, out.getLength());
  LocalizerHeartbeatResponseProto rsrcPbD = LocalizerHeartbeatResponseProto.parseDelimitedFrom(in);
  assertNotNull(rsrcPbD);
  LocalizerHeartbeatResponse rsrcD = new LocalizerHeartbeatResponsePBImpl(rsrcPbD);
  assertEquals(rsrcS, rsrcD);
  assertEquals(createResource(), rsrcS.getResourceSpecs().get(0).getResource());
  assertEquals(createResource(), rsrcD.getResourceSpecs().get(0).getResource());
}
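The same pair of buffers can carry more than one delimited message: successive writeDelimitedTo calls append to the DataOutputBuffer, and repeated parseDelimitedFrom calls consume the records in order. A hedged sketch of that batched variant, again assuming protobuf-java and hadoop-common (ProtoBatchRoundTrip is an illustrative name):

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;

import com.google.protobuf.Message;
import com.google.protobuf.Parser;

public final class ProtoBatchRoundTrip {

  /** Writes each message length-prefixed into one buffer, then reads them all back in write order. */
  public static <T extends Message> List<T> roundTripAll(List<T> msgs, Parser<T> parser)
      throws IOException {
    DataOutputBuffer out = new DataOutputBuffer();
    for (T msg : msgs) {
      msg.writeDelimitedTo(out);                  // each call appends another delimited record
    }
    DataInputBuffer in = new DataInputBuffer();
    in.reset(out.getData(), 0, out.getLength());
    List<T> result = new ArrayList<>(msgs.size());
    for (int i = 0; i < msgs.size(); i++) {
      result.add(parser.parseDelimitedFrom(in));  // one length-prefixed message per call
    }
    return result;
  }
}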
Use of org.apache.hadoop.io.DataOutputBuffer in project hadoop by apache.
From class ApplicationAttemptStateDataPBImpl, method convertCredentialsToByteBuffer.
private static ByteBuffer convertCredentialsToByteBuffer(Credentials credentials) {
  ByteBuffer appAttemptTokens = null;
  DataOutputBuffer dob = new DataOutputBuffer();
  try {
    if (credentials != null) {
      credentials.writeTokenStorageToStream(dob);
      appAttemptTokens = ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
    }
    return appAttemptTokens;
  } catch (IOException e) {
    LOG.error("Failed to convert Credentials to ByteBuffer.");
    assert false;
    return null;
  } finally {
    IOUtils.closeStream(dob);
  }
}
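The reverse conversion reads the token storage back out of the ByteBuffer. A minimal sketch of such a companion helper, assuming only hadoop-common on the classpath (CredentialsBuffers and convertCredentialsFromByteBuffer are illustrative names, not the actual method in ApplicationAttemptStateDataPBImpl):

import java.io.IOException;
import java.nio.ByteBuffer;

import org.apache.hadoop.io.DataInputByteBuffer;
import org.apache.hadoop.security.Credentials;

public final class CredentialsBuffers {

  /** Rebuilds Credentials from a ByteBuffer produced by writeTokenStorageToStream. */
  public static Credentials convertCredentialsFromByteBuffer(ByteBuffer tokens) throws IOException {
    if (tokens == null) {
      return null;
    }
    ByteBuffer copy = tokens.duplicate();  // avoid disturbing the caller's buffer position
    copy.rewind();                         // read from the beginning of the serialized tokens
    DataInputByteBuffer dib = new DataInputByteBuffer();
    dib.reset(copy);                       // expose the buffer as a DataInputStream
    Credentials credentials = new Credentials();
    credentials.readTokenStorageStream(dib);
    return credentials;
  }
}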
Use of org.apache.hadoop.io.DataOutputBuffer in project hadoop by apache.
From class TestRMRestart, method testDelegationTokenRestoredInDelegationTokenRenewer.
@Test(timeout = 60000)
public void testDelegationTokenRestoredInDelegationTokenRenewer() throws Exception {
  conf.setInt(YarnConfiguration.RM_AM_MAX_ATTEMPTS, 2);
  conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
  UserGroupInformation.setConfiguration(conf);
  MemoryRMStateStore memStore = new MemoryRMStateStore();
  memStore.init(conf);
  RMState rmState = memStore.getState();
  Map<ApplicationId, ApplicationStateData> rmAppState = rmState.getApplicationState();
  MockRM rm1 = new TestSecurityMockRM(conf, memStore);
  rm1.start();
  HashSet<Token<RMDelegationTokenIdentifier>> tokenSet = new HashSet<Token<RMDelegationTokenIdentifier>>();
  // create an empty credential
  Credentials ts = new Credentials();
  // create tokens and add them to the credential
  Text userText1 = new Text("user1");
  RMDelegationTokenIdentifier dtId1 = new RMDelegationTokenIdentifier(userText1, new Text("renewer1"), userText1);
  Token<RMDelegationTokenIdentifier> token1 = new Token<RMDelegationTokenIdentifier>(dtId1, rm1.getRMContext().getRMDelegationTokenSecretManager());
  SecurityUtil.setTokenService(token1, rmAddr);
  ts.addToken(userText1, token1);
  tokenSet.add(token1);
  Text userText2 = new Text("user2");
  RMDelegationTokenIdentifier dtId2 = new RMDelegationTokenIdentifier(userText2, new Text("renewer2"), userText2);
  Token<RMDelegationTokenIdentifier> token2 = new Token<RMDelegationTokenIdentifier>(dtId2, rm1.getRMContext().getRMDelegationTokenSecretManager());
  SecurityUtil.setTokenService(token2, rmAddr);
  ts.addToken(userText2, token2);
  tokenSet.add(token2);
  // submit an app with customized credentials
  RMApp app = rm1.submitApp(200, "name", "user", new HashMap<ApplicationAccessType, String>(), false, "default", 1, ts);
  // assert app info is saved
  ApplicationStateData appState = rmAppState.get(app.getApplicationId());
  Assert.assertNotNull(appState);
  // assert delegation tokens exist in rm1's DelegationTokenRenewer
  Assert.assertEquals(tokenSet, rm1.getRMContext().getDelegationTokenRenewer().getDelegationTokens());
  // assert delegation tokens are saved
  DataOutputBuffer dob = new DataOutputBuffer();
  ts.writeTokenStorageToStream(dob);
  ByteBuffer securityTokens = ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
  securityTokens.rewind();
  Assert.assertEquals(securityTokens, appState.getApplicationSubmissionContext().getAMContainerSpec().getTokens());
  // start new RM
  MockRM rm2 = new TestSecurityMockRM(conf, memStore);
  rm2.start();
  // Need to wait for a while as token renewal now happens on another thread
  // and is asynchronous in nature.
  waitForTokensToBeRenewed(rm2, tokenSet);
  // verify tokens are properly populated back into rm2's DelegationTokenRenewer
  Assert.assertEquals(tokenSet, rm2.getRMContext().getDelegationTokenRenewer().getDelegationTokens());
}
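Because token renewal runs on a separate thread, the test has to poll rather than assert immediately. A minimal sketch of what the waitForTokensToBeRenewed helper can look like (the actual helper in TestRMRestart may differ):

private void waitForTokensToBeRenewed(MockRM rm2,
    HashSet<Token<RMDelegationTokenIdentifier>> tokenSet) throws Exception {
  // Poll until the renewer has picked up every restored token, or give up after roughly 10 seconds.
  for (int i = 0; i < 100; i++) {
    if (rm2.getRMContext().getDelegationTokenRenewer()
        .getDelegationTokens().containsAll(tokenSet)) {
      return;
    }
    Thread.sleep(100);
  }
  Assert.fail("Delegation tokens were not restored into the DelegationTokenRenewer in time");
}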
Use of org.apache.hadoop.io.DataOutputBuffer in project hadoop by apache.
From class TestJspHelper, method testReadWriteReplicaState.
@Test
public void testReadWriteReplicaState() {
  try {
    DataOutputBuffer out = new DataOutputBuffer();
    DataInputBuffer in = new DataInputBuffer();
    for (HdfsServerConstants.ReplicaState repState : HdfsServerConstants.ReplicaState.values()) {
      repState.write(out);
      in.reset(out.getData(), out.getLength());
      HdfsServerConstants.ReplicaState result = HdfsServerConstants.ReplicaState.read(in);
      assertTrue("testReadWrite error !!!", repState == result);
      out.reset();
      in.reset();
    }
  } catch (Exception ex) {
    fail("testReadWrite ex error ReplicaState");
  }
}
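The same write/read-back pattern applies to any Writable-style type, not just ReplicaState. A self-contained sketch using Text as a stand-in, assuming only hadoop-common (WritableRoundTrip is an illustrative name):

import java.io.IOException;

import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.Text;

public final class WritableRoundTrip {

  public static void main(String[] args) throws IOException {
    DataOutputBuffer out = new DataOutputBuffer();
    new Text("replica-state-demo").write(out);   // serialize into the growable buffer

    DataInputBuffer in = new DataInputBuffer();
    in.reset(out.getData(), out.getLength());    // two-argument reset starts reading at offset 0

    Text copy = new Text();
    copy.readFields(in);                         // deserialize from the same backing array
    System.out.println(copy);                    // prints "replica-state-demo"

    out.reset();                                 // empty the output buffer so it can be reused
  }
}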