Example 36 with DataOutputBuffer

Use of org.apache.hadoop.io.DataOutputBuffer in project hadoop by apache.

From the class TestPBRecordImpl, method testLocalizerStatusSerDe.

@Test(timeout = 10000)
public void testLocalizerStatusSerDe() throws Exception {
    LocalizerStatus rsrcS = createLocalizerStatus();
    assertTrue(rsrcS instanceof LocalizerStatusPBImpl);
    LocalizerStatusPBImpl rsrcPb = (LocalizerStatusPBImpl) rsrcS;
    DataOutputBuffer out = new DataOutputBuffer();
    // serialize the proto in length-delimited form into the in-memory buffer
    rsrcPb.getProto().writeDelimitedTo(out);
    DataInputBuffer in = new DataInputBuffer();
    // getData() can be larger than what was written, so bound the read by getLength()
    in.reset(out.getData(), 0, out.getLength());
    // parse the delimited bytes back into a proto
    LocalizerStatusProto rsrcPbD = LocalizerStatusProto.parseDelimitedFrom(in);
    assertNotNull(rsrcPbD);
    LocalizerStatus rsrcD = new LocalizerStatusPBImpl(rsrcPbD);
    assertEquals(rsrcS, rsrcD);
    assertEquals("localizer0", rsrcS.getLocalizerId());
    assertEquals("localizer0", rsrcD.getLocalizerId());
    assertEquals(createLocalResourceStatus(), rsrcS.getResourceStatus(0));
    assertEquals(createLocalResourceStatus(), rsrcD.getResourceStatus(0));
}
Also used: DataInputBuffer (org.apache.hadoop.io.DataInputBuffer), DataOutputBuffer (org.apache.hadoop.io.DataOutputBuffer), LocalizerStatus (org.apache.hadoop.yarn.server.nodemanager.api.protocolrecords.LocalizerStatus), LocalizerStatusProto (org.apache.hadoop.yarn.proto.YarnServerNodemanagerServiceProtos.LocalizerStatusProto), Test (org.junit.Test)

Example 37 with DataOutputBuffer

Use of org.apache.hadoop.io.DataOutputBuffer in project hadoop by apache.

From the class TestPBRecordImpl, method testLocalizerHeartbeatResponseSerDe.

@Test(timeout = 10000)
public void testLocalizerHeartbeatResponseSerDe() throws Exception {
    LocalizerHeartbeatResponse rsrcS = createLocalizerHeartbeatResponse();
    assertTrue(rsrcS instanceof LocalizerHeartbeatResponsePBImpl);
    LocalizerHeartbeatResponsePBImpl rsrcPb = (LocalizerHeartbeatResponsePBImpl) rsrcS;
    DataOutputBuffer out = new DataOutputBuffer();
    rsrcPb.getProto().writeDelimitedTo(out);
    DataInputBuffer in = new DataInputBuffer();
    in.reset(out.getData(), 0, out.getLength());
    LocalizerHeartbeatResponseProto rsrcPbD = LocalizerHeartbeatResponseProto.parseDelimitedFrom(in);
    assertNotNull(rsrcPbD);
    LocalizerHeartbeatResponse rsrcD = new LocalizerHeartbeatResponsePBImpl(rsrcPbD);
    assertEquals(rsrcS, rsrcD);
    assertEquals(createResource(), rsrcS.getResourceSpecs().get(0).getResource());
    assertEquals(createResource(), rsrcD.getResourceSpecs().get(0).getResource());
}
Also used: LocalizerHeartbeatResponseProto (org.apache.hadoop.yarn.proto.YarnServerNodemanagerServiceProtos.LocalizerHeartbeatResponseProto), DataInputBuffer (org.apache.hadoop.io.DataInputBuffer), DataOutputBuffer (org.apache.hadoop.io.DataOutputBuffer), LocalizerHeartbeatResponse (org.apache.hadoop.yarn.server.nodemanager.api.protocolrecords.LocalizerHeartbeatResponse), Test (org.junit.Test)

Example 38 with DataOutputBuffer

Use of org.apache.hadoop.io.DataOutputBuffer in project hadoop by apache.

From the class ApplicationAttemptStateDataPBImpl, method convertCredentialsToByteBuffer.

private static ByteBuffer convertCredentialsToByteBuffer(Credentials credentials) {
    ByteBuffer appAttemptTokens = null;
    DataOutputBuffer dob = new DataOutputBuffer();
    try {
        if (credentials != null) {
            credentials.writeTokenStorageToStream(dob);
            appAttemptTokens = ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
        }
        return appAttemptTokens;
    } catch (IOException e) {
        LOG.error("Failed to convert Credentials to ByteBuffer.");
        assert false;
        return null;
    } finally {
        IOUtils.closeStream(dob);
    }
}
Also used: DataOutputBuffer (org.apache.hadoop.io.DataOutputBuffer), IOException (java.io.IOException), ByteBuffer (java.nio.ByteBuffer), DataInputByteBuffer (org.apache.hadoop.io.DataInputByteBuffer)
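
This example only converts in one direction. For reference, a minimal sketch of the reverse conversion follows. DataInputByteBuffer, Credentials#readTokenStorageStream and IOUtils.closeStream are existing Hadoop APIs, and LOG stands for the class logger used in the example above; the method name convertCredentialsFromByteBuffer and its exact shape are illustrative assumptions, not a quote from the Hadoop source.

private static Credentials convertCredentialsFromByteBuffer(ByteBuffer appAttemptTokens) {
    // Sketch (assumption): rebuild Credentials from the ByteBuffer produced above.
    DataInputByteBuffer dibb = new DataInputByteBuffer();
    try {
        Credentials credentials = null;
        if (appAttemptTokens != null) {
            credentials = new Credentials();
            // duplicate() so the caller's buffer position is left untouched
            dibb.reset(appAttemptTokens.duplicate());
            credentials.readTokenStorageStream(dibb);
        }
        return credentials;
    } catch (IOException e) {
        LOG.error("Failed to convert ByteBuffer to Credentials.");
        return null;
    } finally {
        IOUtils.closeStream(dibb);
    }
}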

Example 39 with DataOutputBuffer

Use of org.apache.hadoop.io.DataOutputBuffer in project hadoop by apache.

From the class TestRMRestart, method testDelegationTokenRestoredInDelegationTokenRenewer.

@Test(timeout = 60000)
public void testDelegationTokenRestoredInDelegationTokenRenewer() throws Exception {
    conf.setInt(YarnConfiguration.RM_AM_MAX_ATTEMPTS, 2);
    conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
    UserGroupInformation.setConfiguration(conf);
    MemoryRMStateStore memStore = new MemoryRMStateStore();
    memStore.init(conf);
    RMState rmState = memStore.getState();
    Map<ApplicationId, ApplicationStateData> rmAppState = rmState.getApplicationState();
    MockRM rm1 = new TestSecurityMockRM(conf, memStore);
    rm1.start();
    HashSet<Token<RMDelegationTokenIdentifier>> tokenSet = new HashSet<Token<RMDelegationTokenIdentifier>>();
    // create an empty credential
    Credentials ts = new Credentials();
    // create tokens and add into credential
    Text userText1 = new Text("user1");
    RMDelegationTokenIdentifier dtId1 = new RMDelegationTokenIdentifier(userText1, new Text("renewer1"), userText1);
    Token<RMDelegationTokenIdentifier> token1 = new Token<RMDelegationTokenIdentifier>(dtId1, rm1.getRMContext().getRMDelegationTokenSecretManager());
    SecurityUtil.setTokenService(token1, rmAddr);
    ts.addToken(userText1, token1);
    tokenSet.add(token1);
    Text userText2 = new Text("user2");
    RMDelegationTokenIdentifier dtId2 = new RMDelegationTokenIdentifier(userText2, new Text("renewer2"), userText2);
    Token<RMDelegationTokenIdentifier> token2 = new Token<RMDelegationTokenIdentifier>(dtId2, rm1.getRMContext().getRMDelegationTokenSecretManager());
    SecurityUtil.setTokenService(token2, rmAddr);
    ts.addToken(userText2, token2);
    tokenSet.add(token2);
    // submit an app with customized credential
    RMApp app = rm1.submitApp(200, "name", "user", new HashMap<ApplicationAccessType, String>(), false, "default", 1, ts);
    // assert app info is saved
    ApplicationStateData appState = rmAppState.get(app.getApplicationId());
    Assert.assertNotNull(appState);
    // assert delegation tokens exist in rm1 DelegationTokenRenewer
    Assert.assertEquals(tokenSet, rm1.getRMContext().getDelegationTokenRenewer().getDelegationTokens());
    // assert delegation tokens are saved
    DataOutputBuffer dob = new DataOutputBuffer();
    ts.writeTokenStorageToStream(dob);
    ByteBuffer securityTokens = ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
    securityTokens.rewind();
    Assert.assertEquals(securityTokens, appState.getApplicationSubmissionContext().getAMContainerSpec().getTokens());
    // start new RM
    MockRM rm2 = new TestSecurityMockRM(conf, memStore);
    rm2.start();
    // Need to wait for a while as now token renewal happens on another thread
    // and is asynchronous in nature.
    waitForTokensToBeRenewed(rm2, tokenSet);
    // verify tokens are properly populated back to rm2 DelegationTokenRenewer
    Assert.assertEquals(tokenSet, rm2.getRMContext().getDelegationTokenRenewer().getDelegationTokens());
}
Also used: RMApp (org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp), Token (org.apache.hadoop.security.token.Token), Text (org.apache.hadoop.io.Text), ApplicationStateData (org.apache.hadoop.yarn.server.resourcemanager.recovery.records.ApplicationStateData), RMDelegationTokenIdentifier (org.apache.hadoop.yarn.security.client.RMDelegationTokenIdentifier), ByteBuffer (java.nio.ByteBuffer), MemoryRMStateStore (org.apache.hadoop.yarn.server.resourcemanager.recovery.MemoryRMStateStore), ApplicationAccessType (org.apache.hadoop.yarn.api.records.ApplicationAccessType), DataOutputBuffer (org.apache.hadoop.io.DataOutputBuffer), ApplicationId (org.apache.hadoop.yarn.api.records.ApplicationId), RMState (org.apache.hadoop.yarn.server.resourcemanager.recovery.RMStateStore.RMState), Credentials (org.apache.hadoop.security.Credentials), HashSet (java.util.HashSet), Test (org.junit.Test)

Example 40 with DataOutputBuffer

Use of org.apache.hadoop.io.DataOutputBuffer in project hadoop by apache.

From the class TestJspHelper, method testReadWriteReplicaState.

@Test
public void testReadWriteReplicaState() {
    try {
        DataOutputBuffer out = new DataOutputBuffer();
        DataInputBuffer in = new DataInputBuffer();
        for (HdfsServerConstants.ReplicaState repState : HdfsServerConstants.ReplicaState.values()) {
            repState.write(out);
            in.reset(out.getData(), out.getLength());
            HdfsServerConstants.ReplicaState result = HdfsServerConstants.ReplicaState.read(in);
            assertTrue("ReplicaState round trip mismatch for " + repState, repState == result);
            out.reset();
            in.reset();
        }
    } catch (Exception ex) {
        fail("ReplicaState read/write round trip threw " + ex);
    }
}
Also used: DataInputBuffer (org.apache.hadoop.io.DataInputBuffer), DataOutputBuffer (org.apache.hadoop.io.DataOutputBuffer), AuthorizationException (org.apache.hadoop.security.authorize.AuthorizationException), IOException (java.io.IOException), Test (org.junit.Test)
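
The out/in round trip exercised here for ReplicaState is the same pattern used with any Writable implementation. A minimal, self-contained sketch using org.apache.hadoop.io.Text (chosen purely for illustration) could look like this:

import java.io.IOException;
import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.Text;

public class WritableRoundTrip {
    public static void main(String[] args) throws IOException {
        DataOutputBuffer out = new DataOutputBuffer();
        DataInputBuffer in = new DataInputBuffer();
        Text written = new Text("round-trip-demo");
        // serialize the Writable into the in-memory output buffer
        written.write(out);
        // hand the backing array to the input buffer, bounded by getLength()
        in.reset(out.getData(), 0, out.getLength());
        Text read = new Text();
        read.readFields(in);
        System.out.println(written.equals(read)); // true
    }
}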

Aggregations

DataOutputBuffer (org.apache.hadoop.io.DataOutputBuffer): 196
Test (org.junit.Test): 66
ByteBuffer (java.nio.ByteBuffer): 54
Credentials (org.apache.hadoop.security.Credentials): 49
IOException (java.io.IOException): 47
DataInputBuffer (org.apache.hadoop.io.DataInputBuffer): 46
Token (org.apache.hadoop.security.token.Token): 36
Configuration (org.apache.hadoop.conf.Configuration): 35
Path (org.apache.hadoop.fs.Path): 31
HashMap (java.util.HashMap): 28
ApplicationId (org.apache.hadoop.yarn.api.records.ApplicationId): 26
Text (org.apache.hadoop.io.Text): 25
YarnConfiguration (org.apache.hadoop.yarn.conf.YarnConfiguration): 24
ArrayList (java.util.ArrayList): 22
ContainerLaunchContext (org.apache.hadoop.yarn.api.records.ContainerLaunchContext): 22
DataInputStream (java.io.DataInputStream): 21
LocalResource (org.apache.hadoop.yarn.api.records.LocalResource): 19
Map (java.util.Map): 16
Random (java.util.Random): 15
FileSystem (org.apache.hadoop.fs.FileSystem): 15
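
One of the most common pairings in this table is DataOutputBuffer with java.nio.ByteBuffer, as in Examples 38 and 39. The essential detail is that getData() exposes a backing array that may be larger than the bytes actually written, so any wrap must be bounded by getLength(). A minimal sketch, with the written value an arbitrary placeholder:

import java.io.IOException;
import java.nio.ByteBuffer;
import org.apache.hadoop.io.DataOutputBuffer;

public class ByteBufferWrapSketch {
    public static void main(String[] args) throws IOException {
        DataOutputBuffer dob = new DataOutputBuffer();
        dob.writeUTF("payload"); // any DataOutput-style writes would do
        // Bound the wrap by getLength(): getData().length can exceed it.
        ByteBuffer buf = ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
        System.out.println(buf.remaining() == dob.getLength()); // true
    }
}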