
Example 11 with DataInputByteBuffer

Use of org.apache.hadoop.io.DataInputByteBuffer in project hive by apache.

In the class ShuffleHandler, method deserializeMetaData:

/**
 * A helper function to deserialize the metadata returned by ShuffleHandler.
 * @param meta the metadata returned by the ShuffleHandler
 * @return the port the Shuffle Handler is listening on to serve shuffle data.
 */
public static int deserializeMetaData(ByteBuffer meta) throws IOException {
    //TODO this should be returning a class not just an int
    DataInputByteBuffer in = new DataInputByteBuffer();
    in.reset(meta);
    int port = in.readInt();
    return port;
}
Also used : DataInputByteBuffer(org.apache.hadoop.io.DataInputByteBuffer)
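
The listing only shows the read side. As a rough counterpart, the metadata it consumes could be produced along the following lines; this is a sketch assuming the usual DataOutputBuffer/ByteBuffer.wrap pairing, and the method name serializeMetaData is illustrative rather than quoted from the listing.

static ByteBuffer serializeMetaData(int port) throws IOException {
    // Write the port into a growable byte array...
    DataOutputBuffer portDob = new DataOutputBuffer();
    portDob.writeInt(port);
    // ...and wrap it as the ByteBuffer that deserializeMetaData(ByteBuffer) reads back.
    return ByteBuffer.wrap(portDob.getData(), 0, portDob.getLength());
}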

Example 12 with DataInputByteBuffer

Use of org.apache.hadoop.io.DataInputByteBuffer in project apex-core by apache.

In the class StreamingContainerManagerTest, method testDeployInfoSerialization:

@Test
public void testDeployInfoSerialization() throws Exception {
    OperatorDeployInfo ndi = new OperatorDeployInfo();
    ndi.name = "node1";
    ndi.type = OperatorDeployInfo.OperatorType.GENERIC;
    ndi.id = 1;
    ndi.contextAttributes = new com.datatorrent.api.Attribute.AttributeMap.DefaultAttributeMap();
    ndi.contextAttributes.put(OperatorContext.SPIN_MILLIS, 100);
    OperatorDeployInfo.InputDeployInfo input = new OperatorDeployInfo.InputDeployInfo();
    input.declaredStreamId = "streamToNode";
    input.portName = "inputPortNameOnNode";
    input.sourceNodeId = 99;
    ndi.inputs = new ArrayList<>();
    ndi.inputs.add(input);
    OperatorDeployInfo.OutputDeployInfo output = new OperatorDeployInfo.OutputDeployInfo();
    output.declaredStreamId = "streamFromNode";
    output.portName = "outputPortNameOnNode";
    ndi.outputs = new ArrayList<>();
    ndi.outputs.add(output);
    ContainerHeartbeatResponse scc = new ContainerHeartbeatResponse();
    scc.deployRequest = Collections.singletonList(ndi);
    // Serialize the heartbeat response into byte buffers...
    DataOutputByteBuffer out = new DataOutputByteBuffer();
    scc.write(out);
    // ...and read it back through DataInputByteBuffer to verify the round trip.
    DataInputByteBuffer in = new DataInputByteBuffer();
    in.reset(out.getData());
    ContainerHeartbeatResponse clone = new ContainerHeartbeatResponse();
    clone.readFields(in);
    Assert.assertNotNull(clone.deployRequest);
    Assert.assertEquals(1, clone.deployRequest.size());
    OperatorDeployInfo ndiClone = clone.deployRequest.get(0);
    Assert.assertEquals("name", ndi.name, ndiClone.name);
    Assert.assertEquals("type", ndi.type, ndiClone.type);
    String nodeToString = ndi.toString();
    Assert.assertTrue(nodeToString.contains(input.portName));
    Assert.assertTrue(nodeToString.contains(output.portName));
    Assert.assertEquals("contextAttributes " + ndiClone.contextAttributes, Integer.valueOf(100), ndiClone.contextAttributes.get(OperatorContext.SPIN_MILLIS));
}
Also used : InputDeployInfo(com.datatorrent.stram.api.OperatorDeployInfo.InputDeployInfo) OperatorDeployInfo(com.datatorrent.stram.api.OperatorDeployInfo) DataInputByteBuffer(org.apache.hadoop.io.DataInputByteBuffer) ContainerHeartbeatResponse(com.datatorrent.stram.api.StreamingContainerUmbilicalProtocol.ContainerHeartbeatResponse) DataOutputByteBuffer(org.apache.hadoop.io.DataOutputByteBuffer) OutputDeployInfo(com.datatorrent.stram.api.OperatorDeployInfo.OutputDeployInfo) Test(org.junit.Test) PhysicalPlanTest(com.datatorrent.stram.plan.physical.PhysicalPlanTest)
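
The round trip above works because DataOutputByteBuffer exposes its written bytes as ByteBuffers via getData(), and DataInputByteBuffer.reset(...) accepts those buffers directly. A minimal, self-contained sketch of the same pairing, using a hypothetical demo class that is not part of apex-core:

import org.apache.hadoop.io.DataInputByteBuffer;
import org.apache.hadoop.io.DataOutputByteBuffer;
import org.apache.hadoop.io.Text;

public class ByteBufferRoundTrip {
    public static void main(String[] args) throws Exception {
        // Write an int and a Writable into the output buffer.
        DataOutputByteBuffer out = new DataOutputByteBuffer();
        out.writeInt(42);
        new Text("hello").write(out);

        // Hand the written buffers straight to the input side and read them back.
        DataInputByteBuffer in = new DataInputByteBuffer();
        in.reset(out.getData());

        int value = in.readInt();
        Text text = new Text();
        text.readFields(in);
        System.out.println(value + " " + text); // prints: 42 hello
    }
}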

Example 13 with DataInputByteBuffer

Use of org.apache.hadoop.io.DataInputByteBuffer in project hadoop by apache.

In the class TestDelegationTokenRenewer, method testReplaceExpiringDelegationToken:

@Test(timeout = 20000)
public void testReplaceExpiringDelegationToken() throws Exception {
    conf.setBoolean(YarnConfiguration.RM_PROXY_USER_PRIVILEGES_ENABLED, true);
    conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
    UserGroupInformation.setConfiguration(conf);
    // create Token1:
    Text userText1 = new Text("user1");
    DelegationTokenIdentifier dtId1 = new DelegationTokenIdentifier(userText1, new Text("renewer1"), userText1);
    // set max date to 0 to simulate an expiring token;
    dtId1.setMaxDate(0);
    final Token<DelegationTokenIdentifier> token1 = new Token<DelegationTokenIdentifier>(dtId1.getBytes(), "password1".getBytes(), dtId1.getKind(), new Text("service1"));
    // create token2
    Text userText2 = new Text("user2");
    DelegationTokenIdentifier dtId2 = new DelegationTokenIdentifier(userText1, new Text("renewer2"), userText2);
    final Token<DelegationTokenIdentifier> expectedToken = new Token<DelegationTokenIdentifier>(dtId2.getBytes(), "password2".getBytes(), dtId2.getKind(), new Text("service2"));
    final MockRM rm = new TestSecurityMockRM(conf, null) {

        @Override
        protected DelegationTokenRenewer createDelegationTokenRenewer() {
            return new DelegationTokenRenewer() {

                @Override
                protected Token<?>[] obtainSystemTokensForUser(String user, final Credentials credentials) throws IOException {
                    credentials.addToken(expectedToken.getService(), expectedToken);
                    return new Token<?>[] { expectedToken };
                }
            };
        }
    };
    rm.start();
    Credentials credentials = new Credentials();
    credentials.addToken(userText1, token1);
    RMApp app = rm.submitApp(200, "name", "user", new HashMap<ApplicationAccessType, String>(), false, "default", 1, credentials);
    // wait for the initial expiring hdfs token to be removed from allTokens
    GenericTestUtils.waitFor(new Supplier<Boolean>() {

        public Boolean get() {
            return rm.getRMContext().getDelegationTokenRenewer().getAllTokens().get(token1) == null;
        }
    }, 1000, 20000);
    // wait for the initial expiring hdfs token to be removed from appTokens
    GenericTestUtils.waitFor(new Supplier<Boolean>() {

        public Boolean get() {
            return !rm.getRMContext().getDelegationTokenRenewer().getDelegationTokens().contains(token1);
        }
    }, 1000, 20000);
    // wait for the new retrieved hdfs token.
    GenericTestUtils.waitFor(new Supplier<Boolean>() {

        public Boolean get() {
            return rm.getRMContext().getDelegationTokenRenewer().getDelegationTokens().contains(expectedToken);
        }
    }, 1000, 20000);
    // check nm can retrieve the token
    final MockNM nm1 = new MockNM("127.0.0.1:1234", 15120, rm.getResourceTrackerService());
    nm1.registerNode();
    NodeHeartbeatResponse response = nm1.nodeHeartbeat(true);
    ByteBuffer tokenBuffer = response.getSystemCredentialsForApps().get(app.getApplicationId());
    Assert.assertNotNull(tokenBuffer);
    Credentials appCredentials = new Credentials();
    DataInputByteBuffer buf = new DataInputByteBuffer();
    tokenBuffer.rewind();
    buf.reset(tokenBuffer);
    appCredentials.readTokenStorageStream(buf);
    Assert.assertTrue(appCredentials.getAllTokens().contains(expectedToken));
}
Also used : RMApp(org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp) NodeHeartbeatResponse(org.apache.hadoop.yarn.server.api.protocolrecords.NodeHeartbeatResponse) DelegationTokenIdentifier(org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier) MockNM(org.apache.hadoop.yarn.server.resourcemanager.MockNM) DataInputByteBuffer(org.apache.hadoop.io.DataInputByteBuffer) Text(org.apache.hadoop.io.Text) InvalidToken(org.apache.hadoop.security.token.SecretManager.InvalidToken) Token(org.apache.hadoop.security.token.Token) MockRM(org.apache.hadoop.yarn.server.resourcemanager.MockRM) TestSecurityMockRM(org.apache.hadoop.yarn.server.resourcemanager.TestRMRestart.TestSecurityMockRM) ByteBuffer(java.nio.ByteBuffer) ApplicationAccessType(org.apache.hadoop.yarn.api.records.ApplicationAccessType) AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) Credentials(org.apache.hadoop.security.Credentials) Test(org.junit.Test)
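
Only the read side (readTokenStorageStream) appears in the test. For context, the ByteBuffer it parses is normally produced by serializing a Credentials object; a minimal sketch of that write side, assuming the standard Credentials and DataOutputBuffer APIs (packCredentials is an illustrative name):

static ByteBuffer packCredentials(Credentials credentials) throws IOException {
    // Serialize all tokens and secrets into a growable byte array...
    DataOutputBuffer dob = new DataOutputBuffer();
    credentials.writeTokenStorageToStream(dob);
    // ...and wrap it as the ByteBuffer shipped to the NodeManager.
    return ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
}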

Example 14 with DataInputByteBuffer

Use of org.apache.hadoop.io.DataInputByteBuffer in project hive by apache.

In the class LlapTokenClient, method extractToken:

private Token<LlapTokenIdentifier> extractToken(ByteString tokenBytes) throws IOException {
    Token<LlapTokenIdentifier> token = new Token<>();
    DataInputByteBuffer in = new DataInputByteBuffer();
    in.reset(tokenBytes.asReadOnlyByteBuffer());
    token.readFields(in);
    return token;
}
Also used : LlapTokenIdentifier(org.apache.hadoop.hive.llap.security.LlapTokenIdentifier) DataInputByteBuffer(org.apache.hadoop.io.DataInputByteBuffer) Token(org.apache.hadoop.security.token.Token)
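
Going the other way, turning a token into the ByteString carried by the protobuf response, can be sketched as follows; the DataOutputBuffer/ByteString.copyFrom pairing is an assumption and serializeToken is an illustrative name:

static ByteString serializeToken(Token<LlapTokenIdentifier> token) throws IOException {
    // Write the token with its Writable serialization...
    DataOutputBuffer out = new DataOutputBuffer();
    token.write(out);
    // ...then copy the bytes into an immutable ByteString for the protobuf message.
    return ByteString.copyFrom(out.getData(), 0, out.getLength());
}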

Example 15 with DataInputByteBuffer

Use of org.apache.hadoop.io.DataInputByteBuffer in project hive by apache.

In the class ShuffleHandler, method deserializeServiceData:

static Token<JobTokenIdentifier> deserializeServiceData(ByteBuffer secret) throws IOException {
    DataInputByteBuffer in = new DataInputByteBuffer();
    in.reset(secret);
    Token<JobTokenIdentifier> jt = new Token<JobTokenIdentifier>();
    jt.readFields(in);
    return jt;
}
Also used : DataInputByteBuffer(org.apache.hadoop.io.DataInputByteBuffer) JobTokenIdentifier(org.apache.tez.common.security.JobTokenIdentifier) Token(org.apache.hadoop.security.token.Token)

Aggregations

DataInputByteBuffer (org.apache.hadoop.io.DataInputByteBuffer): 21
Credentials (org.apache.hadoop.security.Credentials): 12
ByteBuffer (java.nio.ByteBuffer): 10
Token (org.apache.hadoop.security.token.Token): 8
Test (org.junit.Test): 8
Text (org.apache.hadoop.io.Text): 5
ApplicationId (org.apache.hadoop.yarn.api.records.ApplicationId): 4
IOException (java.io.IOException): 3
AtomicBoolean (java.util.concurrent.atomic.AtomicBoolean): 3
Configuration (org.apache.hadoop.conf.Configuration): 3
DelegationTokenIdentifier (org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier): 3
DataOutputBuffer (org.apache.hadoop.io.DataOutputBuffer): 3
DataOutputByteBuffer (org.apache.hadoop.io.DataOutputByteBuffer): 3
InvalidToken (org.apache.hadoop.security.token.SecretManager.InvalidToken): 3
ApplicationAccessType (org.apache.hadoop.yarn.api.records.ApplicationAccessType): 3
YarnConfiguration (org.apache.hadoop.yarn.conf.YarnConfiguration): 3
NodeHeartbeatResponse (org.apache.hadoop.yarn.server.api.protocolrecords.NodeHeartbeatResponse): 3
MockNM (org.apache.hadoop.yarn.server.resourcemanager.MockNM): 3
MockRM (org.apache.hadoop.yarn.server.resourcemanager.MockRM): 3
TestSecurityMockRM (org.apache.hadoop.yarn.server.resourcemanager.TestRMRestart.TestSecurityMockRM): 3