
Example 81 with DataOutputBuffer

Use of org.apache.hadoop.io.DataOutputBuffer in the Apache HBase project.

From the class TestHBaseSaslRpcClient, method testAuthMethodReadWrite:

@Test
public void testAuthMethodReadWrite() throws IOException {
    DataInputBuffer in = new DataInputBuffer();
    DataOutputBuffer out = new DataOutputBuffer();
    assertAuthMethodRead(in, AuthMethod.SIMPLE);
    assertAuthMethodRead(in, AuthMethod.KERBEROS);
    assertAuthMethodRead(in, AuthMethod.DIGEST);
    assertAuthMethodWrite(out, AuthMethod.SIMPLE);
    assertAuthMethodWrite(out, AuthMethod.KERBEROS);
    assertAuthMethodWrite(out, AuthMethod.DIGEST);
}
Also used: DataInputBuffer (org.apache.hadoop.io.DataInputBuffer), DataOutputBuffer (org.apache.hadoop.io.DataOutputBuffer), Test (org.junit.Test)
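
The assertAuthMethodRead and assertAuthMethodWrite helpers are defined elsewhere in TestHBaseSaslRpcClient and are not part of this excerpt. A minimal sketch of what they plausibly look like, assuming HBase's AuthMethod exposes a one-byte code field plus a static read(DataInput) and an instance write(DataOutput), mirroring the Hadoop SASL pattern:

private void assertAuthMethodRead(DataInputBuffer in, AuthMethod authMethod) throws IOException {
    // Load the single code byte into the input buffer, then read it back
    // as an AuthMethod and compare. (Sketch; the real helper may differ.)
    in.reset(new byte[] { authMethod.code }, 1);
    assertEquals(authMethod, AuthMethod.read(in));
}

private void assertAuthMethodWrite(DataOutputBuffer out, AuthMethod authMethod) throws IOException {
    // Writing an AuthMethod emits exactly its code byte; reset the buffer
    // afterwards so the next assertion starts from offset 0.
    authMethod.write(out);
    assertEquals(authMethod.code, out.getData()[0]);
    out.reset();
}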

Example 82 with DataOutputBuffer

Use of org.apache.hadoop.io.DataOutputBuffer in the Apache HBase project.

From the class TestKeyValueCompression, method testKVWithTags:

@Test
public void testKVWithTags() throws Exception {
    CompressionContext ctx = new CompressionContext(LRUDictionary.class, false, false);
    DataOutputBuffer buf = new DataOutputBuffer(BUF_SIZE);
    KeyValueCompression.writeKV(buf, createKV(1), ctx);
    KeyValueCompression.writeKV(buf, createKV(0), ctx);
    KeyValueCompression.writeKV(buf, createKV(2), ctx);
    // Reset the dictionary state so the reader rebuilds it from the same
    // empty starting point the writer had.
    ctx.clear();
    DataInputStream in = new DataInputStream(new ByteArrayInputStream(buf.getData(), 0, buf.getLength()));
    KeyValue readBack = KeyValueCompression.readKV(in, ctx);
    List<Tag> tags = readBack.getTags();
    assertEquals(1, tags.size());
}
Also used: KeyValue (org.apache.hadoop.hbase.KeyValue), ByteArrayInputStream (java.io.ByteArrayInputStream), DataOutputBuffer (org.apache.hadoop.io.DataOutputBuffer), ArrayBackedTag (org.apache.hadoop.hbase.ArrayBackedTag), Tag (org.apache.hadoop.hbase.Tag), DataInputStream (java.io.DataInputStream), Test (org.junit.Test)
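
createKV is a fixture builder defined elsewhere in TestKeyValueCompression; the excerpt shows only its call sites. A hedged sketch of such a helper, assuming the KeyValue constructor that accepts a List<Tag> and HBase's ArrayBackedTag(byte, byte[]); row, family, qualifier, and tag values here are illustrative test data:

private KeyValue createKV(int noOfTags) {
    List<Tag> tags = new ArrayList<>(noOfTags);
    for (int i = 1; i <= noOfTags; i++) {
        // Tag type bytes and payloads are arbitrary; only the count matters
        // to the assertions in testKVWithTags.
        tags.add(new ArrayBackedTag((byte) i, Bytes.toBytes("tagValue" + i)));
    }
    return new KeyValue(Bytes.toBytes("myRow"), Bytes.toBytes("myCF"),
        Bytes.toBytes("myQualifier"), HConstants.LATEST_TIMESTAMP,
        Bytes.toBytes("myValue"), tags);
}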

Example 83 with DataOutputBuffer

Use of org.apache.hadoop.io.DataOutputBuffer in the Apache HBase project.

From the class TestKeyValueCompression, method runTestCycle:

private void runTestCycle(List<KeyValue> kvs) throws Exception {
    CompressionContext ctx = new CompressionContext(LRUDictionary.class, false, false);
    DataOutputBuffer buf = new DataOutputBuffer(BUF_SIZE);
    for (KeyValue kv : kvs) {
        KeyValueCompression.writeKV(buf, kv, ctx);
    }
    ctx.clear();
    DataInputStream in = new DataInputStream(new ByteArrayInputStream(buf.getData(), 0, buf.getLength()));
    for (KeyValue kv : kvs) {
        KeyValue readBack = KeyValueCompression.readKV(in, ctx);
        assertEquals(kv, readBack);
    }
}
Also used: KeyValue (org.apache.hadoop.hbase.KeyValue), ByteArrayInputStream (java.io.ByteArrayInputStream), DataOutputBuffer (org.apache.hadoop.io.DataOutputBuffer), DataInputStream (java.io.DataInputStream)
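
The ctx.clear() between the write loop and the read loop is what makes the cycle valid: the writer populates the LRU dictionary as it compresses, and the reader must rebuild that dictionary from the same empty initial state as it decompresses. An illustrative caller (hypothetical, not from the quoted class; createKV as sketched under Example 82):

@Test
public void testRoundTrip() throws Exception {
    List<KeyValue> kvs = new ArrayList<>();
    for (int i = 0; i < 400; i++) {
        // Repeated rows and families exercise the dictionary's hit path.
        kvs.add(createKV(0));
    }
    runTestCycle(kvs);
}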

Example 84 with DataOutputBuffer

Use of org.apache.hadoop.io.DataOutputBuffer in the Apache Hadoop project.

From the class TestDelegationTokenRenewer, method testTokensConfExceedLimit:

// If the app's token conf exceeds RM_DELEGATION_TOKEN_MAX_CONF_SIZE,
// the app submission should fail.
@Test
public void testTokensConfExceedLimit() throws Exception {
    conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
    UserGroupInformation.setConfiguration(conf);
    // limit 100 bytes
    conf.setInt(YarnConfiguration.RM_DELEGATION_TOKEN_MAX_CONF_SIZE, 100);
    MockRM rm = new TestSecurityMockRM(conf, null);
    rm.start();
    final MockNM nm1 = new MockNM("127.0.0.1:1234", 15120, rm.getResourceTrackerService());
    nm1.registerNode();
    // create a token
    Text userText1 = new Text("user1");
    DelegationTokenIdentifier dtId1 = new DelegationTokenIdentifier(userText1, new Text("renewer1"), userText1);
    final Token<DelegationTokenIdentifier> token1 = new Token<DelegationTokenIdentifier>(dtId1.getBytes(), "password1".getBytes(), dtId1.getKind(), new Text("service1"));
    Credentials credentials = new Credentials();
    credentials.addToken(userText1, token1);
    // Create a token conf for renewal; its serialized size (roughly 384 bytes)
    // exceeds the limit (100 bytes). By experiment, each key-value pair
    // costs roughly 128 bytes.
    Configuration appConf = new Configuration(false);
    appConf.clear();
    // 128 bytes
    appConf.set("dfs.nameservices", "mycluster1,mycluster2");
    // 128 bytes
    appConf.set("dfs.namenode.rpc-address.mycluster2.nn1", "123.0.0.1");
    // 128 bytes
    appConf.set("dfs.namenode.rpc-address.mycluster3.nn2", "123.0.0.2");
    DataOutputBuffer dob = new DataOutputBuffer();
    appConf.write(dob);
    ByteBuffer tokenConf = ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
    try {
        rm.submitApp(credentials, tokenConf);
        Assert.fail();
    } catch (Exception e) {
        e.printStackTrace();
        Assert.assertTrue(e.getCause().getMessage().contains(YarnConfiguration.RM_DELEGATION_TOKEN_MAX_CONF_SIZE));
    }
}
Also used: Configuration (org.apache.hadoop.conf.Configuration), YarnConfiguration (org.apache.hadoop.yarn.conf.YarnConfiguration), DelegationTokenIdentifier (org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier), MockNM (org.apache.hadoop.yarn.server.resourcemanager.MockNM), DataOutputBuffer (org.apache.hadoop.io.DataOutputBuffer), MockRM (org.apache.hadoop.yarn.server.resourcemanager.MockRM), TestSecurityMockRM (org.apache.hadoop.yarn.server.resourcemanager.TestRMRestart.TestSecurityMockRM), Text (org.apache.hadoop.io.Text), InvalidToken (org.apache.hadoop.security.token.SecretManager.InvalidToken), Token (org.apache.hadoop.security.token.Token), ByteBuffer (java.nio.ByteBuffer), DataInputByteBuffer (org.apache.hadoop.io.DataInputByteBuffer), Credentials (org.apache.hadoop.security.Credentials), URISyntaxException (java.net.URISyntaxException), YarnException (org.apache.hadoop.yarn.exceptions.YarnException), IOException (java.io.IOException), BrokenBarrierException (java.util.concurrent.BrokenBarrierException), Test (org.junit.Test)
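
The DataInputByteBuffer listed above is the natural inverse of this serialization. Since Configuration implements Writable, the bytes whose length the RM checks against RM_DELEGATION_TOKEN_MAX_CONF_SIZE can be read back the same way they were written. A minimal sketch of that round trip (illustrative, not the RM's internal code):

// Serialize: Configuration.write(DataOutput) emits the key-value pairs.
Configuration appConf = new Configuration(false);
appConf.set("dfs.nameservices", "mycluster1,mycluster2");
DataOutputBuffer dob = new DataOutputBuffer();
appConf.write(dob);
ByteBuffer wire = ByteBuffer.wrap(dob.getData(), 0, dob.getLength());

// Deserialize: rewind the buffer and read the pairs back.
DataInputByteBuffer dibb = new DataInputByteBuffer();
wire.rewind();
dibb.reset(wire);
Configuration restored = new Configuration(false);
restored.readFields(dibb);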

Example 85 with DataOutputBuffer

Use of org.apache.hadoop.io.DataOutputBuffer in the Apache Hadoop project.

From the class TestRMAppTransitions, method testAppRecoverPath:

@Test(timeout = 30000)
public void testAppRecoverPath() throws IOException {
    LOG.info("--- START: testAppRecoverPath ---");
    ApplicationSubmissionContext sub = Records.newRecord(ApplicationSubmissionContext.class);
    ContainerLaunchContext clc = Records.newRecord(ContainerLaunchContext.class);
    Credentials credentials = new Credentials();
    DataOutputBuffer dob = new DataOutputBuffer();
    credentials.writeTokenStorageToStream(dob);
    ByteBuffer securityTokens = ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
    clc.setTokens(securityTokens);
    sub.setAMContainerSpec(clc);
    testCreateAppSubmittedRecovery(sub);
}
Also used: ApplicationSubmissionContext (org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext), DataOutputBuffer (org.apache.hadoop.io.DataOutputBuffer), ContainerLaunchContext (org.apache.hadoop.yarn.api.records.ContainerLaunchContext), ByteBuffer (java.nio.ByteBuffer), Credentials (org.apache.hadoop.security.Credentials), Test (org.junit.Test)
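
The token buffer built here can be turned back into Credentials on the receiving side. A minimal, self-contained sketch of the inverse step, following the pattern YARN uses when materializing container tokens (DataInputByteBuffer extends DataInputStream, so it can feed readTokenStorageStream directly); this is added for illustration and is not part of the quoted test:

Credentials credentials = new Credentials();
DataOutputBuffer dob = new DataOutputBuffer();
credentials.writeTokenStorageToStream(dob);
ByteBuffer securityTokens = ByteBuffer.wrap(dob.getData(), 0, dob.getLength());

// Receiving side: rewind the buffer and parse the token storage format.
securityTokens.rewind();
DataInputByteBuffer dibb = new DataInputByteBuffer();
dibb.reset(securityTokens);
Credentials recovered = new Credentials();
recovered.readTokenStorageStream(dibb);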

Aggregations

Usage counts across the indexed examples:

DataOutputBuffer (org.apache.hadoop.io.DataOutputBuffer): 132
Test (org.junit.Test): 48
Credentials (org.apache.hadoop.security.Credentials): 37
ByteBuffer (java.nio.ByteBuffer): 36
DataInputBuffer (org.apache.hadoop.io.DataInputBuffer): 36
IOException (java.io.IOException): 34
Configuration (org.apache.hadoop.conf.Configuration): 25
Token (org.apache.hadoop.security.token.Token): 25
Path (org.apache.hadoop.fs.Path): 21
HashMap (java.util.HashMap): 20
ApplicationId (org.apache.hadoop.yarn.api.records.ApplicationId): 20
ContainerLaunchContext (org.apache.hadoop.yarn.api.records.ContainerLaunchContext): 18
LocalResource (org.apache.hadoop.yarn.api.records.LocalResource): 16
YarnConfiguration (org.apache.hadoop.yarn.conf.YarnConfiguration): 16
Random (java.util.Random): 15
DataInputStream (java.io.DataInputStream): 14
Text (org.apache.hadoop.io.Text): 14
ArrayList (java.util.ArrayList): 13
Map (java.util.Map): 10
FileSystem (org.apache.hadoop.fs.FileSystem): 10