Example use of org.apache.hadoop.io.DataOutputBuffer in the Apache HBase project:
class TestHBaseSaslRpcClient, method testAuthMethodReadWrite.
@Test
public void testAuthMethodReadWrite() throws IOException {
  // Verify that every supported AuthMethod round-trips through the
  // read and write helpers without error.
  DataInputBuffer input = new DataInputBuffer();
  DataOutputBuffer output = new DataOutputBuffer();
  AuthMethod[] methods = { AuthMethod.SIMPLE, AuthMethod.KERBEROS, AuthMethod.DIGEST };
  // First check reading each method from a stream...
  for (AuthMethod method : methods) {
    assertAuthMethodRead(input, method);
  }
  // ...then check writing each method to a stream.
  for (AuthMethod method : methods) {
    assertAuthMethodWrite(output, method);
  }
}
Example use of org.apache.hadoop.io.DataOutputBuffer in the Apache HBase project:
class TestKeyValueCompression, method testKVWithTags.
@Test
public void testKVWithTags() throws Exception {
  // Compress three KeyValues carrying different tag counts, then decode the
  // first one and confirm its tags survive the round trip.
  CompressionContext compressionContext = new CompressionContext(LRUDictionary.class, false, false);
  DataOutputBuffer out = new DataOutputBuffer(BUF_SIZE);
  for (int tagCount : new int[] { 1, 0, 2 }) {
    KeyValueCompression.writeKV(out, createKV(tagCount), compressionContext);
  }
  // Reset dictionary state so decoding starts fresh, the way a reader would.
  compressionContext.clear();
  DataInputStream input =
      new DataInputStream(new ByteArrayInputStream(out.getData(), 0, out.getLength()));
  KeyValue readBack = KeyValueCompression.readKV(input, compressionContext);
  List<Tag> tags = readBack.getTags();
  assertEquals(1, tags.size());
}
Example use of org.apache.hadoop.io.DataOutputBuffer in the Apache HBase project:
class TestKeyValueCompression, method runTestCycle.
/**
 * Compresses every KeyValue in {@code kvs} into a buffer, then decompresses
 * them back in order and asserts each one equals its original.
 */
private void runTestCycle(List<KeyValue> kvs) throws Exception {
  CompressionContext context = new CompressionContext(LRUDictionary.class, false, false);
  DataOutputBuffer compressed = new DataOutputBuffer(BUF_SIZE);
  for (KeyValue original : kvs) {
    KeyValueCompression.writeKV(compressed, original, context);
  }
  // Clear dictionary state so the read side rebuilds it from the stream.
  context.clear();
  DataInputStream input = new DataInputStream(
      new ByteArrayInputStream(compressed.getData(), 0, compressed.getLength()));
  for (KeyValue expected : kvs) {
    assertEquals(expected, KeyValueCompression.readKV(input, context));
  }
}
Example use of org.apache.hadoop.io.DataOutputBuffer in the Apache Hadoop project:
class TestDelegationTokenRenewer, method testTokensConfExceedLimit.
// Test that when an app's token conf exceeds RM_DELEGATION_TOKEN_MAX_CONF_SIZE,
// the app submission fails with a message naming the limit property.
@Test
public void testTokensConfExceedLimit() throws Exception {
  conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
  UserGroupInformation.setConfiguration(conf);
  // Set a deliberately tiny limit (100 bytes) so any realistic conf exceeds it.
  conf.setInt(YarnConfiguration.RM_DELEGATION_TOKEN_MAX_CONF_SIZE, 100);
  MockRM rm = new TestSecurityMockRM(conf, null);
  rm.start();
  // Fix: stop the RM in a finally block so its services don't leak into
  // other tests if any step below throws.
  try {
    final MockNM nm1 = new MockNM("127.0.0.1:1234", 15120, rm.getResourceTrackerService());
    nm1.registerNode();
    // Create a delegation token for the app to carry.
    Text userText1 = new Text("user1");
    DelegationTokenIdentifier dtId1 = new DelegationTokenIdentifier(userText1, new Text("renewer1"), userText1);
    final Token<DelegationTokenIdentifier> token1 = new Token<DelegationTokenIdentifier>(dtId1.getBytes(), "password1".getBytes(), dtId1.getKind(), new Text("service1"));
    Credentials credentials = new Credentials();
    credentials.addToken(userText1, token1);
    // Create a token conf for renewal whose serialized size (roughly
    // 128 bytes per key-value pair, ~384 bytes total) exceeds the
    // 100-byte limit configured above.
    Configuration appConf = new Configuration(false);
    appConf.clear();
    // ~128 bytes
    appConf.set("dfs.nameservices", "mycluster1,mycluster2");
    // ~128 bytes
    appConf.set("dfs.namenode.rpc-address.mycluster2.nn1", "123.0.0.1");
    // ~128 bytes
    appConf.set("dfs.namenode.rpc-address.mycluster3.nn2", "123.0.0.2");
    DataOutputBuffer dob = new DataOutputBuffer();
    appConf.write(dob);
    ByteBuffer tokenConf = ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
    try {
      rm.submitApp(credentials, tokenConf);
      // Fix: fail with a message so a wrongly-succeeding submission is
      // diagnosable, instead of a bare Assert.fail().
      Assert.fail("App submission should have been rejected: token conf size exceeds "
          + YarnConfiguration.RM_DELEGATION_TOKEN_MAX_CONF_SIZE);
    } catch (Exception e) {
      e.printStackTrace();
      // The rejection message must reference the limit property so users
      // know which knob to raise.
      Assert.assertTrue(e.getCause().getMessage().contains(YarnConfiguration.RM_DELEGATION_TOKEN_MAX_CONF_SIZE));
    }
  } finally {
    rm.stop();
  }
}
Example use of org.apache.hadoop.io.DataOutputBuffer in the Apache Hadoop project:
class TestRMAppTransitions, method testAppRecoverPath.
@Test(timeout = 30000)
public void testAppRecoverPath() throws IOException {
  LOG.info("--- START: testAppRecoverPath ---");
  // Serialize an (empty) credential store into a byte buffer to attach
  // as the AM container's security tokens.
  Credentials credentials = new Credentials();
  DataOutputBuffer tokenBuffer = new DataOutputBuffer();
  credentials.writeTokenStorageToStream(tokenBuffer);
  ByteBuffer securityTokens =
      ByteBuffer.wrap(tokenBuffer.getData(), 0, tokenBuffer.getLength());
  // Build a submission context whose AM launch context carries the tokens.
  ContainerLaunchContext launchContext = Records.newRecord(ContainerLaunchContext.class);
  launchContext.setTokens(securityTokens);
  ApplicationSubmissionContext submission =
      Records.newRecord(ApplicationSubmissionContext.class);
  submission.setAMContainerSpec(launchContext);
  // Drive the app through the submitted-recovery transition path.
  testCreateAppSubmittedRecovery(submission);
}
Aggregations