Use of org.apache.hadoop.io.DataOutputBuffer in project hadoop by apache.
From the class TestIFileStreams, method testIFileStream:
@Test
public void testIFileStream() throws Exception {
  final int DLEN = 100;
  // the extra 4 bytes leave room for the trailing checksum that
  // IFileOutputStream appends when it is closed
  DataOutputBuffer dob = new DataOutputBuffer(DLEN + 4);
  IFileOutputStream ifos = new IFileOutputStream(dob);
  for (int i = 0; i < DLEN; ++i) {
    ifos.write(i);
  }
  ifos.close();
  DataInputBuffer dib = new DataInputBuffer();
  dib.reset(dob.getData(), DLEN + 4);
  IFileInputStream ifis = new IFileInputStream(dib, 104, new Configuration());
  for (int i = 0; i < DLEN; ++i) {
    assertEquals(i, ifis.read());
  }
  ifis.close();
}
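The test above depends on DataOutputBuffer and DataInputBuffer being a matched pair of in-memory streams: bytes written into the output buffer can be replayed by pointing an input buffer at getData() and getLength(). A minimal standalone sketch of that round trip, using only the stock org.apache.hadoop.io classes; the class name and values are illustrative, not part of the test above.

import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;

public class BufferRoundTrip {
  public static void main(String[] args) throws Exception {
    // write a few values into the growable in-memory output buffer
    DataOutputBuffer out = new DataOutputBuffer();
    out.writeInt(42);
    out.writeUTF("hello");

    // wrap the backing array with a DataInputBuffer; getLength() marks the
    // valid bytes, since getData() may return a larger internal array
    DataInputBuffer in = new DataInputBuffer();
    in.reset(out.getData(), 0, out.getLength());
    System.out.println(in.readInt());   // 42
    System.out.println(in.readUTF());   // hello
  }
}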
Use of org.apache.hadoop.io.DataOutputBuffer in project hadoop by apache.
From the class TestGridmixRecord, method binSortTest:
static void binSortTest(GridmixRecord x, GridmixRecord y, int min, int max,
    WritableComparator cmp) throws Exception {
  final Random r = new Random();
  final long s = r.nextLong();
  r.setSeed(s);
  LOG.info("sort: " + s);
  final DataOutputBuffer out1 = new DataOutputBuffer();
  final DataOutputBuffer out2 = new DataOutputBuffer();
  for (int i = min; i < max; ++i) {
    final long seed1 = r.nextLong();
    setSerialize(x, seed1, i, out1);
    assertEquals(0, x.compareSeed(seed1, Math.max(0, i - x.fixedBytes())));
    final long seed2 = r.nextLong();
    setSerialize(y, seed2, i, out2);
    assertEquals(0, y.compareSeed(seed2, Math.max(0, i - x.fixedBytes())));
    // for equal-sized records, the raw byte comparison must agree in sign
    // with the object comparison and with the registered comparator
    final int chk = WritableComparator.compareBytes(out1.getData(), 0,
        out1.getLength(), out2.getData(), 0, out2.getLength());
    assertEquals(Integer.signum(chk), Integer.signum(x.compareTo(y)));
    assertEquals(Integer.signum(chk), Integer.signum(cmp.compare(
        out1.getData(), 0, out1.getLength(),
        out2.getData(), 0, out2.getLength())));
    // write a second copy of each record; the two copies must compare equal
    final int s1 = out1.getLength();
    x.write(out1);
    assertEquals(0, cmp.compare(out1.getData(), 0, s1,
        out1.getData(), s1, out1.getLength() - s1));
    final int s2 = out2.getLength();
    y.write(out2);
    assertEquals(0, cmp.compare(out2.getData(), 0, s2,
        out2.getData(), s2, out2.getLength() - s2));
    assertEquals(Integer.signum(chk), Integer.signum(cmp.compare(
        out1.getData(), 0, s1, out2.getData(), s2, out2.getLength() - s2)));
  }
}
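The core idea in binSortTest is that the serialized bytes held in the two DataOutputBuffers must sort the same way as the deserialized records. A minimal sketch of that same check, assuming the stock Text type and its registered raw comparator in place of GridmixRecord; the class name and strings are illustrative.

import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.WritableComparator;

public class RawComparatorSketch {
  public static void main(String[] args) throws Exception {
    Text a = new Text("apple");
    Text b = new Text("banana");

    // serialize each value into its own in-memory buffer
    DataOutputBuffer bufA = new DataOutputBuffer();
    DataOutputBuffer bufB = new DataOutputBuffer();
    a.write(bufA);
    b.write(bufB);

    // compare the serialized forms without deserializing, as the test does
    // with its GridmixRecord comparator
    WritableComparator cmp = WritableComparator.get(Text.class);
    int raw = cmp.compare(bufA.getData(), 0, bufA.getLength(),
                          bufB.getData(), 0, bufB.getLength());
    System.out.println(
        Integer.signum(raw) == Integer.signum(a.compareTo(b)));  // true
  }
}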
Use of org.apache.hadoop.io.DataOutputBuffer in project hadoop by apache.
From the class TestGridmixRecord, method lengthTest:
static void lengthTest(GridmixRecord x, GridmixRecord y, int min, int max)
    throws Exception {
  final Random r = new Random();
  final long seed = r.nextLong();
  r.setSeed(seed);
  LOG.info("length: " + seed);
  final DataInputBuffer in = new DataInputBuffer();
  final DataOutputBuffer out1 = new DataOutputBuffer();
  final DataOutputBuffer out2 = new DataOutputBuffer();
  for (int i = min; i < max; ++i) {
    setSerialize(x, r.nextLong(), i, out1);
    // check write
    assertEquals(i, out1.getLength());
    // write to stream
    x.write(out2);
    // check read
    in.reset(out1.getData(), 0, out1.getLength());
    y.readFields(in);
    assertEquals(i, x.getSize());
    assertEquals(i, y.getSize());
  }
  // check stream read
  in.reset(out2.getData(), 0, out2.getLength());
  for (int i = min; i < max; ++i) {
    y.readFields(in);
    assertEquals(i, y.getSize());
  }
}
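lengthTest also appends every record for x into out2 and then replays the whole sequence through a single DataInputBuffer. A minimal sketch of that append-then-replay pattern, using IntWritable in place of GridmixRecord; the class name and counts are illustrative.

import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.IntWritable;

public class StreamReadSketch {
  public static void main(String[] args) throws Exception {
    // append several records back to back into a single buffer
    DataOutputBuffer out = new DataOutputBuffer();
    IntWritable w = new IntWritable();
    for (int i = 0; i < 5; i++) {
      w.set(i);
      w.write(out);
    }

    // read them back in order from the same byte range
    DataInputBuffer in = new DataInputBuffer();
    in.reset(out.getData(), 0, out.getLength());
    IntWritable r = new IntWritable();
    for (int i = 0; i < 5; i++) {
      r.readFields(in);
      System.out.println(r.get());  // prints 0 through 4
    }
  }
}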
Use of org.apache.hadoop.io.DataOutputBuffer in project hadoop by apache.
From the class TestGridmixRecord, method eqSeedTest:
static void eqSeedTest(GridmixRecord x, GridmixRecord y, int max) throws Exception {
  final Random r = new Random();
  final long s = r.nextLong();
  r.setSeed(s);
  LOG.info("eqSeed: " + s);
  assertEquals(x.fixedBytes(), y.fixedBytes());
  final int min = x.fixedBytes() + 1;
  final DataOutputBuffer out1 = new DataOutputBuffer();
  final DataOutputBuffer out2 = new DataOutputBuffer();
  for (int i = min; i < max; ++i) {
    final long seed = r.nextLong();
    setSerialize(x, seed, i, out1);
    setSerialize(y, seed, i, out2);
    assertEquals(x, y);
    assertEquals(x.hashCode(), y.hashCode());
    // verify written contents match
    assertEquals(out1.getLength(), out2.getLength());
    // assumes that writes will grow buffer deterministically
    assertEquals("Bad test", out1.getData().length, out2.getData().length);
    assertArrayEquals(out1.getData(), out2.getData());
  }
}
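Note that eqSeedTest compares the full backing arrays returned by getData(), which is only safe because the "Bad test" assertion first confirms both buffers grew to the same capacity. A sketch of a variant that compares only the first getLength() bytes, with Text standing in for GridmixRecord; the class name and payload are illustrative.

import java.util.Arrays;

import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.Text;

public class EqualSerializationSketch {
  public static void main(String[] args) throws Exception {
    DataOutputBuffer out1 = new DataOutputBuffer();
    DataOutputBuffer out2 = new DataOutputBuffer();

    // reset() rewinds the write position so a buffer can be reused;
    // the backing array is kept, so only the first getLength() bytes
    // are guaranteed to be meaningful
    out1.reset();
    out2.reset();
    new Text("same payload").write(out1);
    new Text("same payload").write(out2);

    boolean equal = out1.getLength() == out2.getLength()
        && Arrays.equals(
            Arrays.copyOf(out1.getData(), out1.getLength()),
            Arrays.copyOf(out2.getData(), out2.getLength()));
    System.out.println(equal);  // true
  }
}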
Use of org.apache.hadoop.io.DataOutputBuffer in project hadoop by apache.
From the class TestDelegationTokenRenewer, method testRenewTokenUsingTokenConfProvidedByApp:
// Test that DelegationTokenRenewer uses the tokenConf provided by the
// application for token renewal.
@Test
public void testRenewTokenUsingTokenConfProvidedByApp() throws Exception {
  conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
  UserGroupInformation.setConfiguration(conf);
  MockRM rm = new TestSecurityMockRM(conf, null);
  rm.start();
  final MockNM nm1 =
      new MockNM("127.0.0.1:1234", 15120, rm.getResourceTrackerService());
  nm1.registerNode();
  // create a token
  Text userText1 = new Text("user1");
  DelegationTokenIdentifier dtId1 =
      new DelegationTokenIdentifier(userText1, new Text("renewer1"), userText1);
  final Token<DelegationTokenIdentifier> token1 =
      new Token<DelegationTokenIdentifier>(dtId1.getBytes(),
          "password1".getBytes(), dtId1.getKind(), new Text("service1"));
  Credentials credentials = new Credentials();
  credentials.addToken(userText1, token1);
  // create the token conf for renewal and serialize it into a ByteBuffer
  Configuration appConf = new Configuration(false);
  appConf.set("dfs.nameservices", "mycluster1,mycluster2");
  appConf.set("dfs.namenode.rpc-address.mycluster2.nn1", "123.0.0.1");
  appConf.set("dfs.namenode.rpc-address.mycluster2.nn2", "123.0.0.2");
  appConf.set("dfs.ha.namenodes.mycluster2", "nn1,nn2");
  appConf.set("dfs.client.failover.proxy.provider.mycluster2", "provider");
  DataOutputBuffer dob = new DataOutputBuffer();
  appConf.write(dob);
  ByteBuffer tokenConf = ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
  final int confSize = appConf.size();
  // submit the app and wait for the renewer to pick up the app-provided conf
  RMApp app = rm.submitApp(credentials, tokenConf);
  GenericTestUtils.waitFor(new Supplier<Boolean>() {
    public Boolean get() {
      DelegationTokenToRenew toRenew =
          rm.getRMContext().getDelegationTokenRenewer().getAllTokens().get(token1);
      // the renewer's conf must match the app-provided conf, including
      // the specific config we added
      return toRenew != null && toRenew.conf != null
          && toRenew.conf.size() == confSize
          && toRenew.conf.get("dfs.namenode.rpc-address.mycluster2.nn1")
              .equals("123.0.0.1");
    }
  }, 200, 10000);
}
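The serialization step in the middle of the test (Configuration.write into a DataOutputBuffer, then ByteBuffer.wrap over getData()/getLength()) is the generic way to ship a Configuration as raw bytes, since Configuration is Writable. A standalone sketch of that round trip, with the receiving side rebuilt via readFields; the class name and keys are illustrative, not tied to the YARN test above.

import java.nio.ByteBuffer;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;

public class ConfRoundTripSketch {
  public static void main(String[] args) throws Exception {
    // Configuration implements Writable, so it can be serialized to bytes
    Configuration appConf = new Configuration(false);
    appConf.set("dfs.nameservices", "mycluster1,mycluster2");

    DataOutputBuffer dob = new DataOutputBuffer();
    appConf.write(dob);
    ByteBuffer tokenConf = ByteBuffer.wrap(dob.getData(), 0, dob.getLength());

    // on the receiving side, rebuild the Configuration from the bytes
    DataInputBuffer dib = new DataInputBuffer();
    dib.reset(tokenConf.array(),
        tokenConf.arrayOffset() + tokenConf.position(), tokenConf.remaining());
    Configuration restored = new Configuration(false);
    restored.readFields(dib);
    System.out.println(restored.get("dfs.nameservices"));  // mycluster1,mycluster2
  }
}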