Search in sources :

Example 16 with DataOutputBuffer

Use of org.apache.hadoop.io.DataOutputBuffer in project hadoop by apache.

The class TestIFileStreams, method testIFileStream:

@Test
public void testIFileStream() throws Exception {
    // Number of payload bytes; IFileOutputStream appends a 4-byte checksum
    // on close, hence the DLEN + 4 sizing below.
    final int DLEN = 100;
    DataOutputBuffer dob = new DataOutputBuffer(DLEN + 4);
    IFileOutputStream ifos = new IFileOutputStream(dob);
    // Write DLEN single bytes with values 0..DLEN-1.
    for (int i = 0; i < DLEN; ++i) {
        ifos.write(i);
    }
    // close() flushes the trailing checksum into the buffer.
    ifos.close();
    DataInputBuffer dib = new DataInputBuffer();
    dib.reset(dob.getData(), DLEN + 4);
    // Read back through the checksum-verifying stream. The length covers
    // payload plus checksum; was a magic literal 104, now expressed as
    // DLEN + 4 so the test stays consistent if DLEN ever changes.
    IFileInputStream ifis = new IFileInputStream(dib, DLEN + 4, new Configuration());
    for (int i = 0; i < DLEN; ++i) {
        assertEquals(i, ifis.read());
    }
    ifis.close();
}
Also used : DataInputBuffer(org.apache.hadoop.io.DataInputBuffer) Configuration(org.apache.hadoop.conf.Configuration) DataOutputBuffer(org.apache.hadoop.io.DataOutputBuffer) Test(org.junit.Test)

Example 17 with DataOutputBuffer

Use of org.apache.hadoop.io.DataOutputBuffer in project hadoop by apache.

The class TestGridmixRecord, method binSortTest:

/**
 * Checks that byte-wise comparison of serialized {@code GridmixRecord}s
 * agrees with object-level {@code compareTo} for every record size in
 * [min, max), and that re-serializing the same record compares equal to
 * its first serialization.
 */
static void binSortTest(GridmixRecord x, GridmixRecord y, int min, int max, WritableComparator cmp) throws Exception {
    final Random r = new Random();
    final long s = r.nextLong();
    r.setSeed(s);
    // Log the seed so a failing run can be reproduced.
    LOG.info("sort: " + s);
    final DataOutputBuffer out1 = new DataOutputBuffer();
    final DataOutputBuffer out2 = new DataOutputBuffer();
    for (int i = min; i < max; ++i) {
        final long seed1 = r.nextLong();
        setSerialize(x, seed1, i, out1);
        assertEquals(0, x.compareSeed(seed1, Math.max(0, i - x.fixedBytes())));
        final long seed2 = r.nextLong();
        setSerialize(y, seed2, i, out2);
        // NOTE(review): this uses x.fixedBytes() rather than y.fixedBytes();
        // presumably x and y have identical fixed overhead — confirm, else
        // this should be y.fixedBytes().
        assertEquals(0, y.compareSeed(seed2, Math.max(0, i - x.fixedBytes())));
        // for eq sized records, ensure byte cmp where req
        final int chk = WritableComparator.compareBytes(out1.getData(), 0, out1.getLength(), out2.getData(), 0, out2.getLength());
        // Raw byte ordering must match both object ordering and the
        // WritableComparator's raw compare.
        assertEquals(Integer.signum(chk), Integer.signum(x.compareTo(y)));
        assertEquals(Integer.signum(chk), Integer.signum(cmp.compare(out1.getData(), 0, out1.getLength(), out2.getData(), 0, out2.getLength())));
        // write second copy, compare eq
        final int s1 = out1.getLength();
        x.write(out1);
        // A record's second serialization must compare equal to its first.
        assertEquals(0, cmp.compare(out1.getData(), 0, s1, out1.getData(), s1, out1.getLength() - s1));
        final int s2 = out2.getLength();
        y.write(out2);
        assertEquals(0, cmp.compare(out2.getData(), 0, s2, out2.getData(), s2, out2.getLength() - s2));
        // Cross-check: first copy of x vs second copy of y preserves ordering.
        assertEquals(Integer.signum(chk), Integer.signum(cmp.compare(out1.getData(), 0, s1, out2.getData(), s2, out2.getLength() - s2)));
    }
}
Also used : Random(java.util.Random) DataOutputBuffer(org.apache.hadoop.io.DataOutputBuffer)

Example 18 with DataOutputBuffer

Use of org.apache.hadoop.io.DataOutputBuffer in project hadoop by apache.

The class TestGridmixRecord, method lengthTest:

/**
 * Serializes records at every length in [min, max) and verifies that the
 * on-wire length, the reported size after a read-back round trip, and a
 * sequential stream of all the records each match the requested length.
 */
static void lengthTest(GridmixRecord x, GridmixRecord y, int min, int max) throws Exception {
    final Random random = new Random();
    final long seed = random.nextLong();
    random.setSeed(seed);
    // Log the seed so a failing run can be reproduced.
    LOG.info("length: " + seed);
    final DataInputBuffer readBuf = new DataInputBuffer();
    final DataOutputBuffer perRecord = new DataOutputBuffer();
    final DataOutputBuffer allRecords = new DataOutputBuffer();
    for (int len = min; len < max; ++len) {
        setSerialize(x, random.nextLong(), len, perRecord);
        // The serialized form must occupy exactly the requested length.
        assertEquals(len, perRecord.getLength());
        // Append the same record to the cumulative stream for the later pass.
        x.write(allRecords);
        // Round-trip: deserialize into y and compare reported sizes.
        readBuf.reset(perRecord.getData(), 0, perRecord.getLength());
        y.readFields(readBuf);
        assertEquals(len, x.getSize());
        assertEquals(len, y.getSize());
    }
    // Second pass: read every record back out of the cumulative stream.
    readBuf.reset(allRecords.getData(), 0, allRecords.getLength());
    for (int len = min; len < max; ++len) {
        y.readFields(readBuf);
        assertEquals(len, y.getSize());
    }
}
Also used : DataInputBuffer(org.apache.hadoop.io.DataInputBuffer) Random(java.util.Random) DataOutputBuffer(org.apache.hadoop.io.DataOutputBuffer)

Example 19 with DataOutputBuffer

Use of org.apache.hadoop.io.DataOutputBuffer in project hadoop by apache.

The class TestGridmixRecord, method eqSeedTest:

/**
 * Verifies that two records built from the same seed and length are equal
 * as objects, hash identically, and serialize to identical bytes.
 */
static void eqSeedTest(GridmixRecord x, GridmixRecord y, int max) throws Exception {
    final Random r = new Random();
    final long s = r.nextLong();
    r.setSeed(s);
    // Log the seed so a failing run can be reproduced.
    LOG.info("eqSeed: " + s);
    assertEquals(x.fixedBytes(), y.fixedBytes());
    // Records cannot be smaller than their fixed serialization overhead.
    final int min = x.fixedBytes() + 1;
    final DataOutputBuffer out1 = new DataOutputBuffer();
    final DataOutputBuffer out2 = new DataOutputBuffer();
    for (int i = min; i < max; ++i) {
        final long seed = r.nextLong();
        setSerialize(x, seed, i, out1);
        setSerialize(y, seed, i, out2);
        assertEquals(x, y);
        assertEquals(x.hashCode(), y.hashCode());
        // verify written contents match
        assertEquals(out1.getLength(), out2.getLength());
        // Compare only the valid region [0, getLength()) of each buffer.
        // Comparing the full backing arrays (as before) relied on both
        // buffers growing identically, which the old "Bad test" assertion
        // had to guard against.
        final byte[] b1 = out1.getData();
        final byte[] b2 = out2.getData();
        for (int j = 0; j < out1.getLength(); ++j) {
            assertEquals("Serialized bytes differ at offset " + j, b1[j], b2[j]);
        }
    }
}
Also used : Random(java.util.Random) DataOutputBuffer(org.apache.hadoop.io.DataOutputBuffer)

Example 20 with DataOutputBuffer

Use of org.apache.hadoop.io.DataOutputBuffer in project hadoop by apache.

The class TestDelegationTokenRenewer, method testRenewTokenUsingTokenConfProvidedByApp:

// Test DelegationTokenRenewer uses the tokenConf provided by application
// for token renewal.
@Test
public void testRenewTokenUsingTokenConfProvidedByApp() throws Exception {
    conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
    UserGroupInformation.setConfiguration(conf);
    MockRM rm = new TestSecurityMockRM(conf, null);
    rm.start();
    // Ensure the RM is always stopped; previously it was never shut down,
    // leaking its services across tests.
    try {
        final MockNM nm1 = new MockNM("127.0.0.1:1234", 15120, rm.getResourceTrackerService());
        nm1.registerNode();
        // Create a delegation token owned by user1.
        Text userText1 = new Text("user1");
        DelegationTokenIdentifier dtId1 = new DelegationTokenIdentifier(userText1, new Text("renewer1"), userText1);
        final Token<DelegationTokenIdentifier> token1 = new Token<DelegationTokenIdentifier>(dtId1.getBytes(), "password1".getBytes(), dtId1.getKind(), new Text("service1"));
        Credentials credentials = new Credentials();
        credentials.addToken(userText1, token1);
        // Build the app-supplied configuration that should be used for renewal.
        Configuration appConf = new Configuration(false);
        appConf.set("dfs.nameservices", "mycluster1,mycluster2");
        appConf.set("dfs.namenode.rpc-address.mycluster2.nn1", "123.0.0.1");
        appConf.set("dfs.namenode.rpc-address.mycluster2.nn2", "123.0.0.2");
        appConf.set("dfs.ha.namenodes.mycluster2", "nn1,nn2");
        appConf.set("dfs.client.failover.proxy.provider.mycluster2", "provider");
        // Serialize the token conf for submission with the app.
        DataOutputBuffer dob = new DataOutputBuffer();
        appConf.write(dob);
        ByteBuffer tokenConf = ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
        final int confSize = appConf.size();
        // Submit the app with its credentials and token conf.
        rm.submitApp(credentials, tokenConf);
        // Wait until the renewer has picked up the token with the full
        // app-provided conf, including the specific entry we added.
        GenericTestUtils.waitFor(new Supplier<Boolean>() {

            @Override
            public Boolean get() {
                DelegationTokenToRenew toRenew = rm.getRMContext().getDelegationTokenRenewer().getAllTokens().get(token1);
                // the specific config we added.
                return toRenew != null && toRenew.conf != null && toRenew.conf.size() == confSize && toRenew.conf.get("dfs.namenode.rpc-address.mycluster2.nn1").equals("123.0.0.1");
            }
        }, 200, 10000);
    } finally {
        rm.stop();
    }
}
Also used : RMApp(org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp) Configuration(org.apache.hadoop.conf.Configuration) YarnConfiguration(org.apache.hadoop.yarn.conf.YarnConfiguration) DelegationTokenIdentifier(org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier) MockNM(org.apache.hadoop.yarn.server.resourcemanager.MockNM) MockRM(org.apache.hadoop.yarn.server.resourcemanager.MockRM) TestSecurityMockRM(org.apache.hadoop.yarn.server.resourcemanager.TestRMRestart.TestSecurityMockRM) Text(org.apache.hadoop.io.Text) InvalidToken(org.apache.hadoop.security.token.SecretManager.InvalidToken) Token(org.apache.hadoop.security.token.Token) DelegationTokenToRenew(org.apache.hadoop.yarn.server.resourcemanager.security.DelegationTokenRenewer.DelegationTokenToRenew) ByteBuffer(java.nio.ByteBuffer) DataInputByteBuffer(org.apache.hadoop.io.DataInputByteBuffer) TestSecurityMockRM(org.apache.hadoop.yarn.server.resourcemanager.TestRMRestart.TestSecurityMockRM) DataOutputBuffer(org.apache.hadoop.io.DataOutputBuffer) AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) Credentials(org.apache.hadoop.security.Credentials) Test(org.junit.Test)

Aggregations

DataOutputBuffer (org.apache.hadoop.io.DataOutputBuffer)196 Test (org.junit.Test)66 ByteBuffer (java.nio.ByteBuffer)54 Credentials (org.apache.hadoop.security.Credentials)49 IOException (java.io.IOException)47 DataInputBuffer (org.apache.hadoop.io.DataInputBuffer)46 Token (org.apache.hadoop.security.token.Token)36 Configuration (org.apache.hadoop.conf.Configuration)35 Path (org.apache.hadoop.fs.Path)31 HashMap (java.util.HashMap)28 ApplicationId (org.apache.hadoop.yarn.api.records.ApplicationId)26 Text (org.apache.hadoop.io.Text)25 YarnConfiguration (org.apache.hadoop.yarn.conf.YarnConfiguration)24 ArrayList (java.util.ArrayList)22 ContainerLaunchContext (org.apache.hadoop.yarn.api.records.ContainerLaunchContext)22 DataInputStream (java.io.DataInputStream)21 LocalResource (org.apache.hadoop.yarn.api.records.LocalResource)19 Map (java.util.Map)16 Random (java.util.Random)15 FileSystem (org.apache.hadoop.fs.FileSystem)15