Search in sources:

Example 1 with LengthParam

Use of org.apache.hadoop.hdfs.web.resources.LengthParam in the Apache Hadoop project.

Source: class TestWebHDFS, method testWebHdfsOffsetAndLength.

@Test
public void testWebHdfsOffsetAndLength() throws Exception {
    MiniDFSCluster cluster = null;
    final Configuration conf = WebHdfsTestUtil.createConf();
    // Read a LENGTH-byte window starting at OFFSET from a 1024-byte file.
    final int OFFSET = 42;
    final int LENGTH = 512;
    final String PATH = "/foo";
    byte[] CONTENTS = new byte[1024];
    RANDOM.nextBytes(CONTENTS);
    try {
        cluster = new MiniDFSCluster.Builder(conf).numDataNodes(1).build();
        final WebHdfsFileSystem fs = WebHdfsTestUtil.getWebHdfsFileSystem(conf, WebHdfsConstants.WEBHDFS_SCHEME);
        // Write the random contents through WebHDFS.
        try (OutputStream os = fs.create(new Path(PATH))) {
            os.write(CONTENTS);
        }
        // Issue a raw HTTP OPEN request with offset/length query parameters
        // against the NameNode; it redirects to the DataNode serving the data.
        InetSocketAddress addr = cluster.getNameNode().getHttpAddress();
        URL url = new URL("http", addr.getHostString(), addr.getPort(), WebHdfsFileSystem.PATH_PREFIX + PATH + "?op=OPEN" + Param.toSortedString("&", new OffsetParam((long) OFFSET), new LengthParam((long) LENGTH)));
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        try {
            conn.setInstanceFollowRedirects(true);
            // The response body must be exactly LENGTH bytes long.
            Assert.assertEquals(LENGTH, conn.getContentLength());
            byte[] subContents = new byte[LENGTH];
            byte[] realContents = new byte[LENGTH];
            System.arraycopy(CONTENTS, OFFSET, subContents, 0, LENGTH);
            // Close the response stream when done; the original leaked it.
            try (java.io.InputStream in = conn.getInputStream()) {
                IOUtils.readFully(in, realContents);
            }
            Assert.assertArrayEquals(subContents, realContents);
        } finally {
            // Release the underlying connection resources.
            conn.disconnect();
        }
    } finally {
        if (cluster != null) {
            cluster.shutdown();
        }
    }
}
Also used : Path(org.apache.hadoop.fs.Path) MiniDFSCluster(org.apache.hadoop.hdfs.MiniDFSCluster) Configuration(org.apache.hadoop.conf.Configuration) HdfsConfiguration(org.apache.hadoop.hdfs.HdfsConfiguration) OffsetParam(org.apache.hadoop.hdfs.web.resources.OffsetParam) InetSocketAddress(java.net.InetSocketAddress) FSDataOutputStream(org.apache.hadoop.fs.FSDataOutputStream) OutputStream(java.io.OutputStream) URL(java.net.URL) HttpURLConnection(java.net.HttpURLConnection) LengthParam(org.apache.hadoop.hdfs.web.resources.LengthParam) Test(org.junit.Test) HttpServerFunctionalTest(org.apache.hadoop.http.HttpServerFunctionalTest)

Example 2 with LengthParam

Use of org.apache.hadoop.hdfs.web.resources.LengthParam in the Apache Hadoop project.

Source: class TestDataNodeUGIProvider, method testUGICacheSecure.

@Test
public void testUGICacheSecure() throws Exception {
    // fake turning on security so api thinks it should use tokens
    SecurityUtil.setAuthenticationMethod(KERBEROS, conf);
    UserGroupInformation.setConfiguration(conf);
    // Build a Kerberos-authenticated proxy-user UGI and install it as the
    // login user so token-based WebHDFS requests are honored.
    UserGroupInformation ugi = UserGroupInformation.createRemoteUser("test-user");
    ugi.setAuthenticationMethod(KERBEROS);
    ugi = UserGroupInformation.createProxyUser("test-proxy-user", ugi);
    UserGroupInformation.setLoginUser(ugi);
    List<Token<DelegationTokenIdentifier>> tokens = Lists.newArrayList();
    getWebHdfsFileSystem(ugi, conf, tokens);
    // Two OPEN request URIs that differ only in their delegation token.
    String uri1 = WebHdfsFileSystem.PATH_PREFIX + PATH + "?op=OPEN" + Param.toSortedString("&", new NamenodeAddressParam("127.0.0.1:1010"), new OffsetParam((long) OFFSET), new LengthParam((long) LENGTH), new DelegationParam(tokens.get(0).encodeToUrlString()));
    String uri2 = WebHdfsFileSystem.PATH_PREFIX + PATH + "?op=OPEN" + Param.toSortedString("&", new NamenodeAddressParam("127.0.0.1:1010"), new OffsetParam((long) OFFSET), new LengthParam((long) LENGTH), new DelegationParam(tokens.get(1).encodeToUrlString()));
    // Same provider (same token) must serve the cached UGI twice.
    DataNodeUGIProvider ugiProvider1 = new DataNodeUGIProvider(new ParameterParser(new QueryStringDecoder(URI.create(uri1)), conf));
    UserGroupInformation ugi11 = ugiProvider1.ugi();
    UserGroupInformation ugi12 = ugiProvider1.ugi();
    Assert.assertEquals("With UGI cache, two UGIs returned by the same token should be same", ugi11, ugi12);
    DataNodeUGIProvider ugiProvider2 = new DataNodeUGIProvider(new ParameterParser(new QueryStringDecoder(URI.create(uri2)), conf));
    UserGroupInformation ugi21 = ugiProvider2.ugi();
    UserGroupInformation ugi22 = ugiProvider2.ugi();
    Assert.assertEquals("With UGI cache, two UGIs returned by the same token should be same", ugi21, ugi22);
    // Different tokens must map to distinct UGI instances.
    Assert.assertNotEquals("With UGI cache, two UGIs for the different token should not be same", ugi11, ugi22);
    // After eviction the providers must create fresh UGI instances.
    ugiProvider2.clearCache();
    awaitCacheEmptyDueToExpiration();
    ugi12 = ugiProvider1.ugi();
    ugi22 = ugiProvider2.ugi();
    String msg = "With cache eviction, two UGIs returned" + " by the same token should not be same";
    Assert.assertNotEquals(msg, ugi11, ugi12);
    Assert.assertNotEquals(msg, ugi21, ugi22);
    Assert.assertNotEquals("With UGI cache, two UGIs for the different token should not be same", ugi11, ugi22);
}
Also used : QueryStringDecoder(io.netty.handler.codec.http.QueryStringDecoder) NamenodeAddressParam(org.apache.hadoop.hdfs.web.resources.NamenodeAddressParam) OffsetParam(org.apache.hadoop.hdfs.web.resources.OffsetParam) LengthParam(org.apache.hadoop.hdfs.web.resources.LengthParam) Token(org.apache.hadoop.security.token.Token) DelegationParam(org.apache.hadoop.hdfs.web.resources.DelegationParam) UserGroupInformation(org.apache.hadoop.security.UserGroupInformation) Test(org.junit.Test)

Example 3 with LengthParam

Use of org.apache.hadoop.hdfs.web.resources.LengthParam in the Apache Hadoop project.

Source: class TestDataNodeUGIProvider, method testUGICacheInSecure.

@Test
public void testUGICacheInSecure() throws Exception {
    // Two OPEN request URIs that differ only in the requesting user.
    String uri1 = WebHdfsFileSystem.PATH_PREFIX + PATH + "?op=OPEN" + Param.toSortedString("&", new OffsetParam((long) OFFSET), new LengthParam((long) LENGTH), new UserParam("root"));
    String uri2 = WebHdfsFileSystem.PATH_PREFIX + PATH + "?op=OPEN" + Param.toSortedString("&", new OffsetParam((long) OFFSET), new LengthParam((long) LENGTH), new UserParam("hdfs"));
    // Same provider (same user) must serve the cached UGI twice.
    DataNodeUGIProvider ugiProvider1 = new DataNodeUGIProvider(new ParameterParser(new QueryStringDecoder(URI.create(uri1)), conf));
    UserGroupInformation ugi11 = ugiProvider1.ugi();
    UserGroupInformation ugi12 = ugiProvider1.ugi();
    Assert.assertEquals("With UGI cache, two UGIs for the same user should be same", ugi11, ugi12);
    DataNodeUGIProvider ugiProvider2 = new DataNodeUGIProvider(new ParameterParser(new QueryStringDecoder(URI.create(uri2)), conf));
    UserGroupInformation ugi21 = ugiProvider2.ugi();
    UserGroupInformation ugi22 = ugiProvider2.ugi();
    Assert.assertEquals("With UGI cache, two UGIs for the same user should be same", ugi21, ugi22);
    // Different users must map to distinct UGI instances.
    Assert.assertNotEquals("With UGI cache, two UGIs for the different user should not be same", ugi11, ugi22);
    // After cache expiration, fresh UGI instances must be created.
    awaitCacheEmptyDueToExpiration();
    ugi12 = ugiProvider1.ugi();
    ugi22 = ugiProvider2.ugi();
    String msg = "With cache eviction, two UGIs returned by" + " the same user should not be same";
    Assert.assertNotEquals(msg, ugi11, ugi12);
    Assert.assertNotEquals(msg, ugi21, ugi22);
    Assert.assertNotEquals("With UGI cache, two UGIs for the different user should not be same", ugi11, ugi22);
}
Also used : QueryStringDecoder(io.netty.handler.codec.http.QueryStringDecoder) UserParam(org.apache.hadoop.hdfs.web.resources.UserParam) OffsetParam(org.apache.hadoop.hdfs.web.resources.OffsetParam) LengthParam(org.apache.hadoop.hdfs.web.resources.LengthParam) UserGroupInformation(org.apache.hadoop.security.UserGroupInformation) Test(org.junit.Test)

Aggregations

LengthParam (org.apache.hadoop.hdfs.web.resources.LengthParam)3 OffsetParam (org.apache.hadoop.hdfs.web.resources.OffsetParam)3 Test (org.junit.Test)3 QueryStringDecoder (io.netty.handler.codec.http.QueryStringDecoder)2 UserGroupInformation (org.apache.hadoop.security.UserGroupInformation)2 OutputStream (java.io.OutputStream)1 HttpURLConnection (java.net.HttpURLConnection)1 InetSocketAddress (java.net.InetSocketAddress)1 URL (java.net.URL)1 Configuration (org.apache.hadoop.conf.Configuration)1 FSDataOutputStream (org.apache.hadoop.fs.FSDataOutputStream)1 Path (org.apache.hadoop.fs.Path)1 HdfsConfiguration (org.apache.hadoop.hdfs.HdfsConfiguration)1 MiniDFSCluster (org.apache.hadoop.hdfs.MiniDFSCluster)1 DelegationParam (org.apache.hadoop.hdfs.web.resources.DelegationParam)1 NamenodeAddressParam (org.apache.hadoop.hdfs.web.resources.NamenodeAddressParam)1 UserParam (org.apache.hadoop.hdfs.web.resources.UserParam)1 HttpServerFunctionalTest (org.apache.hadoop.http.HttpServerFunctionalTest)1 Token (org.apache.hadoop.security.token.Token)1