Example 6 with TestHdfs

Use of org.apache.hadoop.test.TestHdfs in project hadoop by apache, from the class TestHttpFSWithKerberos, method testInvalidadHttpFSAccess.

@Test
@TestDir
@TestJetty
@TestHdfs
public void testInvalidadHttpFSAccess() throws Exception {
    createHttpFSServer();
    URL url = new URL(TestJettyHelper.getJettyURL(), "/webhdfs/v1/?op=GETHOMEDIRECTORY");
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    Assert.assertEquals(conn.getResponseCode(), HttpURLConnection.HTTP_UNAUTHORIZED);
}
Also used: HttpURLConnection (java.net.HttpURLConnection), URL (java.net.URL), AuthenticatedURL (org.apache.hadoop.security.authentication.client.AuthenticatedURL), TestJetty (org.apache.hadoop.test.TestJetty), TestHdfs (org.apache.hadoop.test.TestHdfs), TestDir (org.apache.hadoop.test.TestDir), Test (org.junit.Test)
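
For contrast, the authenticated counterpart of the same request is useful to see. The following is a sketch, not part of the Hadoop sources: it reuses the KerberosTestUtils and TestJettyHelper fixtures that appear in the next example, and AuthenticatedURL to perform the SPNEGO handshake.

// Sketch (not from the original test): the same GETHOMEDIRECTORY call,
// issued inside the client's Kerberos login context so it succeeds.
KerberosTestUtils.doAsClient(new Callable<Void>() {

    @Override
    public Void call() throws Exception {
        URL url = new URL(TestJettyHelper.getJettyURL(), "/webhdfs/v1/?op=GETHOMEDIRECTORY");
        AuthenticatedURL.Token token = new AuthenticatedURL.Token();
        // AuthenticatedURL negotiates SPNEGO before handing back the connection
        HttpURLConnection conn = new AuthenticatedURL().openConnection(url, token);
        Assert.assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
        return null;
    }
});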

Example 7 with TestHdfs

Use of org.apache.hadoop.test.TestHdfs in project hadoop by apache, from the class TestHttpFSWithKerberos, method testDelegationTokenHttpFSAccess.

@Test
@TestDir
@TestJetty
@TestHdfs
public void testDelegationTokenHttpFSAccess() throws Exception {
    createHttpFSServer();
    KerberosTestUtils.doAsClient(new Callable<Void>() {

        @Override
        public Void call() throws Exception {
            //get a delegation token via SPNEGO authentication
            URL url = new URL(TestJettyHelper.getJettyURL(), "/webhdfs/v1/?op=GETDELEGATIONTOKEN");
            AuthenticatedURL aUrl = new AuthenticatedURL();
            AuthenticatedURL.Token aToken = new AuthenticatedURL.Token();
            HttpURLConnection conn = aUrl.openConnection(url, aToken);
            Assert.assertEquals(conn.getResponseCode(), HttpURLConnection.HTTP_OK);
            JSONObject json = (JSONObject) new JSONParser().parse(new InputStreamReader(conn.getInputStream()));
            json = (JSONObject) json.get(DelegationTokenAuthenticator.DELEGATION_TOKEN_JSON);
            String tokenStr = (String) json.get(DelegationTokenAuthenticator.DELEGATION_TOKEN_URL_STRING_JSON);
            //access httpfs using the delegation token
            url = new URL(TestJettyHelper.getJettyURL(), "/webhdfs/v1/?op=GETHOMEDIRECTORY&delegation=" + tokenStr);
            conn = (HttpURLConnection) url.openConnection();
            Assert.assertEquals(conn.getResponseCode(), HttpURLConnection.HTTP_OK);
            //try to renew the delegation token without SPNEGO credentials
            url = new URL(TestJettyHelper.getJettyURL(), "/webhdfs/v1/?op=RENEWDELEGATIONTOKEN&token=" + tokenStr);
            conn = (HttpURLConnection) url.openConnection();
            conn.setRequestMethod("PUT");
            Assert.assertEquals(conn.getResponseCode(), HttpURLConnection.HTTP_UNAUTHORIZED);
            //renew the delegation token with SPNEGO credentials
            url = new URL(TestJettyHelper.getJettyURL(), "/webhdfs/v1/?op=RENEWDELEGATIONTOKEN&token=" + tokenStr);
            conn = aUrl.openConnection(url, aToken);
            conn.setRequestMethod("PUT");
            Assert.assertEquals(conn.getResponseCode(), HttpURLConnection.HTTP_OK);
            //cancel delegation token, no need for SPNEGO credentials
            url = new URL(TestJettyHelper.getJettyURL(), "/webhdfs/v1/?op=CANCELDELEGATIONTOKEN&token=" + tokenStr);
            conn = (HttpURLConnection) url.openConnection();
            conn.setRequestMethod("PUT");
            Assert.assertEquals(conn.getResponseCode(), HttpURLConnection.HTTP_OK);
            //try to access httpfs with the canceled delegation token
            url = new URL(TestJettyHelper.getJettyURL(), "/webhdfs/v1/?op=GETHOMEDIRECTORY&delegation=" + tokenStr);
            conn = (HttpURLConnection) url.openConnection();
            Assert.assertEquals(conn.getResponseCode(), HttpURLConnection.HTTP_UNAUTHORIZED);
            return null;
        }
    });
}
Also used: InputStreamReader (java.io.InputStreamReader), Token (org.apache.hadoop.security.token.Token), URL (java.net.URL), AuthenticatedURL (org.apache.hadoop.security.authentication.client.AuthenticatedURL), HttpURLConnection (java.net.HttpURLConnection), JSONObject (org.json.simple.JSONObject), JSONParser (org.json.simple.parser.JSONParser), TestJetty (org.apache.hadoop.test.TestJetty), TestHdfs (org.apache.hadoop.test.TestHdfs), TestDir (org.apache.hadoop.test.TestDir), Test (org.junit.Test)
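
hadoop-common also ships a higher-level client, DelegationTokenAuthenticatedURL (in org.apache.hadoop.security.token.delegation.web), that drives the same acquire/renew/cancel cycle without hand-building the op= query strings. The following is a sketch rather than part of the original test; the method names are from that class's stock API, but verify the exact signatures against your Hadoop version.

// Sketch (not from the original test): the delegation token lifecycle via
// DelegationTokenAuthenticatedURL. The token obtained by getDelegationToken
// is stored in aToken and reused by the renew and cancel calls.
KerberosTestUtils.doAsClient(new Callable<Void>() {

    @Override
    public Void call() throws Exception {
        URL url = new URL(TestJettyHelper.getJettyURL(), "/webhdfs/v1/");
        DelegationTokenAuthenticatedURL aUrl = new DelegationTokenAuthenticatedURL();
        DelegationTokenAuthenticatedURL.Token aToken = new DelegationTokenAuthenticatedURL.Token();
        aUrl.getDelegationToken(url, aToken, "client");  // SPNEGO-authenticated acquire
        aUrl.renewDelegationToken(url, aToken);          // renewal also requires SPNEGO
        aUrl.cancelDelegationToken(url, aToken);         // cancellation does not
        return null;
    }
});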

Example 8 with TestHdfs

Use of org.apache.hadoop.test.TestHdfs in project hadoop by apache, from the class TestFileSystemAccessService, method fileSystemCache.

@Test
@TestDir
@TestHdfs
public void fileSystemCache() throws Exception {
    String dir = TestDirHelper.getTestDir().getAbsolutePath();
    String services = StringUtils.join(",", Arrays.asList(InstrumentationService.class.getName(), SchedulerService.class.getName(), FileSystemAccessService.class.getName()));
    Configuration hadoopConf = new Configuration(false);
    hadoopConf.set(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY, TestHdfsHelper.getHdfsConf().get(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY));
    createHadoopConf(hadoopConf);
    Configuration conf = new Configuration(false);
    conf.set("server.services", services);
    conf.set("server.hadoop.filesystem.cache.purge.frequency", "1");
    conf.set("server.hadoop.filesystem.cache.purge.timeout", "1");
    Server server = new Server("server", dir, dir, dir, dir, conf);
    try {
        server.init();
        FileSystemAccess hadoop = server.get(FileSystemAccess.class);
        FileSystem fs1 = hadoop.createFileSystem("u", hadoop.getFileSystemConfiguration());
        Assert.assertNotNull(fs1);
        fs1.mkdirs(new Path("/tmp/foo1"));
        hadoop.releaseFileSystem(fs1);
        //still usable after release because the instance stays in the cache
        fs1.mkdirs(new Path("/tmp/foo2"));
        FileSystem fs2 = hadoop.createFileSystem("u", hadoop.getFileSystemConfiguration());
        //should be the same instance because of caching
        Assert.assertEquals(fs1, fs2);
        Thread.sleep(4 * 1000);
        //still around because the lease count is 1 (fs2 is still checked out)
        fs1.mkdirs(new Path("/tmp/foo2"));
        Thread.sleep(4 * 1000);
        //still around because the lease count is 1 (fs2 is still checked out)
        fs2.mkdirs(new Path("/tmp/foo"));
        hadoop.releaseFileSystem(fs2);
        Thread.sleep(4 * 1000);
        //should be gone: the lease count is 0 and the purge timeout has elapsed
        try {
            fs2.mkdirs(new Path("/tmp/foo"));
            Assert.fail();
        } catch (IOException ex) {
            //expected: the purger closed the FileSystem once its lease count hit 0
        } catch (Exception ex) {
            Assert.fail();
        }
    } finally {
        server.destroy();
    }
}
Also used: FileSystemAccess (org.apache.hadoop.lib.service.FileSystemAccess), Path (org.apache.hadoop.fs.Path), Configuration (org.apache.hadoop.conf.Configuration), Server (org.apache.hadoop.lib.server.Server), FileSystem (org.apache.hadoop.fs.FileSystem), IOException (java.io.IOException), TestException (org.apache.hadoop.test.TestException), FileSystemAccessException (org.apache.hadoop.lib.service.FileSystemAccessException), ServiceException (org.apache.hadoop.lib.server.ServiceException), TestHdfs (org.apache.hadoop.test.TestHdfs), TestDir (org.apache.hadoop.test.TestDir), Test (org.junit.Test)
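
The timing in this test follows from the two cache settings: server.hadoop.filesystem.cache.purge.frequency is how often the purger runs (in seconds) and server.hadoop.filesystem.cache.purge.timeout is how long an unreferenced FileSystem may linger before it is closed. A handle is closed only once its lease count reaches zero, which is why the mkdirs calls keep succeeding while fs1 or fs2 is still checked out. The execute() form used in the next example releases the lease automatically; a minimal sketch, assuming the same server setup as in this test:

// Sketch (not from the original test): FileSystemAccess.execute() scopes the
// cached FileSystem to the callback, so the lease is released even on error.
FileSystemAccess hadoop = server.get(FileSystemAccess.class);
hadoop.execute("u", hadoop.getFileSystemConfiguration(), new FileSystemAccess.FileSystemExecutor<Void>() {

    @Override
    public Void execute(FileSystem fs) throws IOException {
        fs.mkdirs(new Path("/tmp/foo"));
        return null;  // the lease on fs is released when this returns
    }
});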

Example 9 with TestHdfs

Use of org.apache.hadoop.test.TestHdfs in project hadoop by apache, from the class TestFileSystemAccessService, method fileSystemExecutorNoNameNode.

@Test
@TestException(exception = FileSystemAccessException.class, msgRegExp = "H06.*")
@TestDir
@TestHdfs
public void fileSystemExecutorNoNameNode() throws Exception {
    String dir = TestDirHelper.getTestDir().getAbsolutePath();
    String services = StringUtils.join(",", Arrays.asList(InstrumentationService.class.getName(), SchedulerService.class.getName(), FileSystemAccessService.class.getName()));
    Configuration hadoopConf = new Configuration(false);
    hadoopConf.set(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY, TestHdfsHelper.getHdfsConf().get(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY));
    createHadoopConf(hadoopConf);
    Configuration conf = new Configuration(false);
    conf.set("server.services", services);
    Server server = new Server("server", dir, dir, dir, dir, conf);
    server.init();
    FileSystemAccess fsAccess = server.get(FileSystemAccess.class);
    Configuration hdfsConf = fsAccess.getFileSystemConfiguration();
    hdfsConf.set(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY, "");
    fsAccess.execute("u", hdfsConf, new FileSystemAccess.FileSystemExecutor<Void>() {

        @Override
        public Void execute(FileSystem fs) throws IOException {
            return null;
        }
    });
}
Also used: FileSystemAccess (org.apache.hadoop.lib.service.FileSystemAccess), Configuration (org.apache.hadoop.conf.Configuration), Server (org.apache.hadoop.lib.server.Server), FileSystem (org.apache.hadoop.fs.FileSystem), IOException (java.io.IOException), TestHdfs (org.apache.hadoop.test.TestHdfs), TestException (org.apache.hadoop.test.TestException), TestDir (org.apache.hadoop.test.TestDir), Test (org.junit.Test)
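
The @TestException annotation turns the expected failure into a declarative check: the method must throw FileSystemAccessException with a message matching H06.*, the error code raised here because fs.defaultFS has been blanked out. Written without the annotation, the same expectation looks roughly like this sketch:

// Sketch: the equivalent explicit try/catch for readers not using @TestException.
try {
    fsAccess.execute("u", hdfsConf, new FileSystemAccess.FileSystemExecutor<Void>() {

        @Override
        public Void execute(FileSystem fs) throws IOException {
            return null;
        }
    });
    Assert.fail("expected FileSystemAccessException with error code H06");
} catch (FileSystemAccessException ex) {
    Assert.assertTrue(ex.getMessage().matches("H06.*"));
}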

Example 10 with TestHdfs

Use of org.apache.hadoop.test.TestHdfs in project hadoop by apache, from the class BaseTestHttpFSWith, method testOperationDoAs.

@Test
@TestDir
@TestJetty
@TestHdfs
public void testOperationDoAs() throws Exception {
    createHttpFSServer();
    UserGroupInformation ugi = UserGroupInformation.createProxyUser(HadoopUsersConfTestHelper.getHadoopUsers()[0], UserGroupInformation.getCurrentUser());
    ugi.doAs(new PrivilegedExceptionAction<Void>() {

        @Override
        public Void run() throws Exception {
            operation(operation);
            return null;
        }
    });
}
Also used: IOException (java.io.IOException), UserGroupInformation (org.apache.hadoop.security.UserGroupInformation), TestJetty (org.apache.hadoop.test.TestJetty), TestHdfs (org.apache.hadoop.test.TestHdfs), TestDir (org.apache.hadoop.test.TestDir), Test (org.junit.Test)
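
The doAs call succeeds only because the server side trusts the real user to impersonate the proxy user. In Hadoop that trust is granted through the standard hadoop.proxyuser.* configuration keys; the sketch below shows the shape of that configuration, with "client" standing in for whatever principal the test helpers actually use:

// Sketch: the impersonation grants a server needs before doAs/proxy-user calls work.
// "client" is an illustrative user name, not one taken from the original test.
Configuration conf = new Configuration(false);
conf.set("hadoop.proxyuser.client.hosts", "*");   // hosts allowed to impersonate
conf.set("hadoop.proxyuser.client.groups", "*");  // groups whose members may be impersonated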

Aggregations

TestDir (org.apache.hadoop.test.TestDir): 21
TestHdfs (org.apache.hadoop.test.TestHdfs): 21
Test (org.junit.Test): 21
TestJetty (org.apache.hadoop.test.TestJetty): 16
FileSystem (org.apache.hadoop.fs.FileSystem): 13
Path (org.apache.hadoop.fs.Path): 12
HttpURLConnection (java.net.HttpURLConnection): 9
URL (java.net.URL): 9
AuthenticatedURL (org.apache.hadoop.security.authentication.client.AuthenticatedURL): 9
IOException (java.io.IOException): 6
InputStreamReader (java.io.InputStreamReader): 5
Configuration (org.apache.hadoop.conf.Configuration): 5
Server (org.apache.hadoop.lib.server.Server): 5
FileSystemAccess (org.apache.hadoop.lib.service.FileSystemAccess): 5
TestException (org.apache.hadoop.test.TestException): 5
ServiceException (org.apache.hadoop.lib.server.ServiceException): 4
FileSystemAccessException (org.apache.hadoop.lib.service.FileSystemAccessException): 4
BufferedReader (java.io.BufferedReader): 3
FileOutputStream (java.io.FileOutputStream): 3
OutputStream (java.io.OutputStream): 3