Example 1 with FileSystemAccess

Use of org.apache.hadoop.lib.service.FileSystemAccess in project hadoop by apache.

The class TestFileSystemAccessService, method fileSystemCache.

@Test
@TestDir
@TestHdfs
public void fileSystemCache() throws Exception {
    String dir = TestDirHelper.getTestDir().getAbsolutePath();
    String services = StringUtils.join(",", Arrays.asList(InstrumentationService.class.getName(), SchedulerService.class.getName(), FileSystemAccessService.class.getName()));
    Configuration hadoopConf = new Configuration(false);
    hadoopConf.set(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY, TestHdfsHelper.getHdfsConf().get(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY));
    createHadoopConf(hadoopConf);
    Configuration conf = new Configuration(false);
    conf.set("server.services", services);
    conf.set("server.hadoop.filesystem.cache.purge.frequency", "1");
    conf.set("server.hadoop.filesystem.cache.purge.timeout", "1");
    Server server = new Server("server", dir, dir, dir, dir, conf);
    try {
        server.init();
        FileSystemAccess hadoop = server.get(FileSystemAccess.class);
        FileSystem fs1 = hadoop.createFileSystem("u", hadoop.getFileSystemConfiguration());
        Assert.assertNotNull(fs1);
        fs1.mkdirs(new Path("/tmp/foo1"));
        hadoop.releaseFileSystem(fs1);
        //still around because of caching
        fs1.mkdirs(new Path("/tmp/foo2"));
        FileSystem fs2 = hadoop.createFileSystem("u", hadoop.getFileSystemConfiguration());
        //should be the same instance because of caching
        Assert.assertEquals(fs1, fs2);
        Thread.sleep(4 * 1000);
        //still around because the lease count is 1 (fs2 is still out)
        fs1.mkdirs(new Path("/tmp/foo2"));
        Thread.sleep(4 * 1000);
        //still around because the lease count is 1 (fs2 is still out)
        fs2.mkdirs(new Path("/tmp/foo"));
        hadoop.releaseFileSystem(fs2);
        Thread.sleep(4 * 1000);
        //should no longer be around because the lease count is 0
        try {
            fs2.mkdirs(new Path("/tmp/foo"));
            Assert.fail();
        } catch (IOException ex) {
            // expected: the cached FileSystem has been purged and closed
        } catch (Exception ex) {
            Assert.fail();
        }
    } finally {
        server.destroy();
    }
}
Also used : FileSystemAccess(org.apache.hadoop.lib.service.FileSystemAccess) Path(org.apache.hadoop.fs.Path) Configuration(org.apache.hadoop.conf.Configuration) Server(org.apache.hadoop.lib.server.Server) FileSystem(org.apache.hadoop.fs.FileSystem) IOException(java.io.IOException) TestException(org.apache.hadoop.test.TestException) FileSystemAccessException(org.apache.hadoop.lib.service.FileSystemAccessException) ServiceException(org.apache.hadoop.lib.server.ServiceException) TestHdfs(org.apache.hadoop.test.TestHdfs) TestDir(org.apache.hadoop.test.TestDir) Test(org.junit.Test)
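
The caching behaviour exercised above follows a plain acquire/release contract. A minimal sketch of that contract, assuming the same Server and user name "u" as in the test (the path /tmp/example is a placeholder): every createFileSystem() call is paired with a releaseFileSystem() call so the lease count can drop back to zero and the purger can eventually close the instance.

FileSystemAccess hadoop = server.get(FileSystemAccess.class);
FileSystem fs = hadoop.createFileSystem("u", hadoop.getFileSystemConfiguration());
try {
    fs.mkdirs(new Path("/tmp/example"));
} finally {
    // decrements the lease count; the cache purger closes the FileSystem once it reaches 0
    hadoop.releaseFileSystem(fs);
}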

Example 2 with FileSystemAccess

Use of org.apache.hadoop.lib.service.FileSystemAccess in project hadoop by apache.

The class TestFileSystemAccessService, method fileSystemExecutorNoNameNode.

@Test
@TestException(exception = FileSystemAccessException.class, msgRegExp = "H06.*")
@TestDir
@TestHdfs
public void fileSystemExecutorNoNameNode() throws Exception {
    String dir = TestDirHelper.getTestDir().getAbsolutePath();
    String services = StringUtils.join(",", Arrays.asList(InstrumentationService.class.getName(), SchedulerService.class.getName(), FileSystemAccessService.class.getName()));
    Configuration hadoopConf = new Configuration(false);
    hadoopConf.set(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY, TestHdfsHelper.getHdfsConf().get(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY));
    createHadoopConf(hadoopConf);
    Configuration conf = new Configuration(false);
    conf.set("server.services", services);
    Server server = new Server("server", dir, dir, dir, dir, conf);
    server.init();
    FileSystemAccess fsAccess = server.get(FileSystemAccess.class);
    Configuration hdfsConf = fsAccess.getFileSystemConfiguration();
    hdfsConf.set(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY, "");
    fsAccess.execute("u", hdfsConf, new FileSystemAccess.FileSystemExecutor<Void>() {

        @Override
        public Void execute(FileSystem fs) throws IOException {
            return null;
        }
    });
}
Also used : FileSystemAccess(org.apache.hadoop.lib.service.FileSystemAccess) Configuration(org.apache.hadoop.conf.Configuration) Server(org.apache.hadoop.lib.server.Server) FileSystem(org.apache.hadoop.fs.FileSystem) IOException(java.io.IOException) TestHdfs(org.apache.hadoop.test.TestHdfs) TestException(org.apache.hadoop.test.TestException) TestDir(org.apache.hadoop.test.TestDir) Test(org.junit.Test)
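
For contrast, a hedged happy-path sketch (hypothetical, not part of the test class): when the configuration obtained from getFileSystemConfiguration() keeps its fs.defaultFS value, the same execute() call runs the executor against the test NameNode instead of failing with H06.

FileSystemAccess fsAccess = server.get(FileSystemAccess.class);
Boolean rootExists = fsAccess.execute("u", fsAccess.getFileSystemConfiguration(),
    new FileSystemAccess.FileSystemExecutor<Boolean>() {
        @Override
        public Boolean execute(FileSystem fs) throws IOException {
            // the FileSystem is created for user "u" and released when execute() returns
            return fs.exists(new Path("/"));
        }
    });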

Example 3 with FileSystemAccess

Use of org.apache.hadoop.lib.service.FileSystemAccess in project hadoop by apache.

The class HttpFSServer, method createFileSystem.

/**
   * Returns a filesystem instance. The filesystem instance is wired for release at the completion of
   * the current Servlet request via the {@link FileSystemReleaseFilter}.
   * <p>
   * If a do-as user is specified, the current user must be a valid proxyuser, otherwise an
   * <code>AccessControlException</code> will be thrown.
   *
   * @param ugi principal for whom the filesystem instance is created.
   *
   * @return a filesystem for the specified user or do-as user.
   *
   * @throws IOException thrown if an IO error occurs. Thrown exceptions are
   * handled by {@link HttpFSExceptionProvider}.
   * @throws FileSystemAccessException thrown if a FileSystemAccess related error occurs. Thrown
   * exceptions are handled by {@link HttpFSExceptionProvider}.
   */
private FileSystem createFileSystem(UserGroupInformation ugi) throws IOException, FileSystemAccessException {
    String hadoopUser = ugi.getShortUserName();
    FileSystemAccess fsAccess = HttpFSServerWebApp.get().get(FileSystemAccess.class);
    Configuration conf = HttpFSServerWebApp.get().get(FileSystemAccess.class).getFileSystemConfiguration();
    FileSystem fs = fsAccess.createFileSystem(hadoopUser, conf);
    FileSystemReleaseFilter.setFileSystem(fs);
    return fs;
}
Also used : FileSystemAccess(org.apache.hadoop.lib.service.FileSystemAccess) Configuration(org.apache.hadoop.conf.Configuration) FileSystem(org.apache.hadoop.fs.FileSystem) HttpFSFileSystem(org.apache.hadoop.fs.http.client.HttpFSFileSystem)
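
A hedged sketch of a call site (a hypothetical handler, not part of HttpFSServer; it assumes the usual javax.ws.rs.core.Response and org.apache.hadoop.fs.FileStatus imports): since FileSystemReleaseFilter releases the instance when the Servlet request completes, the handler never closes the FileSystem itself.

private Response listStatus(UserGroupInformation ugi, String path)
    throws IOException, FileSystemAccessException {
    // the returned FileSystem is released by FileSystemReleaseFilter at the end of the request
    FileSystem fs = createFileSystem(ugi);
    FileStatus[] statuses = fs.listStatus(new Path(path));
    return Response.ok(statuses.length + " entries under " + path).build();
}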

Example 4 with FileSystemAccess

Use of org.apache.hadoop.lib.service.FileSystemAccess in project hadoop by apache.

The class HttpFSServer, method fsExecute.

/**
   * Executes a {@link FileSystemAccess.FileSystemExecutor} using a filesystem for the effective
   * user.
   *
   * @param ugi user making the request.
   * @param executor FileSystemExecutor to execute.
   *
   * @return FileSystemExecutor response
   *
   * @throws IOException thrown if an IO error occurs.
   * @throws FileSystemAccessException thrown if a FileSystemAccess related error occurs. Thrown
   * exceptions are handled by {@link HttpFSExceptionProvider}.
   */
private <T> T fsExecute(UserGroupInformation ugi, FileSystemAccess.FileSystemExecutor<T> executor) throws IOException, FileSystemAccessException {
    FileSystemAccess fsAccess = HttpFSServerWebApp.get().get(FileSystemAccess.class);
    Configuration conf = HttpFSServerWebApp.get().get(FileSystemAccess.class).getFileSystemConfiguration();
    return fsAccess.execute(ugi.getShortUserName(), conf, executor);
}
Also used : FileSystemAccess(org.apache.hadoop.lib.service.FileSystemAccess) Configuration(org.apache.hadoop.conf.Configuration)
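
A hedged usage sketch (hypothetical call site, not taken from HttpFSServer): the effective user's FileSystem is created by FileSystemAccess, handed to the executor, and released again when execute() returns or throws, so the caller only supplies the operation.

boolean exists = fsExecute(ugi, new FileSystemAccess.FileSystemExecutor<Boolean>() {
    @Override
    public Boolean execute(FileSystem fs) throws IOException {
        // runs against a FileSystem owned and released by FileSystemAccess
        return fs.exists(new Path("/tmp/foo"));
    }
});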

Example 5 with FileSystemAccess

Use of org.apache.hadoop.lib.service.FileSystemAccess in project hadoop by apache.

The class TestFileSystemAccessService, method fileSystemExecutorException.

@Test
@TestDir
@TestHdfs
public void fileSystemExecutorException() throws Exception {
    String dir = TestDirHelper.getTestDir().getAbsolutePath();
    String services = StringUtils.join(",", Arrays.asList(InstrumentationService.class.getName(), SchedulerService.class.getName(), FileSystemAccessService.class.getName()));
    Configuration hadoopConf = new Configuration(false);
    hadoopConf.set(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY, TestHdfsHelper.getHdfsConf().get(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY));
    createHadoopConf(hadoopConf);
    Configuration conf = new Configuration(false);
    conf.set("server.services", services);
    conf.set("server.hadoop.filesystem.cache.purge.timeout", "0");
    Server server = new Server("server", dir, dir, dir, dir, conf);
    server.init();
    FileSystemAccess hadoop = server.get(FileSystemAccess.class);
    final FileSystem[] fsa = new FileSystem[1];
    try {
        hadoop.execute("u", hadoop.getFileSystemConfiguration(), new FileSystemAccess.FileSystemExecutor<Void>() {

            @Override
            public Void execute(FileSystem fs) throws IOException {
                fsa[0] = fs;
                throw new IOException();
            }
        });
        Assert.fail();
    } catch (FileSystemAccessException ex) {
        Assert.assertEquals(ex.getError(), FileSystemAccessException.ERROR.H03);
    } catch (Exception ex) {
        Assert.fail();
    }
    try {
        fsa[0].mkdirs(new Path("/tmp/foo"));
        Assert.fail();
    } catch (IOException ex) {
        // expected: the FileSystem handed to the executor was released and closed
    } catch (Exception ex) {
        Assert.fail();
    }
    server.destroy();
}
Also used : Path(org.apache.hadoop.fs.Path) Configuration(org.apache.hadoop.conf.Configuration) Server(org.apache.hadoop.lib.server.Server) IOException(java.io.IOException) TestException(org.apache.hadoop.test.TestException) FileSystemAccessException(org.apache.hadoop.lib.service.FileSystemAccessException) ServiceException(org.apache.hadoop.lib.server.ServiceException) FileSystemAccess(org.apache.hadoop.lib.service.FileSystemAccess) FileSystem(org.apache.hadoop.fs.FileSystem) TestHdfs(org.apache.hadoop.test.TestHdfs) TestDir(org.apache.hadoop.test.TestDir) Test(org.junit.Test)
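
The wrapping the test asserts suggests a caller-side pattern along these lines (a hedged sketch; hadoop and executor are assumed to be the FileSystemAccess service and an executor as in the test above): check getError() to recognize H03, the code raised when the executor itself throws, and do not hold on to the FileSystem passed into the executor, since it is released even on failure.

try {
    hadoop.execute("u", hadoop.getFileSystemConfiguration(), executor);
} catch (FileSystemAccessException ex) {
    if (ex.getError() == FileSystemAccessException.ERROR.H03) {
        // H03: the executor's own exception was wrapped by FileSystemAccess
        System.err.println("executor failed: " + ex.getMessage());
    }
    throw ex;
}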

Aggregations

Configuration (org.apache.hadoop.conf.Configuration): 7 usages
FileSystemAccess (org.apache.hadoop.lib.service.FileSystemAccess): 7 usages
FileSystem (org.apache.hadoop.fs.FileSystem): 6 usages
IOException (java.io.IOException): 5 usages
Server (org.apache.hadoop.lib.server.Server): 5 usages
TestDir (org.apache.hadoop.test.TestDir): 5 usages
TestException (org.apache.hadoop.test.TestException): 5 usages
TestHdfs (org.apache.hadoop.test.TestHdfs): 5 usages
Test (org.junit.Test): 5 usages
Path (org.apache.hadoop.fs.Path): 4 usages
ServiceException (org.apache.hadoop.lib.server.ServiceException): 4 usages
FileSystemAccessException (org.apache.hadoop.lib.service.FileSystemAccessException): 4 usages
HttpFSFileSystem (org.apache.hadoop.fs.http.client.HttpFSFileSystem): 1 usage