Example 16 with TestHdfs

Use of org.apache.hadoop.test.TestHdfs in project hadoop by apache.

From class TestHttpFSServer, method testDelegationTokenOperations.

@Test
@TestDir
@TestJetty
@TestHdfs
public void testDelegationTokenOperations() throws Exception {
    createHttpFSServer(true);
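    // Without authentication, the request is rejected with 401.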
    URL url = new URL(TestJettyHelper.getJettyURL(), "/webhdfs/v1/?op=GETHOMEDIRECTORY");
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    Assert.assertEquals(HttpURLConnection.HTTP_UNAUTHORIZED, conn.getResponseCode());
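    // Build a pseudo authentication token and sign it with the same "secret" the server is configured with,
    // so it can be presented as a valid authentication cookie.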
    AuthenticationToken token = new AuthenticationToken("u", "p", new KerberosDelegationTokenAuthenticationHandler().getType());
    token.setExpires(System.currentTimeMillis() + 100000000);
    SignerSecretProvider secretProvider = StringSignerSecretProviderCreator.newStringSignerSecretProvider();
    Properties secretProviderProps = new Properties();
    secretProviderProps.setProperty(AuthenticationFilter.SIGNATURE_SECRET, "secret");
    secretProvider.init(secretProviderProps, null, -1);
    Signer signer = new Signer(secretProvider);
    String tokenSigned = signer.sign(token.toString());
    url = new URL(TestJettyHelper.getJettyURL(), "/webhdfs/v1/?op=GETHOMEDIRECTORY");
    conn = (HttpURLConnection) url.openConnection();
    conn.setRequestProperty("Cookie", AuthenticatedURL.AUTH_COOKIE + "=" + tokenSigned);
    Assert.assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
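    // With the signed cookie, request a delegation token and extract it from the JSON response.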
    url = new URL(TestJettyHelper.getJettyURL(), "/webhdfs/v1/?op=GETDELEGATIONTOKEN");
    conn = (HttpURLConnection) url.openConnection();
    conn.setRequestProperty("Cookie", AuthenticatedURL.AUTH_COOKIE + "=" + tokenSigned);
    Assert.assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
    JSONObject json = (JSONObject) new JSONParser().parse(new InputStreamReader(conn.getInputStream()));
    json = (JSONObject) json.get(DelegationTokenAuthenticator.DELEGATION_TOKEN_JSON);
    String tokenStr = (String) json.get(DelegationTokenAuthenticator.DELEGATION_TOKEN_URL_STRING_JSON);
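    // The delegation token alone (no cookie) authenticates the request.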
    url = new URL(TestJettyHelper.getJettyURL(), "/webhdfs/v1/?op=GETHOMEDIRECTORY&delegation=" + tokenStr);
    conn = (HttpURLConnection) url.openConnection();
    Assert.assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
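    // Renewal requires an authenticated caller; with only the token parameter it is rejected with 401.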
    url = new URL(TestJettyHelper.getJettyURL(), "/webhdfs/v1/?op=RENEWDELEGATIONTOKEN&token=" + tokenStr);
    conn = (HttpURLConnection) url.openConnection();
    conn.setRequestMethod("PUT");
    Assert.assertEquals(HttpURLConnection.HTTP_UNAUTHORIZED, conn.getResponseCode());
    url = new URL(TestJettyHelper.getJettyURL(), "/webhdfs/v1/?op=RENEWDELEGATIONTOKEN&token=" + tokenStr);
    conn = (HttpURLConnection) url.openConnection();
    conn.setRequestMethod("PUT");
    conn.setRequestProperty("Cookie", AuthenticatedURL.AUTH_COOKIE + "=" + tokenSigned);
    Assert.assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
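    // Cancellation succeeds with just the token parameter, no additional authentication.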
    url = new URL(TestJettyHelper.getJettyURL(), "/webhdfs/v1/?op=CANCELDELEGATIONTOKEN&token=" + tokenStr);
    conn = (HttpURLConnection) url.openConnection();
    conn.setRequestMethod("PUT");
    Assert.assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
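    // Once cancelled, the delegation token is no longer accepted.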
    url = new URL(TestJettyHelper.getJettyURL(), "/webhdfs/v1/?op=GETHOMEDIRECTORY&delegation=" + tokenStr);
    conn = (HttpURLConnection) url.openConnection();
    Assert.assertEquals(HttpURLConnection.HTTP_FORBIDDEN, conn.getResponseCode());
    // getTrash test with delegation
    url = new URL(TestJettyHelper.getJettyURL(), "/webhdfs/v1/?op=GETTRASHROOT&delegation=" + tokenStr);
    conn = (HttpURLConnection) url.openConnection();
    Assert.assertEquals(HttpURLConnection.HTTP_FORBIDDEN, conn.getResponseCode());
    url = new URL(TestJettyHelper.getJettyURL(), "/webhdfs/v1/?op=GETTRASHROOT");
    conn = (HttpURLConnection) url.openConnection();
    conn.setRequestProperty("Cookie", AuthenticatedURL.AUTH_COOKIE + "=" + tokenSigned);
    Assert.assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
}
Also used: Signer(org.apache.hadoop.security.authentication.util.Signer), SignerSecretProvider(org.apache.hadoop.security.authentication.util.SignerSecretProvider), HttpURLConnection(java.net.HttpURLConnection), AuthenticationToken(org.apache.hadoop.security.authentication.server.AuthenticationToken), JSONObject(org.json.simple.JSONObject), InputStreamReader(java.io.InputStreamReader), KerberosDelegationTokenAuthenticationHandler(org.apache.hadoop.security.token.delegation.web.KerberosDelegationTokenAuthenticationHandler), JSONParser(org.json.simple.parser.JSONParser), Properties(java.util.Properties), URL(java.net.URL), AuthenticatedURL(org.apache.hadoop.security.authentication.client.AuthenticatedURL), TestJetty(org.apache.hadoop.test.TestJetty), TestHdfs(org.apache.hadoop.test.TestHdfs), TestDir(org.apache.hadoop.test.TestDir), Test(org.junit.Test)
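The test above drives the delegation-token endpoints with raw HTTP. A regular client would typically go through DelegationTokenAuthenticatedURL (same org.apache.hadoop.security.token.delegation.web package as the handler used above), which issues the GETDELEGATIONTOKEN, RENEWDELEGATIONTOKEN and CANCELDELEGATIONTOKEN operations on the caller's behalf. A minimal sketch, not part of the test: the class name, endpoint URL and renewer value are placeholders, and an already-authenticated (e.g. Kerberos) login context is assumed.

import java.net.HttpURLConnection;
import java.net.URL;

import org.apache.hadoop.security.token.delegation.web.DelegationTokenAuthenticatedURL;

public class DelegationTokenClientSketch {
    public static void main(String[] args) throws Exception {
        // Placeholder HttpFS endpoint; in the test this would come from TestJettyHelper.getJettyURL().
        URL url = new URL("http://localhost:14000/webhdfs/v1/?op=GETHOMEDIRECTORY");
        DelegationTokenAuthenticatedURL.Token token = new DelegationTokenAuthenticatedURL.Token();
        DelegationTokenAuthenticatedURL aUrl = new DelegationTokenAuthenticatedURL();
        // Obtain a delegation token for the given renewer; it is stored inside 'token'.
        aUrl.getDelegationToken(url, token, "client");
        // Subsequent connections authenticate with the delegation token.
        HttpURLConnection conn = aUrl.openConnection(url, token);
        System.out.println("GETHOMEDIRECTORY response: " + conn.getResponseCode());
        // Renew the token while it is still needed, then cancel it when done.
        aUrl.renewDelegationToken(url, token);
        aUrl.cancelDelegationToken(url, token);
    }
}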

Example 17 with TestHdfs

Use of org.apache.hadoop.test.TestHdfs in project hadoop by apache.

From class TestHttpFSServerNoXAttrs, method testWithXAttrs.

/**
   * Ensure that GETXATTRS, SETXATTR, REMOVEXATTR fail.
   */
@Test
@TestDir
@TestJetty
@TestHdfs
public void testWithXAttrs() throws Exception {
    final String name1 = "user.a1";
    final byte[] value1 = new byte[] { 0x31, 0x32, 0x33 };
    final String dir = "/noXAttr";
    final String path = dir + "/file";
    startMiniDFS();
    createHttpFSServer();
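    // Create a small test file in HDFS; the mini cluster for this test runs with XAttr support disabled.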
    FileSystem fs = FileSystem.get(nnConf);
    fs.mkdirs(new Path(dir));
    OutputStream os = fs.create(new Path(path));
    os.write(1);
    os.close();
    /* GETXATTRS, SETXATTR, REMOVEXATTR fail */
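    // getStatus and putCmd issue the corresponding REST calls; with XAttrs disabled, each operation is expected to fail.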
    getStatus(path, "GETXATTRS");
    putCmd(path, "SETXATTR", TestHttpFSServer.setXAttrParam(name1, value1));
    putCmd(path, "REMOVEXATTR", "xattr.name=" + name1);
}
Also used: Path(org.apache.hadoop.fs.Path), FileSystem(org.apache.hadoop.fs.FileSystem), OutputStream(java.io.OutputStream), FileOutputStream(java.io.FileOutputStream), TestJetty(org.apache.hadoop.test.TestJetty), TestHdfs(org.apache.hadoop.test.TestHdfs), TestDir(org.apache.hadoop.test.TestDir), Test(org.junit.Test)

Example 18 with TestHdfs

Use of org.apache.hadoop.test.TestHdfs in project hadoop by apache.

From class TestHttpFSWithKerberos, method testValidHttpFSAccess.

@Test
@TestDir
@TestJetty
@TestHdfs
public void testValidHttpFSAccess() throws Exception {
    createHttpFSServer();
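    // KerberosTestUtils.doAsClient runs the callable inside a client Kerberos login context,
    // so the SPNEGO handshake performed by AuthenticatedURL succeeds.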
    KerberosTestUtils.doAsClient(new Callable<Void>() {

        @Override
        public Void call() throws Exception {
            URL url = new URL(TestJettyHelper.getJettyURL(), "/webhdfs/v1/?op=GETHOMEDIRECTORY");
            AuthenticatedURL aUrl = new AuthenticatedURL();
            AuthenticatedURL.Token aToken = new AuthenticatedURL.Token();
            HttpURLConnection conn = aUrl.openConnection(url, aToken);
            Assert.assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
            return null;
        }
    });
}
Also used: HttpURLConnection(java.net.HttpURLConnection), Token(org.apache.hadoop.security.token.Token), URL(java.net.URL), AuthenticatedURL(org.apache.hadoop.security.authentication.client.AuthenticatedURL), TestJetty(org.apache.hadoop.test.TestJetty), TestHdfs(org.apache.hadoop.test.TestHdfs), TestDir(org.apache.hadoop.test.TestDir), Test(org.junit.Test)

Example 19 with TestHdfs

Use of org.apache.hadoop.test.TestHdfs in project hadoop by apache.

From class TestFileSystemAccessService, method fileSystemExecutorException.

@Test
@TestDir
@TestHdfs
public void fileSystemExecutorException() throws Exception {
    String dir = TestDirHelper.getTestDir().getAbsolutePath();
    String services = StringUtils.join(",", Arrays.asList(InstrumentationService.class.getName(), SchedulerService.class.getName(), FileSystemAccessService.class.getName()));
    Configuration hadoopConf = new Configuration(false);
    hadoopConf.set(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY, TestHdfsHelper.getHdfsConf().get(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY));
    createHadoopConf(hadoopConf);
    Configuration conf = new Configuration(false);
    conf.set("server.services", services);
    conf.set("server.hadoop.filesystem.cache.purge.timeout", "0");
    Server server = new Server("server", dir, dir, dir, dir, conf);
    server.init();
    FileSystemAccess hadoop = server.get(FileSystemAccess.class);
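    // Capture the FileSystem handed to the executor so it can be checked after the failure.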
    final FileSystem[] fsa = new FileSystem[1];
    try {
        hadoop.execute("u", hadoop.getFileSystemConfiguration(), new FileSystemAccess.FileSystemExecutor<Void>() {

            @Override
            public Void execute(FileSystem fs) throws IOException {
                fsa[0] = fs;
                throw new IOException();
            }
        });
        Assert.fail();
    } catch (FileSystemAccessException ex) {
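        // The IOException thrown by the executor is reported as a FileSystemAccessException with error H03.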
        Assert.assertEquals(FileSystemAccessException.ERROR.H03, ex.getError());
    } catch (Exception ex) {
        Assert.fail();
    }
    try {
        fsa[0].mkdirs(new Path("/tmp/foo"));
        Assert.fail();
    } catch (IOException ex) {
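        // Expected: the FileSystem handed to the failed executor has been closed, so it can no longer be used.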
    } catch (Exception ex) {
        Assert.fail();
    }
    server.destroy();
}
Also used: Path(org.apache.hadoop.fs.Path), Configuration(org.apache.hadoop.conf.Configuration), Server(org.apache.hadoop.lib.server.Server), IOException(java.io.IOException), TestException(org.apache.hadoop.test.TestException), FileSystemAccessException(org.apache.hadoop.lib.service.FileSystemAccessException), ServiceException(org.apache.hadoop.lib.server.ServiceException), FileSystemAccess(org.apache.hadoop.lib.service.FileSystemAccess), FileSystem(org.apache.hadoop.fs.FileSystem), TestHdfs(org.apache.hadoop.test.TestHdfs), TestDir(org.apache.hadoop.test.TestDir), Test(org.junit.Test)

Example 20 with TestHdfs

Use of org.apache.hadoop.test.TestHdfs in project hadoop by apache.

From class TestFileSystemAccessService, method createFileSystem.

@Test
@TestDir
@TestHdfs
public void createFileSystem() throws Exception {
    String dir = TestDirHelper.getTestDir().getAbsolutePath();
    String services = StringUtils.join(",", Arrays.asList(InstrumentationService.class.getName(), SchedulerService.class.getName(), FileSystemAccessService.class.getName()));
    Configuration hadoopConf = new Configuration(false);
    hadoopConf.set(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY, TestHdfsHelper.getHdfsConf().get(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY));
    createHadoopConf(hadoopConf);
    Configuration conf = new Configuration(false);
    conf.set("server.services", services);
    conf.set("server.hadoop.filesystem.cache.purge.timeout", "0");
    Server server = new Server("server", dir, dir, dir, dir, conf);
    server.init();
    FileSystemAccess hadoop = server.get(FileSystemAccess.class);
    FileSystem fs = hadoop.createFileSystem("u", hadoop.getFileSystemConfiguration());
    Assert.assertNotNull(fs);
    fs.mkdirs(new Path("/tmp/foo"));
    hadoop.releaseFileSystem(fs);
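    // With the cache purge timeout set to 0, releasing the FileSystem closes it immediately, so reusing it must fail.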
    try {
        fs.mkdirs(new Path("/tmp/foo"));
        Assert.fail();
    } catch (IOException ex) {
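        // Expected: the released FileSystem has been closed.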
    } catch (Exception ex) {
        Assert.fail();
    }
    server.destroy();
}
Also used: FileSystemAccess(org.apache.hadoop.lib.service.FileSystemAccess), Path(org.apache.hadoop.fs.Path), Configuration(org.apache.hadoop.conf.Configuration), Server(org.apache.hadoop.lib.server.Server), FileSystem(org.apache.hadoop.fs.FileSystem), IOException(java.io.IOException), TestException(org.apache.hadoop.test.TestException), FileSystemAccessException(org.apache.hadoop.lib.service.FileSystemAccessException), ServiceException(org.apache.hadoop.lib.server.ServiceException), TestHdfs(org.apache.hadoop.test.TestHdfs), TestDir(org.apache.hadoop.test.TestDir), Test(org.junit.Test)

Aggregations

TestDir (org.apache.hadoop.test.TestDir): 21
TestHdfs (org.apache.hadoop.test.TestHdfs): 21
Test (org.junit.Test): 21
TestJetty (org.apache.hadoop.test.TestJetty): 16
FileSystem (org.apache.hadoop.fs.FileSystem): 13
Path (org.apache.hadoop.fs.Path): 12
HttpURLConnection (java.net.HttpURLConnection): 9
URL (java.net.URL): 9
AuthenticatedURL (org.apache.hadoop.security.authentication.client.AuthenticatedURL): 9
IOException (java.io.IOException): 6
InputStreamReader (java.io.InputStreamReader): 5
Configuration (org.apache.hadoop.conf.Configuration): 5
Server (org.apache.hadoop.lib.server.Server): 5
FileSystemAccess (org.apache.hadoop.lib.service.FileSystemAccess): 5
TestException (org.apache.hadoop.test.TestException): 5
ServiceException (org.apache.hadoop.lib.server.ServiceException): 4
FileSystemAccessException (org.apache.hadoop.lib.service.FileSystemAccessException): 4
BufferedReader (java.io.BufferedReader): 3
FileOutputStream (java.io.FileOutputStream): 3
OutputStream (java.io.OutputStream): 3