
Example 16 with WebHdfsFileSystem

Use of org.apache.hadoop.hdfs.web.WebHdfsFileSystem in project hadoop by apache.

From the class TestDelegationTokenFetcher, method expectedTokenIsRetrievedFromHttp.

/**
   * Call fetch token using the HTTP server.
   */
@Test
public void expectedTokenIsRetrievedFromHttp() throws Exception {
    final Token<DelegationTokenIdentifier> testToken = new Token<DelegationTokenIdentifier>("id".getBytes(), "pwd".getBytes(), FakeRenewer.KIND, new Text("127.0.0.1:1234"));
    WebHdfsFileSystem fs = mock(WebHdfsFileSystem.class);
    doReturn(testToken).when(fs).getDelegationToken(anyString());
    Path p = new Path(f.getRoot().getAbsolutePath(), tokenFile);
    DelegationTokenFetcher.saveDelegationToken(conf, fs, null, p);
    Credentials creds = Credentials.readTokenStorageFile(p, conf);
    Iterator<Token<?>> itr = creds.getAllTokens().iterator();
    assertTrue("token not exist error", itr.hasNext());
    Token<?> fetchedToken = itr.next();
    Assert.assertArrayEquals("token wrong identifier error", testToken.getIdentifier(), fetchedToken.getIdentifier());
    Assert.assertArrayEquals("token wrong password error", testToken.getPassword(), fetchedToken.getPassword());
    DelegationTokenFetcher.renewTokens(conf, p);
    Assert.assertEquals(testToken, FakeRenewer.getLastRenewed());
    DelegationTokenFetcher.cancelTokens(conf, p);
    Assert.assertEquals(testToken, FakeRenewer.getLastCanceled());
}
Also used: Path (org.apache.hadoop.fs.Path), DelegationTokenIdentifier (org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier), Token (org.apache.hadoop.security.token.Token), Text (org.apache.hadoop.io.Text), WebHdfsFileSystem (org.apache.hadoop.hdfs.web.WebHdfsFileSystem), Credentials (org.apache.hadoop.security.Credentials), Test (org.junit.Test)
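
Outside of a mocked test, the same round trip can be sketched against a real cluster: ask the filesystem for a delegation token, persist it with Credentials, and read it back the way the test does. This is only a minimal sketch, assuming a Kerberos-secured cluster that actually issues tokens; the NameNode address, renewer name, and token file path below are placeholders.

import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.token.Token;

public class SaveTokenSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Placeholder address; assumes a Kerberos-secured cluster that actually issues tokens.
        FileSystem fs = FileSystem.get(new URI("webhdfs://namenode.example.com:9870"), conf);

        // Ask for a delegation token, which is what the mocked getDelegationToken(...) above stands in for.
        Token<?> token = fs.getDelegationToken("renewer");

        // Persist it to a token file and read it back, mirroring what the test verifies.
        Credentials creds = new Credentials();
        creds.addToken(new Text(fs.getUri().toString()), token);
        Path tokenFile = new Path("file:///tmp/my.token");
        creds.writeTokenStorageFile(tokenFile, conf);

        Credentials readBack = Credentials.readTokenStorageFile(tokenFile, conf);
        System.out.println("tokens read back: " + readBack.numberOfTokens());
    }
}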

Example 17 with WebHdfsFileSystem

Use of org.apache.hadoop.hdfs.web.WebHdfsFileSystem in project hadoop by apache.

From the class TestAuditLogs, method testAuditWebHdfsOpen.

/** Test that open via webhdfs puts a proper entry in the audit log. */
@Test
public void testAuditWebHdfsOpen() throws Exception {
    final Path file = new Path(fnames[0]);
    fs.setPermission(file, new FsPermission((short) 0644));
    fs.setOwner(file, "root", null);
    setupAuditLogs();
    WebHdfsFileSystem webfs = WebHdfsTestUtil.getWebHdfsFileSystemAs(userGroupInfo, conf, WebHdfsConstants.WEBHDFS_SCHEME);
    webfs.open(file).read();
    verifyAuditLogsCheckPattern(true, 3, webOpenPattern);
}
Also used: Path (org.apache.hadoop.fs.Path), FsPermission (org.apache.hadoop.fs.permission.FsPermission), WebHdfsFileSystem (org.apache.hadoop.hdfs.web.WebHdfsFileSystem), Test (org.junit.Test)

Example 18 with WebHdfsFileSystem

Use of org.apache.hadoop.hdfs.web.WebHdfsFileSystem in project hadoop by apache.

From the class TestAuditLogs, method testAuditWebHdfs.

/** Test that access via webhdfs puts a proper entry in the audit log. */
@Test
public void testAuditWebHdfs() throws Exception {
    final Path file = new Path(fnames[0]);
    fs.setPermission(file, new FsPermission((short) 0644));
    fs.setOwner(file, "root", null);
    setupAuditLogs();
    WebHdfsFileSystem webfs = WebHdfsTestUtil.getWebHdfsFileSystemAs(userGroupInfo, conf, WebHdfsConstants.WEBHDFS_SCHEME);
    InputStream istream = webfs.open(file);
    int val = istream.read();
    istream.close();
    verifyAuditLogsRepeat(true, 3);
    assertTrue("failed to read from file", val >= 0);
}
Also used: Path (org.apache.hadoop.fs.Path), InputStream (java.io.InputStream), FsPermission (org.apache.hadoop.fs.permission.FsPermission), WebHdfsFileSystem (org.apache.hadoop.hdfs.web.WebHdfsFileSystem), Test (org.junit.Test)
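
Both audit-log tests boil down to a plain open-and-read over webhdfs, which is what triggers the OPEN entry the NameNode records. Outside the test harness, the equivalent client access looks roughly like the sketch below; the NameNode address and file path are placeholders (9870 is the default NameNode HTTP port in Hadoop 3).

import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class WebHdfsOpenSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Placeholder NameNode HTTP address.
        FileSystem webfs = FileSystem.get(new URI("webhdfs://namenode.example.com:9870"), conf);

        // Reading even a single byte issues an OPEN that the NameNode writes to its audit log.
        try (FSDataInputStream in = webfs.open(new Path("/user/alice/data.txt"))) {
            int firstByte = in.read();
            System.out.println("first byte: " + firstByte);
        }
    }
}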

Example 19 with WebHdfsFileSystem

Use of org.apache.hadoop.hdfs.web.WebHdfsFileSystem in project hadoop by apache.

From the class TestEncryptionZones, method testEncryptionZoneWithTrash.

@Test
public void testEncryptionZoneWithTrash() throws Exception {
    // Create the first encryption zone, /zone1
    final HdfsAdmin dfsAdmin = new HdfsAdmin(FileSystem.getDefaultUri(conf), conf);
    final Path zone1 = new Path("/zone1");
    fs.mkdirs(zone1);
    dfsAdmin.createEncryptionZone(zone1, TEST_KEY, NO_TRASH);
    // Create the encrypted file in zone1
    final Path encFile1 = new Path(zone1, "encFile1");
    final int len = 8192;
    DFSTestUtil.createFile(fs, encFile1, len, (short) 1, 0xFEED);
    Configuration clientConf = new Configuration(conf);
    clientConf.setLong(FS_TRASH_INTERVAL_KEY, 1);
    FsShell shell = new FsShell(clientConf);
    // Delete encrypted file from the shell with trash enabled
    // Verify the file is moved to appropriate trash within the zone
    verifyShellDeleteWithTrash(shell, encFile1);
    // Delete encryption zone from the shell with trash enabled
    // Verify the zone is moved to appropriate trash location in user's home dir
    verifyShellDeleteWithTrash(shell, zone1);
    final Path topEZ = new Path("/topEZ");
    fs.mkdirs(topEZ);
    dfsAdmin.createEncryptionZone(topEZ, TEST_KEY, NO_TRASH);
    final String NESTED_EZ_TEST_KEY = "nested_ez_test_key";
    DFSTestUtil.createKey(NESTED_EZ_TEST_KEY, cluster, conf);
    final Path nestedEZ = new Path(topEZ, "nestedEZ");
    fs.mkdirs(nestedEZ);
    dfsAdmin.createEncryptionZone(nestedEZ, NESTED_EZ_TEST_KEY, NO_TRASH);
    final Path topEZFile = new Path(topEZ, "file");
    final Path nestedEZFile = new Path(nestedEZ, "file");
    DFSTestUtil.createFile(fs, topEZFile, len, (short) 1, 0xFEED);
    DFSTestUtil.createFile(fs, nestedEZFile, len, (short) 1, 0xFEED);
    verifyShellDeleteWithTrash(shell, topEZFile);
    verifyShellDeleteWithTrash(shell, nestedEZFile);
    //Test nested EZ with webHDFS
    final WebHdfsFileSystem webFS = WebHdfsTestUtil.getWebHdfsFileSystem(conf, WebHdfsConstants.WEBHDFS_SCHEME);
    final String currentUser = UserGroupInformation.getCurrentUser().getShortUserName();
    final Path expectedTopTrash = new Path(topEZ, new Path(FileSystem.TRASH_PREFIX, currentUser));
    final Path expectedNestedTrash = new Path(nestedEZ, new Path(FileSystem.TRASH_PREFIX, currentUser));
    final Path topTrash = webFS.getTrashRoot(topEZFile);
    final Path nestedTrash = webFS.getTrashRoot(nestedEZFile);
    assertEquals(expectedTopTrash.toUri().getPath(), topTrash.toUri().getPath());
    assertEquals(expectedNestedTrash.toUri().getPath(), nestedTrash.toUri().getPath());
    verifyShellDeleteWithTrash(shell, nestedEZ);
    verifyShellDeleteWithTrash(shell, topEZ);
}
Also used: Path (org.apache.hadoop.fs.Path), FsShell (org.apache.hadoop.fs.FsShell), Configuration (org.apache.hadoop.conf.Configuration), HdfsAdmin (org.apache.hadoop.hdfs.client.HdfsAdmin), Mockito.anyString (org.mockito.Mockito.anyString), WebHdfsFileSystem (org.apache.hadoop.hdfs.web.WebHdfsFileSystem), Test (org.junit.Test)
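
The webhdfs-specific part of this test is the getTrashRoot() check: for a path inside an encryption zone, the trash root resolves to <zone>/.Trash/<user> rather than the user's home-directory trash. A minimal sketch of that check outside the test, assuming the /zone1 encryption zone from the example already exists; the NameNode address and file paths are placeholders.

import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.UserGroupInformation;

public class TrashRootSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Placeholder NameNode HTTP address; WebHdfsFileSystem and DistributedFileSystem
        // both implement the encryption-zone-aware getTrashRoot() behaviour checked above.
        FileSystem webfs = FileSystem.get(new URI("webhdfs://namenode.example.com:9870"), conf);

        String user = UserGroupInformation.getCurrentUser().getShortUserName();
        // Inside an encryption zone the trash root is <zone>/.Trash/<user> ...
        Path inZone = webfs.getTrashRoot(new Path("/zone1/encFile1"));
        // ... while outside a zone it falls back to the user's home-directory trash.
        Path outside = webfs.getTrashRoot(new Path("/tmp/plainFile"));

        System.out.println("expected in-zone root: " + new Path("/zone1", new Path(FileSystem.TRASH_PREFIX, user)));
        System.out.println("reported in-zone root: " + inZone);
        System.out.println("reported outside root: " + outside);
    }
}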

Example 20 with WebHdfsFileSystem

Use of org.apache.hadoop.hdfs.web.WebHdfsFileSystem in project hadoop by apache.

From the class TestDelegationToken, method testDelegationTokenWebHdfsApi.

@Test
public void testDelegationTokenWebHdfsApi() throws Exception {
    GenericTestUtils.setLogLevel(NamenodeWebHdfsMethods.LOG, Level.ALL);
    final String uri = WebHdfsConstants.WEBHDFS_SCHEME + "://" + config.get(DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_KEY);
    // Get the file system as the JobTracker user
    final UserGroupInformation ugi = UserGroupInformation.createUserForTesting("JobTracker", new String[] { "user" });
    final WebHdfsFileSystem webhdfs = ugi.doAs(new PrivilegedExceptionAction<WebHdfsFileSystem>() {

        @Override
        public WebHdfsFileSystem run() throws Exception {
            return (WebHdfsFileSystem) FileSystem.get(new URI(uri), config);
        }
    });
    {
        //test addDelegationTokens(..)
        Credentials creds = new Credentials();
        final Token<?>[] tokens = webhdfs.addDelegationTokens("JobTracker", creds);
        Assert.assertEquals(1, tokens.length);
        Assert.assertEquals(1, creds.numberOfTokens());
        Assert.assertSame(tokens[0], creds.getAllTokens().iterator().next());
        checkTokenIdentifier(ugi, tokens[0]);
        final Token<?>[] tokens2 = webhdfs.addDelegationTokens("JobTracker", creds);
        Assert.assertEquals(0, tokens2.length);
    }
}
Also used: WebHdfsFileSystem (org.apache.hadoop.hdfs.web.WebHdfsFileSystem), URI (java.net.URI), IOException (java.io.IOException), AccessControlException (org.apache.hadoop.security.AccessControlException), Credentials (org.apache.hadoop.security.Credentials), UserGroupInformation (org.apache.hadoop.security.UserGroupInformation), Test (org.junit.Test)
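
Once tokens have been fetched this way, they can also be renewed and cancelled directly from the client, which is the lifecycle that the FakeRenewer-based test in Example 16 simulates. A hedged sketch, assuming a security-enabled cluster; the NameNode address is a placeholder, and the current user is named as renewer so that the same client is allowed to renew.

import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;

public class TokenLifecycleSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Placeholder address; assumes a Kerberos-secured cluster.
        FileSystem webhdfs = FileSystem.get(new URI("webhdfs://namenode.example.com:9870"), conf);

        // Name the current user as renewer so this client may renew the tokens itself.
        String renewer = UserGroupInformation.getCurrentUser().getShortUserName();
        Credentials creds = new Credentials();
        Token<?>[] tokens = webhdfs.addDelegationTokens(renewer, creds);

        for (Token<?> token : tokens) {
            // renew() extends the token's lifetime and returns the new expiry time in milliseconds.
            long newExpiry = token.renew(conf);
            System.out.println(token.getKind() + " renewed until " + newExpiry);
            // cancel() invalidates the token on the NameNode.
            token.cancel(conf);
        }
    }
}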

Aggregations

WebHdfsFileSystem (org.apache.hadoop.hdfs.web.WebHdfsFileSystem): 21 uses
Test (org.junit.Test): 19 uses
Path (org.apache.hadoop.fs.Path): 18 uses
Configuration (org.apache.hadoop.conf.Configuration): 10 uses
URI (java.net.URI): 9 uses
HttpURLConnection (java.net.HttpURLConnection): 5 uses
URL (java.net.URL): 5 uses
ContentSummary (org.apache.hadoop.fs.ContentSummary): 5 uses
FsPermission (org.apache.hadoop.fs.permission.FsPermission): 5 uses
FileStatus (org.apache.hadoop.fs.FileStatus): 3 uses
IOException (java.io.IOException): 2 uses
InputStream (java.io.InputStream): 2 uses
FsShell (org.apache.hadoop.fs.FsShell): 2 uses
HdfsAdmin (org.apache.hadoop.hdfs.client.HdfsAdmin): 2 uses
DelegationTokenIdentifier (org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier): 2 uses
Text (org.apache.hadoop.io.Text): 2 uses
AccessControlException (org.apache.hadoop.security.AccessControlException): 2 uses
Credentials (org.apache.hadoop.security.Credentials): 2 uses
Token (org.apache.hadoop.security.token.Token): 2 uses
Mockito.anyString (org.mockito.Mockito.anyString): 2 uses