Search in sources :

Example 51 with FsPermission

Use of org.apache.hadoop.fs.permission.FsPermission in the Apache Hadoop project.

Class TestAuditLoggerWithCommands, method testGetQuotaUsage.

@Test
public void testGetQuotaUsage() throws Exception {
    // Create a directory with mode 000 so that user1 is denied access.
    Path dir = new Path("/test");
    fs.mkdirs(dir, new FsPermission((short) 0));
    String expectedAcePattern = ".*allowed=false.*ugi=theDoctor.*cmd=quotaUsage.*";
    fileSys = DFSTestUtil.getFileSystemAs(user1, conf);
    // The unprivileged user must be denied, and the denial must be audited.
    try {
        fileSys.getQuotaUsage(dir);
        fail("The operation should have failed with AccessControlException");
    } catch (AccessControlException ace) {
    }
    int logLineCount = verifyAuditLogs(expectedAcePattern);
    // After closing the filesystem the call must fail with IOException
    // and must NOT add any further audit-log entries.
    fileSys.close();
    try {
        fileSys.getQuotaUsage(dir);
        fail("The operation should have failed with IOException");
    } catch (IOException e) {
    }
    assertTrue("Unexpected log!", logLineCount == auditlog.getOutput().split("\n").length);
}
Also used : Path(org.apache.hadoop.fs.Path) AccessControlException(org.apache.hadoop.security.AccessControlException) FsPermission(org.apache.hadoop.fs.permission.FsPermission) IOException(java.io.IOException) Test(org.junit.Test)

Example 52 with FsPermission

Use of org.apache.hadoop.fs.permission.FsPermission in the Apache Hadoop project.

Class TestAuditLoggerWithCommands, method testGetEZForPath.

@Test
public void testGetEZForPath() throws Exception {
    Path dir = new Path("/test");
    fileSys = DFSTestUtil.getFileSystemAs(user1, conf);
    // Directory with mode 000: nobody but the superuser may look inside.
    fs.mkdirs(dir, new FsPermission((short) 0));
    String expectedAcePattern = ".*allowed=false.*ugi=theDoctor.*cmd=getEZForPath.*";
    // Cast once; getEZForPath is a DistributedFileSystem-specific API.
    DistributedFileSystem userDfs = (DistributedFileSystem) fileSys;
    // The denied attempt must be recorded in the audit log.
    try {
        userDfs.getEZForPath(dir);
        fail("The operation should have failed with AccessControlException");
    } catch (AccessControlException ace) {
    }
    int logLineCount = verifyAuditLogs(expectedAcePattern);
    // A call on a closed filesystem must fail with IOException and must
    // leave the audit log untouched.
    fileSys.close();
    try {
        userDfs.getEZForPath(dir);
        fail("The operation should have failed with IOException");
    } catch (IOException e) {
    }
    assertTrue("Unexpected log!", logLineCount == auditlog.getOutput().split("\n").length);
}
Also used : Path(org.apache.hadoop.fs.Path) AccessControlException(org.apache.hadoop.security.AccessControlException) FsPermission(org.apache.hadoop.fs.permission.FsPermission) IOException(java.io.IOException) DistributedFileSystem(org.apache.hadoop.hdfs.DistributedFileSystem) Test(org.junit.Test)

Example 53 with FsPermission

Use of org.apache.hadoop.fs.permission.FsPermission in the Apache Hadoop project.

Class TestAuditLoggerWithCommands, method testModifyCachePool.

@Test
public void testModifyCachePool() throws Exception {
    removeExistingCachePools(null);
    // Pool created by the superuser with mode 000, so user1 may not modify it.
    CachePoolInfo cacheInfo = new CachePoolInfo("pool1").setMode(new FsPermission((short) 0));
    ((DistributedFileSystem) fs).addCachePool(cacheInfo);
    fileSys = DFSTestUtil.getFileSystemAs(user1, conf);
    // The denied attempt must be recorded in the audit log.
    try {
        ((DistributedFileSystem) fileSys).modifyCachePool(cacheInfo);
        fail("The operation should have failed with AccessControlException");
    } catch (AccessControlException ace) {
    }
    String aceModifyCachePoolPattern = ".*allowed=false.*ugi=theDoctor.*cmd=modifyCachePool.*";
    int length = verifyAuditLogs(aceModifyCachePoolPattern);
    // FIX: close() moved OUT of the try block, consistent with the other
    // tests in this class. In the original, an IOException thrown by
    // close() itself would be swallowed by the catch below, skipping the
    // modifyCachePool call entirely and letting the test pass without
    // exercising the closed-filesystem path.
    fileSys.close();
    try {
        ((DistributedFileSystem) fileSys).modifyCachePool(cacheInfo);
        fail("The operation should have failed with IOException");
    } catch (IOException e) {
    }
    // The failed call on a closed filesystem must not add audit-log entries.
    assertTrue("Unexpected log!", length == auditlog.getOutput().split("\n").length);
}
Also used : AccessControlException(org.apache.hadoop.security.AccessControlException) FsPermission(org.apache.hadoop.fs.permission.FsPermission) IOException(java.io.IOException) DistributedFileSystem(org.apache.hadoop.hdfs.DistributedFileSystem) CachePoolInfo(org.apache.hadoop.hdfs.protocol.CachePoolInfo) Test(org.junit.Test)

Example 54 with FsPermission

Use of org.apache.hadoop.fs.permission.FsPermission in the Apache Hadoop project.

Class TestWebHdfsCreatePermissions, method testPermissions.

/**
 * Issues a WebHDFS PUT against {@code path} with the given query params and
 * asserts both the HTTP response code and the resulting file permission as
 * reported by the NameNode.
 *
 * NOTE: the cluster is shut down in the finally block, so this helper is
 * single-use — each test must start its own cluster before calling it.
 *
 * @param expectedResponse expected HTTP status code of the PUT
 * @param expectedPermission expected permission string (e.g. "rwxr-xr-x")
 * @param path HDFS path to operate on
 * @param params extra query parameters appended to the WebHDFS URI
 */
private void testPermissions(int expectedResponse, String expectedPermission, String path, String... params) throws Exception {
    final String user = System.getProperty("user.name");
    final StringBuilder uri = new StringBuilder(cluster.getHttpUri(0));
    uri.append("/webhdfs/v1").append(path).append("?user.name=").append(user).append("&");
    for (String param : params) {
        uri.append(param).append("&");
    }
    LOG.info(uri.toString());
    try {
        URL url = new URL(uri.toString());
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        try {
            conn.setRequestMethod("PUT");
            Assert.assertEquals(expectedResponse, conn.getResponseCode());
            NamenodeProtocols namenode = cluster.getNameNode().getRpcServer();
            FsPermission resultingPermission = namenode.getFileInfo(path).getPermission();
            Assert.assertEquals(expectedPermission, resultingPermission.toString());
        } finally {
            // FIX: release the connection's resources; the original leaked
            // the open HttpURLConnection.
            conn.disconnect();
        }
    } finally {
        cluster.shutdown();
    }
}
Also used : NamenodeProtocols(org.apache.hadoop.hdfs.server.protocol.NamenodeProtocols) HttpURLConnection(java.net.HttpURLConnection) FsPermission(org.apache.hadoop.fs.permission.FsPermission) URL(java.net.URL)

Example 55 with FsPermission

Use of org.apache.hadoop.fs.permission.FsPermission in the Apache Hadoop project.

Class TestCredentialProviderFactory, method testLocalJksProvider.

@Test
public void testLocalJksProvider() throws Exception {
    Configuration conf = new Configuration();
    final Path jksPath = new Path(tmpDir.toString(), "test.jks");
    final String ourUrl = LocalJavaKeyStoreProvider.SCHEME_NAME + "://file" + jksPath.toUri();
    // Best-effort cleanup: remove any keystore left over from a prior run.
    File keyStoreFile = new File(tmpDir, "test.jks");
    keyStoreFile.delete();
    conf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH, ourUrl);
    checkSpecificProvider(conf, ourUrl);
    Path path = ProviderUtils.unnestUri(new URI(ourUrl));
    FileSystem fileSystem = path.getFileSystem(conf);
    FileStatus status = fileSystem.getFileStatus(path);
    // The provider must create the keystore readable/writable by the owner only.
    assertTrue("Unexpected permissions: " + status.getPermission().toString(), status.getPermission().toString().equals("rw-------"));
    assertTrue(keyStoreFile + " should exist", keyStoreFile.isFile());
    // check permission retention after explicit change
    fileSystem.setPermission(path, new FsPermission("777"));
    checkPermissionRetention(conf, ourUrl, path);
}
Also used : Path(org.apache.hadoop.fs.Path) FileStatus(org.apache.hadoop.fs.FileStatus) Configuration(org.apache.hadoop.conf.Configuration) FileSystem(org.apache.hadoop.fs.FileSystem) FsPermission(org.apache.hadoop.fs.permission.FsPermission) File(java.io.File) URI(java.net.URI) Test(org.junit.Test)

Aggregations

FsPermission (org.apache.hadoop.fs.permission.FsPermission)427 Path (org.apache.hadoop.fs.Path)267 Test (org.junit.Test)180 IOException (java.io.IOException)120 FileSystem (org.apache.hadoop.fs.FileSystem)93 Configuration (org.apache.hadoop.conf.Configuration)89 FileStatus (org.apache.hadoop.fs.FileStatus)87 FSDataOutputStream (org.apache.hadoop.fs.FSDataOutputStream)52 AccessControlException (org.apache.hadoop.security.AccessControlException)43 UserGroupInformation (org.apache.hadoop.security.UserGroupInformation)36 FileNotFoundException (java.io.FileNotFoundException)33 MiniDFSCluster (org.apache.hadoop.hdfs.MiniDFSCluster)29 File (java.io.File)26 DistributedFileSystem (org.apache.hadoop.hdfs.DistributedFileSystem)26 HdfsConfiguration (org.apache.hadoop.hdfs.HdfsConfiguration)26 AclEntry (org.apache.hadoop.fs.permission.AclEntry)25 ArrayList (java.util.ArrayList)22 HashMap (java.util.HashMap)19 YarnConfiguration (org.apache.hadoop.yarn.conf.YarnConfiguration)16 URI (java.net.URI)15