
Example 1 with HdfsAdmin

Use of org.apache.hadoop.hdfs.client.HdfsAdmin in project hadoop by apache.

From the class TestEncryptionZones, method testListEncryptionZonesAsNonSuperUser.

/**
   * Test listing encryption zones as a non super user.
   */
@Test
public void testListEncryptionZonesAsNonSuperUser() throws Exception {
    final UserGroupInformation user = UserGroupInformation.createUserForTesting("user", new String[] { "mygroup" });
    final Path testRoot = new Path("/tmp/TestEncryptionZones");
    final Path superPath = new Path(testRoot, "superuseronly");
    final Path allPath = new Path(testRoot, "accessall");
    fsWrapper.mkdir(superPath, new FsPermission((short) 0700), true);
    dfsAdmin.createEncryptionZone(superPath, TEST_KEY, NO_TRASH);
    fsWrapper.mkdir(allPath, new FsPermission((short) 0707), true);
    dfsAdmin.createEncryptionZone(allPath, TEST_KEY, NO_TRASH);
    user.doAs(new PrivilegedExceptionAction<Object>() {

        @Override
        public Object run() throws Exception {
            final HdfsAdmin userAdmin = new HdfsAdmin(FileSystem.getDefaultUri(conf), conf);
            try {
                userAdmin.listEncryptionZones();
            } catch (AccessControlException e) {
                assertExceptionContains("Superuser privilege is required", e);
            }
            return null;
        }
    });
}
Also used: Path (org.apache.hadoop.fs.Path), HdfsAdmin (org.apache.hadoop.hdfs.client.HdfsAdmin), AccessControlException (org.apache.hadoop.security.AccessControlException), Matchers.anyObject (org.mockito.Matchers.anyObject), FsPermission (org.apache.hadoop.fs.permission.FsPermission), IOException (java.io.IOException), ExecutionException (java.util.concurrent.ExecutionException), UserGroupInformation (org.apache.hadoop.security.UserGroupInformation), Test (org.junit.Test)
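
The test above only exercises the failure path. For contrast, here is a minimal sketch of the superuser side of the same API, listing every encryption zone and printing its key name. The class name and println formatting are illustrative only; the HdfsAdmin, RemoteIterator, and EncryptionZone calls are the real API used in the example.

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.hdfs.client.HdfsAdmin;
import org.apache.hadoop.hdfs.protocol.EncryptionZone;

public class ListZonesSketch {
    public static void main(String[] args) throws IOException {
        // Assumes the default FileSystem in the loaded configuration is HDFS
        // and that the process runs as the HDFS superuser.
        Configuration conf = new Configuration();
        HdfsAdmin admin = new HdfsAdmin(FileSystem.getDefaultUri(conf), conf);
        RemoteIterator<EncryptionZone> it = admin.listEncryptionZones();
        while (it.hasNext()) {
            EncryptionZone ez = it.next();
            System.out.println(ez.getPath() + " -> key " + ez.getKeyName());
        }
    }
}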

Example 2 with HdfsAdmin

Use of org.apache.hadoop.hdfs.client.HdfsAdmin in project hadoop by apache.

From the class TestEncryptionZones, method dTIEM.

private void dTIEM(Path prefix) throws Exception {
    final HdfsAdmin dfsAdmin = new HdfsAdmin(FileSystem.getDefaultUri(conf), conf);
    // Create an unencrypted file to check isEncrypted returns false
    final Path baseFile = new Path(prefix, "base");
    fsWrapper.createFile(baseFile);
    FileStatus stat = fsWrapper.getFileStatus(baseFile);
    assertFalse("Expected isEncrypted to return false for " + baseFile, stat.isEncrypted());
    // Create an encrypted file to check isEncrypted returns true
    final Path zone = new Path(prefix, "zone");
    fsWrapper.mkdir(zone, FsPermission.getDirDefault(), true);
    dfsAdmin.createEncryptionZone(zone, TEST_KEY, NO_TRASH);
    final Path encFile = new Path(zone, "encfile");
    fsWrapper.createFile(encFile);
    stat = fsWrapper.getFileStatus(encFile);
    assertTrue("Expected isEncrypted to return true for enc file" + encFile, stat.isEncrypted());
    // check that it returns true for an ez root
    stat = fsWrapper.getFileStatus(zone);
    assertTrue("Expected isEncrypted to return true for ezroot", stat.isEncrypted());
    // check that it returns true for a dir in the ez
    final Path zoneSubdir = new Path(zone, "subdir");
    fsWrapper.mkdir(zoneSubdir, FsPermission.getDirDefault(), true);
    stat = fsWrapper.getFileStatus(zoneSubdir);
    assertTrue("Expected isEncrypted to return true for ez subdir " + zoneSubdir, stat.isEncrypted());
    // check that it returns false for a non ez dir
    final Path nonEzDirPath = new Path(prefix, "nonzone");
    fsWrapper.mkdir(nonEzDirPath, FsPermission.getDirDefault(), true);
    stat = fsWrapper.getFileStatus(nonEzDirPath);
    assertFalse("Expected isEncrypted to return false for directory " + nonEzDirPath, stat.isEncrypted());
    // check that it returns true for listings within an ez
    FileStatus[] statuses = fsWrapper.listStatus(zone);
    for (FileStatus s : statuses) {
        assertTrue("Expected isEncrypted to return true for ez stat " + zone, s.isEncrypted());
    }
    statuses = fsWrapper.listStatus(encFile);
    for (FileStatus s : statuses) {
        assertTrue("Expected isEncrypted to return true for ez file stat " + encFile, s.isEncrypted());
    }
    // check that it returns false for listings outside an ez
    statuses = fsWrapper.listStatus(nonEzDirPath);
    for (FileStatus s : statuses) {
        assertFalse("Expected isEncrypted to return false for nonez stat " + nonEzDirPath, s.isEncrypted());
    }
    statuses = fsWrapper.listStatus(baseFile);
    for (FileStatus s : statuses) {
        assertFalse("Expected isEncrypted to return false for non ez stat " + baseFile, s.isEncrypted());
    }
}
Also used: Path (org.apache.hadoop.fs.Path), FileStatus (org.apache.hadoop.fs.FileStatus), HdfsFileStatus (org.apache.hadoop.hdfs.protocol.HdfsFileStatus), HdfsAdmin (org.apache.hadoop.hdfs.client.HdfsAdmin)
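
The helper above checks encryption status through FileStatus.isEncrypted(). A complementary client-side check, sketched below under the assumption of a working HDFS client configuration, is HdfsAdmin.getEncryptionZoneForPath (exercised in Example 4 below), which also names the enclosing zone or returns null outside any zone. The class name and the placeholder path are illustrative, not from the project.

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.client.HdfsAdmin;
import org.apache.hadoop.hdfs.protocol.EncryptionZone;

public class IsEncryptedSketch {
    public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(conf);
        HdfsAdmin admin = new HdfsAdmin(FileSystem.getDefaultUri(conf), conf);
        Path p = new Path("/zone/encfile");  // placeholder path
        // FileStatus.isEncrypted() answers "is this path inside an encryption zone?"
        FileStatus stat = fs.getFileStatus(p);
        System.out.println(p + " encrypted: " + stat.isEncrypted());
        // getEncryptionZoneForPath() additionally names the enclosing zone,
        // and returns null when the path is not inside any zone.
        EncryptionZone ez = admin.getEncryptionZoneForPath(p);
        System.out.println(p + " zone: " + (ez == null ? "none" : ez.getPath()));
    }
}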

Example 3 with HdfsAdmin

Use of org.apache.hadoop.hdfs.client.HdfsAdmin in project hadoop by apache.

From the class TestEncryptionZones, method testReadWriteUsingWebHdfs.

@Test
public void testReadWriteUsingWebHdfs() throws Exception {
    final HdfsAdmin dfsAdmin = new HdfsAdmin(FileSystem.getDefaultUri(conf), conf);
    final FileSystem webHdfsFs = WebHdfsTestUtil.getWebHdfsFileSystem(conf, WebHdfsConstants.WEBHDFS_SCHEME);
    final Path zone = new Path("/zone");
    fs.mkdirs(zone);
    dfsAdmin.createEncryptionZone(zone, TEST_KEY, NO_TRASH);
    /* Create an unencrypted file for comparison purposes. */
    final Path unencFile = new Path("/unenc");
    final int len = 8192;
    DFSTestUtil.createFile(webHdfsFs, unencFile, len, (short) 1, 0xFEED);
    /*
     * Create the same file via webhdfs, but this time encrypted. Compare it
     * using both webhdfs and DFS.
     */
    final Path encFile1 = new Path(zone, "myfile");
    DFSTestUtil.createFile(webHdfsFs, encFile1, len, (short) 1, 0xFEED);
    verifyFilesEqual(webHdfsFs, unencFile, encFile1, len);
    verifyFilesEqual(fs, unencFile, encFile1, len);
    /*
     * Same thing except this time create the encrypted file using DFS.
     */
    final Path encFile2 = new Path(zone, "myfile2");
    DFSTestUtil.createFile(fs, encFile2, len, (short) 1, 0xFEED);
    verifyFilesEqual(webHdfsFs, unencFile, encFile2, len);
    verifyFilesEqual(fs, unencFile, encFile2, len);
    /* Verify appending to files works correctly. */
    appendOneByte(fs, unencFile);
    appendOneByte(webHdfsFs, encFile1);
    appendOneByte(fs, encFile2);
    verifyFilesEqual(webHdfsFs, unencFile, encFile1, len);
    verifyFilesEqual(fs, unencFile, encFile1, len);
    verifyFilesEqual(webHdfsFs, unencFile, encFile2, len);
    verifyFilesEqual(fs, unencFile, encFile2, len);
}
Also used: Path (org.apache.hadoop.fs.Path), HdfsAdmin (org.apache.hadoop.hdfs.client.HdfsAdmin), FileSystem (org.apache.hadoop.fs.FileSystem), WebHdfsFileSystem (org.apache.hadoop.hdfs.web.WebHdfsFileSystem), Test (org.junit.Test)
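
The test obtains its WebHDFS client from the WebHdfsTestUtil helper. Outside a test, a WebHDFS FileSystem is typically created from a webhdfs:// URI, as in the following sketch; the host and port are placeholders (9870 is the default NameNode HTTP port in Hadoop 3), and the file path is arbitrary.

import java.io.IOException;
import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class WebHdfsWriteSketch {
    public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        // "namenode-host:9870" is a placeholder for the NameNode's HTTP address.
        FileSystem webHdfs =
            FileSystem.get(URI.create("webhdfs://namenode-host:9870"), conf);
        // As the test demonstrates, writing into an encryption zone over WebHDFS
        // looks the same to the client as writing anywhere else in HDFS.
        try (FSDataOutputStream out = webHdfs.create(new Path("/zone/webhdfs-file"))) {
            out.writeUTF("hello");
        }
    }
}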

Example 4 with HdfsAdmin

Use of org.apache.hadoop.hdfs.client.HdfsAdmin in project hadoop by apache.

From the class TestEncryptionZones, method testGetEZAsNonSuperUser.

/**
   * Test getEncryptionZoneForPath as a non super user.
   */
@Test
public void testGetEZAsNonSuperUser() throws Exception {
    final UserGroupInformation user = UserGroupInformation.createUserForTesting("user", new String[] { "mygroup" });
    final Path testRoot = new Path("/tmp/TestEncryptionZones");
    final Path superPath = new Path(testRoot, "superuseronly");
    final Path superPathFile = new Path(superPath, "file1");
    final Path allPath = new Path(testRoot, "accessall");
    final Path allPathFile = new Path(allPath, "file1");
    final Path nonEZDir = new Path(testRoot, "nonEZDir");
    final Path nonEZFile = new Path(nonEZDir, "file1");
    final Path nonexistent = new Path("/nonexistent");
    final int len = 8192;
    fsWrapper.mkdir(testRoot, new FsPermission((short) 0777), true);
    fsWrapper.mkdir(superPath, new FsPermission((short) 0700), false);
    fsWrapper.mkdir(allPath, new FsPermission((short) 0777), false);
    fsWrapper.mkdir(nonEZDir, new FsPermission((short) 0777), false);
    dfsAdmin.createEncryptionZone(superPath, TEST_KEY, NO_TRASH);
    dfsAdmin.createEncryptionZone(allPath, TEST_KEY, NO_TRASH);
    dfsAdmin.allowSnapshot(new Path("/"));
    final Path newSnap = fs.createSnapshot(new Path("/"));
    DFSTestUtil.createFile(fs, superPathFile, len, (short) 1, 0xFEED);
    DFSTestUtil.createFile(fs, allPathFile, len, (short) 1, 0xFEED);
    DFSTestUtil.createFile(fs, nonEZFile, len, (short) 1, 0xFEED);
    user.doAs(new PrivilegedExceptionAction<Object>() {

        @Override
        public Object run() throws Exception {
            final HdfsAdmin userAdmin = new HdfsAdmin(FileSystem.getDefaultUri(conf), conf);
            // Check null arg
            try {
                userAdmin.getEncryptionZoneForPath(null);
                fail("should have thrown NPE");
            } catch (NullPointerException e) {
            /*
           * IWBNI we could use assertExceptionContains, but the NPE that is
           * thrown has no message text.
           */
            }
            // Check operation with accessible paths
            assertEquals("expected ez path", allPath.toString(), userAdmin.getEncryptionZoneForPath(allPath).getPath().toString());
            assertEquals("expected ez path", allPath.toString(), userAdmin.getEncryptionZoneForPath(allPathFile).getPath().toString());
            // Check operation with inaccessible (lack of permissions) path
            try {
                userAdmin.getEncryptionZoneForPath(superPathFile);
                fail("expected AccessControlException");
            } catch (AccessControlException e) {
                assertExceptionContains("Permission denied:", e);
            }
            assertNull("expected null for nonexistent path", userAdmin.getEncryptionZoneForPath(nonexistent));
            // Check operation with non-ez paths
            assertNull("expected null for non-ez path", userAdmin.getEncryptionZoneForPath(nonEZDir));
            assertNull("expected null for non-ez path", userAdmin.getEncryptionZoneForPath(nonEZFile));
            // Check operation with snapshots
            String snapshottedAllPath = newSnap.toString() + allPath.toString();
            assertEquals("expected ez path", allPath.toString(), userAdmin.getEncryptionZoneForPath(new Path(snapshottedAllPath)).getPath().toString());
            /*
         * Delete the file from the non-snapshot and test that it is still ok
         * in the ez.
         */
            fs.delete(allPathFile, false);
            assertEquals("expected ez path", allPath.toString(), userAdmin.getEncryptionZoneForPath(new Path(snapshottedAllPath)).getPath().toString());
            // Delete the ez and make sure ss's ez is still ok.
            fs.delete(allPath, true);
            assertEquals("expected ez path", allPath.toString(), userAdmin.getEncryptionZoneForPath(new Path(snapshottedAllPath)).getPath().toString());
            assertNull("expected null for deleted file path", userAdmin.getEncryptionZoneForPath(allPathFile));
            assertNull("expected null for deleted directory path", userAdmin.getEncryptionZoneForPath(allPath));
            return null;
        }
    });
}
Also used: Path (org.apache.hadoop.fs.Path), HdfsAdmin (org.apache.hadoop.hdfs.client.HdfsAdmin), AccessControlException (org.apache.hadoop.security.AccessControlException), Matchers.anyObject (org.mockito.Matchers.anyObject), FsPermission (org.apache.hadoop.fs.permission.FsPermission), Mockito.anyString (org.mockito.Mockito.anyString), IOException (java.io.IOException), ExecutionException (java.util.concurrent.ExecutionException), UserGroupInformation (org.apache.hadoop.security.UserGroupInformation), Test (org.junit.Test)

Example 5 with HdfsAdmin

Use of org.apache.hadoop.hdfs.client.HdfsAdmin in project hadoop by apache.

From the class TestEncryptionZones, method testReadWrite.

@Test
public void testReadWrite() throws Exception {
    final HdfsAdmin dfsAdmin = new HdfsAdmin(FileSystem.getDefaultUri(conf), conf);
    // Create a base file for comparison
    final Path baseFile = new Path("/base");
    final int len = 8192;
    DFSTestUtil.createFile(fs, baseFile, len, (short) 1, 0xFEED);
    // Create the first enc file
    final Path zone = new Path("/zone");
    fs.mkdirs(zone);
    dfsAdmin.createEncryptionZone(zone, TEST_KEY, NO_TRASH);
    final Path encFile1 = new Path(zone, "myfile");
    DFSTestUtil.createFile(fs, encFile1, len, (short) 1, 0xFEED);
    // Read them back in and compare byte-by-byte
    verifyFilesEqual(fs, baseFile, encFile1, len);
    // Roll the key of the encryption zone
    assertNumZones(1);
    String keyName = dfsAdmin.listEncryptionZones().next().getKeyName();
    cluster.getNamesystem().getProvider().rollNewVersion(keyName);
    cluster.getNamesystem().getProvider().invalidateCache(keyName);
    // Read them back in and compare byte-by-byte
    verifyFilesEqual(fs, baseFile, encFile1, len);
    // Write a new enc file and validate
    final Path encFile2 = new Path(zone, "myfile2");
    DFSTestUtil.createFile(fs, encFile2, len, (short) 1, 0xFEED);
    // FEInfos should be different
    FileEncryptionInfo feInfo1 = getFileEncryptionInfo(encFile1);
    FileEncryptionInfo feInfo2 = getFileEncryptionInfo(encFile2);
    assertFalse("EDEKs should be different", Arrays.equals(feInfo1.getEncryptedDataEncryptionKey(), feInfo2.getEncryptedDataEncryptionKey()));
    assertNotEquals("Key was rolled, versions should be different", feInfo1.getEzKeyVersionName(), feInfo2.getEzKeyVersionName());
    // Contents still equal
    verifyFilesEqual(fs, encFile1, encFile2, len);
}
Also used: Path (org.apache.hadoop.fs.Path), HdfsAdmin (org.apache.hadoop.hdfs.client.HdfsAdmin), Mockito.anyString (org.mockito.Mockito.anyString), FileEncryptionInfo (org.apache.hadoop.fs.FileEncryptionInfo), Test (org.junit.Test)
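
The test rolls the zone key directly on the NameNode's provider, which is only possible inside the MiniDFSCluster. From a client, an equivalent roll would go through the configured KeyProvider (usually the KMS); the sketch below assumes such a provider is configured, and "mykey" stands in for the real zone key name. The same operation is also available from the command line via hadoop key roll <keyname>.

import java.io.IOException;
import java.security.NoSuchAlgorithmException;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.crypto.key.KeyProvider;
import org.apache.hadoop.crypto.key.KeyProviderFactory;

public class RollKeySketch {
    public static void main(String[] args)
            throws IOException, NoSuchAlgorithmException {
        Configuration conf = new Configuration();
        // Picks up whatever KeyProvider (typically the KMS) the client is configured with.
        List<KeyProvider> providers = KeyProviderFactory.getProviders(conf);
        if (providers.isEmpty()) {
            throw new IOException("no KeyProvider configured");
        }
        KeyProvider provider = providers.get(0);
        // "mykey" is a placeholder for the encryption zone key name.
        provider.rollNewVersion("mykey");
        provider.flush();
    }
}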

Aggregations

HdfsAdmin (org.apache.hadoop.hdfs.client.HdfsAdmin): 27
Path (org.apache.hadoop.fs.Path): 21
Test (org.junit.Test): 16
Configuration (org.apache.hadoop.conf.Configuration): 8
File (java.io.File): 7
Mockito.anyString (org.mockito.Mockito.anyString): 7
FileSystemTestHelper (org.apache.hadoop.fs.FileSystemTestHelper): 6
FsShell (org.apache.hadoop.fs.FsShell): 6
Before (org.junit.Before): 6
FileSystem (org.apache.hadoop.fs.FileSystem): 5
FileSystemTestWrapper (org.apache.hadoop.fs.FileSystemTestWrapper): 5
IOException (java.io.IOException): 4
MiniDFSCluster (org.apache.hadoop.hdfs.MiniDFSCluster): 4
AccessControlException (org.apache.hadoop.security.AccessControlException): 4
UserGroupInformation (org.apache.hadoop.security.UserGroupInformation): 4
ExecutionException (java.util.concurrent.ExecutionException): 3
FsPermission (org.apache.hadoop.fs.permission.FsPermission): 3
EncryptionZoneManager (org.apache.hadoop.hdfs.server.namenode.EncryptionZoneManager): 3
WebHdfsFileSystem (org.apache.hadoop.hdfs.web.WebHdfsFileSystem): 3
BeforeClass (org.junit.BeforeClass): 3
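
Taken together, the examples share the same two-step setup: construct an HdfsAdmin against the default HDFS URI, then create an encryption zone on an existing directory with a key that already exists in the cluster's KeyProvider. A minimal standalone sketch of that pattern follows; the NO_TRASH constant used in the tests appears to be test-class shorthand for EnumSet.of(CreateEncryptionZoneFlag.NO_TRASH), and the directory and key name below are placeholders.

import java.io.IOException;
import java.util.EnumSet;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hdfs.client.CreateEncryptionZoneFlag;
import org.apache.hadoop.hdfs.client.HdfsAdmin;

public class CreateZoneSketch {
    public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(conf);
        HdfsAdmin admin = new HdfsAdmin(FileSystem.getDefaultUri(conf), conf);
        // "mykey" is a placeholder: the key must already exist in the
        // KeyProvider (KMS) that the cluster is configured to use.
        Path zone = new Path("/secure");
        fs.mkdirs(zone, new FsPermission((short) 0700));
        admin.createEncryptionZone(zone, "mykey",
            EnumSet.of(CreateEncryptionZoneFlag.NO_TRASH));
    }
}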