Search in sources:

Example 11 with AclEntry

Use of org.apache.hadoop.fs.permission.AclEntry in project hadoop by apache.

From the class AclTransformation, method filterAclEntriesByAclSpec.

/**
   * Filters (discards) any existing ACL entries that have the same scope, type
   * and name of any entry in the ACL spec.  If necessary, recalculates the mask
   * entries.  If necessary, default entries may be inferred by copying the
   * permissions of the corresponding access entries.  It is invalid to request
   * removal of the mask entry from an ACL that would otherwise require a mask
   * entry, due to existing named entries or an unnamed group entry.
   *
   * @param existingAcl List<AclEntry> existing ACL
   * @param inAclSpec List<AclEntry> ACL spec describing entries to filter
   * @return List<AclEntry> new ACL
   * @throws AclException if validation fails
   */
public static List<AclEntry> filterAclEntriesByAclSpec(List<AclEntry> existingAcl, List<AclEntry> inAclSpec) throws AclException {
    // Validate the spec once up front; matching below is by scope/type/name
    // (see containsKey), per the method contract above.
    ValidatedAclSpec aclSpec = new ValidatedAclSpec(inAclSpec);
    // Surviving non-mask entries accumulate here.
    ArrayList<AclEntry> aclBuilder = Lists.newArrayListWithCapacity(MAX_ENTRIES);
    // Mask entries the spec did NOT remove, keyed by scope; handed to
    // calculateMasks so it can decide whether to keep or recompute them.
    EnumMap<AclEntryScope, AclEntry> providedMask = Maps.newEnumMap(AclEntryScope.class);
    // Scopes whose mask entry was explicitly removed by the spec.
    EnumSet<AclEntryScope> maskDirty = EnumSet.noneOf(AclEntryScope.class);
    // Scopes in which at least one entry was removed (mask may need recalculation).
    EnumSet<AclEntryScope> scopeDirty = EnumSet.noneOf(AclEntryScope.class);
    for (AclEntry existingEntry : existingAcl) {
        if (aclSpec.containsKey(existingEntry)) {
            // Entry is named in the spec: discard it, but record which
            // scope (and whether a mask) was touched.
            scopeDirty.add(existingEntry.getScope());
            if (existingEntry.getType() == MASK) {
                maskDirty.add(existingEntry.getScope());
            }
        } else {
            if (existingEntry.getType() == MASK) {
                // Masks are held aside rather than copied directly; they are
                // reconciled in calculateMasks below.
                providedMask.put(existingEntry.getScope(), existingEntry);
            } else {
                aclBuilder.add(existingEntry);
            }
        }
    }
    // Infer default entries from access entries where required (see javadoc).
    copyDefaultsIfNeeded(aclBuilder);
    // Re-add or recompute masks; throws AclException if a required mask was
    // removed while named entries or an unnamed group entry still need one.
    calculateMasks(aclBuilder, providedMask, maskDirty, scopeDirty);
    return buildAndValidateAcl(aclBuilder);
}
Also used : AclEntryScope(org.apache.hadoop.fs.permission.AclEntryScope) AclEntry(org.apache.hadoop.fs.permission.AclEntry)

Example 12 with AclEntry

use of org.apache.hadoop.fs.permission.AclEntry in project hadoop by apache.

From the class TestAclCommands, method testMultipleAclSpecParsingWithoutPermissions.

@Test
public void testMultipleAclSpecParsingWithoutPermissions() throws Exception {
    // Parse with includePermission=false: every entry in the spec string
    // omits the permission field.
    List<AclEntry> parsed = AclEntry.parseAclSpec("user::,user:user1:,group::,group:group1:,mask::,other::,default:user:user1::,default:mask::", false);
    // Expected entries, in the same order as the spec string above.
    List<AclEntry> expected = new ArrayList<AclEntry>();
    expected.add(new AclEntry.Builder().setType(AclEntryType.USER).build());
    expected.add(new AclEntry.Builder().setType(AclEntryType.USER).setName("user1").build());
    expected.add(new AclEntry.Builder().setType(AclEntryType.GROUP).build());
    expected.add(new AclEntry.Builder().setType(AclEntryType.GROUP).setName("group1").build());
    expected.add(new AclEntry.Builder().setType(AclEntryType.MASK).build());
    expected.add(new AclEntry.Builder().setType(AclEntryType.OTHER).build());
    expected.add(new AclEntry.Builder().setScope(AclEntryScope.DEFAULT).setType(AclEntryType.USER).setName("user1").build());
    expected.add(new AclEntry.Builder().setScope(AclEntryScope.DEFAULT).setType(AclEntryType.MASK).build());
    assertEquals("Parsed Acl not correct", expected, parsed);
}
Also used : AclEntry(org.apache.hadoop.fs.permission.AclEntry) ArrayList(java.util.ArrayList) Test(org.junit.Test)

Example 13 with AclEntry

use of org.apache.hadoop.fs.permission.AclEntry in project hadoop by apache.

From the class TestAclCommands, method testMultipleAclSpecParsing.

@Test
public void testMultipleAclSpecParsing() throws Exception {
    // Parse with includePermission=true: every entry carries an rwx triple.
    List<AclEntry> parsed = AclEntry.parseAclSpec("group::rwx,user:user1:rwx,user:user2:rw-,group:group1:rw-,default:group:group1:rw-", true);
    // Expected entries, in the same order as the spec string above.
    List<AclEntry> expected = new ArrayList<AclEntry>();
    expected.add(new AclEntry.Builder().setType(AclEntryType.GROUP).setPermission(FsAction.ALL).build());
    expected.add(new AclEntry.Builder().setType(AclEntryType.USER).setName("user1").setPermission(FsAction.ALL).build());
    expected.add(new AclEntry.Builder().setType(AclEntryType.USER).setName("user2").setPermission(FsAction.READ_WRITE).build());
    expected.add(new AclEntry.Builder().setType(AclEntryType.GROUP).setName("group1").setPermission(FsAction.READ_WRITE).build());
    expected.add(new AclEntry.Builder().setScope(AclEntryScope.DEFAULT).setType(AclEntryType.GROUP).setName("group1").setPermission(FsAction.READ_WRITE).build());
    assertEquals("Parsed Acl not correct", expected, parsed);
}
Also used : AclEntry(org.apache.hadoop.fs.permission.AclEntry) ArrayList(java.util.ArrayList) Test(org.junit.Test)

Example 14 with AclEntry

use of org.apache.hadoop.fs.permission.AclEntry in project hadoop by apache.

From the class TestExtendedAcls, method testDefaultAclNewChildDirFile.

/**
   * Sets a default ACL on a directory, then verifies that a newly created
   * subdirectory inherits both access and default entries, while a newly
   * created file inherits only the access entries.
   * @throws IOException
   */
@Test
public void testDefaultAclNewChildDirFile() throws IOException {
    Path dir = new Path("/testDefaultAclNewChildDirFile");
    List<AclEntry> defaultSpec = Lists.newArrayList(aclEntry(DEFAULT, USER, "foo", ALL));
    hdfs.mkdirs(dir);
    hdfs.setAcl(dir, defaultSpec);
    // A subdirectory created afterwards must carry the inherited entries.
    Path subDir = new Path(dir, "childDir");
    hdfs.mkdirs(subDir);
    AclEntry[] expectedSubDirAcl = new AclEntry[] {
        aclEntry(ACCESS, USER, "foo", ALL),
        aclEntry(ACCESS, GROUP, READ_EXECUTE),
        aclEntry(DEFAULT, USER, ALL),
        aclEntry(DEFAULT, USER, "foo", ALL),
        aclEntry(DEFAULT, GROUP, READ_EXECUTE),
        aclEntry(DEFAULT, MASK, ALL),
        aclEntry(DEFAULT, OTHER, READ_EXECUTE)
    };
    AclStatus subDirStatus = hdfs.getAclStatus(subDir);
    assertArrayEquals(expectedSubDirAcl, subDirStatus.getEntries().toArray());
    // A file created afterwards carries only the access entries.
    Path subFile = new Path(dir, "childFile");
    hdfs.create(subFile).close();
    AclEntry[] expectedSubFileAcl = new AclEntry[] {
        aclEntry(ACCESS, USER, "foo", ALL),
        aclEntry(ACCESS, GROUP, READ_EXECUTE)
    };
    AclStatus subFileStatus = hdfs.getAclStatus(subFile);
    assertArrayEquals(expectedSubFileAcl, subFileStatus.getEntries().toArray());
    hdfs.delete(dir, true);
}
Also used : Path(org.apache.hadoop.fs.Path) AclStatus(org.apache.hadoop.fs.permission.AclStatus) AclEntry(org.apache.hadoop.fs.permission.AclEntry) Test(org.junit.Test)

Example 15 with AclEntry

use of org.apache.hadoop.fs.permission.AclEntry in project hadoop by apache.

From the class TestExtendedAcls, method testDefaultAclExistingDirFile.

/**
   * Sets a default ACL on a parent directory and verifies that already-created
   * children keep their original entries when the parent's ACL is later
   * changed or removed.
   * @throws Exception
   */
@Test
public void testDefaultAclExistingDirFile() throws Exception {
    Path dir = new Path("/testDefaultAclExistingDirFile");
    hdfs.mkdirs(dir);
    // Initial default ACL on the parent.
    List<AclEntry> originalSpec = Lists.newArrayList(aclEntry(DEFAULT, USER, "foo", ALL));
    // A replacement default ACL applied later.
    List<AclEntry> replacementSpec = Lists.newArrayList(aclEntry(DEFAULT, USER, "foo", READ_EXECUTE));
    hdfs.setAcl(dir, originalSpec);
    Path subDir = new Path(dir, "childDir");
    hdfs.mkdirs(subDir);
    // The subdirectory inherits the original default ACL at creation time.
    AclEntry[] expectedSubDirAcl = new AclEntry[] {
        aclEntry(ACCESS, USER, "foo", ALL),
        aclEntry(ACCESS, GROUP, READ_EXECUTE),
        aclEntry(DEFAULT, USER, ALL),
        aclEntry(DEFAULT, USER, "foo", ALL),
        aclEntry(DEFAULT, GROUP, READ_EXECUTE),
        aclEntry(DEFAULT, MASK, ALL),
        aclEntry(DEFAULT, OTHER, READ_EXECUTE)
    };
    AclStatus subDirStatus = hdfs.getAclStatus(subDir);
    assertArrayEquals(expectedSubDirAcl, subDirStatus.getEntries().toArray());
    Path subFile = new Path(subDir, "childFile");
    // The file inherits only the access entries at creation time.
    hdfs.create(subFile).close();
    AclEntry[] expectedSubFileAcl = new AclEntry[] {
        aclEntry(ACCESS, USER, "foo", ALL),
        aclEntry(ACCESS, GROUP, READ_EXECUTE)
    };
    AclStatus subFileStatus = hdfs.getAclStatus(subFile);
    assertArrayEquals(expectedSubFileAcl, subFileStatus.getEntries().toArray());
    // Changing the parent's ACL must not retroactively alter the children.
    hdfs.setAcl(dir, replacementSpec);
    subDirStatus = hdfs.getAclStatus(subDir);
    assertArrayEquals(expectedSubDirAcl, subDirStatus.getEntries().toArray());
    subFileStatus = hdfs.getAclStatus(subFile);
    assertArrayEquals(expectedSubFileAcl, subFileStatus.getEntries().toArray());
    // Removing the parent's ACL must not alter the children either.
    hdfs.removeAcl(dir);
    subDirStatus = hdfs.getAclStatus(subDir);
    assertArrayEquals(expectedSubDirAcl, subDirStatus.getEntries().toArray());
    subFileStatus = hdfs.getAclStatus(subFile);
    assertArrayEquals(expectedSubFileAcl, subFileStatus.getEntries().toArray());
    // Tighten the file's mode bits (0640 masks out group/other access) and
    // confirm the "other" user can no longer read it.
    hdfs.setPermission(subFile, new FsPermission((short) 0640));
    boolean otherCanRead = tryAccess(subFile, "other", new String[] { "other" }, READ);
    assertFalse(otherCanRead);
    hdfs.delete(dir, true);
}
Also used : Path(org.apache.hadoop.fs.Path) AclStatus(org.apache.hadoop.fs.permission.AclStatus) AclEntry(org.apache.hadoop.fs.permission.AclEntry) FsPermission(org.apache.hadoop.fs.permission.FsPermission) Test(org.junit.Test)

Aggregations

AclEntry (org.apache.hadoop.fs.permission.AclEntry)136 Test (org.junit.Test)90 AclStatus (org.apache.hadoop.fs.permission.AclStatus)81 Path (org.apache.hadoop.fs.Path)52 FsPermission (org.apache.hadoop.fs.permission.FsPermission)24 ArrayList (java.util.ArrayList)11 FSAclBaseTest (org.apache.hadoop.hdfs.server.namenode.FSAclBaseTest)11 FileSystem (org.apache.hadoop.fs.FileSystem)10 Configuration (org.apache.hadoop.conf.Configuration)7 MockResponse (com.squareup.okhttp.mockwebserver.MockResponse)5 FileStatus (org.apache.hadoop.fs.FileStatus)5 ScopedAclEntries (org.apache.hadoop.fs.permission.ScopedAclEntries)5 DistributedFileSystem (org.apache.hadoop.hdfs.DistributedFileSystem)5 DatanodeInfoBuilder (org.apache.hadoop.hdfs.protocol.DatanodeInfo.DatanodeInfoBuilder)5 List (java.util.List)4 AclEntryScope (org.apache.hadoop.fs.permission.AclEntryScope)4 AclEntryProto (org.apache.hadoop.hdfs.protocol.proto.AclProtos.AclEntryProto)4 IOException (java.io.IOException)3 URI (java.net.URI)3 AclEntryType (org.apache.hadoop.fs.permission.AclEntryType)3