Example 36 with AccessControlException

Use of org.apache.hadoop.security.AccessControlException in project hadoop by apache.

The class WebHdfsFileSystem, method getDelegationToken.

@Override
public Token<DelegationTokenIdentifier> getDelegationToken(final String renewer) throws IOException {
    final HttpOpParam.Op op = GetOpParam.Op.GETDELEGATIONTOKEN;
    Token<DelegationTokenIdentifier> token = new FsPathResponseRunner<Token<DelegationTokenIdentifier>>(op, null, new RenewerParam(renewer)) {

        @Override
        Token<DelegationTokenIdentifier> decodeResponse(Map<?, ?> json) throws IOException {
            return JsonUtilClient.toDelegationToken(json);
        }
    }.run();
    if (token != null) {
        token.setService(tokenServiceName);
    } else {
        if (disallowFallbackToInsecureCluster) {
            throw new AccessControlException(CANT_FALLBACK_TO_INSECURE_MSG);
        }
    }
    return token;
}
Also used : DelegationTokenIdentifier(org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier) Op(org.apache.hadoop.hdfs.web.resources.HttpOpParam.Op) AccessControlException(org.apache.hadoop.security.AccessControlException) InvalidToken(org.apache.hadoop.security.token.SecretManager.InvalidToken) Token(org.apache.hadoop.security.token.Token) IOException(java.io.IOException)
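
For context, a caller of this method has to be prepared for the AccessControlException raised when no token is returned and fallback to an insecure cluster is disallowed. Below is a minimal sketch of such a caller; the class name DelegationTokenFetcher and the helper fetchTokenOrNull are my own, and the WebHdfsFileSystem instance is assumed to be already initialized.

import java.io.IOException;

import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
import org.apache.hadoop.hdfs.web.WebHdfsFileSystem;
import org.apache.hadoop.security.AccessControlException;
import org.apache.hadoop.security.token.Token;

public class DelegationTokenFetcher {

    // Hypothetical helper: returns the token, or null when the cluster is
    // insecure and the caller is willing to continue without one.
    static Token<DelegationTokenIdentifier> fetchTokenOrNull(
            WebHdfsFileSystem fs, String renewer) throws IOException {
        try {
            return fs.getDelegationToken(renewer);
        } catch (AccessControlException ace) {
            // Thrown by getDelegationToken() above when the token is null and
            // fallback to an insecure cluster is disallowed.
            return null;
        }
    }
}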

Example 37 with AccessControlException

Use of org.apache.hadoop.security.AccessControlException in project hadoop by apache.

The class CacheManager, method listCacheDirectives.

public BatchedListEntries<CacheDirectiveEntry> listCacheDirectives(long prevId, CacheDirectiveInfo filter, FSPermissionChecker pc) throws IOException {
    assert namesystem.hasReadLock();
    final int NUM_PRE_ALLOCATED_ENTRIES = 16;
    String filterPath = null;
    if (filter.getPath() != null) {
        filterPath = validatePath(filter);
    }
    if (filter.getReplication() != null) {
        throw new InvalidRequestException("Filtering by replication is unsupported.");
    }
    // Querying for a single ID
    final Long id = filter.getId();
    if (id != null) {
        if (!directivesById.containsKey(id)) {
            throw new InvalidRequestException("Did not find requested id " + id);
        }
        // Since we use a tailMap on directivesById, setting prev to id-1 gets
        // us the directive with the id (if present)
        prevId = id - 1;
    }
    ArrayList<CacheDirectiveEntry> replies = new ArrayList<CacheDirectiveEntry>(NUM_PRE_ALLOCATED_ENTRIES);
    int numReplies = 0;
    SortedMap<Long, CacheDirective> tailMap = directivesById.tailMap(prevId + 1);
    for (Entry<Long, CacheDirective> cur : tailMap.entrySet()) {
        if (numReplies >= maxListCacheDirectivesNumResponses) {
            return new BatchedListEntries<CacheDirectiveEntry>(replies, true);
        }
        CacheDirective curDirective = cur.getValue();
        CacheDirectiveInfo info = cur.getValue().toInfo();
        // If a specific id was requested and this entry does not match it,
        // we have passed the requested item and should break out.
        if (id != null && !(info.getId().equals(id))) {
            break;
        }
        if (filter.getPool() != null && !info.getPool().equals(filter.getPool())) {
            continue;
        }
        if (filterPath != null && !info.getPath().toUri().getPath().equals(filterPath)) {
            continue;
        }
        boolean hasPermission = true;
        if (pc != null) {
            try {
                pc.checkPermission(curDirective.getPool(), FsAction.READ);
            } catch (AccessControlException e) {
                hasPermission = false;
            }
        }
        if (hasPermission) {
            replies.add(new CacheDirectiveEntry(info, cur.getValue().toStats()));
            numReplies++;
        }
    }
    return new BatchedListEntries<CacheDirectiveEntry>(replies, false);
}
Also used : ArrayList(java.util.ArrayList) AccessControlException(org.apache.hadoop.security.AccessControlException) CacheDirectiveInfo(org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo) CacheDirective(org.apache.hadoop.hdfs.protocol.CacheDirective) BatchedListEntries(org.apache.hadoop.fs.BatchedRemoteIterator.BatchedListEntries) CacheDirectiveEntry(org.apache.hadoop.hdfs.protocol.CacheDirectiveEntry) InvalidRequestException(org.apache.hadoop.fs.InvalidRequestException)
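
The pattern worth noting above is how AccessControlException is demoted to a boolean: directives in pools the caller cannot READ are silently skipped instead of failing the whole listing. A minimal client-side sketch that exercises this path through the public API is shown below; it assumes an HDFS cluster is configured as the default filesystem, and the pool name "testPool" is a placeholder.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.protocol.CacheDirectiveEntry;
import org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo;

public class ListCacheDirectivesExample {

    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Assumes fs.defaultFS points at an HDFS cluster.
        DistributedFileSystem dfs = (DistributedFileSystem) FileSystem.get(conf);
        // Only directives in pools the caller has READ access to come back;
        // the server-side check shown above drops the rest silently.
        CacheDirectiveInfo filter = new CacheDirectiveInfo.Builder()
            .setPool("testPool")  // placeholder pool name
            .build();
        RemoteIterator<CacheDirectiveEntry> it = dfs.listCacheDirectives(filter);
        while (it.hasNext()) {
            CacheDirectiveEntry entry = it.next();
            System.out.println(entry.getInfo().getPath());
        }
    }
}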

Example 38 with AccessControlException

Use of org.apache.hadoop.security.AccessControlException in project hadoop by apache.

The class FSDirectory, method resolvePath.

/**
   * Resolves a given path into an INodesInPath.  All ancestor inodes that
   * exist are validated as traversable directories.  Symlinks in the ancestry
   * will generate an UnresolvedLinkException.  The returned IIP will be an
   * accessible path that also passed additional sanity checks based on how
   * the path will be used as specified by the DirOp.
   *   READ:   Expands reserved paths and performs permission checks
   *           during traversal.  Raw paths are only accessible by a superuser.
   *   WRITE:  In addition to READ checks, ensures the path is not a
   *           snapshot path.
   *   CREATE: In addition to WRITE checks, ensures path does not contain
   *           illegal character sequences.
   *
   * @param pc  A permission checker for traversal checks.  Pass null for
   *            no permission checks.
   * @param src The path to resolve.
   * @param dirOp The {@link DirOp} that controls additional checks.
   * @param resolveLink If false, only ancestor symlinks will be checked.  If
   *         true, the last inode will also be checked.
   * @return if the path indicates an inode, return path after replacing up to
   *         <inodeid> with the corresponding path of the inode, else the path
   *         in {@code src} as is. If the path refers to a path in the "raw"
   *         directory, return the non-raw pathname.
   * @throws FileNotFoundException
   * @throws AccessControlException
   * @throws ParentNotDirectoryException
   * @throws UnresolvedLinkException
   */
@VisibleForTesting
public INodesInPath resolvePath(FSPermissionChecker pc, String src, DirOp dirOp) throws UnresolvedLinkException, FileNotFoundException, AccessControlException, ParentNotDirectoryException {
    boolean isCreate = (dirOp == DirOp.CREATE || dirOp == DirOp.CREATE_LINK);
    // prevent creation of new invalid paths
    if (isCreate && !DFSUtil.isValidName(src)) {
        throw new InvalidPathException("Invalid file name: " + src);
    }
    byte[][] components = INode.getPathComponents(src);
    boolean isRaw = isReservedRawName(components);
    if (isPermissionEnabled && pc != null && isRaw) {
        pc.checkSuperuserPrivilege();
    }
    components = resolveComponents(components, this);
    INodesInPath iip = INodesInPath.resolve(rootDir, components, isRaw);
    // Verify all ancestors are traversable directories. Note that only
    // methods creating new namespace items are declared to throw
    // ParentNotDirectoryException (PNDE); read paths translate it below.
    try {
        checkTraverse(pc, iip, dirOp);
    } catch (ParentNotDirectoryException pnde) {
        if (!isCreate) {
            throw new AccessControlException(pnde.getMessage());
        }
        throw pnde;
    }
    return iip;
}
Also used : ParentNotDirectoryException(org.apache.hadoop.fs.ParentNotDirectoryException) AccessControlException(org.apache.hadoop.security.AccessControlException) SnapshotAccessControlException(org.apache.hadoop.hdfs.protocol.SnapshotAccessControlException) InvalidPathException(org.apache.hadoop.fs.InvalidPathException) VisibleForTesting(com.google.common.annotations.VisibleForTesting)
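
The detail to notice is the exception translation at the end: only operations that create new namespace items are declared to throw ParentNotDirectoryException, so for read and write resolutions the same condition is reported as an AccessControlException. The hypothetical helper below restates that rule in isolation; the class and method names are my own.

import org.apache.hadoop.fs.ParentNotDirectoryException;
import org.apache.hadoop.security.AccessControlException;

final class PathErrorTranslation {

    // For non-create operations, report "an ancestor is not a directory" as
    // an AccessControlException, since their signatures do not declare PNDE;
    // create operations propagate the precise exception.
    static void rethrow(ParentNotDirectoryException pnde, boolean isCreate)
            throws AccessControlException, ParentNotDirectoryException {
        if (!isCreate) {
            throw new AccessControlException(pnde.getMessage());
        }
        throw pnde;
    }
}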

Example 39 with AccessControlException

Use of org.apache.hadoop.security.AccessControlException in project hadoop by apache.

The class FSDirStatAndListingOp, method getFileInfo.

/**
   * Get the file info for a specific file.
   *
   * @param srcArg The string representation of the path to the file
   * @param resolveLink whether to throw UnresolvedLinkException
   *        if src refers to a symlink
   *
   * @return object containing information regarding the file
   *         or null if file not found
   */
static HdfsFileStatus getFileInfo(FSDirectory fsd, String srcArg, boolean resolveLink) throws IOException {
    DirOp dirOp = resolveLink ? DirOp.READ : DirOp.READ_LINK;
    FSPermissionChecker pc = fsd.getPermissionChecker();
    final INodesInPath iip;
    if (pc.isSuperUser()) {
        // Path resolution may still throw an AccessControlException; swallow
        // it so the superuser receives null instead.
        try {
            iip = fsd.resolvePath(pc, srcArg, dirOp);
        } catch (AccessControlException ace) {
            return null;
        }
    } else {
        iip = fsd.resolvePath(pc, srcArg, dirOp);
    }
    return getFileInfo(fsd, iip);
}
Also used : DirOp(org.apache.hadoop.hdfs.server.namenode.FSDirectory.DirOp) AccessControlException(org.apache.hadoop.security.AccessControlException)
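
The same "null instead of an exception" convention can be useful on the client side. A small sketch, assuming the hypothetical helper name statOrNull; FileSystem.getFileStatus itself throws FileNotFoundException for missing paths and AccessControlException when access is denied.

import java.io.FileNotFoundException;
import java.io.IOException;

import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.AccessControlException;

public final class StatHelper {

    // Hypothetical helper mirroring the server-side behaviour above: return
    // null rather than propagating the exception when the status cannot be
    // obtained for the given path.
    static FileStatus statOrNull(FileSystem fs, Path path) throws IOException {
        try {
            return fs.getFileStatus(path);
        } catch (FileNotFoundException | AccessControlException e) {
            return null;
        }
    }
}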

Example 40 with AccessControlException

Use of org.apache.hadoop.security.AccessControlException in project hadoop by apache.

The class TestStickyBit, method confirmDeletingFiles.

/**
   * Test that one user can't delete another user's file when the sticky bit is
   * set.
   */
private void confirmDeletingFiles(Configuration conf, Path p) throws Exception {
    // Write a file to the new temp directory as a regular user
    Path file = new Path(p, "foo");
    writeFile(hdfsAsUser1, file);
    // Make sure the correct user is the owner
    assertEquals(user1.getShortUserName(), hdfsAsUser1.getFileStatus(file).getOwner());
    // Log onto cluster as another user and attempt to delete the file
    try {
        hdfsAsUser2.delete(file, false);
        fail("Shouldn't be able to delete someone else's file with SB on");
    } catch (IOException ioe) {
        assertTrue(ioe instanceof AccessControlException);
        assertTrue(ioe.getMessage().contains("sticky bit"));
        assertTrue(ioe.getMessage().contains("user=" + user2.getUserName()));
        assertTrue(ioe.getMessage().contains("path=\"" + file + "\""));
        assertTrue(ioe.getMessage().contains("parent=\"" + file.getParent() + "\""));
    }
}
Also used : Path(org.apache.hadoop.fs.Path) AccessControlException(org.apache.hadoop.security.AccessControlException) IOException(java.io.IOException)
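
The per-user handles hdfsAsUser1 and hdfsAsUser2 used above are created elsewhere in the test class. A sketch of the usual way such handles are obtained in HDFS tests is shown below, using UserGroupInformation.createUserForTesting and doAs; the group name and the cluster URI passed in are assumptions of this sketch.

import java.net.URI;
import java.security.PrivilegedExceptionAction;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.security.UserGroupInformation;

public final class TestUserFileSystems {

    // Obtain a FileSystem whose calls are permission-checked as the given
    // test user rather than as the JVM user running the test.
    static FileSystem fileSystemAsUser(final Configuration conf, final URI clusterUri,
            String userName) throws Exception {
        UserGroupInformation ugi =
            UserGroupInformation.createUserForTesting(userName, new String[] { "users" });
        return ugi.doAs(new PrivilegedExceptionAction<FileSystem>() {
            @Override
            public FileSystem run() throws Exception {
                return FileSystem.get(clusterUri, conf);
            }
        });
    }
}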

Aggregations

AccessControlException (org.apache.hadoop.security.AccessControlException): 129
Test (org.junit.Test): 59
Path (org.apache.hadoop.fs.Path): 53
IOException (java.io.IOException): 52
SnapshotAccessControlException (org.apache.hadoop.hdfs.protocol.SnapshotAccessControlException): 35
FsPermission (org.apache.hadoop.fs.permission.FsPermission): 33
UserGroupInformation (org.apache.hadoop.security.UserGroupInformation): 22
HdfsFileStatus (org.apache.hadoop.hdfs.protocol.HdfsFileStatus): 21
FileSystem (org.apache.hadoop.fs.FileSystem): 19
DistributedFileSystem (org.apache.hadoop.hdfs.DistributedFileSystem): 14
Configuration (org.apache.hadoop.conf.Configuration): 11
FileNotFoundException (java.io.FileNotFoundException): 10
CachePoolInfo (org.apache.hadoop.hdfs.protocol.CachePoolInfo): 8
PrivilegedExceptionAction (java.security.PrivilegedExceptionAction): 7
FileStatus (org.apache.hadoop.fs.FileStatus): 6
CacheDirectiveInfo (org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo): 6
Text (org.apache.hadoop.io.Text): 5
InvalidToken (org.apache.hadoop.security.token.SecretManager.InvalidToken): 5
YarnException (org.apache.hadoop.yarn.exceptions.YarnException): 5
ArrayList (java.util.ArrayList): 4