Example 96 with HdfsFileStatus

Use of org.apache.hadoop.hdfs.protocol.HdfsFileStatus in project hadoop by apache.

The class FSNamesystem, method concat.

/**
   * Moves all the blocks from {@code srcs} and appends them to {@code target}.
   * To avoid rollbacks, we verify the validity of ALL of the args
   * before we start the actual move.
   * 
   * This does not support the ".inodes" relative path.
   * @param target the target to concat into
   * @param srcs the files that will be concatenated
   * @throws IOException on error
   */
void concat(String target, String[] srcs, boolean logRetryCache) throws IOException {
    final String operationName = "concat";
    HdfsFileStatus stat = null;
    boolean success = false;
    writeLock();
    try {
        checkOperation(OperationCategory.WRITE);
        checkNameNodeSafeMode("Cannot concat " + target);
        stat = FSDirConcatOp.concat(dir, target, srcs, logRetryCache);
        success = true;
    } catch (AccessControlException ace) {
        logAuditEvent(success, operationName, Arrays.toString(srcs), target, stat);
        throw ace;
    } finally {
        writeUnlock(operationName);
        if (success) {
            getEditLog().logSync();
        }
    }
    logAuditEvent(success, operationName, Arrays.toString(srcs), target, stat);
}
Also used: HdfsFileStatus (org.apache.hadoop.hdfs.protocol.HdfsFileStatus), AccessControlException (org.apache.hadoop.security.AccessControlException), SnapshotAccessControlException (org.apache.hadoop.hdfs.protocol.SnapshotAccessControlException)
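For context, a client reaches the concat operation above through DistributedFileSystem rather than by calling FSNamesystem directly. The following is a minimal client-side sketch; the class name, namenode URI, and paths are placeholders, not taken from the Hadoop sources.

// Hypothetical client-side sketch (not part of the Hadoop sources): invoking
// concat through DistributedFileSystem. URI and paths are placeholders.
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DistributedFileSystem;

public class ConcatClientSketch {

    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        try (FileSystem fs = FileSystem.get(URI.create("hdfs://namenode:8020"), conf)) {
            DistributedFileSystem dfs = (DistributedFileSystem) fs;
            Path target = new Path("/data/part-all");
            Path[] srcs = { new Path("/data/part-0"), new Path("/data/part-1") };
            // Moves the blocks of the source files onto the target; the sources
            // no longer exist as separate files afterwards.
            dfs.concat(target, srcs);
        }
    }
}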

Example 97 with HdfsFileStatus

Use of org.apache.hadoop.hdfs.protocol.HdfsFileStatus in project hadoop by apache.

The class FSNamesystem, method createSymlink.

/**
   * Create a symbolic link.
   */
void createSymlink(String target, String link, PermissionStatus dirPerms, boolean createParent, boolean logRetryCache) throws IOException {
    final String operationName = "createSymlink";
    if (!FileSystem.areSymlinksEnabled()) {
        throw new UnsupportedOperationException("Symlinks not supported");
    }
    HdfsFileStatus auditStat = null;
    writeLock();
    try {
        checkOperation(OperationCategory.WRITE);
        checkNameNodeSafeMode("Cannot create symlink " + link);
        auditStat = FSDirSymlinkOp.createSymlinkInt(this, target, link, dirPerms, createParent, logRetryCache);
    } catch (AccessControlException e) {
        logAuditEvent(false, operationName, link, target, null);
        throw e;
    } finally {
        writeUnlock(operationName);
    }
    getEditLog().logSync();
    logAuditEvent(true, operationName, link, target, auditStat);
}
Also used: HdfsFileStatus (org.apache.hadoop.hdfs.protocol.HdfsFileStatus), AccessControlException (org.apache.hadoop.security.AccessControlException), SnapshotAccessControlException (org.apache.hadoop.hdfs.protocol.SnapshotAccessControlException)
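On the client side, symlink creation typically goes through FileContext. A minimal sketch, assuming symlink support has been enabled (it is disabled by default, and FileSystem.enableSymlinks() is a test-oriented hook) and using placeholder paths:

// Hypothetical client-side sketch (not part of the Hadoop sources): creating a
// symlink via FileContext. Paths are placeholders; symlink support must be
// enabled, mirroring the FileSystem.areSymlinksEnabled() check above.
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class SymlinkClientSketch {

    public static void main(String[] args) throws Exception {
        // Symlinks are disabled by default; this static hook is intended for tests.
        FileSystem.enableSymlinks();
        FileContext fc = FileContext.getFileContext(new Configuration());
        Path target = new Path("/data/real-file");
        Path link = new Path("/data/link-to-file");
        // createParent == true creates missing parent directories of the link.
        fc.createSymlink(target, link, true);
    }
}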

Example 98 with HdfsFileStatus

Use of org.apache.hadoop.hdfs.protocol.HdfsFileStatus in project hadoop by apache.

The class FSNamesystem, method getFileInfo.

/**
   * Get the file info for a specific file.
   *
   * @param src The string representation of the path to the file
   * @param resolveLink whether to throw UnresolvedLinkException
   *        if src refers to a symlink
   *
   * @throws AccessControlException if access is denied
   * @throws UnresolvedLinkException if a symlink is encountered.
   *
   * @return object containing information regarding the file
   *         or null if file not found
   * @throws StandbyException
   */
HdfsFileStatus getFileInfo(final String src, boolean resolveLink) throws IOException {
    final String operationName = "getfileinfo";
    checkOperation(OperationCategory.READ);
    HdfsFileStatus stat = null;
    readLock();
    try {
        checkOperation(OperationCategory.READ);
        stat = FSDirStatAndListingOp.getFileInfo(dir, src, resolveLink);
    } catch (AccessControlException e) {
        logAuditEvent(false, operationName, src);
        throw e;
    } finally {
        readUnlock(operationName);
    }
    logAuditEvent(true, operationName, src);
    return stat;
}
Also used: HdfsFileStatus (org.apache.hadoop.hdfs.protocol.HdfsFileStatus), AccessControlException (org.apache.hadoop.security.AccessControlException), SnapshotAccessControlException (org.apache.hadoop.hdfs.protocol.SnapshotAccessControlException)
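The RPC that reaches getFileInfo above originates from DFSClient.getFileInfo, which returns an HdfsFileStatus or null if the path does not exist. A minimal client-side sketch with a placeholder URI and path:

// Hypothetical client-side sketch (not part of the Hadoop sources): fetching an
// HdfsFileStatus through DFSClient. URI and path are placeholders.
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.DFSClient;
import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;

public class FileInfoClientSketch {

    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        try (DFSClient client = new DFSClient(URI.create("hdfs://namenode:8020"), conf)) {
            HdfsFileStatus stat = client.getFileInfo("/data/part-0");
            if (stat == null) {
                System.out.println("file not found");
            } else {
                // getFileInfo returned a status object; print a couple of fields.
                System.out.println("len=" + stat.getLen()
                    + " mtime=" + stat.getModificationTime());
            }
        }
    }
}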

Example 99 with HdfsFileStatus

Use of org.apache.hadoop.hdfs.protocol.HdfsFileStatus in project hadoop by apache.

The class FSNamesystem, method getEZForPath.

/**
   * Get the encryption zone for the specified path.
   *
   * @param srcArg the path of a file or directory to get the EZ for.
   * @return the EZ of the path, or null if none.
   * @throws AccessControlException  if the caller is not the superuser.
   * @throws UnresolvedLinkException if the path can't be resolved.
   */
EncryptionZone getEZForPath(final String srcArg) throws AccessControlException, UnresolvedLinkException, IOException {
    final String operationName = "getEZForPath";
    HdfsFileStatus resultingStat = null;
    boolean success = false;
    EncryptionZone encryptionZone;
    final FSPermissionChecker pc = getPermissionChecker();
    checkOperation(OperationCategory.READ);
    readLock();
    try {
        checkOperation(OperationCategory.READ);
        Entry<EncryptionZone, HdfsFileStatus> ezForPath = FSDirEncryptionZoneOp.getEZForPath(dir, srcArg, pc);
        success = true;
        resultingStat = ezForPath.getValue();
        encryptionZone = ezForPath.getKey();
    } catch (AccessControlException ace) {
        logAuditEvent(success, operationName, srcArg, null, resultingStat);
        throw ace;
    } finally {
        readUnlock(operationName);
    }
    logAuditEvent(success, operationName, srcArg, null, resultingStat);
    return encryptionZone;
}
Also used: EncryptionZone (org.apache.hadoop.hdfs.protocol.EncryptionZone), HdfsFileStatus (org.apache.hadoop.hdfs.protocol.HdfsFileStatus), AccessControlException (org.apache.hadoop.security.AccessControlException), SnapshotAccessControlException (org.apache.hadoop.hdfs.protocol.SnapshotAccessControlException)
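Clients usually query encryption zones through HdfsAdmin, which ends up in the getEZForPath call above. A minimal sketch; the URI and path are placeholders:

// Hypothetical client-side sketch (not part of the Hadoop sources): looking up
// the encryption zone of a path via HdfsAdmin. URI and path are placeholders.
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.client.HdfsAdmin;
import org.apache.hadoop.hdfs.protocol.EncryptionZone;

public class EncryptionZoneClientSketch {

    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        HdfsAdmin admin = new HdfsAdmin(URI.create("hdfs://namenode:8020"), conf);
        // Returns the zone for the path, or null if the path is not in a zone.
        EncryptionZone ez = admin.getEncryptionZoneForPath(new Path("/secure/data"));
        if (ez == null) {
            System.out.println("path is not inside an encryption zone");
        } else {
            System.out.println("zone=" + ez.getPath() + " key=" + ez.getKeyName());
        }
    }
}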

Example 100 with HdfsFileStatus

Use of org.apache.hadoop.hdfs.protocol.HdfsFileStatus in project hadoop by apache.

The class FSNamesystem, method setPermission.

/////////////////////////////////////////////////////////
//
// These methods are called by HadoopFS clients
//
/////////////////////////////////////////////////////////
/**
   * Set permissions for an existing file.
   * @throws IOException
   */
void setPermission(String src, FsPermission permission) throws IOException {
    final String operationName = "setPermission";
    HdfsFileStatus auditStat;
    checkOperation(OperationCategory.WRITE);
    writeLock();
    try {
        checkOperation(OperationCategory.WRITE);
        checkNameNodeSafeMode("Cannot set permission for " + src);
        auditStat = FSDirAttrOp.setPermission(dir, src, permission);
    } catch (AccessControlException e) {
        logAuditEvent(false, operationName, src);
        throw e;
    } finally {
        writeUnlock(operationName);
    }
    getEditLog().logSync();
    logAuditEvent(true, operationName, src, null, auditStat);
}
Also used: HdfsFileStatus (org.apache.hadoop.hdfs.protocol.HdfsFileStatus), AccessControlException (org.apache.hadoop.security.AccessControlException), SnapshotAccessControlException (org.apache.hadoop.hdfs.protocol.SnapshotAccessControlException)
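The corresponding client call is FileSystem.setPermission, which for an HDFS path reaches the setPermission method above. A minimal sketch; the URI, path, and mode are placeholders:

// Hypothetical client-side sketch (not part of the Hadoop sources): setting
// permissions through FileSystem. URI, path, and mode are placeholders.
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;

public class SetPermissionClientSketch {

    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        try (FileSystem fs = FileSystem.get(URI.create("hdfs://namenode:8020"), conf)) {
            // 0640: owner read/write, group read, others no access.
            fs.setPermission(new Path("/data/part-0"), new FsPermission((short) 0640));
        }
    }
}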

Aggregations

HdfsFileStatus (org.apache.hadoop.hdfs.protocol.HdfsFileStatus): 124
Test (org.junit.Test): 51
FileHandle (org.apache.hadoop.nfs.nfs3.FileHandle): 34
IOException (java.io.IOException): 28
InetSocketAddress (java.net.InetSocketAddress): 28
XDR (org.apache.hadoop.oncrpc.XDR): 28
AccessControlException (org.apache.hadoop.security.AccessControlException): 26
Path (org.apache.hadoop.fs.Path): 23
SnapshotAccessControlException (org.apache.hadoop.hdfs.protocol.SnapshotAccessControlException): 23
FileNotFoundException (java.io.FileNotFoundException): 16
DFSClient (org.apache.hadoop.hdfs.DFSClient): 11
DirectoryListing (org.apache.hadoop.hdfs.protocol.DirectoryListing): 11
FsPermission (org.apache.hadoop.fs.permission.FsPermission): 10
DistributedFileSystem (org.apache.hadoop.hdfs.DistributedFileSystem): 9
MiniDFSCluster (org.apache.hadoop.hdfs.MiniDFSCluster): 9
LocatedBlock (org.apache.hadoop.hdfs.protocol.LocatedBlock): 8
SetAttr3 (org.apache.hadoop.nfs.nfs3.request.SetAttr3): 8
FileStatus (org.apache.hadoop.fs.FileStatus): 7
Matchers.anyString (org.mockito.Matchers.anyString): 7
Configuration (org.apache.hadoop.conf.Configuration): 6