
Example 1 with EncryptionZone

Use of org.apache.hadoop.hdfs.protocol.EncryptionZone in project hadoop by apache.

From the class FSDirEncryptionZoneOp, method getFileEncryptionInfo.

/**
   * If the file and encryption key are valid, return the encryption info,
   * else throw a retry exception.  The startFile method generates the EDEK
   * outside of the lock so the zone must be reverified.
   *
   * @param dir the FSDirectory
   * @param iip inodes in the file path
   * @param ezInfo the encryption key info (EDEK and crypto settings)
   * @return FileEncryptionInfo for the file
   * @throws RetryStartFileException if key is inconsistent with current zone
   */
static FileEncryptionInfo getFileEncryptionInfo(FSDirectory dir, INodesInPath iip, EncryptionKeyInfo ezInfo) throws RetryStartFileException {
    FileEncryptionInfo feInfo = null;
    final EncryptionZone zone = getEZForPath(dir, iip);
    if (zone != null) {
        // The path is now within an EZ, but we're missing encryption parameters
        if (ezInfo == null) {
            throw new RetryStartFileException();
        }
        // Path is within an EZ and we have provided encryption parameters.
        // Make sure that the generated EDEK matches the settings of the EZ.
        final String ezKeyName = zone.getKeyName();
        if (!ezKeyName.equals(ezInfo.edek.getEncryptionKeyName())) {
            throw new RetryStartFileException();
        }
        feInfo = new FileEncryptionInfo(ezInfo.suite, ezInfo.protocolVersion,
                ezInfo.edek.getEncryptedKeyVersion().getMaterial(),
                ezInfo.edek.getEncryptedKeyIv(), ezKeyName,
                ezInfo.edek.getEncryptionKeyVersionName());
    }
    return feInfo;
}
Also used: EncryptionZone (org.apache.hadoop.hdfs.protocol.EncryptionZone), FileEncryptionInfo (org.apache.hadoop.fs.FileEncryptionInfo)
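
The caller of this helper, FSNamesystem#startFile, generates the EDEK outside the namesystem lock (the KMS round trip is slow) and retries the whole create when the zone's key has changed in the meantime. The following is a minimal, self-contained sketch of that retry pattern; RetryStartFileSketch, generateEdek, and createFileUnderLock are hypothetical placeholder names, not Hadoop APIs.

import java.util.concurrent.ThreadLocalRandom;

// Hypothetical sketch of the startFile retry pattern: the expensive key
// generation runs outside the lock, the zone key is re-checked under the lock,
// and a mismatch triggers another attempt with a freshly generated key.
public class RetryStartFileSketch {

    // Local stand-in for the RetryStartFileException thrown by getFileEncryptionInfo.
    static class RetryStartFileException extends Exception {}

    public static void main(String[] args) throws Exception {
        int maxRetries = 10;
        for (int i = 0; i < maxRetries; i++) {
            String edekKeyName = generateEdek();       // outside the lock (slow KMS call)
            try {
                createFileUnderLock(edekKeyName);      // re-verifies the zone key under the lock
                System.out.println("file created with key " + edekKeyName);
                return;
            } catch (RetryStartFileException e) {
                // Zone key changed between EDEK generation and file creation; retry.
            }
        }
        throw new java.io.IOException("gave up after " + maxRetries + " retries");
    }

    private static String generateEdek() {
        return "ezKey@" + ThreadLocalRandom.current().nextInt(2);    // stub
    }

    private static void createFileUnderLock(String edekKeyName) throws RetryStartFileException {
        String currentZoneKey = "ezKey@1";                           // stub for zone.getKeyName()
        if (!currentZoneKey.equals(edekKeyName)) {
            throw new RetryStartFileException();                     // mirrors the check above
        }
    }
}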

Example 2 with EncryptionZone

Use of org.apache.hadoop.hdfs.protocol.EncryptionZone in project hadoop by apache.

From the class FSDirEncryptionZoneOp, method getFileEncryptionInfo.

/**
   * This function combines the per-file encryption info (obtained
   * from the inode's XAttrs), and the encryption info from its zone, and
   * returns a consolidated FileEncryptionInfo instance. Null is returned
   * for non-encrypted or raw files.
   *
   * @param fsd the FSDirectory
   * @param iip inodes in the path containing the file, passed in to
   *            avoid obtaining the list of inodes again
   * @return consolidated file encryption info; null for non-encrypted files
   */
static FileEncryptionInfo getFileEncryptionInfo(final FSDirectory fsd, final INodesInPath iip) throws IOException {
    if (iip.isRaw() || !fsd.ezManager.hasCreatedEncryptionZone() || !iip.getLastINode().isFile()) {
        return null;
    }
    fsd.readLock();
    try {
        EncryptionZone encryptionZone = getEZForPath(fsd, iip);
        if (encryptionZone == null) {
            // not an encrypted file
            return null;
        } else if (encryptionZone.getPath() == null || encryptionZone.getPath().isEmpty()) {
            // Unexpected, but not fatal: log at debug level and fall through to
            // use the zone's cipher suite, protocol version, and key name below.
            if (NameNode.LOG.isDebugEnabled()) {
                NameNode.LOG.debug("Encryption zone " + encryptionZone.getPath() + " does not have a valid path.");
            }
        }
        final CryptoProtocolVersion version = encryptionZone.getVersion();
        final CipherSuite suite = encryptionZone.getSuite();
        final String keyName = encryptionZone.getKeyName();
        XAttr fileXAttr = FSDirXAttrOp.unprotectedGetXAttrByPrefixedName(iip, CRYPTO_XATTR_FILE_ENCRYPTION_INFO);
        if (fileXAttr == null) {
            NameNode.LOG.warn("Could not find encryption XAttr for file " + iip.getPath() + " in encryption zone " + encryptionZone.getPath());
            return null;
        }
        try {
            HdfsProtos.PerFileEncryptionInfoProto fileProto = HdfsProtos.PerFileEncryptionInfoProto.parseFrom(fileXAttr.getValue());
            return PBHelperClient.convert(fileProto, suite, version, keyName);
        } catch (InvalidProtocolBufferException e) {
            throw new IOException("Could not parse file encryption info for inode " + iip.getPath(), e);
        }
    } finally {
        fsd.readUnlock();
    }
}
Also used: EncryptionZone (org.apache.hadoop.hdfs.protocol.EncryptionZone), HdfsProtos (org.apache.hadoop.hdfs.protocol.proto.HdfsProtos), CryptoProtocolVersion (org.apache.hadoop.crypto.CryptoProtocolVersion), CipherSuite (org.apache.hadoop.crypto.CipherSuite), InvalidProtocolBufferException (com.google.protobuf.InvalidProtocolBufferException), IOException (java.io.IOException), XAttr (org.apache.hadoop.fs.XAttr)
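
The consolidated FileEncryptionInfo built here is what the NameNode hands back to clients, which is why HDFS transparent encryption needs no application changes on the read path. A minimal client-side sketch, assuming a running cluster with an encryption zone configured and a hypothetical encrypted file /ez/file.txt:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

public class ReadEncryptedFile {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();   // picks up core-site.xml / hdfs-site.xml
        FileSystem fs = FileSystem.get(conf);
        // Decryption happens inside the DFS client using the FileEncryptionInfo
        // supplied by the NameNode; the application just reads plain bytes.
        try (FSDataInputStream in = fs.open(new Path("/ez/file.txt"))) {
            IOUtils.copyBytes(in, System.out, 4096, false);
        }
    }
}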

Example 3 with EncryptionZone

Use of org.apache.hadoop.hdfs.protocol.EncryptionZone in project hadoop by apache.

From the class DistributedFileSystem, method getTrashRoots.

/**
   * Get all the trash roots of HDFS for the current user or for all users.
   * 1. File deleted from non-encryption zone /user/username/.Trash
   * 2. File deleted from encryption zones
   *    e.g., ez1 rooted at /ez1 has its trash root at /ez1/.Trash/$USER
   * @param allUsers if true, return the trash roots of all users (used by the trash emptier)
   * @return trash roots of HDFS
   */
@Override
public Collection<FileStatus> getTrashRoots(boolean allUsers) {
    List<FileStatus> ret = new ArrayList<>();
    // Get normal trash roots
    ret.addAll(super.getTrashRoots(allUsers));
    try {
        // Get EZ Trash roots
        final RemoteIterator<EncryptionZone> it = dfs.listEncryptionZones();
        while (it.hasNext()) {
            Path ezTrashRoot = new Path(it.next().getPath(), FileSystem.TRASH_PREFIX);
            if (!exists(ezTrashRoot)) {
                continue;
            }
            if (allUsers) {
                for (FileStatus candidate : listStatus(ezTrashRoot)) {
                    if (exists(candidate.getPath())) {
                        ret.add(candidate);
                    }
                }
            } else {
                Path userTrash = new Path(ezTrashRoot, System.getProperty("user.name"));
                try {
                    ret.add(getFileStatus(userTrash));
                } catch (FileNotFoundException ignored) {
                }
            }
        }
    } catch (IOException e) {
        DFSClient.LOG.warn("Cannot get all encrypted trash roots", e);
    }
    return ret;
}
Also used: Path (org.apache.hadoop.fs.Path), EncryptionZone (org.apache.hadoop.hdfs.protocol.EncryptionZone), FileStatus (org.apache.hadoop.fs.FileStatus), LocatedFileStatus (org.apache.hadoop.fs.LocatedFileStatus), HdfsFileStatus (org.apache.hadoop.hdfs.protocol.HdfsFileStatus), HdfsLocatedFileStatus (org.apache.hadoop.hdfs.protocol.HdfsLocatedFileStatus), ArrayList (java.util.ArrayList), FileNotFoundException (java.io.FileNotFoundException), IOException (java.io.IOException)
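
A short sketch of how a caller might consume this override through the public FileSystem API, assuming a running cluster; getTrashRoots(false) returns only the current user's trash roots, including one per encryption zone as described in the Javadoc above:

import java.util.Collection;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;

public class PrintTrashRoots {
    public static void main(String[] args) throws Exception {
        FileSystem fs = FileSystem.get(new Configuration());
        // false: only the current user's trash roots, e.g. /user/$USER/.Trash
        // plus /ez1/.Trash/$USER for each encryption zone the user has used.
        Collection<FileStatus> roots = fs.getTrashRoots(false);
        for (FileStatus root : roots) {
            System.out.println(root.getPath());
        }
    }
}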

Example 4 with EncryptionZone

Use of org.apache.hadoop.hdfs.protocol.EncryptionZone in project hadoop by apache.

From the class ClientNamenodeProtocolServerSideTranslatorPB, method getEZForPath.

@Override
public GetEZForPathResponseProto getEZForPath(RpcController controller, GetEZForPathRequestProto req) throws ServiceException {
    try {
        GetEZForPathResponseProto.Builder builder = GetEZForPathResponseProto.newBuilder();
        final EncryptionZone ret = server.getEZForPath(req.getSrc());
        if (ret != null) {
            builder.setZone(PBHelperClient.convert(ret));
        }
        return builder.build();
    } catch (IOException e) {
        throw new ServiceException(e);
    }
}
Also used: EncryptionZone (org.apache.hadoop.hdfs.protocol.EncryptionZone), ServiceException (com.google.protobuf.ServiceException), GetEZForPathResponseProto (org.apache.hadoop.hdfs.protocol.proto.EncryptionZonesProtos.GetEZForPathResponseProto), IOException (java.io.IOException)
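
Clients normally reach this RPC through HdfsAdmin#getEncryptionZoneForPath rather than the protobuf translator directly. A minimal sketch, assuming a running cluster; the NameNode URI and the path /ez1/data are hypothetical:

import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.client.HdfsAdmin;
import org.apache.hadoop.hdfs.protocol.EncryptionZone;

public class GetZoneForPath {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        HdfsAdmin admin = new HdfsAdmin(URI.create("hdfs://localhost:8020"), conf);
        // Returns null when the path is not inside an encryption zone,
        // matching the optional zone field in the protobuf response above.
        EncryptionZone zone = admin.getEncryptionZoneForPath(new Path("/ez1/data"));
        System.out.println(zone == null ? "not in an encryption zone" : zone.getKeyName());
    }
}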

Example 5 with EncryptionZone

Use of org.apache.hadoop.hdfs.protocol.EncryptionZone in project hadoop by apache.

From the class ClientNamenodeProtocolServerSideTranslatorPB, method listEncryptionZones.

@Override
public ListEncryptionZonesResponseProto listEncryptionZones(RpcController controller, ListEncryptionZonesRequestProto req) throws ServiceException {
    try {
        BatchedEntries<EncryptionZone> entries = server.listEncryptionZones(req.getId());
        ListEncryptionZonesResponseProto.Builder builder = ListEncryptionZonesResponseProto.newBuilder();
        builder.setHasMore(entries.hasMore());
        for (int i = 0; i < entries.size(); i++) {
            builder.addZones(PBHelperClient.convert(entries.get(i)));
        }
        return builder.build();
    } catch (IOException e) {
        throw new ServiceException(e);
    }
}
Also used: EncryptionZone (org.apache.hadoop.hdfs.protocol.EncryptionZone), ServiceException (com.google.protobuf.ServiceException), IOException (java.io.IOException), ListEncryptionZonesResponseProto (org.apache.hadoop.hdfs.protocol.proto.EncryptionZonesProtos.ListEncryptionZonesResponseProto)
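
The matching client call is HdfsAdmin#listEncryptionZones, which hides the batched request/response (the id cursor and hasMore flag above) behind a RemoteIterator. A minimal sketch, assuming a running cluster; the NameNode URI is hypothetical:

import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.hdfs.client.HdfsAdmin;
import org.apache.hadoop.hdfs.protocol.EncryptionZone;

public class ListZones {
    public static void main(String[] args) throws Exception {
        HdfsAdmin admin = new HdfsAdmin(URI.create("hdfs://localhost:8020"), new Configuration());
        // The iterator fetches further batches lazily via listEncryptionZones(id).
        RemoteIterator<EncryptionZone> it = admin.listEncryptionZones();
        while (it.hasNext()) {
            EncryptionZone zone = it.next();
            System.out.println(zone.getPath() + " -> " + zone.getKeyName());
        }
    }
}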

Aggregations

EncryptionZone (org.apache.hadoop.hdfs.protocol.EncryptionZone): 15
IOException (java.io.IOException): 5
Path (org.apache.hadoop.fs.Path): 5
ServiceException (com.google.protobuf.ServiceException): 3
HdfsFileStatus (org.apache.hadoop.hdfs.protocol.HdfsFileStatus): 3
FileNotFoundException (java.io.FileNotFoundException): 2
CipherSuite (org.apache.hadoop.crypto.CipherSuite): 2
CryptoProtocolVersion (org.apache.hadoop.crypto.CryptoProtocolVersion): 2
BatchedListEntries (org.apache.hadoop.fs.BatchedRemoteIterator.BatchedListEntries): 2
FileStatus (org.apache.hadoop.fs.FileStatus): 2
Test (org.junit.Test): 2
InvalidProtocolBufferException (com.google.protobuf.InvalidProtocolBufferException): 1
ArrayList (java.util.ArrayList): 1
HadoopIllegalArgumentException (org.apache.hadoop.HadoopIllegalArgumentException): 1
FileAlreadyExistsException (org.apache.hadoop.fs.FileAlreadyExistsException): 1
FileEncryptionInfo (org.apache.hadoop.fs.FileEncryptionInfo): 1
FileSystem (org.apache.hadoop.fs.FileSystem): 1
LocatedFileStatus (org.apache.hadoop.fs.LocatedFileStatus): 1
XAttr (org.apache.hadoop.fs.XAttr): 1
HdfsLocatedFileStatus (org.apache.hadoop.hdfs.protocol.HdfsLocatedFileStatus): 1