
Example 1 with FileEncryptionInfo

Use of org.apache.hadoop.fs.FileEncryptionInfo in project hadoop by apache.

From the class FSNamesystem, method startFileInt.

private HdfsFileStatus startFileInt(String src, PermissionStatus permissions,
        String holder, String clientMachine, EnumSet<CreateFlag> flag,
        boolean createParent, short replication, long blockSize,
        CryptoProtocolVersion[] supportedVersions, boolean logRetryCache)
        throws IOException {
    if (NameNode.stateChangeLog.isDebugEnabled()) {
        StringBuilder builder = new StringBuilder();
        builder.append("DIR* NameSystem.startFile: src=").append(src).append(", holder=").append(holder).append(", clientMachine=").append(clientMachine).append(", createParent=").append(createParent).append(", replication=").append(replication).append(", createFlag=").append(flag).append(", blockSize=").append(blockSize).append(", supportedVersions=").append(Arrays.toString(supportedVersions));
        NameNode.stateChangeLog.debug(builder.toString());
    }
    if (!DFSUtil.isValidName(src) || FSDirectory.isExactReservedName(src)
            || (FSDirectory.isReservedName(src)
                && !FSDirectory.isReservedRawName(src)
                && !FSDirectory.isReservedInodesName(src))) {
        throw new InvalidPathException(src);
    }
    FSPermissionChecker pc = getPermissionChecker();
    INodesInPath iip = null;
    // avoid syncing the edit log until we do something that might create edits
    boolean skipSync = true;
    HdfsFileStatus stat = null;
    BlocksMapUpdateInfo toRemoveBlocks = null;
    checkOperation(OperationCategory.WRITE);
    writeLock();
    try {
        checkOperation(OperationCategory.WRITE);
        checkNameNodeSafeMode("Cannot create file" + src);
        iip = FSDirWriteFileOp.resolvePathForStartFile(dir, pc, src, flag, createParent);
        if (!FSDirErasureCodingOp.hasErasureCodingPolicy(this, iip)) {
            blockManager.verifyReplication(src, replication, clientMachine);
        }
        if (blockSize < minBlockSize) {
            throw new IOException("Specified block size is less than configured" + " minimum value (" + DFSConfigKeys.DFS_NAMENODE_MIN_BLOCK_SIZE_KEY + "): " + blockSize + " < " + minBlockSize);
        }
        FileEncryptionInfo feInfo = null;
        if (provider != null) {
            EncryptionKeyInfo ezInfo = FSDirEncryptionZoneOp.getEncryptionKeyInfo(this, iip, supportedVersions);
            // getEncryptionKeyInfo releases the lock while generating the EDEK,
            // so re-resolve the path to verify the file and/or EZ has not mutated
            if (ezInfo != null) {
                checkOperation(OperationCategory.WRITE);
                iip = FSDirWriteFileOp.resolvePathForStartFile(dir, pc, iip.getPath(), flag, createParent);
                feInfo = FSDirEncryptionZoneOp.getFileEncryptionInfo(dir, iip, ezInfo);
            }
        }
        // following might generate edits
        skipSync = false;
        toRemoveBlocks = new BlocksMapUpdateInfo();
        dir.writeLock();
        try {
            stat = FSDirWriteFileOp.startFile(this, iip, permissions, holder,
                clientMachine, flag, createParent, replication, blockSize,
                feInfo, toRemoveBlocks, logRetryCache);
        } catch (IOException e) {
            skipSync = e instanceof StandbyException;
            throw e;
        } finally {
            dir.writeUnlock();
        }
    } finally {
        writeUnlock("create");
        // Any edits logged above need to be sync'ed even when an exception was thrown.
        if (!skipSync) {
            getEditLog().logSync();
            if (toRemoveBlocks != null) {
                removeBlocks(toRemoveBlocks);
                toRemoveBlocks.clear();
            }
        }
    }
    return stat;
}
Also used: BlocksMapUpdateInfo(org.apache.hadoop.hdfs.server.namenode.INode.BlocksMapUpdateInfo) StandbyException(org.apache.hadoop.ipc.StandbyException) HdfsFileStatus(org.apache.hadoop.hdfs.protocol.HdfsFileStatus) IOException(java.io.IOException) EncryptionKeyInfo(org.apache.hadoop.hdfs.server.namenode.FSDirEncryptionZoneOp.EncryptionKeyInfo) FileEncryptionInfo(org.apache.hadoop.fs.FileEncryptionInfo) InvalidPathException(org.apache.hadoop.fs.InvalidPathException)
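
For context, here is a minimal client-side sketch of the call path that reaches startFileInt: creating a file inside an encryption zone. The NameNode URI, zone path, and key name are placeholders, and the sketch assumes a KMS is configured with the key already created (Configuration, Path, HdfsAdmin, and URI appear in the Aggregations list below).

public void createFileInEncryptionZone(Configuration conf) throws IOException {
    final URI nn = URI.create("hdfs://namenode:8020");    // placeholder URI
    FileSystem fs = FileSystem.get(nn, conf);
    HdfsAdmin admin = new HdfsAdmin(nn, conf);
    Path zone = new Path("/secure");                      // placeholder path
    fs.mkdirs(zone);
    // requires an empty directory and a key named "myKey" in the KMS
    admin.createEncryptionZone(zone, "myKey");
    // this create() reaches FSNamesystem#startFileInt on the NameNode, which
    // attaches a FileEncryptionInfo (with a freshly generated EDEK) to the inode
    try (FSDataOutputStream out = fs.create(new Path(zone, "file.txt"))) {
        out.writeBytes("encrypted at rest");
    }
}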

Example 2 with FileEncryptionInfo

Use of org.apache.hadoop.fs.FileEncryptionInfo in project hadoop by apache.

From the class PBHelperClient, method convert.

public static FileEncryptionInfo convert(HdfsProtos.FileEncryptionInfoProto proto) {
    if (proto == null) {
        return null;
    }
    CipherSuite suite = convert(proto.getSuite());
    CryptoProtocolVersion version = convert(proto.getCryptoProtocolVersion());
    byte[] key = proto.getKey().toByteArray();
    byte[] iv = proto.getIv().toByteArray();
    String ezKeyVersionName = proto.getEzKeyVersionName();
    String keyName = proto.getKeyName();
    return new FileEncryptionInfo(suite, version, key, iv, keyName, ezKeyVersionName);
}
Also used: CipherSuite(org.apache.hadoop.crypto.CipherSuite) CryptoProtocolVersion(org.apache.hadoop.crypto.CryptoProtocolVersion) ByteString(com.google.protobuf.ByteString) FileEncryptionInfo(org.apache.hadoop.fs.FileEncryptionInfo)
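
The inverse conversion (POJO to protobuf) is symmetrical. The sketch below infers the builder setter names from the getters used above and assumes the usual paired convert overloads for CipherSuite and CryptoProtocolVersion in the same class; treat it as an illustration rather than verified API.

public static HdfsProtos.FileEncryptionInfoProto convert(FileEncryptionInfo info) {
    if (info == null) {
        return null;
    }
    // ByteString.copyFrom wraps the raw key and IV bytes for the wire format
    return HdfsProtos.FileEncryptionInfoProto.newBuilder()
        .setSuite(convert(info.getCipherSuite()))
        .setCryptoProtocolVersion(convert(info.getCryptoProtocolVersion()))
        .setKey(ByteString.copyFrom(info.getEncryptedDataEncryptionKey()))
        .setIv(ByteString.copyFrom(info.getIV()))
        .setKeyName(info.getKeyName())
        .setEzKeyVersionName(info.getEzKeyVersionName())
        .build();
}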

Example 3 with FileEncryptionInfo

Use of org.apache.hadoop.fs.FileEncryptionInfo in project hadoop by apache.

From the class FSDirEncryptionZoneOp, method getFileEncryptionInfo.

/**
   * If the file and encryption key are valid, return the encryption info;
   * otherwise throw a retry exception. The startFile method generates the
   * EDEK outside of the lock, so the zone must be re-verified here.
   *
   * @param dir the FSDirectory
   * @param iip the INodesInPath for the file path
   * @param ezInfo the encryption key info (EDEK plus cipher settings)
   * @return FileEncryptionInfo for the file
   * @throws RetryStartFileException if the key is inconsistent with the current zone
   */
static FileEncryptionInfo getFileEncryptionInfo(FSDirectory dir, INodesInPath iip, EncryptionKeyInfo ezInfo) throws RetryStartFileException {
    FileEncryptionInfo feInfo = null;
    final EncryptionZone zone = getEZForPath(dir, iip);
    if (zone != null) {
        // The path is now within an EZ, but we're missing encryption parameters
        if (ezInfo == null) {
            throw new RetryStartFileException();
        }
        // Path is within an EZ and we have provided encryption parameters.
        // Make sure that the generated EDEK matches the settings of the EZ.
        final String ezKeyName = zone.getKeyName();
        if (!ezKeyName.equals(ezInfo.edek.getEncryptionKeyName())) {
            throw new RetryStartFileException();
        }
        feInfo = new FileEncryptionInfo(ezInfo.suite, ezInfo.protocolVersion,
            ezInfo.edek.getEncryptedKeyVersion().getMaterial(),
            ezInfo.edek.getEncryptedKeyIv(), ezKeyName,
            ezInfo.edek.getEncryptionKeyVersionName());
    }
    return feInfo;
}
Also used: EncryptionZone(org.apache.hadoop.hdfs.protocol.EncryptionZone) FileEncryptionInfo(org.apache.hadoop.fs.FileEncryptionInfo)
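
The retry contract described in the javadoc looks roughly like the hypothetical helper below, which is not the actual NameNode retry path: if the zone key was rolled between EDEK generation and verification, regenerate the EDEK and try again a bounded number of times. The method name and retry bound are assumptions, and package-private access to FSDirEncryptionZoneOp is assumed.

static FileEncryptionInfo resolveEdekWithRetry(FSNamesystem fsn, FSDirectory dir,
        INodesInPath iip, CryptoProtocolVersion[] supportedVersions)
        throws IOException {
    final int maxRetries = 10;    // illustrative bound, not from the source
    for (int i = 0; i < maxRetries; i++) {
        // generates the EDEK outside of the FSN lock
        EncryptionKeyInfo ezInfo =
            FSDirEncryptionZoneOp.getEncryptionKeyInfo(fsn, iip, supportedVersions);
        try {
            // re-verifies the zone under the lock; throws if the EDEK no longer matches
            return FSDirEncryptionZoneOp.getFileEncryptionInfo(dir, iip, ezInfo);
        } catch (RetryStartFileException e) {
            // zone key rolled or zone changed; loop around for a fresh EDEK
        }
    }
    throw new IOException("could not generate an EDEK consistent with the zone");
}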

Example 4 with FileEncryptionInfo

Use of org.apache.hadoop.fs.FileEncryptionInfo in project hadoop by apache.

From the class DFSClient, method createWrappedOutputStream.

/**
   * Wraps the stream in a CryptoOutputStream if the underlying file is
   * encrypted.
   */
public HdfsDataOutputStream createWrappedOutputStream(DFSOutputStream dfsos, FileSystem.Statistics statistics, long startPos) throws IOException {
    final FileEncryptionInfo feInfo = dfsos.getFileEncryptionInfo();
    if (feInfo != null) {
        // File is encrypted, wrap the stream in a crypto stream.
        // Currently only one version, so no special logic based on the version #
        getCryptoProtocolVersion(feInfo);
        final CryptoCodec codec = getCryptoCodec(conf, feInfo);
        KeyVersion decrypted = decryptEncryptedDataEncryptionKey(feInfo);
        final CryptoOutputStream cryptoOut = new CryptoOutputStream(dfsos, codec, decrypted.getMaterial(), feInfo.getIV(), startPos);
        return new HdfsDataOutputStream(cryptoOut, statistics, startPos);
    } else {
        // No FileEncryptionInfo present so no encryption.
        return new HdfsDataOutputStream(dfsos, statistics, startPos);
    }
}
Also used: KeyVersion(org.apache.hadoop.crypto.key.KeyProvider.KeyVersion) EncryptedKeyVersion(org.apache.hadoop.crypto.key.KeyProviderCryptoExtension.EncryptedKeyVersion) CryptoCodec(org.apache.hadoop.crypto.CryptoCodec) CryptoOutputStream(org.apache.hadoop.crypto.CryptoOutputStream) HdfsDataOutputStream(org.apache.hadoop.hdfs.client.HdfsDataOutputStream) FileEncryptionInfo(org.apache.hadoop.fs.FileEncryptionInfo)
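
A sketch of how this wrapping is used when driving DFSClient directly (FileSystem.create() normally does the wrapping for you). The path is a placeholder, and passing null statistics is an assumption made to keep the sketch short.

public void writeEncrypted(DFSClient client) throws IOException {
    DFSOutputStream dfsos = client.create("/secure/file.txt", true);
    // startPos is 0 for a newly created file
    try (HdfsDataOutputStream out =
            client.createWrappedOutputStream(dfsos, null, 0)) {
        // if the file is in an encryption zone, bytes are encrypted client-side
        out.writeBytes("plaintext in, ciphertext on disk");
    }
}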

Example 5 with FileEncryptionInfo

Use of org.apache.hadoop.fs.FileEncryptionInfo in project hadoop by apache.

From the class DFSClient, method createWrappedInputStream.

/**
   * Wraps the stream in a CryptoInputStream if the underlying file is
   * encrypted.
   */
public HdfsDataInputStream createWrappedInputStream(DFSInputStream dfsis) throws IOException {
    final FileEncryptionInfo feInfo = dfsis.getFileEncryptionInfo();
    if (feInfo != null) {
        // File is encrypted, wrap the stream in a crypto stream.
        // Currently only one version, so no special logic based on the version #
        getCryptoProtocolVersion(feInfo);
        final CryptoCodec codec = getCryptoCodec(conf, feInfo);
        final KeyVersion decrypted = decryptEncryptedDataEncryptionKey(feInfo);
        final CryptoInputStream cryptoIn = new CryptoInputStream(dfsis, codec, decrypted.getMaterial(), feInfo.getIV());
        return new HdfsDataInputStream(cryptoIn);
    } else {
        // No FileEncryptionInfo so no encryption.
        return new HdfsDataInputStream(dfsis);
    }
}
Also used: CryptoInputStream(org.apache.hadoop.crypto.CryptoInputStream) KeyVersion(org.apache.hadoop.crypto.key.KeyProvider.KeyVersion) EncryptedKeyVersion(org.apache.hadoop.crypto.key.KeyProviderCryptoExtension.EncryptedKeyVersion) CryptoCodec(org.apache.hadoop.crypto.CryptoCodec) FileEncryptionInfo(org.apache.hadoop.fs.FileEncryptionInfo) HdfsDataInputStream(org.apache.hadoop.hdfs.client.HdfsDataInputStream)
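
And the read-side counterpart to the previous sketch; again the path is a placeholder. If the file carries a FileEncryptionInfo, reads return decrypted plaintext transparently.

public void readEncrypted(DFSClient client) throws IOException {
    DFSInputStream dfsis = client.open("/secure/file.txt");
    try (HdfsDataInputStream in = client.createWrappedInputStream(dfsis)) {
        byte[] buf = new byte[4096];
        int n = in.read(buf);    // plaintext if the file was encrypted
    }
}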

Aggregations

FileEncryptionInfo (org.apache.hadoop.fs.FileEncryptionInfo): 11
CryptoCodec (org.apache.hadoop.crypto.CryptoCodec): 3
KeyVersion (org.apache.hadoop.crypto.key.KeyProvider.KeyVersion): 3
ByteString (com.google.protobuf.ByteString): 2
IOException (java.io.IOException): 2
EncryptedKeyVersion (org.apache.hadoop.crypto.key.KeyProviderCryptoExtension.EncryptedKeyVersion): 2
Path (org.apache.hadoop.fs.Path): 2
HdfsAdmin (org.apache.hadoop.hdfs.client.HdfsAdmin): 2
ErasureCodingPolicy (org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy): 2
LocatedBlocks (org.apache.hadoop.hdfs.protocol.LocatedBlocks): 2
Test (org.junit.Test): 2
Mockito.anyString (org.mockito.Mockito.anyString): 2
InvocationTargetException (java.lang.reflect.InvocationTargetException): 1
Method (java.lang.reflect.Method): 1
URI (java.net.URI): 1
GeneralSecurityException (java.security.GeneralSecurityException): 1
Configuration (org.apache.hadoop.conf.Configuration): 1
CipherSuite (org.apache.hadoop.crypto.CipherSuite): 1
CryptoInputStream (org.apache.hadoop.crypto.CryptoInputStream): 1
CryptoOutputStream (org.apache.hadoop.crypto.CryptoOutputStream): 1