Search in sources :

Example 1 with CanReinit

Use of org.apache.hadoop.hbase.io.compress.CanReinit in the Apache HBase project.

The prepareDecoding method of the HFileBlockDefaultDecodingContext class:

/**
 * Prepares {@code blockBufferWithoutHeader} by decrypting and/or decompressing the
 * on-disk block bytes, according to the file context's encryption and compression
 * settings.
 *
 * @param onDiskSizeWithoutHeader       size of the on-disk (possibly encrypted and
 *                                      compressed) block data, excluding the header
 * @param uncompressedSizeWithoutHeader expected size of the decoded plaintext data
 * @param blockBufferWithoutHeader      destination buffer for the decoded block data
 * @param onDiskBlock                   source buffer holding the raw on-disk block data
 * @throws IOException if the block is truncated, or decryption/decompression fails
 */
@Override
public void prepareDecoding(int onDiskSizeWithoutHeader, int uncompressedSizeWithoutHeader, ByteBuff blockBufferWithoutHeader, ByteBuff onDiskBlock) throws IOException {
    final ByteBuffInputStream byteBuffInputStream = new ByteBuffInputStream(onDiskBlock);
    InputStream dataInputStream = new DataInputStream(byteBuffInputStream);
    try {
        Encryption.Context cryptoContext = fileContext.getEncryptionContext();
        if (cryptoContext != Encryption.Context.NONE) {
            Cipher cipher = cryptoContext.getCipher();
            Decryptor decryptor = cipher.getDecryptor();
            decryptor.setKey(cryptoContext.getKey());
            // Encrypted block format:
            // +--------------------------+
            // | byte iv length           |
            // +--------------------------+
            // | iv data ...              |
            // +--------------------------+
            // | encrypted block data ... |
            // +--------------------------+
            int ivLength = dataInputStream.read();
            if (ivLength < 0) {
                // InputStream.read() returns -1 at end of stream: the on-disk block is
                // truncated before the IV length byte. Fail fast rather than silently
                // continuing with corrupt size accounting.
                throw new IOException("Truncated block: missing IV length byte");
            }
            // All encrypted blocks will have a nonzero IV length. An IV length of zero
            // means the encoding context had 0 bytes of plaintext to encode, so there
            // is nothing to decrypt and no decryption stream is needed.
            if (ivLength > 0) {
                byte[] iv = new byte[ivLength];
                IOUtils.readFully(dataInputStream, iv);
                decryptor.setIv(iv);
                decryptor.reset();
                dataInputStream = decryptor.createDecryptionStream(dataInputStream);
            }
            // Account for the IV length byte plus the IV bytes themselves.
            onDiskSizeWithoutHeader -= Bytes.SIZEOF_BYTE + ivLength;
        }
        Compression.Algorithm compression = fileContext.getCompression();
        if (compression != Compression.Algorithm.NONE) {
            Decompressor decompressor = null;
            try {
                decompressor = compression.getDecompressor();
                // Decompressors that carry tunable settings implement CanReinit;
                // refresh such decompressors from the current configuration before
                // use, since pooled instances may have stale settings.
                if (decompressor instanceof CanReinit) {
                    ((CanReinit) decompressor).reinit(conf);
                }
                try (InputStream is = compression.createDecompressionStream(dataInputStream, decompressor, 0)) {
                    BlockIOUtils.readFullyWithHeapBuffer(is, blockBufferWithoutHeader, uncompressedSizeWithoutHeader);
                }
            } finally {
                // Return the (possibly pooled) decompressor even on failure.
                if (decompressor != null) {
                    compression.returnDecompressor(decompressor);
                }
            }
        } else {
            // No compression: copy the (possibly already decrypted) bytes straight through.
            BlockIOUtils.readFullyWithHeapBuffer(dataInputStream, blockBufferWithoutHeader, onDiskSizeWithoutHeader);
        }
    } finally {
        byteBuffInputStream.close();
        dataInputStream.close();
    }
}
Also used : Compression(org.apache.hadoop.hbase.io.compress.Compression) Decryptor(org.apache.hadoop.hbase.io.crypto.Decryptor) Decompressor(org.apache.hadoop.io.compress.Decompressor) CanReinit(org.apache.hadoop.hbase.io.compress.CanReinit) DataInputStream(java.io.DataInputStream) ByteBuffInputStream(org.apache.hadoop.hbase.io.ByteBuffInputStream) InputStream(java.io.InputStream) Encryption(org.apache.hadoop.hbase.io.crypto.Encryption) DataInputStream(java.io.DataInputStream) ByteBuffInputStream(org.apache.hadoop.hbase.io.ByteBuffInputStream) Cipher(org.apache.hadoop.hbase.io.crypto.Cipher)

Aggregations

DataInputStream (java.io.DataInputStream)1 InputStream (java.io.InputStream)1 ByteBuffInputStream (org.apache.hadoop.hbase.io.ByteBuffInputStream)1 CanReinit (org.apache.hadoop.hbase.io.compress.CanReinit)1 Compression (org.apache.hadoop.hbase.io.compress.Compression)1 Cipher (org.apache.hadoop.hbase.io.crypto.Cipher)1 Decryptor (org.apache.hadoop.hbase.io.crypto.Decryptor)1 Encryption (org.apache.hadoop.hbase.io.crypto.Encryption)1 Decompressor (org.apache.hadoop.io.compress.Decompressor)1