Use of org.apache.hadoop.hbase.io.crypto.Encryption in the Apache HBase project:
method compressAfterEncoding of class HFileBlockDefaultEncodingContext.
/**
 * Compresses (when a compression algorithm is configured) and then encrypts the
 * encoded block bytes, producing the on-disk block representation.
 * <p>
 * The block header is always written in plaintext; only the post-header payload
 * is compressed and/or encrypted. See the inline diagram for the encrypted
 * block layout.
 *
 * @param uncompressedBytesWithHeaderBuffer buffer holding the header followed by the encoded payload
 * @param uncompressedBytesWithHeaderOffset offset of the header within the buffer
 * @param uncompressedBytesWithHeaderLength total length (header + payload) starting at the offset
 * @param headerBytes the block header bytes, copied unmodified ahead of the transformed payload
 * @return the on-disk bytes wrapped as a Bytes view over an internal buffer, or
 *         {@code null} when neither encryption nor compression applies
 *         (presumably the caller then uses the input unchanged — confirm at call site)
 * @throws IOException if compression or encryption fails
 */
private Bytes compressAfterEncoding(byte[] uncompressedBytesWithHeaderBuffer, int uncompressedBytesWithHeaderOffset, int uncompressedBytesWithHeaderLength, byte[] headerBytes) throws IOException {
Encryption.Context cryptoContext = fileContext.getEncryptionContext();
if (cryptoContext != Encryption.Context.NONE) {
// Encrypted block format:
// +--------------------------+
// | byte iv length |
// +--------------------------+
// | iv data ... |
// +--------------------------+
// | encrypted block data ... |
// +--------------------------+
cryptoByteStream.reset();
// Write the block header (plaintext)
cryptoByteStream.write(headerBytes);
InputStream in;
int plaintextLength;
// Run any compression before encryption
if (fileContext.getCompression() != Compression.Algorithm.NONE) {
compressedByteStream.reset();
compressionStream.resetState();
// Compress only the payload: skip headerBytes.length bytes past the offset
compressionStream.write(uncompressedBytesWithHeaderBuffer, headerBytes.length + uncompressedBytesWithHeaderOffset, uncompressedBytesWithHeaderLength - headerBytes.length);
compressionStream.flush();
compressionStream.finish();
byte[] plaintext = compressedByteStream.toByteArray();
plaintextLength = plaintext.length;
in = new ByteArrayInputStream(plaintext);
} else {
// No compression: encrypt the raw payload directly from the input buffer
plaintextLength = uncompressedBytesWithHeaderLength - headerBytes.length;
in = new ByteArrayInputStream(uncompressedBytesWithHeaderBuffer, headerBytes.length + uncompressedBytesWithHeaderOffset, plaintextLength);
}
if (plaintextLength > 0) {
// Set up the cipher
Cipher cipher = cryptoContext.getCipher();
Encryptor encryptor = cipher.getEncryptor();
encryptor.setKey(cryptoContext.getKey());
// Set up the IV
int ivLength = iv.length;
// The IV length is stored in a single byte (see format diagram above)
Preconditions.checkState(ivLength <= Byte.MAX_VALUE, "IV length out of range");
cryptoByteStream.write(ivLength);
if (ivLength > 0) {
encryptor.setIv(iv);
cryptoByteStream.write(iv);
}
// Encrypt the data
Encryption.encrypt(cryptoByteStream, in, encryptor);
// Increment the IV given the final block size
Encryption.incrementIv(iv, 1 + (cryptoByteStream.size() / encryptor.getBlockSize()));
// Wrap the stream's internal buffer directly (no copy); valid only until the next reset
return new Bytes(cryptoByteStream.getBuffer(), 0, cryptoByteStream.size());
} else {
// Empty payload: record a zero IV length after the plaintext header
cryptoByteStream.write(0);
return new Bytes(cryptoByteStream.getBuffer(), 0, cryptoByteStream.size());
}
} else {
if (this.fileContext.getCompression() != NONE) {
// Compression only: header stays plaintext ahead of the compressed payload
compressedByteStream.reset();
compressedByteStream.write(headerBytes);
compressionStream.resetState();
compressionStream.write(uncompressedBytesWithHeaderBuffer, headerBytes.length + uncompressedBytesWithHeaderOffset, uncompressedBytesWithHeaderLength - headerBytes.length);
compressionStream.flush();
compressionStream.finish();
return new Bytes(compressedByteStream.getBuffer(), 0, compressedByteStream.size());
} else {
// Neither encryption nor compression applies
return null;
}
}
}
Use of org.apache.hadoop.hbase.io.crypto.Encryption in the Apache HBase project:
method createEncryptionContext of class EncryptionUtil.
/**
 * Helper to create an encryption context for the given column family, or
 * {@link Encryption.Context#NONE} when the family does not declare an
 * encryption type.
 *
 * @param conf The current configuration.
 * @param family The current column descriptor.
 * @return The created encryption context.
 * @throws IOException if an encryption key for the column cannot be unwrapped
 * @throws IllegalStateException in case of encryption related configuration errors
 */
public static Encryption.Context createEncryptionContext(Configuration conf, ColumnFamilyDescriptor family) throws IOException {
String cipherName = family.getEncryptionType();
// Families without an encryption type get the shared NONE context.
if (cipherName == null) {
return Encryption.Context.NONE;
}
// Encryption must be enabled cluster-wide before any family may use it.
if (!Encryption.isEncryptionEnabled(conf)) {
throw new IllegalStateException("Encryption for family '" + family.getNameAsString() + "' configured with type '" + cipherName + "' but the encryption feature is disabled");
}
Cipher cipher;
Key key;
byte[] wrappedKeyBytes = family.getEncryptionKey();
if (wrappedKeyBytes == null) {
// No key material in the schema: look the cipher up by name and generate a random key.
cipher = Encryption.getCipher(conf, cipherName);
if (cipher == null) {
throw new IllegalStateException("Cipher '" + cipherName + "' is not available");
}
key = cipher.getRandomKey();
} else {
// The schema supplies wrapped key material; the unwrapped key dictates the algorithm.
key = unwrapKey(conf, wrappedKeyBytes);
cipher = Encryption.getCipher(conf, key.getAlgorithm());
if (cipher == null) {
throw new IllegalStateException("Cipher '" + key.getAlgorithm() + "' is not available");
}
// The key's algorithm must agree with the family's declared encryption type.
if (!cipher.getName().equalsIgnoreCase(cipherName)) {
throw new IllegalStateException("Encryption for family '" + family.getNameAsString() + "' configured with type '" + cipherName + "' but key specifies algorithm '" + cipher.getName() + "'");
}
}
Encryption.Context context = Encryption.newContext(conf);
context.setCipher(cipher);
context.setKey(key);
return context;
}
Use of org.apache.hadoop.hbase.io.crypto.Encryption in the Apache HBase project:
method testEncryption of class EncryptionTest.
/**
 * Check that the specified cipher can be loaded and initialized, or throw
 * an exception. Verifies key and cipher provider configuration as a
 * prerequisite for cipher verification. Also verifies that encryption is
 * enabled globally. Results are cached per cipher name so the round-trip
 * test only runs once.
 *
 * @param conf HBase configuration
 * @param cipher cipher algorithm to use for the column family
 * @param key encryption key, or {@code null} to test with a random key
 * @throws IOException in case of encryption configuration error
 */
public static void testEncryption(final Configuration conf, final String cipher, byte[] key) throws IOException {
if (cipher == null) {
return;
}
if (!Encryption.isEncryptionEnabled(conf)) {
throw new IOException(String.format("Cipher %s failed test: encryption is disabled on the cluster", cipher));
}
testKeyProvider(conf);
testCipherProvider(conf);
// Consult the cache first; a previous verdict for this cipher is final.
Boolean cached = cipherResults.get(cipher);
if (cached != null) {
if (!cached) {
throw new IOException("Cipher " + cipher + " previously failed test");
}
return;
}
try {
Encryption.Context context = Encryption.newContext(conf);
context.setCipher(Encryption.getCipher(conf, cipher));
if (key != null) {
// This will be a wrapped key from schema; unwrap with the configured master key.
context.setKey(EncryptionUtil.unwrapKey(conf, conf.get(HConstants.CRYPTO_MASTERKEY_NAME_CONF_KEY, "hbase"), key));
} else {
// Make a random key since one was not provided.
context.setKey(context.getCipher().getRandomKey());
}
// Generate a random IV when the cipher requires one.
int ivLength = context.getCipher().getIvLength();
byte[] iv = null;
if (ivLength > 0) {
iv = new byte[ivLength];
Bytes.random(iv);
}
// Round-trip 1KB of random plaintext through encrypt/decrypt and compare.
byte[] plaintext = new byte[1024];
Bytes.random(plaintext);
ByteArrayOutputStream out = new ByteArrayOutputStream();
Encryption.encrypt(out, new ByteArrayInputStream(plaintext), context, iv);
byte[] ciphertext = out.toByteArray();
out.reset();
Encryption.decrypt(out, new ByteArrayInputStream(ciphertext), plaintext.length, context, iv);
if (!Bytes.equals(plaintext, out.toByteArray())) {
throw new IOException("Did not pass encrypt/decrypt test");
}
cipherResults.put(cipher, true);
} catch (Exception e) {
// Record the failure so subsequent calls fail fast, then surface the cause.
cipherResults.put(cipher, false);
throw new IOException("Cipher " + cipher + " failed test: " + e.getMessage(), e);
}
}
Use of org.apache.hadoop.hbase.io.crypto.Encryption in the Apache HBase project:
method compressAfterEncoding of class HFileBlockDefaultEncodingContext.
/**
 * Compresses (when a compression algorithm is configured) and then encrypts the
 * encoded block bytes, storing the result in {@code onDiskBytesWithHeader}.
 * <p>
 * The block header is always written in plaintext; only the post-header payload
 * is compressed and/or encrypted. See the inline diagram for the encrypted
 * block layout. When neither transform applies, the input array itself becomes
 * the on-disk representation.
 *
 * @param uncompressedBytesWithHeader the header followed by the encoded block payload
 * @param headerBytes the block header bytes, copied unmodified ahead of the transformed payload
 * @throws IOException if compression or encryption fails
 */
protected void compressAfterEncoding(byte[] uncompressedBytesWithHeader, byte[] headerBytes) throws IOException {
Encryption.Context cryptoContext = fileContext.getEncryptionContext();
if (cryptoContext != Encryption.Context.NONE) {
// Encrypted block format:
// +--------------------------+
// | byte iv length |
// +--------------------------+
// | iv data ... |
// +--------------------------+
// | encrypted block data ... |
// +--------------------------+
cryptoByteStream.reset();
// Write the block header (plaintext)
cryptoByteStream.write(headerBytes);
InputStream in;
int plaintextLength;
// Run any compression before encryption
if (fileContext.getCompression() != Compression.Algorithm.NONE) {
compressedByteStream.reset();
compressionStream.resetState();
// Compress only the payload portion, skipping the header
compressionStream.write(uncompressedBytesWithHeader, headerBytes.length, uncompressedBytesWithHeader.length - headerBytes.length);
compressionStream.flush();
compressionStream.finish();
byte[] plaintext = compressedByteStream.toByteArray();
plaintextLength = plaintext.length;
in = new ByteArrayInputStream(plaintext);
} else {
// No compression: encrypt the raw payload directly
plaintextLength = uncompressedBytesWithHeader.length - headerBytes.length;
in = new ByteArrayInputStream(uncompressedBytesWithHeader, headerBytes.length, plaintextLength);
}
if (plaintextLength > 0) {
// Set up the cipher
Cipher cipher = cryptoContext.getCipher();
Encryptor encryptor = cipher.getEncryptor();
encryptor.setKey(cryptoContext.getKey());
// Set up the IV
int ivLength = iv.length;
// The IV length is stored in a single byte (see format diagram above)
Preconditions.checkState(ivLength <= Byte.MAX_VALUE, "IV length out of range");
cryptoByteStream.write(ivLength);
if (ivLength > 0) {
encryptor.setIv(iv);
cryptoByteStream.write(iv);
}
// Encrypt the data
Encryption.encrypt(cryptoByteStream, in, encryptor);
onDiskBytesWithHeader = cryptoByteStream.toByteArray();
// Increment the IV given the final block size
Encryption.incrementIv(iv, 1 + (onDiskBytesWithHeader.length / encryptor.getBlockSize()));
} else {
// Empty payload: record a zero IV length after the plaintext header
cryptoByteStream.write(0);
onDiskBytesWithHeader = cryptoByteStream.toByteArray();
}
} else {
if (this.fileContext.getCompression() != NONE) {
// Compression only: header stays plaintext ahead of the compressed payload
compressedByteStream.reset();
compressedByteStream.write(headerBytes);
compressionStream.resetState();
compressionStream.write(uncompressedBytesWithHeader, headerBytes.length, uncompressedBytesWithHeader.length - headerBytes.length);
compressionStream.flush();
compressionStream.finish();
onDiskBytesWithHeader = compressedByteStream.toByteArray();
} else {
// Neither encryption nor compression: the input is already the on-disk form
onDiskBytesWithHeader = uncompressedBytesWithHeader;
}
}
}
Use of org.apache.hadoop.hbase.io.crypto.Encryption in the Apache HBase project:
method createEncryptionContext of class EncryptionUtil (HColumnDescriptor variant).
/**
 * Helper to create an encryption context for the given column family, or
 * {@link Encryption.Context#NONE} when the family does not declare an
 * encryption type.
 *
 * @param conf The current configuration.
 * @param family The current column descriptor.
 * @return The created encryption context.
 * @throws IOException if an encryption key for the column cannot be unwrapped
 * @throws IllegalStateException in case of encryption related configuration errors
 */
public static Encryption.Context createEncryptionContext(Configuration conf, HColumnDescriptor family) throws IOException {
Encryption.Context cryptoContext = Encryption.Context.NONE;
String cipherName = family.getEncryptionType();
if (cipherName != null) {
Cipher cipher;
Key key;
byte[] keyBytes = family.getEncryptionKey();
if (keyBytes != null) {
// Family provides specific key material
key = unwrapKey(conf, keyBytes);
// Use the algorithm the key wants
cipher = Encryption.getCipher(conf, key.getAlgorithm());
if (cipher == null) {
// IllegalStateException (still a RuntimeException) for consistency with the
// ColumnFamilyDescriptor variant of this method
throw new IllegalStateException("Cipher '" + key.getAlgorithm() + "' is not available");
}
// The family's declared type must match what the wrapped key is telling us
if (!cipher.getName().equalsIgnoreCase(cipherName)) {
throw new IllegalStateException("Encryption for family '" + family.getNameAsString() + "' configured with type '" + cipherName + "' but key specifies algorithm '" + cipher.getName() + "'");
}
} else {
// Family does not provide key material, create a random key
cipher = Encryption.getCipher(conf, cipherName);
if (cipher == null) {
throw new IllegalStateException("Cipher '" + cipherName + "' is not available");
}
key = cipher.getRandomKey();
}
cryptoContext = Encryption.newContext(conf);
cryptoContext.setCipher(cipher);
cryptoContext.setKey(key);
}
return cryptoContext;
}
Aggregations