Use of org.apache.hadoop.crypto.key.KeyProvider.KeyVersion in project hadoop by apache.
The class KMS, method handleEncryptedKeyOp.
@SuppressWarnings("rawtypes")
@POST
@Path(KMSRESTConstants.KEY_VERSION_RESOURCE + "/{versionName:.*}/" +
    KMSRESTConstants.EEK_SUB_RESOURCE)
@Produces(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8)
public Response handleEncryptedKeyOp(
    @PathParam("versionName") final String versionName,
    @QueryParam(KMSRESTConstants.EEK_OP) String eekOp,
    Map jsonPayload) throws Exception {
  try {
    LOG.trace("Entering handleEncryptedKeyOp method.");
    UserGroupInformation user = HttpUserGroupInformation.get();
    KMSClientProvider.checkNotEmpty(versionName, "versionName");
    KMSClientProvider.checkNotNull(eekOp, "eekOp");
    LOG.debug("Decrypting key for {}, the edek Operation is {}.", versionName, eekOp);
    final String keyName = (String) jsonPayload.get(KMSRESTConstants.NAME_FIELD);
    String ivStr = (String) jsonPayload.get(KMSRESTConstants.IV_FIELD);
    String encMaterialStr = (String) jsonPayload.get(KMSRESTConstants.MATERIAL_FIELD);
    KMSClientProvider.checkNotNull(ivStr, KMSRESTConstants.IV_FIELD);
    final byte[] iv = Base64.decodeBase64(ivStr);
    KMSClientProvider.checkNotNull(encMaterialStr, KMSRESTConstants.MATERIAL_FIELD);
    final byte[] encMaterial = Base64.decodeBase64(encMaterialStr);
    Object retJSON;
    if (eekOp.equals(KMSRESTConstants.EEK_DECRYPT)) {
      assertAccess(KMSACLs.Type.DECRYPT_EEK, user, KMSOp.DECRYPT_EEK, keyName);
      KeyProvider.KeyVersion retKeyVersion =
          user.doAs(new PrivilegedExceptionAction<KeyVersion>() {
            @Override
            public KeyVersion run() throws Exception {
              return provider.decryptEncryptedKey(
                  new KMSClientProvider.KMSEncryptedKeyVersion(keyName, versionName,
                      iv, KeyProviderCryptoExtension.EEK, encMaterial));
            }
          });
      retJSON = KMSServerJSONUtils.toJSON(retKeyVersion);
      kmsAudit.ok(user, KMSOp.DECRYPT_EEK, keyName, "");
    } else if (eekOp.equals(KMSRESTConstants.EEK_REENCRYPT)) {
      assertAccess(KMSACLs.Type.GENERATE_EEK, user, KMSOp.REENCRYPT_EEK, keyName);
      EncryptedKeyVersion retEncryptedKeyVersion =
          user.doAs(new PrivilegedExceptionAction<EncryptedKeyVersion>() {
            @Override
            public EncryptedKeyVersion run() throws Exception {
              return provider.reencryptEncryptedKey(
                  new KMSClientProvider.KMSEncryptedKeyVersion(keyName, versionName,
                      iv, KeyProviderCryptoExtension.EEK, encMaterial));
            }
          });
      retJSON = KMSServerJSONUtils.toJSON(retEncryptedKeyVersion);
      kmsAudit.ok(user, KMSOp.REENCRYPT_EEK, keyName, "");
    } else {
      StringBuilder error;
      error = new StringBuilder("IllegalArgumentException Wrong ");
      error.append(KMSRESTConstants.EEK_OP);
      error.append(" value, it must be ");
      error.append(KMSRESTConstants.EEK_DECRYPT);
      error.append(" or ");
      error.append(KMSRESTConstants.EEK_REENCRYPT);
      LOG.error(error.toString());
      throw new IllegalArgumentException(error.toString());
    }
    KMSWebApp.getDecryptEEKCallsMeter().mark();
    LOG.trace("Exiting handleEncryptedKeyOp method.");
    return Response.ok().type(MediaType.APPLICATION_JSON).entity(retJSON).build();
  } catch (Exception e) {
    LOG.debug("Exception in handleEncryptedKeyOp.", e);
    throw e;
  }
}
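On the client side this endpoint is normally reached through the KMS KeyProvider rather than raw HTTP. A minimal sketch, assuming a KMS at kms://http@kms-host:9600/kms and an existing key named "mykey" (both hypothetical), that generates an EEK and then decrypts it, which is the round trip served above as eek_op=decrypt:

import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.crypto.key.KeyProvider;
import org.apache.hadoop.crypto.key.KeyProvider.KeyVersion;
import org.apache.hadoop.crypto.key.KeyProviderCryptoExtension;
import org.apache.hadoop.crypto.key.KeyProviderCryptoExtension.EncryptedKeyVersion;
import org.apache.hadoop.crypto.key.KeyProviderFactory;

public class KmsEekClientSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // kms-host:9600 and "mykey" are placeholders for a real KMS and key name.
    KeyProvider provider =
        KeyProviderFactory.get(new URI("kms://http@kms-host:9600/kms"), conf);
    KeyProviderCryptoExtension kpExt =
        KeyProviderCryptoExtension.createKeyProviderCryptoExtension(provider);
    // Generate an EEK for the key; the key material never leaves the KMS.
    EncryptedKeyVersion eek = kpExt.generateEncryptedKey("mykey");
    // Decrypt the EEK back into a DEK via the KMS.
    KeyVersion dek = kpExt.decryptEncryptedKey(eek);
    System.out.println("Decrypted DEK length: " + dek.getMaterial().length);
  }
}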
Use of org.apache.hadoop.crypto.key.KeyProvider.KeyVersion in project hadoop by apache.
The class DFSClient, method createWrappedOutputStream.
/**
* Wraps the stream in a CryptoOutputStream if the underlying file is
* encrypted.
*/
public HdfsDataOutputStream createWrappedOutputStream(DFSOutputStream dfsos,
    FileSystem.Statistics statistics, long startPos) throws IOException {
  final FileEncryptionInfo feInfo = dfsos.getFileEncryptionInfo();
  if (feInfo != null) {
    // File is encrypted, wrap the stream in a crypto stream.
    // Currently only one version, so no special logic based on the version #
    getCryptoProtocolVersion(feInfo);
    final CryptoCodec codec = getCryptoCodec(conf, feInfo);
    KeyVersion decrypted = decryptEncryptedDataEncryptionKey(feInfo);
    final CryptoOutputStream cryptoOut = new CryptoOutputStream(dfsos, codec,
        decrypted.getMaterial(), feInfo.getIV(), startPos);
    return new HdfsDataOutputStream(cryptoOut, statistics, startPos);
  } else {
    // No FileEncryptionInfo present so no encryption.
    return new HdfsDataOutputStream(dfsos, statistics, startPos);
  }
}
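From application code the wrapping is transparent; writing into an HDFS encryption zone is an ordinary FileSystem call. A sketch, assuming an encryption zone already exists at /ez (the path is hypothetical):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class WriteIntoEncryptionZoneSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(conf);
    // /ez is assumed to be an existing encryption zone; because the new file
    // carries FileEncryptionInfo, DFSClient wraps the stream in a
    // CryptoOutputStream via createWrappedOutputStream before returning it.
    try (FSDataOutputStream out = fs.create(new Path("/ez/secret.txt"))) {
      out.writeUTF("plaintext in the stream, ciphertext on the DataNodes");
    }
  }
}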
Use of org.apache.hadoop.crypto.key.KeyProvider.KeyVersion in project hadoop by apache.
The class DFSClient, method createWrappedInputStream.
/**
* Wraps the stream in a CryptoInputStream if the underlying file is
* encrypted.
*/
public HdfsDataInputStream createWrappedInputStream(DFSInputStream dfsis) throws IOException {
  final FileEncryptionInfo feInfo = dfsis.getFileEncryptionInfo();
  if (feInfo != null) {
    // File is encrypted, wrap the stream in a crypto stream.
    // Currently only one version, so no special logic based on the version #
    getCryptoProtocolVersion(feInfo);
    final CryptoCodec codec = getCryptoCodec(conf, feInfo);
    final KeyVersion decrypted = decryptEncryptedDataEncryptionKey(feInfo);
    final CryptoInputStream cryptoIn = new CryptoInputStream(dfsis, codec,
        decrypted.getMaterial(), feInfo.getIV());
    return new HdfsDataInputStream(cryptoIn);
  } else {
    // No FileEncryptionInfo so no encryption.
    return new HdfsDataInputStream(dfsis);
  }
}
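Both wrapped-stream methods (and the HBase helper below, via reflection) rely on DFSClient's private decryptEncryptedDataEncryptionKey. A rough sketch of the shape of that step, assuming a configured KeyProvider is available; this is illustrative, not the project's exact code:

import java.io.IOException;
import java.security.GeneralSecurityException;
import org.apache.hadoop.crypto.key.KeyProvider;
import org.apache.hadoop.crypto.key.KeyProvider.KeyVersion;
import org.apache.hadoop.crypto.key.KeyProviderCryptoExtension;
import org.apache.hadoop.crypto.key.KeyProviderCryptoExtension.EncryptedKeyVersion;
import org.apache.hadoop.fs.FileEncryptionInfo;

final class EdekDecryptionSketch {
  // Rebuild an EncryptedKeyVersion from the FileEncryptionInfo and let the
  // provider's crypto extension unwrap it into the data encryption key.
  static KeyVersion decryptEdek(KeyProvider provider, FileEncryptionInfo feInfo)
      throws IOException {
    if (provider == null) {
      throw new IOException("No KeyProvider configured, cannot decrypt the EDEK");
    }
    EncryptedKeyVersion ekv = EncryptedKeyVersion.createForDecryption(
        feInfo.getKeyName(), feInfo.getEzKeyVersionName(), feInfo.getIV(),
        feInfo.getEncryptedDataEncryptionKey());
    try {
      KeyProviderCryptoExtension cryptoProvider =
          KeyProviderCryptoExtension.createKeyProviderCryptoExtension(provider);
      return cryptoProvider.decryptEncryptedKey(ekv);
    } catch (GeneralSecurityException e) {
      throw new IOException(e);
    }
  }
}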
Use of org.apache.hadoop.crypto.key.KeyProvider.KeyVersion in project hbase by apache.
The class FanOutOneBlockAsyncDFSOutputSaslHelper, method createTransparentCryptoHelper.
private static TransparentCryptoHelper createTransparentCryptoHelper() throws NoSuchMethodException {
  Method decryptEncryptedDataEncryptionKeyMethod = DFSClient.class
      .getDeclaredMethod("decryptEncryptedDataEncryptionKey", FileEncryptionInfo.class);
  decryptEncryptedDataEncryptionKeyMethod.setAccessible(true);
  return new TransparentCryptoHelper() {

    @Override
    public Encryptor createEncryptor(Configuration conf, FileEncryptionInfo feInfo,
        DFSClient client) throws IOException {
      try {
        KeyVersion decryptedKey = (KeyVersion) decryptEncryptedDataEncryptionKeyMethod
            .invoke(client, feInfo);
        CryptoCodec cryptoCodec = CryptoCodec.getInstance(conf, feInfo.getCipherSuite());
        Encryptor encryptor = cryptoCodec.createEncryptor();
        encryptor.init(decryptedKey.getMaterial(), feInfo.getIV());
        return encryptor;
      } catch (InvocationTargetException e) {
        Throwables.propagateIfPossible(e.getTargetException(), IOException.class);
        throw new RuntimeException(e.getTargetException());
      } catch (GeneralSecurityException e) {
        throw new IOException(e);
      } catch (IllegalAccessException e) {
        throw new RuntimeException(e);
      }
    }
  };
}
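The Encryptor returned by createEncryptor operates on ByteBuffers. A minimal sketch of encrypting one chunk with an already-initialized Encryptor (the helper method and its name are illustrative); direct buffers are used because the OpenSSL-backed codec requires them:

import java.io.IOException;
import java.nio.ByteBuffer;
import org.apache.hadoop.crypto.Encryptor;

final class EncryptorUsageSketch {
  // Encrypts one chunk with an Encryptor such as the one produced by
  // TransparentCryptoHelper.createEncryptor above.
  static byte[] encryptChunk(Encryptor encryptor, byte[] plain) throws IOException {
    ByteBuffer in = ByteBuffer.allocateDirect(plain.length);
    in.put(plain);
    in.flip();
    // AES/CTR is length-preserving, so the output buffer matches the input size.
    ByteBuffer out = ByteBuffer.allocateDirect(plain.length);
    encryptor.encrypt(in, out);
    out.flip();
    byte[] cipher = new byte[out.remaining()];
    out.get(cipher);
    return cipher;
  }
}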
Use of org.apache.hadoop.crypto.key.KeyProvider.KeyVersion in project hadoop by apache.
The class TestKeyProviderCryptoExtension, method testEncryptDecrypt.
@Test
public void testEncryptDecrypt() throws Exception {
  // Get an EEK
  KeyProviderCryptoExtension.EncryptedKeyVersion eek =
      kpExt.generateEncryptedKey(encryptionKey.getName());
  final byte[] encryptedKeyIv = eek.getEncryptedKeyIv();
  final byte[] encryptedKeyMaterial = eek.getEncryptedKeyVersion().getMaterial();
  // Decrypt it manually
  Cipher cipher = Cipher.getInstance("AES/CTR/NoPadding");
  cipher.init(Cipher.DECRYPT_MODE,
      new SecretKeySpec(encryptionKey.getMaterial(), "AES"),
      new IvParameterSpec(
          KeyProviderCryptoExtension.EncryptedKeyVersion.deriveIV(encryptedKeyIv)));
  final byte[] manualMaterial = cipher.doFinal(encryptedKeyMaterial);
  // Test the createForDecryption factory method
  EncryptedKeyVersion eek2 = EncryptedKeyVersion.createForDecryption(
      eek.getEncryptionKeyName(), eek.getEncryptionKeyVersionName(),
      eek.getEncryptedKeyIv(), eek.getEncryptedKeyVersion().getMaterial());
  // Decrypt it with the API
  KeyVersion decryptedKey = kpExt.decryptEncryptedKey(eek2);
  final byte[] apiMaterial = decryptedKey.getMaterial();
  assertArrayEquals("Wrong key material from decryptEncryptedKey",
      manualMaterial, apiMaterial);
}
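The reencrypt branch of handleEncryptedKeyOp above serves the companion flow: after the encryption key is rolled, an existing EEK can be re-wrapped under the new key version without ever exposing the DEK. A sketch against the same extension API, assuming the key "mykey" exists (the name is hypothetical):

import org.apache.hadoop.crypto.key.KeyProvider;
import org.apache.hadoop.crypto.key.KeyProviderCryptoExtension;
import org.apache.hadoop.crypto.key.KeyProviderCryptoExtension.EncryptedKeyVersion;

final class ReencryptAfterRollSketch {
  static EncryptedKeyVersion rollAndReencrypt(KeyProviderCryptoExtension kpExt,
      EncryptedKeyVersion eek) throws Exception {
    // Roll the encryption key to a new key version...
    KeyProvider.KeyVersion rolled = kpExt.rollNewVersion("mykey");
    System.out.println("Rolled to " + rolled.getVersionName());
    // ...then re-encrypt the existing EEK under it; the underlying DEK is unchanged.
    return kpExt.reencryptEncryptedKey(eek);
  }
}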