Use of org.apache.parquet.crypto.ParquetCryptoRuntimeException in project parquet-mr by apache.
The class PropertiesDrivenCryptoFactory, method getFileEncryptionProperties:
@Override
public FileEncryptionProperties getFileEncryptionProperties(Configuration fileHadoopConfig, Path tempFilePath,
    WriteContext fileWriteContext) throws ParquetCryptoRuntimeException {
  String footerKeyId = fileHadoopConfig.getTrimmed(FOOTER_KEY_PROPERTY_NAME);
  String columnKeysStr = fileHadoopConfig.getTrimmed(COLUMN_KEYS_PROPERTY_NAME);
  String uniformKeyId = fileHadoopConfig.getTrimmed(UNIFORM_KEY_PROPERTY_NAME);

  boolean emptyFooterKeyId = stringIsEmpty(footerKeyId);
  boolean emptyColumnKeyIds = stringIsEmpty(columnKeysStr);
  boolean emptyUniformKeyId = stringIsEmpty(uniformKeyId);

  // File shouldn't be encrypted
  if (emptyFooterKeyId && emptyColumnKeyIds && emptyUniformKeyId) {
    LOG.debug("Unencrypted file: {}", tempFilePath);
    return null;
  }

  if (emptyUniformKeyId) {
    // Non-uniform encryption. Must have both footer and column key ids
    if (emptyFooterKeyId) {
      throw new ParquetCryptoRuntimeException("No footer key configured in " + FOOTER_KEY_PROPERTY_NAME);
    }
    if (emptyColumnKeyIds) {
      throw new ParquetCryptoRuntimeException("No column keys configured in " + COLUMN_KEYS_PROPERTY_NAME);
    }
  } else {
    // Uniform encryption. Can't have configuration of footer and column key ids
    if (!emptyFooterKeyId) {
      throw new ParquetCryptoRuntimeException("Uniform encryption. Cant have footer key configured in " +
          FOOTER_KEY_PROPERTY_NAME);
    }
    if (!emptyColumnKeyIds) {
      throw new ParquetCryptoRuntimeException("Uniform encryption. Cant have column keys configured in " +
          COLUMN_KEYS_PROPERTY_NAME);
    }
    // Now assign footer key id to uniform key id
    footerKeyId = uniformKeyId;
  }

  FileKeyMaterialStore keyMaterialStore = null;
  boolean keyMaterialInternalStorage = fileHadoopConfig.getBoolean(
      KeyToolkit.KEY_MATERIAL_INTERNAL_PROPERTY_NAME, KeyToolkit.KEY_MATERIAL_INTERNAL_DEFAULT);
  if (!keyMaterialInternalStorage) {
    if (tempFilePath == null) {
      throw new ParquetCryptoRuntimeException("Output file path cannot be null");
    }
    try {
      keyMaterialStore = new HadoopFSKeyMaterialStore(tempFilePath.getFileSystem(fileHadoopConfig));
      keyMaterialStore.initialize(tempFilePath, fileHadoopConfig, false);
    } catch (IOException e) {
      throw new ParquetCryptoRuntimeException("Failed to get key material store", e);
    }
  }

  FileKeyWrapper keyWrapper = new FileKeyWrapper(fileHadoopConfig, keyMaterialStore);

  String algo = fileHadoopConfig.getTrimmed(ENCRYPTION_ALGORITHM_PROPERTY_NAME, ENCRYPTION_ALGORITHM_DEFAULT);
  ParquetCipher cipher;
  try {
    cipher = ParquetCipher.valueOf(algo);
  } catch (IllegalArgumentException e) {
    throw new ParquetCryptoRuntimeException("Wrong encryption algorithm: " + algo);
  }

  int dekLengthBits = fileHadoopConfig.getInt(
      KeyToolkit.DATA_KEY_LENGTH_PROPERTY_NAME, KeyToolkit.DATA_KEY_LENGTH_DEFAULT);
  if (Arrays.binarySearch(ACCEPTABLE_DATA_KEY_LENGTHS, dekLengthBits) < 0) {
    throw new ParquetCryptoRuntimeException("Wrong data key length : " + dekLengthBits);
  }
  int dekLength = dekLengthBits / 8;

  byte[] footerKeyBytes = new byte[dekLength];
  RANDOM.nextBytes(footerKeyBytes);
  byte[] footerKeyMetadata = keyWrapper.getEncryptionKeyMetadata(footerKeyBytes, footerKeyId, true);

  boolean plaintextFooter = fileHadoopConfig.getBoolean(PLAINTEXT_FOOTER_PROPERTY_NAME, PLAINTEXT_FOOTER_DEFAULT);

  FileEncryptionProperties.Builder propertiesBuilder = FileEncryptionProperties.builder(footerKeyBytes)
      .withFooterKeyMetadata(footerKeyMetadata)
      .withAlgorithm(cipher);

  if (emptyUniformKeyId) {
    Map<ColumnPath, ColumnEncryptionProperties> encryptedColumns =
        getColumnEncryptionProperties(dekLength, columnKeysStr, keyWrapper);
    propertiesBuilder = propertiesBuilder.withEncryptedColumns(encryptedColumns);
  }

  if (plaintextFooter) {
    propertiesBuilder = propertiesBuilder.withPlaintextFooter();
  }

  if (null != keyMaterialStore) {
    keyMaterialStore.saveMaterial();
  }

  if (LOG.isDebugEnabled()) {
    LOG.debug("File encryption properties for {} - algo: {}; footer key id: {}; uniform key id: {}; " +
        "plaintext footer: {}; internal key material: {}; encrypted columns: {}",
        tempFilePath, cipher, footerKeyId, uniformKeyId, plaintextFooter, keyMaterialInternalStorage, columnKeysStr);
  }

  return propertiesBuilder.build();
}
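This factory is never invoked directly by user code; it is driven entirely by Hadoop configuration. Below is a minimal sketch of setting up that configuration for a non-uniform (per-column) encryption run. The property-name strings, master key IDs and the KMS client class are illustrative assumptions; the authoritative names are the FOOTER_KEY_PROPERTY_NAME, COLUMN_KEYS_PROPERTY_NAME and related constants in PropertiesDrivenCryptoFactory and KeyToolkit for your parquet-mr version.

import org.apache.hadoop.conf.Configuration;

public class EncryptionConfigSketch {
  public static Configuration encryptedWriteConfig() {
    Configuration conf = new Configuration();
    // KMS client implementation holding the master keys (hypothetical class name)
    conf.set("parquet.encryption.kms.client.class", "com.example.MyKmsClient");
    // Master key ID protecting the footer
    conf.set("parquet.encryption.footer.key", "footerMasterKey");
    // Column keys, roughly "<masterKeyId>:<col>,<col>;<masterKeyId>:<col>"
    conf.set("parquet.encryption.column.keys", "colMasterKey1:ssn,salary;colMasterKey2:address");
    // Optional: cipher (a ParquetCipher enum name) and plaintext-footer mode
    conf.set("parquet.encryption.algorithm", "AES_GCM_CTR_V1");
    conf.setBoolean("parquet.encryption.plaintext.footer", false);
    return conf;
  }
}

With footer and column keys configured and no uniform key, the method above takes the non-uniform branch; setting only a uniform key instead routes everything through the footer key path.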
Use of org.apache.parquet.crypto.ParquetCryptoRuntimeException in project parquet-mr by apache.
The class FileKeyWrapper, method getEncryptionKeyMetadata:
byte[] getEncryptionKeyMetadata(byte[] dataKey, String masterKeyID, boolean isFooterKey, String keyIdInFile) {
  if (null == kmsClient) {
    throw new ParquetCryptoRuntimeException("No KMS client available. See previous errors.");
  }

  String encodedKekID = null;
  String encodedWrappedKEK = null;
  String encodedWrappedDEK = null;
  if (!doubleWrapping) {
    encodedWrappedDEK = kmsClient.wrapKey(dataKey, masterKeyID);
  } else {
    // Find in cache, or generate KEK for Master Key ID
    KeyEncryptionKey keyEncryptionKey = KEKPerMasterKeyID.computeIfAbsent(masterKeyID,
        (k) -> createKeyEncryptionKey(masterKeyID));
    // Encrypt DEK with KEK
    byte[] AAD = keyEncryptionKey.getID();
    encodedWrappedDEK = KeyToolkit.encryptKeyLocally(dataKey, keyEncryptionKey.getBytes(), AAD);
    encodedKekID = keyEncryptionKey.getEncodedID();
    encodedWrappedKEK = keyEncryptionKey.getEncodedWrappedKEK();
  }

  boolean storeKeyMaterialInternally = (null == keyMaterialStore);
  String serializedKeyMaterial = KeyMaterial.createSerialized(isFooterKey, kmsInstanceID, kmsInstanceURL,
      masterKeyID, doubleWrapping, encodedKekID, encodedWrappedKEK, encodedWrappedDEK, storeKeyMaterialInternally);

  // Internal key material storage: key metadata and key material are the same
  if (storeKeyMaterialInternally) {
    return serializedKeyMaterial.getBytes(StandardCharsets.UTF_8);
  }

  // External key material storage: key metadata is a reference to a key in the material store
  if (null == keyIdInFile) {
    if (isFooterKey) {
      keyIdInFile = KeyMaterial.FOOTER_KEY_ID_IN_FILE;
    } else {
      keyIdInFile = KeyMaterial.COLUMN_KEY_ID_IN_FILE_PREFIX + keyCounter;
      keyCounter++;
    }
  }
  keyMaterialStore.addKeyMaterial(keyIdInFile, serializedKeyMaterial);

  String serializedKeyMetadata = KeyMetadata.createSerializedForExternalMaterial(keyIdInFile);
  return serializedKeyMetadata.getBytes(StandardCharsets.UTF_8);
}
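In the doubleWrapping branch, the DEK is encrypted locally with a KEK (using the KEK ID as AAD), and only the KEK itself is wrapped by the KMS. The self-contained sketch below illustrates that local AES-GCM wrapping step. It is a simplified stand-in for KeyToolkit.encryptKeyLocally, not the library routine; the nonce layout and 128-bit tag length are assumptions made for the example.

import java.security.SecureRandom;
import java.util.Base64;
import javax.crypto.Cipher;
import javax.crypto.spec.GCMParameterSpec;
import javax.crypto.spec.SecretKeySpec;

public class LocalWrapSketch {
  // Wraps a DEK with a KEK using AES-GCM; kekBytes must be a valid AES key length (16/24/32 bytes)
  public static String wrapDekWithKek(byte[] dek, byte[] kekBytes, byte[] kekIdAad) throws Exception {
    byte[] nonce = new byte[12];                         // 96-bit GCM nonce
    new SecureRandom().nextBytes(nonce);
    Cipher cipher = Cipher.getInstance("AES/GCM/NoPadding");
    cipher.init(Cipher.ENCRYPT_MODE, new SecretKeySpec(kekBytes, "AES"), new GCMParameterSpec(128, nonce));
    cipher.updateAAD(kekIdAad);                          // the KEK ID authenticates the wrapping
    byte[] ciphertext = cipher.doFinal(dek);
    byte[] wrapped = new byte[nonce.length + ciphertext.length];
    System.arraycopy(nonce, 0, wrapped, 0, nonce.length);
    System.arraycopy(ciphertext, 0, wrapped, nonce.length, ciphertext.length);
    return Base64.getEncoder().encodeToString(wrapped);  // analogous to encodedWrappedDEK
  }
}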
Use of org.apache.parquet.crypto.ParquetCryptoRuntimeException in project parquet-mr by apache.
The class KeyMaterial, method createSerialized:
static String createSerialized(boolean isFooterKey, String kmsInstanceID, String kmsInstanceURL, String masterKeyID,
    boolean isDoubleWrapped, String kekID, String encodedWrappedKEK, String encodedWrappedDEK,
    boolean isInternalStorage) {
  Map<String, Object> keyMaterialMap = new HashMap<String, Object>(10);
  // 1. Write "key material type"
  keyMaterialMap.put(KEY_MATERIAL_TYPE_FIELD, KEY_MATERIAL_TYPE1);
  if (isInternalStorage) {
    // for internal storage, key material and key metadata are the same.
    // adding the "internalStorage" field that belongs to KeyMetadata.
    keyMaterialMap.put(KeyMetadata.KEY_MATERIAL_INTERNAL_STORAGE_FIELD, Boolean.TRUE);
  }
  // 2. Write isFooterKey
  keyMaterialMap.put(IS_FOOTER_KEY_FIELD, Boolean.valueOf(isFooterKey));
  if (isFooterKey) {
    // 3. For footer key, write KMS Instance ID
    keyMaterialMap.put(KMS_INSTANCE_ID_FIELD, kmsInstanceID);
    // 4. For footer key, write KMS Instance URL
    keyMaterialMap.put(KMS_INSTANCE_URL_FIELD, kmsInstanceURL);
  }
  // 5. Write master key ID
  keyMaterialMap.put(MASTER_KEY_ID_FIELD, masterKeyID);
  // 6. Write wrapped DEK
  keyMaterialMap.put(WRAPPED_DEK_FIELD, encodedWrappedDEK);
  // 7. Write isDoubleWrapped
  keyMaterialMap.put(DOUBLE_WRAPPING_FIELD, Boolean.valueOf(isDoubleWrapped));
  if (isDoubleWrapped) {
    // 8. In double wrapping mode, write KEK ID
    keyMaterialMap.put(KEK_ID_FIELD, kekID);
    // 9. In double wrapping mode, write wrapped KEK
    keyMaterialMap.put(WRAPPED_KEK_FIELD, encodedWrappedKEK);
  }

  try {
    return OBJECT_MAPPER.writeValueAsString(keyMaterialMap);
  } catch (IOException e) {
    throw new ParquetCryptoRuntimeException("Failed to serialize key material", e);
  }
}
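To make the serialization pattern concrete, here is a small runnable approximation that builds an analogous map and serializes it with Jackson. The JSON field-name strings and the "PKMT1" type marker are illustrative guesses for this sketch; the real values are defined by the *_FIELD and KEY_MATERIAL_TYPE1 constants used above.

import java.util.HashMap;
import java.util.Map;
import com.fasterxml.jackson.databind.ObjectMapper;

public class KeyMaterialJsonSketch {
  public static void main(String[] args) throws Exception {
    Map<String, Object> material = new HashMap<>();
    material.put("keyMaterialType", "PKMT1");             // assumed type marker
    material.put("isFooterKey", Boolean.TRUE);
    material.put("kmsInstanceID", "kms-prod-1");
    material.put("kmsInstanceURL", "https://kms.example.com");
    material.put("masterKeyID", "footerMasterKey");
    material.put("doubleWrapping", Boolean.TRUE);
    material.put("keyEncryptionKeyID", "base64KekId");
    material.put("wrappedKEK", "base64WrappedKek");
    material.put("wrappedDEK", "base64WrappedDek");
    // Prints one compact JSON object, analogous to the serialized key material string
    System.out.println(new ObjectMapper().writeValueAsString(material));
  }
}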
Use of org.apache.parquet.crypto.ParquetCryptoRuntimeException in project parquet-mr by apache.
The class KeyMaterial, method parse:
// parses external key material
static KeyMaterial parse(String keyMaterialString) {
  Map<String, Object> keyMaterialJson = null;
  try {
    keyMaterialJson = OBJECT_MAPPER.readValue(new StringReader(keyMaterialString),
        new TypeReference<Map<String, Object>>() {});
  } catch (IOException e) {
    throw new ParquetCryptoRuntimeException("Failed to parse key metadata " + keyMaterialString, e);
  }

  // 1. External key material - extract "key material type", and make sure it is supported
  String keyMaterialType = (String) keyMaterialJson.get(KEY_MATERIAL_TYPE_FIELD);
  if (!KEY_MATERIAL_TYPE1.equals(keyMaterialType)) {
    throw new ParquetCryptoRuntimeException("Wrong key material type: " + keyMaterialType + " vs " +
        KEY_MATERIAL_TYPE1);
  }

  // Parse other fields (common to internal and external key material)
  return parse(keyMaterialJson);
}
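The same read-into-a-Map-and-validate idiom can be sketched in isolation as follows. This is not the KeyMaterial implementation: the field name and type marker mirror the illustrative ones used earlier, and the exception is a generic stand-in.

import java.io.IOException;
import java.io.StringReader;
import java.util.Map;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;

public class KeyMaterialParseSketch {
  private static final ObjectMapper MAPPER = new ObjectMapper();

  // Deserializes the JSON into a Map and rejects unsupported type markers before any field is trusted
  static Map<String, Object> parseKeyMaterial(String json) throws IOException {
    Map<String, Object> map = MAPPER.readValue(new StringReader(json), new TypeReference<Map<String, Object>>() {});
    Object type = map.get("keyMaterialType");              // assumed field name, as in the sketch above
    if (!"PKMT1".equals(type)) {
      throw new IllegalArgumentException("Unsupported key material type: " + type);
    }
    return map;
  }
}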
Use of org.apache.parquet.crypto.ParquetCryptoRuntimeException in project parquet-mr by apache.
The class KeyToolkit, method rotateMasterKeys:
/**
 * Key rotation. In the single wrapping mode, decrypts data keys with old master keys, then encrypts
 * them with new master keys. In the double wrapping mode, decrypts KEKs (key encryption keys) with old
 * master keys, generates new KEKs and encrypts them with new master keys.
 * Works only if key material is not stored internally in file footers.
 * Not supported in local key wrapping mode.
 * Method can be run by multiple threads, but each thread must work on a different folder.
 * @param folderPath parent path of Parquet files, whose keys will be rotated
 * @param hadoopConfig Hadoop configuration
 * @throws IOException I/O problems
 * @throws ParquetCryptoRuntimeException General parquet encryption problems
 * @throws KeyAccessDeniedException No access to master keys
 * @throws UnsupportedOperationException Master key rotation not supported in the specific configuration
 */
public static void rotateMasterKeys(String folderPath, Configuration hadoopConfig)
    throws IOException, ParquetCryptoRuntimeException, KeyAccessDeniedException, UnsupportedOperationException {
  if (hadoopConfig.getBoolean(KEY_MATERIAL_INTERNAL_PROPERTY_NAME, false)) {
    throw new UnsupportedOperationException("Key rotation is not supported for internal key material");
  }

  // If process wrote files with double-wrapped keys, clean KEK cache (since master keys are changing).
  // Only once for each key rotation cycle; not for every folder
  long currentTime = System.currentTimeMillis();
  synchronized (lastCacheCleanForKeyRotationTimeLock) {
    if (currentTime - lastCacheCleanForKeyRotationTime > CACHE_CLEAN_PERIOD_FOR_KEY_ROTATION) {
      KEK_WRITE_CACHE_PER_TOKEN.clear();
      lastCacheCleanForKeyRotationTime = currentTime;
    }
  }

  Path parentPath = new Path(folderPath);
  FileSystem hadoopFileSystem = parentPath.getFileSystem(hadoopConfig);
  if (!hadoopFileSystem.exists(parentPath) || !hadoopFileSystem.isDirectory(parentPath)) {
    throw new ParquetCryptoRuntimeException("Couldn't rotate keys - folder doesn't exist or is not a directory: " +
        folderPath);
  }

  FileStatus[] parquetFilesInFolder = hadoopFileSystem.listStatus(parentPath, HiddenFileFilter.INSTANCE);
  if (parquetFilesInFolder.length == 0) {
    throw new ParquetCryptoRuntimeException("Couldn't rotate keys - no parquet files in folder " + folderPath);
  }

  for (FileStatus fs : parquetFilesInFolder) {
    Path parquetFile = fs.getPath();

    FileKeyMaterialStore keyMaterialStore = new HadoopFSKeyMaterialStore(hadoopFileSystem);
    keyMaterialStore.initialize(parquetFile, hadoopConfig, false);

    FileKeyMaterialStore tempKeyMaterialStore = new HadoopFSKeyMaterialStore(hadoopFileSystem);
    tempKeyMaterialStore.initialize(parquetFile, hadoopConfig, true);

    Set<String> fileKeyIdSet = keyMaterialStore.getKeyIDSet();

    // Start with footer key (to get KMS ID, URL, if needed)
    FileKeyUnwrapper fileKeyUnwrapper = new FileKeyUnwrapper(hadoopConfig, parquetFile, keyMaterialStore);
    String keyMaterialString = keyMaterialStore.getKeyMaterial(KeyMaterial.FOOTER_KEY_ID_IN_FILE);
    KeyWithMasterID key = fileKeyUnwrapper.getDEKandMasterID(KeyMaterial.parse(keyMaterialString));
    KmsClientAndDetails kmsClientAndDetails = fileKeyUnwrapper.getKmsClientAndDetails();

    FileKeyWrapper fileKeyWrapper = new FileKeyWrapper(hadoopConfig, tempKeyMaterialStore, kmsClientAndDetails);
    fileKeyWrapper.getEncryptionKeyMetadata(key.getDataKey(), key.getMasterID(), true,
        KeyMaterial.FOOTER_KEY_ID_IN_FILE);

    fileKeyIdSet.remove(KeyMaterial.FOOTER_KEY_ID_IN_FILE);

    // Rotate column keys
    for (String keyIdInFile : fileKeyIdSet) {
      keyMaterialString = keyMaterialStore.getKeyMaterial(keyIdInFile);
      key = fileKeyUnwrapper.getDEKandMasterID(KeyMaterial.parse(keyMaterialString));
      fileKeyWrapper.getEncryptionKeyMetadata(key.getDataKey(), key.getMasterID(), false, keyIdInFile);
    }

    tempKeyMaterialStore.saveMaterial();
    keyMaterialStore.removeMaterial();
    tempKeyMaterialStore.moveMaterialTo(keyMaterialStore);
  }
}
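A usage sketch, assuming the master keys have already been rotated inside the KMS and the files were written with external key material. The property-name strings and the KMS client class are placeholders; check the KeyToolkit constants in your parquet-mr version for the exact names.

import org.apache.hadoop.conf.Configuration;
import org.apache.parquet.crypto.keytools.KeyToolkit;

public class RotateKeysSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Hypothetical KMS client implementation; must resolve both old and new master key versions
    conf.set("parquet.encryption.kms.client.class", "com.example.MyKmsClient");
    // Rotation only works with external key material files (internal storage is rejected above)
    conf.setBoolean("parquet.encryption.key.material.store.internally", false);
    // Each thread may rotate a different folder; this call handles one folder
    KeyToolkit.rotateMasterKeys("/data/warehouse/encrypted_table", conf);
  }
}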