use of org.finra.herd.model.dto.S3FileTransferRequestParamsDto in project herd by FINRAOS.
the class JdbcServiceImpl method getProperties.
/**
* Gets an S3 object from the specified location and parses it as Java properties.
*
* @param s3PropertiesLocation {@link S3PropertiesLocation}
*
* @return {@link Properties}
*/
private Properties getProperties(S3PropertiesLocation s3PropertiesLocation) {
    String s3BucketName = s3PropertiesLocation.getBucketName().trim();
    String s3ObjectKey = s3PropertiesLocation.getKey().trim();
    S3FileTransferRequestParamsDto s3FileTransferRequestParamsDto = storageHelper.getS3FileTransferRequestParamsDto();
    return s3Dao.getProperties(s3BucketName, s3ObjectKey, s3FileTransferRequestParamsDto);
}
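Downstream, the returned java.util.Properties is read by key, for example to resolve JDBC connection values. A minimal sketch of such a caller follows; the property key names are hypothetical and not defined by herd.

// Illustrative sketch only; the key names below are made up for the example.
Properties jdbcProperties = getProperties(s3PropertiesLocation);
String jdbcUsername = jdbcProperties.getProperty("username");       // null if the key is absent
String jdbcPassword = jdbcProperties.getProperty("password", "");   // falls back to "" if the key is absent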
use of org.finra.herd.model.dto.S3FileTransferRequestParamsDto in project herd by FINRAOS.
the class BusinessObjectDataInitiateDestroyHelperServiceImpl method executeS3SpecificStepsImpl.
/**
* Executes S3 specific steps required for initiation of a business object data destroy.
*
* @param businessObjectDataDestroyDto the DTO that holds various parameters needed to initiate a business object data destroy
*/
protected void executeS3SpecificStepsImpl(BusinessObjectDataDestroyDto businessObjectDataDestroyDto) {
    // Create an S3 file transfer parameters DTO to access the S3 bucket.
    // Since the S3 key prefix represents a directory, we add a trailing '/' character to it.
    S3FileTransferRequestParamsDto s3FileTransferRequestParamsDto = storageHelper.getS3FileTransferRequestParamsDto();
    s3FileTransferRequestParamsDto.setS3Endpoint(businessObjectDataDestroyDto.getS3Endpoint());
    s3FileTransferRequestParamsDto.setS3BucketName(businessObjectDataDestroyDto.getS3BucketName());
    s3FileTransferRequestParamsDto.setS3KeyPrefix(StringUtils.appendIfMissing(businessObjectDataDestroyDto.getS3KeyPrefix(), "/"));

    // Create an S3 file transfer parameters DTO to be used for the S3 object tagging operation.
    S3FileTransferRequestParamsDto s3ObjectTaggerParamsDto = storageHelper.getS3FileTransferRequestParamsDtoByRole(
        businessObjectDataDestroyDto.getS3ObjectTaggerRoleArn(), businessObjectDataDestroyDto.getS3ObjectTaggerRoleSessionName());
    s3ObjectTaggerParamsDto.setS3Endpoint(businessObjectDataDestroyDto.getS3Endpoint());

    // Get actual S3 files by selecting all S3 keys matching the S3 key prefix from the S3 bucket.
    // This time, we do not ignore 0 byte objects that represent S3 directories.
    List<S3ObjectSummary> actualS3Files = s3Service.listDirectory(s3FileTransferRequestParamsDto, false);

    // Set the list of files to be tagged for deletion.
    s3FileTransferRequestParamsDto.setFiles(storageFileHelper.getFiles(storageFileHelper.createStorageFilesFromS3ObjectSummaries(actualS3Files)));

    // Tag the S3 objects to initiate the deletion.
    s3Service.tagObjects(s3FileTransferRequestParamsDto, s3ObjectTaggerParamsDto,
        new Tag(businessObjectDataDestroyDto.getS3ObjectTagKey(), businessObjectDataDestroyDto.getS3ObjectTagValue()));

    // Log the list of S3 files that got tagged.
    if (LOGGER.isInfoEnabled()) {
        LOGGER.info("Successfully tagged files in S3 bucket. s3BucketName=\"{}\" s3KeyCount={} s3ObjectTagKey=\"{}\" s3ObjectTagValue=\"{}\"",
            s3FileTransferRequestParamsDto.getS3BucketName(), actualS3Files.size(), businessObjectDataDestroyDto.getS3ObjectTagKey(),
            businessObjectDataDestroyDto.getS3ObjectTagValue());

        for (S3ObjectSummary s3File : actualS3Files) {
            LOGGER.info("s3Key=\"{}\"", s3File.getKey());
        }
    }
}
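Two details of this method are easy to verify in isolation: StringUtils.appendIfMissing only appends the '/' when the prefix does not already end with one, and the deletion marker is a plain com.amazonaws.services.s3.model.Tag built from the DTO's key and value. A small standalone sketch follows; the prefix, tag key, and tag value are hypothetical.

import org.apache.commons.lang3.StringUtils;
import com.amazonaws.services.s3.model.Tag;

public class DestroyTaggingSketch {
    public static void main(String[] args) {
        // The trailing '/' is appended only when missing, so prefixes are never doubled up.
        String appended = StringUtils.appendIfMissing("namespace/bdata/v0", "/");     // "namespace/bdata/v0/"
        String unchanged = StringUtils.appendIfMissing("namespace/bdata/v0/", "/");   // stays "namespace/bdata/v0/"

        // The S3 object tag that marks the listed objects for deletion; key and value are made up here.
        Tag destroyTag = new Tag("marked-for-destroy", "true");

        System.out.println(appended + " | " + unchanged + " | " + destroyTag.getKey() + "=" + destroyTag.getValue());
    }
}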
use of org.finra.herd.model.dto.S3FileTransferRequestParamsDto in project herd by FINRAOS.
the class StorageHelper method getS3BucketAccessParams.
/**
* Returns a new instance of S3FileTransferRequestParamsDto populated with all parameters required to access an S3 bucket.
*
* @param storageEntity the storage entity that contains attributes to access an S3 bucket
*
* @return the S3FileTransferRequestParamsDto instance that can be used to access the S3 bucket
*/
public S3FileTransferRequestParamsDto getS3BucketAccessParams(StorageEntity storageEntity) {
    // Get the S3 bucket specific configuration settings.
    // Please note that since this value is required, we pass a "true" flag.
    String s3BucketName = getStorageAttributeValueByName(configurationHelper.getProperty(ConfigurationValue.S3_ATTRIBUTE_NAME_BUCKET_NAME),
        storageEntity, true);

    S3FileTransferRequestParamsDto params = getS3FileTransferRequestParamsDto();
    params.setS3Endpoint(configurationHelper.getProperty(ConfigurationValue.S3_ENDPOINT));
    params.setS3BucketName(s3BucketName);

    return params;
}
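For context, a hedged sketch of how a caller might use the returned parameters, reusing the s3Service.listDirectory call shown in the destroy helper above. The key prefix is hypothetical, and the boolean flag is assumed to control whether zero-byte directory markers are ignored.

// Illustrative caller sketch, not herd code.
S3FileTransferRequestParamsDto params = storageHelper.getS3BucketAccessParams(storageEntity);
params.setS3KeyPrefix("namespace/some-key-prefix/");   // hypothetical key prefix
List<S3ObjectSummary> s3ObjectSummaries = s3Service.listDirectory(params, true);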
use of org.finra.herd.model.dto.S3FileTransferRequestParamsDto in project herd by FINRAOS.
the class StorageHelper method getS3FileTransferRequestParamsDtoByRole.
/**
* Returns a new {@link S3FileTransferRequestParamsDto} with temporary credentials for the specified AWS role and session name.
*
* @param roleArn the ARN of the role
* @param sessionName the session name
*
* @return the {@link S3FileTransferRequestParamsDto} object
*/
public S3FileTransferRequestParamsDto getS3FileTransferRequestParamsDtoByRole(String roleArn, String sessionName) {
    // Get the S3 file transfer request parameters DTO with proxy host and port populated from the configuration.
    S3FileTransferRequestParamsDto params = getS3FileTransferRequestParamsDto();

    // Assume the specified role. Set the duration of the role session to 3600 seconds (1 hour).
    Credentials credentials = stsDao.getTemporarySecurityCredentials(params, sessionName, roleArn, 3600, null);

    // Update the AWS parameters DTO with the temporary credentials.
    params.setAwsAccessKeyId(credentials.getAccessKeyId());
    params.setAwsSecretKey(credentials.getSecretAccessKey());
    params.setSessionToken(credentials.getSessionToken());

    return params;
}
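herd delegates the call to stsDao, but conceptually the temporary credentials come from an STS AssumeRole request. A standalone sketch of that underlying call with the AWS SDK for Java 1.x is shown below; it is not herd's StsDao implementation and omits the proxy settings carried in the params DTO.

import com.amazonaws.services.securitytoken.AWSSecurityTokenService;
import com.amazonaws.services.securitytoken.AWSSecurityTokenServiceClientBuilder;
import com.amazonaws.services.securitytoken.model.AssumeRoleRequest;
import com.amazonaws.services.securitytoken.model.Credentials;

public class AssumeRoleSketch {
    public static Credentials assumeRole(String roleArn, String sessionName) {
        AWSSecurityTokenService sts = AWSSecurityTokenServiceClientBuilder.defaultClient();

        // Assume the role for 3600 seconds (1 hour), matching the duration used above.
        AssumeRoleRequest request = new AssumeRoleRequest()
            .withRoleArn(roleArn)
            .withRoleSessionName(sessionName)
            .withDurationSeconds(3600);

        // The returned Credentials carry the access key id, secret key, and session token
        // that getS3FileTransferRequestParamsDtoByRole copies onto the params DTO.
        return sts.assumeRole(request).getCredentials();
    }
}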
use of org.finra.herd.model.dto.S3FileTransferRequestParamsDto in project herd by FINRAOS.
the class UploaderControllerTest method testPerformUploadInvalidLocalDir.
@Test
public void testPerformUploadInvalidLocalDir() throws Exception {
    // Create uploader input manifest file in LOCAL_TEMP_PATH_INPUT directory.
    File manifestFile = createManifestFile(LOCAL_TEMP_PATH_INPUT.toString(), getTestUploaderInputManifestDto());
    Assert.assertTrue(manifestFile.isFile());

    // Try to run upload by specifying a non-existing local directory.
    S3FileTransferRequestParamsDto s3FileTransferRequestParamsDto = getTestS3FileTransferRequestParamsDto();
    s3FileTransferRequestParamsDto.setLocalPath(Paths.get(LOCAL_TEMP_PATH_INPUT.toString(), "I_DO_NOT_EXIST").toString());

    RegServerAccessParamsDto regServerAccessParamsDto = RegServerAccessParamsDto.builder()
        .withRegServerHost(WEB_SERVICE_HOSTNAME)
        .withRegServerPort(WEB_SERVICE_HTTPS_PORT)
        .withUseSsl(true)
        .withUsername(WEB_SERVICE_HTTPS_USERNAME)
        .withPassword(WEB_SERVICE_HTTPS_PASSWORD)
        .build();

    try {
        uploaderController.performUpload(regServerAccessParamsDto, manifestFile, s3FileTransferRequestParamsDto, false, false,
            TEST_RETRY_ATTEMPTS, TEST_RETRY_DELAY_SECS);
        fail("Should throw an IllegalArgumentException when local directory does not exist.");
    } catch (IllegalArgumentException e) {
        assertTrue(e.getMessage().startsWith("Invalid local base directory"));
    }
}
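The try/fail/catch pattern above can also be written with org.junit.Assert.assertThrows, available from JUnit 4.13 onward. This is shown only as an illustrative alternative, assuming the same test fixtures.

// Equivalent assertion with assertThrows (JUnit 4.13+); illustrative alternative only.
IllegalArgumentException exception = Assert.assertThrows(IllegalArgumentException.class,
    () -> uploaderController.performUpload(regServerAccessParamsDto, manifestFile, s3FileTransferRequestParamsDto,
        false, false, TEST_RETRY_ATTEMPTS, TEST_RETRY_DELAY_SECS));
Assert.assertTrue(exception.getMessage().startsWith("Invalid local base directory"));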