use of com.amazonaws.services.s3.transfer.Transfer in project herd by FINRAOS.
In the class StoragePolicyProcessorHelperServiceImpl, method executeStoragePolicyTransitionImpl.
/**
 * Executes a storage policy transition as per the specified storage policy selection.
 *
 * @param storagePolicyTransitionParamsDto the storage policy transition DTO that contains parameters needed to perform a storage policy transition
 */
protected void executeStoragePolicyTransitionImpl(StoragePolicyTransitionParamsDto storagePolicyTransitionParamsDto) {
    // Create an S3 file transfer parameters DTO to access the S3 bucket.
    // Since the S3 key prefix represents a directory, we add a trailing '/' character to it.
    S3FileTransferRequestParamsDto s3FileTransferRequestParamsDto = storageHelper.getS3FileTransferRequestParamsDto();
    s3FileTransferRequestParamsDto.setS3Endpoint(storagePolicyTransitionParamsDto.getS3Endpoint());
    s3FileTransferRequestParamsDto.setS3BucketName(storagePolicyTransitionParamsDto.getS3BucketName());
    s3FileTransferRequestParamsDto.setS3KeyPrefix(StringUtils.appendIfMissing(storagePolicyTransitionParamsDto.getS3KeyPrefix(), "/"));

    // Create an S3 file transfer parameters DTO to be used for the S3 object tagging operation.
    S3FileTransferRequestParamsDto s3ObjectTaggerParamsDto = storageHelper.getS3FileTransferRequestParamsDtoByRole(
        storagePolicyTransitionParamsDto.getS3ObjectTaggerRoleArn(), storagePolicyTransitionParamsDto.getS3ObjectTaggerRoleSessionName());
    s3ObjectTaggerParamsDto.setS3Endpoint(storagePolicyTransitionParamsDto.getS3Endpoint());

    // Get the actual S3 files by selecting all S3 keys matching the S3 key prefix from the S3 bucket.
    // When listing S3 files, we ignore 0 byte objects that represent S3 directories.
    List<S3ObjectSummary> actualS3FilesWithoutZeroByteDirectoryMarkers = s3Service.listDirectory(s3FileTransferRequestParamsDto, true);

    // Validate existence of the S3 files.
    storageFileHelper.validateRegisteredS3Files(storagePolicyTransitionParamsDto.getStorageFiles(), actualS3FilesWithoutZeroByteDirectoryMarkers,
        storagePolicyTransitionParamsDto.getStorageName(), storagePolicyTransitionParamsDto.getBusinessObjectDataKey());

    // Get the actual S3 files by selecting all S3 keys matching the S3 key prefix from the S3 bucket.
    // This time, we do not ignore 0 byte objects that represent S3 directories.
    List<S3ObjectSummary> actualS3Files = s3Service.listDirectory(s3FileTransferRequestParamsDto, false);

    // Set the list of files to be tagged for archiving.
    s3FileTransferRequestParamsDto.setFiles(storageFileHelper.getFiles(storageFileHelper.createStorageFilesFromS3ObjectSummaries(actualS3Files)));

    // Tag the S3 objects to initiate the archiving.
    s3Service.tagObjects(s3FileTransferRequestParamsDto, s3ObjectTaggerParamsDto,
        new Tag(storagePolicyTransitionParamsDto.getS3ObjectTagKey(), storagePolicyTransitionParamsDto.getS3ObjectTagValue()));

    // Log the list of files tagged in the S3 bucket.
    if (LOGGER.isInfoEnabled()) {
        LOGGER.info("Successfully tagged files in S3 bucket. s3BucketName=\"{}\" s3KeyCount={} s3ObjectTagKey=\"{}\" s3ObjectTagValue=\"{}\"",
            s3FileTransferRequestParamsDto.getS3BucketName(), actualS3Files.size(), storagePolicyTransitionParamsDto.getS3ObjectTagKey(),
            storagePolicyTransitionParamsDto.getS3ObjectTagValue());
        for (S3ObjectSummary s3File : actualS3FilesWithoutZeroByteDirectoryMarkers) {
            LOGGER.info("s3Key=\"{}\"", s3File.getKey());
        }
    }
}
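The tag applied above does not archive anything by itself; the expectation (not shown in this snippet) is that the S3 bucket carries a lifecycle rule keyed on that tag which performs the actual transition to Glacier. The following is a minimal sketch of such a tag-based rule using the AWS SDK for Java v1; the bucket name, rule id, and tag key/value ("ArchiveToGlacier" / "Y") are hypothetical placeholders, since the real values come from herd configuration.

import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;
import com.amazonaws.services.s3.model.BucketLifecycleConfiguration;
import com.amazonaws.services.s3.model.StorageClass;
import com.amazonaws.services.s3.model.Tag;
import com.amazonaws.services.s3.model.lifecycle.LifecycleFilter;
import com.amazonaws.services.s3.model.lifecycle.LifecycleTagPredicate;

public class TagBasedArchiveRuleSketch {

    public static void main(String[] args) {
        AmazonS3 s3 = AmazonS3ClientBuilder.defaultClient();

        // Transition any object carrying the (hypothetical) archiving tag to Glacier.
        BucketLifecycleConfiguration.Rule rule = new BucketLifecycleConfiguration.Rule()
            .withId("archive-tagged-objects")
            .withFilter(new LifecycleFilter(new LifecycleTagPredicate(new Tag("ArchiveToGlacier", "Y"))))
            .addTransition(new BucketLifecycleConfiguration.Transition()
                .withDays(0)
                .withStorageClass(StorageClass.Glacier))
            .withStatus(BucketLifecycleConfiguration.ENABLED);

        // Apply the rule to the (hypothetical) bucket.
        s3.setBucketLifecycleConfiguration("example-herd-bucket", new BucketLifecycleConfiguration().withRules(rule));
    }
}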
use of com.amazonaws.services.s3.transfer.Transfer in project herd by FINRAOS.
In the class BusinessObjectDataInitiateDestroyHelperServiceImpl, method executeS3SpecificStepsImpl.
/**
 * Executes the S3 specific steps required to initiate a business object data destroy.
 *
 * @param businessObjectDataDestroyDto the DTO that holds various parameters needed to initiate a business object data destroy
 */
protected void executeS3SpecificStepsImpl(BusinessObjectDataDestroyDto businessObjectDataDestroyDto) {
    // Create an S3 file transfer parameters DTO to access the S3 bucket.
    // Since the S3 key prefix represents a directory, we add a trailing '/' character to it.
    S3FileTransferRequestParamsDto s3FileTransferRequestParamsDto = storageHelper.getS3FileTransferRequestParamsDto();
    s3FileTransferRequestParamsDto.setS3Endpoint(businessObjectDataDestroyDto.getS3Endpoint());
    s3FileTransferRequestParamsDto.setS3BucketName(businessObjectDataDestroyDto.getS3BucketName());
    s3FileTransferRequestParamsDto.setS3KeyPrefix(StringUtils.appendIfMissing(businessObjectDataDestroyDto.getS3KeyPrefix(), "/"));

    // Create an S3 file transfer parameters DTO to be used for the S3 object tagging operation.
    S3FileTransferRequestParamsDto s3ObjectTaggerParamsDto = storageHelper.getS3FileTransferRequestParamsDtoByRole(
        businessObjectDataDestroyDto.getS3ObjectTaggerRoleArn(), businessObjectDataDestroyDto.getS3ObjectTaggerRoleSessionName());
    s3ObjectTaggerParamsDto.setS3Endpoint(businessObjectDataDestroyDto.getS3Endpoint());

    // Get the actual S3 files by selecting all S3 keys matching the S3 key prefix from the S3 bucket.
    // We do not ignore 0 byte objects that represent S3 directories.
    List<S3ObjectSummary> actualS3Files = s3Service.listDirectory(s3FileTransferRequestParamsDto, false);

    // Set the list of files to be tagged for deletion.
    s3FileTransferRequestParamsDto.setFiles(storageFileHelper.getFiles(storageFileHelper.createStorageFilesFromS3ObjectSummaries(actualS3Files)));

    // Tag the S3 objects to initiate the deletion.
    s3Service.tagObjects(s3FileTransferRequestParamsDto, s3ObjectTaggerParamsDto,
        new Tag(businessObjectDataDestroyDto.getS3ObjectTagKey(), businessObjectDataDestroyDto.getS3ObjectTagValue()));

    // Log the list of S3 files that got tagged.
    if (LOGGER.isInfoEnabled()) {
        LOGGER.info("Successfully tagged files in S3 bucket. s3BucketName=\"{}\" s3KeyCount={} s3ObjectTagKey=\"{}\" s3ObjectTagValue=\"{}\"",
            s3FileTransferRequestParamsDto.getS3BucketName(), actualS3Files.size(), businessObjectDataDestroyDto.getS3ObjectTagKey(),
            businessObjectDataDestroyDto.getS3ObjectTagValue());
        for (S3ObjectSummary s3File : actualS3Files) {
            LOGGER.info("s3Key=\"{}\"", s3File.getKey());
        }
    }
}
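The actual tagging is delegated to s3Dao via s3Service.tagObjects, whose implementation is not shown here. As a rough illustration of what per-object tagging looks like at the SDK level (not necessarily how herd implements it), the sketch below tags a single object using temporary credentials from an assumed tagging role; the bucket, key, tag, region, and credential values are hypothetical placeholders.

import java.util.Collections;
import com.amazonaws.auth.AWSStaticCredentialsProvider;
import com.amazonaws.auth.BasicSessionCredentials;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;
import com.amazonaws.services.s3.model.ObjectTagging;
import com.amazonaws.services.s3.model.SetObjectTaggingRequest;
import com.amazonaws.services.s3.model.Tag;

public class ObjectTaggingSketch {

    public static void main(String[] args) {
        // Temporary credentials obtained from an assumed tagging role (placeholder values).
        BasicSessionCredentials credentials =
            new BasicSessionCredentials("ASSUMED_ROLE_ACCESS_KEY", "ASSUMED_ROLE_SECRET_KEY", "SESSION_TOKEN");
        AmazonS3 s3 = AmazonS3ClientBuilder.standard()
            .withCredentials(new AWSStaticCredentialsProvider(credentials))
            .withRegion("us-east-1")
            .build();

        // Tag a single key; a real implementation would loop over all listed keys.
        Tag deleteTag = new Tag("DeleteObject", "Y");
        s3.setObjectTagging(new SetObjectTaggingRequest("example-herd-bucket", "example/key/prefix/file.txt",
            new ObjectTagging(Collections.singletonList(deleteTag))));
    }
}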
use of com.amazonaws.services.s3.transfer.Transfer in project herd by FINRAOS.
In the class S3ServiceTest, method testTagObjects.
@Test
public void testTagObjects() {
    // Create an S3 file transfer request parameters DTO to access S3 objects.
    S3FileTransferRequestParamsDto s3FileTransferRequestParamsDto = new S3FileTransferRequestParamsDto();
    s3FileTransferRequestParamsDto.setFiles(Arrays.asList(new File(TEST_S3_KEY_PREFIX + "/" + LOCAL_FILE)));

    // Create an S3 file transfer request parameters DTO to tag S3 objects.
    S3FileTransferRequestParamsDto s3ObjectTaggerParamsDto = new S3FileTransferRequestParamsDto();
    s3ObjectTaggerParamsDto.setAwsAccessKeyId(AWS_ASSUMED_ROLE_ACCESS_KEY);
    s3ObjectTaggerParamsDto.setAwsSecretKey(AWS_ASSUMED_ROLE_SECRET_KEY);
    s3ObjectTaggerParamsDto.setSessionToken(AWS_ASSUMED_ROLE_SESSION_TOKEN);

    // Create an S3 object tag.
    Tag tag = new Tag(S3_OBJECT_TAG_KEY, S3_OBJECT_TAG_VALUE);

    // Call the method under test.
    s3Service.tagObjects(s3FileTransferRequestParamsDto, s3ObjectTaggerParamsDto, tag);

    // Verify the external calls.
    verify(s3Dao).tagObjects(s3FileTransferRequestParamsDto, s3ObjectTaggerParamsDto, tag);
    verifyNoMoreInteractions(s3Dao);
}
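The test assumes s3Dao is a Mockito mock injected into s3Service, which is why verify and verifyNoMoreInteractions can be applied to it; that wiring is not part of the snippet. A minimal sketch of one way such a fixture could be set up with JUnit 4 and Mockito is shown below; the field types S3Dao and S3ServiceImpl follow the herd naming, but the exact test class configuration is an assumption.

import org.junit.Before;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;

public class S3ServiceTestFixtureSketch {

    // Mocked DAO whose interactions are verified by the test (assumed wiring).
    @Mock
    private S3Dao s3Dao;

    // Service under test with the mock injected into it (assumed wiring).
    @InjectMocks
    private S3ServiceImpl s3Service;

    @Before
    public void before() {
        // Initialize the @Mock and @InjectMocks annotated fields.
        MockitoAnnotations.initMocks(this);
    }
}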
use of com.amazonaws.services.s3.transfer.Transfer in project herd by FINRAOS.
In the class S3DaoImpl, method downloadFile.
@Override
public S3FileTransferResultsDto downloadFile(final S3FileTransferRequestParamsDto params) throws InterruptedException {
    LOGGER.info("Downloading S3 file... s3Key=\"{}\" s3BucketName=\"{}\" localPath=\"{}\"", params.getS3KeyPrefix(), params.getS3BucketName(),
        params.getLocalPath());

    // Perform the transfer.
    S3FileTransferResultsDto results = performTransfer(params, new Transferer() {

        @Override
        public Transfer performTransfer(TransferManager transferManager) {
            return s3Operations.download(params.getS3BucketName(), params.getS3KeyPrefix(), new File(params.getLocalPath()), transferManager);
        }
    });

    LOGGER.info("Downloaded S3 file to the local system. s3Key=\"{}\" s3BucketName=\"{}\" localPath=\"{}\" totalBytesTransferred={} transferDuration=\"{}\"",
        params.getS3KeyPrefix(), params.getS3BucketName(), params.getLocalPath(), results.getTotalBytesTransferred(),
        HerdDateUtils.formatDuration(results.getDurationMillis()));

    logOverallTransferRate(results);
    return results;
}
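performTransfer and Transferer are herd abstractions that wrap the AWS TransferManager and hand back a Transfer handle; their implementation is not shown in this snippet. For reference, the following is a minimal, self-contained sketch of the equivalent direct SDK usage: start an asynchronous download and block until it completes. The bucket name, key, and local path are hypothetical placeholders.

import java.io.File;
import com.amazonaws.services.s3.transfer.Download;
import com.amazonaws.services.s3.transfer.TransferManager;
import com.amazonaws.services.s3.transfer.TransferManagerBuilder;

public class TransferManagerDownloadSketch {

    public static void main(String[] args) throws InterruptedException {
        TransferManager transferManager = TransferManagerBuilder.defaultTransferManager();
        try {
            // Start an asynchronous download and block until it finishes.
            Download download = transferManager.download("example-herd-bucket", "example/key/prefix/file.txt", new File("/tmp/file.txt"));
            download.waitForCompletion();
        } finally {
            transferManager.shutdownNow();
        }
    }
}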
use of com.amazonaws.services.s3.transfer.Transfer in project herd by FINRAOS.
In the class S3DaoImpl, method uploadFileList.
@Override
public S3FileTransferResultsDto uploadFileList(final S3FileTransferRequestParamsDto params) throws InterruptedException {
    LOGGER.info("Uploading a list of files from the local directory to S3... localDirectory=\"{}\" s3KeyPrefix=\"{}\" s3BucketName=\"{}\" s3KeyCount={}",
        params.getLocalPath(), params.getS3KeyPrefix(), params.getS3BucketName(), params.getFiles().size());

    if (LOGGER.isInfoEnabled()) {
        for (File file : params.getFiles()) {
            LOGGER.info("s3Key=\"{}\"", file.getPath());
        }
    }

    // Perform the transfer.
    S3FileTransferResultsDto results = performTransfer(params, new Transferer() {

        @Override
        public Transfer performTransfer(TransferManager transferManager) {
            return s3Operations.uploadFileList(params.getS3BucketName(), params.getS3KeyPrefix(), new File(params.getLocalPath()), params.getFiles(),
                new ObjectMetadataProvider() {

                    @Override
                    public void provideObjectMetadata(File file, ObjectMetadata metadata) {
                        prepareMetadata(params, metadata);
                    }
                }, transferManager);
        }
    });

    LOGGER.info("Uploaded list of files from the local directory to S3. localDirectory=\"{}\" s3KeyPrefix=\"{}\" s3BucketName=\"{}\" s3KeyCount={} " +
        "totalBytesTransferred={} transferDuration=\"{}\"", params.getLocalPath(), params.getS3KeyPrefix(), params.getS3BucketName(),
        results.getTotalFilesTransferred(), results.getTotalBytesTransferred(), HerdDateUtils.formatDuration(results.getDurationMillis()));

    logOverallTransferRate(results);
    return results;
}
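As with downloadFile, the upload is driven by the TransferManager; the ObjectMetadataProvider callback lets each uploaded object's metadata be customized (here via the prepareMetadata helper, whose body is not shown). Below is a minimal, self-contained sketch of the underlying TransferManager.uploadFileList call, using server-side encryption as an illustrative metadata customization; the bucket, key prefix, directory, and file names are hypothetical placeholders.

import java.io.File;
import java.util.Arrays;
import java.util.List;
import com.amazonaws.services.s3.model.ObjectMetadata;
import com.amazonaws.services.s3.transfer.MultipleFileUpload;
import com.amazonaws.services.s3.transfer.ObjectMetadataProvider;
import com.amazonaws.services.s3.transfer.TransferManager;
import com.amazonaws.services.s3.transfer.TransferManagerBuilder;

public class TransferManagerUploadSketch {

    public static void main(String[] args) throws InterruptedException {
        TransferManager transferManager = TransferManagerBuilder.defaultTransferManager();
        try {
            File localDirectory = new File("/tmp/data");
            List<File> files = Arrays.asList(new File(localDirectory, "part-00000.txt"));

            // Set server-side encryption on every uploaded object via the metadata provider.
            ObjectMetadataProvider metadataProvider = (file, metadata) ->
                metadata.setSSEAlgorithm(ObjectMetadata.AES_256_SERVER_SIDE_ENCRYPTION);

            // Start an asynchronous multi-file upload and block until it finishes.
            MultipleFileUpload upload = transferManager.uploadFileList("example-herd-bucket", "example/key/prefix", localDirectory, files, metadataProvider);
            upload.waitForCompletion();
        } finally {
            transferManager.shutdownNow();
        }
    }
}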