Use of com.amazonaws.services.s3.transfer.Transfer in project herd by FINRAOS —
class S3DaoTestHelper, method validateS3FileUpload.
/**
 * Validates that the S3 files at the location described by the transfer request parameters
 * are exactly the expected set of keys (same count, same keys, order-independent).
 *
 * @param s3FileTransferRequestParamsDto the S3 file transfer request parameters DTO that
 *     identifies the bucket and key prefix to list
 * @param expectedS3Keys the list of expected S3 keys
 */
public void validateS3FileUpload(S3FileTransferRequestParamsDto s3FileTransferRequestParamsDto, List<String> expectedS3Keys) {
    // List the actual S3 objects under the configured prefix.
    List<S3ObjectSummary> s3ObjectSummaries = s3Dao.listDirectory(s3FileTransferRequestParamsDto);

    // Use assertEquals rather than assertTrue(a == b) so a failure reports both sizes.
    assertEquals(expectedS3Keys.size(), s3ObjectSummaries.size());

    // Build a list of the actual S3 keys.
    List<String> actualS3Keys = new ArrayList<>();
    for (S3ObjectSummary s3ObjectSummary : s3ObjectSummaries) {
        actualS3Keys.add(s3ObjectSummary.getKey());
    }

    // Mutual containment plus the size check above establishes the two key sets are equal,
    // i.e. all local test files got uploaded and nothing unexpected is present.
    assertTrue(expectedS3Keys.containsAll(actualS3Keys));
    assertTrue(actualS3Keys.containsAll(expectedS3Keys));
}
Use of com.amazonaws.services.s3.transfer.Transfer in project photon-model by vmware —
class AWSUtils, method getS3TransferManager.
/**
 * Builds an S3 {@link TransferManager} client for the given credentials and region.
 *
 * Note: ARN-based credentials will not work unless they have already been exchanged to
 * AWS for session credentials. If unset, this method will fail. To enable ARN-based
 * credentials, migrate to {@link #getS3TransferManagerAsync(AuthCredentialsServiceState,
 * String, ExecutorService)}
 *
 * @param credentials An {@link AuthCredentialsServiceState} object.
 * @param region The region to get the AWS client in.
 * @param executorService The executor service to run async services in.
 */
public static TransferManager getS3TransferManager(AuthCredentialsServiceState credentials, String region, ExecutorService executorService) {
    // Fall back to the default AWS region when none was supplied.
    if (region == null) {
        region = Regions.DEFAULT_REGION.getName();
    }

    AmazonS3ClientBuilder s3ClientBuilder = AmazonS3ClientBuilder.standard()
            .withCredentials(getAwsStaticCredentialsProvider(credentials))
            .withForceGlobalBucketAccessEnabled(true);

    // When an S3 proxy is configured, route through its endpoint; otherwise talk to the region directly.
    if (isAwsS3Proxy()) {
        s3ClientBuilder.setEndpointConfiguration(
                new AwsClientBuilder.EndpointConfiguration(getAwsS3ProxyHost(), region));
    } else {
        s3ClientBuilder.setRegion(region);
    }

    // Reuse the caller-supplied executor and leave its lifecycle to the caller
    // (withShutDownThreadPools(false) prevents the TransferManager from shutting it down).
    return TransferManagerBuilder.standard()
            .withS3Client(s3ClientBuilder.build())
            .withExecutorFactory(() -> executorService)
            .withShutDownThreadPools(false)
            .build();
}
Use of com.amazonaws.services.s3.transfer.Transfer in project herd by FINRAOS —
class S3ServiceTest, method testListDirectoryIgnoreZeroByteDirectoryMarkers.
@Test
public void testListDirectoryIgnoreZeroByteDirectoryMarkers() {
    // Create the input DTO for the listing call.
    S3FileTransferRequestParamsDto params = new S3FileTransferRequestParamsDto();

    // Stub the DAO to return a single-element result when asked to ignore directory markers.
    List<S3ObjectSummary> expectedSummaries = Arrays.asList(new S3ObjectSummary());
    when(s3Dao.listDirectory(params, true)).thenReturn(expectedSummaries);

    // Invoke the service with the ignore-zero-byte-directory-markers flag set.
    List<S3ObjectSummary> actualSummaries = s3Service.listDirectory(params, true);

    // The service must delegate straight to the DAO with the same flag and nothing else.
    verify(s3Dao).listDirectory(params, true);
    verifyNoMoreInteractions(s3Dao);

    // The DAO's result must be passed through unchanged.
    assertEquals(expectedSummaries, actualSummaries);
}
Use of com.amazonaws.services.s3.transfer.Transfer in project herd by FINRAOS —
class S3ServiceTest, method testListDirectory.
@Test
public void testListDirectory() {
    // Create the input DTO for the listing call.
    S3FileTransferRequestParamsDto params = new S3FileTransferRequestParamsDto();

    // Stub the DAO: the single-argument service overload defaults the ignore flag to false.
    List<S3ObjectSummary> expectedSummaries = Arrays.asList(new S3ObjectSummary());
    when(s3Dao.listDirectory(params, false)).thenReturn(expectedSummaries);

    // Invoke the single-argument overload under test.
    List<S3ObjectSummary> actualSummaries = s3Service.listDirectory(params);

    // By default, 0 byte objects that represent S3 directories are NOT ignored (flag = false).
    verify(s3Dao).listDirectory(params, false);
    verifyNoMoreInteractions(s3Dao);

    // The DAO's result must be passed through unchanged.
    assertEquals(expectedSummaries, actualSummaries);
}
Use of com.amazonaws.services.s3.transfer.Transfer in project herd by FINRAOS —
class BusinessObjectDataInitiateRestoreHelperServiceImpl, method executeS3SpecificStepsImpl.
/**
 * Executes S3 specific steps for the initiation of a business object data restore request. The method also updates the specified DTO.
 *
 * Any exception raised during processing is caught, logged, and stored on the DTO via
 * {@code setException} rather than propagated to the caller.
 *
 * @param businessObjectDataRestoreDto the DTO that holds various parameters needed to perform a business object data restore
 */
protected void executeS3SpecificStepsImpl(BusinessObjectDataRestoreDto businessObjectDataRestoreDto) {
    try {
        // Create an S3 file transfer parameters DTO to access the S3 bucket.
        // Since the S3 key prefix represents a directory, we add a trailing '/' character to it.
        S3FileTransferRequestParamsDto s3FileTransferRequestParamsDto = storageHelper.getS3FileTransferRequestParamsDto();
        s3FileTransferRequestParamsDto.setS3BucketName(businessObjectDataRestoreDto.getS3BucketName());
        s3FileTransferRequestParamsDto.setS3Endpoint(businessObjectDataRestoreDto.getS3Endpoint());
        s3FileTransferRequestParamsDto.setS3KeyPrefix(StringUtils.appendIfMissing(businessObjectDataRestoreDto.getS3KeyPrefix(), "/"));

        // Get a list of S3 files matching the S3 key prefix. When listing S3 files, we ignore 0 byte objects that represent S3 directories.
        List<S3ObjectSummary> actualS3Files = s3Service.listDirectory(s3FileTransferRequestParamsDto, true);

        // Validate existence and file size of the S3 files.
        storageFileHelper.validateRegisteredS3Files(businessObjectDataRestoreDto.getStorageFiles(), actualS3Files,
            businessObjectDataRestoreDto.getStorageName(), businessObjectDataRestoreDto.getBusinessObjectDataKey());

        // Build a list of files to restore by selecting only objects that are currently archived in Glacier (have Glacier storage class).
        List<S3ObjectSummary> glacierS3Files = new ArrayList<>();
        for (S3ObjectSummary s3ObjectSummary : actualS3Files) {
            if (StorageClass.Glacier.toString().equals(s3ObjectSummary.getStorageClass())) {
                glacierS3Files.add(s3ObjectSummary);
            }
        }

        // Set a list of files to restore.
        s3FileTransferRequestParamsDto.setFiles(storageFileHelper.getFiles(storageFileHelper.createStorageFilesFromS3ObjectSummaries(glacierS3Files)));

        // Initiate restore requests for the list of objects in the Glacier bucket.
        // TODO: Make "expirationInDays" value configurable with default value set to 99 years (36135 days).
        s3Service.restoreObjects(s3FileTransferRequestParamsDto, 36135);
    } catch (Exception e) {
        // Log the caught exception. The original code logged businessObjectDataRestoreDto.getException(),
        // which is null until setException is called below, so the actual failure cause was never logged.
        LOGGER.error("Failed to initiate a restore request for the business object data. businessObjectDataKey={}",
            jsonHelper.objectToJson(businessObjectDataRestoreDto.getBusinessObjectDataKey()), e);

        // Update the DTO with the caught exception so the caller can inspect it.
        businessObjectDataRestoreDto.setException(e);
    }
}
Aggregations