Use of org.finra.herd.model.dto.S3FileTransferRequestParamsDto in project herd by FINRAOS.
From the class S3DaoTest, method testDownloadFile.
/**
 * Test that we are able to perform the downloadFile S3Dao operation on S3 using our DAO tier.
 */
@Test
public void testDownloadFile() throws IOException, InterruptedException {
    // Upload a local file to S3, so there is something to download.
    testUploadFile();

    // Clean up the local directory, so we can test the download.
    FileUtils.deleteDirectory(localTempPath.toFile());

    // Create the local temp directory - this also validates that the clean up was really executed.
    Assert.assertTrue(localTempPath.toFile().mkdir());

    // Destination local file.
    File destinationLocalFile = Paths.get(localTempPath.toString(), LOCAL_FILE).toFile();

    // Execute the download.
    S3FileTransferRequestParamsDto s3FileTransferRequestParamsDto = s3DaoTestHelper.getTestS3FileTransferRequestParamsDto();
    s3FileTransferRequestParamsDto.setS3KeyPrefix(TARGET_S3_KEY);
    s3FileTransferRequestParamsDto.setLocalPath(destinationLocalFile.getPath());
    S3FileTransferResultsDto results = s3Dao.downloadFile(s3FileTransferRequestParamsDto);

    // Validate the results.
    Assert.assertTrue(results.getTotalFilesTransferred() == 1L);

    // Validate that we have the file downloaded from S3.
    Assert.assertTrue(destinationLocalFile.isFile());
}
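Outside the test harness, the download needs only a populated S3FileTransferRequestParamsDto and the S3Dao bean. A minimal sketch follows, assuming the same imports as the test above; the bucket name, key, and local path are placeholders, and the DTO is assumed to already carry whatever AWS credentials the environment requires.

// Sketch (not from the herd sources): placeholder bucket, key, and local path.
S3FileTransferRequestParamsDto params = new S3FileTransferRequestParamsDto();
params.setS3BucketName("example-bucket");            // hypothetical bucket name
params.setS3KeyPrefix("example/prefix/report.txt");  // key of the single file to download
params.setLocalPath("/tmp/report.txt");              // local destination path

// downloadFile returns transfer statistics, such as the total number of files transferred.
S3FileTransferResultsDto results = s3Dao.downloadFile(params);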
From the class S3DaoTest, method testListDirectoryAssertIgnoreDirectories.
@Test
public void testListDirectoryAssertIgnoreDirectories() {
    S3Operations originalS3Operations = (S3Operations) ReflectionTestUtils.getField(s3Dao, "s3Operations");
    S3Operations mockS3Operations = mock(S3Operations.class);
    ReflectionTestUtils.setField(s3Dao, "s3Operations", mockS3Operations);

    try {
        String s3BucketName = "s3BucketName";
        String s3KeyPrefix = "s3KeyPrefix";

        S3FileTransferRequestParamsDto s3FileTransferRequestParamsDto = new S3FileTransferRequestParamsDto();
        s3FileTransferRequestParamsDto.setS3BucketName(s3BucketName);
        s3FileTransferRequestParamsDto.setS3KeyPrefix(s3KeyPrefix);
        boolean ignoreZeroByteDirectoryMarkers = true;

        when(mockS3Operations.listObjects(any(), any())).then(new Answer<ObjectListing>() {
            @Override
            public ObjectListing answer(InvocationOnMock invocation) throws Throwable {
                ListObjectsRequest listObjectsRequest = invocation.getArgument(0);
                assertEquals(s3BucketName, listObjectsRequest.getBucketName());
                assertEquals(s3KeyPrefix, listObjectsRequest.getPrefix());

                ObjectListing objectListing = new ObjectListing();
                {
                    S3ObjectSummary s3ObjectSummary = new S3ObjectSummary();
                    s3ObjectSummary.setBucketName(s3BucketName);
                    s3ObjectSummary.setKey("valid/object/key");
                    s3ObjectSummary.setSize(1024L);
                    objectListing.getObjectSummaries().add(s3ObjectSummary);
                }
                {
                    S3ObjectSummary s3ObjectSummary = new S3ObjectSummary();
                    s3ObjectSummary.setBucketName(s3BucketName);
                    s3ObjectSummary.setKey("empty/file");
                    s3ObjectSummary.setSize(0L);
                    objectListing.getObjectSummaries().add(s3ObjectSummary);
                }
                {
                    S3ObjectSummary s3ObjectSummary = new S3ObjectSummary();
                    s3ObjectSummary.setBucketName(s3BucketName);
                    s3ObjectSummary.setKey("directory/path/");
                    s3ObjectSummary.setSize(0L);
                    objectListing.getObjectSummaries().add(s3ObjectSummary);
                }
                // A directory with a non-zero size is impossible, but we have a conditional branch to cover.
                {
                    S3ObjectSummary s3ObjectSummary = new S3ObjectSummary();
                    s3ObjectSummary.setBucketName(s3BucketName);
                    s3ObjectSummary.setKey("another/directory/path/");
                    s3ObjectSummary.setSize(1024L);
                    objectListing.getObjectSummaries().add(s3ObjectSummary);
                }
                return objectListing;
            }
        });

        List<S3ObjectSummary> s3ObjectSummaries = s3Dao.listDirectory(s3FileTransferRequestParamsDto, ignoreZeroByteDirectoryMarkers);
        assertEquals(3, s3ObjectSummaries.size());
        assertEquals("valid/object/key", s3ObjectSummaries.get(0).getKey());
        assertEquals(1024L, s3ObjectSummaries.get(0).getSize());
        assertEquals("empty/file", s3ObjectSummaries.get(1).getKey());
        assertEquals(0L, s3ObjectSummaries.get(1).getSize());
        assertEquals("another/directory/path/", s3ObjectSummaries.get(2).getKey());
        assertEquals(1024L, s3ObjectSummaries.get(2).getSize());
    } finally {
        ReflectionTestUtils.setField(s3Dao, "s3Operations", originalS3Operations);
    }
}
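For readers tracing the assertions: with ignoreZeroByteDirectoryMarkers set to true, only the zero-byte summary whose key ends in a slash ("directory/path/") is dropped, so three of the four staged summaries survive. The sketch below spells out that filtering rule; it is an assumption about the expected semantics, not a copy of the herd implementation.

// Assumed semantics: skip S3 "directory" placeholders, i.e. zero-byte objects whose key ends with "/".
List<S3ObjectSummary> filtered = new ArrayList<>();
for (S3ObjectSummary summary : objectListing.getObjectSummaries()) {
    boolean isDirectoryMarker = summary.getKey().endsWith("/") && summary.getSize() == 0L;
    if (!ignoreZeroByteDirectoryMarkers || !isDirectoryMarker) {
        filtered.add(summary);
    }
}
// For the listing staged in the mock above, this keeps "valid/object/key", "empty/file", and "another/directory/path/".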
From the class S3DaoTest, method testS3FileExists.
/**
 * The method is successful when both the bucket and the key exist.
 */
@Test
public void testS3FileExists() {
    String expectedKey = "foo";
    String expectedValue = "bar";

    ByteArrayInputStream inputStream = new ByteArrayInputStream((expectedKey + "=" + expectedValue).getBytes());
    PutObjectRequest putObjectRequest = new PutObjectRequest(S3_BUCKET_NAME, TARGET_S3_KEY, inputStream, new ObjectMetadata());
    s3Operations.putObject(putObjectRequest, null);

    S3FileTransferRequestParamsDto params = s3DaoTestHelper.getTestS3FileTransferRequestParamsDto();
    params.setS3BucketName(S3_BUCKET_NAME);
    params.setS3KeyPrefix(TARGET_S3_KEY);

    Assert.assertTrue(s3Dao.s3FileExists(params));
}
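The existence check takes the same DTO as the transfer operations, so it can serve as a cheap guard before a download. A minimal sketch with placeholder values; the bucket, key, and local path are not taken from the herd sources, and credentials handling is assumed to be configured on the DTO.

S3FileTransferRequestParamsDto params = new S3FileTransferRequestParamsDto();
params.setS3BucketName("example-bucket");            // hypothetical bucket name
params.setS3KeyPrefix("example/prefix/report.txt");  // hypothetical key

// Only attempt the transfer when the object is actually there.
if (s3Dao.s3FileExists(params)) {
    params.setLocalPath("/tmp/report.txt");
    s3Dao.downloadFile(params);
}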
From the class S3DaoTest, method testGetAWSCredentialsProviderAssertAdditionalProviderIsSet.
/**
 * A case where an additional credentials provider is given in the request params. The credentials returned should be AWS session credentials whose
 * values come from the provided custom credentials provider.
 */
@Test
public void testGetAWSCredentialsProviderAssertAdditionalProviderIsSet() throws Exception {
    S3Operations originalS3Operations = (S3Operations) ReflectionTestUtils.getField(s3Dao, "s3Operations");
    S3Operations mockS3Operations = mock(S3Operations.class);
    ReflectionTestUtils.setField(s3Dao, "s3Operations", mockS3Operations);

    try {
        String s3BucketName = "s3BucketName";
        String s3KeyPrefix = "s3KeyPrefix";
        String awsAccessKey = "awsAccessKey";
        String awsSecretKey = "awsSecretKey";
        String awsSessionToken = "awsSessionToken";

        S3FileTransferRequestParamsDto s3FileTransferRequestParamsDto = new S3FileTransferRequestParamsDto();
        s3FileTransferRequestParamsDto.setS3BucketName(s3BucketName);
        s3FileTransferRequestParamsDto.setS3KeyPrefix(s3KeyPrefix);
        s3FileTransferRequestParamsDto.setAdditionalAwsCredentialsProviders(Arrays.asList(new HerdAWSCredentialsProvider() {
            @Override
            public AwsCredential getAwsCredential() {
                return new AwsCredential(awsAccessKey, awsSecretKey, awsSessionToken, null);
            }
        }));

        when(mockS3Operations.putObject(any(), any())).then(new Answer<PutObjectResult>() {
            @SuppressWarnings("unchecked")
            @Override
            public PutObjectResult answer(InvocationOnMock invocation) throws Throwable {
                AmazonS3Client amazonS3Client = invocation.getArgument(1);
                AWSCredentialsProviderChain awsCredentialsProviderChain =
                    (AWSCredentialsProviderChain) ReflectionTestUtils.getField(amazonS3Client, "awsCredentialsProvider");
                List<AWSCredentialsProvider> credentialsProviders =
                    (List<AWSCredentialsProvider>) ReflectionTestUtils.getField(awsCredentialsProviderChain, "credentialsProviders");
                assertEquals(2, credentialsProviders.size());

                // refresh() does nothing, but gives code coverage.
                credentialsProviders.get(0).refresh();

                /*
                 * We can't inspect the field directly since the class definition is private.
                 * Instead we call the getCredentials() and verify that it returns the credentials staged as part of this test.
                 */
                AWSCredentials credentials = awsCredentialsProviderChain.getCredentials();
                assertEquals(BasicSessionCredentials.class, credentials.getClass());
                BasicSessionCredentials basicSessionCredentials = (BasicSessionCredentials) credentials;
                assertEquals(awsAccessKey, basicSessionCredentials.getAWSAccessKeyId());
                assertEquals(awsSecretKey, basicSessionCredentials.getAWSSecretKey());
                assertEquals(awsSessionToken, basicSessionCredentials.getSessionToken());

                return new PutObjectResult();
            }
        });

        s3Dao.createDirectory(s3FileTransferRequestParamsDto);
    } finally {
        ReflectionTestUtils.setField(s3Dao, "s3Operations", originalS3Operations);
    }
}
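The same wiring works outside a test: attaching a HerdAWSCredentialsProvider to the DTO is what makes the session credentials visible to whichever S3Dao operation is invoked. A minimal sketch, where the bucket, key prefix, access key, secret key, and session token are placeholders (for example, values obtained from an STS call) and the trailing null mirrors the AwsCredential usage in the test above.

S3FileTransferRequestParamsDto params = new S3FileTransferRequestParamsDto();
params.setS3BucketName("example-bucket");   // hypothetical bucket name
params.setS3KeyPrefix("example/prefix/");   // hypothetical key prefix
params.setAdditionalAwsCredentialsProviders(Arrays.asList(new HerdAWSCredentialsProvider() {
    @Override
    public AwsCredential getAwsCredential() {
        // Placeholder credentials; supply real temporary credentials here.
        return new AwsCredential("ACCESS_KEY", "SECRET_KEY", "SESSION_TOKEN", null);
    }
}));

s3Dao.createDirectory(params);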
From the class S3DaoTest, method testValidateS3FileObjectNotFoundException.
@Test(expected = ObjectNotFoundException.class)
public void testValidateS3FileObjectNotFoundException() throws IOException, InterruptedException {
    // Try to validate a non-existing S3 file.
    S3FileTransferRequestParamsDto s3FileTransferRequestParamsDto = s3DaoTestHelper.getTestS3FileTransferRequestParamsDto();
    s3FileTransferRequestParamsDto.setS3KeyPrefix(TARGET_S3_KEY);
    s3Dao.validateS3File(s3FileTransferRequestParamsDto, FILE_SIZE_1_KB);
}
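Callers that do not want the exception to propagate can catch it instead. A hedged sketch, reusing a DTO prepared the same way as in the test and assuming any checked exceptions declared by validateS3File are propagated by the enclosing method.

try {
    s3Dao.validateS3File(s3FileTransferRequestParamsDto, FILE_SIZE_1_KB);
} catch (ObjectNotFoundException e) {
    // No file was found at the configured bucket and key prefix; handle the missing file here.
}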