Use of com.talend.shaded.com.amazonaws.services.s3.model.ObjectMetadata in project herd by FINRAOS.
In class S3DaoTest, method testPrepareMetadataAssertSetKmsHeaders.
@Test
public void testPrepareMetadataAssertSetKmsHeaders() {
    S3Operations originalS3Operations = (S3Operations) ReflectionTestUtils.getField(s3Dao, "s3Operations");
    S3Operations mockS3Operations = mock(S3Operations.class);
    ReflectionTestUtils.setField(s3Dao, "s3Operations", mockS3Operations);
    try {
        String s3BucketName = "s3BucketName";
        String s3KeyPrefix = "s3KeyPrefix";
        String kmsKeyId = "kmsKeyId";
        S3FileTransferRequestParamsDto s3FileTransferRequestParamsDto = new S3FileTransferRequestParamsDto();
        s3FileTransferRequestParamsDto.setS3BucketName(s3BucketName);
        s3FileTransferRequestParamsDto.setS3KeyPrefix(s3KeyPrefix);
        s3FileTransferRequestParamsDto.setKmsKeyId(kmsKeyId);
        when(mockS3Operations.putObject(any(), any())).then(new Answer<PutObjectResult>() {

            @Override
            public PutObjectResult answer(InvocationOnMock invocation) throws Throwable {
                PutObjectRequest putObjectRequest = invocation.getArgument(0);
                ObjectMetadata metadata = putObjectRequest.getMetadata();
                assertEquals("aws:kms", metadata.getSSEAlgorithm());
                assertEquals(kmsKeyId, metadata.getRawMetadata().get(Headers.SERVER_SIDE_ENCRYPTION_AWS_KMS_KEYID));
                return new PutObjectResult();
            }
        });
        s3Dao.createDirectory(s3FileTransferRequestParamsDto);
    } finally {
        ReflectionTestUtils.setField(s3Dao, "s3Operations", originalS3Operations);
    }
}
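The assertions above expect the DAO to have populated SSE-KMS settings on the request's ObjectMetadata. A minimal sketch of how such headers can be set with the AWS SDK v1 ObjectMetadata API, reusing the test's s3BucketName, s3KeyPrefix and kmsKeyId variables; this is an illustrative assumption, not necessarily herd's exact implementation:

// Illustrative sketch: populate SSE-KMS settings on ObjectMetadata before a putObject call.
ObjectMetadata metadata = new ObjectMetadata();
// Sets the x-amz-server-side-encryption header to "aws:kms".
metadata.setSSEAlgorithm(SSEAlgorithm.KMS.getAlgorithm());
// Sets the x-amz-server-side-encryption-aws-kms-key-id header to the configured KMS key.
metadata.setHeader(Headers.SERVER_SIDE_ENCRYPTION_AWS_KMS_KEYID, kmsKeyId);
metadata.setContentLength(0L);
// A zero-byte object whose key ends with "/" is a common way to model the "directory" being created (assumed here).
PutObjectRequest request = new PutObjectRequest(s3BucketName, s3KeyPrefix + "/", new ByteArrayInputStream(new byte[0]), metadata);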
Use of com.talend.shaded.com.amazonaws.services.s3.model.ObjectMetadata in project herd by FINRAOS.
In class S3DaoTest, method testTagObjectsTargetTagKeyAlreadyExists.
@Test
public void testTagObjectsTargetTagKeyAlreadyExists() {
    // Create two S3 object tags having the same tag key.
    List<Tag> tags = Arrays.asList(new Tag(S3_OBJECT_TAG_KEY, S3_OBJECT_TAG_VALUE), new Tag(S3_OBJECT_TAG_KEY, S3_OBJECT_TAG_VALUE_2));
    // Put a file in S3 that is already tagged with the first S3 object tag.
    PutObjectRequest putObjectRequest = new PutObjectRequest(S3_BUCKET_NAME, TARGET_S3_KEY, new ByteArrayInputStream(new byte[1]), new ObjectMetadata());
    putObjectRequest.setTagging(new ObjectTagging(Arrays.asList(tags.get(0))));
    s3Operations.putObject(putObjectRequest, null);
    // Validate that the S3 object is tagged with the first tag.
    GetObjectTaggingResult getObjectTaggingResult = s3Operations.getObjectTagging(new GetObjectTaggingRequest(S3_BUCKET_NAME, TARGET_S3_KEY), null);
    assertEquals(Arrays.asList(tags.get(0)), getObjectTaggingResult.getTagSet());
    // Tag the S3 file with the second S3 object tag.
    S3FileTransferRequestParamsDto params = new S3FileTransferRequestParamsDto();
    params.setS3BucketName(S3_BUCKET_NAME);
    params.setFiles(Arrays.asList(new File(TARGET_S3_KEY)));
    s3Dao.tagObjects(params, new S3FileTransferRequestParamsDto(), tags.get(1));
    // Validate that the S3 object is now tagged with the second tag.
    getObjectTaggingResult = s3Operations.getObjectTagging(new GetObjectTaggingRequest(S3_BUCKET_NAME, TARGET_S3_KEY), null);
    assertEquals(Arrays.asList(tags.get(1)), getObjectTaggingResult.getTagSet());
}
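The test verifies that tagObjects replaces an existing tag that shares the new tag's key. A minimal sketch of how a tag set can be overwritten with the plain AWS SDK v1 tagging API, assuming an AmazonS3 client named amazonS3 and bucketName/key/newTag variables; herd itself routes these calls through its S3Operations abstraction:

// Illustrative sketch: replace any existing tag with the same key, then write the tag set back.
GetObjectTaggingResult currentTagging = amazonS3.getObjectTagging(new GetObjectTaggingRequest(bucketName, key));
List<Tag> updatedTags = new ArrayList<>();
for (Tag existingTag : currentTagging.getTagSet()) {
    if (!existingTag.getKey().equals(newTag.getKey())) {
        updatedTags.add(existingTag);   // keep tags with other keys
    }
}
updatedTags.add(newTag);                // the new value wins for the duplicate key
amazonS3.setObjectTagging(new SetObjectTaggingRequest(bucketName, key, new ObjectTagging(updatedTags)));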
Use of com.talend.shaded.com.amazonaws.services.s3.model.ObjectMetadata in project herd by FINRAOS.
In class S3DaoTest, method testRestoreObjectsAmazonServiceException.
@Test
public void testRestoreObjectsAmazonServiceException() {
    // Build a mock file path that triggers an Amazon service exception when we request to restore an object.
    String testKey = String.format("%s/%s", TEST_S3_KEY_PREFIX, MockS3OperationsImpl.MOCK_S3_FILE_NAME_SERVICE_EXCEPTION);
    // Put a 1 byte Glacier storage class file in S3.
    ObjectMetadata metadata = new ObjectMetadata();
    metadata.setHeader(Headers.STORAGE_CLASS, StorageClass.Glacier);
    metadata.setOngoingRestore(false);
    s3Operations.putObject(new PutObjectRequest(storageDaoTestHelper.getS3ManagedBucketName(), testKey, new ByteArrayInputStream(new byte[1]), metadata), null);
    // Try to initiate a restore request for a mocked S3 file that would trigger an Amazon service exception when we request to restore an object.
    try {
        S3FileTransferRequestParamsDto params = new S3FileTransferRequestParamsDto();
        params.setS3BucketName(storageDaoTestHelper.getS3ManagedBucketName());
        params.setFiles(Arrays.asList(new File(testKey)));
        s3Dao.restoreObjects(params, S3_RESTORE_OBJECT_EXPIRATION_IN_DAYS);
        fail("Should throw an IllegalStateException when an S3 restore object operation fails.");
    } catch (IllegalStateException e) {
        assertEquals(String.format("Failed to initiate a restore request for \"%s\" key in \"%s\" bucket. " + "Reason: InternalError (Service: null; Status Code: 0; Error Code: InternalError; Request ID: null)", testKey, storageDaoTestHelper.getS3ManagedBucketName()), e.getMessage());
    }
}
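The test expects restoreObjects to convert an AmazonServiceException from the restore call into the IllegalStateException asserted above. A minimal sketch of a single-key restore with the AWS SDK v1, assuming an AmazonS3 client named amazonS3 and bucketName/key/expirationInDays variables; the message format is taken from the assertion, everything else is illustrative:

// Illustrative sketch: initiate a Glacier restore and surface a service failure as IllegalStateException.
RestoreObjectRequest restoreObjectRequest = new RestoreObjectRequest(bucketName, key, expirationInDays);
try {
    amazonS3.restoreObject(restoreObjectRequest);
} catch (AmazonServiceException e) {
    throw new IllegalStateException(String.format("Failed to initiate a restore request for \"%s\" key in \"%s\" bucket. Reason: %s",
        key, bucketName, e.getMessage()), e);
}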
Use of com.talend.shaded.com.amazonaws.services.s3.model.ObjectMetadata in project herd by FINRAOS.
In class S3DaoTest, method testValidateGlacierS3FilesRestoredAmazonServiceException.
@Test
public void testValidateGlacierS3FilesRestoredAmazonServiceException() {
    // Build a mock file path that triggers an Amazon service exception when we request S3 metadata for the object.
    String testKey = String.format("%s/%s", TEST_S3_KEY_PREFIX, MockS3OperationsImpl.MOCK_S3_FILE_NAME_SERVICE_EXCEPTION);
    // Put a 1 byte Glacier storage class file in S3.
    ObjectMetadata metadata = new ObjectMetadata();
    metadata.setHeader(Headers.STORAGE_CLASS, StorageClass.Glacier);
    metadata.setOngoingRestore(false);
    s3Operations.putObject(new PutObjectRequest(storageDaoTestHelper.getS3ManagedBucketName(), testKey, new ByteArrayInputStream(new byte[1]), metadata), null);
    // Try to validate restore status for a mocked S3 file that triggers an Amazon service exception when we request S3 metadata for the object.
    try {
        S3FileTransferRequestParamsDto params = new S3FileTransferRequestParamsDto();
        params.setS3BucketName(storageDaoTestHelper.getS3ManagedBucketName());
        params.setFiles(Arrays.asList(new File(testKey)));
        s3Dao.validateGlacierS3FilesRestored(params);
        fail("Should throw an IllegalStateException when Glacier S3 object validation fails due to an Amazon service exception.");
    } catch (IllegalStateException e) {
        assertEquals(String.format("Fail to check restore status for \"%s\" key in \"%s\" bucket. " + "Reason: InternalError (Service: null; Status Code: 0; Error Code: InternalError; Request ID: null)", testKey, storageDaoTestHelper.getS3ManagedBucketName()), e.getMessage());
    }
}
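validateGlacierS3FilesRestored checks each object's restore state through its S3 metadata, which is why the mocked metadata lookup failure surfaces here. A minimal sketch of that check with the AWS SDK v1 ObjectMetadata API, assuming an AmazonS3 client named amazonS3 and bucketName/key variables; illustrative only, not herd's exact code:

// Illustrative sketch: inspect restore state through ObjectMetadata.
ObjectMetadata objectMetadata = amazonS3.getObjectMetadata(bucketName, key);
// getOngoingRestore() is null when no restore was ever requested, true while a restore is in flight,
// and false once the temporary copy is available.
Boolean ongoingRestore = objectMetadata.getOngoingRestore();
boolean restored = Boolean.FALSE.equals(ongoingRestore) && objectMetadata.getRestoreExpirationTime() != null;
if (!restored) {
    throw new IllegalStateException(String.format("Object \"%s\" in bucket \"%s\" is not restored yet.", key, bucketName));
}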
Use of com.talend.shaded.com.amazonaws.services.s3.model.ObjectMetadata in project herd by FINRAOS.
In class S3DaoTest, method testGetProperties.
/**
 * The method is successful when both the bucket and the key exist.
 */
@Test
public void testGetProperties() {
    String expectedKey = "foo";
    String expectedValue = "bar";
    ByteArrayInputStream inputStream = new ByteArrayInputStream((expectedKey + "=" + expectedValue).getBytes());
    PutObjectRequest putObjectRequest = new PutObjectRequest(S3_BUCKET_NAME, TARGET_S3_KEY, inputStream, new ObjectMetadata());
    s3Operations.putObject(putObjectRequest, null);
    Properties properties = s3Dao.getProperties(S3_BUCKET_NAME, TARGET_S3_KEY, s3DaoTestHelper.getTestS3FileTransferRequestParamsDto());
    Assert.assertEquals("properties key '" + expectedKey + "'", expectedValue, properties.get(expectedKey));
}
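getProperties reads the object's content and parses it as a java.util.Properties file, which is why the "foo=bar" payload round-trips. A minimal sketch of that parse with the AWS SDK v1 client, assuming an AmazonS3 client named amazonS3 and bucketName/key variables; herd performs this through its S3Operations layer:

// Illustrative sketch: load java.util.Properties from an S3 object's content stream.
S3Object s3Object = amazonS3.getObject(new GetObjectRequest(bucketName, key));
Properties properties = new Properties();
try (InputStream objectContent = s3Object.getObjectContent()) {
    properties.load(objectContent);   // parses "key=value" lines such as "foo=bar"
}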