use of com.amazonaws.services.s3.model.MultipartUpload in project herd by FINRAOS.
the class MockS3OperationsImpl method getMultipartUploadListing.
/**
 * <p>Returns a mock {@link MultipartUploadListing}.</p>
 * <p>The returned object has the following properties:
 * <dl>
 * <dt>multipartUploads</dt> <dd>A list of length 3.</dd>
 * <dt>multipartUploads[0].initiated</dt> <dd>5 minutes prior to the object creation time.</dd>
 * <dt>multipartUploads[1].initiated</dt> <dd>15 minutes prior to the object creation time.</dd>
 * <dt>multipartUploads[2].initiated</dt> <dd>20 minutes prior to the object creation time.</dd>
 * </dl>
 * All other properties are set to the defaults defined by the {@link MultipartUploadListing} constructor.</p>
 *
 * @return a mock object
 */
private MultipartUploadListing getMultipartUploadListing() {
// Return 3 multipart uploads with 2 of them started more than 10 minutes ago.
MultipartUploadListing multipartUploadListing = new MultipartUploadListing();
List<MultipartUpload> multipartUploads = new ArrayList<>();
multipartUploadListing.setMultipartUploads(multipartUploads);
Date now = new Date();
multipartUploads.add(getMultipartUpload(HerdDateUtils.addMinutes(now, -5)));
multipartUploads.add(getMultipartUpload(HerdDateUtils.addMinutes(now, -15)));
multipartUploads.add(getMultipartUpload(HerdDateUtils.addMinutes(now, -20)));
return multipartUploadListing;
}
use of com.amazonaws.services.s3.model.MultipartUpload in project herd by FINRAOS.
the class MockS3OperationsImpl method getMultipartUpload.
/**
* Creates and returns a mock {@link MultipartUpload} with the given initiated timestamp.
*
* @param initiated the initiated timestamp to set on the multipart upload
*
* @return the mock object
*/
private MultipartUpload getMultipartUpload(Date initiated) {
MultipartUpload multipartUpload = new MultipartUpload();
multipartUpload.setInitiated(initiated);
return multipartUpload;
}
use of com.amazonaws.services.s3.model.MultipartUpload in project herd by FINRAOS.
the class S3DaoTest method testAbortMultipartUploadsAssertAbortOnlyBeforeThreshold.
@Test
public void testAbortMultipartUploadsAssertAbortOnlyBeforeThreshold() {
S3Operations originalS3Operations = (S3Operations) ReflectionTestUtils.getField(s3Dao, "s3Operations");
S3Operations mockS3Operations = mock(S3Operations.class);
ReflectionTestUtils.setField(s3Dao, "s3Operations", mockS3Operations);
try {
String s3BucketName = "s3BucketName";
String uploadKey = "uploadKey1";
String uploadId = "uploadId1";
Date uploadInitiated = new Date(0);
S3FileTransferRequestParamsDto s3FileTransferRequestParamsDto = new S3FileTransferRequestParamsDto();
s3FileTransferRequestParamsDto.setS3BucketName(s3BucketName);
Date thresholdDate = new Date(1);
when(mockS3Operations.listMultipartUploads(any(), any())).then(new Answer<MultipartUploadListing>() {
@Override
public MultipartUploadListing answer(InvocationOnMock invocation) throws Throwable {
ListMultipartUploadsRequest listMultipartUploadsRequest = invocation.getArgument(0);
assertEquals(s3BucketName, listMultipartUploadsRequest.getBucketName());
MultipartUploadListing multipartUploadListing = new MultipartUploadListing();
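// This upload is expected to be aborted since its initiated date is before the threshold date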
{
MultipartUpload multipartUpload = new MultipartUpload();
multipartUpload.setUploadId(uploadId);
multipartUpload.setKey(uploadKey);
multipartUpload.setInitiated(uploadInitiated);
multipartUploadListing.getMultipartUploads().add(multipartUpload);
}
// This upload is not aborted since the initiated date is greater than the threshold
{
MultipartUpload multipartUpload = new MultipartUpload();
multipartUpload.setUploadId("uploadId2");
multipartUpload.setKey("uploadKey2");
multipartUpload.setInitiated(new Date(2));
multipartUploadListing.getMultipartUploads().add(multipartUpload);
}
return multipartUploadListing;
}
});
assertEquals(1, s3Dao.abortMultipartUploads(s3FileTransferRequestParamsDto, thresholdDate));
verify(mockS3Operations).listMultipartUploads(any(), any());
/*
* Assert that S3Operations.abortMultipartUpload is called exactly ONCE with arguments matching the given ArgumentMatcher
*/
verify(mockS3Operations).abortMultipartUpload(argThat(argument ->
    Objects.equal(s3BucketName, argument.getBucketName())
        && Objects.equal(uploadKey, argument.getKey())
        && Objects.equal(uploadId, argument.getUploadId())), any());
// Assert that no other interactions occur with the mock
verifyNoMoreInteractions(mockS3Operations);
} finally {
ReflectionTestUtils.setField(s3Dao, "s3Operations", originalS3Operations);
}
}
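The rule this test pins down (abort only those uploads whose initiated date falls before the threshold) can be sketched against the v1 AmazonS3 client directly. The class and method names below, MultipartUploadCleaner and abortUploadsOlderThan, are illustrative assumptions rather than herd's actual S3Dao implementation, which goes through its S3Operations abstraction instead.
import java.util.Date;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.model.AbortMultipartUploadRequest;
import com.amazonaws.services.s3.model.MultipartUpload;
import com.amazonaws.services.s3.model.MultipartUploadListing;

// Hypothetical helper, not herd code: aborts every upload in the listing initiated strictly
// before the threshold date and returns how many uploads were aborted.
final class MultipartUploadCleaner {
    static int abortUploadsOlderThan(AmazonS3 s3, String bucketName, MultipartUploadListing listing, Date thresholdDate) {
        int abortedCount = 0;
        for (MultipartUpload upload : listing.getMultipartUploads()) {
            if (upload.getInitiated().before(thresholdDate)) {
                s3.abortMultipartUpload(new AbortMultipartUploadRequest(bucketName, upload.getKey(), upload.getUploadId()));
                abortedCount++;
            }
        }
        return abortedCount;
    }
}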
use of com.amazonaws.services.s3.model.MultipartUpload in project herd by FINRAOS.
the class S3DaoTest method testAbortMultipartUploadsAssertTruncatedResult.
@Test
public void testAbortMultipartUploadsAssertTruncatedResult() {
S3Operations originalS3Operations = (S3Operations) ReflectionTestUtils.getField(s3Dao, "s3Operations");
S3Operations mockS3Operations = mock(S3Operations.class);
ReflectionTestUtils.setField(s3Dao, "s3Operations", mockS3Operations);
try {
String s3BucketName = "s3BucketName";
String uploadKey = "uploadKey";
String uploadId = "uploadId";
Date uploadInitiated = new Date(0);
S3FileTransferRequestParamsDto s3FileTransferRequestParamsDto = new S3FileTransferRequestParamsDto();
s3FileTransferRequestParamsDto.setS3BucketName(s3BucketName);
Date thresholdDate = new Date(1);
when(mockS3Operations.listMultipartUploads(any(), any())).then(new Answer<MultipartUploadListing>() {
@Override
public MultipartUploadListing answer(InvocationOnMock invocation) throws Throwable {
ListMultipartUploadsRequest listMultipartUploadsRequest = invocation.getArgument(0);
String keyMarker = listMultipartUploadsRequest.getKeyMarker();
String uploadIdMarker = listMultipartUploadsRequest.getUploadIdMarker();
MultipartUploadListing multipartUploadListing = new MultipartUploadListing();
if (keyMarker == null || uploadIdMarker == null) {
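// First call: no markers were supplied, so return a truncated listing that points at the next page and contains no uploads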
multipartUploadListing.setNextKeyMarker("nextKeyMarker");
multipartUploadListing.setNextUploadIdMarker("nextUploadIdMarker");
multipartUploadListing.setTruncated(true);
} else {
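// Second call: the markers from the first page are carried over, and a single upload older than the threshold is returned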
assertEquals("nextKeyMarker", keyMarker);
assertEquals("nextUploadIdMarker", uploadIdMarker);
MultipartUpload multipartUpload = new MultipartUpload();
multipartUpload.setUploadId(uploadId);
multipartUpload.setKey(uploadKey);
multipartUpload.setInitiated(uploadInitiated);
multipartUploadListing.getMultipartUploads().add(multipartUpload);
}
return multipartUploadListing;
}
});
assertEquals(1, s3Dao.abortMultipartUploads(s3FileTransferRequestParamsDto, thresholdDate));
// Assert listMultipartUploads() is called twice due to truncation
verify(mockS3Operations, times(2)).listMultipartUploads(any(), any());
/*
* Assert that S3Operations.abortMultipartUpload is called exactly ONCE with arguments matching the given ArgumentMatcher
*/
verify(mockS3Operations).abortMultipartUpload(argThat(argument ->
    Objects.equal(s3BucketName, argument.getBucketName())
        && Objects.equal(uploadKey, argument.getKey())
        && Objects.equal(uploadId, argument.getUploadId())), any());
// Assert that no other interactions occur with the mock
verifyNoMoreInteractions(mockS3Operations);
} finally {
ReflectionTestUtils.setField(s3Dao, "s3Operations", originalS3Operations);
}
}
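The truncation handling exercised above, re-issuing listMultipartUploads with the returned next key marker and next upload ID marker until the listing is no longer truncated, might look roughly like the loop below. It reuses the hypothetical MultipartUploadCleaner helper from the earlier sketch and again talks to the v1 AmazonS3 client directly, so it illustrates the paging contract rather than herd's actual code.
import java.util.Date;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.model.ListMultipartUploadsRequest;
import com.amazonaws.services.s3.model.MultipartUploadListing;

// Hypothetical helper, not herd code: walks every page of in-progress multipart uploads for the
// bucket and aborts the ones initiated before the threshold date, returning the total aborted.
final class MultipartUploadPager {
    static int abortAllUploadsOlderThan(AmazonS3 s3, String bucketName, Date thresholdDate) {
        int abortedCount = 0;
        ListMultipartUploadsRequest request = new ListMultipartUploadsRequest(bucketName);
        MultipartUploadListing listing;
        do {
            listing = s3.listMultipartUploads(request);
            abortedCount += MultipartUploadCleaner.abortUploadsOlderThan(s3, bucketName, listing, thresholdDate);
            // Carry the markers forward so the next request continues where this page ended.
            request.setKeyMarker(listing.getNextKeyMarker());
            request.setUploadIdMarker(listing.getNextUploadIdMarker());
        } while (listing.isTruncated());
        return abortedCount;
    }
}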
use of software.amazon.awssdk.services.s3.model.MultipartUpload in project beam by apache.
the class S3FileSystem method multipartCopy.
@VisibleForTesting
CompleteMultipartUploadResponse multipartCopy(S3ResourceId sourcePath, S3ResourceId destinationPath, HeadObjectResponse sourceObjectHead) throws SdkServiceException {
CreateMultipartUploadRequest initiateUploadRequest = CreateMultipartUploadRequest.builder()
    .bucket(destinationPath.getBucket())
    .key(destinationPath.getKey())
    .storageClass(config.getS3StorageClass())
    .metadata(sourceObjectHead.metadata())
    .serverSideEncryption(config.getSSEAlgorithm())
    .ssekmsKeyId(config.getSSEKMSKeyId())
    .sseCustomerKey(config.getSSECustomerKey().getKey())
    .sseCustomerAlgorithm(config.getSSECustomerKey().getAlgorithm())
    .build();
CreateMultipartUploadResponse createMultipartUploadResponse = s3Client.get().createMultipartUpload(initiateUploadRequest);
final String uploadId = createMultipartUploadResponse.uploadId();
List<CompletedPart> completedParts = new ArrayList<>();
final long objectSize = sourceObjectHead.contentLength();
CopyPartResult copyPartResult;
CompletedPart completedPart;
// Extra handling in case a caller invokes S3FileSystem.multipartCopy directly
// without going through S3FileSystem.copy in the future.
if (objectSize == 0) {
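// A zero-byte source has no valid byte range to copy, so the whole (empty) object is copied as a single part without a copySourceRange.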
// The part copy must address the destination bucket/key that owns the upload ID created above.
final UploadPartCopyRequest uploadPartCopyRequest = UploadPartCopyRequest.builder()
    .bucket(destinationPath.getBucket())
    .key(destinationPath.getKey())
    .copySource(sourcePath.getBucket() + "/" + sourcePath.getKey())
    .uploadId(uploadId)
    .partNumber(1)
    .sseCustomerKey(config.getSSECustomerKey().getKey())
    .sseCustomerAlgorithm(config.getSSECustomerKey().getAlgorithm())
    .copySourceSSECustomerKey(config.getSSECustomerKey().getKey())
    .copySourceSSECustomerAlgorithm(config.getSSECustomerKey().getAlgorithm())
    .build();
copyPartResult = s3Client.get().uploadPartCopy(uploadPartCopyRequest).copyPartResult();
completedPart = CompletedPart.builder().partNumber(1).eTag(copyPartResult.eTag()).build();
completedParts.add(completedPart);
} else {
long bytePosition = 0;
Integer uploadBufferSizeBytes = config.getS3UploadBufferSizeBytes();
// Amazon parts are 1-indexed, not zero-indexed.
for (int partNumber = 1; bytePosition < objectSize; partNumber++) {
final UploadPartCopyRequest uploadPartCopyRequest = UploadPartCopyRequest.builder()
    .bucket(destinationPath.getBucket())
    .key(destinationPath.getKey())
    .copySource(sourcePath.getBucket() + "/" + sourcePath.getKey())
    .uploadId(uploadId)
    .partNumber(partNumber)
    .copySourceRange(String.format("bytes=%s-%s", bytePosition, Math.min(objectSize - 1, bytePosition + uploadBufferSizeBytes - 1)))
    .sseCustomerKey(config.getSSECustomerKey().getKey())
    .sseCustomerAlgorithm(config.getSSECustomerKey().getAlgorithm())
    .copySourceSSECustomerKey(config.getSSECustomerKey().getKey())
    .copySourceSSECustomerAlgorithm(config.getSSECustomerKey().getAlgorithm())
    .build();
copyPartResult = s3Client.get().uploadPartCopy(uploadPartCopyRequest).copyPartResult();
completedPart = CompletedPart.builder().partNumber(partNumber).eTag(copyPartResult.eTag()).build();
completedParts.add(completedPart);
bytePosition += uploadBufferSizeBytes;
}
}
CompletedMultipartUpload completedMultipartUpload = CompletedMultipartUpload.builder().parts(completedParts).build();
CompleteMultipartUploadRequest completeUploadRequest = CompleteMultipartUploadRequest.builder()
    .bucket(destinationPath.getBucket())
    .key(destinationPath.getKey())
    .uploadId(uploadId)
    .multipartUpload(completedMultipartUpload)
    .build();
return s3Client.get().completeMultipartUpload(completeUploadRequest);
}
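As a usage sketch, a caller first fetches the source object's metadata with headObject and then passes it to multipartCopy along with the two resource IDs. Since multipartCopy is package-private, the snippet below assumes it runs inside the same Beam S3 package (for example, from a test); the copyViaMultipart name and the way the S3ResourceId arguments are obtained are placeholders, not Beam's own API.
import software.amazon.awssdk.services.s3.S3Client;
import software.amazon.awssdk.services.s3.model.CompleteMultipartUploadResponse;
import software.amazon.awssdk.services.s3.model.HeadObjectRequest;
import software.amazon.awssdk.services.s3.model.HeadObjectResponse;

// Hypothetical caller: looks up the source object's size and metadata, then delegates the copy
// to the multipartCopy method shown above.
CompleteMultipartUploadResponse copyViaMultipart(S3Client s3, S3FileSystem fileSystem, S3ResourceId sourcePath, S3ResourceId destinationPath) {
    HeadObjectResponse sourceObjectHead = s3.headObject(
        HeadObjectRequest.builder().bucket(sourcePath.getBucket()).key(sourcePath.getKey()).build());
    return fileSystem.multipartCopy(sourcePath, destinationPath, sourceObjectHead);
}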