use of com.amazonaws.services.s3.model.MultipartUploadListing in project nifi by apache.
the class PutS3Object method getS3AgeoffListAndAgeoffLocalState.
protected MultipartUploadListing getS3AgeoffListAndAgeoffLocalState(final ProcessContext context, final AmazonS3Client s3, final long now) {
    final long ageoff_interval = context.getProperty(MULTIPART_S3_AGEOFF_INTERVAL).asTimePeriod(TimeUnit.MILLISECONDS);
    final String bucket = context.getProperty(BUCKET).evaluateAttributeExpressions().getValue();
    final Long maxAge = context.getProperty(MULTIPART_S3_MAX_AGE).asTimePeriod(TimeUnit.MILLISECONDS);
    final long ageCutoff = now - maxAge;
    final List<MultipartUpload> ageoffList = new ArrayList<>();
    if ((lastS3AgeOff.get() < now - ageoff_interval) && s3BucketLock.tryLock()) {
        try {
            ListMultipartUploadsRequest listRequest = new ListMultipartUploadsRequest(bucket);
            MultipartUploadListing listing = s3.listMultipartUploads(listRequest);
            for (MultipartUpload upload : listing.getMultipartUploads()) {
                long uploadTime = upload.getInitiated().getTime();
                if (uploadTime < ageCutoff) {
                    ageoffList.add(upload);
                }
            }
            // ageoff any local state
            ageoffLocalState(ageCutoff);
            lastS3AgeOff.set(System.currentTimeMillis());
        } catch (AmazonClientException e) {
            if (e instanceof AmazonS3Exception && ((AmazonS3Exception) e).getStatusCode() == 403 && ((AmazonS3Exception) e).getErrorCode().equals("AccessDenied")) {
                getLogger().warn("AccessDenied checking S3 Multipart Upload list for {}: {} "
                        + "** The configured user does not have the s3:ListBucketMultipartUploads permission "
                        + "for this bucket, S3 ageoff cannot occur without this permission. Next ageoff check "
                        + "time is being advanced by interval to prevent checking on every upload **",
                        new Object[] { bucket, e.getMessage() });
                lastS3AgeOff.set(System.currentTimeMillis());
            } else {
                getLogger().error("Error checking S3 Multipart Upload list for {}: {}", new Object[] { bucket, e.getMessage() });
            }
        } finally {
            s3BucketLock.unlock();
        }
    }
    MultipartUploadListing result = new MultipartUploadListing();
    result.setBucketName(bucket);
    result.setMultipartUploads(ageoffList);
    return result;
}
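The listing returned here only reports the aged-off uploads; aborting them happens elsewhere in the processor. As a rough illustration of how such a listing can be consumed, here is a minimal sketch; the helper name and the direct abort calls are assumptions, not part of the snippet above (AbortMultipartUploadRequest is the SDK's abort request type):
protected void abortAgedOffUploads(final AmazonS3Client s3, final MultipartUploadListing ageoffListing) {
    // Sketch only: abort every upload that the age-off listing flagged as older than the cutoff.
    for (MultipartUpload upload : ageoffListing.getMultipartUploads()) {
        s3.abortMultipartUpload(new AbortMultipartUploadRequest(ageoffListing.getBucketName(), upload.getKey(), upload.getUploadId()));
    }
}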
use of com.amazonaws.services.s3.model.MultipartUploadListing in project herd by FINRAOS.
the class MockS3OperationsImpl method getMultipartUploadListing.
/**
 * <p>Returns a mock {@link MultipartUploadListing}.</p>
 * <p>The returned object has the following properties:</p>
 * <dl>
 *     <dt>multipartUploads</dt> <dd>List of length 3.</dd>
 *     <dt>multipartUploads[0].initiated</dt> <dd>5 minutes prior to the object creation time.</dd>
 *     <dt>multipartUploads[1].initiated</dt> <dd>15 minutes prior to the object creation time.</dd>
 *     <dt>multipartUploads[2].initiated</dt> <dd>20 minutes prior to the object creation time.</dd>
 * </dl>
 * <p>All other properties are set to the defaults defined by the {@link MultipartUploadListing} constructor.</p>
 *
 * @return a mock object
 */
private MultipartUploadListing getMultipartUploadListing() {
    // Return 3 multipart uploads with 2 of them started more than 10 minutes ago.
    MultipartUploadListing multipartUploadListing = new MultipartUploadListing();
    List<MultipartUpload> multipartUploads = new ArrayList<>();
    multipartUploadListing.setMultipartUploads(multipartUploads);
    Date now = new Date();
    multipartUploads.add(getMultipartUpload(HerdDateUtils.addMinutes(now, -5)));
    multipartUploads.add(getMultipartUpload(HerdDateUtils.addMinutes(now, -15)));
    multipartUploads.add(getMultipartUpload(HerdDateUtils.addMinutes(now, -20)));
    return multipartUploadListing;
}
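The getMultipartUpload(Date) factory used above is not part of this excerpt; presumably it just builds a MultipartUpload with the given initiation time, along these lines (a sketch, not the actual herd helper):
private MultipartUpload getMultipartUpload(Date initiated) {
    // Only the initiated timestamp matters to callers that filter uploads by age.
    MultipartUpload multipartUpload = new MultipartUpload();
    multipartUpload.setInitiated(initiated);
    return multipartUpload;
}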
use of com.amazonaws.services.s3.model.MultipartUploadListing in project herd by FINRAOS.
the class S3DaoTest method testAbortMultipartUploadsAssertAbortOnlyBeforeThreshold.
@Test
public void testAbortMultipartUploadsAssertAbortOnlyBeforeThreshold() {
    S3Operations originalS3Operations = (S3Operations) ReflectionTestUtils.getField(s3Dao, "s3Operations");
    S3Operations mockS3Operations = mock(S3Operations.class);
    ReflectionTestUtils.setField(s3Dao, "s3Operations", mockS3Operations);
    try {
        String s3BucketName = "s3BucketName";
        String uploadKey = "uploadKey1";
        String uploadId = "uploadId1";
        Date uploadInitiated = new Date(0);
        S3FileTransferRequestParamsDto s3FileTransferRequestParamsDto = new S3FileTransferRequestParamsDto();
        s3FileTransferRequestParamsDto.setS3BucketName(s3BucketName);
        Date thresholdDate = new Date(1);
        when(mockS3Operations.listMultipartUploads(any(), any())).then(new Answer<MultipartUploadListing>() {

            @Override
            public MultipartUploadListing answer(InvocationOnMock invocation) throws Throwable {
                ListMultipartUploadsRequest listMultipartUploadsRequest = invocation.getArgument(0);
                assertEquals(s3BucketName, listMultipartUploadsRequest.getBucketName());
                MultipartUploadListing multipartUploadListing = new MultipartUploadListing();
                {
                    MultipartUpload multipartUpload = new MultipartUpload();
                    multipartUpload.setUploadId(uploadId);
                    multipartUpload.setKey(uploadKey);
                    multipartUpload.setInitiated(uploadInitiated);
                    multipartUploadListing.getMultipartUploads().add(multipartUpload);
                }
                // This upload is not aborted since the initiated date is greater than the threshold
                {
                    MultipartUpload multipartUpload = new MultipartUpload();
                    multipartUpload.setUploadId("uploadId2");
                    multipartUpload.setKey("uploadKey2");
                    multipartUpload.setInitiated(new Date(2));
                    multipartUploadListing.getMultipartUploads().add(multipartUpload);
                }
                return multipartUploadListing;
            }
        });
        assertEquals(1, s3Dao.abortMultipartUploads(s3FileTransferRequestParamsDto, thresholdDate));
        verify(mockS3Operations).listMultipartUploads(any(), any());
        /*
         * Assert that S3Operations.abortMultipartUpload is called exactly ONCE with arguments matching the given ArgumentMatcher
         */
        verify(mockS3Operations).abortMultipartUpload(argThat(argument -> Objects.equal(s3BucketName, argument.getBucketName())
            && Objects.equal(uploadKey, argument.getKey()) && Objects.equal(uploadId, argument.getUploadId())), any());
        // Assert that no other interactions occur with the mock
        verifyNoMoreInteractions(mockS3Operations);
    } finally {
        ReflectionTestUtils.setField(s3Dao, "s3Operations", originalS3Operations);
    }
}
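For context, the behavior this test pins down could be implemented roughly as follows; this is a minimal sketch against a plain AmazonS3 client, not the actual S3Dao/S3Operations code, and the method name is assumed:
static int abortUploadsBeforeThreshold(AmazonS3 s3, String bucketName, MultipartUploadListing listing, Date thresholdDate) {
    // Abort every upload initiated strictly before the threshold and return the count;
    // uploads initiated at or after the threshold are left untouched, matching the assertion above.
    int abortedCount = 0;
    for (MultipartUpload upload : listing.getMultipartUploads()) {
        if (upload.getInitiated().before(thresholdDate)) {
            s3.abortMultipartUpload(new AbortMultipartUploadRequest(bucketName, upload.getKey(), upload.getUploadId()));
            abortedCount++;
        }
    }
    return abortedCount;
}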
use of com.amazonaws.services.s3.model.MultipartUploadListing in project herd by FINRAOS.
the class S3DaoTest method testAbortMultipartUploadsAssertTruncatedResult.
@Test
public void testAbortMultipartUploadsAssertTruncatedResult() {
    S3Operations originalS3Operations = (S3Operations) ReflectionTestUtils.getField(s3Dao, "s3Operations");
    S3Operations mockS3Operations = mock(S3Operations.class);
    ReflectionTestUtils.setField(s3Dao, "s3Operations", mockS3Operations);
    try {
        String s3BucketName = "s3BucketName";
        String uploadKey = "uploadKey";
        String uploadId = "uploadId";
        Date uploadInitiated = new Date(0);
        S3FileTransferRequestParamsDto s3FileTransferRequestParamsDto = new S3FileTransferRequestParamsDto();
        s3FileTransferRequestParamsDto.setS3BucketName(s3BucketName);
        Date thresholdDate = new Date(1);
        when(mockS3Operations.listMultipartUploads(any(), any())).then(new Answer<MultipartUploadListing>() {

            @Override
            public MultipartUploadListing answer(InvocationOnMock invocation) throws Throwable {
                ListMultipartUploadsRequest listMultipartUploadsRequest = invocation.getArgument(0);
                String keyMarker = listMultipartUploadsRequest.getKeyMarker();
                String uploadIdMarker = listMultipartUploadsRequest.getUploadIdMarker();
                MultipartUploadListing multipartUploadListing = new MultipartUploadListing();
                if (keyMarker == null || uploadIdMarker == null) {
                    multipartUploadListing.setNextKeyMarker("nextKeyMarker");
                    multipartUploadListing.setNextUploadIdMarker("nextUploadIdMarker");
                    multipartUploadListing.setTruncated(true);
                } else {
                    assertEquals("nextKeyMarker", keyMarker);
                    assertEquals("nextUploadIdMarker", uploadIdMarker);
                    MultipartUpload multipartUpload = new MultipartUpload();
                    multipartUpload.setUploadId(uploadId);
                    multipartUpload.setKey(uploadKey);
                    multipartUpload.setInitiated(uploadInitiated);
                    multipartUploadListing.getMultipartUploads().add(multipartUpload);
                }
                return multipartUploadListing;
            }
        });
        assertEquals(1, s3Dao.abortMultipartUploads(s3FileTransferRequestParamsDto, thresholdDate));
        // Assert listMultipartUploads() is called twice due to truncation
        verify(mockS3Operations, times(2)).listMultipartUploads(any(), any());
        /*
         * Assert that S3Operations.abortMultipartUpload is called exactly ONCE with arguments matching the given ArgumentMatcher
         */
        verify(mockS3Operations).abortMultipartUpload(argThat(argument -> Objects.equal(s3BucketName, argument.getBucketName())
            && Objects.equal(uploadKey, argument.getKey()) && Objects.equal(uploadId, argument.getUploadId())), any());
        // Assert that no other interactions occur with the mock
        verifyNoMoreInteractions(mockS3Operations);
    } finally {
        ReflectionTestUtils.setField(s3Dao, "s3Operations", originalS3Operations);
    }
}
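This second test drives the same abort logic through a truncated listing; the pagination it exercises looks roughly like this (a sketch of the general marker-following loop, not the actual S3Dao code):
static List<MultipartUpload> listAllMultipartUploads(AmazonS3 s3, String bucketName) {
    // Follow the next key/upload-id markers until the listing is no longer truncated,
    // issuing one listMultipartUploads call per page, which is why the test verifies two calls.
    List<MultipartUpload> uploads = new ArrayList<>();
    ListMultipartUploadsRequest request = new ListMultipartUploadsRequest(bucketName);
    MultipartUploadListing listing;
    do {
        listing = s3.listMultipartUploads(request);
        uploads.addAll(listing.getMultipartUploads());
        request.setKeyMarker(listing.getNextKeyMarker());
        request.setUploadIdMarker(listing.getNextUploadIdMarker());
    } while (listing.isTruncated());
    return uploads;
}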
use of com.amazonaws.services.s3.model.MultipartUploadListing in project nifi by apache.
the class ITPutS3Object method testStateRemove.
@Test
public void testStateRemove() throws IOException {
    final PutS3Object processor = new PutS3Object();
    final TestRunner runner = TestRunners.newTestRunner(processor);
    final String bucket = runner.getProcessContext().getProperty(PutS3Object.BUCKET).getValue();
    final String key = runner.getProcessContext().getProperty(PutS3Object.KEY).getValue();
    final String cacheKey = runner.getProcessor().getIdentifier() + "/" + bucket + "/" + key + "-sr";
    final List<MultipartUpload> uploadList = new ArrayList<>();
    final MultipartUpload upload1 = new MultipartUpload();
    upload1.setKey(key);
    upload1.setUploadId("1234");
    uploadList.add(upload1);
    final MultipartUploadListing uploadListing = new MultipartUploadListing();
    uploadListing.setMultipartUploads(uploadList);
    final MockAmazonS3Client mockClient = new MockAmazonS3Client();
    mockClient.setListing(uploadListing);
    /*
     * store state, retrieve and validate, remove and validate
     */
    PutS3Object.MultipartState stateOrig = new PutS3Object.MultipartState();
    stateOrig.setUploadId("1234");
    stateOrig.setContentLength(1234L);
    processor.persistLocalState(cacheKey, stateOrig);
    PutS3Object.MultipartState state1 = processor.getLocalStateIfInS3(mockClient, bucket, cacheKey);
    Assert.assertEquals("1234", state1.getUploadId());
    Assert.assertEquals(1234L, state1.getContentLength().longValue());
    processor.persistLocalState(cacheKey, null);
    PutS3Object.MultipartState state2 = processor.getLocalStateIfInS3(mockClient, bucket, cacheKey);
    Assert.assertNull(state2);
}
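The MockAmazonS3Client used above is not included in this excerpt; judging from the calls in the test it is presumably a thin stub along these lines (a sketch, names inferred from the test):
class MockAmazonS3Client extends AmazonS3Client {
    private MultipartUploadListing listing;

    public void setListing(MultipartUploadListing listing) {
        this.listing = listing;
    }

    // Always return the preconfigured listing so getLocalStateIfInS3 sees exactly
    // the uploads the test placed in it.
    @Override
    public MultipartUploadListing listMultipartUploads(ListMultipartUploadsRequest request) throws AmazonClientException, AmazonServiceException {
        return listing;
    }
}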