use of com.amazonaws.services.s3.model.AmazonS3Exception in project Singularity by HubSpot.
the class SingularityS3Uploader method uploadSingle.
protected void uploadSingle(int sequence, Path file) throws Exception {
  // Retry the whole upload on S3 service errors or runtime exceptions, with a fixed wait
  // between attempts and a bounded attempt count taken from the uploader configuration.
  Retryer<Boolean> retryer = RetryerBuilder.<Boolean>newBuilder()
      .retryIfExceptionOfType(AmazonS3Exception.class)
      .retryIfRuntimeException()
      .withWaitStrategy(WaitStrategies.fixedWait(configuration.getRetryWaitMs(), TimeUnit.MILLISECONDS))
      .withStopStrategy(StopStrategies.stopAfterAttempt(configuration.getRetryCount()))
      .build();
  retryer.call(() -> {
    final long start = System.currentTimeMillis();
    final String key = SingularityS3FormatHelper.getKey(uploadMetadata.getS3KeyFormat(), sequence, Files.getLastModifiedTime(file).toMillis(), Objects.toString(file.getFileName()), hostname);
    long fileSizeBytes = Files.size(file);
    LOG.info("{} Uploading {} to {}/{} (size {})", logIdentifier, file, bucketName, key, fileSizeBytes);
    try {
      // Attach log start/end timestamps as user metadata when they are known.
      ObjectMetadata objectMetadata = new ObjectMetadata();
      UploaderFileAttributes fileAttributes = getFileAttributes(file);
      if (fileAttributes.getStartTime().isPresent()) {
        objectMetadata.addUserMetadata(SingularityS3Log.LOG_START_S3_ATTR, fileAttributes.getStartTime().get().toString());
        LOG.debug("Added extra metadata for object ({}:{})", SingularityS3Log.LOG_START_S3_ATTR, fileAttributes.getStartTime().get());
      }
      if (fileAttributes.getEndTime().isPresent()) {
        objectMetadata.addUserMetadata(SingularityS3Log.LOG_END_S3_ATTR, fileAttributes.getEndTime().get().toString());
        LOG.debug("Added extra metadata for object ({}:{})", SingularityS3Log.LOG_END_S3_ATTR, fileAttributes.getEndTime().get());
      }
      // Apply configured content headers (content type / encoding) based on the file name suffix.
      for (SingularityS3UploaderContentHeaders contentHeaders : configuration.getS3ContentHeaders()) {
        if (file.toString().endsWith(contentHeaders.getFilenameEndsWith())) {
          LOG.debug("{} Using content headers {} for file {}", logIdentifier, contentHeaders, file);
          if (contentHeaders.getContentType().isPresent()) {
            objectMetadata.setContentType(contentHeaders.getContentType().get());
          }
          if (contentHeaders.getContentEncoding().isPresent()) {
            objectMetadata.setContentEncoding(contentHeaders.getContentEncoding().get());
          }
          break;
        }
      }
      Optional<StorageClass> maybeStorageClass = Optional.absent();
      if (shouldApplyStorageClass(fileSizeBytes, uploadMetadata.getS3StorageClass())) {
        LOG.debug("{} adding storage class {} to {}", logIdentifier, uploadMetadata.getS3StorageClass().get(), file);
        maybeStorageClass = Optional.of(StorageClass.fromValue(uploadMetadata.getS3StorageClass().get()));
      }
      LOG.debug("Uploading object with metadata {}", objectMetadata);
      // Files above the configured size limit go through multipart upload; everything else is a single PutObject call.
      if (fileSizeBytes > configuration.getMaxSingleUploadSizeBytes()) {
        multipartUpload(key, file.toFile(), objectMetadata, maybeStorageClass);
      } else {
        PutObjectRequest putObjectRequest = new PutObjectRequest(bucketName, key, file.toFile()).withMetadata(objectMetadata);
        if (maybeStorageClass.isPresent()) {
          putObjectRequest.setStorageClass(maybeStorageClass.get());
        }
        if (uploadMetadata.getEncryptionKey().isPresent()) {
          putObjectRequest.withSSEAwsKeyManagementParams(new SSEAwsKeyManagementParams(uploadMetadata.getEncryptionKey().get()));
        }
        s3Client.putObject(putObjectRequest);
      }
    } catch (AmazonS3Exception se) {
      LOG.warn("{} Couldn't upload {} due to {} - {}", logIdentifier, file, se.getErrorCode(), se.getErrorMessage(), se);
      throw se;
    } catch (Exception e) {
      LOG.warn("Exception uploading {}", file, e);
      throw e;
    }
    LOG.info("{} Uploaded {} in {}", logIdentifier, key, JavaUtils.duration(start));
    return true;
  });
}
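The multipartUpload helper invoked above for oversized files is not part of this snippet. A minimal sketch of what such a helper could look like using the AWS SDK v1 TransferManager, reusing the same s3Client and bucketName fields; this is an illustration under those assumptions, not Singularity's actual implementation:

// Assumed sketch; requires com.amazonaws.services.s3.transfer.TransferManager, TransferManagerBuilder and Upload.
private void multipartUpload(String key, File file, ObjectMetadata objectMetadata, Optional<StorageClass> maybeStorageClass) throws Exception {
  // TransferManager splits the file into parts and uploads them in parallel.
  TransferManager transferManager = TransferManagerBuilder.standard()
      .withS3Client(s3Client)
      .build();
  try {
    PutObjectRequest putObjectRequest = new PutObjectRequest(bucketName, key, file).withMetadata(objectMetadata);
    if (maybeStorageClass.isPresent()) {
      putObjectRequest.setStorageClass(maybeStorageClass.get());
    }
    Upload upload = transferManager.upload(putObjectRequest);
    upload.waitForCompletion();
  } finally {
    // Pass false so the shared AmazonS3 client is not shut down along with the TransferManager.
    transferManager.shutdownNow(false);
  }
}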
use of com.amazonaws.services.s3.model.AmazonS3Exception in project kafka-connect-storage-cloud by confluentinc.
the class TestWithMockedS3 method listObjects.
public static List<S3ObjectSummary> listObjects(String bucket, String prefix, AmazonS3 s3) {
  List<S3ObjectSummary> objects = new ArrayList<>();
  ObjectListing listing;
  try {
    if (prefix == null) {
      listing = s3.listObjects(bucket);
    } else {
      listing = s3.listObjects(bucket, prefix);
    }
    objects.addAll(listing.getObjectSummaries());
    // Keep fetching pages while the listing is truncated.
    while (listing.isTruncated()) {
      listing = s3.listNextBatchOfObjects(listing);
      objects.addAll(listing.getObjectSummaries());
    }
  } catch (AmazonS3Exception e) {
    log.warn("listObjects for bucket '{}' prefix '{}' returned status code: {}", bucket, prefix, e.getStatusCode());
  }
  return objects;
}
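A minimal usage sketch, assuming a test that has already produced records through the connector; the bucket name, prefix, and file extension below are illustrative placeholders rather than the project's actual fixtures:

// Hypothetical assertion block; JUnit static imports and the configured AmazonS3 client 's3' come from the test setup.
List<S3ObjectSummary> summaries = TestWithMockedS3.listObjects("connect-test-bucket", "topics/test-topic/", s3);
assertFalse(summaries.isEmpty());
for (S3ObjectSummary summary : summaries) {
  assertTrue(summary.getKey().endsWith(".avro"));
}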
use of com.amazonaws.services.s3.model.AmazonS3Exception in project herd by FINRAOS.
the class MockS3OperationsImpl method listObjects.
/**
* {@inheritDoc}
* <p/>
* If the bucket does not exist, returns a listing with an empty list. If a prefix is specified in listObjectsRequest, only keys starting with the prefix
* will be returned.
*/
@Override
public ObjectListing listObjects(ListObjectsRequest listObjectsRequest, AmazonS3 s3Client) {
  LOGGER.debug("listObjects(): listObjectsRequest.getBucketName() = " + listObjectsRequest.getBucketName());
  String bucketName = listObjectsRequest.getBucketName();
  if (MOCK_S3_BUCKET_NAME_NO_SUCH_BUCKET_EXCEPTION.equals(bucketName)) {
    AmazonS3Exception amazonS3Exception = new AmazonS3Exception(MOCK_S3_BUCKET_NAME_NO_SUCH_BUCKET_EXCEPTION);
    amazonS3Exception.setErrorCode("NoSuchBucket");
    throw amazonS3Exception;
  }
  ObjectListing objectListing = new ObjectListing();
  objectListing.setBucketName(bucketName);
  MockS3Bucket mockS3Bucket = mockS3Buckets.get(bucketName);
  if (mockS3Bucket != null) {
    for (MockS3Object mockS3Object : mockS3Bucket.getObjects().values()) {
      String s3ObjectKey = mockS3Object.getKey();
      if (listObjectsRequest.getPrefix() == null || s3ObjectKey.startsWith(listObjectsRequest.getPrefix())) {
        S3ObjectSummary s3ObjectSummary = new S3ObjectSummary();
        s3ObjectSummary.setBucketName(bucketName);
        s3ObjectSummary.setKey(s3ObjectKey);
        s3ObjectSummary.setSize(mockS3Object.getData().length);
        s3ObjectSummary.setStorageClass(mockS3Object.getObjectMetadata() != null ? mockS3Object.getObjectMetadata().getStorageClass() : null);
        objectListing.getObjectSummaries().add(s3ObjectSummary);
      }
    }
  }
  return objectListing;
}
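A short sketch of how a unit test might exercise the missing-bucket branch of this mock; the s3Operations and s3Client variables are assumed test scaffolding, while the bucket constant and the expected error code come from the code above:

// Hypothetical test; 's3Operations' is a MockS3OperationsImpl and 's3Client' can be any AmazonS3 stub, since the mock ignores it.
ListObjectsRequest request = new ListObjectsRequest()
    .withBucketName(MockS3OperationsImpl.MOCK_S3_BUCKET_NAME_NO_SUCH_BUCKET_EXCEPTION);
try {
  s3Operations.listObjects(request, s3Client);
  fail("Expected an AmazonS3Exception for the non-existent bucket");
} catch (AmazonS3Exception e) {
  assertEquals("NoSuchBucket", e.getErrorCode());
}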
use of com.amazonaws.services.s3.model.AmazonS3Exception in project herd by FINRAOS.
the class MockS3OperationsImpl method listVersions.
/**
* {@inheritDoc}
* <p/>
* If the bucket does not exist, returns a listing with an empty list. If a prefix is specified in listVersionsRequest, only versions starting with the
* prefix will be returned.
*/
@Override
public VersionListing listVersions(ListVersionsRequest listVersionsRequest, AmazonS3 s3Client) {
  LOGGER.debug("listVersions(): listVersionsRequest.getBucketName() = " + listVersionsRequest.getBucketName());
  String bucketName = listVersionsRequest.getBucketName();
  if (MOCK_S3_BUCKET_NAME_NO_SUCH_BUCKET_EXCEPTION.equals(bucketName)) {
    AmazonS3Exception amazonS3Exception = new AmazonS3Exception(MOCK_S3_BUCKET_NAME_NO_SUCH_BUCKET_EXCEPTION);
    amazonS3Exception.setErrorCode("NoSuchBucket");
    throw amazonS3Exception;
  } else if (MOCK_S3_BUCKET_NAME_INTERNAL_ERROR.equals(bucketName)) {
    throw new AmazonServiceException(S3Operations.ERROR_CODE_INTERNAL_ERROR);
  }
  VersionListing versionListing = new VersionListing();
  versionListing.setBucketName(bucketName);
  MockS3Bucket mockS3Bucket = mockS3Buckets.get(bucketName);
  if (mockS3Bucket != null) {
    for (MockS3Object mockS3Object : mockS3Bucket.getVersions().values()) {
      String s3ObjectKey = mockS3Object.getKey();
      if (listVersionsRequest.getPrefix() == null || s3ObjectKey.startsWith(listVersionsRequest.getPrefix())) {
        S3VersionSummary s3VersionSummary = new S3VersionSummary();
        s3VersionSummary.setBucketName(bucketName);
        s3VersionSummary.setKey(s3ObjectKey);
        s3VersionSummary.setVersionId(mockS3Object.getVersion());
        s3VersionSummary.setSize(mockS3Object.getData().length);
        s3VersionSummary.setStorageClass(mockS3Object.getObjectMetadata() != null ? mockS3Object.getObjectMetadata().getStorageClass() : null);
        versionListing.getVersionSummaries().add(s3VersionSummary);
      }
    }
  }
  return versionListing;
}
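The notable difference from the listObjects mock is the extra internal-error branch, which surfaces as a plain AmazonServiceException rather than an AmazonS3Exception. A hedged sketch of asserting that branch, with the same assumed test scaffolding as before:

// Hypothetical test for the internal-error bucket; it catches the broader AmazonServiceException type.
ListVersionsRequest request = new ListVersionsRequest()
    .withBucketName(MockS3OperationsImpl.MOCK_S3_BUCKET_NAME_INTERNAL_ERROR);
try {
  s3Operations.listVersions(request, s3Client);
  fail("Expected an AmazonServiceException for the internal-error bucket");
} catch (AmazonServiceException e) {
  assertEquals(S3Operations.ERROR_CODE_INTERNAL_ERROR, e.getErrorMessage());
}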
use of com.amazonaws.services.s3.model.AmazonS3Exception in project herd by FINRAOS.
the class S3DaoImpl method listVersions.
@Override
public List<DeleteObjectsRequest.KeyVersion> listVersions(final S3FileTransferRequestParamsDto params) {
  Assert.isTrue(!isRootKeyPrefix(params.getS3KeyPrefix()), "Listing of S3 key versions from root directory is not allowed.");
  AmazonS3Client s3Client = getAmazonS3(params);
  List<DeleteObjectsRequest.KeyVersion> keyVersions = new ArrayList<>();
  try {
    ListVersionsRequest listVersionsRequest = new ListVersionsRequest().withBucketName(params.getS3BucketName()).withPrefix(params.getS3KeyPrefix());
    VersionListing versionListing;
    do {
      versionListing = s3Operations.listVersions(listVersionsRequest, s3Client);
      for (S3VersionSummary versionSummary : versionListing.getVersionSummaries()) {
        keyVersions.add(new DeleteObjectsRequest.KeyVersion(versionSummary.getKey(), versionSummary.getVersionId()));
      }
      // Advance both markers so the next request continues where this page ended.
      listVersionsRequest.setKeyMarker(versionListing.getNextKeyMarker());
      listVersionsRequest.setVersionIdMarker(versionListing.getNextVersionIdMarker());
    } while (versionListing.isTruncated());
  } catch (AmazonS3Exception amazonS3Exception) {
    if (S3Operations.ERROR_CODE_NO_SUCH_BUCKET.equals(amazonS3Exception.getErrorCode())) {
      throw new IllegalArgumentException("The specified bucket '" + params.getS3BucketName() + "' does not exist.", amazonS3Exception);
    }
    throw new IllegalStateException("Error accessing S3", amazonS3Exception);
  } catch (AmazonClientException e) {
    throw new IllegalStateException(String.format("Failed to list keys/key versions with prefix \"%s\" from bucket \"%s\". Reason: %s", params.getS3KeyPrefix(), params.getS3BucketName(), e.getMessage()), e);
  } finally {
    // Shutdown the AmazonS3Client instance to release resources.
    s3Client.shutdown();
  }
  return keyVersions;
}
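The returned KeyVersion list is typically fed into a versioned bulk delete. A sketch of that follow-up step, with s3Dao, params, and s3Client as assumed names; the batch size of 1000 reflects the maximum number of keys S3 accepts per DeleteObjects request:

// Illustrative follow-up: delete the listed versions in batches of at most 1000 keys per request.
List<DeleteObjectsRequest.KeyVersion> keyVersions = s3Dao.listVersions(params);
for (int i = 0; i < keyVersions.size(); i += 1000) {
  List<DeleteObjectsRequest.KeyVersion> batch = keyVersions.subList(i, Math.min(i + 1000, keyVersions.size()));
  DeleteObjectsRequest deleteObjectsRequest = new DeleteObjectsRequest(params.getS3BucketName()).withKeys(batch);
  s3Client.deleteObjects(deleteObjectsRequest);
}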