Use of com.amazonaws.services.s3.model.AmazonS3Exception in the elasticsearch project (by elastic):
class DefaultS3OutputStream, method doCompleteMultipart.
/**
 * Completes a multipart upload on S3 by stitching together the already-uploaded parts.
 *
 * @param blobStore store providing the S3 client used to issue the request
 * @param bucketName target bucket
 * @param blobName   target object key
 * @param uploadId   identifier of the in-progress multipart upload
 * @param parts      ETags of every uploaded part, in order
 * @throws AmazonS3Exception if S3 rejects the completion request
 */
protected void doCompleteMultipart(S3BlobStore blobStore, String bucketName, String blobName, String uploadId, List<PartETag> parts) throws AmazonS3Exception {
    blobStore.client().completeMultipartUpload(
            new CompleteMultipartUploadRequest(bucketName, blobName, uploadId, parts));
}
Use of com.amazonaws.services.s3.model.AmazonS3Exception in the elasticsearch project (by elastic):
class TestAmazonS3, method getObject.
@Override
public S3Object getObject(String bucketName, String key) throws AmazonClientException, AmazonServiceException {
    // Fault-injection hook for tests: delegate to the real client unless the
    // configured random read-failure rate trips for this bucket/key.
    if (!shouldFail(bucketName, key, readFailureRate)) {
        return super.getObject(bucketName, key);
    }
    logger.info("--> random read failure on getObject method: throwing an exception for [bucket={}, key={}]", bucketName, key);
    AmazonS3Exception failure = new AmazonS3Exception("Random S3 read exception");
    // Simulate a missing object so callers exercise their 404 handling.
    failure.setStatusCode(404);
    throw failure;
}
Use of com.amazonaws.services.s3.model.AmazonS3Exception in the presto project (by prestodb):
class MockAmazonS3, method getObject.
@Override
public S3Object getObject(GetObjectRequest getObjectRequest) throws AmazonClientException {
    // Mock behavior: a configured HTTP code of SC_OK means "succeed" (return null body);
    // any other configured code is simulated as a failed GET with that status.
    if (getObjectHttpCode == SC_OK) {
        return null;
    }
    AmazonS3Exception failure = new AmazonS3Exception("Failing getObject call with " + getObjectHttpCode);
    failure.setStatusCode(getObjectHttpCode);
    throw failure;
}
Use of com.amazonaws.services.s3.model.AmazonS3Exception in the presto project (by prestodb):
class TestPrestoS3FileSystem, method testReadRetryCounters.
@SuppressWarnings({ "ResultOfMethodCallIgnored", "OverlyStrongTypeCast", "ConstantConditions" })
@Test
public void testReadRetryCounters() throws Exception {
    // Verifies that when every getObject call fails with a 500, the filesystem
    // retries the configured number of times and the retry counters reflect it.
    try (PrestoS3FileSystem fs = new PrestoS3FileSystem()) {
        int maxRetries = 2;
        MockAmazonS3 s3 = new MockAmazonS3();
        s3.setGetObjectHttpErrorCode(SC_INTERNAL_SERVER_ERROR);
        Configuration configuration = new Configuration();
        // Keep backoff tiny so exhausting the retries stays fast.
        configuration.set(S3_MAX_BACKOFF_TIME, "1ms");
        configuration.set(S3_MAX_RETRY_TIME, "5s");
        configuration.setInt(S3_MAX_CLIENT_RETRIES, maxRetries);
        fs.initialize(new URI("s3n://test-bucket/"), configuration);
        fs.setS3Client(s3);
        try (FSDataInputStream inputStream = fs.open(new Path("s3n://test-bucket/test"))) {
            inputStream.read();
            // BUG FIX: the original caught Throwable and asserted only inside the
            // catch block, so the test passed vacuously if read() never threw.
            // Fail explicitly when the expected exception does not occur.
            throw new AssertionError("Expected read() to fail with AmazonS3Exception after retries were exhausted");
        }
        catch (AmazonS3Exception expected) {
            // Catching the precise type replaces the old assertInstanceOf check
            // and lets the AssertionError above propagate unswallowed.
            assertEquals(expected.getStatusCode(), SC_INTERNAL_SERVER_ERROR);
            // Stream-level retries: one per allowed retry.
            assertEquals(PrestoS3FileSystem.getFileSystemStats().getReadRetries().getTotalCount(), maxRetries);
            // getObject-level retries: (maxRetries + 1) attempts per read attempt.
            assertEquals(PrestoS3FileSystem.getFileSystemStats().getGetObjectRetries().getTotalCount(), (maxRetries + 1L) * maxRetries);
        }
    }
}
Use of com.amazonaws.services.s3.model.AmazonS3Exception in the hadoop project (by apache):
class TestS3AExceptionTranslation, method test301ContainsEndpoint.
@Test
public void test301ContainsEndpoint() throws Exception {
    // A 301 redirect from S3 must translate to an AWSS3IOException whose
    // message surfaces both the correct endpoint header and the endpoint hint.
    AmazonS3Exception cause = createS3Exception("wrong endpoint", 301,
            Collections.singletonMap(S3AUtils.ENDPOINT_KEY, "bucket.s3-us-west-2.amazonaws.com"));
    AWSS3IOException translated = (AWSS3IOException) verifyTranslated(AWSS3IOException.class, cause);
    assertEquals(301, translated.getStatusCode());
    String message = translated.getMessage();
    assertNotNull(message);
    assertTrue(message.contains("bucket.s3-us-west-2.amazonaws.com"));
    assertTrue(message.contains(ENDPOINT));
}
End of aggregated usage examples.