Use of software.amazon.awssdk.core.exception.SdkException in project aws-sdk-java-v2 (by aws):
class RetryOnExceptionsCondition, method shouldRetry.
/**
 * Decides whether the failed request should be retried.
 *
 * @param context Context about the state of the last request and information about the number of requests made.
 * @return True if the thrown exception, or its immediate cause, is an instance of one of the
 *         configured retryable exception classes.
 */
@Override
public boolean shouldRetry(RetryPolicyContext context) {
    SdkException thrown = context.exception();
    if (thrown == null) {
        // Nothing was thrown, so there is nothing to match against.
        return false;
    }
    Throwable cause = thrown.getCause();
    for (Class<? extends Exception> retryable : exceptionsToRetryOn) {
        // isInstance(x) is equivalent to retryable.isAssignableFrom(x.getClass()).
        if (retryable.isInstance(thrown) || (cause != null && retryable.isInstance(cause))) {
            return true;
        }
    }
    return false;
}
Use of software.amazon.awssdk.core.exception.SdkException in project djl (by deepjavalibrary):
class SageMakerTest, method testDeployModel.
/**
 * End-to-end nightly test: deploys an MLP model to a SageMaker endpoint, invokes it with a
 * sample image, verifies the top classification, then tears the endpoint down.
 * Skipped unless the "nightly" system property is set and AWS credentials are available.
 *
 * @throws IOException if the sample image cannot be read
 * @throws ModelException if the test model cannot be loaded
 */
@Test
public void testDeployModel() throws IOException, ModelException {
// Only run in the nightly build and when real AWS credentials exist.
if (!Boolean.getBoolean("nightly") || !hasCredential()) {
throw new SkipException("The test requires AWS credentials.");
}
Criteria<NDList, NDList> criteria = Criteria.builder().setTypes(NDList.class, NDList.class).optModelUrls("https://resources.djl.ai/test-models/mlp.tar.gz").build();
try (ZooModel<NDList, NDList> model = criteria.loadModel()) {
// Hard-coded test account (125045733377) bucket, ECR image, and execution role.
SageMaker sageMaker = SageMaker.builder().setModel(model).optBucketName("djl-sm-test").optModelName("resnet").optContainerImage("125045733377.dkr.ecr.us-east-1.amazonaws.com/djl").optExecutionRole("arn:aws:iam::125045733377:role/service-role/DJLSageMaker-ExecutionRole-20210213T1027050").build();
sageMaker.deploy();
byte[] image;
// Sample handwritten-digit image ("0") from the examples module.
Path imagePath = Paths.get("../../examples/src/test/resources/0.png");
try (InputStream is = Files.newInputStream(imagePath)) {
image = Utils.toByteArray(is);
}
// Invoke the deployed endpoint and parse the JSON classification list it returns.
String ret = new String(sageMaker.invoke(image), StandardCharsets.UTF_8);
Type type = new TypeToken<List<Classifications.Classification>>() {
}.getType();
List<Classifications.Classification> list = JsonUtils.GSON.fromJson(ret, type);
String className = list.get(0).getClassName();
Assert.assertEquals(className, "0");
// Clean up all SageMaker resources created by deploy().
// NOTE(review): cleanup is skipped if an assertion above fails — resources may leak.
sageMaker.deleteEndpoint();
sageMaker.deleteEndpointConfig();
sageMaker.deleteSageMakerModel();
} catch (SdkException e) {
// Treat missing AWS permissions as a skip rather than a failure.
throw new SkipException("Skip tests that requires permission.", e);
}
}
Use of software.amazon.awssdk.core.exception.SdkException in project jmix (by jmix-framework):
class AwsFileStorage, method removeFile.
/**
 * Deletes the object referenced by {@code reference} from the configured S3 bucket.
 *
 * @param reference file reference whose path is the S3 object key
 * @throws FileStorageException if the S3 delete request fails
 */
@Override
public void removeFile(FileRef reference) {
    try {
        S3Client s3Client = s3ClientReference.get();
        DeleteObjectRequest deleteObjectRequest = DeleteObjectRequest.builder()
                .bucket(bucket)
                .key(reference.getPath())
                .build();
        s3Client.deleteObject(deleteObjectRequest);
    } catch (SdkException e) {
        log.error("Error removing file from S3 storage", e);
        String message = String.format("Could not delete file %s.", reference.getFileName());
        // Fix: propagate the SdkException as the cause instead of dropping it,
        // so callers see the real S3 failure in the stack trace.
        throw new FileStorageException(FileStorageException.Type.IO_EXCEPTION, message, e);
    }
}
Use of software.amazon.awssdk.core.exception.SdkException in project jmix (by jmix-framework):
class AwsFileStorage, method saveStream.
/**
 * Uploads the stream to S3 under a generated key. Empty streams become an empty object;
 * non-empty streams are uploaded via the S3 multipart-upload API in chunks of
 * {@code chunkSize} KiB.
 *
 * @param fileName    logical file name, used to build the S3 key and the returned reference
 * @param inputStream content to store; consumed and closed by this method
 * @return a reference to the stored file
 * @throws FileStorageException if reading the stream or any S3 call fails
 */
@Override
public FileRef saveStream(String fileName, InputStream inputStream) {
    String fileKey = createFileKey(fileName);
    int s3ChunkSizeBytes = this.chunkSize * 1024;
    try (BufferedInputStream bis = new BufferedInputStream(inputStream, s3ChunkSizeBytes)) {
        S3Client s3Client = s3ClientReference.get();
        // NOTE(review): available() is only a hint for arbitrary streams; it is reliable
        // for in-memory/file-backed streams, which appears to be the expected input here —
        // confirm against callers before relying on it for network streams.
        int totalSizeBytes = bis.available();
        if (totalSizeBytes == 0) {
            // Zero-length content: a plain put of an empty body is sufficient.
            s3Client.putObject(PutObjectRequest.builder().bucket(bucket).key(fileKey).build(),
                    RequestBody.empty());
            return new FileRef(getStorageName(), fileKey, fileName);
        }
        CreateMultipartUploadRequest createMultipartUploadRequest =
                CreateMultipartUploadRequest.builder().bucket(bucket).key(fileKey).build();
        CreateMultipartUploadResponse response = s3Client.createMultipartUpload(createMultipartUploadRequest);
        String uploadId = response.uploadId();
        List<CompletedPart> completedParts = new ArrayList<>();
        for (int partNumber = 1, readBytes = 0; readBytes < totalSizeBytes; partNumber++) {
            byte[] chunkBytes = new byte[Math.min(totalSizeBytes - readBytes, s3ChunkSizeBytes)];
            // Fix: a single read() may return fewer bytes than requested, which previously
            // uploaded a zero-padded chunk and corrupted the readBytes accounting.
            // Loop until the chunk is completely filled (or the stream ends early).
            int filled = 0;
            while (filled < chunkBytes.length) {
                int n = bis.read(chunkBytes, filled, chunkBytes.length - filled);
                if (n < 0) {
                    throw new IOException(String.format(
                            "Unexpected end of stream while saving file %s.", fileName));
                }
                filled += n;
            }
            readBytes += filled;
            UploadPartRequest uploadPartRequest = UploadPartRequest.builder()
                    .bucket(bucket)
                    .key(fileKey)
                    .uploadId(uploadId)
                    .partNumber(partNumber)
                    .build();
            String eTag = s3Client.uploadPart(uploadPartRequest, RequestBody.fromBytes(chunkBytes)).eTag();
            completedParts.add(CompletedPart.builder().partNumber(partNumber).eTag(eTag).build());
        }
        CompletedMultipartUpload completedMultipartUpload =
                CompletedMultipartUpload.builder().parts(completedParts).build();
        CompleteMultipartUploadRequest completeMultipartUploadRequest =
                CompleteMultipartUploadRequest.builder()
                        .bucket(bucket)
                        .key(fileKey)
                        .uploadId(uploadId)
                        .multipartUpload(completedMultipartUpload)
                        .build();
        s3Client.completeMultipartUpload(completeMultipartUploadRequest);
        return new FileRef(getStorageName(), fileKey, fileName);
    } catch (IOException | SdkException e) {
        // NOTE(review): an incomplete multipart upload is left behind on failure; consider
        // aborting it (AbortMultipartUploadRequest) or a bucket lifecycle rule.
        log.error("Error saving file to S3 storage", e);
        String message = String.format("Could not save file %s.", fileName);
        // Fix: propagate the underlying exception as the cause instead of dropping it.
        throw new FileStorageException(FileStorageException.Type.IO_EXCEPTION, message, e);
    }
}
Use of software.amazon.awssdk.core.exception.SdkException in project jmix (by jmix-framework):
class AwsFileStorage, method openStream.
/**
 * Opens a stream over the S3 object referenced by {@code reference}.
 * The caller is responsible for closing the returned stream.
 *
 * @param reference file reference whose path is the S3 object key
 * @return a stream over the object's content
 * @throws FileStorageException if the S3 get request fails
 */
@Override
public InputStream openStream(FileRef reference) {
    try {
        S3Client s3Client = s3ClientReference.get();
        GetObjectRequest getObjectRequest = GetObjectRequest.builder()
                .bucket(bucket)
                .key(reference.getPath())
                .build();
        return s3Client.getObject(getObjectRequest, ResponseTransformer.toInputStream());
    } catch (SdkException e) {
        log.error("Error loading file from S3 storage", e);
        String message = String.format("Could not load file %s.", reference.getFileName());
        // Fix: propagate the SdkException as the cause instead of dropping it.
        throw new FileStorageException(FileStorageException.Type.IO_EXCEPTION, message, e);
    }
}
Aggregations: end of usage listings for software.amazon.awssdk.core.exception.SdkException.