Use of software.amazon.awssdk.core.exception.SdkClientException in project beam by apache.
The class S3ReadableSeekableByteChannel, method read().
@Override
public int read(ByteBuffer destinationBuffer) throws IOException {
  if (!isOpen()) {
    throw new ClosedChannelException();
  }
  if (!destinationBuffer.hasRemaining()) {
    return 0;
  }
  if (position == contentLength) {
    return -1;
  }
  if (s3ResponseInputStream == null) {
    GetObjectRequest.Builder builder =
        GetObjectRequest.builder()
            .bucket(path.getBucket())
            .key(path.getKey())
            .sseCustomerKey(config.getSSECustomerKey().getKey())
            .sseCustomerAlgorithm(config.getSSECustomerKey().getAlgorithm());
    if (position > 0) {
      // Resume from the current position with an HTTP Range header.
      builder.range(String.format("bytes=%s-%s", position, contentLength));
    }
    GetObjectRequest request = builder.build();
    try {
      s3ResponseInputStream = s3Client.getObject(request);
    } catch (SdkClientException e) {
      throw new IOException(e);
    }
    s3ObjectContentChannel =
        Channels.newChannel(new BufferedInputStream(s3ResponseInputStream, 1024 * 1024));
  }
  // Read until the destination buffer is full or the object stream is exhausted.
  int totalBytesRead = 0;
  int bytesRead = 0;
  do {
    totalBytesRead += bytesRead;
    try {
      bytesRead = s3ObjectContentChannel.read(destinationBuffer);
    } catch (SdkServiceException e) {
      throw new IOException(e);
    }
  } while (bytesRead > 0);
  position += totalBytesRead;
  return totalBytesRead;
}
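For reference, the same pattern can be exercised outside the channel wrapper. The sketch below is illustrative only (the method name and parameters are not from the Beam class): it issues a ranged GetObject call and drains the response into a ByteBuffer, wrapping SdkClientException in IOException exactly as the method above does.
// Sketch only; assumes an already-constructed v2 S3Client.
static int readRange(S3Client s3, String bucket, String key, long offset, ByteBuffer dst) throws IOException {
  GetObjectRequest request =
      GetObjectRequest.builder()
          .bucket(bucket)
          .key(key)
          .range(String.format("bytes=%d-", offset)) // open-ended range starting at offset
          .build();
  try (ReadableByteChannel channel = Channels.newChannel(s3.getObject(request))) {
    int totalBytesRead = 0;
    int bytesRead;
    while ((bytesRead = channel.read(dst)) > 0) {
      totalBytesRead += bytesRead;
    }
    return totalBytesRead;
  } catch (SdkClientException e) {
    throw new IOException(e);
  }
}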
Use of software.amazon.awssdk.core.exception.SdkClientException in project beam by apache.
The class S3WritableByteChannel, method flush().
private void flush() throws IOException {
  uploadBuffer.flip();
  ByteArrayInputStream inputStream =
      new ByteArrayInputStream(uploadBuffer.array(), 0, uploadBuffer.limit());
  UploadPartRequest request =
      UploadPartRequest.builder()
          .bucket(path.getBucket())
          .key(path.getKey())
          .uploadId(uploadId)
          .partNumber(partNumber++)
          .contentLength((long) uploadBuffer.limit())
          .sseCustomerKey(config.getSSECustomerKey().getKey())
          .sseCustomerAlgorithm(config.getSSECustomerKey().getAlgorithm())
          .sseCustomerKeyMD5(config.getSSECustomerKey().getMD5())
          .contentMD5(Base64.getEncoder().encodeToString(md5.digest()))
          .build();
  UploadPartResponse response;
  try {
    response = s3Client.uploadPart(request, RequestBody.fromInputStream(inputStream, request.contentLength()));
  } catch (SdkClientException e) {
    throw new IOException(e);
  }
  CompletedPart part =
      CompletedPart.builder().partNumber(request.partNumber()).eTag(response.eTag()).build();
  uploadBuffer.clear();
  md5.reset();
  completedParts.add(part);
}
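Note that flush() presupposes a multipart upload that is already in progress: every UploadPartRequest carries the class's uploadId field. A minimal sketch of how such an uploadId is obtained with the v2 SDK is shown below; the variable names are illustrative and this call is not part of the quoted method.
// Sketch only: start the multipart upload and keep the uploadId for later
// UploadPartRequest and CompleteMultipartUploadRequest calls.
CreateMultipartUploadRequest createRequest =
    CreateMultipartUploadRequest.builder().bucket(bucket).key(key).build();
String uploadId;
try {
  uploadId = s3Client.createMultipartUpload(createRequest).uploadId();
} catch (SdkClientException e) {
  throw new IOException(e);
}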
Use of software.amazon.awssdk.core.exception.SdkClientException in project beam by apache.
The class S3WritableByteChannel, method close().
@Override
public void close() throws IOException {
  open = false;
  if (uploadBuffer.remaining() > 0) {
    flush();
  }
  CompletedMultipartUpload completedMultipartUpload =
      CompletedMultipartUpload.builder().parts(completedParts).build();
  CompleteMultipartUploadRequest request =
      CompleteMultipartUploadRequest.builder()
          .bucket(path.getBucket())
          .key(path.getKey())
          .uploadId(uploadId)
          .multipartUpload(completedMultipartUpload)
          .build();
  try {
    s3Client.completeMultipartUpload(request);
  } catch (SdkClientException e) {
    throw new IOException(e);
  }
}
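close() covers only the success path. For completeness, the failure-side counterpart in the v2 SDK is aborting the upload so S3 can discard the parts uploaded so far; the sketch below is not part of the quoted Beam method and simply reuses the same fields (path, uploadId, s3Client) for illustration.
// Sketch only: abort an in-progress multipart upload after an unrecoverable error.
AbortMultipartUploadRequest abortRequest =
    AbortMultipartUploadRequest.builder()
        .bucket(path.getBucket())
        .key(path.getKey())
        .uploadId(uploadId)
        .build();
try {
  s3Client.abortMultipartUpload(abortRequest);
} catch (SdkClientException e) {
  throw new IOException(e);
}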
Use of software.amazon.awssdk.core.exception.SdkClientException in project flyway by flyway.
The class AwsS3Scanner, method scanForResources().
/**
* Scans S3 for the resources. In AWS SDK v2, only the region that the client is configured with can be used.
* The format of the path is expected to be {@code s3:{bucketName}/{optional prefix}}.
*
* @param location The location in S3 to start searching. Subdirectories are also searched.
* @return The resources that were found.
*/
@Override
public Collection<LoadableResource> scanForResources(final Location location) {
  String bucketName = getBucketName(location);
  String prefix = getPrefix(bucketName, location.getPath());
  S3Client s3Client = S3ClientFactory.getClient();
  try {
    ListObjectsV2Request.Builder builder =
        ListObjectsV2Request.builder().bucket(bucketName).prefix(prefix);
    ListObjectsV2Request request = builder.build();
    ListObjectsV2Response listObjectResult = s3Client.listObjectsV2(request);
    return getLoadableResources(bucketName, listObjectResult);
  } catch (SdkClientException e) {
    if (throwOnMissingLocations) {
      throw new FlywayException(
          "Could not access s3 location:" + bucketName + prefix + " due to error: " + e.getMessage());
    }
    LOG.error("Skipping s3 location:" + bucketName + prefix + " due to error: " + e.getMessage());
    return Collections.emptyList();
  }
}
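getLoadableResources is Flyway's own helper; the piece of the v2 API it consumes is the object listing carried by ListObjectsV2Response. A minimal sketch of reading keys out of that response is shown below (illustrative only, and like the method above it does not handle truncated listings via nextContinuationToken()):
// Sketch only: collect the keys of the listed objects.
List<String> keys = new ArrayList<>();
for (S3Object s3Object : listObjectResult.contents()) {
  keys.add(s3Object.key());
}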
Use of software.amazon.awssdk.core.exception.SdkClientException in project data-transfer-project by google.
The class BackblazeDataTransferClient, method uploadFile().
public String uploadFile(String fileKey, File file) throws IOException {
  if (s3Client == null || bucketName == null) {
    throw new IllegalStateException("BackblazeDataTransferClient has not been initialised");
  }
  try {
    long contentLength = file.length();
    monitor.debug(() -> String.format("Uploading '%s' with file size %d bytes", fileKey, contentLength));
    if (contentLength >= sizeThresholdForMultipartUpload) {
      monitor.debug(() -> String.format(
          "File size is larger than %d bytes, so using multipart upload", sizeThresholdForMultipartUpload));
      return uploadFileUsingMultipartUpload(fileKey, file, contentLength);
    }
    PutObjectRequest putObjectRequest =
        PutObjectRequest.builder().bucket(bucketName).key(fileKey).build();
    PutObjectResponse putObjectResponse = s3Client.putObject(putObjectRequest, RequestBody.fromFile(file));
    return putObjectResponse.versionId();
  } catch (AwsServiceException | SdkClientException e) {
    throw new IOException(String.format("Error while uploading file, fileKey: %s", fileKey), e);
  }
}
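The single catch above treats service-side failures (AwsServiceException) and client-side failures (SdkClientException) the same way. If the two ever need to be distinguished, a hedged sketch is shown below; the helper name is illustrative and not part of the project. Both types extend SdkException, and only the service-side case carries an HTTP status.
// Sketch only: wrap either failure type into IOException, preserving the
// HTTP status when the request actually reached the service.
static IOException wrapUploadFailure(String fileKey, SdkException e) {
  if (e instanceof AwsServiceException) {
    int status = ((AwsServiceException) e).statusCode();
    return new IOException(
        String.format("Service error (HTTP %d) while uploading file, fileKey: %s", status, fileKey), e);
  }
  // SdkClientException: the request never produced a valid service response
  // (networking, credentials, request marshalling, and so on).
  return new IOException(String.format("Client-side error while uploading file, fileKey: %s", fileKey), e);
}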