use of com.amazonaws.services.s3.transfer.Download in project android-simpl3r by jgilfelt.
the class Uploader method start.
/**
* Initiate a multipart file upload to Amazon S3
*
* @return the URL of a successfully uploaded file
*/
public String start() {
    // initialize
    List<PartETag> partETags = new ArrayList<PartETag>();
    final long contentLength = file.length();
    long filePosition = 0;
    int startPartNumber = 1;

    userInterrupted = false;
    userAborted = false;
    bytesUploaded = 0;

    // check if we can resume an incomplete upload
    String uploadId = getCachedUploadId();
    if (uploadId != null) {
        // we can resume the upload
        Log.i(TAG, "resuming upload for " + uploadId);
        // get the cached etags
        List<PartETag> cachedEtags = getCachedPartEtags();
        partETags.addAll(cachedEtags);
        // calculate the start position for resume
        startPartNumber = cachedEtags.size() + 1;
        filePosition = (startPartNumber - 1) * partSize;
        bytesUploaded = filePosition;
        Log.i(TAG, "resuming at part " + startPartNumber + " position " + filePosition);
    } else {
        // initiate a new multi part upload
        Log.i(TAG, "initiating new upload");
        InitiateMultipartUploadRequest initRequest = new InitiateMultipartUploadRequest(s3bucketName, s3key);
        configureInitiateRequest(initRequest);
        InitiateMultipartUploadResult initResponse = s3Client.initiateMultipartUpload(initRequest);
        uploadId = initResponse.getUploadId();
    }

    final AbortMultipartUploadRequest abortRequest = new AbortMultipartUploadRequest(s3bucketName, s3key, uploadId);

    for (int k = startPartNumber; filePosition < contentLength; k++) {
        long thisPartSize = Math.min(partSize, (contentLength - filePosition));
        Log.i(TAG, "starting file part " + k + " with size " + thisPartSize);

        UploadPartRequest uploadRequest = new UploadPartRequest()
                .withBucketName(s3bucketName)
                .withKey(s3key)
                .withUploadId(uploadId)
                .withPartNumber(k)
                .withFileOffset(filePosition)
                .withFile(file)
                .withPartSize(thisPartSize);

        ProgressListener s3progressListener = new ProgressListener() {
            public void progressChanged(ProgressEvent progressEvent) {
                // TODO calling shutdown too brute force?
                if (userInterrupted) {
                    s3Client.shutdown();
                    throw new UploadIterruptedException("User interrupted");
                } else if (userAborted) {
                    // aborted requests cannot be resumed, so clear any cached etags
                    clearProgressCache();
                    s3Client.abortMultipartUpload(abortRequest);
                    s3Client.shutdown();
                }
                bytesUploaded += progressEvent.getBytesTransfered();
                // Log.d(TAG, "bytesUploaded=" + bytesUploaded);
                // broadcast progress
                float fpercent = ((bytesUploaded * 100) / contentLength);
                int percent = Math.round(fpercent);
                if (progressListener != null) {
                    progressListener.progressChanged(progressEvent, bytesUploaded, percent);
                }
            }
        };
        uploadRequest.setProgressListener(s3progressListener);

        UploadPartResult result = s3Client.uploadPart(uploadRequest);
        partETags.add(result.getPartETag());

        // cache the part progress for this upload
        if (k == 1) {
            initProgressCache(uploadId);
        }
        // store part etag
        cachePartEtag(result);

        filePosition += thisPartSize;
    }

    CompleteMultipartUploadRequest compRequest = new CompleteMultipartUploadRequest(s3bucketName, s3key, uploadId, partETags);
    CompleteMultipartUploadResult result = s3Client.completeMultipartUpload(compRequest);

    bytesUploaded = 0;
    Log.i(TAG, "upload complete for " + uploadId);
    clearProgressCache();
    return result.getLocation();
}
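For context, a rough caller-side sketch of how start() might be driven from an Android background thread. The Uploader constructor arguments and the UploadProgressListener shape shown here are assumptions for illustration, not taken from the snippet above; ACCESS_KEY, SECRET_KEY and the local file are placeholders.

// Hedged sketch: constructor signature and listener interface are assumptions.
AmazonS3Client s3Client = new AmazonS3Client(new BasicAWSCredentials(ACCESS_KEY, SECRET_KEY));
File file = new File(getCacheDir(), "video.mp4");   // hypothetical local file
Uploader uploader = new Uploader(context, s3Client, "my-bucket", "uploads/video.mp4", file);
uploader.setProgressListener(new Uploader.UploadProgressListener() {
    @Override
    public void progressChanged(ProgressEvent progressEvent, long bytesUploaded, int percentUploaded) {
        // e.g. forward the percentage to a notification or the UI thread
    }
});
// start() blocks until the upload completes, so call it off the main thread
String s3Location = uploader.start();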
use of com.amazonaws.services.s3.transfer.Download in project aws-doc-sdk-examples by awsdocs.
the class S3ClientSideEncryptionSymMasterKey method main.
public static void main(String[] args) throws Exception {
    Regions clientRegion = Regions.DEFAULT_REGION;
    String bucketName = "*** Bucket name ***";
    String objectKeyName = "*** Object key name ***";
    String masterKeyDir = System.getProperty("java.io.tmpdir");
    String masterKeyName = "secret.key";

    // Generate a symmetric 256-bit AES key.
    KeyGenerator symKeyGenerator = KeyGenerator.getInstance("AES");
    symKeyGenerator.init(256);
    SecretKey symKey = symKeyGenerator.generateKey();

    // To see how it works, save and load the key to and from the file system.
    saveSymmetricKey(masterKeyDir, masterKeyName, symKey);
    symKey = loadSymmetricAESKey(masterKeyDir, masterKeyName, "AES");

    try {
        // Create the Amazon S3 encryption client.
        EncryptionMaterials encryptionMaterials = new EncryptionMaterials(symKey);
        AmazonS3 s3EncryptionClient = AmazonS3EncryptionClientBuilder.standard()
                .withCredentials(new ProfileCredentialsProvider())
                .withEncryptionMaterials(new StaticEncryptionMaterialsProvider(encryptionMaterials))
                .withRegion(clientRegion)
                .build();

        // Upload a new object. The encryption client automatically encrypts it.
        byte[] plaintext = "S3 Object Encrypted Using Client-Side Symmetric Master Key.".getBytes();
        s3EncryptionClient.putObject(new PutObjectRequest(bucketName, objectKeyName, new ByteArrayInputStream(plaintext), new ObjectMetadata()));

        // Download and decrypt the object.
        S3Object downloadedObject = s3EncryptionClient.getObject(bucketName, objectKeyName);
        byte[] decrypted = com.amazonaws.util.IOUtils.toByteArray(downloadedObject.getObjectContent());

        // Verify that the data that you downloaded is the same as the original data.
        System.out.println("Plaintext: " + new String(plaintext));
        System.out.println("Decrypted text: " + new String(decrypted));
    } catch (AmazonServiceException e) {
        // The call was transmitted successfully, but Amazon S3 couldn't process
        // it, so it returned an error response.
        e.printStackTrace();
    } catch (SdkClientException e) {
        // Amazon S3 couldn't be contacted for a response, or the client
        // couldn't parse the response from Amazon S3.
        e.printStackTrace();
    }
}
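The saveSymmetricKey and loadSymmetricAESKey helpers referenced above are not included in this excerpt. A minimal sketch of what they could look like, assuming the key is persisted as its raw encoded bytes and rebuilt with javax.crypto.spec.SecretKeySpec (using java.nio.file.Files); this is an illustrative assumption, not necessarily the exact helpers from the aws-doc-sdk-examples repository.

// Sketch under the stated assumption: the file simply holds key.getEncoded().
private static void saveSymmetricKey(String dir, String name, SecretKey key) throws IOException {
    // For an AES SecretKey, getEncoded() returns the raw key material.
    Files.write(new File(dir, name).toPath(), key.getEncoded());
}

private static SecretKey loadSymmetricAESKey(String dir, String name, String algorithm) throws IOException {
    byte[] encoded = Files.readAllBytes(new File(dir, name).toPath());
    // SecretKeySpec reconstructs an equivalent key from the raw bytes.
    return new SecretKeySpec(encoded, algorithm);
}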
use of com.amazonaws.services.s3.transfer.Download in project aws-doc-sdk-examples by awsdocs.
the class XferMgrDownload method downloadFile.
public static void downloadFile(String bucket_name, String key_name, String file_path, boolean pause) {
    System.out.println("Downloading to file: " + file_path + (pause ? " (pause)" : ""));
    // snippet-start:[s3.java1.s3_xfer_mgr_download.single]
    File f = new File(file_path);
    TransferManager xfer_mgr = TransferManagerBuilder.standard().build();
    try {
        Download xfer = xfer_mgr.download(bucket_name, key_name, f);
        // loop with Transfer.isDone()
        XferMgrProgress.showTransferProgress(xfer);
        // or block with Transfer.waitForCompletion()
        XferMgrProgress.waitForCompletion(xfer);
    } catch (AmazonServiceException e) {
        System.err.println(e.getErrorMessage());
        System.exit(1);
    }
    xfer_mgr.shutdownNow();
    // snippet-end:[s3.java1.s3_xfer_mgr_download.single]
}
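The same TransferManager can also fetch every object under a key prefix in one call via downloadDirectory, which returns a MultipleFileDownload. A short sketch in the style of the method above; the method name downloadDir and its error handling are illustrative, not copied from the repository.

public static void downloadDir(String bucket_name, String key_prefix, String dir_path) {
    TransferManager xfer_mgr = TransferManagerBuilder.standard().build();
    try {
        // Recursively downloads every object whose key starts with key_prefix into dir_path
        MultipleFileDownload xfer = xfer_mgr.downloadDirectory(bucket_name, key_prefix, new File(dir_path));
        // Block until the whole transfer finishes (or poll xfer.isDone() instead)
        xfer.waitForCompletion();
    } catch (AmazonServiceException | InterruptedException e) {
        System.err.println(e.getMessage());
    }
    xfer_mgr.shutdownNow();
}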
use of com.amazonaws.services.s3.transfer.Download in project gradle by gradle.
the class S3Client method doGetS3Object.
private S3Object doGetS3Object(URI uri, boolean isLightWeight) {
    S3RegionalResource s3RegionalResource = new S3RegionalResource(uri);
    String bucketName = s3RegionalResource.getBucketName();
    String s3BucketKey = s3RegionalResource.getKey();
    configureClient(s3RegionalResource);

    GetObjectRequest getObjectRequest = new GetObjectRequest(bucketName, s3BucketKey);
    if (isLightWeight) {
        // Skip content download
        getObjectRequest.setRange(0, 0);
    }

    try {
        return amazonS3Client.getObject(getObjectRequest);
    } catch (AmazonServiceException e) {
        String errorCode = e.getErrorCode();
        if (null != errorCode && errorCode.equalsIgnoreCase("NoSuchKey")) {
            return null;
        }
        throw ResourceExceptions.getFailed(uri, e);
    }
}
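The isLightWeight branch turns the GET into a one-byte ranged request, so only the response headers (and therefore the object metadata) matter. A minimal caller-side sketch of using it as an existence/metadata probe; the objectExists helper is hypothetical and not part of Gradle's S3Client.

// Hypothetical helper built on doGetS3Object(uri, true).
private boolean objectExists(URI uri) throws IOException {
    S3Object probe = doGetS3Object(uri, true);
    if (probe == null) {
        return false;                  // NoSuchKey was mapped to null above
    }
    try {
        // With a ranged GET, getInstanceLength() reports the full object size
        // parsed from the Content-Range header, not the 1-byte response length.
        long fullSize = probe.getObjectMetadata().getInstanceLength();
        return fullSize >= 0;
    } finally {
        probe.close();                 // releases the pooled HTTP connection
    }
}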
use of com.amazonaws.services.s3.transfer.Download in project ats-framework by Axway.
the class S3Operations method download.
/**
* Download an object data as a file
*
* @param remoteObjectName the name of object/key which contents should be downloaded
* @param localFileName the location and file name on the local machine, where the file will be downloaded
* @throws S3OperationException if there is an error during data transfer
*/
@PublicAtsApi
public void download(String remoteObjectName, String localFileName) throws S3OperationException, IllegalArgumentException {
    localFileName = IoUtils.normalizeFilePath(localFileName);
    String localDirName = IoUtils.getFilePath(localFileName);
    String localFileOnlyName = IoUtils.getFileName(localFileName);

    File localDir = new File(localDirName);
    if (localDir.exists()) {
        if (localDir.isFile()) {
            throw new IllegalArgumentException("Could not create file " + localFileOnlyName + " into existing file " + localDirName);
        }
        // else dir exists
    } else {
        LOG.debug("Creating target directory path " + localDirName);
        if (!localDir.mkdirs()) {
            throw new S3OperationException("Could not create local directory path '" + localDirName + "' for local file specified '" + localFileName + "'");
        }
    }

    S3Object obj = s3Client.getObject(bucketName, remoteObjectName);
    try (BufferedOutputStream bos = new BufferedOutputStream(new FileOutputStream(new File(localFileName)));
         S3ObjectInputStream s3is = obj.getObjectContent()) {
        byte[] readBuffArr = new byte[4096];
        int readBytes = 0;
        while ((readBytes = s3is.read(readBuffArr)) >= 0) {
            bos.write(readBuffArr, 0, readBytes);
        }
    } catch (Exception e) {
        handleExeption(e, "Error while downloading object " + remoteObjectName + " to local file " + localFileName + ". If error persists check your endpoint, credentials and permissions.");
    }
LOG.info("S3 object '" + remoteObjectName + "; is downloaded successfully from bucket '" + bucketName + "' to file " + localFileName);
}
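Since this page is about com.amazonaws.services.s3.transfer.Download, note that the manual stream copy above could also be expressed with the high-level TransferManager, which writes straight to the target file. A hedged sketch reusing the same s3Client, bucketName and file names; this is not code from ats-framework.

// Sketch (assumption): the same transfer via TransferManager / Download.
TransferManager tm = TransferManagerBuilder.standard().withS3Client(s3Client).build();
try {
    Download download = tm.download(bucketName, remoteObjectName, new File(localFileName));
    download.waitForCompletion();       // blocks until the file is fully written
} catch (InterruptedException e) {
    Thread.currentThread().interrupt();
} finally {
    tm.shutdownNow(false);              // false: keep the injected s3Client open
}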