Use of com.amazonaws.event.ProgressEvent in project hadoop by apache.
The class S3AFileSystem, method copyFile:
/**
 * Copy a single object in the bucket via a COPY operation.
 * @param srcKey source object path
 * @param dstKey destination object path
 * @param size object size
 * @throws AmazonClientException on failures inside the AWS SDK
 * @throws InterruptedIOException the operation was interrupted
 * @throws IOException other IO problems
 */
private void copyFile(String srcKey, String dstKey, long size)
        throws IOException, InterruptedIOException, AmazonClientException {
    LOG.debug("copyFile {} -> {} ", srcKey, dstKey);
    try {
        ObjectMetadata srcom = getObjectMetadata(srcKey);
        ObjectMetadata dstom = cloneObjectMetadata(srcom);
        setOptionalObjectMetadata(dstom);
        CopyObjectRequest copyObjectRequest = new CopyObjectRequest(bucket, srcKey, bucket, dstKey);
        setOptionalCopyObjectRequestParameters(copyObjectRequest);
        copyObjectRequest.setCannedAccessControlList(cannedACL);
        copyObjectRequest.setNewObjectMetadata(dstom);
        ProgressListener progressListener = new ProgressListener() {

            @Override
            public void progressChanged(ProgressEvent progressEvent) {
                switch (progressEvent.getEventType()) {
                    case TRANSFER_PART_COMPLETED_EVENT:
                        incrementWriteOperations();
                        break;
                    default:
                        break;
                }
            }
        };
        Copy copy = transfers.copy(copyObjectRequest);
        copy.addProgressListener(progressListener);
        try {
            copy.waitForCopyResult();
            incrementWriteOperations();
            instrumentation.filesCopied(1, size);
        } catch (InterruptedException e) {
            throw new InterruptedIOException("Interrupted copying " + srcKey
                    + " to " + dstKey + ", cancelling");
        }
    } catch (AmazonClientException e) {
        throw translateException("copyFile(" + srcKey + ", " + dstKey + ")", srcKey, e);
    }
}
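For comparison, here is a minimal, self-contained sketch of the same pattern, assuming the AWS SDK for Java v1 is on the classpath; the bucket and key names are placeholders, not Hadoop's. It attaches a ProgressListener to a TransferManager copy and counts TRANSFER_PART_COMPLETED_EVENT notifications, much as copyFile() does with incrementWriteOperations():

// Sketch only: placeholder bucket/keys, AWS SDK for Java v1 assumed.
import com.amazonaws.event.ProgressEvent;
import com.amazonaws.event.ProgressEventType;
import com.amazonaws.services.s3.transfer.Copy;
import com.amazonaws.services.s3.transfer.TransferManager;
import com.amazonaws.services.s3.transfer.TransferManagerBuilder;
import java.util.concurrent.atomic.AtomicLong;

public class CopyWithProgress {
    public static void main(String[] args) throws InterruptedException {
        TransferManager tm = TransferManagerBuilder.standard().build();
        AtomicLong partsCompleted = new AtomicLong();
        Copy copy = tm.copy("my-bucket", "src/key", "my-bucket", "dst/key");
        copy.addProgressListener((ProgressEvent e) -> {
            // Count each completed multipart part, as the Hadoop listener does.
            if (e.getEventType() == ProgressEventType.TRANSFER_PART_COMPLETED_EVENT) {
                partsCompleted.incrementAndGet();
            }
        });
        copy.waitForCompletion();
        System.out.println("parts completed: " + partsCompleted.get());
        tm.shutdownNow();
    }
}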
Use of com.amazonaws.event.ProgressEvent in project aws-doc-sdk-examples by awsdocs.
The class XferMgrProgress, method uploadFileWithListener:
public static void uploadFileWithListener(String file_path, String bucket_name,
        String key_prefix, boolean pause) {
    System.out.println("file: " + file_path + (pause ? " (pause)" : ""));
    String key_name = null;
    if (key_prefix != null) {
        key_name = key_prefix + '/' + file_path;
    } else {
        key_name = file_path;
    }
    // snippet-start:[s3.java1.s3_xfer_mgr_progress.progress_listener]
    File f = new File(file_path);
    TransferManager xfer_mgr = TransferManagerBuilder.standard().build();
    try {
        Upload u = xfer_mgr.upload(bucket_name, key_name, f);
        // print an empty progress bar...
        printProgressBar(0.0);
        u.addProgressListener(new ProgressListener() {
            public void progressChanged(ProgressEvent e) {
                double pct = e.getBytesTransferred() * 100.0 / e.getBytes();
                eraseProgressBar();
                printProgressBar(pct);
            }
        });
        // block with Transfer.waitForCompletion()
        XferMgrProgress.waitForCompletion(u);
        // print the final state of the transfer.
        TransferState xfer_state = u.getState();
        System.out.println(": " + xfer_state);
    } catch (AmazonServiceException e) {
        System.err.println(e.getErrorMessage());
        System.exit(1);
    }
    xfer_mgr.shutdownNow();
    // snippet-end:[s3.java1.s3_xfer_mgr_progress.progress_listener]
}
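A note on the percentage math above: getBytes() can be zero before the total transfer size is known, and getBytesTransferred() reports per-event deltas rather than a running total. A more defensive variant, sketched below under the same assumptions (SDK v1; placeholder bucket, key, and file names), reads the cumulative percentage from the transfer itself via TransferProgress instead of from individual events:

// Sketch only: placeholder names, AWS SDK for Java v1 assumed.
import com.amazonaws.event.ProgressEvent;
import com.amazonaws.services.s3.transfer.TransferManager;
import com.amazonaws.services.s3.transfer.TransferManagerBuilder;
import com.amazonaws.services.s3.transfer.Upload;
import java.io.File;

public class UploadPercent {
    public static void main(String[] args) throws InterruptedException {
        TransferManager xfer_mgr = TransferManagerBuilder.standard().build();
        Upload u = xfer_mgr.upload("my-bucket", "my-key", new File("local-file.bin"));
        // Query the transfer's own cumulative progress instead of per-event deltas.
        u.addProgressListener((ProgressEvent e) -> System.out.printf("%.1f%%%n",
                u.getProgress().getPercentTransferred()));
        u.waitForCompletion();
        xfer_mgr.shutdownNow();
    }
}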
Use of com.amazonaws.event.ProgressEvent in project photon-model by vmware.
The class AWSCostStatsService, method downloadParseAndCreateStats:
private void downloadParseAndCreateStats(AWSCostStatsCreationContext statsData,
        String awsBucketName) throws IOException {
    try {
        // Create a working directory for downloading and processing the bill.
        final Path workingDirPath = Paths.get(System.getProperty(TEMP_DIR_LOCATION),
                UUID.randomUUID().toString());
        Files.createDirectories(workingDirPath);
        AWSCsvBillParser parser = new AWSCsvBillParser();
        final String csvBillZipFileName = parser.getCsvBillFileName(
                statsData.billMonthToDownload, statsData.accountId, true);
        Path csvBillZipFilePath = Paths.get(workingDirPath.toString(), csvBillZipFileName);
        ProgressListener listener = new ProgressListener() {

            @Override
            public void progressChanged(ProgressEvent progressEvent) {
                try {
                    ProgressEventType eventType = progressEvent.getEventType();
                    if (ProgressEventType.TRANSFER_COMPLETED_EVENT.equals(eventType)) {
                        OperationContext.restoreOperationContext(statsData.opContext);
                        LocalDate billMonth = new LocalDate(
                                statsData.billMonthToDownload.getYear(),
                                statsData.billMonthToDownload.getMonthOfYear(), 1);
                        logWithContext(statsData, Level.INFO, () -> String.format(
                                "Processing bill for the month: %s.", billMonth));
                        parser.parseDetailedCsvBill(statsData.ignorableInvoiceCharge,
                                csvBillZipFilePath,
                                statsData.awsAccountIdToComputeStates.keySet(),
                                getHourlyStatsConsumer(billMonth, statsData),
                                getMonthlyStatsConsumer(billMonth, statsData));
                        deleteTempFiles();
                        // Continue with the next month's bill, up to the current month.
                        statsData.billMonthToDownload =
                                statsData.billMonthToDownload.plusMonths(1);
                        handleCostStatsCreationRequest(statsData);
                    } else if (ProgressEventType.TRANSFER_FAILED_EVENT.equals(eventType)) {
                        deleteTempFiles();
                        billDownloadFailureHandler(statsData, awsBucketName,
                                new IOException("Download of AWS CSV Bill '"
                                        + csvBillZipFileName + "' failed."));
                    }
                } catch (Exception exception) {
                    deleteTempFiles();
                    billDownloadFailureHandler(statsData, awsBucketName, exception);
                }
            }

            private void deleteTempFiles() {
                try {
                    Files.deleteIfExists(csvBillZipFilePath);
                    Files.deleteIfExists(workingDirPath);
                } catch (IOException e) {
                    // Ignore IO exceptions while cleaning up temporary files.
                }
            }
        };
        GetObjectRequest getObjectRequest = new GetObjectRequest(awsBucketName,
                csvBillZipFileName).withGeneralProgressListener(listener);
        statsData.s3Client.download(getObjectRequest, csvBillZipFilePath.toFile());
    } catch (AmazonS3Exception s3Exception) {
        billDownloadFailureHandler(statsData, awsBucketName, s3Exception);
    }
}
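The same hook-on-completion idea in isolation: a minimal sketch (AWS SDK v1 assumed; the bucket, key, and file names are placeholders, not the photon-model API) that downloads an object with TransferManager and reacts to the terminal event types, deleting the partial file on failure much like deleteTempFiles() above.

// Sketch only: placeholder names, AWS SDK for Java v1 assumed.
import com.amazonaws.event.ProgressEvent;
import com.amazonaws.event.ProgressEventType;
import com.amazonaws.event.ProgressListener;
import com.amazonaws.services.s3.model.GetObjectRequest;
import com.amazonaws.services.s3.transfer.Download;
import com.amazonaws.services.s3.transfer.TransferManager;
import com.amazonaws.services.s3.transfer.TransferManagerBuilder;
import java.io.File;

public class DownloadWithCompletionHook {
    public static void main(String[] args) throws InterruptedException {
        TransferManager tm = TransferManagerBuilder.standard().build();
        File target = new File("bill.csv.zip");
        ProgressListener listener = (ProgressEvent e) -> {
            ProgressEventType type = e.getEventType();
            if (type == ProgressEventType.TRANSFER_COMPLETED_EVENT) {
                // The transfer finished; hand the file off for processing here.
                System.out.println("downloaded, ready to parse: " + target);
            } else if (type == ProgressEventType.TRANSFER_FAILED_EVENT) {
                // Clean up the partial file on failure.
                target.delete();
            }
        };
        GetObjectRequest req = new GetObjectRequest("my-bucket", "bill.csv.zip")
                .withGeneralProgressListener(listener);
        Download download = tm.download(req, target);
        download.waitForCompletion();
        tm.shutdownNow();
    }
}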
Use of com.amazonaws.event.ProgressEvent in project spring-integration-aws by spring-projects.
The class S3MessageHandler, method upload:
private Transfer upload(Message<?> requestMessage) {
    Object payload = requestMessage.getPayload();
    String bucketName = obtainBucket(requestMessage);
    String key = null;
    if (this.keyExpression != null) {
        key = this.keyExpression.getValue(this.evaluationContext, requestMessage, String.class);
    }
    if (payload instanceof File && ((File) payload).isDirectory()) {
        File fileToUpload = (File) payload;
        if (key == null) {
            key = fileToUpload.getName();
        }
        return this.transferManager.uploadDirectory(bucketName, key, fileToUpload, true,
                new MessageHeadersObjectMetadataProvider(requestMessage.getHeaders()));
    } else {
        ObjectMetadata metadata = new ObjectMetadata();
        if (this.uploadMetadataProvider != null) {
            this.uploadMetadataProvider.populateMetadata(metadata, requestMessage);
        }
        PutObjectRequest putObjectRequest = null;
        try {
            if (payload instanceof InputStream) {
                InputStream inputStream = (InputStream) payload;
                if (metadata.getContentMD5() == null) {
                    Assert.state(inputStream.markSupported(),
                            "For an upload InputStream with no MD5 digest metadata, "
                                    + "the markSupported() method must evaluate to true.");
                    String contentMd5 = Md5Utils.md5AsBase64(inputStream);
                    metadata.setContentMD5(contentMd5);
                    inputStream.reset();
                }
                putObjectRequest = new PutObjectRequest(bucketName, key, inputStream, metadata);
            } else if (payload instanceof File) {
                File fileToUpload = (File) payload;
                if (key == null) {
                    key = fileToUpload.getName();
                }
                if (metadata.getContentMD5() == null) {
                    String contentMd5 = Md5Utils.md5AsBase64(fileToUpload);
                    metadata.setContentMD5(contentMd5);
                }
                if (metadata.getContentLength() == 0) {
                    metadata.setContentLength(fileToUpload.length());
                }
                if (metadata.getContentType() == null) {
                    metadata.setContentType(Mimetypes.getInstance().getMimetype(fileToUpload));
                }
                putObjectRequest = new PutObjectRequest(bucketName, key, fileToUpload)
                        .withMetadata(metadata);
            } else if (payload instanceof byte[]) {
                byte[] payloadBytes = (byte[]) payload;
                InputStream inputStream = new ByteArrayInputStream(payloadBytes);
                if (metadata.getContentMD5() == null) {
                    String contentMd5 = Md5Utils.md5AsBase64(inputStream);
                    metadata.setContentMD5(contentMd5);
                    inputStream.reset();
                }
                if (metadata.getContentLength() == 0) {
                    metadata.setContentLength(payloadBytes.length);
                }
                putObjectRequest = new PutObjectRequest(bucketName, key, inputStream, metadata);
            } else {
                throw new IllegalArgumentException("Unsupported payload type: ["
                        + payload.getClass() + "]. The only supported payloads for "
                        + "the upload request are java.io.File, java.io.InputStream, "
                        + "byte[] and PutObjectRequest.");
            }
        } catch (IOException e) {
            throw new MessageHandlingException(requestMessage, e);
        }
        if (key == null) {
            if (this.keyExpression != null) {
                throw new IllegalStateException("The 'keyExpression' ["
                        + this.keyExpression.getExpressionString()
                        + "] must not evaluate to null. Root object is: " + requestMessage);
            } else {
                throw new IllegalStateException("Specify a 'keyExpression' for non-java.io.File payloads");
            }
        }
        S3ProgressListener progressListener = this.s3ProgressListener;
        if (this.objectAclExpression != null) {
            Object acl = this.objectAclExpression.getValue(this.evaluationContext, requestMessage);
            Assert.state(acl instanceof AccessControlList || acl instanceof CannedAccessControlList,
                    "The 'objectAclExpression' [" + this.objectAclExpression.getExpressionString()
                            + "] must evaluate to com.amazonaws.services.s3.model.AccessControlList "
                            + "or to com.amazonaws.services.s3.model.CannedAccessControlList. "
                            + "Gotten: [" + acl + "]");
            SetObjectAclRequest aclRequest;
            if (acl instanceof AccessControlList) {
                aclRequest = new SetObjectAclRequest(bucketName, key, (AccessControlList) acl);
            } else {
                aclRequest = new SetObjectAclRequest(bucketName, key, (CannedAccessControlList) acl);
            }
            final SetObjectAclRequest theAclRequest = aclRequest;
            progressListener = new S3ProgressListener() {

                @Override
                public void onPersistableTransfer(PersistableTransfer persistableTransfer) {
                }

                @Override
                public void progressChanged(ProgressEvent progressEvent) {
                    if (ProgressEventType.TRANSFER_COMPLETED_EVENT.equals(progressEvent.getEventType())) {
                        S3MessageHandler.this.transferManager.getAmazonS3Client().setObjectAcl(theAclRequest);
                    }
                }
            };
            if (this.s3ProgressListener != null) {
                progressListener = new S3ProgressListenerChain(this.s3ProgressListener, progressListener);
            }
        }
        if (progressListener != null) {
            return this.transferManager.upload(putObjectRequest, progressListener);
        } else {
            return this.transferManager.upload(putObjectRequest);
        }
    }
}
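The interesting part above is the completion hook: an S3ProgressListener that fires a follow-up call once TRANSFER_COMPLETED_EVENT arrives, optionally chained behind a user-provided listener via S3ProgressListenerChain. A minimal standalone sketch of that pattern follows (AWS SDK v1 assumed; the bucket, key, file name, and PublicRead ACL are placeholder choices, not the handler's configuration):

// Sketch only: placeholder names, AWS SDK for Java v1 assumed.
import com.amazonaws.event.ProgressEvent;
import com.amazonaws.event.ProgressEventType;
import com.amazonaws.services.s3.model.CannedAccessControlList;
import com.amazonaws.services.s3.model.PutObjectRequest;
import com.amazonaws.services.s3.model.SetObjectAclRequest;
import com.amazonaws.services.s3.transfer.PersistableTransfer;
import com.amazonaws.services.s3.transfer.TransferManager;
import com.amazonaws.services.s3.transfer.TransferManagerBuilder;
import com.amazonaws.services.s3.transfer.Upload;
import com.amazonaws.services.s3.transfer.internal.S3ProgressListener;
import java.io.File;

public class UploadThenSetAcl {
    public static void main(String[] args) throws InterruptedException {
        TransferManager tm = TransferManagerBuilder.standard().build();
        PutObjectRequest put = new PutObjectRequest("my-bucket", "my-key", new File("payload.bin"));
        S3ProgressListener setAclOnCompletion = new S3ProgressListener() {
            @Override
            public void onPersistableTransfer(PersistableTransfer persistableTransfer) {
                // Pause/resume support is not needed for this sketch.
            }
            @Override
            public void progressChanged(ProgressEvent e) {
                // Apply the ACL only after the upload has fully completed.
                if (e.getEventType() == ProgressEventType.TRANSFER_COMPLETED_EVENT) {
                    tm.getAmazonS3Client().setObjectAcl(
                            new SetObjectAclRequest("my-bucket", "my-key", CannedAccessControlList.PublicRead));
                }
            }
        };
        Upload upload = tm.upload(put, setAclOnCompletion);
        upload.waitForCompletion();
        tm.shutdownNow();
    }
}

Setting the ACL from the listener rather than after waitForCompletion() keeps the behavior correct even when the caller never blocks on the returned Transfer, which is why S3MessageHandler takes this route.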
Use of com.amazonaws.event.ProgressEvent in project cloudstack by apache.
The class S3TemplateDownloader, method download:
@Override
public long download(boolean resume, DownloadCompleteCallback callback) {
    if (!status.equals(Status.NOT_STARTED)) {
        // Only start downloading if we haven't started yet.
        LOGGER.debug("Template download is already started, not starting again. Template: " + downloadUrl);
        return 0;
    }
    int responseCode;
    if ((responseCode = HTTPUtils.executeMethod(httpClient, getMethod)) == -1) {
        errorString = "Exception while executing HttpMethod " + getMethod.getName() + " on URL " + downloadUrl;
        LOGGER.warn(errorString);
        status = Status.UNRECOVERABLE_ERROR;
        return 0;
    }
    if (!HTTPUtils.verifyResponseCode(responseCode)) {
        errorString = "Response code for GetMethod of " + downloadUrl + " is incorrect, responseCode: " + responseCode;
        LOGGER.warn(errorString);
        status = Status.UNRECOVERABLE_ERROR;
        return 0;
    }
    // Headers
    Header contentLengthHeader = getMethod.getResponseHeader("Content-Length");
    Header contentTypeHeader = getMethod.getResponseHeader("Content-Type");
    // Check the contentLengthHeader; it is required to size the S3 upload.
    if (contentLengthHeader == null) {
        errorString = "The ContentLengthHeader of " + downloadUrl + " isn't supplied";
        LOGGER.warn(errorString);
        status = Status.UNRECOVERABLE_ERROR;
        return 0;
    } else {
        // The ContentLengthHeader is supplied, parse its value.
        remoteSize = Long.parseLong(contentLengthHeader.getValue());
    }
    if (remoteSize > maxTemplateSizeInByte) {
        errorString = "Remote size is too large for template " + downloadUrl
                + " remote size is " + remoteSize + " max allowed is " + maxTemplateSizeInByte;
        LOGGER.warn(errorString);
        status = Status.UNRECOVERABLE_ERROR;
        return 0;
    }
    InputStream inputStream;
    try {
        inputStream = new BufferedInputStream(getMethod.getResponseBodyAsStream());
    } catch (IOException e) {
        errorString = "Exception occurred while opening InputStream for template " + downloadUrl;
        LOGGER.warn(errorString);
        status = Status.UNRECOVERABLE_ERROR;
        return 0;
    }
    LOGGER.info("Starting download from " + downloadUrl + " to S3 bucket "
            + s3TO.getBucketName() + " and size " + toHumanReadableSize(remoteSize) + " bytes");
    // Time the upload starts.
    final Date start = new Date();
    ObjectMetadata objectMetadata = new ObjectMetadata();
    objectMetadata.setContentLength(remoteSize);
    // Content-Type is optional, so guard against a missing header as well as a missing value.
    if (contentTypeHeader != null && contentTypeHeader.getValue() != null) {
        objectMetadata.setContentType(contentTypeHeader.getValue());
    }
    // Create the PutObjectRequest.
    PutObjectRequest putObjectRequest = new PutObjectRequest(s3TO.getBucketName(), s3Key, inputStream, objectMetadata);
    // If reduced redundancy is enabled, set it.
    if (s3TO.isEnableRRS()) {
        putObjectRequest.withStorageClass(StorageClass.ReducedRedundancy);
    }
    Upload upload = S3Utils.putObject(s3TO, putObjectRequest);
    upload.addProgressListener(new ProgressListener() {

        @Override
        public void progressChanged(ProgressEvent progressEvent) {
            // Record the number of bytes transferred so far.
            totalBytes += progressEvent.getBytesTransferred();
            LOGGER.trace("Template download from " + downloadUrl + " to S3 bucket "
                    + s3TO.getBucketName() + " transferred " + toHumanReadableSize(totalBytes)
                    + " in " + ((new Date().getTime() - start.getTime()) / 1000) + " seconds");
            if (progressEvent.getEventType() == ProgressEventType.TRANSFER_STARTED_EVENT) {
                status = Status.IN_PROGRESS;
            } else if (progressEvent.getEventType() == ProgressEventType.TRANSFER_COMPLETED_EVENT) {
                status = Status.DOWNLOAD_FINISHED;
            } else if (progressEvent.getEventType() == ProgressEventType.TRANSFER_CANCELED_EVENT) {
                status = Status.ABORTED;
            } else if (progressEvent.getEventType() == ProgressEventType.TRANSFER_FAILED_EVENT) {
                status = Status.UNRECOVERABLE_ERROR;
            }
        }
    });
    try {
        // Wait for the upload to complete.
        upload.waitForCompletion();
    } catch (InterruptedException e) {
        // Interrupted while waiting for the upload to complete.
        LOGGER.warn("Interruption occurred while waiting for upload of " + downloadUrl + " to complete");
    }
    downloadTime = new Date().getTime() - start.getTime();
    if (status == Status.DOWNLOAD_FINISHED) {
        LOGGER.info("Template download from " + downloadUrl + " to S3 bucket " + s3TO.getBucketName()
                + " transferred " + toHumanReadableSize(totalBytes) + " in " + (downloadTime / 1000)
                + " seconds, completed successfully!");
    } else {
        LOGGER.warn("Template download from " + downloadUrl + " to S3 bucket " + s3TO.getBucketName()
                + " transferred " + toHumanReadableSize(totalBytes) + " in " + (downloadTime / 1000)
                + " seconds, completed with status " + status.toString());
    }
    // Close the input stream by releasing the HTTP connection.
    getMethod.releaseConnection();
    // Call the callback!
    if (callback != null) {
        callback.downloadComplete(status);
    }
    return totalBytes;
}
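The listener above folds the transfer lifecycle into a status field and a byte counter. Extracted into a reusable class, the same mapping might look like the sketch below (AWS SDK v1 assumed; the Status names are local placeholders, not cloudstack's enum):

// Sketch only: AWS SDK for Java v1 assumed; Status is a local placeholder enum.
import com.amazonaws.event.ProgressEvent;
import com.amazonaws.event.ProgressListener;
import java.util.concurrent.atomic.AtomicLong;

public class TransferStatusTracker implements ProgressListener {
    public enum Status { NOT_STARTED, IN_PROGRESS, FINISHED, ABORTED, FAILED }

    private volatile Status status = Status.NOT_STARTED;
    private final AtomicLong totalBytes = new AtomicLong();

    @Override
    public void progressChanged(ProgressEvent event) {
        // getBytesTransferred() reports the bytes carried by this event, so accumulate.
        totalBytes.addAndGet(event.getBytesTransferred());
        switch (event.getEventType()) {
            case TRANSFER_STARTED_EVENT:   status = Status.IN_PROGRESS; break;
            case TRANSFER_COMPLETED_EVENT: status = Status.FINISHED;    break;
            case TRANSFER_CANCELED_EVENT:  status = Status.ABORTED;     break;
            case TRANSFER_FAILED_EVENT:    status = Status.FAILED;      break;
            default: break;
        }
    }

    public Status getStatus() { return status; }
    public long getTotalBytes() { return totalBytes.get(); }
}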