Use of com.amazonaws.event.ProgressListener in project hadoop by apache.
The class S3AFileSystem, method copyFile:
/**
* Copy a single object in the bucket via a COPY operation.
* @param srcKey source object path
* @param dstKey destination object path
* @param size object size
* @throws AmazonClientException on failures inside the AWS SDK
* @throws InterruptedIOException the operation was interrupted
* @throws IOException Other IO problems
*/
private void copyFile(String srcKey, String dstKey, long size)
        throws IOException, InterruptedIOException, AmazonClientException {
    LOG.debug("copyFile {} -> {} ", srcKey, dstKey);
    try {
        ObjectMetadata srcom = getObjectMetadata(srcKey);
        ObjectMetadata dstom = cloneObjectMetadata(srcom);
        setOptionalObjectMetadata(dstom);
        CopyObjectRequest copyObjectRequest =
                new CopyObjectRequest(bucket, srcKey, bucket, dstKey);
        setOptionalCopyObjectRequestParameters(copyObjectRequest);
        copyObjectRequest.setCannedAccessControlList(cannedACL);
        copyObjectRequest.setNewObjectMetadata(dstom);
        ProgressListener progressListener = new ProgressListener() {
            public void progressChanged(ProgressEvent progressEvent) {
                switch (progressEvent.getEventType()) {
                    case TRANSFER_PART_COMPLETED_EVENT:
                        incrementWriteOperations();
                        break;
                    default:
                        break;
                }
            }
        };
        Copy copy = transfers.copy(copyObjectRequest);
        copy.addProgressListener(progressListener);
        try {
            copy.waitForCopyResult();
            incrementWriteOperations();
            instrumentation.filesCopied(1, size);
        } catch (InterruptedException e) {
            throw new InterruptedIOException("Interrupted copying " + srcKey
                    + " to " + dstKey + ", cancelling");
        }
    } catch (AmazonClientException e) {
        throw translateException("copyFile(" + srcKey + ", " + dstKey + ")",
                srcKey, e);
    }
}
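The listener above reacts only to TRANSFER_PART_COMPLETED_EVENT, using it to bump Hadoop's write-operation counter once per completed part of the multipart copy. A minimal standalone sketch of the same pattern, for callers without Hadoop's instrumentation, might look like the following (the class name and counter are assumptions, not part of the Hadoop code):

import java.util.concurrent.atomic.AtomicInteger;

import com.amazonaws.event.ProgressEvent;
import com.amazonaws.event.ProgressListener;

// Hypothetical helper: counts completed parts of a multipart transfer.
public class PartCountingListener implements ProgressListener {

    private final AtomicInteger completedParts = new AtomicInteger();

    @Override
    public void progressChanged(ProgressEvent progressEvent) {
        switch (progressEvent.getEventType()) {
            case TRANSFER_PART_COMPLETED_EVENT:
                // One part of the multipart copy/upload finished.
                completedParts.incrementAndGet();
                break;
            default:
                break;
        }
    }

    public int getCompletedParts() {
        return completedParts.get();
    }
}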
Use of com.amazonaws.event.ProgressListener in project aws-doc-sdk-examples by awsdocs.
The class XferMgrProgress, method uploadFileWithListener:
public static void uploadFileWithListener(String file_path, String bucket_name,
        String key_prefix, boolean pause) {
    System.out.println("file: " + file_path + (pause ? " (pause)" : ""));
    String key_name = null;
    if (key_prefix != null) {
        key_name = key_prefix + '/' + file_path;
    } else {
        key_name = file_path;
    }
    // snippet-start:[s3.java1.s3_xfer_mgr_progress.progress_listener]
    File f = new File(file_path);
    TransferManager xfer_mgr = TransferManagerBuilder.standard().build();
    try {
        Upload u = xfer_mgr.upload(bucket_name, key_name, f);
        // print an empty progress bar...
        printProgressBar(0.0);
        u.addProgressListener(new ProgressListener() {
            public void progressChanged(ProgressEvent e) {
                double pct = e.getBytesTransferred() * 100.0 / e.getBytes();
                eraseProgressBar();
                printProgressBar(pct);
            }
        });
        // block with Transfer.waitForCompletion()
        XferMgrProgress.waitForCompletion(u);
        // print the final state of the transfer.
        TransferState xfer_state = u.getState();
        System.out.println(": " + xfer_state);
    } catch (AmazonServiceException e) {
        System.err.println(e.getErrorMessage());
        System.exit(1);
    }
    xfer_mgr.shutdownNow();
    // snippet-end:[s3.java1.s3_xfer_mgr_progress.progress_listener]
}
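The percentage above is derived from a single ProgressEvent, so it depends on whatever byte counts that event carries. An alternative sketch of the same listener registration, reusing the names from the example above (xfer_mgr, bucket_name, key_name, f, printProgressBar, eraseProgressBar), reads the cumulative percentage from the Upload's TransferProgress instead:

Upload u = xfer_mgr.upload(bucket_name, key_name, f);
printProgressBar(0.0);
u.addProgressListener((ProgressEvent e) -> {
    // TransferProgress tracks bytes across the whole transfer, not per event.
    double pct = u.getProgress().getPercentTransferred();
    eraseProgressBar();
    printProgressBar(pct);
});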
Use of com.amazonaws.event.ProgressListener in project photon-model by vmware.
The class AWSCostStatsService, method downloadParseAndCreateStats:
private void downloadParseAndCreateStats(AWSCostStatsCreationContext statsData,
        String awsBucketName) throws IOException {
    try {
        // Creating a working directory for downloading and processing the bill
        final Path workingDirPath = Paths.get(System.getProperty(TEMP_DIR_LOCATION),
                UUID.randomUUID().toString());
        Files.createDirectories(workingDirPath);
        AWSCsvBillParser parser = new AWSCsvBillParser();
        final String csvBillZipFileName = parser.getCsvBillFileName(
                statsData.billMonthToDownload, statsData.accountId, true);
        Path csvBillZipFilePath = Paths.get(workingDirPath.toString(), csvBillZipFileName);
        ProgressListener listener = new ProgressListener() {

            @Override
            public void progressChanged(ProgressEvent progressEvent) {
                try {
                    ProgressEventType eventType = progressEvent.getEventType();
                    if (ProgressEventType.TRANSFER_COMPLETED_EVENT.equals(eventType)) {
                        OperationContext.restoreOperationContext(statsData.opContext);
                        LocalDate billMonth = new LocalDate(
                                statsData.billMonthToDownload.getYear(),
                                statsData.billMonthToDownload.getMonthOfYear(), 1);
                        logWithContext(statsData, Level.INFO, () -> String
                                .format("Processing bill for the month: %s.", billMonth));
                        parser.parseDetailedCsvBill(statsData.ignorableInvoiceCharge,
                                csvBillZipFilePath,
                                statsData.awsAccountIdToComputeStates.keySet(),
                                getHourlyStatsConsumer(billMonth, statsData),
                                getMonthlyStatsConsumer(billMonth, statsData));
                        deleteTempFiles();
                        // Continue downloading and processing the bills for past
                        // and current months' bills
                        statsData.billMonthToDownload =
                                statsData.billMonthToDownload.plusMonths(1);
                        handleCostStatsCreationRequest(statsData);
                    } else if (ProgressEventType.TRANSFER_FAILED_EVENT.equals(eventType)) {
                        deleteTempFiles();
                        billDownloadFailureHandler(statsData, awsBucketName,
                                new IOException("Download of AWS CSV Bill '"
                                        + csvBillZipFileName + "' failed."));
                    }
                } catch (Exception exception) {
                    deleteTempFiles();
                    billDownloadFailureHandler(statsData, awsBucketName, exception);
                }
            }

            private void deleteTempFiles() {
                try {
                    Files.deleteIfExists(csvBillZipFilePath);
                    Files.deleteIfExists(workingDirPath);
                } catch (IOException e) {
                    // Ignore IO exception while cleaning files.
                }
            }
        };
        GetObjectRequest getObjectRequest = new GetObjectRequest(awsBucketName,
                csvBillZipFileName).withGeneralProgressListener(listener);
        statsData.s3Client.download(getObjectRequest, csvBillZipFilePath.toFile());
    } catch (AmazonS3Exception s3Exception) {
        billDownloadFailureHandler(statsData, awsBucketName, s3Exception);
    }
}
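Because the download call returns immediately, all parsing and cleanup above is driven from the listener's TRANSFER_COMPLETED_EVENT branch. For callers that can afford to block, a minimal synchronous sketch looks like the following; transferManager, the bucket, key, and target file are assumptions, and listener is a ProgressListener like the one above:

// Hedged sketch: blocking download with a general ProgressListener attached.
GetObjectRequest request = new GetObjectRequest("example-bucket", "bill.csv.zip")
        .withGeneralProgressListener(listener);
Download download = transferManager.download(request, new File("/tmp/bill.csv.zip"));
try {
    download.waitForCompletion();   // blocks until the transfer finishes or fails
} catch (InterruptedException e) {
    Thread.currentThread().interrupt();
}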
Use of com.amazonaws.event.ProgressListener in project tutorials by eugenp.
The class MultipartUpload, method main:
public static void main(String[] args) throws Exception {
    String existingBucketName = "baeldung-bucket";
    String keyName = "my-picture.jpg";
    String filePath = "documents/my-picture.jpg";
    AmazonS3 amazonS3 = AmazonS3ClientBuilder.standard()
            .withCredentials(new DefaultAWSCredentialsProviderChain())
            .withRegion(Regions.DEFAULT_REGION)
            .build();
    int maxUploadThreads = 5;
    TransferManager tm = TransferManagerBuilder.standard()
            .withS3Client(amazonS3)
            .withMultipartUploadThreshold((long) (5 * 1024 * 1024))
            .withExecutorFactory(() -> Executors.newFixedThreadPool(maxUploadThreads))
            .build();
    ProgressListener progressListener = progressEvent ->
            System.out.println("Transferred bytes: " + progressEvent.getBytesTransferred());
    PutObjectRequest request =
            new PutObjectRequest(existingBucketName, keyName, new File(filePath));
    request.setGeneralProgressListener(progressListener);
    Upload upload = tm.upload(request);
    try {
        upload.waitForCompletion();
        System.out.println("Upload complete.");
    } catch (AmazonClientException e) {
        System.out.println("Error occurred while uploading file");
        e.printStackTrace();
    }
}
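ProgressListener declares a single progressChanged method, which is why the lambda above compiles. A running total has to be kept by the caller; a hedged sketch of that, reusing the request variable from the example above and assuming each ProgressEvent reports only the bytes transferred since the previous event, could be:

// Requires java.util.concurrent.atomic.AtomicLong.
AtomicLong totalBytes = new AtomicLong();
ProgressListener progressListener = progressEvent -> System.out.println(
        "Transferred so far: " + totalBytes.addAndGet(progressEvent.getBytesTransferred()));
request.setGeneralProgressListener(progressListener);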
Use of com.amazonaws.event.ProgressListener in project spring-integration-aws by spring-projects.
The class S3MessageHandlerTests, method testUploadFile:
@Test
public void testUploadFile() throws IOException, InterruptedException {
    File file = this.temporaryFolder.newFile("foo.mp3");
    Message<?> message = MessageBuilder.withPayload(file)
            .setHeader("s3Command", S3MessageHandler.Command.UPLOAD.name())
            .build();
    this.s3SendChannel.send(message);
    ArgumentCaptor<PutObjectRequest> putObjectRequestArgumentCaptor =
            ArgumentCaptor.forClass(PutObjectRequest.class);
    verify(this.amazonS3, atLeastOnce()).putObject(putObjectRequestArgumentCaptor.capture());
    PutObjectRequest putObjectRequest = putObjectRequestArgumentCaptor.getValue();
    assertThat(putObjectRequest.getBucketName()).isEqualTo(S3_BUCKET_NAME);
    assertThat(putObjectRequest.getKey()).isEqualTo("foo.mp3");
    assertThat(putObjectRequest.getFile()).isNotNull();
    assertThat(putObjectRequest.getInputStream()).isNull();
    ObjectMetadata metadata = putObjectRequest.getMetadata();
    assertThat(metadata.getContentMD5()).isEqualTo(Md5Utils.md5AsBase64(file));
    assertThat(metadata.getContentLength()).isEqualTo(0);
    assertThat(metadata.getContentType()).isEqualTo("audio/mpeg");
    ProgressListener listener = putObjectRequest.getGeneralProgressListener();
    S3ProgressPublisher.publishProgress(listener, ProgressEventType.TRANSFER_COMPLETED_EVENT);
    assertThat(this.transferCompletedLatch.await(10, TimeUnit.SECONDS)).isTrue();
    assertThat(this.aclLatch.await(10, TimeUnit.SECONDS)).isTrue();
    ArgumentCaptor<SetObjectAclRequest> setObjectAclRequestArgumentCaptor =
            ArgumentCaptor.forClass(SetObjectAclRequest.class);
    verify(this.amazonS3).setObjectAcl(setObjectAclRequestArgumentCaptor.capture());
    SetObjectAclRequest setObjectAclRequest = setObjectAclRequestArgumentCaptor.getValue();
    assertThat(setObjectAclRequest.getBucketName()).isEqualTo(S3_BUCKET_NAME);
    assertThat(setObjectAclRequest.getKey()).isEqualTo("foo.mp3");
    assertThat(setObjectAclRequest.getAcl()).isNull();
    assertThat(setObjectAclRequest.getCannedAcl()).isEqualTo(CannedAccessControlList.PublicReadWrite);
}
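The test captures the listener that the handler registered on the PutObjectRequest and fires a completion event at it through the SDK's S3ProgressPublisher. Where that internal publisher is not convenient, a similar effect can be approximated by invoking the captured listener directly; a sketch, assuming listener is the ProgressListener obtained above:

// Sketch: drive the captured listener with a synthetic completion event.
listener.progressChanged(new ProgressEvent(ProgressEventType.TRANSFER_COMPLETED_EVENT));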