Search in sources :

Example 1 with ProgressListener

use of com.amazonaws.event.ProgressListener in project hadoop by apache.

the class S3AFileSystem method copyFile.

/**
   * Copy a single object in the bucket via a COPY operation.
   * @param srcKey source object path
   * @param dstKey destination object path
   * @param size object size
   * @throws AmazonClientException on failures inside the AWS SDK
   * @throws InterruptedIOException the operation was interrupted
   * @throws IOException Other IO problems
   */
private void copyFile(String srcKey, String dstKey, long size) throws IOException, InterruptedIOException, AmazonClientException {
    LOG.debug("copyFile {} -> {} ", srcKey, dstKey);
    try {
        ObjectMetadata srcom = getObjectMetadata(srcKey);
        ObjectMetadata dstom = cloneObjectMetadata(srcom);
        setOptionalObjectMetadata(dstom);
        CopyObjectRequest copyObjectRequest = new CopyObjectRequest(bucket, srcKey, bucket, dstKey);
        setOptionalCopyObjectRequestParameters(copyObjectRequest);
        copyObjectRequest.setCannedAccessControlList(cannedACL);
        copyObjectRequest.setNewObjectMetadata(dstom);
        ProgressListener progressListener = new ProgressListener() {

            public void progressChanged(ProgressEvent progressEvent) {
                switch(progressEvent.getEventType()) {
                    case TRANSFER_PART_COMPLETED_EVENT:
                        incrementWriteOperations();
                        break;
                    default:
                        break;
                }
            }
        };
        Copy copy = transfers.copy(copyObjectRequest);
        copy.addProgressListener(progressListener);
        try {
            copy.waitForCopyResult();
            incrementWriteOperations();
            instrumentation.filesCopied(1, size);
        } catch (InterruptedException e) {
            throw new InterruptedIOException("Interrupted copying " + srcKey + " to " + dstKey + ", cancelling");
        }
    } catch (AmazonClientException e) {
        throw translateException("copyFile(" + srcKey + ", " + dstKey + ")", srcKey, e);
    }
}
Also used : InterruptedIOException(java.io.InterruptedIOException) CopyObjectRequest(com.amazonaws.services.s3.model.CopyObjectRequest) ProgressListener(com.amazonaws.event.ProgressListener) Copy(com.amazonaws.services.s3.transfer.Copy) AmazonClientException(com.amazonaws.AmazonClientException) ProgressEvent(com.amazonaws.event.ProgressEvent) ObjectMetadata(com.amazonaws.services.s3.model.ObjectMetadata)
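
For context outside Hadoop, here is a minimal, self-contained sketch of the same pattern: registering a com.amazonaws.event.ProgressListener on a TransferManager copy and reacting to TRANSFER_PART_COMPLETED_EVENT. The bucket and key names are hypothetical, and the S3 client relies on the default credential chain.

import com.amazonaws.event.ProgressEvent;
import com.amazonaws.event.ProgressListener;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;
import com.amazonaws.services.s3.model.CopyObjectRequest;
import com.amazonaws.services.s3.transfer.Copy;
import com.amazonaws.services.s3.transfer.TransferManager;
import com.amazonaws.services.s3.transfer.TransferManagerBuilder;

public class CopyWithProgressListener {

    public static void main(String[] args) throws InterruptedException {
        // Hypothetical bucket and keys.
        String bucket = "my-bucket";
        String srcKey = "source/object";
        String dstKey = "destination/object";

        AmazonS3 s3 = AmazonS3ClientBuilder.defaultClient();
        TransferManager transfers = TransferManagerBuilder.standard().withS3Client(s3).build();

        Copy copy = transfers.copy(new CopyObjectRequest(bucket, srcKey, bucket, dstKey));

        // Same idea as the Hadoop listener above: react to each completed part.
        copy.addProgressListener(new ProgressListener() {
            @Override
            public void progressChanged(ProgressEvent progressEvent) {
                switch (progressEvent.getEventType()) {
                    case TRANSFER_PART_COMPLETED_EVENT:
                        System.out.println("part completed");
                        break;
                    default:
                        break;
                }
            }
        });

        copy.waitForCompletion();      // blocks until the copy succeeds or throws
        transfers.shutdownNow(false);  // keep the shared S3 client open
    }
}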

Example 2 with ProgressListener

use of com.amazonaws.event.ProgressListener in project hadoop by apache.

the class S3ABlockOutputStream method putObject.

/**
   * Upload the current block as a single PUT request; if the buffer
   * is empty a 0-byte PUT will be invoked, as it is needed to create an
   * entry at the far end.
   * @throws IOException any problem.
   */
private void putObject() throws IOException {
    LOG.debug("Executing regular upload for {}", writeOperationHelper);
    final S3ADataBlocks.DataBlock block = getActiveBlock();
    int size = block.dataSize();
    final S3ADataBlocks.BlockUploadData uploadData = block.startUpload();
    final PutObjectRequest putObjectRequest = uploadData.hasFile() ? writeOperationHelper.newPutRequest(uploadData.getFile()) : writeOperationHelper.newPutRequest(uploadData.getUploadStream(), size);
    fs.setOptionalPutRequestParameters(putObjectRequest);
    long transferQueueTime = now();
    BlockUploadProgress callback = new BlockUploadProgress(block, progressListener, transferQueueTime);
    putObjectRequest.setGeneralProgressListener(callback);
    statistics.blockUploadQueued(size);
    ListenableFuture<PutObjectResult> putObjectResult = executorService.submit(new Callable<PutObjectResult>() {

        @Override
        public PutObjectResult call() throws Exception {
            PutObjectResult result;
            try {
                // the putObject call automatically closes the input
                // stream afterwards.
                result = writeOperationHelper.putObject(putObjectRequest);
            } finally {
                closeAll(LOG, uploadData, block);
            }
            return result;
        }
    });
    clearActiveBlock();
    //wait for completion
    try {
        putObjectResult.get();
    } catch (InterruptedException ie) {
        LOG.warn("Interrupted object upload", ie);
        Thread.currentThread().interrupt();
    } catch (ExecutionException ee) {
        throw extractException("regular upload", key, ee);
    }
}
Also used : PutObjectResult(com.amazonaws.services.s3.model.PutObjectResult) IOException(java.io.IOException) ExecutionException(java.util.concurrent.ExecutionException) AmazonClientException(com.amazonaws.AmazonClientException) PutObjectRequest(com.amazonaws.services.s3.model.PutObjectRequest)
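
Stripped of the Hadoop bookkeeping, the core move here is attaching an event-package listener to the PUT request itself through setGeneralProgressListener, which every SDK request inherits. A small sketch with a hypothetical bucket, key and local file:

import java.io.File;
import com.amazonaws.event.ProgressEvent;
import com.amazonaws.event.ProgressListener;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;
import com.amazonaws.services.s3.model.PutObjectRequest;

public class PutWithProgressListener {

    public static void main(String[] args) {
        AmazonS3 s3 = AmazonS3ClientBuilder.defaultClient();

        // Hypothetical bucket, key and file.
        PutObjectRequest put = new PutObjectRequest("my-bucket", "data/report.csv",
                new File("/tmp/report.csv"));

        // General progress listener, inherited from AmazonWebServiceRequest.
        put.setGeneralProgressListener(new ProgressListener() {
            private long transferred;

            @Override
            public void progressChanged(ProgressEvent progressEvent) {
                transferred += progressEvent.getBytesTransferred();
                System.out.println("bytes sent so far: " + transferred);
            }
        });

        s3.putObject(put);  // synchronous PUT; the listener fires as bytes are written
    }
}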

Example 3 with ProgressListener

use of com.amazonaws.event.ProgressListener in project aws-doc-sdk-examples by awsdocs.

the class XferMgrProgress method uploadFileWithListener.

public static void uploadFileWithListener(String file_path, String bucket_name, String key_prefix, boolean pause) {
    System.out.println("file: " + file_path + (pause ? " (pause)" : ""));
    String key_name = null;
    if (key_prefix != null) {
        key_name = key_prefix + '/' + file_path;
    } else {
        key_name = file_path;
    }
    File f = new File(file_path);
    TransferManager xfer_mgr = new TransferManager();
    try {
        Upload u = xfer_mgr.upload(bucket_name, key_name, f);
        // print an empty progress bar...
        printProgressBar(0.0);
        u.addProgressListener(new ProgressListener() {

            public void progressChanged(ProgressEvent e) {
                double pct = e.getBytesTransferred() * 100.0 / e.getBytes();
                eraseProgressBar();
                printProgressBar(pct);
            }
        });
        // block with Transfer.waitForCompletion()
        XferMgrProgress.waitForCompletion(u);
        // print the final state of the transfer.
        TransferState xfer_state = u.getState();
        System.out.println(": " + xfer_state);
    } catch (AmazonServiceException e) {
        System.err.println(e.getErrorMessage());
        System.exit(1);
    }
    xfer_mgr.shutdownNow();
}
Also used : TransferManager(com.amazonaws.services.s3.transfer.TransferManager) ProgressListener(com.amazonaws.event.ProgressListener) AmazonServiceException(com.amazonaws.AmazonServiceException) Upload(com.amazonaws.services.s3.transfer.Upload) MultipleFileUpload(com.amazonaws.services.s3.transfer.MultipleFileUpload) ProgressEvent(com.amazonaws.event.ProgressEvent) File(java.io.File) TransferState(com.amazonaws.services.s3.transfer.Transfer.TransferState)
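
If an exact percentage is what matters, polling the Transfer object's aggregate progress is often simpler than deriving it from individual events. A minimal sketch under the assumption of default credentials, with a hypothetical bucket, key and file:

import java.io.File;
import com.amazonaws.AmazonClientException;
import com.amazonaws.services.s3.transfer.TransferManager;
import com.amazonaws.services.s3.transfer.TransferManagerBuilder;
import com.amazonaws.services.s3.transfer.Upload;

public class PollUploadProgress {

    public static void main(String[] args) throws InterruptedException {
        TransferManager xferMgr = TransferManagerBuilder.defaultTransferManager();
        try {
            // Hypothetical bucket, key and file.
            Upload upload = xferMgr.upload("my-bucket", "uploads/video.mp4", new File("/tmp/video.mp4"));

            // Poll aggregate progress instead of recomputing it from events.
            while (!upload.isDone()) {
                System.out.printf("%.1f%% transferred%n", upload.getProgress().getPercentTransferred());
                Thread.sleep(500);
            }
            System.out.println("final state: " + upload.getState());
        } catch (AmazonClientException e) {
            System.err.println("upload failed: " + e.getMessage());
        } finally {
            xferMgr.shutdownNow();
        }
    }
}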

Example 4 with ProgressListener

use of com.amazonaws.services.s3.model.ProgressListener in project android-simpl3r by jgilfelt.

the class Uploader method start.

/**
	 * Initiate a multipart file upload to Amazon S3
	 * 
	 * @return the URL of a successfully uploaded file
	 */
public String start() {
    // initialize
    List<PartETag> partETags = new ArrayList<PartETag>();
    final long contentLength = file.length();
    long filePosition = 0;
    int startPartNumber = 1;
    userInterrupted = false;
    userAborted = false;
    bytesUploaded = 0;
    // check if we can resume an incomplete upload
    String uploadId = getCachedUploadId();
    if (uploadId != null) {
        // we can resume the upload
        Log.i(TAG, "resuming upload for " + uploadId);
        // get the cached etags
        List<PartETag> cachedEtags = getCachedPartEtags();
        partETags.addAll(cachedEtags);
        // calculate the start position for resume
        startPartNumber = cachedEtags.size() + 1;
        filePosition = (startPartNumber - 1) * partSize;
        bytesUploaded = filePosition;
        Log.i(TAG, "resuming at part " + startPartNumber + " position " + filePosition);
    } else {
        // initiate a new multi part upload
        Log.i(TAG, "initiating new upload");
        InitiateMultipartUploadRequest initRequest = new InitiateMultipartUploadRequest(s3bucketName, s3key);
        configureInitiateRequest(initRequest);
        InitiateMultipartUploadResult initResponse = s3Client.initiateMultipartUpload(initRequest);
        uploadId = initResponse.getUploadId();
    }
    final AbortMultipartUploadRequest abortRequest = new AbortMultipartUploadRequest(s3bucketName, s3key, uploadId);
    for (int k = startPartNumber; filePosition < contentLength; k++) {
        long thisPartSize = Math.min(partSize, (contentLength - filePosition));
        Log.i(TAG, "starting file part " + k + " with size " + thisPartSize);
        UploadPartRequest uploadRequest = new UploadPartRequest().withBucketName(s3bucketName).withKey(s3key).withUploadId(uploadId).withPartNumber(k).withFileOffset(filePosition).withFile(file).withPartSize(thisPartSize);
        ProgressListener s3progressListener = new ProgressListener() {

            public void progressChanged(ProgressEvent progressEvent) {
                // TODO calling shutdown too brute force?
                if (userInterrupted) {
                    s3Client.shutdown();
                    throw new UploadIterruptedException("User interrupted");
                } else if (userAborted) {
                    // aborted requests cannot be resumed, so clear any cached etags
                    clearProgressCache();
                    s3Client.abortMultipartUpload(abortRequest);
                    s3Client.shutdown();
                }
                bytesUploaded += progressEvent.getBytesTransfered();
                //Log.d(TAG, "bytesUploaded=" + bytesUploaded);
                // broadcast progress
                float fpercent = ((bytesUploaded * 100) / contentLength);
                int percent = Math.round(fpercent);
                if (progressListener != null) {
                    progressListener.progressChanged(progressEvent, bytesUploaded, percent);
                }
            }
        };
        uploadRequest.setProgressListener(s3progressListener);
        UploadPartResult result = s3Client.uploadPart(uploadRequest);
        partETags.add(result.getPartETag());
        // cache the part progress for this upload
        if (k == 1) {
            initProgressCache(uploadId);
        }
        // store part etag
        cachePartEtag(result);
        filePosition += thisPartSize;
    }
    CompleteMultipartUploadRequest compRequest = new CompleteMultipartUploadRequest(s3bucketName, s3key, uploadId, partETags);
    CompleteMultipartUploadResult result = s3Client.completeMultipartUpload(compRequest);
    bytesUploaded = 0;
    Log.i(TAG, "upload complete for " + uploadId);
    clearProgressCache();
    return result.getLocation();
}
Also used : InitiateMultipartUploadResult(com.amazonaws.services.s3.model.InitiateMultipartUploadResult) ArrayList(java.util.ArrayList) InitiateMultipartUploadRequest(com.amazonaws.services.s3.model.InitiateMultipartUploadRequest) UploadPartRequest(com.amazonaws.services.s3.model.UploadPartRequest) AbortMultipartUploadRequest(com.amazonaws.services.s3.model.AbortMultipartUploadRequest) CompleteMultipartUploadResult(com.amazonaws.services.s3.model.CompleteMultipartUploadResult) ProgressEvent(com.amazonaws.services.s3.model.ProgressEvent) PartETag(com.amazonaws.services.s3.model.PartETag) UploadPartResult(com.amazonaws.services.s3.model.UploadPartResult) ProgressListener(com.amazonaws.services.s3.model.ProgressListener) CompleteMultipartUploadRequest(com.amazonaws.services.s3.model.CompleteMultipartUploadRequest)
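
Note that com.amazonaws.services.s3.model.ProgressListener, used in this example, is the older, deprecated listener type; newer SDK 1.x code attaches com.amazonaws.event.ProgressListener through setGeneralProgressListener instead. A small sketch of one part upload wired that way, with the bucket, key, upload ID, offsets and file purely illustrative:

import java.io.File;
import com.amazonaws.event.ProgressEvent;
import com.amazonaws.event.ProgressListener;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;
import com.amazonaws.services.s3.model.UploadPartRequest;
import com.amazonaws.services.s3.model.UploadPartResult;

public class UploadOnePart {

    public static void main(String[] args) {
        AmazonS3 s3 = AmazonS3ClientBuilder.defaultClient();

        // All values below are illustrative; the upload ID would come from
        // initiateMultipartUpload, as in the example above.
        UploadPartRequest partRequest = new UploadPartRequest()
                .withBucketName("my-bucket")
                .withKey("backups/archive.bin")
                .withUploadId("example-upload-id")
                .withPartNumber(1)
                .withFileOffset(0)
                .withFile(new File("/tmp/archive.bin"))
                .withPartSize(5 * 1024 * 1024);

        // The non-deprecated listener type goes through setGeneralProgressListener.
        partRequest.setGeneralProgressListener(new ProgressListener() {
            @Override
            public void progressChanged(ProgressEvent progressEvent) {
                System.out.println("part bytes transferred: " + progressEvent.getBytesTransferred());
            }
        });

        UploadPartResult result = s3.uploadPart(partRequest);
        System.out.println("ETag for part 1: " + result.getPartETag().getETag());
    }
}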

Example 5 with ProgressListener

use of com.amazonaws.event.ProgressListener in project cloudstack by apache.

the class S3TemplateDownloader method download.

@Override
public long download(boolean resume, DownloadCompleteCallback callback) {
    if (!status.equals(Status.NOT_STARTED)) {
        // Only start downloading if we haven't started yet.
        LOGGER.debug("Template download is already started, not starting again. Template: " + downloadUrl);
        return 0;
    }
    int responseCode;
    if ((responseCode = HTTPUtils.executeMethod(httpClient, getMethod)) == -1) {
        errorString = "Exception while executing HttpMethod " + getMethod.getName() + " on URL " + downloadUrl;
        LOGGER.warn(errorString);
        status = Status.UNRECOVERABLE_ERROR;
        return 0;
    }
    if (!HTTPUtils.verifyResponseCode(responseCode)) {
        errorString = "Response code for GetMethod of " + downloadUrl + " is incorrect, responseCode: " + responseCode;
        LOGGER.warn(errorString);
        status = Status.UNRECOVERABLE_ERROR;
        return 0;
    }
    // Headers
    Header contentLengthHeader = getMethod.getResponseHeader("Content-Length");
    Header contentTypeHeader = getMethod.getResponseHeader("Content-Type");
    // Check the Content-Length header.
    if (contentLengthHeader == null) {
        errorString = "The ContentLengthHeader of " + downloadUrl + " isn't supplied";
        LOGGER.warn(errorString);
        status = Status.UNRECOVERABLE_ERROR;
        return 0;
    } else {
        // The ContentLengthHeader is supplied, parse its value.
        remoteSize = Long.parseLong(contentLengthHeader.getValue());
    }
    if (remoteSize > maxTemplateSizeInByte) {
        errorString = "Remote size is too large for template " + downloadUrl + " remote size is " + remoteSize + " max allowed is " + maxTemplateSizeInByte;
        LOGGER.warn(errorString);
        status = Status.UNRECOVERABLE_ERROR;
        return 0;
    }
    InputStream inputStream;
    try {
        inputStream = new BufferedInputStream(getMethod.getResponseBodyAsStream());
    } catch (IOException e) {
        errorString = "Exception occurred while opening InputStream for template " + downloadUrl;
        LOGGER.warn(errorString);
        status = Status.UNRECOVERABLE_ERROR;
        return 0;
    }
    LOGGER.info("Starting download from " + downloadUrl + " to S3 bucket " + s3TO.getBucketName() + " and size " + remoteSize + " bytes");
    // Time the upload starts.
    final Date start = new Date();
    ObjectMetadata objectMetadata = new ObjectMetadata();
    objectMetadata.setContentLength(remoteSize);
    if (contentTypeHeader.getValue() != null) {
        objectMetadata.setContentType(contentTypeHeader.getValue());
    }
    // Create the PutObjectRequest.
    PutObjectRequest putObjectRequest = new PutObjectRequest(s3TO.getBucketName(), s3Key, inputStream, objectMetadata);
    // If reduced redundancy is enabled, set it.
    if (s3TO.getEnableRRS()) {
        putObjectRequest.withStorageClass(StorageClass.ReducedRedundancy);
    }
    Upload upload = S3Utils.putObject(s3TO, putObjectRequest);
    upload.addProgressListener(new ProgressListener() {

        @Override
        public void progressChanged(ProgressEvent progressEvent) {
            // Record the amount of bytes transferred.
            totalBytes += progressEvent.getBytesTransferred();
            LOGGER.trace("Template download from " + downloadUrl + " to S3 bucket " + s3TO.getBucketName() + " transferred  " + totalBytes + " in " + ((new Date().getTime() - start.getTime()) / 1000) + " seconds");
            if (progressEvent.getEventType() == ProgressEventType.TRANSFER_STARTED_EVENT) {
                status = Status.IN_PROGRESS;
            } else if (progressEvent.getEventType() == ProgressEventType.TRANSFER_COMPLETED_EVENT) {
                status = Status.DOWNLOAD_FINISHED;
            } else if (progressEvent.getEventType() == ProgressEventType.TRANSFER_CANCELED_EVENT) {
                status = Status.ABORTED;
            } else if (progressEvent.getEventType() == ProgressEventType.TRANSFER_FAILED_EVENT) {
                status = Status.UNRECOVERABLE_ERROR;
            }
        }
    });
    try {
        // Wait for the upload to complete.
        upload.waitForCompletion();
    } catch (InterruptedException e) {
        // Interruption while waiting for the upload to complete.
        LOGGER.warn("Interruption occurred while waiting for upload of " + downloadUrl + " to complete");
    }
    downloadTime = new Date().getTime() - start.getTime();
    if (status == Status.DOWNLOAD_FINISHED) {
        LOGGER.info("Template download from " + downloadUrl + " to S3 bucket " + s3TO.getBucketName() + " transferred  " + totalBytes + " in " + (downloadTime / 1000) + " seconds, completed successfully!");
    } else {
        LOGGER.warn("Template download from " + downloadUrl + " to S3 bucket " + s3TO.getBucketName() + " transferred  " + totalBytes + " in " + (downloadTime / 1000) + " seconds, completed with status " + status.toString());
    }
    // Close input stream
    getMethod.releaseConnection();
    // Call the callback!
    if (callback != null) {
        callback.downloadComplete(status);
    }
    return totalBytes;
}
Also used : BufferedInputStream(java.io.BufferedInputStream) InputStream(java.io.InputStream) Upload(com.amazonaws.services.s3.transfer.Upload) IOException(java.io.IOException) ProgressEvent(com.amazonaws.event.ProgressEvent) Date(java.util.Date) Header(org.apache.commons.httpclient.Header) ProgressListener(com.amazonaws.event.ProgressListener) BufferedInputStream(java.io.BufferedInputStream) ObjectMetadata(com.amazonaws.services.s3.model.ObjectMetadata) PutObjectRequest(com.amazonaws.services.s3.model.PutObjectRequest)
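
The same streaming pattern boiled down: an InputStream of known length handed to TransferManager with its Content-Length set in ObjectMetadata, plus a listener watching the transfer-level event types. The source URL, bucket and key are hypothetical and error handling is kept minimal.

import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.net.URLConnection;
import com.amazonaws.event.ProgressEvent;
import com.amazonaws.event.ProgressEventType;
import com.amazonaws.event.ProgressListener;
import com.amazonaws.services.s3.model.ObjectMetadata;
import com.amazonaws.services.s3.transfer.TransferManager;
import com.amazonaws.services.s3.transfer.TransferManagerBuilder;
import com.amazonaws.services.s3.transfer.Upload;

public class StreamToS3WithListener {

    public static void main(String[] args) throws IOException, InterruptedException {
        TransferManager transfers = TransferManagerBuilder.defaultTransferManager();

        // Hypothetical source URL, bucket and key.
        URLConnection connection = new URL("https://example.com/templates/template.qcow2").openConnection();

        // The SDK needs the length up front when uploading from a stream;
        // without it the stream is buffered in memory first.
        ObjectMetadata metadata = new ObjectMetadata();
        metadata.setContentLength(connection.getContentLengthLong());

        try (InputStream in = new BufferedInputStream(connection.getInputStream())) {
            Upload upload = transfers.upload("my-bucket", "templates/template.qcow2", in, metadata);

            // Track only the transfer-level state changes, as the CloudStack code above does.
            upload.addProgressListener(new ProgressListener() {
                @Override
                public void progressChanged(ProgressEvent progressEvent) {
                    if (progressEvent.getEventType() == ProgressEventType.TRANSFER_COMPLETED_EVENT) {
                        System.out.println("transfer completed");
                    } else if (progressEvent.getEventType() == ProgressEventType.TRANSFER_FAILED_EVENT) {
                        System.out.println("transfer failed");
                    }
                }
            });

            upload.waitForCompletion();
        } finally {
            transfers.shutdownNow();
        }
    }
}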

Aggregations

ProgressEvent (com.amazonaws.event.ProgressEvent): 3
ProgressListener (com.amazonaws.event.ProgressListener): 3
AmazonClientException (com.amazonaws.AmazonClientException): 2
ObjectMetadata (com.amazonaws.services.s3.model.ObjectMetadata): 2
PutObjectRequest (com.amazonaws.services.s3.model.PutObjectRequest): 2
Upload (com.amazonaws.services.s3.transfer.Upload): 2
IOException (java.io.IOException): 2
AmazonServiceException (com.amazonaws.AmazonServiceException): 1
AbortMultipartUploadRequest (com.amazonaws.services.s3.model.AbortMultipartUploadRequest): 1
CompleteMultipartUploadRequest (com.amazonaws.services.s3.model.CompleteMultipartUploadRequest): 1
CompleteMultipartUploadResult (com.amazonaws.services.s3.model.CompleteMultipartUploadResult): 1
CopyObjectRequest (com.amazonaws.services.s3.model.CopyObjectRequest): 1
InitiateMultipartUploadRequest (com.amazonaws.services.s3.model.InitiateMultipartUploadRequest): 1
InitiateMultipartUploadResult (com.amazonaws.services.s3.model.InitiateMultipartUploadResult): 1
PartETag (com.amazonaws.services.s3.model.PartETag): 1
ProgressEvent (com.amazonaws.services.s3.model.ProgressEvent): 1
ProgressListener (com.amazonaws.services.s3.model.ProgressListener): 1
PutObjectResult (com.amazonaws.services.s3.model.PutObjectResult): 1
UploadPartRequest (com.amazonaws.services.s3.model.UploadPartRequest): 1
UploadPartResult (com.amazonaws.services.s3.model.UploadPartResult): 1