
Example 6 with PutObjectRequest

Use of com.amazonaws.services.s3.model.PutObjectRequest in project hadoop by apache.

From the class S3AFileSystem, method newPutObjectRequest:

/**
   * Create a {@link PutObjectRequest} request.
   * The metadata is assumed to have been configured with the size of the
   * operation.
   * @param key key of object
   * @param metadata metadata header
   * @param inputStream source data.
   * @return the request
   */
private PutObjectRequest newPutObjectRequest(String key, ObjectMetadata metadata, InputStream inputStream) {
    Preconditions.checkNotNull(inputStream);
    PutObjectRequest putObjectRequest = new PutObjectRequest(bucket, key, inputStream, metadata);
    setOptionalPutRequestParameters(putObjectRequest);
    putObjectRequest.setCannedAcl(cannedACL);
    return putObjectRequest;
}
Also used: PutObjectRequest (com.amazonaws.services.s3.model.PutObjectRequest)
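
The same request shape can be reproduced directly against the AWS SDK, outside of S3AFileSystem. Below is a minimal sketch; the bucket name, key, payload and canned ACL are placeholders rather than values from the Hadoop code, and the client is assumed to pick up credentials and region from the default provider chain:

import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;
import com.amazonaws.services.s3.model.CannedAccessControlList;
import com.amazonaws.services.s3.model.ObjectMetadata;
import com.amazonaws.services.s3.model.PutObjectRequest;

import java.io.ByteArrayInputStream;
import java.nio.charset.StandardCharsets;

public class PutRequestSketch {
    public static void main(String[] args) {
        byte[] data = "hello".getBytes(StandardCharsets.UTF_8);

        // The metadata must carry the size when uploading from a stream;
        // without it the SDK buffers the whole stream in memory to find the length.
        ObjectMetadata metadata = new ObjectMetadata();
        metadata.setContentLength(data.length);

        PutObjectRequest request =
            new PutObjectRequest("example-bucket", "example/key",   // placeholders
                new ByteArrayInputStream(data), metadata)
            .withCannedAcl(CannedAccessControlList.Private);        // mirrors setCannedAcl(cannedACL)

        // Assumes credentials and region from the default provider chain.
        AmazonS3 s3 = AmazonS3ClientBuilder.defaultClient();
        s3.putObject(request);
    }
}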

Example 7 with PutObjectRequest

Use of com.amazonaws.services.s3.model.PutObjectRequest in project hadoop by apache.

From the class S3ABlockOutputStream, method putObject:

/**
   * Upload the current block as a single PUT request; if the buffer
   * is empty a 0-byte PUT will be invoked, as it is needed to create an
   * entry at the far end.
   * @throws IOException any problem.
   */
private void putObject() throws IOException {
    LOG.debug("Executing regular upload for {}", writeOperationHelper);
    final S3ADataBlocks.DataBlock block = getActiveBlock();
    int size = block.dataSize();
    final S3ADataBlocks.BlockUploadData uploadData = block.startUpload();
    final PutObjectRequest putObjectRequest = uploadData.hasFile()
        ? writeOperationHelper.newPutRequest(uploadData.getFile())
        : writeOperationHelper.newPutRequest(uploadData.getUploadStream(), size);
    fs.setOptionalPutRequestParameters(putObjectRequest);
    long transferQueueTime = now();
    BlockUploadProgress callback = new BlockUploadProgress(block, progressListener, transferQueueTime);
    putObjectRequest.setGeneralProgressListener(callback);
    statistics.blockUploadQueued(size);
    ListenableFuture<PutObjectResult> putObjectResult = executorService.submit(new Callable<PutObjectResult>() {

        @Override
        public PutObjectResult call() throws Exception {
            PutObjectResult result;
            try {
                // the putObject call automatically closes the input
                // stream afterwards.
                result = writeOperationHelper.putObject(putObjectRequest);
            } finally {
                closeAll(LOG, uploadData, block);
            }
            return result;
        }
    });
    clearActiveBlock();
    //wait for completion
    try {
        putObjectResult.get();
    } catch (InterruptedException ie) {
        LOG.warn("Interrupted object upload", ie);
        Thread.currentThread().interrupt();
    } catch (ExecutionException ee) {
        throw extractException("regular upload", key, ee);
    }
}
Also used: PutObjectResult (com.amazonaws.services.s3.model.PutObjectResult), IOException (java.io.IOException), ExecutionException (java.util.concurrent.ExecutionException), AmazonClientException (com.amazonaws.AmazonClientException), PutObjectRequest (com.amazonaws.services.s3.model.PutObjectRequest)
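
Stripped of the S3A statistics and progress callbacks, the submit-then-wait structure above reduces to the sketch below. It assumes a plain AmazonS3 client and an ExecutorService supplied by the caller, and, unlike the Hadoop code, it surfaces an interrupt as an InterruptedIOException instead of only logging it:

import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.model.PutObjectRequest;
import com.amazonaws.services.s3.model.PutObjectResult;

import java.io.IOException;
import java.io.InterruptedIOException;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;

public class AsyncPutSketch {
    static PutObjectResult putAndWait(AmazonS3 client, PutObjectRequest request,
            ExecutorService pool) throws IOException {
        // Queue the upload; the SDK's putObject() closes the request's input stream when done.
        Callable<PutObjectResult> upload = () -> client.putObject(request);
        Future<PutObjectResult> future = pool.submit(upload);
        try {
            return future.get();                       // wait for completion
        } catch (InterruptedException ie) {
            Thread.currentThread().interrupt();        // preserve the interrupt flag
            throw new InterruptedIOException("interrupted during upload");
        } catch (ExecutionException ee) {
            // Unwrap the real failure, in the spirit of extractException() above.
            Throwable cause = ee.getCause();
            if (cause instanceof IOException) {
                throw (IOException) cause;
            }
            throw new IOException("upload failed", cause);
        }
    }
}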

Example 8 with PutObjectRequest

Use of com.amazonaws.services.s3.model.PutObjectRequest in project hadoop by apache.

From the class S3AFileSystem, method putObjectDirect:

/**
   * PUT an object directly (i.e. not via the transfer manager).
   * Byte length is calculated from the file length, or, if there is no
   * file, from the content length of the header.
   * <i>Important: this call will close any input stream in the request.</i>
   * @param putObjectRequest the request
   * @return the upload initiated
   * @throws AmazonClientException on problems
   */
public PutObjectResult putObjectDirect(PutObjectRequest putObjectRequest) throws AmazonClientException {
    long len;
    if (putObjectRequest.getFile() != null) {
        len = putObjectRequest.getFile().length();
    } else {
        len = putObjectRequest.getMetadata().getContentLength();
    }
    incrementPutStartStatistics(len);
    try {
        PutObjectResult result = s3.putObject(putObjectRequest);
        incrementPutCompletedStatistics(true, len);
        return result;
    } catch (AmazonClientException e) {
        incrementPutCompletedStatistics(false, len);
        throw e;
    }
}
Also used: PutObjectResult (com.amazonaws.services.s3.model.PutObjectResult), AmazonClientException (com.amazonaws.AmazonClientException)
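
The length-then-counter bracket around the PUT can be sketched on its own. The AtomicLong counters below are hypothetical stand-ins for the incrementPut*Statistics() instrumentation in S3AFileSystem, not its real fields:

import com.amazonaws.AmazonClientException;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.model.PutObjectRequest;
import com.amazonaws.services.s3.model.PutObjectResult;

import java.util.concurrent.atomic.AtomicLong;

public class InstrumentedPutSketch {
    // Hypothetical counters standing in for the S3A put statistics.
    private final AtomicLong bytesAttempted = new AtomicLong();
    private final AtomicLong bytesUploaded = new AtomicLong();
    private final AtomicLong bytesFailed = new AtomicLong();

    PutObjectResult put(AmazonS3 s3, PutObjectRequest request) {
        // Length comes from the file if present, else from the metadata header.
        long len = request.getFile() != null
            ? request.getFile().length()
            : request.getMetadata().getContentLength();
        bytesAttempted.addAndGet(len);
        try {
            PutObjectResult result = s3.putObject(request);
            bytesUploaded.addAndGet(len);              // success path
            return result;
        } catch (AmazonClientException e) {
            bytesFailed.addAndGet(len);                // failure path, then rethrow
            throw e;
        }
    }
}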

Example 9 with PutObjectRequest

Use of com.amazonaws.services.s3.model.PutObjectRequest in project elasticsearch by elastic.

From the class MockAmazonS3, method putObject:

@Override
public PutObjectResult putObject(PutObjectRequest putObjectRequest) throws AmazonClientException, AmazonServiceException {
    String blobName = putObjectRequest.getKey();
    DigestInputStream stream = (DigestInputStream) putObjectRequest.getInputStream();
    if (blobs.containsKey(blobName)) {
        throw new AmazonS3Exception("[" + blobName + "] already exists.");
    }
    blobs.put(blobName, stream);
    // input and output md5 hashes need to match to avoid an exception
    String md5 = Base64.encodeAsString(stream.getMessageDigest().digest());
    PutObjectResult result = new PutObjectResult();
    result.setContentMd5(md5);
    return result;
}
Also used: DigestInputStream (java.security.DigestInputStream), PutObjectResult (com.amazonaws.services.s3.model.PutObjectResult), AmazonS3Exception (com.amazonaws.services.s3.model.AmazonS3Exception)
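
Because the mock casts the request's input stream to DigestInputStream and snapshots whatever has been digested so far, a caller has to wrap its payload in such a stream and drain it before calling putObject. A minimal sketch; the helper name, bucket name and buffer size are chosen here rather than taken from the Elasticsearch tests, and the client parameter is expected to be an instance of the mock above:

import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.model.ObjectMetadata;
import com.amazonaws.services.s3.model.PutObjectRequest;
import com.amazonaws.services.s3.model.PutObjectResult;

import java.io.ByteArrayInputStream;
import java.security.DigestInputStream;
import java.security.MessageDigest;

public class MockPutSketch {
    /** Push a payload through a DigestInputStream into the mocked client and return its MD5. */
    static String putAndReturnMd5(AmazonS3 s3, String blobName, byte[] payload) throws Exception {
        DigestInputStream in = new DigestInputStream(
            new ByteArrayInputStream(payload), MessageDigest.getInstance("MD5"));

        // Drain the stream so the digest reflects the full payload; the mock above
        // only digests the bytes that have already been read when putObject() is called.
        byte[] buf = new byte[1024];
        while (in.read(buf) != -1) {
            // discard: DigestInputStream updates the MD5 as a side effect of reading
        }

        ObjectMetadata md = new ObjectMetadata();
        md.setContentLength(payload.length);

        PutObjectResult result = s3.putObject(new PutObjectRequest("bucket", blobName, in, md));
        return result.getContentMd5();                 // Base64-encoded MD5, as set by the mock
    }
}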

Example 10 with PutObjectRequest

Use of com.amazonaws.services.s3.model.PutObjectRequest in project elasticsearch by elastic.

From the class DefaultS3OutputStream, method doUpload:

/**
   * Upload a blob with a single PUT, computing an MD5 digest of the stream as it
   * is read and failing with an AmazonS3Exception if the local and remote
   * checksums differ.
   */
protected void doUpload(S3BlobStore blobStore, String bucketName, String blobName, InputStream is, int length, boolean serverSideEncryption) throws AmazonS3Exception {
    ObjectMetadata md = new ObjectMetadata();
    if (serverSideEncryption) {
        md.setSSEAlgorithm(ObjectMetadata.AES_256_SERVER_SIDE_ENCRYPTION);
    }
    md.setContentLength(length);
    InputStream inputStream = is;
    // We try to compute a MD5 while reading it
    MessageDigest messageDigest;
    try {
        messageDigest = MessageDigest.getInstance("MD5");
        inputStream = new DigestInputStream(is, messageDigest);
    } catch (NoSuchAlgorithmException impossible) {
        // Every implementation of the Java platform is required to support MD5 (see MessageDigest)
        throw new RuntimeException(impossible);
    }
    PutObjectRequest putRequest = new PutObjectRequest(bucketName, blobName, inputStream, md)
        .withStorageClass(blobStore.getStorageClass())
        .withCannedAcl(blobStore.getCannedACL());
    PutObjectResult putObjectResult = blobStore.client().putObject(putRequest);
    String localMd5 = Base64.encodeAsString(messageDigest.digest());
    String remoteMd5 = putObjectResult.getContentMd5();
    if (!localMd5.equals(remoteMd5)) {
        logger.debug("MD5 local [{}], remote [{}] are not equal...", localMd5, remoteMd5);
        throw new AmazonS3Exception("MD5 local [" + localMd5 + "], remote [" + remoteMd5 + "] are not equal...");
    }
}
Also used: DigestInputStream (java.security.DigestInputStream), PutObjectResult (com.amazonaws.services.s3.model.PutObjectResult), ByteArrayInputStream (java.io.ByteArrayInputStream), InputStream (java.io.InputStream), NoSuchAlgorithmException (java.security.NoSuchAlgorithmException), AmazonS3Exception (com.amazonaws.services.s3.model.AmazonS3Exception), MessageDigest (java.security.MessageDigest), ObjectMetadata (com.amazonaws.services.s3.model.ObjectMetadata), PutObjectRequest (com.amazonaws.services.s3.model.PutObjectRequest)
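
A caller of a doUpload-style method needs the exact byte count before the PUT, since the content length is set on the ObjectMetadata up front. The sketch below shows one hypothetical way to buffer writes until the length is known; the class and interface names are illustrative placeholders, not Elasticsearch's actual output-stream path:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.InputStream;

public class BufferedUploadSketch {
    private final ByteArrayOutputStream buffer = new ByteArrayOutputStream();

    void write(byte[] data) {
        buffer.write(data, 0, data.length);
    }

    /** Hand the buffered bytes to an upload routine shaped like doUpload() above. */
    void flush(Uploader uploader, String bucketName, String blobName) {
        byte[] bytes = buffer.toByteArray();
        InputStream is = new ByteArrayInputStream(bytes);
        // The exact length is known here, so the upload can populate
        // ObjectMetadata.setContentLength() without re-buffering the stream.
        uploader.upload(bucketName, blobName, is, bytes.length, /* serverSideEncryption */ true);
        buffer.reset();
    }

    /** Placeholder for the blob-store upload, mirroring doUpload's parameter list. */
    interface Uploader {
        void upload(String bucketName, String blobName, InputStream is, int length, boolean sse);
    }
}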

Aggregations

PutObjectRequest (com.amazonaws.services.s3.model.PutObjectRequest): 33
ObjectMetadata (com.amazonaws.services.s3.model.ObjectMetadata): 21
Upload (com.amazonaws.services.s3.transfer.Upload): 11
AmazonClientException (com.amazonaws.AmazonClientException): 10
PutObjectResult (com.amazonaws.services.s3.model.PutObjectResult): 8
Exchange (org.apache.camel.Exchange): 8
Processor (org.apache.camel.Processor): 8
Test (org.junit.Test): 8
InputStream (java.io.InputStream): 7
DataStoreException (org.apache.jackrabbit.core.data.DataStoreException): 7
File (java.io.File): 6
IOException (java.io.IOException): 6
ByteArrayInputStream (java.io.ByteArrayInputStream): 5
AmazonServiceException (com.amazonaws.AmazonServiceException): 4
S3Object (com.amazonaws.services.s3.model.S3Object): 4
Date (java.util.Date): 4
CopyObjectRequest (com.amazonaws.services.s3.model.CopyObjectRequest): 3
Copy (com.amazonaws.services.s3.transfer.Copy): 3
FileInputStream (java.io.FileInputStream): 3
HashMap (java.util.HashMap): 3