
Example 16 with Bucket

Use of com.amazonaws.services.s3.model.Bucket in project deeplearning4j by deeplearning4j.

The class S3Uploader, method multiPartUpload.

/**
     * Multi-part upload for big files.
     * @param file the file to upload
     * @param bucketName the bucket to upload to
     */
public void multiPartUpload(File file, String bucketName) {
    AmazonS3 client = new AmazonS3Client(creds);
    bucketName = ensureValidBucketName(bucketName);
    List<Bucket> buckets = client.listBuckets();
    // Upload directly if the bucket already exists
    for (Bucket b : buckets) {
        if (b.getName().equals(bucketName)) {
            doMultiPart(client, bucketName, file);
            return;
        }
    }
    // Bucket didn't exist: create it, then upload
    client.createBucket(bucketName);
    doMultiPart(client, bucketName, file);
}
Also used: AmazonS3 (com.amazonaws.services.s3.AmazonS3), AmazonS3Client (com.amazonaws.services.s3.AmazonS3Client)
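
The doMultiPart helper is not shown on this page. For reference, here is a minimal sketch of what such a helper could look like using the AWS SDK v1 TransferManager, which handles part splitting automatically; the body below is an assumption for illustration, not the actual deeplearning4j implementation.

import java.io.File;

import com.amazonaws.AmazonClientException;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.transfer.TransferManager;
import com.amazonaws.services.s3.transfer.Upload;

class MultiPartUploadSketch {

    // Hypothetical stand-in for the doMultiPart helper called above.
    static void doMultiPart(AmazonS3 client, String bucketName, File file) {
        TransferManager transfers = new TransferManager(client);
        try {
            // TransferManager switches to a multipart upload automatically
            // once the file exceeds its configured size threshold.
            Upload upload = transfers.upload(bucketName, file.getName(), file);
            upload.waitForCompletion();
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            throw new AmazonClientException("Upload interrupted", e);
        } finally {
            // false: stop the transfer threads but keep the caller's client open
            transfers.shutdownNow(false);
        }
    }
}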

Example 17 with Bucket

Use of com.amazonaws.services.s3.model.Bucket in project elasticsearch by elastic.

The class TestAmazonS3, method putObject.

@Override
public PutObjectResult putObject(String bucketName, String key, InputStream input, ObjectMetadata metadata) throws AmazonClientException, AmazonServiceException {
    if (shouldFail(bucketName, key, writeFailureRate)) {
        // Simulate a failed upload: consume a random prefix of the stream,
        // then throw, as if the connection dropped mid-transfer.
        long length = metadata.getContentLength();
        long partToRead = (long) (length * randomDouble());
        byte[] buffer = new byte[1024];
        for (long cur = 0; cur < partToRead; cur += buffer.length) {
            try {
                input.read(buffer, 0, (int) Math.min(buffer.length, partToRead - cur));
            } catch (IOException ex) {
                throw new ElasticsearchException("cannot read input stream", ex);
            }
        }
        logger.info("--> random write failure on putObject method: throwing an exception for [bucket={}, key={}]", bucketName, key);
        AmazonS3Exception ex = new AmazonS3Exception("Random S3 exception");
        ex.setStatusCode(400);
        ex.setErrorCode("RequestTimeout");
        throw ex;
    } else {
        return super.putObject(bucketName, key, input, metadata);
    }
}
Also used: IOException (java.io.IOException), ElasticsearchException (org.elasticsearch.ElasticsearchException), AmazonS3Exception (com.amazonaws.services.s3.model.AmazonS3Exception)
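
The shouldFail check and writeFailureRate field come from the surrounding test class and are not shown here. Below is a minimal sketch of one way such a check could work, assuming a fail-each-object-at-most-once policy so that retried uploads eventually succeed; the real Elasticsearch test may use different bookkeeping.

import java.util.Random;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

class FailureInjectorSketch {

    private final Random random = new Random();
    private final ConcurrentMap<String, Boolean> alreadyFailed = new ConcurrentHashMap<>();

    // Returns true with probability failureRate, but at most once per object,
    // so a retried upload of the same bucket/key eventually goes through.
    boolean shouldFail(String bucketName, String key, double failureRate) {
        if (failureRate <= 0.0) {
            return false;
        }
        String id = bucketName + "/" + key;
        return random.nextDouble() < failureRate
                && alreadyFailed.putIfAbsent(id, Boolean.TRUE) == null;
    }
}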

Example 18 with Bucket

Use of com.amazonaws.services.s3.model.Bucket in project elasticsearch by elastic.

The class S3BlobStoreTests, method testInitCannedACL.

public void testInitCannedACL() throws IOException {
    String[] aclList = new String[] { "private", "public-read", "public-read-write", "authenticated-read", "log-delivery-write", "bucket-owner-read", "bucket-owner-full-control" };
    // null or empty ACL should default to Private
    assertThat(S3BlobStore.initCannedACL(null), equalTo(CannedAccessControlList.Private));
    assertThat(S3BlobStore.initCannedACL(""), equalTo(CannedAccessControlList.Private));
    // it should init cannedACL correctly
    for (String aclString : aclList) {
        CannedAccessControlList acl = S3BlobStore.initCannedACL(aclString);
        assertThat(acl.toString(), equalTo(aclString));
    }
    // it should accept all aws cannedACLs
    for (CannedAccessControlList awsList : CannedAccessControlList.values()) {
        CannedAccessControlList acl = S3BlobStore.initCannedACL(awsList.toString());
        assertThat(acl, equalTo(awsList));
    }
}
Also used: CannedAccessControlList (com.amazonaws.services.s3.model.CannedAccessControlList)
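
For context, the method under test maps a configuration string onto the SDK's CannedAccessControlList enum. Here is a sketch of an initCannedACL that would satisfy the assertions above; the exception type is an assumption, as the actual S3BlobStore may throw a repository-specific exception.

import com.amazonaws.services.s3.model.CannedAccessControlList;

class CannedAclSketch {

    static CannedAccessControlList initCannedACL(String cannedACL) {
        // Null or empty input falls back to the safest default.
        if (cannedACL == null || cannedACL.isEmpty()) {
            return CannedAccessControlList.Private;
        }
        // Accept any value matching a known canned ACL's wire string,
        // e.g. "public-read" for CannedAccessControlList.PublicRead.
        for (CannedAccessControlList cur : CannedAccessControlList.values()) {
            if (cur.toString().equals(cannedACL)) {
                return cur;
            }
        }
        throw new IllegalArgumentException("cannedACL is not valid: [" + cannedACL + "]");
    }
}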

Example 19 with Bucket

Use of com.amazonaws.services.s3.model.Bucket in project hadoop by apache.

The class S3AFileSystem, method getObjectMetadata.

/**
   * Request object metadata; increments counters in the process.
   * @param key key
   * @return the metadata
   */
protected ObjectMetadata getObjectMetadata(String key) {
    incrementStatistic(OBJECT_METADATA_REQUESTS);
    GetObjectMetadataRequest request = new GetObjectMetadataRequest(bucket, key);
    // If SSE-C is enabled, the request must carry the customer-provided key
    if (S3AEncryptionMethods.SSE_C.equals(serverSideEncryptionAlgorithm) && StringUtils.isNotBlank(getServerSideEncryptionKey(getConf()))) {
        request.setSSECustomerKey(generateSSECustomerKey());
    }
    ObjectMetadata meta = s3.getObjectMetadata(request);
    incrementReadOperations();
    return meta;
}
Also used: GetObjectMetadataRequest (com.amazonaws.services.s3.model.GetObjectMetadataRequest), ObjectMetadata (com.amazonaws.services.s3.model.ObjectMetadata)
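
The generateSSECustomerKey helper referenced above is not shown. Assuming the configured key is a base64-encoded 256-bit AES key, as SSE-C expects, the helper could be as simple as wrapping it in the SDK's SSECustomerKey; this is an illustrative guess, not the Hadoop source.

import com.amazonaws.services.s3.model.SSECustomerKey;

class SseCustomerKeySketch {

    // base64Key stands in for getServerSideEncryptionKey(getConf()) above.
    static SSECustomerKey generateSSECustomerKey(String base64Key) {
        // The SDK computes the key-MD5 header itself if one is not supplied.
        return new SSECustomerKey(base64Key);
    }
}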

Example 20 with Bucket

Use of com.amazonaws.services.s3.model.Bucket in project hadoop by apache.

The class S3AFileSystem, method copyFile.

/**
   * Copy a single object in the bucket via a COPY operation.
   * @param srcKey source object path
   * @param dstKey destination object path
   * @param size object size
   * @throws AmazonClientException on failures inside the AWS SDK
   * @throws InterruptedIOException if the operation was interrupted
   * @throws IOException on other IO problems
   */
private void copyFile(String srcKey, String dstKey, long size) throws IOException, InterruptedIOException, AmazonClientException {
    LOG.debug("copyFile {} -> {} ", srcKey, dstKey);
    try {
        ObjectMetadata srcom = getObjectMetadata(srcKey);
        ObjectMetadata dstom = cloneObjectMetadata(srcom);
        setOptionalObjectMetadata(dstom);
        CopyObjectRequest copyObjectRequest = new CopyObjectRequest(bucket, srcKey, bucket, dstKey);
        setOptionalCopyObjectRequestParameters(copyObjectRequest);
        copyObjectRequest.setCannedAccessControlList(cannedACL);
        copyObjectRequest.setNewObjectMetadata(dstom);
        ProgressListener progressListener = new ProgressListener() {

            @Override
            public void progressChanged(ProgressEvent progressEvent) {
                switch(progressEvent.getEventType()) {
                    case TRANSFER_PART_COMPLETED_EVENT:
                        incrementWriteOperations();
                        break;
                    default:
                        break;
                }
            }
        };
        Copy copy = transfers.copy(copyObjectRequest);
        copy.addProgressListener(progressListener);
        try {
            copy.waitForCopyResult();
            incrementWriteOperations();
            instrumentation.filesCopied(1, size);
        } catch (InterruptedException e) {
            throw new InterruptedIOException("Interrupted copying " + srcKey + " to " + dstKey + ", cancelling");
        }
    } catch (AmazonClientException e) {
        throw translateException("copyFile(" + srcKey + ", " + dstKey + ")", srcKey, e);
    }
}
Also used: InterruptedIOException (java.io.InterruptedIOException), CopyObjectRequest (com.amazonaws.services.s3.model.CopyObjectRequest), ProgressListener (com.amazonaws.event.ProgressListener), Copy (com.amazonaws.services.s3.transfer.Copy), AmazonClientException (com.amazonaws.AmazonClientException), ProgressEvent (com.amazonaws.event.ProgressEvent), ObjectMetadata (com.amazonaws.services.s3.model.ObjectMetadata)
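
The transfers field used above is an SDK TransferManager created elsewhere in S3AFileSystem. Here is a sketch of how such a field might be initialized; the threshold and part-size values are illustrative assumptions, not Hadoop's configured defaults.

import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.transfer.TransferManager;
import com.amazonaws.services.s3.transfer.TransferManagerConfiguration;

class TransferManagerSketch {

    static TransferManager initTransfers(AmazonS3 s3) {
        TransferManager transfers = new TransferManager(s3);
        TransferManagerConfiguration config = new TransferManagerConfiguration();
        // Copies larger than the threshold run as multipart copies, which is
        // what makes TRANSFER_PART_COMPLETED_EVENT fire in the listener above.
        config.setMultipartCopyThreshold(128L * 1024 * 1024);
        config.setMultipartCopyPartSize(64L * 1024 * 1024);
        transfers.setConfiguration(config);
        return transfers;
    }
}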

Aggregations

AmazonServiceException (com.amazonaws.AmazonServiceException): 47 usages
AmazonS3 (com.amazonaws.services.s3.AmazonS3): 41 usages
ObjectListing (com.amazonaws.services.s3.model.ObjectListing): 39 usages
S3ObjectSummary (com.amazonaws.services.s3.model.S3ObjectSummary): 38 usages
ObjectMetadata (com.amazonaws.services.s3.model.ObjectMetadata): 35 usages
DataStoreException (org.apache.jackrabbit.core.data.DataStoreException): 29 usages
IOException (java.io.IOException): 24 usages
PutObjectRequest (com.amazonaws.services.s3.model.PutObjectRequest): 23 usages
AmazonClientException (com.amazonaws.AmazonClientException): 22 usages
ArrayList (java.util.ArrayList): 20 usages
AmazonS3Client (com.amazonaws.services.s3.AmazonS3Client): 16 usages
ListObjectsRequest (com.amazonaws.services.s3.model.ListObjectsRequest): 16 usages
Test (org.junit.Test): 15 usages
S3Object (com.amazonaws.services.s3.model.S3Object): 14 usages
DeleteObjectsRequest (com.amazonaws.services.s3.model.DeleteObjectsRequest): 13 usages
Path (org.apache.hadoop.fs.Path): 12 usages
Date (java.util.Date): 11 usages
Bucket (com.amazonaws.services.s3.model.Bucket): 10 usages
InputStream (java.io.InputStream): 10 usages
AmazonS3Exception (com.amazonaws.services.s3.model.AmazonS3Exception): 9 usages