Example 46 with Transfer

use of com.amazonaws.services.s3.transfer.Transfer in project herd by FINRAOS.

the class BusinessObjectDataFinalizeRestoreHelperServiceImpl method executeS3SpecificStepsImpl.

/**
 * Executes S3 specific steps for the business object data finalize restore.
 *
 * @param businessObjectDataRestoreDto the DTO that holds various parameters needed to perform a business object data restore
 */
protected void executeS3SpecificStepsImpl(BusinessObjectDataRestoreDto businessObjectDataRestoreDto) {
    // Create an S3 file transfer parameters DTO to access the S3 bucket.
    // Since the S3 key prefix represents a directory, we add a trailing '/' character to it.
    S3FileTransferRequestParamsDto s3FileTransferRequestParamsDto = storageHelper.getS3FileTransferRequestParamsDto();
    s3FileTransferRequestParamsDto.setS3BucketName(businessObjectDataRestoreDto.getS3BucketName());
    s3FileTransferRequestParamsDto.setS3Endpoint(businessObjectDataRestoreDto.getS3Endpoint());
    s3FileTransferRequestParamsDto.setS3KeyPrefix(StringUtils.appendIfMissing(businessObjectDataRestoreDto.getS3KeyPrefix(), "/"));
    // Get actual S3 files by selecting all S3 keys matching the S3 key prefix from the S3 bucket.
    // When listing S3 files, we ignore 0 byte objects that represent S3 directories.
    List<S3ObjectSummary> actualS3Files = s3Service.listDirectory(s3FileTransferRequestParamsDto, true);
    // Validate existence and file size of the S3 files.
    storageFileHelper.validateRegisteredS3Files(businessObjectDataRestoreDto.getStorageFiles(), actualS3Files, businessObjectDataRestoreDto.getStorageName(), businessObjectDataRestoreDto.getBusinessObjectDataKey());
    // Build a list of files to check for restore status by selecting only objects that are currently archived in Glacier (i.e., have the Glacier storage class).
    List<S3ObjectSummary> glacierS3Files = new ArrayList<>();
    for (S3ObjectSummary s3ObjectSummary : actualS3Files) {
        if (StorageClass.Glacier.toString().equals(s3ObjectSummary.getStorageClass())) {
            glacierS3Files.add(s3ObjectSummary);
        }
    }
    // Validate that all Glacier storage class S3 files are now restored.
    s3FileTransferRequestParamsDto.setFiles(storageFileHelper.getFiles(storageFileHelper.createStorageFilesFromS3ObjectSummaries(glacierS3Files)));
    s3Service.validateGlacierS3FilesRestored(s3FileTransferRequestParamsDto);
}
Also used : S3FileTransferRequestParamsDto (org.finra.herd.model.dto.S3FileTransferRequestParamsDto), ArrayList (java.util.ArrayList), S3ObjectSummary (com.amazonaws.services.s3.model.S3ObjectSummary)
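
The validateGlacierS3FilesRestored call above is a herd-specific wrapper, but the underlying check can be reproduced with the plain AWS SDK by inspecting each object's restore metadata. A minimal sketch, assuming hypothetical bucket and key names and one plausible reading of "restored" (restore request finished and the temporary copy not yet expired):

import java.util.Date;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;
import com.amazonaws.services.s3.model.ObjectMetadata;

public class GlacierRestoreStatusCheck {

    // Returns true if a temporary restored copy of the object currently exists
    // and no restore request is still in flight.
    static boolean isRestored(AmazonS3 s3, String bucketName, String key) {
        ObjectMetadata metadata = s3.getObjectMetadata(bucketName, key);
        // getOngoingRestore() is null when no restore was ever requested,
        // TRUE while a restore is in progress, FALSE once the copy is available.
        Boolean ongoingRestore = metadata.getOngoingRestore();
        Date restoreExpiration = metadata.getRestoreExpirationTime();
        return Boolean.FALSE.equals(ongoingRestore) && restoreExpiration != null;
    }

    public static void main(String[] args) {
        AmazonS3 s3 = AmazonS3ClientBuilder.defaultClient();
        // Hypothetical bucket and key, used only for illustration.
        System.out.println(isRestored(s3, "my-archive-bucket", "my/key/prefix/data.txt"));
    }
}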

Example 47 with Transfer

use of com.amazonaws.services.s3.transfer.Transfer in project herd by FINRAOS.

the class ExpireRestoredBusinessObjectDataHelperServiceImpl method executeS3SpecificStepsImpl.

/**
 * Executes S3 specific steps required to expire business object data.
 *
 * @param businessObjectDataRestoreDto the DTO that holds various parameters needed to expire business object data
 */
protected void executeS3SpecificStepsImpl(BusinessObjectDataRestoreDto businessObjectDataRestoreDto) {
    // Create an S3 file transfer parameters DTO to access the S3 bucket.
    // Since the S3 key prefix represents a directory, we add a trailing '/' character to it.
    S3FileTransferRequestParamsDto s3FileTransferRequestParamsDto = storageHelper.getS3FileTransferRequestParamsDto();
    s3FileTransferRequestParamsDto.setS3Endpoint(businessObjectDataRestoreDto.getS3Endpoint());
    s3FileTransferRequestParamsDto.setS3BucketName(businessObjectDataRestoreDto.getS3BucketName());
    s3FileTransferRequestParamsDto.setS3KeyPrefix(StringUtils.appendIfMissing(businessObjectDataRestoreDto.getS3KeyPrefix(), "/"));
    // Get a list of S3 files matching the S3 key prefix. When listing S3 files, we ignore 0 byte objects that represent S3 directories.
    List<S3ObjectSummary> actualS3Files = s3Service.listDirectory(s3FileTransferRequestParamsDto, true);
    // Validate existence and file size of the S3 files.
    storageFileHelper.validateRegisteredS3Files(businessObjectDataRestoreDto.getStorageFiles(), actualS3Files, businessObjectDataRestoreDto.getStorageName(), businessObjectDataRestoreDto.getBusinessObjectDataKey());
    // Build a list of files to restore by selecting only objects that have the Glacier storage class.
    List<S3ObjectSummary> glacierS3Files = new ArrayList<>();
    for (S3ObjectSummary s3ObjectSummary : actualS3Files) {
        if (StorageClass.Glacier.toString().equals(s3ObjectSummary.getStorageClass())) {
            glacierS3Files.add(s3ObjectSummary);
        }
    }
    // Set a list of files to expire.
    s3FileTransferRequestParamsDto.setFiles(storageFileHelper.getFiles(storageFileHelper.createStorageFilesFromS3ObjectSummaries(glacierS3Files)));
    // To expire the restored S3 objects, initiate restore requests with expiration set to 1 day.
    s3Service.restoreObjects(s3FileTransferRequestParamsDto, 1);
}
Also used : S3FileTransferRequestParamsDto (org.finra.herd.model.dto.S3FileTransferRequestParamsDto), ArrayList (java.util.ArrayList), S3ObjectSummary (com.amazonaws.services.s3.model.S3ObjectSummary)
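
s3Service.restoreObjects(..., 1) is another herd abstraction; the effect per key can be approximated with a direct SDK call, assuming the standard S3 behavior that re-issuing a restore request with a shorter expiration shortens the lifetime of the already-restored copy. A sketch with hypothetical names:

import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;
import com.amazonaws.services.s3.model.RestoreObjectRequest;

public class ExpireRestoredCopySketch {

    public static void main(String[] args) {
        AmazonS3 s3 = AmazonS3ClientBuilder.defaultClient();
        // Hypothetical bucket/key; in the service above the keys come from the
        // Glacier-storage-class objects that matched the S3 key prefix.
        String bucketName = "my-archive-bucket";
        String key = "my/key/prefix/data.txt";

        // Re-issue a restore request with a 1-day expiration so the existing
        // temporary copy expires after one day instead of its original lifetime.
        s3.restoreObjectV2(new RestoreObjectRequest(bucketName, key, 1));
    }
}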

Example 48 with Transfer

use of com.amazonaws.services.s3.transfer.Transfer in project herd by FINRAOS.

the class UploaderController method logS3KeyPrefixContents.

/**
 * Logs all files found in the specified S3 location.
 *
 * @param params the S3 file transfer request parameters
 */
private void logS3KeyPrefixContents(S3FileTransferRequestParamsDto params) {
    List<S3ObjectSummary> s3ObjectSummaries = s3Service.listDirectory(params);
    LOGGER.info(String.format("Found %d keys with prefix \"%s\" in bucket \"%s\":", s3ObjectSummaries.size(), params.getS3KeyPrefix(), params.getS3BucketName()));
    for (S3ObjectSummary s3ObjectSummary : s3ObjectSummaries) {
        LOGGER.info(String.format("    s3://%s/%s", params.getS3BucketName(), s3ObjectSummary.getKey()));
    }
}
Also used : S3ObjectSummary(com.amazonaws.services.s3.model.S3ObjectSummary)
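
s3Service.listDirectory is herd's helper; one plausible equivalent with the plain AWS SDK v1 is a paginated listObjectsV2 loop over the key prefix, optionally filtering out the 0-byte "directory" markers mentioned in the earlier examples. Bucket and prefix names below are hypothetical:

import java.util.ArrayList;
import java.util.List;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;
import com.amazonaws.services.s3.model.ListObjectsV2Request;
import com.amazonaws.services.s3.model.ListObjectsV2Result;
import com.amazonaws.services.s3.model.S3ObjectSummary;

public class ListKeyPrefixSketch {

    static List<S3ObjectSummary> listDirectory(AmazonS3 s3, String bucketName, String keyPrefix) {
        List<S3ObjectSummary> summaries = new ArrayList<>();
        ListObjectsV2Request request = new ListObjectsV2Request()
                .withBucketName(bucketName)
                .withPrefix(keyPrefix);
        ListObjectsV2Result result;
        do {
            result = s3.listObjectsV2(request);
            for (S3ObjectSummary summary : result.getObjectSummaries()) {
                // Skip 0-byte objects ending in '/' that merely represent "directories".
                if (!(summary.getSize() == 0 && summary.getKey().endsWith("/"))) {
                    summaries.add(summary);
                }
            }
            // Continue from where the previous page left off, if the listing was truncated.
            request.setContinuationToken(result.getNextContinuationToken());
        } while (result.isTruncated());
        return summaries;
    }

    public static void main(String[] args) {
        AmazonS3 s3 = AmazonS3ClientBuilder.defaultClient();
        // Hypothetical bucket and prefix.
        listDirectory(s3, "my-bucket", "my/key/prefix/")
                .forEach(summary -> System.out.println("    s3://my-bucket/" + summary.getKey()));
    }
}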

Example 49 with Transfer

use of com.amazonaws.services.s3.transfer.Transfer in project spring-integration-aws by spring-projects.

the class S3MessageHandler method upload.

private Transfer upload(Message<?> requestMessage) {
    Object payload = requestMessage.getPayload();
    String bucketName = obtainBucket(requestMessage);
    String key = null;
    if (this.keyExpression != null) {
        key = this.keyExpression.getValue(this.evaluationContext, requestMessage, String.class);
    }
    if (payload instanceof File && ((File) payload).isDirectory()) {
        File fileToUpload = (File) payload;
        if (key == null) {
            key = fileToUpload.getName();
        }
        return this.transferManager.uploadDirectory(bucketName, key, fileToUpload, true, new MessageHeadersObjectMetadataProvider(requestMessage.getHeaders()));
    } else {
        ObjectMetadata metadata = new ObjectMetadata();
        if (this.uploadMetadataProvider != null) {
            this.uploadMetadataProvider.populateMetadata(metadata, requestMessage);
        }
        PutObjectRequest putObjectRequest = null;
        try {
            if (payload instanceof InputStream) {
                InputStream inputStream = (InputStream) payload;
                if (metadata.getContentMD5() == null) {
                    Assert.state(inputStream.markSupported(), "For an upload InputStream with no MD5 digest metadata, the " + "markSupported() method must evaluate to true. ");
                    String contentMd5 = Md5Utils.md5AsBase64(inputStream);
                    metadata.setContentMD5(contentMd5);
                    inputStream.reset();
                }
                putObjectRequest = new PutObjectRequest(bucketName, key, inputStream, metadata);
            } else if (payload instanceof File) {
                File fileToUpload = (File) payload;
                if (key == null) {
                    key = fileToUpload.getName();
                }
                if (metadata.getContentMD5() == null) {
                    String contentMd5 = Md5Utils.md5AsBase64(fileToUpload);
                    metadata.setContentMD5(contentMd5);
                }
                if (metadata.getContentLength() == 0) {
                    metadata.setContentLength(fileToUpload.length());
                }
                if (metadata.getContentType() == null) {
                    metadata.setContentType(Mimetypes.getInstance().getMimetype(fileToUpload));
                }
                putObjectRequest = new PutObjectRequest(bucketName, key, fileToUpload).withMetadata(metadata);
            } else if (payload instanceof byte[]) {
                byte[] payloadBytes = (byte[]) payload;
                InputStream inputStream = new ByteArrayInputStream(payloadBytes);
                if (metadata.getContentMD5() == null) {
                    String contentMd5 = Md5Utils.md5AsBase64(inputStream);
                    metadata.setContentMD5(contentMd5);
                    inputStream.reset();
                }
                if (metadata.getContentLength() == 0) {
                    metadata.setContentLength(payloadBytes.length);
                }
                putObjectRequest = new PutObjectRequest(bucketName, key, inputStream, metadata);
            } else {
                throw new IllegalArgumentException("Unsupported payload type: [" + payload.getClass() + "]. The only supported payloads for the upload request are " + "java.io.File, java.io.InputStream, byte[] and PutObjectRequest.");
            }
        } catch (IOException e) {
            throw new MessageHandlingException(requestMessage, e);
        }
        if (key == null) {
            if (this.keyExpression != null) {
                throw new IllegalStateException("The 'keyExpression' [" + this.keyExpression.getExpressionString() + "] must not evaluate to null. Root object is: " + requestMessage);
            } else {
                throw new IllegalStateException("Specify a 'keyExpression' for non-java.io.File payloads");
            }
        }
        S3ProgressListener progressListener = this.s3ProgressListener;
        if (this.objectAclExpression != null) {
            Object acl = this.objectAclExpression.getValue(this.evaluationContext, requestMessage);
            Assert.state(acl instanceof AccessControlList || acl instanceof CannedAccessControlList, "The 'objectAclExpression' [" + this.objectAclExpression.getExpressionString() + "] must evaluate to com.amazonaws.services.s3.model.AccessControlList " + "or must evaluate to com.amazonaws.services.s3.model.CannedAccessControlList. " + "Gotten: [" + acl + "]");
            SetObjectAclRequest aclRequest;
            if (acl instanceof AccessControlList) {
                aclRequest = new SetObjectAclRequest(bucketName, key, (AccessControlList) acl);
            } else {
                aclRequest = new SetObjectAclRequest(bucketName, key, (CannedAccessControlList) acl);
            }
            final SetObjectAclRequest theAclRequest = aclRequest;
            progressListener = new S3ProgressListener() {

                @Override
                public void onPersistableTransfer(PersistableTransfer persistableTransfer) {
                }

                @Override
                public void progressChanged(ProgressEvent progressEvent) {
                    if (ProgressEventType.TRANSFER_COMPLETED_EVENT.equals(progressEvent.getEventType())) {
                        S3MessageHandler.this.transferManager.getAmazonS3Client().setObjectAcl(theAclRequest);
                    }
                }
            };
            if (this.s3ProgressListener != null) {
                progressListener = new S3ProgressListenerChain(this.s3ProgressListener, progressListener);
            }
        }
        if (progressListener != null) {
            return this.transferManager.upload(putObjectRequest, progressListener);
        } else {
            return this.transferManager.upload(putObjectRequest);
        }
    }
}
Also used : CannedAccessControlList (com.amazonaws.services.s3.model.CannedAccessControlList), AccessControlList (com.amazonaws.services.s3.model.AccessControlList), SetObjectAclRequest (com.amazonaws.services.s3.model.SetObjectAclRequest), ByteArrayInputStream (java.io.ByteArrayInputStream), InputStream (java.io.InputStream), IOException (java.io.IOException), ProgressEvent (com.amazonaws.event.ProgressEvent), MessageHandlingException (org.springframework.messaging.MessageHandlingException), S3ProgressListenerChain (com.amazonaws.services.s3.transfer.internal.S3ProgressListenerChain), S3ProgressListener (com.amazonaws.services.s3.transfer.internal.S3ProgressListener), PersistableTransfer (com.amazonaws.services.s3.transfer.PersistableTransfer), File (java.io.File), ObjectMetadata (com.amazonaws.services.s3.model.ObjectMetadata), PutObjectRequest (com.amazonaws.services.s3.model.PutObjectRequest)
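
The handler returns the Transfer produced by TransferManager instead of blocking, so completion handling is left to the caller. A minimal, hypothetical caller-side sketch (bucket, key, and file path are placeholders):

import java.io.File;
import com.amazonaws.services.s3.model.PutObjectRequest;
import com.amazonaws.services.s3.transfer.TransferManager;
import com.amazonaws.services.s3.transfer.TransferManagerBuilder;
import com.amazonaws.services.s3.transfer.Upload;

public class TransferUsageSketch {

    public static void main(String[] args) throws InterruptedException {
        TransferManager transferManager = TransferManagerBuilder.defaultTransferManager();
        // Hypothetical request; S3MessageHandler builds this from the message payload.
        PutObjectRequest putObjectRequest =
                new PutObjectRequest("my-bucket", "my/key", new File("/tmp/data.txt"));

        // upload() returns immediately with an asynchronous Transfer handle.
        Upload upload = transferManager.upload(putObjectRequest);

        // The caller can poll progress and state ...
        System.out.printf("%.0f%% transferred, state=%s%n",
                upload.getProgress().getPercentTransferred(), upload.getState());

        // ... or block until the transfer finishes (throws on failure).
        upload.waitForCompletion();

        transferManager.shutdownNow(false);
    }
}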

Example 50 with Transfer

use of com.amazonaws.services.s3.transfer.Transfer in project spring-integration-aws by spring-projects.

the class S3MessageHandler method copy.

private Transfer copy(Message<?> requestMessage) {
    String sourceBucketName = obtainBucket(requestMessage);
    String sourceKey = null;
    if (this.keyExpression != null) {
        sourceKey = this.keyExpression.getValue(this.evaluationContext, requestMessage, String.class);
    }
    Assert.state(sourceKey != null, "The 'keyExpression' must not be null for 'copy' operation " + "and 'keyExpression' can't evaluate to null. " + "Root object is: " + requestMessage);
    String destinationBucketName = null;
    if (this.destinationBucketExpression != null) {
        destinationBucketName = this.destinationBucketExpression.getValue(this.evaluationContext, requestMessage, String.class);
    }
    if (this.resourceIdResolver != null) {
        destinationBucketName = this.resourceIdResolver.resolveToPhysicalResourceId(destinationBucketName);
    }
    Assert.state(destinationBucketName != null, "The 'destinationBucketExpression' must not be null for 'copy' operation and can't evaluate to null. " + "Root object is: " + requestMessage);
    String destinationKey = null;
    if (this.destinationKeyExpression != null) {
        destinationKey = this.destinationKeyExpression.getValue(this.evaluationContext, requestMessage, String.class);
    }
    Assert.state(destinationKey != null, "The 'destinationKeyExpression' must not be null for 'copy' operation and can't evaluate to null. " + "Root object is: " + requestMessage);
    CopyObjectRequest copyObjectRequest = new CopyObjectRequest(sourceBucketName, sourceKey, destinationBucketName, destinationKey);
    return this.transferManager.copy(copyObjectRequest);
}
Also used : CopyObjectRequest(com.amazonaws.services.s3.model.CopyObjectRequest)
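
As with upload, copy() is asynchronous and the returned Transfer (a Copy) must be awaited by the caller. A sketch under the same assumptions, with hypothetical buckets and keys:

import com.amazonaws.services.s3.model.CopyObjectRequest;
import com.amazonaws.services.s3.transfer.Copy;
import com.amazonaws.services.s3.transfer.TransferManager;
import com.amazonaws.services.s3.transfer.TransferManagerBuilder;
import com.amazonaws.services.s3.transfer.model.CopyResult;

public class CopyUsageSketch {

    public static void main(String[] args) throws InterruptedException {
        TransferManager transferManager = TransferManagerBuilder.defaultTransferManager();
        // Hypothetical buckets and keys; S3MessageHandler resolves these from SpEL expressions.
        CopyObjectRequest copyObjectRequest =
                new CopyObjectRequest("source-bucket", "source/key", "destination-bucket", "destination/key");

        // copy() returns immediately; waitForCompletion() blocks until S3 finishes the server-side copy.
        Copy copy = transferManager.copy(copyObjectRequest);
        CopyResult copyResult = copy.waitForCompletion();
        System.out.println("Copied to s3://" + copyResult.getDestinationBucketName()
                + "/" + copyResult.getDestinationKey());

        transferManager.shutdownNow(false);
    }
}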

Aggregations

Types used across these examples, with usage counts:

S3FileTransferRequestParamsDto (org.finra.herd.model.dto.S3FileTransferRequestParamsDto): 18
File (java.io.File): 17
S3ObjectSummary (com.amazonaws.services.s3.model.S3ObjectSummary): 14
Test (org.junit.Test): 14
TransferManager (com.amazonaws.services.s3.transfer.TransferManager): 13
AmazonClientException (com.amazonaws.AmazonClientException): 11
AmazonServiceException (com.amazonaws.AmazonServiceException): 8
Tag (com.amazonaws.services.s3.model.Tag): 8
Transfer (com.amazonaws.services.s3.transfer.Transfer): 8
IOException (java.io.IOException): 8
AmazonS3Client (com.amazonaws.services.s3.AmazonS3Client): 7
ObjectMetadata (com.amazonaws.services.s3.model.ObjectMetadata): 7
S3FileTransferResultsDto (org.finra.herd.model.dto.S3FileTransferResultsDto): 7
PutObjectRequest (com.amazonaws.services.s3.model.PutObjectRequest): 5
ThreadPoolExecutor (java.util.concurrent.ThreadPoolExecutor): 5
BusinessObjectDataKey (org.finra.herd.model.api.xml.BusinessObjectDataKey): 5
StorageFile (org.finra.herd.model.api.xml.StorageFile): 5
TransferState (com.amazonaws.services.s3.transfer.Transfer.TransferState): 4
ArrayList (java.util.ArrayList): 4
AbstractDaoTest (org.finra.herd.dao.AbstractDaoTest): 4