use of com.amazonaws.services.s3.model.GetObjectRequest in project nifi by apache.
the class TestFetchS3Object method testGetObject.
@Test
public void testGetObject() throws IOException {
    runner.setProperty(FetchS3Object.REGION, "us-east-1");
    runner.setProperty(FetchS3Object.BUCKET, "request-bucket");
    final Map<String, String> attrs = new HashMap<>();
    attrs.put("filename", "request-key");
    runner.enqueue(new byte[0], attrs);

    S3Object s3ObjectResponse = new S3Object();
    s3ObjectResponse.setBucketName("response-bucket-name");
    s3ObjectResponse.setKey("response-key");
    s3ObjectResponse.setObjectContent(new StringInputStream("Some Content"));

    ObjectMetadata metadata = Mockito.spy(ObjectMetadata.class);
    metadata.setContentDisposition("key/path/to/file.txt");
    metadata.setContentType("text/plain");
    metadata.setContentMD5("testMD5hash");
    Date expiration = new Date();
    metadata.setExpirationTime(expiration);
    metadata.setExpirationTimeRuleId("testExpirationRuleId");
    Map<String, String> userMetadata = new HashMap<>();
    userMetadata.put("userKey1", "userValue1");
    userMetadata.put("userKey2", "userValue2");
    metadata.setUserMetadata(userMetadata);
    metadata.setSSEAlgorithm("testAlgorithm");
    Mockito.when(metadata.getETag()).thenReturn("test-etag");
    s3ObjectResponse.setObjectMetadata(metadata);
    Mockito.when(mockS3Client.getObject(Mockito.any())).thenReturn(s3ObjectResponse);

    runner.run(1);

    ArgumentCaptor<GetObjectRequest> captureRequest = ArgumentCaptor.forClass(GetObjectRequest.class);
    Mockito.verify(mockS3Client, Mockito.times(1)).getObject(captureRequest.capture());
    GetObjectRequest request = captureRequest.getValue();
    assertEquals("request-bucket", request.getBucketName());
    assertEquals("request-key", request.getKey());
    assertNull(request.getVersionId());

    runner.assertAllFlowFilesTransferred(FetchS3Object.REL_SUCCESS, 1);
    final List<MockFlowFile> ffs = runner.getFlowFilesForRelationship(FetchS3Object.REL_SUCCESS);
    MockFlowFile ff = ffs.get(0);
    ff.assertAttributeEquals("s3.bucket", "response-bucket-name");
    ff.assertAttributeEquals(CoreAttributes.FILENAME.key(), "file.txt");
    ff.assertAttributeEquals(CoreAttributes.PATH.key(), "key/path/to");
    ff.assertAttributeEquals(CoreAttributes.ABSOLUTE_PATH.key(), "key/path/to/file.txt");
    ff.assertAttributeEquals(CoreAttributes.MIME_TYPE.key(), "text/plain");
    ff.assertAttributeEquals("hash.value", "testMD5hash");
    ff.assertAttributeEquals("hash.algorithm", "MD5");
    ff.assertAttributeEquals("s3.etag", "test-etag");
    ff.assertAttributeEquals("s3.expirationTime", String.valueOf(expiration.getTime()));
    ff.assertAttributeEquals("s3.expirationTimeRuleId", "testExpirationRuleId");
    ff.assertAttributeEquals("userKey1", "userValue1");
    ff.assertAttributeEquals("userKey2", "userValue2");
    ff.assertAttributeEquals("s3.sseAlgorithm", "testAlgorithm");
    ff.assertContentEquals("Some Content");
}
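The captured request carries no version ID, so S3 serves the latest version of the object. If a test also needed to exercise a versioned read, the v1 GetObjectRequest constructor accepts the version directly; a minimal sketch with illustrative values:

// Sketch only: the bucket, key, and version strings are illustrative, not taken from the test above.
GetObjectRequest versionedRequest = new GetObjectRequest("request-bucket", "request-key", "test-version-id");
assertEquals("test-version-id", versionedRequest.getVersionId());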
use of com.amazonaws.services.s3.model.GetObjectRequest in project Synapse-Stack-Builder by Sage-Bionetworks.
the class SSLSetup method getCertificateStringFromS3.
/**
 * Download a certificate file from S3 directly into a string, skipping an intermediate file.
 * @param key the S3 key of the certificate object
 * @return the certificate contents as a UTF-8 string
 */
public String getCertificateStringFromS3(String key) {
    // For this case we do not write to a file first.
    S3Object s3Object = s3Client.getObject(new GetObjectRequest(config.getDefaultS3BucketName(), key));
    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
    try {
        byte[] buffer = new byte[1024 * 10];
        int bytesRead;
        while ((bytesRead = s3Object.getObjectContent().read(buffer)) > -1) {
            outputStream.write(buffer, 0, bytesRead);
        }
        // Go right to string.
        return new String(outputStream.toByteArray(), "UTF-8");
    } catch (IOException e) {
        s3Object.getObjectContent().abort();
        throw new AmazonClientException("Unable to read object contents into a string: " + e.getMessage(), e);
    } finally {
        try {
            outputStream.close();
        } catch (Exception e) {
        }
        try {
            s3Object.getObjectContent().close();
        } catch (Exception e) {
        }
    }
}
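The manual buffer loop can also be written against the SDK's own stream helper; a minimal sketch assuming the v1 utility class com.amazonaws.util.IOUtils (shipped with aws-java-sdk-core) and the same config and s3Client fields as above:

public String getCertificateStringFromS3(String key) {
    // Sketch only: try-with-resources closes the object content stream,
    // and IOUtils drains it into a String in one call.
    S3Object s3Object = s3Client.getObject(new GetObjectRequest(config.getDefaultS3BucketName(), key));
    try (S3ObjectInputStream content = s3Object.getObjectContent()) {
        return IOUtils.toString(content);
    } catch (IOException e) {
        throw new AmazonClientException("Unable to read certificate " + key + ": " + e.getMessage(), e);
    }
}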
use of com.amazonaws.services.s3.model.GetObjectRequest in project Synapse-Stack-Builder by Sage-Bionetworks.
the class StackDefaults method loadStackDefaultsFromS3.
/**
 * Connect to S3 and download the default properties for this stack.
 *
 * @return the default stack properties
 * @throws IOException
 */
public Properties loadStackDefaultsFromS3() throws IOException {
    // Create the config bucket.
    String bucketName = config.getStackConfigS3BucketName();
    log.info("Creating S3 Bucket: " + bucketName);
    // This call is idempotent and will only actually create the bucket if it does not already exist.
    Bucket bucket = s3Client.createBucket(bucketName);
    // This is the bucket where we expect to find the properties.
    bucketName = config.getDefaultS3BucketName();
    log.info("Creating S3 Bucket: " + bucketName);
    // This call is idempotent and will only actually create the bucket if it does not already exist.
    bucket = s3Client.createBucket(bucketName);
    String fileName = config.getDefaultPropertiesFileName();
    File temp = File.createTempFile("DefaultProps", ".properties");
    FileInputStream in = new FileInputStream(temp);
    try {
        // Download the file to a temp file.
        s3Client.getObject(new GetObjectRequest(bucketName, fileName), temp);
        Properties props = new Properties();
        props.load(in);
        // Did we get the expected properties?
        validateProperties(bucketName, fileName, props);
        // Done
        return props;
    } catch (IOException e) {
        log.error("Failed to read '" + fileName + "' downloaded from the S3 bucket: '" + bucketName + "'. Expected the file to be a java.util.Properties file.");
        throw e;
    } catch (AmazonClientException e) {
        log.error("Failed to download '" + fileName + "' from the S3 bucket: '" + bucketName + "'. Make sure the file exists and try again.");
        throw e;
    } finally {
        in.close();
        // Delete the temp file.
        temp.delete();
    }
}
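Because java.util.Properties can load from any InputStream, the same data could also be read without the temp file by streaming the object content directly; a minimal sketch (the helper name loadPropertiesDirectly is hypothetical, and the same s3Client field is assumed):

private Properties loadPropertiesDirectly(String bucketName, String fileName) throws IOException {
    // Sketch only: stream the S3 object straight into java.util.Properties, no temp file.
    S3Object object = s3Client.getObject(new GetObjectRequest(bucketName, fileName));
    try (InputStream in = object.getObjectContent()) {
        Properties props = new Properties();
        props.load(in);
        return props;
    }
}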
use of com.amazonaws.services.s3.model.GetObjectRequest in project stocator by CODAIT.
the class COSInputStream method reopen.
/**
* Opens up the stream at specified target position and for given length.
*
* @param reason reason for reopen
* @param targetPos target position
* @param length length requested
* @throws IOException on any failure to open the object
*/
private synchronized void reopen(String reason, long targetPos, long length) throws IOException {
    if (wrappedStream != null) {
        closeStream("reopen(" + reason + ")", contentRangeFinish, false);
    }
    contentRangeFinish = calculateRequestLimit(inputPolicy, targetPos, length, contentLength, readahead);
    LOG.debug("reopen({}) for {} range[{}-{}], length={}, streamPosition={}, nextReadPosition={}",
        uri, reason, targetPos, contentRangeFinish, length, pos, nextReadPos);
    try {
        GetObjectRequest request = new GetObjectRequest(bucket, key).withRange(targetPos, contentRangeFinish - 1);
        wrappedStream = client.getObject(request).getObjectContent();
        contentRangeStart = targetPos;
        if (wrappedStream == null) {
            throw new IOException("Null IO stream from reopen of (" + reason + ") " + uri);
        }
    } catch (AmazonClientException e) {
        throw COSUtils.translateException("Reopen at position " + targetPos, uri, e);
    }
    pos = targetPos;
}
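withRange in the v1 SDK takes an inclusive start and end offset, which is why the request above ends at contentRangeFinish - 1. A standalone sketch of the same ranged-GET pattern, with illustrative bucket, key, and offsets, and the same AmazonS3 client field assumed:

// Sketch only: fetch the first 1024 bytes of an object; the Range is inclusive on both ends.
GetObjectRequest rangeRequest = new GetObjectRequest("example-bucket", "example-key").withRange(0, 1023);
S3Object partial = client.getObject(rangeRequest);
try (S3ObjectInputStream content = partial.getObjectContent()) {
    byte[] firstKilobyte = IOUtils.toByteArray(content); // com.amazonaws.util.IOUtils
}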
use of com.amazonaws.services.s3.model.GetObjectRequest in project herd by FINRAOS.
the class MockS3OperationsImpl method downloadDirectory.
/**
* {@inheritDoc}
* <p/>
* This implementation creates any directory that does not exist in the path to the destination directory.
*/
@Override
public MultipleFileDownload downloadDirectory(String bucketName, String keyPrefix, File destinationDirectory, TransferManager transferManager) {
    LOGGER.debug("downloadDirectory(): bucketName = " + bucketName + ", keyPrefix = " + keyPrefix + ", destinationDirectory = " + destinationDirectory);
    MockS3Bucket mockS3Bucket = mockS3Buckets.get(bucketName);
    List<Download> downloads = new ArrayList<>();
    long totalBytes = 0;
    if (mockS3Bucket != null) {
        for (MockS3Object mockS3Object : mockS3Bucket.getObjects().values()) {
            if (mockS3Object.getKey().startsWith(keyPrefix)) {
                String filePath = destinationDirectory.getAbsolutePath() + "/" + mockS3Object.getKey();
                File file = new File(filePath);
                // Create any directory in the path that does not exist.
                file.getParentFile().mkdirs();
                try (FileOutputStream fileOutputStream = new FileOutputStream(file)) {
                    LOGGER.debug("downloadDirectory(): Writing file " + file);
                    fileOutputStream.write(mockS3Object.getData());
                    totalBytes += mockS3Object.getData().length;
                    downloads.add(new DownloadImpl(null, null, null, null, null,
                        new GetObjectRequest(bucketName, mockS3Object.getKey()), file, mockS3Object.getObjectMetadata(), false));
                } catch (IOException e) {
                    throw new RuntimeException("Error writing to file " + file, e);
                }
            }
        }
    }
    TransferProgress progress = new TransferProgress();
    progress.setTotalBytesToTransfer(totalBytes);
    progress.updateProgress(totalBytes);
    MultipleFileDownloadImpl multipleFileDownload = new MultipleFileDownloadImpl(null, progress, null, keyPrefix, bucketName, downloads);
    multipleFileDownload.setState(TransferState.Completed);
    return multipleFileDownload;
}
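This mock imitates TransferManager.downloadDirectory without any network calls. For contrast, a minimal sketch of the real call it stands in for, assuming an already-configured TransferManager and illustrative bucket, prefix, and destination values:

// Sketch only: downloadDirectory returns immediately with a MultipleFileDownload handle;
// waitForCompletion blocks until every object under the prefix has been written locally.
MultipleFileDownload download = transferManager.downloadDirectory("example-bucket", "reports/2020/", new File("/tmp/reports"));
try {
    download.waitForCompletion();
} catch (InterruptedException e) {
    Thread.currentThread().interrupt();
}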