Use of com.talend.shaded.com.amazonaws.services.s3.AmazonS3 in project deeplearning4j by deeplearning4j.
The class S3Uploader, method multiPartUpload.
/**
 * Multi-part upload for big files.
 * @param file the file to upload
 * @param bucketName the name of the bucket to upload to
 */
public void multiPartUpload(File file, String bucketName) {
    AmazonS3 client = new AmazonS3Client(creds);
    bucketName = ensureValidBucketName(bucketName);
    List<Bucket> buckets = client.listBuckets();
    for (Bucket b : buckets)
        if (b.getName().equals(bucketName)) {
            doMultiPart(client, bucketName, file);
            return;
        }
    //bucket didn't exist: create it
    client.createBucket(bucketName);
    doMultiPart(client, bucketName, file);
}
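The doMultiPart helper is not shown in this snippet. A minimal sketch of one way it could be written, delegating the chunking and parallel part uploads to the SDK's TransferManager; the method name and structure are assumptions, not the project's actual code:

// Hypothetical helper, sketched for illustration only: TransferManager splits the
// file into parts and uploads them in parallel on its own thread pool.
private void doMultiPart(AmazonS3 client, String bucketName, File file) {
    TransferManager tm = new TransferManager(client);
    try {
        Upload upload = tm.upload(bucketName, file.getName(), file);
        upload.waitForCompletion(); // blocks until every part has been uploaded
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
        throw new RuntimeException("Multipart upload interrupted", e);
    } finally {
        tm.shutdownNow(false); // stop the transfer threads but keep the shared client alive
    }
}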
Use of com.talend.shaded.com.amazonaws.services.s3.AmazonS3 in project hadoop by apache.
The class ITestS3AConfiguration, method testDefaultUserAgent.
@Test
public void testDefaultUserAgent() throws Exception {
    conf = new Configuration();
    fs = S3ATestUtils.createTestFileSystem(conf);
    assertNotNull(fs);
    AmazonS3 s3 = fs.getAmazonS3Client();
    assertNotNull(s3);
    ClientConfiguration awsConf = getField(s3, ClientConfiguration.class, "clientConfiguration");
    assertEquals("Hadoop " + VersionInfo.getVersion(), awsConf.getUserAgentPrefix());
}
Use of com.talend.shaded.com.amazonaws.services.s3.AmazonS3 in project hadoop by apache.
The class ITestS3AConfiguration, method testCustomUserAgent.
@Test
public void testCustomUserAgent() throws Exception {
    conf = new Configuration();
    conf.set(Constants.USER_AGENT_PREFIX, "MyApp");
    fs = S3ATestUtils.createTestFileSystem(conf);
    assertNotNull(fs);
    AmazonS3 s3 = fs.getAmazonS3Client();
    assertNotNull(s3);
    ClientConfiguration awsConf = getField(s3, ClientConfiguration.class, "clientConfiguration");
    assertEquals("MyApp, Hadoop " + VersionInfo.getVersion(), awsConf.getUserAgentPrefix());
}
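Both tests rely on a getField helper to reach the SDK client's private clientConfiguration field. A minimal reflection-based sketch of such a helper, written here as an assumption; Hadoop's actual implementation may differ:

// Hypothetical reflection helper (requires java.lang.reflect.Field); it walks up the
// class hierarchy because the field may be declared on a superclass of the client.
private static <T> T getField(Object target, Class<T> fieldType, String fieldName) throws Exception {
    for (Class<?> c = target.getClass(); c != null; c = c.getSuperclass()) {
        try {
            Field field = c.getDeclaredField(fieldName);
            field.setAccessible(true);
            return fieldType.cast(field.get(target));
        } catch (NoSuchFieldException ignored) {
            // not declared on this class, keep searching the superclasses
        }
    }
    throw new NoSuchFieldException(fieldName + " not found on " + target.getClass());
}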
Use of com.talend.shaded.com.amazonaws.services.s3.AmazonS3 in project hadoop by apache.
The class MockS3ClientFactory, method createS3Client.
@Override
public AmazonS3 createS3Client(URI name, URI uri) {
    String bucket = name.getHost();
    AmazonS3 s3 = mock(AmazonS3.class);
    when(s3.doesBucketExist(bucket)).thenReturn(true);
    return s3;
}
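A short usage sketch of how a test could select this mock factory through configuration so the S3A filesystem never contacts a real endpoint; the factory configuration key and the bucket URI are assumptions:

// Hypothetical test wiring: point S3A at the mock factory via configuration.
Configuration conf = new Configuration();
conf.setClass(Constants.S3_CLIENT_FACTORY_IMPL, MockS3ClientFactory.class, S3ClientFactory.class);
// Opening an s3a:// URI now builds the filesystem on top of the Mockito client.
FileSystem fs = FileSystem.get(URI.create("s3a://fakebucket/"), conf);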
Use of com.talend.shaded.com.amazonaws.services.s3.AmazonS3 in project crate by crate.
The class FileReadingCollectorTest, method createBatchIterator.
private BatchIterator createBatchIterator(Collection<String> fileUris, String compression,
                                          final S3ObjectInputStream s3InputStream) {
    Reference raw = createReference("_raw", DataTypes.STRING);
    InputFactory.Context<LineCollectorExpression<?>> ctx =
            inputFactory.ctxForRefs(FileLineReferenceResolver::getImplementation);
    List<Input<?>> inputs = Collections.singletonList(ctx.add(raw));
    return FileReadingIterator.newInstance(fileUris, inputs, ctx.expressions(), compression,
        ImmutableMap.of(
            LocalFsFileInputFactory.NAME, new LocalFsFileInputFactory(),
            S3FileInputFactory.NAME, () -> new S3FileInput(new S3ClientHelper() {

                @Override
                protected AmazonS3 initClient(String accessKey, String secretKey) throws IOException {
                    AmazonS3 client = mock(AmazonS3Client.class);
                    ObjectListing objectListing = mock(ObjectListing.class);
                    S3ObjectSummary summary = mock(S3ObjectSummary.class);
                    S3Object s3Object = mock(S3Object.class);
                    when(client.listObjects(anyString(), anyString())).thenReturn(objectListing);
                    when(objectListing.getObjectSummaries()).thenReturn(Arrays.asList(summary));
                    when(summary.getKey()).thenReturn("foo");
                    when(client.getObject("fakebucket", "foo")).thenReturn(s3Object);
                    when(s3Object.getObjectContent()).thenReturn(s3InputStream);
                    when(client.listNextBatchOfObjects(any(ObjectListing.class))).thenReturn(objectListing);
                    when(objectListing.isTruncated()).thenReturn(false);
                    return client;
                }
            })),
        false, 1, 0);
}
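A short usage sketch showing how the s3InputStream argument could be produced for a test run; the sample content, the null compression argument, and the URI are assumptions:

// Hypothetical call site: wraps fake object content in an S3ObjectInputStream
// so the mocked client above can return it from getObjectContent().
S3ObjectInputStream s3InputStream = new S3ObjectInputStream(
        new ByteArrayInputStream("{\"name\": \"foo\"}\n".getBytes(StandardCharsets.UTF_8)),
        mock(HttpRequestBase.class));
BatchIterator iterator = createBatchIterator(
        Collections.singletonList("s3://fakebucket/foo"), null, s3InputStream);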