use of com.amazonaws.services.s3.AmazonS3 in project deeplearning4j by deeplearning4j.
the class S3Uploader method multiPartUpload.
/**
 * Multi-part upload for large files.
 * @param file the file to upload
 * @param bucketName the name of the bucket to upload to
 */
public void multiPartUpload(File file, String bucketName) {
    AmazonS3 client = new AmazonS3Client(creds);
    bucketName = ensureValidBucketName(bucketName);
    List<Bucket> buckets = client.listBuckets();
    for (Bucket b : buckets) {
        if (b.getName().equals(bucketName)) {
            doMultiPart(client, bucketName, file);
            return;
        }
    }
    // bucket didn't exist: create it, then upload
    client.createBucket(bucketName);
    doMultiPart(client, bucketName, file);
}
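The doMultiPart helper is not shown in this snippet. A minimal sketch of what such a helper could look like, assuming the AWS SDK v1 TransferManager (which splits large files into parts automatically); deriving the object key from the file name is an assumption, not taken from the deeplearning4j source:

import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.transfer.TransferManager;
import com.amazonaws.services.s3.transfer.Upload;
import java.io.File;

// Hypothetical sketch of the doMultiPart helper referenced above; the real
// deeplearning4j implementation may differ.
private void doMultiPart(AmazonS3 client, String bucketName, File file) {
    // TransferManager uploads large files as parallel multi-part requests.
    TransferManager tm = new TransferManager(client);
    try {
        // Assumption: the object key is simply the file name.
        Upload upload = tm.upload(bucketName, file.getName(), file);
        upload.waitForCompletion(); // block until the multi-part upload finishes
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
        throw new RuntimeException("Upload interrupted", e);
    } finally {
        // Shut down only the TransferManager's threads, not the shared client.
        tm.shutdownNow(false);
    }
}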
use of com.amazonaws.services.s3.AmazonS3 in project crate by crate.
the class FileReadingCollectorTest method createBatchIterator.
private BatchIterator createBatchIterator(Collection<String> fileUris, String compression, final S3ObjectInputStream s3InputStream) {
    Reference raw = createReference("_raw", DataTypes.STRING);
    InputFactory.Context<LineCollectorExpression<?>> ctx = inputFactory.ctxForRefs(FileLineReferenceResolver::getImplementation);
    List<Input<?>> inputs = Collections.singletonList(ctx.add(raw));
    return FileReadingIterator.newInstance(
        fileUris,
        inputs,
        ctx.expressions(),
        compression,
        ImmutableMap.of(
            LocalFsFileInputFactory.NAME, new LocalFsFileInputFactory(),
            S3FileInputFactory.NAME, () -> new S3FileInput(new S3ClientHelper() {

                @Override
                protected AmazonS3 initClient(String accessKey, String secretKey) throws IOException {
                    // Stub out the S3 client so the test never touches the network:
                    // a single object "foo" in "fakebucket", backed by s3InputStream.
                    AmazonS3 client = mock(AmazonS3Client.class);
                    ObjectListing objectListing = mock(ObjectListing.class);
                    S3ObjectSummary summary = mock(S3ObjectSummary.class);
                    S3Object s3Object = mock(S3Object.class);
                    when(client.listObjects(anyString(), anyString())).thenReturn(objectListing);
                    when(objectListing.getObjectSummaries()).thenReturn(Arrays.asList(summary));
                    when(summary.getKey()).thenReturn("foo");
                    when(client.getObject("fakebucket", "foo")).thenReturn(s3Object);
                    when(s3Object.getObjectContent()).thenReturn(s3InputStream);
                    when(client.listNextBatchOfObjects(any(ObjectListing.class))).thenReturn(objectListing);
                    when(objectListing.isTruncated()).thenReturn(false);
                    return client;
                }
            })),
        false, 1, 0);
}
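A hedged sketch of how a test could drive this factory with an in-memory stream; the literal content, the fake URI, and passing null for the HttpRequestBase are illustrative assumptions, not taken from the Crate test itself:

import com.amazonaws.services.s3.model.S3ObjectInputStream;
import java.io.ByteArrayInputStream;
import java.nio.charset.StandardCharsets;
import java.util.Collections;

// Hypothetical call site, assuming JSON line content and no compression.
byte[] content = "{\"name\": \"foo\"}\n".getBytes(StandardCharsets.UTF_8);
S3ObjectInputStream fakeStream =
    new S3ObjectInputStream(new ByteArrayInputStream(content), null);
BatchIterator it = createBatchIterator(
    Collections.singletonList("s3://fakebucket/foo"), // matches the mocked bucket/key
    null,                                             // no compression
    fakeStream);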
use of com.amazonaws.services.s3.AmazonS3 in project elasticsearch by elastic.
the class InternalAwsS3Service method client.
@Override
public synchronized AmazonS3 client(Settings repositorySettings, Integer maxRetries, boolean useThrottleRetries, Boolean pathStyleAccess) {
    String clientName = CLIENT_NAME.get(repositorySettings);
    String foundEndpoint = findEndpoint(logger, repositorySettings, settings, clientName);
    AWSCredentialsProvider credentials = buildCredentials(logger, deprecationLogger, settings, repositorySettings, clientName);
    // Cache clients per (endpoint, access key id) so different credentials
    // against the same endpoint never share a client.
    Tuple<String, String> clientDescriptor = new Tuple<>(foundEndpoint, credentials.getCredentials().getAWSAccessKeyId());
    AmazonS3Client client = clients.get(clientDescriptor);
    if (client != null) {
        return client;
    }
    client = new AmazonS3Client(credentials, buildConfiguration(logger, repositorySettings, settings, clientName, maxRetries, foundEndpoint, useThrottleRetries));
    if (pathStyleAccess != null) {
        client.setS3ClientOptions(new S3ClientOptions().withPathStyleAccess(pathStyleAccess));
    }
    if (!foundEndpoint.isEmpty()) {
        client.setEndpoint(foundEndpoint);
    }
    clients.put(clientDescriptor, client);
    return client;
}
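The composite cache key matters: two repositories sharing an endpoint but using different credentials must get distinct clients. A minimal, self-contained sketch of the same pattern, assuming plain JDK types instead of Elasticsearch's Tuple; all names here are illustrative:

import com.amazonaws.auth.AWSCredentialsProvider;
import com.amazonaws.services.s3.AmazonS3Client;
import java.util.AbstractMap.SimpleImmutableEntry;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

// Illustrative client cache keyed on (endpoint, access key id).
final Map<SimpleImmutableEntry<String, String>, AmazonS3Client> clients = new ConcurrentHashMap<>();

AmazonS3Client clientFor(String endpoint, AWSCredentialsProvider credentials) {
    SimpleImmutableEntry<String, String> key =
        new SimpleImmutableEntry<>(endpoint, credentials.getCredentials().getAWSAccessKeyId());
    // computeIfAbsent gives the same read-through caching as the synchronized
    // get/put in the snippet above, without an explicit lock.
    return clients.computeIfAbsent(key, k -> {
        AmazonS3Client client = new AmazonS3Client(credentials);
        if (!endpoint.isEmpty()) {
            client.setEndpoint(endpoint);
        }
        return client;
    });
}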
use of com.amazonaws.services.s3.AmazonS3 in project h2o-2 by h2oai.
the class ImportFiles2 method serveS3.
protected void serveS3() {
    Futures fs = new Futures();
    assert path.startsWith("s3://");
    path = path.substring(5);
    int bend = path.indexOf('/');
    if (bend == -1)
        bend = path.length();
    String bucket = path.substring(0, bend);
    String prefix = bend < path.length() ? path.substring(bend + 1) : "";
    AmazonS3 s3 = PersistS3.getClient();
    if (!s3.doesBucketExist(bucket))
        throw new IllegalArgumentException("S3 Bucket " + bucket + " not found!");
    ArrayList<String> succ = new ArrayList<String>();
    ArrayList<String> fail = new ArrayList<String>();
    ObjectListing currentList = s3.listObjects(bucket, prefix);
    while (true) {
        for (S3ObjectSummary obj : currentList.getObjectSummaries()) {
            try {
                succ.add(S3FileVec.make(obj, fs).toString());
            } catch (Throwable e) {
                fail.add(obj.getKey());
                Log.err("Failed to load file from S3: path = " + obj.getKey() + ", error = " + e.getClass().getName() + ", msg = " + e.getMessage());
            }
        }
        if (currentList.isTruncated())
            currentList = s3.listNextBatchOfObjects(currentList);
        else
            break;
    }
    keys = succ.toArray(new String[succ.size()]);
    files = keys;
    fails = fail.toArray(new String[fail.size()]);
    this.prefix = getCommonPrefix(keys);
}
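The bucket/prefix split at the top of serveS3 is easy to get off by one; a standalone sketch of the same parsing, with an illustrative helper name:

// Illustrative helper mirroring the parsing in serveS3:
// "s3://bucket/some/prefix" -> ["bucket", "some/prefix"]
// "s3://bucket"             -> ["bucket", ""]
static String[] splitS3Path(String path) {
    assert path.startsWith("s3://");
    String rest = path.substring(5);          // drop "s3://"
    int slash = rest.indexOf('/');
    if (slash == -1) {
        return new String[] { rest, "" };     // bucket only, empty prefix
    }
    return new String[] { rest.substring(0, slash), rest.substring(slash + 1) };
}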
use of com.amazonaws.services.s3.AmazonS3 in project h2o-2 by h2oai.
the class ImportS3 method serve.
@Override
protected Response serve() {
    String bucket = _bucket.value();
    Log.info("ImportS3 processing (" + bucket + ")");
    JsonObject json = new JsonObject();
    JsonArray succ = new JsonArray();
    JsonArray fail = new JsonArray();
    AmazonS3 s3 = PersistS3.getClient();
    ObjectListing currentList = s3.listObjects(bucket);
    processListing(currentList, succ, fail);
    while (currentList.isTruncated()) {
        currentList = s3.listNextBatchOfObjects(currentList);
        processListing(currentList, succ, fail);
    }
    json.add(NUM_SUCCEEDED, new JsonPrimitive(succ.size()));
    json.add(SUCCEEDED, succ);
    json.add(NUM_FAILED, new JsonPrimitive(fail.size()));
    json.add(FAILED, fail);
    DKV.write_barrier();
    Response r = Response.done(json);
    r.setBuilder(SUCCEEDED + "." + KEY, new KeyCellBuilder());
    return r;
}
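Both h2o-2 snippets walk truncated listings the same way; a reusable sketch of that pagination loop, assuming only the stock AmazonS3 listing API (the helper name is illustrative):

import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.model.ObjectListing;
import com.amazonaws.services.s3.model.S3ObjectSummary;
import java.util.ArrayList;
import java.util.List;

// Illustrative helper: collect every key under a prefix, following
// truncated pages via listNextBatchOfObjects.
static List<String> listAllKeys(AmazonS3 s3, String bucket, String prefix) {
    List<String> keys = new ArrayList<>();
    ObjectListing listing = s3.listObjects(bucket, prefix);
    while (true) {
        for (S3ObjectSummary summary : listing.getObjectSummaries()) {
            keys.add(summary.getKey());
        }
        if (!listing.isTruncated()) {
            break;
        }
        listing = s3.listNextBatchOfObjects(listing);
    }
    return keys;
}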