Example use of com.amazonaws.services.s3.model.Bucket in project exhibitor by soabase.
From the class TestS3BackupProviderBase, method testDownload:
@Test
public void testDownload() throws Exception {
    // Round-trip check: upload sourceFile through the mock S3 client, download it
    // via the provider, and verify the bytes survived unchanged.
    File downloadTarget = File.createTempFile("test", ".test");
    InputStream uploadStream = null;
    OutputStream downloadStream = null;
    try {
        uploadStream = new FileInputStream(sourceFile);
        String objectKey = "exhibitor-backup" + S3BackupProvider.SEPARATOR + "test" + S3BackupProvider.SEPARATOR + 1;
        MockS3Client s3Client = new MockS3Client(null, null);
        s3Client.putObject(new PutObjectRequest("bucket", objectKey, uploadStream, null));
        S3BackupProvider provider = new S3BackupProvider(new MockS3ClientFactory(s3Client), new PropertyBasedS3Credential(new Properties()), new PropertyBasedS3ClientConfig(new Properties()), null);
        downloadStream = new FileOutputStream(downloadTarget);
        provider.downloadBackup(null, new BackupMetaData("test", 1), downloadStream, Maps.<String, String>newHashMap());
        // Downloaded content must match the original source file byte-for-byte.
        Assert.assertEquals(Files.toByteArray(sourceFile), Files.toByteArray(downloadTarget));
    } finally {
        CloseableUtils.closeQuietly(uploadStream);
        CloseableUtils.closeQuietly(downloadStream);
        //noinspection ResultOfMethodCallIgnored
        downloadTarget.delete();
    }
}
Example use of com.amazonaws.services.s3.model.Bucket in project YCSB by brianfrankcooper.
From the class S3Client, method readFromStorage:
/**
 * Download an object from S3 and store its full contents in the result map.
 *
 * @param bucket
 *            The name of the bucket
 * @param key
 *            The file key of the object to upload/update.
 * @param result
 *            The Hash map where data from the object are written
 * @param ssecLocal
 *            Customer-provided encryption key for SSE-C reads (may be null)
 * @return Status.OK on success, Status.ERROR on any failure
 */
protected Status readFromStorage(String bucket, String key, HashMap<String, ByteIterator> result, SSECustomerKey ssecLocal) {
    try {
        Map.Entry<S3Object, ObjectMetadata> objectAndMetadata = getS3ObjectAndMetadata(bucket, key, ssecLocal);
        InputStream objectData = objectAndMetadata.getKey().getObjectContent();
        try {
            int sizeOfFile = (int) objectAndMetadata.getValue().getContentLength();
            byte[] inputStreamToByte = new byte[sizeOfFile];
            // BUG FIX: InputStream.read() may return fewer bytes than requested,
            // so a single read() call can silently truncate the object. Loop until
            // the whole declared content length has been consumed.
            int offset = 0;
            while (offset < sizeOfFile) {
                int bytesRead = objectData.read(inputStreamToByte, offset, sizeOfFile - offset);
                if (bytesRead < 0) {
                    throw new IllegalStateException("Unexpected end of stream for object " + key);
                }
                offset += bytesRead;
            }
            result.put(key, new ByteArrayByteIterator(inputStreamToByte));
        } finally {
            // Close in finally so the stream/object are released even when the read fails.
            objectData.close();
            objectAndMetadata.getKey().close();
        }
    } catch (Exception e) {
        System.err.println("Not possible to get the object " + key);
        e.printStackTrace();
        return Status.ERROR;
    }
    return Status.OK;
}
Example use of com.amazonaws.services.s3.model.Bucket in project h2o-2 by h2oai.
From the class ImportFiles2, method serveS3:
/**
 * Imports every object under an s3://bucket/prefix path, recording which keys
 * loaded successfully and which failed. Populates the keys/files/fails/prefix fields.
 */
protected void serveS3() {
    Futures fs = new Futures();
    assert path.startsWith("s3://");
    // Strip the "s3://" scheme, then split into bucket and optional key prefix.
    path = path.substring(5);
    int bend = path.indexOf('/');
    if (bend == -1) {
        bend = path.length();
    }
    String bucket = path.substring(0, bend);
    String prefix = bend < path.length() ? path.substring(bend + 1) : "";
    AmazonS3 s3 = PersistS3.getClient();
    if (!s3.doesBucketExist(bucket)) {
        throw new IllegalArgumentException("S3 Bucket " + bucket + " not found!");
    }
    // (removed a stray empty statement ";" that followed the throw)
    ArrayList<String> succ = new ArrayList<String>();
    ArrayList<String> fail = new ArrayList<String>();
    // Page through the listing; S3 returns results in truncated batches.
    ObjectListing currentList = s3.listObjects(bucket, prefix);
    while (true) {
        for (S3ObjectSummary obj : currentList.getObjectSummaries()) {
            try {
                succ.add(S3FileVec.make(obj, fs).toString());
            } catch (Throwable e) {
                fail.add(obj.getKey());
                Log.err("Failed to loadfile from S3: path = " + obj.getKey() + ", error = " + e.getClass().getName() + ", msg = " + e.getMessage());
            }
        }
        if (currentList.isTruncated()) {
            currentList = s3.listNextBatchOfObjects(currentList);
        } else {
            break;
        }
    }
    keys = succ.toArray(new String[succ.size()]);
    files = keys;
    fails = fail.toArray(new String[fail.size()]);
    this.prefix = getCommonPrefix(keys);
}
Example use of com.amazonaws.services.s3.model.Bucket in project h2o-2 by h2oai.
From the class ImportS3, method serve:
@Override
protected Response serve() {
    // Import every object in the requested bucket, accumulating per-key
    // success/failure entries into JSON arrays for the response.
    String bucket = _bucket.value();
    Log.info("ImportS3 processing (" + bucket + ")");
    JsonObject json = new JsonObject();
    JsonArray succ = new JsonArray();
    JsonArray fail = new JsonArray();
    AmazonS3 s3 = PersistS3.getClient();
    // S3 listings arrive in truncated pages; process each page as it is fetched.
    ObjectListing listing = s3.listObjects(bucket);
    for (;;) {
        processListing(listing, succ, fail);
        if (!listing.isTruncated()) {
            break;
        }
        listing = s3.listNextBatchOfObjects(listing);
    }
    json.add(NUM_SUCCEEDED, new JsonPrimitive(succ.size()));
    json.add(SUCCEEDED, succ);
    json.add(NUM_FAILED, new JsonPrimitive(fail.size()));
    json.add(FAILED, fail);
    // Ensure all DKV puts from the import are globally visible before responding.
    DKV.write_barrier();
    Response response = Response.done(json);
    response.setBuilder(SUCCEEDED + "." + KEY, new KeyCellBuilder());
    return response;
}
Example use of com.amazonaws.services.s3.model.Bucket in project h2o-3 by h2oai.
From the class PersistS3, method uriToKey:
@Override
public Key uriToKey(URI uri) throws IOException {
    AmazonS3 s3 = getClient();
    // Split the s3 URI into its bucket (parts[0]) and object key (parts[1]).
    String[] parts = decodePath(uri.toString());
    try {
        ObjectMetadata meta = s3.getObjectMetadata(parts[0], parts[1]);
        // Voila: create S3 specific key pointing to the file
        return S3FileVec.make(encodePath(parts[0], parts[1]), meta.getContentLength());
    } catch (AmazonServiceException e) {
        // Anything other than a missing object is logged and rethrown unchanged.
        if (!e.getErrorCode().contains("404")) {
            Log.err("AWS failed for " + Arrays.toString(parts) + ": " + e.getMessage());
            throw e;
        }
        // Object not found: surface it as an IOException per this method's contract.
        throw new IOException(e);
    }
}
Aggregations