Use of com.amazonaws.services.s3.transfer.Upload in project jackrabbit-oak by apache.
The class S3Backend, method write.
/**
 * Uploads the file to Amazon S3. If the file size is greater than 5 MB, this
 * method uses parallel (multipart) connections for the upload.
 */
@Override
public void write(DataIdentifier identifier, File file) throws DataStoreException {
    String key = getKeyName(identifier);
    ObjectMetadata objectMetaData = null;
    long start = System.currentTimeMillis();
    ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader();
    try {
        Thread.currentThread().setContextClassLoader(getClass().getClassLoader());
        // check if the same record already exists
        try {
            objectMetaData = s3service.getObjectMetadata(bucket, key);
        } catch (AmazonServiceException ase) {
            if (!(ase.getStatusCode() == 404 || ase.getStatusCode() == 403)) {
                throw ase;
            }
        }
        if (objectMetaData != null) {
            long l = objectMetaData.getContentLength();
            if (l != file.length()) {
                throw new DataStoreException("Collision: " + key + " new length: " + file.length() + " old length: " + l);
            }
            LOG.debug("[{}] exists, lastmodified = [{}]", key, objectMetaData.getLastModified().getTime());
            // refresh the last-modified timestamp by copying the object onto itself
            CopyObjectRequest copReq = new CopyObjectRequest(bucket, key, bucket, key);
            copReq.setNewObjectMetadata(objectMetaData);
            Copy copy = tmx.copy(s3ReqDecorator.decorate(copReq));
            try {
                copy.waitForCopyResult();
                LOG.debug("lastModified of [{}] updated successfully.", identifier);
            } catch (Exception e2) {
                throw new DataStoreException("Could not upload " + key, e2);
            }
        }
        if (objectMetaData == null) {
            try {
                // start multipart parallel upload using the Amazon SDK TransferManager
                Upload up = tmx.upload(s3ReqDecorator.decorate(new PutObjectRequest(bucket, key, file)));
                // wait for the upload to finish
                up.waitForUploadResult();
                LOG.debug("synchronous upload to identifier [{}] completed.", identifier);
            } catch (Exception e2) {
                throw new DataStoreException("Could not upload " + key, e2);
            }
        }
    } finally {
        if (contextClassLoader != null) {
            Thread.currentThread().setContextClassLoader(contextClassLoader);
        }
    }
    LOG.debug("write of [{}], length=[{}], in [{}]ms", identifier, file.length(), (System.currentTimeMillis() - start));
}
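The Javadoc above refers to TransferManager's multipart behaviour: files above a size threshold are split into parts that upload over parallel connections. As a rough illustration (not S3Backend's actual initialization code, which builds its TransferManager from the data store configuration), a TransferManager with an explicit 5 MB threshold could be constructed along these lines; the class name and the use of the default client are assumptions:

import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;
import com.amazonaws.services.s3.transfer.TransferManager;
import com.amazonaws.services.s3.transfer.TransferManagerBuilder;

public class TransferManagerConfigSketch {
    public static TransferManager buildTransferManager() {
        // hypothetical client; S3Backend constructs its own client from its configuration
        AmazonS3 s3Client = AmazonS3ClientBuilder.defaultClient();
        return TransferManagerBuilder.standard()
                .withS3Client(s3Client)
                // files at or above this size are uploaded as parallel multipart uploads
                .withMultipartUploadThreshold(5L * 1024 * 1024)
                // size of each part in a multipart upload (S3 requires parts of at least 5 MB)
                .withMinimumUploadPartSize(5L * 1024 * 1024)
                .build();
    }
}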
Use of com.amazonaws.services.s3.transfer.Upload in project jackrabbit-oak by apache.
The class S3Backend, method addMetadataRecord (File overload).
@Override
public void addMetadataRecord(File input, String name) throws DataStoreException {
    ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader();
    try {
        Thread.currentThread().setContextClassLoader(getClass().getClassLoader());
        Upload upload = tmx.upload(s3ReqDecorator.decorate(new PutObjectRequest(bucket, addMetaKeyPrefix(name), input)));
        upload.waitForUploadResult();
    } catch (InterruptedException e) {
        LOG.error("Exception in uploading metadata file {}", input, e);
        throw new DataStoreException("Error in uploading metadata file", e);
    } finally {
        if (contextClassLoader != null) {
            Thread.currentThread().setContextClassLoader(contextClassLoader);
        }
    }
}
Use of com.amazonaws.services.s3.transfer.Upload in project jackrabbit-oak by apache.
The class S3Backend, method addMetadataRecord (InputStream overload).
public void addMetadataRecord(final InputStream input, final String name) throws DataStoreException {
    ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader();
    try {
        Thread.currentThread().setContextClassLoader(getClass().getClassLoader());
        Upload upload = tmx.upload(s3ReqDecorator.decorate(new PutObjectRequest(bucket, addMetaKeyPrefix(name), input, new ObjectMetadata())));
        upload.waitForUploadResult();
    } catch (InterruptedException e) {
        LOG.error("Error in uploading", e);
        throw new DataStoreException("Error in uploading", e);
    } finally {
        if (contextClassLoader != null) {
            Thread.currentThread().setContextClassLoader(contextClassLoader);
        }
    }
}
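This overload hands TransferManager a raw InputStream with an empty ObjectMetadata, so the SDK does not know the content length and may buffer the stream before uploading. A minimal sketch, assuming the length is known up front; the helper class and method names here are hypothetical and not part of S3Backend:

import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;
import com.amazonaws.services.s3.model.ObjectMetadata;
import com.amazonaws.services.s3.model.PutObjectRequest;
import com.amazonaws.services.s3.transfer.TransferManager;
import com.amazonaws.services.s3.transfer.Upload;

class MetadataUploadSketch {
    // hypothetical helper: uploads a stream whose length is known from the source file
    static void uploadStream(TransferManager tmx, String bucket, String key, File source) throws Exception {
        ObjectMetadata metadata = new ObjectMetadata();
        // supplying the length lets TransferManager stream the data instead of buffering it
        metadata.setContentLength(source.length());
        try (InputStream input = new FileInputStream(source)) {
            Upload upload = tmx.upload(new PutObjectRequest(bucket, key, input, metadata));
            upload.waitForUploadResult();
        }
    }
}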
Use of com.amazonaws.services.s3.transfer.Upload in project aws-doc-sdk-examples by awsdocs.
The class XferMgrUpload, method uploadFile.
public static void uploadFile(String file_path, String bucket_name, String key_prefix, boolean pause) {
    System.out.println("file: " + file_path + (pause ? " (pause)" : ""));
    String key_name = null;
    if (key_prefix != null) {
        key_name = key_prefix + '/' + file_path;
    } else {
        key_name = file_path;
    }
    File f = new File(file_path);
    TransferManager xfer_mgr = new TransferManager();
    try {
        Upload xfer = xfer_mgr.upload(bucket_name, key_name, f);
        // loop with Transfer.isDone()
        XferMgrProgress.showTransferProgress(xfer);
        // or block with Transfer.waitForCompletion()
        XferMgrProgress.waitForCompletion(xfer);
    } catch (AmazonServiceException e) {
        System.err.println(e.getErrorMessage());
        System.exit(1);
    }
    xfer_mgr.shutdownNow();
}
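Note that the no-argument new TransferManager() constructor used above is deprecated in recent 1.x releases of the AWS SDK for Java in favour of TransferManagerBuilder. A minimal sketch of the builder-based construction (the class name is hypothetical; credentials and region come from the default provider chain):

import com.amazonaws.services.s3.AmazonS3ClientBuilder;
import com.amazonaws.services.s3.transfer.TransferManager;
import com.amazonaws.services.s3.transfer.TransferManagerBuilder;

class XferMgrFactorySketch {
    // builder-based construction, replacing the deprecated new TransferManager()
    static TransferManager create() {
        return TransferManagerBuilder.standard()
                .withS3Client(AmazonS3ClientBuilder.defaultClient())
                .build();
    }
}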
Use of com.amazonaws.services.s3.transfer.Upload in project aws-doc-sdk-examples by awsdocs.
The class PutObject, method main.
public static void main(String[] args) {
    final String USAGE = "\n" + "To run this example, supply the name of an S3 bucket and a file to\n" + "upload to it.\n" + "\n" + "Ex: PutObject <bucketname> <filename>\n";
    if (args.length < 2) {
        System.out.println(USAGE);
        System.exit(1);
    }
    String bucket_name = args[0];
    String file_path = args[1];
    String key_name = Paths.get(file_path).getFileName().toString();
    System.out.format("Uploading %s to S3 bucket %s...\n", file_path, bucket_name);
    final AmazonS3 s3 = AmazonS3ClientBuilder.defaultClient();
    try {
        // upload the file itself; passing the raw path String would store the path text as the object body
        s3.putObject(bucket_name, key_name, new File(file_path));
    } catch (AmazonServiceException e) {
        System.err.println(e.getErrorMessage());
        System.exit(1);
    }
    System.out.println("Done!");
}
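The example uses the simple three-argument putObject overload. When custom metadata is needed, the request-object form can be used instead; a minimal sketch with placeholder bucket, key, and file names:

import java.io.File;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;
import com.amazonaws.services.s3.model.ObjectMetadata;
import com.amazonaws.services.s3.model.PutObjectRequest;

class PutObjectWithMetadataSketch {
    public static void main(String[] args) {
        AmazonS3 s3 = AmazonS3ClientBuilder.defaultClient();
        // placeholders: substitute a real bucket, key, and local file
        PutObjectRequest request = new PutObjectRequest("my-bucket", "my-key", new File("local-file.txt"));
        ObjectMetadata metadata = new ObjectMetadata();
        metadata.addUserMetadata("title", "example"); // stored as x-amz-meta-title
        request.setMetadata(metadata);
        s3.putObject(request);
    }
}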