Use of com.amazonaws.services.s3.AmazonS3 in project hadoop by apache.
In class ITestS3AConfiguration, the method shouldBeAbleToSwitchOnS3PathStyleAccessViaConfigProperty:
@Test
public void shouldBeAbleToSwitchOnS3PathStyleAccessViaConfigProperty() throws Exception {
    conf = new Configuration();
    conf.set(Constants.PATH_STYLE_ACCESS, Boolean.toString(true));
    assertTrue(conf.getBoolean(Constants.PATH_STYLE_ACCESS, false));
    try {
        fs = S3ATestUtils.createTestFileSystem(conf);
        assertNotNull(fs);
        AmazonS3 s3 = fs.getAmazonS3Client();
        assertNotNull(s3);
        S3ClientOptions clientOptions = getField(s3, S3ClientOptions.class, "clientOptions");
        assertTrue("Expected to find path style access to be switched on!",
                clientOptions.isPathStyleAccess());
        byte[] file = ContractTestUtils.toAsciiByteArray("test file");
        ContractTestUtils.writeAndRead(fs, new Path("/path/style/access/testFile"),
                file, file.length,
                (int) conf.getLongBytes(Constants.FS_S3A_BLOCK_SIZE, file.length), false, true);
    } catch (final AWSS3IOException e) {
        LOG.error("Caught exception: ", e);
        // Catch/pass standard path style access behaviour when the live bucket
        // isn't in the same region as the s3 client default. See
        // http://docs.aws.amazon.com/AmazonS3/latest/dev/VirtualHosting.html
        assertEquals(HttpStatus.SC_MOVED_PERMANENTLY, e.getStatusCode());
    }
}
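The test reaches into the S3A filesystem internals to verify the flag landed on the SDK client; outside of S3A, the same switch is available directly on the client builder. A minimal sketch, assuming AWS SDK for Java v1 and default credentials; the region is a placeholder:

import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;
import com.amazonaws.services.s3.model.Bucket;

public class PathStyleClientSketch {
    public static void main(String[] args) {
        // Path-style addressing sends requests to s3.<region>.amazonaws.com/<bucket>/<key>
        // rather than the virtual-hosted form <bucket>.s3.amazonaws.com/<key>.
        AmazonS3 s3 = AmazonS3ClientBuilder.standard()
                .withRegion("us-east-1")           // placeholder region
                .withPathStyleAccessEnabled(true)
                .build();
        for (Bucket b : s3.listBuckets()) {
            System.out.println(b.getName());
        }
    }
}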
Use of com.amazonaws.services.s3.AmazonS3 in project deeplearning4j by deeplearning4j.
In class S3Uploader, the method upload:
/**
 * Upload the file to the bucket.
 * Will create the bucket if it hasn't already been created.
 * @param file the file to upload
 * @param bucketName the name of the bucket
 */
public void upload(File file, String bucketName) {
    AmazonS3 client = new AmazonS3Client(creds);
    bucketName = ensureValidBucketName(bucketName);
    List<Bucket> buckets = client.listBuckets();
    for (Bucket b : buckets) {
        if (b.getName().equals(bucketName)) {
            client.putObject(bucketName, file.getName(), file);
            return;
        }
    }
    // bucket didn't exist: create it, then upload
    client.createBucket(bucketName);
    client.putObject(bucketName, file.getName(), file);
}
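Scanning the full result of listBuckets() to test for one bucket is linear in the number of buckets in the account; the SDK exposes a direct existence check. A hedged alternative sketch, assuming the same class context (creds and ensureValidBucketName) and AWS SDK for Java v1:

// Hypothetical variant of upload(): probe for the bucket directly instead
// of listing every bucket. Newer 1.x SDK releases prefer doesBucketExistV2.
public void uploadDirect(File file, String bucketName) {
    AmazonS3 client = new AmazonS3Client(creds);
    bucketName = ensureValidBucketName(bucketName);
    if (!client.doesBucketExist(bucketName)) {
        client.createBucket(bucketName);
    }
    client.putObject(bucketName, file.getName(), file);
}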
Use of com.amazonaws.services.s3.AmazonS3 in project symmetric-ds by JumpMind.
In class RedshiftBulkDatabaseWriter, the method flush:
protected void flush() {
    if (loadedRows > 0) {
        stagedInputFile.close();
        statistics.get(batch).startTimer(DataWriterStatisticConstants.DATABASEMILLIS);
        AmazonS3 s3client = new AmazonS3Client(new BasicAWSCredentials(accessKey, secretKey));
        if (isNotBlank(s3Endpoint)) {
            s3client.setEndpoint(s3Endpoint);
        }
        String objectKey = stagedInputFile.getFile().getName();
        try {
            s3client.putObject(bucket, objectKey, stagedInputFile.getFile());
        } catch (AmazonServiceException ase) {
            log.error("Exception from AWS service: " + ase.getMessage());
        } catch (AmazonClientException ace) {
            log.error("Exception from AWS client: " + ace.getMessage());
        }
        try {
            JdbcSqlTransaction jdbcTransaction = (JdbcSqlTransaction) transaction;
            Connection c = jdbcTransaction.getConnection();
            String sql = "COPY " + getTargetTable().getFullyQualifiedTableName()
                    + " (" + Table.getCommaDeliminatedColumns(table.getColumns()) + ")"
                    + " FROM 's3://" + bucket + "/" + objectKey + "'"
                    + " CREDENTIALS 'aws_access_key_id=" + accessKey
                    + ";aws_secret_access_key=" + secretKey + "'"
                    + " CSV DATEFORMAT 'YYYY-MM-DD HH:MI:SS' "
                    + (needsExplicitIds ? "EXPLICIT_IDS" : "")
                    + (isNotBlank(appendToCopyCommand) ? (" " + appendToCopyCommand) : "");
            Statement stmt = c.createStatement();
            log.debug(sql);
            stmt.execute(sql);
            stmt.close();
            transaction.commit();
        } catch (SQLException ex) {
            throw platform.getSqlTemplate().translate(ex);
        } finally {
            statistics.get(batch).stopTimer(DataWriterStatisticConstants.DATABASEMILLIS);
        }
        stagedInputFile.delete();
        try {
            s3client.deleteObject(bucket, objectKey);
        } catch (AmazonServiceException ase) {
            log.error("Exception from AWS service: " + ase.getMessage());
        } catch (AmazonClientException ace) {
            log.error("Exception from AWS client: " + ace.getMessage());
        }
        createStagingFile();
        loadedRows = 0;
        loadedBytes = 0;
    }
}
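Stripped of the SymmetricDS plumbing, the pattern is: stage a CSV file in S3, then have Redshift pull it in bulk with COPY over a plain JDBC connection. A minimal standalone sketch; the JDBC URL, login, table, bucket, and key names are all placeholders:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.sql.Statement;

public class RedshiftCopySketch {
    public static void main(String[] args) throws SQLException {
        // All connection details and names below are placeholders.
        String jdbcUrl = "jdbc:redshift://example-cluster:5439/dev";
        String copySql = "COPY my_table"
                + " FROM 's3://my-bucket/staged.csv'"
                + " CREDENTIALS 'aws_access_key_id=...;aws_secret_access_key=...'"
                + " CSV DATEFORMAT 'YYYY-MM-DD HH:MI:SS'";
        try (Connection c = DriverManager.getConnection(jdbcUrl, "user", "password")) {
            c.setAutoCommit(false);
            try (Statement stmt = c.createStatement()) {
                // Redshift itself fetches the staged file from S3; no row data
                // flows through this JDBC connection.
                stmt.execute(copySql);
            }
            c.commit();
        }
    }
}

Note that the writer above embeds the secret key in the SQL string, where it can end up in debug logs; Redshift's COPY also accepts an IAM_ROLE clause, which avoids putting long-lived keys in the statement.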
Use of com.amazonaws.services.s3.AmazonS3 in project aws-doc-sdk-examples by awsdocs.
In class CreateBucket, the method createBucket:
public static Bucket createBucket(String bucket_name) {
    final AmazonS3 s3 = AmazonS3ClientBuilder.defaultClient();
    Bucket b = null;
    if (s3.doesBucketExist(bucket_name)) {
        System.out.format("Bucket %s already exists.\n", bucket_name);
        b = getBucket(bucket_name);
    } else {
        try {
            b = s3.createBucket(bucket_name);
        } catch (AmazonS3Exception e) {
            System.err.println(e.getErrorMessage());
        }
    }
    return b;
}
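The getBucket(...) helper is referenced above but not included in this excerpt. One plausible implementation (an assumption, not the verbatim aws-doc-sdk-examples source) resolves the already-existing bucket by name from the account's bucket list:

// Hypothetical sketch of the getBucket(...) helper referenced above.
public static Bucket getBucket(String bucket_name) {
    final AmazonS3 s3 = AmazonS3ClientBuilder.defaultClient();
    for (Bucket b : s3.listBuckets()) {
        if (b.getName().equals(bucket_name)) {
            return b;
        }
    }
    return null;
}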
Use of com.amazonaws.services.s3.AmazonS3 in project aws-doc-sdk-examples by awsdocs.
In class SetAcl, the method setBucketAcl:
public static void setBucketAcl(String bucket_name, String email, String access) {
    System.out.format("Setting %s access for %s\n", access, email);
    System.out.println("on bucket: " + bucket_name);
    final AmazonS3 s3 = AmazonS3ClientBuilder.defaultClient();
    try {
        // get the current ACL
        AccessControlList acl = s3.getBucketAcl(bucket_name);
        // set access for the grantee
        EmailAddressGrantee grantee = new EmailAddressGrantee(email);
        Permission permission = Permission.valueOf(access);
        acl.grantPermission(grantee, permission);
        s3.setBucketAcl(bucket_name, acl);
    } catch (AmazonServiceException e) {
        System.err.println(e.getErrorMessage());
        System.exit(1);
    }
}
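A short usage sketch for the method above; because of Permission.valueOf, the access argument must match a Permission enum constant name exactly (Read, Write, ReadAcp, WriteAcp, or FullControl), and the bucket and e-mail values here are placeholders:

public static void main(String[] args) {
    // Placeholder arguments for illustration only.
    setBucketAcl("my-example-bucket", "user@example.com", "Read");
}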