Use of com.amazonaws.AmazonClientException in the Apache Beam project: the read method of class S3ReadableSeekableByteChannel.
// Lazily opens the S3 object stream on first read (resuming from `position`
// via a Range request) and copies bytes from it into the destination buffer.
// Returns the number of bytes read, 0 when the buffer has no space, or -1 at
// end of object, per the ReadableByteChannel contract.
@Override
public int read(ByteBuffer destinationBuffer) throws IOException {
if (!isOpen()) {
throw new ClosedChannelException();
}
// Caller's buffer has no free space: nothing to do.
if (!destinationBuffer.hasRemaining()) {
return 0;
}
// Entire object already consumed on previous reads.
if (position == contentLength) {
return -1;
}
if (s3Object == null) {
// First read (or first read after a reposition): fetch the object.
GetObjectRequest request = new GetObjectRequest(path.getBucket(), path.getKey());
request.setSSECustomerKey(config.getSSECustomerKey());
if (position > 0) {
// Resuming mid-object: request only the remaining byte range.
request.setRange(position, contentLength);
}
try {
s3Object = amazonS3.getObject(request);
} catch (AmazonClientException e) {
// Surface AWS client failures as IOException, per the channel contract.
throw new IOException(e);
}
// Wrap the content stream with a 1 MiB buffer to reduce small network reads.
s3ObjectContentChannel = Channels.newChannel(new BufferedInputStream(s3Object.getObjectContent(), 1024 * 1024));
}
int totalBytesRead = 0;
int bytesRead = 0;
// Read until the buffer is full or the stream yields no more bytes.
// Note: totalBytesRead accumulates the PREVIOUS iteration's count, so a
// final read of -1 (EOF) is never added to the total.
do {
totalBytesRead += bytesRead;
try {
bytesRead = s3ObjectContentChannel.read(destinationBuffer);
} catch (AmazonClientException e) {
// TODO replace all catch AmazonServiceException with client exception
throw new IOException(e);
}
} while (bytesRead > 0);
position += totalBytesRead;
return totalBytesRead;
}
Use of com.amazonaws.AmazonClientException in the Apache Beam project: the close method of class S3WritableByteChannel.
// Finalizes the multipart upload: flushes any buffered bytes, then asks S3 to
// complete the upload from the collected part ETags. AWS client failures are
// rethrown as IOException, per the Channel contract.
@Override
public void close() throws IOException {
    open = false;
    // Push any remaining buffered data to S3 as a final part before completing.
    if (uploadBuffer.remaining() > 0) {
        flush();
    }
    CompleteMultipartUploadRequest completeRequest =
        new CompleteMultipartUploadRequest()
            .withBucketName(path.getBucket())
            .withKey(path.getKey())
            .withUploadId(uploadId)
            .withPartETags(eTags);
    try {
        amazonS3.completeMultipartUpload(completeRequest);
    } catch (AmazonClientException e) {
        throw new IOException(e);
    }
}
Use of com.amazonaws.AmazonClientException in the Apache Jackrabbit project: the openService method of class Utils.
/**
 * Creates an {@link AmazonS3Client} from the supplied properties.
 *
 * <p>Credentials come from the property file when both access and secret keys
 * are present; otherwise the default environment credential chain is used.
 * The endpoint is taken from the properties when configured, else derived
 * from the configured (or environment-detected) region.
 *
 * @param prop properties used to configure the {@link AmazonS3Client}
 * @return a configured {@link AmazonS3Client}
 */
public static AmazonS3Client openService(final Properties prop) {
    String accessKey = prop.getProperty(S3Constants.ACCESS_KEY);
    String secretKey = prop.getProperty(S3Constants.SECRET_KEY);
    final AmazonS3Client s3service;
    if (StringUtils.isNullOrEmpty(accessKey) || StringUtils.isNullOrEmpty(secretKey)) {
        // No explicit keys configured: fall back to the environment chain.
        LOG.info("Configuring Amazon Client from environment");
        s3service = new AmazonS3Client(getClientConfiguration(prop));
    } else {
        LOG.info("Configuring Amazon Client from property file.");
        s3service = new AmazonS3Client(new BasicAWSCredentials(accessKey, secretKey), getClientConfiguration(prop));
    }
    String endpoint;
    String propEndPoint = prop.getProperty(S3Constants.S3_END_POINT);
    if ((propEndPoint != null) && !"".equals(propEndPoint)) {
        // An explicit endpoint wins over any region-derived one.
        endpoint = propEndPoint;
    } else {
        String region = prop.getProperty(S3Constants.S3_REGION);
        if (StringUtils.isNullOrEmpty(region)) {
            // Try to detect the region from the runtime environment (e.g. EC2).
            com.amazonaws.regions.Region currentRegion = Regions.getCurrentRegion();
            if (currentRegion == null) {
                throw new AmazonClientException("parameter [" + S3Constants.S3_REGION + "] not configured and cannot be derived from environment");
            }
            region = currentRegion.getName();
        }
        if (DEFAULT_AWS_BUCKET_REGION.equals(region)) {
            endpoint = S3 + DOT + AWSDOTCOM;
        } else if (Region.EU_Ireland.toString().equals(region)) {
            endpoint = "s3-eu-west-1" + DOT + AWSDOTCOM;
        } else {
            endpoint = S3 + DASH + region + DOT + AWSDOTCOM;
        }
    }
    /*
     * Set the endpoint explicitly to avoid the latency of a redirect: without
     * it the first call goes to the US standard region, which then redirects
     * to the correct location.
     */
    s3service.setEndpoint(endpoint);
    LOG.info("S3 service endpoint [{}] ", endpoint);
    s3service.setS3ClientOptions(getS3ClientOptions(prop));
    return s3service;
}
Use of com.amazonaws.AmazonClientException in the awsdocs aws-doc-sdk-examples project: the main method of class DiscoverInstances.
/**
 * Discovers the registered instances of an AWS Cloud Map service and prints
 * them to stdout.
 *
 * <p>Credentials are read from the environment; the region is taken from the
 * {@code AWS_REGION} environment variable.
 *
 * @param args args[0] = namespace name, args[1] = service name
 */
public static void main(String[] args) {
    final String USAGE = "\n" + "To run this example, supply the Namespacename , ServiceName of aws cloud map!\n" + "\n" + "Ex: DiscoverInstances <namespace-name> <service-name> \n";
    if (args.length < 2) {
        System.out.println(USAGE);
        System.exit(1);
    }
    String namespace_name = args[0];
    String service_name = args[1];
    AWSCredentials credentials = null;
    try {
        credentials = new EnvironmentVariableCredentialsProvider().getCredentials();
    } catch (Exception e) {
        // Preserve the underlying exception as the cause instead of dropping it,
        // so the real credential failure remains visible in the stack trace.
        throw new AmazonClientException("Cannot Load Credentials", e);
    }
    System.out.format("Instances in AWS cloud map %s:\n", namespace_name);
    AWSServiceDiscovery client = AWSServiceDiscoveryClientBuilder.standard()
        .withCredentials(new AWSStaticCredentialsProvider(credentials))
        .withRegion(System.getenv("AWS_REGION"))
        .build();
    DiscoverInstancesRequest request = new DiscoverInstancesRequest();
    request.setNamespaceName(namespace_name);
    request.setServiceName(service_name);
    DiscoverInstancesResult result = client.discoverInstances(request);
    System.out.println(result.toString());
}
Use of com.amazonaws.AmazonClientException in the awsdocs aws-doc-sdk-examples project: the main method of class Main.
// Adds two steps (a bash-script step and a custom-jar step) to an existing
// EMR cluster and prints the ids of the newly added steps.
public static void main(String[] args) {
    AWSCredentials profileCredentials = null;
    try {
        profileCredentials = new ProfileCredentialsProvider("default").getCredentials();
    } catch (Exception e) {
        throw new AmazonClientException("Cannot load credentials from .aws/credentials file. " + "Make sure that the credentials file exists and the profile name is specified within it.", e);
    }
    // EMR client bound to us-west-1 with the profile credentials.
    AmazonElasticMapReduce emrClient = AmazonElasticMapReduceClientBuilder.standard()
            .withCredentials(new AWSStaticCredentialsProvider(profileCredentials))
            .withRegion(Regions.US_WEST_1)
            .build();
    // Step 1: run a bash script via the predefined script-runner helper.
    StepFactory stepFactory = new StepFactory();
    StepConfig bashScriptStep = new StepConfig()
            .withName("Run a bash script")
            .withHadoopJarStep(stepFactory.newScriptRunnerStep("s3://jeffgoll/emr-scripts/create_users.sh"))
            .withActionOnFailure("CONTINUE");
    // Step 2: run a custom jar file.
    HadoopJarStepConfig customJarConfig = new HadoopJarStepConfig()
            .withJar("s3://path/to/my/jarfolder") // replace with the location of the jar to run as a step
            .withMainClass("com.my.Main1") // optional; may be omitted if the jar has a manifest
            .withArgs("--verbose"); // optional arguments passed to the jar
    StepConfig myCustomJarStep = new StepConfig("RunHadoopJar", customJarConfig);
    // Submit both steps to the running cluster and report the new step ids.
    AddJobFlowStepsRequest stepsRequest = new AddJobFlowStepsRequest()
            .withJobFlowId("j-xxxxxxxxxxxx") // replace with the cluster id to run the steps
            .withSteps(bashScriptStep, myCustomJarStep);
    AddJobFlowStepsResult result = emrClient.addJobFlowSteps(stepsRequest);
    System.out.println(result.getStepIds());
}
Aggregations