Use of com.talend.shaded.com.amazonaws.services.s3.AmazonS3 in project FP-PSP-SERVER by FundacionParaguaya.
The class ImageUploadServiceImpl, method deleteImage.
@Override
public void deleteImage(String logoUrl, String imageDirectory) {
    if (logoUrl == null) {
        return;
    }
    try {
        // Build an S3 client for the region configured in the application properties.
        String strRegion = applicationProperties.getAws().getStrRegion();
        Regions region = Regions.valueOf(strRegion);
        AmazonS3 s3Client = AmazonS3ClientBuilder.standard().withRegion(region).build();
        String bucketName = applicationProperties.getAws().getBucketName();
        // Derive the object key from the directory prefix and the file name at the end of the URL.
        String fileName = logoUrl.substring(logoUrl.lastIndexOf('/') + 1);
        String keyName = imageDirectory + fileName;
        s3Client.deleteObject(new DeleteObjectRequest(bucketName, keyName));
    } catch (SdkClientException sdkClientExc) {
        LOG.error(sdkClientExc.getMessage(), sdkClientExc);
        throw new AWSS3RuntimeException(sdkClientExc);
    }
}
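The project-specific wiring above (applicationProperties, AWSS3RuntimeException, LOG) is not shown. As a minimal, self-contained sketch of the same deleteObject call against the AWS SDK for Java v1, with a hypothetical bucket name and key:

import com.amazonaws.SdkClientException;
import com.amazonaws.regions.Regions;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;
import com.amazonaws.services.s3.model.DeleteObjectRequest;

public class DeleteObjectSketch {
    public static void main(String[] args) {
        // Hypothetical bucket and key; replace with real values.
        String bucketName = "example-bucket";
        String keyName = "images/logo.png";
        AmazonS3 s3Client = AmazonS3ClientBuilder.standard()
                .withRegion(Regions.US_EAST_1)
                .build();
        try {
            // Deleting a missing key does not throw; S3 treats the delete as successful.
            s3Client.deleteObject(new DeleteObjectRequest(bucketName, keyName));
        } catch (SdkClientException e) {
            // Client-side or service-side failure (network, credentials, etc.).
            e.printStackTrace();
        } finally {
            s3Client.shutdown();
        }
    }
}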
Use of com.talend.shaded.com.amazonaws.services.s3.AmazonS3 in project molgenis by molgenis.
The class AmazonBucketIngester, method ingest.
public FileMeta ingest(String jobExecutionID, String targetEntityTypeName, String bucket, String key, String extension, String accessKey, String secretKey, String region, boolean isExpression, Progress progress) {
    FileMeta fileMeta;
    try {
        progress.setProgressMax(3);
        progress.progress(0, "Connection to Amazon Bucket with accessKey '" + accessKey + "'");
        AmazonS3 client = amazonBucketClient.getClient(accessKey, secretKey, region);
        progress.progress(1, "downloading...");
        File file = amazonBucketClient.downloadFile(client, fileStore, jobExecutionID, bucket, key, extension, isExpression, targetEntityTypeName);
        // When importing into a specific entity type, an Excel file must contain exactly one sheet,
        // which is renamed to the target entity type so the importer picks it up.
        if (targetEntityTypeName != null && ExcelUtils.isExcelFile(file.getName())) {
            if (ExcelUtils.getNumberOfSheets(file) == 1) {
                ExcelUtils.renameSheet(targetEntityTypeName, file, 0);
            } else {
                throw new MolgenisDataException("Amazon Bucket imports to a specified entityType are only possible with CSV files or Excel files with one sheet");
            }
        }
        progress.progress(2, "Importing...");
        ImportService importService = importServiceFactory.getImportService(file.getName());
        // Copy the download to a file named after the target entity type before importing.
        File renamed = new File(String.format("%s%s%s.%s", file.getParent(), File.separatorChar, targetEntityTypeName, extension));
        Files.copy(file.toPath(), renamed.toPath(), StandardCopyOption.REPLACE_EXISTING);
        RepositoryCollection repositoryCollection = fileRepositoryCollectionFactory.createFileRepositoryCollection(renamed);
        EntityImportReport report = importService.doImport(repositoryCollection, DatabaseAction.ADD_UPDATE_EXISTING, "base");
        progress.status("Download and import from Amazon Bucket done.");
        progress.progress(3, "Successfully imported " + report.getNrImportedEntitiesMap().keySet().toString() + " entities.");
        fileMeta = createFileMeta(jobExecutionID, file);
    } catch (Exception e) {
        throw new MolgenisDataException(e);
    }
    return fileMeta;
}
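The amazonBucketClient helper that performs the actual download is project-specific and not shown. A minimal sketch of the underlying download step with the AWS SDK for Java v1, assuming hypothetical credentials, bucket, key, and target file:

import java.io.File;
import java.nio.file.Files;
import java.nio.file.StandardCopyOption;

import com.amazonaws.auth.AWSStaticCredentialsProvider;
import com.amazonaws.auth.BasicAWSCredentials;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;
import com.amazonaws.services.s3.model.S3Object;

public class DownloadObjectSketch {
    public static void main(String[] args) throws Exception {
        // Hypothetical credentials and location; in practice these come from configuration.
        AmazonS3 client = AmazonS3ClientBuilder.standard()
                .withCredentials(new AWSStaticCredentialsProvider(
                        new BasicAWSCredentials("ACCESS_KEY", "SECRET_KEY")))
                .withRegion("eu-west-1")
                .build();
        File target = new File("download.csv");
        // Stream the object content straight to disk; S3Object is Closeable.
        try (S3Object object = client.getObject("example-bucket", "path/to/data.csv")) {
            Files.copy(object.getObjectContent(), target.toPath(), StandardCopyOption.REPLACE_EXISTING);
        } finally {
            client.shutdown();
        }
    }
}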
Use of com.talend.shaded.com.amazonaws.services.s3.AmazonS3 in project Singularity by HubSpot.
The class S3ArtifactDownloader, method downloadThrows.
private void downloadThrows(final S3Artifact s3Artifact, final Path downloadTo) throws Exception {
    log.info("Downloading {}", s3Artifact);
    ClientConfiguration clientConfiguration = new ClientConfiguration().withSocketTimeout(configuration.getS3ChunkDownloadTimeoutMillis());
    if (configuration.isS3UseV2Signing()) {
        clientConfiguration.setSignerOverride("S3SignerType");
    }
    final AmazonS3 s3Client = new AmazonS3Client(getCredentialsForBucket(s3Artifact.getS3Bucket()), clientConfiguration);
    if (configuration.getS3Endpoint().isPresent()) {
        s3Client.setEndpoint(configuration.getS3Endpoint().get());
    }
    // Determine the object size, either from the artifact metadata or from S3 itself.
    long length = 0;
    if (s3Artifact.getFilesize().isPresent()) {
        length = s3Artifact.getFilesize().get();
    } else {
        S3Object details = s3Client.getObject(s3Artifact.getS3Bucket(), s3Artifact.getS3ObjectKey());
        Preconditions.checkNotNull(details, "Couldn't find object at %s/%s", s3Artifact.getS3Bucket(), s3Artifact.getS3ObjectKey());
        length = details.getObjectMetadata().getContentLength();
    }
    // Split the download into fixed-size chunks, rounding up for any remainder.
    int numChunks = (int) (length / configuration.getS3ChunkSize());
    if (length % configuration.getS3ChunkSize() > 0) {
        numChunks++;
    }
    final long chunkSize = length / numChunks + (length % numChunks);
    log.info("Downloading {}/{} in {} chunks of {} bytes to {}", s3Artifact.getS3Bucket(), s3Artifact.getS3ObjectKey(), numChunks, chunkSize, downloadTo);
    final ExecutorService chunkExecutorService = Executors.newFixedThreadPool(numChunks, new ThreadFactoryBuilder().setDaemon(true).setNameFormat("S3ArtifactDownloaderChunkThread-%d").build());
    final List<Future<Path>> futures = Lists.newArrayListWithCapacity(numChunks);
    for (int chunk = 0; chunk < numChunks; chunk++) {
        futures.add(chunkExecutorService.submit(new S3ArtifactChunkDownloader(configuration, log, s3Client, s3Artifact, downloadTo, chunk, chunkSize, length, exceptionNotifier)));
    }
    // Wait for each chunk in order, cancelling the rest as soon as one fails or times out.
    long remainingMillis = configuration.getS3DownloadTimeoutMillis();
    boolean failed = false;
    for (int chunk = 0; chunk < numChunks; chunk++) {
        final Future<Path> future = futures.get(chunk);
        if (failed) {
            future.cancel(true);
            continue;
        }
        final long start = System.currentTimeMillis();
        if (!handleChunk(s3Artifact, future, downloadTo, chunk, start, remainingMillis)) {
            failed = true;
        }
        remainingMillis -= (System.currentTimeMillis() - start);
    }
    chunkExecutorService.shutdownNow();
    Preconditions.checkState(!failed, "Downloading %s/%s failed", s3Artifact.getS3Bucket(), s3Artifact.getS3ObjectKey());
}
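The S3ArtifactChunkDownloader class submitted to the executor is not shown above; a chunked download like this is typically implemented with ranged GETs. A minimal sketch of fetching one chunk with the SDK's GetObjectRequest.withRange, using hypothetical bucket, key, and offsets:

import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;

import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;
import com.amazonaws.services.s3.model.GetObjectRequest;
import com.amazonaws.services.s3.model.S3Object;

public class RangedGetSketch {
    public static void main(String[] args) throws Exception {
        AmazonS3 s3Client = AmazonS3ClientBuilder.defaultClient();
        // Hypothetical chunk boundaries; the byte range is inclusive on both ends.
        long start = 0;
        long end = 5 * 1024 * 1024 - 1; // first 5 MiB
        GetObjectRequest request = new GetObjectRequest("example-bucket", "artifacts/build.tar.gz")
                .withRange(start, end);
        Path chunkFile = Paths.get("build.tar.gz.chunk0");
        try (S3Object object = s3Client.getObject(request);
             InputStream in = object.getObjectContent()) {
            // Write only this chunk's bytes to a temporary file.
            Files.copy(in, chunkFile, StandardCopyOption.REPLACE_EXISTING);
        } finally {
            s3Client.shutdown();
        }
    }
}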
Use of com.talend.shaded.com.amazonaws.services.s3.AmazonS3 in project jaqy by Teradata.
The class S3PathTest, method test1.
@Test
public void test1() throws Exception {
    /*
     * A workaround for S3Mock generating an output to stdout.
     * https://github.com/findify/s3mock/issues/67
     * It generates an output in Eclipse, but not so when running
     * mvn clean test.
     */
    InMemoryProvider provider = new InMemoryProvider();
    ActorSystem actor = S3Mock.$lessinit$greater$default$3(8001, provider);
    S3Mock api = new S3Mock(8001, provider, actor);
    api.start();
    // setup
    Globals globals = new Globals();
    JaqyInterpreter interpreter = new JaqyInterpreter(globals, null, null);
    AmazonS3ClientBuilder builder = S3Utils.getS3Builder(interpreter);
    builder.setEndpointConfiguration(new AwsClientBuilder.EndpointConfiguration("http://localhost:8001", "us-west-2"));
    // setup some files in the bucket
    AmazonS3 s3client = builder.build();
    s3client.createBucket("tests");
    File dir = new File("../tests/unittests/csv/lib");
    s3client.putObject("tests", "unittests/csv/lib/sin.csv", new File(dir, "sin.csv"));
    s3client.putObject("tests", "unittests/csv/lib/sin2.csv", new File(dir, "sin2.csv"));
    s3client.shutdown();
    S3PathHandler handler = new S3PathHandler();
    String url = "s3://tests/unittests/csv/lib/sin.csv";
    String parent = "s3://tests/unittests/csv/lib";
    S3Path path = (S3Path) handler.getPath(url, interpreter);
    Assert.assertNotNull(path);
    Assert.assertEquals("tests", path.getBucket());
    Assert.assertEquals("unittests/csv/lib/sin.csv", path.getFile());
    Assert.assertEquals(url, path.getPath());
    Assert.assertEquals(url, path.getCanonicalPath());
    Assert.assertTrue(path.exists());
    Assert.assertEquals(31443, path.length());
    Assert.assertEquals(31443, path.length());
    Assert.assertTrue(path.isFile());
    Assert.assertEquals(0, FileUtils.compare(path.getInputStream(), new FileInputStream(new File(dir, "sin.csv"))));
    path = (S3Path) path.getParent();
    Assert.assertEquals(parent, path.getPath());
    path = (S3Path) path.getRelativePath("sin2.csv");
    Assert.assertEquals("s3://tests/unittests/csv/lib/sin2.csv", path.getPath());
    Assert.assertTrue(path.isFile());
    path = (S3Path) path.getParent();
    path = (S3Path) path.getRelativePath("/unittests/csv/lib/import1.csv");
    Assert.assertEquals("s3://tests/unittests/csv/lib/import1.csv", path.getPath());
    FileUtils.copy(path.getOutputStream(), new FileInputStream(new File(dir, "import1.csv")), new byte[4096]);
    Assert.assertEquals(25, path.length());
    Assert.assertEquals(25, path.length());
    Assert.assertEquals(0, FileUtils.compare(path.getInputStream(), new FileInputStream(new File(dir, "import1.csv"))));
    path = (S3Path) path.getParent();
    path = (S3Path) path.getRelativePath("../../csv/lib/sin.csv");
    Assert.assertEquals(url, path.getPath());
    path = (S3Path) path.getParent();
    path = (S3Path) path.getRelativePath("../test/abc.csv");
    Assert.assertEquals(0, path.length());
    Assert.assertEquals(0, path.length());
    Assert.assertFalse(path.exists());
    Assert.assertFalse(path.exists());
    s3client = S3Utils.getS3Client(interpreter);
    if (s3client != null)
        s3client.shutdown();
    CoordinatedShutdown.get(actor).runAll();
    api.stop();
}
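Setting the jaqy-specific S3Utils and S3Path classes aside, the key piece of client setup in this test is pointing the builder at a local endpoint. A minimal sketch of configuring an AmazonS3 client against a local mock such as S3Mock; enabling path-style access is an assumption that many local S3 emulators require:

import com.amazonaws.auth.AWSStaticCredentialsProvider;
import com.amazonaws.auth.AnonymousAWSCredentials;
import com.amazonaws.client.builder.AwsClientBuilder;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;

public class LocalEndpointSketch {
    public static void main(String[] args) {
        AmazonS3 s3client = AmazonS3ClientBuilder.standard()
                // Point the client at the local mock instead of AWS.
                .withEndpointConfiguration(
                        new AwsClientBuilder.EndpointConfiguration("http://localhost:8001", "us-west-2"))
                // Local mocks typically do not validate credentials.
                .withCredentials(new AWSStaticCredentialsProvider(new AnonymousAWSCredentials()))
                // Bucket-in-path addressing avoids DNS tricks needed for virtual-hosted style.
                .withPathStyleAccessEnabled(true)
                .build();
        s3client.createBucket("tests");
        s3client.putObject("tests", "hello.txt", "hello world");
        System.out.println(s3client.getObjectAsString("tests", "hello.txt"));
        s3client.shutdown();
    }
}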
Use of com.talend.shaded.com.amazonaws.services.s3.AmazonS3 in project jaqy by Teradata.
The class S3Utils, method getS3Client.
public static AmazonS3 getS3Client(JaqyInterpreter interpreter) {
    VariableManager vm = interpreter.getVariableManager();
    Variable clientVar = vm.getVariable(S3CLIENT_VAR);
    if (clientVar != null) {
        Object o = clientVar.get();
        if (o instanceof AmazonS3)
            return (AmazonS3) o;
    }
    // now we need to setup a new client.
    AmazonS3ClientBuilder builder = getS3Builder(interpreter);
    // check if we need to set up the access / secret key
    String access = null;
    String secret = null;
    {
        Variable var = vm.getVariable(S3ACCESS_VAR);
        if (var != null) {
            Object o = var.get();
            if (o != null)
                access = o.toString();
        }
    }
    {
        Variable var = vm.getVariable(S3SECRET_VAR);
        if (var != null) {
            Object o = var.get();
            if (o != null)
                secret = o.toString();
        }
    }
    if (access != null && secret != null) {
        /*
         * When both access and secret are null, we are using the default
         * values (i.e. from credential file or env variables etc).
         *
         * When both are set, then we override the default settings (and
         * subsequent uses).
         */
        if (access.length() == 0 && secret.length() == 0) {
            /*
             * This is for accessing publicly accessible buckets.
             */
            builder.withCredentials(new AWSStaticCredentialsProvider(new AnonymousAWSCredentials()));
        } else {
            builder.withCredentials(new AWSStaticCredentialsProvider(new BasicAWSCredentials(access, secret)));
        }
    }
    AmazonS3 client = builder.build();
    // now save the client to s3client variable
    if (clientVar == null) {
        clientVar = new Variable() {

            private AmazonS3 m_client;

            @Override
            public Object get() {
                return m_client;
            }

            @Override
            public boolean set(Object value) {
                if (value != null && !(value instanceof AmazonS3))
                    return false;
                m_client = (AmazonS3) value;
                return true;
            }

            @Override
            public String getName() {
                return S3CLIENT_VAR;
            }
        };
    }
    clientVar.set(client);
    vm.setVariable(clientVar);
    return client;
}
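The Variable and VariableManager plumbing is jaqy-specific, but the credential selection itself is standard SDK usage. A minimal sketch of the same three cases (both keys unset uses the default provider chain, both empty uses anonymous credentials, otherwise static keys), with hypothetical placeholder values:

import com.amazonaws.auth.AWSStaticCredentialsProvider;
import com.amazonaws.auth.AnonymousAWSCredentials;
import com.amazonaws.auth.BasicAWSCredentials;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;

public class CredentialSelectionSketch {
    // Mirrors the access/secret semantics of getS3Client:
    // both null -> default provider chain, both empty -> anonymous, otherwise static keys.
    static AmazonS3 buildClient(String access, String secret) {
        AmazonS3ClientBuilder builder = AmazonS3ClientBuilder.standard().withRegion("us-east-1");
        if (access != null && secret != null) {
            if (access.isEmpty() && secret.isEmpty()) {
                // Anonymous access, e.g. for publicly readable buckets.
                builder.withCredentials(new AWSStaticCredentialsProvider(new AnonymousAWSCredentials()));
            } else {
                builder.withCredentials(new AWSStaticCredentialsProvider(new BasicAWSCredentials(access, secret)));
            }
        }
        // Otherwise the default chain (env vars, credentials file, instance profile) applies.
        return builder.build();
    }

    public static void main(String[] args) {
        AmazonS3 anonymous = buildClient("", "");
        System.out.println("Built client: " + anonymous);
        anonymous.shutdown();
    }
}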