use of com.amazonaws.services.s3.model.S3Object in project herd by FINRAOS.
the class S3DaoTest method testGetPropertiesHandleIllegalArgumentException.
@Test
public void testGetPropertiesHandleIllegalArgumentException() throws Exception {
    // Swap in mock collaborators via reflection, keeping the originals so they can be restored.
    S3Operations originalS3Operations = (S3Operations) ReflectionTestUtils.getField(s3Dao, "s3Operations");
    S3Operations mockS3Operations = mock(S3Operations.class);
    ReflectionTestUtils.setField(s3Dao, "s3Operations", mockS3Operations);
    JavaPropertiesHelper originalJavaPropertiesHelper = (JavaPropertiesHelper) ReflectionTestUtils.getField(s3Dao, "javaPropertiesHelper");
    JavaPropertiesHelper mockJavaPropertiesHelper = mock(JavaPropertiesHelper.class);
    ReflectionTestUtils.setField(s3Dao, "javaPropertiesHelper", mockJavaPropertiesHelper);
    try {
        String bucketName = "bucketName";
        String key = "key";
        S3FileTransferRequestParamsDto s3FileTransferRequestParamsDto = new S3FileTransferRequestParamsDto();
        S3Object s3Object = new S3Object();
        s3Object.setObjectContent(new ByteArrayInputStream(new byte[] { 0 }));
        when(mockS3Operations.getS3Object(any(), any())).thenReturn(s3Object);
        when(mockJavaPropertiesHelper.getProperties(any(InputStream.class))).thenThrow(new IllegalArgumentException("message"));
        try {
            s3Dao.getProperties(bucketName, key, s3FileTransferRequestParamsDto);
            fail();
        } catch (Exception e) {
            assertEquals(IllegalArgumentException.class, e.getClass());
            assertEquals("The properties file in S3 bucket 'bucketName' and key 'key' is invalid.", e.getMessage());
        }
    } finally {
        // Restore the original collaborators so other tests are unaffected.
        ReflectionTestUtils.setField(s3Dao, "s3Operations", originalS3Operations);
        ReflectionTestUtils.setField(s3Dao, "javaPropertiesHelper", originalJavaPropertiesHelper);
    }
}
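For context, the wrapping behavior this test asserts can be sketched as follows. This is a hypothetical reconstruction, not herd's actual implementation: the collaborator signatures and the placeholder second argument to getS3Object are assumptions inferred from the mocks above, and the surrounding class is assumed to hold s3Operations and javaPropertiesHelper fields.

// Hypothetical sketch of the code path under test: an IllegalArgumentException
// from the properties parser is re-thrown with the S3 location added, which is
// the exact message the test asserts on.
public Properties getProperties(String bucketName, String key, S3FileTransferRequestParamsDto params) {
    S3Object s3Object = s3Operations.getS3Object(new GetObjectRequest(bucketName, key), null);
    try (InputStream inputStream = s3Object.getObjectContent()) {
        return javaPropertiesHelper.getProperties(inputStream);
    } catch (IllegalArgumentException e) {
        throw new IllegalArgumentException(String.format(
            "The properties file in S3 bucket '%s' and key '%s' is invalid.", bucketName, key), e);
    } catch (IOException e) {
        throw new UncheckedIOException(e);
    }
}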
use of com.amazonaws.services.s3.model.S3Object in project OpenTripPlanner by opentripplanner.
the class ClusterGraphBuilder method getGraph.
/**
* Return the graph for the given unique identifier for graph builder inputs on S3.
* If this is the same as the last graph built, just return the pre-built graph.
* If not, build the graph from the inputs, fetching them from S3 to the local cache as needed.
*/
public synchronized Graph getGraph(String graphId) {
    LOG.info("Finding a graph for ID {}", graphId);
    if (graphId.equals(currGraphId)) {
        LOG.info("GraphID has not changed. Reusing the last graph that was built.");
        return currGraph;
    }
    // The location of the inputs that will be used to build this graph
    File graphDataDirectory = new File(GRAPH_CACHE_DIR, graphId);
    // If we don't have a local copy of the inputs, fetch graph data as a ZIP from S3 and unzip it
    if (!graphDataDirectory.exists() || graphDataDirectory.list().length == 0) {
        LOG.info("Downloading graph input files.");
        graphDataDirectory.mkdirs();
        S3Object graphDataZipObject = s3.getObject(graphBucket, graphId + ".zip");
        ZipInputStream zis = new ZipInputStream(graphDataZipObject.getObjectContent());
        try {
            ZipEntry entry;
            while ((entry = zis.getNextEntry()) != null) {
                File entryDestination = new File(graphDataDirectory, entry.getName());
                // Are both these mkdirs calls necessary?
                entryDestination.getParentFile().mkdirs();
                if (entry.isDirectory())
                    entryDestination.mkdirs();
                else {
                    OutputStream entryFileOut = new FileOutputStream(entryDestination);
                    IOUtils.copy(zis, entryFileOut);
                    entryFileOut.close();
                }
            }
            zis.close();
        } catch (Exception e) {
            // TODO delete graph cache dir which is probably corrupted
            LOG.info("Error retrieving graph files", e);
        }
    } else {
        LOG.info("Graph input files were found locally. Using these files from the cache.");
    }
    // Now we have a local copy of these graph inputs. Make a graph out of them.
    CommandLineParameters params = new CommandLineParameters();
    params.build = new File(GRAPH_CACHE_DIR, graphId);
    params.inMemory = true;
    GraphBuilder graphBuilder = GraphBuilder.forDirectory(params, params.build);
    graphBuilder.run();
    Graph graph = graphBuilder.getGraph();
    graph.routerId = graphId;
    graph.index(new DefaultStreetVertexIndexFactory());
    graph.index.clusterStopsAsNeeded();
    this.currGraphId = graphId;
    this.currGraph = graph;
    return graph;
}
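Note that the unzip loop above leaks the ZipInputStream (and an open FileOutputStream) if an entry fails mid-copy, and it writes entry names to disk unchecked. A minimal self-contained alternative, offered as a sketch rather than OpenTripPlanner's code, closes both streams via try-with-resources and rejects entries that would escape the destination directory:

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;

// Hypothetical helper: a safer variant of the unzip loop above.
static void unzipTo(InputStream source, File destDir) throws IOException {
    try (ZipInputStream zis = new ZipInputStream(source)) {
        ZipEntry entry;
        while ((entry = zis.getNextEntry()) != null) {
            File dest = new File(destDir, entry.getName());
            // Guard against "zip slip": refuse entries that resolve outside destDir.
            if (!dest.getCanonicalPath().startsWith(destDir.getCanonicalPath() + File.separator)) {
                throw new IOException("Zip entry outside destination: " + entry.getName());
            }
            if (entry.isDirectory()) {
                dest.mkdirs();
            } else {
                dest.getParentFile().mkdirs();
                try (OutputStream out = new FileOutputStream(dest)) {
                    zis.transferTo(out); // Java 9+; IOUtils.copy works on older JDKs
                }
            }
        }
    }
}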
use of com.amazonaws.services.s3.model.S3Object in project neo4j-apoc-procedures by neo4j-contrib.
the class S3Aws method getS3AwsInputStream.
public StreamConnection getS3AwsInputStream(S3Params s3Params) {
    S3Object s3Object = s3Client.getObject(s3Params.getBucket(), s3Params.getKey());
    ObjectMetadata metadata = s3Object.getObjectMetadata();
    return new StreamConnection() {

        @Override
        public InputStream getInputStream() throws IOException {
            return s3Object.getObjectContent();
        }

        @Override
        public String getEncoding() {
            return metadata.getContentEncoding();
        }

        @Override
        public long getLength() {
            return metadata.getContentLength();
        }
    };
}
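A hypothetical caller might use the returned StreamConnection like this, streaming the object to a local file and checking the byte count against the reported metadata; s3Aws and s3Params are assumed to be in scope, and the output path is a placeholder:

// Hypothetical usage sketch; InputStream.transferTo requires Java 9+.
StreamConnection connection = s3Aws.getS3AwsInputStream(s3Params);
try (InputStream in = connection.getInputStream();
     OutputStream out = new FileOutputStream("/tmp/object.bin")) {
    long copied = in.transferTo(out);
    if (copied != connection.getLength()) {
        throw new IOException("Expected " + connection.getLength() + " bytes but copied " + copied);
    }
}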
use of com.amazonaws.services.s3.model.S3Object in project bender by Nextdoor.
the class GeoIpOperationFactory method setConf.
@Override
public void setConf(AbstractConfig config) {
    this.config = (GeoIpOperationConfig) config;
    AmazonS3Client client = this.s3Factory.newInstance();
    AmazonS3URI uri = new AmazonS3URI(this.config.getGeoLiteDb());
    GetObjectRequest req = new GetObjectRequest(uri.getBucket(), uri.getKey());
    S3Object obj = client.getObject(req);
    try {
        this.databaseReader = new DatabaseReader.Builder(obj.getObjectContent()).withCache(new CHMCache()).build();
    } catch (IOException e) {
        throw new ConfigurationException("Unable to read " + this.config.getGeoLiteDb(), e);
    }
}
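Once setConf completes, databaseReader can serve lookups. A hedged sketch of such a lookup using the standard MaxMind GeoIP2 API (the IP address is illustrative; CityResponse and GeoIp2Exception come from the com.maxmind.geoip2 packages):

// Hypothetical lookup sketch; DatabaseReader.city(InetAddress) is the standard
// MaxMind GeoIP2 call, and the CHMCache above makes repeated lookups cheap.
InetAddress address = InetAddress.getByName("93.184.216.34");
try {
    CityResponse response = this.databaseReader.city(address);
    String countryCode = response.getCountry().getIsoCode();
    String cityName = response.getCity().getName();
    // countryCode and cityName would then be used to enrich the event.
} catch (IOException | GeoIp2Exception e) {
    // Address not found in the database, or the database could not be read.
}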
use of com.amazonaws.services.s3.model.S3Object in project bender by Nextdoor.
the class BenderConfig method load.
public static BenderConfig load(AmazonS3ClientFactory s3ClientFactory, AmazonS3URI s3Uri) {
    AmazonS3Client s3 = s3ClientFactory.newInstance();
    S3Object s3object = s3.getObject(s3Uri.getBucket(), s3Uri.getKey());
    StringWriter writer = new StringWriter();
    try {
        IOUtils.copy(s3object.getObjectContent(), writer, "UTF-8");
    } catch (IOException e) {
        throw new ConfigurationException("Unable to read file from s3", e);
    }
    BenderConfig config = load(s3Uri.getKey(), writer.toString());
    config.setConfigFile(s3Uri.getURI().toString());
    return config;
}
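A hypothetical invocation, assuming AmazonS3ClientFactory's default constructor and a placeholder S3 location:

// Hypothetical usage sketch; the bucket and key are placeholders.
AmazonS3ClientFactory s3ClientFactory = new AmazonS3ClientFactory();
AmazonS3URI s3Uri = new AmazonS3URI("s3://my-bucket/bender/config.yaml");
BenderConfig config = BenderConfig.load(s3ClientFactory, s3Uri);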