Use of com.baremaps.blob.BlobStore in the baremaps project.
The readWriteDelete method of the S3BlobStoreTest class.
/**
 * Round-trip integration test for {@code S3BlobStore}: writes a blob, reads it
 * back, deletes it, and verifies that reading the deleted blob fails.
 *
 * <p>Runs against the {@code s3Client} fixture provided by the enclosing test
 * class (tagged "integration" — requires an S3-compatible endpoint).
 */
@Test
@Tag("integration")
void readWriteDelete() throws IOException, BlobStoreException {
    URI uri = URI.create("s3://test/test/test.txt");
    String content = "content";
    BlobStore blobStore = new S3BlobStore(s3Client);
    // Write data
    blobStore.put(uri, Blob.builder().withByteArray(content.getBytes(Charsets.UTF_8)).build());
    // Read the data
    try (InputStream inputStream = blobStore.get(uri).getInputStream()) {
        assertEquals(content, CharStreams.toString(new InputStreamReader(inputStream, Charsets.UTF_8)));
    }
    // Delete the data
    blobStore.delete(uri);
    // Reading a deleted blob must fail with a BlobStoreException.
    // (The failure message previously said "IOException", which did not match
    // the exception type this catch clause actually expects.)
    try (InputStream ignored = blobStore.get(uri).getInputStream()) {
        fail("Expected a BlobStoreException to be thrown");
    } catch (BlobStoreException e) {
        // Expected path: the blob no longer exists.
    }
}
Use of com.baremaps.blob.BlobStore in the baremaps project.
The call method of the Edit class.
/**
 * Starts the tile-editor HTTP server and blocks until it shuts down.
 *
 * @return 0 on normal shutdown
 * @throws Exception if the server cannot be configured or started
 */
@Override
public Integer call() throws Exception {
    // Shared services injected into the JAX-RS resources below.
    final BlobStore store = options.blobStore();
    final DataSource source = PostgresUtils.datasource(database);

    // JSON serialization setup.
    final ObjectMapper mapper = defaultObjectMapper();

    // Dependency-injection bindings for the editor resources.
    AbstractBinder bindings = new AbstractBinder() {
        @Override
        protected void configure() {
            bind(tileset).to(URI.class).named("tileset");
            bind(style).to(URI.class).named("style");
            bind(store).to(BlobStore.class);
            bind(source).to(DataSource.class);
            bind(mapper).to(ObjectMapper.class);
        }
    };

    // Assemble the application: CORS filter, resources, JSON context, bindings.
    ResourceConfig application = new ResourceConfig();
    application.register(CorsFilter.class);
    application.register(EditorResources.class);
    application.register(contextResolverFor(mapper));
    application.register(bindings);

    // Serve over blocking streaming HTTP and wait for shutdown.
    BlockingStreamingHttpService service = new HttpJerseyRouterBuilder().buildBlockingStreaming(application);
    ServerContext server = HttpServers.forPort(port).listenBlockingStreamingAndAwait(service);
    logger.info("Listening on {}", server.listenAddress());
    server.awaitShutdown();
    return 0;
}
Use of com.baremaps.blob.BlobStore in the baremaps project.
The call method of the Export class.
/**
 * Exports tiles from the source tile store to the target tile store.
 *
 * <p>The tile range is derived either from the tileset bounds (when no tile
 * list is given) or from a comma-separated {@code x,y,z} list read from the
 * {@code tiles} blob.
 *
 * @return 0 on success
 * @throws TileStoreException if a tile store operation fails
 * @throws BlobStoreException if the tileset or tile list cannot be read
 * @throws IOException        if reading the tile list fails
 */
@Override
public Integer call() throws TileStoreException, BlobStoreException, IOException {
    ObjectMapper mapper = defaultObjectMapper();
    DataSource datasource = PostgresUtils.datasource(database);
    BlobStore blobStore = options.blobStore();
    TileJSON source = mapper.readValue(blobStore.get(this.tileset).getInputStream(), TileJSON.class);
    TileStore tileSource = sourceTileStore(source, datasource);
    TileStore tileTarget = targetTileStore(source, blobStore);
    Stream<Tile> stream;
    if (tiles == null) {
        // Bounds order in TileJSON is [west, south, east, north]; Envelope takes (x1, x2, y1, y2).
        Envelope envelope = new Envelope(source.getBounds().get(0), source.getBounds().get(2), source.getBounds().get(1), source.getBounds().get(3));
        long count = Tile.count(envelope, source.getMinzoom(), source.getMaxzoom());
        stream = StreamUtils.stream(Tile.iterator(envelope, source.getMinzoom(), source.getMaxzoom())).peek(new StreamProgress<>(count, 5000));
    } else {
        // BUG FIX: reader.lines() is lazy — the previous code let the stream
        // escape the try-with-resources, so consuming it below threw
        // "stream closed". Materialize the lines before the reader closes.
        // Also read with an explicit UTF-8 charset instead of the platform default.
        java.util.List<String> lines;
        try (BufferedReader reader = new BufferedReader(new InputStreamReader(blobStore.get(tiles).getInputStream(), java.nio.charset.StandardCharsets.UTF_8))) {
            lines = reader.lines().collect(java.util.stream.Collectors.toList());
        }
        stream = lines.stream().flatMap(line -> {
            // Each line is "x,y,z"; expand the tile's envelope across the zoom range.
            String[] array = line.split(",");
            int x = Integer.parseInt(array[0]);
            int y = Integer.parseInt(array[1]);
            int z = Integer.parseInt(array[2]);
            Tile tile = new Tile(x, y, z);
            return StreamUtils.stream(Tile.iterator(tile.envelope(), source.getMinzoom(), source.getMaxzoom()));
        });
    }
    logger.info("Exporting tiles");
    StreamUtils.batch(stream, 10).filter(new TileBatchPredicate(batchArraySize, batchArrayIndex)).forEach(new TileChannel(tileSource, tileTarget));
    logger.info("Done");
    return 0;
}
Use of com.baremaps.blob.BlobStore in the baremaps project.
The call method of the Update class.
/**
 * Applies OpenStreetMap changes to the database via the update service.
 *
 * @return 0 on success
 * @throws Exception if the update pipeline fails
 */
@Override
public Integer call() throws Exception {
    // Wire the persistence layer backing the update pipeline.
    final BlobStore store = options.blobStore();
    final DataSource source = PostgresUtils.datasource(database);
    final LongDataMap<Coordinate> coordinateMap = new PostgresCoordinateMap(source);
    final LongDataMap<List<Long>> referenceMap = new PostgresReferenceMap(source);
    final HeaderRepository headers = new PostgresHeaderRepository(source);
    final Repository<Long, Node> nodes = new PostgresNodeRepository(source);
    final Repository<Long, Way> ways = new PostgresWayRepository(source);
    final Repository<Long, Relation> relations = new PostgresRelationRepository(source);

    // Run the import; UpdateService reprojects geometries to the given SRID.
    logger.info("Importing changes");
    new UpdateService(store, coordinateMap, referenceMap, headers, nodes, ways, relations, srid).call();
    logger.info("Done");
    return 0;
}
Aggregations