Use of io.vertx.rxjava.core.Vertx in project georocket (by georocket): class ServiceTest, method unpublish.
/**
 * Tests that a service can be published and then unpublished again.
 * @param context the test context
 */
@Test
public void unpublish(TestContext context) {
  Vertx vertx = new Vertx(rule.vertx());
  Async async = context.async();
  ServiceDiscovery discovery = ServiceDiscovery.create(vertx);

  Service.publishOnce("A", "a", discovery, vertx)
    // after publishing, exactly one instance of "A" should be discoverable
    .flatMapObservable(v -> Service.discover("A", discovery, vertx))
    .count()
    .doOnNext(count -> context.assertEquals(1, count))
    // look the service up again and unpublish it
    .flatMap(v -> Service.discover("A", discovery, vertx))
    .flatMapSingle(service -> service.unpublish(discovery))
    // after unpublishing, discovery should yield no instances
    .flatMap(v -> Service.discover("A", discovery, vertx))
    .count()
    .doOnTerminate(discovery::close)
    .subscribe(count -> {
      context.assertEquals(0, count);
      async.complete();
    }, context::fail);
}
Use of io.vertx.rxjava.core.Vertx in project georocket (by georocket): class ElasticsearchClientFactory, method createElasticsearchClient.
/**
 * Create an Elasticsearch client. Either start an Elasticsearch instance or
 * connect to an external one - depending on the configuration.
 * @param indexName the name of the index the Elasticsearch client will
 * operate on
 * @return an observable emitting an Elasticsearch client and runner
 */
public Observable<ElasticsearchClient> createElasticsearchClient(String indexName) {
  JsonObject config = vertx.getOrCreateContext().config();
  boolean embedded = config.getBoolean(ConfigConstants.INDEX_ELASTICSEARCH_EMBEDDED, true);
  String host = config.getString(ConfigConstants.INDEX_ELASTICSEARCH_HOST, "localhost");
  int port = config.getInteger(ConfigConstants.INDEX_ELASTICSEARCH_PORT, 9200);

  ElasticsearchClient client = new RemoteElasticsearchClient(host, port, indexName, vertx);

  if (!embedded) {
    // just return the client
    return Observable.just(client);
  }

  return client.isRunning().flatMap(running -> {
    if (running) {
      // we don't have to start Elasticsearch again
      return Observable.just(client);
    }

    String home = config.getString(ConfigConstants.HOME);

    // read the default download URL from the bundled resource file
    String defaultElasticsearchDownloadUrl;
    try {
      defaultElasticsearchDownloadUrl = IOUtils.toString(
          getClass().getResource("/elasticsearch_download_url.txt"),
          StandardCharsets.UTF_8);
    } catch (IOException e) {
      return Observable.error(e);
    }

    String elasticsearchDownloadUrl = config.getString(
        ConfigConstants.INDEX_ELASTICSEARCH_DOWNLOAD_URL,
        defaultElasticsearchDownloadUrl);

    // extract the version number from the download URL. FIX: allow
    // multi-digit version components (e.g. "5.6.16") — the previous
    // pattern "[0-9]\.[0-9]\.[0-9]" only accepted single digits
    Pattern pattern = Pattern.compile("-([0-9]+\\.[0-9]+\\.[0-9]+)\\.zip$");
    Matcher matcher = pattern.matcher(elasticsearchDownloadUrl);
    if (!matcher.find()) {
      return Observable.error(new NoStackTraceThrowable("Could not extract "
          + "version number from Elasticsearch download URL: "
          + elasticsearchDownloadUrl));
    }
    String elasticsearchVersion = matcher.group(1);

    String elasticsearchInstallPath = config.getString(
        ConfigConstants.INDEX_ELASTICSEARCH_INSTALL_PATH,
        home + "/elasticsearch/" + elasticsearchVersion);

    // install Elasticsearch, start it and then create the client
    ElasticsearchInstaller installer = new ElasticsearchInstaller(vertx);
    ElasticsearchRunner runner = new ElasticsearchRunner(vertx);
    return installer.download(elasticsearchDownloadUrl, elasticsearchInstallPath)
        .flatMap(path -> runner.runElasticsearch(host, port, path))
        .flatMap(v -> runner.waitUntilElasticsearchRunning(client))
        .map(v -> new EmbeddedElasticsearchClient(client, runner));
  });
}
Use of io.vertx.rxjava.core.Vertx in project georocket (by georocket): class StoreEndpointTest, method setupServer.
/**
 * Starts a MockServer verticle with a StoreEndpoint to test against.
 * @param context the test context
 */
@BeforeClass
public static void setupServer(TestContext context) {
  Async async = context.async();
  vertx = new Vertx(rule.vertx());
  vertxCore = vertx.getDelegate();
  setConfig(vertx.getOrCreateContext().config());
  // FIX: pass an error handler so a failed endpoint setup fails the test
  // immediately instead of hanging until the test timeout expires
  setupMockEndpoint().subscribe(x -> async.complete(), context::fail);
}
Use of io.vertx.rxjava.core.Vertx in project georocket (by georocket): class MockIndexer, method mockIndexerQuery.
/**
 * Start consuming {@link AddressConstants#INDEXER_QUERY} messages.
 * See the class comments to see the logic of the replied items.
 *
 * Returns "valid" hits that correspond to the items that are returned from the {@link MockStore}.
 *
 * @param vertx vertx instance
 */
public static void mockIndexerQuery(Vertx vertx) {
  indexerQuerySubscription = vertx.eventBus()
      .<JsonObject>consumer(AddressConstants.INDEXER_QUERY)
      .toObservable()
      .subscribe(msg -> {
        JsonArray hits = new JsonArray();
        String givenScrollId = msg.body().getString("scrollId");

        // FIX: use a primitive long instead of a boxed Long to avoid
        // repeated unboxing in the loop condition below
        long numberReturnHits;
        String returnScrollId;
        if (givenScrollId == null) {
          // no scroll id given: reply with the first batch of hits
          numberReturnHits = HITS_PER_PAGE;
          returnScrollId = FIRST_RETURNED_SCROLL_ID;
        } else if (givenScrollId.equals(FIRST_RETURNED_SCROLL_ID)) {
          // continuation of the first batch: reply with the remaining hits
          numberReturnHits = TOTAL_HITS - HITS_PER_PAGE;
          returnScrollId = INVALID_SCROLLID;
        } else {
          // any other scroll id: no hits left
          numberReturnHits = 0L;
          returnScrollId = INVALID_SCROLLID;
        }

        for (int i = 0; i < numberReturnHits; i++) {
          hits.add(new JsonObject()
              .put("mimeType", "application/geo+json")
              .put("id", "some_id")
              .put("start", 0)
              .put("end", MockStore.RETURNED_CHUNK.length())
              .put("parents", new JsonArray()));
        }

        if (INVALID_SCROLLID.equals(givenScrollId)) {
          // simulate Elasticsearch rejecting an expired/unknown scroll id
          msg.fail(404, "invalid scroll id");
        } else {
          msg.reply(new JsonObject()
              .put("totalHits", TOTAL_HITS)
              .put("scrollId", returnScrollId)
              .put("hits", hits));
        }
      });
}
Use of io.vertx.rxjava.core.Vertx in project georocket (by georocket): class ImportCommand, method doRun.
/**
 * Resolves all configured file patterns, imports the matching files and
 * reports the result. The command-line interface (signature) is unchanged;
 * the pattern-resolution logic has been extracted into
 * {@link #resolvePattern(String, Queue)} for readability.
 */
@Override
public void doRun(String[] remainingArgs, InputReader in, PrintWriter out,
    Handler<Integer> handler) throws OptionParserException, IOException {
  long start = System.currentTimeMillis();

  // resolve file patterns
  Queue<String> queue = new ArrayDeque<>();
  for (String p : patterns) {
    // convert Windows backslashes to slashes (necessary for Files.newDirectoryStream())
    if (SystemUtils.IS_OS_WINDOWS) {
      p = FilenameUtils.separatorsToUnix(p);
    }
    resolvePattern(p, queue);
  }

  if (queue.isEmpty()) {
    error("given pattern didn't match any files");
    return;
  }

  Vertx vertx = new Vertx(this.vertx);
  GeoRocketClient client = createClient();

  int queueSize = queue.size();
  doImport(queue, client, vertx, exitCode -> {
    client.close();
    if (exitCode == 0) {
      String m = "file";
      if (queueSize > 1) {
        m += "s";
      }
      System.out.println("Successfully imported " + queueSize + " " + m
          + " in " + DurationFormat.formatUntilNow(start));
    }
    handler.handle(exitCode);
  });
}

/**
 * Resolve a single file pattern and add all matching paths to the given queue.
 * A pattern without glob characters is added verbatim; otherwise the leading
 * non-glob path segments form the scan root and the rest is used as an
 * Ant-style include pattern.
 * @param p the pattern to resolve (with forward slashes as separators)
 * @param queue the queue to add matching paths to
 */
private void resolvePattern(String p, Queue<String> queue) {
  // collect paths and glob patterns: everything before the first segment
  // containing a glob character belongs to the root path
  List<String> roots = new ArrayList<>();
  List<String> globs = new ArrayList<>();
  String[] parts = p.split("/");
  boolean rootParsed = false;
  for (String part : parts) {
    if (!rootParsed) {
      if (hasGlobCharacter(part)) {
        globs.add(part);
        rootParsed = true;
      } else {
        roots.add(part);
      }
    } else {
      globs.add(part);
    }
  }

  if (globs.isEmpty()) {
    // string does not contain a glob pattern at all
    queue.add(p);
    return;
  }

  // string contains a glob pattern
  if (roots.isEmpty()) {
    // there are no paths in the string. start from the current
    // working directory
    roots.add(".");
  }

  // add all files matching the pattern
  String root = String.join("/", roots);
  String glob = String.join("/", globs);
  Project project = new Project();
  FileSet fs = new FileSet();
  fs.setDir(new File(root));
  fs.setIncludes(glob);
  DirectoryScanner ds = fs.getDirectoryScanner(project);
  Arrays.stream(ds.getIncludedFiles())
      .map(path -> Paths.get(root, path).toString())
      .forEach(queue::add);
}
Aggregations