Use of ai.grakn.client.QueryResponse in project grakn by graknlabs.
From class BatchExecutorClientIT, method whenEngineRESTFailsWhileLoadingWithRetryTrue_LoaderRetriesAndWaits:
@Ignore("Randomly failing test which is slowing down dev. This should be fixed")
@Test
public void whenEngineRESTFailsWhileLoadingWithRetryTrue_LoaderRetriesAndWaits() throws Exception {
    List<Observable<QueryResponse>> all = new ArrayList<>();
    int n = 20;
    try (BatchExecutorClient loader = loader(MAX_DELAY)) {
        for (int i = 0; i < n; i++) {
            all.add(loader.add(query(), keyspace, true).doOnError(ex -> System.out.println("Error " + ex)));
            if (i % 5 == 0) {
                Thread.sleep(200);
                System.out.println("Restarting engine");
                engine.server().getHttpHandler().stopHTTP();
                Thread.sleep(200);
                engine.server().getHttpHandler().startHTTP();
            }
        }
        int completed = allObservable(all).toBlocking().first().size();
        assertEquals(n, completed);
    }
    if (GraknTestUtil.usingJanus()) {
        try (GraknTx graph = session.open(GraknTxType.READ)) {
            assertEquals(n, graph.getEntityType("name_tag").instances().count());
        }
    }
}
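The loader(MAX_DELAY) call above is a test-local helper that is not shown on this page. A minimal sketch of what such a helper could look like, assuming the BatchExecutorClient builder used in the Migrator.load snippet further down and a hypothetical engine.uri() accessor for the test engine's address:

// Sketch only. Assumes the builder methods shown in Migrator.load below and a
// hypothetical engine.uri() accessor; requires ai.grakn.client.GraknClient,
// ai.grakn.client.BatchExecutorClient and com.codahale.metrics.MetricRegistry.
private BatchExecutorClient loader(int maxDelayMs) {
    GraknClient graknClient = GraknClient.of(engine.uri());   // engine.uri() is an assumption
    return BatchExecutorClient.newBuilder()
            .taskClient(graknClient)
            .maxRetries(5)                // illustrative retry count
            .maxDelay(maxDelayMs)         // cap the backoff delay under test
            .metricRegistry(new MetricRegistry())
            .build();
}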
Use of ai.grakn.client.QueryResponse in project grakn by graknlabs.
From class BatchLoader, method sendBatchRequest:
static void sendBatchRequest(SimpleURI uri, Keyspace keyspace, Path graqlPath, PrintStream sout, PrintStream serr) throws IOException {
    AtomicInteger queriesExecuted = new AtomicInteger(0);
    try (FileInputStream inputStream = new FileInputStream(graqlPath.toFile());
         Reader queryReader = new InputStreamReader(inputStream, Charsets.UTF_8);
         BatchExecutorClient batchExecutorClient = loaderClient(uri)) {
        Graql.parser().parseList(queryReader).forEach(query -> {
            Observable<QueryResponse> observable = batchExecutorClient.add(query, keyspace, false);
            observable.subscribe(
                    /* On success: */ queryResponse -> queriesExecuted.incrementAndGet(),
                    /* On error: */ serr::println);
        });
    }
    sout.println("Statements executed: " + queriesExecuted.get());
}
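A hedged example of how sendBatchRequest might be called from a loader command; the address, keyspace name, and file path below are placeholders, and the sketch assumes SimpleURI accepts a host:port string and Keyspace.of(String) as used elsewhere in Grakn:

// Illustrative call site; all values are placeholders.
SimpleURI uri = new SimpleURI("localhost:4567");        // engine address (assumed default)
Keyspace keyspace = Keyspace.of("grakn");               // target keyspace (placeholder)
Path graqlPath = Paths.get("data/insert-queries.gql");  // Graql file to load (placeholder)
sendBatchRequest(uri, keyspace, graqlPath, System.out, System.err);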
Use of ai.grakn.client.QueryResponse in project grakn by graknlabs.
From class Migrator, method load:
/**
* Migrate data constrained by this migrator using a loader configured
* by the provided parameters.
*
* @param template Template used to extract the data
* @param data Data being migrated
*/
public void load(String template, Stream<Map<String, Object>> data) {
    GraknClient graknClient = GraknClient.of(uri);
    AtomicInteger queriesExecuted = new AtomicInteger(0);
    try (BatchExecutorClient loader = BatchExecutorClient.newBuilder()
            .taskClient(graknClient)
            .maxRetries(retries)
            .maxDelay(maxDelayMs)
            .metricRegistry(metricRegistry)
            .build()) {
        checkKeyspace(graknClient);
        Stream<Query> queryStream = data.flatMap(d -> template(template, d, failFast));
        if (maxLines > -1) {
            queryStream = queryStream.limit(maxLines);
        }
        queryStream.forEach(q -> {
            LOG.trace("Adding query {}", q);
            totalMeter.mark();
            // We add the query and get back a hot observable: it starts executing immediately.
            Observable<QueryResponse> observable = loader.add(q, keyspace, failFast);
            subscribeToReportOutcome(failFast, observable, queriesExecuted);
        });
    }
    System.out.println("Loaded " + queriesExecuted + " statements");
}
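subscribeToReportOutcome is a private Migrator helper that is not shown on this page. A rough sketch of what it could do, based on the subscribe(onNext, onError) pattern from the BatchLoader snippet above; the logger usage and message are assumptions:

// Sketch only: count successful responses and log failures.
private void subscribeToReportOutcome(boolean failFast, Observable<QueryResponse> observable,
                                      AtomicInteger queriesExecuted) {
    observable.subscribe(
            queryResponse -> queriesExecuted.incrementAndGet(),                  // success: bump the counter
            error -> LOG.error("Query failed (failFast={})", failFast, error));  // failure: log it
}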