Use of org.openrdf.query.TupleQuery in project incubator-rya by apache.
In the class MongoRyaDirectExample, the method testAddAndFreeTextSearchWithPCJ:
private static void testAddAndFreeTextSearchWithPCJ(final SailRepositoryConnection conn) throws Exception {
// add data to the repository using the SailRepository add methods
final ValueFactory f = conn.getValueFactory();
final URI person = f.createURI("http://example.org/ontology/Person");
String uuid;
uuid = "urn:people:alice";
conn.add(f.createURI(uuid), RDF.TYPE, person);
conn.add(f.createURI(uuid), RDFS.LABEL, f.createLiteral("Alice Palace Hose", f.createURI("xsd:string")));
uuid = "urn:people:bobss";
conn.add(f.createURI(uuid), RDF.TYPE, person);
conn.add(f.createURI(uuid), RDFS.LABEL, f.createLiteral("Bob Snob Hose", "en"));
String queryString;
TupleQuery tupleQuery;
CountingResultHandler tupleHandler;
// ///////////// search for alice
queryString = //
"PREFIX fts: <http://rdf.useekm.com/fts#> " + //
"SELECT ?person ?match ?e ?c ?l ?o " + //
"{" + //
" ?person <http://www.w3.org/2000/01/rdf-schema#label> ?match . " + //
" FILTER(fts:text(?match, \"Palace\")) " + //
"}";
tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
tupleHandler = new CountingResultHandler();
tupleQuery.evaluate(tupleHandler);
log.info("Result count : " + tupleHandler.getCount());
Validate.isTrue(tupleHandler.getCount() == 1);
// ///////////// search for alice, restricted to Person instances
queryString = //
"PREFIX fts: <http://rdf.useekm.com/fts#> " + //
"SELECT ?person ?match " + //
"{" + //
" ?person <http://www.w3.org/2000/01/rdf-schema#label> ?match . " + //
" ?person a <http://example.org/ontology/Person> . " + //
" FILTER(fts:text(?match, \"alice\")) " + //
"}";
tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
tupleHandler = new CountingResultHandler();
tupleQuery.evaluate(tupleHandler);
log.info("Result count : " + tupleHandler.getCount());
Validate.isTrue(tupleHandler.getCount() == 1);
// ///////////// search for alice, requiring both "alice" and "palace" to match
queryString = //
"PREFIX fts: <http://rdf.useekm.com/fts#> " + //
"SELECT ?person ?match " + //
"{" + //
" ?person a <http://example.org/ontology/Person> . " + //
" ?person <http://www.w3.org/2000/01/rdf-schema#label> ?match . " + //
" FILTER(fts:text(?match, \"alice\")) " + //
" FILTER(fts:text(?match, \"palace\")) " + //
"}";
tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
tupleHandler = new CountingResultHandler();
tupleQuery.evaluate(tupleHandler);
log.info("Result count : " + tupleHandler.getCount());
Validate.isTrue(tupleHandler.getCount() == 1);
// ///////////// search for "alice hose" (matches both Alice and Bob in Mongo; see the note on the filter below)
queryString = //
"PREFIX fts: <http://rdf.useekm.com/fts#> " + //
"SELECT ?person ?match ?e ?c ?l ?o " + //
"{" + //
" ?person a <http://example.org/ontology/Person> . " + //
" ?person <http://www.w3.org/2000/01/rdf-schema#label> ?match . " + // this is an or query in mongo, a and query in accumulo
" FILTER(fts:text(?match, \"alice hose\")) " + //
"}";
//
tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
tupleHandler = new CountingResultHandler();
tupleQuery.evaluate(tupleHandler);
log.info("Result count : " + tupleHandler.getCount());
Validate.isTrue(tupleHandler.getCount() == 2);
}
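The snippets on this page count query solutions with a CountingResultHandler, which is defined elsewhere in the Rya examples module. A minimal sketch of the shape implied by how it is used here (getCount, resetCount, and being passed to TupleQuery.evaluate) might look like the following; treat it as an illustration, not the project's exact class.
// Sketch of the handler assumed by these examples (imports: org.openrdf.query.BindingSet,
// org.openrdf.query.TupleQueryResultHandlerBase, org.openrdf.query.TupleQueryResultHandlerException).
private static class CountingResultHandler extends TupleQueryResultHandlerBase {
    private int count = 0;

    public int getCount() {
        return count;
    }

    public void resetCount() {
        count = 0;
    }

    @Override
    public void handleSolution(final BindingSet bindingSet) throws TupleQueryResultHandlerException {
        // one call per solution (row) in the result set
        count++;
    }
}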
Use of org.openrdf.query.TupleQuery in project incubator-rya by apache.
In the class MongoRyaDirectExample, the method testIntersectionOfInference:
public static void testIntersectionOfInference(final SailRepositoryConnection conn, final Sail sail) throws MalformedQueryException, RepositoryException, UpdateExecutionException, QueryEvaluationException, TupleQueryResultHandlerException, InferenceEngineException {
log.info("Adding Data");
final String instances = "INSERT DATA\n" + "{ GRAPH <http://updated/test> {\n" + " <urn:Susan> a <urn:Mother> . \n" + " <urn:Mary> a <urn:Woman> . \n" + " <urn:Mary> a <urn:Parent> . \n" + "}}";
Update update = conn.prepareUpdate(QueryLanguage.SPARQL, instances);
update.execute();
final String inferQuery = "select distinct ?x { GRAPH <http://updated/test> { ?x a <urn:Mother> }}";
final String explicitQuery = "select distinct ?x { GRAPH <http://updated/test> {\n" + " { ?x a <urn:Mother> }\n" + " UNION {\n" + " ?x a <urn:Woman> .\n" + " ?x a <urn:Parent> .\n" + " }\n" + "}}";
log.info("Running Explicit Query");
CountingResultHandler resultHandler = new CountingResultHandler();
TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, explicitQuery);
tupleQuery.evaluate(resultHandler);
log.info("Result count : " + resultHandler.getCount());
Validate.isTrue(resultHandler.getCount() == 2);
log.info("Running Inference-dependant Query");
resultHandler.resetCount();
tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, inferQuery);
tupleQuery.evaluate(resultHandler);
log.info("Result count : " + resultHandler.getCount());
Validate.isTrue(resultHandler.getCount() == 1);
log.info("Adding owl:intersectionOf Schema");
// ONTOLOGY - <urn:Mother> owl:intersectionOf ( <urn:Woman> <urn:Parent> ), written out below as an rdf:first/rdf:rest blank-node list
final String ontology = "PREFIX owl: <http://www.w3.org/2002/07/owl#>\n" + "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" + "INSERT DATA\n" + "{ GRAPH <http://updated/test> {\n" + " <urn:Mother> owl:intersectionOf _:bnode1 . \n" + " _:bnode1 rdf:first <urn:Woman> . \n" + " _:bnode1 rdf:rest _:bnode2 . \n" + " _:bnode2 rdf:first <urn:Parent> . \n" + " _:bnode2 rdf:rest rdf:nil . \n" + "}}";
update = conn.prepareUpdate(QueryLanguage.SPARQL, ontology);
update.execute();
log.info("Refreshing InferenceEngine");
((RdfCloudTripleStore) sail).getInferenceEngine().refreshGraph();
log.info("Re-running Inference-dependant Query");
resultHandler.resetCount();
resultHandler = new CountingResultHandler();
tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, inferQuery);
tupleQuery.evaluate(resultHandler);
log.info("Result count : " + resultHandler.getCount());
Validate.isTrue(resultHandler.getCount() == 2);
}
Use of org.openrdf.query.TupleQuery in project incubator-rya by apache.
In the class MongoRyaDirectExample, the method testInfer:
public static void testInfer(final SailRepositoryConnection conn, final Sail sail) throws MalformedQueryException, RepositoryException, UpdateExecutionException, QueryEvaluationException, TupleQueryResultHandlerException, InferenceEngineException {
// Add data
String query = //
"INSERT DATA\n" + //
"{ \n" + " <http://acme.com/people/Mike> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <urn:type1>. " + " <urn:type1> <http://www.w3.org/2000/01/rdf-schema#subClassOf> <urn:superclass>. }";
log.info("Performing Query");
final Update update = conn.prepareUpdate(QueryLanguage.SPARQL, query);
update.execute();
// refresh the graph for inferencing (otherwise there is a five-minute wait)
((RdfCloudTripleStore) sail).getInferenceEngine().refreshGraph();
query = "select ?s { ?s <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <urn:superclass> . }";
final CountingResultHandler resultHandler = new CountingResultHandler();
final TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
tupleQuery.evaluate(resultHandler);
log.info("Result count : " + resultHandler.getCount());
Validate.isTrue(resultHandler.getCount() == 1);
resultHandler.resetCount();
}
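Both inference examples above assume the Sail was opened with inferencing enabled. A rough setup sketch, assuming getConf() is a hypothetical helper that builds the Mongo-backed Rya configuration and that setInfer is the flag which activates the InferenceEngine:
// Assumed setup sketch; not taken verbatim from the example class.
final MongoDBRdfConfiguration conf = getConf(); // hypothetical helper that builds the Mongo-backed configuration
conf.setInfer(true);                            // assumed flag enabling the InferenceEngine
final Sail sail = RyaSailFactory.getInstance(conf);
final SailRepository repository = new SailRepository(sail);
final SailRepositoryConnection conn = repository.getConnection();
// ... run testInfer(conn, sail) and testIntersectionOfInference(conn, sail), then close conn and shut the Sail down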
Use of org.openrdf.query.TupleQuery in project incubator-rya by apache.
In the class StatementMetadataExample, the method query:
public void query(String query, int expected) throws Exception {
prettyPrintQuery(query);
prettyPrintQueryPlan(query);
CountingResultHandler resultHandler = new CountingResultHandler();
TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
tupleQuery.evaluate(resultHandler);
Validate.isTrue(expected == resultHandler.getCount());
}
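A hypothetical invocation of this helper; the SPARQL string and expected count below are illustrative only and are not taken from StatementMetadataExample.
// hypothetical usage of query(); the query text and expected count are made up for illustration
query("SELECT ?x ?y WHERE { ?x <urn:talksTo> ?y . }", 1);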
Use of org.openrdf.query.TupleQuery in project incubator-rya by apache.
In the class MongoBatchUpdatePCJ, the method updatePCJResults:
private void updatePCJResults(final String ryaInstanceName, final String pcjId, final MongoClient client) throws InstanceDoesNotExistException, PCJDoesNotExistException, RyaClientException {
// Things that have to be closed before we exit.
Sail sail = null;
SailConnection sailConn = null;
try (final PrecomputedJoinStorage pcjStorage = new MongoPcjStorage(client, ryaInstanceName)) {
// Create an instance of Sail backed by the Rya instance.
sail = connectToRya(ryaInstanceName);
final SailRepository sailRepo = new SailRepository(sail);
final SailRepositoryConnection sailRepoConn = sailRepo.getConnection();
// Purge the old results from the PCJ.
try {
pcjStorage.purge(pcjId);
} catch (final PCJStorageException e) {
throw new RyaClientException("Could not batch update PCJ with ID '" + pcjId + "' because the old " + "results could not be purged from it.", e);
}
// Parse the PCJ's SPARQL query.
final PcjMetadata metadata = pcjStorage.getPcjMetadata(pcjId);
final String sparql = metadata.getSparql();
sailConn = sail.getConnection();
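// Note: this raw SailConnection is not used for the query below (which goes through sailRepoConn);
// it is only closed in the finally block.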
final TupleQuery tupleQuery = sailRepoConn.prepareTupleQuery(QueryLanguage.SPARQL, sparql);
// Execute the query.
final List<VisibilityBindingSet> batch = new ArrayList<>(1000);
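// Results are buffered and written to PCJ storage in batches of 1,000.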
tupleQuery.evaluate(new TupleQueryResultHandlerBase() {
@Override
public void handleSolution(final BindingSet bindingSet) throws TupleQueryResultHandlerException {
final VisibilityBindingSet result = new VisibilityBindingSet(bindingSet, "");
log.warn("Visibility information on the binding set is lost during a batch update." + " This can create data leaks.");
batch.add(result);
if (batch.size() == 1000) {
try {
pcjStorage.addResults(pcjId, batch);
} catch (final PCJStorageException e) {
throw new TupleQueryResultHandlerException("Failed to batch load new results into the PCJ with ID '" + pcjId + "'.", e);
}
batch.clear();
}
}
});
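// Flush whatever is left over after the final full batch.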
if (!batch.isEmpty()) {
pcjStorage.addResults(pcjId, batch);
batch.clear();
}
} catch (final MalformedQueryException | PCJStorageException | SailException | QueryEvaluationException | RepositoryException | TupleQueryResultHandlerException e) {
throw new RyaClientException("Failed to batch load new results into the PCJ with ID '" + pcjId + "'.", e);
} finally {
if (sailConn != null) {
try {
sailConn.close();
} catch (final SailException e) {
log.warn(e.getMessage(), e);
}
}
if (sail != null) {
try {
sail.shutDown();
} catch (final SailException e) {
log.warn(e.getMessage(), e);
}
}
}
}