Example usage of org.openrdf.query.TupleQuery in the Apache incubator-rya project: class MongoPCJIndexIT, method sparqlQuery_Test.
@Test
public void sparqlQuery_Test() throws Exception {
    // Setup a Rya Client.
    final MongoConnectionDetails connectionDetails = getConnectionDetails();
    final RyaClient ryaClient = MongoRyaClientFactory.build(connectionDetails, getMongoClient());
    final String pcjQuery = "SELECT ?name WHERE {" + " ?name <urn:likes> <urn:icecream> ." + " ?name <urn:hasEyeColor> <urn:blue> ." + " }";
    // Install an instance of Rya with the PCJ index enabled and load statements.
    ryaClient.getInstall().install(conf.getRyaInstanceName(), InstallConfiguration.builder().setEnablePcjIndex(true).build());
    ryaClient.getLoadStatements().loadStatements(conf.getRyaInstanceName(), getStatements());
    // Create the PCJ and materialize its results from the loaded statements.
    final String pcjId = ryaClient.getCreatePCJ().createPCJ(conf.getRyaInstanceName(), pcjQuery);
    ryaClient.getBatchUpdatePCJ().batchUpdate(conf.getRyaInstanceName(), pcjId);
    // purge contents of rya triples collection
    getMongoClient().getDatabase(conf.getRyaInstanceName()).getCollection(conf.getTriplesCollectionName()).drop();
    // run the query. since the triples collection is gone, if the results match, they came from the PCJ index.
    conf.setBoolean(ConfigUtils.USE_PCJ, true);
    conf.setBoolean(ConfigUtils.USE_OPTIMAL_PCJ, true);
    conf.setBoolean(ConfigUtils.DISPLAY_QUERY_PLAN, true);
    final Sail sail = RyaSailFactory.getInstance(conf);
    final SailRepositoryConnection conn = new SailRepository(sail).getConnection();
    final Set<BindingSet> results = new HashSet<>();
    try {
        conn.begin();
        final TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, pcjQuery);
        tupleQuery.setBinding(RdfCloudTripleStoreConfiguration.CONF_QUERYPLAN_FLAG, RdfCloudTripleStoreConstants.VALUE_FACTORY.createLiteral(true));
        final TupleQueryResult rez = tupleQuery.evaluate();
        try {
            while (rez.hasNext()) {
                results.add(rez.next());
            }
        } finally {
            // The result cursor was never closed in the original test; close it so the
            // underlying iteration's resources are released even if an assertion fails.
            rez.close();
        }
    } finally {
        // Closing the connection also ends the read-only transaction started by begin(),
        // which the original test leaked.
        conn.close();
    }
    // Verify the correct results were loaded into the PCJ table.
    final Set<BindingSet> expectedResults = new HashSet<>();
    MapBindingSet bs = new MapBindingSet();
    bs.addBinding("name", VF.createURI("urn:Alice"));
    expectedResults.add(bs);
    bs = new MapBindingSet();
    bs.addBinding("name", VF.createURI("urn:Bob"));
    expectedResults.add(bs);
    bs = new MapBindingSet();
    bs.addBinding("name", VF.createURI("urn:Charlie"));
    expectedResults.add(bs);
    bs = new MapBindingSet();
    bs.addBinding("name", VF.createURI("urn:David"));
    expectedResults.add(bs);
    bs = new MapBindingSet();
    bs.addBinding("name", VF.createURI("urn:Eve"));
    expectedResults.add(bs);
    bs = new MapBindingSet();
    bs.addBinding("name", VF.createURI("urn:Frank"));
    expectedResults.add(bs);
    assertEquals(6, results.size());
    assertEquals(expectedResults, results);
}
Example usage of org.openrdf.query.TupleQuery in the Apache incubator-rya project: class InferenceExamples, method testOneOfInference.
/**
 * Demonstrates {@code owl:oneOf} inference: an inference-dependent query returns
 * nothing until the oneOf schema is inserted AND the {@code InferenceEngine} is
 * refreshed, after which it returns the same five cards as the explicit query.
 *
 * @param conn open repository connection used to insert data and run queries. (not null)
 * @param sail the underlying {@link Sail}; must be an {@code RdfCloudTripleStore} so its
 *             inference engine can be refreshed. (not null)
 */
public static void testOneOfInference(final SailRepositoryConnection conn, final Sail sail) throws MalformedQueryException, RepositoryException, UpdateExecutionException, QueryEvaluationException, TupleQueryResultHandlerException, InferenceEngineException {
    log.info("Adding Data");
    final String instances = "INSERT DATA" + "{ GRAPH <http://updated/test> {\n" + " <urn:FlopCard1> a <urn:Card> . \n" + " <urn:FlopCard1> <urn:HasRank> <urn:Ace> . \n" + " <urn:FlopCard1> <urn:HasSuit> <urn:Diamonds> . \n" + " <urn:FlopCard2> a <urn:Card> . \n" + " <urn:FlopCard2> <urn:HasRank> <urn:Ace> . \n" + " <urn:FlopCard2> <urn:HasSuit> <urn:Hearts> . \n" + " <urn:FlopCard3> a <urn:Card> . \n" + " <urn:FlopCard3> <urn:HasRank> <urn:King> . \n" + " <urn:FlopCard3> <urn:HasSuit> <urn:Spades> . \n" + " <urn:TurnCard> a <urn:Card> . \n" + " <urn:TurnCard> <urn:HasRank> <urn:10> . \n" + " <urn:TurnCard> <urn:HasSuit> <urn:Clubs> . \n" + " <urn:RiverCard> a <urn:Card> . \n" + " <urn:RiverCard> <urn:HasRank> <urn:Queen> . \n" + " <urn:RiverCard> <urn:HasSuit> <urn:Hearts> . \n" + "}}";
    Update update = conn.prepareUpdate(QueryLanguage.SPARQL, instances);
    update.execute();
    // Explicit baseline: enumerate the suits inline with VALUES, no inference needed.
    final String explicitQuery = "select distinct ?card { GRAPH <http://updated/test> {\n" + " ?card a <urn:Card> . \n" + " VALUES ?suit { <urn:Clubs> <urn:Diamonds> <urn:Hearts> <urn:Spades> } . \n" + " ?card <urn:HasSuit> ?suit . \n" + "}}";
    log.info("Running Explicit Query");
    // A single handler is reused for all three queries; resetCount() clears it between runs.
    final CountingResultHandler resultHandler = new CountingResultHandler();
    TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, explicitQuery);
    tupleQuery.evaluate(resultHandler);
    log.info("Result count : " + resultHandler.getCount());
    Validate.isTrue(resultHandler.getCount() == 5);
    log.info("Adding owl:oneOf Schema");
    // ONTOLOGY - :Suits oneOf (:Clubs, :Diamonds, :Hearts, :Spades)
    // ONTOLOGY - :Ranks oneOf (:Ace, :2, :3, :4, :5, :6, :7, :8, :9, :10, :Jack, :Queen, :King)
    final String ontology = "INSERT DATA { GRAPH <http://updated/test> {\n" + " <urn:Suits> owl:oneOf _:bnodeS1 . \n" + " _:bnodeS1 rdf:first <urn:Clubs> . \n" + " _:bnodeS1 rdf:rest _:bnodeS2 . \n" + " _:bnodeS2 rdf:first <urn:Diamonds> . \n" + " _:bnodeS2 rdf:rest _:bnodeS3 . \n" + " _:bnodeS3 rdf:first <urn:Hearts> . \n" + " _:bnodeS3 rdf:rest _:bnodeS4 . \n" + " _:bnodeS4 rdf:first <urn:Spades> . \n" + " _:bnodeS4 rdf:rest rdf:nil . \n" + " <urn:Ranks> owl:oneOf _:bnodeR1 . \n" + " _:bnodeR1 rdf:first <urn:Ace> . \n" + " _:bnodeR1 rdf:rest _:bnodeR2 . \n" + " _:bnodeR2 rdf:first <urn:2> . \n" + " _:bnodeR2 rdf:rest _:bnodeR3 . \n" + " _:bnodeR3 rdf:first <urn:3> . \n" + " _:bnodeR3 rdf:rest _:bnodeR4 . \n" + " _:bnodeR4 rdf:first <urn:4> . \n" + " _:bnodeR4 rdf:rest _:bnodeR5 . \n" + " _:bnodeR5 rdf:first <urn:5> . \n" + " _:bnodeR5 rdf:rest _:bnodeR6 . \n" + " _:bnodeR6 rdf:first <urn:6> . \n" + " _:bnodeR6 rdf:rest _:bnodeR7 . \n" + " _:bnodeR7 rdf:first <urn:7> . \n" + " _:bnodeR7 rdf:rest _:bnodeR8 . \n" + " _:bnodeR8 rdf:first <urn:8> . \n" + " _:bnodeR8 rdf:rest _:bnodeR9 . \n" + " _:bnodeR9 rdf:first <urn:9> . \n" + " _:bnodeR9 rdf:rest _:bnodeR10 . \n" + " _:bnodeR10 rdf:first <urn:10> . \n" + " _:bnodeR10 rdf:rest _:bnodeR11 . \n" + " _:bnodeR11 rdf:first <urn:Jack> . \n" + " _:bnodeR11 rdf:rest _:bnodeR12 . \n" + " _:bnodeR12 rdf:first <urn:Queen> . \n" + " _:bnodeR12 rdf:rest _:bnodeR13 . \n" + " _:bnodeR13 rdf:first <urn:King> . \n" + " _:bnodeR13 rdf:rest rdf:nil . \n" + " <urn:Card> owl:intersectionOf (\n" + " [ owl:onProperty <urn:HasRank> ; owl:someValuesFrom <urn:Ranks> ]\n" + " [ owl:onProperty <urn:HasSuit> ; owl:someValuesFrom <urn:Suits> ]\n" + " ) . \n" + " <urn:HasRank> owl:range <urn:Ranks> . \n" + " <urn:HasSuit> owl:range <urn:Suits> . \n" + "}}";
    update = conn.prepareUpdate(QueryLanguage.SPARQL, ontology);
    update.execute();
    log.info("Running Inference-dependent Query without refreshing InferenceEngine");
    resultHandler.resetCount();
    // Inference-dependent form: ?suit a <urn:Suits> only holds via the oneOf schema.
    final String inferQuery = "select distinct ?card { GRAPH <http://updated/test> {\n" + " ?card a <urn:Card> . \n" + " ?suit a <urn:Suits> . \n" + " ?card <urn:HasSuit> ?suit . \n" + "}}";
    tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, inferQuery);
    tupleQuery.evaluate(resultHandler);
    log.info("Result count : " + resultHandler.getCount());
    // Zero results: the engine has not yet picked up the new schema.
    Validate.isTrue(resultHandler.getCount() == 0);
    log.info("Refreshing InferenceEngine");
    ((RdfCloudTripleStore) sail).getInferenceEngine().refreshGraph();
    log.info("Re-running Inference-dependent Query");
    // Original code both reset AND replaced the handler here; a single reset is sufficient.
    resultHandler.resetCount();
    tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, inferQuery);
    tupleQuery.evaluate(resultHandler);
    log.info("Result count : " + resultHandler.getCount());
    Validate.isTrue(resultHandler.getCount() == 5);
}
Example usage of org.openrdf.query.TupleQuery in the Apache incubator-rya project: class InferenceExamples, method testAllValuesFromInference.
/**
 * Demonstrates {@code owl:allValuesFrom} inference: after the restriction schema is
 * inserted and the inference engine refreshed, the simple "?x a Person" query also
 * matches the parent of a known Person.
 */
public static void testAllValuesFromInference(final SailRepositoryConnection conn, final Sail sail) throws MalformedQueryException, RepositoryException, UpdateExecutionException, QueryEvaluationException, TupleQueryResultHandlerException, InferenceEngineException {
    log.info("Adding Data");
    // Seed data: Alice is explicitly a Person; Bob and Dan are only parents.
    String sparql = "INSERT DATA\n" + "{ GRAPH <http://updated/test> {\n" + " <urn:Alice> a <urn:Person> .\n" + " <urn:Alice> <urn:hasParent> <urn:Bob> .\n" + " <urn:Carol> <urn:hasParent> <urn:Dan> .\n" + "}}";
    Update updateOp = conn.prepareUpdate(QueryLanguage.SPARQL, sparql);
    updateOp.execute();
    // Simple query whose answer grows once inference kicks in.
    final String inferQuery = "select distinct ?x { GRAPH <http://updated/test> { ?x a <urn:Person> }}";
    // Hand-written UNION that spells out the inference rule explicitly.
    final String explicitQuery = "select distinct ?x { GRAPH <http://updated/test> {\n" + " { ?x a <urn:Person> }\n" + " UNION {\n" + " ?y a <urn:Person> .\n" + " ?y <urn:hasParent> ?x .\n" + " }\n" + "}}";
    log.info("Running Explicit Query");
    final CountingResultHandler countHandler = new CountingResultHandler();
    TupleQuery query = conn.prepareTupleQuery(QueryLanguage.SPARQL, explicitQuery);
    query.evaluate(countHandler);
    log.info("Result count : " + countHandler.getCount());
    Validate.isTrue(countHandler.getCount() == 2);
    log.info("Running Inference-dependent Query");
    countHandler.resetCount();
    query = conn.prepareTupleQuery(QueryLanguage.SPARQL, inferQuery);
    query.evaluate(countHandler);
    log.info("Result count : " + countHandler.getCount());
    // Only Alice matches before the schema is added.
    Validate.isTrue(countHandler.getCount() == 1);
    log.info("Adding owl:allValuesFrom Schema");
    // Person is a subclass of the restriction "all hasParent values are Persons".
    sparql = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" + "PREFIX owl: <" + OWL.NAMESPACE + ">\n" + "INSERT DATA\n" + "{ GRAPH <http://updated/test> {\n" + " <urn:Person> rdfs:subClassOf [ owl:onProperty <urn:hasParent> ; owl:allValuesFrom <urn:Person> ] ." + "}}";
    updateOp = conn.prepareUpdate(QueryLanguage.SPARQL, sparql);
    updateOp.execute();
    log.info("Refreshing InferenceEngine");
    ((RdfCloudTripleStore) sail).getInferenceEngine().refreshGraph();
    log.info("Re-running Inference-dependent Query");
    countHandler.resetCount();
    query = conn.prepareTupleQuery(QueryLanguage.SPARQL, inferQuery);
    query.evaluate(countHandler);
    log.info("Result count : " + countHandler.getCount());
    // Bob is now inferred to be a Person as well.
    Validate.isTrue(countHandler.getCount() == 2);
}
Example usage of org.openrdf.query.TupleQuery in the Apache incubator-rya project: class InferenceExamples, method testInfer.
/**
 * Demonstrates basic rdfs:subClassOf inference: Mike is typed as urn:type1, and
 * urn:type1 is declared a subclass of urn:superclass, so after refreshing the
 * inference engine a query for instances of urn:superclass finds Mike.
 *
 * @param conn open repository connection used to insert data and run the query. (not null)
 * @param sail the underlying {@link Sail}; must be an {@code RdfCloudTripleStore}. (not null)
 */
public static void testInfer(final SailRepositoryConnection conn, final Sail sail) throws MalformedQueryException, RepositoryException, UpdateExecutionException, QueryEvaluationException, TupleQueryResultHandlerException, InferenceEngineException {
    // Add data: one typed instance plus the subclass axiom.
    // (Separate variables instead of reusing one `query` String for both statements.)
    final String insertData = "INSERT DATA\n" + "{ \n" + " <http://acme.com/people/Mike> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <urn:type1>. " + " <urn:type1> <http://www.w3.org/2000/01/rdf-schema#subClassOf> <urn:superclass>. }";
    log.info("Performing Query");
    final Update update = conn.prepareUpdate(QueryLanguage.SPARQL, insertData);
    update.execute();
    // refresh the graph for inferencing (otherwise there is a five minute wait)
    ((RdfCloudTripleStore) sail).getInferenceEngine().refreshGraph();
    final String selectQuery = "select ?s { ?s <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <urn:superclass> . }";
    final CountingResultHandler resultHandler = new CountingResultHandler();
    final TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, selectQuery);
    tupleQuery.evaluate(resultHandler);
    log.info("Result count : " + resultHandler.getCount());
    Validate.isTrue(resultHandler.getCount() == 1);
    // Dropped the original trailing resultHandler.resetCount(): the handler is a
    // local variable going out of scope, so the reset had no effect.
}
Example usage of org.openrdf.query.TupleQuery in the Apache incubator-rya project: class InferenceExamples, method testSomeValuesFromInference.
/**
 * Demonstrates {@code owl:someValuesFrom} inference using LUBM vocabulary: once
 * Chair is declared equivalent to "heads some Department" and the inference
 * engine is refreshed, querying for Chairs finds the department head.
 */
public static void testSomeValuesFromInference(final SailRepositoryConnection conn, final Sail sail) throws MalformedQueryException, RepositoryException, UpdateExecutionException, QueryEvaluationException, TupleQueryResultHandlerException, InferenceEngineException {
    final String lubm = "http://swat.cse.lehigh.edu/onto/univ-bench.owl#";
    log.info("Adding Data");
    // Seed data: Alice heads a Department; Bob heads only a ResearchGroup.
    String sparql = "PREFIX lubm: <" + lubm + ">\n" + "INSERT DATA { GRAPH <http://updated/test> {\n" + " <urn:Department0> a lubm:Department; lubm:subOrganizationOf <urn:University0> .\n" + " <urn:ResearchGroup0> a lubm:ResearchGroup; lubm:subOrganizationOf <urn:Department0> .\n" + " <urn:Alice> lubm:headOf <urn:Department0> .\n" + " <urn:Bob> lubm:headOf <urn:ResearchGroup0> .\n" + " <urn:Carol> lubm:worksFor <urn:Department0> .\n" + "}}";
    Update updateOp = conn.prepareUpdate(QueryLanguage.SPARQL, sparql);
    updateOp.execute();
    // Simple query whose answer depends on the someValuesFrom axiom.
    final String inferQuery = "select distinct ?x { GRAPH <http://updated/test> { ?x a <" + lubm + "Chair> }}";
    // Hand-written UNION that encodes the same rule explicitly.
    final String explicitQuery = "prefix lubm: <" + lubm + ">\n" + "select distinct ?x { GRAPH <http://updated/test> {\n" + " { ?x a lubm:Chair }\n" + " UNION\n" + " { ?x lubm:headOf [ a lubm:Department ] }\n" + "}}";
    log.info("Running Explicit Query");
    final CountingResultHandler countHandler = new CountingResultHandler();
    TupleQuery query = conn.prepareTupleQuery(QueryLanguage.SPARQL, explicitQuery);
    query.evaluate(countHandler);
    log.info("Result count : " + countHandler.getCount());
    Validate.isTrue(countHandler.getCount() == 1);
    log.info("Running Inference-dependent Query");
    countHandler.resetCount();
    query = conn.prepareTupleQuery(QueryLanguage.SPARQL, inferQuery);
    query.evaluate(countHandler);
    log.info("Result count : " + countHandler.getCount());
    // Nobody is explicitly typed as a Chair yet.
    Validate.isTrue(countHandler.getCount() == 0);
    log.info("Adding owl:someValuesFrom Schema");
    // Chair is equivalent to the restriction "headOf some Department".
    sparql = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" + "PREFIX owl: <" + OWL.NAMESPACE + ">\n" + "PREFIX lubm: <" + lubm + ">\n" + "INSERT DATA\n" + "{ GRAPH <http://updated/test> {\n" + " lubm:Chair owl:equivalentClass [ owl:onProperty lubm:headOf ; owl:someValuesFrom lubm:Department ] ." + "}}";
    updateOp = conn.prepareUpdate(QueryLanguage.SPARQL, sparql);
    updateOp.execute();
    log.info("Refreshing InferenceEngine");
    ((RdfCloudTripleStore) sail).getInferenceEngine().refreshGraph();
    log.info("Re-running Inference-dependent Query");
    countHandler.resetCount();
    query = conn.prepareTupleQuery(QueryLanguage.SPARQL, inferQuery);
    query.evaluate(countHandler);
    log.info("Result count : " + countHandler.getCount());
    // Alice is now inferred to be a Chair.
    Validate.isTrue(countHandler.getCount() == 1);
}
Aggregations