Use of org.openrdf.query.TupleQuery in project gocd by gocd.
The class SesameGraph, method renderTupleQuery:
private void renderTupleQuery(Query query, TupleQueryResultWriter writer) throws QueryEvaluationException, TupleQueryResultHandlerException {
    TupleQueryResult tupleQueryResult = ((TupleQuery) query).evaluate();
    writer.startQueryResult(tupleQueryResult.getBindingNames());
    while (tupleQueryResult.hasNext()) {
        writer.handleSolution(tupleQueryResult.next());
    }
    writer.endQueryResult();
}
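Not part of the gocd source: a minimal, hypothetical caller for the pattern above, showing how a TupleQuery prepared on a RepositoryConnection can be streamed to a TupleQueryResultWriter. The query string, the JSON writer choice, and the method name writeSelectAsJson are illustrative assumptions.

import java.io.OutputStream;
import org.openrdf.query.QueryLanguage;
import org.openrdf.query.TupleQuery;
import org.openrdf.query.resultio.sparqljson.SPARQLResultsJSONWriter;
import org.openrdf.repository.RepositoryConnection;

static void writeSelectAsJson(RepositoryConnection conn, OutputStream out) throws Exception {
    // Hypothetical query; any SELECT query works here.
    TupleQuery query = conn.prepareTupleQuery(QueryLanguage.SPARQL,
            "SELECT ?s ?p ?o WHERE { ?s ?p ?o } LIMIT 10");
    // Push-style shortcut: evaluate(handler) calls startQueryResult, handleSolution,
    // and endQueryResult for us, equivalent to the manual loop in renderTupleQuery above.
    query.evaluate(new SPARQLResultsJSONWriter(out));
}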
Use of org.openrdf.query.TupleQuery in project incubator-rya by apache.
The class EntityDirectExample, method testAddAndTemporalSearchWithPCJ:
private static void testAddAndTemporalSearchWithPCJ(final SailRepositoryConnection conn) throws Exception {
    // create some resources and literals to make statements out of
    final String sparqlInsert = "PREFIX pref: <http://www.model/pref#> \n" + //
            "INSERT DATA {\n" + //
            "<urn:Bob> a pref:Person ;\n" + // one second
            " pref:hasProperty1 'property1' ;\n" + // 2 seconds
            " pref:hasProperty2 'property2' ;\n" + // 3 seconds
            " pref:hasProperty3 'property3' .\n" + //
            "<urn:Fred> a pref:Person ; \n" + //
            " pref:hasProperty4 'property4' ; \n" + //
            " pref:hasProperty5 'property5' ; \n" + "}";
    final Update update = conn.prepareUpdate(QueryLanguage.SPARQL, sparqlInsert);
    update.execute();
    String queryString = //
            "PREFIX pref: <http://www.model/pref#> \n" + //
            "SELECT ?x ?z \n" + "WHERE { \n" + " ?x a ?z. \n" + //
            " ?x pref:hasProperty1 'property1' . \n" + //
            " ?x pref:hasProperty2 'property2' . \n" + //
            " ?x pref:hasProperty3 'property3' . \n" + //
            "}";
    TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
    CountingResultHandler tupleHandler = new CountingResultHandler();
    tupleQuery.evaluate(tupleHandler);
    log.info("Result count : " + tupleHandler.getCount());
    Validate.isTrue(tupleHandler.getCount() == 1);
    Validate.isTrue(tupleHandler.getBsSize() == 2);
    queryString = //
            "PREFIX pref: <http://www.model/pref#> \n" + //
            "SELECT ?x ?w ?z \n" + "WHERE { \n" + " ?x a ?z. \n" + //
            " ?x pref:hasProperty4 'property4' . \n" + //
            " ?x pref:hasProperty5 ?w . \n" + //
            "}";
    tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
    tupleHandler = new CountingResultHandler();
    tupleQuery.evaluate(tupleHandler);
    log.info("Result count : " + tupleHandler.getCount());
    Validate.isTrue(tupleHandler.getCount() == 1);
    Validate.isTrue(tupleHandler.getBsSize() == 3);
    queryString = "PREFIX pref: <http://www.model/pref#> " + "SELECT ?v ?w ?x ?y ?z " + "WHERE { " + " ?w a ?z . " + " ?w pref:hasProperty1 ?v . " + " ?w pref:hasProperty2 'property2' . " + " ?w pref:hasProperty3 'property3' . " + " ?x a ?z . " + " ?x pref:hasProperty4 'property4' . " + " ?x pref:hasProperty5 ?y . " + "}";
    tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
    tupleHandler = new CountingResultHandler();
    tupleQuery.evaluate(tupleHandler);
    log.info("Result count : " + tupleHandler.getCount());
    Validate.isTrue(tupleHandler.getCount() == 1);
    Validate.isTrue(tupleHandler.getBsSize() == 5);
}
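The Rya examples on this page count results with a CountingResultHandler whose source is not shown here. A minimal sketch of such a handler, assuming the Sesame 2.7-era TupleQueryResultHandler interface (the actual class in incubator-rya may differ), could look like this:

import java.util.List;
import org.openrdf.query.BindingSet;
import org.openrdf.query.QueryResultHandlerException;
import org.openrdf.query.TupleQueryResultHandler;
import org.openrdf.query.TupleQueryResultHandlerException;

public class CountingResultHandler implements TupleQueryResultHandler {
    private int count = 0;
    private int bsSize = 0;

    public int getCount() {
        return count;
    }

    public int getBsSize() {
        return bsSize;
    }

    public void resetCount() {
        count = 0;
    }

    @Override
    public void startQueryResult(final List<String> bindingNames) throws TupleQueryResultHandlerException {
        // nothing to do before the first solution
    }

    @Override
    public void handleSolution(final BindingSet bindingSet) throws TupleQueryResultHandlerException {
        count++;
        bsSize = bindingSet.size(); // remember the width of the last binding set
    }

    @Override
    public void endQueryResult() throws TupleQueryResultHandlerException {
        // nothing to do after the last solution
    }

    @Override
    public void handleBoolean(final boolean value) throws QueryResultHandlerException {
        // not used for SELECT queries
    }

    @Override
    public void handleLinks(final List<String> linkUrls) throws QueryResultHandlerException {
        // not used in these examples
    }
}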
Use of org.openrdf.query.TupleQuery in project incubator-rya by apache.
The class MongoRyaDirectExample, method testAddAndDeleteNoContext:
public static void testAddAndDeleteNoContext(final SailRepositoryConnection conn) throws MalformedQueryException, RepositoryException, UpdateExecutionException, QueryEvaluationException, TupleQueryResultHandlerException {
    // Add data
    String query = //
            "INSERT DATA\n" + //
            "{ \n" + //
            " <http://acme.com/people/Mike> " + //
            " <http://acme.com/actions/likes> \"A new book\" ;\n" + " <http://acme.com/actions/likes> \"Avocados\" .\n" + " }";
    log.info("Performing Query");
    Update update = conn.prepareUpdate(QueryLanguage.SPARQL, query);
    update.execute();
    query = "select ?p ?o {<http://acme.com/people/Mike> ?p ?o . }";
    final CountingResultHandler resultHandler = new CountingResultHandler();
    TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
    tupleQuery.evaluate(resultHandler);
    log.info("Result count : " + resultHandler.getCount());
    Validate.isTrue(resultHandler.getCount() == 2);
    resultHandler.resetCount();
    // Delete Data
    query = //
            "DELETE DATA\n" + "{ \n" + " <http://acme.com/people/Mike> <http://acme.com/actions/likes> \"A new book\" ;\n" + " <http://acme.com/actions/likes> \"Avocados\" .\n" + "}";
    update = conn.prepareUpdate(QueryLanguage.SPARQL, query);
    update.execute();
    query = "select ?p ?o { {<http://acme.com/people/Mike> ?p ?o . }}";
    tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
    tupleQuery.evaluate(resultHandler);
    log.info("Result count : " + resultHandler.getCount());
    Validate.isTrue(resultHandler.getCount() == 0);
}
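The examples above push solutions into a handler; the gocd example at the top of the page pulls them from a TupleQueryResult instead. For comparison, a pull-style sketch that counts the same 'Mike' statements without a custom handler (the method name countMikeStatements is an illustrative assumption):

import org.openrdf.query.QueryLanguage;
import org.openrdf.query.TupleQuery;
import org.openrdf.query.TupleQueryResult;
import org.openrdf.repository.RepositoryConnection;

static long countMikeStatements(final RepositoryConnection conn) throws Exception {
    final TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL,
            "select ?p ?o {<http://acme.com/people/Mike> ?p ?o . }");
    final TupleQueryResult result = tupleQuery.evaluate();
    long count = 0;
    try {
        // Pull-style iteration: each next() returns one BindingSet.
        while (result.hasNext()) {
            result.next();
            count++;
        }
    } finally {
        result.close(); // release the underlying iteration
    }
    return count;
}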
Use of org.openrdf.query.TupleQuery in project incubator-rya by apache.
The class MongoRyaDirectExample, method testOneOfInference:
public static void testOneOfInference(final SailRepositoryConnection conn, final Sail sail) throws MalformedQueryException, RepositoryException, UpdateExecutionException, QueryEvaluationException, TupleQueryResultHandlerException, InferenceEngineException {
    log.info("Adding Data");
    final String instances = "INSERT DATA" + "{ GRAPH <http://updated/test> {\n" + " <urn:FlopCard1> a <urn:Card> . \n" + " <urn:FlopCard1> <urn:HasRank> <urn:Ace> . \n" + " <urn:FlopCard1> <urn:HasSuit> <urn:Diamonds> . \n" + " <urn:FlopCard2> a <urn:Card> . \n" + " <urn:FlopCard2> <urn:HasRank> <urn:Ace> . \n" + " <urn:FlopCard2> <urn:HasSuit> <urn:Hearts> . \n" + " <urn:FlopCard3> a <urn:Card> . \n" + " <urn:FlopCard3> <urn:HasRank> <urn:King> . \n" + " <urn:FlopCard3> <urn:HasSuit> <urn:Spades> . \n" + " <urn:TurnCard> a <urn:Card> . \n" + " <urn:TurnCard> <urn:HasRank> <urn:10> . \n" + " <urn:TurnCard> <urn:HasSuit> <urn:Clubs> . \n" + " <urn:RiverCard> a <urn:Card> . \n" + " <urn:RiverCard> <urn:HasRank> <urn:Queen> . \n" + " <urn:RiverCard> <urn:HasSuit> <urn:Hearts> . \n" + "}}";
    Update update = conn.prepareUpdate(QueryLanguage.SPARQL, instances);
    update.execute();
    final String explicitQuery = "select distinct ?card { GRAPH <http://updated/test> {\n" + " ?card a <urn:Card> . \n" + " VALUES ?suit { <urn:Clubs> <urn:Diamonds> <urn:Hearts> <urn:Spades> } . \n" + " ?card <urn:HasSuit> ?suit . \n" + "}}";
    log.info("Running Explicit Query");
    CountingResultHandler resultHandler = new CountingResultHandler();
    TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, explicitQuery);
    tupleQuery.evaluate(resultHandler);
    log.info("Result count : " + resultHandler.getCount());
    Validate.isTrue(resultHandler.getCount() == 5);
    log.info("Adding owl:oneOf Schema");
    // ONTOLOGY - :Suits oneOf (:Clubs, :Diamonds, :Hearts, :Spades)
    // ONTOLOGY - :Ranks oneOf (:Ace, :1, :2, :3, :4, :5, :6, :7, :8, :9, :10, :Jack, :Queen, :King)
    final String ontology = "INSERT DATA { GRAPH <http://updated/test> {\n" + " <urn:Suits> owl:oneOf _:bnodeS1 . \n" + " _:bnodeS1 rdf:first <urn:Clubs> . \n" + " _:bnodeS1 rdf:rest _:bnodeS2 . \n" + " _:bnodeS2 rdf:first <urn:Diamonds> . \n" + " _:bnodeS2 rdf:rest _:bnodeS3 . \n" + " _:bnodeS3 rdf:first <urn:Hearts> . \n" + " _:bnodeS3 rdf:rest _:bnodeS4 . \n" + " _:bnodeS4 rdf:first <urn:Spades> . \n" + " _:bnodeS4 rdf:rest rdf:nil . \n" + " <urn:Ranks> owl:oneOf _:bnodeR1 . \n" + " _:bnodeR1 rdf:first <urn:Ace> . \n" + " _:bnodeR1 rdf:rest _:bnodeR2 . \n" + " _:bnodeR2 rdf:first <urn:2> . \n" + " _:bnodeR2 rdf:rest _:bnodeR3 . \n" + " _:bnodeR3 rdf:first <urn:3> . \n" + " _:bnodeR3 rdf:rest _:bnodeR4 . \n" + " _:bnodeR4 rdf:first <urn:4> . \n" + " _:bnodeR4 rdf:rest _:bnodeR5 . \n" + " _:bnodeR5 rdf:first <urn:5> . \n" + " _:bnodeR5 rdf:rest _:bnodeR6 . \n" + " _:bnodeR6 rdf:first <urn:6> . \n" + " _:bnodeR6 rdf:rest _:bnodeR7 . \n" + " _:bnodeR7 rdf:first <urn:7> . \n" + " _:bnodeR7 rdf:rest _:bnodeR8 . \n" + " _:bnodeR8 rdf:first <urn:8> . \n" + " _:bnodeR8 rdf:rest _:bnodeR9 . \n" + " _:bnodeR9 rdf:first <urn:9> . \n" + " _:bnodeR9 rdf:rest _:bnodeR10 . \n" + " _:bnodeR10 rdf:first <urn:10> . \n" + " _:bnodeR10 rdf:rest _:bnodeR11 . \n" + " _:bnodeR11 rdf:first <urn:Jack> . \n" + " _:bnodeR11 rdf:rest _:bnodeR12 . \n" + " _:bnodeR12 rdf:first <urn:Queen> . \n" + " _:bnodeR12 rdf:rest _:bnodeR13 . \n" + " _:bnodeR13 rdf:first <urn:King> . \n" + " _:bnodeR13 rdf:rest rdf:nil . \n" + " <urn:Card> owl:intersectionOf (\n" + " [ owl:onProperty <urn:HasRank> ; owl:someValuesFrom <urn:Ranks> ]\n" + " [ owl:onProperty <urn:HasSuit> ; owl:someValuesFrom <urn:Suits> ]\n" + " ) . \n" + " <urn:HasRank> owl:range <urn:Ranks> . \n" + " <urn:HasSuit> owl:range <urn:Suits> . \n" + "}}";
    update = conn.prepareUpdate(QueryLanguage.SPARQL, ontology);
    update.execute();
    log.info("Running Inference-dependent Query without refreshing InferenceEngine");
    resultHandler.resetCount();
    final String inferQuery = "select distinct ?card { GRAPH <http://updated/test> {\n" + " ?card a <urn:Card> . \n" + " ?suit a <urn:Suits> . \n" + " ?card <urn:HasSuit> ?suit . \n" + "}}";
    tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, inferQuery);
    tupleQuery.evaluate(resultHandler);
    log.info("Result count : " + resultHandler.getCount());
    Validate.isTrue(resultHandler.getCount() == 0);
    log.info("Refreshing InferenceEngine");
    ((RdfCloudTripleStore) sail).getInferenceEngine().refreshGraph();
    log.info("Re-running Inference-dependent Query");
    resultHandler.resetCount();
    resultHandler = new CountingResultHandler();
    tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, inferQuery);
    tupleQuery.evaluate(resultHandler);
    log.info("Result count : " + resultHandler.getCount());
    Validate.isTrue(resultHandler.getCount() == 5);
}
Use of org.openrdf.query.TupleQuery in project incubator-rya by apache.
The class MongoRyaDirectExample, method testSomeValuesFromInference:
public static void testSomeValuesFromInference(final SailRepositoryConnection conn, final Sail sail) throws MalformedQueryException, RepositoryException, UpdateExecutionException, QueryEvaluationException, TupleQueryResultHandlerException, InferenceEngineException {
    final String lubm = "http://swat.cse.lehigh.edu/onto/univ-bench.owl#";
    log.info("Adding Data");
    String insert = "PREFIX lubm: <" + lubm + ">\n" + "INSERT DATA { GRAPH <http://updated/test> {\n" + " <urn:Department0> a lubm:Department; lubm:subOrganizationOf <urn:University0> .\n" + " <urn:ResearchGroup0> a lubm:ResearchGroup; lubm:subOrganizationOf <urn:Department0> .\n" + " <urn:Alice> lubm:headOf <urn:Department0> .\n" + " <urn:Bob> lubm:headOf <urn:ResearchGroup0> .\n" + " <urn:Carol> lubm:worksFor <urn:Department0> .\n" + "}}";
    Update update = conn.prepareUpdate(QueryLanguage.SPARQL, insert);
    update.execute();
    final String inferQuery = "select distinct ?x { GRAPH <http://updated/test> { ?x a <" + lubm + "Chair> }}";
    final String explicitQuery = "prefix lubm: <" + lubm + ">\n" + "select distinct ?x { GRAPH <http://updated/test> {\n" + " { ?x a lubm:Chair }\n" + " UNION\n" + " { ?x lubm:headOf [ a lubm:Department ] }\n" + "}}";
    log.info("Running Explicit Query");
    final CountingResultHandler resultHandler = new CountingResultHandler();
    TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, explicitQuery);
    tupleQuery.evaluate(resultHandler);
    log.info("Result count : " + resultHandler.getCount());
    Validate.isTrue(resultHandler.getCount() == 1);
    log.info("Running Inference-dependent Query");
    resultHandler.resetCount();
    tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, inferQuery);
    tupleQuery.evaluate(resultHandler);
    log.info("Result count : " + resultHandler.getCount());
    Validate.isTrue(resultHandler.getCount() == 0);
    log.info("Adding owl:someValuesFrom Schema");
    insert = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" + "PREFIX owl: <" + OWL.NAMESPACE + ">\n" + "PREFIX lubm: <" + lubm + ">\n" + "INSERT DATA\n" + "{ GRAPH <http://updated/test> {\n" + " lubm:Chair owl:equivalentClass [ owl:onProperty lubm:headOf ; owl:someValuesFrom lubm:Department ] ." + "}}";
    update = conn.prepareUpdate(QueryLanguage.SPARQL, insert);
    update.execute();
    log.info("Refreshing InferenceEngine");
    ((RdfCloudTripleStore) sail).getInferenceEngine().refreshGraph();
    log.info("Re-running Inference-dependent Query");
    resultHandler.resetCount();
    tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, inferQuery);
    tupleQuery.evaluate(resultHandler);
    log.info("Result count : " + resultHandler.getCount());
    Validate.isTrue(resultHandler.getCount() == 1);
}