Example usage of org.openrdf.query.Update in the Apache incubator-rya project, taken from the class MongoRyaDirectExample, method testPropertyChainInferenceAltRepresentation.
/**
 * Demonstrates owl:propertyChainAxiom inference using the RDF-list ("alternative")
 * representation of the chain. Inserts a four-generation maternal chain, defines
 * {@code <urn:greatMother>} as the chain inverse(isChildOf) followed by MotherOf,
 * refreshes the inference engine, and queries via the inferred property.
 *
 * @param conn repository connection used for SPARQL updates and queries
 * @param sail the underlying store; must be an {@code RdfCloudTripleStore} so its
 *             inference engine can be refreshed explicitly
 */
public static void testPropertyChainInferenceAltRepresentation(final SailRepositoryConnection conn, final Sail sail) throws MalformedQueryException, RepositoryException, UpdateExecutionException, QueryEvaluationException, TupleQueryResultHandlerException, InferenceEngineException {
    // Insert the maternal chain plus the inverse child-of links into the test graph.
    String query = //
    "INSERT DATA\n" + //
    "{ GRAPH <http://updated/test> {\n"
            + " <urn:jenGreatGranMother> <urn:Motherof> <urn:jenGranMother> . "
            + " <urn:jenGranMother> <urn:isChildOf> <urn:jenGreatGranMother> . "
            + " <urn:jenGranMother> <urn:Motherof> <urn:jenMother> . "
            + " <urn:jenMother> <urn:isChildOf> <urn:jenGranMother> . "
            + " <urn:jenMother> <urn:Motherof> <urn:jen> . "
            + " <urn:jen> <urn:isChildOf> <urn:jenMother> . "
            + " <urn:jen> <urn:Motherof> <urn:jenDaughter> . }}";
    log.info("Performing Query");
    Update update = conn.prepareUpdate(QueryLanguage.SPARQL, query);
    update.execute();

    // Baseline: grandmother pairs via an explicit two-step property path (no inference).
    query = "select ?p { GRAPH <http://updated/test> {?s <urn:Motherof>/<urn:Motherof> ?p}}";
    CountingResultHandler resultHandler = new CountingResultHandler();
    TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
    tupleQuery.evaluate(resultHandler);
    log.info("Result count : " + resultHandler.getCount());

    // Define the property chain via owl:propertyChainAxiom pointing at an RDF list:
    // first element is a bnode holding owl:inverseOf <urn:isChildOf>, second is <urn:MotherOf>.
    // NOTE(review): the chain member is <urn:MotherOf> while the data above uses
    // <urn:Motherof> (different case). IRIs are case-sensitive, so this may be a
    // latent typo or an intentional negative case — confirm against upstream intent.
    query = //
    "INSERT DATA\n" + //
    "{ GRAPH <http://updated/test> {\n"
            + " <urn:greatMother> owl:propertyChainAxiom <urn:12342> . "
            + " <urn:12342> <http://www.w3.org/1999/02/22-rdf-syntax-ns#first> _:node1atjakcvbx15023 . "
            + " _:node1atjakcvbx15023 <http://www.w3.org/2002/07/owl#inverseOf> <urn:isChildOf> . "
            + " <urn:12342> <http://www.w3.org/1999/02/22-rdf-syntax-ns#rest> _:node1atjakcvbx15123 . "
            + " _:node1atjakcvbx15123 <http://www.w3.org/1999/02/22-rdf-syntax-ns#rest> <http://www.w3.org/1999/02/22-rdf-syntax-ns#nil> . "
            + " _:node1atjakcvbx15123 <http://www.w3.org/1999/02/22-rdf-syntax-ns#first> <urn:MotherOf> . }}";
    update = conn.prepareUpdate(QueryLanguage.SPARQL, query);
    update.execute();

    // Rebuild the inference graph so the new chain takes effect immediately
    // rather than waiting for the periodic refresh.
    ((RdfCloudTripleStore) sail).getInferenceEngine().refreshGraph();

    // Query through the inferred <urn:greatMother> property. A fresh handler is
    // allocated here, so no resetCount() is needed beforehand (the previous
    // redundant reset was removed).
    query = "select ?x { GRAPH <http://updated/test> {<urn:jenGreatGranMother> <urn:greatMother> ?x}}";
    resultHandler = new CountingResultHandler();
    tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
    tupleQuery.evaluate(resultHandler);
    log.info("Result count : " + resultHandler.getCount());
}
Example usage of org.openrdf.query.Update in the Apache incubator-rya project, taken from the class RdfController, method performUpdate.
/**
 * Prepares and runs a SPARQL update on the given connection, optionally binding
 * the inference flag and propagating a column-visibility string to the Rya store.
 * Execution failures are reported to the servlet output stream (HTML-escaped)
 * and logged rather than propagated; the elapsed time is always logged.
 *
 * @param query SPARQL update text
 * @param conn  repository connection to execute against
 * @param os    response stream that receives a failure message, if any
 * @param infer optional "true"/"false" inference toggle; ignored when null/empty
 * @param vis   optional column-visibility value; ignored when null
 */
private void performUpdate(final String query, final SailRepositoryConnection conn, final ServletOutputStream os, final String infer, final String vis) throws RepositoryException, MalformedQueryException, IOException {
    final Update preparedUpdate = conn.prepareUpdate(QueryLanguage.SPARQL, query);

    // Only bind the inference flag when the caller supplied a non-empty value.
    if (infer != null && !infer.isEmpty()) {
        preparedUpdate.setBinding(RdfCloudTripleStoreConfiguration.CONF_INFER, VALUE_FACTORY.createLiteral(Boolean.parseBoolean(infer)));
    }

    // Push the requested column visibility down into the store configuration
    // when the underlying connection is a Rya triple-store connection.
    if (conn.getSailConnection() instanceof RdfCloudTripleStoreConnection && vis != null) {
        final RdfCloudTripleStoreConnection<?> storeConnection = (RdfCloudTripleStoreConnection<?>) conn.getSailConnection();
        storeConnection.getConf().set(RdfCloudTripleStoreConfiguration.CONF_CV, vis);
    }

    final long begin = System.currentTimeMillis();
    try {
        preparedUpdate.execute();
    } catch (final UpdateExecutionException e) {
        // Report the failure to the client (escaped for HTML) and log the cause.
        final String prefix = "Update could not be successfully completed for query: ";
        os.print(String.format(prefix + "%s\n\n", StringEscapeUtils.escapeHtml4(query)));
        log.error(prefix + LogUtils.clean(query), e);
    }
    log.info(String.format("Update Time = %.3f\n", (System.currentTimeMillis() - begin) / 1000.));
}
Example usage of org.openrdf.query.Update in the Apache incubator-rya project, taken from the class InferenceExamples, method testOneOfInference.
/**
 * Demonstrates owl:oneOf inference. Inserts five playing cards, verifies an
 * explicit VALUES-based query matches all five, then adds an ontology declaring
 * {@code <urn:Suits>} and {@code <urn:Ranks>} as enumerated classes. The
 * inference-dependent query returns 0 before the engine is refreshed and 5 after.
 *
 * @param conn repository connection used for SPARQL updates and queries
 * @param sail the underlying store; must be an {@code RdfCloudTripleStore} so its
 *             inference engine can be refreshed explicitly
 */
public static void testOneOfInference(final SailRepositoryConnection conn, final Sail sail) throws MalformedQueryException, RepositoryException, UpdateExecutionException, QueryEvaluationException, TupleQueryResultHandlerException, InferenceEngineException {
    log.info("Adding Data");
    // Five card instances, each with a rank and a suit.
    final String instances = "INSERT DATA" + "{ GRAPH <http://updated/test> {\n" + " <urn:FlopCard1> a <urn:Card> . \n" + " <urn:FlopCard1> <urn:HasRank> <urn:Ace> . \n" + " <urn:FlopCard1> <urn:HasSuit> <urn:Diamonds> . \n" + " <urn:FlopCard2> a <urn:Card> . \n" + " <urn:FlopCard2> <urn:HasRank> <urn:Ace> . \n" + " <urn:FlopCard2> <urn:HasSuit> <urn:Hearts> . \n" + " <urn:FlopCard3> a <urn:Card> . \n" + " <urn:FlopCard3> <urn:HasRank> <urn:King> . \n" + " <urn:FlopCard3> <urn:HasSuit> <urn:Spades> . \n" + " <urn:TurnCard> a <urn:Card> . \n" + " <urn:TurnCard> <urn:HasRank> <urn:10> . \n" + " <urn:TurnCard> <urn:HasSuit> <urn:Clubs> . \n" + " <urn:RiverCard> a <urn:Card> . \n" + " <urn:RiverCard> <urn:HasRank> <urn:Queen> . \n" + " <urn:RiverCard> <urn:HasSuit> <urn:Hearts> . \n" + "}}";
    Update update = conn.prepareUpdate(QueryLanguage.SPARQL, instances);
    update.execute();

    // Control query: enumerates the suits explicitly with VALUES, so it works
    // without any inference and must match all five cards.
    final String explicitQuery = "select distinct ?card { GRAPH <http://updated/test> {\n" + " ?card a <urn:Card> . \n" + " VALUES ?suit { <urn:Clubs> <urn:Diamonds> <urn:Hearts> <urn:Spades> } . \n" + " ?card <urn:HasSuit> ?suit . \n" + "}}";
    log.info("Running Explicit Query");
    CountingResultHandler resultHandler = new CountingResultHandler();
    TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, explicitQuery);
    tupleQuery.evaluate(resultHandler);
    log.info("Result count : " + resultHandler.getCount());
    Validate.isTrue(resultHandler.getCount() == 5);

    log.info("Adding owl:oneOf Schema");
    // ONTOLOGY - :Suits oneOf (:Clubs, :Diamonds, :Hearts, :Spades)
    // ONTOLOGY - :Ranks oneOf (:Ace, :1, :2, :3, :4, :5, :6, :7, :8, :9, :10, :Jack, :Queen, :King)
    final String ontology = "INSERT DATA { GRAPH <http://updated/test> {\n" + " <urn:Suits> owl:oneOf _:bnodeS1 . \n" + " _:bnodeS1 rdf:first <urn:Clubs> . \n" + " _:bnodeS1 rdf:rest _:bnodeS2 . \n" + " _:bnodeS2 rdf:first <urn:Diamonds> . \n" + " _:bnodeS2 rdf:rest _:bnodeS3 . \n" + " _:bnodeS3 rdf:first <urn:Hearts> . \n" + " _:bnodeS3 rdf:rest _:bnodeS4 . \n" + " _:bnodeS4 rdf:first <urn:Spades> . \n" + " _:bnodeS4 rdf:rest rdf:nil . \n" + " <urn:Ranks> owl:oneOf _:bnodeR1 . \n" + " _:bnodeR1 rdf:first <urn:Ace> . \n" + " _:bnodeR1 rdf:rest _:bnodeR2 . \n" + " _:bnodeR2 rdf:first <urn:2> . \n" + " _:bnodeR2 rdf:rest _:bnodeR3 . \n" + " _:bnodeR3 rdf:first <urn:3> . \n" + " _:bnodeR3 rdf:rest _:bnodeR4 . \n" + " _:bnodeR4 rdf:first <urn:4> . \n" + " _:bnodeR4 rdf:rest _:bnodeR5 . \n" + " _:bnodeR5 rdf:first <urn:5> . \n" + " _:bnodeR5 rdf:rest _:bnodeR6 . \n" + " _:bnodeR6 rdf:first <urn:6> . \n" + " _:bnodeR6 rdf:rest _:bnodeR7 . \n" + " _:bnodeR7 rdf:first <urn:7> . \n" + " _:bnodeR7 rdf:rest _:bnodeR8 . \n" + " _:bnodeR8 rdf:first <urn:8> . \n" + " _:bnodeR8 rdf:rest _:bnodeR9 . \n" + " _:bnodeR9 rdf:first <urn:9> . \n" + " _:bnodeR9 rdf:rest _:bnodeR10 . \n" + " _:bnodeR10 rdf:first <urn:10> . \n" + " _:bnodeR10 rdf:rest _:bnodeR11 . \n" + " _:bnodeR11 rdf:first <urn:Jack> . \n" + " _:bnodeR11 rdf:rest _:bnodeR12 . \n" + " _:bnodeR12 rdf:first <urn:Queen> . \n" + " _:bnodeR12 rdf:rest _:bnodeR13 . \n" + " _:bnodeR13 rdf:first <urn:King> . \n" + " _:bnodeR13 rdf:rest rdf:nil . \n" + " <urn:Card> owl:intersectionOf (\n" + " [ owl:onProperty <urn:HasRank> ; owl:someValuesFrom <urn:Ranks> ]\n" + " [ owl:onProperty <urn:HasSuit> ; owl:someValuesFrom <urn:Suits> ]\n" + " ) . \n" + " <urn:HasRank> owl:range <urn:Ranks> . \n" + " <urn:HasSuit> owl:range <urn:Suits> . \n" + "}}";
    update = conn.prepareUpdate(QueryLanguage.SPARQL, ontology);
    update.execute();

    // Inference-dependent query: relies on each suit being typed <urn:Suits>,
    // which only happens via oneOf inference. Before the engine is refreshed it
    // must return nothing.
    log.info("Running Inference-dependent Query without refreshing InferenceEngine");
    resultHandler.resetCount();
    final String inferQuery = "select distinct ?card { GRAPH <http://updated/test> {\n" + " ?card a <urn:Card> . \n" + " ?suit a <urn:Suits> . \n" + " ?card <urn:HasSuit> ?suit . \n" + "}}";
    tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, inferQuery);
    tupleQuery.evaluate(resultHandler);
    log.info("Result count : " + resultHandler.getCount());
    Validate.isTrue(resultHandler.getCount() == 0);

    log.info("Refreshing InferenceEngine");
    ((RdfCloudTripleStore) sail).getInferenceEngine().refreshGraph();

    log.info("Re-running Inference-dependent Query");
    // Reuse the handler after resetting its count (the previous code both reset
    // the counter and reallocated the handler; one of the two was redundant).
    resultHandler.resetCount();
    tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, inferQuery);
    tupleQuery.evaluate(resultHandler);
    log.info("Result count : " + resultHandler.getCount());
    Validate.isTrue(resultHandler.getCount() == 5);
}
Example usage of org.openrdf.query.Update in the Apache incubator-rya project, taken from the class InferenceExamples, method testAllValuesFromInference.
/**
 * Demonstrates owl:allValuesFrom inference. Inserts one explicit Person plus
 * hasParent links, checks the explicit UNION query sees 2 people while the
 * plain type query sees 1, then adds an allValuesFrom restriction on
 * {@code <urn:hasParent>}, refreshes the engine, and checks the plain type
 * query now also returns 2.
 *
 * @param conn repository connection used for SPARQL updates and queries
 * @param sail the underlying store; must be an {@code RdfCloudTripleStore} so
 *             its inference engine can be refreshed explicitly
 */
public static void testAllValuesFromInference(final SailRepositoryConnection conn, final Sail sail) throws MalformedQueryException, RepositoryException, UpdateExecutionException, QueryEvaluationException, TupleQueryResultHandlerException, InferenceEngineException {
    log.info("Adding Data");
    // One explicit Person (Alice) and two hasParent edges; Bob is only a Person
    // by virtue of the restriction added further down.
    String sparql = "INSERT DATA\n" + "{ GRAPH <http://updated/test> {\n" + " <urn:Alice> a <urn:Person> .\n" + " <urn:Alice> <urn:hasParent> <urn:Bob> .\n" + " <urn:Carol> <urn:hasParent> <urn:Dan> .\n" + "}}";
    Update updateOp = conn.prepareUpdate(QueryLanguage.SPARQL, sparql);
    updateOp.execute();

    // Plain type query (needs inference to see Bob) and the explicit UNION
    // query that encodes the same logic by hand.
    final String inferQuery = "select distinct ?x { GRAPH <http://updated/test> { ?x a <urn:Person> }}";
    final String explicitQuery = "select distinct ?x { GRAPH <http://updated/test> {\n" + " { ?x a <urn:Person> }\n" + " UNION {\n" + " ?y a <urn:Person> .\n" + " ?y <urn:hasParent> ?x .\n" + " }\n" + "}}";

    log.info("Running Explicit Query");
    final CountingResultHandler handler = new CountingResultHandler();
    conn.prepareTupleQuery(QueryLanguage.SPARQL, explicitQuery).evaluate(handler);
    log.info("Result count : " + handler.getCount());
    Validate.isTrue(handler.getCount() == 2);

    log.info("Running Inference-dependent Query");
    handler.resetCount();
    conn.prepareTupleQuery(QueryLanguage.SPARQL, inferQuery).evaluate(handler);
    log.info("Result count : " + handler.getCount());
    Validate.isTrue(handler.getCount() == 1);

    log.info("Adding owl:allValuesFrom Schema");
    // Every value of hasParent on a Person is itself a Person.
    sparql = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" + "PREFIX owl: <" + OWL.NAMESPACE + ">\n" + "INSERT DATA\n" + "{ GRAPH <http://updated/test> {\n" + " <urn:Person> rdfs:subClassOf [ owl:onProperty <urn:hasParent> ; owl:allValuesFrom <urn:Person> ] ." + "}}";
    updateOp = conn.prepareUpdate(QueryLanguage.SPARQL, sparql);
    updateOp.execute();

    log.info("Refreshing InferenceEngine");
    ((RdfCloudTripleStore) sail).getInferenceEngine().refreshGraph();

    log.info("Re-running Inference-dependent Query");
    handler.resetCount();
    conn.prepareTupleQuery(QueryLanguage.SPARQL, inferQuery).evaluate(handler);
    log.info("Result count : " + handler.getCount());
    Validate.isTrue(handler.getCount() == 2);
}
Example usage of org.openrdf.query.Update in the Apache incubator-rya project, taken from the class InferenceExamples, method testInfer.
/**
 * Demonstrates basic rdfs:subClassOf inference. Inserts an instance of
 * {@code <urn:type1>} and declares {@code <urn:type1>} a subclass of
 * {@code <urn:superclass>}, refreshes the inference engine, and verifies the
 * instance is found when querying for the superclass.
 *
 * @param conn repository connection used for SPARQL updates and queries
 * @param sail the underlying store; must be an {@code RdfCloudTripleStore} so
 *             its inference engine can be refreshed explicitly
 */
public static void testInfer(final SailRepositoryConnection conn, final Sail sail) throws MalformedQueryException, RepositoryException, UpdateExecutionException, QueryEvaluationException, TupleQueryResultHandlerException, InferenceEngineException {
    // Add data: one typed instance plus the subclass axiom it should be lifted through.
    String query = //
    "INSERT DATA\n" + //
    "{ \n"
            + " <http://acme.com/people/Mike> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <urn:type1>. "
            + " <urn:type1> <http://www.w3.org/2000/01/rdf-schema#subClassOf> <urn:superclass>. }";
    log.info("Performing Query");
    final Update update = conn.prepareUpdate(QueryLanguage.SPARQL, query);
    update.execute();

    // refresh the graph for inferencing (otherwise there is a five minute wait)
    ((RdfCloudTripleStore) sail).getInferenceEngine().refreshGraph();

    // Subclass inference should type Mike as <urn:superclass>, yielding exactly one row.
    query = "select ?s { ?s <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <urn:superclass> . }";
    final CountingResultHandler resultHandler = new CountingResultHandler();
    final TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
    tupleQuery.evaluate(resultHandler);
    log.info("Result count : " + resultHandler.getCount());
    Validate.isTrue(resultHandler.getCount() == 1);
    // Removed a trailing resetCount() on the local handler: the method ends here,
    // so resetting the counter had no observable effect.
}
Aggregations