Use of org.openrdf.repository.RepositoryConnection in project incubator-rya by apache.
The class RdfCloudTripleStoreConnectionTest, method testOSPObjRange.
public void testOSPObjRange() throws Exception {
    RepositoryConnection conn = repository.getConnection();
    URI loadPerc = vf.createURI(litdupsNS, "loadPerc");
    Literal six = vf.createLiteral("6");
    Literal sev = vf.createLiteral("7");
    Literal ten = vf.createLiteral("10");
    conn.add(cpu, loadPerc, six);
    conn.add(cpu, loadPerc, sev);
    conn.add(cpu, loadPerc, ten);
    conn.commit();
    // Range filter on the object position: only the literals '6' and '7' fall in the '6'..'8' range.
    String query = "PREFIX org.apache: <" + NAMESPACE + ">\n"
            + "select * where {" + "?s ?p ?o.\n"
            + "FILTER(org.apache:range(?o, '6', '8'))." + "}";
    TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
    CountTupleHandler cth = new CountTupleHandler();
    tupleQuery.evaluate(cth);
    conn.close();
    assertEquals(2, cth.getCount());
}
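CountTupleHandler is used throughout these snippets, but its definition is not shown. A minimal sketch of what such a handler could look like, assuming it extends the Sesame TupleQueryResultHandlerBase and simply counts solutions; the body below is an assumption for illustration, not the project's actual class:

import org.openrdf.query.BindingSet;
import org.openrdf.query.TupleQueryResultHandlerBase;

// Hypothetical sketch of a solution-counting handler; the real class in the project may differ.
public class CountTupleHandler extends TupleQueryResultHandlerBase {

    private int count = 0;

    @Override
    public void handleSolution(BindingSet bindingSet) {
        count++;  // one call per solution produced by TupleQuery.evaluate(...)
    }

    public int getCount() {
        return count;
    }
}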
Use of org.openrdf.repository.RepositoryConnection in project incubator-rya by apache.
The class RdfCloudTripleStoreConnectionTest, method testInverseOf.
public void testInverseOf() throws Exception {
    if (internalInferenceEngine == null) {
        // inference is not supported by this store configuration; skip the test
        return;
    }
    RepositoryConnection conn = repository.getConnection();
    conn.add(new StatementImpl(vf.createURI(litdupsNS, "degreeFrom"), OWL.INVERSEOF, vf.createURI(litdupsNS, "hasAlumnus")));
    conn.add(new StatementImpl(vf.createURI(litdupsNS, "UgradA"), vf.createURI(litdupsNS, "degreeFrom"), vf.createURI(litdupsNS, "Harvard")));
    conn.add(new StatementImpl(vf.createURI(litdupsNS, "GradB"), vf.createURI(litdupsNS, "degreeFrom"), vf.createURI(litdupsNS, "Harvard")));
    conn.add(new StatementImpl(vf.createURI(litdupsNS, "Harvard"), vf.createURI(litdupsNS, "hasAlumnus"), vf.createURI(litdupsNS, "AlumC")));
    conn.commit();
    conn.close();
    internalInferenceEngine.refreshGraph();
    conn = repository.getConnection();
    // hasAlumnus is inferred from degreeFrom via owl:inverseOf: UgradA and GradB, plus the explicit AlumC
    String query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n"
            + "PREFIX rdf: <" + RDF.NAMESPACE + ">\n"
            + "PREFIX lit: <" + litdupsNS + ">\n"
            + "select * where {lit:Harvard lit:hasAlumnus ?s.}";
    TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
    CountTupleHandler tupleHandler = new CountTupleHandler();
    tupleQuery.evaluate(tupleHandler);
    assertEquals(3, tupleHandler.getCount());
    // the inverse direction: degreeFrom is inferred from hasAlumnus, again three matches
    query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n"
            + "PREFIX rdf: <" + RDF.NAMESPACE + ">\n"
            + "PREFIX lit: <" + litdupsNS + ">\n"
            + "select * where {?s lit:degreeFrom lit:Harvard.}";
    tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
    tupleHandler = new CountTupleHandler();
    tupleQuery.evaluate(tupleHandler);
    assertEquals(3, tupleHandler.getCount());
    conn.close();
}
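For comparison, the three hasAlumnus results could also be obtained without the inference engine by unioning the explicit hasAlumnus triples with the inverse degreeFrom pattern. The fragment below is an illustrative sketch, not part of the original test; it reuses conn from the snippet above and would have to run before the final conn.close():

// Illustrative only: what the owl:inverseOf inference effectively answers for the first query.
String explicitQuery = "PREFIX lit: <" + litdupsNS + ">\n"
        + "select * where { { lit:Harvard lit:hasAlumnus ?s. } UNION { ?s lit:degreeFrom lit:Harvard. } }";
TupleQuery explicitTupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, explicitQuery);
CountTupleHandler explicitHandler = new CountTupleHandler();
explicitTupleQuery.evaluate(explicitHandler);
assertEquals(3, explicitHandler.getCount());  // AlumC explicitly, UgradA and GradB via the inverse pattern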
Use of org.openrdf.repository.RepositoryConnection in project incubator-rya by apache.
The class RdfCloudTripleStoreConnectionTest, method testSPOObjRange.
public void testSPOObjRange() throws Exception {
    RepositoryConnection conn = repository.getConnection();
    URI loadPerc = vf.createURI(litdupsNS, "loadPerc");
    Literal six = vf.createLiteral("6");
    Literal sev = vf.createLiteral("7");
    Literal ten = vf.createLiteral("10");
    conn.add(cpu, loadPerc, six);
    conn.add(cpu, loadPerc, sev);
    conn.add(cpu, loadPerc, ten);
    conn.commit();
    // Same range filter on the object, but with the subject and predicate bound explicitly; again only '6' and '7' match.
    String query = "PREFIX org.apache: <" + NAMESPACE + ">\n"
            + "select * where {" + "<" + cpu.stringValue() + "> <" + loadPerc.stringValue() + "> ?o.\n"
            + "FILTER(org.apache:range(?o, '6', '8'))." + "}";
    TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
    CountTupleHandler cth = new CountTupleHandler();
    tupleQuery.evaluate(cth);
    conn.close();
    assertEquals(2, cth.getCount());
}
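To inspect which object values actually pass the filter rather than just counting them, a handler along these lines could be used before conn.close() in the snippet above. This is an illustrative sketch reusing conn and query from that snippet, not part of the original test; it assumes java.util.List, java.util.ArrayList, and org.openrdf.model.Value are imported:

// Illustrative only: collect the ?o bindings that pass the range filter ('6' and '7' are expected).
final List<Value> inRange = new ArrayList<Value>();
TupleQuery inspect = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
inspect.evaluate(new TupleQueryResultHandlerBase() {
    @Override
    public void handleSolution(BindingSet bindingSet) {
        inRange.add(bindingSet.getValue("o"));
    }
});
assertEquals(2, inRange.size());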
Use of org.openrdf.repository.RepositoryConnection in project incubator-rya by apache.
The class RdfCloudTripleStoreConnectionTest, method testSameAs.
public void testSameAs() throws Exception {
    if (internalInferenceEngine == null) {
        // inference is not supported by this store configuration; skip the test
        return;
    }
    RepositoryConnection conn = repository.getConnection();
    conn.add(new StatementImpl(vf.createURI(litdupsNS, "StudentA1"), OWL.SAMEAS, vf.createURI(litdupsNS, "StudentA2")));
    conn.add(new StatementImpl(vf.createURI(litdupsNS, "StudentA2"), OWL.SAMEAS, vf.createURI(litdupsNS, "StudentA3")));
    conn.add(new StatementImpl(vf.createURI(litdupsNS, "StudentB1"), OWL.SAMEAS, vf.createURI(litdupsNS, "StudentB2")));
    conn.add(new StatementImpl(vf.createURI(litdupsNS, "StudentB2"), OWL.SAMEAS, vf.createURI(litdupsNS, "StudentB3")));
    conn.add(new StatementImpl(vf.createURI(litdupsNS, "StudentA1"), vf.createURI(litdupsNS, "pred1"), vf.createURI(litdupsNS, "StudentB3")));
    conn.add(new StatementImpl(vf.createURI(litdupsNS, "StudentB1"), vf.createURI(litdupsNS, "pred2"), vf.createURI(litdupsNS, "StudentA3")));
    conn.commit();
    conn.close();
    internalInferenceEngine.refreshGraph();
    conn = repository.getConnection();
    // query that finds sameAs matches with the object and predicate specified
    String query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n"
            + "PREFIX rdf: <" + RDF.NAMESPACE + ">\n"
            + "PREFIX lit: <" + litdupsNS + ">\n"
            + "select ?s where {?s lit:pred1 lit:StudentB2.}";
    TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
    CountTupleHandler tupleHandler = new CountTupleHandler();
    tupleQuery.evaluate(tupleHandler);
    assertEquals(1, tupleHandler.getCount());
    // query that finds sameAs matches with only the object specified
    query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n"
            + "PREFIX rdf: <" + RDF.NAMESPACE + ">\n"
            + "PREFIX lit: <" + litdupsNS + ">\n"
            + "select ?s where {?s ?p lit:StudentB2.}";
    tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
    tupleHandler = new CountTupleHandler();
    tupleQuery.evaluate(tupleHandler);
    // three matches, including the sameAs assertions themselves
    assertEquals(3, tupleHandler.getCount());
    // query that finds sameAs matches with the subject and predicate specified
    query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n"
            + "PREFIX rdf: <" + RDF.NAMESPACE + ">\n"
            + "PREFIX lit: <" + litdupsNS + ">\n"
            + "select ?s where {lit:StudentB2 lit:pred2 ?s.}";
    tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
    tupleHandler = new CountTupleHandler();
    tupleQuery.evaluate(tupleHandler);
    // one match, counting sameAs-derived statements
    assertEquals(1, tupleHandler.getCount());
    // query that finds sameAs matches with only the subject specified
    query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n"
            + "PREFIX rdf: <" + RDF.NAMESPACE + ">\n"
            + "PREFIX lit: <" + litdupsNS + ">\n"
            + "select ?s where {lit:StudentB2 ?p ?s.}";
    tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
    tupleHandler = new CountTupleHandler();
    tupleQuery.evaluate(tupleHandler);
    // three matches, including the sameAs assertions themselves
    assertEquals(3, tupleHandler.getCount());
    // query that finds sameAs matches with the subject and object specified
    query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n"
            + "PREFIX rdf: <" + RDF.NAMESPACE + ">\n"
            + "PREFIX lit: <" + litdupsNS + ">\n"
            + "select ?s where {lit:StudentB2 ?s lit:StudentA2.}";
    tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
    tupleHandler = new CountTupleHandler();
    tupleQuery.evaluate(tupleHandler);
    assertEquals(1, tupleHandler.getCount());
    conn.close();
}
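The owl:sameAs closure makes StudentB1, StudentB2, and StudentB3 interchangeable (and likewise StudentA1 through StudentA3). As an illustrative sketch that is not part of the original test, the first assertion could be reproduced without inference by enumerating the equivalents of StudentB2 by hand; the fragment reuses conn from the snippet above and would have to run before the final conn.close():

// Illustrative only: enumerate the sameAs equivalents of StudentB2 explicitly.
String explicitQuery = "PREFIX lit: <" + litdupsNS + ">\n"
        + "select ?s where { { ?s lit:pred1 lit:StudentB1. } UNION { ?s lit:pred1 lit:StudentB2. } UNION { ?s lit:pred1 lit:StudentB3. } }";
TupleQuery explicitTupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, explicitQuery);
CountTupleHandler explicitHandler = new CountTupleHandler();
explicitTupleQuery.evaluate(explicitHandler);
assertEquals(1, explicitHandler.getCount());  // only StudentA1 lit:pred1 StudentB3 matches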
Use of org.openrdf.repository.RepositoryConnection in project incubator-rya by apache.
The class RdfCloudTripleStoreConnectionTest, method testEquivPropOf.
public void testEquivPropOf() throws Exception {
    if (internalInferenceEngine == null) {
        // inference is not supported by this store configuration; skip the test
        return;
    }
    RepositoryConnection conn = repository.getConnection();
    conn.add(new StatementImpl(vf.createURI(litdupsNS, "undergradDegreeFrom"), OWL.EQUIVALENTPROPERTY, vf.createURI(litdupsNS, "ugradDegreeFrom")));
    conn.add(new StatementImpl(vf.createURI(litdupsNS, "UgradA"), vf.createURI(litdupsNS, "undergradDegreeFrom"), vf.createURI(litdupsNS, "Harvard")));
    conn.add(new StatementImpl(vf.createURI(litdupsNS, "GradB"), vf.createURI(litdupsNS, "ugradDegreeFrom"), vf.createURI(litdupsNS, "Harvard")));
    // GradC uses ugraduateDegreeFrom, which is not declared equivalent to ugradDegreeFrom
    conn.add(new StatementImpl(vf.createURI(litdupsNS, "GradC"), vf.createURI(litdupsNS, "ugraduateDegreeFrom"), vf.createURI(litdupsNS, "Harvard")));
    conn.commit();
    conn.close();
    internalInferenceEngine.refreshGraph();
    conn = repository.getConnection();
    String query = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n"
            + "PREFIX rdf: <" + RDF.NAMESPACE + ">\n"
            + "PREFIX lit: <" + litdupsNS + ">\n"
            + "select * where {?s lit:ugradDegreeFrom lit:Harvard.}";
    TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
    CountTupleHandler tupleHandler = new CountTupleHandler();
    tupleQuery.evaluate(tupleHandler);
    // UgradA (via owl:equivalentProperty) and GradB (direct); GradC is not counted
    assertEquals(2, tupleHandler.getCount());
    conn.close();
}
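The same two results could be obtained without the inference engine by unioning the two equivalent properties explicitly. The fragment below is an illustrative sketch, not part of the original test; it reuses conn from the snippet above and would have to run before the final conn.close():

// Illustrative only: what the owl:equivalentProperty inference effectively answers here.
String explicitQuery = "PREFIX lit: <" + litdupsNS + ">\n"
        + "select * where { { ?s lit:ugradDegreeFrom lit:Harvard. } UNION { ?s lit:undergradDegreeFrom lit:Harvard. } }";
TupleQuery explicitTupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, explicitQuery);
CountTupleHandler explicitHandler = new CountTupleHandler();
explicitTupleQuery.evaluate(explicitHandler);
assertEquals(2, explicitHandler.getCount());  // UgradA and GradB; GradC uses an unrelated property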