use of org.eclipse.rdf4j.repository.sail.SailRepositoryConnection in project incubator-rya by apache.
In the class MongoIndexerDeleteIT, the method deleteTest:
@Test
public void deleteTest() throws Exception {
    final Sail sail = GeoRyaSailFactory.getInstance(conf);
    final SailRepositoryConnection conn = new SailRepository(sail).getConnection();
    try {
        populateRya(conn);
        final MongoClient client = conf.getMongoClient();

        // The extra 1 is from the person type defined in freetext.
        assertEquals(8, client.getDatabase(conf.getMongoDBName()).getCollection(conf.getTriplesCollectionName()).count());
        assertEquals(4, client.getDatabase(conf.getMongoDBName()).getCollection("ryatest_geo").count());
        assertEquals(1, client.getDatabase(conf.getMongoDBName()).getCollection("ryatest_temporal").count());
        assertEquals(2, client.getDatabase(conf.getMongoDBName()).getCollection("ryatest_freetext").count());

        // Free text -- remove one statement from many.
        String delete = "DELETE DATA \n"
                + "{\n"
                + " <urn:people> <http://www.w3.org/2000/01/rdf-schema#label> \"Alice Palace Hose\" "
                + "}";
        Update update = conn.prepareUpdate(QueryLanguage.SPARQL, delete);
        update.execute();

        // Temporal -- remove one statement from one.
        delete = "DELETE DATA \n"
                + "{\n"
                + " <foo:time> <Property:atTime> \"0001-02-03T04:05:06Z\" "
                + "}";
        update = conn.prepareUpdate(QueryLanguage.SPARQL, delete);
        update.execute();

        // Geo -- remove many statements from many.
        delete = "PREFIX geo: <http://www.opengis.net/ont/geosparql#>\n"
                + "PREFIX geof: <http://www.opengis.net/def/function/geosparql/>\n"
                + "DELETE \n"
                + "{\n"
                + " <urn:geo> geo:asWKT ?point \n"
                + "}"
                + "WHERE { \n"
                + " <urn:geo> geo:asWKT ?point .\n"
                + " FILTER(geof:sfWithin(?point, \"POLYGON((0 0, 2 0, 2 1, 0 1, 0 0))\"^^geo:wktLiteral))"
                + "}";
        update = conn.prepareUpdate(QueryLanguage.SPARQL, delete);
        update.execute();

        assertEquals(2, client.getDatabase(conf.getMongoDBName()).getCollection("ryatest_geo").count());
        assertEquals(0, client.getDatabase(conf.getMongoDBName()).getCollection("ryatest_temporal").count());
        assertEquals(1, client.getDatabase(conf.getMongoDBName()).getCollection("ryatest_freetext").count());
        assertEquals(4, client.getDatabase(conf.getMongoDBName()).getCollection(conf.getTriplesCollectionName()).count());
    } finally {
        conn.close();
        sail.shutDown();
    }
}
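Every delete above follows the same RDF4J update pattern: prepare an Update against the connection, then execute it. Below is a minimal sketch of that pattern in isolation, assuming the Sail has already been configured elsewhere (the class and method names in the sketch are illustrative):

import org.eclipse.rdf4j.query.QueryLanguage;
import org.eclipse.rdf4j.query.Update;
import org.eclipse.rdf4j.repository.sail.SailRepository;
import org.eclipse.rdf4j.repository.sail.SailRepositoryConnection;
import org.eclipse.rdf4j.sail.Sail;

public final class SparqlDeleteSketch {
    // Executes a SPARQL DELETE DATA update through a SailRepositoryConnection.
    // The Sail is assumed to be built elsewhere, e.g. by a factory such as
    // GeoRyaSailFactory in the test above.
    public static void deleteLabel(final Sail sail) {
        final SailRepositoryConnection conn = new SailRepository(sail).getConnection();
        try {
            final Update update = conn.prepareUpdate(QueryLanguage.SPARQL,
                    "DELETE DATA { <urn:people> <http://www.w3.org/2000/01/rdf-schema#label> \"Alice Palace Hose\" }");
            update.execute();
        } finally {
            conn.close();
        }
    }
}

The test relies on the Rya Sail keeping the indexer-backed collections (geo, temporal, free text) in sync with the core triples collection, which is why the document counts are asserted again after the updates run.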
use of org.eclipse.rdf4j.repository.sail.SailRepositoryConnection in project incubator-rya by apache.
In the class RdfController, the method loadRdf:
@RequestMapping(value = "/loadrdf", method = RequestMethod.POST)
public void loadRdf(
        @RequestParam(required = false) final String format,
        @RequestParam(value = RdfCloudTripleStoreConfiguration.CONF_CV, required = false) final String cv,
        @RequestParam(required = false) final String graph,
        @RequestBody final String body,
        final HttpServletResponse response) throws RepositoryException, IOException, RDFParseException {
    RDFFormat format_r = RDFFormat.RDFXML;
    if (format != null) {
        format_r = RdfFormatUtils.getRdfFormatFromName(format);
        if (format_r == null) {
            throw new RuntimeException("RDFFormat[" + format + "] not found");
        }
    }

    // Add the named graph as a context, if one was specified.
    final List<Resource> contextList = new ArrayList<>();
    if (graph != null) {
        contextList.add(VALUE_FACTORY.createIRI(graph));
    }

    SailRepositoryConnection conn = null;
    try {
        conn = repository.getConnection();
        // Apply the column visibility to the store's configuration, if one was specified.
        if (conn.getSailConnection() instanceof RdfCloudTripleStoreConnection && cv != null) {
            final RdfCloudTripleStoreConnection<?> sailConnection = (RdfCloudTripleStoreConnection<?>) conn.getSailConnection();
            sailConnection.getConf().set(RdfCloudTripleStoreConfiguration.CONF_CV, cv);
        }
        conn.add(new StringReader(body), "", format_r, contextList.toArray(new Resource[contextList.size()]));
        conn.commit();
    } finally {
        if (conn != null) {
            conn.close();
        }
    }
}
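A caller POSTs the serialized RDF as the request body and names its format in the format query parameter. A minimal client sketch, assuming the controller is reachable at http://localhost:8080/loadrdf (the base URL and the one-triple Turtle payload are illustrative) and that the format name matches an RDF4J format name such as Turtle:

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public final class LoadRdfClientSketch {
    public static void main(final String[] args) throws Exception {
        // One triple serialized as Turtle; the endpoint parses the body
        // using the RDFFormat resolved from the "format" query parameter.
        final String turtle = "<urn:subject> <urn:predicate> <urn:object> .";
        final HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create("http://localhost:8080/loadrdf?format=Turtle"))
                .POST(HttpRequest.BodyPublishers.ofString(turtle))
                .build();
        final HttpResponse<String> response = HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofString());
        System.out.println("HTTP " + response.statusCode());
    }
}

Omitting format falls back to RDF/XML, and an unrecognized format name fails fast with a RuntimeException, as the controller method above shows.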
use of org.eclipse.rdf4j.repository.sail.SailRepositoryConnection in project incubator-rya by apache.
In the class PropertyChainTest, the method testGraphConfiguration:
@Test
public void testGraphConfiguration() throws Exception {
    // Build a connection.
    RdfCloudTripleStore store = new RdfCloudTripleStore();
    store.setConf(conf);
    store.setRyaDAO(ryaDAO);

    InferenceEngine inferenceEngine = new InferenceEngine();
    inferenceEngine.setRyaDAO(ryaDAO);
    store.setInferenceEngine(inferenceEngine);
    inferenceEngine.refreshGraph();
    store.initialize();

    SailRepository repository = new SailRepository(store);
    SailRepositoryConnection conn = repository.getConnection();

    // owl:propertyChainAxiom points at an RDF list whose members are the
    // chain's properties, in order. The first member here is an anonymous
    // property declared to be the inverse of urn:isChildOf.
    String query = "INSERT DATA\n"
            + "{ GRAPH <http://updated/test> {\n"
            + " <urn:greatMother> owl:propertyChainAxiom <urn:12342> . "
            + " <urn:12342> <http://www.w3.org/1999/02/22-rdf-syntax-ns#first> _:node1atjakcvbx15023 . "
            + " _:node1atjakcvbx15023 <http://www.w3.org/2002/07/owl#inverseOf> <urn:isChildOf> . "
            + " <urn:12342> <http://www.w3.org/1999/02/22-rdf-syntax-ns#rest> _:node1atjakcvbx15123 . "
            + " _:node1atjakcvbx15123 <http://www.w3.org/1999/02/22-rdf-syntax-ns#rest> <http://www.w3.org/1999/02/22-rdf-syntax-ns#nil> . "
            + " _:node1atjakcvbx15123 <http://www.w3.org/1999/02/22-rdf-syntax-ns#first> <urn:MotherOf> . }}";
    Update update = conn.prepareUpdate(QueryLanguage.SPARQL, query);
    update.execute();
    inferenceEngine.refreshGraph();

    List<IRI> chain = inferenceEngine.getPropertyChain(VF.createIRI("urn:greatMother"));
    Assert.assertEquals(2, chain.size());
    Assert.assertEquals(new InverseIRI(VF.createIRI("urn:isChildOf")), chain.get(0));
    Assert.assertEquals(VF.createIRI("urn:MotherOf"), chain.get(1));
}
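The INSERT DATA above hand-writes the rdf:first/rdf:rest list that owl:propertyChainAxiom points at. The same structure can be built with the RDF4J model API; a minimal sketch, assuming an open RepositoryConnection (only the subjects and the list layout come from the test; the helper class itself is illustrative):

import org.eclipse.rdf4j.model.BNode;
import org.eclipse.rdf4j.model.IRI;
import org.eclipse.rdf4j.model.ValueFactory;
import org.eclipse.rdf4j.model.impl.SimpleValueFactory;
import org.eclipse.rdf4j.model.vocabulary.RDF;
import org.eclipse.rdf4j.repository.RepositoryConnection;

public final class PropertyChainSketch {
    // Encodes: urn:greatMother owl:propertyChainAxiom
    //          ( [ owl:inverseOf urn:isChildOf ] urn:MotherOf )
    public static void addChainAxiom(final RepositoryConnection conn) {
        final ValueFactory vf = SimpleValueFactory.getInstance();
        final IRI chainAxiom = vf.createIRI("http://www.w3.org/2002/07/owl#propertyChainAxiom");
        final IRI inverseOf = vf.createIRI("http://www.w3.org/2002/07/owl#inverseOf");

        final IRI listHead = vf.createIRI("urn:12342");
        final BNode inverseMember = vf.createBNode();
        final BNode restNode = vf.createBNode();

        conn.add(vf.createIRI("urn:greatMother"), chainAxiom, listHead);
        conn.add(listHead, RDF.FIRST, inverseMember);
        conn.add(inverseMember, inverseOf, vf.createIRI("urn:isChildOf"));
        conn.add(listHead, RDF.REST, restNode);
        conn.add(restNode, RDF.FIRST, vf.createIRI("urn:MotherOf"));
        conn.add(restNode, RDF.REST, RDF.NIL);
    }
}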
use of org.eclipse.rdf4j.repository.sail.SailRepositoryConnection in project incubator-rya by apache.
In the class PcjDocumentsIntegrationTest, the method populatePcj:
/**
* Ensure when results are already stored in Rya, that we are able to populate
* the PCJ table for a new SPARQL query using those results.
* <p>
* The method being tested is: {@link PcjTables#populatePcj(Connector, String, RepositoryConnection, String)}
*/
@Test
public void populatePcj() throws Exception {
    final MongoDBRyaDAO dao = new MongoDBRyaDAO();
    dao.setConf(new StatefulMongoDBRdfConfiguration(conf, getMongoClient()));
    dao.init();

    final RdfCloudTripleStore<StatefulMongoDBRdfConfiguration> ryaStore = new RdfCloudTripleStore<>();
    ryaStore.setRyaDAO(dao);
    ryaStore.initialize();

    final SailRepositoryConnection ryaConn = new RyaSailRepository(ryaStore).getConnection();
    ryaConn.begin();
    try {
        // Load some triples into Rya.
        final Set<Statement> triples = new HashSet<>();
        triples.add(VF.createStatement(VF.createIRI("http://Alice"), VF.createIRI("http://hasAge"), VF.createLiteral(BigInteger.valueOf(14))));
        triples.add(VF.createStatement(VF.createIRI("http://Alice"), VF.createIRI("http://playsSport"), VF.createLiteral("Soccer")));
        triples.add(VF.createStatement(VF.createIRI("http://Bob"), VF.createIRI("http://hasAge"), VF.createLiteral(BigInteger.valueOf(16))));
        triples.add(VF.createStatement(VF.createIRI("http://Bob"), VF.createIRI("http://playsSport"), VF.createLiteral("Soccer")));
        triples.add(VF.createStatement(VF.createIRI("http://Charlie"), VF.createIRI("http://hasAge"), VF.createLiteral(BigInteger.valueOf(12))));
        triples.add(VF.createStatement(VF.createIRI("http://Charlie"), VF.createIRI("http://playsSport"), VF.createLiteral("Soccer")));
        triples.add(VF.createStatement(VF.createIRI("http://Eve"), VF.createIRI("http://hasAge"), VF.createLiteral(BigInteger.valueOf(43))));
        triples.add(VF.createStatement(VF.createIRI("http://Eve"), VF.createIRI("http://playsSport"), VF.createLiteral("Soccer")));
        for (final Statement triple : triples) {
            ryaConn.add(triple);
        }

        // Create a PCJ table that will include those triples in its results.
        final String sparql = "SELECT ?name ?age "
                + "{"
                + "FILTER(?age < 30) ."
                + "?name <http://hasAge> ?age."
                + "?name <http://playsSport> \"Soccer\" "
                + "}";
        final String pcjTableName = "testPcj";
        final MongoPcjDocuments pcjs = new MongoPcjDocuments(getMongoClient(), conf.getRyaInstanceName());
        pcjs.createPcj(pcjTableName, sparql);

        // Populate the PCJ table using a Rya connection.
        pcjs.populatePcj(pcjTableName, ryaConn);
        final Collection<BindingSet> fetchedResults = loadPcjResults(pcjTableName);

        // Make sure the cardinality was updated. Eve's age (43) fails the
        // FILTER, so only Alice, Bob, and Charlie match.
        final PcjMetadata metadata = pcjs.getPcjMetadata(pcjTableName);
        assertEquals(3, metadata.getCardinality());

        // Ensure the expected results match those that were stored.
        final MapBindingSet alice = new MapBindingSet();
        alice.addBinding("name", VF.createIRI("http://Alice"));
        alice.addBinding("age", VF.createLiteral(BigInteger.valueOf(14)));

        final MapBindingSet bob = new MapBindingSet();
        bob.addBinding("name", VF.createIRI("http://Bob"));
        bob.addBinding("age", VF.createLiteral(BigInteger.valueOf(16)));

        final MapBindingSet charlie = new MapBindingSet();
        charlie.addBinding("name", VF.createIRI("http://Charlie"));
        charlie.addBinding("age", VF.createLiteral(BigInteger.valueOf(12)));

        final Set<BindingSet> expected = Sets.<BindingSet>newHashSet(alice, bob, charlie);
        assertEquals(expected, fetchedResults);
    } finally {
        ryaConn.close();
        ryaStore.shutDown();
    }
}
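The helper loadPcjResults(String) is not shown in this excerpt. A sketch of what it might look like, assuming MongoPcjDocuments exposes a listResults(String) accessor mirroring the PrecomputedJoinStorage.listResults call used elsewhere on this page (that method name is an assumption, not confirmed by this excerpt):

private Collection<BindingSet> loadPcjResults(final String pcjTableName) throws Exception {
    // Assumed accessor: reads the binding sets stored in the PCJ document.
    final MongoPcjDocuments pcjs = new MongoPcjDocuments(getMongoClient(), conf.getRyaInstanceName());
    final Set<BindingSet> results = new HashSet<>();
    try (CloseableIterator<BindingSet> it = pcjs.listResults(pcjTableName)) {
        while (it.hasNext()) {
            results.add(it.next());
        }
    }
    return results;
}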
use of org.eclipse.rdf4j.repository.sail.SailRepositoryConnection in project incubator-rya by apache.
In the class RyaInputIncrementalUpdateIT, the method streamResultsThroughRya:
/**
* Ensure historic matches are included in the result.
*/
@Test
public void streamResultsThroughRya() throws Exception {
    // A query that finds people who talk to Eve and work at Chipotle.
    final String sparql = "SELECT ?x "
            + "WHERE { "
            + "?x <http://talksTo> <http://Eve>. "
            + "?x <http://worksAt> <http://Chipotle>."
            + "}";

    // Triples that are loaded into Rya before the PCJ is created.
    final ValueFactory vf = SimpleValueFactory.getInstance();
    final Set<Statement> historicTriples = Sets.newHashSet(
            vf.createStatement(vf.createIRI("http://Alice"), vf.createIRI("http://talksTo"), vf.createIRI("http://Eve")),
            vf.createStatement(vf.createIRI("http://Bob"), vf.createIRI("http://talksTo"), vf.createIRI("http://Eve")),
            vf.createStatement(vf.createIRI("http://Charlie"), vf.createIRI("http://talksTo"), vf.createIRI("http://Eve")),
            vf.createStatement(vf.createIRI("http://Eve"), vf.createIRI("http://helps"), vf.createIRI("http://Kevin")),
            vf.createStatement(vf.createIRI("http://Bob"), vf.createIRI("http://worksAt"), vf.createIRI("http://Chipotle")),
            vf.createStatement(vf.createIRI("http://Charlie"), vf.createIRI("http://worksAt"), vf.createIRI("http://Chipotle")),
            vf.createStatement(vf.createIRI("http://Eve"), vf.createIRI("http://worksAt"), vf.createIRI("http://Chipotle")),
            vf.createStatement(vf.createIRI("http://David"), vf.createIRI("http://worksAt"), vf.createIRI("http://Chipotle")));

    // The expected results of the SPARQL query once the PCJ has been computed:
    // only Bob and Charlie both talk to Eve and work at Chipotle.
    final Set<BindingSet> expected = new HashSet<>();
    MapBindingSet bs = new MapBindingSet();
    bs.addBinding("x", vf.createIRI("http://Bob"));
    expected.add(bs);
    bs = new MapBindingSet();
    bs.addBinding("x", vf.createIRI("http://Charlie"));
    expected.add(bs);

    // Create the PCJ table.
    final Connector accumuloConn = super.getAccumuloConnector();
    final PrecomputedJoinStorage pcjStorage = new AccumuloPcjStorage(accumuloConn, getRyaInstanceName());
    final String pcjId = pcjStorage.createPcj(sparql);

    try (FluoClient fluoClient = FluoFactory.newClient(super.getFluoConfiguration())) {
        // Tell the Fluo app to maintain the PCJ.
        new CreateFluoPcj().withRyaIntegration(pcjId, pcjStorage, fluoClient, accumuloConn, getRyaInstanceName());
        super.getMiniFluo().waitForObservers();

        // Load the historic data into Rya.
        final SailRepositoryConnection ryaConn = super.getRyaSailRepository().getConnection();
        for (final Statement triple : historicTriples) {
            ryaConn.add(triple);
        }
        super.getMiniFluo().waitForObservers();

        // Verify the end results of the query match the expected results.
        final Set<BindingSet> results = new HashSet<>();
        try (CloseableIterator<BindingSet> resultIt = pcjStorage.listResults(pcjId)) {
            while (resultIt.hasNext()) {
                results.add(resultIt.next());
            }
        }
        assertEquals(expected, results);
    }
}
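Once the PCJ exists, streamed updates follow the same pattern: add new triples through the Rya connection, wait for the Fluo observers, and re-read the PCJ. A minimal sketch, assuming vf, ryaConn, pcjStorage, and pcjId are still in scope as in the test above:

// David already works at Chipotle in the historic data; adding this triple
// makes him match both statement patterns, so ?x = http://David should
// appear when the PCJ results are re-read.
ryaConn.add(vf.createStatement(
        vf.createIRI("http://David"),
        vf.createIRI("http://talksTo"),
        vf.createIRI("http://Eve")));
super.getMiniFluo().waitForObservers();

final Set<BindingSet> updated = new HashSet<>();
try (CloseableIterator<BindingSet> it = pcjStorage.listResults(pcjId)) {
    while (it.hasNext()) {
        updated.add(it.next());
    }
}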