Example usage of org.openrdf.query.algebra.QueryRoot in the Apache incubator-rya project: class PipelineQueryIT, method testTriplePipeline.
@Test
public void testTriplePipeline() throws Exception {
    final URI alice = VF.createURI("urn:Alice");
    final URI bob = VF.createURI("urn:Bob");
    final URI eve = VF.createURI("urn:Eve");
    final URI friend = VF.createURI("urn:friend");
    final URI knows = VF.createURI("urn:knows");
    final URI year = VF.createURI("urn:year");
    final Literal year2017 = VF.createLiteral("2017", XMLSchema.GYEAR);
    // CONSTRUCT query that derives a <urn:knows> and a <urn:year> triple for
    // every <urn:friend> edge found in the store.
    final String query = "CONSTRUCT {\n" + " ?x <urn:knows> ?y .\n" + " ?x <urn:year> \"2017\"^^<" + XMLSchema.GYEAR + "> .\n" + "} WHERE { ?x <urn:friend> ?y }";
    insert(alice, friend, bob);
    insert(bob, knows, eve);
    insert(eve, knows, alice);
    // Parse the query and let the visitor rewrite it into an aggregation pipeline.
    final QueryRoot root = new QueryRoot(PARSER.parseQuery(query, null).getTupleExpr());
    final SparqlToPipelineTransformVisitor pipelineVisitor = new SparqlToPipelineTransformVisitor(getRyaCollection());
    root.visit(pipelineVisitor);
    // The whole query tree should now be a single pipeline node.
    Assert.assertTrue(root.getArg() instanceof AggregationPipelineQueryNode);
    final AggregationPipelineQueryNode pipeline = (AggregationPipelineQueryNode) root.getArg();
    // Append the triple-construction stages, run the pipeline, and convert each
    // resulting document back into an RDF statement to verify serialization.
    final List<Bson> stages = pipeline.getTriplePipeline(System.currentTimeMillis(), false);
    final SimpleMongoDBStorageStrategy storageStrategy = new SimpleMongoDBStorageStrategy();
    final List<Statement> converted = new LinkedList<>();
    for (final Document doc : getRyaCollection().aggregate(stages)) {
        final DBObject dbo = (DBObject) JSON.parse(doc.toJson());
        final RyaStatement ryaStatement = storageStrategy.deserializeDBObject(dbo);
        converted.add(RyaToRdfConversions.convertStatement(ryaStatement));
    }
    // Only Alice has a <urn:friend> edge, so exactly two statements are derived.
    Assert.assertEquals(2, converted.size());
    Assert.assertTrue(converted.contains(VF.createStatement(alice, knows, bob)));
    Assert.assertTrue(converted.contains(VF.createStatement(alice, year, year2017)));
}
Example usage of org.openrdf.query.algebra.QueryRoot in the Apache incubator-rya project: class PipelineQueryIT, method testRequiredDerivationLevel.
@Test
public void testRequiredDerivationLevel() throws Exception {
    // Insert data: a small subclass graph. The (thing subClassOf owl:Thing)
    // statement is inserted at derivation level 1; all others use the default level.
    URI person = VF.createURI("urn:Person");
    URI livingThing = VF.createURI("urn:LivingThing");
    URI human = VF.createURI("urn:Human");
    URI programmer = VF.createURI("urn:Programmer");
    URI thing = VF.createURI("urn:Thing");
    insert(programmer, RDFS.SUBCLASSOF, person);
    insert(person, RDFS.SUBCLASSOF, FOAF.PERSON);
    insert(FOAF.PERSON, RDFS.SUBCLASSOF, person);
    insert(person, OWL.EQUIVALENTCLASS, human);
    insert(person, RDFS.SUBCLASSOF, livingThing);
    insert(livingThing, RDFS.SUBCLASSOF, thing);
    insert(thing, RDFS.SUBCLASSOF, OWL.THING, 1);
    insert(OWL.THING, RDFS.SUBCLASSOF, thing);
    dao.flush();
    // Define query (mutually-subclassed pairs) and expected results
    final String query = "SELECT ?A ?B WHERE {\n" + " ?A rdfs:subClassOf ?B .\n" + " ?B rdfs:subClassOf ?A .\n" + "}";
    List<String> varNames = Arrays.asList("A", "B");
    Multiset<BindingSet> expectedSolutions = HashMultiset.create();
    expectedSolutions.add(new ListBindingSet(varNames, person, FOAF.PERSON));
    expectedSolutions.add(new ListBindingSet(varNames, FOAF.PERSON, person));
    expectedSolutions.add(new ListBindingSet(varNames, thing, OWL.THING));
    expectedSolutions.add(new ListBindingSet(varNames, OWL.THING, thing));
    // Prepare query and convert to pipeline
    QueryRoot queryTree = new QueryRoot(PARSER.parseQuery(query, null).getTupleExpr());
    SparqlToPipelineTransformVisitor visitor = new SparqlToPipelineTransformVisitor(getRyaCollection());
    queryTree.visit(visitor);
    Assert.assertTrue(queryTree.getArg() instanceof AggregationPipelineQueryNode);
    AggregationPipelineQueryNode pipelineNode = (AggregationPipelineQueryNode) queryTree.getArg();
    // Extend the pipeline by requiring a derivation level of zero (should have no effect)
    pipelineNode.requireSourceDerivationDepth(0);
    Multiset<BindingSet> solutions = HashMultiset.create();
    CloseableIteration<BindingSet, QueryEvaluationException> iter = pipelineNode.evaluate(new QueryBindingSet());
    try {
        while (iter.hasNext()) {
            solutions.add(iter.next());
        }
    } finally {
        // Fix: the iteration was previously never closed, leaking the cursor.
        iter.close();
    }
    Assert.assertEquals(expectedSolutions, solutions);
    // Extend the pipeline by requiring a derivation level of one: only the
    // thing/owl:Thing pairs involve a statement inserted at level 1.
    expectedSolutions = HashMultiset.create();
    expectedSolutions.add(new ListBindingSet(varNames, thing, OWL.THING));
    expectedSolutions.add(new ListBindingSet(varNames, OWL.THING, thing));
    pipelineNode.requireSourceDerivationDepth(1);
    solutions = HashMultiset.create();
    iter = pipelineNode.evaluate(new QueryBindingSet());
    try {
        while (iter.hasNext()) {
            solutions.add(iter.next());
        }
    } finally {
        iter.close();
    }
    Assert.assertEquals(expectedSolutions, solutions);
}
Example usage of org.openrdf.query.algebra.QueryRoot in the Apache incubator-rya project: class PipelineQueryIT, helper method testPipelineQuery.
/**
 * Parses the given SPARQL query, rewrites it into a Mongo aggregation
 * pipeline, evaluates it, and asserts that the solution multiset matches
 * the expectation exactly (order-insensitive, multiplicity-sensitive).
 *
 * @param query SPARQL SELECT query to execute.
 * @param expectedSolutions the expected solutions.
 * @throws Exception if parsing or evaluation fails.
 */
private void testPipelineQuery(String query, Multiset<BindingSet> expectedSolutions) throws Exception {
    // Prepare query and convert to pipeline
    QueryRoot queryTree = new QueryRoot(PARSER.parseQuery(query, null).getTupleExpr());
    SparqlToPipelineTransformVisitor visitor = new SparqlToPipelineTransformVisitor(getRyaCollection());
    queryTree.visit(visitor);
    // Execute pipeline and verify results
    Assert.assertTrue(queryTree.getArg() instanceof AggregationPipelineQueryNode);
    AggregationPipelineQueryNode pipelineNode = (AggregationPipelineQueryNode) queryTree.getArg();
    Multiset<BindingSet> solutions = HashMultiset.create();
    CloseableIteration<BindingSet, QueryEvaluationException> iter = pipelineNode.evaluate(new QueryBindingSet());
    try {
        while (iter.hasNext()) {
            solutions.add(iter.next());
        }
    } finally {
        // Fix: the iteration was previously never closed, leaking the cursor.
        iter.close();
    }
    Assert.assertEquals(expectedSolutions, solutions);
}
Example usage of org.openrdf.query.algebra.QueryRoot in the Apache incubator-rya project: class PipelineQueryIT, method testRequiredTimestamp.
@Test
public void testRequiredTimestamp() throws Exception {
    // Insert data: a small subclass graph. The (person subClassOf foaf:Person)
    // statement is inserted with timestamp 2; all others use the default.
    URI person = VF.createURI("urn:Person");
    URI livingThing = VF.createURI("urn:LivingThing");
    URI human = VF.createURI("urn:Human");
    URI programmer = VF.createURI("urn:Programmer");
    URI thing = VF.createURI("urn:Thing");
    insert(programmer, RDFS.SUBCLASSOF, person);
    insert(person, RDFS.SUBCLASSOF, FOAF.PERSON, 2);
    insert(FOAF.PERSON, RDFS.SUBCLASSOF, person);
    insert(person, OWL.EQUIVALENTCLASS, human);
    insert(person, RDFS.SUBCLASSOF, livingThing);
    insert(livingThing, RDFS.SUBCLASSOF, thing);
    insert(thing, RDFS.SUBCLASSOF, OWL.THING);
    insert(OWL.THING, RDFS.SUBCLASSOF, thing);
    dao.flush();
    // Define query (mutually-subclassed pairs) and expected results
    final String query = "SELECT ?A ?B WHERE {\n" + " ?A rdfs:subClassOf ?B .\n" + " ?B rdfs:subClassOf ?A .\n" + "}";
    List<String> varNames = Arrays.asList("A", "B");
    Multiset<BindingSet> expectedSolutions = HashMultiset.create();
    expectedSolutions.add(new ListBindingSet(varNames, person, FOAF.PERSON));
    expectedSolutions.add(new ListBindingSet(varNames, FOAF.PERSON, person));
    expectedSolutions.add(new ListBindingSet(varNames, thing, OWL.THING));
    expectedSolutions.add(new ListBindingSet(varNames, OWL.THING, thing));
    // Prepare query and convert to pipeline
    QueryRoot queryTree = new QueryRoot(PARSER.parseQuery(query, null).getTupleExpr());
    SparqlToPipelineTransformVisitor visitor = new SparqlToPipelineTransformVisitor(getRyaCollection());
    queryTree.visit(visitor);
    Assert.assertTrue(queryTree.getArg() instanceof AggregationPipelineQueryNode);
    AggregationPipelineQueryNode pipelineNode = (AggregationPipelineQueryNode) queryTree.getArg();
    // Extend the pipeline by requiring a timestamp of zero (should have no effect)
    pipelineNode.requireSourceTimestamp(0);
    Multiset<BindingSet> solutions = HashMultiset.create();
    CloseableIteration<BindingSet, QueryEvaluationException> iter = pipelineNode.evaluate(new QueryBindingSet());
    try {
        while (iter.hasNext()) {
            solutions.add(iter.next());
        }
    } finally {
        // Fix: the iteration was previously never closed, leaking the cursor.
        iter.close();
    }
    Assert.assertEquals(expectedSolutions, solutions);
    // Extend the pipeline by requiring a future timestamp (should produce no results)
    long delta = 1000 * 60 * 60 * 24;
    pipelineNode.requireSourceTimestamp(System.currentTimeMillis() + delta);
    iter = pipelineNode.evaluate(new QueryBindingSet());
    try {
        Assert.assertFalse(iter.hasNext());
    } finally {
        iter.close();
    }
}
Example usage of org.openrdf.query.algebra.QueryRoot in the Apache incubator-rya project: class SparqlToPipelineTransformVisitorTest, method testStatementPattern.
@Test
public void testStatementPattern() throws Exception {
    // A single triple pattern: ?x rdf:type UNDERGRAD.
    final QueryRoot root = new QueryRoot(new StatementPattern(new Var("x"), constant(RDF.TYPE), constant(UNDERGRAD)));
    final SparqlToPipelineTransformVisitor pipelineVisitor = new SparqlToPipelineTransformVisitor(collection);
    root.visit(pipelineVisitor);
    // The visitor should replace the pattern with a pipeline node whose only
    // assured binding is the pattern's sole variable, "x".
    Assert.assertTrue(root.getArg() instanceof AggregationPipelineQueryNode);
    final AggregationPipelineQueryNode node = (AggregationPipelineQueryNode) root.getArg();
    Assert.assertEquals(Sets.newHashSet("x"), node.getAssuredBindingNames());
}
Aggregations