Use of org.openrdf.query.parser.sparql.SPARQLParser in project incubator-rya by apache.
From the class AccumuloSelectivityEvalDAOTest, method getSpList:
private List<StatementPattern> getSpList(String query) throws MalformedQueryException {
    // Parse the SPARQL query and collect all of the statement patterns in its algebra tree.
    SPARQLParser sp = new SPARQLParser();
    ParsedQuery pq = sp.parseQuery(query, null);
    TupleExpr te = pq.getTupleExpr();
    return StatementPatternCollector.process(te);
}
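A minimal sketch of how this helper might be exercised; the test method below is hypothetical, not part of AccumuloSelectivityEvalDAOTest, and its query string is illustrative only.

@Test
public void getSpListSketch() throws MalformedQueryException {
    // Illustrative query with two triple patterns in its WHERE clause.
    final List<StatementPattern> patterns = getSpList(
            "SELECT ?s ?o WHERE { ?s <uri:p1> ?o . ?o <uri:p2> <uri:o1> }");
    // One StatementPattern node is collected per triple pattern.
    Assert.assertEquals(2, patterns.size());
}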
Use of org.openrdf.query.parser.sparql.SPARQLParser in project incubator-rya by apache.
From the class AccumuloBatchUpdatePCJ, method updatePCJResults:
private void updatePCJResults(final String ryaInstanceName, final String pcjId) throws InstanceDoesNotExistException, PCJDoesNotExistException, RyaClientException {
    // Things that have to be closed before we exit.
    Sail sail = null;
    SailConnection sailConn = null;
    CloseableIteration<? extends BindingSet, QueryEvaluationException> results = null;
    try (final PrecomputedJoinStorage pcjStorage = new AccumuloPcjStorage(super.getConnector(), ryaInstanceName)) {
        // Create an instance of Sail backed by the Rya instance.
        sail = connectToRya(ryaInstanceName);
        // Purge the old results from the PCJ.
        try {
            pcjStorage.purge(pcjId);
        } catch (final PCJStorageException e) {
            throw new RyaClientException("Could not batch update PCJ with ID '" + pcjId + "' because the old " + "results could not be purged from it.", e);
        }
        // Parse the PCJ's SPARQL query.
        final PcjMetadata metadata = pcjStorage.getPcjMetadata(pcjId);
        final String sparql = metadata.getSparql();
        final SPARQLParser parser = new SPARQLParser();
        final ParsedQuery parsedQuery = parser.parseQuery(sparql, null);
        // Execute the query.
        sailConn = sail.getConnection();
        results = sailConn.evaluate(parsedQuery.getTupleExpr(), null, null, false);
        // Load the results into the PCJ table in batches of 1000 binding sets.
        final List<VisibilityBindingSet> batch = new ArrayList<>(1000);
        while (results.hasNext()) {
            final VisibilityBindingSet result = new VisibilityBindingSet(results.next(), "");
            batch.add(result);
            if (batch.size() == 1000) {
                pcjStorage.addResults(pcjId, batch);
                batch.clear();
            }
        }
        // Flush whatever remains in the final, partially filled batch.
        if (!batch.isEmpty()) {
            pcjStorage.addResults(pcjId, batch);
            batch.clear();
        }
    } catch (final MalformedQueryException | PCJStorageException | SailException | QueryEvaluationException e) {
        throw new RyaClientException("Failed to batch load new results into the PCJ with ID '" + pcjId + "'.", e);
    } finally {
        if (results != null) {
            try {
                results.close();
            } catch (final QueryEvaluationException e) {
                log.warn(e.getMessage(), e);
            }
        }
        if (sailConn != null) {
            try {
                sailConn.close();
            } catch (final SailException e) {
                log.warn(e.getMessage(), e);
            }
        }
        if (sail != null) {
            try {
                sail.shutDown();
            } catch (final SailException e) {
                log.warn(e.getMessage(), e);
            }
        }
    }
}
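The core idiom above is to parse the PCJ's stored SPARQL into a TupleExpr and evaluate it directly against the Sail connection. A stripped-down sketch of just that step, assuming the surrounding Sail setup; the helper name is hypothetical and not part of AccumuloBatchUpdatePCJ.

// Hypothetical helper isolating the parse-and-evaluate step: no dataset, no initial
// bindings, and inferencing disabled, matching the call in updatePCJResults.
private CloseableIteration<? extends BindingSet, QueryEvaluationException> evaluateSparql(
        final SailConnection sailConn, final String sparql)
        throws MalformedQueryException, SailException {
    final SPARQLParser parser = new SPARQLParser();
    final ParsedQuery parsedQuery = parser.parseQuery(sparql, null);
    return sailConn.evaluate(parsedQuery.getTupleExpr(), null, null, false);
}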
Use of org.openrdf.query.parser.sparql.SPARQLParser in project incubator-rya by apache.
From the class FluoQueryMetadataDAOIT, method constructQueryMetadataTest:
@Test
public void constructQueryMetadataTest() throws MalformedQueryException {
    final String query = "select ?x ?y where {?x <uri:p1> ?y. ?y <uri:p2> <uri:o1> }";
    final SPARQLParser parser = new SPARQLParser();
    final ParsedQuery pq = parser.parseQuery(query, null);
    final List<StatementPattern> patterns = StatementPatternCollector.process(pq.getTupleExpr());
    final FluoQueryMetadataDAO dao = new FluoQueryMetadataDAO();
    // Create the object that will be serialized.
    final ConstructQueryMetadata.Builder builder = ConstructQueryMetadata.builder();
    builder.setNodeId("nodeId");
    builder.setChildNodeId("childNodeId");
    builder.setParentNodeId("parentNodeId");
    builder.setVarOrder(new VariableOrder("a;b;c"));
    builder.setConstructGraph(new ConstructGraph(patterns));
    final ConstructQueryMetadata originalMetadata = builder.build();
    try (FluoClient fluoClient = FluoFactory.newClient(super.getFluoConfiguration())) {
        // Write it to the Fluo table.
        try (Transaction tx = fluoClient.newTransaction()) {
            dao.write(tx, originalMetadata);
            tx.commit();
        }
        // Read it from the Fluo table.
        ConstructQueryMetadata storedMetadata = null;
        try (Snapshot sx = fluoClient.newSnapshot()) {
            storedMetadata = dao.readConstructQueryMetadata(sx, "nodeId");
        }
        // Ensure the deserialized object is the same as the serialized one.
        assertEquals(originalMetadata, storedMetadata);
    }
}
Use of org.openrdf.query.parser.sparql.SPARQLParser in project incubator-rya by apache.
From the class IndexPlanValidatorTest, method testEvaluateTwoIndexTwoVarOrder6:
@Test
public void testEvaluateTwoIndexTwoVarOrder6() throws Exception {
    final String indexSparqlString =
            "SELECT ?e ?l ?c " +
            "{" +
            " ?e a ?c . " +
            " ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l " +
            "}";
    final String indexSparqlString2 =
            "SELECT ?l ?e ?o " +
            "{" +
            " ?e <uri:talksTo> ?o . " +
            " ?o <http://www.w3.org/2000/01/rdf-schema#label> ?l " +
            "}";
    final String queryString =
            "SELECT ?e ?c ?l ?o " +
            "{" +
            " ?e a ?c . " +
            " ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l . " +
            " ?e <uri:talksTo> ?o . " +
            " ?o <http://www.w3.org/2000/01/rdf-schema#label> ?l " +
            "}";
    final SPARQLParser sp = new SPARQLParser();
    final ParsedQuery index1 = sp.parseQuery(indexSparqlString, null);
    final ParsedQuery index2 = sp.parseQuery(indexSparqlString2, null);
    // Wrap each parsed index query as an external tuple set (a precomputed join).
    final List<ExternalTupleSet> index = Lists.newArrayList();
    final SimpleExternalTupleSet ais1 = new SimpleExternalTupleSet((Projection) index1.getTupleExpr());
    final SimpleExternalTupleSet ais2 = new SimpleExternalTupleSet((Projection) index2.getTupleExpr());
    index.add(ais2);
    index.add(ais1);
    final ParsedQuery pq = sp.parseQuery(queryString, null);
    final TupleExpr tup = pq.getTupleExpr().clone();
    provider.setIndices(index);
    final PCJOptimizer pcj = new PCJOptimizer(index, false, provider);
    pcj.optimize(tup, null, null);
    final IndexPlanValidator ipv = new IndexPlanValidator(false);
    Assert.assertTrue(ipv.isValid(tup));
}
Use of org.openrdf.query.parser.sparql.SPARQLParser in project incubator-rya by apache.
From the class IndexPlanValidatorTest, method testEvaluateTwoIndexCrossProduct1:
@Test
public void testEvaluateTwoIndexCrossProduct1() throws Exception {
    final String indexSparqlString =
            "SELECT ?e ?l ?c " +
            "{" +
            " ?e a ?c . " +
            " ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l " +
            "}";
    final String indexSparqlString2 =
            "SELECT ?e ?l ?o " +
            "{" +
            " ?e <uri:talksTo> ?o . " +
            " ?o <http://www.w3.org/2000/01/rdf-schema#label> ?l " +
            "}";
    final String queryString =
            "SELECT ?e ?c ?l ?o ?f ?g " +
            "{" +
            " ?e a ?c . " +
            " ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l . " +
            " ?e <uri:talksTo> ?o . " +
            " ?o <http://www.w3.org/2000/01/rdf-schema#label> ?l . " +
            // The ?f/?g pattern shares no variables with the rest of the query, so it forces a cross product.
            " ?f <uri:talksTo> ?g . " +
            "}";
    final SPARQLParser sp = new SPARQLParser();
    final ParsedQuery index1 = sp.parseQuery(indexSparqlString, null);
    final ParsedQuery index2 = sp.parseQuery(indexSparqlString2, null);
    final List<ExternalTupleSet> index = Lists.newArrayList();
    final SimpleExternalTupleSet ais1 = new SimpleExternalTupleSet((Projection) index1.getTupleExpr());
    final SimpleExternalTupleSet ais2 = new SimpleExternalTupleSet((Projection) index2.getTupleExpr());
    index.add(ais2);
    index.add(ais1);
    final ParsedQuery pq = sp.parseQuery(queryString, null);
    final TupleExpr tup = pq.getTupleExpr().clone();
    provider.setIndices(index);
    final PCJOptimizer pcj = new PCJOptimizer(index, false, provider);
    pcj.optimize(tup, null, null);
    // The validator is configured to reject plans that contain cross products, so this plan is invalid.
    final IndexPlanValidator ipv = new IndexPlanValidator(true);
    Assert.assertFalse(ipv.isValid(tup));
}
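Both IndexPlanValidatorTest methods repeat the same setup step: parse an index SPARQL string and wrap the root Projection of its algebra tree as a SimpleExternalTupleSet. A sketch of that step factored into a helper; the method name is hypothetical and not part of the test class.

// Hypothetical helper capturing the shared index-setup idiom from the tests above.
private static SimpleExternalTupleSet asExternalTupleSet(final String indexSparql)
        throws MalformedQueryException {
    final SPARQLParser parser = new SPARQLParser();
    final ParsedQuery parsed = parser.parseQuery(indexSparql, null);
    // The root of a parsed SELECT query is a Projection node, which
    // SimpleExternalTupleSet uses to describe the precomputed join.
    return new SimpleExternalTupleSet((Projection) parsed.getTupleExpr());
}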