Use of org.openrdf.query.TupleQuery in the Apache incubator-rya project.
From the class MongoExecuteSparqlQuery: the method executeSparqlQuery.
/**
 * Executes a SPARQL query against the Rya instance with the given name and
 * returns the results rendered as CSV text, together with the number of
 * results and how long the evaluation took.
 *
 * @param ryaInstanceName - The name of the Rya instance to query. (not null)
 * @param sparqlQuery - The SPARQL query to execute. (not null)
 * @return A string holding the CSV-formatted query results followed by a
 *   summary line with the result count and the duration in seconds.
 * @throws InstanceDoesNotExistException If no Rya instance with that name exists.
 * @throws RyaClientException If the Sail could not be created or the query
 *   could not be parsed or evaluated.
 */
@Override
public String executeSparqlQuery(final String ryaInstanceName, final String sparqlQuery) throws InstanceDoesNotExistException, RyaClientException {
requireNonNull(ryaInstanceName);
requireNonNull(sparqlQuery);
// Ensure the Rya Instance exists.
if (!instanceExists.exists(ryaInstanceName)) {
throw new InstanceDoesNotExistException(String.format("There is no Rya instance named '%s'.", ryaInstanceName));
}
Sail sail = null;
SailRepositoryConnection sailRepoConn = null;
try {
// Get a Sail object that is connected to the Rya instance.
final MongoDBRdfConfiguration ryaConf = connectionDetails.build(ryaInstanceName);
sail = RyaSailFactory.getInstance(ryaConf);
final SailRepository sailRepo = new SailRepository(sail);
sailRepoConn = sailRepo.getConnection();
// Execute the query, streaming the results through a counting CSV writer.
final long start = System.currentTimeMillis();
final TupleQuery tupleQuery = sailRepoConn.prepareTupleQuery(QueryLanguage.SPARQL, sparqlQuery);
final ByteArrayOutputStream baos = new ByteArrayOutputStream();
final CountingSPARQLResultsCSVWriter handler = new CountingSPARQLResultsCSVWriter(baos);
tupleQuery.evaluate(handler);
final long end = System.currentTimeMillis();
// Format and return the result of the query.
// The CSV bytes are decoded explicitly as UTF-8 so the output is platform independent.
final String queryResult = new String(baos.toByteArray(), StandardCharsets.UTF_8);
// Elapsed time in seconds with one to three decimal places.
// NOTE(review): DecimalFormat uses the default locale's decimal separator.
final String queryDuration = new DecimalFormat("0.0##").format((end - start) / 1000.0);
return "Query Result:\n" + queryResult + "Retrieved " + handler.getCount() + " results in " + queryDuration + " seconds.";
} catch (SailException | RyaDAOException | InferenceEngineException | AccumuloException | AccumuloSecurityException e) {
// Failures while standing up the Sail connection to the Rya instance.
throw new RyaClientException("Could not create the Sail object used to query the RYA instance.", e);
} catch (final MalformedQueryException | QueryEvaluationException | TupleQueryResultHandlerException | RepositoryException e) {
// Failures while parsing or evaluating the SPARQL query itself.
throw new RyaClientException("Could not execute the SPARQL query.", e);
} finally {
// Close the resources that were opened: the connection first, then the Sail
// it came from. Close failures are logged rather than rethrown so they do
// not mask the query result or a prior exception.
if (sailRepoConn != null) {
try {
sailRepoConn.close();
} catch (final RepositoryException e) {
log.error("Couldn't close the SailRepositoryConnection object.", e);
}
}
if (sail != null) {
try {
sail.shutDown();
} catch (final SailException e) {
log.error("Couldn't close the Sail object.", e);
}
}
}
}
Use of org.openrdf.query.TupleQuery in the Apache incubator-rya project.
From the class RyaGeoDirectExample: the method testAddPointAndWithinSearchWithPCJ.
/**
 * Loads a single WKT point feature into Rya and then checks geof:sfWithin
 * searches against it: rings that contain the point must match and rings that
 * do not contain it must return nothing, both with and without Pre-Computed
 * Joins (PCJs).
 *
 * @param conn - The repository connection used to load the data and run the queries. (not null)
 * @throws Exception If the update or any query fails, or a result count is not the expected one.
 */
private static void testAddPointAndWithinSearchWithPCJ(final SailRepositoryConnection conn) throws Exception {
    // Insert one geo:Feature whose geometry is a WKT point at (-77.035, 38.889).
    final String update = //
            "PREFIX geo: <http://www.opengis.net/ont/geosparql#> " //
            + "INSERT DATA { " //
            + " <urn:feature> a geo:Feature ; " //
            + " geo:hasGeometry [ " //
            + " a geo:Point ; " //
            + " geo:asWKT \"Point(-77.03524 38.889468)\"^^geo:wktLiteral " //
            + " ] . " + "}";
    final Update u = conn.prepareUpdate(QueryLanguage.SPARQL, update);
    u.execute();

    // Search rings used below: one that excludes the inserted point and one that contains it.
    final String ringWithoutPoint = "POLYGON((-77 39, -76 39, -76 38, -77 38, -77 39))";
    final String ringWithPoint = "POLYGON((-78 39, -77 39, -77 38, -78 38, -78 39))";

    // Point outside the search ring: no results.
    Validate.isTrue(evaluateAndCount(conn, withinQuery(ringWithoutPoint), "point outside search ring").getCount() == 0);

    // Point inside the search ring: exactly one result.
    Validate.isTrue(evaluateAndCount(conn, withinQuery(ringWithPoint), "point inside search ring").getCount() == 1);

    // Point inside the search ring answered with a Pre-Computed Join. Previous
    // runs may have left additional matching points behind, so only a lower
    // bound is checked.
    Validate.isTrue(evaluateAndCount(conn, withinQuery(ringWithPoint), "point inside search ring with Pre-Computed Join").getCount() >= 1);

    // Point outside the search ring with a PCJ: still no results.
    Validate.isTrue(evaluateAndCount(conn, withinQuery(ringWithoutPoint), "point outside search ring with PCJ").getCount() == 0);

    // Point inside the search ring with a different Pre-Computed Join: this
    // variant omits the "?feature a geo:Feature" constraint and does not
    // project the WKT binding.
    final String differentPcjQuery = //
            "PREFIX geo: <http://www.opengis.net/ont/geosparql#> " //
            + "PREFIX geof: <http://www.opengis.net/def/function/geosparql/> " //
            + "SELECT ?feature ?point " //
            + "{" //
            + " ?feature geo:hasGeometry ?point . " //
            + " ?point a geo:Point . " //
            + " ?point geo:asWKT ?wkt . " //
            + " FILTER(geof:sfWithin(?wkt, \"POLYGON((-78 39, -77 39, -77 38, -78 38, -78 39))\"^^geo:wktLiteral)) " //
            + "}";
    Validate.isTrue(evaluateAndCount(conn, differentPcjQuery, "point inside search ring with different Pre-Computed Join").getCount() == 1);
}

/**
 * Builds the standard SELECT query that finds features whose WKT geometry
 * lies within the given polygon.
 *
 * @param polygonWkt - The WKT polygon used in the geof:sfWithin filter. (not null)
 * @return The SPARQL query text.
 */
private static String withinQuery(final String polygonWkt) {
    return "PREFIX geo: <http://www.opengis.net/ont/geosparql#> " //
            + "PREFIX geof: <http://www.opengis.net/def/function/geosparql/> " //
            + "SELECT ?feature ?point ?wkt " //
            + "{" //
            + " ?feature a geo:Feature . " //
            + " ?feature geo:hasGeometry ?point . " //
            + " ?point a geo:Point . " //
            + " ?point geo:asWKT ?wkt . " //
            + " FILTER(geof:sfWithin(?wkt, \"" + polygonWkt + "\"^^geo:wktLiteral)) " //
            + "}";
}

/**
 * Evaluates a SPARQL SELECT query, logs "&lt;label&gt;, Result count : &lt;n&gt;",
 * and returns the counting handler that received the results.
 *
 * @param conn - The connection used to run the query. (not null)
 * @param queryString - The SPARQL SELECT query to evaluate. (not null)
 * @param label - A short description used in the log message. (not null)
 * @return The handler holding the number of results the query produced.
 * @throws Exception If the query is malformed or fails to evaluate.
 */
private static CountingResultHandler evaluateAndCount(final SailRepositoryConnection conn, final String queryString, final String label) throws Exception {
    final TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
    final CountingResultHandler tupleHandler = new CountingResultHandler();
    tupleQuery.evaluate(tupleHandler);
    log.info(label + ", Result count : " + tupleHandler.getCount());
    return tupleHandler;
}
Use of org.openrdf.query.TupleQuery in the Apache incubator-rya project.
From the class MongoPcjDocuments: the method populatePcj.
/**
 * Scan Rya for results that solve the PCJ's query and store them in the PCJ
 * document.
 * <p>
 * This method assumes the PCJ document has already been created.
 *
 * @param pcjId - The Id of the PCJ that will receive the results. (not null)
 * @param ryaConn - A connection to the Rya store that will be queried to find results. (not null)
 * @throws PCJStorageException If results could not be written to the PCJ results document,
 *   the PCJ results document does not exist, or the query that is being executed was malformed.
 */
public void populatePcj(final String pcjId, final RepositoryConnection ryaConn) throws PCJStorageException {
    checkNotNull(pcjId);
    checkNotNull(ryaConn);
    try {
        // Fetch the query that needs to be executed from the PCJ metadata document.
        final PcjMetadata pcjMetadata = getPcjMetadata(pcjId);
        final String sparql = pcjMetadata.getSparql();

        // Query Rya for results to the SPARQL query.
        final TupleQuery query = ryaConn.prepareTupleQuery(QueryLanguage.SPARQL, sparql);
        final TupleQueryResult results = query.evaluate();
        try {
            // Load batches of 1000 of them at a time into the PCJ results document.
            final Set<VisibilityBindingSet> batch = new HashSet<>(1000);
            while (results.hasNext()) {
                final VisibilityBindingSet bs = new VisibilityBindingSet(results.next());
                batch.add(bs);
                if (batch.size() == 1000) {
                    addResults(pcjId, batch);
                    batch.clear();
                }
            }
            if (!batch.isEmpty()) {
                addResults(pcjId, batch);
            }
        } finally {
            // Bug fix: the result iteration was never closed, leaking the query's
            // underlying resources. Close it even when a batch write fails.
            results.close();
        }
    } catch (RepositoryException | MalformedQueryException | QueryEvaluationException e) {
        throw new PCJStorageException("Could not populate a PCJ document with Rya results for the pcj with Id: " + pcjId, e);
    }
}
Use of org.openrdf.query.TupleQuery in the Apache incubator-rya project.
From the class RyaMongoGeoDirectExample: the method testAddPointAndWithinSearch.
/**
 * Try out some geospatial data and queries: a feature with a WKT point
 * geometry is inserted, then a ring that contains the point is expected to
 * match it while a ring away from the point is expected to match nothing.
 *
 * @param conn - The repository connection used to insert the data and run the queries.
 * @throws Exception If the update or either query fails, or a count is unexpected.
 */
private static void testAddPointAndWithinSearch(SailRepositoryConnection conn) throws Exception {
    // Insert a single feature whose geometry is a WKT point.
    final String insertSparql = //
            "PREFIX geo: <http://www.opengis.net/ont/geosparql#> " //
            + "INSERT DATA { " //
            + " <urn:feature> a geo:Feature ; " //
            + " geo:hasGeometry [ " //
            + " a geo:Point ; " //
            + " geo:asWKT \"Point(-77.03524 38.889468)\"^^geo:wktLiteral " //
            + " ] . " + "}";
    final Update insert = conn.prepareUpdate(QueryLanguage.SPARQL, insertSparql);
    insert.execute();

    // Case 1: the search ring contains the point.
    final String containingRingQuery = //
            "PREFIX geo: <http://www.opengis.net/ont/geosparql#> " //
            + "PREFIX geof: <http://www.opengis.net/def/function/geosparql/> " //
            + "SELECT ?feature ?point ?wkt " //
            + "{" //
            + " ?feature a geo:Feature . " //
            + " ?feature geo:hasGeometry ?point . " //
            + " ?point a geo:Point . " //
            + " ?point geo:asWKT ?wkt . " //
            + " FILTER(geof:sfWithin(?wkt, \"POLYGON((-78 39, -77 39, -77 38, -78 38, -78 39))\"^^geo:wktLiteral)) " //
            + "}";
    TupleQuery query = conn.prepareTupleQuery(QueryLanguage.SPARQL, containingRingQuery);
    CountingResultHandler counter = new CountingResultHandler();
    query.evaluate(counter);
    log.info("Result count -- ring containing point: " + counter.getCount());
    // Earlier runs may have left matching points behind, so only a lower bound is asserted.
    Validate.isTrue(counter.getCount() >= 1);

    // Case 2: the search ring lies entirely away from the point.
    final String disjointRingQuery = //
            "PREFIX geo: <http://www.opengis.net/ont/geosparql#> " //
            + "PREFIX geof: <http://www.opengis.net/def/function/geosparql/> " //
            + "SELECT ?feature ?point ?wkt " //
            + "{" //
            + " ?feature a geo:Feature . " //
            + " ?feature geo:hasGeometry ?point . " //
            + " ?point a geo:Point . " //
            + " ?point geo:asWKT ?wkt . " //
            + " FILTER(geof:sfWithin(?wkt, \"POLYGON((-77 39, -76 39, -76 38, -77 38, -77 39))\"^^geo:wktLiteral)) " //
            + "}";
    query = conn.prepareTupleQuery(QueryLanguage.SPARQL, disjointRingQuery);
    counter = new CountingResultHandler();
    query.evaluate(counter);
    log.info("Result count -- ring outside point: " + counter.getCount());
    Validate.isTrue(counter.getCount() == 0);
}
Use of org.openrdf.query.TupleQuery in the Apache incubator-rya project.
From the class PcjTables: the method populatePcj.
/**
 * Scan Rya for results that solve the PCJ's query and store them in the PCJ table.
 * <p>
 * This method assumes the PCJ table has already been created.
 *
 * @param accumuloConn - A connection to the Accumulo that hosts the PCJ table. (not null)
 * @param pcjTableName - The name of the PCJ table that will receive the results. (not null)
 * @param ryaConn - A connection to the Rya store that will be queried to find results. (not null)
 * @throws PCJStorageException If results could not be written to the PCJ table,
 *   the PCJ table does not exist, or the query that is being executed
 *   was malformed.
 */
public void populatePcj(final Connector accumuloConn, final String pcjTableName, final RepositoryConnection ryaConn) throws PCJStorageException {
    checkNotNull(accumuloConn);
    checkNotNull(pcjTableName);
    checkNotNull(ryaConn);
    try {
        // Fetch the query that needs to be executed from the PCJ table.
        final PcjMetadata pcjMetadata = getPcjMetadata(accumuloConn, pcjTableName);
        final String sparql = pcjMetadata.getSparql();

        // Query Rya for results to the SPARQL query.
        final TupleQuery query = ryaConn.prepareTupleQuery(QueryLanguage.SPARQL, sparql);
        final TupleQueryResult results = query.evaluate();
        try {
            // Load batches of 1000 of them at a time into the PCJ table.
            final Set<VisibilityBindingSet> batch = new HashSet<>(1000);
            while (results.hasNext()) {
                batch.add(new VisibilityBindingSet(results.next()));
                if (batch.size() == 1000) {
                    addResults(accumuloConn, pcjTableName, batch);
                    batch.clear();
                }
            }
            if (!batch.isEmpty()) {
                addResults(accumuloConn, pcjTableName, batch);
            }
        } finally {
            // Bug fix: the result iteration was never closed, leaking the query's
            // underlying resources. Close it even when a batch write fails.
            results.close();
        }
    } catch (RepositoryException | MalformedQueryException | QueryEvaluationException e) {
        throw new PCJStorageException("Could not populate a PCJ table with Rya results for the table named: " + pcjTableName, e);
    }
}
Aggregations