Example 6 with TupleQuery

Use of org.openrdf.query.TupleQuery in project incubator-rya by apache.

The class MongoRyaDirectExample, method testAllValuesFromInference:

public static void testAllValuesFromInference(final SailRepositoryConnection conn, final Sail sail) throws MalformedQueryException, RepositoryException, UpdateExecutionException, QueryEvaluationException, TupleQueryResultHandlerException, InferenceEngineException {
    log.info("Adding Data");
    String insert = "INSERT DATA\n" + "{ GRAPH <http://updated/test> {\n" + "  <urn:Alice> a <urn:Person> .\n" + "  <urn:Alice> <urn:hasParent> <urn:Bob> .\n" + "  <urn:Carol> <urn:hasParent> <urn:Dan> .\n" + "}}";
    Update update = conn.prepareUpdate(QueryLanguage.SPARQL, insert);
    update.execute();
    final String inferQuery = "select distinct ?x { GRAPH <http://updated/test> { ?x a <urn:Person> }}";
    final String explicitQuery = "select distinct ?x { GRAPH <http://updated/test> {\n" + "  { ?x a <urn:Person> }\n" + "  UNION {\n" + "    ?y a <urn:Person> .\n" + "    ?y <urn:hasParent> ?x .\n" + "  }\n" + "}}";
    log.info("Running Explicit Query");
    final CountingResultHandler resultHandler = new CountingResultHandler();
    TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, explicitQuery);
    tupleQuery.evaluate(resultHandler);
    log.info("Result count : " + resultHandler.getCount());
    Validate.isTrue(resultHandler.getCount() == 2);
    log.info("Running Inference-dependent Query");
    resultHandler.resetCount();
    tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, inferQuery);
    tupleQuery.evaluate(resultHandler);
    log.info("Result count : " + resultHandler.getCount());
    Validate.isTrue(resultHandler.getCount() == 1);
    log.info("Adding owl:allValuesFrom Schema");
    insert = "PREFIX rdfs: <" + RDFS.NAMESPACE + ">\n" + "PREFIX owl: <" + OWL.NAMESPACE + ">\n" + "INSERT DATA\n" + "{ GRAPH <http://updated/test> {\n" + "  <urn:Person> rdfs:subClassOf [ owl:onProperty <urn:hasParent> ; owl:allValuesFrom <urn:Person> ] ." + "}}";
    update = conn.prepareUpdate(QueryLanguage.SPARQL, insert);
    update.execute();
    log.info("Refreshing InferenceEngine");
    ((RdfCloudTripleStore) sail).getInferenceEngine().refreshGraph();
    log.info("Re-running Inference-dependent Query");
    resultHandler.resetCount();
    tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, inferQuery);
    tupleQuery.evaluate(resultHandler);
    log.info("Result count : " + resultHandler.getCount());
    Validate.isTrue(resultHandler.getCount() == 2);
}
Also used: TupleQuery(org.openrdf.query.TupleQuery), Update(org.openrdf.query.Update)
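The examples in this file count results with a CountingResultHandler, which is defined elsewhere in MongoRyaDirectExample and not shown here. A minimal sketch of such a handler, assuming only the standard org.openrdf.query interfaces, might look like the following; the details are illustrative rather than the project's exact implementation.

import java.util.List;

import org.openrdf.query.BindingSet;
import org.openrdf.query.QueryResultHandlerException;
import org.openrdf.query.TupleQueryResultHandler;
import org.openrdf.query.TupleQueryResultHandlerException;

// Minimal sketch of a counting handler; Rya's own CountingResultHandler may differ in detail.
public class CountingResultHandler implements TupleQueryResultHandler {

    private int count = 0;

    public int getCount() {
        return count;
    }

    public void resetCount() {
        count = 0;
    }

    @Override
    public void startQueryResult(final List<String> bindingNames) throws TupleQueryResultHandlerException {
        // Nothing to do when the result stream starts.
    }

    @Override
    public void handleSolution(final BindingSet bindingSet) throws TupleQueryResultHandlerException {
        // Each solution pushed by TupleQuery.evaluate(handler) bumps the counter.
        count++;
    }

    @Override
    public void endQueryResult() throws TupleQueryResultHandlerException {
        // Nothing to do when the result stream ends.
    }

    @Override
    public void handleBoolean(final boolean value) throws QueryResultHandlerException {
        // Not used for tuple (SELECT) results.
    }

    @Override
    public void handleLinks(final List<String> linkUrls) throws QueryResultHandlerException {
        // Not used for tuple (SELECT) results.
    }
}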

Example 7 with TupleQuery

Use of org.openrdf.query.TupleQuery in project incubator-rya by apache.

The class MongoRyaDirectExample, method testPropertyChainInferenceAltRepresentation:

public static void testPropertyChainInferenceAltRepresentation(final SailRepositoryConnection conn, final Sail sail) throws MalformedQueryException, RepositoryException, UpdateExecutionException, QueryEvaluationException, TupleQueryResultHandlerException, InferenceEngineException {
    // Add data
    String query = "INSERT DATA\n" + "{ GRAPH <http://updated/test> {\n" + "  <urn:jenGreatGranMother> <urn:Motherof> <urn:jenGranMother> . " + "  <urn:jenGranMother> <urn:isChildOf> <urn:jenGreatGranMother> . " + "  <urn:jenGranMother> <urn:Motherof> <urn:jenMother> . " + "  <urn:jenMother> <urn:isChildOf> <urn:jenGranMother> . " + " <urn:jenMother> <urn:Motherof> <urn:jen> . " + "  <urn:jen> <urn:isChildOf> <urn:jenMother> . " + " <urn:jen> <urn:Motherof> <urn:jenDaughter> .  }}";
    log.info("Performing Query");
    Update update = conn.prepareUpdate(QueryLanguage.SPARQL, query);
    update.execute();
    query = "select ?p { GRAPH <http://updated/test> {?s <urn:Motherof>/<urn:Motherof> ?p}}";
    CountingResultHandler resultHandler = new CountingResultHandler();
    TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
    tupleQuery.evaluate(resultHandler);
    log.info("Result count : " + resultHandler.getCount());
    // try adding a property chain and querying for it
    query = "PREFIX owl: <" + OWL.NAMESPACE + ">\n" + "INSERT DATA\n" + "{ GRAPH <http://updated/test> {\n" + "  <urn:greatMother> owl:propertyChainAxiom <urn:12342>  . " + " <urn:12342> <http://www.w3.org/1999/02/22-rdf-syntax-ns#first> _:node1atjakcvbx15023 . " + " _:node1atjakcvbx15023 <http://www.w3.org/2002/07/owl#inverseOf> <urn:isChildOf> . " + " <urn:12342> <http://www.w3.org/1999/02/22-rdf-syntax-ns#rest> _:node1atjakcvbx15123 . " + " _:node1atjakcvbx15123 <http://www.w3.org/1999/02/22-rdf-syntax-ns#rest> <http://www.w3.org/1999/02/22-rdf-syntax-ns#nil> . " + " _:node1atjakcvbx15123 <http://www.w3.org/1999/02/22-rdf-syntax-ns#first> <urn:MotherOf> .  }}";
    update = conn.prepareUpdate(QueryLanguage.SPARQL, query);
    update.execute();
    ((RdfCloudTripleStore) sail).getInferenceEngine().refreshGraph();
    resultHandler.resetCount();
    query = "select ?x { GRAPH <http://updated/test> {<urn:jenGreatGranMother> <urn:greatMother> ?x}}";
    resultHandler = new CountingResultHandler();
    tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
    tupleQuery.evaluate(resultHandler);
    log.info("Result count : " + resultHandler.getCount());
}
Also used: TupleQuery(org.openrdf.query.TupleQuery), Update(org.openrdf.query.Update)
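The method above counts results push-style through the handler. For comparison, here is a minimal sketch of the pull-style alternative (the approach Example 10 below uses) for the same chain query; it assumes the conn and log variables from the method above and needs org.openrdf.query.TupleQueryResult and org.openrdf.query.BindingSet on the import list. It is illustrative only.

    // Sketch: evaluate() without a handler returns a TupleQueryResult that is iterated and
    // must be closed when finished.
    final String chainQuery = "select ?x { GRAPH <http://updated/test> {<urn:jenGreatGranMother> <urn:greatMother> ?x}}";
    final TupleQuery pullQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, chainQuery);
    final TupleQueryResult result = pullQuery.evaluate();
    try {
        while (result.hasNext()) {
            final BindingSet bindingSet = result.next();
            log.info("Inferred binding: " + bindingSet.getValue("x"));
        }
    } finally {
        result.close();
    }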

Example 8 with TupleQuery

Use of org.openrdf.query.TupleQuery in project incubator-rya by apache.

The class AccumuloExecuteSparqlQuery, method executeSparqlQuery:

@Override
public String executeSparqlQuery(final String ryaInstanceName, final String sparqlQuery) throws InstanceDoesNotExistException, RyaClientException {
    requireNonNull(ryaInstanceName);
    requireNonNull(sparqlQuery);
    // Ensure the Rya Instance exists.
    if (!instanceExists.exists(ryaInstanceName)) {
        throw new InstanceDoesNotExistException(String.format("There is no Rya instance named '%s'.", ryaInstanceName));
    }
    Sail sail = null;
    SailRepository sailRepo = null;
    SailRepositoryConnection sailRepoConn = null;
    try {
        // Get a Sail object that is connected to the Rya instance.
        final AccumuloRdfConfiguration ryaConf = getAccumuloConnectionDetails().buildAccumuloRdfConfiguration(ryaInstanceName);
        sail = RyaSailFactory.getInstance(ryaConf);
        sailRepo = new SailRepository(sail);
        sailRepoConn = sailRepo.getConnection();
        // Execute the query.
        final long start = System.currentTimeMillis();
        final TupleQuery tupleQuery = sailRepoConn.prepareTupleQuery(QueryLanguage.SPARQL, sparqlQuery);
        final ByteArrayOutputStream baos = new ByteArrayOutputStream();
        final CountingSPARQLResultsCSVWriter handler = new CountingSPARQLResultsCSVWriter(baos);
        tupleQuery.evaluate(handler);
        final StringBuilder sb = new StringBuilder();
        final String newline = "\n";
        sb.append("Query Result:").append(newline);
        sb.append(new String(baos.toByteArray(), StandardCharsets.UTF_8));
        final String seconds = new DecimalFormat("0.0##").format((System.currentTimeMillis() - start) / 1000.0);
        sb.append("Retrieved ").append(handler.getCount()).append(" results in ").append(seconds).append(" seconds.");
        return sb.toString();
    } catch (final SailException | AccumuloException | AccumuloSecurityException | RyaDAOException | InferenceEngineException e) {
        throw new RyaClientException("A problem connecting to the Rya instance named '" + ryaInstanceName + "' has caused the query to fail.", e);
    } catch (final MalformedQueryException e) {
        throw new RyaClientException("There was a problem parsing the supplied query.", e);
    } catch (final QueryEvaluationException | TupleQueryResultHandlerException e) {
        throw new RyaClientException("There was a problem evaluating the supplied query.", e);
    } catch (final RepositoryException e) {
        throw new RyaClientException("There was a problem executing the query against the Rya instance named " + ryaInstanceName + ".", e);
    } finally {
        // Shut it all down.
        if (sailRepoConn != null) {
            try {
                sailRepoConn.close();
            } catch (final RepositoryException e) {
                log.warn("Couldn't close the SailRepoConnection that is attached to the Rya instance.", e);
            }
        }
        if (sailRepo != null) {
            try {
                sailRepo.shutDown();
            } catch (final RepositoryException e) {
                log.warn("Couldn't shut down the SailRepository that is attached to the Rya instance.", e);
            }
        }
        if (sail != null) {
            try {
                sail.shutDown();
            } catch (final SailException e) {
                log.warn("Couldn't shut down the Sail that is attached to the Rya instance.", e);
            }
        }
    }
}
Also used: AccumuloException(org.apache.accumulo.core.client.AccumuloException), RyaClientException(org.apache.rya.api.client.RyaClientException), TupleQueryResultHandlerException(org.openrdf.query.TupleQueryResultHandlerException), SailRepository(org.openrdf.repository.sail.SailRepository), DecimalFormat(java.text.DecimalFormat), TupleQuery(org.openrdf.query.TupleQuery), InferenceEngineException(org.apache.rya.rdftriplestore.inference.InferenceEngineException), RepositoryException(org.openrdf.repository.RepositoryException), InstanceDoesNotExistException(org.apache.rya.api.client.InstanceDoesNotExistException), ByteArrayOutputStream(java.io.ByteArrayOutputStream), SailException(org.openrdf.sail.SailException), SailRepositoryConnection(org.openrdf.repository.sail.SailRepositoryConnection), AccumuloRdfConfiguration(org.apache.rya.accumulo.AccumuloRdfConfiguration), QueryEvaluationException(org.openrdf.query.QueryEvaluationException), Sail(org.openrdf.sail.Sail), RyaDAOException(org.apache.rya.api.persist.RyaDAOException), MalformedQueryException(org.openrdf.query.MalformedQueryException), AccumuloSecurityException(org.apache.accumulo.core.client.AccumuloSecurityException)
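CountingSPARQLResultsCSVWriter is Rya's own handler that serializes each solution as CSV while keeping a running count. A minimal sketch of the same idea, assuming it can be built on Sesame's SPARQLResultsCSVWriter (org.openrdf.query.resultio.text.csv), might look like this; the class name and details are illustrative, not the project's actual implementation.

import java.io.OutputStream;

import org.openrdf.query.BindingSet;
import org.openrdf.query.TupleQueryResultHandlerException;
import org.openrdf.query.resultio.text.csv.SPARQLResultsCSVWriter;

// Sketch: write each solution as CSV and count how many solutions were handled.
public class CountingCsvWriter extends SPARQLResultsCSVWriter {

    private int count = 0;

    public CountingCsvWriter(final OutputStream out) {
        super(out);
    }

    @Override
    public void handleSolution(final BindingSet bindingSet) throws TupleQueryResultHandlerException {
        // Delegate the CSV serialization, then count the solution.
        super.handleSolution(bindingSet);
        count++;
    }

    public int getCount() {
        return count;
    }
}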

Example 9 with TupleQuery

Use of org.openrdf.query.TupleQuery in project incubator-rya by apache.

The class PCJOptionalTestIT, method testEvaluateSingeIndexExactMatch:

@Test
public void testEvaluateSingeIndexExactMatch() throws TupleQueryResultHandlerException, QueryEvaluationException, MalformedQueryException, RepositoryException, AccumuloException, AccumuloSecurityException, TableExistsException, RyaDAOException, SailException, TableNotFoundException, PcjException, InferenceEngineException {
    final String indexSparqlString = "" + "SELECT ?e ?c ?l ?o" + "{" + "  ?e a ?c . " + "  ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l " + "  OPTIONAL{?e <uri:talksTo> ?o } . " + "}";
    PcjIntegrationTestingUtil.createAndPopulatePcj(conn, accCon, tablePrefix + "INDEX_1", indexSparqlString, new String[] { "e", "c", "l", "o" }, Optional.<PcjVarOrderFactory>absent());
    final String queryString = "" + "SELECT ?e ?c ?l ?o " + "{" + "  ?e a ?c . " + "  ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l . " + "  OPTIONAL {?e <uri:talksTo> ?o } . " + "}";
    final CountingResultHandler crh = new CountingResultHandler();
    PcjIntegrationTestingUtil.deleteCoreRyaTables(accCon, tablePrefix);
    PcjIntegrationTestingUtil.closeAndShutdown(conn, repo);
    final TupleQuery tupQuery = pcjConn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
    tupQuery.evaluate(crh);
    Assert.assertEquals(3, crh.getCount());
}
Also used: CountingResultHandler(org.apache.rya.indexing.external.PrecompJoinOptimizerIT.CountingResultHandler), TupleQuery(org.openrdf.query.TupleQuery), Test(org.junit.Test)

Example 10 with TupleQuery

Use of org.openrdf.query.TupleQuery in project incubator-rya by apache.

The class PcjIntegrationTestingUtil, method populatePcj:

/**
 * Scan Rya for results that solve the PCJ's query and store them in the PCJ
 * table.
 * <p>
 * This method assumes the PCJ table has already been created.
 *
 * @param pcj - The MongoPcjDocuments instance that stores the PCJ's results. (not null)
 * @param pcjTableName - The name of the PCJ table that will receive the results. (not null)
 * @param ryaConn - A connection to the Rya store that will be queried to find results. (not null)
 * @throws PcjException
 *             If results could not be written to the PCJ table, the PCJ
 *             table does not exist, or the query that is being executed was
 *             malformed.
 */
public static void populatePcj(final MongoPcjDocuments pcj, final String pcjTableName, final RepositoryConnection ryaConn) throws PcjException {
    checkNotNull(pcj);
    checkNotNull(pcjTableName);
    checkNotNull(ryaConn);
    try {
        // Fetch the query that needs to be executed from the PCJ table.
        final PcjMetadata pcjMetadata = pcj.getPcjMetadata(pcjTableName);
        final String sparql = pcjMetadata.getSparql();
        // Query Rya for results to the SPARQL query.
        final TupleQuery query = ryaConn.prepareTupleQuery(QueryLanguage.SPARQL, sparql);
        final TupleQueryResult results = query.evaluate();
        // Load batches of 1000 of them at a time into the PCJ table
        final Set<BindingSet> batch = new HashSet<>(1000);
        while (results.hasNext()) {
            batch.add(results.next());
            if (batch.size() == 1000) {
                writeResults(pcj, pcjTableName, batch);
                batch.clear();
            }
        }
        if (!batch.isEmpty()) {
            writeResults(pcj, pcjTableName, batch);
        }
    } catch (RepositoryException | MalformedQueryException | QueryEvaluationException e) {
        throw new PcjException("Could not populate a PCJ table with Rya results for the table named: " + pcjTableName, e);
    }
}
Also used: VisibilityBindingSet(org.apache.rya.api.model.VisibilityBindingSet), BindingSet(org.openrdf.query.BindingSet), QueryEvaluationException(org.openrdf.query.QueryEvaluationException), PcjException(org.apache.rya.indexing.pcj.storage.PcjException), MalformedQueryException(org.openrdf.query.MalformedQueryException), PcjMetadata(org.apache.rya.indexing.pcj.storage.PcjMetadata), TupleQuery(org.openrdf.query.TupleQuery), RepositoryException(org.openrdf.repository.RepositoryException), TupleQueryResult(org.openrdf.query.TupleQueryResult), HashSet(java.util.HashSet)

Aggregations

TupleQuery (org.openrdf.query.TupleQuery): 86
Update (org.openrdf.query.Update): 33
RepositoryConnection (org.openrdf.repository.RepositoryConnection): 27
URI (org.openrdf.model.URI): 13
BindingSet (org.openrdf.query.BindingSet): 12
MalformedQueryException (org.openrdf.query.MalformedQueryException): 11
TupleQueryResult (org.openrdf.query.TupleQueryResult): 11
QueryEvaluationException (org.openrdf.query.QueryEvaluationException): 10
RepositoryException (org.openrdf.repository.RepositoryException): 10
SailRepository (org.openrdf.repository.sail.SailRepository): 10
HashSet (java.util.HashSet): 8
Literal (org.openrdf.model.Literal): 8
TupleQueryResultHandlerException (org.openrdf.query.TupleQueryResultHandlerException): 8
SailRepositoryConnection (org.openrdf.repository.sail.SailRepositoryConnection): 8
Sail (org.openrdf.sail.Sail): 7
StatementImpl (org.openrdf.model.impl.StatementImpl): 6
VisibilityBindingSet (org.apache.rya.api.model.VisibilityBindingSet): 5
PcjMetadata (org.apache.rya.indexing.pcj.storage.PcjMetadata): 5
Test (org.junit.Test): 5
ByteArrayOutputStream (java.io.ByteArrayOutputStream): 3