Example 76 with TupleQuery

use of org.openrdf.query.TupleQuery in project incubator-rya by apache.

the class RdfController method performQuery.

private void performQuery(final String query, final RepositoryConnection conn, final String auth, final String infer, final String nullout, final TupleQueryResultHandler handler) throws RepositoryException, MalformedQueryException, QueryEvaluationException, TupleQueryResultHandlerException {
    final TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
    if (auth != null && auth.length() > 0) {
        tupleQuery.setBinding(RdfCloudTripleStoreConfiguration.CONF_QUERY_AUTH, VALUE_FACTORY.createLiteral(auth));
    }
    if (infer != null && infer.length() > 0) {
        tupleQuery.setBinding(RdfCloudTripleStoreConfiguration.CONF_INFER, VALUE_FACTORY.createLiteral(Boolean.parseBoolean(infer)));
    }
    if (nullout != null && nullout.length() > 0) {
        // output nothing, but still run query
        tupleQuery.evaluate(new TupleQueryResultHandler() {

            @Override
            public void startQueryResult(final List<String> strings) throws TupleQueryResultHandlerException {
            }

            @Override
            public void endQueryResult() throws TupleQueryResultHandlerException {
            }

            @Override
            public void handleSolution(final BindingSet bindings) throws TupleQueryResultHandlerException {
            }

            @Override
            public void handleBoolean(final boolean arg0) throws QueryResultHandlerException {
            }

            @Override
            public void handleLinks(final List<String> arg0) throws QueryResultHandlerException {
            }
        });
    } else {
        final CountingTupleQueryResultHandlerWrapper sparqlWriter = new CountingTupleQueryResultHandlerWrapper(handler);
        final long startTime = System.currentTimeMillis();
        tupleQuery.evaluate(sparqlWriter);
        log.info(String.format("Query Time = %.3f   Result Count = %s\n", (System.currentTimeMillis() - startTime) / 1000., sparqlWriter.getCount()));
    }
}
Also used : BindingSet(org.openrdf.query.BindingSet) TupleQueryResultHandlerException(org.openrdf.query.TupleQueryResultHandlerException) ParsedTupleQuery(org.openrdf.query.parser.ParsedTupleQuery) TupleQuery(org.openrdf.query.TupleQuery) TupleQueryResultHandler(org.openrdf.query.TupleQueryResultHandler) QueryResultHandlerException(org.openrdf.query.QueryResultHandlerException)
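
For reference, the evaluate(TupleQueryResultHandler) call above pushes each solution to a handler instead of materializing a TupleQueryResult, which is how both the no-op handler and the CountingTupleQueryResultHandlerWrapper work. A minimal sketch of the same streaming pattern against a plain in-memory Sesame repository (no Rya involved; the sample triple and query are made up for illustration):

import java.util.List;
import java.util.concurrent.atomic.AtomicLong;

import org.openrdf.model.ValueFactory;
import org.openrdf.query.BindingSet;
import org.openrdf.query.QueryLanguage;
import org.openrdf.query.QueryResultHandlerException;
import org.openrdf.query.TupleQuery;
import org.openrdf.query.TupleQueryResultHandler;
import org.openrdf.query.TupleQueryResultHandlerException;
import org.openrdf.repository.Repository;
import org.openrdf.repository.RepositoryConnection;
import org.openrdf.repository.sail.SailRepository;
import org.openrdf.sail.memory.MemoryStore;

public class StreamingCountExample {
    public static void main(final String[] args) throws Exception {
        // In-memory repository standing in for a Rya-backed one.
        final Repository repo = new SailRepository(new MemoryStore());
        repo.initialize();
        final RepositoryConnection conn = repo.getConnection();
        final ValueFactory vf = repo.getValueFactory();
        conn.add(vf.createURI("urn:alice"), vf.createURI("urn:likes"), vf.createURI("urn:icecream"));

        final AtomicLong count = new AtomicLong();
        final TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL,
                "SELECT ?s WHERE { ?s <urn:likes> <urn:icecream> }");

        // Stream solutions to a handler; only handleSolution does any work here.
        tupleQuery.evaluate(new TupleQueryResultHandler() {
            @Override
            public void startQueryResult(final List<String> bindingNames) throws TupleQueryResultHandlerException {
            }

            @Override
            public void endQueryResult() throws TupleQueryResultHandlerException {
            }

            @Override
            public void handleSolution(final BindingSet bindingSet) throws TupleQueryResultHandlerException {
                count.incrementAndGet();
            }

            @Override
            public void handleBoolean(final boolean value) throws QueryResultHandlerException {
            }

            @Override
            public void handleLinks(final List<String> linkUrls) throws QueryResultHandlerException {
            }
        });
        System.out.println("Result Count = " + count.get());

        conn.close();
        repo.shutDown();
    }
}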

Example 77 with TupleQuery

use of org.openrdf.query.TupleQuery in project incubator-rya by apache.

the class PcjIntegrationTestingUtil method populatePcj.

/**
 * Scan Rya for results that solve the PCJ's query and store them in the PCJ
 * table.
 * <p>
 * This method assumes the PCJ table has already been created.
 *
 * @param accumuloConn
 *            - A connection to the Accumulo that hosts the PCJ table. (not
 *            null)
 * @param pcjTableName
 *            - The name of the PCJ table that will receive the results.
 *            (not null)
 * @param ryaConn
 *            - A connection to the Rya store that will be queried to find
 *            results. (not null)
 * @throws PcjException
 *             If results could not be written to the PCJ table, the PCJ
 *             table does not exist, or the query being executed is
 *             malformed.
 */
public static void populatePcj(final Connector accumuloConn, final String pcjTableName, final RepositoryConnection ryaConn) throws PcjException {
    checkNotNull(accumuloConn);
    checkNotNull(pcjTableName);
    checkNotNull(ryaConn);
    try {
        // Fetch the query that needs to be executed from the PCJ table.
        final PcjMetadata pcjMetadata = new PcjTables().getPcjMetadata(accumuloConn, pcjTableName);
        final String sparql = pcjMetadata.getSparql();
        // Query Rya for results to the SPARQL query.
        final TupleQuery query = ryaConn.prepareTupleQuery(QueryLanguage.SPARQL, sparql);
        final TupleQueryResult results = query.evaluate();
        // Load batches of 1000 of them at a time into the PCJ table
        final Set<BindingSet> batch = new HashSet<>(1000);
        while (results.hasNext()) {
            batch.add(results.next());
            if (batch.size() == 1000) {
                addResults(accumuloConn, pcjTableName, batch);
                batch.clear();
            }
        }
        if (!batch.isEmpty()) {
            addResults(accumuloConn, pcjTableName, batch);
        }
    } catch (RepositoryException | MalformedQueryException | QueryEvaluationException e) {
        throw new PcjException("Could not populate a PCJ table with Rya results for the table named: " + pcjTableName, e);
    }
}
Also used : VisibilityBindingSet(org.apache.rya.api.model.VisibilityBindingSet) BindingSet(org.openrdf.query.BindingSet) PcjException(org.apache.rya.indexing.pcj.storage.PcjException) TupleQuery(org.openrdf.query.TupleQuery) RepositoryException(org.openrdf.repository.RepositoryException) QueryEvaluationException(org.openrdf.query.QueryEvaluationException) MalformedQueryException(org.openrdf.query.MalformedQueryException) PcjMetadata(org.apache.rya.indexing.pcj.storage.PcjMetadata) PcjTables(org.apache.rya.indexing.pcj.storage.accumulo.PcjTables) TupleQueryResult(org.openrdf.query.TupleQueryResult) HashSet(java.util.HashSet)
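
The loop above pulls results with evaluate() and writes them to the PCJ table in batches of 1000. The batching skeleton is independent of Accumulo; below is a minimal sketch of the same pattern against an in-memory repository, where flushBatch is a hypothetical stand-in for addResults:

import java.util.HashSet;
import java.util.Set;

import org.openrdf.model.ValueFactory;
import org.openrdf.query.BindingSet;
import org.openrdf.query.QueryLanguage;
import org.openrdf.query.TupleQuery;
import org.openrdf.query.TupleQueryResult;
import org.openrdf.repository.Repository;
import org.openrdf.repository.RepositoryConnection;
import org.openrdf.repository.sail.SailRepository;
import org.openrdf.sail.memory.MemoryStore;

public class BatchedResultsExample {
    private static final int BATCH_SIZE = 1000;

    public static void main(final String[] args) throws Exception {
        // In-memory repository standing in for the Rya connection queried by populatePcj.
        final Repository repo = new SailRepository(new MemoryStore());
        repo.initialize();
        final RepositoryConnection conn = repo.getConnection();
        final ValueFactory vf = repo.getValueFactory();
        conn.add(vf.createURI("urn:alice"), vf.createURI("urn:likes"), vf.createURI("urn:icecream"));

        final TupleQuery query = conn.prepareTupleQuery(QueryLanguage.SPARQL,
                "SELECT ?s ?o WHERE { ?s ?p ?o }");
        final TupleQueryResult results = query.evaluate();
        try {
            // Accumulate binding sets and flush every BATCH_SIZE results.
            final Set<BindingSet> batch = new HashSet<>(BATCH_SIZE);
            while (results.hasNext()) {
                batch.add(results.next());
                if (batch.size() == BATCH_SIZE) {
                    flushBatch(batch);   // stand-in for addResults(accumuloConn, pcjTableName, batch)
                    batch.clear();
                }
            }
            if (!batch.isEmpty()) {
                flushBatch(batch);
            }
        } finally {
            results.close();
            conn.close();
            repo.shutDown();
        }
    }

    // Hypothetical sink; the real code writes the batch to the PCJ's Accumulo table.
    private static void flushBatch(final Set<BindingSet> batch) {
        System.out.println("Flushing " + batch.size() + " binding sets");
    }
}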

Example 78 with TupleQuery

use of org.openrdf.query.TupleQuery in project incubator-rya by apache.

the class MongoPCJIndexIT method sparqlQuery_Test_complex.

@Test
public void sparqlQuery_Test_complex() throws Exception {
    // Setup a Rya Client.
    final MongoConnectionDetails connectionDetails = getConnectionDetails();
    final RyaClient ryaClient = MongoRyaClientFactory.build(connectionDetails, getMongoClient());
    final String pcjQuery = "SELECT ?name WHERE {" + " ?name <urn:likes> <urn:icecream> ." + " ?name <urn:hasEyeColor> <urn:blue> ." + " }";
    final String testQuery = "SELECT ?name WHERE {" + " ?name <urn:hasHairColor> <urn:brown> ." + " ?name <urn:likes> <urn:icecream> ." + " ?name <urn:hasEyeColor> <urn:blue> ." + " }";
    // Install an instance of Rya and load statements.
    conf.setBoolean(ConfigUtils.USE_PCJ, true);
    conf.setBoolean(ConfigUtils.USE_OPTIMAL_PCJ, true);
    conf.setBoolean(ConfigUtils.DISPLAY_QUERY_PLAN, true);
    ryaClient.getInstall().install(conf.getRyaInstanceName(), InstallConfiguration.builder().setEnablePcjIndex(true).build());
    ryaClient.getLoadStatements().loadStatements(conf.getRyaInstanceName(), getStatements());
    final String pcjId = ryaClient.getCreatePCJ().createPCJ(conf.getRyaInstanceName(), pcjQuery);
    ryaClient.getBatchUpdatePCJ().batchUpdate(conf.getRyaInstanceName(), pcjId);
    System.out.println("Triples: " + getMongoClient().getDatabase(conf.getRyaInstanceName()).getCollection(conf.getTriplesCollectionName()).count());
    System.out.println("PCJS: " + getMongoClient().getDatabase(conf.getRyaInstanceName()).getCollection("pcjs").count());
    // run the query.  since the triples collection is gone, if the results match, they came from the PCJ index.
    final Sail sail = RyaSailFactory.getInstance(conf);
    SailRepositoryConnection conn = new SailRepository(sail).getConnection();
    conn.begin();
    final TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, testQuery);
    tupleQuery.setBinding(RdfCloudTripleStoreConfiguration.CONF_QUERYPLAN_FLAG, RdfCloudTripleStoreConstants.VALUE_FACTORY.createLiteral(true));
    final TupleQueryResult rez = tupleQuery.evaluate();
    final Set<BindingSet> results = new HashSet<>();
    while (rez.hasNext()) {
        final BindingSet bs = rez.next();
        results.add(bs);
    }
    // Verify the correct results were loaded into the PCJ table.
    final Set<BindingSet> expectedResults = new HashSet<>();
    MapBindingSet bs = new MapBindingSet();
    bs.addBinding("name", VF.createURI("urn:David"));
    expectedResults.add(bs);
    bs = new MapBindingSet();
    bs.addBinding("name", VF.createURI("urn:Eve"));
    expectedResults.add(bs);
    bs = new MapBindingSet();
    bs.addBinding("name", VF.createURI("urn:Frank"));
    expectedResults.add(bs);
    assertEquals(3, results.size());
    assertEquals(expectedResults, results);
}
Also used : MongoConnectionDetails(org.apache.rya.api.client.mongo.MongoConnectionDetails) MapBindingSet(org.openrdf.query.impl.MapBindingSet) BindingSet(org.openrdf.query.BindingSet) SailRepository(org.openrdf.repository.sail.SailRepository) Sail(org.openrdf.sail.Sail) TupleQuery(org.openrdf.query.TupleQuery) RyaClient(org.apache.rya.api.client.RyaClient) SailRepositoryConnection(org.openrdf.repository.sail.SailRepositoryConnection) TupleQueryResult(org.openrdf.query.TupleQueryResult) HashSet(java.util.HashSet) Test(org.junit.Test)
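
Example 78 uses setBinding to hand a Rya-specific configuration flag (CONF_QUERYPLAN_FLAG) to the query engine. In stock Sesame usage, the same method pre-binds a query variable before evaluation; a minimal sketch with made-up data:

import org.openrdf.model.ValueFactory;
import org.openrdf.query.BindingSet;
import org.openrdf.query.QueryLanguage;
import org.openrdf.query.TupleQuery;
import org.openrdf.query.TupleQueryResult;
import org.openrdf.repository.Repository;
import org.openrdf.repository.RepositoryConnection;
import org.openrdf.repository.sail.SailRepository;
import org.openrdf.sail.memory.MemoryStore;

public class PreBoundQueryExample {
    public static void main(final String[] args) throws Exception {
        final Repository repo = new SailRepository(new MemoryStore());
        repo.initialize();
        final RepositoryConnection conn = repo.getConnection();
        final ValueFactory vf = repo.getValueFactory();
        conn.add(vf.createURI("urn:Eve"), vf.createURI("urn:hasEyeColor"), vf.createURI("urn:blue"));
        conn.add(vf.createURI("urn:Frank"), vf.createURI("urn:hasEyeColor"), vf.createURI("urn:green"));

        final TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL,
                "SELECT ?name WHERE { ?name <urn:hasEyeColor> ?color }");
        // Pre-bind ?color so only blue-eyed subjects are returned.
        tupleQuery.setBinding("color", vf.createURI("urn:blue"));

        final TupleQueryResult rez = tupleQuery.evaluate();
        while (rez.hasNext()) {
            final BindingSet bs = rez.next();
            System.out.println(bs.getValue("name"));   // prints urn:Eve
        }
        rez.close();
        conn.close();
        repo.shutDown();
    }
}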

Example 79 with TupleQuery

use of org.openrdf.query.TupleQuery in project incubator-rya by apache.

the class InferenceExamples method testPropertyChainInference.

public static void testPropertyChainInference(final SailRepositoryConnection conn, final Sail sail) throws MalformedQueryException, RepositoryException, UpdateExecutionException, QueryEvaluationException, TupleQueryResultHandlerException, InferenceEngineException {
    // Add data
    String query = "INSERT DATA\n"
            + "{ GRAPH <http://updated/test> {\n" + "  <urn:paulGreatGrandfather> <urn:father> <urn:paulGrandfather> . " + "  <urn:paulGrandfather> <urn:father> <urn:paulFather> . " + " <urn:paulFather> <urn:father> <urn:paul> . " + " <urn:paul> <urn:father> <urn:paulSon> .  }}";
    log.info("Performing Query");
    Update update = conn.prepareUpdate(QueryLanguage.SPARQL, query);
    update.execute();
    query = "select ?p { GRAPH <http://updated/test> {<urn:paulGreatGrandfather> <urn:father>/<urn:father> ?p}}";
    CountingResultHandler resultHandler = new CountingResultHandler();
    TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
    tupleQuery.evaluate(resultHandler);
    log.info("Result count : " + resultHandler.getCount());
    // try adding a property chain and querying for it
    query = "INSERT DATA\n"
            + "{ GRAPH <http://updated/test> {\n" + "  <urn:greatGrandfather> owl:propertyChainAxiom <urn:1234>  . " + " <urn:1234> <http://www.w3.org/2000/10/swap/list#length> 3 . " + " <urn:1234> <http://www.w3.org/2000/10/swap/list#index> (0 <urn:father>) . " + " <urn:1234> <http://www.w3.org/2000/10/swap/list#index> (1 <urn:father>) . " + " <urn:1234> <http://www.w3.org/2000/10/swap/list#index> (2 <urn:father>) .  }}";
    update = conn.prepareUpdate(QueryLanguage.SPARQL, query);
    update.execute();
    query = "INSERT DATA\n"
            + "{ GRAPH <http://updated/test> {\n" + "  <urn:grandfather> owl:propertyChainAxiom <urn:12344>  . " + " <urn:12344> <http://www.w3.org/2000/10/swap/list#length> 2 . " + " <urn:12344> <http://www.w3.org/2000/10/swap/list#index> (0 <urn:father>) . " + " <urn:12344> <http://www.w3.org/2000/10/swap/list#index> (1 <urn:father>) .  }}";
    update = conn.prepareUpdate(QueryLanguage.SPARQL, query);
    update.execute();
    ((RdfCloudTripleStore) sail).getInferenceEngine().refreshGraph();
    resultHandler.resetCount();
    query = "select ?p { GRAPH <http://updated/test> {<urn:paulGreatGrandfather> <urn:greatGrandfather> ?p}}";
    resultHandler = new CountingResultHandler();
    tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
    tupleQuery.evaluate(resultHandler);
    log.info("Result count : " + resultHandler.getCount());
    resultHandler.resetCount();
    query = "select ?s ?p { GRAPH <http://updated/test> {?s <urn:grandfather> ?p}}";
    resultHandler = new CountingResultHandler();
    tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
    tupleQuery.evaluate(resultHandler);
    log.info("Result count : " + resultHandler.getCount());
}
Also used : TupleQuery(org.openrdf.query.TupleQuery) Update(org.openrdf.query.Update)
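
The CountingResultHandler used above (and in Example 80) is not shown in this listing. Below is a reconstruction of what such a handler looks like, assuming it simply tallies solutions; the real class lives in the Rya examples module and may differ in detail:

import java.util.List;

import org.openrdf.query.BindingSet;
import org.openrdf.query.QueryResultHandlerException;
import org.openrdf.query.TupleQueryResultHandler;
import org.openrdf.query.TupleQueryResultHandlerException;

// Reconstruction of a counting handler; not the project's exact source.
public class CountingResultHandler implements TupleQueryResultHandler {

    private int count = 0;

    public int getCount() {
        return count;
    }

    public void resetCount() {
        count = 0;
    }

    @Override
    public void startQueryResult(final List<String> bindingNames) throws TupleQueryResultHandlerException {
    }

    @Override
    public void handleSolution(final BindingSet bindingSet) throws TupleQueryResultHandlerException {
        count++;
    }

    @Override
    public void endQueryResult() throws TupleQueryResultHandlerException {
    }

    @Override
    public void handleBoolean(final boolean value) throws QueryResultHandlerException {
    }

    @Override
    public void handleLinks(final List<String> linkUrls) throws QueryResultHandlerException {
    }
}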

Example 80 with TupleQuery

use of org.openrdf.query.TupleQuery in project incubator-rya by apache.

the class InferenceExamples method testPropertyChainInferenceAltRepresentation.

public static void testPropertyChainInferenceAltRepresentation(final SailRepositoryConnection conn, final Sail sail) throws MalformedQueryException, RepositoryException, UpdateExecutionException, QueryEvaluationException, TupleQueryResultHandlerException, InferenceEngineException {
    // Add data
    String query = "INSERT DATA\n"
            + "{ GRAPH <http://updated/test> {\n" + "  <urn:jenGreatGranMother> <urn:Motherof> <urn:jenGranMother> . " + "  <urn:jenGranMother> <urn:isChildOf> <urn:jenGreatGranMother> . " + "  <urn:jenGranMother> <urn:Motherof> <urn:jenMother> . " + "  <urn:jenMother> <urn:isChildOf> <urn:jenGranMother> . " + " <urn:jenMother> <urn:Motherof> <urn:jen> . " + "  <urn:jen> <urn:isChildOf> <urn:jenMother> . " + " <urn:jen> <urn:Motherof> <urn:jenDaughter> .  }}";
    log.info("Performing Query");
    Update update = conn.prepareUpdate(QueryLanguage.SPARQL, query);
    update.execute();
    query = "select ?p { GRAPH <http://updated/test> {?s <urn:Motherof>/<urn:Motherof> ?p}}";
    CountingResultHandler resultHandler = new CountingResultHandler();
    TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
    tupleQuery.evaluate(resultHandler);
    log.info("Result count : " + resultHandler.getCount());
    // try adding a property chain and querying for it
    query = "INSERT DATA\n"
            + "{ GRAPH <http://updated/test> {\n" + "  <urn:greatMother> owl:propertyChainAxiom <urn:12342>  . " + " <urn:12342> <http://www.w3.org/1999/02/22-rdf-syntax-ns#first> _:node1atjakcvbx15023 . " + " _:node1atjakcvbx15023 <http://www.w3.org/2002/07/owl#inverseOf> <urn:isChildOf> . " + " <urn:12342> <http://www.w3.org/1999/02/22-rdf-syntax-ns#rest> _:node1atjakcvbx15123 . " + " _:node1atjakcvbx15123 <http://www.w3.org/1999/02/22-rdf-syntax-ns#rest> <http://www.w3.org/1999/02/22-rdf-syntax-ns#nil> . " + " _:node1atjakcvbx15123 <http://www.w3.org/1999/02/22-rdf-syntax-ns#first> <urn:MotherOf> .  }}";
    update = conn.prepareUpdate(QueryLanguage.SPARQL, query);
    update.execute();
    ((RdfCloudTripleStore) sail).getInferenceEngine().refreshGraph();
    resultHandler.resetCount();
    query = "select ?x { GRAPH <http://updated/test> {<urn:jenGreatGranMother> <urn:greatMother> ?x}}";
    resultHandler = new CountingResultHandler();
    tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
    tupleQuery.evaluate(resultHandler);
    log.info("Result count : " + resultHandler.getCount());
}
Also used : TupleQuery(org.openrdf.query.TupleQuery) Update(org.openrdf.query.Update)
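
Both inference examples share the same skeleton: load triples with a SPARQL Update, then count matches with a TupleQuery. Stripped of Rya's InferenceEngine, that skeleton runs against a plain in-memory repository; the property-path query below needs no inference, unlike the owl:propertyChainAxiom rewriting shown above. A minimal sketch:

import org.openrdf.query.QueryLanguage;
import org.openrdf.query.TupleQuery;
import org.openrdf.query.TupleQueryResult;
import org.openrdf.query.Update;
import org.openrdf.repository.Repository;
import org.openrdf.repository.RepositoryConnection;
import org.openrdf.repository.sail.SailRepository;
import org.openrdf.sail.memory.MemoryStore;

public class UpdateThenQueryExample {
    public static void main(final String[] args) throws Exception {
        final Repository repo = new SailRepository(new MemoryStore());
        repo.initialize();
        final RepositoryConnection conn = repo.getConnection();

        // Load a small family tree with a SPARQL Update, as the examples above do.
        final Update update = conn.prepareUpdate(QueryLanguage.SPARQL,
                "INSERT DATA { GRAPH <http://updated/test> {"
                + " <urn:paulGrandfather> <urn:father> <urn:paulFather> ."
                + " <urn:paulFather> <urn:father> <urn:paul> . } }");
        update.execute();

        // A two-step property path; this part works without an inference engine.
        final TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL,
                "SELECT ?p { GRAPH <http://updated/test> { <urn:paulGrandfather> <urn:father>/<urn:father> ?p } }");
        final TupleQueryResult result = tupleQuery.evaluate();
        int count = 0;
        while (result.hasNext()) {
            result.next();
            count++;
        }
        result.close();
        System.out.println("Result count : " + count);   // 1 (urn:paul)

        conn.close();
        repo.shutDown();
    }
}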

Aggregations

TupleQuery (org.openrdf.query.TupleQuery)86 Update (org.openrdf.query.Update)33 RepositoryConnection (org.openrdf.repository.RepositoryConnection)27 URI (org.openrdf.model.URI)13 BindingSet (org.openrdf.query.BindingSet)12 MalformedQueryException (org.openrdf.query.MalformedQueryException)11 TupleQueryResult (org.openrdf.query.TupleQueryResult)11 QueryEvaluationException (org.openrdf.query.QueryEvaluationException)10 RepositoryException (org.openrdf.repository.RepositoryException)10 SailRepository (org.openrdf.repository.sail.SailRepository)10 HashSet (java.util.HashSet)8 Literal (org.openrdf.model.Literal)8 TupleQueryResultHandlerException (org.openrdf.query.TupleQueryResultHandlerException)8 SailRepositoryConnection (org.openrdf.repository.sail.SailRepositoryConnection)8 Sail (org.openrdf.sail.Sail)7 StatementImpl (org.openrdf.model.impl.StatementImpl)6 VisibilityBindingSet (org.apache.rya.api.model.VisibilityBindingSet)5 PcjMetadata (org.apache.rya.indexing.pcj.storage.PcjMetadata)5 Test (org.junit.Test)5 ByteArrayOutputStream (java.io.ByteArrayOutputStream)3