
Example 16 with PCJStorageException

Use of org.apache.rya.indexing.pcj.storage.PrecomputedJoinStorage.PCJStorageException in project incubator-rya by apache.

The class MongoPcjIndexSetProvider, method getIndices.

@Override
protected List<ExternalTupleSet> getIndices() throws PcjIndexSetException {
    try {
        final StatefulMongoDBRdfConfiguration mongoConf = (StatefulMongoDBRdfConfiguration) conf;
        final MongoClient client = mongoConf.getMongoClient();
        final MongoPcjDocuments pcjDocs = new MongoPcjDocuments(client, mongoConf.getRyaInstanceName());
        List<String> documents = mongoConf.getPcjTables();
        // This map associates each PCJ document name with its PCJ SPARQL query.
        final Map<String, String> indexDocuments = Maps.newLinkedHashMap();
        try (final PrecomputedJoinStorage storage = new MongoPcjStorage(client, mongoConf.getRyaInstanceName())) {
            final boolean docsProvided = documents != null && !documents.isEmpty();
            if (docsProvided) {
                // PCJ tables were provided in the configuration, so associate each table name with its SPARQL query.
                for (final String doc : documents) {
                    indexDocuments.put(doc, storage.getPcjMetadata(doc).getSparql());
                }
            } else if (hasRyaDetails()) {
                // If this is a newer install of Rya, and it has PCJ Details, then
                // use those.
                final List<String> ids = storage.listPcjs();
                for (final String pcjId : ids) {
                    indexDocuments.put(pcjId, storage.getPcjMetadata(pcjId).getSparql());
                }
            } else {
                // Otherwise figure it out by getting document IDs.
                documents = pcjDocs.listPcjDocuments();
                for (final String pcjId : documents) {
                    if (pcjId.startsWith("INDEX")) {
                        indexDocuments.put(pcjId, pcjDocs.getPcjMetadata(pcjId).getSparql());
                    }
                }
            }
        }
        final List<ExternalTupleSet> index = Lists.newArrayList();
        if (indexDocuments.isEmpty()) {
            log.info("No Index found");
        } else {
            for (final String pcjID : indexDocuments.keySet()) {
                final String indexSparqlString = indexDocuments.get(pcjID);
                index.add(new MongoPcjQueryNode(indexSparqlString, pcjID, pcjDocs));
            }
        }
        return index;
    } catch (final PCJStorageException | MalformedQueryException e) {
        throw new PcjIndexSetException("Failed to get indices for this PCJ index.", e);
    }
}
Also used : StatefulMongoDBRdfConfiguration(org.apache.rya.mongodb.StatefulMongoDBRdfConfiguration) MongoPcjDocuments(org.apache.rya.indexing.pcj.storage.mongo.MongoPcjDocuments) ExternalTupleSet(org.apache.rya.indexing.external.tupleSet.ExternalTupleSet) MongoPcjStorage(org.apache.rya.indexing.pcj.storage.mongo.MongoPcjStorage) MongoClient(com.mongodb.MongoClient) PrecomputedJoinStorage(org.apache.rya.indexing.pcj.storage.PrecomputedJoinStorage) MalformedQueryException(org.openrdf.query.MalformedQueryException) List(java.util.List) PCJStorageException(org.apache.rya.indexing.pcj.storage.PrecomputedJoinStorage.PCJStorageException)
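
A minimal sketch of the lookup pattern used in the method above: open the PCJ storage, list the PCJ ids, and pair each id with the SPARQL query that defines it. The class and method names here are hypothetical, and the MongoClient and Rya instance name are assumed to be supplied by the caller; only calls that appear in the example (MongoPcjStorage, listPcjs, getPcjMetadata) are used.

import java.util.LinkedHashMap;
import java.util.Map;

import org.apache.rya.indexing.pcj.storage.PrecomputedJoinStorage;
import org.apache.rya.indexing.pcj.storage.PrecomputedJoinStorage.PCJStorageException;
import org.apache.rya.indexing.pcj.storage.mongo.MongoPcjStorage;

import com.mongodb.MongoClient;

public class ListPcjSparqlSketch {
    public static Map<String, String> listPcjSparql(final MongoClient client, final String ryaInstanceName)
            throws PCJStorageException {
        final Map<String, String> idToSparql = new LinkedHashMap<>();
        // try-with-resources ensures the storage client is closed even if a lookup fails.
        try (PrecomputedJoinStorage storage = new MongoPcjStorage(client, ryaInstanceName)) {
            for (final String pcjId : storage.listPcjs()) {
                idToSparql.put(pcjId, storage.getPcjMetadata(pcjId).getSparql());
            }
        }
        return idToSparql;
    }
}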

Example 17 with PCJStorageException

Use of org.apache.rya.indexing.pcj.storage.PrecomputedJoinStorage.PCJStorageException in project incubator-rya by apache.

The class AccumuloUninstall, method uninstall.

@Override
public void uninstall(final String ryaInstanceName) throws InstanceDoesNotExistException, RyaClientException {
    requireNonNull(ryaInstanceName);
    // Ensure the Rya Instance exists.
    if (!instanceExists.exists(ryaInstanceName)) {
        throw new InstanceDoesNotExistException(String.format("There is no Rya instance named '%s'.", ryaInstanceName));
    }
    try {
        // Build the list of tables that are present within the Rya instance.
        final List<String> tables = new RyaTableNames().getTableNames(ryaInstanceName, getConnector());
        // Delete them.
        final TableOperations tableOps = getConnector().tableOperations();
        for (final String table : tables) {
            try {
                tableOps.delete(table);
            } catch (final TableNotFoundException e) {
                log.warn("Uninstall could not delete table named '" + LogUtils.clean(table) + "' because it does not exist. " + "Something else is also deleting tables.");
            }
        }
    } catch (PCJStorageException | RyaDetailsRepositoryException e) {
        throw new RyaClientException("Could not uninstall the Rya instance named '" + ryaInstanceName + "' because we could not determine which tables are associated with it.", e);
    } catch (AccumuloException | AccumuloSecurityException e) {
        throw new RyaClientException("Could not uninstall the Rya instance named '" + ryaInstanceName + "' because of a problem interacting with Accumulo..", e);
    }
}
Also used : AccumuloException(org.apache.accumulo.core.client.AccumuloException) RyaClientException(org.apache.rya.api.client.RyaClientException) InstanceDoesNotExistException(org.apache.rya.api.client.InstanceDoesNotExistException) RyaTableNames(org.apache.rya.accumulo.utils.RyaTableNames) TableNotFoundException(org.apache.accumulo.core.client.TableNotFoundException) TableOperations(org.apache.accumulo.core.client.admin.TableOperations) RyaDetailsRepositoryException(org.apache.rya.api.instance.RyaDetailsRepository.RyaDetailsRepositoryException) AccumuloSecurityException(org.apache.accumulo.core.client.AccumuloSecurityException) PCJStorageException(org.apache.rya.indexing.pcj.storage.PrecomputedJoinStorage.PCJStorageException)
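
A minimal sketch, under assumed inputs, of the tolerant delete loop shown above: attempt to drop each table and treat "already gone" as a non-fatal condition. The class and method names are hypothetical; the Accumulo Connector and the table list are assumed to be provided by the caller.

import java.util.List;

import org.apache.accumulo.core.client.AccumuloException;
import org.apache.accumulo.core.client.AccumuloSecurityException;
import org.apache.accumulo.core.client.Connector;
import org.apache.accumulo.core.client.TableNotFoundException;
import org.apache.accumulo.core.client.admin.TableOperations;

public class DropTablesSketch {
    public static void dropTables(final Connector connector, final List<String> tables)
            throws AccumuloException, AccumuloSecurityException {
        final TableOperations tableOps = connector.tableOperations();
        for (final String table : tables) {
            try {
                tableOps.delete(table);
            } catch (final TableNotFoundException e) {
                // Another process may have removed the table first; skip it rather than fail.
            }
        }
    }
}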

Example 18 with PCJStorageException

Use of org.apache.rya.indexing.pcj.storage.PrecomputedJoinStorage.PCJStorageException in project incubator-rya by apache.

The class MongoBatchUpdatePCJ, method updatePCJResults.

private void updatePCJResults(final String ryaInstanceName, final String pcjId, final MongoClient client) throws InstanceDoesNotExistException, PCJDoesNotExistException, RyaClientException {
    // Things that have to be closed before we exit.
    Sail sail = null;
    SailConnection sailConn = null;
    try (final PrecomputedJoinStorage pcjStorage = new MongoPcjStorage(client, ryaInstanceName)) {
        // Create an instance of Sail backed by the Rya instance.
        sail = connectToRya(ryaInstanceName);
        final SailRepository sailRepo = new SailRepository(sail);
        final SailRepositoryConnection sailRepoConn = sailRepo.getConnection();
        // Purge the old results from the PCJ.
        try {
            pcjStorage.purge(pcjId);
        } catch (final PCJStorageException e) {
            throw new RyaClientException("Could not batch update PCJ with ID '" + pcjId + "' because the old " + "results could not be purged from it.", e);
        }
        // Parse the PCJ's SPARQL query.
        final PcjMetadata metadata = pcjStorage.getPcjMetadata(pcjId);
        final String sparql = metadata.getSparql();
        sailConn = sail.getConnection();
        final TupleQuery tupleQuery = sailRepoConn.prepareTupleQuery(QueryLanguage.SPARQL, sparql);
        // Execute the query.
        final List<VisibilityBindingSet> batch = new ArrayList<>(1000);
        tupleQuery.evaluate(new TupleQueryResultHandlerBase() {

            @Override
            public void handleSolution(final BindingSet bindingSet) throws TupleQueryResultHandlerException {
                final VisibilityBindingSet result = new VisibilityBindingSet(bindingSet, "");
                log.warn("Visibility information on the binding set is lost during a batch update." + "  This can create data leaks.");
                batch.add(result);
                if (batch.size() == 1000) {
                    try {
                        pcjStorage.addResults(pcjId, batch);
                    } catch (final PCJStorageException e) {
                        throw new TupleQueryResultHandlerException("Failed to batch load new results into the PCJ with ID '" + pcjId + "'.", e);
                    }
                    batch.clear();
                }
            }
        });
        if (!batch.isEmpty()) {
            pcjStorage.addResults(pcjId, batch);
            batch.clear();
        }
    } catch (final MalformedQueryException | PCJStorageException | SailException | QueryEvaluationException | RepositoryException | TupleQueryResultHandlerException e) {
        throw new RyaClientException("Fail to batch load new results into the PCJ with ID '" + pcjId + "'.", e);
    } finally {
        if (sailConn != null) {
            try {
                sailConn.close();
            } catch (final SailException e) {
                log.warn(e.getMessage(), e);
            }
        }
        if (sail != null) {
            try {
                sail.shutDown();
            } catch (final SailException e) {
                log.warn(e.getMessage(), e);
            }
        }
    }
}
Also used : VisibilityBindingSet(org.apache.rya.api.model.VisibilityBindingSet) BindingSet(org.openrdf.query.BindingSet) RyaClientException(org.apache.rya.api.client.RyaClientException) VisibilityBindingSet(org.apache.rya.api.model.VisibilityBindingSet) TupleQueryResultHandlerBase(org.openrdf.query.TupleQueryResultHandlerBase) TupleQueryResultHandlerException(org.openrdf.query.TupleQueryResultHandlerException) SailRepository(org.openrdf.repository.sail.SailRepository) ArrayList(java.util.ArrayList) TupleQuery(org.openrdf.query.TupleQuery) RepositoryException(org.openrdf.repository.RepositoryException) RyaDetailsRepositoryException(org.apache.rya.api.instance.RyaDetailsRepository.RyaDetailsRepositoryException) SailException(org.openrdf.sail.SailException) SailRepositoryConnection(org.openrdf.repository.sail.SailRepositoryConnection) MongoPcjStorage(org.apache.rya.indexing.pcj.storage.mongo.MongoPcjStorage) SailConnection(org.openrdf.sail.SailConnection) QueryEvaluationException(org.openrdf.query.QueryEvaluationException) Sail(org.openrdf.sail.Sail) PrecomputedJoinStorage(org.apache.rya.indexing.pcj.storage.PrecomputedJoinStorage) MalformedQueryException(org.openrdf.query.MalformedQueryException) PcjMetadata(org.apache.rya.indexing.pcj.storage.PcjMetadata) PCJStorageException(org.apache.rya.indexing.pcj.storage.PrecomputedJoinStorage.PCJStorageException)
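
A minimal sketch of the purge-then-reload batching pattern used above: purge the old results, buffer the new ones, flush to storage every 1000 entries, and flush the remainder at the end. The class and method names are hypothetical, and the result Iterable stands in for the query evaluation done in the full method; only storage calls shown in the example (purge, addResults) are used.

import java.util.ArrayList;
import java.util.List;

import org.apache.rya.api.model.VisibilityBindingSet;
import org.apache.rya.indexing.pcj.storage.PrecomputedJoinStorage;
import org.apache.rya.indexing.pcj.storage.PrecomputedJoinStorage.PCJStorageException;

public class BatchReloadSketch {
    private static final int BATCH_SIZE = 1000;

    public static void reload(final PrecomputedJoinStorage pcjStorage, final String pcjId,
            final Iterable<VisibilityBindingSet> results) throws PCJStorageException {
        // Drop the stale results before loading the fresh ones.
        pcjStorage.purge(pcjId);

        final List<VisibilityBindingSet> batch = new ArrayList<>(BATCH_SIZE);
        for (final VisibilityBindingSet result : results) {
            batch.add(result);
            if (batch.size() == BATCH_SIZE) {
                pcjStorage.addResults(pcjId, batch);
                batch.clear();
            }
        }
        // Flush any remainder that did not fill a full batch.
        if (!batch.isEmpty()) {
            pcjStorage.addResults(pcjId, batch);
        }
    }
}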

Example 19 with PCJStorageException

Use of org.apache.rya.indexing.pcj.storage.PrecomputedJoinStorage.PCJStorageException in project incubator-rya by apache.

The class MongoCreatePCJ, method createPCJ.

@Override
public String createPCJ(final String ryaInstanceName, final String sparql, final Set<ExportStrategy> strategies) throws InstanceDoesNotExistException, RyaClientException {
    requireNonNull(ryaInstanceName);
    requireNonNull(sparql);
    // Ensure the Rya Instance exists.
    if (!instanceExists.exists(ryaInstanceName)) {
        throw new InstanceDoesNotExistException(String.format("There is no Rya instance named '%s'.", ryaInstanceName));
    }
    try (final MongoPcjStorage pcjStore = new MongoPcjStorage(mongoClient, ryaInstanceName)) {
        return pcjStore.createPcj(sparql);
    } catch (final PCJStorageException e) {
        throw new RyaClientException("Unable to create PCJ for: " + sparql, e);
    }
}
Also used : MongoPcjStorage(org.apache.rya.indexing.pcj.storage.mongo.MongoPcjStorage) RyaClientException(org.apache.rya.api.client.RyaClientException) InstanceDoesNotExistException(org.apache.rya.api.client.InstanceDoesNotExistException) PCJStorageException(org.apache.rya.indexing.pcj.storage.PrecomputedJoinStorage.PCJStorageException)
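
A minimal usage sketch for the method above, assuming a MongoClient and an existing Rya instance name are available; the class name, method name, and SPARQL query are illustrative only.

import org.apache.rya.indexing.pcj.storage.PrecomputedJoinStorage.PCJStorageException;
import org.apache.rya.indexing.pcj.storage.mongo.MongoPcjStorage;

import com.mongodb.MongoClient;

public class CreatePcjSketch {
    public static String createExamplePcj(final MongoClient mongoClient, final String ryaInstanceName)
            throws PCJStorageException {
        // Illustrative query only; any valid SPARQL SELECT could be registered.
        final String sparql = "SELECT ?person ?city WHERE { ?person <urn:livesIn> ?city . }";
        // MongoPcjStorage is closed by try-with-resources, as in the example above.
        try (MongoPcjStorage pcjStore = new MongoPcjStorage(mongoClient, ryaInstanceName)) {
            // createPcj(...) registers the query and returns the new PCJ's id.
            return pcjStore.createPcj(sparql);
        }
    }
}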

Example 20 with PCJStorageException

Use of org.apache.rya.indexing.pcj.storage.PrecomputedJoinStorage.PCJStorageException in project incubator-rya by apache.

The class MongoPcjDocuments, method makeMetadataDocument.

/**
 * Creates a {@link Document} containing the metadata defining the PCJ.
 *
 * @param pcjId - Uniquely identifies a PCJ within Rya. (not null)
 * @param sparql - The SPARQL query the PCJ will use.
 * @return The document built around the provided metadata.
 * @throws PCJStorageException - Thrown when the SPARQL query is malformed.
 */
public Document makeMetadataDocument(final String pcjId, final String sparql) throws PCJStorageException {
    requireNonNull(pcjId);
    requireNonNull(sparql);
    final Set<VariableOrder> varOrders;
    try {
        varOrders = pcjVarOrderFactory.makeVarOrders(sparql);
    } catch (final MalformedQueryException e) {
        throw new PCJStorageException("Can not create the PCJ. The SPARQL is malformed.", e);
    }
    return new Document().append(PCJ_METADATA_ID, makeMetadataID(pcjId)).append(SPARQL_FIELD, sparql).append(CARDINALITY_FIELD, 0).append(VAR_ORDER_FIELD, varOrders);
}
Also used : VariableOrder(org.apache.rya.indexing.pcj.storage.accumulo.VariableOrder) MalformedQueryException(org.openrdf.query.MalformedQueryException) PCJStorageException(org.apache.rya.indexing.pcj.storage.PrecomputedJoinStorage.PCJStorageException) Document(org.bson.Document)
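
A minimal sketch showing how the metadata document above might be produced and inspected; the class name, the pcjId value, and the SPARQL string are hypothetical, and only the constructor and method shown in these examples (MongoPcjDocuments, makeMetadataDocument) are used.

import org.apache.rya.indexing.pcj.storage.PrecomputedJoinStorage.PCJStorageException;
import org.apache.rya.indexing.pcj.storage.mongo.MongoPcjDocuments;
import org.bson.Document;

import com.mongodb.MongoClient;

public class MetadataDocumentSketch {
    public static Document buildMetadata(final MongoClient client, final String ryaInstanceName)
            throws PCJStorageException {
        final MongoPcjDocuments pcjDocs = new MongoPcjDocuments(client, ryaInstanceName);
        // A malformed query here would surface as a PCJStorageException, as documented above.
        return pcjDocs.makeMetadataDocument("examplePcjId",
                "SELECT ?s ?o WHERE { ?s <urn:knows> ?o . }");
    }
}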

Aggregations

PCJStorageException (org.apache.rya.indexing.pcj.storage.PrecomputedJoinStorage.PCJStorageException) - 25
TableNotFoundException (org.apache.accumulo.core.client.TableNotFoundException) - 11
PcjMetadata (org.apache.rya.indexing.pcj.storage.PcjMetadata) - 11
AccumuloException (org.apache.accumulo.core.client.AccumuloException) - 9
AccumuloSecurityException (org.apache.accumulo.core.client.AccumuloSecurityException) - 9
RyaClientException (org.apache.rya.api.client.RyaClientException) - 9
MalformedQueryException (org.openrdf.query.MalformedQueryException) - 9
QueryEvaluationException (org.openrdf.query.QueryEvaluationException) - 7
ConditionalMutation (org.apache.accumulo.core.data.ConditionalMutation) - 6
PrecomputedJoinStorage (org.apache.rya.indexing.pcj.storage.PrecomputedJoinStorage) - 6
HashSet (java.util.HashSet) - 5
MutationsRejectedException (org.apache.accumulo.core.client.MutationsRejectedException) - 5
Mutation (org.apache.accumulo.core.data.Mutation) - 5
InstanceDoesNotExistException (org.apache.rya.api.client.InstanceDoesNotExistException) - 5
RyaDetailsRepositoryException (org.apache.rya.api.instance.RyaDetailsRepository.RyaDetailsRepositoryException) - 5
VisibilityBindingSet (org.apache.rya.api.model.VisibilityBindingSet) - 5
RepositoryException (org.openrdf.repository.RepositoryException) - 5
BatchWriter (org.apache.accumulo.core.client.BatchWriter) - 4
BatchWriterConfig (org.apache.accumulo.core.client.BatchWriterConfig) - 4
AccumuloPcjStorage (org.apache.rya.indexing.pcj.storage.accumulo.AccumuloPcjStorage) - 4