use of org.apache.rya.indexing.pcj.storage.PrecomputedJoinStorage.PCJStorageException in project incubator-rya by apache.
the class MongoPcjIndexSetProvider method getIndices.
@Override
protected List<ExternalTupleSet> getIndices() throws PcjIndexSetException {
    try {
        final StatefulMongoDBRdfConfiguration mongoConf = (StatefulMongoDBRdfConfiguration) conf;
        final MongoClient client = mongoConf.getMongoClient();
        final MongoPcjDocuments pcjDocs = new MongoPcjDocuments(client, mongoConf.getRyaInstanceName());
        List<String> documents = mongoConf.getPcjTables();
        // This map associates each PCJ document name with its PCJ SPARQL query.
        final Map<String, String> indexDocuments = Maps.newLinkedHashMap();
        try (final PrecomputedJoinStorage storage = new MongoPcjStorage(client, mongoConf.getRyaInstanceName())) {
            final boolean docsProvided = documents != null && !documents.isEmpty();
            if (docsProvided) {
                // If tables were provided, associate each table name with its SPARQL query.
                for (final String doc : documents) {
                    indexDocuments.put(doc, storage.getPcjMetadata(doc).getSparql());
                }
            } else if (hasRyaDetails()) {
                // If this is a newer install of Rya and it has PCJ Details, then use those.
                final List<String> ids = storage.listPcjs();
                for (final String pcjId : ids) {
                    indexDocuments.put(pcjId, storage.getPcjMetadata(pcjId).getSparql());
                }
            } else {
                // Otherwise figure it out by listing the PCJ document IDs.
                documents = pcjDocs.listPcjDocuments();
                for (final String pcjId : documents) {
                    if (pcjId.startsWith("INDEX")) {
                        indexDocuments.put(pcjId, pcjDocs.getPcjMetadata(pcjId).getSparql());
                    }
                }
            }
        }
        final List<ExternalTupleSet> index = Lists.newArrayList();
        if (indexDocuments.isEmpty()) {
            log.info("No index found.");
        } else {
            for (final String pcjID : indexDocuments.keySet()) {
                final String indexSparqlString = indexDocuments.get(pcjID);
                index.add(new MongoPcjQueryNode(indexSparqlString, pcjID, pcjDocs));
            }
        }
        return index;
    } catch (final PCJStorageException | MalformedQueryException e) {
        throw new PcjIndexSetException("Failed to get indices for this PCJ index.", e);
    }
}
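The lookup at the heart of getIndices can be reduced to the sketch below, which lists the registered PCJs and maps each ID to its SPARQL query. It is not part of the project; the MongoClient, the Rya instance name, and the package paths for the Mongo PCJ classes are assumptions based on the snippet above.

import java.util.LinkedHashMap;
import java.util.Map;

import org.apache.rya.indexing.pcj.storage.PrecomputedJoinStorage;
import org.apache.rya.indexing.pcj.storage.PrecomputedJoinStorage.PCJStorageException;
import org.apache.rya.indexing.pcj.storage.mongo.MongoPcjStorage;

import com.mongodb.MongoClient;

public class PcjSparqlLookupSketch {

    /**
     * Maps each PCJ ID registered for the Rya instance to its SPARQL query,
     * mirroring the "hasRyaDetails" branch of getIndices above.
     */
    public static Map<String, String> lookupPcjSparql(final MongoClient client, final String ryaInstanceName) throws PCJStorageException {
        final Map<String, String> pcjIdToSparql = new LinkedHashMap<>();
        // The storage is AutoCloseable, so try-with-resources releases it on exit.
        try (PrecomputedJoinStorage storage = new MongoPcjStorage(client, ryaInstanceName)) {
            for (final String pcjId : storage.listPcjs()) {
                pcjIdToSparql.put(pcjId, storage.getPcjMetadata(pcjId).getSparql());
            }
        }
        return pcjIdToSparql;
    }
}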
use of org.apache.rya.indexing.pcj.storage.PrecomputedJoinStorage.PCJStorageException in project incubator-rya by apache.
the class AccumuloUninstall method uninstall.
@Override
public void uninstall(final String ryaInstanceName) throws InstanceDoesNotExistException, RyaClientException {
    requireNonNull(ryaInstanceName);
    // Ensure the Rya instance exists.
    if (!instanceExists.exists(ryaInstanceName)) {
        throw new InstanceDoesNotExistException(String.format("There is no Rya instance named '%s'.", ryaInstanceName));
    }
    try {
        // Build the list of tables that are present within the Rya instance.
        final List<String> tables = new RyaTableNames().getTableNames(ryaInstanceName, getConnector());
        // Delete them.
        final TableOperations tableOps = getConnector().tableOperations();
        for (final String table : tables) {
            try {
                tableOps.delete(table);
            } catch (final TableNotFoundException e) {
                log.warn("Uninstall could not delete table named '" + LogUtils.clean(table) + "' because it does not exist. Something else is also deleting tables.");
            }
        }
    } catch (PCJStorageException | RyaDetailsRepositoryException e) {
        throw new RyaClientException("Could not uninstall the Rya instance named '" + ryaInstanceName + "' because we could not determine which tables are associated with it.", e);
    } catch (AccumuloException | AccumuloSecurityException e) {
        throw new RyaClientException("Could not uninstall the Rya instance named '" + ryaInstanceName + "' because of a problem interacting with Accumulo.", e);
    }
}
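The deletion loop tolerates tables that disappear while the uninstall is running. A minimal sketch of that pattern against the Accumulo client API, assuming a Connector and the list of table names are already available:

import java.util.List;

import org.apache.accumulo.core.client.AccumuloException;
import org.apache.accumulo.core.client.AccumuloSecurityException;
import org.apache.accumulo.core.client.Connector;
import org.apache.accumulo.core.client.TableNotFoundException;
import org.apache.accumulo.core.client.admin.TableOperations;

public class TableCleanupSketch {

    /**
     * Deletes each named table, skipping tables that have already been removed
     * by another process instead of failing the whole cleanup.
     */
    public static void deleteTables(final Connector connector, final List<String> tables)
            throws AccumuloException, AccumuloSecurityException {
        final TableOperations tableOps = connector.tableOperations();
        for (final String table : tables) {
            try {
                tableOps.delete(table);
            } catch (final TableNotFoundException e) {
                // Another client deleted the table first; nothing left to do for it.
            }
        }
    }
}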
use of org.apache.rya.indexing.pcj.storage.PrecomputedJoinStorage.PCJStorageException in project incubator-rya by apache.
the class MongoBatchUpdatePCJ method updatePCJResults.
private void updatePCJResults(final String ryaInstanceName, final String pcjId, final MongoClient client) throws InstanceDoesNotExistException, PCJDoesNotExistException, RyaClientException {
    // Things that have to be closed before we exit.
    Sail sail = null;
    SailConnection sailConn = null;
    try (final PrecomputedJoinStorage pcjStorage = new MongoPcjStorage(client, ryaInstanceName)) {
        // Create an instance of Sail backed by the Rya instance.
        sail = connectToRya(ryaInstanceName);
        final SailRepository sailRepo = new SailRepository(sail);
        final SailRepositoryConnection sailRepoConn = sailRepo.getConnection();
        // Purge the old results from the PCJ.
        try {
            pcjStorage.purge(pcjId);
        } catch (final PCJStorageException e) {
            throw new RyaClientException("Could not batch update PCJ with ID '" + pcjId + "' because the old results could not be purged from it.", e);
        }
        // Parse the PCJ's SPARQL query.
        final PcjMetadata metadata = pcjStorage.getPcjMetadata(pcjId);
        final String sparql = metadata.getSparql();
        sailConn = sail.getConnection();
        final TupleQuery tupleQuery = sailRepoConn.prepareTupleQuery(QueryLanguage.SPARQL, sparql);
        // Execute the query.
        final List<VisibilityBindingSet> batch = new ArrayList<>(1000);
        tupleQuery.evaluate(new TupleQueryResultHandlerBase() {
            @Override
            public void handleSolution(final BindingSet bindingSet) throws TupleQueryResultHandlerException {
                final VisibilityBindingSet result = new VisibilityBindingSet(bindingSet, "");
                log.warn("Visibility information on the binding set is lost during a batch update. This can create data leaks.");
                batch.add(result);
                if (batch.size() == 1000) {
                    try {
                        pcjStorage.addResults(pcjId, batch);
                    } catch (final PCJStorageException e) {
                        throw new TupleQueryResultHandlerException("Failed to batch load new results into the PCJ with ID '" + pcjId + "'.", e);
                    }
                    batch.clear();
                }
            }
        });
        if (!batch.isEmpty()) {
            pcjStorage.addResults(pcjId, batch);
            batch.clear();
        }
    } catch (final MalformedQueryException | PCJStorageException | SailException | QueryEvaluationException | RepositoryException | TupleQueryResultHandlerException e) {
        throw new RyaClientException("Failed to batch load new results into the PCJ with ID '" + pcjId + "'.", e);
    } finally {
        if (sailConn != null) {
            try {
                sailConn.close();
            } catch (final SailException e) {
                log.warn(e.getMessage(), e);
            }
        }
        if (sail != null) {
            try {
                sail.shutDown();
            } catch (final SailException e) {
                log.warn(e.getMessage(), e);
            }
        }
    }
}
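The batch-loading pattern above (purge the PCJ, then write results in blocks of 1000) can be isolated into a small helper. This is a sketch, not project code; the package paths for VisibilityBindingSet and the PCJ storage classes are assumptions, and only the purge and addResults calls come from the snippet.

import java.util.ArrayList;
import java.util.List;

import org.apache.rya.api.model.VisibilityBindingSet;
import org.apache.rya.indexing.pcj.storage.PrecomputedJoinStorage;
import org.apache.rya.indexing.pcj.storage.PrecomputedJoinStorage.PCJStorageException;

public class BatchedPcjWriterSketch {

    private static final int BATCH_SIZE = 1000;

    private final PrecomputedJoinStorage pcjStorage;
    private final String pcjId;
    private final List<VisibilityBindingSet> batch = new ArrayList<>(BATCH_SIZE);

    public BatchedPcjWriterSketch(final PrecomputedJoinStorage pcjStorage, final String pcjId) throws PCJStorageException {
        this.pcjStorage = pcjStorage;
        this.pcjId = pcjId;
        // Drop the stale results before loading the recomputed ones.
        pcjStorage.purge(pcjId);
    }

    /** Buffers a result and flushes once the batch is full. */
    public void add(final VisibilityBindingSet result) throws PCJStorageException {
        batch.add(result);
        if (batch.size() == BATCH_SIZE) {
            flush();
        }
    }

    /** Writes any buffered results to the PCJ and clears the buffer. */
    public void flush() throws PCJStorageException {
        if (!batch.isEmpty()) {
            pcjStorage.addResults(pcjId, batch);
            batch.clear();
        }
    }
}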
use of org.apache.rya.indexing.pcj.storage.PrecomputedJoinStorage.PCJStorageException in project incubator-rya by apache.
the class MongoCreatePCJ method createPCJ.
@Override
public String createPCJ(final String ryaInstanceName, final String sparql, final Set<ExportStrategy> strategies) throws InstanceDoesNotExistException, RyaClientException {
    requireNonNull(ryaInstanceName);
    requireNonNull(sparql);
    // Ensure the Rya instance exists.
    if (!instanceExists.exists(ryaInstanceName)) {
        throw new InstanceDoesNotExistException(String.format("There is no Rya instance named '%s'.", ryaInstanceName));
    }
    try (final MongoPcjStorage pcjStore = new MongoPcjStorage(mongoClient, ryaInstanceName)) {
        return pcjStore.createPcj(sparql);
    } catch (final PCJStorageException e) {
        throw new RyaClientException("Unable to create PCJ for: " + sparql, e);
    }
}
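From the caller's side, creating a PCJ reduces to a single storage call. A hedged sketch, assuming the package path for MongoPcjStorage:

import org.apache.rya.indexing.pcj.storage.PrecomputedJoinStorage.PCJStorageException;
import org.apache.rya.indexing.pcj.storage.mongo.MongoPcjStorage;

import com.mongodb.MongoClient;

public class CreatePcjSketch {

    /** Registers a SPARQL query as a PCJ and returns the ID assigned to it. */
    public static String createPcj(final MongoClient mongoClient, final String ryaInstanceName, final String sparql) throws PCJStorageException {
        try (MongoPcjStorage pcjStore = new MongoPcjStorage(mongoClient, ryaInstanceName)) {
            return pcjStore.createPcj(sparql);
        }
    }
}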
use of org.apache.rya.indexing.pcj.storage.PrecomputedJoinStorage.PCJStorageException in project incubator-rya by apache.
the class MongoPcjDocuments method makeMetadataDocument.
/**
 * Creates a {@link Document} containing the metadata defining the PCJ.
 *
 * @param pcjId - Uniquely identifies a PCJ within Rya. (not null)
 * @param sparql - The SPARQL query the PCJ will use.
 * @return The document built around the provided metadata.
 * @throws PCJStorageException - Thrown when the SPARQL query is malformed.
 */
public Document makeMetadataDocument(final String pcjId, final String sparql) throws PCJStorageException {
    requireNonNull(pcjId);
    requireNonNull(sparql);
    final Set<VariableOrder> varOrders;
    try {
        varOrders = pcjVarOrderFactory.makeVarOrders(sparql);
    } catch (final MalformedQueryException e) {
        throw new PCJStorageException("Cannot create the PCJ. The SPARQL is malformed.", e);
    }
    return new Document()
            .append(PCJ_METADATA_ID, makeMetadataID(pcjId))
            .append(SPARQL_FIELD, sparql)
            .append(CARDINALITY_FIELD, 0)
            .append(VAR_ORDER_FIELD, varOrders);
}
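Because the variable orders are derived by parsing the SPARQL, a malformed query reaches the caller as a PCJStorageException. A usage sketch, with an illustrative PCJ ID and query and an assumed package path for MongoPcjDocuments:

import org.apache.rya.indexing.pcj.storage.PrecomputedJoinStorage.PCJStorageException;
import org.apache.rya.indexing.pcj.storage.mongo.MongoPcjDocuments;

import org.bson.Document;

import com.mongodb.MongoClient;

public class MetadataDocumentSketch {

    /** Builds a metadata document for a hypothetical PCJ named "pcj_1". */
    public static Document buildMetadata(final MongoClient client, final String ryaInstanceName) throws PCJStorageException {
        final MongoPcjDocuments pcjDocs = new MongoPcjDocuments(client, ryaInstanceName);
        // A malformed SPARQL string here would make makeMetadataDocument throw a
        // PCJStorageException wrapping the underlying MalformedQueryException.
        return pcjDocs.makeMetadataDocument("pcj_1", "SELECT ?s ?o WHERE { ?s <urn:predicate> ?o . }");
    }
}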