Use of org.apache.rya.api.persist.RyaDAOException in project incubator-rya by Apache.
The class RyaOutputFormat, method getRyaIndexer.
private static AccumuloRyaDAO getRyaIndexer(final Configuration conf) throws IOException {
    try {
        if (!conf.getBoolean(ENABLE_CORE, true)) {
            return null;
        }
        final AccumuloRyaDAO ryaIndexer = new AccumuloRyaDAO();
        final Connector conn = ConfigUtils.getConnector(conf);
        ryaIndexer.setConnector(conn);
        final AccumuloRdfConfiguration ryaConf = new AccumuloRdfConfiguration();
        final String tablePrefix = conf.get(OUTPUT_PREFIX_PROPERTY, null);
        if (tablePrefix != null) {
            ryaConf.setTablePrefix(tablePrefix);
        }
        ryaConf.setDisplayQueryPlan(false);
        ryaIndexer.setConf(ryaConf);
        ryaIndexer.init();
        return ryaIndexer;
    } catch (final AccumuloException e) {
        logger.error("Cannot create RyaIndexer", e);
        throw new IOException(e);
    } catch (final AccumuloSecurityException e) {
        logger.error("Cannot create RyaIndexer", e);
        throw new IOException(e);
    } catch (final RyaDAOException e) {
        logger.error("Cannot create RyaIndexer", e);
        throw new IOException(e);
    }
}
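For context, a hedged sketch of how an indexer obtained from getRyaIndexer might then be used when writing records. The helper name writeStatement and its arguments are illustrative assumptions, not code from RyaOutputFormat; only AccumuloRyaDAO#add(RyaStatement), which throws RyaDAOException, is taken from the DAO API shown on this page.

// Hedged sketch, not part of RyaOutputFormat: the typical add-and-translate pattern
// for RyaDAOException when writing through the indexer returned above.
private static void writeStatement(final AccumuloRyaDAO ryaIndexer, final RyaStatement statement) throws IOException {
    if (ryaIndexer == null) {
        // Core storage was disabled (ENABLE_CORE == false), so there is nothing to write.
        return;
    }
    try {
        ryaIndexer.add(statement);
    } catch (final RyaDAOException e) {
        logger.error("Cannot write statement to Rya", e);
        throw new IOException(e);
    }
}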
Use of org.apache.rya.api.persist.RyaDAOException in project incubator-rya by Apache.
The class AccumuloCreatePCJ, method createPCJ.
@Override
public String createPCJ(final String instanceName, final String sparql, Set<ExportStrategy> strategies) throws InstanceDoesNotExistException, RyaClientException {
    requireNonNull(instanceName);
    requireNonNull(sparql);
    // Ensure the Rya instance exists.
    final Optional<RyaDetails> ryaDetailsHolder = getInstanceDetails.getDetails(instanceName);
    final boolean ryaInstanceExists = ryaDetailsHolder.isPresent();
    if (!ryaInstanceExists) {
        throw new InstanceDoesNotExistException(String.format("The '%s' instance of Rya does not exist.", instanceName));
    }
    // Ensure PCJ indexing is enabled for the instance.
    final PCJIndexDetails pcjIndexDetails = ryaDetailsHolder.get().getPCJIndexDetails();
    final boolean pcjIndexingEnabled = pcjIndexDetails.isEnabled();
    if (!pcjIndexingEnabled) {
        throw new RyaClientException(String.format("The '%s' instance of Rya does not have PCJ Indexing enabled.", instanceName));
    }
    // Create the PCJ table that will receive the index results.
    final String pcjId;
    try (final PrecomputedJoinStorage pcjStorage = new AccumuloPcjStorage(getConnector(), instanceName)) {
        pcjId = pcjStorage.createPcj(sparql);
        // If a Fluo application is being used, task it with updating the PCJ.
        final Optional<FluoDetails> fluoDetailsHolder = pcjIndexDetails.getFluoDetails();
        if (fluoDetailsHolder.isPresent()) {
            final String fluoAppName = fluoDetailsHolder.get().getUpdateAppName();
            try {
                updateFluoApp(instanceName, fluoAppName, pcjId, sparql, strategies);
            } catch (RepositoryException | MalformedQueryException | SailException | QueryEvaluationException | PcjException | RyaDAOException e) {
                throw new RyaClientException("Problem while initializing the Fluo application with the new PCJ.", e);
            } catch (UnsupportedQueryException e) {
                throw new RyaClientException("The new PCJ could not be initialized because it either contains an unsupported query node "
                        + "or an invalid ExportStrategy for the given QueryType. Projection queries can be exported to either Rya or Kafka, "
                        + "unless they contain an aggregation, in which case they can only be exported to Kafka. Construct queries can be exported "
                        + "to Rya and Kafka, and Periodic queries can only be exported to Rya.");
            }
            // Update the Rya Details to indicate the PCJ is being updated incrementally.
            final RyaDetailsRepository detailsRepo = new AccumuloRyaInstanceDetailsRepository(getConnector(), instanceName);
            try {
                new RyaDetailsUpdater(detailsRepo).update(new RyaDetailsMutator() {
                    @Override
                    public RyaDetails mutate(final RyaDetails originalDetails) throws CouldNotApplyMutationException {
                        // Update the original PCJ Details to indicate they are incrementally updated.
                        final PCJDetails originalPCJDetails = originalDetails.getPCJIndexDetails().getPCJDetails().get(pcjId);
                        final PCJDetails.Builder mutatedPCJDetails = PCJDetails.builder(originalPCJDetails).setUpdateStrategy(PCJUpdateStrategy.INCREMENTAL);
                        // Replace the old PCJ Details with the updated ones.
                        final RyaDetails.Builder builder = RyaDetails.builder(originalDetails);
                        builder.getPCJIndexDetails().addPCJDetails(mutatedPCJDetails);
                        return builder.build();
                    }
                });
            } catch (RyaDetailsRepositoryException | CouldNotApplyMutationException e) {
                throw new RyaClientException("Problem while updating the Rya instance's Details to indicate the PCJ is being incrementally updated.", e);
            }
        }
        // Return the ID that was assigned to the PCJ.
        return pcjId;
    } catch (final PCJStorageException e) {
        throw new RyaClientException("Problem while initializing the PCJ table.", e);
    }
}
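A minimal caller sketch for the method above. The variable createPcj stands in for a fully constructed AccumuloCreatePCJ (its constructor arguments are not shown in the snippet), and ExportStrategy.RYA is assumed to be one of the enum's values based on the error message above.

// Hedged sketch: registers a new PCJ for a SPARQL query and handles the two checked exceptions.
final String sparql = "SELECT ?person ?friend WHERE { ?person <uri:talksTo> ?friend . }";
try {
    final String pcjId = createPcj.createPCJ("myRyaInstance", sparql, Collections.singleton(ExportStrategy.RYA));
    System.out.println("Created PCJ: " + pcjId);
} catch (final InstanceDoesNotExistException e) {
    // No Rya instance named "myRyaInstance" has been installed.
} catch (final RyaClientException e) {
    // PCJ indexing is disabled for the instance, or the PCJ/Fluo setup failed.
}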
Use of org.apache.rya.api.persist.RyaDAOException in project incubator-rya by Apache.
The class MergeToolTest, method testMergeTool.
@Test
public void testMergeTool() throws Exception {
    // This statement was in both parent/child instances a month ago and is before the start time of yesterday,
    // but it was left alone. It should remain in the parent after merging.
    final RyaStatement ryaStatementOutOfTimeRange = createRyaStatement("coach", "called", "timeout", LAST_MONTH);
    // This statement was in both parent/child instances a month ago, but after the start time of yesterday
    // the parent deleted it and the child still has it. It should stay deleted in the parent after merging.
    final RyaStatement ryaStatementParentDeletedAfter = createRyaStatement("parent", "deleted", "after", LAST_MONTH);
    // This statement was added by the parent after the start time of yesterday and doesn't exist in the child.
    // It should stay in the parent after merging.
    final RyaStatement ryaStatementParentAddedAfter = createRyaStatement("parent", "added", "after", TODAY);
    // This statement was in both parent/child instances a month ago, but after the start time of yesterday
    // the child deleted it and the parent still has it. It should be deleted from the parent after merging.
    final RyaStatement ryaStatementChildDeletedAfter = createRyaStatement("child", "deleted", "after", LAST_MONTH);
    // This statement was added by the child after the start time of yesterday and doesn't exist in the parent.
    // It should be added to the parent after merging.
    final RyaStatement ryaStatementChildAddedAfter = createRyaStatement("child", "added", "after", TODAY);
    // This statement was modified by the child after the start of yesterday (the timestamp changes after updating).
    // It should be updated in the parent to match the child.
    final RyaStatement ryaStatementUpdatedByChild = createRyaStatement("bob", "catches", "ball", LAST_MONTH);
    final RyaStatement ryaStatementUntouchedByChild = createRyaStatement("bill", "talks to", "john", LAST_MONTH);
    final RyaStatement ryaStatementDeletedByChild = createRyaStatement("susan", "eats", "burgers", LAST_MONTH);
    final RyaStatement ryaStatementAddedByChild = createRyaStatement("ronnie", "plays", "guitar", TODAY);
    // This statement was modified by the child to change the column visibility.
    // The parent should combine the child's visibility with its own visibility.
    final RyaStatement ryaStatementVisibilityDifferent = createRyaStatement("I", "see", "you", LAST_MONTH);
    ryaStatementVisibilityDifferent.setColumnVisibility(PARENT_COLUMN_VISIBILITY.getExpression());
    // Set up the initial parent instance with 7 rows.
    // This is the state of the parent data (as it is today) before merging occurs, which will use the specified start time of yesterday.
    // Merging should keep statement
    parentDao.add(ryaStatementOutOfTimeRange);
    // Merging should update statement
    parentDao.add(ryaStatementUpdatedByChild);
    // Merging should keep statement
    parentDao.add(ryaStatementUntouchedByChild);
    // Merging should delete statement
    parentDao.add(ryaStatementDeletedByChild);
    // Merging should update statement
    parentDao.add(ryaStatementVisibilityDifferent);
    // Merging should keep statement
    parentDao.add(ryaStatementParentAddedAfter);
    // Merging should delete statement
    parentDao.add(ryaStatementChildDeletedAfter);
    // Simulate the child coming back with a modified data set before the merging occurs.
    // (1 updated row, 1 row left alone because it was unchanged, 1 row outside the time range,
    // 1 row deleted, 1 new row added, 1 modified visibility, 1 deleted by child, 1 added by child).
    // There should be 5 rows in the child instance (4 of which will be scanned over from the start time).
    ryaStatementUpdatedByChild.setObject(TestUtils.createRyaUri("football"));
    ryaStatementUpdatedByChild.setTimestamp(TODAY.getTime());
    ryaStatementVisibilityDifferent.setColumnVisibility(CHILD_COLUMN_VISIBILITY.getExpression());
    childDao.add(ryaStatementOutOfTimeRange);
    childDao.add(ryaStatementUpdatedByChild);
    childDao.add(ryaStatementUntouchedByChild);
    // Merging should add statement
    childDao.add(ryaStatementAddedByChild);
    childDao.add(ryaStatementVisibilityDifferent);
    childDao.add(ryaStatementParentDeletedAfter);
    // Merging should add statement
    childDao.add(ryaStatementChildAddedAfter);
    AccumuloRyaUtils.printTable(PARENT_TABLE_PREFIX + RdfCloudTripleStoreConstants.TBL_SPO_SUFFIX, parentConfig);
    AccumuloRyaUtils.printTable(CHILD_TABLE_PREFIX + RdfCloudTripleStoreConstants.TBL_SPO_SUFFIX, childConfig);
    log.info("Starting merge tool. Merging all data after the specified start time: " + YESTERDAY);
    mergeToolRun(YESTERDAY);
    for (final String tableSuffix : AccumuloInstanceDriver.TABLE_NAME_SUFFIXES) {
        AccumuloRyaUtils.printTable(PARENT_TABLE_PREFIX + tableSuffix, parentConfig);
    }
    final Scanner scanner = AccumuloRyaUtils.getScanner(PARENT_TABLE_PREFIX + RdfCloudTripleStoreConstants.TBL_SPO_SUFFIX, parentConfig);
    final Iterator<Entry<Key, Value>> iterator = scanner.iterator();
    int count = 0;
    while (iterator.hasNext()) {
        iterator.next();
        count++;
    }
    // Make sure we have all of them in the parent.
    assertEquals(7, count);
    assertStatementInParent("Parent missing statement that was untouched by the child", 1, ryaStatementUntouchedByChild);
    assertStatementInParent("Parent missing statement that was out of time range", 1, ryaStatementOutOfTimeRange);
    assertStatementInParent("Parent missing statement that was updated by the child", 1, ryaStatementUpdatedByChild);
    assertStatementInParent("Parent missing statement that was added by the child", 1, ryaStatementAddedByChild);
    assertStatementInParent("Parent has statement that the child deleted", 0, ryaStatementDeletedByChild);
    // Check that it can be queried with the parent's visibility.
    assertStatementInParent("Parent missing statement with parent visibility", 1, ryaStatementVisibilityDifferent);
    // Check that it can be queried with the child's visibility.
    parentConfig.set(RdfCloudTripleStoreConfiguration.CONF_QUERY_AUTH, CHILD_AUTH);
    final Authorizations newParentAuths = AccumuloRyaUtils.addUserAuths(accumuloDualInstanceDriver.getParentUser(), accumuloDualInstanceDriver.getParentSecOps(), CHILD_AUTH);
    accumuloDualInstanceDriver.getParentSecOps().changeUserAuthorizations(accumuloDualInstanceDriver.getParentUser(), newParentAuths);
    assertStatementInParent("Parent missing statement with child visibility", 1, ryaStatementVisibilityDifferent);
    // Check that it can NOT be queried with some other visibility.
    parentConfig.set(RdfCloudTripleStoreConfiguration.CONF_QUERY_AUTH, "bad_auth");
    final CloseableIteration<RyaStatement, RyaDAOException> iter = parentDao.getQueryEngine().query(ryaStatementVisibilityDifferent, parentConfig);
    count = 0;
    try {
        while (iter.hasNext()) {
            iter.next();
            count++;
        }
    } catch (final Exception e) {
        // Expected: the scan fails because the user lacks the required authorization.
        if (!(e.getCause() instanceof AccumuloSecurityException)) {
            fail();
        }
    }
    iter.close();
    assertEquals(0, count);
    // Reset the query authorization.
    parentConfig.set(RdfCloudTripleStoreConfiguration.CONF_QUERY_AUTH, PARENT_AUTH);
    assertStatementInParent("Parent has statement it deleted later", 0, ryaStatementParentDeletedAfter);
    assertStatementInParent("Parent missing statement it added later", 1, ryaStatementParentAddedAfter);
    assertStatementInParent("Parent has statement child deleted later", 0, ryaStatementChildDeletedAfter);
    assertStatementInParent("Parent missing statement child added later", 1, ryaStatementChildAddedAfter);
    log.info("DONE");
}
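The assertStatementInParent helper called throughout the test is not included in the snippet. A plausible sketch of it, modeled on the CloseableIteration loop near the end of the test; the exact signature is an assumption.

// Hedged sketch: counts statements matching the pattern with the parent DAO's query engine
// and compares the count against the expected value.
private void assertStatementInParent(final String description, final int expectedCount, final RyaStatement statement) throws RyaDAOException {
    final CloseableIteration<RyaStatement, RyaDAOException> iter = parentDao.getQueryEngine().query(statement, parentConfig);
    int count = 0;
    try {
        while (iter.hasNext()) {
            iter.next();
            count++;
        }
    } finally {
        iter.close();
    }
    assertEquals(description, expectedCount, count);
}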
Use of org.apache.rya.api.persist.RyaDAOException in project incubator-rya by Apache.
The class RyaDirectExample, method createPCJ.
private static void createPCJ(final Configuration conf) throws RepositoryException, AccumuloException, AccumuloSecurityException, TableExistsException, PcjException, InferenceEngineException, NumberFormatException, UnknownHostException, SailException, TableNotFoundException {
    final Configuration config = new AccumuloRdfConfiguration(conf);
    // Disable PCJ use while loading; the PCJ tables are created and populated below.
    config.set(ConfigUtils.USE_PCJ, "false");
    SailRepository repository = null;
    SailRepositoryConnection conn = null;
    try {
        final Sail extSail = RyaSailFactory.getInstance(config);
        repository = new SailRepository(extSail);
        conn = repository.getConnection();
        final String queryString1 = //
                "" + //
                "SELECT ?e ?c ?l ?o " + //
                "{" + //
                " ?c a ?e . " + //
                " ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l . " + //
                " ?e <uri:talksTo> ?o . " + //
                "}";
        final String queryString2 = //
                "" + //
                "SELECT ?e ?c ?l ?o " + //
                "{" + //
                " ?e a ?c . " + //
                " ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l . " + //
                " ?e <uri:talksTo> ?o . " + //
                "}";
        URI obj, subclass, talksTo;
        final URI person = new URIImpl("urn:people:alice");
        final URI feature = new URIImpl("urn:feature");
        final URI sub = new URIImpl("uri:entity");
        subclass = new URIImpl("uri:class");
        obj = new URIImpl("uri:obj");
        talksTo = new URIImpl("uri:talksTo");
        // Load a small amount of data that both queries will match.
        conn.add(person, RDF.TYPE, sub);
        conn.add(feature, RDF.TYPE, sub);
        conn.add(sub, RDF.TYPE, subclass);
        conn.add(sub, RDFS.LABEL, new LiteralImpl("label"));
        conn.add(sub, talksTo, obj);
        // Create and populate one PCJ table per query.
        final String tablename1 = RYA_TABLE_PREFIX + "INDEX_1";
        final String tablename2 = RYA_TABLE_PREFIX + "INDEX_2";
        final Connector accCon = new MockInstance(INSTANCE).getConnector("root", new PasswordToken("".getBytes(StandardCharsets.UTF_8)));
        new PcjTables().createAndPopulatePcj(conn, accCon, tablename1, queryString1, new String[] { "e", "c", "l", "o" }, Optional.<PcjVarOrderFactory>absent());
        new PcjTables().createAndPopulatePcj(conn, accCon, tablename2, queryString2, new String[] { "e", "c", "l", "o" }, Optional.<PcjVarOrderFactory>absent());
    } catch (final RyaDAOException e) {
        throw new Error("While creating PCJ tables.", e);
    } finally {
        closeQuietly(conn);
        closeQuietly(repository);
    }
}
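Once the two PCJ tables have been populated, a query that was registered above can be answered from the precomputed results by re-opening the repository with PCJ use enabled. A hedged sketch under that assumption; the helper name queryWithPcj and the CSV result writer are illustrative, not part of RyaDirectExample.

// Hedged sketch: evaluates a query that was registered as a PCJ above, with USE_PCJ turned on.
private static void queryWithPcj(final Configuration conf, final String sparql) throws Exception {
    final Configuration pcjConf = new AccumuloRdfConfiguration(conf);
    pcjConf.set(ConfigUtils.USE_PCJ, "true");
    SailRepository repository = null;
    SailRepositoryConnection conn = null;
    try {
        repository = new SailRepository(RyaSailFactory.getInstance(pcjConf));
        conn = repository.getConnection();
        final TupleQuery query = conn.prepareTupleQuery(QueryLanguage.SPARQL, sparql);
        // Any TupleQueryResultHandler works here; a CSV writer keeps the sketch short.
        query.evaluate(new SPARQLResultsCSVWriter(System.out));
    } finally {
        closeQuietly(conn);
        closeQuietly(repository);
    }
}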
Use of org.apache.rya.api.persist.RyaDAOException in project incubator-rya by Apache.
The class MongoExecuteSparqlQuery, method executeSparqlQuery.
@Override
public String executeSparqlQuery(final String ryaInstanceName, final String sparqlQuery) throws InstanceDoesNotExistException, RyaClientException {
    requireNonNull(ryaInstanceName);
    requireNonNull(sparqlQuery);
    // Ensure the Rya Instance exists.
    if (!instanceExists.exists(ryaInstanceName)) {
        throw new InstanceDoesNotExistException(String.format("There is no Rya instance named '%s'.", ryaInstanceName));
    }
    Sail sail = null;
    SailRepositoryConnection sailRepoConn = null;
    try {
        // Get a Sail object that is connected to the Rya instance.
        final MongoDBRdfConfiguration ryaConf = connectionDetails.build(ryaInstanceName);
        sail = RyaSailFactory.getInstance(ryaConf);
        final SailRepository sailRepo = new SailRepository(sail);
        sailRepoConn = sailRepo.getConnection();
        // Execute the query.
        final long start = System.currentTimeMillis();
        final TupleQuery tupleQuery = sailRepoConn.prepareTupleQuery(QueryLanguage.SPARQL, sparqlQuery);
        final ByteArrayOutputStream baos = new ByteArrayOutputStream();
        final CountingSPARQLResultsCSVWriter handler = new CountingSPARQLResultsCSVWriter(baos);
        tupleQuery.evaluate(handler);
        final long end = System.currentTimeMillis();
        // Format and return the result of the query.
        final String queryResult = new String(baos.toByteArray(), StandardCharsets.UTF_8);
        final String queryDuration = new DecimalFormat("0.0##").format((end - start) / 1000.0);
        return "Query Result:\n" + queryResult + "Retrieved " + handler.getCount() + " results in " + queryDuration + " seconds.";
    } catch (SailException | RyaDAOException | InferenceEngineException | AccumuloException | AccumuloSecurityException e) {
        throw new RyaClientException("Could not create the Sail object used to query the RYA instance.", e);
    } catch (final MalformedQueryException | QueryEvaluationException | TupleQueryResultHandlerException | RepositoryException e) {
        throw new RyaClientException("Could not execute the SPARQL query.", e);
    } finally {
        // Close the resources that were opened.
        if (sailRepoConn != null) {
            try {
                sailRepoConn.close();
            } catch (final RepositoryException e) {
                log.error("Couldn't close the SailRepositoryConnection object.", e);
            }
        }
        if (sail != null) {
            try {
                sail.shutDown();
            } catch (final SailException e) {
                log.error("Couldn't close the Sail object.", e);
            }
        }
    }
}
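A minimal caller sketch for the command above. The variable executeQuery stands in for a fully constructed MongoExecuteSparqlQuery; its constructor and the connection details it needs are not part of the snippet.

// Hedged sketch: runs a SPARQL query against a named Mongo-backed Rya instance and prints the report.
final String sparql = "SELECT ?s ?p ?o WHERE { ?s ?p ?o } LIMIT 10";
try {
    final String report = executeQuery.executeSparqlQuery("myRyaInstance", sparql);
    System.out.println(report);
} catch (final InstanceDoesNotExistException e) {
    // There is no Rya instance with that name in MongoDB.
} catch (final RyaClientException e) {
    // The Sail could not be created, or the query failed to evaluate.
}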