use of org.apache.rya.rdftriplestore.inference.InferenceEngine in project incubator-rya by apache.
the class QueryRuleset method setRules.
/**
 * Extract the rules from the query string, applying inference rules if configured to.
 * @throws QueryRulesetException if the query can't be parsed and translated into valid rules.
 */
private void setRules() throws QueryRulesetException {
    final ParsedTupleQuery ptq;
    final TupleExpr te;
    try {
        ptq = QueryParserUtil.parseTupleQuery(QueryLanguage.SPARQL, query, null);
    } catch (UnsupportedQueryLanguageException | MalformedQueryException e) {
        throw new QueryRulesetException("Error parsing query:\n" + query, e);
    }
    te = ptq.getTupleExpr();
    // Before converting to rules (and renaming variables), validate that no statement patterns
    // consist of only variables (this would result in a rule that matches every triple).
    // Needs to be done before inference, since inference rules may create such statement patterns
    // that are OK because they won't be converted to rules directly.
    te.visit(new QueryModelVisitorBase<QueryRulesetException>() {
        @Override
        public void meet(final StatementPattern node) throws QueryRulesetException {
            if (!(node.getSubjectVar().hasValue() || node.getPredicateVar().hasValue() || node.getObjectVar().hasValue())) {
                throw new QueryRulesetException("Statement pattern with no constants would match every statement:\n" + node + "\nFrom parsed query:\n" + te);
            }
        }
    });
    // Apply inference, if applicable
    if (conf != null && conf.isInfer()) {
        RdfCloudTripleStore store = null;
        try {
            log.info("Applying inference rules");
            store = (RdfCloudTripleStore) RyaSailFactory.getInstance(conf);
            final InferenceEngine inferenceEngine = store.getInferenceEngine();
            // Apply in same order as query evaluation:
            te.visit(new TransitivePropertyVisitor(conf, inferenceEngine));
            te.visit(new SymmetricPropertyVisitor(conf, inferenceEngine));
            te.visit(new InverseOfVisitor(conf, inferenceEngine));
            te.visit(new SubPropertyOfVisitor(conf, inferenceEngine));
            te.visit(new SubClassOfVisitor(conf, inferenceEngine));
            te.visit(new SameAsVisitor(conf, inferenceEngine));
            log.info("Query after inference:\n");
            for (final String line : te.toString().split("\n")) {
                log.info("\t" + line);
            }
        } catch (final Exception e) {
            throw new QueryRulesetException("Error applying inference to parsed query:\n" + te, e);
        } finally {
            if (store != null) {
                try {
                    store.shutDown();
                } catch (final SailException e) {
                    log.error("Error shutting down Sail after applying inference", e);
                }
            }
        }
    }
    // Extract the StatementPatterns and Filters and turn them into rules:
    final RulesetVisitor rv = new RulesetVisitor();
    try {
        te.visit(rv);
        rv.addSchema();
    } catch (final QueryRulesetException e) {
        throw new QueryRulesetException("Error extracting rules from parsed query:\n" + te, e);
    }
    for (final CopyRule candidateRule : rv.rules) {
        boolean unique = true;
        for (final CopyRule otherRule : rv.rules) {
            if (!candidateRule.equals(otherRule) && otherRule.isGeneralizationOf(candidateRule)) {
                unique = false;
                break;
            }
        }
        if (unique) {
            rules.add(candidateRule);
        }
    }
}
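The all-variable guard that runs before inference can be isolated for reuse. The following is a minimal sketch, not from the Rya codebase: the AllVariableGuard class and parseAndValidate method names are invented here, but every call it makes uses the same OpenRDF/Sesame parser and visitor APIs that setRules() relies on above. It parses a SPARQL string and rejects any statement pattern made up entirely of variables.

import org.openrdf.query.MalformedQueryException;
import org.openrdf.query.QueryLanguage;
import org.openrdf.query.algebra.StatementPattern;
import org.openrdf.query.algebra.TupleExpr;
import org.openrdf.query.algebra.helpers.QueryModelVisitorBase;
import org.openrdf.query.parser.QueryParserUtil;

public class AllVariableGuard {
    /** Returns the parsed algebra, or throws if any pattern has no constant terms. */
    public static TupleExpr parseAndValidate(final String sparql) throws Exception {
        final TupleExpr te = QueryParserUtil.parseTupleQuery(QueryLanguage.SPARQL, sparql, null).getTupleExpr();
        te.visit(new QueryModelVisitorBase<MalformedQueryException>() {
            @Override
            public void meet(final StatementPattern node) throws MalformedQueryException {
                // A pattern with no constant subject, predicate, or object would match every triple.
                if (!(node.getSubjectVar().hasValue() || node.getPredicateVar().hasValue() || node.getObjectVar().hasValue())) {
                    throw new MalformedQueryException("Pattern would match every statement: " + node);
                }
            }
        });
        return te;
    }
}

Running the check before the inference visitors, as setRules() does, matters because the visitors may legitimately introduce all-variable patterns that are never turned into rules.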
use of org.apache.rya.rdftriplestore.inference.InferenceEngine in project incubator-rya by apache.
the class SparqlQueryPigEngine method init.
public void init() throws Exception {
    Preconditions.checkNotNull(sparqlToPigTransformVisitor, "Sparql To Pig Transform Visitor must not be null");
    logger.info("Initializing Sparql Query Pig Engine");
    if (hadoopDir != null) {
        // set hadoop dir property
        System.setProperty("HADOOPDIR", hadoopDir);
    }
    if (pigServer == null) {
        pigServer = new PigServer(execType);
    }
    if (inference || stats) {
        final String instance = sparqlToPigTransformVisitor.getInstance();
        final String zoo = sparqlToPigTransformVisitor.getZk();
        final String user = sparqlToPigTransformVisitor.getUser();
        final String pass = sparqlToPigTransformVisitor.getPassword();
        final Connector connector = new ZooKeeperInstance(instance, zoo).getConnector(user, new PasswordToken(pass.getBytes(StandardCharsets.UTF_8)));
        final String tablePrefix = sparqlToPigTransformVisitor.getTablePrefix();
        conf.setTablePrefix(tablePrefix);
        if (inference) {
            logger.info("Using inference");
            inferenceEngine = new InferenceEngine();
            ryaDAO = new AccumuloRyaDAO();
            ryaDAO.setConf(conf);
            ryaDAO.setConnector(connector);
            ryaDAO.init();
            inferenceEngine.setRyaDAO(ryaDAO);
            inferenceEngine.setConf(conf);
            inferenceEngine.setSchedule(false);
            inferenceEngine.init();
        }
        if (stats) {
            logger.info("Using stats");
            rdfEvalStatsDAO = new AccumuloRdfEvalStatsDAO();
            rdfEvalStatsDAO.setConf(conf);
            rdfEvalStatsDAO.setConnector(connector);
            // rdfEvalStatsDAO.setEvalTable(tablePrefix + RdfCloudTripleStoreConstants.TBL_EVAL_SUFFIX);
            rdfEvalStatsDAO.init();
            rdfCloudTripleStoreEvaluationStatistics = new RdfCloudTripleStoreEvaluationStatistics<AccumuloRdfConfiguration>(conf, rdfEvalStatsDAO);
        }
    }
}
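The inference branch above wires an InferenceEngine to an AccumuloRyaDAO over a live ZooKeeperInstance connector. A minimal sketch of the same wiring against an in-memory Accumulo, handy for local experiments, might look like the following. The LocalInferenceSetup class, the "test"/"root" credentials, and the "rya_" prefix are placeholders, and MockInstance is assumed from the Accumulo client library; the DAO and engine calls mirror init() above.

import org.apache.accumulo.core.client.Connector;
import org.apache.accumulo.core.client.mock.MockInstance;
import org.apache.accumulo.core.client.security.tokens.PasswordToken;
import org.apache.rya.accumulo.AccumuloRdfConfiguration;
import org.apache.rya.accumulo.AccumuloRyaDAO;
import org.apache.rya.rdftriplestore.inference.InferenceEngine;

public class LocalInferenceSetup {
    public static InferenceEngine buildEngine() throws Exception {
        // In-memory Accumulo instead of a ZooKeeperInstance pointed at a real cluster.
        final Connector connector = new MockInstance("test").getConnector("root", new PasswordToken(""));
        final AccumuloRdfConfiguration conf = new AccumuloRdfConfiguration();
        conf.setTablePrefix("rya_");
        final AccumuloRyaDAO dao = new AccumuloRyaDAO();
        dao.setConf(conf);
        dao.setConnector(connector);
        dao.init();
        final InferenceEngine engine = new InferenceEngine();
        engine.setRyaDAO(dao);
        engine.setConf(conf);
        // Disable the periodic schema refresh, matching what init() does above.
        engine.setSchedule(false);
        engine.init();
        return engine;
    }
}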
use of org.apache.rya.rdftriplestore.inference.InferenceEngine in project incubator-rya by apache.
the class SameAsTest method testGraphConfiguration.
@Test
// This isn't a good test. It's simply a cut-and-paste from a test that was failing in a different package in the SameAsVisitor.
public void testGraphConfiguration() throws Exception {
    URI a = vf.createURI(namespace, "a");
    Statement statement = new StatementImpl(a, vf.createURI(namespace, "p"), vf.createLiteral("l"));
    Statement statement2 = new StatementImpl(a, vf.createURI(namespace, "p2"), vf.createLiteral("l"));
    ryaDAO.add(RdfToRyaConversions.convertStatement(statement));
    ryaDAO.add(RdfToRyaConversions.convertStatement(statement2));
    ryaDAO.add(RdfToRyaConversions.convertStatement(new StatementImpl(vf.createURI(namespace, "b"), vf.createURI(namespace, "p"), vf.createLiteral("l"))));
    ryaDAO.add(RdfToRyaConversions.convertStatement(new StatementImpl(vf.createURI(namespace, "c"), vf.createURI(namespace, "n"), vf.createLiteral("l"))));
    // build a connection
    RdfCloudTripleStore store = new RdfCloudTripleStore();
    store.setConf(conf);
    store.setRyaDAO(ryaDAO);
    InferenceEngine inferenceEngine = new InferenceEngine();
    inferenceEngine.setRyaDAO(ryaDAO);
    store.setInferenceEngine(inferenceEngine);
    store.initialize();
    System.out.println(Iterations.asList(store.getConnection().getStatements(a, vf.createURI(namespace, "p"), vf.createLiteral("l"), false, new Resource[0])).size());
}
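The test above only prints the match count. A small helper, not part of the Rya test suite and sketched here under the assumption that only the Sesame Sail API and the Iterations utility already used above are available, could turn that lookup into a reusable query that closes its connection.

import info.aduna.iteration.Iterations;
import java.util.List;
import org.openrdf.model.Resource;
import org.openrdf.model.Statement;
import org.openrdf.model.URI;
import org.openrdf.model.Value;
import org.openrdf.sail.Sail;
import org.openrdf.sail.SailConnection;
import org.openrdf.sail.SailException;

public final class SailQueries {
    /** Counts the statements matching a pattern, closing the connection afterwards. */
    public static int countMatches(final Sail sail, final Resource subj, final URI pred, final Value obj) throws SailException {
        final SailConnection conn = sail.getConnection();
        try {
            final List<? extends Statement> matches = Iterations.asList(conn.getStatements(subj, pred, obj, false));
            return matches.size();
        } finally {
            conn.close();
        }
    }
}

With the test's fixtures, the println could then become an assertion such as Assert.assertEquals(1, SailQueries.countMatches(store, a, vf.createURI(namespace, "p"), vf.createLiteral("l"))), presumably 1 because only one statement with subject "a" and predicate "p" was added.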
use of org.apache.rya.rdftriplestore.inference.InferenceEngine in project incubator-rya by apache.
the class HasValueVisitorTest method testRewriteValuePattern.
@Test
public void testRewriteValuePattern() throws Exception {
    // Configure a mock inference engine with an ontology:
    final InferenceEngine inferenceEngine = mock(InferenceEngine.class);
    Map<Resource, Set<Value>> typeToCharacteristic = new HashMap<>();
    Set<Value> chordateCharacteristics = new HashSet<>();
    Set<Value> vertebrateCharacteristics = new HashSet<>();
    chordateCharacteristics.add(notochord);
    vertebrateCharacteristics.addAll(chordateCharacteristics);
    vertebrateCharacteristics.add(skull);
    typeToCharacteristic.put(chordate, chordateCharacteristics);
    typeToCharacteristic.put(tunicate, chordateCharacteristics);
    typeToCharacteristic.put(vertebrate, vertebrateCharacteristics);
    typeToCharacteristic.put(mammal, vertebrateCharacteristics);
    when(inferenceEngine.getHasValueByProperty(hasCharacteristic)).thenReturn(typeToCharacteristic);
    // Query for a specific type and rewrite using the visitor:
    final Projection query = new Projection(
            new StatementPattern(new Var("s"), new Var("p", hasCharacteristic), new Var("o")),
            new ProjectionElemList(new ProjectionElem("s", "subject"), new ProjectionElem("o", "characteristic")));
    query.visit(new HasValueVisitor(conf, inferenceEngine));
    // Expected structure: Union(Join(FSP, SP), [original SP])
    Assert.assertTrue(query.getArg() instanceof Union);
    final Union union = (Union) query.getArg();
    final StatementPattern originalSP = new StatementPattern(new Var("s"), new Var("p", hasCharacteristic), new Var("o"));
    Join join;
    if (union.getLeftArg() instanceof Join) {
        join = (Join) union.getLeftArg();
        Assert.assertEquals(originalSP, union.getRightArg());
    } else {
        Assert.assertTrue(union.getRightArg() instanceof Join);
        join = (Join) union.getRightArg();
        Assert.assertEquals(originalSP, union.getLeftArg());
    }
    Assert.assertTrue(join.getLeftArg() instanceof FixedStatementPattern);
    Assert.assertTrue(join.getRightArg() instanceof StatementPattern);
    final FixedStatementPattern fsp = (FixedStatementPattern) join.getLeftArg();
    final StatementPattern sp = (StatementPattern) join.getRightArg();
    // Verify join: FSP{ ?t _ ?originalObjectVar } JOIN { ?originalSubjectVar rdf:type ?t }
    Assert.assertEquals(originalSP.getSubjectVar(), sp.getSubjectVar());
    Assert.assertEquals(RDF.TYPE, sp.getPredicateVar().getValue());
    Assert.assertEquals(fsp.getSubjectVar(), sp.getObjectVar());
    Assert.assertEquals(originalSP.getObjectVar(), fsp.getObjectVar());
    // Verify FSP: should provide (type, value) pairs
    final Set<Statement> expectedStatements = new HashSet<>();
    final URI fspPred = (URI) fsp.getPredicateVar().getValue();
    expectedStatements.add(vf.createStatement(chordate, fspPred, notochord));
    expectedStatements.add(vf.createStatement(tunicate, fspPred, notochord));
    expectedStatements.add(vf.createStatement(vertebrate, fspPred, notochord));
    expectedStatements.add(vf.createStatement(mammal, fspPred, notochord));
    expectedStatements.add(vf.createStatement(vertebrate, fspPred, skull));
    expectedStatements.add(vf.createStatement(mammal, fspPred, skull));
    final Set<Statement> actualStatements = new HashSet<>(fsp.statements);
    Assert.assertEquals(expectedStatements, actualStatements);
}
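The order-insensitive branch check in the middle of the test could be factored into a helper. The sketch below is not part of the Rya test suite (UnionAssertions and findJoinBranch are names invented here) and assumes nothing beyond JUnit and the OpenRDF algebra classes the test already imports.

import org.junit.Assert;
import org.openrdf.query.algebra.Join;
import org.openrdf.query.algebra.TupleExpr;
import org.openrdf.query.algebra.Union;

public final class UnionAssertions {
    /** Returns the Join branch of the Union and asserts the other branch equals expectedOther. */
    public static Join findJoinBranch(final Union union, final TupleExpr expectedOther) {
        if (union.getLeftArg() instanceof Join) {
            Assert.assertEquals(expectedOther, union.getRightArg());
            return (Join) union.getLeftArg();
        }
        Assert.assertTrue(union.getRightArg() instanceof Join);
        Assert.assertEquals(expectedOther, union.getLeftArg());
        return (Join) union.getRightArg();
    }
}

With it, the if/else above collapses to: final Join join = UnionAssertions.findJoinBranch(union, originalSP);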
use of org.apache.rya.rdftriplestore.inference.InferenceEngine in project incubator-rya by apache.
the class GeoRyaSailFactory method getRyaSail.
private static Sail getRyaSail(final Configuration config)
        throws InferenceEngineException, RyaDAOException, AccumuloException, AccumuloSecurityException, SailException {
    final RdfCloudTripleStore store = new RdfCloudTripleStore();
    final RyaDAO<?> dao;
    final RdfCloudTripleStoreConfiguration rdfConfig;
    final String user;
    final String pswd;
    // XXX Should(?) be MongoDBRdfConfiguration.MONGO_COLLECTION_PREFIX inside the if below. RYA-135
    final String ryaInstance = config.get(RdfCloudTripleStoreConfiguration.CONF_TBL_PREFIX);
    Objects.requireNonNull(ryaInstance, "RyaInstance or table prefix is missing from configuration." + RdfCloudTripleStoreConfiguration.CONF_TBL_PREFIX);
    if (ConfigUtils.getUseMongo(config)) {
        // Get a reference to a Mongo DB configuration object.
        final MongoDBRdfConfiguration mongoConfig = (config instanceof MongoDBRdfConfiguration) ? (MongoDBRdfConfiguration) config : new MongoDBRdfConfiguration(config);
        // Create the MongoClient that will be used by the Sail object's components.
        final MongoClient client = createMongoClient(mongoConfig);
        // Add the Indexer and Optimizer names to the configuration object that are configured to be used.
        OptionalConfigUtils.setIndexers(mongoConfig);
        // Populate the configuration using previously stored Rya Details if this instance uses them.
        try {
            final MongoRyaInstanceDetailsRepository ryaDetailsRepo = new MongoRyaInstanceDetailsRepository(client, mongoConfig.getRyaInstanceName());
            RyaDetailsToConfiguration.addRyaDetailsToConfiguration(ryaDetailsRepo.getRyaInstanceDetails(), mongoConfig);
        } catch (final RyaDetailsRepositoryException e) {
            LOG.info("Instance does not have a rya details collection, skipping.");
        }
        // Set the configuration to the stateful configuration that is used to pass the constructed objects around.
        final StatefulMongoDBRdfConfiguration statefulConfig = new StatefulMongoDBRdfConfiguration(mongoConfig, client);
        final List<MongoSecondaryIndex> indexers = statefulConfig.getInstances(AccumuloRdfConfiguration.CONF_ADDITIONAL_INDEXERS, MongoSecondaryIndex.class);
        statefulConfig.setIndexers(indexers);
        rdfConfig = statefulConfig;
        // Create the DAO that is able to interact with MongoDB.
        final MongoDBRyaDAO mongoDao = new MongoDBRyaDAO();
        mongoDao.setConf(statefulConfig);
        mongoDao.init();
        dao = mongoDao;
    } else {
        rdfConfig = new AccumuloRdfConfiguration(config);
        user = rdfConfig.get(ConfigUtils.CLOUDBASE_USER);
        pswd = rdfConfig.get(ConfigUtils.CLOUDBASE_PASSWORD);
        Objects.requireNonNull(user, "Accumulo user name is missing from configuration." + ConfigUtils.CLOUDBASE_USER);
        Objects.requireNonNull(pswd, "Accumulo user password is missing from configuration." + ConfigUtils.CLOUDBASE_PASSWORD);
        rdfConfig.setTableLayoutStrategy(new TablePrefixLayoutStrategy(ryaInstance));
        RyaSailFactory.updateAccumuloConfig((AccumuloRdfConfiguration) rdfConfig, user, pswd, ryaInstance);
        dao = getAccumuloDAO((AccumuloRdfConfiguration) rdfConfig);
    }
    store.setRyaDAO(dao);
    rdfConfig.setTablePrefix(ryaInstance);
    if (rdfConfig.isInfer()) {
        final InferenceEngine inferenceEngine = new InferenceEngine();
        inferenceEngine.setConf(rdfConfig);
        inferenceEngine.setRyaDAO(dao);
        inferenceEngine.init();
        store.setInferenceEngine(inferenceEngine);
    }
    store.initialize();
    return store;
}
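getRyaSail only attaches an InferenceEngine when isInfer() is true on the configuration. A hedged usage sketch of a caller enabling that flag before requesting a Sail is shown below; since getRyaSail is private, it goes through the public RyaSailFactory.getInstance entry point seen earlier on this page. The InferenceEnabledSail class and the connection values are placeholders, and the CLOUDBASE_INSTANCE and CLOUDBASE_ZOOKEEPERS constants and the setInfer(true) setter are assumed from ConfigUtils and RdfCloudTripleStoreConfiguration, so adjust them to your deployment.

import org.apache.rya.accumulo.AccumuloRdfConfiguration;
import org.apache.rya.indexing.accumulo.ConfigUtils;
import org.apache.rya.sail.config.RyaSailFactory;
import org.openrdf.sail.Sail;

public class InferenceEnabledSail {
    public static Sail openSail() throws Exception {
        final AccumuloRdfConfiguration conf = new AccumuloRdfConfiguration();
        conf.setTablePrefix("rya_");
        conf.set(ConfigUtils.CLOUDBASE_USER, "root");
        conf.set(ConfigUtils.CLOUDBASE_PASSWORD, "secret");
        conf.set(ConfigUtils.CLOUDBASE_INSTANCE, "accumulo");
        conf.set(ConfigUtils.CLOUDBASE_ZOOKEEPERS, "localhost:2181");
        // Assumed setter on RdfCloudTripleStoreConfiguration; with it set, the factory
        // wires an InferenceEngine into the returned store, as in the isInfer() branch above.
        conf.setInfer(true);
        return RyaSailFactory.getInstance(conf);
    }
}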