use of org.apache.rya.forwardchain.ForwardChainException in project incubator-rya by apache.
the class MongoPipelineStrategy, method toPipeline.
/**
 * Converts a construct rule into a series of documents representing
 * aggregation pipeline steps.
 * @param rule A construct query rule.
 * @param sourceLevel Only make derivations whose source triples have at
 *      least this derivation level, i.e. were themselves inferred by at
 *      least that many forward chaining steps. Set to zero to skip this
 *      check.
 * @param timestamp Timestamp to be set for all inferred triples.
 * @return An aggregation pipeline, or null if the rule could not be
 *      converted into a pipeline.
 * @throws ForwardChainException if pipeline construction fails.
 */
private List<Bson> toPipeline(AbstractConstructRule rule, int sourceLevel, long timestamp) throws ForwardChainException {
    TupleExpr tupleExpr = rule.getQuery().getTupleExpr();
    if (!(tupleExpr instanceof QueryRoot)) {
        tupleExpr = new QueryRoot(tupleExpr);
    }
    try {
        tupleExpr.visit(pipelineVisitor);
    } catch (Exception e) {
        throw new ForwardChainException("Error converting construct rule to an aggregation pipeline", e);
    }
    if (tupleExpr instanceof QueryRoot) {
        QueryRoot root = (QueryRoot) tupleExpr;
        if (root.getArg() instanceof AggregationPipelineQueryNode) {
            AggregationPipelineQueryNode pipelineNode = (AggregationPipelineQueryNode) root.getArg();
            // require distinct triples
            pipelineNode.distinct();
            pipelineNode.requireSourceDerivationDepth(sourceLevel);
            long latestTime = executionTimes.getOrDefault(rule, 0L);
            if (latestTime > 0) {
                pipelineNode.requireSourceTimestamp(latestTime);
            }
            return pipelineNode.getTriplePipeline(timestamp, false);
        }
    }
    // The visitor couldn't reduce the whole query to a single pipeline node.
    return null;
}
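The List<Bson> returned here is an ordinary aggregation pipeline for the MongoDB Java driver. As a rough illustration, here is a minimal sketch of running such a pipeline directly; the client setup, database name, and collection name below are hypothetical, not taken from Rya.

import java.util.List;

import org.bson.Document;
import org.bson.conversions.Bson;

import com.mongodb.MongoClient;
import com.mongodb.client.MongoCollection;

public class PipelineRunnerSketch {
    public static void dumpPipelineResults(List<Bson> pipeline) {
        MongoClient client = new MongoClient("localhost", 27017);
        try {
            // Hypothetical database/collection names for illustration only.
            MongoCollection<Document> triples =
                    client.getDatabase("rya").getCollection("rya_triples");
            // Each document emitted by the aggregation represents one candidate
            // inferred triple; here we simply print it.
            for (Document doc : triples.aggregate(pipeline).allowDiskUse(true)) {
                System.out.println(doc.toJson());
            }
        } finally {
            client.close();
        }
    }
}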
use of org.apache.rya.forwardchain.ForwardChainException in project incubator-rya by apache.
the class SailExecutionStrategy, method executeConstructRule.
/**
 * Executes a CONSTRUCT query through the SAIL and inserts the results into
 * the DAO.
 * @param rule A construct query; not null.
 * @param metadata Metadata to add to any inferred triples; not null.
 * @return The number of inferred triples.
 * @throws ForwardChainException if query execution or data insertion fails.
 */
@Override
public long executeConstructRule(AbstractConstructRule rule, StatementMetadata metadata) throws ForwardChainException {
    Preconditions.checkNotNull(rule);
    Preconditions.checkNotNull(metadata);
    if (!initialized) {
        initialize();
    }
    ParsedGraphQuery graphQuery = rule.getQuery();
    long statementsAdded = 0;
    logger.info("Applying inference rule " + rule + "...");
    for (String line : graphQuery.getTupleExpr().toString().split("\n")) {
        logger.debug("\t" + line);
    }
    InferredStatementHandler<?> handler = new InferredStatementHandler<>(dao, metadata);
    try {
        // Anonymous subclass: SailGraphQuery's constructor is protected, so this
        // is the simplest way to wrap an already-parsed graph query.
        GraphQuery executableQuery = new SailGraphQuery(graphQuery, conn) {
        };
        executableQuery.evaluate(handler);
        statementsAdded = handler.getNumStatementsAdded();
        logger.info("Added " + statementsAdded + " inferred statements.");
        return statementsAdded;
    } catch (QueryEvaluationException e) {
        throw new ForwardChainException("Error evaluating query portion of construct rule", e);
    } catch (RDFHandlerException e) {
        throw new ForwardChainException("Error processing results of construct rule", e);
    }
}
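For context, a caller might apply a batch of rules through this strategy in a single pass, as in the hypothetical sketch below. The package paths for AbstractConstructRule and SailExecutionStrategy, and the shutDown() call, are assumptions based on the code shown on this page rather than confirmed API.

import java.util.Collection;

import org.apache.rya.api.domain.StatementMetadata;
import org.apache.rya.forwardchain.ForwardChainException;
// Assumed package locations for the classes shown on this page.
import org.apache.rya.forwardchain.rule.AbstractConstructRule;
import org.apache.rya.forwardchain.strategy.SailExecutionStrategy;

public class SinglePassSketch {
    /**
     * Apply each CONSTRUCT rule once and return the total number of
     * statements inferred. executeConstructRule initializes the strategy
     * lazily, so no explicit initialize() call is needed here.
     */
    public static long applyOnce(SailExecutionStrategy strategy,
            Collection<AbstractConstructRule> rules,
            StatementMetadata metadata) throws ForwardChainException {
        long total = 0;
        try {
            for (AbstractConstructRule rule : rules) {
                total += strategy.executeConstructRule(rule, metadata);
            }
        } finally {
            // Assumed: the strategy exposes shutDown(), as suggested by the
            // initialize() method shown below.
            strategy.shutDown();
        }
        return total;
    }
}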
use of org.apache.rya.forwardchain.ForwardChainException in project incubator-rya by apache.
the class SailExecutionStrategy, method initialize.
/**
 * Connect to the Rya SAIL. If a DAO wasn't provided, instantiate one from
 * the configuration.
 * @throws ForwardChainException if connecting fails.
 */
@Override
public void initialize() throws ForwardChainException {
    try {
        if (dao == null) {
            dao = getDAO();
        }
        repo = new SailRepository(RyaSailFactory.getInstance(conf));
        conn = repo.getConnection();
        initialized = true;
    } catch (Exception e) {
        shutDown();
        throw new ForwardChainException("Error connecting to SAIL", e);
    }
}
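The connection handling follows the standard Sesame repository lifecycle: wrap the Sail in a SailRepository, open a connection, and later close the connection before shutting the repository down. A minimal sketch of that ordering, using the same org.openrdf classes the surrounding code appears to use:

import org.openrdf.repository.RepositoryConnection;
import org.openrdf.repository.RepositoryException;
import org.openrdf.repository.sail.SailRepository;
import org.openrdf.sail.Sail;

public class SailLifecycleSketch {
    public static void useRepository(Sail sail) throws RepositoryException {
        // Mirrors the code above: the Sail is assumed to already be
        // initialized, as with RyaSailFactory.getInstance(conf).
        SailRepository repo = new SailRepository(sail);
        RepositoryConnection conn = repo.getConnection();
        try {
            // ... evaluate queries or add statements through conn ...
        } finally {
            conn.close();      // release the connection first
            repo.shutDown();   // then shut the repository down
        }
    }
}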
use of org.apache.rya.forwardchain.ForwardChainException in project incubator-rya by apache.
the class SpinConstructRule, method loadSpinRules.
/**
 * Load a set of SPIN rules from a data store.
 * @param conf Contains the connection information. Not null.
 * @return A ruleset containing the parsed rules.
 * @throws ForwardChainException if connecting, querying for rules, or
 *      parsing rules fails.
 */
public static Ruleset loadSpinRules(RdfCloudTripleStoreConfiguration conf) throws ForwardChainException {
    Preconditions.checkNotNull(conf);
    Map<Resource, Rule> rules = new ConcurrentHashMap<>();
    // Connect to Rya
    SailRepository repository = null;
    SailRepositoryConnection conn = null;
    try {
        repository = new SailRepository(RyaSailFactory.getInstance(conf));
    } catch (Exception e) {
        throw new ForwardChainException("Couldn't initialize SAIL from configuration", e);
    }
    // Load and parse the individual SPIN rules from the data store
    String ruleQueryString = "SELECT ?type ?rule ?text WHERE {\n"
            + "  ?type <" + SPIN.RULE_PROPERTY.stringValue() + "> ?rule .\n"
            + "  {\n"
            + "    ?rule a <" + SP.CONSTRUCT_CLASS.stringValue() + "> .\n"
            + "    ?rule <" + SP.TEXT_PROPERTY.stringValue() + "> ?text .\n"
            + "  } UNION {\n"
            + "    ?rule a ?template .\n"
            + "    ?template <" + SPIN.BODY_PROPERTY + ">? ?body .\n"
            + "    ?body a <" + SP.CONSTRUCT_CLASS.stringValue() + "> .\n"
            + "    ?body <" + SP.TEXT_PROPERTY.stringValue() + "> ?text .\n"
            + "  }\n"
            + "}";
    SPARQLParser parser = new SPARQLParser();
    try {
        conn = repository.getConnection();
        TupleQuery ruleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, ruleQueryString);
        ruleQuery.evaluate(new TupleQueryResultHandlerBase() {
            @Override
            public void handleSolution(BindingSet bs) throws TupleQueryResultHandlerException {
                // For each rule identifier found, instantiate a SpinRule
                Value requiredType = bs.getValue("type");
                Value ruleIdentifier = bs.getValue("rule");
                Value ruleText = bs.getValue("text");
                if (requiredType instanceof Resource && ruleIdentifier instanceof Resource
                        && ruleText instanceof Literal) {
                    ParsedQuery parsedRule;
                    try {
                        parsedRule = parser.parseQuery(ruleText.stringValue(), null);
                        if (parsedRule instanceof ParsedGraphQuery) {
                            SpinConstructRule rule = new SpinConstructRule((Resource) requiredType,
                                    (Resource) ruleIdentifier, (ParsedGraphQuery) parsedRule);
                            if (rule.hasAnonymousConsequent()) {
                                logger.error("Skipping unsupported rule " + ruleIdentifier
                                        + " -- consequent refers to bnode, which is not"
                                        + " currently supported (creating new bnodes at each"
                                        + " application could lead to infinite recursion).");
                            } else {
                                rules.put((Resource) ruleIdentifier, rule);
                            }
                        }
                    } catch (Exception e) {
                        throw new TupleQueryResultHandlerException(e);
                    }
                }
            }
        });
    } catch (TupleQueryResultHandlerException | QueryEvaluationException | MalformedQueryException
            | RepositoryException e) {
        throw new ForwardChainException("Couldn't retrieve SPIN rules", e);
    } finally {
        if (conn != null) {
            try {
                conn.close();
            } catch (RepositoryException e) {
                logger.warn("Error closing repository connection", e);
            }
        }
        if (repository.isInitialized()) {
            try {
                repository.shutDown();
            } catch (RepositoryException e) {
                logger.warn("Error shutting down repository", e);
            }
        }
    }
    return new Ruleset(rules.values());
}
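For readability, the rule lookup query assembled above expands to roughly the following SPARQL, assuming the standard SPIN vocabulary IRIs for spin:rule, sp:Construct, sp:text, and spin:body. This is a sketch shown as a plain Java constant, not part of the Rya source.

public final class SpinRuleQuerySketch {
    // Approximate expansion of ruleQueryString, assuming the standard SPIN
    // namespaces http://spinrdf.org/spin# and http://spinrdf.org/sp#.
    public static final String RULE_QUERY =
            "SELECT ?type ?rule ?text WHERE {\n"
            + "  ?type <http://spinrdf.org/spin#rule> ?rule .\n"
            + "  {\n"
            + "    ?rule a <http://spinrdf.org/sp#Construct> .\n"
            + "    ?rule <http://spinrdf.org/sp#text> ?text .\n"
            + "  } UNION {\n"
            + "    ?rule a ?template .\n"
            + "    ?template <http://spinrdf.org/spin#body>? ?body .\n"
            + "    ?body a <http://spinrdf.org/sp#Construct> .\n"
            + "    ?body <http://spinrdf.org/sp#text> ?text .\n"
            + "  }\n"
            + "}";
}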
use of org.apache.rya.forwardchain.ForwardChainException in project incubator-rya by apache.
the class MongoPipelineStrategy, method executeConstructRule.
/**
 * Execute a CONSTRUCT rule by converting it into a pipeline, iterating
 * through the resulting documents, and inserting them back into the data
 * store as new triples. If pipeline conversion fails, falls back on the
 * default execution strategy.
 * @param rule A construct query rule; not null.
 * @param metadata StatementMetadata to attach to new triples; not null.
 * @return The number of new triples inferred.
 * @throws ForwardChainException if execution fails.
 */
@Override
public long executeConstructRule(AbstractConstructRule rule, StatementMetadata metadata) throws ForwardChainException {
    Preconditions.checkNotNull(rule);
    logger.info("Applying inference rule " + rule + "...");
    long timestamp = System.currentTimeMillis();
    // Get a pipeline that turns individual matches into triples
    List<Bson> pipeline = null;
    try {
        int requireSourceLevel = 0;
        if (!usedBackup) {
            // If we can assume derivation levels are set properly, we can optimize by
            // pruning any derived fact whose sources are all old information. (i.e. we can
            // infer that the pruned fact would have already been derived in a previous
            // step.) But if the backup strategy has ever been used, the source triples aren't
            // guaranteed to have derivation level set.
            requireSourceLevel = requiredLevel;
        }
        pipeline = toPipeline(rule, requireSourceLevel, timestamp);
    } catch (ForwardChainException e) {
        logger.error(e);
    }
    if (pipeline == null) {
        if (backup == null) {
            logger.error("Couldn't convert " + rule + " to pipeline:");
            for (String line : rule.getQuery().toString().split("\n")) {
                logger.error("\t" + line);
            }
            throw new UnsupportedOperationException("Couldn't convert query to pipeline.");
        } else {
            logger.debug("Couldn't convert " + rule + " to pipeline:");
            for (String line : rule.getQuery().toString().split("\n")) {
                logger.debug("\t" + line);
            }
            logger.debug("Using fallback strategy.");
            usedBackup = true;
            return backup.executeConstructRule(rule, metadata);
        }
    }
    // Execute the pipeline
    for (Bson step : pipeline) {
        logger.debug("\t" + step.toString());
    }
    LongAdder count = new LongAdder();
    baseCollection.aggregate(pipeline)
            .allowDiskUse(true)
            .batchSize(PIPELINE_BATCH_SIZE)
            .forEach(new Block<Document>() {
        @Override
        public void apply(Document doc) {
            final DBObject dbo = (DBObject) JSON.parse(doc.toJson());
            RyaStatement rstmt = storageStrategy.deserializeDBObject(dbo);
            if (!statementExists(rstmt)) {
                count.increment();
                doc.replace(SimpleMongoDBStorageStrategy.STATEMENT_METADATA, metadata.toString());
                try {
                    batchWriter.addObjectToQueue(doc);
                } catch (MongoDbBatchWriterException e) {
                    logger.error("Couldn't insert " + rstmt, e);
                }
            }
        }
    });
    try {
        batchWriter.flush();
    } catch (MongoDbBatchWriterException e) {
        throw new ForwardChainException("Error writing to Mongo", e);
    }
    logger.info("Added " + count + " new statements.");
    // Record this execution time so later passes only consider newer source triples.
    executionTimes.compute(rule, (r, previous) -> {
        if (previous != null && previous > timestamp) {
            return previous;
        } else {
            return timestamp;
        }
    });
    return count.longValue();
}
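Tying the pieces together, a forward-chaining driver would typically re-apply the whole ruleset until a pass infers nothing new; the timestamp bookkeeping above keeps repeated passes from re-deriving from the same old source triples. The following is a hypothetical sketch of such a loop; the package paths and the exact driver class Rya uses are assumptions, not taken from this page.

import java.util.Collection;

import org.apache.rya.api.domain.StatementMetadata;
import org.apache.rya.forwardchain.ForwardChainException;
// Assumed package locations for the classes shown on this page.
import org.apache.rya.forwardchain.rule.AbstractConstructRule;
import org.apache.rya.forwardchain.strategy.MongoPipelineStrategy;

public class FixedPointSketch {
    /**
     * Repeatedly apply every rule until a full pass adds no new statements,
     * then return the total number of inferred triples.
     */
    public static long forwardChain(MongoPipelineStrategy strategy,
            Collection<AbstractConstructRule> rules,
            StatementMetadata metadata) throws ForwardChainException {
        long total = 0;
        long addedThisPass;
        do {
            addedThisPass = 0;
            for (AbstractConstructRule rule : rules) {
                // Thanks to the executionTimes/requireSourceTimestamp bookkeeping,
                // each pass only derives from triples newer than the rule's last run.
                addedThisPass += strategy.executeConstructRule(rule, metadata);
            }
            total += addedThisPass;
        } while (addedThisPass > 0);
        return total;
    }
}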