Use of org.eclipse.rdf4j.sail.SailException in project incubator-rya by apache.
From the class QueryRuleset, method setRules:
/**
 * Extract the rules from the query string, applying inference rules if configured to.
 * @throws QueryRulesetException if the query can't be parsed and translated into valid rules.
 */
private void setRules() throws QueryRulesetException {
    final ParsedTupleQuery ptq;
    final TupleExpr te;
    try {
        ptq = QueryParserUtil.parseTupleQuery(QueryLanguage.SPARQL, query, null);
    } catch (UnsupportedQueryLanguageException | MalformedQueryException e) {
        throw new QueryRulesetException("Error parsing query:\n" + query, e);
    }
    te = ptq.getTupleExpr();
    // Before converting to rules (and renaming variables), validate that no statement patterns
    // consist of only variables (this would result in a rule that matches every triple).
    // This needs to be done before inference, since inference may create such statement patterns,
    // which are acceptable because they won't be converted to rules directly.
    te.visit(new AbstractQueryModelVisitor<QueryRulesetException>() {
        @Override
        public void meet(final StatementPattern node) throws QueryRulesetException {
            if (!(node.getSubjectVar().hasValue() || node.getPredicateVar().hasValue() || node.getObjectVar().hasValue())) {
                throw new QueryRulesetException("Statement pattern with no constants would match every statement:\n" + node + "\nFrom parsed query:\n" + te);
            }
        }
    });
    // Apply inference, if applicable
    if (conf != null && conf.isInfer()) {
        RdfCloudTripleStore store = null;
        try {
            log.info("Applying inference rules");
            store = (RdfCloudTripleStore) RyaSailFactory.getInstance(conf);
            final InferenceEngine inferenceEngine = store.getInferenceEngine();
            // Apply in the same order as query evaluation:
            te.visit(new TransitivePropertyVisitor(conf, inferenceEngine));
            te.visit(new SymmetricPropertyVisitor(conf, inferenceEngine));
            te.visit(new InverseOfVisitor(conf, inferenceEngine));
            te.visit(new SubPropertyOfVisitor(conf, inferenceEngine));
            te.visit(new SubClassOfVisitor(conf, inferenceEngine));
            te.visit(new SameAsVisitor(conf, inferenceEngine));
            log.info("Query after inference:\n");
            for (final String line : te.toString().split("\n")) {
                log.info("\t" + line);
            }
        } catch (final Exception e) {
            throw new QueryRulesetException("Error applying inference to parsed query:\n" + te, e);
        } finally {
            if (store != null) {
                try {
                    store.shutDown();
                } catch (final SailException e) {
                    log.error("Error shutting down Sail after applying inference", e);
                }
            }
        }
    }
    // Extract the StatementPatterns and Filters and turn them into rules:
    final RulesetVisitor rv = new RulesetVisitor();
    try {
        te.visit(rv);
        rv.addSchema();
    } catch (final QueryRulesetException e) {
        throw new QueryRulesetException("Error extracting rules from parsed query:\n" + te, e);
    }
    // Keep a candidate rule only if no other rule is a generalization of it:
    for (final CopyRule candidateRule : rv.rules) {
        boolean unique = true;
        for (final CopyRule otherRule : rv.rules) {
            if (!candidateRule.equals(otherRule) && otherRule.isGeneralizationOf(candidateRule)) {
                unique = false;
                break;
            }
        }
        if (unique) {
            rules.add(candidateRule);
        }
    }
}
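The finally block above is the recurring pattern on this page: Sail.shutDown() is itself declared to throw SailException, so cleanup needs its own handler. Below is a minimal, self-contained sketch of that pattern, not code from the project; the helper buildConf() is hypothetical and stands in for however the Rya configuration is assembled, and the import paths assume the current org.apache.rya packages.

import org.apache.hadoop.conf.Configuration;
import org.apache.rya.sail.config.RyaSailFactory;
import org.eclipse.rdf4j.sail.Sail;
import org.eclipse.rdf4j.sail.SailException;

public class SailShutdownSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = buildConf(); // hypothetical helper, see below
        Sail sail = null;
        try {
            sail = RyaSailFactory.getInstance(conf); // same factory call as in the snippet above
            // ... use the Sail (open a connection, run queries, etc.) ...
        } finally {
            if (sail != null) {
                try {
                    sail.shutDown(); // shutDown() itself throws SailException
                } catch (SailException e) {
                    System.err.println("Error shutting down Sail: " + e.getMessage());
                }
            }
        }
    }

    private static Configuration buildConf() {
        // Hypothetical: real code would set the Accumulo instance, zookeepers,
        // credentials, table prefix, and so on before handing this to the factory.
        return new Configuration();
    }
}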
Use of org.eclipse.rdf4j.sail.SailException in project incubator-rya by apache.
From the class RdfCloudTripleStoreConnection, method refreshConnection:
protected void refreshConnection() throws SailException {
    try {
        checkNotNull(store.getRyaDAO());
        checkArgument(store.getRyaDAO().isInitialized());
        checkNotNull(store.getNamespaceManager());
        this.ryaDAO = store.getRyaDAO();
        this.rdfEvalStatsDAO = store.getRdfEvalStatsDAO();
        this.selectEvalDAO = store.getSelectEvalDAO();
        this.inferenceEngine = store.getInferenceEngine();
        this.namespaceManager = store.getNamespaceManager();
        this.provenanceCollector = store.getProvenanceCollector();
    } catch (final Exception e) {
        throw new SailException(e);
    }
}
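refreshConnection is a compact example of the convention used throughout these classes: validate state with Guava preconditions and wrap any failure in the checked SailException that the Sail SPI expects. A stripped-down sketch of that pattern follows; the Component type is a hypothetical stand-in for the DAO and manager fields refreshed above.

import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;

import org.eclipse.rdf4j.sail.SailException;

public class PreconditionWrapSketch {
    // Hypothetical component, standing in for RyaDAO, NamespaceManager, etc.
    interface Component {
        boolean isInitialized();
    }

    private Component component;

    protected void refresh(Component candidate) throws SailException {
        try {
            checkNotNull(candidate, "component must not be null");
            checkArgument(candidate.isInitialized(), "component must be initialized");
            this.component = candidate;
        } catch (Exception e) {
            // The NullPointerException or IllegalArgumentException thrown by the preconditions
            // (or any other failure) is converted to a SailException for the caller.
            throw new SailException(e);
        }
    }
}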
Use of org.eclipse.rdf4j.sail.SailException in project incubator-rya by apache.
From the class RdfCloudTripleStoreConnection, method evaluateInternal:
@Override
protected CloseableIteration<? extends BindingSet, QueryEvaluationException> evaluateInternal(TupleExpr tupleExpr, final Dataset dataset, BindingSet bindings, final boolean flag) throws SailException {
    verifyIsOpen();
    logger.trace("Incoming query model:\n{}", tupleExpr.toString());
    if (provenanceCollector != null) {
        try {
            provenanceCollector.recordQuery(tupleExpr.toString());
        } catch (final ProvenanceCollectionException e) {
            logger.trace("Provenance failed to record query.", e);
        }
    }
    tupleExpr = tupleExpr.clone();
    final C queryConf = (C) store.getConf().clone();
    if (queryConf == null) {
        // Should not happen, but this is better than a null dereference error.
        throw new SailException("Cloning store.getConf() returned null, aborting.");
    }
    if (bindings != null) {
        // Per-query overrides of the store configuration may be passed in as specially named bindings:
        final Binding dispPlan = bindings.getBinding(RdfCloudTripleStoreConfiguration.CONF_QUERYPLAN_FLAG);
        if (dispPlan != null) {
            queryConf.setDisplayQueryPlan(Boolean.parseBoolean(dispPlan.getValue().stringValue()));
        }
        final Binding authBinding = bindings.getBinding(RdfCloudTripleStoreConfiguration.CONF_QUERY_AUTH);
        if (authBinding != null) {
            queryConf.setAuths(authBinding.getValue().stringValue().split(","));
        }
        final Binding ttlBinding = bindings.getBinding(RdfCloudTripleStoreConfiguration.CONF_TTL);
        if (ttlBinding != null) {
            queryConf.setTtl(Long.valueOf(ttlBinding.getValue().stringValue()));
        }
        final Binding startTimeBinding = bindings.getBinding(RdfCloudTripleStoreConfiguration.CONF_STARTTIME);
        if (startTimeBinding != null) {
            queryConf.setStartTime(Long.valueOf(startTimeBinding.getValue().stringValue()));
        }
        final Binding performantBinding = bindings.getBinding(RdfCloudTripleStoreConfiguration.CONF_PERFORMANT);
        if (performantBinding != null) {
            queryConf.setBoolean(RdfCloudTripleStoreConfiguration.CONF_PERFORMANT, Boolean.parseBoolean(performantBinding.getValue().stringValue()));
        }
        final Binding inferBinding = bindings.getBinding(RdfCloudTripleStoreConfiguration.CONF_INFER);
        if (inferBinding != null) {
            queryConf.setInfer(Boolean.parseBoolean(inferBinding.getValue().stringValue()));
        }
        final Binding useStatsBinding = bindings.getBinding(RdfCloudTripleStoreConfiguration.CONF_USE_STATS);
        if (useStatsBinding != null) {
            queryConf.setUseStats(Boolean.parseBoolean(useStatsBinding.getValue().stringValue()));
        }
        final Binding offsetBinding = bindings.getBinding(RdfCloudTripleStoreConfiguration.CONF_OFFSET);
        if (offsetBinding != null) {
            queryConf.setOffset(Long.parseLong(offsetBinding.getValue().stringValue()));
        }
        final Binding limitBinding = bindings.getBinding(RdfCloudTripleStoreConfiguration.CONF_LIMIT);
        if (limitBinding != null) {
            queryConf.setLimit(Long.parseLong(limitBinding.getValue().stringValue()));
        }
    } else {
        bindings = new QueryBindingSet();
    }
    if (!(tupleExpr instanceof QueryRoot)) {
        tupleExpr = new QueryRoot(tupleExpr);
    }
    try {
        final List<Class<QueryOptimizer>> optimizers = queryConf.getOptimizers();
        final Class<QueryOptimizer> pcjOptimizer = queryConf.getPcjOptimizer();
        if (pcjOptimizer != null) {
            QueryOptimizer opt = null;
            try {
                final Constructor<QueryOptimizer> construct = pcjOptimizer.getDeclaredConstructor();
                opt = construct.newInstance();
            } catch (final Exception e) {
                // No usable no-arg constructor; reported below.
            }
            if (opt == null) {
                throw new NoSuchMethodException("Could not find valid constructor for " + pcjOptimizer.getName());
            }
            if (opt instanceof Configurable) {
                ((Configurable) opt).setConf(conf);
            }
            opt.optimize(tupleExpr, dataset, bindings);
        }
        final ParallelEvaluationStrategyImpl strategy = new ParallelEvaluationStrategyImpl(new StoreTripleSource<C>(queryConf, ryaDAO), inferenceEngine, dataset, queryConf);
        (new BindingAssigner()).optimize(tupleExpr, dataset, bindings);
        (new ConstantOptimizer(strategy)).optimize(tupleExpr, dataset, bindings);
        (new CompareOptimizer()).optimize(tupleExpr, dataset, bindings);
        (new ConjunctiveConstraintSplitter()).optimize(tupleExpr, dataset, bindings);
        (new DisjunctiveConstraintOptimizer()).optimize(tupleExpr, dataset, bindings);
        (new SameTermFilterOptimizer()).optimize(tupleExpr, dataset, bindings);
        (new QueryModelNormalizer()).optimize(tupleExpr, dataset, bindings);
        (new IterativeEvaluationOptimizer()).optimize(tupleExpr, dataset, bindings);
        if (!optimizers.isEmpty()) {
            for (final Class<QueryOptimizer> optclz : optimizers) {
                QueryOptimizer result = null;
                try {
                    // Try a no-arg constructor first...
                    final Constructor<QueryOptimizer> meth = optclz.getDeclaredConstructor();
                    result = meth.newInstance();
                } catch (final Exception e) {
                }
                try {
                    // ...then a constructor taking an EvaluationStrategy, which takes precedence if present.
                    final Constructor<QueryOptimizer> meth = optclz.getDeclaredConstructor(EvaluationStrategy.class);
                    result = meth.newInstance(strategy);
                } catch (final Exception e) {
                }
                if (result == null) {
                    throw new NoSuchMethodException("Could not find valid constructor for " + optclz.getName());
                }
                if (result instanceof Configurable) {
                    ((Configurable) result).setConf(conf);
                }
                result.optimize(tupleExpr, dataset, bindings);
            }
        }
        (new FilterOptimizer()).optimize(tupleExpr, dataset, bindings);
        (new OrderLimitOptimizer()).optimize(tupleExpr, dataset, bindings);
        logger.trace("Optimized query model:\n{}", tupleExpr.toString());
        if (queryConf.isInfer() && this.inferenceEngine != null) {
            try {
                tupleExpr.visit(new DomainRangeVisitor(queryConf, inferenceEngine));
                tupleExpr.visit(new SomeValuesFromVisitor(queryConf, inferenceEngine));
                tupleExpr.visit(new AllValuesFromVisitor(queryConf, inferenceEngine));
                tupleExpr.visit(new HasValueVisitor(queryConf, inferenceEngine));
                tupleExpr.visit(new IntersectionOfVisitor(queryConf, inferenceEngine));
                tupleExpr.visit(new ReflexivePropertyVisitor(queryConf, inferenceEngine));
                tupleExpr.visit(new PropertyChainVisitor(queryConf, inferenceEngine));
                tupleExpr.visit(new TransitivePropertyVisitor(queryConf, inferenceEngine));
                tupleExpr.visit(new SymmetricPropertyVisitor(queryConf, inferenceEngine));
                tupleExpr.visit(new InverseOfVisitor(queryConf, inferenceEngine));
                tupleExpr.visit(new SubPropertyOfVisitor(queryConf, inferenceEngine));
                tupleExpr.visit(new SubClassOfVisitor(queryConf, inferenceEngine));
                tupleExpr.visit(new SameAsVisitor(queryConf, inferenceEngine));
                tupleExpr.visit(new OneOfVisitor(queryConf, inferenceEngine));
                tupleExpr.visit(new HasSelfVisitor(queryConf, inferenceEngine));
            } catch (final Exception e) {
                logger.error("Error encountered while visiting query node.", e);
            }
        }
        if (queryConf.isPerformant()) {
            tupleExpr.visit(new SeparateFilterJoinsVisitor());
            // tupleExpr.visit(new FilterTimeIndexVisitor(queryConf));
            // tupleExpr.visit(new PartitionFilterTimeIndexVisitor(queryConf));
        }
        final FilterRangeVisitor rangeVisitor = new FilterRangeVisitor(queryConf);
        tupleExpr.visit(rangeVisitor);
        // This has to be done twice to replace the StatementPatterns with the right ranges.
        tupleExpr.visit(rangeVisitor);
        EvaluationStatistics stats = null;
        if (!queryConf.isUseStats() && queryConf.isPerformant() || rdfEvalStatsDAO == null) {
            stats = new DefaultStatistics();
        } else if (queryConf.isUseStats()) {
            if (queryConf.isUseSelectivity()) {
                stats = new RdfCloudTripleStoreSelectivityEvaluationStatistics<C>(queryConf, rdfEvalStatsDAO, selectEvalDAO);
            } else {
                stats = new RdfCloudTripleStoreEvaluationStatistics<C>(queryConf, rdfEvalStatsDAO);
            }
        }
        if (stats != null) {
            if (stats instanceof RdfCloudTripleStoreSelectivityEvaluationStatistics) {
                final QueryJoinSelectOptimizer qjso = new QueryJoinSelectOptimizer(stats, selectEvalDAO);
                qjso.optimize(tupleExpr, dataset, bindings);
            } else {
                final QueryJoinOptimizer qjo = new QueryJoinOptimizer(stats);
                // TODO: Make pluggable
                qjo.optimize(tupleExpr, dataset, bindings);
            }
        }
        final CloseableIteration<BindingSet, QueryEvaluationException> iter = strategy.evaluate(tupleExpr, EmptyBindingSet.getInstance());
        // Wrap the result iteration so that closing it also shuts down the evaluation strategy.
        final CloseableIteration<BindingSet, QueryEvaluationException> iterWrap = new CloseableIteration<BindingSet, QueryEvaluationException>() {
            @Override
            public void remove() throws QueryEvaluationException {
                iter.remove();
            }
            @Override
            public BindingSet next() throws QueryEvaluationException {
                return iter.next();
            }
            @Override
            public boolean hasNext() throws QueryEvaluationException {
                return iter.hasNext();
            }
            @Override
            public void close() throws QueryEvaluationException {
                iter.close();
                strategy.shutdown();
            }
        };
        return iterWrap;
    } catch (final QueryEvaluationException e) {
        throw new SailException(e);
    } catch (final Exception e) {
        throw new SailException(e);
    }
}
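A consequence of the binding checks at the top of evaluateInternal is that callers can tune a single query through specially named bindings (auths, TTL, inference, limits) instead of rebuilding the store configuration. The sketch below shows that usage from the Sail API side; it is an illustration rather than code from the project, it assumes the Sail has already been obtained and initialized (for example via RyaSailFactory as above), and the binding values are made-up examples.

import org.apache.rya.api.RdfCloudTripleStoreConfiguration;
import org.eclipse.rdf4j.common.iteration.CloseableIteration;
import org.eclipse.rdf4j.model.ValueFactory;
import org.eclipse.rdf4j.model.impl.SimpleValueFactory;
import org.eclipse.rdf4j.query.BindingSet;
import org.eclipse.rdf4j.query.QueryEvaluationException;
import org.eclipse.rdf4j.query.QueryLanguage;
import org.eclipse.rdf4j.query.algebra.TupleExpr;
import org.eclipse.rdf4j.query.impl.MapBindingSet;
import org.eclipse.rdf4j.query.parser.QueryParserUtil;
import org.eclipse.rdf4j.sail.Sail;
import org.eclipse.rdf4j.sail.SailConnection;

public class EvaluateWithQueryConfSketch {
    public static void runQuery(Sail sail, String sparql) throws Exception {
        // Parse the query down to the algebra that the Sail-level evaluate() expects.
        TupleExpr tupleExpr = QueryParserUtil.parseTupleQuery(QueryLanguage.SPARQL, sparql, null).getTupleExpr();

        // Per-query overrides travel as bindings; evaluateInternal copies them onto queryConf.
        ValueFactory vf = SimpleValueFactory.getInstance();
        MapBindingSet bindings = new MapBindingSet();
        bindings.addBinding(RdfCloudTripleStoreConfiguration.CONF_QUERY_AUTH, vf.createLiteral("U"));     // example auth
        bindings.addBinding(RdfCloudTripleStoreConfiguration.CONF_TTL, vf.createLiteral("30000"));        // example TTL in ms
        bindings.addBinding(RdfCloudTripleStoreConfiguration.CONF_INFER, vf.createLiteral("true"));

        try (SailConnection conn = sail.getConnection()) {
            // A null dataset means the default dataset; a SailException signals parse/evaluation failure.
            CloseableIteration<? extends BindingSet, QueryEvaluationException> results =
                    conn.evaluate(tupleExpr, null, bindings, true);
            try {
                while (results.hasNext()) {
                    System.out.println(results.next());
                }
            } finally {
                results.close(); // also shuts down the evaluation strategy, per the wrapper's close() above
            }
        }
    }
}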
Use of org.eclipse.rdf4j.sail.SailException in project incubator-rya by apache.
From the class RdfCloudTripleStoreConnection, method removeStatementsInternal:
@Override
protected void removeStatementsInternal(final Resource subject, final IRI predicate, final Value object, final Resource... contexts) throws SailException {
    if (!(subject instanceof IRI)) {
        throw new SailException("Subject[" + subject + "] must be URI");
    }
    try {
        if (contexts != null && contexts.length > 0) {
            for (final Resource context : contexts) {
                if (!(context instanceof IRI)) {
                    throw new SailException("Context[" + context + "] must be URI");
                }
                final RyaStatement statement = new RyaStatement(RdfToRyaConversions.convertResource(subject), RdfToRyaConversions.convertIRI(predicate), RdfToRyaConversions.convertValue(object), RdfToRyaConversions.convertResource(context));
                ryaDAO.delete(statement, conf);
            }
        } else {
            final RyaStatement statement = new RyaStatement(RdfToRyaConversions.convertResource(subject), RdfToRyaConversions.convertIRI(predicate), RdfToRyaConversions.convertValue(object), null);
            ryaDAO.delete(statement, conf);
        }
    } catch (final RyaDAOException e) {
        throw new SailException(e);
    }
}
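Because removeStatementsInternal rejects blank-node subjects and contexts before converting terms with RdfToRyaConversions, a SailException from this path can indicate a usage error as well as a storage failure. A hedged caller-side sketch, with illustrative IRIs only:

import org.eclipse.rdf4j.model.IRI;
import org.eclipse.rdf4j.model.ValueFactory;
import org.eclipse.rdf4j.model.impl.SimpleValueFactory;
import org.eclipse.rdf4j.sail.Sail;
import org.eclipse.rdf4j.sail.SailConnection;
import org.eclipse.rdf4j.sail.SailException;

public class RemoveStatementSketch {
    public static void remove(Sail sail) throws SailException {
        ValueFactory vf = SimpleValueFactory.getInstance();
        IRI subject = vf.createIRI("urn:example:alice");   // must be an IRI; a blank node would be rejected
        IRI predicate = vf.createIRI("urn:example:knows");
        IRI object = vf.createIRI("urn:example:bob");

        try (SailConnection conn = sail.getConnection()) {
            conn.begin();
            // Delegates to removeStatementsInternal; a RyaDAOException during the delete
            // comes back wrapped in a SailException.
            conn.removeStatements(subject, predicate, object);
            conn.commit();
        }
    }
}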
Use of org.eclipse.rdf4j.sail.SailException in project incubator-rya by apache.
From the class RdfCloudTripleStore, method initializeInternal:
@Override
protected void initializeInternal() throws SailException {
    checkNotNull(ryaDAO);
    if (this.conf == null) {
        this.conf = ryaDAO.getConf();
    }
    checkNotNull(this.conf);
    try {
        if (!ryaDAO.isInitialized()) {
            ryaDAO.setConf(this.conf);
            ryaDAO.init();
        }
    } catch (final RyaDAOException e) {
        throw new SailException(e);
    }
    if (rdfEvalStatsDAO != null && !rdfEvalStatsDAO.isInitialized()) {
        rdfEvalStatsDAO.setConf(this.conf);
        rdfEvalStatsDAO.init();
    }
    if (namespaceManager == null) {
        this.namespaceManager = new NamespaceManager(ryaDAO, this.conf);
    }
}
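initializeInternal runs when the enclosing Sail is initialized, so a missing or unstartable RyaDAO surfaces to the caller as a SailException at startup rather than at query time. The sketch below wires an RdfCloudTripleStore by hand against a mock Accumulo instance to show where that exception would appear; it is an illustration only, and the setter, configuration, and package names reflect a reading of the Rya and Accumulo 1.x APIs rather than the snippet itself.

import org.apache.accumulo.core.client.Connector;
import org.apache.accumulo.core.client.mock.MockInstance;
import org.apache.accumulo.core.client.security.tokens.PasswordToken;
import org.apache.rya.accumulo.AccumuloRdfConfiguration;
import org.apache.rya.accumulo.AccumuloRyaDAO;
import org.apache.rya.rdftriplestore.RdfCloudTripleStore;

public class StoreInitSketch {
    public static RdfCloudTripleStore startMockStore() throws Exception {
        // In-memory Accumulo, for illustration only.
        final Connector connector = new MockInstance("rya_example").getConnector("root", new PasswordToken(""));

        final AccumuloRdfConfiguration conf = new AccumuloRdfConfiguration();
        conf.setTablePrefix("rya_");

        final AccumuloRyaDAO dao = new AccumuloRyaDAO();
        dao.setConnector(connector);
        dao.setConf(conf);

        final RdfCloudTripleStore store = new RdfCloudTripleStore();
        store.setRyaDAO(dao);
        // Triggers initializeInternal(): the DAO is initialized here if it wasn't already,
        // and any RyaDAOException comes back wrapped in a SailException.
        store.initialize();
        return store;
    }
}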