Use of org.openrdf.repository.sail.SailRepository in project incubator-rya (Apache): class RulesetCopyIT, method runQuery.
/**
 * Executes a SPARQL tuple query against a Rya instance built from the given
 * configuration and collects every solution.
 *
 * @param query SPARQL SELECT query text.
 * @param conf Connection configuration handed to {@link RyaSailFactory}.
 * @return All binding sets produced by evaluating the query.
 * @throws Exception if the Sail cannot be created, the connection fails, or
 *         query evaluation fails.
 */
private Set<BindingSet> runQuery(final String query, final Configuration conf) throws Exception {
    SailRepository repository = null;
    SailRepositoryConnection conn = null;
    try {
        final Sail extSail = RyaSailFactory.getInstance(conf);
        repository = new SailRepository(extSail);
        conn = repository.getConnection();
        final ResultHandler handler = new ResultHandler();
        final TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
        tq.evaluate(handler);
        return handler.getSolutions();
    } finally {
        // Nested try/finally: the original skipped repository.shutDown() when
        // conn.close() threw, leaking the repository.
        try {
            if (conn != null) {
                conn.close();
            }
        } finally {
            if (repository != null) {
                repository.shutDown();
            }
        }
    }
}
Use of org.openrdf.repository.sail.SailRepository in project incubator-rya (Apache): class MongoSpinIT, method setup.
/**
 * Per-test setup: builds the test configuration, wraps the resulting Rya
 * Sail in a repository, and tunes logging levels.
 */
@Before
public void setup() throws Exception {
    // Quiet the MongoDB layer; keep forward-chaining progress visible.
    Logger.getLogger("org.apache.rya.mongodb").setLevel(Level.WARN);
    Logger.getLogger("org.apache.rya.forwardchain").setLevel(Level.INFO);
    // Create the configuration, then stand up a repository over the Rya Sail.
    final Configuration testConf = getConf();
    conf = testConf;
    repository = new SailRepository(RyaSailFactory.getInstance(testConf));
}
Use of org.openrdf.repository.sail.SailRepository in project incubator-rya (Apache): class SpinConstructRule, method loadSpinRules.
/**
 * Load a set of SPIN rules from a data store.
 * <p>
 * Queries the store for resources attached via {@code spin:rule}, accepting
 * either a direct {@code sp:Construct} with {@code sp:text}, or a template
 * whose (optional) {@code spin:body} is an {@code sp:Construct} with
 * {@code sp:text}. Each rule's text is parsed as a SPARQL CONSTRUCT query;
 * rules whose consequents contain blank nodes are skipped, because creating
 * fresh bnodes on each application could recurse forever.
 *
 * @param conf Contains the connection information. Not null.
 * @return A map of rule identifiers to rule objects.
 * @throws ForwardChainException if connecting, querying for rules, or
 * parsing rules fails.
 */
public static Ruleset loadSpinRules(RdfCloudTripleStoreConfiguration conf) throws ForwardChainException {
    Preconditions.checkNotNull(conf);
    // ConcurrentHashMap because the result handler below may be invoked from
    // the query evaluation machinery while this thread owns the map.
    Map<Resource, Rule> rules = new ConcurrentHashMap<>();
    // Connect to Rya
    SailRepository repository = null;
    SailRepositoryConnection conn = null;
    try {
        repository = new SailRepository(RyaSailFactory.getInstance(conf));
    } catch (Exception e) {
        throw new ForwardChainException("Couldn't initialize SAIL from configuration", e);
    }
    // Load and parse the individual SPIN rules from the data store.
    // NOTE: SPIN.BODY_PROPERTY now uses stringValue() like every other
    // constant in this query (previously it relied on implicit toString()).
    String ruleQueryString = "SELECT ?type ?rule ?text WHERE {\n" + " ?type <" + SPIN.RULE_PROPERTY.stringValue() + "> ?rule .\n" + " {\n" + " ?rule a <" + SP.CONSTRUCT_CLASS.stringValue() + "> .\n" + " ?rule <" + SP.TEXT_PROPERTY.stringValue() + "> ?text .\n" + " } UNION {\n" + " ?rule a ?template .\n" + " ?template <" + SPIN.BODY_PROPERTY.stringValue() + ">? ?body .\n" + " ?body a <" + SP.CONSTRUCT_CLASS.stringValue() + "> .\n" + " ?body <" + SP.TEXT_PROPERTY.stringValue() + "> ?text .\n" + " }\n" + "}";
    SPARQLParser parser = new SPARQLParser();
    try {
        conn = repository.getConnection();
        TupleQuery ruleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, ruleQueryString);
        ruleQuery.evaluate(new TupleQueryResultHandlerBase() {
            @Override
            public void handleSolution(BindingSet bs) throws TupleQueryResultHandlerException {
                // For each rule identifier found, instantiate a SpinRule
                Value requiredType = bs.getValue("type");
                Value ruleIdentifier = bs.getValue("rule");
                Value ruleText = bs.getValue("text");
                if (requiredType instanceof Resource && ruleIdentifier instanceof Resource && ruleText instanceof Literal) {
                    ParsedQuery parsedRule;
                    try {
                        parsedRule = parser.parseQuery(ruleText.stringValue(), null);
                        // Only CONSTRUCT (graph) queries can serve as rules.
                        if (parsedRule instanceof ParsedGraphQuery) {
                            SpinConstructRule rule = new SpinConstructRule((Resource) requiredType, (Resource) ruleIdentifier, (ParsedGraphQuery) parsedRule);
                            if (rule.hasAnonymousConsequent()) {
                                logger.error("Skipping unsupported rule " + ruleIdentifier + " -- consequent refers to bnode, which is not" + " currently supported (creating new bnodes at each" + " application could lead to infinite recursion).");
                            } else {
                                rules.put((Resource) ruleIdentifier, rule);
                            }
                        }
                    } catch (Exception e) {
                        throw new TupleQueryResultHandlerException(e);
                    }
                }
            }
        });
    } catch (TupleQueryResultHandlerException | QueryEvaluationException | MalformedQueryException | RepositoryException e) {
        throw new ForwardChainException("Couldn't retrieve SPIN rules", e);
    } finally {
        // Best-effort cleanup: log (don't rethrow) so an original failure
        // above is not masked by a close/shutdown error.
        if (conn != null) {
            try {
                conn.close();
            } catch (RepositoryException e) {
                logger.warn("Error closing repository connection", e);
            }
        }
        if (repository.isInitialized()) {
            try {
                repository.shutDown();
            } catch (RepositoryException e) {
                logger.warn("Error shutting down repository", e);
            }
        }
    }
    return new Ruleset(rules.values());
}
Use of org.openrdf.repository.sail.SailRepository in project stanbol (Apache): class SesameYardTest, method initYard.
/**
 * One-time setup for the test class: creates an in-memory Sesame repository
 * and the SesameYard instance under test.
 */
@BeforeClass
public static final void initYard() throws RepositoryException {
    // Stand up the in-memory repository first so the yard can wrap it.
    repo = new SailRepository(new MemoryStore());
    repo.initialize();
    final SesameYardConfig config = new SesameYardConfig("testYardId");
    config.setName("Sesame Yard Test");
    config.setDescription("The Sesame Yard instance used to execute the Unit Tests defined for the Yard Interface");
    yard = new SesameYard(repo, config);
}
Use of org.openrdf.repository.sail.SailRepository in project stanbol (Apache): class RdfIndexingSource, method loadRepositoryConfig.
/**
* @param repoConfigFile
* @return
*/
/**
 * Reads a Sesame {@link RepositoryConfig} from an RDF configuration file by
 * staging the file's triples in a throwaway in-memory repository.
 *
 * @param repoConfigFile RDF file that must contain exactly one repository
 *        configuration; its extension determines the RDF format.
 * @return the single, validated repository configuration found in the file.
 * @throws IllegalArgumentException if the file cannot be read or parsed,
 *         contains zero or more than one configuration, the configuration is
 *         invalid, or it lacks a RepositoryImpl section.
 * @throws IllegalStateException if the in-memory staging repository fails.
 */
private RepositoryConfig loadRepositoryConfig(File repoConfigFile) {
    Repository configRepo = new SailRepository(new MemoryStore());
    RepositoryConnection con = null;
    try {
        configRepo.initialize();
        con = configRepo.getConnection();
        // We need to load the configuration into a context
        org.openrdf.model.URI configContext = con.getValueFactory().createURI("urn:stanbol.entityhub:indexing.source.sesame:config.context");
        RDFFormat format = Rio.getParserFormatForFileName(repoConfigFile.getName());
        // try-with-resources: the original never closed this stream, leaking
        // a file handle on every call.
        try (InputStreamReader reader = new InputStreamReader(new FileInputStream(repoConfigFile), Charset.forName("UTF-8"))) {
            con.add(reader, baseUri, format, configContext);
        } catch (RDFParseException e) {
            throw new IllegalArgumentException("Unable to parse '" + repoConfigFile + "' using RDF format '" + format + "'!", e);
        } catch (IOException e) {
            throw new IllegalArgumentException("Unable to access '" + repoConfigFile + "'!", e);
        }
        con.commit();
    } catch (RepositoryException e) {
        throw new IllegalStateException("Unable to load '" + repoConfigFile + "' to inmemory Sail!", e);
    } finally {
        if (con != null) {
            try {
                con.close();
            } catch (RepositoryException e) {
                /* ignore */
            }
        }
    }
    Set<String> repoNames;
    RepositoryConfig repoConfig;
    try {
        repoNames = RepositoryConfigUtil.getRepositoryIDs(configRepo);
        if (repoNames.size() == 1) {
            repoConfig = RepositoryConfigUtil.getRepositoryConfig(configRepo, repoNames.iterator().next());
            repoConfig.validate();
        } else if (repoNames.size() > 1) {
            throw new IllegalArgumentException("Repository configuration file '" + repoConfigFile + "' MUST only contain a single repository configuration!");
        } else {
            throw new IllegalArgumentException("Repository configuration file '" + repoConfigFile + "' DOES NOT contain a repository configuration!");
        }
    } catch (RepositoryException e) {
        throw new IllegalStateException("Unable to read RepositoryConfiguration from the " + "in-memory Sail!", e);
    } catch (RepositoryConfigException e) {
        throw new IllegalArgumentException("Repository Configuration in '" + repoConfigFile + "' is not valid!", e);
    } finally {
        // The staging repository is purely local; shutdown errors are harmless.
        try {
            configRepo.shutDown();
        } catch (RepositoryException e) {
            /* ignore */
        }
    }
    if (repoConfig.getRepositoryImplConfig() == null) {
        throw new IllegalArgumentException("Missing RepositoryImpl config for " + "config " + repoConfig.getID() + " of file " + repoConfigFile + "!");
    }
    return repoConfig;
}
Aggregations