use of org.eclipse.rdf4j.repository.sparql.SPARQLRepository in project rdf4j by eclipse.
the class SPARQLFederatedService method createSPARQLRepository.
private static SPARQLRepository createSPARQLRepository(String serviceUrl, HttpClientSessionManager client) {
    SPARQLRepository rep = new SPARQLRepository(serviceUrl);
    rep.setHttpClientSessionManager(client);
    return rep;
}
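For context, a minimal sketch of exercising this factory from within the same class, assuming RDF4J's SharedHttpClientSessionManager from org.eclipse.rdf4j.http.client and a made-up endpoint URL:

import org.eclipse.rdf4j.http.client.SharedHttpClientSessionManager;
import org.eclipse.rdf4j.repository.RepositoryConnection;
import org.eclipse.rdf4j.repository.sparql.SPARQLRepository;

static void queryRemoteEndpoint() {
    // one session manager (and thus one HTTP connection pool) can be shared
    // by every endpoint a federated query talks to
    SharedHttpClientSessionManager sessions = new SharedHttpClientSessionManager();
    SPARQLRepository repo = createSPARQLRepository("http://example.org/sparql", sessions);
    repo.init();
    try (RepositoryConnection con = repo.getConnection()) {
        // issue SPARQL queries against the remote endpoint here
    } finally {
        repo.shutDown();
        sessions.shutDown();
    }
}

Sharing the session manager lets all federated endpoints draw from one HTTP connection pool instead of each repository opening its own.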
use of org.eclipse.rdf4j.repository.sparql.SPARQLRepository in project platform by dashjoin.
the class RDF4J method connectAndCollectMetadata.
@Override
@SuppressWarnings("unchecked")
public Map<String, Object> connectAndCollectMetadata() throws Exception {
    // choose the repository implementation based on the configured mode
    if ("memory".equals(mode))
        _cp = new SailRepository(new MemoryStore());
    if ("local".equals(mode))
        _cp = new SailRepository(new NativeStore(new File(folder)));
    if ("sesame".equals(mode)) {
        _cp = new HTTPRepository(endpoint);
        ((HTTPRepository) _cp).setUsernameAndPassword(username, password);
    }
    if ("client".equals(mode)) {
        _cp = new SPARQLRepository(endpoint);
        ((SPARQLRepository) _cp).setUsernameAndPassword(username, password);
    }
    if (_cp == null)
        throw new Exception("dashjoin.database.mode must be set to one of memory, local, sesame, client");
    _cp.init();
    vf = _cp.getValueFactory();
    Map<String, Object> meta = new HashMap<>();
    try (RepositoryConnection con = _cp.getConnection()) {
        if (datasets != null && "memory".equals(mode))
            for (String s : datasets) {
                log.info("loading dataset " + s);
                InputStream ddl = Loader.open(s);
                RDFFormat format = Rio.getParserFormatForFileName(s).orElse(RDFFormat.RDFXML);
                con.add(ddl, "", format);
            }
        if (ontologies != null) {
            log.info("loading ontologies from " + ontologies);
            // delegate metadata collection to an in-memory instance that reuses this connection
            RDF4J tmp = new RDF4J() {
                @Override
                RepositoryConnection getConnection() {
                    return _cp.getConnection();
                }
            };
            tmp.ID = ID;
            tmp.mode = "memory";
            tmp.datasets = this.ontologies;
            return tmp.connectAndCollectMetadata();
        }
        // scan the ontology for property (domain and range) info
        log.info("loading ontologies from database");
        Map<IRI, Set<IRI>> type2props = new HashMap<>();
        Map<IRI, Set<IRI>> prop2types = new HashMap<>();
        for (IRI dr : new IRI[] { RDFS.DOMAIN, RDFS.RANGE }) {
            Map<IRI, Set<IRI>> drs = dr == RDFS.DOMAIN ? type2props : prop2types;
            try (RepositoryResult<Statement> d = con.getStatements(null, dr, null)) {
                while (d.hasNext()) {
                    Statement i = d.next();
                    if (i.getSubject() instanceof IRI && i.getObject() instanceof IRI) {
                        // rdfs:domain is indexed by type, rdfs:range by property
                        IRI s = dr == RDFS.DOMAIN ? (IRI) i.getObject() : (IRI) i.getSubject();
                        IRI o = dr == RDFS.DOMAIN ? (IRI) i.getSubject() : (IRI) i.getObject();
                        Set<IRI> set = drs.get(s);
                        if (set == null) {
                            set = new HashSet<>();
                            drs.put(s, set);
                        }
                        set.add(o);
                    }
                }
            }
        }
        // remember the subclass tree (no multiple inheritance)
        Map<IRI, Set<IRI>> subclasses = new LinkedHashMap<>();
        // scan the ontology for classes
        for (IRI[] i : new IRI[][] { new IRI[] { RDF.TYPE, OWL.CLASS }, new IRI[] { RDF.TYPE, RDFS.CLASS },
                new IRI[] { RDFS.SUBCLASSOF, null } })
            try (RepositoryResult<Statement> types = con.getStatements(null, i[0], i[1])) {
                while (types.hasNext()) {
                    Statement stmt = types.next();
                    Resource s = stmt.getSubject();
                    if (s instanceof IRI) {
                        if (stmt.getObject() instanceof IRI)
                            if (stmt.getPredicate().equals(RDFS.SUBCLASSOF)) {
                                Set<IRI> set = subclasses.get(stmt.getObject());
                                if (set == null) {
                                    set = new HashSet<>();
                                    subclasses.put((IRI) stmt.getObject(), set);
                                }
                                set.add((IRI) s);
                            }
                        // build a JSON-Schema-like table entry for the class
                        Map<String, Object> table = new HashMap<>();
                        table.put("parent", ID);
                        table.put("name", s.stringValue());
                        table.put("ID", ID + "/" + Escape.encodeTableOrColumnName(s.stringValue()));
                        table.put("type", "object");
                        Map<String, Object> properties = new LinkedHashMap<>();
                        Map<String, Object> id = new HashMap<>();
                        id.put("pkpos", 0);
                        id.put("name", "ID");
                        id.put("type", "string");
                        id.put("format", "uri");
                        id.put("errorMessage", "Please enter a valid URI");
                        id.put("parent", table.get("ID"));
                        id.put("ID", table.get("ID") + "/ID");
                        properties.put("ID", id);
                        table.put("properties", properties);
                        table.put("required", Arrays.asList("ID"));
                        meta.put(s.stringValue(), table);
                        Set<IRI> props = type2props.get(s);
                        if (props != null)
                            for (IRI prop : props) {
                                Set<IRI> ranges = prop2types.get(prop);
                                if (ranges != null)
                                    if (ranges.size() == 1) {
                                        Integer maxcard = getMaxCardinality(prop);
                                        addProp("" + table.get("ID"), prop, properties,
                                                ranges.iterator().next(), maxcard == null || maxcard > 1);
                                    }
                            }
                    }
                }
            }
        Set<IRI> roots = new HashSet<IRI>(subclasses.keySet());
        for (Set<IRI> sub : subclasses.values())
            roots.removeAll(sub);
        for (IRI root : roots)
            copyProps(root, subclasses, meta);
        log.info("detected " + meta.size() + " classes");
        // scan props using one sample
        log.info("scanning data...");
        for (Entry<String, Object> cls : meta.entrySet())
            try (RepositoryResult<Statement> types = con.getStatements(null, RDF.TYPE, iri(cls.getKey()))) {
                if (types.hasNext()) {
                    Statement type = types.next();
                    Map<String, Object> table = (Map<String, Object>) cls.getValue();
                    Map<String, Object> properties = (Map<String, Object>) table.get("properties");
                    try (RepositoryResult<Statement> columns = con.getStatements(type.getSubject(), null, null)) {
                        // list of detected props that will be added to / enhance the ontology
                        Map<IRI, ColType> cols = new LinkedHashMap<>();
                        while (columns.hasNext()) {
                            Statement column = columns.next();
                            if (column.getPredicate().equals(RDF.TYPE))
                                continue;
                            ColType col = cols.get(column.getPredicate());
                            if (col != null)
                                // predicate appears again => must be an array
                                col.array = true;
                            else {
                                col = new ColType();
                                col.sample = column.getObject();
                                col.array = false;
                                cols.put(column.getPredicate(), col);
                            }
                        }
                        for (Entry<IRI, ColType> e : cols.entrySet()) {
                            Map<String, Object> property = (Map<String, Object>) properties.get(e.getKey().stringValue());
                            if (property == null) {
                                // prop is not yet in the ontology
                                Value value = e.getValue().sample;
                                if (value instanceof Literal)
                                    addProp((String) table.get("ID"), e.getKey(), properties,
                                            ((Literal) value).getDatatype(), e.getValue().array);
                                else if (value instanceof IRI) {
                                    IRI t = getType((IRI) value);
                                    if (t != null)
                                        addProp((String) table.get("ID"), e.getKey(), properties, t,
                                                e.getValue().array);
                                }
                            } else {
                                // check cardinality
                                if (property.get("type").equals("array"))
                                    if (!e.getValue().array) {
                                        // data suggests a single value - retract the array type
                                        Map<String, Object> items = (Map<String, Object>) property.remove("items");
                                        property.putAll(items);
                                        // change display props also - see addProp below
                                        // https://github.com/dashjoin/platform/issues/94
                                        property.remove("layout");
                                        if (property.remove("displayWith") != null)
                                            property.put("displayWith", "fkln");
                                    }
                            }
                        }
                    }
                }
            }
        log.info("done");
    }
    return meta;
}
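Each value in the returned meta map is a JSON-Schema-like description of one RDF class. A minimal sketch of reading such an entry, assuming a hypothetical class http://example.org/Person and the database ID "rdf" (both illustrative, not from the source):

// keys mirror the code above; the class IRI and database ID are made up
Map<String, Object> person = (Map<String, Object>) meta.get("http://example.org/Person");
person.get("parent");   // "rdf" - the database this table belongs to
person.get("type");     // "object"
person.get("required"); // ["ID"] - every instance needs a URI primary key
Map<String, Object> props = (Map<String, Object>) person.get("properties");
Map<String, Object> id = (Map<String, Object>) props.get("ID");
id.get("format");       // "uri" - the primary key is the subject's IRI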
use of org.eclipse.rdf4j.repository.sparql.SPARQLRepository in project inception by inception-project.
the class KnowledgeBaseServiceImpl method getConnection.
@Override
public RepositoryConnection getConnection(KnowledgeBase kb) {
    assertRegistration(kb);
    Repository repo = repoManager.getRepository(kb.getRepositoryId());
    if (repo instanceof SPARQLRepository) {
        // credentials may be embedded in the endpoint URL as userinfo (user:password@host)
        SPARQLRepositoryConfig sparqlRepoConfig = (SPARQLRepositoryConfig) getKnowledgeBaseConfig(kb);
        URI uri = URI.create(sparqlRepoConfig.getQueryEndpointUrl());
        String userInfo = uri.getUserInfo();
        if (StringUtils.isNotBlank(userInfo)) {
            userInfo = userInfo.trim();
            String username;
            String password;
            if (userInfo.contains(":")) {
                username = substringBefore(userInfo, ":");
                password = substringAfter(userInfo, ":");
            } else {
                username = userInfo;
                password = "";
            }
            SPARQLRepository sparqlRepo = (SPARQLRepository) repo;
            sparqlRepo.setUsernameAndPassword(username, password);
        }
    }
    return new RepositoryConnectionWrapper(repo, repo.getConnection()) {
        {
            skipCertificateChecks(kb.isSkipSslValidation());
        }

        @Override
        public void close() throws RepositoryException {
            try {
                super.close();
            } finally {
                restoreSslVerification();
            }
        }
    };
}
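A small sketch of the userinfo extraction with a made-up endpoint URL (substringBefore and substringAfter are the Apache Commons Lang helpers statically imported in the class above):

import static org.apache.commons.lang3.StringUtils.substringAfter;
import static org.apache.commons.lang3.StringUtils.substringBefore;

import java.net.URI;

// hypothetical endpoint; java.net.URI exposes "alice:s3cret" as the userinfo component
URI uri = URI.create("https://alice:s3cret@example.org/sparql");
String userInfo = uri.getUserInfo();              // "alice:s3cret"
String username = substringBefore(userInfo, ":"); // "alice"
String password = substringAfter(userInfo, ":");  // "s3cret"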
use of org.eclipse.rdf4j.repository.sparql.SPARQLRepository in project inception by inception-project.
the class SPARQLQueryBuilderTest method assertIsReachable.
public static void assertIsReachable(Repository aRepository) {
    if (!(aRepository instanceof SPARQLRepository)) {
        return;
    }

    SPARQLRepository sparqlRepository = (SPARQLRepository) aRepository;
    assumeTrue(isReachable(sparqlRepository.toString()),
            "Remote repository at [" + sparqlRepository + "] is not reachable");
}
use of org.eclipse.rdf4j.repository.sparql.SPARQLRepository in project inception by inception-project.
the class SPARQLQueryBuilderTest method buildSparqlRepository.
private Repository buildSparqlRepository(String aUrl) {
    SPARQLRepository repo = new SPARQLRepository(aUrl);
    // install a custom HTTP client so per-thread SSL checking can be toggled in tests
    repo.setHttpClient(newPerThreadSslCheckingHttpClientBuilder().build());
    repo.init();
    return repo;
}
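Taken together with assertIsReachable above, a test might start like this (the endpoint URL is illustrative):

Repository repo = buildSparqlRepository("http://example.org/sparql");
// assumeTrue() inside the helper skips the test, rather than failing it, when the endpoint is down
assertIsReachable(repo);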