
Example 1 with SPARQLRepository

Use of org.eclipse.rdf4j.repository.sparql.SPARQLRepository in project rdf4j by eclipse.

The class SPARQLFederatedService, method createSPARQLRepository:

private static SPARQLRepository createSPARQLRepository(String serviceUrl, HttpClientSessionManager client) {
    SPARQLRepository rep = new SPARQLRepository(serviceUrl);
    rep.setHttpClientSessionManager(client);
    return rep;
}
Also used: SPARQLRepository (org.eclipse.rdf4j.repository.sparql.SPARQLRepository)
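
A repository created this way is queried like any other RDF4J Repository; sharing the HttpClientSessionManager lets the federated SERVICE evaluation reuse the caller's HTTP connections. A minimal usage sketch (the endpoint URL and query are placeholders, and the session manager wiring is omitted):

private static void querySparqlEndpoint(String serviceUrl) {
    SPARQLRepository rep = new SPARQLRepository(serviceUrl);
    rep.init();
    try (RepositoryConnection con = rep.getConnection();
            TupleQueryResult result = con.prepareTupleQuery("SELECT * WHERE { ?s ?p ?o } LIMIT 10").evaluate()) {
        // each solution binds ?s, ?p and ?o; print the subject of each row
        while (result.hasNext()) {
            BindingSet bs = result.next();
            System.out.println(bs.getValue("s"));
        }
    } finally {
        rep.shutDown();
    }
}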

Example 2 with SPARQLRepository

Use of org.eclipse.rdf4j.repository.sparql.SPARQLRepository in project platform by dashjoin.

The class RDF4J, method connectAndCollectMetadata:

@Override
@SuppressWarnings("unchecked")
public Map<String, Object> connectAndCollectMetadata() throws Exception {
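    // pick the repository implementation matching the configured mode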
    if ("memory".equals(mode))
        _cp = new SailRepository(new MemoryStore());
    if ("local".equals(mode))
        _cp = new SailRepository(new NativeStore(new File(folder)));
    if ("sesame".equals(mode)) {
        _cp = new HTTPRepository(endpoint);
        ((HTTPRepository) _cp).setUsernameAndPassword(username, password);
    }
    if ("client".equals(mode)) {
        _cp = new SPARQLRepository(endpoint);
        ((SPARQLRepository) _cp).setUsernameAndPassword(username, password);
    }
    if (_cp == null)
        throw new Exception("dashjoin.database.mode must be set to one of memory, local, sesame, client");
    _cp.init();
    vf = _cp.getValueFactory();
    Map<String, Object> meta = new HashMap<>();
    try (RepositoryConnection con = _cp.getConnection()) {
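        // memory mode only: preload the configured datasets into the store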
        if (datasets != null && "memory".equals(mode))
            for (String s : datasets) {
                log.info("loading dataset " + s);
                InputStream ddl = Loader.open(s);
                RDFFormat format = Rio.getParserFormatForFileName(s).orElse(RDFFormat.RDFXML);
                con.add(ddl, "", format);
            }
        if (ontologies != null) {
            log.info("loading ontologies from " + ontologies);
            RDF4J tmp = new RDF4J() {

                @Override
                RepositoryConnection getConnection() {
                    return _cp.getConnection();
                }
            };
            tmp.ID = ID;
            tmp.mode = "memory";
            tmp.datasets = this.ontologies;
            return tmp.connectAndCollectMetadata();
        }
        // scan the ontology for property (domain and range info)
        log.info("loading ontologies from database");
        Map<IRI, Set<IRI>> type2props = new HashMap<>();
        Map<IRI, Set<IRI>> prop2types = new HashMap<>();
        for (IRI dr : new IRI[] { RDFS.DOMAIN, RDFS.RANGE }) {
            Map<IRI, Set<IRI>> drs = dr == RDFS.DOMAIN ? type2props : prop2types;
            try (RepositoryResult<Statement> d = con.getStatements(null, dr, null)) {
                while (d.hasNext()) {
                    Statement i = d.next();
                    if (i.getSubject() instanceof IRI && i.getObject() instanceof IRI) {
                        IRI s = dr == RDFS.DOMAIN ? (IRI) i.getObject() : (IRI) i.getSubject();
                        IRI o = dr == RDFS.DOMAIN ? (IRI) i.getSubject() : (IRI) i.getObject();
                        Set<IRI> set = drs.get(s);
                        if (set == null) {
                            set = new HashSet<>();
                            drs.put(s, set);
                        }
                        set.add(o);
                    }
                }
            }
        }
        // remember subclass tree (no multiple inheritance)
        Map<IRI, Set<IRI>> subclasses = new LinkedHashMap<>();
        // scan the ontology for classes
        for (IRI[] i : new IRI[][] { new IRI[] { RDF.TYPE, OWL.CLASS }, new IRI[] { RDF.TYPE, RDFS.CLASS }, new IRI[] { RDFS.SUBCLASSOF, null } }) try (RepositoryResult<Statement> types = con.getStatements(null, i[0], i[1])) {
            while (types.hasNext()) {
                Statement stmt = types.next();
                Resource s = stmt.getSubject();
                if (s instanceof IRI) {
                    if (stmt.getObject() instanceof IRI)
                        if (stmt.getPredicate().equals(RDFS.SUBCLASSOF)) {
                            Set<IRI> set = subclasses.get(stmt.getObject());
                            if (set == null) {
                                set = new HashSet<>();
                                subclasses.put((IRI) stmt.getObject(), set);
                            }
                            set.add((IRI) s);
                        }
                    Map<String, Object> table = new HashMap<>();
                    table.put("parent", ID);
                    table.put("name", s.stringValue());
                    table.put("ID", ID + "/" + Escape.encodeTableOrColumnName(s.stringValue()));
                    table.put("type", "object");
                    Map<String, Object> properties = new LinkedHashMap<>();
                    Map<String, Object> id = new HashMap<>();
                    id.put("pkpos", 0);
                    id.put("name", "ID");
                    id.put("type", "string");
                    id.put("format", "uri");
                    id.put("errorMessage", "Please enter a valid URI");
                    id.put("parent", table.get("ID"));
                    id.put("ID", table.get("ID") + "/ID");
                    properties.put("ID", id);
                    table.put("properties", properties);
                    table.put("required", Arrays.asList("ID"));
                    meta.put(s.stringValue(), table);
                    Set<IRI> props = type2props.get(s);
                    if (props != null)
                        for (IRI prop : props) {
                            Set<IRI> ranges = prop2types.get(prop);
                            if (ranges != null)
                                if (ranges.size() == 1) {
                                    Integer maxcard = getMaxCardinality(prop);
                                    addProp("" + table.get("ID"), prop, properties, ranges.iterator().next(), maxcard == null || maxcard > 1);
                                }
                        }
                }
            }
        }
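        // find root classes (classes that are no one's subclass) and push their properties down the subclass tree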
        Set<IRI> roots = new HashSet<IRI>(subclasses.keySet());
        for (Set<IRI> sub : subclasses.values()) roots.removeAll(sub);
        for (IRI root : roots) copyProps(root, subclasses, meta);
        log.info("detected " + meta.size() + " classes");
        // scan props using one sample
        log.info("scanning data...");
        for (Entry<String, Object> cls : meta.entrySet()) try (RepositoryResult<Statement> types = con.getStatements(null, RDF.TYPE, iri(cls.getKey()))) {
            if (types.hasNext()) {
                Statement type = types.next();
                Map<String, Object> table = (Map<String, Object>) cls.getValue();
                Map<String, Object> properties = (Map<String, Object>) table.get("properties");
                try (RepositoryResult<Statement> columns = con.getStatements(type.getSubject(), null, null)) {
                    // detected props that will be added to the ontology or enhance existing ones
                    Map<IRI, ColType> cols = new LinkedHashMap<>();
                    while (columns.hasNext()) {
                        Statement column = columns.next();
                        if (column.getPredicate().equals(RDF.TYPE))
                            continue;
                        ColType col = cols.get(column.getPredicate());
                        if (col != null)
                            // predicate appears again => must be array
                            col.array = true;
                        else {
                            col = new ColType();
                            col.sample = column.getObject();
                            col.array = false;
                            cols.put(column.getPredicate(), col);
                        }
                    }
                    for (Entry<IRI, ColType> e : cols.entrySet()) {
                        Map<String, Object> property = (Map<String, Object>) properties.get(e.getKey().stringValue());
                        if (property == null) {
                            // prop is not yet in the ontology
                            Value value = e.getValue().sample;
                            if (value instanceof Literal)
                                addProp((String) table.get("ID"), e.getKey(), properties, ((Literal) value).getDatatype(), e.getValue().array);
                            else if (value instanceof IRI) {
                                IRI t = getType((IRI) value);
                                if (t != null)
                                    addProp((String) table.get("ID"), e.getKey(), properties, t, e.getValue().array);
                            }
                        } else {
                            // check cardinality
                            if (property.get("type").equals("array"))
                                if (!e.getValue().array) {
                                    // data suggests single value - retract array type
                                    Map<String, Object> items = (Map<String, Object>) property.remove("items");
                                    property.putAll(items);
                                    // change display props also - see addProp below
                                    // https://github.com/dashjoin/platform/issues/94
                                    property.remove("layout");
                                    if (property.remove("displayWith") != null)
                                        property.put("displayWith", "fkln");
                                }
                        }
                    }
                }
            }
        }
        log.info("done");
    }
    return meta;
}
Also used: RepositoryConnection (org.eclipse.rdf4j.repository.RepositoryConnection), IRI (org.eclipse.rdf4j.model.IRI), Set (java.util.Set), HashSet (java.util.HashSet), BindingSet (org.eclipse.rdf4j.query.BindingSet), SPARQLRepository (org.eclipse.rdf4j.repository.sparql.SPARQLRepository), SailRepository (org.eclipse.rdf4j.repository.sail.SailRepository), HashMap (java.util.HashMap), LinkedHashMap (java.util.LinkedHashMap), NativeStore (org.eclipse.rdf4j.sail.nativerdf.NativeStore), HTTPRepository (org.eclipse.rdf4j.repository.http.HTTPRepository), MemoryStore (org.eclipse.rdf4j.sail.memory.MemoryStore), Entry (java.util.Map.Entry), Literal (org.eclipse.rdf4j.model.Literal), RepositoryResult (org.eclipse.rdf4j.repository.RepositoryResult), RDFFormat (org.eclipse.rdf4j.rio.RDFFormat), InputStream (java.io.InputStream), Statement (org.eclipse.rdf4j.model.Statement), Resource (org.eclipse.rdf4j.model.Resource), RepositoryException (org.eclipse.rdf4j.repository.RepositoryException), Value (org.eclipse.rdf4j.model.Value), File (java.io.File), Map (java.util.Map)
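
The domain/range scan in the middle of this method is a stock RDF4J pattern: iterate a RepositoryResult inside try-with-resources and index the statements into a map. A stripped-down sketch of just that step, assuming repository is an initialized Repository:

// Sketch: index rdfs:domain statements as class -> properties (mirrors type2props above).
Map<IRI, Set<IRI>> type2props = new HashMap<>();
try (RepositoryConnection con = repository.getConnection();
        RepositoryResult<Statement> result = con.getStatements(null, RDFS.DOMAIN, null)) {
    while (result.hasNext()) {
        Statement st = result.next();
        if (st.getSubject() instanceof IRI && st.getObject() instanceof IRI) {
            // for rdfs:domain, the object is the class and the subject is the property
            type2props.computeIfAbsent((IRI) st.getObject(), k -> new HashSet<>())
                    .add((IRI) st.getSubject());
        }
    }
}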

Example 3 with SPARQLRepository

Use of org.eclipse.rdf4j.repository.sparql.SPARQLRepository in project inception by inception-project.

The class KnowledgeBaseServiceImpl, method getConnection:

@Override
public RepositoryConnection getConnection(KnowledgeBase kb) {
    assertRegistration(kb);
    Repository repo = repoManager.getRepository(kb.getRepositoryId());
    if (repo instanceof SPARQLRepository) {
        SPARQLRepositoryConfig sparqlRepoConfig = (SPARQLRepositoryConfig) getKnowledgeBaseConfig(kb);
        URI uri = URI.create(sparqlRepoConfig.getQueryEndpointUrl());
        String userInfo = uri.getUserInfo();
        if (StringUtils.isNotBlank(userInfo)) {
            userInfo = userInfo.trim();
            String username;
            String password;
            if (userInfo.contains(":")) {
                username = substringBefore(userInfo, ":");
                password = substringAfter(userInfo, ":");
            } else {
                username = userInfo;
                password = "";
            }
            SPARQLRepository sparqlRepo = (SPARQLRepository) repo;
            sparqlRepo.setUsernameAndPassword(username, password);
        }
    }
    return new RepositoryConnectionWrapper(repo, repo.getConnection()) {

        {
            skipCertificateChecks(kb.isSkipSslValidation());
        }

        @Override
        public void close() throws RepositoryException {
            try {
                super.close();
            } finally {
                restoreSslVerification();
            }
        }
    };
}
Also used: SPARQLRepositoryConfig (org.eclipse.rdf4j.repository.sparql.config.SPARQLRepositoryConfig), SailRepository (org.eclipse.rdf4j.repository.sail.SailRepository), SPARQLRepository (org.eclipse.rdf4j.repository.sparql.SPARQLRepository), Repository (org.eclipse.rdf4j.repository.Repository), RepositoryConnectionWrapper (org.eclipse.rdf4j.repository.base.RepositoryConnectionWrapper), URI (java.net.URI)
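
The credential extraction above hinges on java.net.URI exposing the user-info component of the endpoint URL. A standalone sketch of that step (the URL is made up):

// "alice:secret" is the user-info part; split it into username and password.
URI uri = URI.create("http://alice:secret@example.org/sparql");
String userInfo = uri.getUserInfo(); // "alice:secret", or null if absent
String username = userInfo.contains(":") ? userInfo.substring(0, userInfo.indexOf(':')) : userInfo;
String password = userInfo.contains(":") ? userInfo.substring(userInfo.indexOf(':') + 1) : "";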

Example 4 with SPARQLRepository

Use of org.eclipse.rdf4j.repository.sparql.SPARQLRepository in project inception by inception-project.

The class SPARQLQueryBuilderTest, method assertIsReachable:

public static void assertIsReachable(Repository aRepository) {
    if (!(aRepository instanceof SPARQLRepository)) {
        return;
    }
    SPARQLRepository sparqlRepository = (SPARQLRepository) aRepository;
    assumeTrue(isReachable(sparqlRepository.toString()), "Remote repository at [" + sparqlRepository + "] is not reachable");
}
Also used: SPARQLRepository (org.eclipse.rdf4j.repository.sparql.SPARQLRepository)
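
isReachable is defined elsewhere in the test class; judging by the HttpURLConnection and MalformedURLException entries in the aggregations below, a probe along these lines would satisfy the call site (a hypothetical reconstruction, not the project's actual code):

// Hypothetical probe: issue a HEAD request and treat any HTTP response as
// "reachable", any I/O failure as unreachable.
static boolean isReachable(String aUrl) {
    try {
        HttpURLConnection con = (HttpURLConnection) new URL(aUrl).openConnection();
        con.setRequestMethod("HEAD");
        con.setConnectTimeout(2500);
        con.setReadTimeout(2500);
        return con.getResponseCode() > 0;
    } catch (IOException e) {
        return false;
    }
}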

Example 5 with SPARQLRepository

Use of org.eclipse.rdf4j.repository.sparql.SPARQLRepository in project inception by inception-project.

The class SPARQLQueryBuilderTest, method buildSparqlRepository:

private Repository buildSparqlRepository(String aUrl) {
    SPARQLRepository repo = new SPARQLRepository(aUrl);
    repo.setHttpClient(newPerThreadSslCheckingHttpClientBuilder().build());
    repo.init();
    return repo;
}
Also used: SPARQLRepository (org.eclipse.rdf4j.repository.sparql.SPARQLRepository)
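
newPerThreadSslCheckingHttpClientBuilder() is a test helper from the same project; with a stock Apache HttpClient, the same wiring would look roughly like this (the endpoint URL is a placeholder):

// Sketch: hand SPARQLRepository a custom Apache HttpClient before init().
SPARQLRepository repo = new SPARQLRepository("http://example.org/sparql");
repo.setHttpClient(org.apache.http.impl.client.HttpClientBuilder.create().build());
repo.init();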

Aggregations

SPARQLRepository (org.eclipse.rdf4j.repository.sparql.SPARQLRepository): 17
RepositoryConnection (org.eclipse.rdf4j.repository.RepositoryConnection): 8
Repository (org.eclipse.rdf4j.repository.Repository): 6
IOException (java.io.IOException): 4
TupleQuery (org.eclipse.rdf4j.query.TupleQuery): 4
TupleQueryResult (org.eclipse.rdf4j.query.TupleQueryResult): 4
URI (java.net.URI): 3
BindingSet (org.eclipse.rdf4j.query.BindingSet): 3
NeptuneSigV4SignerException (com.amazonaws.neptune.auth.NeptuneSigV4SignerException): 2
OutputWriter (com.amazonaws.services.neptune.io.OutputWriter): 2
HashMap (java.util.HashMap): 2
Resource (org.eclipse.rdf4j.model.Resource): 2
Statement (org.eclipse.rdf4j.model.Statement): 2
Value (org.eclipse.rdf4j.model.Value): 2
SailRepository (org.eclipse.rdf4j.repository.sail.SailRepository): 2
RDFWriter (org.eclipse.rdf4j.rio.RDFWriter): 2
File (java.io.File): 1
InputStream (java.io.InputStream): 1
HttpURLConnection (java.net.HttpURLConnection): 1
MalformedURLException (java.net.MalformedURLException): 1