Search in sources :

Example 1 with NativeStore

use of org.eclipse.rdf4j.sail.nativerdf.NativeStore in project platform by dashjoin.

Source: the class RDF4J, method connectAndCollectMetadata.

/**
 * Connects to the configured RDF repository and collects a JSON-schema-like metadata map
 * describing the classes and properties found in the data.
 *
 * Connection mode is taken from {@code mode}: "memory" (in-memory store), "local" (native
 * store under {@code folder}), "sesame" (remote HTTP repository), or "client" (plain SPARQL
 * endpoint). Any other value causes an Exception.
 *
 * The returned map is keyed by class IRI string; each value is a "table" descriptor with
 * an "ID" column plus one entry per property discovered either from the ontology
 * (rdfs:domain / rdfs:range) or by sampling one instance of the class.
 *
 * @return metadata map: class IRI -> table descriptor
 * @throws Exception if no valid mode is configured or repository access fails
 */
@Override
@SuppressWarnings("unchecked")
public Map<String, Object> connectAndCollectMetadata() throws Exception {
    // Select the repository implementation based on the configured mode.
    if ("memory".equals(mode))
        _cp = new SailRepository(new MemoryStore());
    if ("local".equals(mode))
        _cp = new SailRepository(new NativeStore(new File(folder)));
    if ("sesame".equals(mode)) {
        _cp = new HTTPRepository(endpoint);
        ((HTTPRepository) _cp).setUsernameAndPassword(username, password);
    }
    if ("client".equals(mode)) {
        _cp = new SPARQLRepository(endpoint);
        ((SPARQLRepository) _cp).setUsernameAndPassword(username, password);
    }
    // None of the modes matched: fail fast with a configuration hint.
    if (_cp == null)
        throw new Exception("dashjoin.database.mode must be set to one of memory, local, sesame, client");
    _cp.init();
    vf = _cp.getValueFactory();
    Map<String, Object> meta = new HashMap<>();
    try (RepositoryConnection con = _cp.getConnection()) {
        // In-memory mode only: bulk-load the configured datasets into the fresh store.
        // The parser format is guessed from the file name, defaulting to RDF/XML.
        if (datasets != null && "memory".equals(mode))
            for (String s : datasets) {
                log.info("loading dataset " + s);
                InputStream ddl = Loader.open(s);
                RDFFormat format = Rio.getParserFormatForFileName(s).orElse(RDFFormat.RDFXML);
                con.add(ddl, "", format);
            }
        if (ontologies != null) {
            log.info("loading ontologies from " + ontologies);
            // Delegate to a temporary RDF4J instance that loads the ontology files as
            // "datasets" in memory mode but shares this repository's connection, so the
            // ontology statements end up in _cp. The recursive call below then performs
            // the metadata scan and returns its result directly.
            RDF4J tmp = new RDF4J() {

                @Override
                RepositoryConnection getConnection() {
                    return _cp.getConnection();
                }
            };
            tmp.ID = ID;
            tmp.mode = "memory";
            tmp.datasets = this.ontologies;
            return tmp.connectAndCollectMetadata();
        }
        // scan the ontology for property (domain and range info)
        log.info("loading ontologies from database");
        // type2props: class IRI -> properties whose rdfs:domain is that class.
        // prop2types: property IRI -> classes/datatypes given as its rdfs:range.
        Map<IRI, Set<IRI>> type2props = new HashMap<>();
        Map<IRI, Set<IRI>> prop2types = new HashMap<>();
        for (IRI dr : new IRI[] { RDFS.DOMAIN, RDFS.RANGE }) {
            Map<IRI, Set<IRI>> drs = dr == RDFS.DOMAIN ? type2props : prop2types;
            try (RepositoryResult<Statement> d = con.getStatements(null, dr, null)) {
                while (d.hasNext()) {
                    Statement i = d.next();
                    if (i.getSubject() instanceof IRI && i.getObject() instanceof IRI) {
                        // For DOMAIN the key is the object (the class) and the value the
                        // subject (the property); for RANGE it is the other way around.
                        IRI s = dr == RDFS.DOMAIN ? (IRI) i.getObject() : (IRI) i.getSubject();
                        IRI o = dr == RDFS.DOMAIN ? (IRI) i.getSubject() : (IRI) i.getObject();
                        Set<IRI> set = drs.get(s);
                        if (set == null) {
                            set = new HashSet<>();
                            drs.put(s, set);
                        }
                        set.add(o);
                    }
                }
            }
        }
        // remember subclass tree (no multiple inheritance)
        Map<IRI, Set<IRI>> subclasses = new LinkedHashMap<>();
        // scan the ontology for classes
        // Three statement patterns identify classes: ?s rdf:type owl:Class,
        // ?s rdf:type rdfs:Class, and ?s rdfs:subClassOf ?o (any object).
        for (IRI[] i : new IRI[][] { new IRI[] { RDF.TYPE, OWL.CLASS }, new IRI[] { RDF.TYPE, RDFS.CLASS }, new IRI[] { RDFS.SUBCLASSOF, null } }) try (RepositoryResult<Statement> types = con.getStatements(null, i[0], i[1])) {
            while (types.hasNext()) {
                Statement stmt = types.next();
                Resource s = stmt.getSubject();
                if (s instanceof IRI) {
                    // Record the superclass -> subclasses edge when this match came
                    // from the rdfs:subClassOf pattern.
                    if (stmt.getObject() instanceof IRI)
                        if (stmt.getPredicate().equals(RDFS.SUBCLASSOF)) {
                            Set<IRI> set = subclasses.get(stmt.getObject());
                            if (set == null) {
                                set = new HashSet<>();
                                subclasses.put((IRI) stmt.getObject(), set);
                            }
                            set.add((IRI) s);
                        }
                    // Build the table descriptor for this class with a synthetic
                    // "ID" primary-key column of type string/uri.
                    Map<String, Object> table = new HashMap<>();
                    table.put("parent", ID);
                    table.put("name", s.stringValue());
                    table.put("ID", ID + "/" + Escape.encodeTableOrColumnName(s.stringValue()));
                    table.put("type", "object");
                    Map<String, Object> properties = new LinkedHashMap<>();
                    Map<String, Object> id = new HashMap<>();
                    id.put("pkpos", 0);
                    id.put("name", "ID");
                    id.put("type", "string");
                    id.put("format", "uri");
                    id.put("errorMessage", "Please enter a valid URI");
                    id.put("parent", table.get("ID"));
                    id.put("ID", table.get("ID") + "/ID");
                    properties.put("ID", id);
                    table.put("properties", properties);
                    table.put("required", Arrays.asList("ID"));
                    meta.put(s.stringValue(), table);
                    // Attach ontology-declared properties; only properties with exactly
                    // one declared range are added (ambiguous ranges are skipped).
                    Set<IRI> props = type2props.get(s);
                    if (props != null)
                        for (IRI prop : props) {
                            Set<IRI> ranges = prop2types.get(prop);
                            if (ranges != null)
                                if (ranges.size() == 1) {
                                    // maxCardinality null or > 1 means the property is array-valued.
                                    Integer maxcard = getMaxCardinality(prop);
                                    addProp("" + table.get("ID"), prop, properties, ranges.iterator().next(), maxcard == null || maxcard > 1);
                                }
                        }
                }
            }
        }
        // Roots = superclasses that never appear as a subclass; propagate their
        // properties down the subclass tree.
        Set<IRI> roots = new HashSet<IRI>(subclasses.keySet());
        for (Set<IRI> sub : subclasses.values()) roots.removeAll(sub);
        for (IRI root : roots) copyProps(root, subclasses, meta);
        log.info("detected " + meta.size() + " classes");
        // scan props using one sample
        log.info("scannning data...");
        // For each detected class, inspect the statements of ONE sample instance to
        // discover properties missing from the ontology and to correct cardinality.
        for (Entry<String, Object> cls : meta.entrySet()) try (RepositoryResult<Statement> types = con.getStatements(null, RDF.TYPE, iri(cls.getKey()))) {
            if (types.hasNext()) {
                Statement type = types.next();
                Map<String, Object> table = (Map<String, Object>) cls.getValue();
                Map<String, Object> properties = (Map<String, Object>) table.get("properties");
                try (RepositoryResult<Statement> columns = con.getStatements(type.getSubject(), null, null)) {
                    // list of detected props that will be added to / enhances the ontology
                    Map<IRI, ColType> cols = new LinkedHashMap<>();
                    while (columns.hasNext()) {
                        Statement column = columns.next();
                        if (column.getPredicate().equals(RDF.TYPE))
                            continue;
                        ColType col = cols.get(column.getPredicate());
                        if (col != null)
                            // predicate appears again => must be array
                            col.array = true;
                        else {
                            col = new ColType();
                            col.sample = column.getObject();
                            col.array = false;
                            cols.put(column.getPredicate(), col);
                        }
                    }
                    for (Entry<IRI, ColType> e : cols.entrySet()) {
                        Map<String, Object> property = (Map<String, Object>) properties.get(e.getKey().stringValue());
                        if (property == null) {
                            // prop is not yet in the ontology
                            // Literals use their datatype; IRI values use the type of the
                            // referenced resource (skipped when the type cannot be resolved).
                            Value value = e.getValue().sample;
                            if (value instanceof Literal)
                                addProp((String) table.get("ID"), e.getKey(), properties, ((Literal) value).getDatatype(), e.getValue().array);
                            else if (value instanceof IRI) {
                                IRI t = getType((IRI) value);
                                if (t != null)
                                    addProp((String) table.get("ID"), e.getKey(), properties, t, e.getValue().array);
                            }
                        } else {
                            // check cardinality
                            if (property.get("type").equals("array"))
                                if (!e.getValue().array) {
                                    // data suggests single value - retract array type
                                    Map<String, Object> items = (Map<String, Object>) property.remove("items");
                                    property.putAll(items);
                                    // change display props also - see addProp below
                                    // https://github.com/dashjoin/platform/issues/94
                                    property.remove("layout");
                                    if (property.remove("displayWith") != null)
                                        property.put("displayWith", "fkln");
                                }
                        }
                    }
                }
            }
        }
        log.info("done");
    }
    return meta;
}
Also used : RepositoryConnection(org.eclipse.rdf4j.repository.RepositoryConnection) IRI(org.eclipse.rdf4j.model.IRI) Set(java.util.Set) HashSet(java.util.HashSet) BindingSet(org.eclipse.rdf4j.query.BindingSet) SPARQLRepository(org.eclipse.rdf4j.repository.sparql.SPARQLRepository) SailRepository(org.eclipse.rdf4j.repository.sail.SailRepository) HashMap(java.util.HashMap) LinkedHashMap(java.util.LinkedHashMap) NativeStore(org.eclipse.rdf4j.sail.nativerdf.NativeStore) HTTPRepository(org.eclipse.rdf4j.repository.http.HTTPRepository) LinkedHashMap(java.util.LinkedHashMap) MemoryStore(org.eclipse.rdf4j.sail.memory.MemoryStore) Entry(java.util.Map.Entry) Literal(org.eclipse.rdf4j.model.Literal) RepositoryResult(org.eclipse.rdf4j.repository.RepositoryResult) RDFFormat(org.eclipse.rdf4j.rio.RDFFormat) HashSet(java.util.HashSet) InputStream(java.io.InputStream) Statement(org.eclipse.rdf4j.model.Statement) Resource(org.eclipse.rdf4j.model.Resource) RepositoryException(org.eclipse.rdf4j.repository.RepositoryException) Value(org.eclipse.rdf4j.model.Value) File(java.io.File) Map(java.util.Map) HashMap(java.util.HashMap) LinkedHashMap(java.util.LinkedHashMap)

Example 2 with NativeStore

use of org.eclipse.rdf4j.sail.nativerdf.NativeStore in project corese by Wimmics.

Source: the class Rdf4jExperimentations, method saveDateInFile.

/**
 * Parses a bundled Turtle file into a CoreseModel and persists it into an on-disk
 * RDF4J NativeStore located in a "data" directory next to the test resources.
 *
 * Fixes over the previous version:
 * - the classpath InputStream is now closed via try-with-resources (it leaked before)
 * - the data directory is built with the File(parent, child) constructor instead of
 *   string concatenation of URL.getPath()
 *
 * @throws RDFParseException if the Turtle content is malformed
 * @throws UnsupportedRDFormatException if no Turtle parser is available
 * @throws IOException on read failure
 */
@Test
public void saveDateInFile() throws RDFParseException, UnsupportedRDFormatException, IOException {
    // Open a turtle file from the classpath; close it even if parsing fails.
    try (InputStream input_stream = Rdf4jExperimentations.class.getResourceAsStream("peopleWork.ttl")) {
        // Load file content in CoreseModel
        CoreseModel model = new CoreseModel();
        RDFParser rdfParser = Rio.createParser(RDFFormat.TURTLE);
        rdfParser.setRDFHandler(new StatementCollector(model));
        rdfParser.parse(input_stream);
        // Create a new Repository backed by a NativeStore in <resources>/data/.
        URL url_data = Rdf4jExperimentations.class.getResource("");
        File dataDir = new File(url_data.getPath(), "data");
        Repository db = new SailRepository(new NativeStore(dataDir));
        // Open a connection to the database
        try (RepositoryConnection conn = db.getConnection()) {
            // add the model
            conn.add(model);
        } finally {
            // before our program exits, make sure the database is properly shut down.
            db.shutDown();
        }
    }
}
Also used : RepositoryConnection(org.eclipse.rdf4j.repository.RepositoryConnection) CoreseModel(fr.inria.corese.rdf4j.CoreseModel) Repository(org.eclipse.rdf4j.repository.Repository) SailRepository(org.eclipse.rdf4j.repository.sail.SailRepository) SailRepository(org.eclipse.rdf4j.repository.sail.SailRepository) InputStream(java.io.InputStream) StatementCollector(org.eclipse.rdf4j.rio.helpers.StatementCollector) NativeStore(org.eclipse.rdf4j.sail.nativerdf.NativeStore) RDFParser(org.eclipse.rdf4j.rio.RDFParser) File(java.io.File) URL(java.net.URL) Test(org.junit.Test)

Example 3 with NativeStore

use of org.eclipse.rdf4j.sail.nativerdf.NativeStore in project com.inova8.intelligentgraph by peterjohnlawrence.

Source: the class ObjectProvenanceTest, method setUpBeforeClass.

/**
 * Sets the up before class: wipes the NativeStore data directory, creates a fresh
 * repository, and loads the calc2graph test data into the shared {@code conn}.
 *
 * Fix over the previous version: the FileInputStream is closed via
 * try-with-resources (it leaked before).
 *
 * @throws Exception the exception
 */
@BeforeAll
static void setUpBeforeClass() throws Exception {
    // Start from an empty on-disk store so runs are reproducible.
    File dataDir = new File("src/test/resources/datadir/olgap/");
    FileUtils.deleteDirectory(dataDir);
    org.eclipse.rdf4j.repository.Repository workingRep = new SailRepository(new NativeStore(dataDir));
    String modelFilename = "src/test/resources/calc2graph.data.ttl";
    try (InputStream input = new FileInputStream(modelFilename)) {
        // Parse the Turtle file fully into memory, then add all statements.
        Model model = Rio.parse(input, "", RDFFormat.TURTLE);
        conn = workingRep.getConnection();
        conn.add(model.getStatements(null, null, null));
    }
}
Also used : SailRepository(org.eclipse.rdf4j.repository.sail.SailRepository) FileInputStream(java.io.FileInputStream) InputStream(java.io.InputStream) Model(org.eclipse.rdf4j.model.Model) NativeStore(org.eclipse.rdf4j.sail.nativerdf.NativeStore) File(java.io.File) FileInputStream(java.io.FileInputStream) BeforeAll(org.junit.jupiter.api.BeforeAll)

Example 4 with NativeStore

use of org.eclipse.rdf4j.sail.nativerdf.NativeStore in project com.inova8.intelligentgraph by peterjohnlawrence.

Source: the class FactTest, method setUpBeforeClass.

/**
 * Sets the up before class: wipes the NativeStore data directory, creates a fresh
 * repository, and loads the calc2graph data and definition files into the shared
 * {@code conn}.
 *
 * Fixes over the previous version:
 * - both FileInputStreams are closed via try-with-resources (they leaked before)
 * - the connection is obtained once and reused (the second getConnection() call
 *   leaked the first connection)
 *
 * @throws Exception the exception
 */
@BeforeAll
static void setUpBeforeClass() throws Exception {
    // Start from an empty on-disk store so runs are reproducible.
    File dataDir = new File("src/test/resources/datadir/olgap/");
    FileUtils.deleteDirectory(dataDir);
    org.eclipse.rdf4j.repository.Repository workingRep = new SailRepository(new NativeStore(dataDir));
    conn = workingRep.getConnection();
    loadTurtleFile(conn, "src/test/resources/calc2graph.data.ttl");
    loadTurtleFile(conn, "src/test/resources/calc2graph.def.ttl");
    // NOTE: loading of src/test/resources/Plant.2d.def.ttl is intentionally disabled
    // (was commented out in the original setup).
}

/** Parses a Turtle file and adds all of its statements through the given connection. */
private static void loadTurtleFile(RepositoryConnection connection, String filename) throws Exception {
    try (InputStream input = new FileInputStream(filename)) {
        Model model = Rio.parse(input, "", RDFFormat.TURTLE);
        connection.add(model.getStatements(null, null, null));
    }
}
Also used : SailRepository(org.eclipse.rdf4j.repository.sail.SailRepository) FileInputStream(java.io.FileInputStream) InputStream(java.io.InputStream) Model(org.eclipse.rdf4j.model.Model) NativeStore(org.eclipse.rdf4j.sail.nativerdf.NativeStore) File(java.io.File) FileInputStream(java.io.FileInputStream) BeforeAll(org.junit.jupiter.api.BeforeAll)

Example 5 with NativeStore

use of org.eclipse.rdf4j.sail.nativerdf.NativeStore in project com.inova8.intelligentgraph by peterjohnlawrence.

Source: the class FactValueTest, method setUpBeforeClass.

/**
 * Sets the up before class: wipes the NativeStore data directory, creates a fresh
 * repository, and loads the calc2graph data and definition files into the shared
 * {@code conn}.
 *
 * Fixes over the previous version:
 * - both FileInputStreams are closed via try-with-resources (they leaked before)
 * - the connection is obtained once and reused (the second getConnection() call
 *   leaked the first connection)
 *
 * @throws Exception the exception
 */
@BeforeAll
static void setUpBeforeClass() throws Exception {
    // Start from an empty on-disk store so runs are reproducible.
    File dataDir = new File("src/test/resources/datadir/olgap/");
    FileUtils.deleteDirectory(dataDir);
    org.eclipse.rdf4j.repository.Repository workingRep = new SailRepository(new NativeStore(dataDir));
    conn = workingRep.getConnection();
    addTurtleFile(conn, "src/test/resources/calc2graph.data.ttl");
    addTurtleFile(conn, "src/test/resources/calc2graph.def.ttl");
    // NOTE: loading of src/test/resources/Plant.2d.def.ttl is intentionally disabled
    // (was commented out in the original setup).
}

/** Parses a Turtle file and adds all of its statements through the given connection. */
private static void addTurtleFile(RepositoryConnection connection, String filename) throws Exception {
    try (InputStream input = new FileInputStream(filename)) {
        Model model = Rio.parse(input, "", RDFFormat.TURTLE);
        connection.add(model.getStatements(null, null, null));
    }
}
Also used : SailRepository(org.eclipse.rdf4j.repository.sail.SailRepository) FileInputStream(java.io.FileInputStream) InputStream(java.io.InputStream) Model(org.eclipse.rdf4j.model.Model) NativeStore(org.eclipse.rdf4j.sail.nativerdf.NativeStore) File(java.io.File) FileInputStream(java.io.FileInputStream) BeforeAll(org.junit.jupiter.api.BeforeAll)

Aggregations

SailRepository (org.eclipse.rdf4j.repository.sail.SailRepository)14 NativeStore (org.eclipse.rdf4j.sail.nativerdf.NativeStore)14 File (java.io.File)13 InputStream (java.io.InputStream)9 FileInputStream (java.io.FileInputStream)7 Model (org.eclipse.rdf4j.model.Model)7 BeforeAll (org.junit.jupiter.api.BeforeAll)7 Sail (org.eclipse.rdf4j.sail.Sail)4 IntelligentGraphSail (com.inova8.intelligentgraph.sail.IntelligentGraphSail)3 RepositoryConnection (org.eclipse.rdf4j.repository.RepositoryConnection)3 LuceneSail (org.eclipse.rdf4j.sail.lucene.LuceneSail)3 IntelligentGraphConfig (com.inova8.intelligentgraph.sail.IntelligentGraphConfig)2 IntelligentGraphFactory (com.inova8.intelligentgraph.sail.IntelligentGraphFactory)2 URL (java.net.URL)2 BindingSet (org.eclipse.rdf4j.query.BindingSet)2 Repository (org.eclipse.rdf4j.repository.Repository)2 Test (org.junit.Test)2 CoreseModel (fr.inria.corese.rdf4j.CoreseModel)1 HashMap (java.util.HashMap)1 HashSet (java.util.HashSet)1