Use of org.eclipse.rdf4j.sail.nativerdf.NativeStore in project platform by dashjoin.
Class RDF4J, method connectAndCollectMetadata:
@Override
@SuppressWarnings("unchecked")
public Map<String, Object> connectAndCollectMetadata() throws Exception {
  // pick the repository implementation based on the configured mode
  if ("memory".equals(mode))
    _cp = new SailRepository(new MemoryStore());
  if ("local".equals(mode))
    _cp = new SailRepository(new NativeStore(new File(folder)));
  if ("sesame".equals(mode)) {
    _cp = new HTTPRepository(endpoint);
    ((HTTPRepository) _cp).setUsernameAndPassword(username, password);
  }
  if ("client".equals(mode)) {
    _cp = new SPARQLRepository(endpoint);
    ((SPARQLRepository) _cp).setUsernameAndPassword(username, password);
  }
  if (_cp == null)
    throw new Exception("dashjoin.database.mode must be set to one of memory, local, sesame, client");
  _cp.init();
  vf = _cp.getValueFactory();
  Map<String, Object> meta = new HashMap<>();
  try (RepositoryConnection con = _cp.getConnection()) {
    if (datasets != null && "memory".equals(mode))
      for (String s : datasets) {
        log.info("loading dataset " + s);
        InputStream ddl = Loader.open(s);
        RDFFormat format = Rio.getParserFormatForFileName(s).orElse(RDFFormat.RDFXML);
        con.add(ddl, "", format);
      }
    if (ontologies != null) {
      log.info("loading ontologies from " + ontologies);
      // delegate to an in-memory instance that shares this repository's connection
      RDF4J tmp = new RDF4J() {
        @Override
        RepositoryConnection getConnection() {
          return _cp.getConnection();
        }
      };
      tmp.ID = ID;
      tmp.mode = "memory";
      tmp.datasets = this.ontologies;
      return tmp.connectAndCollectMetadata();
    }

    // scan the ontology for property (domain and range) info
    log.info("loading ontologies from database");
    Map<IRI, Set<IRI>> type2props = new HashMap<>();
    Map<IRI, Set<IRI>> prop2types = new HashMap<>();
    for (IRI dr : new IRI[] {RDFS.DOMAIN, RDFS.RANGE}) {
      Map<IRI, Set<IRI>> drs = dr == RDFS.DOMAIN ? type2props : prop2types;
      try (RepositoryResult<Statement> d = con.getStatements(null, dr, null)) {
        while (d.hasNext()) {
          Statement i = d.next();
          if (i.getSubject() instanceof IRI && i.getObject() instanceof IRI) {
            IRI s = dr == RDFS.DOMAIN ? (IRI) i.getObject() : (IRI) i.getSubject();
            IRI o = dr == RDFS.DOMAIN ? (IRI) i.getSubject() : (IRI) i.getObject();
            Set<IRI> set = drs.get(s);
            if (set == null) {
              set = new HashSet<>();
              drs.put(s, set);
            }
            set.add(o);
          }
        }
      }
    }

    // remember the subclass tree (no multiple inheritance)
    Map<IRI, Set<IRI>> subclasses = new LinkedHashMap<>();

    // scan the ontology for classes: owl:Class, rdfs:Class, or anything appearing in rdfs:subClassOf
    for (IRI[] i : new IRI[][] {new IRI[] {RDF.TYPE, OWL.CLASS}, new IRI[] {RDF.TYPE, RDFS.CLASS},
        new IRI[] {RDFS.SUBCLASSOF, null}})
      try (RepositoryResult<Statement> types = con.getStatements(null, i[0], i[1])) {
        while (types.hasNext()) {
          Statement stmt = types.next();
          Resource s = stmt.getSubject();
          if (s instanceof IRI) {
            if (stmt.getObject() instanceof IRI)
              if (stmt.getPredicate().equals(RDFS.SUBCLASSOF)) {
                Set<IRI> set = subclasses.get(stmt.getObject());
                if (set == null) {
                  set = new HashSet<>();
                  subclasses.put((IRI) stmt.getObject(), set);
                }
                set.add((IRI) s);
              }
            // register the class as a "table" with a single required ID column
            Map<String, Object> table = new HashMap<>();
            table.put("parent", ID);
            table.put("name", s.stringValue());
            table.put("ID", ID + "/" + Escape.encodeTableOrColumnName(s.stringValue()));
            table.put("type", "object");
            Map<String, Object> properties = new LinkedHashMap<>();
            Map<String, Object> id = new HashMap<>();
            id.put("pkpos", 0);
            id.put("name", "ID");
            id.put("type", "string");
            id.put("format", "uri");
            id.put("errorMessage", "Please enter a valid URI");
            id.put("parent", table.get("ID"));
            id.put("ID", table.get("ID") + "/ID");
            properties.put("ID", id);
            table.put("properties", properties);
            table.put("required", Arrays.asList("ID"));
            meta.put(s.stringValue(), table);
            // add the properties whose domain is this class and whose range is unambiguous
            Set<IRI> props = type2props.get(s);
            if (props != null)
              for (IRI prop : props) {
                Set<IRI> ranges = prop2types.get(prop);
                if (ranges != null)
                  if (ranges.size() == 1) {
                    Integer maxcard = getMaxCardinality(prop);
                    addProp("" + table.get("ID"), prop, properties, ranges.iterator().next(),
                        maxcard == null || maxcard > 1);
                  }
              }
          }
        }
      }

    // roots are classes that never appear as a subclass; copy their props down the tree
    Set<IRI> roots = new HashSet<IRI>(subclasses.keySet());
    for (Set<IRI> sub : subclasses.values())
      roots.removeAll(sub);
    for (IRI root : roots)
      copyProps(root, subclasses, meta);
    log.info("detected " + meta.size() + " classes");

    // scan props using one sample instance per class
    log.info("scanning data...");
    for (Entry<String, Object> cls : meta.entrySet())
      try (RepositoryResult<Statement> types = con.getStatements(null, RDF.TYPE, iri(cls.getKey()))) {
        if (types.hasNext()) {
          Statement type = types.next();
          Map<String, Object> table = (Map<String, Object>) cls.getValue();
          Map<String, Object> properties = (Map<String, Object>) table.get("properties");
          try (RepositoryResult<Statement> columns = con.getStatements(type.getSubject(), null, null)) {
            // list of detected props that will be added to / enhance the ontology
            Map<IRI, ColType> cols = new LinkedHashMap<>();
            while (columns.hasNext()) {
              Statement column = columns.next();
              if (column.getPredicate().equals(RDF.TYPE))
                continue;
              ColType col = cols.get(column.getPredicate());
              if (col != null)
                // predicate appears again => must be an array
                col.array = true;
              else {
                col = new ColType();
                col.sample = column.getObject();
                col.array = false;
                cols.put(column.getPredicate(), col);
              }
            }
            for (Entry<IRI, ColType> e : cols.entrySet()) {
              Map<String, Object> property =
                  (Map<String, Object>) properties.get(e.getKey().stringValue());
              if (property == null) {
                // prop is not yet in the ontology
                Value value = e.getValue().sample;
                if (value instanceof Literal)
                  addProp((String) table.get("ID"), e.getKey(), properties,
                      ((Literal) value).getDatatype(), e.getValue().array);
                else if (value instanceof IRI) {
                  IRI t = getType((IRI) value);
                  if (t != null)
                    addProp((String) table.get("ID"), e.getKey(), properties, t, e.getValue().array);
                }
              } else {
                // check cardinality
                if (property.get("type").equals("array"))
                  if (!e.getValue().array) {
                    // data suggests a single value - retract the array type
                    Map<String, Object> items = (Map<String, Object>) property.remove("items");
                    property.putAll(items);
                    // change display props also - see addProp below
                    // https://github.com/dashjoin/platform/issues/94
                    property.remove("layout");
                    if (property.remove("displayWith") != null)
                      property.put("displayWith", "fkln");
                  }
              }
            }
          }
        }
      }
    log.info("done");
  }
  return meta;
}
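The domain/range scan above iterates over raw statements; the same information can also be read with a single SPARQL query over a NativeStore. A minimal standalone sketch, not part of the dashjoin code: the class name and the "data" directory are illustrative assumptions, and unlike the method above it only finds properties that declare both a domain and a range.

import java.io.File;
import org.eclipse.rdf4j.query.BindingSet;
import org.eclipse.rdf4j.query.TupleQueryResult;
import org.eclipse.rdf4j.repository.Repository;
import org.eclipse.rdf4j.repository.RepositoryConnection;
import org.eclipse.rdf4j.repository.sail.SailRepository;
import org.eclipse.rdf4j.sail.nativerdf.NativeStore;

public class DomainRangeScan {
  public static void main(String[] args) {
    // open the on-disk store; "data" is an illustrative directory name
    Repository db = new SailRepository(new NativeStore(new File("data")));
    try (RepositoryConnection con = db.getConnection()) {
      // one query instead of two getStatements scans: property, its domain, and its range
      String q = "PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#> "
          + "SELECT ?p ?domain ?range WHERE { ?p rdfs:domain ?domain . ?p rdfs:range ?range }";
      try (TupleQueryResult res = con.prepareTupleQuery(q).evaluate()) {
        while (res.hasNext()) {
          BindingSet row = res.next();
          System.out.println(row.getValue("p") + ": " + row.getValue("domain") + " -> " + row.getValue("range"));
        }
      }
    } finally {
      db.shutDown();
    }
  }
}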
Use of org.eclipse.rdf4j.sail.nativerdf.NativeStore in project corese by Wimmics.
Class Rdf4jExperimentations, method saveDateInFile:
@Test
public void saveDateInFile() throws RDFParseException, UnsupportedRDFormatException, IOException {
  // Open a Turtle file
  InputStream input_stream = Rdf4jExperimentations.class.getResourceAsStream("peopleWork.ttl");
  // Load the file content into a CoreseModel
  CoreseModel model = new CoreseModel();
  RDFParser rdfParser = Rio.createParser(RDFFormat.TURTLE);
  rdfParser.setRDFHandler(new StatementCollector(model));
  rdfParser.parse(input_stream);
  // Create a new repository backed by an on-disk NativeStore
  URL url_data = Rdf4jExperimentations.class.getResource("");
  File dataDir = new File(url_data.getPath() + "/data/");
  Repository db = new SailRepository(new NativeStore(dataDir));
  // Open a connection to the database
  try (RepositoryConnection conn = db.getConnection()) {
    // add the model
    conn.add(model);
  } finally {
    // before our program exits, make sure the database is properly shut down
    db.shutDown();
  }
}
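Because NativeStore keeps its indexes on disk, the statements added above survive the shutDown(). A minimal follow-up sketch (not in the corese test) that reopens the same dataDir and counts what was persisted:

// reopen the NativeStore written by saveDateInFile and verify the data survived
Repository db2 = new SailRepository(new NativeStore(dataDir));
try (RepositoryConnection conn = db2.getConnection()) {
  System.out.println("persisted statements: " + conn.size());
} finally {
  db2.shutDown();
}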
Use of org.eclipse.rdf4j.sail.nativerdf.NativeStore in project com.inova8.intelligentgraph by peterjohnlawrence.
Class ObjectProvenanceTest, method setUpBeforeClass:
/**
 * Sets up before class.
 *
 * @throws Exception the exception
 */
@BeforeAll
static void setUpBeforeClass() throws Exception {
  // start from a clean data directory so each run builds a fresh NativeStore
  File dataDir = new File("src/test/resources/datadir/olgap/");
  FileUtils.deleteDirectory(dataDir);
  org.eclipse.rdf4j.repository.Repository workingRep = new SailRepository(new NativeStore(dataDir));
  String modelFilename = "src/test/resources/calc2graph.data.ttl";
  InputStream input = new FileInputStream(modelFilename);
  Model model = Rio.parse(input, "", RDFFormat.TURTLE);
  conn = workingRep.getConnection();
  conn.add(model.getStatements(null, null, null));
}
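The setup opens a connection but never releases it. A minimal teardown sketch, assuming workingRep is promoted from a local variable to a static field alongside conn (it is local in the original):

@AfterAll
static void tearDownAfterClass() {
  // assumes workingRep is a static field; close the connection before shutting down
  if (conn != null)
    conn.close();
  if (workingRep != null)
    workingRep.shutDown();
}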
Use of org.eclipse.rdf4j.sail.nativerdf.NativeStore in project com.inova8.intelligentgraph by peterjohnlawrence.
Class FactTest, method setUpBeforeClass:
/**
 * Sets up before class.
 *
 * @throws Exception the exception
 */
@BeforeAll
static void setUpBeforeClass() throws Exception {
  // start from a clean data directory so each run builds a fresh NativeStore
  File dataDir = new File("src/test/resources/datadir/olgap/");
  FileUtils.deleteDirectory(dataDir);
  org.eclipse.rdf4j.repository.Repository workingRep = new SailRepository(new NativeStore(dataDir));
  String dataFilename = "src/test/resources/calc2graph.data.ttl";
  InputStream dataInput = new FileInputStream(dataFilename);
  Model dataModel = Rio.parse(dataInput, "", RDFFormat.TURTLE);
  conn = workingRep.getConnection();
  conn.add(dataModel.getStatements(null, null, null));
  String modelFilename = "src/test/resources/calc2graph.def.ttl";
  InputStream modelInput = new FileInputStream(modelFilename);
  Model modelModel = Rio.parse(modelInput, "", RDFFormat.TURTLE);
  // note: getConnection() is called again here, opening a second connection
  conn = workingRep.getConnection();
  conn.add(modelModel.getStatements(null, null, null));
  // String plantFilename = "src/test/resources/Plant.2d.def.ttl";
  // InputStream plantInput = new FileInputStream(plantFilename);
  // Model plantModel = Rio.parse(plantInput, "", RDFFormat.TURTLE);
  // conn = workingRep.getConnection();
  // conn.add(plantModel.getStatements(null, null, null));
}
Use of org.eclipse.rdf4j.sail.nativerdf.NativeStore in project com.inova8.intelligentgraph by peterjohnlawrence.
Class FactValueTest, method setUpBeforeClass:
/**
 * Sets up before class.
 *
 * @throws Exception the exception
 */
@BeforeAll
static void setUpBeforeClass() throws Exception {
  // start from a clean data directory so each run builds a fresh NativeStore
  File dataDir = new File("src/test/resources/datadir/olgap/");
  FileUtils.deleteDirectory(dataDir);
  org.eclipse.rdf4j.repository.Repository workingRep = new SailRepository(new NativeStore(dataDir));
  String dataFilename = "src/test/resources/calc2graph.data.ttl";
  InputStream dataInput = new FileInputStream(dataFilename);
  Model dataModel = Rio.parse(dataInput, "", RDFFormat.TURTLE);
  conn = workingRep.getConnection();
  conn.add(dataModel.getStatements(null, null, null));
  String modelFilename = "src/test/resources/calc2graph.def.ttl";
  InputStream modelInput = new FileInputStream(modelFilename);
  Model modelModel = Rio.parse(modelInput, "", RDFFormat.TURTLE);
  // note: getConnection() is called again here, opening a second connection
  conn = workingRep.getConnection();
  conn.add(modelModel.getStatements(null, null, null));
  // String plantFilename = "src/test/resources/Plant.2d.def.ttl";
  // InputStream plantInput = new FileInputStream(plantFilename);
  // Model plantModel = Rio.parse(plantInput, "", RDFFormat.TURTLE);
  // conn = workingRep.getConnection();
  // conn.add(plantModel.getStatements(null, null, null));
}
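FactTest and FactValueTest duplicate the same setup. A hedged refactoring sketch that loads any number of Turtle files through one helper; the method name openWithData is illustrative and not part of the project:

/** Illustrative helper: load several Turtle files into a fresh NativeStore-backed repository. */
static RepositoryConnection openWithData(File dataDir, String... ttlFiles) throws IOException {
  Repository rep = new SailRepository(new NativeStore(dataDir));
  RepositoryConnection conn = rep.getConnection();
  for (String ttl : ttlFiles) {
    try (InputStream in = new FileInputStream(ttl)) {
      // a Model is an Iterable<Statement>, so it can be added directly
      conn.add(Rio.parse(in, "", RDFFormat.TURTLE));
    }
  }
  return conn;
}

With this helper, both setUpBeforeClass methods would reduce to a single call: conn = openWithData(dataDir, "src/test/resources/calc2graph.data.ttl", "src/test/resources/calc2graph.def.ttl");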