// Use of com.bigdata.rdf.sail.BigdataSailRepositoryConnection in project minerva (geneontology).
// From class BlazegraphMolecularModelManager, method dumpStoredModel:
/**
 * Exports a stored model from the Blazegraph repository to a Turtle file on disk.
 * The model is first written to a temp file and then copied to
 * {@code folder/<modelId minus prefix>.ttl}, so a failed export never leaves a
 * truncated target file behind.
 *
 * @param modelId the IRI of the model (also the named graph to export)
 * @param folder  destination directory for the .ttl file
 * @throws IOException if the target path is unusable, or if the repository
 *                     access / RDF serialization fails (causes are wrapped)
 */
public void dumpStoredModel(IRI modelId, File folder) throws IOException {
    // preliminary checks for the target file
    String fileName = StringUtils.replaceOnce(modelId.toString(), modelIdPrefix, "") + ".ttl";
    File targetFile = new File(folder, fileName).getAbsoluteFile();
    if (targetFile.exists()) {
        if (targetFile.isFile() == false) {
            throw new IOException("For modelId: '" + modelId + "', the resulting path is not a file: " + targetFile.getAbsolutePath());
        }
        if (targetFile.canWrite() == false) {
            throw new IOException("For modelId: '" + modelId + "', Cannot write to the file: " + targetFile.getAbsolutePath());
        }
    } else {
        File targetFolder = targetFile.getParentFile();
        FileUtils.forceMkdir(targetFolder);
    }
    File tempFile = null;
    try {
        // create tempFile
        // TODO escape
        String prefix = modelId.toString();
        tempFile = File.createTempFile(prefix, ".ttl");
        try {
            BigdataSailRepositoryConnection connection = repo.getReadOnlyConnection();
            try {
                // Nested try/finally so the connection is released even if
                // opening the output stream throws, and so a failing
                // out.close() cannot skip connection.close().
                OutputStream out = new FileOutputStream(tempFile);
                try {
                    // Workaround for order dependence of RDF reading by OWL API
                    // Need to output ontology triple first until this bug is fixed:
                    // https://github.com/owlcs/owlapi/issues/574
                    ValueFactory factory = connection.getValueFactory();
                    Statement ontologyDeclaration = factory.createStatement(factory.createURI(modelId.toString()), RDF.TYPE, OWL.ONTOLOGY);
                    Rio.write(Collections.singleton(ontologyDeclaration), out, RDFFormat.TURTLE);
                    // end workaround
                    RDFWriter writer = Rio.createWriter(RDFFormat.TURTLE, out);
                    connection.export(writer, new URIImpl(modelId.toString()));
                    // copy temp file to the finalFile
                    FileUtils.copyFile(tempFile, targetFile);
                } finally {
                    out.close();
                }
            } finally {
                connection.close();
            }
        } catch (RepositoryException e) {
            throw new IOException(e);
        } catch (RDFHandlerException e) {
            throw new IOException(e);
        }
    } finally {
        // delete temp file
        FileUtils.deleteQuietly(tempFile);
    }
}
// Use of com.bigdata.rdf.sail.BigdataSailRepositoryConnection in project minerva (geneontology).
// From class BlazegraphOntologyManager, method getLabels:
/**
 * Looks up rdfs:label values for a set of entity URIs in one SPARQL query.
 * Entities without a label are simply absent from the returned map.
 *
 * @param entities entity URIs to resolve; NOTE(review): interpolated into the
 *                 query unescaped — assumes callers pass well-formed, trusted IRIs
 * @return map from entity URI to its label (one label kept per entity)
 * @throws IOException wrapping any repository or query failure
 */
public Map<String, String> getLabels(Set<String> entities) throws IOException {
    Map<String, String> uri_label = new HashMap<String, String>();
    // Build the VALUES clause with a StringBuilder instead of += in a loop.
    StringBuilder values = new StringBuilder("VALUES ?entity {");
    for (String uri : entities) {
        values.append("<").append(uri).append("> ");
    }
    values.append("} . ");
    String query = "select ?entity ?label where { " + values + " ?entity rdfs:label ?label }";
    try {
        BigdataSailRepositoryConnection connection = go_lego_repo.getReadOnlyConnection();
        try {
            TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query);
            TupleQueryResult result = tupleQuery.evaluate();
            try {
                while (result.hasNext()) {
                    BindingSet binding = result.next();
                    Value v = binding.getValue("label");
                    String label = v.stringValue();
                    Value ev = binding.getValue("entity");
                    String entity = ev.stringValue();
                    uri_label.put(entity, label);
                }
            } finally {
                // Close the result iteration; was previously leaked.
                result.close();
            }
        } catch (MalformedQueryException e) {
            throw new IOException(e);
        } catch (QueryEvaluationException e) {
            throw new IOException(e);
        } finally {
            connection.close();
        }
    } catch (RepositoryException e) {
        throw new IOException(e);
    }
    return uri_label;
}
// Use of com.bigdata.rdf.sail.BigdataSailRepositoryConnection in project minerva (geneontology).
// From class BlazegraphOntologyManager, method getGenesByTaxid:
/**
 * Finds all named gene classes restricted (via RO:0002162 "in taxon") to the
 * given NCBI taxon.
 *
 * @param ncbi_tax_id numeric NCBI taxon id; NOTE(review): interpolated into
 *                    the query unescaped — assumes a trusted, numeric value
 * @return URIs of matching gene classes (anonymous subclasses are skipped)
 * @throws IOException wrapping any repository or query failure
 */
public Set<String> getGenesByTaxid(String ncbi_tax_id) throws IOException {
    Set<String> genes = new HashSet<String>();
    try {
        BigdataSailRepositoryConnection connection = go_lego_repo.getReadOnlyConnection();
        try {
            String query = "select ?gene \n" + "where { \n" + " ?gene rdfs:subClassOf ?taxon_restriction .\n" + " ?taxon_restriction owl:onProperty <http://purl.obolibrary.org/obo/RO_0002162> .\n" + " ?taxon_restriction owl:someValuesFrom <http://purl.obolibrary.org/obo/NCBITaxon_" + ncbi_tax_id + "> \n" + "}";
            TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query);
            TupleQueryResult result = tupleQuery.evaluate();
            try {
                while (result.hasNext()) {
                    BindingSet binding = result.next();
                    Value v = binding.getValue("gene");
                    // ignore anonymous sub classes
                    if (v instanceof URI) {
                        String gene = v.stringValue();
                        genes.add(gene);
                    }
                }
            } finally {
                // Close the result iteration; was previously leaked.
                result.close();
            }
        } catch (MalformedQueryException e) {
            throw new IOException(e);
        } catch (QueryEvaluationException e) {
            throw new IOException(e);
        } finally {
            connection.close();
        }
    } catch (RepositoryException e) {
        throw new IOException(e);
    }
    return genes;
}
// Use of com.bigdata.rdf.sail.BigdataSailRepositoryConnection in project minerva (geneontology).
// From class BlazegraphOntologyManager, method loadRepositoryFromOWLFile:
/**
 * Loads an ontology file into the shared repository under the named graph
 * {@code iri}, inside a single transaction that is rolled back on any failure.
 * The serialization is chosen by file extension: {@code .ttl} is parsed as
 * Turtle, {@code .owl} as RDF/XML; files with any other extension are not
 * loaded (the transaction still commits, so a reset still takes effect).
 *
 * @param file  the ontology file to load
 * @param iri   the named-graph IRI to load into
 * @param reset when true, clear the named graph before loading
 */
public void loadRepositoryFromOWLFile(File file, String iri, boolean reset) throws OWLOntologyCreationException, RepositoryException, IOException, RDFParseException, RDFHandlerException {
    synchronized (go_lego_repo) {
        final BigdataSailRepositoryConnection conn = go_lego_repo.getUnisolatedConnection();
        try {
            conn.begin();
            try {
                final URI graph = new URIImpl(iri);
                if (reset) {
                    conn.clear(graph);
                }
                final String name = file.getName();
                if (name.endsWith(".ttl")) {
                    conn.add(file, "", RDFFormat.TURTLE, graph);
                } else if (name.endsWith(".owl")) {
                    conn.add(file, "", RDFFormat.RDFXML, graph);
                }
                conn.commit();
            } catch (Exception e) {
                // Undo any partial load before propagating the failure.
                conn.rollback();
                throw e;
            }
        } finally {
            conn.close();
        }
    }
}
// Use of com.bigdata.rdf.sail.BigdataSailRepositoryConnection in project minerva (geneontology).
// From class BlazegraphOntologyManager, method buildClassDepthMap:
/**
 * Computes, for every class beneath {@code root_term}, its depth below the
 * root, counted as the number of distinct superclasses on a subClassOf* path.
 * Because a class can reach the root by several paths, the minimum depth seen
 * is kept for each class.
 *
 * @param root_term URI of the root class; NOTE(review): interpolated into the
 *                  query unescaped — assumes a well-formed, trusted IRI
 * @return map from class URI to its minimum depth below the root
 * @throws IOException wrapping any repository or query failure
 */
public Map<String, Integer> buildClassDepthMap(String root_term) throws IOException {
    Map<String, Integer> class_depth = new HashMap<String, Integer>();
    try {
        BigdataSailRepositoryConnection connection = go_lego_repo.getReadOnlyConnection();
        try {
            String query = "PREFIX owl: <http://www.w3.org/2002/07/owl#> " + "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> " + "PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#> " + "SELECT ?class (count(?mid) as ?depth) " + "WHERE { " + "?class rdfs:subClassOf* ?mid . " + "values ?root_term {<" + root_term + ">} . " + " ?mid rdfs:subClassOf* ?root_term ." + "filter ( ?class != ?mid )}" + "group by ?class " + " order by ?depth";
            TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query);
            TupleQueryResult result = tupleQuery.evaluate();
            try {
                while (result.hasNext()) {
                    BindingSet binding = result.next();
                    Value v = binding.getValue("depth");
                    Integer depth = Integer.parseInt(v.stringValue());
                    String c = binding.getValue("class").stringValue();
                    Integer k = class_depth.get(c);
                    // Keep the smallest depth when a class is reported more than once.
                    if ((k == null) || (depth < k)) {
                        class_depth.put(c, depth);
                    }
                }
            } finally {
                // Close the result iteration; was previously leaked.
                result.close();
            }
        } catch (MalformedQueryException e) {
            throw new IOException(e);
        } catch (QueryEvaluationException e) {
            throw new IOException(e);
        } finally {
            connection.close();
        }
    } catch (RepositoryException e) {
        throw new IOException(e);
    }
    return class_depth;
}
// Aggregations