Use of org.semanticweb.owlapi.model.OWLOntologyChange in project stanbol by apache.
Class SessionManagerResource, method listSessions:
@GET
@Produces(value = { RDF_XML, OWL_XML, TURTLE, X_TURTLE, FUNCTIONAL_OWL, MANCHESTER_OWL, RDF_JSON, N3, N_TRIPLE, TEXT_PLAIN })
public Response listSessions(@Context UriInfo uriInfo, @Context HttpHeaders headers) {
    OWLOntologyManager ontMgr = OWLManager.createOWLOntologyManager();
    OWLDataFactory df = ontMgr.getOWLDataFactory();
    OWLClass cSession = df.getOWLClass(IRI.create("http://stanbol.apache.org/ontologies/meta/Session"));
    OWLOntology o;
    try {
        o = ontMgr.createOntology(IRI.create(uriInfo.getRequestUri()));
        List<OWLOntologyChange> changes = new ArrayList<OWLOntologyChange>();
        for (String id : sessionManager.getRegisteredSessionIDs()) {
            IRI sessionid = IRI.create(sessionManager.getDefaultNamespace() + sessionManager.getID() + "/" + id);
            OWLNamedIndividual ind = df.getOWLNamedIndividual(sessionid);
            changes.add(new AddAxiom(o, df.getOWLClassAssertionAxiom(cSession, ind)));
        }
        ontMgr.applyChanges(changes);
    } catch (OWLOntologyCreationException e) {
        throw new WebApplicationException(e, INTERNAL_SERVER_ERROR);
    }
    ResponseBuilder rb = Response.ok(o);
    MediaType mediaType = MediaTypeUtil.getAcceptableMediaType(headers, null);
    if (mediaType != null)
        rb.header(HttpHeaders.CONTENT_TYPE, mediaType);
    // addCORSOrigin(servletContext, rb, headers);
    return rb.build();
}
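The snippet above follows the standard OWL API idiom of collecting OWLOntologyChange objects (here AddAxiom instances) in a list and applying them to the ontology in one batch. A minimal, self-contained sketch of that idiom; the IRIs and session IDs are placeholders, not the Stanbol ones used above:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import org.semanticweb.owlapi.apibinding.OWLManager;
import org.semanticweb.owlapi.model.AddAxiom;
import org.semanticweb.owlapi.model.IRI;
import org.semanticweb.owlapi.model.OWLClass;
import org.semanticweb.owlapi.model.OWLDataFactory;
import org.semanticweb.owlapi.model.OWLNamedIndividual;
import org.semanticweb.owlapi.model.OWLOntology;
import org.semanticweb.owlapi.model.OWLOntologyChange;
import org.semanticweb.owlapi.model.OWLOntologyCreationException;
import org.semanticweb.owlapi.model.OWLOntologyManager;

public class ChangeBatchSketch {

    public static void main(String[] args) throws OWLOntologyCreationException {
        OWLOntologyManager mgr = OWLManager.createOWLOntologyManager();
        OWLDataFactory df = mgr.getOWLDataFactory();
        // Placeholder IRIs standing in for the request URI and the Session meta class.
        OWLOntology o = mgr.createOntology(IRI.create("http://example.org/sessions"));
        OWLClass cSession = df.getOWLClass(IRI.create("http://example.org/meta/Session"));
        List<OWLOntologyChange> changes = new ArrayList<OWLOntologyChange>();
        // Placeholder session IDs; the resource above gets them from the SessionManager.
        for (String id : Arrays.asList("session1", "session2")) {
            OWLNamedIndividual ind = df.getOWLNamedIndividual(IRI.create("http://example.org/session/" + id));
            // Each change is only recorded here; the ontology is not touched yet.
            changes.add(new AddAxiom(o, df.getOWLClassAssertionAxiom(cSession, ind)));
        }
        // All queued changes are applied in one pass.
        mgr.applyChanges(changes);
        System.out.println(o.getAxiomCount() + " class assertions added");
    }
}

Staging edits as OWLOntologyChange objects and applying them through the manager, rather than mutating the ontology directly, is the pattern every method on this page uses.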
Use of org.semanticweb.owlapi.model.OWLOntologyChange in project stanbol by apache.
Class AbstractOntologyCollectorImpl, method getOntologyAsOWLOntology:
protected OWLOntology getOntologyAsOWLOntology(OWLOntologyID ontologyId, boolean merge, org.semanticweb.owlapi.model.IRI universalPrefix) {
    // if (merge) throw new UnsupportedOperationException("Merge not implemented yet for OWLOntology.");
    // Remove the check below. It might be an unmanaged dependency (TODO remove from collector and
    // reintroduce check?).
    // if (!hasOntology(ontologyIri)) return null;
    OWLOntology o;
    o = ontologyProvider.getStoredOntology(ontologyId, OWLOntology.class, merge);
    if (merge) {
        final Set<OWLOntology> set = new HashSet<OWLOntology>();
        log.debug("Merging {} with its imports, if any.", o);
        set.add(o);
        // Actually, if the provider already performed the merge, this won't happen
        for (OWLOntology impo : o.getImportsClosure()) {
            log.debug("Imported ontology {} will be merged with {}.", impo, o);
            set.add(impo);
        }
        OWLOntologySetProvider provider = new OWLOntologySetProvider() {

            @Override
            public Set<OWLOntology> getOntologies() {
                return set;
            }
        };
        OWLOntologyMerger merger = new OWLOntologyMerger(provider);
        try {
            o = merger.createMergedOntology(OWLManager.createOWLOntologyManager(), ontologyId.getOntologyIRI());
        } catch (OWLOntologyCreationException e) {
            log.error("Failed to merge imports for ontology " + ontologyId, e);
            // do not reassign the root ontology
        }
    } else {
        // Rewrite import statements
        List<OWLOntologyChange> changes = new ArrayList<OWLOntologyChange>();
        OWLDataFactory df = OWLManager.getOWLDataFactory();
        /*
         * TODO manage import rewrites better once the container ID is fully configurable (i.e. instead of
         * going upOne() add "session" or "ontology" if needed). But only do this if we keep considering
         * imported ontologies as *not* managed.
         */
        for (OWLImportsDeclaration oldImp : o.getImportsDeclarations()) {
            changes.add(new RemoveImport(o, oldImp));
            String s = oldImp.getIRI().toString();
            // FIXME Ugly way to check, but we'll get through with it
            if (s.contains("::"))
                s = s.substring(s.indexOf("::") + 2, s.length());
            boolean managed = managedOntologies.contains(oldImp.getIRI());
            // For space, always go up at least one
            String tid = getID();
            if (backwardPathLength > 0)
                tid = tid.split("/")[0];
            org.semanticweb.owlapi.model.IRI target = org.semanticweb.owlapi.model.IRI.create((managed ? universalPrefix + "/" + tid + "/" : URIUtils.upOne(universalPrefix) + "/") + s);
            changes.add(new AddImport(o, df.getOWLImportsDeclaration(target)));
        }
        o.getOWLOntologyManager().applyChanges(changes);
    }
    return o;
}
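The non-merge branch rewrites every owl:imports target by pairing a RemoveImport with an AddImport for the new IRI. A minimal sketch of that rewrite step in isolation; the newBase prefix and the way the new target is built are placeholders, since the real code above derives them from the collector ID and namespace:

import java.util.ArrayList;
import java.util.List;

import org.semanticweb.owlapi.apibinding.OWLManager;
import org.semanticweb.owlapi.model.AddImport;
import org.semanticweb.owlapi.model.IRI;
import org.semanticweb.owlapi.model.OWLDataFactory;
import org.semanticweb.owlapi.model.OWLImportsDeclaration;
import org.semanticweb.owlapi.model.OWLOntology;
import org.semanticweb.owlapi.model.OWLOntologyChange;
import org.semanticweb.owlapi.model.RemoveImport;

public final class ImportRewriteSketch {

    /** Repoint every owl:imports declaration of the given ontology under a new base IRI. */
    public static void rewriteImports(OWLOntology o, String newBase) {
        OWLDataFactory df = OWLManager.getOWLDataFactory();
        List<OWLOntologyChange> changes = new ArrayList<OWLOntologyChange>();
        for (OWLImportsDeclaration oldImp : o.getImportsDeclarations()) {
            // Queue the removal of the old declaration...
            changes.add(new RemoveImport(o, oldImp));
            // ...and the addition of a declaration pointing at the rewritten target
            // (placeholder rewrite: simply prepend the new base to the old IRI).
            IRI target = IRI.create(newBase + oldImp.getIRI().toString());
            changes.add(new AddImport(o, df.getOWLImportsDeclaration(target)));
        }
        // Apply through the ontology's own manager, as the collector method above does.
        o.getOWLOntologyManager().applyChanges(changes);
    }
}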
Use of org.semanticweb.owlapi.model.OWLOntologyChange in project stanbol by apache.
Class AbstractOntologyCollectorImpl, method exportToOWLOntology:
/**
 * This method has no conversion calls, so it can be invoked by subclasses that wish to modify it
 * afterwards.
 *
 * FIXME not merging yet. FIXME not including imported ontologies unless they are merged *before* storage.
 *
 * @param merge
 *            if true, the directly managed ontologies are merged into the exported ontology;
 *            otherwise owl:imports statements pointing to them are added.
 * @return the OWL API representation of this collector.
 */
protected OWLOntology exportToOWLOntology(boolean merge, org.semanticweb.owlapi.model.IRI prefix) {
    long before = System.currentTimeMillis();
    // Create a new ontology
    OWLOntology root;
    OWLOntologyManager ontologyManager = OWLManager.createOWLOntologyManager();
    org.semanticweb.owlapi.model.IRI iri = org.semanticweb.owlapi.model.IRI.create(prefix + _id);
    try {
        root = ontologyManager.createOntology(iri);
    } catch (OWLOntologyAlreadyExistsException e) {
        // It should be impossible, but just in case.
        ontologyManager.removeOntology(ontologyManager.getOntology(iri));
        try {
            root = ontologyManager.createOntology(iri);
        } catch (OWLOntologyAlreadyExistsException e1) {
            root = ontologyManager.getOntology(iri);
        } catch (OWLOntologyCreationException e1) {
            log.error("Failed to assemble root ontology for scope " + iri, e);
            root = null;
        }
    } catch (OWLOntologyCreationException e) {
        log.error("Failed to assemble root ontology for scope " + _id, e);
        root = null;
    }
    // Add the import declarations for directly managed ontologies.
    if (root != null) {
        if (merge) {
            final Set<OWLOntology> set = new HashSet<OWLOntology>();
            log.debug("Merging {} with its imports.", root);
            set.add(root);
            for (OWLOntologyID ontologyId : managedOntologies) {
                log.debug("Merging {} with {}.", ontologyId, root);
                set.add(getOntology(ontologyId, OWLOntology.class, true));
            }
            OWLOntologySetProvider provider = new OWLOntologySetProvider() {

                @Override
                public Set<OWLOntology> getOntologies() {
                    return set;
                }
            };
            OWLOntologyMerger merger = new OWLOntologyMerger(provider);
            try {
                root = merger.createMergedOntology(OWLManager.createOWLOntologyManager(), iri);
            } catch (OWLOntologyCreationException e) {
                log.error("Failed to merge imports for ontology " + iri, e);
                root = null;
            }
        } else {
            // Add the import declarations for directly managed ontologies.
            List<OWLOntologyChange> changes = new LinkedList<OWLOntologyChange>();
            OWLDataFactory df = ontologyManager.getOWLDataFactory();
            String base = prefix + getID();
            for (int i = 0; i < backwardPathLength; i++)
                base = URIUtils.upOne(URI.create(base)).toString();
            base += "/";
            // The key set of managedOntologies contains the ontology IRIs, not their storage keys.
            for (OWLOntologyID ontologyId : managedOntologies) {
                // XXX some day the versionIRI will be the only physical reference for the ontology
                org.semanticweb.owlapi.model.IRI physIRI = org.semanticweb.owlapi.model.IRI.create(base + OntologyUtils.encode(ontologyId));
                changes.add(new AddImport(root, df.getOWLImportsDeclaration(physIRI)));
            }
            ontologyManager.applyChanges(changes);
        }
    }
    log.debug("OWL export of {} completed in {} ms.", getID(), System.currentTimeMillis() - before);
    return root;
}
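The merge branches above all rely on the same machinery: an OWLOntologySetProvider hands the ontologies to an OWLOntologyMerger, which copies their axioms into a fresh ontology. A minimal sketch of that machinery on its own, assuming the ontologies are already loaded and the result IRI is supplied by the caller:

import java.util.Set;

import org.semanticweb.owlapi.apibinding.OWLManager;
import org.semanticweb.owlapi.model.IRI;
import org.semanticweb.owlapi.model.OWLOntology;
import org.semanticweb.owlapi.model.OWLOntologyCreationException;
import org.semanticweb.owlapi.model.OWLOntologySetProvider;
import org.semanticweb.owlapi.util.OWLOntologyMerger;

public final class MergeSketch {

    /** Merge a set of already-loaded ontologies into a single new ontology with the given IRI. */
    public static OWLOntology merge(final Set<OWLOntology> ontologies, IRI resultIri)
            throws OWLOntologyCreationException {
        // The merger pulls its input from this provider.
        OWLOntologySetProvider provider = new OWLOntologySetProvider() {

            @Override
            public Set<OWLOntology> getOntologies() {
                return ontologies;
            }
        };
        OWLOntologyMerger merger = new OWLOntologyMerger(provider);
        // The merged ontology is created inside the manager passed here.
        return merger.createMergedOntology(OWLManager.createOWLOntologyManager(), resultIri);
    }
}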
Use of org.semanticweb.owlapi.model.OWLOntologyChange in project stanbol by apache.
Class ScopeImpl, method exportToOWLOntology:
/**
 * Get an OWL API {@link OWLOntology} representation of the scope.
 *
 * @param merge
 *            if true the core and custom spaces will be recursively merged with the scope ontology,
 *            otherwise owl:imports statements will be added.
 * @return the OWL representation of the scope.
 */
protected OWLOntology exportToOWLOntology(boolean merge, org.semanticweb.owlapi.model.IRI universalPrefix) {
    // if (merge) throw new UnsupportedOperationException(
    // "Ontology merging only implemented for managed ontologies, not for collectors. "
    // + "Please set merge parameter to false.");
    // Create an ontology manager on the fly. We don't really need a permanent one.
    OWLOntologyManager mgr = OWLManager.createOWLOntologyManager();
    OWLDataFactory df = mgr.getOWLDataFactory();
    OWLOntology ont = null;
    try {
        if (merge) {
            final Set<OWLOntology> set = new HashSet<OWLOntology>();
            log.debug("Merging custom space of {}.", getID());
            set.add(this.getCustomSpace().export(OWLOntology.class, merge));
            log.debug("Merging core space of {}.", getID());
            set.add(this.getCoreSpace().export(OWLOntology.class, merge));
            OWLOntologySetProvider provider = new OWLOntologySetProvider() {

                @Override
                public Set<OWLOntology> getOntologies() {
                    return set;
                }
            };
            OWLOntologyMerger merger = new OWLOntologyMerger(provider);
            try {
                ont = merger.createMergedOntology(OWLManager.createOWLOntologyManager(), org.semanticweb.owlapi.model.IRI.create(getDefaultNamespace() + getID()));
            } catch (OWLOntologyCreationException e) {
                log.error("Failed to merge imports for ontology.", e);
                ont = null;
            }
        } else {
            // The root ontology ID is in the form [namespace][scopeId]
            ont = mgr.createOntology(org.semanticweb.owlapi.model.IRI.create(universalPrefix + getID()));
            List<OWLOntologyChange> additions = new LinkedList<OWLOntologyChange>();
            // Add the import statement for the custom space, if existing and not empty
            OntologySpace spc = getCustomSpace();
            if (spc != null && spc.listManagedOntologies().size() > 0) {
                org.semanticweb.owlapi.model.IRI spaceIri = org.semanticweb.owlapi.model.IRI.create(universalPrefix + spc.getID());
                additions.add(new AddImport(ont, df.getOWLImportsDeclaration(spaceIri)));
            }
            // Add the import statement for the core space, if existing and not empty
            spc = getCoreSpace();
            if (spc != null && spc.listManagedOntologies().size() > 0) {
                org.semanticweb.owlapi.model.IRI spaceIri = org.semanticweb.owlapi.model.IRI.create(universalPrefix + spc.getID());
                additions.add(new AddImport(ont, df.getOWLImportsDeclaration(spaceIri)));
            }
            mgr.applyChanges(additions);
        }
    } catch (OWLOntologyCreationException e) {
        log.error("Failed to generate an OWL form of scope " + getID(), e);
        ont = null;
    }
    return ont;
}
Use of org.semanticweb.owlapi.model.OWLOntologyChange in project stanbol by apache.
Class ClerezzaOntologyProvider, method toOWLOntology:
/**
 *
 * @param graphName
 * @param forceMerge
 *            if set to false, the selected import management policy will be applied.
 * @return
 * @throws OWLOntologyCreationException
 */
protected OWLOntology toOWLOntology(IRI graphName, boolean forceMerge) throws OWLOntologyCreationException {
    log.debug("Exporting graph to OWLOntology");
    log.debug(" -- ImmutableGraph name : {}", graphName);
    OWLOntologyManager mgr = OWLManager.createOWLOntologyManager();
    // Never try to import
    mgr.addIRIMapper(new PhonyIRIMapper(Collections.<org.semanticweb.owlapi.model.IRI>emptySet()));
    Set<OWLOntologyID> loaded = new HashSet<OWLOntologyID>();
    Graph graph = store.getGraph(graphName);
    IRI ontologyId = null;
    // Get the id of this ontology.
    Iterator<Triple> itt = graph.filter(null, RDF.type, OWL.Ontology);
    if (itt.hasNext()) {
        BlankNodeOrIRI nl = itt.next().getSubject();
        if (nl instanceof IRI)
            ontologyId = (IRI) nl;
    }
    List<OWLOntologyID> revImps = new Stack<OWLOntologyID>();
    List<OWLOntologyID> lvl1 = new Stack<OWLOntologyID>();
    fillImportsReverse(keymap.getReverseMapping(graphName), revImps, lvl1);
    // If not set to merge (either by policy or by force), adopt the set import policy.
    if (!forceMerge && !ImportManagementPolicy.MERGE.equals(getImportManagementPolicy())) {
        OWLOntology o = OWLAPIToClerezzaConverter.clerezzaGraphToOWLOntology(graph, mgr);
        // TODO make it not flat.
        // Examining the reverse imports stack will flatten all imports.
        List<OWLOntologyChange> changes = new ArrayList<OWLOntologyChange>();
        OWLDataFactory df = OWLManager.getOWLDataFactory();
        List<OWLOntologyID> listToUse;
        switch (getImportManagementPolicy()) {
            case FLATTEN:
                listToUse = revImps;
                break;
            case PRESERVE:
                listToUse = lvl1;
                break;
            default:
                listToUse = lvl1;
                break;
        }
        for (OWLOntologyID ref : listToUse) {
            if (!loaded.contains(ref) && !ref.equals(keymap.getReverseMapping(graphName))) {
                changes.add(new AddImport(o, df.getOWLImportsDeclaration(ref.getOntologyIRI())));
                loaded.add(ref);
            }
        }
        o.getOWLOntologyManager().applyChanges(changes);
        return o;
    } else {
        // If there is just the root ontology, convert it straight away.
        if (revImps.size() == 1 && revImps.contains(graphName)) {
            OWLOntology o = OWLAPIToClerezzaConverter.clerezzaGraphToOWLOntology(graph, mgr);
            return o;
        }
        // FIXME when there's more than one ontology, this way of merging them seems inefficient...
        Graph tempGraph = new IndexedGraph();
        // The set of triples that will be excluded from the merge
        Set<Triple> exclusions = new HashSet<Triple>();
        // Examine all reverse imports
        for (OWLOntologyID ref : revImps) {
            if (!loaded.contains(ref)) {
                // Get the triples
                // store.getTriples(ref);
                Graph imported = getStoredOntology(getKey(ref), Graph.class, false);
                // For each owl:Ontology
                Iterator<Triple> remove = imported.filter(null, RDF.type, OWL.Ontology);
                while (remove.hasNext()) {
                    BlankNodeOrIRI subj = remove.next().getSubject();
                    /*
                     * If it's not the root ontology, trash all its triples. If the root ontology is
                     * anonymous, all ontology annotations are to be trashed without distinction.
                     */
                    if (ontologyId == null || !subj.equals(ontologyId)) {
                        Iterator<Triple> it = imported.filter(subj, null, null);
                        while (it.hasNext()) {
                            Triple t = it.next();
                            exclusions.add(t);
                        }
                    }
                }
                Iterator<Triple> it = imported.iterator();
                while (it.hasNext()) {
                    Triple t = it.next();
                    if (!exclusions.contains(t))
                        tempGraph.add(t);
                }
                loaded.add(ref);
            }
        }
        // online.
        return OWLAPIToClerezzaConverter.clerezzaGraphToOWLOntology(tempGraph, mgr);
    }
}
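The merge branch of toOWLOntology works at the triple level: it copies each imported graph into a temporary graph while dropping the ontology-annotation triples of every owl:Ontology node other than the root, so the merged result keeps a single ontology header. A rough sketch of that exclusion step, assuming the Clerezza commons-rdf API used above; package names may differ between Clerezza versions:

import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;

import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
import org.apache.clerezza.commons.rdf.Graph;
import org.apache.clerezza.commons.rdf.IRI;
import org.apache.clerezza.commons.rdf.Triple;
import org.apache.clerezza.rdf.ontologies.OWL;
import org.apache.clerezza.rdf.ontologies.RDF;

public final class GraphMergeSketch {

    /**
     * Copy every triple of 'imported' into 'target', except the triples whose subject is an
     * owl:Ontology node other than the root ontology (rootId may be null for an anonymous root).
     */
    public static void copyExcludingOntologyHeaders(Graph imported, Graph target, IRI rootId) {
        Set<Triple> exclusions = new HashSet<Triple>();
        // Find every owl:Ontology node in the imported graph.
        Iterator<Triple> headers = imported.filter(null, RDF.type, OWL.Ontology);
        while (headers.hasNext()) {
            BlankNodeOrIRI subj = headers.next().getSubject();
            if (rootId == null || !subj.equals(rootId)) {
                // Mark all triples about this non-root ontology node for exclusion.
                Iterator<Triple> it = imported.filter(subj, null, null);
                while (it.hasNext()) {
                    exclusions.add(it.next());
                }
            }
        }
        // Copy everything that was not excluded.
        Iterator<Triple> it = imported.iterator();
        while (it.hasNext()) {
            Triple t = it.next();
            if (!exclusions.contains(t)) {
                target.add(t);
            }
        }
    }
}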