use of org.apache.clerezza.commons.rdf.ImmutableGraph in project stanbol by apache.
the class GenericEnhancerUiResource method getExecutionNodes.
/**
 * Getter for the executionNodes.
 *
 * @return the {@link ExecutionNode}s of the chain's execution plan, or <code>null</code> if the
 *         execution plan could not be obtained.
 */
public Set<ExecutionNode> getExecutionNodes() {
    if (_executionNodes == null) {
        ImmutableGraph ep;
        try {
            ep = chain.getExecutionPlan();
        } catch (ChainException e) {
            ep = null;
        }
        if (ep != null) {
            _executionNodes = new LinkedHashSet<ExecutionNode>();
            Set<BlankNodeOrIRI> processed = new HashSet<BlankNodeOrIRI>();
            Set<BlankNodeOrIRI> next;
            do {
                next = ExecutionPlanHelper.getExecutable(ep, processed);
                for (BlankNodeOrIRI node : next) {
                    _executionNodes.add(new ExecutionNode(ep, node));
                }
                processed.addAll(next);
            } while (!next.isEmpty());
        }
    }
    return _executionNodes;
}
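The do/while above repeatedly asks ExecutionPlanHelper for the nodes whose dependencies are already in the processed set, so the resulting LinkedHashSet reflects a valid execution order. A minimal, self-contained sketch of that fixpoint traversal, using a plain dependency map instead of the execution-plan graph (the class, method names and sample engine ids are illustrative, not the Stanbol API):

import java.util.*;

public class ExecutionOrderSketch {

    /** Returns the not-yet-processed nodes whose dependencies are all contained in 'processed'. */
    static Set<String> getExecutable(Map<String, Set<String>> dependsOn, Set<String> processed) {
        Set<String> executable = new LinkedHashSet<String>();
        for (Map.Entry<String, Set<String>> e : dependsOn.entrySet()) {
            if (!processed.contains(e.getKey()) && processed.containsAll(e.getValue())) {
                executable.add(e.getKey());
            }
        }
        return executable;
    }

    public static void main(String[] args) {
        Map<String, Set<String>> dependsOn = new LinkedHashMap<String, Set<String>>();
        dependsOn.put("langdetect", Collections.<String> emptySet());
        dependsOn.put("ner", Collections.singleton("langdetect"));
        dependsOn.put("linking", Collections.singleton("ner"));

        Set<String> processed = new HashSet<String>();
        List<String> order = new ArrayList<String>();
        Set<String> next;
        do {
            next = getExecutable(dependsOn, processed);
            order.addAll(next);   // same role as creating the ExecutionNode instances
            processed.addAll(next);
        } while (!next.isEmpty());
        System.out.println(order); // [langdetect, ner, linking]
    }
}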
use of org.apache.clerezza.commons.rdf.ImmutableGraph in project stanbol by apache.
the class ScopeResource method asOntologyGraph.
@GET
@Produces(value = { APPLICATION_JSON, N3, N_TRIPLE, RDF_JSON })
public Response asOntologyGraph(@PathParam("scopeid") String scopeid,
                                @DefaultValue("false") @QueryParam("merge") boolean merge,
                                @Context HttpHeaders headers) {
    scope = onm.getScope(scopeid);
    if (scope == null)
        return Response.status(NOT_FOUND).build();
    IRI prefix = IRI.create(getPublicBaseUri() + "ontonet/ontology/");
    // Export to Clerezza ImmutableGraph, which can be rendered as JSON-LD.
    ResponseBuilder rb = Response.ok(scope.export(ImmutableGraph.class, merge, prefix));
    // addCORSOrigin(servletContext, rb, headers);
    return rb.build();
}
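A hedged client-side sketch of calling this endpoint, assuming a local Stanbol instance at http://localhost:8080 with the scope resource mounted under /ontonet/ontology/ (as suggested by the prefix above), a scope named "myScope", and Java 11+ for java.net.http:

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class ScopeExportClient {
    public static void main(String[] args) throws Exception {
        HttpClient client = HttpClient.newHttpClient();
        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create("http://localhost:8080/ontonet/ontology/myScope?merge=false"))
                .header("Accept", "application/rdf+json") // any of the @Produces types above should do
                .GET()
                .build();
        HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString());
        if (response.statusCode() == 404) {
            System.out.println("scope not found");        // mirrors Response.status(NOT_FOUND)
        } else {
            System.out.println(response.body());          // the serialized ImmutableGraph
        }
    }
}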
use of org.apache.clerezza.commons.rdf.ImmutableGraph in project stanbol by apache.
the class ScopeResource method managedOntologyGetGraph.
/**
 * Gets the ontology with the given identifier in its version managed by the scope.
 *
 * @param scopeid
 *            the scope identifier.
 * @param ontologyId
 *            the ontology identifier.
 * @param uriInfo
 *            the request URI information.
 * @param headers
 *            the HTTP request headers.
 * @return the requested managed ontology, or {@link Status#NOT_FOUND} if either the scope does not
 *         exist, or if the ontology either does not exist or is not managed.
 */
@GET
@Path("/{ontologyId:.+}")
@Produces(value = { APPLICATION_JSON, N3, N_TRIPLE, RDF_JSON })
public Response managedOntologyGetGraph(@PathParam("scopeid") String scopeid,
                                        @PathParam("ontologyId") String ontologyId,
                                        @DefaultValue("false") @QueryParam("merge") boolean merge,
                                        @Context UriInfo uriInfo,
                                        @Context HttpHeaders headers) {
    log.debug("Absolute URL Path {}", uriInfo.getRequestUri());
    log.debug("Ontology ID {}", ontologyId);
    ResponseBuilder rb;
    scope = onm.getScope(scopeid);
    if (scope == null)
        rb = Response.status(NOT_FOUND);
    else {
        IRI prefix = IRI.create(getPublicBaseUri() + "ontonet/ontology/");
        ImmutableGraph o = null;
        OWLOntologyID id = OntologyUtils.decode(ontologyId);
        OntologySpace spc = scope.getCustomSpace();
        if (spc != null && spc.hasOntology(id)) {
            o = spc.getOntology(id, ImmutableGraph.class, merge, prefix);
        } else {
            spc = scope.getCoreSpace();
            if (spc != null && spc.hasOntology(id))
                o = spc.getOntology(id, ImmutableGraph.class, merge, prefix);
        }
        if (o == null)
            rb = Response.status(NOT_FOUND);
        else
            rb = Response.ok(o);
    }
    // addCORSOrigin(servletContext, rb, headers);
    return rb.build();
}
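The lookup order matters here: the custom space is consulted first, the core space only as a fallback, and a miss in both maps to 404. An illustrative sketch of that pattern, with plain maps standing in for the OntologySpace instances (not the Stanbol API):

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class SpaceLookupSketch {

    /** Each "space" is just a map of ontology id -> serialized graph. */
    static String lookup(List<Map<String, String>> orderedSpaces, String ontologyId) {
        for (Map<String, String> spc : orderedSpaces) {
            if (spc != null && spc.containsKey(ontologyId)) { // spc.hasOntology(id)
                return spc.get(ontologyId);                   // spc.getOntology(id, ...)
            }
        }
        return null;                                          // mapped to 404 NOT_FOUND above
    }

    public static void main(String[] args) {
        Map<String, String> customSpace = new HashMap<String, String>();
        customSpace.put("foaf", "<custom copy of foaf>");
        Map<String, String> coreSpace = new HashMap<String, String>();
        coreSpace.put("foaf", "<core copy of foaf>");
        coreSpace.put("skos", "<skos>");
        // Custom space wins when both spaces manage the same ontology.
        System.out.println(lookup(Arrays.asList(customSpace, coreSpace), "foaf")); // custom copy
        System.out.println(lookup(Arrays.asList(customSpace, coreSpace), "dc"));   // null -> 404
    }
}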
use of org.apache.clerezza.commons.rdf.ImmutableGraph in project stanbol by apache.
the class SessionResource method managedOntologyGetGraph.
/**
 * Gets the ontology with the given identifier in its version managed by the session.
 *
 * @param sessionId
 *            the session identifier.
 * @param ontologyId
 *            the ontology identifier.
 * @param uriInfo
 *            the request URI information.
 * @param headers
 *            the HTTP request headers.
 * @return the requested managed ontology, or {@link Status#NOT_FOUND} if either the session does not
 *         exist, or if the ontology either does not exist or is not managed.
 */
@GET
@Path(value = "/{ontologyId:.+}")
@Produces(value = { APPLICATION_JSON, N3, N_TRIPLE, RDF_JSON })
public Response managedOntologyGetGraph(@PathParam("id") String sessionId,
                                        @PathParam("ontologyId") String ontologyId,
                                        @DefaultValue("false") @QueryParam("merge") boolean merge,
                                        // @Context UriInfo uriInfo,
                                        @Context HttpHeaders headers) {
    session = sesMgr.getSession(sessionId);
    if (session == null)
        return Response.status(NOT_FOUND).build();
    IRI prefix = IRI.create(getPublicBaseUri() + "ontonet/session/");
    ImmutableGraph o = session.getOntology(OntologyUtils.decode(ontologyId), ImmutableGraph.class, merge, prefix);
    ResponseBuilder rb = (o != null) ? Response.ok(o) : Response.status(NOT_FOUND);
    // addCORSOrigin(servletContext, rb, headers);
    return rb.build();
}
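A hedged usage sketch for this endpoint, assuming Stanbol at http://localhost:8080 with the session resource mounted under /ontonet/session/ (as suggested by the prefix above), a session with id "s1", an ontology published under the encoded id "foaf", and Java 11+:

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class SessionOntologyClient {
    public static void main(String[] args) throws Exception {
        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create("http://localhost:8080/ontonet/session/s1/foaf?merge=true"))
                .header("Accept", "application/json")
                .GET()
                .build();
        HttpResponse<String> response = HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofString());
        // A 404 covers both a missing session and a missing/unmanaged ontology (see the Javadoc above).
        System.out.println(response.statusCode() == 404 ? "not found" : response.body());
    }
}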
use of org.apache.clerezza.commons.rdf.ImmutableGraph in project stanbol by apache.
the class AbstractOntologyCollectorImpl method exportToGraph.
/**
 * This method has no conversion calls, so it can be invoked by subclasses that wish to modify the
 * returned graph afterwards.
 *
 * @param merge
 *            whether directly managed ontologies should be merged (one level down) into the export.
 * @param prefix
 *            the base IRI used to mint the ontology IRI and the import targets.
 * @return the exported Clerezza graph.
 */
protected Graph exportToGraph(boolean merge, org.semanticweb.owlapi.model.IRI prefix) {
    // if (merge) throw new UnsupportedOperationException(
    // "Merge not implemented yet for Clerezza triple collections.");
    long before = System.currentTimeMillis();
    // No need to store, give it a name, or anything.
    Graph root = new SimpleGraph();
    IRI iri = new IRI(prefix + _id);
    // Add the import declarations for directly managed ontologies.
    if (root != null) {
        // Set the ontology ID
        root.add(new TripleImpl(iri, RDF.type, OWL.Ontology));
        if (merge) {
            log.warn("Merging of Clerezza triple collections is only implemented one level down. Import statements will be preserved for further levels.");
            Iterator<Triple> it;
            Set<RDFTerm> importTargets = new HashSet<RDFTerm>();
            for (OWLOntologyID ontologyId : managedOntologies) {
                ImmutableGraph g = getOntology(ontologyId, ImmutableGraph.class, false);
                root.addAll(g);
                it = g.filter(null, OWL.imports, null);
                while (it.hasNext()) {
                    org.semanticweb.owlapi.model.IRI tgt;
                    RDFTerm r = it.next().getObject();
                    try {
                        if (r instanceof IRI)
                            tgt = org.semanticweb.owlapi.model.IRI.create(((IRI) r).getUnicodeString());
                        else if (r instanceof Literal)
                            tgt = org.semanticweb.owlapi.model.IRI.create(((Literal) r).getLexicalForm());
                        else
                            tgt = org.semanticweb.owlapi.model.IRI.create(r.toString());
                        tgt = URIUtils.sanitize(tgt);
                        importTargets.add(new IRI(tgt.toString()));
                    } catch (Exception ex) {
                        log.error("FAILED to obtain import target from resource {}", r);
                        continue;
                    }
                }
                it = g.filter(null, RDF.type, OWL.Ontology);
                while (it.hasNext()) {
                    BlankNodeOrIRI ontology = it.next().getSubject();
                    log.debug("Removing all triples related to {} from {}", ontology, iri);
                    Iterator<Triple> it2 = g.filter(ontology, null, null);
                    while (it2.hasNext()) root.remove(it2.next());
                }
                /*
                 * Reinstate import statements, though. If imported ontologies were not merged earlier, we
                 * are not doing it now anyway.
                 */
                for (RDFTerm target : importTargets) root.add(new TripleImpl(iri, OWL.imports, target));
            }
        } else {
            String base = prefix + getID();
            for (int i = 0; i < backwardPathLength; i++) base = URIUtils.upOne(URI.create(base)).toString();
            base += "/";
            // The key set of managedOntologies contains the ontology IRIs, not their storage keys.
            for (OWLOntologyID ontologyId : managedOntologies) {
                org.semanticweb.owlapi.model.IRI physIRI =
                    // .create(base + ontologyId.getVersionIRI()));
                    org.semanticweb.owlapi.model.IRI.create(base + OntologyUtils.encode(ontologyId));
                root.add(new TripleImpl(iri, OWL.imports, new IRI(physIRI.toString())));
            }
        }
        log.debug("Clerezza export of {} completed in {} ms.", getID(), System.currentTimeMillis() - before);
    }
    return root;
}
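In the non-merge branch, the exported graph contains only the ontology type triple plus one owl:imports statement per managed ontology. A minimal sketch of that shape; the import paths for the implementation classes assume the usual Clerezza commons-rdf layout, and the base URI and ontology ids are illustrative:

import java.util.Arrays;
import java.util.List;

import org.apache.clerezza.commons.rdf.Graph;
import org.apache.clerezza.commons.rdf.IRI;
import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;

public class ImportsOnlyExportSketch {

    // Vocabulary terms spelled out so the sketch does not depend on the Clerezza ontologies module.
    private static final IRI RDF_TYPE = new IRI("http://www.w3.org/1999/02/22-rdf-syntax-ns#type");
    private static final IRI OWL_ONTOLOGY = new IRI("http://www.w3.org/2002/07/owl#Ontology");
    private static final IRI OWL_IMPORTS = new IRI("http://www.w3.org/2002/07/owl#imports");

    static Graph exportImportsOnly(String base, String collectorId, List<String> encodedOntologyIds) {
        Graph root = new SimpleGraph();
        IRI iri = new IRI(base + collectorId);
        root.add(new TripleImpl(iri, RDF_TYPE, OWL_ONTOLOGY));
        for (String encoded : encodedOntologyIds) {
            // One owl:imports per managed ontology, pointing at its public URI, as in exportToGraph().
            root.add(new TripleImpl(iri, OWL_IMPORTS, new IRI(base + encoded)));
        }
        return root;
    }

    public static void main(String[] args) {
        Graph g = exportImportsOnly("http://localhost:8080/ontonet/ontology/", "myScope",
            Arrays.asList("foaf", "skos"));
        System.out.println(g.size() + " triples"); // 1 rdf:type + 2 owl:imports = 3 triples
    }
}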