use of org.apache.clerezza.commons.rdf.BlankNodeOrIRI in project stanbol by apache.
the class TestMetaxaCore method printTriples.
/**
* This prints out the Stanbol Enhancer triples that would be created for the metadata
* contained in the given model.
*
* @param m a {@link Model}
*
* @return an {@code int} with the number of added triples
*/
private int printTriples(Model m) {
    int tripleCounter = 0;
    HashMap<BlankNode, BlankNode> blankNodeMap = new HashMap<BlankNode, BlankNode>();
    ClosableIterator<Statement> it = m.iterator();
    while (it.hasNext()) {
        Statement oneStmt = it.next();
        BlankNodeOrIRI subject = (BlankNodeOrIRI) MetaxaEngine.asClerezzaResource(oneStmt.getSubject(), blankNodeMap);
        IRI predicate = (IRI) MetaxaEngine.asClerezzaResource(oneStmt.getPredicate(), blankNodeMap);
        RDFTerm object = MetaxaEngine.asClerezzaResource(oneStmt.getObject(), blankNodeMap);
        if (null != subject && null != predicate && null != object) {
            Triple t = new TripleImpl(subject, predicate, object);
            LOG.debug("adding " + t);
            tripleCounter++;
        } else {
            LOG.debug("skipped " + oneStmt.toString());
        }
    }
    it.close();
    return tripleCounter;
}
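The blankNodeMap shared across the three asClerezzaResource calls guarantees that the same source blank node always converts to the same Clerezza blank node, so statements that share an anonymous subject still share it after conversion. A minimal, self-contained sketch of that identity-preserving pattern; SourceNode and TargetNode are hypothetical stand-ins, not the Stanbol or Clerezza API:

import java.util.HashMap;
import java.util.Map;

public class BlankNodeMapping {

    // Hypothetical stand-in for the source API's blank node type.
    static final class SourceNode {
        final String internalId;
        SourceNode(String id) { this.internalId = id; }
        @Override public boolean equals(Object o) {
            return o instanceof SourceNode && ((SourceNode) o).internalId.equals(internalId);
        }
        @Override public int hashCode() { return internalId.hashCode(); }
    }

    // Hypothetical stand-in for the target API's blank node type.
    static final class TargetNode { }

    private final Map<SourceNode, TargetNode> blankNodeMap = new HashMap<>();

    // Same source node in -> same target node out, for the whole conversion run.
    TargetNode asTargetResource(SourceNode src) {
        return blankNodeMap.computeIfAbsent(src, k -> new TargetNode());
    }

    public static void main(String[] args) {
        BlankNodeMapping m = new BlankNodeMapping();
        // Two statements mentioning the same blank node resolve to one target node.
        TargetNode t1 = m.asTargetResource(new SourceNode("_:b1"));
        TargetNode t2 = m.asTargetResource(new SourceNode("_:b1"));
        System.out.println(t1 == t2); // true: shared structure survives the conversion
    }
}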
use of org.apache.clerezza.commons.rdf.BlankNodeOrIRI in project stanbol by apache.
the class AbstractOntologyCollectorImpl method exportToGraph.
/**
 * This method has no conversion calls, so it can be invoked by subclasses that wish to modify it
 * afterwards.
 *
 * @param merge
 *            if true, directly managed ontologies are merged into the result (one level down);
 *            otherwise owl:imports statements are added for them.
 * @return the RDF representation of this collector as a modifiable graph.
 */
protected Graph exportToGraph(boolean merge, org.semanticweb.owlapi.model.IRI prefix) {
    // if (merge) throw new UnsupportedOperationException(
    //     "Merge not implemented yet for Clerezza triple collections.");
    long before = System.currentTimeMillis();
    // No need to store, give it a name, or anything.
    Graph root = new SimpleGraph();
    IRI iri = new IRI(prefix + _id);
    // Add the import declarations for directly managed ontologies.
    if (root != null) {
        // Set the ontology ID
        root.add(new TripleImpl(iri, RDF.type, OWL.Ontology));
        if (merge) {
            log.warn("Merging of Clerezza triple collections is only implemented one level down. Import statements will be preserved for further levels.");
            Iterator<Triple> it;
            Set<RDFTerm> importTargets = new HashSet<RDFTerm>();
            for (OWLOntologyID ontologyId : managedOntologies) {
                ImmutableGraph g = getOntology(ontologyId, ImmutableGraph.class, false);
                root.addAll(g);
                // Collect the import targets declared by this managed ontology.
                it = g.filter(null, OWL.imports, null);
                while (it.hasNext()) {
                    org.semanticweb.owlapi.model.IRI tgt;
                    RDFTerm r = it.next().getObject();
                    try {
                        if (r instanceof IRI)
                            tgt = org.semanticweb.owlapi.model.IRI.create(((IRI) r).getUnicodeString());
                        else if (r instanceof Literal)
                            tgt = org.semanticweb.owlapi.model.IRI.create(((Literal) r).getLexicalForm());
                        else
                            tgt = org.semanticweb.owlapi.model.IRI.create(r.toString());
                        tgt = URIUtils.sanitize(tgt);
                        importTargets.add(new IRI(tgt.toString()));
                    } catch (Exception ex) {
                        log.error("FAILED to obtain import target from resource {}", r);
                        continue;
                    }
                }
                // Drop the merged ontology's own header triples (subjects typed owl:Ontology).
                it = g.filter(null, RDF.type, OWL.Ontology);
                while (it.hasNext()) {
                    BlankNodeOrIRI ontology = it.next().getSubject();
                    log.debug("Removing all triples related to {} from {}", ontology, iri);
                    Iterator<Triple> it2 = g.filter(ontology, null, null);
                    while (it2.hasNext()) root.remove(it2.next());
                }
                /*
                 * Reinstate import statements, though. If imported ontologies were not merged earlier, we
                 * are not doing it now anyway.
                 */
                for (RDFTerm target : importTargets) root.add(new TripleImpl(iri, OWL.imports, target));
            }
        } else {
            String base = prefix + getID();
            for (int i = 0; i < backwardPathLength; i++) base = URIUtils.upOne(URI.create(base)).toString();
            base += "/";
            // The key set of managedOntologies contains the ontology IRIs, not their storage keys.
            for (OWLOntologyID ontologyId : managedOntologies) {
                // Was: .create(base + ontologyId.getVersionIRI())
                org.semanticweb.owlapi.model.IRI physIRI =
                        org.semanticweb.owlapi.model.IRI.create(base + OntologyUtils.encode(ontologyId));
                root.add(new TripleImpl(iri, OWL.imports, new IRI(physIRI.toString())));
            }
        }
        log.debug("Clerezza export of {} completed in {} ms.", getID(), System.currentTimeMillis() - before);
    }
    return root;
}
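In the non-merge branch, each managed ontology is referenced by a physical IRI minted from a base path that is first walked up backwardPathLength segments. A small sketch of that path arithmetic; the upOne helper here is a hypothetical stand-in mimicking what URIUtils.upOne is used for, not the actual Stanbol implementation:

import java.net.URI;

public class ImportBaseDemo {

    // Hypothetical stand-in for URIUtils.upOne: drop the last path segment.
    static URI upOne(URI uri) {
        String s = uri.toString();
        if (s.endsWith("/")) s = s.substring(0, s.length() - 1);
        return URI.create(s.substring(0, s.lastIndexOf('/')));
    }

    public static void main(String[] args) {
        // Base URI and backwardPathLength are assumed values, for illustration only.
        String base = "http://localhost:8080/ontonet/ontology/myscope/custom";
        int backwardPathLength = 2;
        for (int i = 0; i < backwardPathLength; i++) base = upOne(URI.create(base)).toString();
        base += "/";
        // Import targets are then minted as base + encoded ontology ID.
        System.out.println(base); // http://localhost:8080/ontonet/ontology/
    }
}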
use of org.apache.clerezza.commons.rdf.BlankNodeOrIRI in project stanbol by apache.
the class CustomSpaceImpl method getOntologyAsGraph.
@Override
protected Graph getOntologyAsGraph(OWLOntologyID ontologyId, boolean merge, org.semanticweb.owlapi.model.IRI universalPrefix) {
    Graph o = super.getOntologyAsGraph(ontologyId, merge, universalPrefix);
    switch (getConnectivityPolicy()) {
        case LOOSE:
            break;
        case TIGHT:
            // Expected to be a singleton
            Set<BlankNodeOrIRI> onts = new HashSet<BlankNodeOrIRI>();
            synchronized (o) {
                Iterator<Triple> it = o.filter(null, RDF.type, OWL.Ontology);
                while (it.hasNext()) onts.add(it.next().getSubject());
            }
            String s = getID();
            // strip "custom"
            s = s.substring(0, s.indexOf(SUFFIX));
            // concatenate "core"
            s += SpaceType.CORE.getIRISuffix();
            IRI target = new IRI(universalPrefix + s);
            for (BlankNodeOrIRI subject : onts) o.add(new TripleImpl(subject, OWL.imports, target));
            break;
        default:
            break;
    }
    return o;
}
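The TIGHT policy rewrites the space ID from its custom suffix to the core suffix before minting the import target, so every ontology in the custom space ends up importing the corresponding core space. A string-level sketch of that rewrite; the suffix values are assumptions for illustration, the real ones come from SpaceType:

public class SuffixRewriteDemo {
    public static void main(String[] args) {
        String SUFFIX = "custom";     // assumed value of the SUFFIX constant
        String coreSuffix = "core";   // assumed value of SpaceType.CORE.getIRISuffix()
        String id = "myscope/custom";
        // Strip "custom", concatenate "core" -- mirroring getOntologyAsGraph above.
        String s = id.substring(0, id.indexOf(SUFFIX)) + coreSuffix;
        System.out.println(s); // myscope/core
    }
}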
use of org.apache.clerezza.commons.rdf.BlankNodeOrIRI in project stanbol by apache.
the class ScopeImpl method exportToGraph.
/**
* Get a Clerezza {@link Graph} representation of the scope.
*
* @param merge
* if true the core and custom spaces will be recursively merged with the scope graph,
* otherwise owl:imports statements will be added.
* @return the RDF representation of the scope as a modifiable graph.
*/
protected Graph exportToGraph(boolean merge, org.semanticweb.owlapi.model.IRI universalPrefix) {
    // No need to store, give it a name, or anything.
    Graph root = new SimpleGraph();
    IRI iri = new IRI(universalPrefix + getID());
    if (root != null) {
        // Set the ontology ID
        root.add(new TripleImpl(iri, RDF.type, OWL.Ontology));
        if (merge) {
            ImmutableGraph custom, core;
            // Get the subjects of "bad" triples (those with subjects of type owl:Ontology).
            Iterator<Triple> it;
            Set<BlankNodeOrIRI> ontologies = new HashSet<BlankNodeOrIRI>();
            Set<RDFTerm> importTargets = new HashSet<RDFTerm>();
            custom = this.getCustomSpace().export(ImmutableGraph.class, merge);
            // root.addAll(space);
            it = custom.filter(null, RDF.type, OWL.Ontology);
            while (it.hasNext()) ontologies.add(it.next().getSubject());
            it = custom.filter(null, OWL.imports, null);
            while (it.hasNext()) importTargets.add(it.next().getObject());
            core = this.getCoreSpace().export(ImmutableGraph.class, merge);
            // root.addAll(space);
            it = core.filter(null, RDF.type, OWL.Ontology);
            while (it.hasNext()) ontologies.add(it.next().getSubject());
            it = core.filter(null, OWL.imports, null);
            while (it.hasNext()) importTargets.add(it.next().getObject());
            // Make sure the scope itself is not in the "bad" subjects.
            ontologies.remove(iri);
            for (BlankNodeOrIRI nl : ontologies) log.debug("{} -related triples will not be added to {}", nl, iri);
            // Merge the two spaces, skipping the "bad" triples.
            log.debug("Merging custom space of {}.", getID());
            for (Triple t : custom) if (!ontologies.contains(t.getSubject())) root.add(t);
            log.debug("Merging core space of {}.", getID());
            for (Triple t : core) if (!ontologies.contains(t.getSubject())) root.add(t);
            /*
             * Reinstate import statements, though. If imported ontologies were not merged earlier, we are
             * not doing it now anyway.
             */
            for (RDFTerm target : importTargets) root.add(new TripleImpl(iri, OWL.imports, target));
        } else {
            IRI physIRI = new IRI(universalPrefix.toString() + this.getID() + "/" + SpaceType.CUSTOM.getIRISuffix());
            root.add(new TripleImpl(iri, OWL.imports, physIRI));
            physIRI = new IRI(universalPrefix.toString() + this.getID() + "/" + SpaceType.CORE.getIRISuffix());
            root.add(new TripleImpl(iri, OWL.imports, physIRI));
        }
    }
    return root;
}
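The merge branch copies every triple from both spaces except those whose subject was flagged as an ontology header, then re-adds the collected import targets under the scope's own IRI. A compact sketch of that filter-and-copy pattern using plain Java collections; the Triple record is a hypothetical stand-in for the Clerezza type:

import java.util.ArrayList;
import java.util.List;
import java.util.Set;

public class MergeFilterDemo {

    // Hypothetical stand-in for the Clerezza Triple type.
    record Triple(String subject, String predicate, String object) { }

    public static void main(String[] args) {
        List<Triple> space = List.of(
                new Triple("urn:ont1", "rdf:type", "owl:Ontology"),  // header triple: skipped
                new Triple("urn:ont1", "owl:imports", "urn:ext"),    // skipped, target reinstated below
                new Triple("urn:x", "rdfs:label", "\"kept\""));
        Set<String> badSubjects = Set.of("urn:ont1");

        List<Triple> root = new ArrayList<>();
        // Copy everything except triples whose subject is a "bad" ontology header.
        for (Triple t : space) if (!badSubjects.contains(t.subject())) root.add(t);
        // Reinstate the collected import target under the scope's own IRI.
        root.add(new Triple("urn:scope", "owl:imports", "urn:ext"));
        root.forEach(System.out::println);
    }
}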
use of org.apache.clerezza.commons.rdf.BlankNodeOrIRI in project stanbol by apache.
the class GraphMultiplexer method getHandles.
@Override
public Set<OntologyCollector> getHandles(OWLOntologyID publicKey) {
    Set<OntologyCollector> handles = new HashSet<OntologyCollector>();
    Set<OWLOntologyID> aliases = listAliases(publicKey);
    aliases.add(publicKey);
    for (OWLOntologyID alias : aliases) {
        IRI ontologyId = buildResource(alias);
        // Follow the relation in both directions: collector -manages-> ontology
        // and ontology -isManagedBy-> collector.
        for (Iterator<Triple> it = meta.filter(null, MANAGES_URIREF, ontologyId); it.hasNext();) {
            BlankNodeOrIRI sub = it.next().getSubject();
            if (sub instanceof IRI)
                checkHandle((IRI) sub, handles);
            else
                throw new InvalidMetaGraphStateException(sub + " is not a valid ontology collector identifier.");
        }
        for (Iterator<Triple> it = meta.filter(ontologyId, IS_MANAGED_BY_URIREF, null); it.hasNext();) {
            RDFTerm obj = it.next().getObject();
            if (obj instanceof IRI)
                checkHandle((IRI) obj, handles);
            else
                throw new InvalidMetaGraphStateException(obj + " is not a valid ontology collector identifier.");
        }
    }
    return handles;
    // throw new UnsupportedOperationException("Not implemented yet.");
}
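getHandles resolves the same management relation from both directions because the meta graph may record it either as collector-manages-ontology or as ontology-isManagedBy-collector. A toy sketch of that symmetric lookup over a triple list; the Triple record and the predicate names are hypothetical stand-ins:

import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class BidirectionalLookupDemo {

    // Hypothetical stand-in for a meta-graph triple.
    record Triple(String s, String p, String o) { }

    public static void main(String[] args) {
        List<Triple> meta = List.of(
                new Triple("urn:collectorA", "manages", "urn:ont1"),
                new Triple("urn:ont1", "isManagedBy", "urn:collectorB"));

        Set<String> handles = new HashSet<>();
        String ontologyId = "urn:ont1";
        // Direction 1: ? -manages-> ontologyId yields the subject.
        for (Triple t : meta) if (t.p().equals("manages") && t.o().equals(ontologyId)) handles.add(t.s());
        // Direction 2: ontologyId -isManagedBy-> ? yields the object.
        for (Triple t : meta) if (t.p().equals("isManagedBy") && t.s().equals(ontologyId)) handles.add(t.o());
        System.out.println(handles); // both collectors are found, in some order
    }
}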