Use of org.apache.clerezza.commons.rdf.RDFTerm in project stanbol by apache.
From the class TestEntityLinkingEnhancementEngine, method validateAllEntityAnnotations:
private static int validateAllEntityAnnotations(NamedEntityTaggingEngine entityLinkingEngine, ContentItem ci) {
    Map<IRI, RDFTerm> expectedValues = new HashMap<IRI, RDFTerm>();
    expectedValues.put(ENHANCER_EXTRACTED_FROM, ci.getUri());
    expectedValues.put(DC_CREATOR, LiteralFactory.getInstance().createTypedLiteral(
            entityLinkingEngine.getClass().getName()));
    Iterator<Triple> entityAnnotationIterator = ci.getMetadata().filter(
            null, RDF_TYPE, ENHANCER_ENTITYANNOTATION);
    // adding null as the expected value makes fise:confidence a required property
    expectedValues.put(Properties.ENHANCER_CONFIDENCE, null);
    int entityAnnotationCount = 0;
    while (entityAnnotationIterator.hasNext()) {
        IRI entityAnnotation = (IRI) entityAnnotationIterator.next().getSubject();
        // test if the selected text is added
        validateEntityAnnotation(ci.getMetadata(), entityAnnotation, expectedValues);
        // fise:confidence is now checked by EnhancementStructureHelper (STANBOL-630)
        // Iterator<Triple> confidenceIterator = ci.getMetadata().filter(entityAnnotation, ENHANCER_CONFIDENCE, null);
        // assertTrue("Expected fise:confidence value is missing (entityAnnotation "
        //     + entityAnnotation + ")", confidenceIterator.hasNext());
        // Double confidence = LiteralFactory.getInstance().createObject(Double.class,
        //     (TypedLiteral) confidenceIterator.next().getObject());
        // assertTrue("fise:confidence MUST BE <= 1 (value= '" + confidence
        //     + "', entityAnnotation " + entityAnnotation + ")",
        //     1.0 >= confidence.doubleValue());
        // assertTrue("fise:confidence MUST BE >= 0 (value= '" + confidence
        //     + "', entityAnnotation " + entityAnnotation + ")",
        //     0.0 <= confidence.doubleValue());
        // Test the entityhub:site property (STANBOL-625)
        IRI ENTITYHUB_SITE = new IRI(RdfResourceEnum.site.getUri());
        Iterator<Triple> entitySiteIterator = ci.getMetadata().filter(entityAnnotation, ENTITYHUB_SITE, null);
        assertTrue("Expected entityhub:site value is missing (entityAnnotation "
                + entityAnnotation + ")", entitySiteIterator.hasNext());
        RDFTerm siteResource = entitySiteIterator.next().getObject();
        assertTrue("entityhub:site values MUST BE Literals", siteResource instanceof Literal);
        assertEquals("'dbpedia' is expected as entityhub:site value", "dbpedia",
                ((Literal) siteResource).getLexicalForm());
        assertFalse("entityhub:site MUST HAVE only a single value", entitySiteIterator.hasNext());
        entityAnnotationCount++;
    }
    return entityAnnotationCount;
}
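The test leans on Clerezza's Graph.filter(subject, predicate, object) idiom, where null acts as a wildcard and the call returns an Iterator<Triple> over matching statements. Below is a minimal, self-contained sketch of that idiom using the in-memory SimpleGraph implementation; the class name and all IRIs are made up for illustration, not taken from the test above.

import java.util.Iterator;
import org.apache.clerezza.commons.rdf.Graph;
import org.apache.clerezza.commons.rdf.IRI;
import org.apache.clerezza.commons.rdf.Triple;
import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;

public class FilterIdiomSketch {
    public static void main(String[] args) {
        Graph graph = new SimpleGraph();
        // illustrative IRIs
        IRI annotation = new IRI("urn:example:annotation-1");
        IRI rdfType = new IRI("http://www.w3.org/1999/02/22-rdf-syntax-ns#type");
        IRI entityAnnotation = new IRI("urn:example:EntityAnnotation");
        graph.add(new TripleImpl(annotation, rdfType, entityAnnotation));
        // null is a wildcard: match any subject typed as entityAnnotation
        for (Iterator<Triple> it = graph.filter(null, rdfType, entityAnnotation); it.hasNext();) {
            System.out.println(it.next().getSubject());
        }
    }
}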
Use of org.apache.clerezza.commons.rdf.RDFTerm in project stanbol by apache.
From the class ClerezzaOntologyProvider, method computeAliasClosure:
protected void computeAliasClosure(OWLOntologyID publicKey, Set<OWLOntologyID> target) {
    target.add(publicKey);
    Graph meta = getMetaGraph(Graph.class);
    IRI ont = keymap.buildResource(publicKey);
    Set<RDFTerm> resources = new HashSet<RDFTerm>();
    // Forwards
    for (Iterator<Triple> it = meta.filter(ont, OWL.sameAs, null); it.hasNext();)
        resources.add(it.next().getObject());
    // Backwards
    for (Iterator<Triple> it = meta.filter(null, OWL.sameAs, ont); it.hasNext();)
        resources.add(it.next().getSubject());
    for (RDFTerm r : resources)
        if (r instanceof IRI) {
            OWLOntologyID newKey = keymap.buildPublicKey((IRI) r);
            if (!target.contains(newKey))
                computeAliasClosure(newKey, target);
        }
}
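Since computeAliasClosure is protected and recursive, a caller inside ClerezzaOntologyProvider (or a subclass) seeds it with an empty accumulator. A hedged sketch, with somePublicKey standing in for a real key:

Set<OWLOntologyID> closure = new HashSet<OWLOntologyID>();
computeAliasClosure(somePublicKey, closure); // somePublicKey is a placeholder
// closure now holds somePublicKey plus every key reachable over owl:sameAs,
// followed in both directions; the contains() check above prevents infinite
// recursion on cyclic alias graphs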
Use of org.apache.clerezza.commons.rdf.RDFTerm in project stanbol by apache.
From the class ClerezzaOntologyProvider, method getOntologyNetworkConfiguration:
public OntologyNetworkConfiguration getOntologyNetworkConfiguration() {
    Map<String, Collection<OWLOntologyID>> coreOntologies = new HashMap<String, Collection<OWLOntologyID>>(),
            customOntologies = new HashMap<String, Collection<OWLOntologyID>>();
    Map<String, Collection<String>> attachedScopes = new HashMap<String, Collection<String>>();
    final Graph meta = store.getGraph(new IRI(metaGraphId));
    // Scopes first
    for (Iterator<Triple> it = meta.filter(null, RDF.type, SCOPE_URIREF); it.hasNext();) {
        // for each scope
        Triple ta = it.next();
        BlankNodeOrIRI sub = ta.getSubject();
        if (sub instanceof IRI) {
            String s = ((IRI) sub).getUnicodeString(),
                    prefix = _NS_STANBOL_INTERNAL + Scope.shortName + "/";
            if (s.startsWith(prefix)) {
                String scopeId = s.substring(prefix.length());
                log.info("Rebuilding scope \"{}\".", scopeId);
                coreOntologies.put(scopeId, new TreeSet<OWLOntologyID>());
                customOntologies.put(scopeId, new TreeSet<OWLOntologyID>());
                IRI core_ur = null, custom_ur = null;
                RDFTerm r;
                // Check core space
                Iterator<Triple> it2 = meta.filter(sub, HAS_SPACE_CORE_URIREF, null);
                if (it2.hasNext()) {
                    r = it2.next().getObject();
                    if (r instanceof IRI)
                        core_ur = (IRI) r;
                } else {
                    it2 = meta.filter(null, IS_SPACE_CORE_OF_URIREF, sub);
                    if (it2.hasNext()) {
                        r = it2.next().getSubject();
                        if (r instanceof IRI)
                            core_ur = (IRI) r;
                    }
                }
                // Check custom space
                it2 = meta.filter(sub, HAS_SPACE_CUSTOM_URIREF, null);
                if (it2.hasNext()) {
                    r = it2.next().getObject();
                    if (r instanceof IRI)
                        custom_ur = (IRI) r;
                } else {
                    it2 = meta.filter(null, IS_SPACE_CUSTOM_OF_URIREF, sub);
                    if (it2.hasNext()) {
                        r = it2.next().getSubject();
                        if (r instanceof IRI)
                            custom_ur = (IRI) r;
                    }
                }
                // retrieve the ontologies
                if (core_ur != null) {
                    for (it2 = meta.filter(core_ur, null, null); it2.hasNext();) {
                        Triple t = it2.next();
                        IRI predicate = t.getPredicate();
                        if (predicate.equals(MANAGES_URIREF)) {
                            if (t.getObject() instanceof IRI)
                                coreOntologies.get(scopeId).add(// FIXME must be very temporary!
                                        keymap.buildPublicKey((IRI) t.getObject()));
                        }
                    }
                    for (it2 = meta.filter(null, null, core_ur); it2.hasNext();) {
                        Triple t = it2.next();
                        IRI predicate = t.getPredicate();
                        if (predicate.equals(IS_MANAGED_BY_URIREF)) {
                            if (t.getSubject() instanceof IRI)
                                coreOntologies.get(scopeId).add(// FIXME must be very temporary!
                                        keymap.buildPublicKey((IRI) t.getSubject()));
                        }
                    }
                }
                if (custom_ur != null) {
                    for (it2 = meta.filter(custom_ur, null, null); it2.hasNext();) {
                        Triple t = it2.next();
                        IRI predicate = t.getPredicate();
                        if (predicate.equals(MANAGES_URIREF)) {
                            if (t.getObject() instanceof IRI)
                                customOntologies.get(scopeId).add(// FIXME must be very temporary!
                                        keymap.buildPublicKey((IRI) t.getObject()));
                        }
                    }
                    for (it2 = meta.filter(null, null, custom_ur); it2.hasNext();) {
                        Triple t = it2.next();
                        IRI predicate = t.getPredicate();
                        if (predicate.equals(IS_MANAGED_BY_URIREF)) {
                            if (t.getSubject() instanceof IRI)
                                customOntologies.get(scopeId).add(// FIXME must be very temporary!
                                        keymap.buildPublicKey((IRI) t.getSubject()));
                        }
                    }
                }
            }
        }
    }
    // Sessions next
    Map<String, Collection<OWLOntologyID>> sessionOntologies = new HashMap<String, Collection<OWLOntologyID>>();
    for (Iterator<Triple> it = meta.filter(null, RDF.type, SESSION_URIREF); it.hasNext();) {
        // for each session
        Triple ta = it.next();
        BlankNodeOrIRI sub = ta.getSubject();
        if (sub instanceof IRI) {
            IRI ses_ur = (IRI) sub;
            String s = ((IRI) sub).getUnicodeString();
            String prefix = _NS_STANBOL_INTERNAL + Session.shortName + "/";
            if (s.startsWith(prefix)) {
                String sessionId = s.substring(prefix.length());
                log.info("Rebuilding session \"{}\".", sessionId);
                sessionOntologies.put(sessionId, new TreeSet<OWLOntologyID>());
                attachedScopes.put(sessionId, new TreeSet<String>());
                // retrieve the ontologies
                if (ses_ur != null) {
                    for (Iterator<Triple> it2 = meta.filter(ses_ur, MANAGES_URIREF, null); it2.hasNext();) {
                        RDFTerm obj = it2.next().getObject();
                        if (obj instanceof IRI)
                            sessionOntologies.get(sessionId).add(// FIXME must be very temporary!
                                    keymap.buildPublicKey((IRI) obj));
                    }
                    for (Iterator<Triple> it2 = meta.filter(null, IS_MANAGED_BY_URIREF, ses_ur); it2.hasNext();) {
                        RDFTerm subj = it2.next().getSubject();
                        if (subj instanceof IRI)
                            sessionOntologies.get(sessionId).add(// FIXME must be very temporary!
                                    keymap.buildPublicKey((IRI) subj));
                    }
                    for (Iterator<Triple> it2 = meta.filter(null, APPENDED_TO_URIREF, ses_ur); it2.hasNext();) {
                        RDFTerm subj = it2.next().getSubject();
                        if (subj instanceof IRI) {
                            String s1 = ((IRI) subj).getUnicodeString();
                            String prefix1 = _NS_STANBOL_INTERNAL + Scope.shortName + "/";
                            if (s1.startsWith(prefix1)) {
                                String scopeId = s1.substring(prefix1.length());
                                attachedScopes.get(sessionId).add(scopeId);
                            }
                        }
                    }
                    for (Iterator<Triple> it2 = meta.filter(ses_ur, HAS_APPENDED_URIREF, null); it2.hasNext();) {
                        RDFTerm obj = it2.next().getObject();
                        if (obj instanceof IRI) {
                            String s1 = ((IRI) obj).getUnicodeString();
                            String prefix1 = _NS_STANBOL_INTERNAL + Scope.shortName + "/";
                            if (s1.startsWith(prefix1)) {
                                String scopeId = s1.substring(prefix1.length());
                                attachedScopes.get(sessionId).add(scopeId);
                            }
                        }
                    }
                }
            }
        }
    }
    return new OntologyNetworkConfiguration(coreOntologies, customOntologies, sessionOntologies, attachedScopes);
}
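Both loops recover scope and session identifiers by stripping an internal namespace prefix from the resource IRI. Reduced to plain string handling, and with a made-up prefix standing in for the real _NS_STANBOL_INTERNAL expansion:

String prefix = "urn:stanbol.internal/Scope/"; // placeholder, not the actual namespace value
String s = "urn:stanbol.internal/Scope/myScope";
if (s.startsWith(prefix)) {
    String scopeId = s.substring(prefix.length()); // "myScope"
}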
Use of org.apache.clerezza.commons.rdf.RDFTerm in project stanbol by apache.
From the class ClerezzaOntologyProvider, method fillImportsReverse:
/**
* Fills a reverse stack of import targets for the graph identified by key <tt>importing</tt>. The import
* tree is visited in <i>pre-order</i> and the stack is filled accordingly. Optionally, a second stack can
* be supplied to store only the level 1 imports. This can be used for preserving the original import tree
* structure.<br>
* <br>
* TODO there should be a more space-efficient implementation.
*
* @param importing
* the key of the root graph, which will be at the bottom of every list.
* @param reverseImports
* the list that will store all import target keys in pre-order.
* @param level1Imports
* a second list that will store the level 1 import target keys, and is not passed to recursive
* calls. Will be ignored if null.
*/
private void fillImportsReverse(OWLOntologyID importing,
                                List<OWLOntologyID> reverseImports,
                                List<OWLOntologyID> level1Imports) {
    log.debug("Filling reverse imports for {}", importing);
    // Add the importing ontology first
    reverseImports.add(importing);
    if (level1Imports != null)
        level1Imports.add(importing);
    // Get the graph and explore its imports
    // store.getTriples(importing);
    Graph graph = getStoredOntology(/* getPublicKey */ (importing), Graph.class, false);
    Iterator<Triple> it = graph.filter(null, RDF.type, OWL.Ontology);
    if (!it.hasNext())
        return;
    Iterator<Triple> it2 = graph.filter(it.next().getSubject(), OWL.imports, null);
    while (it2.hasNext()) {
        // obj is the *original* import target
        RDFTerm obj = it2.next().getObject();
        if (obj instanceof IRI) {
            // Right now getKey() is returning the "private" storage ID
            String key = getKey(org.semanticweb.owlapi.model.IRI.create(((IRI) obj).getUnicodeString()));
            // TODO this will not be needed when getKey() and getPublicKey() return the proper public key.
            OWLOntologyID oid = keymap.getReverseMapping(new IRI(key));
            // (Unoptimized, should not use contains() for stacks.)
            if (!reverseImports.contains(oid)) {
                if (level1Imports != null)
                    level1Imports.add(oid);
                fillImportsReverse(oid, reverseImports, null);
            }
        }
    }
}
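A usage sketch based only on the signature above (rootKey is a placeholder): supplying the optional second list captures the direct imports, while the first list receives the whole pre-order traversal:

List<OWLOntologyID> reverseImports = new ArrayList<OWLOntologyID>();
List<OWLOntologyID> level1Imports = new ArrayList<OWLOntologyID>();
fillImportsReverse(rootKey, reverseImports, level1Imports); // rootKey is a placeholder
// reverseImports: rootKey followed by every transitive import target, in pre-order
// level1Imports: rootKey followed by its direct (level 1) imports only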
Use of org.apache.clerezza.commons.rdf.RDFTerm in project stanbol by apache.
From the class ConstantMapping, method apply:
@Override
public boolean apply(Graph graph, BlankNodeOrIRI subject, Metadata metadata) {
    for (RDFTerm value : values) {
        graph.add(new TripleImpl(subject, ontProperty, value));
        mappingLogger.log(subject, ontProperty, null, value);
    }
    return true;
}
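The mapping asserts each configured value for ontProperty on the given subject. Its effect can be reproduced with plain Clerezza calls; the graph, subject, property, and literal values below are illustrative, not taken from ConstantMapping's configuration:

// assumes the same Clerezza imports as the sketch after the first example,
// plus org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl
Graph graph = new SimpleGraph();
BlankNodeOrIRI subject = new IRI("urn:example:content-item");  // illustrative
IRI ontProperty = new IRI("http://purl.org/dc/terms/creator"); // illustrative
for (RDFTerm value : java.util.Arrays.<RDFTerm>asList(
        new PlainLiteralImpl("Alice"), new PlainLiteralImpl("Bob"))) {
    graph.add(new TripleImpl(subject, ontProperty, value));
}
// graph now holds one dc:creator triple per configured value, mirroring apply()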