Use of org.apache.clerezza.commons.rdf.Graph in project stanbol by apache.
The class TestClerezzaSpaces, method testCreateSpace.
@Test
public void testCreateSpace() throws Exception {
    OntologySpace space = factory.createCustomOntologySpace(scopeId, dropSrc);
    OWLOntologyID logicalId = null;
    Object o = dropSrc.getRootOntology();
    if (o instanceof Graph)
        logicalId = OWLUtils.extractOntologyID((Graph) o);
    else if (o instanceof OWLOntology)
        logicalId = OWLUtils.extractOntologyID((OWLOntology) o);
    assertNotNull(logicalId);
    assertTrue(space.hasOntology(logicalId));
}
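As a side note, the Graph-vs-OWLOntology dispatch above can be factored into a small helper. This is only an illustrative sketch, not part of the test; the helper name resolveOntologyID is assumed, while the two OWLUtils overloads are exactly the ones the test calls.

private static OWLOntologyID resolveOntologyID(Object root) {
    // Delegate to the OWLUtils overload matching the runtime type of the root object.
    if (root instanceof Graph) return OWLUtils.extractOntologyID((Graph) root);
    if (root instanceof OWLOntology) return OWLUtils.extractOntologyID((OWLOntology) root);
    // Unknown representation: no logical ID can be extracted.
    return null;
}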
Use of org.apache.clerezza.commons.rdf.Graph in project stanbol by apache.
The class TestClerezzaSpaces, method setup.
@BeforeClass
public static void setup() throws Exception {
    offline = new OfflineConfigurationImpl(new Hashtable<String, Object>());
    ScopeRegistry reg = new ScopeRegistryImpl();
    // This one is created from scratch
    Graph ont2 = ClerezzaOWLUtils.createOntology(baseIri2.toString());
    minorSrc = new GraphSource(ont2.getImmutableGraph());
    dropSrc = getLocalSource("/ontologies/droppedcharacters.owl");
    nonexSrc = getLocalSource("/ontologies/nonexistentcharacters.owl");
    inMemorySrc = new ParentPathInputSource(new File(
        TestClerezzaSpaces.class.getResource("/ontologies/maincharacters.owl").toURI()));
    OWLDataFactory df = OWLManager.getOWLDataFactory();
    OWLClass cHuman = df.getOWLClass(IRI.create(baseIri + "/" + Constants.humanBeing));
    OWLIndividual iLinus = df.getOWLNamedIndividual(IRI.create(baseIri + "/" + Constants.linus));
    linusIsHuman = df.getOWLClassAssertionAxiom(cHuman, iLinus);
    factory = new ClerezzaCollectorFactory(
        new ClerezzaOntologyProvider(tcManager, offline, parser), new Hashtable<String, Object>());
    factory.setDefaultNamespace(IRI.create("http://stanbol.apache.org/ontology/"));
}
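A usage sketch of the fixtures created in setup(), combining the same pieces the tests use: create a Graph with ClerezzaOWLUtils, wrap its immutable view in a GraphSource, and register it through the factory. The ontology IRI and scope name below are made up for illustration and do not come from the test suite.

Graph ont = ClerezzaOWLUtils.createOntology("http://example.org/ontologies/demo");
GraphSource src = new GraphSource(ont.getImmutableGraph());
// Register the source in a new custom space, as testCreateSpace() does with dropSrc.
OntologySpace space = factory.createCustomOntologySpace("demoScope", src);
// The logical ID extracted from the graph should now be managed by the space.
assertTrue(space.hasOntology(OWLUtils.extractOntologyID(ont)));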
Use of org.apache.clerezza.commons.rdf.Graph in project stanbol by apache.
The class TestOntologyNetworkPersistence, method updatesGraphOnSpaceModification.
@Test
public void updatesGraphOnSpaceModification() throws Exception {
    // Ensure the metadata graph is there.
    Graph meta = ontologyProvider.getMetaGraph(Graph.class);
    assertNotNull(meta);
    String scopeId = "updateTest";
    Scope scope = onm.createOntologyScope(scopeId,
        new GraphContentInputSource(getClass().getResourceAsStream("/ontologies/test1.owl")));
    IRI collector = new IRI(_NS_STANBOL_INTERNAL + OntologySpace.shortName + "/"
        + scope.getCoreSpace().getID());
    // Has no versionIRI
    IRI test1id = new IRI("http://stanbol.apache.org/ontologies/test1.owl");
    // Be strict: the whole property pair must be there.
    IRI predicate = MANAGES_URIREF;
    assertTrue(meta.contains(new TripleImpl(collector, predicate, test1id)));
    predicate = IS_MANAGED_BY_URIREF;
    assertTrue(meta.contains(new TripleImpl(test1id, predicate, collector)));
    // Tear down the scope so that the core space can be modified.
    scope.tearDown();
    scope.getCoreSpace().addOntology(
        new GraphContentInputSource(getClass().getResourceAsStream("/ontologies/minorcharacters.owl")));
    IRI minorId = new IRI("http://stanbol.apache.org/ontologies/pcomics/minorcharacters.owl");
    predicate = MANAGES_URIREF;
    assertTrue(meta.contains(new TripleImpl(collector, predicate, minorId)));
    predicate = IS_MANAGED_BY_URIREF;
    assertTrue(meta.contains(new TripleImpl(minorId, predicate, collector)));
    scope.getCustomSpace().addOntology(
        new GraphContentInputSource(getClass().getResourceAsStream("/ontologies/test1.owl")));
    scope.getCustomSpace().addOntology(
        new GraphContentInputSource(getClass().getResourceAsStream("/ontologies/minorcharacters.owl")));
}
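The symmetric containment checks repeated above lend themselves to a small assertion helper. This is a sketch only; assertManagedPair is an assumed name and not part of the test, but it is built from the same MANAGES_URIREF / IS_MANAGED_BY_URIREF constants and TripleImpl checks used above.

private static void assertManagedPair(Graph meta, IRI collector, IRI ontologyId) {
    // The collector must state that it manages the ontology...
    assertTrue(meta.contains(new TripleImpl(collector, MANAGES_URIREF, ontologyId)));
    // ...and the ontology must state that it is managed by the collector.
    assertTrue(meta.contains(new TripleImpl(ontologyId, IS_MANAGED_BY_URIREF, collector)));
}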
Use of org.apache.clerezza.commons.rdf.Graph in project stanbol by apache.
The class ResourceMapping, method apply.
@Override
public boolean apply(Graph graph, BlankNodeOrIRI subject, Metadata metadata) {
    boolean added = false;
    // A fresh blank node that will become the object of ontProperty for this subject.
    BlankNodeOrIRI s = new BlankNode();
    mappingLogger.log(subject, ontProperty, null, s);
    if (!required.isEmpty()) {
        // Apply the required mappings to a staging graph first: if any of them
        // fails, nothing is added to the target graph at all.
        Graph g = new SimpleGraph();
        for (Mapping m : required) {
            if (!m.apply(g, s, metadata)) {
                return false;
            }
        }
        graph.addAll(g);
        added = true;
    }
    for (Mapping m : optional) {
        if (m.apply(graph, s, metadata)) {
            added = true;
        }
    }
    if (added) {
        for (Mapping m : additional) {
            m.apply(graph, s, metadata);
        }
        // Link the subject to the newly created resource.
        graph.add(new TripleImpl(subject, ontProperty, s));
    }
    return added;
}
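To illustrate the contract of apply() from the caller's side, the sketch below stages the mapping output in a SimpleGraph and only merges it on success, mirroring what apply() itself does for its required sub-mappings. The resourceMapping instance, the subject IRI, the metadata object and the contentGraph target are hypothetical names, not taken from the Stanbol sources.

Graph staging = new SimpleGraph();
BlankNodeOrIRI subject = new IRI("urn:content:item1"); // hypothetical enhancement subject
// resourceMapping, metadata and contentGraph are assumed to be in scope:
// a configured ResourceMapping, the parsed Tika Metadata and the target content graph.
if (resourceMapping.apply(staging, subject, metadata)) {
    // apply() reported success, so the staging graph now also contains the
    // subject -> ontProperty -> blank node link and can be merged.
    contentGraph.addAll(staging);
}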
Use of org.apache.clerezza.commons.rdf.Graph in project stanbol by apache.
The class RestfulLangidentEngine, method computeEnhancements.
/**
 * Compute enhancements for the supplied ContentItem. The results of the process
 * are stored in the metadata of the content item.
 * <p/>
 * The client (usually an {@link org.apache.stanbol.enhancer.servicesapi.EnhancementJobManager}) should take care of
 * persistent storage of the enhanced {@link org.apache.stanbol.enhancer.servicesapi.ContentItem}.
 * <p/>
 * This implementation sends the text/plain content to the configured RESTful
 * language identification service and, for each detected language, adds a
 * TextAnnotation with the dc:language, dc:type and (if available) the
 * confidence to the metadata of the content item.
 *
 * @throws org.apache.stanbol.enhancer.servicesapi.EngineException
 *             if the underlying process failed to work as expected
 */
@Override
public void computeEnhancements(final ContentItem ci) throws EngineException {
    // get the plain text Blob
    Map.Entry<IRI, Blob> textBlob = getPlainText(this, ci, false);
    Blob blob = textBlob.getValue();
    // send the text to the server
    final HttpPost request = new HttpPost(serviceUrl);
    request.setEntity(new InputStreamEntity(blob.getStream(), blob.getContentLength(),
        ContentType.create(blob.getMimeType(), blob.getParameter().get("charset"))));
    // execute the request
    List<LangSuggestion> detected;
    try {
        detected = AccessController.doPrivileged(new PrivilegedExceptionAction<List<LangSuggestion>>() {

            public List<LangSuggestion> run() throws ClientProtocolException, IOException {
                return httpClient.execute(request, new LangIdentResponseHandler(ci, objectMapper));
            }
        });
    } catch (PrivilegedActionException pae) {
        Exception e = pae.getException();
        if (e instanceof ClientProtocolException || e instanceof IOException) {
            throw new EngineException(this, ci, "Exception while executing Request "
                    + "on RESTful Language Identification Service at " + serviceUrl, e);
        } else {
            throw RuntimeException.class.cast(e);
        }
    }
    Graph metadata = ci.getMetadata();
    log.debug("Detected Languages for ContentItem {} and Blob {}", ci.getUri(), textBlob.getKey());
    ci.getLock().writeLock().lock();
    try {
        // write TextAnnotations for the detected languages
        for (LangSuggestion suggestion : detected) {
            // add a hypothesis
            log.debug(" > {}@{}", suggestion.getLanguage(),
                suggestion.hasProbability() ? suggestion.getProbability() : "-,--");
            IRI textEnhancement = EnhancementEngineHelper.createTextEnhancement(ci, this);
            metadata.add(new TripleImpl(textEnhancement, DC_LANGUAGE,
                new PlainLiteralImpl(suggestion.getLanguage())));
            metadata.add(new TripleImpl(textEnhancement, DC_TYPE, DCTERMS_LINGUISTIC_SYSTEM));
            if (suggestion.hasProbability()) {
                metadata.add(new TripleImpl(textEnhancement, ENHANCER_CONFIDENCE,
                    literalFactory.createTypedLiteral(suggestion.getProbability())));
            }
        }
    } finally {
        ci.getLock().writeLock().unlock();
    }
}
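As a read-back sketch (not part of the engine), the dc:language annotations written above can be inspected afterwards by filtering the metadata graph under the content item's read lock; ci and DC_LANGUAGE are the same symbols used in computeEnhancements, while the logging is illustrative only.

ci.getLock().readLock().lock();
try {
    // Iterate all triples whose predicate is dc:language, regardless of subject.
    Iterator<Triple> languages = ci.getMetadata().filter(null, DC_LANGUAGE, null);
    while (languages.hasNext()) {
        Triple t = languages.next();
        log.debug("TextAnnotation {} has dc:language {}", t.getSubject(), t.getObject());
    }
} finally {
    ci.getLock().readLock().unlock();
}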