Use of org.apache.clerezza.commons.rdf.IRI in project stanbol by apache.
The class UserResource, method changeUser.
/**
 * Modify a user given a graph describing the change.
 *
 * @param inputGraph the change graph
 * @return HTTP response
 */
@POST
@Consumes(SupportedFormat.TURTLE)
@Path("change-user")
public Response changeUser(ImmutableGraph inputGraph) {
    Lock readLock = systemGraph.getLock().readLock();
    readLock.lock();
    Iterator<Triple> changes = inputGraph.filter(null, null, Ontology.Change);
    Triple oldTriple = null;
    Triple newTriple = null;
    if (changes.hasNext()) {
        Triple changeTriple = changes.next();
        BlankNodeOrIRI changeNode = changeTriple.getSubject();
        Literal userName = (Literal) inputGraph.filter(changeNode, PLATFORM.userName, null).next().getObject();
        Iterator<Triple> userTriples = systemGraph.filter(null, PLATFORM.userName, userName);
        // if (userTriples.hasNext()) {
        BlankNodeOrIRI userNode = userTriples.next().getSubject();
        IRI predicateIRI = (IRI) inputGraph.filter(changeNode, Ontology.predicate, null).next().getObject();
        // handle old value (if it exists)
        Iterator<Triple> iterator = inputGraph.filter(changeNode, Ontology.oldValue, null);
        RDFTerm oldValue = null;
        if (iterator.hasNext()) {
            oldValue = iterator.next().getObject();
            // Triple oldTriple = systemGraph.filter(null, predicateIRI, oldValue).next();
            Iterator<Triple> oldTriples = systemGraph.filter(userNode, predicateIRI, oldValue);
            if (oldTriples.hasNext()) {
                oldTriple = oldTriples.next();
            }
        }
        RDFTerm newValue = inputGraph.filter(changeNode, Ontology.newValue, null).next().getObject();
        newTriple = new TripleImpl(userNode, predicateIRI, newValue);
        // }
    }
    readLock.unlock();
    Lock writeLock = systemGraph.getLock().writeLock();
    writeLock.lock();
    if (oldTriple != null) {
        systemGraph.remove(oldTriple);
    }
    if (newTriple != null) {
        // guard against an input graph that contains no change resource
        systemGraph.add(newTriple);
    }
    writeLock.unlock();
    // seems the most appropriate response
    return Response.noContent().build();
}
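For illustration, a client could assemble the change graph that this endpoint consumes roughly as follows. This is a minimal sketch, not Stanbol code: it reuses the Ontology and PLATFORM constants from the method above, imports are omitted as in the other snippets, SimpleGraph stands for any in-memory Clerezza Graph implementation, and the user name, the changed property (foaf:mbox) and the example values are assumptions.

Graph changeGraph = new SimpleGraph();
IRI change = new IRI("urn:example:change1");
// any predicate works here; changeUser(..) only filters on the object Ontology.Change
changeGraph.add(new TripleImpl(change, new IRI("http://www.w3.org/1999/02/22-rdf-syntax-ns#type"), Ontology.Change));
changeGraph.add(new TripleImpl(change, PLATFORM.userName, new PlainLiteralImpl("alice")));
changeGraph.add(new TripleImpl(change, Ontology.predicate, new IRI("http://xmlns.com/foaf/0.1/mbox")));
changeGraph.add(new TripleImpl(change, Ontology.oldValue, new IRI("mailto:old@example.org")));
changeGraph.add(new TripleImpl(change, Ontology.newValue, new IRI("mailto:new@example.org")));
// serialize the immutable view as Turtle and POST it to the change-user resource
ImmutableGraph requestBody = changeGraph.getImmutableGraph();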
Use of org.apache.clerezza.commons.rdf.IRI in project stanbol by apache.
The class PermissionDefinitions, method retrievePermissions.
/**
 * Returns the permissions of a specified location, i.e. the permissions of all
 * permission assignments matching <code>location</code>.
 *
 * @param location the location of a bundle
 * @return an array of <code>PermissionInfo</code> elements, or <code>null</code> if no permissions were found
 */
PermissionInfo[] retrievePermissions(String location) {
    List<PermissionInfo> permInfoList = new ArrayList<PermissionInfo>();
    Iterator<Triple> ownerTriples = systemGraph.filter(new IRI(location), OSGI.owner, null);
    if (ownerTriples.hasNext()) {
        BlankNodeOrIRI user = (BlankNodeOrIRI) ownerTriples.next().getObject();
        lookForPermissions(user, permInfoList);
    }
    if (permInfoList.isEmpty()) {
        return null;
    }
    return permInfoList.toArray(new PermissionInfo[permInfoList.size()]);
}
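A possible call site, purely hypothetical (the permissionDefinitions instance and the OSGi bundle variable are assumptions), that tolerates the null return value:

// fall back to an empty permission set when no owner or permission assignments were found
PermissionInfo[] infos = permissionDefinitions.retrievePermissions(bundle.getLocation());
if (infos == null) {
    infos = new PermissionInfo[0];
}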
Use of org.apache.clerezza.commons.rdf.IRI in project stanbol by apache.
The class TopicClassificationEngine, method computeEnhancements.
@Override
public void computeEnhancements(ContentItem ci) throws EngineException {
    Entry<IRI, Blob> contentPart = ContentItemHelper.getBlob(ci, SUPPORTED_MIMETYPES);
    if (contentPart == null) {
        throw new IllegalStateException("No ContentPart with a supported Mime Type " + "found for ContentItem " + ci.getUri() + " (supported: '" + SUPPORTED_MIMETYPES + "') -> this indicates that canEnhance was " + "NOT called and indicates a bug in the used EnhancementJobManager!");
    }
    String language = EnhancementEngineHelper.getLanguage(ci);
    if (!(acceptedLanguageSet.isEmpty() || acceptedLanguageSet.contains(language) || acceptedLanguageSet.contains(""))) {
        throw new IllegalStateException("The language '" + language + "' of the ContentItem is not configured as " + "active for this Engine (active: " + acceptedLanguageSet + ").");
    }
    String text;
    try {
        text = ContentItemHelper.getText(contentPart.getValue());
    } catch (IOException e) {
        throw new InvalidContentException(String.format("Unable to extract " + "textual content from ContentPart %s of ContentItem %s!", contentPart.getKey(), ci.getUri()), e);
    }
    if (text.trim().isEmpty()) {
        log.warn("ContentPart {} of ContentItem {} does not contain any " + "text to extract topics from", contentPart.getKey(), ci.getUri());
        return;
    }
    Graph metadata = ci.getMetadata();
    List<TopicSuggestion> topics;
    try {
        topics = suggestTopics(text);
        if (topics.isEmpty()) {
            return;
        }
    } catch (ClassifierException e) {
        throw new EngineException(e);
    }
    IRI precision = new IRI(NamespaceEnum.fise + "classifier/precision");
    IRI recall = new IRI(NamespaceEnum.fise + "classifier/recall");
    IRI f1 = new IRI(NamespaceEnum.fise + "classifier/f1");
    LiteralFactory lf = LiteralFactory.getInstance();
    ci.getLock().writeLock().lock();
    try {
        // Global text annotation to attach all the topic annotations to.
        IRI textAnnotation = EnhancementEngineHelper.createTextEnhancement(ci, this);
        metadata.add(new TripleImpl(textAnnotation, org.apache.stanbol.enhancer.servicesapi.rdf.Properties.DC_TYPE, OntologicalClasses.SKOS_CONCEPT));
        for (TopicSuggestion topic : topics) {
            IRI enhancement = EnhancementEngineHelper.createEntityEnhancement(ci, this);
            metadata.add(new TripleImpl(enhancement, org.apache.stanbol.enhancer.servicesapi.rdf.Properties.RDF_TYPE, TechnicalClasses.ENHANCER_TOPICANNOTATION));
            metadata.add(new TripleImpl(enhancement, org.apache.stanbol.enhancer.servicesapi.rdf.Properties.DC_RELATION, textAnnotation));
            // add link to entity
            metadata.add(new TripleImpl(enhancement, org.apache.stanbol.enhancer.servicesapi.rdf.Properties.ENHANCER_ENTITY_REFERENCE, new IRI(topic.conceptUri)));
            metadata.add(new TripleImpl(enhancement, org.apache.stanbol.enhancer.servicesapi.rdf.Properties.ENHANCER_ENTITY_TYPE, OntologicalClasses.SKOS_CONCEPT));
            // add confidence information
            metadata.add(new TripleImpl(enhancement, org.apache.stanbol.enhancer.servicesapi.rdf.Properties.ENHANCER_CONFIDENCE, lf.createTypedLiteral(Double.valueOf(topic.score))));
            // add performance estimates of the classifier if available
            ClassificationReport perf = getPerformanceEstimates(topic.conceptUri);
            if (perf.uptodate) {
                metadata.add(new TripleImpl(enhancement, precision, lf.createTypedLiteral(Double.valueOf(perf.precision))));
                metadata.add(new TripleImpl(enhancement, recall, lf.createTypedLiteral(Double.valueOf(perf.recall))));
                metadata.add(new TripleImpl(enhancement, f1, lf.createTypedLiteral(Double.valueOf(perf.f1))));
            }
            // fetch concept label from the entityhub or a referenced site if available
            Entity entity = entityhub.getEntity(topic.conceptUri);
            if (entity == null) {
                entity = referencedSiteManager.getEntity(topic.conceptUri);
            }
            if (entity != null) {
                Representation representation = entity.getRepresentation();
                // TODO: extract all languages based on some configuration instead of hardcoding English
                Text label = representation.getFirst(NamespaceEnum.skos + "prefLabel", "en", "en-US", "en-GB");
                if (label == null) {
                    label = representation.getFirst(NamespaceEnum.rdfs + "label", "en", "en-US", "en-GB");
                }
                if (label != null) {
                    metadata.add(new TripleImpl(enhancement, org.apache.stanbol.enhancer.servicesapi.rdf.Properties.ENHANCER_ENTITY_LABEL, new PlainLiteralImpl(label.getText())));
                }
            }
        }
    } catch (ClassifierException e) {
        throw new EngineException(e);
    } catch (IllegalArgumentException e) {
        throw new EngineException(e);
    } catch (EntityhubException e) {
        throw new EngineException(e);
    } finally {
        ci.getLock().writeLock().unlock();
    }
}
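A minimal consumer-side sketch (not part of the engine) of how the fise:TopicAnnotations written above could be read back out of the ContentItem metadata; it only relies on the Properties and TechnicalClasses constants already used in the method, with imports omitted as in the snippets:

ci.getLock().readLock().lock();
try {
    // iterate over all enhancements typed as fise:TopicAnnotation
    Iterator<Triple> annotations = ci.getMetadata().filter(null, org.apache.stanbol.enhancer.servicesapi.rdf.Properties.RDF_TYPE, TechnicalClasses.ENHANCER_TOPICANNOTATION);
    while (annotations.hasNext()) {
        BlankNodeOrIRI topicAnnotation = annotations.next().getSubject();
        // read the suggested concept reference(s) of this annotation
        Iterator<Triple> refs = ci.getMetadata().filter(topicAnnotation, org.apache.stanbol.enhancer.servicesapi.rdf.Properties.ENHANCER_ENTITY_REFERENCE, null);
        if (refs.hasNext()) {
            log.info("suggested concept: {}", refs.next().getObject());
        }
    }
} finally {
    ci.getLock().readLock().unlock();
}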
Use of org.apache.clerezza.commons.rdf.IRI in project stanbol by apache.
The class Mapping, method toResource.
/**
 * Converts the parsed value to an RDF {@link RDFTerm} based on the mapping
 * information. Optionally also validates whether the parsed value is valid
 * for the {@link Mapping#ontType ontology type} defined by this mapping.
 * @param value the value to convert
 * @param validate if <code>true</code> the value is validated against the ontology type
 * @return the {@link RDFTerm} or <code>null</code> if the parsed value is
 * <code>null</code> or {@link String#isEmpty() empty}.
 */
protected RDFTerm toResource(String value, boolean validate) {
    // used for date validation
    Metadata dummy = null;
    if (value == null || value.isEmpty()) {
        // ignore null and empty values
        return null;
    }
    RDFTerm object;
    if (ontType == null) {
        object = new PlainLiteralImpl(value);
    } else if (ontType == RDFS.Resource) {
        try {
            if (validate) {
                new URI(value);
            }
            object = new IRI(value);
        } catch (URISyntaxException e) {
            log.warn("Unable to create Reference for value {} (not a valid URI)" + " -> create a literal instead", value);
            object = new PlainLiteralImpl(value);
        }
    } else {
        // typed literal
        Class<?> clazz = Mapping.ONT_TYPE_MAP.get(ontType);
        if (clazz.equals(Date.class)) {
            // parseDate(..) method
            if (dummy == null) {
                dummy = new Metadata();
            }
            // any Property with the Date type could be used here
            dummy.add(DATE.getName(), value);
            // access parseDate(..)
            Date date = dummy.getDate(DublinCore.DATE);
            if (date != null) {
                // now use the Clerezza Literal factory
                object = lf.createTypedLiteral(date);
            } else {
                // fall back to xsd:string
                object = new TypedLiteralImpl(value, XSD.string);
            }
        } else {
            object = new TypedLiteralImpl(value, ontType);
        }
        if (validate && clazz != null && !clazz.equals(Date.class)) {
            // dates do not need to be validated
            try {
                lf.createObject(clazz, (Literal) object);
            } catch (NoConvertorException e) {
                log.info("Unable to validate typed literals of type {} because " + "there is no converter for Class {} registered with Clerezza", ontType, clazz);
            } catch (InvalidLiteralTypeException e) {
                log.info("The value '{}' is not valid for dataType {}! " + "Create literal with type 'xsd:string' instead", value, ontType);
                object = new TypedLiteralImpl(value, XSD.string);
            }
        }
        // else no validation needed
    }
    if (converter != null) {
        object = converter.convert(object);
    }
    return object;
}
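To make the branches above concrete, a rough sketch of the expected results, assuming a Mapping instance named mapping and access from a subclass or the same package (the method is protected); the example values are made up:

// ontType == null          -> plain literal: new PlainLiteralImpl("Some value")
// ontType == RDFS.Resource -> new IRI("http://example.org/doc") for a valid URI,
//                             otherwise a plain literal with the raw string
// ontType == XSD.dateTime  -> a typed literal created via Tika's date parsing,
//                             or a literal typed xsd:string if the date cannot be parsed
RDFTerm term = mapping.toResource("2009-10-06T13:15:30Z", true);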
Use of org.apache.clerezza.commons.rdf.IRI in project stanbol by apache.
The class UIMAToTriples, method computeEnhancements.
public void computeEnhancements(ContentItem ci) throws EngineException {
    FeatureStructureListHolder holder;
    LiteralFactory literalFactory = LiteralFactory.getInstance();
    try {
        IRI uimaIRI = new IRI(uimaUri);
        logger.info(new StringBuilder("Trying to load holder for ref:").append(uimaUri).toString());
        holder = ci.getPart(uimaIRI, FeatureStructureListHolder.class);
        for (String source : sourceNames) {
            logger.info(new StringBuilder("Processing UIMA source:").append(source).toString());
            List<FeatureStructure> sourceList = holder.getFeatureStructureList(source);
            if (sourceList != null) {
                logger.info(new StringBuilder("UIMA source:").append(source).append(" contains ").append(sourceList.size()).append(" annotations.").toString());
            } else {
                logger.info(new StringBuilder("Source list is null:").append(source).toString());
                continue;
            }
            for (FeatureStructure fs : sourceList) {
                String typeName = fs.getTypeName();
                logger.debug(new StringBuilder("Checking ").append(typeName).toString());
                if (tnfs.checkFeatureStructureAllowed(typeName, fs.getFeatures())) {
                    logger.debug(new StringBuilder("Adding ").append(typeName).toString());
                    IRI textAnnotation = EnhancementEngineHelper.createTextEnhancement(ci, this);
                    Graph metadata = ci.getMetadata();
                    String uriRefStr = uimaUri + ":" + typeName;
                    if (mappings.containsKey(typeName)) {
                        uriRefStr = mappings.get(typeName);
                    }
                    metadata.add(new TripleImpl(textAnnotation, DC_TYPE, new IRI(uriRefStr)));
                    if (fs.getFeature("begin") != null) {
                        metadata.add(new TripleImpl(textAnnotation, ENHANCER_START, literalFactory.createTypedLiteral(fs.getFeature("begin").getValueAsInteger())));
                    }
                    if (fs.getFeature("end") != null) {
                        metadata.add(new TripleImpl(textAnnotation, ENHANCER_END, literalFactory.createTypedLiteral(fs.getFeature("end").getValueAsInteger())));
                    }
                    if (fs.getCoveredText() != null && !fs.getCoveredText().isEmpty()) {
                        metadata.add(new TripleImpl(textAnnotation, ENHANCER_SELECTED_TEXT, new PlainLiteralImpl(fs.getCoveredText())));
                    }
                    for (Feature f : fs.getFeatures()) {
                        if (!f.getName().equals("begin") && !f.getName().equals("end") && tnfs.checkFeatureToConvert(typeName, f)) {
                            String predRefStr = uimaUri + ":" + f.getName();
                            if (mappings.containsKey(f.getName())) {
                                predRefStr = mappings.get(f.getName());
                            }
                            IRI predicate = new IRI(predRefStr);
                            metadata.add(new TripleImpl(textAnnotation, predicate, new PlainLiteralImpl(f.getValueAsString())));
                        }
                    }
                }
            }
        }
    } catch (NoSuchPartException e) {
        logger.error(new StringBuilder("No UIMA results found with ref:").append(uimaUri).toString(), e);
    }
}
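For context, a hypothetical mappings table as consulted above: keys are UIMA type or feature names, values are the IRIs written into the enhancement metadata instead of the default uimaUri + ":" + name form. The concrete names and URIs below are made up for illustration only.

Map<String, String> mappings = new HashMap<String, String>();
mappings.put("SomeUimaAnnotationType", "http://example.org/ontology#SomeAnnotation");
mappings.put("someFeatureName", "http://example.org/ontology#someProperty");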