Use of org.apache.clerezza.commons.rdf.Graph in project stanbol by apache.
The class AbstractEnhancerResource, method enhance().
/**
 * Enhances the parsed ContentItem.
 * @param ci the content item to enhance
 * @param reqProp the request properties or <code>null</code> if none
 * @throws EnhancementException if the enhancement chain fails
 */
protected void enhance(ContentItem ci, Map<String, Object> reqProp) throws EnhancementException {
    if (jobManager != null) {
        jobManager.enhanceContent(ci, getChain());
    }
    Graph graph = ci.getMetadata();
    Boolean includeExecutionMetadata = RequestPropertiesHelper.isIncludeExecutionMetadata(reqProp);
    if (includeExecutionMetadata != null && includeExecutionMetadata.booleanValue()) {
        try {
            // merge the chain execution metadata into the main metadata graph
            graph.addAll(ci.getPart(ExecutionMetadata.CHAIN_EXECUTION, Graph.class));
        } catch (NoSuchPartException e) {
            // no execution metadata available
        }
    }
}
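The merge step above can also be applied on its own. A minimal sketch, assuming the usual Stanbol servicesapi import locations; the class and helper name here are ours, not Stanbol API:

import org.apache.clerezza.commons.rdf.Graph;
import org.apache.stanbol.enhancer.servicesapi.ContentItem;
import org.apache.stanbol.enhancer.servicesapi.NoSuchPartException;
import org.apache.stanbol.enhancer.servicesapi.rdf.ExecutionMetadata;

public class ExecutionMetadataMerge {

    /**
     * Copies the chain-execution triples into the main metadata graph,
     * mirroring the guarded merge inside enhance(..) above.
     */
    static void mergeExecutionMetadata(ContentItem ci) {
        Graph metadata = ci.getMetadata();
        try {
            metadata.addAll(ci.getPart(ExecutionMetadata.CHAIN_EXECUTION, Graph.class));
        } catch (NoSuchPartException e) {
            // the chain did not record execution metadata; nothing to merge
        }
    }
}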
Use of org.apache.clerezza.commons.rdf.Graph in project stanbol by apache.
The class ContentItemResource, method getPlacesAsJSON().
/**
 * @return an RDF/JSON description of the places for the word map widget
 */
public String getPlacesAsJSON() throws ParseException, UnsupportedEncodingException {
    Graph g = new IndexedGraph();
    LiteralFactory lf = LiteralFactory.getInstance();
    Graph metadata = contentItem.getMetadata();
    for (EntityExtractionSummary p : getPlaceOccurrences()) {
        EntitySuggestion bestGuess = p.getBestGuess();
        if (bestGuess == null) {
            continue;
        }
        IRI uri = new IRI(bestGuess.getUri());
        Iterator<Triple> latitudes = metadata.filter(uri, GEO_LAT, null);
        if (latitudes.hasNext()) {
            g.add(latitudes.next());
        }
        Iterator<Triple> longitudes = metadata.filter(uri, GEO_LONG, null);
        if (longitudes.hasNext()) {
            g.add(longitudes.next());
            g.add(new TripleImpl(uri, Properties.RDFS_LABEL, lf.createTypedLiteral(bestGuess.getLabel())));
        }
    }
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    serializer.serialize(out, g, SupportedFormat.RDF_JSON);
    return out.toString("utf-8");
}
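The filter-and-serialize pattern works the same outside the resource class. A standalone sketch, assuming Clerezza's simple in-memory Graph implementation and an RDF/JSON serialization provider on the classpath; the class name and example IRIs are illustrative:

import java.io.ByteArrayOutputStream;
import org.apache.clerezza.commons.rdf.Graph;
import org.apache.clerezza.commons.rdf.IRI;
import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;
import org.apache.clerezza.rdf.core.serializedform.Serializer;
import org.apache.clerezza.rdf.core.serializedform.SupportedFormat;

public class PlaceJsonSketch {

    public static void main(String[] args) throws Exception {
        Graph g = new SimpleGraph();
        IRI place = new IRI("http://dbpedia.org/resource/Paris"); // example resource
        IRI rdfsLabel = new IRI("http://www.w3.org/2000/01/rdf-schema#label");
        g.add(new TripleImpl(place, rdfsLabel, new PlainLiteralImpl("Paris")));
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        // requires an RDF/JSON serialization provider on the classpath
        Serializer.getInstance().serialize(out, g, SupportedFormat.RDF_JSON);
        System.out.println(out.toString("utf-8"));
    }
}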
Use of org.apache.clerezza.commons.rdf.Graph in project stanbol by apache.
The class ContentItemReaderWriterTest, method createTestContentItem().
/**
 * Creates the shared test ContentItem used by the MIME multipart
 * reader/writer tests.
 */
@BeforeClass
public static void createTestContentItem() throws IOException {
    contentItem = ciFactory.createContentItem(new IRI("urn:test"), new StringSource(
        "<html>\n" +
        " <body>\n" +
        " This is a <b>ContentItem</b> to <i>Mime Multipart</i> test!\n" +
        " </body>\n" +
        "</html>", "text/html"));
    RuntimeDelegate.setInstance(new RuntimeDelegateImpl());
    contentItem.addPart(new IRI("run:text:text"),
        ciFactory.createBlob(new StringSource("This is a ContentItem to Mime Multipart test!")));
    contentItem.getMetadata().add(
        new TripleImpl(new IRI("urn:test"), RDF.type, new IRI("urn:types:Document")));
    // mark the main content as parsed and also that all
    // contents and content parts should be included
    Map<String, Object> properties = initRequestPropertiesContentPart(contentItem);
    properties.put(PARSED_CONTENT_URIS, Collections.singleton(contentItem.getPartUri(0).getUnicodeString()));
    properties.put(OUTPUT_CONTENT, Collections.singleton("*/*"));
    properties.put(OUTPUT_CONTENT_PART, Collections.singleton("*"));
    properties.put(RDF_FORMAT, "application/rdf+xml");
    Graph em = initExecutionMetadataContentPart(contentItem);
    BlankNodeOrIRI ep = createExecutionPlan(em, "testChain", null);
    writeExecutionNode(em, ep, "testEngine", true, null, null);
    initExecutionMetadata(em, em, contentItem.getUri(), "testChain", false);
    ciWriter = new ContentItemWriter(Serializer.getInstance());
    ciReader = new ContentItemReader() {

        @Override
        protected Parser getParser() {
            return Parser.getInstance();
        }

        @Override
        protected ContentItemFactory getContentItemFactory() {
            return ciFactory;
        }
    };
}
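Outside an OSGi container, a ContentItem with extra parts can be built the same way. A minimal sketch; obtaining the factory via InMemoryContentItemFactory.getInstance() is an assumption about the non-OSGi setup, and the URIs are examples:

import java.io.IOException;
import org.apache.clerezza.commons.rdf.IRI;
import org.apache.stanbol.enhancer.contentitem.inmemory.InMemoryContentItemFactory;
import org.apache.stanbol.enhancer.servicesapi.ContentItem;
import org.apache.stanbol.enhancer.servicesapi.ContentItemFactory;
import org.apache.stanbol.enhancer.servicesapi.impl.StringSource;

public class ContentItemSetupSketch {

    public static void main(String[] args) throws IOException {
        ContentItemFactory factory = InMemoryContentItemFactory.getInstance();
        ContentItem ci = factory.createContentItem(new IRI("urn:example"),
            new StringSource("<html><body>Hello Multipart!</body></html>", "text/html"));
        // attach a plain-text view of the main content as an additional part
        ci.addPart(new IRI("urn:example:text"),
            factory.createBlob(new StringSource("Hello Multipart!")));
        System.out.println("main content part: " + ci.getPartUri(0).getUnicodeString());
    }
}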
Use of org.apache.clerezza.commons.rdf.Graph in project stanbol by apache.
The class ReasoningServiceExecutor, method save().
/**
 * Saves data to the triple store.
 *
 * @param data the data to store (a Jena Model or an OWLOntology)
 * @param targetGraphID the ID of the graph to store the data in
 * @throws IOException if the data cannot be saved
 */
protected void save(Object data, String targetGraphID) throws IOException {
    log.debug("Attempt saving in target graph {}", targetGraphID);
    final long startSave = System.currentTimeMillis();
    Graph mGraph;
    IRI graphIRI = new IRI(targetGraphID);
    // tcManager must be synchronized
    synchronized (tcManager) {
        try {
            // check whether the graph already exists
            mGraph = tcManager.getGraph(graphIRI);
        } catch (NoSuchEntityException e) {
            mGraph = tcManager.createGraph(graphIRI);
        }
    }
    // lock the graph before writing to it
    Lock writeLock = mGraph.getLock().writeLock();
    boolean saved = false;
    if (data instanceof Model) {
        Graph m = JenaToClerezzaConverter.jenaModelToClerezzaGraph((Model) data);
        writeLock.lock();
        try {
            saved = mGraph.addAll(m);
        } finally {
            writeLock.unlock();
        }
    } else if (data instanceof OWLOntology) {
        Graph m = (Graph) OWLAPIToClerezzaConverter.owlOntologyToClerezzaGraph((OWLOntology) data);
        writeLock.lock();
        try {
            saved = mGraph.addAll(m);
        } finally {
            writeLock.unlock();
        }
    }
    if (!saved) {
        throw new IOException("Cannot save the result in Clerezza!");
    }
    final long endSave = System.currentTimeMillis();
    log.debug("Saved in {}ms", endSave - startSave);
}
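The read side follows the same locking discipline. A minimal sketch of a hypothetical helper that counts triples under a read lock, using only the TcManager and Graph calls shown above:

import java.util.concurrent.locks.Lock;
import org.apache.clerezza.commons.rdf.Graph;
import org.apache.clerezza.commons.rdf.IRI;
import org.apache.clerezza.rdf.core.access.TcManager;

public class GraphReadSketch {

    /**
     * Counts the triples of a stored graph under a read lock; the read-side
     * counterpart to the write-locked save(..) above.
     */
    static int countTriples(TcManager tcManager, String graphId) {
        Graph g;
        // same synchronization discipline as in save(..)
        synchronized (tcManager) {
            g = tcManager.getGraph(new IRI(graphId)); // NoSuchEntityException if absent
        }
        Lock readLock = g.getLock().readLock();
        readLock.lock();
        try {
            return g.size();
        } finally {
            readLock.unlock();
        }
    }
}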
Use of org.apache.clerezza.commons.rdf.Graph in project stanbol by apache.
The class SentimentSummarizationEngine, method writeSentiment().
private void writeSentiment(ContentItem ci, Section section, List<SentimentPhrase> sectionPhrases) {
    if (section == null || sectionPhrases == null || sectionPhrases.isEmpty()) {
        // nothing to do
        return;
    }
    IRI enh = createTextEnhancement(ci, this);
    Graph metadata = ci.getMetadata();
    if (section.getType() == SpanTypeEnum.Sentence) {
        // TODO: use the new fise:TextAnnotation model
        // add start/end positions
        metadata.add(new TripleImpl(enh, ENHANCER_START, lf.createTypedLiteral(section.getStart())));
        metadata.add(new TripleImpl(enh, ENHANCER_END, lf.createTypedLiteral(section.getEnd())));
    }
    // summarize the sentiments of this section: aggregate the positive and
    // negative phrase sentiments separately via the quadratic mean (RMS)
    double positiveSent = 0.0;
    int positiveCount = 0;
    double negativeSent = 0.0;
    int negativeCount = 0;
    for (SentimentPhrase sentPhrase : sectionPhrases) {
        if (sentPhrase.getNegativeSentiment() != null) {
            double neg = sentPhrase.getNegativeSentiment();
            negativeSent = negativeSent + (neg * neg);
            negativeCount++;
        }
        if (sentPhrase.getPositiveSentiment() != null) {
            double pos = sentPhrase.getPositiveSentiment();
            positiveSent = positiveSent + (pos * pos);
            positiveCount++;
        }
    }
    if (positiveCount > 0) {
        positiveSent = Math.sqrt(positiveSent / (double) positiveCount);
        metadata.add(new TripleImpl(enh, POSITIVE_SENTIMENT_PROPERTY, lf.createTypedLiteral(Double.valueOf(positiveSent))));
    }
    if (negativeCount > 0) {
        negativeSent = Math.sqrt(negativeSent / (double) negativeCount) * -1;
        metadata.add(new TripleImpl(enh, NEGATIVE_SENTIMENT_PROPERTY, lf.createTypedLiteral(Double.valueOf(negativeSent))));
    }
    metadata.add(new TripleImpl(enh, SENTIMENT_PROPERTY, lf.createTypedLiteral(Double.valueOf(negativeSent + positiveSent))));
    // add the Sentiment type as well as the type of the SSO ontology
    metadata.add(new TripleImpl(enh, DC_TYPE, SENTIMENT_TYPE));
    IRI ssoType = NIFHelper.SPAN_TYPE_TO_SSO_TYPE.get(section.getType());
    if (ssoType != null) {
        metadata.add(new TripleImpl(enh, DC_TYPE, ssoType));
    }
    if (section.getType() == SpanTypeEnum.Text) {
        metadata.add(new TripleImpl(enh, DC_TYPE, DOCUMENT_SENTIMENT_TYPE));
    }
}
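The section score above is the quadratic mean (root mean square) of the phrase sentiments, computed separately for the positive and negative sides and negated on the negative side. A self-contained sketch of that aggregation (class and method names are ours):

public class SentimentAverageSketch {

    /** Quadratic mean (RMS): sqrt((v1^2 + ... + vn^2) / n). */
    static double quadraticMean(double[] values) {
        if (values.length == 0) {
            return 0.0;
        }
        double sumOfSquares = 0.0;
        for (double v : values) {
            sumOfSquares += v * v;
        }
        return Math.sqrt(sumOfSquares / values.length);
    }

    public static void main(String[] args) {
        // three positive phrase sentiments, as aggregated in writeSentiment(..)
        System.out.println(quadraticMean(new double[] { 0.5, 0.8, 0.9 })); // ~0.7528
        // negative side: negate the quadratic mean of the absolute values
        System.out.println(-quadraticMean(new double[] { 0.4, 0.6 })); // ~-0.5099
    }
}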