use of org.apache.clerezza.commons.rdf.BlankNode in project stanbol by apache.
the class ExecutionMetadataHelper method createEngineExecution.
public static BlankNodeOrIRI createEngineExecution(Graph graph, BlankNodeOrIRI chainExecution, BlankNodeOrIRI executionNode) {
    BlankNodeOrIRI node = new BlankNode();
    graph.add(new TripleImpl(node, RDF_TYPE, EXECUTION));
    graph.add(new TripleImpl(node, RDF_TYPE, ENGINE_EXECUTION));
    graph.add(new TripleImpl(node, EXECUTION_PART, chainExecution));
    graph.add(new TripleImpl(node, EXECUTION_NODE, executionNode));
    graph.add(new TripleImpl(node, STATUS, STATUS_SCHEDULED));
    return node;
}
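A minimal usage sketch for this helper, assuming the execution-metadata graph is a Clerezza SimpleGraph, that ExecutionMetadataHelper from the snippet above is on the classpath, and that the chain-execution and execution-plan nodes were created beforehand (the wrapper class and variable names are illustrative, not part of Stanbol):

import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
import org.apache.clerezza.commons.rdf.Graph;
import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;

public class EngineExecutionSketch {

    // Records that an engine execution is scheduled as part of a chain
    // execution; both parent nodes would normally come from the execution
    // metadata built earlier for the chain.
    public static BlankNodeOrIRI schedule(BlankNodeOrIRI chainExecution,
            BlankNodeOrIRI executionNode) {
        Graph em = new SimpleGraph();
        return ExecutionMetadataHelper.createEngineExecution(
                em, chainExecution, executionNode);
    }
}

Because the helper returns the freshly minted BlankNode, the caller can keep it around to update the em:status of the execution later on.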
use of org.apache.clerezza.commons.rdf.BlankNode in project stanbol by apache.
the class ExecutionPlanHelper method createExecutionPlan.
/**
 * Creates an ExecutionPlan for the parsed chainName in the parsed ImmutableGraph
 * @param graph the graph
 * @param chainName the chain name
 * @param enhProps the map with the enhancement properties defined for the
 * chain or <code>null</code> if none
 * @return the node representing the ex:ExecutionPlan
 * @since 0.12.1
 */
public static BlankNodeOrIRI createExecutionPlan(Graph graph, String chainName, Map<String, Object> enhProps) {
    if (graph == null) {
        throw new IllegalArgumentException("The parsed Graph MUST NOT be NULL!");
    }
    if (chainName == null || chainName.isEmpty()) {
        throw new IllegalArgumentException("The parsed Chain name MUST NOT be NULL nor empty!");
    }
    BlankNodeOrIRI node = new BlankNode();
    graph.add(new TripleImpl(node, RDF_TYPE, EXECUTION_PLAN));
    graph.add(new TripleImpl(node, CHAIN, new PlainLiteralImpl(chainName)));
    writeEnhancementProperties(graph, node, null, enhProps);
    return node;
}
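A hedged usage sketch: creating an execution-plan node for a chain named "default" with no enhancement properties (null is explicitly allowed per the Javadoc above). The wrapper class is illustrative, and ExecutionPlanHelper is assumed to be on the classpath:

import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
import org.apache.clerezza.commons.rdf.Graph;
import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;

public class ExecutionPlanSketch {

    public static BlankNodeOrIRI buildDefaultPlan() {
        Graph ep = new SimpleGraph();
        // null enhancement properties are permitted by the contract above
        return ExecutionPlanHelper.createExecutionPlan(ep, "default", null);
    }
}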
use of org.apache.clerezza.commons.rdf.BlankNode in project stanbol by apache.
the class ClerezzaYard method extractRepresentation.
/**
 * Recursive method internally doing all the work for
 * {@link #createRepresentationGraph(IRI, Graph)}
 * @param source The graph to extract the Representation from (source)
 * @param target The graph to store the extracted triples in (target)
 * @param node the current node; changes in recursive calls as the method
 * follows bNodes
 * @param visited holds all visited BlankNodes to avoid cycles. Other nodes
 * need not be added because this implementation only follows outgoing
 * relations if the object is a {@link BlankNode} instance.
 * @return the target graph (for convenience)
 */
private Graph extractRepresentation(Graph source, Graph target, BlankNodeOrIRI node, Set<BlankNode> visited) {
    //we need all the outgoing relations and also want to follow bNodes until
    //the next IRI. However we are not interested in incoming relations!
    Iterator<Triple> outgoing = source.filter(node, null, null);
    while (outgoing.hasNext()) {
        Triple triple = outgoing.next();
        target.add(triple);
        RDFTerm object = triple.getObject();
        if (object instanceof BlankNode && !visited.contains(object)) {
            //add first and then follow because there might be a triple such as
            // bnode1 <urn:someProperty> bnode1
            visited.add((BlankNode) object);
            extractRepresentation(source, target, (BlankNodeOrIRI) object, visited);
        }
    }
    return target;
}
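Because extractRepresentation is private, callers reach it through createRepresentationGraph; the recursion pattern itself can be restated standalone. The sketch below is a simplified, hypothetical re-statement (not Stanbol API) that copies a node's outgoing triples and follows blank-node objects while the visited set guards against cycles:

import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;
import org.apache.clerezza.commons.rdf.BlankNode;
import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
import org.apache.clerezza.commons.rdf.Graph;
import org.apache.clerezza.commons.rdf.RDFTerm;
import org.apache.clerezza.commons.rdf.Triple;
import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;

public class RepresentationSketch {

    public static Graph extract(Graph source, BlankNodeOrIRI node) {
        return extract(source, new SimpleGraph(), node, new HashSet<BlankNode>());
    }

    private static Graph extract(Graph source, Graph target,
            BlankNodeOrIRI node, Set<BlankNode> visited) {
        Iterator<Triple> outgoing = source.filter(node, null, null);
        while (outgoing.hasNext()) {
            Triple triple = outgoing.next();
            target.add(triple);
            RDFTerm object = triple.getObject();
            // Set.add returns false for already-visited nodes, so a
            // bnode cycle terminates the recursion.
            if (object instanceof BlankNode && visited.add((BlankNode) object)) {
                extract(source, target, (BlankNodeOrIRI) object, visited);
            }
        }
        return target;
    }
}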
use of org.apache.clerezza.commons.rdf.BlankNode in project stanbol by apache.
the class UserResource method createUser.
/**
 * Creates a new user with the specified user name
 *
 * @param newUserName
 * @return user node in system graph
 */
private GraphNode createUser(String newUserName) {
    BlankNode subject = new BlankNode();
    GraphNode userNode = new GraphNode(subject, systemGraph);
    userNode.addProperty(RDF.type, FOAF.Agent);
    userNode.addProperty(PLATFORM.userName, new PlainLiteralImpl(newUserName));
    addRole(userNode, "BasePermissionsRole");
    return userNode;
}
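createUser is private to UserResource, but its core pattern, a GraphNode wrapping a fresh BlankNode in a mutable graph, works against any Clerezza Graph. A minimal sketch, using FOAF.name in place of Stanbol's PLATFORM.userName so it stays free of Stanbol-specific ontologies (the class name is illustrative):

import org.apache.clerezza.commons.rdf.BlankNode;
import org.apache.clerezza.commons.rdf.Graph;
import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
import org.apache.clerezza.rdf.ontologies.FOAF;
import org.apache.clerezza.rdf.ontologies.RDF;
import org.apache.clerezza.rdf.utils.GraphNode;

public class UserNodeSketch {

    public static GraphNode newAgent(Graph graph, String userName) {
        GraphNode userNode = new GraphNode(new BlankNode(), graph);
        userNode.addProperty(RDF.type, FOAF.Agent);
        // FOAF.name substitutes for PLATFORM.userName in this sketch
        userNode.addProperty(FOAF.name, new PlainLiteralImpl(userName));
        return userNode;
    }
}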
use of org.apache.clerezza.commons.rdf.BlankNode in project stanbol by apache.
the class MetaxaEngine method computeEnhancements.
public void computeEnhancements(ContentItem ci) throws EngineException {
    // get model from the extraction
    URIImpl docId;
    Model m = null;
    ci.getLock().readLock().lock();
    try {
        docId = new URIImpl(ci.getUri().getUnicodeString());
        m = this.extractor.extract(ci.getStream(), docId, ci.getMimeType());
    } catch (ExtractorException e) {
        throw new EngineException("Error while processing ContentItem " + ci.getUri() + " with Metaxa", e);
    } catch (IOException e) {
        throw new EngineException("Error while processing ContentItem " + ci.getUri() + " with Metaxa", e);
    } finally {
        ci.getLock().readLock().unlock();
    }
    // the extracted plain text from the model
    if (null == m) {
        log.debug("Unable to process ContentItem {} (mime type {}) with Metaxa", ci.getUri(), ci.getMimeType());
        return;
    }
    ContentSink plainTextSink;
    try {
        plainTextSink = ciFactory.createContentSink("text/plain");
    } catch (IOException e) {
        m.close();
        throw new EngineException("Unable to initialise Blob for storing the plain text content", e);
    }
    HashMap<BlankNode, BlankNode> blankNodeMap = new HashMap<BlankNode, BlankNode>();
    RDF2GoUtils.urifyBlankNodes(m);
    ClosableIterator<Statement> it = m.iterator();
    BufferedWriter out = new BufferedWriter(new OutputStreamWriter(plainTextSink.getOutputStream(), UTF8));
    //used to detect if some text was extracted
    boolean textExtracted = false;
    try {
        //first add to a temporary graph
        Graph g = new SimpleGraph();
        while (it.hasNext()) {
            Statement oneStmt = it.next();
            //the plain text Blob!
            if (oneStmt.getSubject().equals(docId) && oneStmt.getPredicate().equals(NIE_PLAINTEXT_PROPERTY)) {
                String text = oneStmt.getObject().toString();
                if (text != null && !text.isEmpty()) {
                    try {
                        out.write(text);
                    } catch (IOException e) {
                        throw new EngineException("Unable to write extracted plain text to Blob (blob impl: " + plainTextSink.getBlob().getClass() + ")", e);
                    }
                    textExtracted = true;
                    if (includeText) {
                        BlankNodeOrIRI subject = (BlankNodeOrIRI) asClerezzaResource(oneStmt.getSubject(), blankNodeMap);
                        IRI predicate = (IRI) asClerezzaResource(oneStmt.getPredicate(), blankNodeMap);
                        RDFTerm object = asClerezzaResource(oneStmt.getObject(), blankNodeMap);
                        g.add(new TripleImpl(subject, predicate, object));
                    }
                }
            } else {
                //add metadata to the metadata of the contentItem
                BlankNodeOrIRI subject = (BlankNodeOrIRI) asClerezzaResource(oneStmt.getSubject(), blankNodeMap);
                IRI predicate = (IRI) asClerezzaResource(oneStmt.getPredicate(), blankNodeMap);
                RDFTerm object = asClerezzaResource(oneStmt.getObject(), blankNodeMap);
                if (null != subject && null != predicate && null != object) {
                    Triple t = new TripleImpl(subject, predicate, object);
                    g.add(t);
                    log.debug("added " + t.toString());
                }
            }
        }
        //add the extracted triples to the metadata of the ContentItem
        ci.getLock().writeLock().lock();
        try {
            ci.getMetadata().addAll(g);
            g = null;
        } finally {
            ci.getLock().writeLock().unlock();
        }
    } finally {
        it.close();
        m.close();
        IOUtils.closeQuietly(out);
    }
    if (textExtracted) {
        //add plain text to the content item
        IRI blobUri = new IRI("urn:metaxa:plain-text:" + randomUUID());
        ci.addPart(blobUri, plainTextSink.getBlob());
    }
}
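The blankNodeMap above exists so that each source blank node maps to exactly one Clerezza BlankNode across all converted statements; without that, the copied triples would lose their connections. In MetaxaEngine, asClerezzaResource handles this alongside IRI and literal conversion; the sketch below isolates just the blank-node mapping step as a hypothetical, simplified helper (Object is used as the key type to keep it independent of the RDF2Go classes):

import java.util.HashMap;
import java.util.Map;
import org.apache.clerezza.commons.rdf.BlankNode;

public class BNodeMapper {

    // keyed on the source library's blank-node object
    private final Map<Object, BlankNode> blankNodeMap = new HashMap<Object, BlankNode>();

    // Returns the same Clerezza BlankNode every time the same source
    // blank node is passed in, minting a fresh one on first sight.
    public BlankNode toClerezza(Object sourceBlankNode) {
        BlankNode mapped = blankNodeMap.get(sourceBlankNode);
        if (mapped == null) {
            mapped = new BlankNode();
            blankNodeMap.put(sourceBlankNode, mapped);
        }
        return mapped;
    }
}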