Use of org.apache.stanbol.entityhub.model.clerezza.RdfValueFactory in project stanbol by apache.
The class RepresentationReader, method parseFromContent.
public Map<String, Representation> parseFromContent(RequestData content, MediaType acceptedMediaType) {
    // (3) Parse the Representation(s) from the entity stream
    if (content.getMediaType().isCompatible(MediaType.APPLICATION_JSON_TYPE)) {
        // parse from JSON
        throw new UnsupportedOperationException("Parsing of JSON not yet implemented :(");
    } else if (isSupported(content.getMediaType())) {
        // parse from an RDF serialisation
        RdfValueFactory valueFactory = RdfValueFactory.getInstance();
        Map<String, Representation> representations = new HashMap<String, Representation>();
        Set<BlankNodeOrIRI> processed = new HashSet<BlankNodeOrIRI>();
        Graph graph = new IndexedGraph();
        try {
            parser.parse(graph, content.getEntityStream(), content.getMediaType().toString());
        } catch (UnsupportedParsingFormatException e) {
            // String acceptedMediaType = httpHeaders.getFirst("Accept");
            // Throw an internal server error, because isReadable(..) already checks for
            // supported types; getting an unsupported format here therefore indicates a
            // configuration error on the server (e.g. a missing bundle providing the parser).
            String message = "Unable to create the Parser for the supported format " + content.getMediaType() + " (" + e + ")";
            log.error(message, e);
            throw new WebApplicationException(Response.status(Status.INTERNAL_SERVER_ERROR).entity(message).header(HttpHeaders.ACCEPT, acceptedMediaType).build());
        } catch (RuntimeException e) {
            // NOTE: Clerezza does not seem to provide specific exceptions on
            // parsing errors, hence the catch-all for RuntimeException.
            String message = "Unable to parse the provided RDF data (format: " + content.getMediaType() + ", message: " + e.getMessage() + ")";
            log.error(message, e);
            throw new WebApplicationException(Response.status(Status.BAD_REQUEST).entity(message).header(HttpHeaders.ACCEPT, acceptedMediaType).build());
        }
        for (Iterator<Triple> st = graph.iterator(); st.hasNext(); ) {
            BlankNodeOrIRI resource = st.next().getSubject();
            if (resource instanceof IRI && processed.add(resource)) {
                // build a new representation
                representations.put(((IRI) resource).getUnicodeString(), valueFactory.createRdfRepresentation((IRI) resource, graph));
            }
        }
        return representations;
    } else {
        // unsupported media type
        String message = String.format("Parsed Content-Type '%s' is not one of the supported %s", content.getMediaType(), supportedMediaTypes);
        log.info("Bad Request: {}", message);
        throw new WebApplicationException(Response.status(Status.BAD_REQUEST).entity(message).header(HttpHeaders.ACCEPT, acceptedMediaType).build());
    }
}
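The map returned above holds graph-backed RdfRepresentation instances, so reading parsed values back only needs the plain Representation API. A minimal sketch, reusing the imports of the snippet above; the helper name and the rdfs:label field are illustrative and not part of RepresentationReader:

private void logParsedLabels(Map<String, Representation> representations) {
    for (Representation rep : representations.values()) {
        // getFirst(..) returns the first value of the field, or null if the field is unset
        Object label = rep.getFirst("http://www.w3.org/2000/01/rdf-schema#label");
        log.debug("parsed Representation {} (label: {})", rep.getId(), label);
    }
}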
Use of org.apache.stanbol.entityhub.model.clerezza.RdfValueFactory in project stanbol by apache.
The class ClerezzaYard, method getRepresentation.
/**
 * Internally used to create Representations for URIs
 * @param uri the URI
 * @param check if <code>false</code>, no check is performed whether the URI
 * refers to an RDFTerm in the graph that is of type {@link #REPRESENTATION}
 * @return the Representation, or <code>null</code> if not present
 */
protected final Representation getRepresentation(IRI uri, boolean check) {
    final Lock readLock = readLockGraph();
    try {
        if (!check || isRepresentation(uri)) {
            Graph nodeGraph = createRepresentationGraph(uri, graph);
            // Remove the triple internally used to represent an empty Representation.
            // This only removes something if the Representation is empty, but a
            // check would take longer than the remove call itself.
            nodeGraph.remove(new TripleImpl(uri, MANAGED_REPRESENTATION, TRUE_LITERAL));
            return ((RdfValueFactory) getValueFactory()).createRdfRepresentation(uri, nodeGraph);
        } else {
            // not found
            return null;
        }
    } finally {
        if (readLock != null) {
            readLock.unlock();
        }
    }
}
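createRepresentationGraph(uri, graph) is a ClerezzaYard internal that collects the triples describing the node into a separate graph. Ignoring details such as blank-node closure, it behaves roughly like the following sketch; this is an illustration, not the actual implementation:

private Graph copyOutgoingTriples(IRI uri, Graph source) {
    Graph nodeGraph = new IndexedGraph();
    // filter(subject, null, null) iterates all triples with the given subject
    Iterator<Triple> outgoing = source.filter(uri, null, null);
    while (outgoing.hasNext()) {
        nodeGraph.add(outgoing.next());
    }
    return nodeGraph;
}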
Use of org.apache.stanbol.entityhub.model.clerezza.RdfValueFactory in project stanbol by apache.
The class RdfResultListTest, method testRdfResultSorting.
/**
 * Providing a sorted iteration over query results stored in an RDF
 * graph is not trivial. Therefore this test validates the sort order
 * of the results returned by RdfQueryResultList.
 */
@Test
public void testRdfResultSorting() {
    SortedMap<Double, RdfRepresentation> sorted = new TreeMap<Double, RdfRepresentation>();
    Graph resultGraph = new IndexedGraph();
    RdfValueFactory vf = new RdfValueFactory(resultGraph);
    IRI resultListNode = new IRI(RdfResourceEnum.QueryResultSet.getUri());
    IRI resultProperty = new IRI(RdfResourceEnum.queryResult.getUri());
    for (int i = 0; i < 100; i++) {
        Double rank;
        do {
            // avoid duplicate keys
            rank = Math.random();
        } while (sorted.containsKey(rank));
        RdfRepresentation r = vf.createRepresentation("urn:sortTest:rep." + i);
        // link the representation with the query result set
        resultGraph.add(new TripleImpl(resultListNode, resultProperty, r.getNode()));
        r.set(RdfResourceEnum.resultScore.getUri(), rank);
        sorted.put(rank, r);
    }
    RdfQueryResultList resultList = new RdfQueryResultList(new FieldQueryImpl(), resultGraph);
    if (log.isDebugEnabled()) {
        log.debug("---DEBUG Sorting ---");
        for (Iterator<Representation> it = resultList.iterator(); it.hasNext(); ) {
            Representation r = it.next();
            log.debug("{}: {}", r.getFirst(RdfResourceEnum.resultScore.getUri()), r.getId());
        }
    }
    log.debug("---ASSERT Sorting ---");
    for (Iterator<Representation> it = resultList.iterator(); it.hasNext(); ) {
        Representation r = it.next();
        Double lastkey = sorted.lastKey();
        Representation last = sorted.get(lastkey);
        Assert.assertEquals("score: " + r.getFirst(RdfResourceEnum.resultScore.getUri()) + " of Representation " + r.getId() + " is not as expected " + last.getFirst(RdfResourceEnum.resultScore.getUri()) + " of Representation " + last.getId() + "!", r, last);
        sorted.remove(lastkey);
    }
    Assert.assertTrue(sorted.isEmpty());
}
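The test depends on RdfValueFactory's graph-bound mode: every RdfRepresentation created by new RdfValueFactory(resultGraph) stores its triples directly in resultGraph, which is why the scores set via r.set(..) are visible to RdfQueryResultList. A small sketch of that behaviour with illustrative URIs (not part of the original test class):

@Test
public void testGraphBoundFactorySketch() {
    Graph backing = new IndexedGraph();
    RdfValueFactory factory = new RdfValueFactory(backing);
    RdfRepresentation rep = factory.createRepresentation("urn:example:rep"); // illustrative id
    rep.set("urn:example:field", "value"); // illustrative field URI
    // the triple is now part of 'backing'; no explicit backing.add(..) is required
    Assert.assertFalse(backing.isEmpty());
}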
Use of org.apache.stanbol.entityhub.model.clerezza.RdfValueFactory in project stanbol by apache.
The class LDPathHelper, method executeLDPath.
/**
 * Executes the LDPath program on the contexts stored in the backend and
 * returns the result as an RDF graph
 * @param backend the {@link RDFBackend} to use
 * @param ldpath the LDPath program to execute
 * @param contexts the contexts to execute the program on
 * @return the results stored within an RDF graph
 * @throws LDPathParseException if the parsed LDPath program is invalid
 */
private static Graph executeLDPath(RDFBackend<Object> backend, String ldpath, Set<String> contexts) throws LDPathParseException {
    Graph data = new IndexedGraph();
    RdfValueFactory vf = new RdfValueFactory(data);
    EntityhubLDPath ldPath = new EntityhubLDPath(backend, vf);
    Program<Object> program = ldPath.parseProgram(getReader(ldpath));
    if (log.isDebugEnabled()) {
        log.debug("Execute on Context(s) '{}' LDPath program: \n{}", contexts, program.getPathExpression(backend));
    }
    /*
     * NOTE: We do not need to process the Representations returned by
     * EntityhubLDPath#execute, because the RdfValueFactory is backed by the
     * local "data" graph, so every created RdfRepresentation is stored there.
     * All converted data is therefore automatically added to the Graph; the
     * only thing left to do is to wrap the Graph in the response.
     */
    for (String context : contexts) {
        ldPath.execute(vf.createReference(context), program);
    }
    return data;
}
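The returned graph is normally handed to the web layer for serialisation. As a hedged sketch of inspecting such a result graph outside JAX-RS, assuming Clerezza's Serializer is available via its non-OSGi Serializer.getInstance() entry point (org.apache.clerezza.rdf.core.serializedform) and where data is the graph returned above:

Serializer serializer = Serializer.getInstance();
ByteArrayOutputStream out = new ByteArrayOutputStream();
// write the collected LDPath results as Turtle
serializer.serialize(out, data, SupportedFormat.TURTLE);
log.debug("LDPath result graph as Turtle:\n{}", out.toString());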
Use of org.apache.stanbol.entityhub.model.clerezza.RdfValueFactory in project stanbol by apache.
The class ResourceAdapterTest, method testFloat.
@Test
public void testFloat() {
    Graph graph = new IndexedGraph();
    IRI id = new IRI("http://www.example.org/test");
    IRI doubleTestField = new IRI("http://www.example.org/field/double");
    LiteralFactory lf = LiteralFactory.getInstance();
    graph.add(new TripleImpl(id, doubleTestField, lf.createTypedLiteral(Float.NaN)));
    graph.add(new TripleImpl(id, doubleTestField, lf.createTypedLiteral(Float.POSITIVE_INFINITY)));
    graph.add(new TripleImpl(id, doubleTestField, lf.createTypedLiteral(Float.NEGATIVE_INFINITY)));
    RdfValueFactory vf = new RdfValueFactory(graph);
    Representation r = vf.createRepresentation(id.getUnicodeString());
    Set<Float> expected = new HashSet<Float>(Arrays.asList(Float.NaN, Float.POSITIVE_INFINITY, Float.NEGATIVE_INFINITY));
    Iterator<Float> dit = r.get(doubleTestField.getUnicodeString(), Float.class);
    while (dit.hasNext()) {
        Float val = dit.next();
        Assert.assertNotNull(val);
        Assert.assertTrue(expected.remove(val));
    }
    Assert.assertTrue(expected.isEmpty());
}
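The test above goes from graph to Representation; the reverse direction works as well, because a graph-bound RdfValueFactory stores values set on a Representation as typed literals in the backing graph. A small round-trip sketch with illustrative URIs (not part of the original test class):

@Test
public void testFloatRoundtripSketch() {
    Graph graph = new IndexedGraph();
    RdfValueFactory vf = new RdfValueFactory(graph);
    Representation r = vf.createRepresentation("http://www.example.org/test2"); // illustrative id
    r.set("http://www.example.org/field/float", Float.valueOf(1.5f)); // illustrative field
    // read the value back with the requested Java type, as in the test above
    Iterator<Float> it = r.get("http://www.example.org/field/float", Float.class);
    Assert.assertTrue(it.hasNext());
    Assert.assertEquals(Float.valueOf(1.5f), it.next());
}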