Use of org.apache.clerezza.commons.rdf.Graph in project stanbol by apache.
In class RdfResultListTest, method testRdfResultSorting:
/**
* Providing a sorted iteration over query results stored in an RDF
* graph is not trivial. Therefore this test checks that the results are
* iterated in descending order of their result score.
*/
@Test
public void testRdfResultSorting() {
SortedMap<Double, RdfRepresentation> sorted = new TreeMap<Double, RdfRepresentation>();
Graph resultGraph = new IndexedGraph();
RdfValueFactory vf = new RdfValueFactory(resultGraph);
IRI resultListNode = new IRI(RdfResourceEnum.QueryResultSet.getUri());
IRI resultProperty = new IRI(RdfResourceEnum.queryResult.getUri());
for (int i = 0; i < 100; i++) {
Double rank;
do {
//avoid duplicate keys
rank = Math.random();
} while (sorted.containsKey(rank));
RdfRepresentation r = vf.createRepresentation("urn:sortTest:rep." + i);
//link the representation with the query result set
resultGraph.add(new TripleImpl(resultListNode, resultProperty, r.getNode()));
r.set(RdfResourceEnum.resultScore.getUri(), rank);
sorted.put(rank, r);
}
RdfQueryResultList resultList = new RdfQueryResultList(new FieldQueryImpl(), resultGraph);
if (log.isDebugEnabled()) {
log.debug("---DEBUG Sorting ---");
for (Iterator<Representation> it = resultList.iterator(); it.hasNext(); ) {
Representation r = it.next();
log.debug("{}: {}", r.getFirst(RdfResourceEnum.resultScore.getUri()), r.getId());
}
}
log.debug("---ASSERT Sorting ---");
for (Iterator<Representation> it = resultList.iterator(); it.hasNext(); ) {
Representation r = it.next();
Double lastkey = sorted.lastKey();
Representation last = sorted.get(lastkey);
Assert.assertEquals("score: " + r.getFirst(RdfResourceEnum.resultScore.getUri()) + " of Representation " + r.getId() + " is not as expected " + last.getFirst(RdfResourceEnum.resultScore.getUri()) + " of Representation " + last.getId() + "!", r, last);
sorted.remove(lastkey);
}
Assert.assertTrue(sorted.isEmpty());
}
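The test exercises only a handful of operations on the Graph interface. The sketch below distills that pattern; the class names are the ones used in the snippets on this page, but the package names in the imports and the urn:example IRIs are assumptions made for illustration.

import java.util.Iterator;
import org.apache.clerezza.commons.rdf.Graph;
import org.apache.clerezza.commons.rdf.IRI;
import org.apache.clerezza.commons.rdf.Triple;
import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl; // assumed package
import org.apache.stanbol.commons.indexedgraph.IndexedGraph; // assumed package

public class GraphUsageSketch {
    public static void main(String[] args) {
        // in-memory, indexed Graph implementation, as used by the snippets above
        Graph graph = new IndexedGraph();
        IRI resultSet = new IRI("urn:example:resultSet"); // illustrative IRIs only
        IRI resultProp = new IRI("urn:example:result");
        // triples are added directly to the graph
        graph.add(new TripleImpl(resultSet, resultProp, new IRI("urn:example:entity.1")));
        // filter(subject, predicate, object) returns an Iterator over matching triples;
        // null acts as a wildcard for any position
        Iterator<Triple> it = graph.filter(resultSet, resultProp, null);
        while (it.hasNext()) {
            Triple triple = it.next();
            System.out.println(triple.getObject());
            // removing via the iterator deletes the triple from the graph
            it.remove();
        }
    }
}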
Use of org.apache.clerezza.commons.rdf.Graph in project stanbol by apache.
In class ClerezzaModelWriter, method toRDF:
private Graph toRDF(QueryResultList<?> resultList) {
final Graph resultGraph;
Class<?> type = resultList.getType();
if (String.class.isAssignableFrom(type)) {
//create a new in-memory Graph for the result references
resultGraph = new IndexedGraph();
for (Object result : resultList) {
//add a triple to each reference in the result set
resultGraph.add(new TripleImpl(QUERY_RESULT_LIST, QUERY_RESULT, new IRI(result.toString())));
}
} else {
//first determine the type of the resultList
final boolean isSignType;
if (Representation.class.isAssignableFrom(type)) {
isSignType = false;
} else if (Entity.class.isAssignableFrom(type)) {
isSignType = true;
} else {
//incompatible type -> throw an Exception
throw new IllegalArgumentException("Parsed type " + type + " is not supported");
}
//special treatment for RdfQueryResultList for increased performance
if (resultList instanceof RdfQueryResultList) {
resultGraph = ((RdfQueryResultList) resultList).getResultGraph();
if (isSignType) {
//if we build a ResultList for Signs, we need to do more work
//first remove all triples representing results
Iterator<Triple> resultTripleIt = resultGraph.filter(QUERY_RESULT_LIST, QUERY_RESULT, null);
while (resultTripleIt.hasNext()) {
resultTripleIt.next();
resultTripleIt.remove();
}
//then add the Entity data and link the query result set to the Sign IDs
for (Object result : resultList) {
IRI signId = new IRI(((Entity) result).getId());
addEntityTriplesToGraph(resultGraph, (Entity) result);
resultGraph.add(new TripleImpl(QUERY_RESULT_LIST, QUERY_RESULT, signId));
}
}
} else {
//any other implementation of the QueryResultList interface
//create a new graph
resultGraph = new IndexedGraph();
if (Representation.class.isAssignableFrom(type)) {
for (Object result : resultList) {
IRI resultId;
if (!isSignType) {
addRDFTo(resultGraph, (Representation) result);
resultId = new IRI(((Representation) result).getId());
} else {
addRDFTo(resultGraph, (Entity) result);
resultId = new IRI(((Entity) result).getId());
}
//Note: In case of Representation this Triple points to
// the representation. In case of Signs it points to
// the sign.
resultGraph.add(new TripleImpl(QUERY_RESULT_LIST, QUERY_RESULT, resultId));
}
}
}
}
return resultGraph;
}
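A note on the design: for an RdfQueryResultList the method reuses the list's backing graph instead of copying triples, and only builds a fresh IndexedGraph for other QueryResultList implementations. In both cases every result ends up linked to the QUERY_RESULT_LIST node, so the returned graph can be enumerated with a single filter call. A minimal sketch, assuming a resultGraph obtained from toRDF and the QUERY_RESULT_LIST/QUERY_RESULT constants used above:

Iterator<Triple> results = resultGraph.filter(QUERY_RESULT_LIST, QUERY_RESULT, null);
while (results.hasNext()) {
    // the object of each triple is the IRI of one query result
    System.out.println(results.next().getObject());
}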
Use of org.apache.clerezza.commons.rdf.Graph in project stanbol by apache.
In class ClerezzaRuleStore, method removeRule:
@Override
public Recipe removeRule(Recipe recipe, Rule rule) {
Graph tripleCollection = tcManager.getGraph(recipe.getRecipeID());
// remove from the recipe graph all triples having the ruleID as subject.
Iterator<Triple> triplesIterator = tripleCollection.filter(rule.getRuleID(), null, null);
while (triplesIterator.hasNext()) {
tripleCollection.remove(triplesIterator.next());
}
// remove from the recipe graph the triple recipeID hasRule ruleID
tripleCollection.remove(new TripleImpl(recipe.getRecipeID(), Symbols.hasRule, rule.getRuleID()));
recipe.removeRule(rule);
return recipe;
}
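A possible call sequence for removeRule, assembled from methods that appear elsewhere on this page (getRecipe and getRule); the ruleStore instance and the urn:example IRIs are assumptions, and the caught exceptions mirror the catch blocks in findRecipesByDescription and findRulesByName below:

try {
    Recipe recipe = ruleStore.getRecipe(new IRI("urn:example:recipe")); // hypothetical recipe ID
    Rule rule = ruleStore.getRule(recipe, new IRI("urn:example:rule")); // hypothetical rule ID
    // removes the rule's triples and the recipe's hasRule link, then updates the Recipe object
    recipe = ruleStore.removeRule(recipe, rule);
} catch (NoSuchRecipeException | RecipeConstructionException | NoSuchRuleInRecipeException e) {
    // the recipe or rule could not be found or reconstructed
}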
Use of org.apache.clerezza.commons.rdf.Graph in project stanbol by apache.
In class ClerezzaRuleStore, method findRecipesByDescription:
@Override
public RecipeList findRecipesByDescription(String term) {
String sparql = "SELECT ?recipe " + "WHERE { ?recipe a " + Symbols.Recipe.toString() + " . " + "?recipe " + Symbols.description + " ?description . " + "FILTER (regex(?description, \"" + term + "\", \"i\"))" + "}";
Graph tripleCollection = tcManager.getGraph(new IRI(recipeIndexLocation));
RecipeList matchingRecipes = new RecipeList();
try {
SelectQuery query = (SelectQuery) QueryParser.getInstance().parse(sparql);
ResultSet resultSet = tcManager.executeSparqlQuery(query, tripleCollection);
while (resultSet.hasNext()) {
SolutionMapping solutionMapping = resultSet.next();
IRI recipeID = (IRI) solutionMapping.get("recipe");
try {
Recipe recipe = getRecipe(recipeID);
log.info("Found recipe {}.", recipeID.toString());
matchingRecipes.add(recipe);
log.info("Found {} matching recipes.", matchingRecipes.size());
} catch (NoSuchRecipeException e) {
// skip this recipe and go on with the remaining matches
} catch (RecipeConstructionException e) {
// skip this recipe and go on with the remaining matches
}
}
} catch (ParseException e) {
log.error("The sparql query contains errors: ", e);
}
return matchingRecipes;
}
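A hedged usage sketch for findRecipesByDescription; the ruleStore and log references and the search term are assumptions, and RecipeList is assumed to be iterable over Recipe instances:

RecipeList matches = ruleStore.findRecipesByDescription("wine"); // case-insensitive regex on the description
for (Recipe recipe : matches) {
    log.info("matched recipe {}", recipe.getRecipeID());
}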
Use of org.apache.clerezza.commons.rdf.Graph in project stanbol by apache.
In class ClerezzaRuleStore, method findRulesByName:
@Override
public RuleList findRulesByName(String term) {
String sparql = "SELECT ?recipe ?rule ?description " + "WHERE { " + "?recipe " + Symbols.hasRule + " ?rule . " + "?rule " + Symbols.ruleName + " ?name . " + "?rule " + Symbols.description + " ?description . " + "FILTER (regex(?name, \"" + term + "\", \"i\"))" + "}";
List<IRI> recipeIDs = listRecipeIDs();
Graph[] tripleCollections = new Graph[recipeIDs.size()];
for (int i = 0; i < tripleCollections.length; i++) {
tripleCollections[i] = tcManager.getGraph(recipeIDs.get(i));
}
UnionGraph unionGraph = new UnionGraph(tripleCollections);
RuleList matchingRules = new RuleList();
try {
SelectQuery query = (SelectQuery) QueryParser.getInstance().parse(sparql);
ResultSet resultSet = tcManager.executeSparqlQuery(query, unionGraph);
while (resultSet.hasNext()) {
SolutionMapping solutionMapping = resultSet.next();
IRI recipeID = (IRI) solutionMapping.get("recipe");
IRI ruleID = (IRI) solutionMapping.get("rule");
Literal description = (Literal) solutionMapping.get("description");
try {
Recipe recipe = getRecipe(recipeID);
Rule rule = new RecipeRule(recipe, getRule(recipe, ruleID));
if (description != null) {
rule.setDescription(description.getLexicalForm());
}
matchingRules.add(rule);
} catch (NoSuchRecipeException e) {
// skip this match and go on with the remaining ones
} catch (RecipeConstructionException e) {
// skip this match and go on with the remaining ones
} catch (NoSuchRuleInRecipeException e) {
// skip this match and go on with the remaining ones
}
}
} catch (ParseException e) {
log.error("The sparql query contains errors: ", e);
}
return matchingRules;
}
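findRulesByName runs one SPARQL query against a UnionGraph built over all recipe graphs, so every recipe is searched at once without copying triples into a temporary store. A matching usage sketch under the same assumptions as above (ruleStore, log and the search term are illustrative; RuleList is assumed to be iterable over Rule instances):

RuleList rules = ruleStore.findRulesByName("person"); // case-insensitive regex on the rule name
for (Rule rule : rules) {
    log.info("matched rule {}", rule.getRuleID());
}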