Use of org.apache.stanbol.rules.base.api.Recipe in project stanbol by apache.
The class RecipeList, method remove.
/**
 * Removes the first element of the backing {@code recipes} array that is
 * {@code equals} to the given object, shrinking the array by one.
 *
 * @param o the recipe to remove (compared via {@code Recipe.equals})
 * @return {@code true} if a matching recipe was found and removed,
 *         {@code false} if no element matched
 */
public boolean remove(Object o) {
    boolean removed = false;
    for (int i = 0; i < recipes.length && !removed; i++) {
        Recipe recipe = recipes[i];
        if (recipe.equals(o)) {
            Recipe[] recipesCopy = new Recipe[recipes.length - 1];
            // Copy the elements preceding the match unchanged.
            System.arraycopy(recipes, 0, recipesCopy, 0, i);
            // BUG FIX: the destination offset must be i, not 0. The original
            // code copied the trailing elements over the leading ones,
            // corrupting the list whenever a non-first recipe was removed.
            System.arraycopy(recipes, i + 1, recipesCopy, i, recipesCopy.length - i);
            recipes = recipesCopy;
            removed = true;
        }
    }
    return removed;
}
Use of org.apache.stanbol.rules.base.api.Recipe in project stanbol by apache.
The class RefactorEnhancementEngine, method initEngine.
/**
 * Initialises the engine from the given configuration:
 * <ol>
 * <li>Create (or reuse) the ontology scope configured for this engine.</li>
 * <li>Load every resolvable core ontology stated in the configuration into
 * the scope's core space.</li>
 * <li>Create the refactoring recipe in the rule store and populate it with
 * rules read either from the configured recipe location or from the bundled
 * default ({@code /META-INF/default/seo_rules.sem}).</li>
 * </ol>
 *
 * @param engineConfiguration the configuration to initialise the engine from
 */
private void initEngine(RefactorEnhancementEngineConf engineConfiguration) {
    String scopeId = engineConfiguration.getScope();
    // Create or get the scope with the configured ID.
    try {
        scope = onManager.createOntologyScope(scopeId);
        // No need to deactivate a newly created scope.
    } catch (DuplicateIDException e) {
        // The scope already exists: reuse it, deactivated while we repopulate it.
        scope = onManager.getScope(scopeId);
        onManager.setScopeActive(scopeId, false);
    }
    // All resolvable ontologies stated in the configuration are loaded into the core space.
    OntologySpace ontologySpace = scope.getCoreSpace();
    ontologySpace.tearDown();
    String[] coreScopeOntologySet = engineConfiguration.getScopeCoreOntologies();
    List<String> success = new ArrayList<String>(), failed = new ArrayList<String>();
    try {
        log.info("Will now load requested ontology into the core space of scope '{}'.", scopeId);
        OWLOntologyManager sharedManager = OWLManager.createOWLOntologyManager();
        for (int o = 0; o < coreScopeOntologySet.length; o++) {
            String url = coreScopeOntologySet[o];
            org.semanticweb.owlapi.model.IRI physicalIRI;
            try {
                physicalIRI = org.semanticweb.owlapi.model.IRI.create(url);
            } catch (Exception e) {
                // BUG FIX: the original fell through to addOntology() with a
                // null (or stale, from a previous iteration) IRI after a
                // failed create. Record the failure and skip to the next URL.
                failed.add(url);
                continue;
            }
            try {
                // TODO replace with a Clerezza equivalent
                ontologySpace.addOntology(new RootOntologySource(physicalIRI, sharedManager));
                success.add(url);
            } catch (OWLOntologyCreationException e) {
                log.error("Failed to load ontology from physical location " + physicalIRI + " Continuing with next...", e);
                failed.add(url);
            }
        }
    } catch (UnmodifiableOntologyCollectorException ex) {
        log.error("Ontology space {} was found locked for modification. Cannot populate.", ontologySpace);
    }
    for (String s : success) log.info(" >> {} : SUCCESS", s);
    for (String s : failed) log.info(" >> {} : FAILED", s);
    ontologySpace.setUp();
    // if (!onManager.containsScope(scopeId)) onManager.registerScope(scope);
    onManager.setScopeActive(scopeId, true);
    /*
     * The first thing to do is to create a recipe in the rule store that can be used by the engine to
     * refactor the enhancement graphs.
     */
    String recipeId = engineConfiguration.getRecipeId();
    Recipe recipe = null;
    try {
        recipe = ruleStore.createRecipe(new IRI(recipeId), null);
    } catch (AlreadyExistingRecipeException e1) {
        log.error("A recipe with ID {} already exists in the store.", recipeId);
    }
    if (recipe != null) {
        log.debug("Initialised blank recipe with ID {}", recipeId);
        /*
         * The set of rules to put in the recipe can be provided by the user. A default set of rules is
         * provided in /META-INF/default/seo_rules.sem. Use the property engine.refactor in the felix
         * console to pass to the engine your set of rules.
         */
        String recipeLocation = engineConfiguration.getRecipeLocation();
        InputStream recipeStream = null;
        String recipeString = null;
        if (recipeLocation != null && !recipeLocation.isEmpty()) {
            Dereferencer dereferencer = new DereferencerImpl();
            try {
                recipeStream = dereferencer.resolve(recipeLocation);
                log.debug("Loaded recipe from external source {}", recipeLocation);
            } catch (FileNotFoundException e) {
                log.error("Recipe Stream is null.", e);
            }
        } else {
            // TODO remove this part (or manage it better in the @Activate method).
            String loc = "/META-INF/default/seo_rules.sem";
            recipeStream = getClass().getResourceAsStream(loc);
            log.debug("Loaded default recipe in {}.", loc);
        }
        if (recipeStream != null) {
            BufferedReader reader = new BufferedReader(new InputStreamReader(recipeStream));
            // NOTE(review): lines are concatenated without a separator, as in
            // the original code — confirm the rule parser tolerates this.
            StringBuilder recipeBuilder = new StringBuilder();
            try {
                String line;
                while ((line = reader.readLine()) != null) recipeBuilder.append(line);
                recipeString = recipeBuilder.toString();
            } catch (IOException e) {
                log.error("Failed to load Refactor Engine recipe from stream. Aborting read. ", e);
                recipeString = null;
            } finally {
                // Release the stream (previously leaked).
                try {
                    reader.close();
                } catch (IOException e) {
                    log.warn("Failed to close recipe stream reader.", e);
                }
            }
        }
        log.debug("Recipe content follows :\n{}", recipeString);
        if (recipeString != null) {
            ruleStore.addRulesToRecipe(recipe, recipeString, null);
            log.debug("Added rules to recipe {}", recipeId);
        }
    }
}
Use of org.apache.stanbol.rules.base.api.Recipe in project stanbol by apache.
The class RefactorEnhancementEngine, method computeEnhancements.
/**
 * Refactors the enhancement metadata of the given content item: entity
 * signatures are resolved (via Entity Hub or dereferencing), merged with the
 * metadata graph in a dedicated OntoNet session, refactored through the
 * configured recipe, and the result is appended to (or replaces) the content
 * item's metadata.
 *
 * @param ci the content item whose metadata graph is refactored in place
 * @throws EngineException if a session cannot be obtained, the session cannot
 *         be populated, the recipe is missing, or refactoring fails
 */
@Override
public void computeEnhancements(ContentItem ci) throws EngineException {
    // Prepare the OntoNet environment. First we create the OntoNet session in which run the whole
    final Session session;
    try {
        session = sessionManager.createSession();
    } catch (SessionLimitException e1) {
        // Preserve the cause (the original dropped it).
        throw new EngineException("OntoNet session quota reached. The Refactor Engine requires its own new session to execute.", e1);
    }
    if (session == null)
        throw new EngineException("Failed to create OntoNet session. The Refactor Engine requires its own new session to execute.");
    log.debug("Refactor enhancement job will run in session '{}'.", session.getID());
    // Retrieve and filter the metadata graph for entities recognized by the engines.
    final Graph metadataGraph = ci.getMetadata(), signaturesGraph = new IndexedGraph();
    // FIXME the Stanbol Enhancer vocabulary should be retrieved from somewhere in the enhancer API.
    final IRI ENHANCER_ENTITY_REFERENCE = new IRI("http://fise.iks-project.eu/ontology/entity-reference");
    Iterator<Triple> tripleIt = metadataGraph.filter(null, ENHANCER_ENTITY_REFERENCE, null);
    while (tripleIt.hasNext()) {
        // Get the entity URI
        RDFTerm obj = tripleIt.next().getObject();
        if (!(obj instanceof IRI)) {
            log.warn("Invalid IRI for entity reference {}. Skipping.", obj);
            continue;
        }
        final String entityReference = ((IRI) obj).getUnicodeString();
        log.debug("Trying to resolve entity {}", entityReference);
        // Populate the entity signatures graph, by querying either the Entity Hub or the dereferencer.
        if (engineConfiguration.isEntityHubUsed()) {
            Graph result = populateWithEntity(entityReference, signaturesGraph);
            if (result != signaturesGraph && result != null) {
                log.warn("Entity Hub query added triples to a new graph instead of populating the supplied one!" + " New signatures will be discarded.");
            }
        } else
            try {
                OntologyInputSource<Graph> source = new GraphContentSourceWithPhysicalIRI(dereferencer.resolve(entityReference), org.semanticweb.owlapi.model.IRI.create(entityReference));
                signaturesGraph.addAll(source.getRootOntology());
            } catch (FileNotFoundException e) {
                log.error("Failed to dereference entity " + entityReference + ". Skipping.", e);
                continue;
            }
    }
    try {
        /*
         * The dedicated session for this job will store the following: (1) all the (merged) signatures
         * for all detected entities; (2) the original content metadata graph returned earlier in the
         * chain.
         *
         * There is no chance that (2) could be null, as it was previously controlled by the JobManager
         * through the canEnhance() method and the computeEnhancement is always called iff the former
         * returns true.
         */
        session.addOntology(new GraphSource(signaturesGraph));
        session.addOntology(new GraphSource(metadataGraph));
    } catch (UnmodifiableOntologyCollectorException e1) {
        throw new EngineException("Cannot add enhancement graph to OntoNet session for refactoring", e1);
    }
    try {
        /*
         * Export the entire session (incl. entities and enhancement graph) as a single merged ontology.
         *
         * TODO the refactorer should have methods to accommodate an OntologyCollector directly instead.
         */
        OWLOntology ontology = session.export(OWLOntology.class, true);
        log.debug("Refactoring recipe IRI is : " + engineConfiguration.getRecipeId());
        /*
         * We pass the ontology and the recipe IRI to the Refactor that returns the refactored graph
         * expressed by using the given vocabulary.
         *
         * To perform the refactoring of the ontology to a given vocabulary we use the Stanbol Refactor.
         */
        Recipe recipe = ruleStore.getRecipe(new IRI(engineConfiguration.getRecipeId()));
        log.debug("Recipe {} contains {} rules.", recipe, recipe.getRuleList().size());
        log.debug("The ontology to be refactor is {}", ontology);
        Graph tc = refactorer.graphRefactoring(OWLAPIToClerezzaConverter.owlOntologyToClerezzaGraph(ontology), recipe);
        /*
         * The newly generated ontology is converted to Clerezza format and then either appended to, or
         * substituted for, the old mGraph.
         */
        if (engineConfiguration.isInGraphAppendMode()) {
            // BUG FIX: these two debug messages were swapped in the original
            // code relative to the branch logic ("replace" was logged on the
            // append path and vice versa). The stray `this` argument (no
            // placeholder in the pattern) has also been dropped.
            log.debug("Content metadata will be appended to the existing ones.");
        } else {
            metadataGraph.clear();
            log.debug("Metadata of the content will replace old ones.");
        }
        metadataGraph.addAll(tc);
    } catch (RefactoringException e) {
        String msg = "Refactor engine execution failed on content item " + ci + ".";
        log.error(msg, e);
        throw new EngineException(msg, e);
    } catch (NoSuchRecipeException e) {
        String msg = "Refactor engine could not find recipe " + engineConfiguration.getRecipeId() + " to refactor content item " + ci + ".";
        log.error(msg, e);
        throw new EngineException(msg, e);
    } catch (Exception e) {
        throw new EngineException("Refactor Engine has failed.", e);
    } finally {
        /*
         * The session needs to be destroyed anyhow.
         *
         * Clear contents before destroying (FIXME only do this until this is implemented in the
         * destroySession() method).
         */
        for (OWLOntologyID id : session.listManagedOntologies()) {
            try {
                String key = ontologyProvider.getKey(id.getOntologyIRI());
                ontologyProvider.getStore().deleteGraph(new IRI(key));
            } catch (Exception ex) {
                log.error("Failed to delete triple collection " + id, ex);
                continue;
            }
        }
        sessionManager.destroySession(session.getID());
    }
}
Use of org.apache.stanbol.rules.base.api.Recipe in project stanbol by apache.
The class ClerezzaAdapter, method main.
/**
 * Demo entry point: parses a rule file into a {@link Recipe}, runs a
 * hard-coded SPARQL CONSTRUCT query against a local RDF dataset and writes
 * the constructed model to a local file.
 *
 * NOTE(review): paths are hard-coded to a developer machine; this is
 * throwaway demo code, kept as-is apart from the resource-leak fix.
 *
 * @param args ignored
 */
public static void main(String[] args) {
    RuleAdapter ruleAdapter = new ClerezzaAdapter();
    FileInputStream ruleInput = null;
    FileOutputStream max = null;
    try {
        ruleInput = new FileInputStream("/Users/mac/Documents/CNR/SSSW2012/construct/exercise3");
        KB kb = RuleParserImpl.parse("http://sssw.org/2012/rules/", ruleInput);
        System.out.println("Rules: " + kb.getRuleList().size());
        Recipe recipe = new RecipeImpl(new IRI("http://sssw.org/2012/rules/"), "Recipe", kb.getRuleList());
        //List<ConstructQuery> jenaRules = (List<ConstructQuery>) ruleAdapter.adaptTo(recipe, ConstructQuery.class);
        // Retained for the commented-out Jena rule path below.
        String rules = "[ Exercise1: (http://dbpedia.org/resource/Madrid http://dbpedia.org/ontology/locationOf ?location) (?location rdf:type http://dbpedia.org/ontology/Museum) (?location http://dbpedia.org/ontology/numberOfVisitors ?visitors) greaterThan(?visitors '2000000'^^http://www.w3.org/2001/XMLSchema#integer) -> (?location rdf:type http://www.mytravels.com/Itinerary/MadridItinerary) ]";
        //List<com.hp.hpl.jena.reasoner.rulesys.Rule> jenaRules = com.hp.hpl.jena.reasoner.rulesys.Rule.parseRules(rules);
        String spqral = "CONSTRUCT " + "{ ?city a <http://www.mytravels.com/Itinerary/MovieCityItinerary> . " + " ?city <http://www.w3.org/2000/01/rdf-schema#label> ?cLabel . " + " ?event a <http://linkedevents.org/ontology/Event> . " + " ?event <http://linkedevents.org/ontology/atPlace> ?location . " + " ?location <http://www.w3.org/2000/01/rdf-schema#label> ?lLabel . " + " ?location <http://www.w3.org/2002/07/owl#sameAs> ?city" + "} " + "WHERE " + "{ " + " ?city a <http://www.mytravels.com/Itinerary/MovieCityItinerary> . " + " ?city <http://www.w3.org/2000/01/rdf-schema#label> ?cLabel . " + " ?event a <http://linkedevents.org/ontology/Event> . " + " ?event <http://linkedevents.org/ontology/atPlace> ?location . " + " ?location <http://www.w3.org/2000/01/rdf-schema#label> ?lLabel . " + " FILTER(?lLabel = ?cLabel) " + "}";
        Model m = ModelFactory.createDefaultModel();
        Model model = FileManager.get().loadModel("/Users/mac/Documents/CNR/SSSW2012/datasets_new/Exercise5_tmp.rdf");
        //for(ConstructQuery constructQuery : jenaRules){
        //Query query = QueryFactory.create(constructQuery.toString(), Syntax.syntaxARQ);
        Query query = QueryFactory.create(spqral, Syntax.syntaxARQ);
        QueryExecution queryExecution = QueryExecutionFactory.create(query, model);
        //System.out.println(constructQuery.toString());
        m.add(queryExecution.execConstruct());
        //}
        max = new FileOutputStream("/Users/mac/Documents/CNR/SSSW2012/datasets_new/example5.rdf");
        m.write(max);
    } catch (FileNotFoundException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    } finally {
        // BUG FIX: release the file handles (previously both streams leaked).
        if (ruleInput != null) try {
            ruleInput.close();
        } catch (Exception ignored) {
            // Best-effort close of demo resources; nothing useful to do here.
        }
        if (max != null) try {
            max.close();
        } catch (Exception ignored) {
            // Best-effort close of demo resources; nothing useful to do here.
        }
    }
    /*catch (RuleAtomCallExeption e) {
    // TODO Auto-generated catch block
    e.printStackTrace();
    } catch (UnavailableRuleObjectException e) {
    // TODO Auto-generated catch block
    e.printStackTrace();
    } catch (UnsupportedTypeForExportException e) {
    // TODO Auto-generated catch block
    e.printStackTrace();
    }*/
}
Use of org.apache.stanbol.rules.base.api.Recipe in project stanbol by apache.
The class RulesResource, method getRule.
/**
 * Returns a recipe from the rule store, or — when the {@code rule} query
 * parameter is supplied — a recipe containing only that single rule.
 *
 * Example:
 * curl -v -X GET http://localhost:8080/kres/rule/http
 * ://kres.iks-project.eu/ontology/meta/rmi.owl#ProvaParentRule
 *
 * @param recipeID the full IRI of the recipe; IDs lacking a URI scheme are
 *        prefixed with {@code urn:}
 * @param ruleID optional full IRI of a single rule within the recipe
 * @param headers the request headers (currently unused)
 * @return 200 with the recipe (or one-rule recipe) on success;
 *         404 when the recipe or rule does not exist;
 *         204 when the recipe cannot be reconstructed;
 *         406 when the recipe ID is not a valid URI
 */
@GET
@Path("/recipe/{recipe:.+}")
@Produces(value = { KRFormat.RDF_XML, KRFormat.TURTLE, KRFormat.OWL_XML, KRFormat.RDF_JSON, KRFormat.FUNCTIONAL_OWL, KRFormat.MANCHESTER_OWL, MediaType.TEXT_PLAIN })
public Response getRule(@PathParam("recipe") String recipeID, @QueryParam("rule") String ruleID, @Context HttpHeaders headers) {
    ResponseBuilder rb;
    try {
        // Recipe IDs without a scheme are namespaced under "urn:".
        if (new URI(recipeID).getScheme() == null) {
            recipeID = "urn:" + recipeID;
            log.info("The recipe ID is a URI without scheme. The ID is set to " + recipeID);
        }
        Recipe found = ruleStore.getRecipe(new IRI(recipeID));
        if (ruleID == null || ruleID.isEmpty()) {
            // No rule filter: serve the whole recipe.
            rb = Response.ok(found);
        } else {
            // A single rule was requested: wrap it in a one-element recipe
            // carrying the original recipe's ID and description.
            Rule selectedRule = ruleStore.getRule(found, new IRI(ruleID));
            RuleList singleton = new RuleList();
            singleton.add(selectedRule);
            rb = Response.ok(new RecipeImpl(found.getRecipeID(), found.getRecipeDescription(), singleton));
        }
    } catch (NoSuchRecipeException e) {
        log.error(e.getMessage(), e);
        rb = Response.status(Status.NOT_FOUND);
    } catch (RecipeConstructionException e) {
        log.error(e.getMessage(), e);
        rb = Response.status(Status.NO_CONTENT);
    } catch (NoSuchRuleInRecipeException e) {
        log.error(e.getMessage(), e);
        rb = Response.status(Status.NOT_FOUND);
    } catch (URISyntaxException e) {
        log.error(e.getMessage(), e);
        rb = Response.status(Status.NOT_ACCEPTABLE);
    }
    // addCORSOrigin(servletContext, responseBuilder, headers);
    return rb.build();
}
Aggregations