Use of org.semanticweb.owlapi.model.OWLOntology in project Stanbol by Apache:
the class TestOWLAPIInputSources, method testOfflineImport.
/**
 * Verifies offline resolution of an import: loads an ontology that imports a
 * modified FOAF, with both files located in the same resource directory, using
 * a {@link ParentPathInputSource}.
 *
 * @throws Exception if the test resources cannot be located or loaded
 */
@Test
public void testOfflineImport() throws Exception {
    URL location = getClass().getResource("/ontologies/maincharacters.owl");
    assertNotNull(location);
    File mainCharacters = new File(location.toURI());
    assertNotNull(mainCharacters);
    OntologyInputSource<OWLOntology> coreSource = new ParentPathInputSource(mainCharacters);
    // // Check that all the imports closure is made of local files
    // Set<OWLOntology> closure = coreSource.getImports(true);
    // for (OWLOntology o : closure)
    // assertEquals("file", o.getOWLOntologyManager().getOntologyDocumentIRI(o).getScheme());
    IRI expectedOntologyIri = IRI.create(Constants.PEANUTS_MAIN_BASE);
    assertEquals(coreSource.getRootOntology().getOntologyID().getOntologyIRI(), expectedOntologyIri);
    // Linus is typed as foaf:Person, Lucy as the bogus foaf:Perzon added to the
    // local FOAF copy; only Lucy should show up among the Perzon instances.
    OWLNamedIndividual iLinus = df.getOWLNamedIndividual(IRI.create(Constants.PEANUTS_MAIN_BASE + "#Linus"));
    OWLNamedIndividual iLucy = df.getOWLNamedIndividual(IRI.create(Constants.PEANUTS_MAIN_BASE + "#Lucy"));
    OWLClass cPerzon = df.getOWLClass(IRI.create("http://xmlns.com/foaf/0.1/Perzon"));
    Set<OWLIndividual> perzonInstances = cPerzon.getIndividuals(coreSource.getRootOntology());
    assertTrue(perzonInstances.contains(iLucy) && !perzonInstances.contains(iLinus));
}
Use of org.semanticweb.owlapi.model.OWLOntology in project Stanbol by Apache:
the class TestOWLAPIInputSources, method testOfflineSingleton.
/**
 * Loads a modified FOAF by resolving a URI from a resource directory.
 *
 * @throws Exception if the test resource cannot be located or loaded
 */
@Test
public void testOfflineSingleton() throws Exception {
    URL foafLocation = getClass().getResource("/ontologies/mockfoaf.rdf");
    assertNotNull(foafLocation);
    OntologyInputSource<OWLOntology> coreSource = new RootOntologySource(IRI.create(foafLocation));
    assertNotNull(df);
    // The local FOAF copy declares a bogus class foaf:Perzon; finding it in the
    // signature proves the correct (local) ontology was fetched.
    OWLClass cPerzon = df.getOWLClass(IRI.create("http://xmlns.com/foaf/0.1/Perzon"));
    assertTrue(coreSource.getRootOntology().getClassesInSignature().contains(cPerzon));
}
Use of org.semanticweb.owlapi.model.OWLOntology in project opentheso by miledrousset:
the class testApiSkosOfficial, method testingOWL.
// TODO add test methods here.
// The methods must be annotated with annotation @Test. For example:
//
// @Test
// public void hello() {}
/*
* Copyright (C) 2007, University of Manchester
*
* Modifications to the initial code base are copyright of their
* respective authors, or their employers as appropriate. Authorship
* of the modifications may be determined from the ChangeLog placed at
* the end of this file.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
*/
/**
* Author: Simon Jupp<br>
* Date: Mar 17, 2008<br>
* The University of Manchester<br>
* Bio-Health Informatics Group<br>
*/
// @Test
/**
 * Loads the SKOS reference ontology from the W3C web site, saves a local copy,
 * and prepares alternative serialisations (OWL/XML and Manchester OWL Syntax).
 * <p>
 * NOTE(review): requires network access to resolve the document IRI.
 *
 * @throws OWLOntologyCreationException if the remote ontology cannot be loaded
 * @throws OWLOntologyStorageException if the local copy cannot be saved
 */
public void testingOWL() throws OWLOntologyCreationException, OWLOntologyStorageException {
    // Get hold of an ontology manager
    OWLOntologyManager manager = OWLManager.createOWLOntologyManager();
    // Load an ontology from the Web. We load the ontology from a document IRI
    IRI docIRI = IRI.create("http://www.w3.org/2009/08/skos-reference/skos.rdf");
    OWLOntology skos = manager.loadOntologyFromOntologyDocument(docIRI);
    System.out.println("Loaded ontology: " + skos);
    System.out.println();
    // Save a local copy of the ontology. Use the system temporary directory
    // instead of the previous hard-coded, machine-specific path
    // ("/Users/Miled/Downloads/...") so the method runs on any setup.
    File file = new File(System.getProperty("java.io.tmpdir"), "downloadAndSaveOWLFile.owl");
    manager.saveOntology(skos, IRI.create(file.toURI()));
    // Ontologies are saved in the format from which they were loaded.
    // We can get information about the format of an ontology from its manager
    OWLOntologyFormat format = (OWLOntologyFormat) manager.getOntologyFormat(skos);
    System.out.println(" format: " + format);
    System.out.println();
    // Prepare an OWL/XML serialisation, carrying over any prefix mappings
    // so that we have nicely abbreviated IRIs in the new ontology document
    OWLXMLOntologyFormat owlxmlFormat = new OWLXMLOntologyFormat();
    if (format.isPrefixOWLOntologyFormat()) {
        owlxmlFormat.copyPrefixesFrom(format.asPrefixOWLOntologyFormat());
    }
    // manager.saveOntology(skos, (OWLOntologyFormat) (OWLDocumentFormat) owlxmlFormat, IRI.create(file.toURI()));
    // Dump an ontology to System.out by specifying a different OWLOntologyOutputTarget
    // Note that we can write an ontology to a stream in a similar way
    // using the StreamOutputTarget class
    OWLOntologyDocumentTarget documentTarget = new SystemOutDocumentTarget();
    // Try another format - The Manchester OWL Syntax
    ManchesterOWLSyntaxOntologyFormat manSyntaxFormat = new ManchesterOWLSyntaxOntologyFormat();
    if (format.isPrefixOWLOntologyFormat()) {
        manSyntaxFormat.copyPrefixesFrom(format.asPrefixOWLOntologyFormat());
    }
    // manager.saveOntology(skos, (OWLOntologyFormat) (OWLDocumentFormat) manSyntaxFormat, documentTarget);
}
Use of org.semanticweb.owlapi.model.OWLOntology in project GOCI by EBISPOT:
the class DefaultGWASOWLPublisher, method publishGWASData.
/**
 * Converts published GWAS catalog data (studies, trait associations, SNPs)
 * into a single OWL ontology.
 * <p>
 * Studies with no disease trait, studies not yet published by the catalog, and
 * studies that have since been unpublished are discarded, along with any
 * associations belonging to such studies. When a studies limit or a date /
 * p-value filter is configured, conversion is delegated to
 * {@code filterAndPublishGWASData} instead of converting everything.
 *
 * @return the ontology holding the converted data
 * @throws OWLConversionException if conversion of any entity fails
 */
public OWLOntology publishGWASData() throws OWLConversionException {
    // create new ontology
    OWLOntology conversion = getConverter().createConversionOntology();
    // grab all studies from the DAO
    getLog().debug("Fetching studies that require conversion to OWL using StudyRepository...");
    Collection<Study> studies = getStudyService().deepFindPublishedStudies();
    //TODO : check with Tony probably better to do it at the Repository/Service level
    removeUnpublishableStudies(studies);
    getLog().debug("Query complete, got " + studies.size() + " studies");
    // if studies limit is not set, convert all data, else filter to first n studies and associated data
    if (getStudiesLimit() == -1 && FilterProperties.getDateFilter() == null && FilterProperties.getPvalueFilter() == null) {
        getLog().info("Converting all available data");
        // grab all other data from the DAO
        getLog().debug("Fetching traits that require conversion to OWL using AssociationRepository...");
        Collection<Association> traitAssociations = getAssociationService().findReallyAll();
        //TODO check with Tony how to do that in a better way from service or repository (how to not get associations linked to study with no trait.
        removeUnpublishableAssociations(traitAssociations);
        getLog().debug("Fetching SNPs that require conversion to OWL using SingleNucleotidePolymorphismRepository...");
        Collection<SingleNucleotidePolymorphism> snps = getSingleNucleotidePolymorphismService().findAll();
        getLog().debug("All data fetched");
        // convert this data, starting with SNPs (no dependencies) and working up to studies
        getLog().debug("Starting conversion to OWL...");
        getLog().debug("Converting SNPs...");
        getConverter().addSNPsToOntology(snps, conversion);
        getLog().debug("Converting Trait Associations...");
        getConverter().addAssociationsToOntology(traitAssociations, conversion);
        getLog().debug("Converting Studies...");
        getConverter().addStudiesToOntology(studies, conversion);
        getLog().debug("All conversion done!");
        return conversion;
    } else {
        getLog().info("Data conforming to the filter only");
        return filterAndPublishGWASData(conversion, studies);
    }
}

/**
 * Removes, in place, studies that cannot be published to OWL: those with no
 * disease trait, those without a catalog publish date, and those that have
 * been unpublished. Each removal is logged with the reason.
 *
 * @param studies the mutable collection of studies to filter
 */
private void removeUnpublishableStudies(Collection<Study> studies) {
    Iterator<Study> iterator = studies.iterator();
    while (iterator.hasNext()) {
        Study study = iterator.next();
        if (study.getDiseaseTrait() == null) {
            iterator.remove();
            getLog().error("Study '" + study.getId() + "' has no disease trait");
        } else if (study.getHousekeeping().getCatalogPublishDate() == null) {
            iterator.remove();
            getLog().error("Study '" + study.getId() + "' has not yet been published");
        } else if (study.getHousekeeping().getCatalogUnpublishDate() != null) {
            iterator.remove();
            getLog().error("Study '" + study.getId() + "' has been unpublished");
        }
    }
}

/**
 * Removes, in place, associations whose parent study lacks a disease trait or
 * is not currently published by the catalog — mirroring the study-level
 * filtering so the two collections stay consistent.
 *
 * @param associations the mutable collection of associations to filter
 */
private void removeUnpublishableAssociations(Collection<Association> associations) {
    Iterator<Association> associationIterator = associations.iterator();
    while (associationIterator.hasNext()) {
        Association association = associationIterator.next();
        Study study = association.getStudy();
        if (study.getDiseaseTrait() == null
                || study.getHousekeeping().getCatalogPublishDate() == null
                || study.getHousekeeping().getCatalogUnpublishDate() != null) {
            associationIterator.remove();
        }
    }
}
Use of org.semanticweb.owlapi.model.OWLOntology in project GOCI by EBISPOT:
the class IRITreeBuilder, method buildIRITree.
// OntologyLoader ontologyLoader;
//
// @Autowired
// public IRITreeBuilder(OntologyLoader ontologyLoader){
// this.ontologyLoader = ontologyLoader;
// }
/**
 * Loads EFO from the supplied location, reasons over it, and builds an
 * {@link IRITree} rooted at the Experimental Factor class.
 *
 * @param efoLocation the URL from which to load the EFO ontology
 * @return a tree of class IRIs rooted at Experimental Factor
 * @throws URISyntaxException if the location cannot be converted to a URI
 * @throws OWLOntologyCreationException if the ontology cannot be loaded
 * @throws RuntimeException if Experimental Factor is not a direct child of owl:Thing
 */
public IRITree buildIRITree(URL efoLocation) throws URISyntaxException, OWLOntologyCreationException {
    // load efo
    getLog().info("Loading efo...");
    OWLOntologyManager manager = OWLManager.createOWLOntologyManager();
    OWLOntology efo = manager.loadOntology(IRI.create(efoLocation));
    // cache owl:Nothing's IRI; presumably used by recurse() to prune unsatisfiable classes
    owlNothingIRI = manager.getOWLDataFactory().getOWLNothing().getIRI().toString();
    // create a reasoner over efo
    getLog().info("Reasoning over efo...");
    OWLReasonerFactory factory = new Reasoner.ReasonerFactory();
    ConsoleProgressMonitor progressMonitor = new ConsoleProgressMonitor();
    OWLReasonerConfiguration config = new SimpleConfiguration(progressMonitor);
    OWLReasoner reasoner = factory.createReasoner(efo, config);
    getLog().info("Precomputing inferences...");
    reasoner.precomputeInferences();
    getLog().info("Checking ontology consistency...");
    // Previously the result of isConsistent() was silently discarded; surface
    // it so an inconsistent ontology does not produce a meaningless tree
    // without any trace in the logs.
    if (!reasoner.isConsistent()) {
        getLog().warn("Ontology loaded from " + efoLocation + " is inconsistent; "
                + "the generated tree may be unreliable");
    }
    // get 'top' class
    OWLClass topClass = reasoner.getTopClassNode().getRepresentativeElement();
    getLog().info("Reasoner 'top class' element is " + topClass.getIRI());
    IRITree tree = new IRITree();
    // do one level deep manually - should only be experimental factor
    IRINode rootNode = null;
    OWLClass efClass = null;
    NodeSet<OWLClass> subclasses = reasoner.getSubClasses(topClass, true);
    for (Node<OWLClass> node : subclasses) {
        OWLClass cls = node.getRepresentativeElement();
        getLog().debug("Next child of " + topClass + " is " + cls);
        if (cls.getIRI().toString().equals(OntologyConstants.EXPERIMENTAL_FACTOR_CLASS_IRI)) {
            efClass = cls;
            rootNode = new IRINode(cls.getIRI(), getClassLabel(efo, cls));
        }
    }
    if (rootNode != null) {
        getLog().info("Building tree... walking ontology from " + rootNode.getLabel() + " down...");
        tree.setRootNode(rootNode);
        // walk the class hierarchy below Experimental Factor, filling the tree
        recurse(reasoner, efo, efClass, rootNode);
        getLog().info("...Tree build complete!");
    } else {
        throw new RuntimeException("Could not find Experimental Factor as a child of OWL:Thing");
    }
    return tree;
}
Aggregations