use of org.semanticweb.binaryowl.BinaryOWLOntologyDocumentSerializer in project webprotege by protegeproject.
the class ProjectDocumentStore method saveOntologyChanges.
public void saveOntologyChanges(List<OWLOntologyChange> rawChangeList) {
    // Put changes into a buffer
    try {
        writeLock.lock();
        try {
            // Group the raw changes by the ontology that they apply to
            ListMultimap<OWLOntology, OWLOntologyChange> changesByOntology = ArrayListMultimap.create();
            for (OWLOntologyChange change : rawChangeList) {
                changesByOntology.put(change.getOntology(), change);
            }
            for (OWLOntology ontology : changesByOntology.keySet()) {
                IRI docIRI = ontology.getOWLOntologyManager().getOntologyDocumentIRI(ontology);
                if (!"file".equalsIgnoreCase(docIRI.toURI().getScheme())) {
                    throw new RuntimeException("Document IRI is not a local file IRI");
                }
                // Extract the serializable change data for this ontology
                List<OWLOntologyChange> ontologyChangeList = changesByOntology.get(ontology);
                List<OWLOntologyChangeData> infoList = new ArrayList<>();
                for (OWLOntologyChange change : ontologyChangeList) {
                    OWLOntologyChangeRecord changeRecord = change.getChangeRecord();
                    infoList.add(changeRecord.getData());
                }
                // Append the timestamped change list to the ontology's binary OWL document
                File file = new File(docIRI.toURI());
                BinaryOWLOntologyDocumentSerializer serializer = new BinaryOWLOntologyDocumentSerializer();
                serializer.appendOntologyChanges(file, new OntologyChangeDataList(infoList, System.currentTimeMillis(), BinaryOWLMetadata.emptyMetadata()));
            }
        } catch (IOException e) {
            logger.error("An error occurred whilst saving ontology changes: {}", e.getMessage(), e);
        }
    } finally {
        writeLock.unlock();
    }
}
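For orientation, a call site for saveOntologyChanges might look like the sketch below. The ontology setup, the example IRIs, and the file name root-ontology.binary are assumptions made purely for illustration (in webprotege the ProjectDocumentStore and the root ontology are managed by the project infrastructure), and the binary document is assumed to already exist at that path; the OWL API classes used (OWLManager, AddAxiom, and friends) come from org.semanticweb.owlapi.

void appendDeclarationChange(ProjectDocumentStore projectDocumentStore) throws OWLOntologyCreationException {
    OWLOntologyManager manager = OWLManager.createOWLOntologyManager();
    OWLOntology ontology = manager.createOntology(IRI.create("http://example.org/ont"));
    // saveOntologyChanges requires the ontology document IRI to be a local file IRI
    manager.setOntologyDocumentIRI(ontology, IRI.create(new File("root-ontology.binary")));
    OWLDataFactory dataFactory = manager.getOWLDataFactory();
    OWLAxiom axiom = dataFactory.getOWLDeclarationAxiom(dataFactory.getOWLClass(IRI.create("http://example.org/ont#A")));
    List<OWLOntologyChange> changes = Collections.singletonList(new AddAxiom(ontology, axiom));
    // Apply the change in memory, then append the same change data to the binary OWL document
    manager.applyChanges(changes);
    projectDocumentStore.saveOntologyChanges(changes);
}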
use of org.semanticweb.binaryowl.BinaryOWLOntologyDocumentSerializer in project webprotege by protegeproject.
the class ImportsCacheManager method parseOntologyDocument.
private void parseOntologyDocument(File ontologyDocument) {
    try {
        WRITE_LOCK.lock();
        try (InputStream is = new BufferedInputStream(new FileInputStream(ontologyDocument))) {
            // Read the cached binary OWL document and capture its ID, document IRI and timestamp
            BinaryOWLOntologyDocumentSerializer serializer = new BinaryOWLOntologyDocumentSerializer();
            final Handler handler = new Handler();
            serializer.read(is, handler, new OWLDataFactoryImpl());
            OWLOntologyID id = handler.getOntologyID();
            if (id.getOntologyIRI().isPresent()) {
                ontologyIDs.add(id);
                // Index the cached document by its ontology IRI (and version IRI, if present)
                iri2Document.put(id.getOntologyIRI().get(), IRI.create(ontologyDocument.toURI()));
                if (id.getVersionIRI().isPresent()) {
                    iri2Document.put(id.getVersionIRI().get(), IRI.create(ontologyDocument));
                }
                metadataMap.put(id, new ImportedOntologyMetadata(id, handler.getDocumentIRI(), handler.getTimestamp()));
            }
        } catch (IOException | BinaryOWLParseException | UnloadableImportException e) {
            logger.error("An error occurred whilst parsing a cached ontology document: {}", e.getMessage(), e);
        }
    } finally {
        WRITE_LOCK.unlock();
    }
}
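parseOntologyDocument is typically driven once per cached document when the imports cache is loaded from disk. A minimal sketch of such a loop, assuming a cacheDirectory field and that every regular file in that directory is a binary OWL document (the real ImportsCacheManager may lay its cache files out differently):

private void parseCachedDocuments() {
    File[] cachedDocuments = cacheDirectory.listFiles();
    if (cachedDocuments == null) {
        // The cache directory does not exist yet, or is not readable
        return;
    }
    for (File cachedDocument : cachedDocuments) {
        if (cachedDocument.isFile()) {
            // Each document is read with BinaryOWLOntologyDocumentSerializer inside parseOntologyDocument
            parseOntologyDocument(cachedDocument);
        }
    }
}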
use of org.semanticweb.binaryowl.BinaryOWLOntologyDocumentSerializer in project webprotege by protegeproject.
the class ImportsCacheManager method cacheOntologyIfNotAlreadyCached.
private void cacheOntologyIfNotAlreadyCached(OWLOntology ont) {
    try {
        WRITE_LOCK.lock();
        // Nothing to do if this ontology has already been cached
        if (ontologyIDs.contains(ont.getOntologyID())) {
            return;
        }
        final File file = getFreshImportCacheFile();
        try (DataOutputStream os = new DataOutputStream(new BufferedOutputStream(new FileOutputStream(file)))) {
            final long timestamp = System.currentTimeMillis();
            IRI documentIRI = ont.getOWLOntologyManager().getOntologyDocumentIRI(ont);
            final ImportedOntologyMetadata value = new ImportedOntologyMetadata(ont.getOntologyID(), documentIRI, timestamp);
            BinaryOWLMetadata metadata = toBinaryOWLMetadata(value);
            // Serialize the ontology, with its provenance metadata, into a fresh cache file
            BinaryOWLOntologyDocumentSerializer serializer = new BinaryOWLOntologyDocumentSerializer();
            serializer.write(new OWLOntologyWrapper(ont), os, metadata);
            ontologyIDs.add(ont.getOntologyID());
            metadataMap.put(ont.getOntologyID(), value);
            logger.info("Cached imported ontology: {} in {}", ont.getOntologyID(), file.getName());
        } catch (IOException e) {
            logger.error("An error occurred whilst caching an ontology: {}", e.getMessage(), e);
        }
    } finally {
        WRITE_LOCK.unlock();
    }
}
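getFreshImportCacheFile and toBinaryOWLMetadata are project helpers that are not shown above. A plausible sketch of the former, assuming the same cacheDirectory field and a UUID-based naming scheme (the actual webprotege implementation may name its cache files differently):

private File getFreshImportCacheFile() {
    // A random UUID keeps fresh cache entries from colliding with existing ones
    return new File(cacheDirectory, "imported-ontology-" + UUID.randomUUID() + ".binary");
}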