Use of org.eclipse.rdf4j.rio.RDFFormat in project d3web-KnowWE by denkbares.
Class InitTerminologyHandler, method importAttachment.
private void importAttachment(OntologyCompiler compiler, Section<? extends AnnotationContentType> section, String attachmentFile, boolean silent) {
    long start = System.currentTimeMillis();
    Section<AttachmentType> importSection = Sections.successor(section, AttachmentType.class);
    String path = createPath(section, attachmentFile);
    WikiAttachment attachment;
    try {
        attachment = Environment.getInstance().getWikiConnector().getAttachment(path);
    }
    catch (IOException e) {
        Messages.storeMessage(compiler, section, this.getClass(), Messages.error("Error while retrieving attachment '" + attachmentFile + "': " + e.getMessage()));
        return;
    }
    Rdf2GoCore core = compiler.getRdf2GoCore();
    if (core == null) {
        Messages.storeMessage(compiler, section, this.getClass(), Messages.error("No ontology repository found '" + section.getText() + "'"));
        return;
    }
    if (attachment == null) {
        Messages.storeMessage(compiler, section, this.getClass(), Messages.error("Attachment '" + section.getText().trim() + "' not found"));
        return;
    }
    String fileName = attachment.getFileName();
    RDFFormat syntax = Rdf2GoUtils.syntaxForFileName(fileName);
    // read the attachment into the main repository asynchronously
    Future<?> mainReadFuture = executorService.submit(() -> readFrom(compiler, section, core, attachment, syntax));
    if (!silent) {
        // register the imported terminology from the cached dummy repository
        Rdf2GoCore dummyCore = getDummyCore(compiler, section, attachment, syntax);
        registerTerminology(compiler, dummyCore, importSection);
    }
    try {
        // wait for the asynchronous read to finish before reporting success
        mainReadFuture.get();
    }
    catch (InterruptedException | ExecutionException e) {
        handleException(compiler, section, attachment, e);
    }
    long duration = System.currentTimeMillis() - start;
    if (duration > TimeUnit.SECONDS.toMillis(1)) {
        LOGGER.info("Loaded ontology from attachment " + path + " in " + duration + "ms");
    }
}
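Rdf2GoUtils.syntaxForFileName is a KnowWE utility and is not shown here. As a rough sketch under that assumption, an equivalent lookup can be built on RDF4J's own Rio registry; the fallback to Turtle is illustrative, not necessarily what KnowWE does:

import java.util.Optional;

import org.eclipse.rdf4j.rio.RDFFormat;
import org.eclipse.rdf4j.rio.Rio;

public class SyntaxGuess {

    // Hypothetical stand-in for Rdf2GoUtils.syntaxForFileName: derive the RDF
    // serialization from the file extension, defaulting to Turtle (assumption).
    static RDFFormat syntaxForFileName(String fileName) {
        Optional<RDFFormat> format = Rio.getParserFormatForFileName(fileName);
        return format.orElse(RDFFormat.TURTLE);
    }

    public static void main(String[] args) {
        System.out.println(syntaxForFileName("ontology.ttl").getName()); // Turtle
        System.out.println(syntaxForFileName("ontology.owl").getName()); // RDF/XML
    }
}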
Use of org.eclipse.rdf4j.rio.RDFFormat in project d3web-KnowWE by denkbares.
Class InitTerminologyHandler, method getDummyCore.
private Rdf2GoCore getDummyCore(OntologyCompiler compiler, Section<? extends AnnotationContentType> section, WikiAttachment attachment, RDFFormat syntax) {
    TermCache cache;
    // key by path AND date, so a re-uploaded attachment gets a fresh cache entry
    String key = attachment.getPath() + "_" + attachment.getDate();
    synchronized (importCache) {
        cache = importCache.computeIfAbsent(key, k -> {
            // we need RDFS reasoning for the SPARQLs to work
            Rdf2GoCore dummy = new Rdf2GoCore(RepositoryConfigs.find("RDFS"));
            readFrom(compiler, section, dummy, attachment, syntax);
            return new TermCache(dummy, attachment.getPath(), attachment.getDate());
        });
        cache.referencingSections.add(section.getID());
        importCache.values().removeIf(rCache -> {
            boolean remove = noLongerReferenced(rCache) || attachmentOutDated(rCache);
            // if we remove, close the core properly so it can be garbage collected and its memory freed
            if (remove) {
                rCache.core.close();
            }
            return remove;
        });
    }
    return cache.core;
}
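TermCache, importCache, noLongerReferenced and attachmentOutDated are project-internal. A minimal, generic sketch of the same pattern — compute-if-absent under a lock, then evict stale entries and close their resources so memory can be reclaimed — could look like this (all names here are illustrative):

import java.util.HashMap;
import java.util.Map;
import java.util.function.Function;
import java.util.function.Predicate;

// Generic sketch of the cache-and-evict pattern used by getDummyCore.
class ClosingCache<V extends AutoCloseable> {

    private final Map<String, V> cache = new HashMap<>();

    V getOrCreate(String key, Function<String, V> loader, Predicate<V> stale) {
        synchronized (cache) {
            V value = cache.computeIfAbsent(key, loader);
            cache.values().removeIf(cached -> {
                // never evict the entry we are about to hand out
                boolean remove = cached != value && stale.test(cached);
                if (remove) {
                    try {
                        cached.close(); // free resources so the entry can be collected
                    }
                    catch (Exception ignored) {
                        // eviction is best-effort
                    }
                }
                return remove;
            });
            return value;
        }
    }
}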
Use of org.eclipse.rdf4j.rio.RDFFormat in project d3web-KnowWE by denkbares.
Class OntologyExporter, method notify.
@Override
public void notify(Event event) {
    OntologyCompilerFinishedEvent finishedEvent = (OntologyCompilerFinishedEvent) event;
    if (!finishedEvent.isOntologyChanged()) return;
    final OntologyCompiler compiler = finishedEvent.getCompiler();
    final Rdf2GoCore rdf2GoCore = compiler.getRdf2GoCore();
    Section<OntologyType> ontologySection = compiler.getCompileSection();
    synchronized (timers) {
        // a new compile run supersedes any still-pending export
        Timer lastTimer = timers.get(ontologySection.getID());
        if (lastTimer != null) lastTimer.cancel();
        // clean up timers of sections that no longer exist
        timers.keySet().removeIf(sectionId -> Sections.get(sectionId) == null);
    }
    Section<?> exportAnnotation = DefaultMarkupType.getAnnotationContentSection(ontologySection, OntologyType.ANNOTATION_EXPORT);
    // no export specified, we are finished here
    if (exportAnnotation == null) return;
    String export = exportAnnotation.getText();
    String[] split = export.split("/");
    final String title;
    final String annotationName;
    if (split.length == 1) {
        title = ontologySection.getTitle();
        annotationName = split[0];
    }
    else if (split.length == 2) {
        title = split[0];
        annotationName = split[1];
    }
    else {
        Messages.storeMessage(exportAnnotation, this.getClass(), Messages.error("'" + export + "' is not a valid annotation to export to. Use article-name/annotation-name.syntax or only annotation-name.syntax instead."));
        return;
    }
    if (KnowWEUtils.getArticle(ontologySection.getWeb(), title) == null) {
        Messages.storeMessage(exportAnnotation, this.getClass(), Messages.error("Article '" + title + "' does not exist, export has to point to an existing article."));
        return;
    }
    // if we have not failed yet, clean up the messages
    Messages.clearMessages(exportAnnotation, this.getClass());
    RDFFormat parsedSyntax = Rdf2GoUtils.syntaxForFileName(annotationName);
    final RDFFormat syntax = parsedSyntax == null ? RDFFormat.TURTLE : parsedSyntax;
    long exportDelay = getExportDelay(ontologySection);
    Timer timer = new Timer();
    timer.schedule(new TimerTask() {
        @Override
        public void run() {
            Stopwatch stopwatch = new Stopwatch();
            WikiConnector connector = Environment.getInstance().getWikiConnector();
            ByteArrayInputStream stream;
            try {
                ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
                rdf2GoCore.writeModel(outputStream, syntax);
                stream = new ByteArrayInputStream(outputStream.toByteArray());
            }
            catch (Exception e) {
                if (e.getCause() instanceof ClosedChannelException) {
                    LOGGER.warn("Export of ontology from '" + compiler.getCompileSection().getTitle() + "' aborted due to repository shutdown.");
                }
                else {
                    LOGGER.error("Unable to export ontology from '" + compiler.getCompileSection().getTitle() + "'", e);
                }
                return;
            }
            try {
                // delete first, so the attachment versions don't stack up too badly...
                connector.deleteAttachment(title, annotationName, "SYSTEM");
                connector.storeAttachment(title, annotationName, "SYSTEM", stream);
            }
            catch (IOException e) {
                LOGGER.error("Unable to save exported ontology as an attachment in '" + title + "/" + annotationName + "'", e);
                return;
            }
            LOGGER.info("Exported ontology to attachment '" + title + "/" + annotationName + "' in " + stopwatch.getDisplay() + " after a delay of " + Stopwatch.getDisplay(exportDelay));
        }
    }, exportDelay);
    synchronized (timers) {
        timers.put(ontologySection.getID(), timer);
    }
}
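The timer bookkeeping above is a debounce: each finished compile cancels the still-pending export and schedules a new one after exportDelay, so a burst of compiles yields a single export. A reduced, KnowWE-independent sketch of that pattern (names are illustrative):

import java.util.HashMap;
import java.util.Map;
import java.util.Timer;
import java.util.TimerTask;

// Sketch of per-key debouncing: a new schedule cancels the pending task for
// the same key, so only the last request within the delay window runs.
class Debouncer {

    private final Map<String, Timer> timers = new HashMap<>();

    void schedule(String key, long delayMillis, Runnable task) {
        synchronized (timers) {
            Timer lastTimer = timers.get(key);
            if (lastTimer != null) lastTimer.cancel();
            Timer timer = new Timer();
            timer.schedule(new TimerTask() {
                @Override
                public void run() {
                    task.run();
                }
            }, delayMillis);
            timers.put(key, timer);
        }
    }
}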
Use of org.eclipse.rdf4j.rio.RDFFormat in project commons-rdf by apache.
Class RDF4JParser, method parseSynchronusly.
@Override
protected void parseSynchronusly() throws IOException {
    final Optional<RDFFormat> formatByMimeType = getContentType().flatMap(Rio::getParserFormatForMIMEType);
    final String base = getBase().map(IRI::getIRIString).orElse(null);
    final ParserConfig parserConfig = getParserConfig();
    // TODO: Should we need to set anything?
    final RDFLoader loader = new RDFLoader(parserConfig, rdf4jTermFactory.getValueFactory());
    final RDFHandler rdfHandler = makeRDFHandler();
    if (getSourceFile().isPresent()) {
        // NOTE: While we could have used loader.load(sourcePath.toFile())
        // if the path fs provider == FileSystems.getDefault(),
        // that RDFLoader method does not use the absolute path
        // as the base URI, so to be consistent
        // we'll always do it with our own input stream.
        //
        // That means we may have to guess the format by the extension:
        final Optional<RDFFormat> formatByFilename = getSourceFile().map(Path::getFileName).map(Path::toString).flatMap(Rio::getParserFormatForFileName);
        // TODO: for the excited.. what about the extension after following symlinks?
        final RDFFormat format = formatByMimeType.orElse(formatByFilename.orElse(null));
        try (InputStream in = Files.newInputStream(getSourceFile().get())) {
            loader.load(in, base, format, rdfHandler);
        }
        return;
    }
    if (getSourceIri().isPresent()) {
        try {
            // TODO: Handle international IRIs properly
            // (Unicode support for hostname, path and query)
            final URL url = new URL(getSourceIri().get().getIRIString());
            // TODO: This probably does not support https:// -> http:// redirections
            loader.load(url, base, formatByMimeType.orElse(null), makeRDFHandler());
        } catch (final MalformedURLException ex) {
            throw new IOException("Can't handle source URL: " + getSourceIri().get(), ex);
        }
        return;
    }
    // must be getSourceInputStream then, this is guaranteed by super.checkSource();
    loader.load(getSourceInputStream().get(), base, formatByMimeType.orElse(null), rdfHandler);
}
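For comparison, the same two steps — negotiating an RDFFormat and parsing — can be done directly with RDF4J's Rio facade. A minimal, self-contained example; the Turtle snippet and file name are made up for illustration:

import java.io.IOException;
import java.io.StringReader;

import org.eclipse.rdf4j.model.Model;
import org.eclipse.rdf4j.rio.RDFFormat;
import org.eclipse.rdf4j.rio.Rio;

public class RioParseExample {

    public static void main(String[] args) throws IOException {
        String turtle = "@prefix ex: <http://example.org/> . ex:s ex:p ex:o .";
        // guess the format from a (made-up) file name, falling back to Turtle
        RDFFormat format = Rio.getParserFormatForFileName("data.ttl").orElse(RDFFormat.TURTLE);
        // parse the document into an in-memory Model
        Model model = Rio.parse(new StringReader(turtle), "http://example.org/", format);
        System.out.println(model.size()); // 1
    }
}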
Use of org.eclipse.rdf4j.rio.RDFFormat in project AJAN-service by aantakli.
Class MAJANServiceHandler, method ExecuteAgent.
@Override
public String ExecuteAgent(String agentId, String endpoint, MRDFGraph content) throws TException {
    try {
        Agent agent = agentManager.getAgent(agentId);
        if (endpoint == null) {
            agent.execute();
        }
        else {
            // add the content graph to the endpoint
            RDFFormat format = getRDFFormat(content.ContentType);
            if (format == null) {
                return "null";
            }
            LinkedHashModel model = string2Model(content.Graph, format);
            agent.setEndpointEvent(endpoint, model);
        }
        return agent.getUrl();
    }
    catch (AgentNotFoundException e) {
        return "null";
    }
}
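getRDFFormat and string2Model are AJAN helpers not shown in this snippet. A plausible implementation of both on top of RDF4J's Rio, assuming ContentType holds a MIME type such as "text/turtle" (these are sketches, not the project's actual code):

import java.io.IOException;
import java.io.StringReader;

import org.eclipse.rdf4j.model.impl.LinkedHashModel;
import org.eclipse.rdf4j.rio.RDFFormat;
import org.eclipse.rdf4j.rio.Rio;

public class GraphHelpers {

    // Hypothetical equivalent of getRDFFormat: resolve a MIME type such as
    // "text/turtle" to an RDFFormat, or null if it is unknown.
    static RDFFormat getRDFFormat(String contentType) {
        return Rio.getParserFormatForMIMEType(contentType).orElse(null);
    }

    // Hypothetical equivalent of string2Model: parse a serialized graph into
    // an in-memory LinkedHashModel.
    static LinkedHashModel string2Model(String graph, RDFFormat format) throws IOException {
        return new LinkedHashModel(Rio.parse(new StringReader(graph), "", format));
    }
}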