Example use of org.openrdf.rio.helpers.RDFHandlerBase in the Apache incubator-rya project:
class KafkaLoadStatements, method fromFile.
/**
 * Parses the RDF file at {@code statementsPath} and publishes each statement, tagged
 * with {@code visibilities}, to the configured Kafka topic. The RDF format is derived
 * from the file's extension.
 *
 * @param statementsPath - Path to the RDF file to load. (not null)
 * @param visibilities - The visibility expression attached to every statement. (not null)
 * @throws RyaStreamsException The file does not exist, could not be read, or could not be parsed.
 */
@Override
public void fromFile(final Path statementsPath, final String visibilities) throws RyaStreamsException {
    requireNonNull(statementsPath);
    requireNonNull(visibilities);

    if (!statementsPath.toFile().exists()) {
        throw new RyaStreamsException("Could not load statements at path '" + statementsPath + "' because that " + "does not exist. Make sure you've entered the correct path.");
    }

    // Create an RDF Parser whose format is derived from the statementPath's file extension.
    final RDFFormat format = RDFFormat.forFileName(statementsPath.getFileName().toString());
    final RDFParser parser = Rio.createParser(format);

    // Set a handler that writes the statements to the specified kafka topic.
    parser.setRDFHandler(new RDFHandlerBase() {
        @Override
        public void startRDF() throws RDFHandlerException {
            log.trace("Starting loading statements.");
        }

        @Override
        public void handleStatement(final Statement stmnt) throws RDFHandlerException {
            final VisibilityStatement visiStatement = new VisibilityStatement(stmnt, visibilities);
            producer.send(new ProducerRecord<>(topic, visiStatement));
        }

        @Override
        public void endRDF() throws RDFHandlerException {
            // Flush so every record is actually handed to the broker before we report done.
            producer.flush();
            log.trace("Done.");
        }
    });

    // Do the parse and load. try-with-resources guarantees the input stream is closed
    // even when parsing fails (the original code leaked the stream on every path).
    try (final java.io.InputStream statementsStream = Files.newInputStream(statementsPath)) {
        parser.parse(statementsStream, "");
    } catch (final RDFParseException | RDFHandlerException | IOException e) {
        throw new RyaStreamsException("Could not load the RDF file's Statements into Rya Streams.", e);
    }
}
Example use of org.openrdf.rio.helpers.RDFHandlerBase in the Apache incubator-rya project:
class InferenceEngine, method refreshAllValuesFromRestrictions.
/**
 * Rebuilds the owl:allValuesFrom lookup table from the store. For each restriction
 * known to {@code restrictions}, records that instances of the restriction class (and
 * its subclasses) constrain the given property's values to the value class.
 *
 * @param restrictions - Maps each restriction class to the property it restricts. (not null)
 * @throws QueryEvaluationException Querying the backing store failed.
 */
private void refreshAllValuesFromRestrictions(final Map<Resource, URI> restrictions) throws QueryEvaluationException {
    allValuesFromByValueType.clear();
    ryaDaoQueryWrapper.queryAll(null, OWL.ALLVALUESFROM, null, new RDFHandlerBase() {
        @Override
        public void handleStatement(final Statement statement) throws RDFHandlerException {
            final Resource directRestrictionClass = statement.getSubject();
            // Only restrictions we know about, and only when the value is a Resource.
            if (restrictions.containsKey(directRestrictionClass) && statement.getObject() instanceof Resource) {
                final URI property = restrictions.get(directRestrictionClass);
                final Resource valueClass = (Resource) statement.getObject();
                // Should also be triggered by subclasses of the property restriction
                final Set<Resource> restrictionClasses = new HashSet<>();
                restrictionClasses.add(directRestrictionClass);
                if (directRestrictionClass instanceof URI) {
                    restrictionClasses.addAll(getSubClasses((URI) directRestrictionClass));
                }
                for (final Resource restrictionClass : restrictionClasses) {
                    // computeIfAbsent replaces the original containsKey/put/get triple
                    // lookup with a single idiomatic map access; contents are identical.
                    allValuesFromByValueType
                            .computeIfAbsent(valueClass, k -> new ConcurrentHashMap<>())
                            .put(restrictionClass, property);
                }
            }
        }
    });
}
Example use of org.openrdf.rio.helpers.RDFHandlerBase in the Apache incubator-rya project:
class InferenceEngine, method fetchInstances.
/**
 * Collects every resource explicitly declared (via rdf:type) to be an instance of the
 * given type. All matches are accumulated in memory, so this should only be used for
 * types expected to have few members, such as ontology vocabulary terms.
 *
 * @param type - The type whose instances are fetched. (not null)
 * @return Every URI subject typed as {@code type}; non-URI subjects are skipped.
 * @throws QueryEvaluationException Querying the backing store failed.
 */
private Set<URI> fetchInstances(final URI type) throws QueryEvaluationException {
    final Set<URI> members = new HashSet<>();
    ryaDaoQueryWrapper.queryAll(null, RDF.TYPE, type, new RDFHandlerBase() {
        @Override
        public void handleStatement(final Statement statement) throws RDFHandlerException {
            final Resource subject = statement.getSubject();
            // Blank-node subjects cannot be represented as URIs, so only URIs are kept.
            if (subject instanceof URI) {
                members.add((URI) subject);
            }
        }
    });
    return members;
}
Example use of org.openrdf.rio.helpers.RDFHandlerBase in the Apache incubator-rya project:
class InferenceEngine, method refreshSomeValuesFromRestrictions.
/**
 * Rebuilds the owl:someValuesFrom lookup table from the store. For each restriction
 * known to {@code restrictions}, records that the restriction is implied by having the
 * property with a value of the value class (or any of its subclasses).
 *
 * @param restrictions - Maps each restriction class to the property it restricts. (not null)
 * @throws QueryEvaluationException Querying the backing store failed.
 */
private void refreshSomeValuesFromRestrictions(final Map<Resource, URI> restrictions) throws QueryEvaluationException {
    someValuesFromByRestrictionType.clear();
    ryaDaoQueryWrapper.queryAll(null, OWL.SOMEVALUESFROM, null, new RDFHandlerBase() {
        @Override
        public void handleStatement(final Statement statement) throws RDFHandlerException {
            final Resource restrictionClass = statement.getSubject();
            // Only restrictions we know about, and only when the value is a Resource.
            if (restrictions.containsKey(restrictionClass) && statement.getObject() instanceof Resource) {
                final URI property = restrictions.get(restrictionClass);
                final Resource valueClass = (Resource) statement.getObject();
                // Should also be triggered by subclasses of the value class
                final Set<Resource> valueClasses = new HashSet<>();
                valueClasses.add(valueClass);
                if (valueClass instanceof URI) {
                    valueClasses.addAll(getSubClasses((URI) valueClass));
                }
                for (final Resource valueSubClass : valueClasses) {
                    // computeIfAbsent replaces the original containsKey/put/get triple
                    // lookup with a single idiomatic map access; contents are identical.
                    someValuesFromByRestrictionType
                            .computeIfAbsent(restrictionClass, k -> new ConcurrentHashMap<>())
                            .put(valueSubClass, property);
                }
            }
        }
    });
}
Example use of org.openrdf.rio.helpers.RDFHandlerBase in the Apache incubator-rya project:
class InferenceEngine, method refreshIntersectionOf.
/**
 * Rebuilds the in-memory model of owl:intersectionOf definitions by querying the
 * backing store, then derives the relationships those definitions imply:
 * subClassOf edges from each defined type to every member of its intersections,
 * intersection-to-type mappings, propagation to superclasses, and mutual
 * subClassOf edges between types that declare an identical intersection.
 *
 * @throws QueryEvaluationException Querying the backing store failed.
 */
private void refreshIntersectionOf() throws QueryEvaluationException {
// Maps each defined type to every intersection list declared for it (a type may
// declare several intersections, so the value is a list of member sets).
final Map<Resource, List<Set<Resource>>> intersectionsProp = new HashMap<>();
// First query for all the owl:intersectionOf's.
// If we have the following intersectionOf:
// :A owl:intersectionOf[:B, :C]
// It will be represented by triples following a pattern similar to:
// <:A> owl:intersectionOf _:bnode1 .
// _:bnode1 rdf:first <:B> .
// _:bnode1 rdf:rest _:bnode2 .
// _:bnode2 rdf:first <:C> .
// _:bnode2 rdf:rest rdf:nil .
ryaDaoQueryWrapper.queryAll(null, OWL.INTERSECTIONOF, null, new RDFHandlerBase() {
@Override
public void handleStatement(final Statement statement) throws RDFHandlerException {
final Resource type = statement.getSubject();
// head will point to a type that is part of the intersection.
// NOTE(review): per the triple pattern documented above, the object of
// owl:intersectionOf is typically a blank node (_:bnode1), so this unchecked
// cast to URI looks suspect — confirm what the store actually returns here,
// or whether getList's parameter could be widened to Resource.
final URI head = (URI) statement.getObject();
if (!intersectionsProp.containsKey(type)) {
intersectionsProp.put(type, new ArrayList<Set<Resource>>());
}
// Walk the rdf:first/rdf:rest chain starting at head to materialize this
// intersection's member set (order preserved via LinkedHashSet).
try {
final Set<Resource> intersection = new LinkedHashSet<>(getList(head));
if (!intersection.isEmpty()) {
// Add this intersection for this type. There may be more
// intersections for this type so each type has a list of
// intersection sets.
intersectionsProp.get(type).add(intersection);
}
} catch (final QueryEvaluationException e) {
// getList is declared with a checked exception the handler cannot throw,
// so it is rewrapped in the handler's exception type with cause preserved.
throw new RDFHandlerException("Error getting intersection list.", e);
}
}
});
intersections.clear();
for (final Entry<Resource, List<Set<Resource>>> entry : intersectionsProp.entrySet()) {
final Resource type = entry.getKey();
final List<Set<Resource>> intersectionList = entry.getValue();
final Set<Resource> otherTypes = new HashSet<>();
// Combine all of a type's intersections together.
for (final Set<Resource> intersection : intersectionList) {
otherTypes.addAll(intersection);
}
for (final Resource other : otherTypes) {
// :A intersectionOf[:B, :C] implies that
// :A subclassOf :B
// :A subclassOf :C
// So add each type that's part of the intersection to the
// subClassOf graph.
addSubClassOf(type, other);
// Register 'other' against every intersection that does not already
// contain it (membership would make the mapping redundant).
for (final Set<Resource> intersection : intersectionList) {
if (!intersection.contains(other)) {
addIntersection(intersection, other);
}
}
}
// Each intersection also maps back to the type that declared it.
for (final Set<Resource> intersection : intersectionList) {
addIntersection(intersection, type);
}
}
for (final Entry<Resource, List<Set<Resource>>> entry : intersectionsProp.entrySet()) {
final Resource type = entry.getKey();
final List<Set<Resource>> intersectionList = entry.getValue();
// NOTE(review): 'type' is a Resource and OWL class definitions may use blank
// nodes as subjects, so this unchecked cast to URI may throw — confirm that
// subjects of owl:intersectionOf are always URIs in practice.
final Set<URI> superClasses = getSuperClasses((URI) type);
for (final URI superClass : superClasses) {
// intersectionOf[:B, :C] subclassOf :D
for (final Set<Resource> intersection : intersectionList) {
addIntersection(intersection, superClass);
}
}
// Two different types declaring the same intersection are equivalent: add
// subClassOf edges in both directions between them.
for (final Set<Resource> intersection : intersectionList) {
final Set<Resource> otherKeys = Sets.newHashSet(intersectionsProp.keySet());
otherKeys.remove(type);
for (final Resource otherKey : otherKeys) {
if (intersectionsProp.get(otherKey).contains(intersection)) {
addSubClassOf(otherKey, type);
addSubClassOf(type, otherKey);
}
}
}
}
}
Aggregations