use of org.eclipse.rdf4j.rio.helpers.AbstractRDFHandler in project incubator-rya by apache.
the class KafkaLoadStatements method fromFile.
@Override
public void fromFile(final Path statementsPath, final String visibilities) throws RyaStreamsException {
    requireNonNull(statementsPath);
    requireNonNull(visibilities);

    if (!statementsPath.toFile().exists()) {
        throw new RyaStreamsException("Could not load statements at path '" + statementsPath +
                "' because that file does not exist. Make sure you've entered the correct path.");
    }

    // Create an RDF Parser whose format is derived from the statementsPath's file extension.
    final String filename = statementsPath.getFileName().toString();
    final RDFFormat format = RdfFormatUtils.forFileName(filename);
    if (format == null) {
        throw new UnsupportedRDFormatException("Unknown RDF format for the file: " + filename);
    }
    final RDFParser parser = Rio.createParser(format);

    // Set a handler that writes the statements to the specified Kafka topic.
    parser.setRDFHandler(new AbstractRDFHandler() {
        @Override
        public void startRDF() throws RDFHandlerException {
            log.trace("Starting loading statements.");
        }

        @Override
        public void handleStatement(final Statement stmnt) throws RDFHandlerException {
            final VisibilityStatement visiStatement = new VisibilityStatement(stmnt, visibilities);
            producer.send(new ProducerRecord<>(topic, visiStatement));
        }

        @Override
        public void endRDF() throws RDFHandlerException {
            producer.flush();
            log.trace("Done.");
        }
    });

    // Do the parse and load.
    try {
        parser.parse(Files.newInputStream(statementsPath), "");
    } catch (RDFParseException | RDFHandlerException | IOException e) {
        throw new RyaStreamsException("Could not load the RDF file's Statements into Rya Streams.", e);
    }
}
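For context, a caller might wire this method up roughly as follows. This is a minimal sketch, not code from the project: the KafkaLoadStatements constructor arguments, the import packages, and the VisibilityStatementSerializer class name are assumptions about how the producer gets configured.

import java.nio.file.Paths;
import java.util.Properties;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.apache.rya.api.model.VisibilityStatement;                 // package assumed
import org.apache.rya.streams.kafka.interactor.KafkaLoadStatements;  // package assumed

public class LoadStatementsExample {
    public static void main(final String[] args) throws Exception {
        final Properties props = new Properties();
        props.setProperty("bootstrap.servers", "localhost:9092");
        props.setProperty("key.serializer", StringSerializer.class.getName());
        // The value serializer class name is an assumption; Rya Streams is
        // expected to ship a serializer for VisibilityStatement values.
        props.setProperty("value.serializer",
                "org.apache.rya.streams.kafka.serialization.VisibilityStatementSerializer");

        try (Producer<String, VisibilityStatement> producer = new KafkaProducer<>(props)) {
            // Constructor arguments are an assumption based on the topic and
            // producer fields used inside fromFile above.
            final KafkaLoadStatements load = new KafkaLoadStatements("statements-topic", producer);
            load.fromFile(Paths.get("statements.ttl"), "a&b");
        }
    }
}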
use of org.eclipse.rdf4j.rio.helpers.AbstractRDFHandler in project incubator-rya by apache.
the class InferenceEngine method refreshSomeValuesFromRestrictions.
private void refreshSomeValuesFromRestrictions(final Map<Resource, IRI> restrictions) throws QueryEvaluationException {
    someValuesFromByRestrictionType.clear();
    ryaDaoQueryWrapper.queryAll(null, OWL.SOMEVALUESFROM, null, new AbstractRDFHandler() {
        @Override
        public void handleStatement(final Statement statement) throws RDFHandlerException {
            final Resource restrictionClass = statement.getSubject();
            if (restrictions.containsKey(restrictionClass) && statement.getObject() instanceof Resource) {
                final IRI property = restrictions.get(restrictionClass);
                final Resource valueClass = (Resource) statement.getObject();
                // Should also be triggered by subclasses of the value class.
                final Set<Resource> valueClasses = new HashSet<>();
                valueClasses.add(valueClass);
                if (valueClass instanceof IRI) {
                    valueClasses.addAll(getSubClasses((IRI) valueClass));
                }
                for (final Resource valueSubClass : valueClasses) {
                    if (!someValuesFromByRestrictionType.containsKey(restrictionClass)) {
                        someValuesFromByRestrictionType.put(restrictionClass, new ConcurrentHashMap<>());
                    }
                    someValuesFromByRestrictionType.get(restrictionClass).put(valueSubClass, property);
                }
            }
        }
    });
}
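The containsKey/put pair that populates the nested map above can be written more compactly with Map.computeIfAbsent. A standalone sketch of the idiom, using plain String keys in place of the RDF types:

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

public class NestedMapIdiom {
    public static void main(final String[] args) {
        final Map<String, Map<String, String>> someValuesFrom = new ConcurrentHashMap<>();
        // Equivalent to the containsKey check followed by put: the inner map is
        // created only when the restriction class is seen for the first time.
        someValuesFrom
                .computeIfAbsent("restrictionClass", k -> new ConcurrentHashMap<>())
                .put("valueSubClass", "property");
        System.out.println(someValuesFrom);
    }
}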
use of org.eclipse.rdf4j.rio.helpers.AbstractRDFHandler in project incubator-rya by apache.
the class InferenceEngine method refreshIntersectionOf.
private void refreshIntersectionOf() throws QueryEvaluationException {
    final Map<Resource, List<Set<Resource>>> intersectionsProp = new HashMap<>();

    // First query for all the owl:intersectionOf's.
    // If we have the following intersectionOf:
    //     :A owl:intersectionOf [:B, :C]
    // It will be represented by triples following a pattern similar to:
    //     <:A> owl:intersectionOf _:bnode1 .
    //     _:bnode1 rdf:first <:B> .
    //     _:bnode1 rdf:rest _:bnode2 .
    //     _:bnode2 rdf:first <:C> .
    //     _:bnode2 rdf:rest rdf:nil .
    ryaDaoQueryWrapper.queryAll(null, OWL.INTERSECTIONOF, null, new AbstractRDFHandler() {
        @Override
        public void handleStatement(final Statement statement) throws RDFHandlerException {
            final Resource type = statement.getSubject();
            // head points to the first node of the RDF list that holds the
            // types making up the intersection.
            final IRI head = (IRI) statement.getObject();
            if (!intersectionsProp.containsKey(type)) {
                intersectionsProp.put(type, new ArrayList<Set<Resource>>());
            }

            // Walk the RDF list to collect the types that make up this intersection.
            try {
                final Set<Resource> intersection = new LinkedHashSet<>(getList(head));
                if (!intersection.isEmpty()) {
                    // Add this intersection for this type. There may be more
                    // intersections for this type so each type has a list of
                    // intersection sets.
                    intersectionsProp.get(type).add(intersection);
                }
            } catch (final QueryEvaluationException e) {
                throw new RDFHandlerException("Error getting intersection list.", e);
            }
        }
    });

    intersections.clear();
    for (final Entry<Resource, List<Set<Resource>>> entry : intersectionsProp.entrySet()) {
        final Resource type = entry.getKey();
        final List<Set<Resource>> intersectionList = entry.getValue();
        final Set<Resource> otherTypes = new HashSet<>();
        // Combine all of a type's intersections together.
        for (final Set<Resource> intersection : intersectionList) {
            otherTypes.addAll(intersection);
        }
        for (final Resource other : otherTypes) {
            // :A intersectionOf [:B, :C] implies that
            //     :A subclassOf :B
            //     :A subclassOf :C
            // So add each type that's part of the intersection to the
            // subClassOf graph.
            addSubClassOf(type, other);
            for (final Set<Resource> intersection : intersectionList) {
                if (!intersection.contains(other)) {
                    addIntersection(intersection, other);
                }
            }
        }
        for (final Set<Resource> intersection : intersectionList) {
            addIntersection(intersection, type);
        }
    }

    for (final Entry<Resource, List<Set<Resource>>> entry : intersectionsProp.entrySet()) {
        final Resource type = entry.getKey();
        final List<Set<Resource>> intersectionList = entry.getValue();
        final Set<IRI> superClasses = getSuperClasses((IRI) type);
        for (final IRI superClass : superClasses) {
            // If :A owl:intersectionOf [:B, :C] and :A subclassOf :D, then the
            // intersection is also a subclass of :D:
            //     intersectionOf[:B, :C] subclassOf :D
            for (final Set<Resource> intersection : intersectionList) {
                addIntersection(intersection, superClass);
            }
        }
        // If another type is defined by the same intersection, then the two
        // types are equivalent, so record subClassOf in both directions.
        for (final Set<Resource> intersection : intersectionList) {
            final Set<Resource> otherKeys = Sets.newHashSet(intersectionsProp.keySet());
            otherKeys.remove(type);
            for (final Resource otherKey : otherKeys) {
                if (intersectionsProp.get(otherKey).contains(intersection)) {
                    addSubClassOf(otherKey, type);
                    addSubClassOf(type, otherKey);
                }
            }
        }
    }
}
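The comment block at the top of the method shows how an owl:intersectionOf list is encoded as an rdf:first/rdf:rest chain. The getList(head) helper is assumed to walk such a chain against the Rya DAO; here is a self-contained sketch of the same traversal over an in-memory RDF4J Model.

import java.util.ArrayList;
import java.util.List;
import org.eclipse.rdf4j.model.Model;
import org.eclipse.rdf4j.model.Resource;
import org.eclipse.rdf4j.model.Value;
import org.eclipse.rdf4j.model.ValueFactory;
import org.eclipse.rdf4j.model.impl.LinkedHashModel;
import org.eclipse.rdf4j.model.impl.SimpleValueFactory;
import org.eclipse.rdf4j.model.util.Models;
import org.eclipse.rdf4j.model.vocabulary.OWL;
import org.eclipse.rdf4j.model.vocabulary.RDF;

public class RdfListWalker {
    /** Collects the members of the rdf:List starting at head. */
    public static List<Value> toJavaList(final Model model, Resource head) {
        final List<Value> members = new ArrayList<>();
        while (head != null && !RDF.NIL.equals(head)) {
            Models.object(model.filter(head, RDF.FIRST, null)).ifPresent(members::add);
            head = Models.objectResource(model.filter(head, RDF.REST, null)).orElse(null);
        }
        return members;
    }

    public static void main(final String[] args) {
        // Build the :A owl:intersectionOf [:B, :C] pattern from the comment above.
        final ValueFactory vf = SimpleValueFactory.getInstance();
        final Model model = new LinkedHashModel();
        final Resource node1 = vf.createBNode();
        final Resource node2 = vf.createBNode();
        model.add(vf.createIRI("urn:A"), OWL.INTERSECTIONOF, node1);
        model.add(node1, RDF.FIRST, vf.createIRI("urn:B"));
        model.add(node1, RDF.REST, node2);
        model.add(node2, RDF.FIRST, vf.createIRI("urn:C"));
        model.add(node2, RDF.REST, RDF.NIL);
        System.out.println(toJavaList(model, node1)); // [urn:B, urn:C]
    }
}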
use of org.eclipse.rdf4j.rio.helpers.AbstractRDFHandler in project incubator-rya by apache.
the class WriteStatementsCommand method execute.
@Override
public void execute(final String[] args) throws ArgumentsException, ExecutionException {
    requireNonNull(args);

    // Parse the command line arguments.
    final WriteParameters params = new WriteParameters();
    try {
        new JCommander(params, args);
    } catch (final ParameterException e) {
        throw new ArgumentsException("Could not write the statements to the topic because of invalid command line parameters.", e);
    }

    // Verify the configured statements file path.
    final Path statementsPath = Paths.get(params.statementsFile);
    if (!statementsPath.toFile().exists()) {
        throw new ArgumentsException("Could not load statements at path '" + statementsPath +
                "' because that file does not exist. Make sure you've entered the correct path.");
    }

    // Create an RDF Parser whose format is derived from the statementsPath's file extension.
    final String filename = statementsPath.getFileName().toString();
    final RDFFormat format = RdfFormatUtils.forFileName(filename);
    if (format == null) {
        throw new UnsupportedRDFormatException("Unknown RDF format for the file: " + filename);
    }
    final RDFParser parser = Rio.createParser(format);

    // Set up the producer.
    try (Producer<String, Set<Statement>> producer = makeProducer(params)) {
        // Set a handler that writes the statements to the specified Kafka topic
        // in batches of 5 statements.
        parser.setRDFHandler(new AbstractRDFHandler() {
            private Set<Statement> batch = new HashSet<>(5);

            @Override
            public void startRDF() throws RDFHandlerException {
                log.trace("Starting loading statements.");
            }

            @Override
            public void handleStatement(final Statement stmnt) throws RDFHandlerException {
                log.trace("Adding statement.");
                batch.add(stmnt);

                if (batch.size() == 5) {
                    flushBatch();
                }
            }

            @Override
            public void endRDF() throws RDFHandlerException {
                if (!batch.isEmpty()) {
                    flushBatch();
                }
                log.trace("Done.");
            }

            private void flushBatch() {
                log.trace("Flushing batch of size " + batch.size());
                producer.send(new ProducerRecord<>(params.topic, null, batch));
                batch = new HashSet<>(5);
                producer.flush();
            }
        });

        // Do the parse and load.
        try {
            parser.parse(Files.newInputStream(statementsPath), "");
        } catch (RDFParseException | RDFHandlerException | IOException e) {
            throw new ExecutionException("Could not load the RDF file's Statements into the Kafka topic.", e);
        }
    }
}
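makeProducer(params) is not shown here. A plausible shape for it, as a hedged sketch: the StatementsSerializer class and its package are assumptions (a Rio-based counterpart of the StatementsDeserializer shown in the next example), and the bootstrap servers value would come from the parsed parameters.

import java.util.Properties;
import java.util.Set;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.eclipse.rdf4j.model.Statement;

public class ProducerFactorySketch {
    public static Producer<String, Set<Statement>> makeProducer(final String bootstrapServers) {
        final Properties props = new Properties();
        props.setProperty("bootstrap.servers", bootstrapServers);
        props.setProperty("key.serializer", StringSerializer.class.getName());
        // Assumed class name: a serializer that writes a Set<Statement> using
        // the same Rio binary format the deserializer below reads.
        props.setProperty("value.serializer",
                "org.apache.rya.streams.kafka.serialization.StatementsSerializer");
        return new KafkaProducer<>(props);
    }
}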
use of org.eclipse.rdf4j.rio.helpers.AbstractRDFHandler in project incubator-rya by apache.
the class StatementsDeserializer method deserialize.
@Override
public Set<Statement> deserialize(final String topic, final byte[] data) {
    if (data == null || data.length == 0) {
        // Return null because that is the contract of this method.
        return null;
    }

    try {
        final RDFParser parser = PARSER_FACTORY.getParser();
        final Set<Statement> statements = new HashSet<>();

        parser.setRDFHandler(new AbstractRDFHandler() {
            @Override
            public void handleStatement(final Statement statement) throws RDFHandlerException {
                log.debug("Statement: " + statement);
                statements.add(statement);
            }
        });

        parser.parse(new ByteArrayInputStream(data), null);
        return statements;
    } catch (final RDFParseException | RDFHandlerException | IOException e) {
        log.error("Could not deserialize a Set of Statement objects using the RDF4J Rio Binary format.", e);
        return null;
    }
}
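PARSER_FACTORY is assumed to produce a binary-format parser, matching the error message. The round trip this deserializer relies on can be reproduced with stock RDF4J as a standalone sketch:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.util.HashSet;
import java.util.Set;
import org.eclipse.rdf4j.model.Statement;
import org.eclipse.rdf4j.model.ValueFactory;
import org.eclipse.rdf4j.model.impl.SimpleValueFactory;
import org.eclipse.rdf4j.rio.RDFFormat;
import org.eclipse.rdf4j.rio.RDFParser;
import org.eclipse.rdf4j.rio.RDFWriter;
import org.eclipse.rdf4j.rio.Rio;
import org.eclipse.rdf4j.rio.helpers.AbstractRDFHandler;

public class BinaryRoundTrip {
    public static void main(final String[] args) throws Exception {
        final ValueFactory vf = SimpleValueFactory.getInstance();
        final Statement original = vf.createStatement(
                vf.createIRI("urn:alice"), vf.createIRI("urn:talksTo"), vf.createIRI("urn:bob"));

        // Write one statement using the Rio binary format.
        final ByteArrayOutputStream out = new ByteArrayOutputStream();
        final RDFWriter writer = Rio.createWriter(RDFFormat.BINARY, out);
        writer.startRDF();
        writer.handleStatement(original);
        writer.endRDF();

        // Read it back the same way deserialize(...) does: an AbstractRDFHandler
        // that collects every parsed statement into a set.
        final Set<Statement> statements = new HashSet<>();
        final RDFParser parser = Rio.createParser(RDFFormat.BINARY);
        parser.setRDFHandler(new AbstractRDFHandler() {
            @Override
            public void handleStatement(final Statement statement) {
                statements.add(statement);
            }
        });
        parser.parse(new ByteArrayInputStream(out.toByteArray()), null);

        System.out.println(statements.contains(original)); // true
    }
}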