Use of org.openrdf.repository.Repository in project incubator-rya by apache.
In class ConformanceTest, method runTest:
/**
 * Verify that we can infer the correct triples or detect an inconsistency.
 * @param conf Specifies working directory, etc.
 * @param args Arguments passed through to the MapReduce reasoner
 * @param test Contains premise/conclusion graphs; will store the result
 * @return Return value of the MapReduce job
 */
int runTest(final Configuration conf, final String[] args, final OwlTest test) throws Exception {
    conf.setInt(MRReasoningUtils.STEP_PROP, 0);
    conf.setInt(MRReasoningUtils.SCHEMA_UPDATE_PROP, 0);
    conf.setBoolean(MRReasoningUtils.DEBUG_FLAG, true);
    conf.setBoolean(MRReasoningUtils.OUTPUT_FLAG, true);
    // Connect to MiniAccumulo and load the test premise
    final Repository repo = MRReasoningUtils.getRepository(conf);
    repo.initialize();
    final RepositoryConnection conn = repo.getConnection();
    conn.clear();
    conn.add(new StringReader(test.premise), "", RDFFormat.RDFXML);
    conn.close();
    repo.shutDown();
    // Run the MapReduce reasoner
    final ReasoningDriver reasoner = new ReasoningDriver();
    final int result = ToolRunner.run(conf, reasoner, args);
    test.success = (result == 0);
    // Inconsistency test: successful if determined inconsistent
    if (test.types.contains(TEST_INCONSISTENCY)) {
        test.success = test.success && reasoner.hasInconsistencies();
    }
    // Consistency test: successful if determined consistent
    if (test.types.contains(TEST_CONSISTENCY)) {
        test.success = test.success && !reasoner.hasInconsistencies();
    }
    // Other types: we'll need to look at the inferred triples/schema
    if (test.types.contains(TEST_NONENTAILMENT) || test.types.contains(TEST_ENTAILMENT)) {
        System.out.println("Reading inferred triples...");
        // Read in the inferred triples from HDFS:
        final Schema schema = MRReasoningUtils.loadSchema(conf);
        final FileSystem fs = FileSystem.get(conf);
        final Path configuredPath = MRReasoningUtils.getOutputPath(conf, "final");
        final Path path = PathUtils.cleanHadoopPath(configuredPath, conf);
        final OutputCollector inferred = new OutputCollector();
        final NTriplesParser parser = new NTriplesParser();
        parser.setRDFHandler(inferred);
        if (fs.isDirectory(path)) {
            for (final FileStatus status : fs.listStatus(path)) {
                final String s = status.getPath().getName();
                // Skip the inconsistency and debug output files
                if (s.startsWith(MRReasoningUtils.INCONSISTENT_OUT) || s.startsWith(MRReasoningUtils.DEBUG_OUT)) {
                    continue;
                }
                final BufferedReader br = new BufferedReader(
                        new InputStreamReader(fs.open(status.getPath()), StandardCharsets.UTF_8));
                try {
                    parser.parse(br, "");
                } finally {
                    // Close the reader even if parsing fails
                    br.close();
                }
            }
        }
        MRReasoningUtils.deleteIfExists(conf, "final");
        test.inferred.addAll(inferred.triples);
        // Entailment test: successful if expected triples were inferred
        if (test.types.contains(TEST_ENTAILMENT)) {
            // An expected triple counts as entailed if it was inferred, is
            // trivially true, or appears in the schema built by the schema reasoner
            for (final Statement st : test.expected) {
                final Fact fact = new Fact(st);
                if (!test.inferred.contains(st) && !triviallyTrue(fact.getTriple(), schema)
                        && !schema.containsTriple(fact.getTriple())) {
                    test.error.add(st);
                }
            }
        }
        // Non-entailment test: failure if unexpected triples were inferred
        if (test.types.contains(TEST_NONENTAILMENT)) {
            for (final Statement st : test.unexpected) {
                final Fact fact = new Fact(st);
                if (test.inferred.contains(st) || schema.containsTriple(fact.getTriple())) {
                    test.error.add(st);
                }
            }
        }
        test.success = test.success && test.error.isEmpty();
    }
    conf.setBoolean(MRReasoningUtils.DEBUG_FLAG, false);
    MRReasoningUtils.clean(conf);
    return result;
}
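The OutputCollector instantiated above is not Hadoop's interface of the same name: it is handed to parser.setRDFHandler(...) and exposes a triples field, so it must be the test's own RDFHandler that accumulates parsed statements. A minimal sketch of such a collector, assuming Sesame's RDFHandlerBase as the base class (an assumption, not Rya's actual source):

import java.util.HashSet;
import java.util.Set;

import org.openrdf.model.Statement;
import org.openrdf.rio.helpers.RDFHandlerBase;

// Hypothetical stand-in for the test's OutputCollector: an RDFHandler that
// simply accumulates every statement the NTriplesParser reports.
class OutputCollector extends RDFHandlerBase {
    final Set<Statement> triples = new HashSet<Statement>();

    @Override
    public void handleStatement(final Statement st) {
        triples.add(st);
    }
}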
Use of org.openrdf.repository.Repository in project backstage by zepheira.
In class DataLoadingUtilities, method loadDataFromStream:
public static void loadDataFromStream(InputStream stream, String sourceURL, String lang, Sail sail) throws Exception {
    RepoSailTuple rs = createMemoryRepository(null);
    Repository r = rs.repository;
    lang = lang.toLowerCase();
    if ("exhibit/json".equals(lang)) {
        Properties properties = new Properties();
        BabelReader reader = new ExhibitJsonReader();
        try {
            if (reader.takesReader()) {
                InputStreamReader isr = new InputStreamReader(stream);
                reader.read(isr, sail, properties, Locale.getDefault());
            } else {
                reader.read(stream, sail, properties, Locale.getDefault());
            }
        } finally {
            stream.close();
        }
    } else {
        RDFParser parser = null;
        if ("rdfxml".equals(lang)) {
            parser = new RDFXMLParser(r.getValueFactory());
        } else if ("n3".equals(lang) || "turtle".equals(lang)) {
            parser = new TurtleParser(r.getValueFactory());
        } else if ("ntriples".equals(lang)) {
            parser = new NTriplesParser(r.getValueFactory());
        }
        if (parser == null) {
            // Fail fast on unsupported formats instead of hitting a
            // NullPointerException at parser.setRDFHandler(...) below
            stream.close();
            throw new IllegalArgumentException("Unsupported RDF format: " + lang);
        }
        try {
            SailConnection c = null;
            try {
                c = sail.getConnection();
                BNodeConverterStatementHandler handler = new BNodeConverterStatementHandler(c);
                parser.setRDFHandler(handler);
                parser.setParseErrorListener(new LoggingParseErrorListener(sourceURL));
                parser.setVerifyData(false);
                parser.setStopAtFirstError(false);
                parser.parse(stream, sourceURL);
                c.commit();
                _logger.info("Read " + handler.m_count + " statements from '" + sourceURL + "'");
            } catch (RepositoryException e) {
                // Roll back the partial load before closing the connection
                if (c != null)
                    c.rollback();
            } finally {
                if (c != null)
                    c.close();
            }
        } catch (Exception e) {
            throw new ModelReadFromFileException("Failed to read data from '" + sourceURL + "'", e);
        } finally {
            stream.close();
        }
    }
}
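A possible way to call this method, assuming a plain in-memory Sail and an illustrative file name (both assumptions, not part of the Backstage code):

import java.io.FileInputStream;
import java.io.InputStream;

import org.openrdf.sail.Sail;
import org.openrdf.sail.memory.MemoryStore;

public class LoadExample {
    public static void main(String[] args) throws Exception {
        // MemoryStore is one possible Sail implementation; "data.nt" is illustrative.
        Sail sail = new MemoryStore();
        sail.initialize();
        InputStream in = new FileInputStream("data.nt");
        // loadDataFromStream closes the stream itself when it finishes.
        DataLoadingUtilities.loadDataFromStream(in, "file:data.nt", "ntriples", sail);
        sail.shutDown();
    }
}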
Use of org.openrdf.repository.Repository in project stanbol by apache.
In class SesameYardComponent, method removedService:
@Override
@SuppressWarnings("unchecked")
public void removedService(ServiceReference serviceReference, Object o) {
    if (serviceReference.equals(repoServiceReference)) {
        log.info(" - currently used Repository was removed (ref: {})", serviceReference);
        unregisterSesameYard();
    }
    ServiceReference[] serviceRefs = repositoryTracker.getServiceReferences();
    List<ServiceReference> others = serviceRefs == null
            ? Collections.<ServiceReference>emptyList()
            : Arrays.asList(serviceRefs);
    if (others.size() > 1) {
        // sort by priority
        Collections.sort(others);
    }
    boolean registered = false;
    for (Iterator<ServiceReference> refs = others.iterator(); !registered && refs.hasNext(); ) {
        ServiceReference ref = refs.next();
        Repository repo = (Repository) repositoryTracker.getService(ref);
        if (repo != null) {
            log.info(" - re-register Yard with other available repository {}", ref);
            registerSesameYard(ref, repo);
            registered = true;
        }
    }
    bundleContext.ungetService(serviceReference);
}
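One note on the Collections.sort(others) call above: OSGi's ServiceReference implements Comparable, and its natural order is ascending by service ranking (ties broken by service id), so a plain sort visits the lowest-ranked reference first. If the intent is to try the highest-priority repository first, the order would need to be reversed; a hedged sketch under that assumption (the helper class and method name are hypothetical):

import java.util.Arrays;
import java.util.Collections;
import java.util.List;

import org.osgi.framework.ServiceReference;

final class RankingUtil {
    // Hypothetical helper: order candidate references highest-ranking first.
    // ServiceReference's natural order is ascending by service ranking, so
    // reversing it puts the preferred (highest-ranked) service up front.
    static List<ServiceReference> byDescendingRanking(ServiceReference[] refs) {
        List<ServiceReference> sorted = Arrays.asList(refs.clone());
        Collections.sort(sorted, Collections.<ServiceReference>reverseOrder());
        return sorted;
    }
}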