Example 1 with NTriplesParser

Use of org.openrdf.rio.ntriples.NTriplesParser in project incubator-rya by apache.

From the class ConformanceTest, method runTest.

/**
 * Verify that we can infer the correct triples or detect an inconsistency.
 * @param   conf    Specifies working directory, etc.
 * @param   args    Arguments forwarded to the reasoning MapReduce job
 * @param   test    Contains premise/conclusion graphs, will store the result
 * @return  Return value of the MapReduce job
 */
int runTest(final Configuration conf, final String[] args, final OwlTest test) throws Exception {
    conf.setInt(MRReasoningUtils.STEP_PROP, 0);
    conf.setInt(MRReasoningUtils.SCHEMA_UPDATE_PROP, 0);
    conf.setBoolean(MRReasoningUtils.DEBUG_FLAG, true);
    conf.setBoolean(MRReasoningUtils.OUTPUT_FLAG, true);
    // Connect to MiniAccumulo and load the test
    final Repository repo = MRReasoningUtils.getRepository(conf);
    repo.initialize();
    final RepositoryConnection conn = repo.getConnection();
    conn.clear();
    conn.add(new StringReader(test.premise), "", RDFFormat.RDFXML);
    conn.close();
    repo.shutDown();
    // Run the reasoner
    final ReasoningDriver reasoner = new ReasoningDriver();
    final int result = ToolRunner.run(conf, reasoner, args);
    test.success = (result == 0);
    // Inconsistency test: successful if determined inconsistent
    if (test.types.contains(TEST_INCONSISTENCY)) {
        test.success = test.success && reasoner.hasInconsistencies();
    }
    // Consistency test: successful if determined consistent
    if (test.types.contains(TEST_CONSISTENCY)) {
        test.success = test.success && !reasoner.hasInconsistencies();
    }
    // Other types: we'll need to look at the inferred triples/schema
    if (test.types.contains(TEST_NONENTAILMENT) || test.types.contains(TEST_ENTAILMENT)) {
        System.out.println("Reading inferred triples...");
        // Read in the inferred triples from HDFS:
        final Schema schema = MRReasoningUtils.loadSchema(conf);
        final FileSystem fs = FileSystem.get(conf);
        final Path configuredPath = MRReasoningUtils.getOutputPath(conf, "final");
        final Path path = PathUtils.cleanHadoopPath(configuredPath, conf);
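        // OutputCollector acts as the RDFHandler: the parser pushes each parsed
        // statement into it, and its collected triples are added to test.inferred below.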
        final OutputCollector inferred = new OutputCollector();
        final NTriplesParser parser = new NTriplesParser();
        parser.setRDFHandler(inferred);
        if (fs.isDirectory(path)) {
            for (final FileStatus status : fs.listStatus(path)) {
                final String s = status.getPath().getName();
                if (s.startsWith(MRReasoningUtils.INCONSISTENT_OUT) || s.startsWith(MRReasoningUtils.DEBUG_OUT)) {
                    continue;
                }
                final BufferedReader br = new BufferedReader(new InputStreamReader(fs.open(status.getPath()), StandardCharsets.UTF_8));
                parser.parse(br, "");
                br.close();
            }
        }
        MRReasoningUtils.deleteIfExists(conf, "final");
        test.inferred.addAll(inferred.triples);
        // Entailment test: successful if expected triples were inferred
        if (test.types.contains(TEST_ENTAILMENT)) {
            // An expected triple passes if it was inferred, is trivially true, or is in the schema
            for (final Statement st : test.expected) {
                final Fact fact = new Fact(st);
                if (!test.inferred.contains(st) && !triviallyTrue(fact.getTriple(), schema) && !schema.containsTriple(fact.getTriple())) {
                    test.error.add(st);
                }
            }
        }
        // Non-entailment test: failure if non-expected triples inferred
        if (test.types.contains(TEST_NONENTAILMENT)) {
            for (final Statement st : test.unexpected) {
                final Fact fact = new Fact(st);
                if (test.inferred.contains(st) || schema.containsTriple(fact.getTriple())) {
                    test.error.add(st);
                }
            }
        }
        test.success = test.success && test.error.isEmpty();
    }
    conf.setBoolean(MRReasoningUtils.DEBUG_FLAG, false);
    MRReasoningUtils.clean(conf);
    return result;
}
Also used:
RepositoryConnection (org.openrdf.repository.RepositoryConnection)
Path (org.apache.hadoop.fs.Path)
FileStatus (org.apache.hadoop.fs.FileStatus)
InputStreamReader (java.io.InputStreamReader)
Statement (org.openrdf.model.Statement)
Schema (org.apache.rya.reasoning.Schema)
NTriplesParser (org.openrdf.rio.ntriples.NTriplesParser)
Fact (org.apache.rya.reasoning.Fact)
SailRepository (org.openrdf.repository.sail.SailRepository)
Repository (org.openrdf.repository.Repository)
FileSystem (org.apache.hadoop.fs.FileSystem)
StringReader (java.io.StringReader)
BufferedReader (java.io.BufferedReader)
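The core pattern in the example above is attaching an RDFHandler to an NTriplesParser and letting the handler accumulate the parsed statements. A minimal, self-contained sketch of that pattern, using Sesame's stock org.openrdf.rio.helpers.StatementCollector in place of Rya's OutputCollector (the class name NTriplesParseSketch and the sample triple are illustrative only):

import java.io.StringReader;

import org.openrdf.model.Statement;
import org.openrdf.rio.helpers.StatementCollector;
import org.openrdf.rio.ntriples.NTriplesParser;

public class NTriplesParseSketch {
    public static void main(String[] args) throws Exception {
        // One N-Triples statement; N-Triples uses only absolute IRIs, so an empty base URI is fine
        String data = "<http://example.org/s> <http://example.org/p> <http://example.org/o> .\n";
        NTriplesParser parser = new NTriplesParser();
        StatementCollector collector = new StatementCollector();
        // The handler receives each statement as it is parsed
        parser.setRDFHandler(collector);
        parser.parse(new StringReader(data), "");
        for (Statement st : collector.getStatements()) {
            System.out.println(st);
        }
    }
}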

Example 2 with NTriplesParser

Use of org.openrdf.rio.ntriples.NTriplesParser in project backstage by zepheira.

From the class DataLoadingUtilities, method loadDataFromStream.

public static void loadDataFromStream(InputStream stream, String sourceURL, String lang, Sail sail) throws Exception {
    RepoSailTuple rs = createMemoryRepository(null);
    Repository r = rs.repository;
    lang = lang.toLowerCase();
    if ("exhibit/json".equals(lang)) {
        Properties properties = new Properties();
        BabelReader reader = new ExhibitJsonReader();
        try {
            if (reader.takesReader()) {
                InputStreamReader isr = new InputStreamReader(stream);
                reader.read(isr, sail, properties, Locale.getDefault());
            } else {
                reader.read(stream, sail, properties, Locale.getDefault());
            }
        } finally {
            stream.close();
        }
    } else {
        RDFParser parser = null;
        if ("rdfxml".equals(lang)) {
            parser = new RDFXMLParser(r.getValueFactory());
        } else if ("n3".equals(lang) || "turtle".equals(lang)) {
            parser = new TurtleParser(r.getValueFactory());
        } else if ("ntriples".equals(lang)) {
            parser = new NTriplesParser(r.getValueFactory());
        }
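        // Note: a lang value other than rdfxml, n3, turtle, or ntriples leaves parser null,
        // so the parser.setRDFHandler call below would throw a NullPointerException.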
        try {
            SailConnection c = null;
            try {
                c = sail.getConnection();
                BNodeConverterStatementHandler handler = new BNodeConverterStatementHandler(c);
                parser.setRDFHandler(handler);
                parser.setParseErrorListener(new LoggingParseErrorListener(sourceURL));
                parser.setVerifyData(false);
                parser.setStopAtFirstError(false);
                parser.parse(stream, sourceURL);
                c.commit();
                _logger.info("Read " + handler.m_count + " statements from '" + sourceURL + "'");
            } catch (RepositoryException e) {
                if (c != null)
                    c.rollback();
            } finally {
                if (c != null)
                    c.close();
            }
        } catch (Exception e) {
            throw new ModelReadFromFileException("Failed to read data from '" + sourceURL + "'", e);
        } finally {
            stream.close();
        }
    }
}
Also used:
InputStreamReader (java.io.InputStreamReader)
RDFXMLParser (org.openrdf.rio.rdfxml.RDFXMLParser)
NTriplesParser (org.openrdf.rio.ntriples.NTriplesParser)
RepositoryException (org.openrdf.repository.RepositoryException)
Properties (java.util.Properties)
RDFParser (org.openrdf.rio.RDFParser)
SailException (org.openrdf.sail.SailException)
RDFHandlerException (org.openrdf.rio.RDFHandlerException)
IOException (java.io.IOException)
FileNotFoundException (java.io.FileNotFoundException)
TurtleParser (org.openrdf.rio.turtle.TurtleParser)
SailRepository (org.openrdf.repository.sail.SailRepository)
Repository (org.openrdf.repository.Repository)
BabelReader (edu.mit.simile.babel.BabelReader)
SailConnection (org.openrdf.sail.SailConnection)
ExhibitJsonReader (edu.mit.simile.babel.exhibit.ExhibitJsonReader)
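Instead of the manual if/else dispatch over the lang string, Sesame's Rio registry can resolve a parser for a given format. A sketch of that alternative (the class name RioFormatDispatchSketch and the sample triple are illustrative; it assumes a Sesame 2.x openrdf-rio distribution where Rio.createParser and RDFFormat.NTRIPLES are available):

import java.io.StringReader;

import org.openrdf.model.Statement;
import org.openrdf.rio.RDFFormat;
import org.openrdf.rio.RDFParser;
import org.openrdf.rio.Rio;
import org.openrdf.rio.helpers.StatementCollector;

public class RioFormatDispatchSketch {
    public static void main(String[] args) throws Exception {
        String data = "<http://example.org/s> <http://example.org/p> <http://example.org/o> .\n";
        // Rio looks up the registered parser for the requested format, replacing
        // the explicit new RDFXMLParser()/TurtleParser()/NTriplesParser() chain
        RDFParser parser = Rio.createParser(RDFFormat.NTRIPLES);
        StatementCollector collector = new StatementCollector();
        parser.setRDFHandler(collector);
        parser.parse(new StringReader(data), "");
        System.out.println("Parsed " + collector.getStatements().size() + " statement(s)");
        for (Statement st : collector.getStatements()) {
            System.out.println(st);
        }
    }
}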

Aggregations (number of examples above using each type)

InputStreamReader (java.io.InputStreamReader): 2
Repository (org.openrdf.repository.Repository): 2
SailRepository (org.openrdf.repository.sail.SailRepository): 2
NTriplesParser (org.openrdf.rio.ntriples.NTriplesParser): 2
BabelReader (edu.mit.simile.babel.BabelReader): 1
ExhibitJsonReader (edu.mit.simile.babel.exhibit.ExhibitJsonReader): 1
BufferedReader (java.io.BufferedReader): 1
FileNotFoundException (java.io.FileNotFoundException): 1
IOException (java.io.IOException): 1
StringReader (java.io.StringReader): 1
Properties (java.util.Properties): 1
FileStatus (org.apache.hadoop.fs.FileStatus): 1
FileSystem (org.apache.hadoop.fs.FileSystem): 1
Path (org.apache.hadoop.fs.Path): 1
Fact (org.apache.rya.reasoning.Fact): 1
Schema (org.apache.rya.reasoning.Schema): 1
Statement (org.openrdf.model.Statement): 1
RepositoryConnection (org.openrdf.repository.RepositoryConnection): 1
RepositoryException (org.openrdf.repository.RepositoryException): 1
RDFHandlerException (org.openrdf.rio.RDFHandlerException): 1