Example 1 with Fact

Use of org.apache.rya.reasoning.Fact in project incubator-rya by apache.

The class ForwardChainTest, method testTransitiveChain.

/**
 * MultipleOutputs support is minimal, so we have to check each map/reduce
 * step explicitly
 */
@Test
public void testTransitiveChain() throws Exception {
    int max = 8;
    int n = 4;
    URI prop = TestUtils.uri("subOrganizationOf");
    Map<Integer, Map<Integer, Pair<Fact, NullWritable>>> connections = new HashMap<>();
    for (int i = 0; i <= max; i++) {
        connections.put(i, new HashMap<Integer, Pair<Fact, NullWritable>>());
    }
    // Initial input: make a chain from org0 to org8
    for (int i = 0; i < max; i++) {
        URI orgI = TestUtils.uri("org" + i);
        URI orgJ = TestUtils.uri("org" + (i + 1));
        Fact triple = new Fact(orgI, prop, orgJ);
        connections.get(i).put(i + 1, new Pair<>(triple, NullWritable.get()));
    }
    for (int i = 1; i <= n; i++) {
        // Map:
        MapDriver<Fact, NullWritable, ResourceWritable, Fact> mDriver = new MapDriver<>();
        mDriver.getConfiguration().setInt(MRReasoningUtils.STEP_PROP, i);
        mDriver.setMapper(new ForwardChain.FileMapper(schema));
        for (int j : connections.keySet()) {
            for (int k : connections.get(j).keySet()) {
                mDriver.addInput(connections.get(j).get(k));
            }
        }
        List<Pair<ResourceWritable, Fact>> mapped = mDriver.run();
        // Convert data for reduce phase:
        ReduceFeeder<ResourceWritable, Fact> feeder = new ReduceFeeder<>(mDriver.getConfiguration());
        List<KeyValueReuseList<ResourceWritable, Fact>> intermediate = feeder.sortAndGroup(mapped,
                new ResourceWritable.SecondaryComparator(), new ResourceWritable.PrimaryComparator());
        // Reduce, and compare to expected output:
        ReduceDriver<ResourceWritable, Fact, Fact, NullWritable> rDriver = new ReduceDriver<>();
        rDriver.getConfiguration().setInt(MRReasoningUtils.STEP_PROP, i);
        rDriver.setReducer(new ForwardChain.ReasoningReducer(schema));
        rDriver.addAllElements(intermediate);
        // After step i, the longest derivable chain spans 2^i edges;
        // anything spanning maxSpan/2 or fewer edges was derived in an earlier step.
        int maxSpan = (int) Math.pow(2, i);
        int minSpan = (maxSpan / 2) + 1;
        // For each j, build all paths starting with j:
        for (int j = 0; j < max; j++) {
            // This includes any path of length k for appropriate k:
            for (int k = minSpan; k <= maxSpan && j + k <= max; k++) {
                int middle = j + minSpan - 1;
                URI left = TestUtils.uri("org" + j);
                URI right = TestUtils.uri("org" + (j + k));
                Fact triple = new Fact(left, prop, right, i, OwlRule.PRP_TRP, TestUtils.uri("org" + middle));
                triple.addSource(connections.get(j).get(middle).getFirst());
                triple.addSource(connections.get(middle).get(j + k).getFirst());
                Pair<Fact, NullWritable> expected = new Pair<>(triple, NullWritable.get());
                connections.get(j).put(j + k, expected);
                rDriver.addMultiOutput("intermediate", expected);
            }
        }
        rDriver.runTest();
    }
}
Also used: HashMap (java.util.HashMap), ReduceFeeder (org.apache.hadoop.mrunit.mapreduce.ReduceFeeder), URI (org.openrdf.model.URI), ReduceDriver (org.apache.hadoop.mrunit.mapreduce.ReduceDriver), Pair (org.apache.hadoop.mrunit.types.Pair), MapDriver (org.apache.hadoop.mrunit.mapreduce.MapDriver), KeyValueReuseList (org.apache.hadoop.mrunit.types.KeyValueReuseList), Fact (org.apache.rya.reasoning.Fact), NullWritable (org.apache.hadoop.io.NullWritable), Map (java.util.Map), PrepareForTest (org.powermock.core.classloader.annotations.PrepareForTest), Test (org.junit.Test)
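The only non-obvious part of this test is the span arithmetic. Each reduce step can join two previously derived paths, so after step i the longest derivable subOrganizationOf chain spans 2^i edges (maxSpan), and the facts newly produced at step i are exactly those spanning 2^(i-1) + 1 through 2^i edges (minSpan through maxSpan). A standalone sketch of that arithmetic, with no Rya or MRUnit dependencies (the class name SpanArithmetic is made up for illustration):

public class SpanArithmetic {
    public static void main(String[] args) {
        int max = 8; // chain runs org0 -> org8, as in the test above
        int n = 4;   // number of map/reduce iterations
        for (int i = 1; i <= n; i++) {
            int maxSpan = (int) Math.pow(2, i); // longest chain derivable after step i
            int minSpan = (maxSpan / 2) + 1;    // anything shorter was derived earlier
            System.out.printf("step %d expects new spans %d..%d%n", i, minSpan, maxSpan);
            for (int j = 0; j < max; j++) {
                for (int k = minSpan; k <= maxSpan && j + k <= max; k++) {
                    // Each new path org(j) -> org(j+k) is expected to be derived by
                    // joining a left segment of exactly 2^(i-1) edges with the rest:
                    int middle = j + minSpan - 1;
                    System.out.printf("  org%d -> org%d via org%d%n", j, j + k, middle);
                }
            }
        }
    }
}

Running this shows that steps 1 through 3 produce spans 2, 3..4, and 5..8 respectively, while step 4 produces nothing (minSpan = 9 exceeds max = 8): the fourth iteration of the test therefore asserts that the reducer derives no new facts once the chain is closed.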

Example 2 with Fact

Use of org.apache.rya.reasoning.Fact in project incubator-rya by apache.

The class DuplicateEliminationTest, method testRetainSimplest.

@Test
public void testRetainSimplest() throws Exception {
    List<Derivation> facts = new LinkedList<>();
    facts.add(Y_SUPER_X_INV.getDerivation());
    facts.add(Y_SUPER_X.getDerivation());
    Fact unset = Y_SUPER_X.clone();
    unset.unsetDerivation();
    ReduceDriver<Fact, Derivation, Fact, NullWritable> driver = new ReduceDriver<>();
    driver.getConfiguration().setInt(MRReasoningUtils.STEP_PROP, 1);
    driver.withReducer(new DuplicateElimination.DuplicateEliminationReducer())
            .withInput(unset, facts)
            .withMultiOutput(MRReasoningUtils.INTERMEDIATE_OUT, Y_SUPER_X, NullWritable.get())
            .runTest();
}
Also used: Derivation (org.apache.rya.reasoning.Derivation), Fact (org.apache.rya.reasoning.Fact), NullWritable (org.apache.hadoop.io.NullWritable), ReduceDriver (org.apache.hadoop.mrunit.mapreduce.ReduceDriver), LinkedList (java.util.LinkedList), PrepareForTest (org.powermock.core.classloader.annotations.PrepareForTest), Test (org.junit.Test)
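The reducer here receives several Derivations for the same Fact and must keep only the simplest one, emitting a single multi-output. The self-contained sketch below illustrates the same ReduceDriver pattern (many values in, one surviving value out) with a toy min-keeping reducer; MinReducer and MinReducerTest are hypothetical names, not part of Rya or MRUnit:

import java.io.IOException;
import java.util.Arrays;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mrunit.mapreduce.ReduceDriver;

// Toy reducer: of all the values seen for a key, keep only the smallest,
// analogous in shape to keeping the simplest derivation of a fact.
class MinReducer extends Reducer<Text, IntWritable, Text, IntWritable> {
    @Override
    protected void reduce(Text key, Iterable<IntWritable> values, Context ctx)
            throws IOException, InterruptedException {
        int min = Integer.MAX_VALUE;
        for (IntWritable v : values) {
            min = Math.min(min, v.get());
        }
        ctx.write(key, new IntWritable(min));
    }
}

public class MinReducerTest {
    public static void main(String[] args) throws Exception {
        new ReduceDriver<Text, IntWritable, Text, IntWritable>()
                .withReducer(new MinReducer())
                .withInput(new Text("fact"), Arrays.asList(new IntWritable(3), new IntWritable(1)))
                .withOutput(new Text("fact"), new IntWritable(1))
                .runTest();
    }
}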

Example 3 with Fact

Use of org.apache.rya.reasoning.Fact in project incubator-rya by apache.

The class DuplicateEliminationTest, method testInconsistencyMapperOutput.

@Test
public void testInconsistencyMapperOutput() throws Exception {
    Fact empty = new Fact();
    empty.setDerivation(X_DISJOINT);
    new MapDriver<Derivation, NullWritable, Fact, Derivation>()
            .withMapper(new DuplicateElimination.InconsistencyMapper())
            .withInput(X_DISJOINT, NullWritable.get())
            .withOutput(empty, X_DISJOINT)
            .runTest();
}
Also used: MapDriver (org.apache.hadoop.mrunit.mapreduce.MapDriver), Fact (org.apache.rya.reasoning.Fact), PrepareForTest (org.powermock.core.classloader.annotations.PrepareForTest), Test (org.junit.Test)

Example 4 with Fact

Use of org.apache.rya.reasoning.Fact in project incubator-rya by apache.

The class ConformanceTest, method runTest.

/**
 * Verify that we can infer the correct triples or detect an inconsistency.
 * @param   conf    Specifies working directory, etc.
 * @param   args    Arguments passed through to the underlying MapReduce job
 * @param   test    Contains premise/conclusion graphs, will store result
 * @return  Return value of the MapReduce job
 */
int runTest(final Configuration conf, final String[] args, final OwlTest test) throws Exception {
    conf.setInt(MRReasoningUtils.STEP_PROP, 0);
    conf.setInt(MRReasoningUtils.SCHEMA_UPDATE_PROP, 0);
    conf.setBoolean(MRReasoningUtils.DEBUG_FLAG, true);
    conf.setBoolean(MRReasoningUtils.OUTPUT_FLAG, true);
    // Connect to MiniAccumulo and load the test
    final Repository repo = MRReasoningUtils.getRepository(conf);
    repo.initialize();
    final RepositoryConnection conn = repo.getConnection();
    conn.clear();
    conn.add(new StringReader(test.premise), "", RDFFormat.RDFXML);
    conn.close();
    repo.shutDown();
    // Run the reasoner
    final ReasoningDriver reasoner = new ReasoningDriver();
    final int result = ToolRunner.run(conf, reasoner, args);
    test.success = (result == 0);
    // Inconsistency test: successful if determined inconsistent
    if (test.types.contains(TEST_INCONSISTENCY)) {
        test.success = test.success && reasoner.hasInconsistencies();
    }
    // Consistency test: successful if determined consistent
    if (test.types.contains(TEST_CONSISTENCY)) {
        test.success = test.success && !reasoner.hasInconsistencies();
    }
    // Other types: we'll need to look at the inferred triples/schema
    if (test.types.contains(TEST_NONENTAILMENT) || test.types.contains(TEST_ENTAILMENT)) {
        System.out.println("Reading inferred triples...");
        // Read in the inferred triples from HDFS:
        final Schema schema = MRReasoningUtils.loadSchema(conf);
        final FileSystem fs = FileSystem.get(conf);
        final Path configuredPath = MRReasoningUtils.getOutputPath(conf, "final");
        final Path path = PathUtils.cleanHadoopPath(configuredPath, conf);
        final OutputCollector inferred = new OutputCollector();
        final NTriplesParser parser = new NTriplesParser();
        parser.setRDFHandler(inferred);
        if (fs.isDirectory(path)) {
            for (final FileStatus status : fs.listStatus(path)) {
                final String s = status.getPath().getName();
                if (s.startsWith(MRReasoningUtils.INCONSISTENT_OUT) || s.startsWith(MRReasoningUtils.DEBUG_OUT)) {
                    continue;
                }
                final BufferedReader br = new BufferedReader(new InputStreamReader(fs.open(status.getPath()), StandardCharsets.UTF_8));
                parser.parse(br, "");
                br.close();
            }
        }
        MRReasoningUtils.deleteIfExists(conf, "final");
        test.inferred.addAll(inferred.triples);
        // Entailment test: successful if expected triples were inferred
        if (test.types.contains(TEST_ENTAILMENT)) {
            // Check each expected triple against the inferred triples and the schema
            for (final Statement st : test.expected) {
                final Fact fact = new Fact(st);
                if (!test.inferred.contains(st) && !triviallyTrue(fact.getTriple(), schema) && !schema.containsTriple(fact.getTriple())) {
                    test.error.add(st);
                }
            }
        }
        // Non-entailment test: failure if non-expected triples inferred
        if (test.types.contains(TEST_NONENTAILMENT)) {
            for (final Statement st : test.unexpected) {
                final Fact fact = new Fact(st);
                if (test.inferred.contains(st) || schema.containsTriple(fact.getTriple())) {
                    test.error.add(st);
                }
            }
        }
        test.success = test.success && test.error.isEmpty();
    }
    conf.setBoolean(MRReasoningUtils.DEBUG_FLAG, false);
    MRReasoningUtils.clean(conf);
    return result;
}
Also used: RepositoryConnection (org.openrdf.repository.RepositoryConnection), Path (org.apache.hadoop.fs.Path), FileStatus (org.apache.hadoop.fs.FileStatus), InputStreamReader (java.io.InputStreamReader), Statement (org.openrdf.model.Statement), Schema (org.apache.rya.reasoning.Schema), NTriplesParser (org.openrdf.rio.ntriples.NTriplesParser), Fact (org.apache.rya.reasoning.Fact), SailRepository (org.openrdf.repository.sail.SailRepository), Repository (org.openrdf.repository.Repository), FileSystem (org.apache.hadoop.fs.FileSystem), StringReader (java.io.StringReader), BufferedReader (java.io.BufferedReader)
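The OutputCollector used above is an RDFHandler that accumulates every statement the NTriplesParser produces, exposed through its triples field. A minimal sketch of that collector pattern using the Sesame Rio API; CollectingHandler and ParseDemo are illustrative names, not Rya's actual implementation:

import java.io.StringReader;
import java.util.HashSet;
import java.util.Set;
import org.openrdf.model.Statement;
import org.openrdf.rio.helpers.RDFHandlerBase;
import org.openrdf.rio.ntriples.NTriplesParser;

// An RDFHandler that simply accumulates every parsed statement into a set.
class CollectingHandler extends RDFHandlerBase {
    final Set<Statement> triples = new HashSet<>();

    @Override
    public void handleStatement(Statement st) {
        triples.add(st);
    }
}

public class ParseDemo {
    public static void main(String[] args) throws Exception {
        NTriplesParser parser = new NTriplesParser();
        CollectingHandler collected = new CollectingHandler();
        parser.setRDFHandler(collected);
        parser.parse(new StringReader("<urn:org0> <urn:subOrganizationOf> <urn:org1> .\n"), "");
        System.out.println(collected.triples.size() + " triple(s) collected");
    }
}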

Example 5 with Fact

Use of org.apache.rya.reasoning.Fact in project incubator-rya by apache.

The class ForwardChainTest, method testRdfMapperOutput.

@Test
public void testRdfMapperOutput() throws Exception {
    RyaStatement rya = TestUtils.ryaStatement("x", "subOrganizationOf", "y");
    RyaStatementWritable rsw = new RyaStatementWritable();
    rsw.setRyaStatement(rya);
    LongWritable l = new LongWritable();
    ResourceWritable rw1 = new ResourceWritable();
    ResourceWritable rw2 = new ResourceWritable();
    rw1.set(TestUtils.uri("x"));
    rw2.set(TestUtils.uri("y"));
    new MapDriver<LongWritable, RyaStatementWritable, ResourceWritable, Fact>()
            .withMapper(new ForwardChain.RdfMapper(schema))
            .withInput(l, rsw)
            .withOutput(rw1, X_SUB_Y)
            .withOutput(rw2, X_SUB_Y)
            .runTest();
}
Also used: RyaStatementWritable (org.apache.rya.accumulo.mr.RyaStatementWritable), RyaStatement (org.apache.rya.api.domain.RyaStatement), LongWritable (org.apache.hadoop.io.LongWritable), Fact (org.apache.rya.reasoning.Fact), PrepareForTest (org.powermock.core.classloader.annotations.PrepareForTest), Test (org.junit.Test)
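The two withOutput calls are the point of this test: RdfMapper emits the same Fact twice, keyed once by its subject and once by its object, so the reduce phase can join at either endpoint. A generic sketch of that fan-out pattern; FanOutMapper and its space-separated input format are assumptions for illustration, not Rya's code:

import java.io.IOException;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

// Emits each input triple twice: once keyed by its subject and once by its
// object, so both endpoints of the edge see the triple in the reduce phase.
public class FanOutMapper extends Mapper<LongWritable, Text, Text, Text> {
    @Override
    protected void map(LongWritable key, Text value, Context ctx)
            throws IOException, InterruptedException {
        // Assumed line format: "subject predicate object"
        String[] spo = value.toString().split(" ");
        if (spo.length != 3) {
            return; // skip malformed lines
        }
        ctx.write(new Text(spo[0]), value); // keyed by subject
        ctx.write(new Text(spo[2]), value); // keyed by object
    }
}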

Aggregations

Fact (org.apache.rya.reasoning.Fact): 6
Test (org.junit.Test): 5
PrepareForTest (org.powermock.core.classloader.annotations.PrepareForTest): 5
NullWritable (org.apache.hadoop.io.NullWritable): 2
MapDriver (org.apache.hadoop.mrunit.mapreduce.MapDriver): 2
ReduceDriver (org.apache.hadoop.mrunit.mapreduce.ReduceDriver): 2
RyaStatement (org.apache.rya.api.domain.RyaStatement): 2
BufferedReader (java.io.BufferedReader): 1
InputStreamReader (java.io.InputStreamReader): 1
StringReader (java.io.StringReader): 1
HashMap (java.util.HashMap): 1
LinkedList (java.util.LinkedList): 1
Map (java.util.Map): 1
Key (org.apache.accumulo.core.data.Key): 1
Value (org.apache.accumulo.core.data.Value): 1
FileStatus (org.apache.hadoop.fs.FileStatus): 1
FileSystem (org.apache.hadoop.fs.FileSystem): 1
Path (org.apache.hadoop.fs.Path): 1
LongWritable (org.apache.hadoop.io.LongWritable): 1
ReduceFeeder (org.apache.hadoop.mrunit.mapreduce.ReduceFeeder): 1