Search in sources :

Example 1 with HasValueVisitor

use of org.apache.rya.rdftriplestore.inference.HasValueVisitor in project incubator-rya by apache.

In the class RdfCloudTripleStoreConnection, the method evaluateInternal:

/**
 * Evaluates a SPARQL tuple expression against the Rya store: clones the query and the
 * store configuration, applies per-query overrides from the incoming bindings, runs a
 * pipeline of Sesame/openrdf and Rya-specific query optimizers (including inference
 * visitors when inference is enabled), and returns a CloseableIteration of results that
 * shuts down the evaluation strategy when closed.
 *
 * @param tupleExpr the parsed query model to evaluate (cloned before mutation)
 * @param dataset   the dataset restriction for the query, may be null
 * @param bindings  incoming bindings; special binding names are read as per-query
 *                  configuration overrides (query plan display, auths, TTL, etc.)
 * @param flag      unused here; part of the SailConnection evaluateInternal contract
 * @return an iteration over the query's binding sets
 * @throws SailException if configuration cloning fails, an optimizer cannot be
 *                       instantiated, or evaluation fails
 */
@Override
protected CloseableIteration<? extends BindingSet, QueryEvaluationException> evaluateInternal(TupleExpr tupleExpr, final Dataset dataset, BindingSet bindings, final boolean flag) throws SailException {
    verifyIsOpen();
    logger.trace("Incoming query model:\n{}", tupleExpr.toString());
    // Best-effort provenance recording: a failure here must not abort the query.
    if (provenanceCollector != null) {
        try {
            provenanceCollector.recordQuery(tupleExpr.toString());
        } catch (final ProvenanceCollectionException e) {
            logger.trace("Provenance failed to record query.", e);
        }
    }
    // Clone both the query model and the configuration so per-query mutations below
    // do not leak into the shared store state.
    tupleExpr = tupleExpr.clone();
    final C queryConf = (C) store.getConf().clone();
    if (queryConf == null) {
        // Should not happen, but this is better than a null dereference error.
        throw new SailException("Cloning store.getConf() returned null, aborting.");
    }
    // Well-known binding names act as per-query configuration overrides.
    if (bindings != null) {
        final Binding dispPlan = bindings.getBinding(RdfCloudTripleStoreConfiguration.CONF_QUERYPLAN_FLAG);
        if (dispPlan != null) {
            queryConf.setDisplayQueryPlan(Boolean.parseBoolean(dispPlan.getValue().stringValue()));
        }
        final Binding authBinding = bindings.getBinding(RdfCloudTripleStoreConfiguration.CONF_QUERY_AUTH);
        if (authBinding != null) {
            // Comma-separated visibility authorizations for this query only.
            queryConf.setAuths(authBinding.getValue().stringValue().split(","));
        }
        final Binding ttlBinding = bindings.getBinding(RdfCloudTripleStoreConfiguration.CONF_TTL);
        if (ttlBinding != null) {
            // NOTE(review): Long.valueOf throws NumberFormatException on bad input,
            // surfacing via the generic catch at the bottom as a SailException.
            queryConf.setTtl(Long.valueOf(ttlBinding.getValue().stringValue()));
        }
        final Binding startTimeBinding = bindings.getBinding(RdfCloudTripleStoreConfiguration.CONF_STARTTIME);
        if (startTimeBinding != null) {
            queryConf.setStartTime(Long.valueOf(startTimeBinding.getValue().stringValue()));
        }
        final Binding performantBinding = bindings.getBinding(RdfCloudTripleStoreConfiguration.CONF_PERFORMANT);
        if (performantBinding != null) {
            queryConf.setBoolean(RdfCloudTripleStoreConfiguration.CONF_PERFORMANT, Boolean.parseBoolean(performantBinding.getValue().stringValue()));
        }
        final Binding inferBinding = bindings.getBinding(RdfCloudTripleStoreConfiguration.CONF_INFER);
        if (inferBinding != null) {
            queryConf.setInfer(Boolean.parseBoolean(inferBinding.getValue().stringValue()));
        }
        final Binding useStatsBinding = bindings.getBinding(RdfCloudTripleStoreConfiguration.CONF_USE_STATS);
        if (useStatsBinding != null) {
            queryConf.setUseStats(Boolean.parseBoolean(useStatsBinding.getValue().stringValue()));
        }
        final Binding offsetBinding = bindings.getBinding(RdfCloudTripleStoreConfiguration.CONF_OFFSET);
        if (offsetBinding != null) {
            queryConf.setOffset(Long.parseLong(offsetBinding.getValue().stringValue()));
        }
        final Binding limitBinding = bindings.getBinding(RdfCloudTripleStoreConfiguration.CONF_LIMIT);
        if (limitBinding != null) {
            queryConf.setLimit(Long.parseLong(limitBinding.getValue().stringValue()));
        }
    } else {
        bindings = new QueryBindingSet();
    }
    // Optimizers expect a QueryRoot wrapper so they can replace the top-level node.
    if (!(tupleExpr instanceof QueryRoot)) {
        tupleExpr = new QueryRoot(tupleExpr);
    }
    try {
        final List<Class<QueryOptimizer>> optimizers = queryConf.getOptimizers();
        final Class<QueryOptimizer> pcjOptimizer = queryConf.getPcjOptimizer();
        if (pcjOptimizer != null) {
            QueryOptimizer opt = null;
            try {
                final Constructor<QueryOptimizer> construct = pcjOptimizer.getDeclaredConstructor(new Class[] {});
                opt = construct.newInstance();
            } catch (final Exception e) {
                // Swallowed deliberately: a missing no-arg constructor is reported as
                // NoSuchMethodException just below, where opt remains null.
            }
            if (opt == null) {
                throw new NoSuchMethodException("Could not find valid constructor for " + pcjOptimizer.getName());
            }
            if (opt instanceof Configurable) {
                // NOTE(review): configures with the connection-level 'conf', not the
                // per-query 'queryConf' built above — confirm this is intentional.
                ((Configurable) opt).setConf(conf);
            }
            opt.optimize(tupleExpr, dataset, bindings);
        }
        final ParallelEvaluationStrategyImpl strategy = new ParallelEvaluationStrategyImpl(new StoreTripleSource<C>(queryConf, ryaDAO), inferenceEngine, dataset, queryConf);
        // Standard Sesame optimizer pipeline; order matters (e.g. constants must be
        // folded before comparison and filter rewriting).
        (new BindingAssigner()).optimize(tupleExpr, dataset, bindings);
        (new ConstantOptimizer(strategy)).optimize(tupleExpr, dataset, bindings);
        (new CompareOptimizer()).optimize(tupleExpr, dataset, bindings);
        (new ConjunctiveConstraintSplitter()).optimize(tupleExpr, dataset, bindings);
        (new DisjunctiveConstraintOptimizer()).optimize(tupleExpr, dataset, bindings);
        (new SameTermFilterOptimizer()).optimize(tupleExpr, dataset, bindings);
        (new QueryModelNormalizer()).optimize(tupleExpr, dataset, bindings);
        (new IterativeEvaluationOptimizer()).optimize(tupleExpr, dataset, bindings);
        // User-configured optimizers, instantiated reflectively via either a no-arg
        // constructor or an (EvaluationStrategy) constructor.
        if (!optimizers.isEmpty()) {
            for (final Class<QueryOptimizer> optclz : optimizers) {
                QueryOptimizer result = null;
                try {
                    final Constructor<QueryOptimizer> meth = optclz.getDeclaredConstructor(new Class[] {});
                    result = meth.newInstance();
                } catch (final Exception e) {
                    // Swallowed: fall through to the strategy-arg constructor attempt.
                }
                // NOTE(review): this second attempt runs even if the no-arg constructor
                // succeeded, so an (EvaluationStrategy) constructor takes precedence —
                // presumably intentional, but worth confirming.
                try {
                    final Constructor<QueryOptimizer> meth = optclz.getDeclaredConstructor(EvaluationStrategy.class);
                    result = meth.newInstance(strategy);
                } catch (final Exception e) {
                    // Swallowed: failure here leaves the no-arg instance (or null).
                }
                if (result == null) {
                    throw new NoSuchMethodException("Could not find valid constructor for " + optclz.getName());
                }
                if (result instanceof Configurable) {
                    // NOTE(review): same 'conf' vs 'queryConf' question as the PCJ
                    // optimizer above.
                    ((Configurable) result).setConf(conf);
                }
                result.optimize(tupleExpr, dataset, bindings);
            }
        }
        (new FilterOptimizer()).optimize(tupleExpr, dataset, bindings);
        (new OrderLimitOptimizer()).optimize(tupleExpr, dataset, bindings);
        logger.trace("Optimized query model:\n{}", tupleExpr.toString());
        // Inference rewriting: each visitor expands the query model to account for one
        // OWL/RDFS construct. Visitor failures are logged, not fatal — the query then
        // runs without that inference expansion.
        if (queryConf.isInfer() && this.inferenceEngine != null) {
            try {
                tupleExpr.visit(new DomainRangeVisitor(queryConf, inferenceEngine));
                tupleExpr.visit(new SomeValuesFromVisitor(queryConf, inferenceEngine));
                tupleExpr.visit(new AllValuesFromVisitor(queryConf, inferenceEngine));
                tupleExpr.visit(new HasValueVisitor(queryConf, inferenceEngine));
                tupleExpr.visit(new IntersectionOfVisitor(queryConf, inferenceEngine));
                tupleExpr.visit(new ReflexivePropertyVisitor(queryConf, inferenceEngine));
                tupleExpr.visit(new PropertyChainVisitor(queryConf, inferenceEngine));
                tupleExpr.visit(new TransitivePropertyVisitor(queryConf, inferenceEngine));
                tupleExpr.visit(new SymmetricPropertyVisitor(queryConf, inferenceEngine));
                tupleExpr.visit(new InverseOfVisitor(queryConf, inferenceEngine));
                tupleExpr.visit(new SubPropertyOfVisitor(queryConf, inferenceEngine));
                tupleExpr.visit(new SubClassOfVisitor(queryConf, inferenceEngine));
                tupleExpr.visit(new SameAsVisitor(queryConf, inferenceEngine));
                tupleExpr.visit(new OneOfVisitor(queryConf, inferenceEngine));
                tupleExpr.visit(new HasSelfVisitor(queryConf, inferenceEngine));
            } catch (final Exception e) {
                logger.error("Error encountered while visiting query node.", e);
            }
        }
        if (queryConf.isPerformant()) {
            tupleExpr.visit(new SeparateFilterJoinsVisitor());
        // tupleExpr.visit(new FilterTimeIndexVisitor(queryConf));
        // tupleExpr.visit(new PartitionFilterTimeIndexVisitor(queryConf));
        }
        final FilterRangeVisitor rangeVisitor = new FilterRangeVisitor(queryConf);
        tupleExpr.visit(rangeVisitor);
        // this has to be done twice to replace the statement patterns with the right ranges
        tupleExpr.visit(rangeVisitor);
        // Pick evaluation statistics for join ordering. NOTE(review): '&&' binds tighter
        // than '||' here, so this reads (!useStats && performant) || (rdfEvalStatsDAO ==
        // null) — confirm that grouping is the intended one.
        EvaluationStatistics stats = null;
        if (!queryConf.isUseStats() && queryConf.isPerformant() || rdfEvalStatsDAO == null) {
            stats = new DefaultStatistics();
        } else if (queryConf.isUseStats()) {
            if (queryConf.isUseSelectivity()) {
                stats = new RdfCloudTripleStoreSelectivityEvaluationStatistics<C>(queryConf, rdfEvalStatsDAO, selectEvalDAO);
            } else {
                stats = new RdfCloudTripleStoreEvaluationStatistics<C>(queryConf, rdfEvalStatsDAO);
            }
        }
        // stats may still be null (useStats false, not performant, DAO present): in that
        // case no join reordering is applied.
        if (stats != null) {
            if (stats instanceof RdfCloudTripleStoreSelectivityEvaluationStatistics) {
                final QueryJoinSelectOptimizer qjso = new QueryJoinSelectOptimizer(stats, selectEvalDAO);
                qjso.optimize(tupleExpr, dataset, bindings);
            } else {
                final QueryJoinOptimizer qjo = new QueryJoinOptimizer(stats);
                // TODO: Make pluggable
                qjo.optimize(tupleExpr, dataset, bindings);
            }
        }
        final CloseableIteration<BindingSet, QueryEvaluationException> iter = strategy.evaluate(tupleExpr, EmptyBindingSet.getInstance());
        // Wrap the result so that closing the iteration also shuts down the parallel
        // evaluation strategy (thread pool cleanup).
        final CloseableIteration<BindingSet, QueryEvaluationException> iterWrap = new CloseableIteration<BindingSet, QueryEvaluationException>() {

            @Override
            public void remove() throws QueryEvaluationException {
                iter.remove();
            }

            @Override
            public BindingSet next() throws QueryEvaluationException {
                return iter.next();
            }

            @Override
            public boolean hasNext() throws QueryEvaluationException {
                return iter.hasNext();
            }

            @Override
            public void close() throws QueryEvaluationException {
                iter.close();
                strategy.shutdown();
            }
        };
        return iterWrap;
    } catch (final QueryEvaluationException e) {
        throw new SailException(e);
    } catch (final Exception e) {
        // Boundary catch: any failure during optimization or evaluation surfaces as a
        // SailException per the SailConnection contract.
        throw new SailException(e);
    }
}
Also used : OneOfVisitor(org.apache.rya.rdftriplestore.inference.OneOfVisitor) FilterOptimizer(org.openrdf.query.algebra.evaluation.impl.FilterOptimizer) SameTermFilterOptimizer(org.openrdf.query.algebra.evaluation.impl.SameTermFilterOptimizer) EvaluationStatistics(org.openrdf.query.algebra.evaluation.impl.EvaluationStatistics) RdfCloudTripleStoreSelectivityEvaluationStatistics(org.apache.rya.rdftriplestore.evaluation.RdfCloudTripleStoreSelectivityEvaluationStatistics) RdfCloudTripleStoreEvaluationStatistics(org.apache.rya.rdftriplestore.evaluation.RdfCloudTripleStoreEvaluationStatistics) DisjunctiveConstraintOptimizer(org.openrdf.query.algebra.evaluation.impl.DisjunctiveConstraintOptimizer) DomainRangeVisitor(org.apache.rya.rdftriplestore.inference.DomainRangeVisitor) IterativeEvaluationOptimizer(org.openrdf.query.algebra.evaluation.impl.IterativeEvaluationOptimizer) BindingAssigner(org.openrdf.query.algebra.evaluation.impl.BindingAssigner) QueryRoot(org.openrdf.query.algebra.QueryRoot) QueryJoinOptimizer(org.apache.rya.rdftriplestore.evaluation.QueryJoinOptimizer) InverseOfVisitor(org.apache.rya.rdftriplestore.inference.InverseOfVisitor) Binding(org.openrdf.query.Binding) QueryBindingSet(org.openrdf.query.algebra.evaluation.QueryBindingSet) EmptyBindingSet(org.openrdf.query.impl.EmptyBindingSet) BindingSet(org.openrdf.query.BindingSet) ProvenanceCollectionException(org.apache.rya.rdftriplestore.provenance.ProvenanceCollectionException) ReflexivePropertyVisitor(org.apache.rya.rdftriplestore.inference.ReflexivePropertyVisitor) SymmetricPropertyVisitor(org.apache.rya.rdftriplestore.inference.SymmetricPropertyVisitor) QueryJoinSelectOptimizer(org.apache.rya.rdftriplestore.evaluation.QueryJoinSelectOptimizer) QueryBindingSet(org.openrdf.query.algebra.evaluation.QueryBindingSet) IntersectionOfVisitor(org.apache.rya.rdftriplestore.inference.IntersectionOfVisitor) QueryEvaluationException(org.openrdf.query.QueryEvaluationException) 
RdfCloudTripleStoreEvaluationStatistics(org.apache.rya.rdftriplestore.evaluation.RdfCloudTripleStoreEvaluationStatistics) HasSelfVisitor(org.apache.rya.rdftriplestore.inference.HasSelfVisitor) OrderLimitOptimizer(org.openrdf.query.algebra.evaluation.impl.OrderLimitOptimizer) SameAsVisitor(org.apache.rya.rdftriplestore.inference.SameAsVisitor) HasValueVisitor(org.apache.rya.rdftriplestore.inference.HasValueVisitor) SameTermFilterOptimizer(org.openrdf.query.algebra.evaluation.impl.SameTermFilterOptimizer) SubClassOfVisitor(org.apache.rya.rdftriplestore.inference.SubClassOfVisitor) SeparateFilterJoinsVisitor(org.apache.rya.rdftriplestore.evaluation.SeparateFilterJoinsVisitor) Configurable(org.apache.hadoop.conf.Configurable) PropertyChainVisitor(org.apache.rya.rdftriplestore.inference.PropertyChainVisitor) FilterRangeVisitor(org.apache.rya.rdftriplestore.evaluation.FilterRangeVisitor) ParallelEvaluationStrategyImpl(org.apache.rya.rdftriplestore.evaluation.ParallelEvaluationStrategyImpl) TransitivePropertyVisitor(org.apache.rya.rdftriplestore.inference.TransitivePropertyVisitor) SomeValuesFromVisitor(org.apache.rya.rdftriplestore.inference.SomeValuesFromVisitor) DefaultStatistics(org.apache.rya.rdftriplestore.utils.DefaultStatistics) QueryModelNormalizer(org.openrdf.query.algebra.evaluation.impl.QueryModelNormalizer) ConjunctiveConstraintSplitter(org.openrdf.query.algebra.evaluation.impl.ConjunctiveConstraintSplitter) SubPropertyOfVisitor(org.apache.rya.rdftriplestore.inference.SubPropertyOfVisitor) RdfCloudTripleStoreSelectivityEvaluationStatistics(org.apache.rya.rdftriplestore.evaluation.RdfCloudTripleStoreSelectivityEvaluationStatistics) SailException(org.openrdf.sail.SailException) QueryOptimizer(org.openrdf.query.algebra.evaluation.QueryOptimizer) SailException(org.openrdf.sail.SailException) ProvenanceCollectionException(org.apache.rya.rdftriplestore.provenance.ProvenanceCollectionException) QueryEvaluationException(org.openrdf.query.QueryEvaluationException) 
NoSuchElementException(java.util.NoSuchElementException) RyaDAOException(org.apache.rya.api.persist.RyaDAOException) CloseableIteration(info.aduna.iteration.CloseableIteration) CompareOptimizer(org.openrdf.query.algebra.evaluation.impl.CompareOptimizer) ConstantOptimizer(org.openrdf.query.algebra.evaluation.impl.ConstantOptimizer) AllValuesFromVisitor(org.apache.rya.rdftriplestore.inference.AllValuesFromVisitor)

Example 2 with HasValueVisitor

use of org.apache.rya.rdftriplestore.inference.HasValueVisitor in project incubator-rya by apache.

In the class HasValueVisitorTest, the method testRewriteValuePattern:

@Test
public void testRewriteValuePattern() throws Exception {
    // Stub the inference engine so that querying the hasCharacteristic property
    // yields an ontology mapping each type to its owl:hasValue characteristics:
    final InferenceEngine inferenceEngine = mock(InferenceEngine.class);
    final Set<Value> chordateCharacteristics = new HashSet<>();
    chordateCharacteristics.add(notochord);
    final Set<Value> vertebrateCharacteristics = new HashSet<>(chordateCharacteristics);
    vertebrateCharacteristics.add(skull);
    final Map<Resource, Set<Value>> typeToCharacteristic = new HashMap<>();
    typeToCharacteristic.put(chordate, chordateCharacteristics);
    typeToCharacteristic.put(tunicate, chordateCharacteristics);
    typeToCharacteristic.put(vertebrate, vertebrateCharacteristics);
    typeToCharacteristic.put(mammal, vertebrateCharacteristics);
    when(inferenceEngine.getHasValueByProperty(hasCharacteristic)).thenReturn(typeToCharacteristic);
    // Build a query over that property and apply the visitor:
    final Projection query = new Projection(new StatementPattern(new Var("s"), new Var("p", hasCharacteristic), new Var("o")), new ProjectionElemList(new ProjectionElem("s", "subject"), new ProjectionElem("o", "characteristic")));
    query.visit(new HasValueVisitor(conf, inferenceEngine));
    // The rewrite should produce Union(Join(FSP, SP), [original SP]); branch order is
    // unspecified, so detect which side holds the join:
    Assert.assertTrue(query.getArg() instanceof Union);
    final Union union = (Union) query.getArg();
    final StatementPattern originalSP = new StatementPattern(new Var("s"), new Var("p", hasCharacteristic), new Var("o"));
    final Join join;
    if (union.getLeftArg() instanceof Join) {
        join = (Join) union.getLeftArg();
        Assert.assertEquals(originalSP, union.getRightArg());
    } else {
        Assert.assertTrue(union.getRightArg() instanceof Join);
        join = (Join) union.getRightArg();
        Assert.assertEquals(originalSP, union.getLeftArg());
    }
    Assert.assertTrue(join.getLeftArg() instanceof FixedStatementPattern);
    Assert.assertTrue(join.getRightArg() instanceof StatementPattern);
    final FixedStatementPattern fsp = (FixedStatementPattern) join.getLeftArg();
    final StatementPattern sp = (StatementPattern) join.getRightArg();
    // The join must pair FSP{ ?t _ ?originalObjectVar } with { ?originalSubjectVar rdf:type ?t }:
    Assert.assertEquals(originalSP.getSubjectVar(), sp.getSubjectVar());
    Assert.assertEquals(RDF.TYPE, sp.getPredicateVar().getValue());
    Assert.assertEquals(fsp.getSubjectVar(), sp.getObjectVar());
    Assert.assertEquals(originalSP.getObjectVar(), fsp.getObjectVar());
    // The FSP must enumerate every (type, characteristic) pair from the ontology:
    final URI fspPred = (URI) fsp.getPredicateVar().getValue();
    final Set<Statement> expectedStatements = new HashSet<>();
    expectedStatements.add(vf.createStatement(chordate, fspPred, notochord));
    expectedStatements.add(vf.createStatement(tunicate, fspPred, notochord));
    expectedStatements.add(vf.createStatement(vertebrate, fspPred, notochord));
    expectedStatements.add(vf.createStatement(mammal, fspPred, notochord));
    expectedStatements.add(vf.createStatement(vertebrate, fspPred, skull));
    expectedStatements.add(vf.createStatement(mammal, fspPred, skull));
    Assert.assertEquals(expectedStatements, new HashSet<>(fsp.statements));
}
Also used : ProjectionElemList(org.openrdf.query.algebra.ProjectionElemList) HashSet(java.util.HashSet) Set(java.util.Set) HasValueVisitor(org.apache.rya.rdftriplestore.inference.HasValueVisitor) HashMap(java.util.HashMap) Var(org.openrdf.query.algebra.Var) Statement(org.openrdf.model.Statement) Resource(org.openrdf.model.Resource) Projection(org.openrdf.query.algebra.Projection) Join(org.openrdf.query.algebra.Join) URI(org.openrdf.model.URI) Union(org.openrdf.query.algebra.Union) FixedStatementPattern(org.apache.rya.rdftriplestore.utils.FixedStatementPattern) StatementPattern(org.openrdf.query.algebra.StatementPattern) InferenceEngine(org.apache.rya.rdftriplestore.inference.InferenceEngine) Value(org.openrdf.model.Value) ProjectionElem(org.openrdf.query.algebra.ProjectionElem) FixedStatementPattern(org.apache.rya.rdftriplestore.utils.FixedStatementPattern) HashSet(java.util.HashSet) Test(org.junit.Test)

Example 3 with HasValueVisitor

use of org.apache.rya.rdftriplestore.inference.HasValueVisitor in project incubator-rya by apache.

In the class HasValueVisitorTest, the method testRewriteTypePattern:

@Test
public void testRewriteTypePattern() throws Exception {
    // Stub the inference engine so that asking about the vertebrate type yields
    // owl:hasValue restrictions on two properties:
    final InferenceEngine inferenceEngine = mock(InferenceEngine.class);
    final Map<URI, Set<Value>> vertebrateValues = new HashMap<>();
    vertebrateValues.put(hasCharacteristic, new HashSet<>());
    vertebrateValues.put(belongsTo, new HashSet<>());
    vertebrateValues.get(hasCharacteristic).add(notochord);
    vertebrateValues.get(hasCharacteristic).add(skull);
    vertebrateValues.get(belongsTo).add(chordata);
    when(inferenceEngine.getHasValueByType(vertebrate)).thenReturn(vertebrateValues);
    // Build an rdf:type query for that type and apply the visitor:
    final Projection query = new Projection(new StatementPattern(new Var("s"), new Var("p", RDF.TYPE), new Var("o", vertebrate)), new ProjectionElemList(new ProjectionElem("s", "subject")));
    query.visit(new HasValueVisitor(conf, inferenceEngine));
    // Expected structure: two nested unions whose members are (in some order) the original
    // statement pattern and two joins, one for each unique property involved in a relevant
    // restriction. Each join should be between a StatementPattern for the property and a
    // FixedStatementPattern providing the value(s).
    // Flatten the union tree into a set, ignoring nesting order:
    Assert.assertTrue(query.getArg() instanceof Union);
    final Union union1 = (Union) query.getArg();
    final Set<TupleExpr> unionArgs = new HashSet<>();
    if (union1.getLeftArg() instanceof Union) {
        final Union nested = (Union) union1.getLeftArg();
        unionArgs.add(nested.getLeftArg());
        unionArgs.add(nested.getRightArg());
        unionArgs.add(union1.getRightArg());
    } else {
        Assert.assertTrue(union1.getRightArg() instanceof Union);
        final Union nested = (Union) union1.getRightArg();
        unionArgs.add(union1.getLeftArg());
        unionArgs.add(nested.getLeftArg());
        unionArgs.add(nested.getRightArg());
    }
    // Sort the members: one plain StatementPattern plus two Join(FSP, SP) nodes:
    final StatementPattern directSP = new StatementPattern(new Var("s"), new Var("p", RDF.TYPE), new Var("o", vertebrate));
    StatementPattern actualSP = null;
    FixedStatementPattern hasCharacteristicFSP = null;
    FixedStatementPattern belongsToFSP = null;
    for (final TupleExpr arg : unionArgs) {
        if (arg instanceof StatementPattern) {
            actualSP = (StatementPattern) arg;
            continue;
        }
        Assert.assertTrue(arg instanceof Join);
        final Join join = (Join) arg;
        Assert.assertTrue(join.getLeftArg() instanceof FixedStatementPattern);
        Assert.assertTrue(join.getRightArg() instanceof StatementPattern);
        final FixedStatementPattern fsp = (FixedStatementPattern) join.getLeftArg();
        final StatementPattern sp = (StatementPattern) join.getRightArg();
        // Should join FSP([unused], property, ?value) with SP(subject, property, ?value)
        Assert.assertEquals(directSP.getSubjectVar(), sp.getSubjectVar());
        Assert.assertEquals(fsp.getPredicateVar(), sp.getPredicateVar());
        Assert.assertEquals(fsp.getObjectVar(), sp.getObjectVar());
        final Value property = fsp.getPredicateVar().getValue();
        if (hasCharacteristic.equals(property)) {
            hasCharacteristicFSP = fsp;
        } else if (belongsTo.equals(property)) {
            belongsToFSP = fsp;
        } else {
            Assert.fail("Unexpected property variable in rewritten query: " + fsp.getPredicateVar());
        }
    }
    Assert.assertEquals(directSP, actualSP);
    Assert.assertNotNull(hasCharacteristicFSP);
    Assert.assertNotNull(belongsToFSP);
    // Each FSP must carry exactly the restriction values for its property:
    Assert.assertEquals(2, hasCharacteristicFSP.statements.size());
    Assert.assertTrue(hasCharacteristicFSP.statements.contains(vf.createStatement(vertebrate, hasCharacteristic, skull)));
    Assert.assertTrue(hasCharacteristicFSP.statements.contains(vf.createStatement(vertebrate, hasCharacteristic, notochord)));
    Assert.assertEquals(1, belongsToFSP.statements.size());
    Assert.assertTrue(belongsToFSP.statements.contains(vf.createStatement(vertebrate, belongsTo, chordata)));
}
Also used : ProjectionElemList(org.openrdf.query.algebra.ProjectionElemList) HashSet(java.util.HashSet) Set(java.util.Set) HasValueVisitor(org.apache.rya.rdftriplestore.inference.HasValueVisitor) HashMap(java.util.HashMap) Var(org.openrdf.query.algebra.Var) Projection(org.openrdf.query.algebra.Projection) Join(org.openrdf.query.algebra.Join) URI(org.openrdf.model.URI) Union(org.openrdf.query.algebra.Union) TupleExpr(org.openrdf.query.algebra.TupleExpr) FixedStatementPattern(org.apache.rya.rdftriplestore.utils.FixedStatementPattern) StatementPattern(org.openrdf.query.algebra.StatementPattern) InferenceEngine(org.apache.rya.rdftriplestore.inference.InferenceEngine) ProjectionElem(org.openrdf.query.algebra.ProjectionElem) FixedStatementPattern(org.apache.rya.rdftriplestore.utils.FixedStatementPattern) HashSet(java.util.HashSet) Test(org.junit.Test)

Aggregations

HasValueVisitor (org.apache.rya.rdftriplestore.inference.HasValueVisitor)3 HashMap (java.util.HashMap)2 HashSet (java.util.HashSet)2 Set (java.util.Set)2 InferenceEngine (org.apache.rya.rdftriplestore.inference.InferenceEngine)2 FixedStatementPattern (org.apache.rya.rdftriplestore.utils.FixedStatementPattern)2 Test (org.junit.Test)2 URI (org.openrdf.model.URI)2 CloseableIteration (info.aduna.iteration.CloseableIteration)1 NoSuchElementException (java.util.NoSuchElementException)1 Configurable (org.apache.hadoop.conf.Configurable)1 RyaDAOException (org.apache.rya.api.persist.RyaDAOException)1 FilterRangeVisitor (org.apache.rya.rdftriplestore.evaluation.FilterRangeVisitor)1 ParallelEvaluationStrategyImpl (org.apache.rya.rdftriplestore.evaluation.ParallelEvaluationStrategyImpl)1 QueryJoinOptimizer (org.apache.rya.rdftriplestore.evaluation.QueryJoinOptimizer)1 QueryJoinSelectOptimizer (org.apache.rya.rdftriplestore.evaluation.QueryJoinSelectOptimizer)1 RdfCloudTripleStoreEvaluationStatistics (org.apache.rya.rdftriplestore.evaluation.RdfCloudTripleStoreEvaluationStatistics)1 RdfCloudTripleStoreSelectivityEvaluationStatistics (org.apache.rya.rdftriplestore.evaluation.RdfCloudTripleStoreSelectivityEvaluationStatistics)1 SeparateFilterJoinsVisitor (org.apache.rya.rdftriplestore.evaluation.SeparateFilterJoinsVisitor)1 AllValuesFromVisitor (org.apache.rya.rdftriplestore.inference.AllValuesFromVisitor)1