Search in sources :

Example 11 with QueryModelNode

use of org.openrdf.query.algebra.QueryModelNode in project incubator-rya by apache.

From class EntityOptimizerTest, method testOptimizeQ2DiffPriority.

@Test
public void testOptimizeQ2DiffPriority() throws Exception {
    // Wire a selectivity DAO to the test Accumulo connector; the optimizer
    // under test consults it for cardinality/selectivity statistics.
    AccumuloSelectivityEvalDAO accc = new AccumuloSelectivityEvalDAO();
    accc.setConf(conf);
    accc.setConnector(accCon);
    accc.setRdfEvalDAO(res);
    accc.init();
    BatchWriter bw1 = accCon.createBatchWriter("rya_prospects", config);
    BatchWriter bw2 = accCon.createBatchWriter("rya_selectivity", config);
    // Row keys identifying the statement patterns whose stats are seeded below.
    String s1 = "predicateobject" + DELIM + "http://www.w3.org/2000/01/rdf-schema#label" + DELIM + "uri:dog";
    String s2 = "predicateobject" + DELIM + "uri:barksAt" + DELIM + "uri:cat";
    String s3 = "predicate" + DELIM + "uri:peesOn";
    String s5 = "predicateobject" + DELIM + "uri:scratches" + DELIM + "uri:ears";
    String s4 = "predicateobject" + DELIM + "uri:eats" + DELIM + "uri:chickens";
    List<Mutation> mList = new ArrayList<>();
    List<Mutation> mList2 = new ArrayList<>();
    List<String> sList = Arrays.asList("subjectobject", "subjectpredicate", "subjectsubject", "objectsubject", "objectpredicate", "objectobject");
    Mutation m1, m2, m3, m4, m5, m6;
    // Seed per-pattern cardinalities ("count" column) into rya_prospects.
    m1 = new Mutation(s1 + DELIM + "1");
    m1.put(new Text("count"), new Text(""), new Value("1".getBytes()));
    m2 = new Mutation(s2 + DELIM + "1");
    m2.put(new Text("count"), new Text(""), new Value("2".getBytes()));
    m3 = new Mutation(s3 + DELIM + "1");
    m3.put(new Text("count"), new Text(""), new Value("2".getBytes()));
    m4 = new Mutation(s4 + DELIM + "1");
    m4.put(new Text("count"), new Text(""), new Value("3".getBytes()));
    m5 = new Mutation(s5 + DELIM + "1");
    m5.put(new Text("count"), new Text(""), new Value("3".getBytes()));
    mList.add(m1);
    mList.add(m2);
    mList.add(m3);
    mList.add(m4);
    mList.add(m5);
    bw1.addMutations(mList);
    bw1.close();
    // Seed per-join-type selectivities into rya_selectivity, plus the
    // FullTableCardinality row the selectivity DAO reads.
    m1 = new Mutation(s1);
    m2 = new Mutation(s2);
    m3 = new Mutation(s3);
    m4 = new Mutation(s4);
    m5 = new Mutation(s5);
    m6 = new Mutation(new Text("subjectpredicateobject" + DELIM + "FullTableCardinality"));
    m6.put(new Text("FullTableCardinality"), new Text("100"), EMPTY_VAL);
    for (String s : sList) {
        m1.put(new Text(s), new Text(Integer.toString(2)), EMPTY_VAL);
        m2.put(new Text(s), new Text(Integer.toString(2)), EMPTY_VAL);
        m3.put(new Text(s), new Text(Integer.toString(3)), EMPTY_VAL);
        m4.put(new Text(s), new Text(Integer.toString(3)), EMPTY_VAL);
        m5.put(new Text(s), new Text(Integer.toString(3)), EMPTY_VAL);
    }
    mList2.add(m1);
    mList2.add(m2);
    mList2.add(m3);
    mList2.add(m4);
    mList2.add(m5);
    mList2.add(m6);
    bw2.addMutations(mList2);
    bw2.close();
    TupleExpr te = getTupleExpr(q5);
    EntityOptimizer cco = new EntityOptimizer(accc);
    System.out.println("Original query is " + te);
    cco.optimize(te, null, null);
    // Collect the entity-centric nodes the optimizer inserted into the plan.
    EntityCentricVisitor ccv = new EntityCentricVisitor();
    te.visit(ccv);
    List<QueryModelNode> nodes = Lists.newArrayList(ccv.getCcNodes());
    Assert.assertEquals(2, nodes.size());
    // Expect one 2-node star on common var "h" and one 3-node star on "m".
    for (QueryModelNode q : nodes) {
        EntityTupleSet ets = (EntityTupleSet) q;
        int starSize = ets.getStarQuery().getNodes().size();
        if (starSize == 2) {
            Assert.assertEquals("h", ets.getStarQuery().getCommonVarName());
        } else if (starSize == 3) {
            Assert.assertEquals("m", ets.getStarQuery().getCommonVarName());
        } else {
            Assert.fail("Unexpected star query size: " + starSize);
        }
    }
    System.out.println(te);
}
Also used : EntityOptimizer(org.apache.rya.indexing.accumulo.entity.EntityOptimizer) EntityTupleSet(org.apache.rya.indexing.accumulo.entity.EntityTupleSet) ArrayList(java.util.ArrayList) AccumuloSelectivityEvalDAO(org.apache.rya.joinselect.AccumuloSelectivityEvalDAO) QueryModelNode(org.openrdf.query.algebra.QueryModelNode) Text(org.apache.hadoop.io.Text) TupleExpr(org.openrdf.query.algebra.TupleExpr) Value(org.apache.accumulo.core.data.Value) BatchWriter(org.apache.accumulo.core.client.BatchWriter) Mutation(org.apache.accumulo.core.data.Mutation) Test(org.junit.Test)

Example 12 with QueryModelNode

use of org.openrdf.query.algebra.QueryModelNode in project incubator-rya by apache.

From class EntityOptimizerTest, method testOptimizeQ2DiffPriority2.

@Test
public void testOptimizeQ2DiffPriority2() throws Exception {
    // Same setup as testOptimizeQ2DiffPriority but with uniform cardinalities
    // and a lower selectivity on s5, flipping which star query wins priority.
    AccumuloSelectivityEvalDAO accc = new AccumuloSelectivityEvalDAO();
    accc.setConf(conf);
    accc.setConnector(accCon);
    accc.setRdfEvalDAO(res);
    accc.init();
    BatchWriter bw1 = accCon.createBatchWriter("rya_prospects", config);
    BatchWriter bw2 = accCon.createBatchWriter("rya_selectivity", config);
    // Row keys identifying the statement patterns whose stats are seeded below.
    String s1 = "predicateobject" + DELIM + "http://www.w3.org/2000/01/rdf-schema#label" + DELIM + "uri:dog";
    String s2 = "predicateobject" + DELIM + "uri:barksAt" + DELIM + "uri:cat";
    String s3 = "predicate" + DELIM + "uri:peesOn";
    String s5 = "predicateobject" + DELIM + "uri:scratches" + DELIM + "uri:ears";
    String s4 = "predicateobject" + DELIM + "uri:eats" + DELIM + "uri:chickens";
    List<Mutation> mList = new ArrayList<>();
    List<Mutation> mList2 = new ArrayList<>();
    List<String> sList = Arrays.asList("subjectobject", "subjectpredicate", "subjectsubject", "objectsubject", "objectpredicate", "objectobject");
    Mutation m1, m2, m3, m4, m5, m6;
    // Seed identical per-pattern cardinalities ("count" = 2) into rya_prospects.
    m1 = new Mutation(s1 + DELIM + "1");
    m1.put(new Text("count"), new Text(""), new Value("2".getBytes()));
    m2 = new Mutation(s2 + DELIM + "1");
    m2.put(new Text("count"), new Text(""), new Value("2".getBytes()));
    m3 = new Mutation(s3 + DELIM + "1");
    m3.put(new Text("count"), new Text(""), new Value("2".getBytes()));
    m4 = new Mutation(s4 + DELIM + "1");
    m4.put(new Text("count"), new Text(""), new Value("2".getBytes()));
    m5 = new Mutation(s5 + DELIM + "1");
    m5.put(new Text("count"), new Text(""), new Value("2".getBytes()));
    mList.add(m1);
    mList.add(m2);
    mList.add(m3);
    mList.add(m4);
    mList.add(m5);
    bw1.addMutations(mList);
    bw1.close();
    // Seed per-join-type selectivities into rya_selectivity; s5 gets a lower
    // value (1) than the others (2). Include the required FullTableCardinality row.
    m1 = new Mutation(s1);
    m2 = new Mutation(s2);
    m3 = new Mutation(s3);
    m4 = new Mutation(s4);
    m5 = new Mutation(s5);
    m6 = new Mutation(new Text("subjectpredicateobject" + DELIM + "FullTableCardinality"));
    m6.put(new Text("FullTableCardinality"), new Text("100"), EMPTY_VAL);
    for (String s : sList) {
        m1.put(new Text(s), new Text(Integer.toString(2)), EMPTY_VAL);
        m2.put(new Text(s), new Text(Integer.toString(2)), EMPTY_VAL);
        m3.put(new Text(s), new Text(Integer.toString(2)), EMPTY_VAL);
        m4.put(new Text(s), new Text(Integer.toString(2)), EMPTY_VAL);
        m5.put(new Text(s), new Text(Integer.toString(1)), EMPTY_VAL);
    }
    mList2.add(m1);
    mList2.add(m2);
    mList2.add(m3);
    mList2.add(m4);
    mList2.add(m5);
    mList2.add(m6);
    bw2.addMutations(mList2);
    bw2.close();
    TupleExpr te = getTupleExpr(q5);
    EntityOptimizer cco = new EntityOptimizer(accc);
    System.out.println("Original query is " + te);
    cco.optimize(te, null, null);
    // Collect the entity-centric nodes the optimizer inserted into the plan.
    EntityCentricVisitor ccv = new EntityCentricVisitor();
    te.visit(ccv);
    List<QueryModelNode> nodes = Lists.newArrayList(ccv.getCcNodes());
    Assert.assertEquals(2, nodes.size());
    // Common var names are swapped relative to testOptimizeQ2DiffPriority:
    // the 2-node star is on "m" and the 3-node star is on "h".
    for (QueryModelNode q : nodes) {
        EntityTupleSet ets = (EntityTupleSet) q;
        int starSize = ets.getStarQuery().getNodes().size();
        if (starSize == 2) {
            Assert.assertEquals("m", ets.getStarQuery().getCommonVarName());
        } else if (starSize == 3) {
            Assert.assertEquals("h", ets.getStarQuery().getCommonVarName());
        } else {
            Assert.fail("Unexpected star query size: " + starSize);
        }
    }
    System.out.println(te);
}
Also used : EntityOptimizer(org.apache.rya.indexing.accumulo.entity.EntityOptimizer) EntityTupleSet(org.apache.rya.indexing.accumulo.entity.EntityTupleSet) ArrayList(java.util.ArrayList) AccumuloSelectivityEvalDAO(org.apache.rya.joinselect.AccumuloSelectivityEvalDAO) QueryModelNode(org.openrdf.query.algebra.QueryModelNode) Text(org.apache.hadoop.io.Text) TupleExpr(org.openrdf.query.algebra.TupleExpr) Value(org.apache.accumulo.core.data.Value) BatchWriter(org.apache.accumulo.core.client.BatchWriter) Mutation(org.apache.accumulo.core.data.Mutation) Test(org.junit.Test)

Example 13 with QueryModelNode

use of org.openrdf.query.algebra.QueryModelNode in project incubator-rya by apache.

From class EntityOptimizerTest, method testOptimizeQ6DiffPriority.

@Test
public void testOptimizeQ6DiffPriority() throws Exception {
    // Larger variant: eleven statement patterns feeding query q6; expects the
    // optimizer to form three entity-centric stars.
    AccumuloSelectivityEvalDAO accc = new AccumuloSelectivityEvalDAO();
    accc.setConf(conf);
    accc.setConnector(accCon);
    accc.setRdfEvalDAO(res);
    accc.init();
    BatchWriter bw1 = accCon.createBatchWriter("rya_prospects", config);
    BatchWriter bw2 = accCon.createBatchWriter("rya_selectivity", config);
    // Row keys identifying the statement patterns whose stats are seeded below.
    String s1 = "predicateobject" + DELIM + "http://www.w3.org/2000/01/rdf-schema#label" + DELIM + "uri:dog";
    String s2 = "predicateobject" + DELIM + "uri:barksAt" + DELIM + "uri:cat";
    String s3 = "predicateobject" + DELIM + "uri:peesOn" + DELIM + "uri:hydrant";
    String s5 = "predicateobject" + DELIM + "uri:scratches" + DELIM + "uri:ears";
    String s4 = "predicateobject" + DELIM + "uri:eats" + DELIM + "uri:chickens";
    String s6 = "predicateobject" + DELIM + "uri:eats" + DELIM + "uri:kibble";
    String s7 = "predicateobject" + DELIM + "uri:rollsIn" + DELIM + "uri:mud";
    String s8 = "predicateobject" + DELIM + "uri:runsIn" + DELIM + "uri:field";
    String s9 = "predicate" + DELIM + "uri:smells";
    String s10 = "predicateobject" + DELIM + "uri:eats" + DELIM + "uri:sticks";
    String s11 = "predicate" + DELIM + "uri:watches";
    List<Mutation> mList = new ArrayList<>();
    List<Mutation> mList2 = new ArrayList<>();
    List<String> sList = Arrays.asList("subjectobject", "subjectpredicate", "subjectsubject", "objectsubject", "objectpredicate", "objectobject");
    Mutation m1, m2, m3, m4, m5, m6, m7, m8, m9, m10, m11, m12;
    // Seed per-pattern cardinalities ("count" column) into rya_prospects;
    // s6 and s10 are cheaper (count = 1) than the rest (count = 2).
    m1 = new Mutation(s1 + DELIM + "1");
    m1.put(new Text("count"), new Text(""), new Value("2".getBytes()));
    m2 = new Mutation(s2 + DELIM + "1");
    m2.put(new Text("count"), new Text(""), new Value("2".getBytes()));
    m3 = new Mutation(s3 + DELIM + "1");
    m3.put(new Text("count"), new Text(""), new Value("2".getBytes()));
    m4 = new Mutation(s4 + DELIM + "1");
    m4.put(new Text("count"), new Text(""), new Value("2".getBytes()));
    m5 = new Mutation(s5 + DELIM + "1");
    m5.put(new Text("count"), new Text(""), new Value("2".getBytes()));
    m6 = new Mutation(s6 + DELIM + "1");
    m6.put(new Text("count"), new Text(""), new Value("1".getBytes()));
    m7 = new Mutation(s7 + DELIM + "1");
    m7.put(new Text("count"), new Text(""), new Value("2".getBytes()));
    m8 = new Mutation(s8 + DELIM + "1");
    m8.put(new Text("count"), new Text(""), new Value("2".getBytes()));
    m9 = new Mutation(s9 + DELIM + "1");
    m9.put(new Text("count"), new Text(""), new Value("2".getBytes()));
    m10 = new Mutation(s10 + DELIM + "1");
    m10.put(new Text("count"), new Text(""), new Value("1".getBytes()));
    m11 = new Mutation(s11 + DELIM + "1");
    m11.put(new Text("count"), new Text(""), new Value("2".getBytes()));
    mList.add(m1);
    mList.add(m2);
    mList.add(m3);
    mList.add(m4);
    mList.add(m5);
    mList.add(m6);
    mList.add(m7);
    mList.add(m8);
    mList.add(m9);
    mList.add(m10);
    mList.add(m11);
    bw1.addMutations(mList);
    bw1.close();
    // Seed per-join-type selectivities into rya_selectivity, plus the
    // FullTableCardinality row the selectivity DAO reads.
    m1 = new Mutation(s1);
    m2 = new Mutation(s2);
    m3 = new Mutation(s3);
    m4 = new Mutation(s4);
    m5 = new Mutation(s5);
    m6 = new Mutation(s6);
    m7 = new Mutation(s7);
    m8 = new Mutation(s8);
    m9 = new Mutation(s9);
    m10 = new Mutation(s10);
    m11 = new Mutation(s11);
    m12 = new Mutation(new Text("subjectpredicateobject" + DELIM + "FullTableCardinality"));
    m12.put(new Text("FullTableCardinality"), new Text("100"), EMPTY_VAL);
    for (String s : sList) {
        m1.put(new Text(s), new Text(Integer.toString(1)), EMPTY_VAL);
        m2.put(new Text(s), new Text(Integer.toString(1)), EMPTY_VAL);
        m3.put(new Text(s), new Text(Integer.toString(1)), EMPTY_VAL);
        m4.put(new Text(s), new Text(Integer.toString(1)), EMPTY_VAL);
        m5.put(new Text(s), new Text(Integer.toString(1)), EMPTY_VAL);
        m6.put(new Text(s), new Text(Integer.toString(1)), EMPTY_VAL);
        m7.put(new Text(s), new Text(Integer.toString(2)), EMPTY_VAL);
        m8.put(new Text(s), new Text(Integer.toString(2)), EMPTY_VAL);
        m9.put(new Text(s), new Text(Integer.toString(2)), EMPTY_VAL);
        m10.put(new Text(s), new Text(Integer.toString(1)), EMPTY_VAL);
        m11.put(new Text(s), new Text(Integer.toString(2)), EMPTY_VAL);
    }
    mList2.add(m1);
    mList2.add(m2);
    mList2.add(m3);
    mList2.add(m4);
    mList2.add(m5);
    mList2.add(m6);
    mList2.add(m7);
    mList2.add(m8);
    mList2.add(m9);
    mList2.add(m10);
    mList2.add(m11);
    mList2.add(m12);
    bw2.addMutations(mList2);
    bw2.close();
    TupleExpr te = getTupleExpr(q6);
    EntityOptimizer cco = new EntityOptimizer(accc);
    System.out.println("Original query is " + te);
    cco.optimize(te, null, null);
    // Collect the entity-centric nodes the optimizer inserted into the plan.
    EntityCentricVisitor ccv = new EntityCentricVisitor();
    te.visit(ccv);
    List<QueryModelNode> nodes = Lists.newArrayList(ccv.getCcNodes());
    Assert.assertEquals(3, nodes.size());
    // The two 2-node stars must cover "i" and "m" (once each, in any order);
    // the 3-node star must be on "h".
    List<String> cVarList = Lists.newArrayList();
    cVarList.add("i");
    cVarList.add("m");
    for (QueryModelNode q : nodes) {
        EntityTupleSet ets = (EntityTupleSet) q;
        int starSize = ets.getStarQuery().getNodes().size();
        if (starSize == 2) {
            String s = ets.getStarQuery().getCommonVarName();
            System.out.println("node is " + q + " and common var is " + s);
            System.out.println("star query is " + ets.getStarQuery());
            Assert.assertTrue(cVarList.contains(s));
            // Remove so a duplicate common var fails the next iteration.
            cVarList.remove(s);
        } else if (starSize == 3) {
            Assert.assertEquals("h", ets.getStarQuery().getCommonVarName());
        } else {
            Assert.fail("Unexpected star query size: " + starSize);
        }
    }
    System.out.println(te);
}
Also used : EntityOptimizer(org.apache.rya.indexing.accumulo.entity.EntityOptimizer) EntityTupleSet(org.apache.rya.indexing.accumulo.entity.EntityTupleSet) ArrayList(java.util.ArrayList) AccumuloSelectivityEvalDAO(org.apache.rya.joinselect.AccumuloSelectivityEvalDAO) QueryModelNode(org.openrdf.query.algebra.QueryModelNode) Text(org.apache.hadoop.io.Text) TupleExpr(org.openrdf.query.algebra.TupleExpr) Value(org.apache.accumulo.core.data.Value) BatchWriter(org.apache.accumulo.core.client.BatchWriter) Mutation(org.apache.accumulo.core.data.Mutation) Test(org.junit.Test)

Example 14 with QueryModelNode

use of org.openrdf.query.algebra.QueryModelNode in project incubator-rya by apache.

From class OptionalJoinSegmentTest, method testBasicOptional.

@Test
public void testBasicOptional() throws MalformedQueryException {
    // Full pattern: type triple, optional talksTo, and an rdfs:label triple.
    final String query1 =
          "SELECT ?e ?c ?l"
        + "{"
        + "  ?e a ?c . "
        + "  OPTIONAL{?e <uri:talksTo> ?l } . "
        + "  ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l "
        + "}";
    // Sub-pattern: the same query minus the rdfs:label triple.
    final String query2 =
          "SELECT ?e ?c ?l"
        + "{"
        + "  ?e a ?c . "
        + "  OPTIONAL{?e <uri:talksTo> ?l } . "
        + "}";
    final SPARQLParser sparqlParser = new SPARQLParser();
    final TupleExpr tuple1 = sparqlParser.parseQuery(query1, null).getTupleExpr();
    final TupleExpr tuple2 = sparqlParser.parseQuery(query2, null).getTupleExpr();
    final Join topJoin = (Join) ((Projection) tuple1).getArg();
    final LeftJoin optionalJoin = (LeftJoin) ((Projection) tuple2).getArg();
    final QuerySegment<ExternalTupleSet> segment1 = qFactory.getQuerySegment(topJoin);
    final QuerySegment<ExternalTupleSet> segment2 = qFactory.getQuerySegment(optionalJoin);
    // The full segment should contain the sub-segment and round-trip its query.
    Assert.assertEquals(true, segment1.containsQuerySegment(segment2));
    Assert.assertEquals(topJoin, segment1.getQuery().getTupleExpr());
    // Swap the matched sub-segment for an external (precomputed) tuple set.
    final SimpleExternalTupleSet pcj = new SimpleExternalTupleSet((Projection) tuple2);
    final QueryModelNode untouched = segment1.getOrderedNodes().get(0);
    segment1.replaceWithExternalSet(segment2, pcj);
    // Afterwards the segment holds exactly the untouched node plus the PCJ.
    final Set<QueryModelNode> expected = new HashSet<>();
    expected.add(untouched);
    expected.add(pcj);
    Assert.assertEquals(expected, segment1.getUnOrderedNodes());
}
Also used : SPARQLParser(org.openrdf.query.parser.sparql.SPARQLParser) ParsedQuery(org.openrdf.query.parser.ParsedQuery) LeftJoin(org.openrdf.query.algebra.LeftJoin) Join(org.openrdf.query.algebra.Join) QueryModelNode(org.openrdf.query.algebra.QueryModelNode) TupleExpr(org.openrdf.query.algebra.TupleExpr) SimpleExternalTupleSet(org.apache.rya.indexing.external.tupleSet.SimpleExternalTupleSet) ExternalTupleSet(org.apache.rya.indexing.external.tupleSet.ExternalTupleSet) SimpleExternalTupleSet(org.apache.rya.indexing.external.tupleSet.SimpleExternalTupleSet) LeftJoin(org.openrdf.query.algebra.LeftJoin) HashSet(java.util.HashSet) Test(org.junit.Test)

Example 15 with QueryModelNode

use of org.openrdf.query.algebra.QueryModelNode in project incubator-rya by apache.

From class OptionalJoinSegmentTest, method testBasicOptionalWithFilter.

@Test
public void testBasicOptionalWithFilter() throws Exception {
    // Full pattern: filter on ?e plus label, type, and optional talksTo triples.
    final String query1 =
          "SELECT ?e ?c ?l"
        + "{"
        + " Filter(?e = <uri:Bob>)"
        + "  ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l . "
        + "  ?e a ?c . "
        + "  OPTIONAL{?e <uri:talksTo> ?l } . "
        + "}";
    // Sub-pattern: same filter and optional, without the rdfs:label triple.
    final String query2 =
          "SELECT ?e ?c ?l"
        + "{"
        + " Filter(?e = <uri:Bob>)"
        + "  ?e a ?c . "
        + "  OPTIONAL{?e <uri:talksTo> ?l } . "
        + "}";
    final SPARQLParser sparqlParser = new SPARQLParser();
    final TupleExpr tuple1 = sparqlParser.parseQuery(query1, null).getTupleExpr();
    final TupleExpr tuple2 = sparqlParser.parseQuery(query2, null).getTupleExpr();
    // Normalize query1 so its filter sits directly beneath the projection.
    TopOfQueryFilterRelocator.moveFiltersToTop(tuple1);
    final Filter filter1 = (Filter) ((Projection) tuple1).getArg();
    final Filter filter2 = (Filter) ((Projection) tuple2).getArg();
    final QuerySegment<ExternalTupleSet> segment1 = qFactory.getQuerySegment(filter1);
    final QuerySegment<ExternalTupleSet> segment2 = qFactory.getQuerySegment(filter2);
    Assert.assertEquals(filter1, segment1.getQuery().getTupleExpr());
    Assert.assertEquals(true, segment1.containsQuerySegment(segment2));
    // Swap the matched sub-segment for an external (precomputed) tuple set,
    // first capturing the ordered node (index 3) that replacement leaves behind.
    final SimpleExternalTupleSet pcj = new SimpleExternalTupleSet((Projection) tuple2);
    final QueryModelNode untouched = segment1.getOrderedNodes().get(3);
    segment1.replaceWithExternalSet(segment2, pcj);
    final Set<QueryModelNode> expected = new HashSet<>();
    expected.add(untouched);
    expected.add(pcj);
    Assert.assertEquals(expected, segment1.getUnOrderedNodes());
}
Also used : SPARQLParser(org.openrdf.query.parser.sparql.SPARQLParser) SimpleExternalTupleSet(org.apache.rya.indexing.external.tupleSet.SimpleExternalTupleSet) ParsedQuery(org.openrdf.query.parser.ParsedQuery) Filter(org.openrdf.query.algebra.Filter) QueryModelNode(org.openrdf.query.algebra.QueryModelNode) TupleExpr(org.openrdf.query.algebra.TupleExpr) SimpleExternalTupleSet(org.apache.rya.indexing.external.tupleSet.SimpleExternalTupleSet) ExternalTupleSet(org.apache.rya.indexing.external.tupleSet.ExternalTupleSet) HashSet(java.util.HashSet) Test(org.junit.Test)

Aggregations

QueryModelNode (org.openrdf.query.algebra.QueryModelNode)98 TupleExpr (org.openrdf.query.algebra.TupleExpr)74 Test (org.junit.Test)68 ArrayList (java.util.ArrayList)63 ParsedQuery (org.openrdf.query.parser.ParsedQuery)63 SPARQLParser (org.openrdf.query.parser.sparql.SPARQLParser)62 ExternalTupleSet (org.apache.rya.indexing.external.tupleSet.ExternalTupleSet)56 SimpleExternalTupleSet (org.apache.rya.indexing.external.tupleSet.SimpleExternalTupleSet)48 StatementPattern (org.openrdf.query.algebra.StatementPattern)33 PCJOptimizer (org.apache.rya.indexing.pcj.matching.PCJOptimizer)27 HashSet (java.util.HashSet)26 Projection (org.openrdf.query.algebra.Projection)23 Filter (org.openrdf.query.algebra.Filter)15 LeftJoin (org.openrdf.query.algebra.LeftJoin)12 Join (org.openrdf.query.algebra.Join)11 ValueExpr (org.openrdf.query.algebra.ValueExpr)11 QueryNodeConsolidator (org.apache.rya.indexing.external.matching.QueryNodeConsolidator)8 Configuration (org.apache.hadoop.conf.Configuration)7 AccumuloIndexSetProvider (org.apache.rya.indexing.pcj.matching.provider.AccumuloIndexSetProvider)7 BatchWriter (org.apache.accumulo.core.client.BatchWriter)6