
Example 1 with AccumuloSelectivityEvalDAO

Use of org.apache.rya.joinselect.AccumuloSelectivityEvalDAO in project incubator-rya by apache.

From the class EntityOptimizerTest, method testOptimizeQ2DiffPriority.

@Test
public void testOptimizeQ2DiffPriority() throws Exception {
    AccumuloSelectivityEvalDAO accc = new AccumuloSelectivityEvalDAO();
    accc.setConf(conf);
    accc.setConnector(accCon);
    accc.setRdfEvalDAO(res);
    accc.init();
    BatchWriter bw1 = accCon.createBatchWriter("rya_prospects", config);
    BatchWriter bw2 = accCon.createBatchWriter("rya_selectivity", config);
    String s1 = "predicateobject" + DELIM + "http://www.w3.org/2000/01/rdf-schema#label" + DELIM + "uri:dog";
    String s2 = "predicateobject" + DELIM + "uri:barksAt" + DELIM + "uri:cat";
    String s3 = "predicate" + DELIM + "uri:peesOn";
    String s5 = "predicateobject" + DELIM + "uri:scratches" + DELIM + "uri:ears";
    String s4 = "predicateobject" + DELIM + "uri:eats" + DELIM + "uri:chickens";
    List<Mutation> mList = new ArrayList<Mutation>();
    List<Mutation> mList2 = new ArrayList<Mutation>();
    List<String> sList = Arrays.asList("subjectobject", "subjectpredicate", "subjectsubject", "objectsubject", "objectpredicate", "objectobject");
    Mutation m1, m2, m3, m4, m5, m6;
    m1 = new Mutation(s1 + DELIM + "1");
    m1.put(new Text("count"), new Text(""), new Value("1".getBytes()));
    m2 = new Mutation(s2 + DELIM + "1");
    m2.put(new Text("count"), new Text(""), new Value("2".getBytes()));
    m3 = new Mutation(s3 + DELIM + "1");
    m3.put(new Text("count"), new Text(""), new Value("2".getBytes()));
    m4 = new Mutation(s4 + DELIM + "1");
    m4.put(new Text("count"), new Text(""), new Value("3".getBytes()));
    m5 = new Mutation(s5 + DELIM + "1");
    m5.put(new Text("count"), new Text(""), new Value("3".getBytes()));
    mList.add(m1);
    mList.add(m2);
    mList.add(m3);
    mList.add(m4);
    mList.add(m5);
    bw1.addMutations(mList);
    bw1.close();
    m1 = new Mutation(s1);
    m2 = new Mutation(s2);
    m3 = new Mutation(s3);
    m4 = new Mutation(s4);
    m5 = new Mutation(s5);
    m6 = new Mutation(new Text("subjectpredicateobject" + DELIM + "FullTableCardinality"));
    m6.put(new Text("FullTableCardinality"), new Text("100"), EMPTY_VAL);
    for (String s : sList) {
        m1.put(new Text(s), new Text(Integer.toString(2)), EMPTY_VAL);
        m2.put(new Text(s), new Text(Integer.toString(2)), EMPTY_VAL);
        m3.put(new Text(s), new Text(Integer.toString(3)), EMPTY_VAL);
        m4.put(new Text(s), new Text(Integer.toString(3)), EMPTY_VAL);
        m5.put(new Text(s), new Text(Integer.toString(3)), EMPTY_VAL);
    }
    mList2.add(m1);
    mList2.add(m2);
    mList2.add(m3);
    mList2.add(m4);
    mList2.add(m5);
    mList2.add(m6);
    bw2.addMutations(mList2);
    bw2.close();
    TupleExpr te = getTupleExpr(q5);
    EntityOptimizer cco = new EntityOptimizer(accc);
    System.out.println("Originial query is " + te);
    cco.optimize(te, null, null);
    EntityCentricVisitor ccv = new EntityCentricVisitor();
    te.visit(ccv);
    List<QueryModelNode> nodes = Lists.newArrayList(ccv.getCcNodes());
    Assert.assertEquals(2, nodes.size());
    for (QueryModelNode q : nodes) {
        if (((EntityTupleSet) q).getStarQuery().getNodes().size() == 2) {
            Assert.assertEquals("h", ((EntityTupleSet) q).getStarQuery().getCommonVarName());
        } else if (((EntityTupleSet) q).getStarQuery().getNodes().size() == 3) {
            Assert.assertEquals("m", ((EntityTupleSet) q).getStarQuery().getCommonVarName());
        } else {
            Assert.assertTrue(false);
        }
    }
    System.out.println(te);
}
Also used: EntityOptimizer (org.apache.rya.indexing.accumulo.entity.EntityOptimizer), EntityTupleSet (org.apache.rya.indexing.accumulo.entity.EntityTupleSet), ArrayList (java.util.ArrayList), AccumuloSelectivityEvalDAO (org.apache.rya.joinselect.AccumuloSelectivityEvalDAO), QueryModelNode (org.openrdf.query.algebra.QueryModelNode), Text (org.apache.hadoop.io.Text), TupleExpr (org.openrdf.query.algebra.TupleExpr), Value (org.apache.accumulo.core.data.Value), BatchWriter (org.apache.accumulo.core.client.BatchWriter), Mutation (org.apache.accumulo.core.data.Mutation), Test (org.junit.Test)
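
The tests above stage data in two helper tables that AccumuloSelectivityEvalDAO reads: rya_prospects holds per-constraint cardinality counts (column family "count", value = the cardinality), while rya_selectivity holds one column per join type ("subjectobject", "subjectpredicate", and so on, with the join cardinality as the column qualifier) plus a "FullTableCardinality" row. Below is a minimal sketch of writing one row of each kind; it assumes a live Connector and the same DELIM constant the test fixture uses, and the class and method names are illustrative only, not part of Rya.

import java.util.Arrays;
import org.apache.accumulo.core.client.BatchWriter;
import org.apache.accumulo.core.client.BatchWriterConfig;
import org.apache.accumulo.core.client.Connector;
import org.apache.accumulo.core.data.Mutation;
import org.apache.accumulo.core.data.Value;
import org.apache.hadoop.io.Text;

// Illustrative helper (not part of the Rya API): writes one cardinality row to
// rya_prospects and one selectivity row to rya_selectivity in the layout the
// tests above use, so AccumuloSelectivityEvalDAO can look them up.
public final class SelectivityTestDataWriter {

    private static final Value EMPTY_VAL = new Value(new byte[0]);

    public static void writeSampleRows(Connector conn, String delim) throws Exception {
        String constraint = "predicateobject" + delim + "uri:barksAt" + delim + "uri:cat";

        // rya_prospects: row = constraint + DELIM + "1" (the same row suffix the tests use),
        // column family "count", empty qualifier, value = cardinality of the constraint.
        BatchWriter prospects = conn.createBatchWriter("rya_prospects", new BatchWriterConfig());
        Mutation count = new Mutation(constraint + delim + "1");
        count.put(new Text("count"), new Text(""), new Value("2".getBytes()));
        prospects.addMutation(count);
        prospects.close();

        // rya_selectivity: row = constraint, column family = join type,
        // column qualifier = join cardinality, empty value. A separate
        // "FullTableCardinality" row carries the overall table cardinality.
        BatchWriter selectivity = conn.createBatchWriter("rya_selectivity", new BatchWriterConfig());
        Mutation joins = new Mutation(constraint);
        for (String joinType : Arrays.asList("subjectobject", "subjectpredicate", "subjectsubject",
                "objectsubject", "objectpredicate", "objectobject")) {
            joins.put(new Text(joinType), new Text("2"), EMPTY_VAL);
        }
        Mutation fullTable = new Mutation("subjectpredicateobject" + delim + "FullTableCardinality");
        fullTable.put(new Text("FullTableCardinality"), new Text("100"), EMPTY_VAL);
        selectivity.addMutation(joins);
        selectivity.addMutation(fullTable);
        selectivity.close();
    }
}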

Example 2 with AccumuloSelectivityEvalDAO

Use of org.apache.rya.joinselect.AccumuloSelectivityEvalDAO in project incubator-rya by apache.

From the class EntityOptimizerTest, method testOptimizeQ2DiffPriority2.

@Test
public void testOptimizeQ2DiffPriority2() throws Exception {
    AccumuloSelectivityEvalDAO accc = new AccumuloSelectivityEvalDAO();
    accc.setConf(conf);
    accc.setConnector(accCon);
    accc.setRdfEvalDAO(res);
    accc.init();
    BatchWriter bw1 = accCon.createBatchWriter("rya_prospects", config);
    BatchWriter bw2 = accCon.createBatchWriter("rya_selectivity", config);
    String s1 = "predicateobject" + DELIM + "http://www.w3.org/2000/01/rdf-schema#label" + DELIM + "uri:dog";
    String s2 = "predicateobject" + DELIM + "uri:barksAt" + DELIM + "uri:cat";
    String s3 = "predicate" + DELIM + "uri:peesOn";
    String s5 = "predicateobject" + DELIM + "uri:scratches" + DELIM + "uri:ears";
    String s4 = "predicateobject" + DELIM + "uri:eats" + DELIM + "uri:chickens";
    List<Mutation> mList = new ArrayList<Mutation>();
    List<Mutation> mList2 = new ArrayList<Mutation>();
    List<String> sList = Arrays.asList("subjectobject", "subjectpredicate", "subjectsubject", "objectsubject", "objectpredicate", "objectobject");
    Mutation m1, m2, m3, m4, m5, m6;
    m1 = new Mutation(s1 + DELIM + "1");
    m1.put(new Text("count"), new Text(""), new Value("2".getBytes()));
    m2 = new Mutation(s2 + DELIM + "1");
    m2.put(new Text("count"), new Text(""), new Value("2".getBytes()));
    m3 = new Mutation(s3 + DELIM + "1");
    m3.put(new Text("count"), new Text(""), new Value("2".getBytes()));
    m4 = new Mutation(s4 + DELIM + "1");
    m4.put(new Text("count"), new Text(""), new Value("2".getBytes()));
    m5 = new Mutation(s5 + DELIM + "1");
    m5.put(new Text("count"), new Text(""), new Value("2".getBytes()));
    mList.add(m1);
    mList.add(m2);
    mList.add(m3);
    mList.add(m4);
    mList.add(m5);
    bw1.addMutations(mList);
    bw1.close();
    m1 = new Mutation(s1);
    m2 = new Mutation(s2);
    m3 = new Mutation(s3);
    m4 = new Mutation(s4);
    m5 = new Mutation(s5);
    m6 = new Mutation(new Text("subjectpredicateobject" + DELIM + "FullTableCardinality"));
    m6.put(new Text("FullTableCardinality"), new Text("100"), EMPTY_VAL);
    for (String s : sList) {
        m1.put(new Text(s), new Text(Integer.toString(2)), EMPTY_VAL);
        m2.put(new Text(s), new Text(Integer.toString(2)), EMPTY_VAL);
        m3.put(new Text(s), new Text(Integer.toString(2)), EMPTY_VAL);
        m4.put(new Text(s), new Text(Integer.toString(2)), EMPTY_VAL);
        m5.put(new Text(s), new Text(Integer.toString(1)), EMPTY_VAL);
    }
    mList2.add(m1);
    mList2.add(m2);
    mList2.add(m3);
    mList2.add(m4);
    mList2.add(m5);
    mList2.add(m6);
    bw2.addMutations(mList2);
    bw2.close();
    TupleExpr te = getTupleExpr(q5);
    EntityOptimizer cco = new EntityOptimizer(accc);
    System.out.println("Originial query is " + te);
    cco.optimize(te, null, null);
    EntityCentricVisitor ccv = new EntityCentricVisitor();
    te.visit(ccv);
    List<QueryModelNode> nodes = Lists.newArrayList(ccv.getCcNodes());
    Assert.assertEquals(2, nodes.size());
    for (QueryModelNode q : nodes) {
        if (((EntityTupleSet) q).getStarQuery().getNodes().size() == 2) {
            Assert.assertEquals("m", ((EntityTupleSet) q).getStarQuery().getCommonVarName());
        } else if (((EntityTupleSet) q).getStarQuery().getNodes().size() == 3) {
            Assert.assertEquals("h", ((EntityTupleSet) q).getStarQuery().getCommonVarName());
        } else {
            Assert.assertTrue(false);
        }
    }
    System.out.println(te);
}
Also used: EntityOptimizer (org.apache.rya.indexing.accumulo.entity.EntityOptimizer), EntityTupleSet (org.apache.rya.indexing.accumulo.entity.EntityTupleSet), ArrayList (java.util.ArrayList), AccumuloSelectivityEvalDAO (org.apache.rya.joinselect.AccumuloSelectivityEvalDAO), QueryModelNode (org.openrdf.query.algebra.QueryModelNode), Text (org.apache.hadoop.io.Text), TupleExpr (org.openrdf.query.algebra.TupleExpr), Value (org.apache.accumulo.core.data.Value), BatchWriter (org.apache.accumulo.core.client.BatchWriter), Mutation (org.apache.accumulo.core.data.Mutation), Test (org.junit.Test)

Example 3 with AccumuloSelectivityEvalDAO

Use of org.apache.rya.joinselect.AccumuloSelectivityEvalDAO in project incubator-rya by apache.

From the class EntityOptimizerTest, method testOptimizeQ1SamePriority.

@Test
public void testOptimizeQ1SamePriority() throws Exception {
    AccumuloSelectivityEvalDAO accc = new AccumuloSelectivityEvalDAO();
    accc.setConf(conf);
    accc.setConnector(accCon);
    accc.setRdfEvalDAO(res);
    accc.init();
    BatchWriter bw1 = accCon.createBatchWriter("rya_prospects", config);
    BatchWriter bw2 = accCon.createBatchWriter("rya_selectivity", config);
    String s1 = "predicateobject" + DELIM + "http://www.w3.org/2000/01/rdf-schema#label" + DELIM + "uri:dog";
    String s2 = "predicateobject" + DELIM + "uri:barksAt" + DELIM + "uri:cat";
    String s3 = "predicateobject" + DELIM + "uri:peesOn" + DELIM + "uri:hydrant";
    List<Mutation> mList = new ArrayList<Mutation>();
    List<Mutation> mList2 = new ArrayList<Mutation>();
    List<String> sList = Arrays.asList("subjectobject", "subjectpredicate", "subjectsubject");
    Mutation m1, m2, m3, m4;
    m1 = new Mutation(s1 + DELIM + "1");
    m1.put(new Text("count"), new Text(""), new Value("1".getBytes()));
    m2 = new Mutation(s2 + DELIM + "1");
    m2.put(new Text("count"), new Text(""), new Value("1".getBytes()));
    m3 = new Mutation(s3 + DELIM + "1");
    m3.put(new Text("count"), new Text(""), new Value("1".getBytes()));
    mList.add(m1);
    mList.add(m2);
    mList.add(m3);
    bw1.addMutations(mList);
    bw1.close();
    m1 = new Mutation(s1);
    m2 = new Mutation(s2);
    m3 = new Mutation(s3);
    m4 = new Mutation(new Text("subjectpredicateobject" + DELIM + "FullTableCardinality"));
    m4.put(new Text("FullTableCardinality"), new Text("100"), EMPTY_VAL);
    for (String s : sList) {
        m1.put(new Text(s), new Text(Integer.toString(1)), EMPTY_VAL);
        m2.put(new Text(s), new Text(Integer.toString(1)), EMPTY_VAL);
        m3.put(new Text(s), new Text(Integer.toString(1)), EMPTY_VAL);
    }
    mList2.add(m1);
    mList2.add(m2);
    mList2.add(m3);
    mList2.add(m4);
    bw2.addMutations(mList2);
    bw2.close();
    TupleExpr te = getTupleExpr(q1);
    EntityOptimizer cco = new EntityOptimizer(accc);
    System.out.println("Originial query is " + te);
    cco.optimize(te, null, null);
    EntityCentricVisitor ccv = new EntityCentricVisitor();
    te.visit(ccv);
    Assert.assertEquals(1, ccv.getCcNodes().size());
    System.out.println(te);
}
Also used: EntityOptimizer (org.apache.rya.indexing.accumulo.entity.EntityOptimizer), ArrayList (java.util.ArrayList), Value (org.apache.accumulo.core.data.Value), AccumuloSelectivityEvalDAO (org.apache.rya.joinselect.AccumuloSelectivityEvalDAO), Text (org.apache.hadoop.io.Text), BatchWriter (org.apache.accumulo.core.client.BatchWriter), Mutation (org.apache.accumulo.core.data.Mutation), TupleExpr (org.openrdf.query.algebra.TupleExpr), Test (org.junit.Test)

Example 4 with AccumuloSelectivityEvalDAO

Use of org.apache.rya.joinselect.AccumuloSelectivityEvalDAO in project incubator-rya by apache.

From the class EntityOptimizerTest, method testOptimizeQ6DiffPriority.

@Test
public void testOptimizeQ6DiffPriority() throws Exception {
    AccumuloSelectivityEvalDAO accc = new AccumuloSelectivityEvalDAO();
    accc.setConf(conf);
    accc.setConnector(accCon);
    accc.setRdfEvalDAO(res);
    accc.init();
    BatchWriter bw1 = accCon.createBatchWriter("rya_prospects", config);
    BatchWriter bw2 = accCon.createBatchWriter("rya_selectivity", config);
    String s1 = "predicateobject" + DELIM + "http://www.w3.org/2000/01/rdf-schema#label" + DELIM + "uri:dog";
    String s2 = "predicateobject" + DELIM + "uri:barksAt" + DELIM + "uri:cat";
    String s3 = "predicateobject" + DELIM + "uri:peesOn" + DELIM + "uri:hydrant";
    String s5 = "predicateobject" + DELIM + "uri:scratches" + DELIM + "uri:ears";
    String s4 = "predicateobject" + DELIM + "uri:eats" + DELIM + "uri:chickens";
    String s6 = "predicateobject" + DELIM + "uri:eats" + DELIM + "uri:kibble";
    String s7 = "predicateobject" + DELIM + "uri:rollsIn" + DELIM + "uri:mud";
    String s8 = "predicateobject" + DELIM + "uri:runsIn" + DELIM + "uri:field";
    String s9 = "predicate" + DELIM + "uri:smells";
    String s10 = "predicateobject" + DELIM + "uri:eats" + DELIM + "uri:sticks";
    String s11 = "predicate" + DELIM + "uri:watches";
    List<Mutation> mList = new ArrayList<Mutation>();
    List<Mutation> mList2 = new ArrayList<Mutation>();
    List<String> sList = Arrays.asList("subjectobject", "subjectpredicate", "subjectsubject", "objectsubject", "objectpredicate", "objectobject");
    Mutation m1, m2, m3, m4, m5, m6, m7, m8, m9, m10, m11, m12;
    m1 = new Mutation(s1 + DELIM + "1");
    m1.put(new Text("count"), new Text(""), new Value("2".getBytes()));
    m2 = new Mutation(s2 + DELIM + "1");
    m2.put(new Text("count"), new Text(""), new Value("2".getBytes()));
    m3 = new Mutation(s3 + DELIM + "1");
    m3.put(new Text("count"), new Text(""), new Value("2".getBytes()));
    m4 = new Mutation(s4 + DELIM + "1");
    m4.put(new Text("count"), new Text(""), new Value("2".getBytes()));
    m5 = new Mutation(s5 + DELIM + "1");
    m5.put(new Text("count"), new Text(""), new Value("2".getBytes()));
    m6 = new Mutation(s6 + DELIM + "1");
    m6.put(new Text("count"), new Text(""), new Value("1".getBytes()));
    m7 = new Mutation(s7 + DELIM + "1");
    m7.put(new Text("count"), new Text(""), new Value("2".getBytes()));
    m8 = new Mutation(s8 + DELIM + "1");
    m8.put(new Text("count"), new Text(""), new Value("2".getBytes()));
    m9 = new Mutation(s9 + DELIM + "1");
    m9.put(new Text("count"), new Text(""), new Value("2".getBytes()));
    m10 = new Mutation(s10 + DELIM + "1");
    m10.put(new Text("count"), new Text(""), new Value("1".getBytes()));
    m11 = new Mutation(s11 + DELIM + "1");
    m11.put(new Text("count"), new Text(""), new Value("2".getBytes()));
    mList.add(m1);
    mList.add(m2);
    mList.add(m3);
    mList.add(m4);
    mList.add(m5);
    mList.add(m6);
    mList.add(m7);
    mList.add(m8);
    mList.add(m9);
    mList.add(m10);
    mList.add(m11);
    bw1.addMutations(mList);
    bw1.close();
    m1 = new Mutation(s1);
    m2 = new Mutation(s2);
    m3 = new Mutation(s3);
    m4 = new Mutation(s4);
    m5 = new Mutation(s5);
    m6 = new Mutation(s6);
    m7 = new Mutation(s7);
    m8 = new Mutation(s8);
    m9 = new Mutation(s9);
    m10 = new Mutation(s10);
    m11 = new Mutation(s11);
    m12 = new Mutation(new Text("subjectpredicateobject" + DELIM + "FullTableCardinality"));
    m12.put(new Text("FullTableCardinality"), new Text("100"), EMPTY_VAL);
    for (String s : sList) {
        m1.put(new Text(s), new Text(Integer.toString(1)), EMPTY_VAL);
        m2.put(new Text(s), new Text(Integer.toString(1)), EMPTY_VAL);
        m3.put(new Text(s), new Text(Integer.toString(1)), EMPTY_VAL);
        m4.put(new Text(s), new Text(Integer.toString(1)), EMPTY_VAL);
        m5.put(new Text(s), new Text(Integer.toString(1)), EMPTY_VAL);
        m6.put(new Text(s), new Text(Integer.toString(1)), EMPTY_VAL);
        m7.put(new Text(s), new Text(Integer.toString(2)), EMPTY_VAL);
        m8.put(new Text(s), new Text(Integer.toString(2)), EMPTY_VAL);
        m9.put(new Text(s), new Text(Integer.toString(2)), EMPTY_VAL);
        m10.put(new Text(s), new Text(Integer.toString(1)), EMPTY_VAL);
        m11.put(new Text(s), new Text(Integer.toString(2)), EMPTY_VAL);
    }
    mList2.add(m1);
    mList2.add(m2);
    mList2.add(m3);
    mList2.add(m4);
    mList2.add(m5);
    mList2.add(m6);
    mList2.add(m7);
    mList2.add(m8);
    mList2.add(m9);
    mList2.add(m10);
    mList2.add(m11);
    mList2.add(m12);
    bw2.addMutations(mList2);
    bw2.close();
    TupleExpr te = getTupleExpr(q6);
    EntityOptimizer cco = new EntityOptimizer(accc);
    System.out.println("Originial query is " + te);
    cco.optimize(te, null, null);
    EntityCentricVisitor ccv = new EntityCentricVisitor();
    te.visit(ccv);
    List<QueryModelNode> nodes = Lists.newArrayList(ccv.getCcNodes());
    Assert.assertEquals(3, nodes.size());
    List<String> cVarList = Lists.newArrayList();
    cVarList.add("i");
    cVarList.add("m");
    for (QueryModelNode q : nodes) {
        if (((EntityTupleSet) q).getStarQuery().getNodes().size() == 2) {
            String s = ((EntityTupleSet) q).getStarQuery().getCommonVarName();
            System.out.println("node is " + q + " and common var is " + s);
            System.out.println("star query is " + ((EntityTupleSet) q).getStarQuery());
            Assert.assertTrue(cVarList.contains(s));
            cVarList.remove(s);
        } else if (((EntityTupleSet) q).getStarQuery().getNodes().size() == 3) {
            Assert.assertEquals("h", ((EntityTupleSet) q).getStarQuery().getCommonVarName());
        } else {
            Assert.assertTrue(false);
        }
    }
    System.out.println(te);
}
Also used: EntityOptimizer (org.apache.rya.indexing.accumulo.entity.EntityOptimizer), EntityTupleSet (org.apache.rya.indexing.accumulo.entity.EntityTupleSet), ArrayList (java.util.ArrayList), AccumuloSelectivityEvalDAO (org.apache.rya.joinselect.AccumuloSelectivityEvalDAO), QueryModelNode (org.openrdf.query.algebra.QueryModelNode), Text (org.apache.hadoop.io.Text), TupleExpr (org.openrdf.query.algebra.TupleExpr), Value (org.apache.accumulo.core.data.Value), BatchWriter (org.apache.accumulo.core.client.BatchWriter), Mutation (org.apache.accumulo.core.data.Mutation), Test (org.junit.Test)

Example 5 with AccumuloSelectivityEvalDAO

Use of org.apache.rya.joinselect.AccumuloSelectivityEvalDAO in project incubator-rya by apache.

From the class EntityOptimizer, method setConf.

@Override
public void setConf(Configuration conf) {
    if (conf instanceof RdfCloudTripleStoreConfiguration) {
        this.conf = (RdfCloudTripleStoreConfiguration) conf;
    } else {
        this.conf = new AccumuloRdfConfiguration(conf);
    }
    if (!isEvalDaoSet) {
        if (this.conf.isUseStats() && this.conf.isUseSelectivity()) {
            try {
                eval = new AccumuloSelectivityEvalDAO(this.conf, ConfigUtils.getConnector(this.conf));
                ((AccumuloSelectivityEvalDAO) eval).setRdfEvalDAO(new ProspectorServiceEvalStatsDAO(ConfigUtils.getConnector(this.conf), this.conf));
                eval.init();
            } catch (final AccumuloException | AccumuloSecurityException e) {
                LOG.warn("A problem was encountered while setting the Configuration for the EntityOptimizer.", e);
            }
            isEvalDaoSet = true;
        } else {
            eval = null;
            isEvalDaoSet = true;
        }
    }
}
Also used: AccumuloException (org.apache.accumulo.core.client.AccumuloException), ProspectorServiceEvalStatsDAO (org.apache.rya.prospector.service.ProspectorServiceEvalStatsDAO), AccumuloSelectivityEvalDAO (org.apache.rya.joinselect.AccumuloSelectivityEvalDAO), AccumuloSecurityException (org.apache.accumulo.core.client.AccumuloSecurityException), RdfCloudTripleStoreConfiguration (org.apache.rya.api.RdfCloudTripleStoreConfiguration), AccumuloRdfConfiguration (org.apache.rya.accumulo.AccumuloRdfConfiguration)
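
Putting these examples together, the following is a minimal standalone sketch of wiring an AccumuloSelectivityEvalDAO by hand (mirroring setConf above) and handing it to an EntityOptimizer. The ConfigUtils import path, the example SPARQL string, and the class name EntityOptimizerExample are assumptions for illustration; the configuration is expected to already carry the Accumulo connection details, and the tests above obtain their TupleExpr through a getTupleExpr helper rather than parsing directly.

import org.apache.accumulo.core.client.Connector;
import org.apache.rya.accumulo.AccumuloRdfConfiguration;
import org.apache.rya.indexing.accumulo.ConfigUtils;
import org.apache.rya.indexing.accumulo.entity.EntityOptimizer;
import org.apache.rya.joinselect.AccumuloSelectivityEvalDAO;
import org.apache.rya.prospector.service.ProspectorServiceEvalStatsDAO;
import org.openrdf.query.algebra.TupleExpr;
import org.openrdf.query.parser.sparql.SPARQLParser;

public class EntityOptimizerExample {

    public static void main(String[] args) throws Exception {
        // Accumulo connection settings (instance, zookeepers, credentials) are assumed
        // to already be present in the configuration; ConfigUtils builds the Connector from them.
        AccumuloRdfConfiguration conf = new AccumuloRdfConfiguration();
        Connector connector = ConfigUtils.getConnector(conf);

        // Wire the selectivity DAO the same way EntityOptimizer.setConf does above.
        AccumuloSelectivityEvalDAO selectivityDao = new AccumuloSelectivityEvalDAO(conf, connector);
        selectivityDao.setRdfEvalDAO(new ProspectorServiceEvalStatsDAO(connector, conf));
        selectivityDao.init();

        // Parse a SPARQL query into a TupleExpr and let the optimizer regroup its
        // statement patterns into entity-centric star queries using the stored statistics.
        String sparql = "SELECT ?h WHERE { ?h <uri:barksAt> <uri:cat> . ?h <uri:peesOn> <uri:hydrant> . }";
        TupleExpr te = new SPARQLParser().parseQuery(sparql, null).getTupleExpr();

        EntityOptimizer optimizer = new EntityOptimizer(selectivityDao);
        optimizer.optimize(te, null, null);
        System.out.println(te);
    }
}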

Aggregations

AccumuloSelectivityEvalDAO (org.apache.rya.joinselect.AccumuloSelectivityEvalDAO): 25 usages
Test (org.junit.Test): 23 usages
ArrayList (java.util.ArrayList): 22 usages
BatchWriter (org.apache.accumulo.core.client.BatchWriter): 22 usages
Mutation (org.apache.accumulo.core.data.Mutation): 22 usages
Value (org.apache.accumulo.core.data.Value): 22 usages
Text (org.apache.hadoop.io.Text): 22 usages
TupleExpr (org.openrdf.query.algebra.TupleExpr): 17 usages
ProspectorServiceEvalStatsDAO (org.apache.rya.prospector.service.ProspectorServiceEvalStatsDAO): 10 usages
RdfCloudTripleStoreConfiguration (org.apache.rya.api.RdfCloudTripleStoreConfiguration): 9 usages
EntityOptimizer (org.apache.rya.indexing.accumulo.entity.EntityOptimizer): 9 usages
Map (java.util.Map): 7 usages
Scanner (org.apache.accumulo.core.client.Scanner): 7 usages
Key (org.apache.accumulo.core.data.Key): 7 usages
Range (org.apache.accumulo.core.data.Range): 7 usages
Authorizations (org.apache.accumulo.core.security.Authorizations): 7 usages
EntityTupleSet (org.apache.rya.indexing.accumulo.entity.EntityTupleSet): 6 usages
QueryJoinSelectOptimizer (org.apache.rya.rdftriplestore.evaluation.QueryJoinSelectOptimizer): 6 usages
RdfCloudTripleStoreSelectivityEvaluationStatistics (org.apache.rya.rdftriplestore.evaluation.RdfCloudTripleStoreSelectivityEvaluationStatistics): 6 usages
QueryModelNode (org.openrdf.query.algebra.QueryModelNode): 6 usages