Usage example of org.openrdf.query.algebra.evaluation.impl.FilterOptimizer in the Apache incubator-rya project.
From class RdfCloudTripleStoreConnection, method evaluateInternal:
/**
 * Evaluates the supplied query model against the Rya store.
 * <p>
 * The incoming {@link TupleExpr} is cloned, tuned according to any
 * per-query configuration overrides carried in {@code bindings}, run through
 * a fixed pipeline of query optimizers (plus any optimizers configured on the
 * store), optionally expanded for inference, and finally handed to a
 * {@link ParallelEvaluationStrategyImpl} for execution.
 *
 * @param tupleExpr the parsed query model; never mutated (a clone is optimized)
 * @param dataset   the dataset to evaluate against, may be {@code null}
 * @param bindings  externally supplied bindings; bindings whose names match
 *                  {@code RdfCloudTripleStoreConfiguration.CONF_*} keys act as
 *                  per-query configuration overrides
 * @param flag      include-inferred flag from the Sail API (unused here;
 *                  inference is driven by {@code queryConf.isInfer()} instead)
 * @return a CloseableIteration over result binding sets; closing it also shuts
 *         down the evaluation strategy
 * @throws SailException if the configuration cannot be cloned or evaluation fails
 */
@Override
protected CloseableIteration<? extends BindingSet, QueryEvaluationException> evaluateInternal(TupleExpr tupleExpr, final Dataset dataset, BindingSet bindings, final boolean flag) throws SailException {
    verifyIsOpen();
    logger.trace("Incoming query model:\n{}", tupleExpr.toString());
    if (provenanceCollector != null) {
        try {
            provenanceCollector.recordQuery(tupleExpr.toString());
        } catch (final ProvenanceCollectionException e) {
            // Provenance capture is best-effort; a recording failure must not abort the query.
            logger.trace("Provenance failed to record query.", e);
        }
    }
    // Clone so the optimizer pipeline below cannot mutate the caller's model.
    tupleExpr = tupleExpr.clone();
    final C queryConf = (C) store.getConf().clone();
    if (queryConf == null) {
        // Should not happen, but this is better than a null dereference error.
        throw new SailException("Cloning store.getConf() returned null, aborting.");
    }
    if (bindings != null) {
        // Specially named bindings override the cloned configuration for this query only.
        final Binding dispPlan = bindings.getBinding(RdfCloudTripleStoreConfiguration.CONF_QUERYPLAN_FLAG);
        if (dispPlan != null) {
            queryConf.setDisplayQueryPlan(Boolean.parseBoolean(dispPlan.getValue().stringValue()));
        }
        final Binding authBinding = bindings.getBinding(RdfCloudTripleStoreConfiguration.CONF_QUERY_AUTH);
        if (authBinding != null) {
            queryConf.setAuths(authBinding.getValue().stringValue().split(","));
        }
        final Binding ttlBinding = bindings.getBinding(RdfCloudTripleStoreConfiguration.CONF_TTL);
        if (ttlBinding != null) {
            queryConf.setTtl(Long.valueOf(ttlBinding.getValue().stringValue()));
        }
        final Binding startTimeBinding = bindings.getBinding(RdfCloudTripleStoreConfiguration.CONF_STARTTIME);
        if (startTimeBinding != null) {
            queryConf.setStartTime(Long.valueOf(startTimeBinding.getValue().stringValue()));
        }
        final Binding performantBinding = bindings.getBinding(RdfCloudTripleStoreConfiguration.CONF_PERFORMANT);
        if (performantBinding != null) {
            queryConf.setBoolean(RdfCloudTripleStoreConfiguration.CONF_PERFORMANT, Boolean.parseBoolean(performantBinding.getValue().stringValue()));
        }
        final Binding inferBinding = bindings.getBinding(RdfCloudTripleStoreConfiguration.CONF_INFER);
        if (inferBinding != null) {
            queryConf.setInfer(Boolean.parseBoolean(inferBinding.getValue().stringValue()));
        }
        final Binding useStatsBinding = bindings.getBinding(RdfCloudTripleStoreConfiguration.CONF_USE_STATS);
        if (useStatsBinding != null) {
            queryConf.setUseStats(Boolean.parseBoolean(useStatsBinding.getValue().stringValue()));
        }
        final Binding offsetBinding = bindings.getBinding(RdfCloudTripleStoreConfiguration.CONF_OFFSET);
        if (offsetBinding != null) {
            queryConf.setOffset(Long.parseLong(offsetBinding.getValue().stringValue()));
        }
        final Binding limitBinding = bindings.getBinding(RdfCloudTripleStoreConfiguration.CONF_LIMIT);
        if (limitBinding != null) {
            queryConf.setLimit(Long.parseLong(limitBinding.getValue().stringValue()));
        }
    } else {
        bindings = new QueryBindingSet();
    }
    // Optimizers below expect a QueryRoot wrapper so they can replace the top node.
    if (!(tupleExpr instanceof QueryRoot)) {
        tupleExpr = new QueryRoot(tupleExpr);
    }
    try {
        final List<Class<QueryOptimizer>> optimizers = queryConf.getOptimizers();
        final Class<QueryOptimizer> pcjOptimizer = queryConf.getPcjOptimizer();
        if (pcjOptimizer != null) {
            QueryOptimizer opt = null;
            try {
                final Constructor<QueryOptimizer> construct = pcjOptimizer.getDeclaredConstructor(new Class[] {});
                opt = construct.newInstance();
            } catch (final Exception e) {
                // Log instead of silently swallowing; the null check below still
                // converts a total failure into NoSuchMethodException.
                logger.debug("Could not instantiate PCJ optimizer {} via its no-arg constructor.", pcjOptimizer.getName(), e);
            }
            if (opt == null) {
                throw new NoSuchMethodException("Could not find valid constructor for " + pcjOptimizer.getName());
            }
            if (opt instanceof Configurable) {
                ((Configurable) opt).setConf(conf);
            }
            opt.optimize(tupleExpr, dataset, bindings);
        }
        final ParallelEvaluationStrategyImpl strategy = new ParallelEvaluationStrategyImpl(new StoreTripleSource<C>(queryConf, ryaDAO), inferenceEngine, dataset, queryConf);
        // Standard Sesame optimizer pipeline, in a deliberate order.
        (new BindingAssigner()).optimize(tupleExpr, dataset, bindings);
        (new ConstantOptimizer(strategy)).optimize(tupleExpr, dataset, bindings);
        (new CompareOptimizer()).optimize(tupleExpr, dataset, bindings);
        (new ConjunctiveConstraintSplitter()).optimize(tupleExpr, dataset, bindings);
        (new DisjunctiveConstraintOptimizer()).optimize(tupleExpr, dataset, bindings);
        (new SameTermFilterOptimizer()).optimize(tupleExpr, dataset, bindings);
        (new QueryModelNormalizer()).optimize(tupleExpr, dataset, bindings);
        (new IterativeEvaluationOptimizer()).optimize(tupleExpr, dataset, bindings);
        if (!optimizers.isEmpty()) {
            for (final Class<QueryOptimizer> optclz : optimizers) {
                QueryOptimizer result = null;
                try {
                    final Constructor<QueryOptimizer> meth = optclz.getDeclaredConstructor(new Class[] {});
                    result = meth.newInstance();
                } catch (final Exception e) {
                    logger.trace("{} has no usable no-arg constructor.", optclz.getName(), e);
                }
                try {
                    // An EvaluationStrategy-based constructor, when present, takes
                    // precedence over the no-arg instance created above.
                    final Constructor<QueryOptimizer> meth = optclz.getDeclaredConstructor(EvaluationStrategy.class);
                    result = meth.newInstance(strategy);
                } catch (final Exception e) {
                    logger.trace("{} has no usable EvaluationStrategy constructor.", optclz.getName(), e);
                }
                if (result == null) {
                    throw new NoSuchMethodException("Could not find valid constructor for " + optclz.getName());
                }
                if (result instanceof Configurable) {
                    ((Configurable) result).setConf(conf);
                }
                result.optimize(tupleExpr, dataset, bindings);
            }
        }
        (new FilterOptimizer()).optimize(tupleExpr, dataset, bindings);
        (new OrderLimitOptimizer()).optimize(tupleExpr, dataset, bindings);
        logger.trace("Optimized query model:\n{}", tupleExpr.toString());
        if (queryConf.isInfer() && this.inferenceEngine != null) {
            try {
                // Each visitor rewrites the model to account for one OWL/RDFS construct.
                tupleExpr.visit(new DomainRangeVisitor(queryConf, inferenceEngine));
                tupleExpr.visit(new SomeValuesFromVisitor(queryConf, inferenceEngine));
                tupleExpr.visit(new AllValuesFromVisitor(queryConf, inferenceEngine));
                tupleExpr.visit(new HasValueVisitor(queryConf, inferenceEngine));
                tupleExpr.visit(new IntersectionOfVisitor(queryConf, inferenceEngine));
                tupleExpr.visit(new ReflexivePropertyVisitor(queryConf, inferenceEngine));
                tupleExpr.visit(new PropertyChainVisitor(queryConf, inferenceEngine));
                tupleExpr.visit(new TransitivePropertyVisitor(queryConf, inferenceEngine));
                tupleExpr.visit(new SymmetricPropertyVisitor(queryConf, inferenceEngine));
                tupleExpr.visit(new InverseOfVisitor(queryConf, inferenceEngine));
                tupleExpr.visit(new SubPropertyOfVisitor(queryConf, inferenceEngine));
                tupleExpr.visit(new SubClassOfVisitor(queryConf, inferenceEngine));
                tupleExpr.visit(new SameAsVisitor(queryConf, inferenceEngine));
                tupleExpr.visit(new OneOfVisitor(queryConf, inferenceEngine));
                tupleExpr.visit(new HasSelfVisitor(queryConf, inferenceEngine));
            } catch (final Exception e) {
                // Inference expansion is best-effort; evaluation proceeds on the
                // partially expanded model.
                logger.error("Error encountered while visiting query node.", e);
            }
        }
        if (queryConf.isPerformant()) {
            tupleExpr.visit(new SeparateFilterJoinsVisitor());
            // tupleExpr.visit(new FilterTimeIndexVisitor(queryConf));
            // tupleExpr.visit(new PartitionFilterTimeIndexVisitor(queryConf));
        }
        final FilterRangeVisitor rangeVisitor = new FilterRangeVisitor(queryConf);
        tupleExpr.visit(rangeVisitor);
        // this has to be done twice to get replace the statementpatterns with the right ranges
        tupleExpr.visit(rangeVisitor);
        // Pick an EvaluationStatistics source for join ordering.
        EvaluationStatistics stats = null;
        if ((!queryConf.isUseStats() && queryConf.isPerformant()) || rdfEvalStatsDAO == null) {
            stats = new DefaultStatistics();
        } else if (queryConf.isUseStats()) {
            if (queryConf.isUseSelectivity()) {
                stats = new RdfCloudTripleStoreSelectivityEvaluationStatistics<C>(queryConf, rdfEvalStatsDAO, selectEvalDAO);
            } else {
                stats = new RdfCloudTripleStoreEvaluationStatistics<C>(queryConf, rdfEvalStatsDAO);
            }
        }
        if (stats != null) {
            if (stats instanceof RdfCloudTripleStoreSelectivityEvaluationStatistics) {
                final QueryJoinSelectOptimizer qjso = new QueryJoinSelectOptimizer(stats, selectEvalDAO);
                qjso.optimize(tupleExpr, dataset, bindings);
            } else {
                final QueryJoinOptimizer qjo = new QueryJoinOptimizer(stats);
                // TODO: Make pluggable
                qjo.optimize(tupleExpr, dataset, bindings);
            }
        }
        final CloseableIteration<BindingSet, QueryEvaluationException> iter = strategy.evaluate(tupleExpr, EmptyBindingSet.getInstance());
        // Wrap the iteration so that closing it also tears down the strategy's
        // worker threads; otherwise they would leak per query.
        final CloseableIteration<BindingSet, QueryEvaluationException> iterWrap = new CloseableIteration<BindingSet, QueryEvaluationException>() {

            @Override
            public void remove() throws QueryEvaluationException {
                iter.remove();
            }

            @Override
            public BindingSet next() throws QueryEvaluationException {
                return iter.next();
            }

            @Override
            public boolean hasNext() throws QueryEvaluationException {
                return iter.hasNext();
            }

            @Override
            public void close() throws QueryEvaluationException {
                iter.close();
                strategy.shutdown();
            }
        };
        return iterWrap;
    } catch (final QueryEvaluationException e) {
        throw new SailException(e);
    } catch (final Exception e) {
        throw new SailException(e);
    }
}
Usage example of org.openrdf.query.algebra.evaluation.impl.FilterOptimizer in the Apache incubator-rya project.
From class QueryJoinSelectOptimizerTest, method testOptimizeQ6:
/**
 * Seeds the {@code rya_prospects} and {@code rya_selectivity} tables with
 * synthetic cardinality/selectivity entries, then runs query {@code q6}
 * through {@link QueryJoinSelectOptimizer} and {@link FilterOptimizer} in
 * both orders, printing the resulting plans for manual comparison.
 * No assertions are made; this is an exploratory/diagnostic test.
 */
@Test
public void testOptimizeQ6() throws Exception {
    RdfEvalStatsDAO<RdfCloudTripleStoreConfiguration> res = new ProspectorServiceEvalStatsDAO(conn, arc);
    AccumuloSelectivityEvalDAO accc = new AccumuloSelectivityEvalDAO();
    accc.setConf(arc);
    accc.setConnector(conn);
    accc.setRdfEvalDAO(res);
    accc.init();
    BatchWriter bw1 = conn.createBatchWriter("rya_prospects", config);
    BatchWriter bw2 = conn.createBatchWriter("rya_selectivity", config);
    // Row keys for the prospects (cardinality) table: joinType DELIM values...
    String s1 = "predicateobject" + DELIM + "http://www.w3.org/2000/01/rdf-schema#label" + DELIM + "uri:dog";
    String s2 = "predicateobject" + DELIM + "uri:barksAt" + DELIM + "uri:cat";
    String s3 = "predicateobject" + DELIM + "uri:peesOn" + DELIM + "uri:hydrant";
    String s5 = "predicateobject" + DELIM + "uri:watches" + DELIM + "uri:television";
    String s4 = "predicateobject" + DELIM + "uri:eats" + DELIM + "uri:chickens";
    String s6 = "predicateobject" + DELIM + "uri:eats" + DELIM + "uri:kibble";
    String s7 = "predicateobject" + DELIM + "uri:rollsIn" + DELIM + "uri:mud";
    String s8 = "predicateobject" + DELIM + "uri:runsIn" + DELIM + "uri:field";
    String s9 = "predicateobject" + DELIM + "uri:smells" + DELIM + "uri:butt";
    String s10 = "predicateobject" + DELIM + "uri:eats" + DELIM + "uri:sticks";
    List<Mutation> mList = new ArrayList<Mutation>();
    List<Mutation> mList2 = new ArrayList<Mutation>();
    // Join-type column families used by the selectivity table.
    List<String> sList = Arrays.asList("subjectobject", "subjectpredicate", "subjectsubject", "predicateobject", "predicatepredicate", "predicatesubject");
    Mutation m1, m2, m3, m4, m5, m6, m7, m8, m9, m10, m11;
    // Prospects entries: row is "<key> DELIM <timestamp>", value is the count.
    m1 = new Mutation(s1 + DELIM + "3");
    m1.put(new Text("count"), new Text(""), new Value("5".getBytes()));
    m2 = new Mutation(s2 + DELIM + "2");
    m2.put(new Text("count"), new Text(""), new Value("3".getBytes()));
    m3 = new Mutation(s3 + DELIM + "1");
    m3.put(new Text("count"), new Text(""), new Value("2".getBytes()));
    m4 = new Mutation(s4 + DELIM + "1");
    m4.put(new Text("count"), new Text(""), new Value("0".getBytes()));
    m5 = new Mutation(s5 + DELIM + "1");
    m5.put(new Text("count"), new Text(""), new Value("1".getBytes()));
    m6 = new Mutation(s6 + DELIM + "1");
    m6.put(new Text("count"), new Text(""), new Value("3".getBytes()));
    m7 = new Mutation(s7 + DELIM + "1");
    m7.put(new Text("count"), new Text(""), new Value("2".getBytes()));
    m8 = new Mutation(s8 + DELIM + "1");
    m8.put(new Text("count"), new Text(""), new Value("3".getBytes()));
    m9 = new Mutation(s9 + DELIM + "1");
    m9.put(new Text("count"), new Text(""), new Value("1".getBytes()));
    m10 = new Mutation(s10 + DELIM + "1");
    m10.put(new Text("count"), new Text(""), new Value("1".getBytes()));
    mList.add(m1);
    mList.add(m2);
    mList.add(m3);
    mList.add(m4);
    mList.add(m5);
    mList.add(m6);
    mList.add(m7);
    mList.add(m8);
    mList.add(m9);
    mList.add(m10);
    bw1.addMutations(mList);
    bw1.close();
    // Dump the prospects table for debugging.
    Scanner scan = conn.createScanner("rya_prospects", new Authorizations());
    scan.setRange(new Range());
    for (Map.Entry<Key, Value> entry : scan) {
        System.out.println("Key row string is " + entry.getKey().getRow().toString());
        System.out.println("Key is " + entry.getKey());
        System.out.println("Value is " + (new String(entry.getValue().get())));
    }
    // Selectivity entries: per join-type column family, qualifier is the count.
    m1 = new Mutation(s1);
    m2 = new Mutation(s2);
    m3 = new Mutation(s3);
    m4 = new Mutation(s4);
    m5 = new Mutation(s5);
    m6 = new Mutation(s6);
    m7 = new Mutation(s7);
    m8 = new Mutation(s8);
    m9 = new Mutation(s9);
    m10 = new Mutation(s10);
    m11 = new Mutation(new Text("subjectpredicateobject" + DELIM + "FullTableCardinality"));
    m11.put(new Text("FullTableCardinality"), new Text("100"), EMPTY_VAL);
    // Counts double on every join type so each pattern gets distinct selectivity.
    int i = 2;
    int j = 3;
    int k = 4;
    int l = 5;
    Long count1;
    Long count2;
    Long count3;
    Long count4;
    for (String s : sList) {
        count1 = (long) i;
        count2 = (long) j;
        count3 = (long) k;
        count4 = (long) l;
        m1.put(new Text(s), new Text(count4.toString()), EMPTY_VAL);
        m2.put(new Text(s), new Text(count2.toString()), EMPTY_VAL);
        m3.put(new Text(s), new Text(count1.toString()), EMPTY_VAL);
        m4.put(new Text(s), new Text(count3.toString()), EMPTY_VAL);
        m5.put(new Text(s), new Text(count1.toString()), EMPTY_VAL);
        m6.put(new Text(s), new Text(count2.toString()), EMPTY_VAL);
        m7.put(new Text(s), new Text(count1.toString()), EMPTY_VAL);
        m8.put(new Text(s), new Text(count4.toString()), EMPTY_VAL);
        m9.put(new Text(s), new Text(count3.toString()), EMPTY_VAL);
        m10.put(new Text(s), new Text(count1.toString()), EMPTY_VAL);
        i = 2 * i;
        j = 2 * j;
        k = 2 * k;
        l = 2 * l;
    }
    mList2.add(m1);
    mList2.add(m2);
    mList2.add(m3);
    mList2.add(m5);
    mList2.add(m4);
    mList2.add(m6);
    mList2.add(m7);
    mList2.add(m8);
    mList2.add(m9);
    mList2.add(m10);
    mList2.add(m11);
    bw2.addMutations(mList2);
    bw2.close();
    // Dump the selectivity table for debugging.
    scan = conn.createScanner("rya_selectivity", new Authorizations());
    scan.setRange(new Range());
    for (Map.Entry<Key, Value> entry : scan) {
        System.out.println("Key row string is " + entry.getKey().getRow().toString());
        System.out.println("Key is " + entry.getKey());
        // getColumnQualifier().toString() is already a String; no String copy needed.
        System.out.println("Value is " + entry.getKey().getColumnQualifier().toString());
    }
    // Optimize two clones of q6: join-selectivity-first vs filter-first.
    TupleExpr te = getTupleExpr(q6);
    TupleExpr te2 = (TupleExpr) te.clone();
    System.out.println("Bindings are " + te.getBindingNames());
    RdfCloudTripleStoreSelectivityEvaluationStatistics ars = new RdfCloudTripleStoreSelectivityEvaluationStatistics(arc, res, accc);
    QueryJoinSelectOptimizer qjs = new QueryJoinSelectOptimizer(ars, accc);
    System.out.println("Original query is " + te);
    qjs.optimize(te, null, null);
    FilterOptimizer fo = new FilterOptimizer();
    fo.optimize(te2, null, null);
    System.out.println("filter optimized query before js opt is " + te2);
    qjs.optimize(te2, null, null);
    System.out.println("join selectivity opt query before filter opt is " + te);
    fo.optimize(te, null, null);
    System.out.println("join selectivity opt query is " + te);
    System.out.println("filter optimized query is " + te2);
}
Usage example of org.openrdf.query.algebra.evaluation.impl.FilterOptimizer in the Apache incubator-rya project.
From class EntityOptimizerTest, method testOptimizeFilters:
/**
 * Verifies that after running {@link FilterOptimizer} and
 * {@link EntityOptimizer} on query {@code q8}, the plan contains exactly two
 * entity-centric star queries: one of size 2 rooted at variable {@code m}
 * and one of size 3 rooted at the constant {@code uri:chickens}.
 */
@Test
public void testOptimizeFilters() throws Exception {
    AccumuloSelectivityEvalDAO accc = new AccumuloSelectivityEvalDAO();
    accc.setConf(conf);
    accc.setConnector(accCon);
    accc.setRdfEvalDAO(res);
    accc.init();
    BatchWriter bw1 = accCon.createBatchWriter("rya_prospects", config);
    BatchWriter bw2 = accCon.createBatchWriter("rya_selectivity", config);
    // Cardinality row keys for the statement patterns in q8.
    String s1 = "predicateobject" + DELIM + "http://www.w3.org/2000/01/rdf-schema#label" + DELIM + "uri:chickens";
    String s2 = "predicateobject" + DELIM + "uri:barksAt" + DELIM + "uri:chickens";
    String s3 = "predicate" + DELIM + "uri:peesOn";
    String s5 = "predicateobject" + DELIM + "uri:scratches" + DELIM + "uri:ears";
    String s4 = "predicateobject" + DELIM + "uri:eats" + DELIM + "uri:chickens";
    List<Mutation> mList = new ArrayList<Mutation>();
    List<Mutation> mList2 = new ArrayList<Mutation>();
    // Join-type column families used by the selectivity table.
    List<String> sList = Arrays.asList("subjectobject", "subjectpredicate", "subjectsubject", "objectsubject", "objectpredicate", "objectobject");
    Mutation m1, m2, m3, m4, m5, m6;
    // All patterns get a uniform cardinality of 2.
    m1 = new Mutation(s1 + DELIM + "1");
    m1.put(new Text("count"), new Text(""), new Value("2".getBytes()));
    m2 = new Mutation(s2 + DELIM + "1");
    m2.put(new Text("count"), new Text(""), new Value("2".getBytes()));
    m3 = new Mutation(s3 + DELIM + "1");
    m3.put(new Text("count"), new Text(""), new Value("2".getBytes()));
    m4 = new Mutation(s4 + DELIM + "1");
    m4.put(new Text("count"), new Text(""), new Value("2".getBytes()));
    m5 = new Mutation(s5 + DELIM + "1");
    m5.put(new Text("count"), new Text(""), new Value("2".getBytes()));
    mList.add(m1);
    mList.add(m2);
    mList.add(m3);
    mList.add(m4);
    mList.add(m5);
    bw1.addMutations(mList);
    bw1.close();
    // Uniform selectivity of 1 for every join type; m6 sets total table size.
    m1 = new Mutation(s1);
    m2 = new Mutation(s2);
    m3 = new Mutation(s3);
    m4 = new Mutation(s4);
    m5 = new Mutation(s5);
    m6 = new Mutation(new Text("subjectpredicateobject" + DELIM + "FullTableCardinality"));
    m6.put(new Text("FullTableCardinality"), new Text("100"), EMPTY_VAL);
    for (String s : sList) {
        m1.put(new Text(s), new Text(Integer.toString(1)), EMPTY_VAL);
        m2.put(new Text(s), new Text(Integer.toString(1)), EMPTY_VAL);
        m3.put(new Text(s), new Text(Integer.toString(1)), EMPTY_VAL);
        m4.put(new Text(s), new Text(Integer.toString(1)), EMPTY_VAL);
        m5.put(new Text(s), new Text(Integer.toString(1)), EMPTY_VAL);
    }
    mList2.add(m1);
    mList2.add(m2);
    mList2.add(m3);
    mList2.add(m4);
    mList2.add(m5);
    mList2.add(m6);
    bw2.addMutations(mList2);
    bw2.close();
    TupleExpr te = getTupleExpr(q8);
    (new FilterOptimizer()).optimize(te, null, null);
    EntityOptimizer cco = new EntityOptimizer(accc);
    System.out.println("Original query is " + te);
    cco.optimize(te, null, null);
    EntityCentricVisitor ccv = new EntityCentricVisitor();
    te.visit(ccv);
    List<QueryModelNode> nodes = Lists.newArrayList(ccv.getCcNodes());
    System.out.println("Test 8 nodes are :" + nodes);
    Assert.assertEquals(2, nodes.size());
    for (QueryModelNode q : nodes) {
        // Hoist the cast once instead of repeating it in every branch.
        final EntityTupleSet entityTuple = (EntityTupleSet) q;
        final int starSize = entityTuple.getStarQuery().getNodes().size();
        if (starSize == 2) {
            Assert.assertEquals("m", entityTuple.getStarQuery().getCommonVarName());
        } else if (starSize == 3) {
            Assert.assertEquals("uri:chickens", entityTuple.getStarQuery().getCommonVarName());
        } else {
            // Assert.fail carries a message, unlike assertTrue(false).
            Assert.fail("Unexpected star query size: " + starSize);
        }
    }
    System.out.println(te);
}
Usage example of org.openrdf.query.algebra.evaluation.impl.FilterOptimizer in the Apache incubator-rya project.
From class EntityOptimizerTest, method testOptimizeFilter2:
/**
 * Verifies that after running {@link FilterOptimizer} and
 * {@link EntityOptimizer} on query {@code q9}, the plan contains exactly
 * three entity-centric star queries: two of size 2 rooted at variables
 * {@code i} and {@code m}, and one of size 3 rooted at {@code h}.
 */
@Test
public void testOptimizeFilter2() throws Exception {
    AccumuloSelectivityEvalDAO accc = new AccumuloSelectivityEvalDAO();
    accc.setConf(conf);
    accc.setConnector(accCon);
    accc.setRdfEvalDAO(res);
    accc.init();
    BatchWriter bw1 = accCon.createBatchWriter("rya_prospects", config);
    BatchWriter bw2 = accCon.createBatchWriter("rya_selectivity", config);
    // Cardinality row keys for the statement patterns in q9.
    String s1 = "predicateobject" + DELIM + "http://www.w3.org/2000/01/rdf-schema#label" + DELIM + "uri:dog";
    String s2 = "predicateobject" + DELIM + "uri:barksAt" + DELIM + "uri:cat";
    String s3 = "predicateobject" + DELIM + "uri:peesOn" + DELIM + "uri:hydrant";
    String s5 = "predicateobject" + DELIM + "uri:scratches" + DELIM + "uri:ears";
    String s4 = "predicateobject" + DELIM + "uri:eats" + DELIM + "uri:chickens";
    String s6 = "predicateobject" + DELIM + "uri:eats" + DELIM + "uri:kibble";
    String s7 = "predicateobject" + DELIM + "uri:rollsIn" + DELIM + "uri:mud";
    String s8 = "predicateobject" + DELIM + "uri:runsIn" + DELIM + "uri:field";
    String s9 = "predicate" + DELIM + "uri:smells";
    String s10 = "predicateobject" + DELIM + "uri:eats" + DELIM + "uri:sticks";
    String s11 = "predicate" + DELIM + "uri:watches";
    List<Mutation> mList = new ArrayList<Mutation>();
    List<Mutation> mList2 = new ArrayList<Mutation>();
    // Join-type column families used by the selectivity table.
    List<String> sList = Arrays.asList("subjectobject", "subjectpredicate", "subjectsubject", "objectsubject", "objectpredicate", "objectobject");
    Mutation m1, m2, m3, m4, m5, m6, m7, m8, m9, m10, m11, m12;
    m1 = new Mutation(s1 + DELIM + "1");
    m1.put(new Text("count"), new Text(""), new Value("2".getBytes()));
    m2 = new Mutation(s2 + DELIM + "1");
    m2.put(new Text("count"), new Text(""), new Value("2".getBytes()));
    m3 = new Mutation(s3 + DELIM + "1");
    m3.put(new Text("count"), new Text(""), new Value("2".getBytes()));
    m4 = new Mutation(s4 + DELIM + "1");
    m4.put(new Text("count"), new Text(""), new Value("2".getBytes()));
    m5 = new Mutation(s5 + DELIM + "1");
    m5.put(new Text("count"), new Text(""), new Value("2".getBytes()));
    m6 = new Mutation(s6 + DELIM + "1");
    m6.put(new Text("count"), new Text(""), new Value("1".getBytes()));
    m7 = new Mutation(s7 + DELIM + "1");
    m7.put(new Text("count"), new Text(""), new Value("2".getBytes()));
    m8 = new Mutation(s8 + DELIM + "1");
    m8.put(new Text("count"), new Text(""), new Value("2".getBytes()));
    m9 = new Mutation(s9 + DELIM + "1");
    m9.put(new Text("count"), new Text(""), new Value("2".getBytes()));
    m10 = new Mutation(s10 + DELIM + "1");
    m10.put(new Text("count"), new Text(""), new Value("1".getBytes()));
    m11 = new Mutation(s11 + DELIM + "1");
    m11.put(new Text("count"), new Text(""), new Value("2".getBytes()));
    mList.add(m1);
    mList.add(m2);
    mList.add(m3);
    mList.add(m4);
    mList.add(m5);
    mList.add(m6);
    mList.add(m7);
    mList.add(m8);
    mList.add(m9);
    mList.add(m10);
    mList.add(m11);
    bw1.addMutations(mList);
    bw1.close();
    // Selectivity values per join type; m12 sets the full table cardinality.
    m1 = new Mutation(s1);
    m2 = new Mutation(s2);
    m3 = new Mutation(s3);
    m4 = new Mutation(s4);
    m5 = new Mutation(s5);
    m6 = new Mutation(s6);
    m7 = new Mutation(s7);
    m8 = new Mutation(s8);
    m9 = new Mutation(s9);
    m10 = new Mutation(s10);
    m11 = new Mutation(s11);
    m12 = new Mutation(new Text("subjectpredicateobject" + DELIM + "FullTableCardinality"));
    m12.put(new Text("FullTableCardinality"), new Text("100"), EMPTY_VAL);
    for (String s : sList) {
        m1.put(new Text(s), new Text(Integer.toString(1)), EMPTY_VAL);
        m2.put(new Text(s), new Text(Integer.toString(1)), EMPTY_VAL);
        m3.put(new Text(s), new Text(Integer.toString(1)), EMPTY_VAL);
        m4.put(new Text(s), new Text(Integer.toString(1)), EMPTY_VAL);
        m5.put(new Text(s), new Text(Integer.toString(1)), EMPTY_VAL);
        m6.put(new Text(s), new Text(Integer.toString(1)), EMPTY_VAL);
        m7.put(new Text(s), new Text(Integer.toString(2)), EMPTY_VAL);
        m8.put(new Text(s), new Text(Integer.toString(2)), EMPTY_VAL);
        m9.put(new Text(s), new Text(Integer.toString(2)), EMPTY_VAL);
        m10.put(new Text(s), new Text(Integer.toString(1)), EMPTY_VAL);
        m11.put(new Text(s), new Text(Integer.toString(2)), EMPTY_VAL);
    }
    mList2.add(m1);
    mList2.add(m2);
    mList2.add(m3);
    mList2.add(m4);
    mList2.add(m5);
    mList2.add(m6);
    mList2.add(m7);
    mList2.add(m8);
    mList2.add(m9);
    mList2.add(m10);
    mList2.add(m11);
    mList2.add(m12);
    bw2.addMutations(mList2);
    bw2.close();
    TupleExpr te = getTupleExpr(q9);
    System.out.println(te);
    (new FilterOptimizer()).optimize(te, null, null);
    EntityOptimizer cco = new EntityOptimizer(accc);
    System.out.println("Original query is " + te);
    cco.optimize(te, null, null);
    EntityCentricVisitor ccv = new EntityCentricVisitor();
    te.visit(ccv);
    List<QueryModelNode> nodes = Lists.newArrayList(ccv.getCcNodes());
    Assert.assertEquals(3, nodes.size());
    // Each size-2 star query must consume one of these common variables.
    List<String> cVarList = Lists.newArrayList();
    cVarList.add("i");
    cVarList.add("m");
    for (QueryModelNode q : nodes) {
        // Hoist the cast once instead of repeating it in every branch.
        final EntityTupleSet entityTuple = (EntityTupleSet) q;
        final int starSize = entityTuple.getStarQuery().getNodes().size();
        if (starSize == 2) {
            String s = entityTuple.getStarQuery().getCommonVarName();
            System.out.println("node is " + q + " and common var is " + s);
            System.out.println("star query is " + entityTuple.getStarQuery());
            Assert.assertTrue(cVarList.contains(s));
            // Remove so a duplicated common var cannot satisfy both checks.
            cVarList.remove(s);
        } else if (starSize == 3) {
            Assert.assertEquals("h", entityTuple.getStarQuery().getCommonVarName());
        } else {
            // Assert.fail carries a message, unlike assertTrue(false).
            Assert.fail("Unexpected star query size: " + starSize);
        }
    }
    System.out.println(te);
}
Usage example of org.openrdf.query.algebra.evaluation.impl.FilterOptimizer in the Apache incubator-rya project.
From class TupleReArrangerTest, method tupleReArrangeTest4:
/**
 * Parses a query with three filters over a UNION, pushes the filters down
 * with {@link FilterOptimizer}, and checks that the re-arranger produces
 * exactly 24 distinct join orderings.
 */
@Test
public void tupleReArrangeTest4() throws MalformedQueryException {
    final String query =
            "SELECT ?a ?b ?c ?d ?e ?x ?y"
            + "{"
            + " Filter(?c = <uri:label2>)"
            + " Filter(?x = <uri:somethingFunny>) "
            + " Filter(?d = <uri:Fred> ) "
            + " ?e <uri:laughsAt> ?x ."
            + " ?e <uri:livesIn> ?y . "
            + "{ ?a a ?b . ?a <http://www.w3.org/2000/01/rdf-schema#label> ?c }"
            + " UNION { ?a <uri:talksTo> ?d . ?a <http://www.w3.org/2000/01/rdf-schema#label> ?e }"
            + "}";
    final ParsedQuery parsed = new SPARQLParser().parseQuery(query, null);
    final TupleExpr expr = parsed.getTupleExpr();
    // Push filters down before enumerating the possible join orders.
    new FilterOptimizer().optimize(expr, null, null);
    System.out.println(expr);
    final List<TupleExpr> orderings = TupleReArranger.getTupleReOrderings(expr);
    System.out.println(orderings);
    Assert.assertEquals(24, orderings.size());
}
End of aggregated FilterOptimizer usage examples.