Use of org.openrdf.query.algebra.QueryModelNode in project incubator-rya by apache.
In the class PCJOptionalTestIT, the method testSimpleOptionalTest1:
@Test
public void testSimpleOptionalTest1() throws Exception {
    final String query = //
            "" + //
            "SELECT ?u ?s ?t " + //
            "{" + //
            " ?s a ?t ." + //
            " OPTIONAL{?t <http://www.w3.org/2000/01/rdf-schema#label> ?u } ." + //
            " ?u <uri:talksTo> ?s . " + //
            "}";

    final SPARQLParser parser = new SPARQLParser();
    final ParsedQuery pq1 = parser.parseQuery(query, null);

    // Register the parsed query itself as a precomputed join (PCJ) candidate.
    final SimpleExternalTupleSet extTup1 = new SimpleExternalTupleSet((Projection) pq1.getTupleExpr().clone());
    final List<ExternalTupleSet> list = new ArrayList<ExternalTupleSet>();
    list.add(extTup1);

    // The optimized plan is expected to consist of exactly this PCJ node.
    final List<QueryModelNode> optTupNodes = Lists.newArrayList();
    optTupNodes.add(extTup1);

    final PCJOptimizer pcj = new PCJOptimizer(list, true, new AccumuloIndexSetProvider(new Configuration(), list));
    final TupleExpr te = pq1.getTupleExpr();
    pcj.optimize(te, null, null);

    // Collect the nodes of the optimized plan and verify they match the expected nodes.
    final NodeCollector nc = new NodeCollector();
    te.visit(nc);
    final List<QueryModelNode> qNodes = nc.getNodes();
    Assert.assertEquals(qNodes.size(), optTupNodes.size());
    for (final QueryModelNode node : qNodes) {
        Assert.assertTrue(optTupNodes.contains(node));
    }
}
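The test relies on a NodeCollector helper that is not shown in this snippet. A minimal sketch of such a visitor is given below, assuming the standard openrdf QueryModelVisitorBase API; the filtering condition and the ExternalTupleSet import path are assumptions based on how the collected nodes are compared above, not code taken from the Rya test.

import java.util.List;

import org.apache.rya.indexing.external.tupleSet.ExternalTupleSet;
import org.openrdf.query.algebra.QueryModelNode;
import org.openrdf.query.algebra.StatementPattern;
import org.openrdf.query.algebra.helpers.QueryModelVisitorBase;

import com.google.common.collect.Lists;

// Sketch of a node-collecting visitor; the real NodeCollector in PCJOptionalTestIT may differ.
public class NodeCollector extends QueryModelVisitorBase<RuntimeException> {

    private final List<QueryModelNode> nodes = Lists.newArrayList();

    public List<QueryModelNode> getNodes() {
        return nodes;
    }

    @Override
    protected void meetNode(final QueryModelNode node) {
        // only keep the leaf-level nodes the test compares against
        if (node instanceof StatementPattern || node instanceof ExternalTupleSet) {
            nodes.add(node);
        }
        super.meetNode(node); // continue walking the children
    }
}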
Use of org.openrdf.query.algebra.QueryModelNode in project incubator-rya by apache.
In the class PCJOptimizerUtilities, the method getVarNames:
/**
 * Collects the names of all variables that appear in the given query nodes.
 */
private static Set<String> getVarNames(final Collection<QueryModelNode> nodes) {
    List<String> tempVars;
    final Set<String> nodeVarNames = Sets.newHashSet();
    for (final QueryModelNode s : nodes) {
        tempVars = VarCollector.process(s);
        for (final String t : tempVars) {
            nodeVarNames.add(t);
        }
    }
    return nodeVarNames;
}
Use of org.openrdf.query.algebra.QueryModelNode in project incubator-rya by apache.
In the class QueryVariableNormalizer, the method evaluateMap:
/**
 * Adds or removes HashMap substitution schemes to the list of substitution
 * schemes depending on whether or not they are consistent with the two
 * lists of statement patterns.
 *
 * @param qArray
 *            List of StatementPatterns associated with the query array
 * @param iArray
 *            List of StatementPatterns associated with the index array
 * @param hMap
 *            HashMap of substitutions to be analyzed for consistency and
 *            added or removed
 * @param hMaps
 *            List of HashMaps containing substitution schemes
 * @param mapConsistent
 *            HashMap recording whether each substitution scheme is
 *            consistent with the statement patterns seen so far
 * @param trees
 *            Array of TreeMaps used for comparison of StatementPattern
 *            nodes
 */
private static void evaluateMap(List<QueryModelNode> qArray, List<QueryModelNode> iArray, HashMap<String, String> hMap,
        List<HashMap<String, String>> hMaps, HashMap<HashMap<String, String>, Boolean> mapConsistent,
        TreeMap<String, Integer>[] trees) throws IllegalArgumentException {
    // if every index node has been matched, hMap is a complete substitution
    // scheme; add it to the list of possible substitution schemes.
    if (iArray.size() == 0) {
        if (!hMaps.contains(hMap)) {
            hMaps.add(hMap);
        }
        mapConsistent.put(hMap, true);
        return;
    }
    // compare every index node with every query node to generate all
    // possible substitution schemes.
    for (int i = 0; i < iArray.size(); i++) {
        for (int j = 0; j < qArray.size(); j++) {
            QueryModelNode node1 = qArray.get(j);
            QueryModelNode node2 = iArray.get(i);
            if ((node1 instanceof StatementPattern) && (node2 instanceof StatementPattern)) {
                // check that the statement patterns match independent of variable names
                // (same constants in same place, non-constant Vars in same place)
                if (genConstantCompare((StatementPattern) node1, (StatementPattern) node2)) {
                    List<Var> variables1 = ((StatementPattern) node1).getVarList();
                    List<Var> variables2 = ((StatementPattern) node2).getVarList();
                    List<List<String>> vars = genGetCommonVars(variables1, variables2);
                    List<String> vars1 = vars.get(0);
                    List<String> vars2 = vars.get(1);
                    if (listConsistent(vars1, vars2, hMap)) {
                        HashMap<String, String> hashMap = Maps.newHashMap(hMap);
                        putVars(vars1, vars2, hashMap);
                        List<QueryModelNode> queryArray = Lists.newArrayList(qArray);
                        List<QueryModelNode> indexArray = Lists.newArrayList(iArray);
                        indexArray.remove(i);
                        queryArray.remove(j);
                        evaluateMap(queryArray, indexArray, hashMap, hMaps, mapConsistent, trees);
                    }
                }
            } else if ((node1 instanceof Filter) && (node2 instanceof Filter)) {
                // check that the filter conditions are the same independent of variable names
                try {
                    if (filterCompare((Filter) node1, (Filter) node2)) {
                        List<QueryModelNode> variables1 = FilterVarValueCollector.process(((Filter) node1).getCondition());
                        List<QueryModelNode> variables2 = FilterVarValueCollector.process(((Filter) node2).getCondition());
                        List<List<String>> vars = filterCommonVars(variables1, variables2);
                        List<String> vars1 = vars.get(0);
                        List<String> vars2 = vars.get(1);
                        if (listConsistent(vars1, vars2, hMap)) {
                            HashMap<String, String> hashMap = Maps.newHashMap(hMap);
                            putVars(vars1, vars2, hashMap);
                            List<QueryModelNode> queryArray = Lists.newArrayList(qArray);
                            List<QueryModelNode> indexArray = Lists.newArrayList(iArray);
                            indexArray.remove(i);
                            queryArray.remove(j);
                            evaluateMap(queryArray, indexArray, hashMap, hMaps, mapConsistent, trees);
                        }
                    }
                } catch (Exception e) {
                    System.out.println("Invalid Filter! " + e);
                }
            } else {
                throw new IllegalArgumentException("Invalid query tree.");
            }
        }
    }
    // if hMap could not be extended consistently, remove it from the list.
    if (mapConsistent.containsKey(hMap) && !mapConsistent.get(hMap)) {
        hMaps.remove(hMap);
    }
}
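To make the substitution-scheme idea concrete, here is a small, purely illustrative sketch; the patterns, variable names, and class name are hypothetical and not taken from the Rya code. Given the query pattern ?x <uri:talksTo> ?y and the index pattern ?a <uri:talksTo> ?b, the constant <uri:talksTo> occupies the same position in both, so the recursion would record a map like the one below as one consistent substitution scheme.

import java.util.HashMap;

// Hypothetical illustration of a substitution scheme built by evaluateMap:
//   query pattern:  ?x <uri:talksTo> ?y
//   index pattern:  ?a <uri:talksTo> ?b
public class SubstitutionSchemeExample {
    public static void main(String[] args) {
        HashMap<String, String> scheme = new HashMap<String, String>();
        scheme.put("a", "x"); // index variable 'a' is relabeled to query variable 'x'
        scheme.put("b", "y"); // index variable 'b' is relabeled to query variable 'y'
        System.out.println(scheme); // e.g. {a=x, b=y}
    }
}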
Use of org.openrdf.query.algebra.QueryModelNode in project incubator-rya by apache.
In the class QueryVariableNormalizer, the method getNormalizedIndex:
/**
 * Computes all relabelings of the proposed index's variables that are
 * consistent with the variables appearing in the query.
 *
 * @param tuple1
 *            tuple expression from a parsed query
 * @param tuple2
 *            tuple expression from a parsed query (the proposed index whose
 *            variables are to be relabeled)
 * @return list of all possible tuples obtained by substituting the
 *         variables of the proposed index with the variables from the query
 * @throws Exception
 * @throws IllegalArgumentException
 */
public static List<TupleExpr> getNormalizedIndex(TupleExpr tuple1, TupleExpr tuple2) throws Exception {
    List<QueryModelNode> nodes1, nodes2;
    TreeMap<String, List<QueryModelNode>> queryMap1, indexMap1;
    List<HashMap<String, String>> varChanges = new ArrayList<HashMap<String, String>>();
    List<TupleExpr> tupleList = new ArrayList<TupleExpr>();

    // if tuples are equal, no need to do anything
    if (tuple1.equals(tuple2)) {
        tupleList.add((TupleExpr) tuple1.clone());
        return tupleList;
    }

    NormalizeQueryVisitor tupNVis = new NormalizeQueryVisitor(false);
    NormalizeQueryVisitor indexNVis = new NormalizeQueryVisitor(true);
    tuple1.visit(tupNVis);
    tuple2.visit(indexNVis);

    TupleExpr tuple;
    queryMap1 = tupNVis.getMap();
    indexMap1 = indexNVis.getMap();

    // TreeMaps used by the comparators below
    TreeMap<String, Integer>[] trees = (TreeMap<String, Integer>[]) new TreeMap[4];
    for (int i = 0; i < 4; i++) {
        trees[i] = new TreeMap<String, Integer>();
    }
    // query tuple variable count
    trees[0] = tupNVis.getKeyMap();
    // index tuple variable count
    trees[2] = indexNVis.getKeyMap();

    // if query does not contain as many constant Vars as index,
    // normalization not possible.
    // if (!(trees[0].keySet().size() >= trees[2].keySet().size())) {
    //     return tupleList;
    // }

    // sort keys according to the size of the associated StatementPattern list.
    // This optimization keeps the initial list of HashMaps (possible variable
    // substitutions) as small as possible. Additional criteria could be added
    // to the comparator, e.g. the size of the query bin lists.
    Set<String> keys = indexMap1.keySet();
    List<String> keyList = new ArrayList<String>(keys);
    Collections.sort(keyList, new ConstantKeyComp(indexMap1, queryMap1));

    for (String s : keyList) {
        if (queryMap1.containsKey(s)) {
            nodes1 = queryMap1.get(s);
            nodes2 = indexMap1.get(s);
            // the query must contain at least as many nodes for this key as the index
            if (!(nodes1.size() >= nodes2.size())) {
                return tupleList;
            }
            // query list variable count
            trees[1] = getListVarCnt(nodes1, tupNVis.getVariableMap());
            // index list variable count
            trees[3] = getListVarCnt(nodes2, indexNVis.getVariableMap());
            Collections.sort(nodes1, new CountComp(trees[1], trees[0]));
            Collections.sort(nodes2, new CountComp(trees[3], trees[2]));
            varChanges = statementCompare(nodes1, nodes2, varChanges, trees);
            if (varChanges.size() == 0) {
                return tupleList;
            }
        } else {
            return tupleList;
        }
    }

    List<QueryModelNode> filters2 = indexNVis.getFilters();
    // determine if index contains filters whose variables need to be relabeled
    if (filters2.size() != 0) {
        List<QueryModelNode> filters1 = tupNVis.getFilters();
        // only attempt to normalize variables if the query contains at least as many filters as the index
        if (filters1.size() >= filters2.size()) {
            Collections.sort(filters1, new FilterComp());
            Collections.sort(filters2, new FilterComp());
            varChanges = statementCompare(filters1, filters2, varChanges, trees);
        }
    }

    // remove duplicate substitution schemes
    List<HashMap<String, String>> varChangeSet = new ArrayList<HashMap<String, String>>();
    for (HashMap<String, String> s : varChanges) {
        if (!varChangeSet.contains(s)) {
            varChangeSet.add(s);
        }
    }

    ValueMapVisitor valMapVis = new ValueMapVisitor();
    tuple1.visit(valMapVis);
    Map<String, Value> valMap = valMapVis.getValueMap();

    // apply each substitution scheme to a copy of the index
    for (HashMap<String, String> s : varChangeSet) {
        tuple = tuple2.clone();
        replaceTupleVariables(s, tuple, valMap);
        tupleList.add(tuple);
    }
    return tupleList;
}
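For context, the sketch below shows how getNormalizedIndex might be invoked. It is an assumption-laden illustration, not code from the Rya project: the SPARQL strings and the class name NormalizeExample are hypothetical, and the import for QueryVariableNormalizer is omitted because its package depends on the Rya version.

import java.util.List;

import org.openrdf.query.algebra.TupleExpr;
import org.openrdf.query.parser.ParsedQuery;
import org.openrdf.query.parser.sparql.SPARQLParser;

// Hypothetical driver: relabel an index's variables so the index can be matched
// against a query. The import for QueryVariableNormalizer is intentionally omitted.
public class NormalizeExample {
    public static void main(String[] args) throws Exception {
        SPARQLParser parser = new SPARQLParser();
        ParsedQuery query = parser.parseQuery(
                "SELECT ?s ?t { ?s a ?t . ?t <uri:talksTo> ?s . }", null);
        ParsedQuery index = parser.parseQuery(
                "SELECT ?x ?y { ?x a ?y . ?y <uri:talksTo> ?x . }", null);

        // Each returned TupleExpr is a copy of the index expression whose variables
        // have been renamed with one consistent substitution scheme.
        List<TupleExpr> normalized = QueryVariableNormalizer.getNormalizedIndex(
                query.getTupleExpr(), index.getTupleExpr());
        for (TupleExpr te : normalized) {
            System.out.println(te);
        }
    }
}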
Use of org.openrdf.query.algebra.QueryModelNode in project incubator-rya by apache.
In the class JoinSegment, the method replaceWithExternalSet:
/**
 * This method matches the ordered nodes returned by
 * {@link JoinSegment#getOrderedNodes()} for nodeToReplace with a subset of
 * the ordered nodes for this JoinSegment. The order of the nodes for
 * nodeToReplace must match their order as a subset of orderedNodes.
 *
 * @param nodeToReplace
 *            - query segment whose nodes are to be replaced by the external set
 * @param set
 *            - external set (e.g. a PCJ node) that will replace the specified query nodes
 * @return true if the nodes were replaced, false if nodeToReplace is not
 *         contained in this JoinSegment
 */
@Override
public boolean replaceWithExternalSet(QuerySegment<T> nodeToReplace, T set) {
    Preconditions.checkNotNull(nodeToReplace);
    Preconditions.checkNotNull(set);
    if (!containsQuerySegment(nodeToReplace)) {
        return false;
    }
    // swap the matched nodes for the external set in both node views
    Set<QueryModelNode> nodeSet = nodeToReplace.getUnOrderedNodes();
    orderedNodes.removeAll(nodeSet);
    orderedNodes.add(set);
    unorderedNodes.removeAll(nodeSet);
    unorderedNodes.add(set);
    // drop any filter conditions that belonged to the replaced nodes
    for (QueryModelNode q : nodeSet) {
        if (q instanceof ValueExpr) {
            conditionMap.remove(q);
        }
    }
    return true;
}
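A rough illustration of the replacement contract follows, using plain strings in place of QueryModelNodes; this is a hypothetical sketch that mirrors the method body above, not part of JoinSegment.

import java.util.List;
import java.util.Set;

import com.google.common.collect.Lists;
import com.google.common.collect.Sets;

// Hypothetical sketch mirroring replaceWithExternalSet: the matched subset of nodes
// is removed and the external set (e.g. a PCJ) takes its place in both views.
public class ReplaceIllustration {
    public static void main(String[] args) {
        List<String> orderedNodes = Lists.newArrayList("sp1", "sp2", "sp3");
        Set<String> unorderedNodes = Sets.newHashSet(orderedNodes);

        Set<String> nodesToReplace = Sets.newHashSet("sp1", "sp2");
        String externalSet = "PCJ(sp1, sp2)";

        orderedNodes.removeAll(nodesToReplace);
        orderedNodes.add(externalSet);
        unorderedNodes.removeAll(nodesToReplace);
        unorderedNodes.add(externalSet);

        System.out.println(orderedNodes); // [sp3, PCJ(sp1, sp2)]
        System.out.println(unorderedNodes);
    }
}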