Use of org.apache.jackrabbit.commons.predicate.Predicate in project jackrabbit by apache.
The class LuceneQueryFactory, method mapConstraintToQueryAndFilter.
protected Predicate mapConstraintToQueryAndFilter(QueryPair query, Constraint constraint, Map<String, NodeType> selectorMap, JackrabbitIndexSearcher searcher, IndexReader reader) throws RepositoryException, IOException {
    Predicate filter = Predicate.TRUE;
    if (constraint instanceof And) {
        And and = (And) constraint;
        filter = mapConstraintToQueryAndFilter(query, and.getConstraint1(), selectorMap, searcher, reader);
        Predicate other = mapConstraintToQueryAndFilter(query, and.getConstraint2(), selectorMap, searcher, reader);
        if (filter == Predicate.TRUE) {
            filter = other;
        } else if (other != Predicate.TRUE) {
            filter = Predicates.and(filter, other);
        }
    } else if (constraint instanceof Comparison) {
        Comparison c = (Comparison) constraint;
        Transform transform = new Transform(c.getOperand1());
        DynamicOperand left = transform.operand;
        final String operator = c.getOperator();
        StaticOperand right = c.getOperand2();
        if (left instanceof Length || left instanceof FullTextSearchScore || (((!JCR_OPERATOR_EQUAL_TO.equals(operator) && !JCR_OPERATOR_LIKE.equals(operator)) || transform.transform != TRANSFORM_NONE) && (left instanceof NodeName || left instanceof NodeLocalName))) {
            try {
                int type = PropertyType.UNDEFINED;
                if (left instanceof Length) {
                    type = PropertyType.LONG;
                } else if (left instanceof FullTextSearchScore) {
                    type = PropertyType.DOUBLE;
                }
                final DynamicOperand operand = c.getOperand1();
                final Value value = evaluator.getValue(right, type);
                filter = new RowPredicate() {
                    @Override
                    protected boolean evaluate(Row row) throws RepositoryException {
                        return new ValueComparator().evaluate(operator, evaluator.getValue(operand, row), value);
                    }
                };
            } catch (ValueFormatException e) {
                throw new InvalidQueryException(e);
            }
        } else {
            Query cq = getComparisonQuery(left, transform.transform, operator, right, selectorMap);
            query.subQuery.add(cq, MUST);
        }
    } else if (constraint instanceof DescendantNode) {
        final DescendantNode descendantNode = (DescendantNode) constraint;
        Query context = getNodeIdQuery(UUID, descendantNode.getAncestorPath());
        query.mainQuery = new DescendantSelfAxisQuery(context, query.subQuery, false);
    } else {
        query.subQuery.add(create(constraint, selectorMap, searcher), MUST);
    }
    return filter;
}
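The And branch treats Predicate.TRUE as an identity: a trivial filter is never wrapped into a composite AND. Below is a minimal standalone sketch of that composition pattern, using the same org.apache.jackrabbit.commons.predicate.Predicates utility; the combine helper and the sample predicate are illustrative and not part of the Jackrabbit source.

import org.apache.jackrabbit.commons.predicate.Predicate;
import org.apache.jackrabbit.commons.predicate.Predicates;

public class PredicateCompositionSketch {

    // Mirrors the identity handling in the And branch: Predicate.TRUE is
    // dropped rather than wrapped into a composite AND predicate.
    private static Predicate combine(Predicate filter, Predicate other) {
        if (filter == Predicate.TRUE) {
            return other;
        } else if (other != Predicate.TRUE) {
            return Predicates.and(filter, other);
        }
        return filter;
    }

    public static void main(String[] args) {
        // Illustrative leaf predicate standing in for a row filter produced
        // by the Comparison branch.
        Predicate rejectsAll = new Predicate() {
            public boolean evaluate(Object subject) {
                return false;
            }
        };

        System.out.println(combine(Predicate.TRUE, Predicate.TRUE) == Predicate.TRUE); // true: nothing to filter
        System.out.println(combine(Predicate.TRUE, rejectsAll).evaluate("row"));       // false
        System.out.println(combine(rejectsAll, Predicate.TRUE).evaluate("row"));       // false
    }
}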
Use of org.apache.jackrabbit.commons.predicate.Predicate in project jackrabbit by apache.
The class LuceneQueryFactory, method execute.
/**
 * @param columns
 * @param selector
 * @param constraint
 * @param sort
 *            sort order applied to the hits
 * @param externalSort
 *            if <code>true</code>, the LuceneQueryFactory should just let
 *            the QueryEngine take care of sorting and of applying the
 *            offset and limit constraints
 * @param offsetIn
 *            used in pagination
 * @param limitIn
 *            used in pagination
 * @return a list of rows
 * @throws RepositoryException
 * @throws IOException
 */
public List<Row> execute(Map<String, PropertyValue> columns, Selector selector, Constraint constraint, Sort sort, boolean externalSort, long offsetIn, long limitIn) throws RepositoryException, IOException {
    final IndexReader reader = index.getIndexReader(true);
    final int offset = offsetIn < 0 ? 0 : (int) offsetIn;
    final int limit = limitIn < 0 ? Integer.MAX_VALUE : (int) limitIn;
    QueryHits hits = null;
    try {
        JackrabbitIndexSearcher searcher = new JackrabbitIndexSearcher(session, reader, index.getContext().getItemStateManager());
        searcher.setSimilarity(index.getSimilarity());
        Predicate filter = Predicate.TRUE;
        BooleanQuery query = new BooleanQuery();
        QueryPair qp = new QueryPair(query);
        query.add(create(selector), MUST);
        if (constraint != null) {
            String name = selector.getSelectorName();
            NodeType type = ntManager.getNodeType(selector.getNodeTypeName());
            filter = mapConstraintToQueryAndFilter(qp, constraint, Collections.singletonMap(name, type), searcher, reader);
        }
        List<Row> rows = new ArrayList<Row>();
        // TODO depending on the filters, we could push the offset info
        // into the searcher
        hits = searcher.evaluate(qp.mainQuery, sort, offset + limit);
        int currentNode = 0;
        int addedNodes = 0;
        ScoreNode node = hits.nextScoreNode();
        while (node != null) {
            Row row = null;
            try {
                row = new SelectorRow(columns, evaluator, selector.getSelectorName(), session.getNodeById(node.getNodeId()), node.getScore());
            } catch (ItemNotFoundException e) {
                // skip the node
            }
            if (row != null && filter.evaluate(row)) {
                if (externalSort) {
                    // return everything and not worry about sort
                    rows.add(row);
                } else {
                    // apply limit and offset rules locally
                    if (currentNode >= offset && currentNode - offset < limit) {
                        rows.add(row);
                        addedNodes++;
                    }
                    currentNode++;
                    // end the loop when going over the limit
                    if (addedNodes == limit) {
                        break;
                    }
                }
            }
            node = hits.nextScoreNode();
        }
        return rows;
    } finally {
        if (hits != null) {
            hits.close();
        }
        Util.closeOrRelease(reader);
    }
}
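When externalSort is false, the loop above applies the offset and limit locally while iterating over the score nodes. Below is a standalone sketch of that windowing logic under the same rules; the class and method names are illustrative only and the integer list merely stands in for the matching rows.

import java.util.ArrayList;
import java.util.List;

public class PaginationSketch {

    // Reproduces the local offset/limit rules: skip the first `offset`
    // matches, collect at most `limit` of the rest, then stop.
    static <T> List<T> window(List<T> matches, int offset, int limit) {
        List<T> page = new ArrayList<T>();
        int current = 0;
        int added = 0;
        for (T match : matches) {
            if (current >= offset && current - offset < limit) {
                page.add(match);
                added++;
            }
            current++;
            if (added == limit) {
                // end the loop once the page is full
                break;
            }
        }
        return page;
    }

    public static void main(String[] args) {
        List<Integer> matches = new ArrayList<Integer>();
        for (int i = 0; i < 10; i++) {
            matches.add(i);
        }
        System.out.println(window(matches, 3, 4)); // [3, 4, 5, 6]
    }
}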