Use of org.apache.solr.search.WrappedQuery in project SearchServices by Alfresco.
Class SolrCachingPathQuery, method createWeight.
/*
 * @see org.apache.lucene.search.Query#createWeight(org.apache.lucene.search.Searcher)
 */
public Weight createWeight(IndexSearcher indexSearcher, boolean requiresScore) throws IOException {
    SolrIndexSearcher searcher = null;
    if (!(indexSearcher instanceof SolrIndexSearcher)) {
        throw new IllegalStateException("Must have a SolrIndexSearcher");
    } else {
        searcher = (SolrIndexSearcher) indexSearcher;
    }
    DocSet results = (DocSet) searcher.cacheLookup(CacheConstants.ALFRESCO_PATH_CACHE, pathQuery);
    if (results == null) {
        // Cache miss: get path query results and cache them
        WrappedQuery wrapped = new WrappedQuery(pathQuery);
        wrapped.setCache(false);
        results = searcher.getDocSet(wrapped);
        searcher.cacheInsert(CacheConstants.ALFRESCO_PATH_CACHE, pathQuery, results);
    }
    return new ConstantScoreQuery(results.getTopFilter()).createWeight(searcher, false);
}
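Every snippet on this page repeats the same cache-aside pattern: look the query up in an Alfresco-specific Solr cache, and on a miss compute the DocSet through a WrappedQuery with caching switched off (so the result is not also stored in Solr's filterCache) before inserting it into the custom cache. The snippets differ only in which query they wrap and which cache they target. Below is a minimal, self-contained sketch of that pattern, not Alfresco code; the class name and the cache name exampleDocSetCache are made up, and such a cache would have to be declared in solrconfig.xml.
import java.io.IOException;
import org.apache.lucene.search.Query;
import org.apache.solr.search.DocSet;
import org.apache.solr.search.SolrIndexSearcher;
import org.apache.solr.search.WrappedQuery;
public final class CachedDocSetLookupSketch {
    // Hypothetical user cache name; a cache with this name would need to be
    // configured in solrconfig.xml for cacheLookup/cacheInsert to find it.
    private static final String CACHE_NAME = "exampleDocSetCache";
    /**
     * Returns the DocSet matching the query, computing and caching it on a miss.
     */
    public static DocSet lookup(SolrIndexSearcher searcher, Query query) throws IOException {
        DocSet results = (DocSet) searcher.cacheLookup(CACHE_NAME, query);
        if (results == null) {
            // Wrap the query and disable caching so getDocSet does not also
            // populate the filterCache; the result is stored in our own cache instead.
            WrappedQuery wrapped = new WrappedQuery(query);
            wrapped.setCache(false);
            results = searcher.getDocSet(wrapped);
            searcher.cacheInsert(CACHE_NAME, query, results);
        }
        return results;
    }
}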
Use of org.apache.solr.search.WrappedQuery in project SearchServices by Alfresco.
Class SolrCachingPathScorer, method create.
/**
 * Factory method used to create {@link SolrCachingPathScorer} instances.
 */
public static SolrCachingPathScorer create(SolrCachingPathWeight weight, LeafReaderContext context, SolrIndexSearcher searcher, SolrPathQuery wrappedPathQuery) throws IOException {
    DocSet results = (DocSet) searcher.cacheLookup(CacheConstants.ALFRESCO_PATH_CACHE, wrappedPathQuery);
    if (results == null) {
        // Cache miss: get path query results and cache them
        WrappedQuery wrapped = new WrappedQuery(wrappedPathQuery);
        wrapped.setCache(false);
        results = searcher.getDocSet(wrapped);
        searcher.cacheInsert(CacheConstants.ALFRESCO_PATH_CACHE, wrappedPathQuery, results);
    }
    return new SolrCachingPathScorer(weight, results, context, searcher);
}
Use of org.apache.solr.search.WrappedQuery in project SearchServices by Alfresco.
Class SolrDenySetScorer2, method createDenySetScorer.
public static SolrDenySetScorer2 createDenySetScorer(Weight weight, LeafReaderContext context, SolrIndexSearcher searcher, String authorities, LeafReader reader) throws IOException {
    DocSet deniedDocSet = (DocSet) searcher.cacheLookup(CacheConstants.ALFRESCO_DENIED_CACHE, authorities);
    if (deniedDocSet == null) {
        String[] auths = authorities.substring(1).split(authorities.substring(0, 1));
        deniedDocSet = new BitDocSet(new FixedBitSet(searcher.maxDoc()));
        BooleanQuery.Builder bQuery = new BooleanQuery.Builder();
        for (String current : auths) {
            bQuery.add(new TermQuery(new Term(QueryConstants.FIELD_DENIED, current)), Occur.SHOULD);
        }
        WrappedQuery wrapped = new WrappedQuery(bQuery.build());
        wrapped.setCache(false);
        DocSet aclDocs = searcher.getDocSet(wrapped);
        HashSet<Long> aclsFound = new HashSet<Long>(aclDocs.size());
        NumericDocValues aclDocValues = searcher.getSlowAtomicReader().getNumericDocValues(QueryConstants.FIELD_ACLID);
        for (DocIterator it = aclDocs.iterator(); it.hasNext(); ) {
            int docID = it.nextDoc();
            // Obtain the ACL ID for this ACL doc.
            long aclID = aclDocValues.get(docID);
            aclsFound.add(getLong(aclID));
        }
        if (aclsFound.size() > 0) {
            for (LeafReaderContext readerContext : searcher.getSlowAtomicReader().leaves()) {
                int maxDoc = readerContext.reader().maxDoc();
                NumericDocValues fieldValues = DocValuesCache.getNumericDocValues(QueryConstants.FIELD_ACLID, readerContext.reader());
                if (fieldValues != null) {
                    for (int i = 0; i < maxDoc; i++) {
                        long aclID = fieldValues.get(i);
                        Long key = getLong(aclID);
                        if (aclsFound.contains(key)) {
                            deniedDocSet.add(readerContext.docBase + i);
                        }
                    }
                }
            }
        }
        // Exclude the ACL docs from the results, we only want real docs that match.
        // Probably not very efficient, what we really want is remove(docID)
        deniedDocSet = deniedDocSet.andNot(aclDocs);
        searcher.cacheInsert(CacheConstants.ALFRESCO_DENIED_CACHE, authorities, deniedDocSet);
    }
    // plus check of course, for presence in cache at start of method.
    return new SolrDenySetScorer2(weight, deniedDocSet, context, searcher);
}
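Both this scorer and SolrReaderSetScorer2 further down unpack the authorities argument the same way: the first character of the string is taken as the delimiter for the remaining authority names. A small, self-contained illustration; the delimiter and authority names below are invented for the example.
public class AuthoritiesFormatExample {
    public static void main(String[] args) {
        // The string carries its own delimiter as the first character.
        String authorities = ",joe,GROUP_EVERYONE"; // hypothetical value
        String[] auths = authorities.substring(1).split(authorities.substring(0, 1));
        for (String auth : auths) {
            System.out.println(auth); // prints "joe" then "GROUP_EVERYONE"
        }
    }
}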
Use of org.apache.solr.search.WrappedQuery in project SearchServices by Alfresco.
Class SolrAuthoritySetScorer, method createAuthoritySetScorer.
public static SolrAuthoritySetScorer createAuthoritySetScorer(Weight weight, LeafReaderContext context, SolrIndexSearcher searcher, String authorities) throws IOException {
    Properties p = searcher.getSchema().getResourceLoader().getCoreProperties();
    boolean doPermissionChecks = Boolean.parseBoolean(p.getProperty("alfresco.doPermissionChecks", "true"));
    Query key = new SolrAuthoritySetQuery(authorities);
    DocSet answer = (DocSet) searcher.cacheLookup(CacheConstants.ALFRESCO_AUTHORITY_CACHE, key);
    if (answer != null) {
        // Answer was in the cache, so return it.
        return new SolrAuthoritySetScorer(weight, answer, context, searcher);
    }
    // Answer was not in cache, so build the results, cache and return.
    String[] auths = authorities.substring(1).split(authorities.substring(0, 1));
    boolean hasGlobalRead = false;
    final HashSet<String> globalReaders = GlobalReaders.getReaders();
    for (String auth : auths) {
        if (globalReaders.contains(auth)) {
            hasGlobalRead = true;
            break;
        }
    }
    if (hasGlobalRead || (doPermissionChecks == false)) {
        // Can read all docs.
        WrappedQuery wrapped = new WrappedQuery(new MatchAllDocsQuery());
        wrapped.setCache(false);
        DocSet allDocs = searcher.getDocSet(wrapped);
        return new SolrAuthoritySetScorer(weight, allDocs, context, searcher);
    }
    // Docs for which the authorities have explicit read access.
    WrappedQuery wrapped = new WrappedQuery(new SolrReaderSetQuery(authorities));
    wrapped.setCache(false);
    DocSet readableDocSet = searcher.getDocSet(wrapped);
    // Are all doc owners granted read permissions at a global level?
    if (globalReaders.contains(PermissionService.OWNER_AUTHORITY)) {
        // Get the set of docs owned by the authorities (which they can therefore read).
        wrapped = new WrappedQuery(new SolrOwnerSetQuery(authorities));
        wrapped.setCache(false);
        DocSet authorityOwnedDocs = searcher.getDocSet(wrapped);
        // Final set of docs that the authorities can read.
        DocSet toCache = readableDocSet.union(authorityOwnedDocs);
        searcher.cacheInsert(CacheConstants.ALFRESCO_AUTHORITY_CACHE, key, toCache);
        return new SolrAuthoritySetScorer(weight, toCache, context, searcher);
    } else {
        // Docs that are readable by their owner (the READER field includes the OWNER authority).
        wrapped = new WrappedQuery(new SolrReaderSetQuery("|" + PermissionService.OWNER_AUTHORITY));
        wrapped.setCache(false);
        DocSet ownerReadableDocSet = searcher.getDocSet(wrapped);
        wrapped = new WrappedQuery(new SolrOwnerSetQuery(authorities));
        wrapped.setCache(false);
        DocSet authorityOwnedDocs = searcher.getDocSet(wrapped);
        // Docs where the authority is an owner and where owners have read rights.
        DocSet docsAuthorityOwnsAndCanRead = ownerReadableDocSet.intersection(authorityOwnedDocs);
        // Final set of docs that the authorities can read.
        DocSet toCache = readableDocSet.union(docsAuthorityOwnsAndCanRead);
        searcher.cacheInsert(CacheConstants.ALFRESCO_AUTHORITY_CACHE, key, toCache);
        return new SolrAuthoritySetScorer(weight, toCache, context, searcher);
    }
}
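The branching above reduces to a small piece of set algebra over three DocSets. The sketch below restates it with hypothetical class, method, and parameter names; it is a condensed view of the logic in createAuthoritySetScorer, not part of the Alfresco code.
import org.apache.solr.search.DocSet;
public final class AuthoritySetAlgebraSketch {
    /**
     * readable      - docs with an explicit READER entry for the authorities
     * owned         - docs owned by the authorities
     * ownerReadable - docs whose READER entries include the OWNER pseudo-authority
     */
    public static DocSet readableBy(DocSet readable, DocSet owned, DocSet ownerReadable,
                                    boolean ownerIsGlobalReader) {
        if (ownerIsGlobalReader) {
            // Owners are global readers, so everything the authorities own is readable.
            return readable.union(owned);
        }
        // Otherwise owned docs count only where the OWNER authority has read rights.
        return readable.union(owned.intersection(ownerReadable));
    }
}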
Use of org.apache.solr.search.WrappedQuery in project SearchServices by Alfresco.
Class SolrReaderSetScorer2, method createReaderSetScorer.
public static AbstractSolrCachingScorer createReaderSetScorer(Weight weight, LeafReaderContext context, SolrIndexSearcher searcher, String authorities, LeafReader reader) throws IOException {
    DocSet readableDocSet = (DocSet) searcher.cacheLookup(CacheConstants.ALFRESCO_READER_CACHE, authorities);
    if (readableDocSet == null) {
        String[] auths = authorities.substring(1).split(authorities.substring(0, 1));
        readableDocSet = new BitDocSet(new FixedBitSet(searcher.maxDoc()));
        BooleanQuery.Builder bQuery = new BooleanQuery.Builder();
        for (String current : auths) {
            bQuery.add(new TermQuery(new Term(QueryConstants.FIELD_READER, current)), Occur.SHOULD);
        }
        WrappedQuery wrapped = new WrappedQuery(bQuery.build());
        wrapped.setCache(false);
        DocSet aclDocs = searcher.getDocSet(wrapped);
        HashSet<Long> aclsFound = new HashSet<Long>(aclDocs.size());
        NumericDocValues aclDocValues = searcher.getSlowAtomicReader().getNumericDocValues(QueryConstants.FIELD_ACLID);
        for (DocIterator it = aclDocs.iterator(); it.hasNext(); ) {
            int docID = it.nextDoc();
            // Obtain the ACL ID for this ACL doc.
            long aclID = aclDocValues.get(docID);
            aclsFound.add(getLong(aclID));
        }
        if (aclsFound.size() > 0) {
            for (LeafReaderContext readerContext : searcher.getSlowAtomicReader().leaves()) {
                int maxDoc = readerContext.reader().maxDoc();
                NumericDocValues fieldValues = DocValuesCache.getNumericDocValues(QueryConstants.FIELD_ACLID, readerContext.reader());
                if (fieldValues != null) {
                    for (int i = 0; i < maxDoc; i++) {
                        long aclID = fieldValues.get(i);
                        Long key = getLong(aclID);
                        if (aclsFound.contains(key)) {
                            readableDocSet.add(readerContext.docBase + i);
                        }
                    }
                }
            }
        }
        // Exclude the ACL docs from the results, we only want real docs that match.
        // Probably not very efficient, what we really want is remove(docID)
        readableDocSet = readableDocSet.andNot(aclDocs);
        searcher.cacheInsert(CacheConstants.ALFRESCO_READER_CACHE, authorities, readableDocSet);
    }
    // plus check of course, for presence in cache at start of method.
    return new SolrReaderSetScorer2(weight, readableDocSet, context, searcher);
}