
Example 6 with PropertyRestriction

Use of org.apache.jackrabbit.oak.spi.query.Filter.PropertyRestriction in the jackrabbit-oak project by Apache.

From class LuceneIndex, method query().

@Override
public Cursor query(final IndexPlan plan, NodeState rootState) {
    final Filter filter = plan.getFilter();
    FullTextExpression ft = filter.getFullTextConstraint();
    final Set<String> relPaths = getRelativePaths(ft);
    if (relPaths.size() > 1) {
        return new MultiLuceneIndex(filter, rootState, relPaths).query();
    }
    final String parent = relPaths.size() == 0 ? "" : relPaths.iterator().next();
    // we only restrict non-full-text conditions if there is
    // no relative property in the full-text constraint
    final boolean nonFullTextConstraints = parent.isEmpty();
    final int parentDepth = getDepth(parent);
    QueryEngineSettings settings = filter.getQueryEngineSettings();
    Iterator<LuceneResultRow> itr = new AbstractIterator<LuceneResultRow>() {

        private final Deque<LuceneResultRow> queue = Queues.newArrayDeque();

        private final Set<String> seenPaths = Sets.newHashSet();

        private ScoreDoc lastDoc;

        private int nextBatchSize = LUCENE_QUERY_BATCH_SIZE;

        private boolean noDocs = false;

        private long lastSearchIndexerVersion;

        private int reloadCount;

        @Override
        protected LuceneResultRow computeNext() {
            while (!queue.isEmpty() || loadDocs()) {
                return queue.remove();
            }
            return endOfData();
        }

        private LuceneResultRow convertToRow(ScoreDoc doc, IndexSearcher searcher, String excerpt) throws IOException {
            IndexReader reader = searcher.getIndexReader();
            PathStoredFieldVisitor visitor = new PathStoredFieldVisitor();
            reader.document(doc.doc, visitor);
            String path = visitor.getPath();
            if (path != null) {
                if ("".equals(path)) {
                    path = "/";
                }
                if (!parent.isEmpty()) {
                    // TODO OAK-828 this breaks node aggregation
                    // get the base path
                    // ensure the path ends with the given
                    // relative path
                    // if (!path.endsWith("/" + parent)) {
                    // continue;
                    // }
                    path = getAncestorPath(path, parentDepth);
                    // avoid duplicate entries
                    if (seenPaths.contains(path)) {
                        return null;
                    }
                    seenPaths.add(path);
                }
                return new LuceneResultRow(path, doc.score, excerpt);
            }
            return null;
        }

        /**
         * Loads the Lucene documents in batches.
         * @return true if any document is loaded
         */
        private boolean loadDocs() {
            if (noDocs) {
                return false;
            }
            ScoreDoc lastDocToRecord = null;
            IndexNode indexNode = tracker.acquireIndexNode((String) plan.getAttribute(ATTR_INDEX_PATH));
            checkState(indexNode != null);
            try {
                IndexSearcher searcher = indexNode.getSearcher();
                LuceneRequestFacade luceneRequestFacade = getLuceneRequest(filter, searcher.getIndexReader(), nonFullTextConstraints, indexNode.getDefinition());
                if (luceneRequestFacade.getLuceneRequest() instanceof Query) {
                    Query query = (Query) luceneRequestFacade.getLuceneRequest();
                    TopDocs docs;
                    long time = System.currentTimeMillis();
                    checkForIndexVersionChange(searcher);
                    while (true) {
                        if (lastDoc != null) {
                            LOG.debug("loading the next {} entries for query {}", nextBatchSize, query);
                            docs = searcher.searchAfter(lastDoc, query, nextBatchSize);
                        } else {
                            LOG.debug("loading the first {} entries for query {}", nextBatchSize, query);
                            docs = searcher.search(query, nextBatchSize);
                        }
                        time = System.currentTimeMillis() - time;
                        LOG.debug("... took {} ms", time);
                        nextBatchSize = (int) Math.min(nextBatchSize * 2L, 100000);
                        PropertyRestriction restriction = filter.getPropertyRestriction(QueryImpl.REP_EXCERPT);
                        boolean addExcerpt = restriction != null && restriction.isNotNullRestriction();
                        Analyzer analyzer = indexNode.getDefinition().getAnalyzer();
                        if (addExcerpt) {
                            // setup highlighter
                            QueryScorer scorer = new QueryScorer(query);
                            scorer.setExpandMultiTermQuery(true);
                            highlighter.setFragmentScorer(scorer);
                        }
                        for (ScoreDoc doc : docs.scoreDocs) {
                            String excerpt = null;
                            if (addExcerpt) {
                                excerpt = getExcerpt(analyzer, searcher, doc);
                            }
                            LuceneResultRow row = convertToRow(doc, searcher, excerpt);
                            if (row != null) {
                                queue.add(row);
                            }
                            lastDocToRecord = doc;
                        }
                        if (queue.isEmpty() && docs.scoreDocs.length > 0) {
                            lastDoc = lastDocToRecord;
                        } else {
                            break;
                        }
                    }
                } else if (luceneRequestFacade.getLuceneRequest() instanceof SpellcheckHelper.SpellcheckQuery) {
                    SpellcheckHelper.SpellcheckQuery spellcheckQuery = (SpellcheckHelper.SpellcheckQuery) luceneRequestFacade.getLuceneRequest();
                    noDocs = true;
                    SuggestWord[] suggestWords = SpellcheckHelper.getSpellcheck(spellcheckQuery);
                    // ACL filter spellchecks
                    Collection<String> suggestedWords = new ArrayList<String>(suggestWords.length);
                    QueryParser qp = new QueryParser(Version.LUCENE_47, FieldNames.SUGGEST, indexNode.getDefinition().getAnalyzer());
                    for (SuggestWord suggestion : suggestWords) {
                        Query query = qp.createPhraseQuery(FieldNames.SUGGEST, suggestion.string);
                        TopDocs topDocs = searcher.search(query, 100);
                        if (topDocs.totalHits > 0) {
                            for (ScoreDoc doc : topDocs.scoreDocs) {
                                Document retrievedDoc = searcher.doc(doc.doc);
                                if (filter.isAccessible(retrievedDoc.get(FieldNames.PATH))) {
                                    suggestedWords.add(suggestion.string);
                                    break;
                                }
                            }
                        }
                    }
                    queue.add(new LuceneResultRow(suggestedWords));
                } else if (luceneRequestFacade.getLuceneRequest() instanceof SuggestHelper.SuggestQuery) {
                    SuggestHelper.SuggestQuery suggestQuery = (SuggestHelper.SuggestQuery) luceneRequestFacade.getLuceneRequest();
                    noDocs = true;
                    List<Lookup.LookupResult> lookupResults = SuggestHelper.getSuggestions(indexNode.getLookup(), suggestQuery);
                    // ACL filter suggestions
                    Collection<String> suggestedWords = new ArrayList<String>(lookupResults.size());
                    QueryParser qp = new QueryParser(Version.LUCENE_47, FieldNames.FULLTEXT, indexNode.getDefinition().getAnalyzer());
                    for (Lookup.LookupResult suggestion : lookupResults) {
                        Query query = qp.createPhraseQuery(FieldNames.FULLTEXT, suggestion.key.toString());
                        TopDocs topDocs = searcher.search(query, 100);
                        if (topDocs.totalHits > 0) {
                            for (ScoreDoc doc : topDocs.scoreDocs) {
                                Document retrievedDoc = searcher.doc(doc.doc);
                                if (filter.isAccessible(retrievedDoc.get(FieldNames.PATH))) {
                                    suggestedWords.add("{term=" + suggestion.key + ",weight=" + suggestion.value + "}");
                                    break;
                                }
                            }
                        }
                    }
                    queue.add(new LuceneResultRow(suggestedWords));
                }
            } catch (IOException e) {
                LOG.warn("query via {} failed.", LuceneIndex.this, e);
            } finally {
                indexNode.release();
            }
            if (lastDocToRecord != null) {
                this.lastDoc = lastDocToRecord;
            }
            return !queue.isEmpty();
        }

        private void checkForIndexVersionChange(IndexSearcher searcher) {
            long currentVersion = LucenePropertyIndex.getVersion(searcher);
            if (currentVersion != lastSearchIndexerVersion && lastDoc != null) {
                reloadCount++;
                if (reloadCount > MAX_RELOAD_COUNT) {
                    LOG.error("More than {} index version changes detected for query {}", MAX_RELOAD_COUNT, plan);
                    throw new IllegalStateException("Too many version changes");
                }
                lastDoc = null;
                LOG.debug("Change in index version detected {} => {}. Query would be performed without " + "offset; reload {}", currentVersion, lastSearchIndexerVersion, reloadCount);
            }
            this.lastSearchIndexerVersion = currentVersion;
        }
    };
    SizeEstimator sizeEstimator = new SizeEstimator() {

        @Override
        public long getSize() {
            IndexNode indexNode = tracker.acquireIndexNode((String) plan.getAttribute(ATTR_INDEX_PATH));
            checkState(indexNode != null);
            try {
                IndexSearcher searcher = indexNode.getSearcher();
                LuceneRequestFacade luceneRequestFacade = getLuceneRequest(filter, searcher.getIndexReader(), nonFullTextConstraints, indexNode.getDefinition());
                if (luceneRequestFacade.getLuceneRequest() instanceof Query) {
                    Query query = (Query) luceneRequestFacade.getLuceneRequest();
                    TotalHitCountCollector collector = new TotalHitCountCollector();
                    searcher.search(query, collector);
                    int totalHits = collector.getTotalHits();
                    LOG.debug("Estimated size for query {} is {}", query, totalHits);
                    return totalHits;
                }
                LOG.debug("Estimated size: not a Query: {}", luceneRequestFacade.getLuceneRequest());
            } catch (IOException e) {
                LOG.warn("query via {} failed.", LuceneIndex.this, e);
            } finally {
                indexNode.release();
            }
            return -1;
        }
    };
    return new LucenePathCursor(itr, settings, sizeEstimator);
}
Also used : IndexSearcher(org.apache.lucene.search.IndexSearcher) Set(java.util.Set) HashSet(java.util.HashSet) Query(org.apache.lucene.search.Query) PhraseQuery(org.apache.lucene.search.PhraseQuery) PrefixQuery(org.apache.lucene.search.PrefixQuery) MatchAllDocsQuery(org.apache.lucene.search.MatchAllDocsQuery) WildcardQuery(org.apache.lucene.search.WildcardQuery) TermQuery(org.apache.lucene.search.TermQuery) BooleanQuery(org.apache.lucene.search.BooleanQuery) TermRangeQuery(org.apache.lucene.search.TermRangeQuery) QueryEngineSettings(org.apache.jackrabbit.oak.query.QueryEngineSettings) ArrayList(java.util.ArrayList) Analyzer(org.apache.lucene.analysis.Analyzer) Document(org.apache.lucene.document.Document) ScoreDoc(org.apache.lucene.search.ScoreDoc) TopDocs(org.apache.lucene.search.TopDocs) Lookup(org.apache.lucene.search.suggest.Lookup) TotalHitCountCollector(org.apache.lucene.search.TotalHitCountCollector) AbstractIterator(com.google.common.collect.AbstractIterator) PropertyRestriction(org.apache.jackrabbit.oak.spi.query.Filter.PropertyRestriction) QueryScorer(org.apache.lucene.search.highlight.QueryScorer) SuggestHelper(org.apache.jackrabbit.oak.plugins.index.lucene.util.SuggestHelper) IOException(java.io.IOException) Deque(java.util.Deque) QueryParser(org.apache.lucene.queryparser.classic.QueryParser) Filter(org.apache.jackrabbit.oak.spi.query.Filter) FullTextExpression(org.apache.jackrabbit.oak.query.fulltext.FullTextExpression) IndexReader(org.apache.lucene.index.IndexReader) SuggestWord(org.apache.lucene.search.spell.SuggestWord) SpellcheckHelper(org.apache.jackrabbit.oak.plugins.index.lucene.util.SpellcheckHelper) Collection(java.util.Collection)
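
The core pattern in Example 6 is deep paging: results are pulled from Lucene in geometrically growing batches via IndexSearcher.searchAfter and replayed lazily through a Guava AbstractIterator. The following is a minimal sketch of that pattern in isolation; it assumes an already-open IndexSearcher, uses an illustrative starting batch size, and omits Oak-specific concerns such as path conversion, deduplication, excerpts, and IndexNode acquisition.

import java.io.IOException;
import java.util.ArrayDeque;
import java.util.Deque;

import com.google.common.collect.AbstractIterator;

import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TopDocs;

/**
 * Minimal sketch of the batched-cursor pattern from Example 6. The starting
 * batch size and the 100000 cap are illustrative assumptions, not Oak constants.
 */
class BatchedDocIterator extends AbstractIterator<ScoreDoc> {

    private final IndexSearcher searcher;
    private final Query query;
    private final Deque<ScoreDoc> queue = new ArrayDeque<ScoreDoc>();

    private ScoreDoc lastDoc;          // resume point for searchAfter
    private int nextBatchSize = 50;    // grows geometrically, like LUCENE_QUERY_BATCH_SIZE

    BatchedDocIterator(IndexSearcher searcher, Query query) {
        this.searcher = searcher;
        this.query = query;
    }

    @Override
    protected ScoreDoc computeNext() {
        if (!queue.isEmpty() || loadBatch()) {
            return queue.remove();
        }
        return endOfData();
    }

    private boolean loadBatch() {
        try {
            // first batch is a plain search from the top;
            // later batches resume right after the last returned doc
            TopDocs docs = (lastDoc == null)
                    ? searcher.search(query, nextBatchSize)
                    : searcher.searchAfter(lastDoc, query, nextBatchSize);
            for (ScoreDoc doc : docs.scoreDocs) {
                queue.add(doc);
                lastDoc = doc;
            }
            nextBatchSize = (int) Math.min(nextBatchSize * 2L, 100000);
            return !queue.isEmpty();
        } catch (IOException e) {
            throw new IllegalStateException("batch load failed", e);
        }
    }
}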

Example 7 with PropertyRestriction

Use of org.apache.jackrabbit.oak.spi.query.Filter.PropertyRestriction in the jackrabbit-oak project by Apache.

From class LuceneIndex, method getLuceneRequest().

/**
 * Get the Lucene query for the given filter.
 *
 * @param filter the filter, including the full-text constraint
 * @param reader the Lucene reader
 * @param nonFullTextConstraints whether non-full-text constraints (such as
 *            path, node type, and so on) should be added to the Lucene query
 * @param indexDefinition the node state that contains the index definition
 * @return the Lucene query
 */
private static LuceneRequestFacade getLuceneRequest(Filter filter, IndexReader reader, boolean nonFullTextConstraints, IndexDefinition indexDefinition) {
    List<Query> qs = new ArrayList<Query>();
    Analyzer analyzer = indexDefinition.getAnalyzer();
    FullTextExpression ft = filter.getFullTextConstraint();
    if (ft == null) {
    // there might be no full-text constraint
    // when using the LowCostLuceneIndexProvider
    // which is used for testing
    } else {
        qs.add(getFullTextQuery(ft, analyzer, reader));
    }
    PropertyRestriction pr = filter.getPropertyRestriction(NATIVE_QUERY_FUNCTION);
    if (pr != null) {
        String query = String.valueOf(pr.first.getValue(pr.first.getType()));
        QueryParser queryParser = new QueryParser(VERSION, "", indexDefinition.getAnalyzer());
        if (query.startsWith("mlt?")) {
            String mltQueryString = query.replace("mlt?", "");
            if (reader != null) {
                Query moreLikeThis = MoreLikeThisHelper.getMoreLikeThis(reader, analyzer, mltQueryString);
                if (moreLikeThis != null) {
                    qs.add(moreLikeThis);
                }
            }
        }
        if (query.startsWith("spellcheck?")) {
            String spellcheckQueryString = query.replace("spellcheck?", "");
            if (reader != null) {
                return new LuceneRequestFacade<SpellcheckHelper.SpellcheckQuery>(SpellcheckHelper.getSpellcheckQuery(spellcheckQueryString, reader));
            }
        } else if (query.startsWith("suggest?")) {
            String suggestQueryString = query.replace("suggest?", "");
            if (reader != null) {
                return new LuceneRequestFacade<SuggestHelper.SuggestQuery>(SuggestHelper.getSuggestQuery(suggestQueryString));
            }
        } else {
            try {
                qs.add(queryParser.parse(query));
            } catch (ParseException e) {
                throw new RuntimeException(e);
            }
        }
    } else if (nonFullTextConstraints) {
        addNonFullTextConstraints(qs, filter, reader, analyzer, indexDefinition);
    }
    if (qs.size() == 0) {
        return new LuceneRequestFacade<Query>(new MatchAllDocsQuery());
    }
    return LucenePropertyIndex.performAdditionalWraps(qs);
}
Also used : PropertyRestriction(org.apache.jackrabbit.oak.spi.query.Filter.PropertyRestriction) Query(org.apache.lucene.search.Query) PhraseQuery(org.apache.lucene.search.PhraseQuery) PrefixQuery(org.apache.lucene.search.PrefixQuery) MatchAllDocsQuery(org.apache.lucene.search.MatchAllDocsQuery) WildcardQuery(org.apache.lucene.search.WildcardQuery) TermQuery(org.apache.lucene.search.TermQuery) BooleanQuery(org.apache.lucene.search.BooleanQuery) TermRangeQuery(org.apache.lucene.search.TermRangeQuery) ArrayList(java.util.ArrayList) Analyzer(org.apache.lucene.analysis.Analyzer) MatchAllDocsQuery(org.apache.lucene.search.MatchAllDocsQuery) QueryParser(org.apache.lucene.queryparser.classic.QueryParser) FullTextExpression(org.apache.jackrabbit.oak.query.fulltext.FullTextExpression) SpellcheckHelper(org.apache.jackrabbit.oak.plugins.index.lucene.util.SpellcheckHelper) ParseException(org.apache.lucene.queryparser.classic.ParseException)
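
Example 7 dispatches on the value of the NATIVE_QUERY_FUNCTION property restriction by prefix: "mlt?" routes to MoreLikeThis, "spellcheck?" and "suggest?" return dedicated request facades, and anything else is parsed as a plain Lucene query. As a hedged illustration of where such a restriction typically originates, the sketch below issues a JCR-SQL2 query that uses Oak's native() function; the content path and the MoreLikeThis parameter string are placeholders, and the exact parameter syntax is defined by MoreLikeThisHelper rather than by this example.

import javax.jcr.RepositoryException;
import javax.jcr.Session;
import javax.jcr.query.Query;
import javax.jcr.query.QueryManager;
import javax.jcr.query.QueryResult;

/**
 * Illustrative only: the string handed to native() is what later surfaces in
 * getLuceneRequest() as the NATIVE_QUERY_FUNCTION property restriction, and its
 * "mlt?" prefix selects the MoreLikeThis branch. The content path is made up.
 */
public class NativeQuerySketch {

    public static QueryResult runMoreLikeThis(Session session) throws RepositoryException {
        QueryManager qm = session.getWorkspace().getQueryManager();
        // JCR-SQL2 with Oak's native query function; "lucene" names the target index type
        String sql = "select [jcr:path] from [nt:base] "
                + "where native('lucene', 'mlt?stream.body=/content/sample&mlt.fl=:path')";
        Query query = qm.createQuery(sql, Query.JCR_SQL2);
        return query.execute();
    }
}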

Example 8 with PropertyRestriction

Use of org.apache.jackrabbit.oak.spi.query.Filter.PropertyRestriction in the jackrabbit-oak project by Apache.

From class LucenePropertyIndex, method addNonFullTextConstraints().

private static void addNonFullTextConstraints(List<Query> qs, IndexPlan plan, IndexReader reader) {
    Filter filter = plan.getFilter();
    PlanResult planResult = getPlanResult(plan);
    IndexDefinition defn = planResult.indexDefinition;
    if (!filter.matchesAllTypes()) {
        addNodeTypeConstraints(planResult.indexingRule, qs, filter);
    }
    String path = getPathRestriction(plan);
    switch(filter.getPathRestriction()) {
        case ALL_CHILDREN:
            if (defn.evaluatePathRestrictions()) {
                if ("/".equals(path)) {
                    break;
                }
                qs.add(new TermQuery(newAncestorTerm(path)));
            }
            break;
        case DIRECT_CHILDREN:
            if (defn.evaluatePathRestrictions()) {
                BooleanQuery bq = new BooleanQuery();
                bq.add(new BooleanClause(new TermQuery(newAncestorTerm(path)), BooleanClause.Occur.MUST));
                bq.add(new BooleanClause(newDepthQuery(path), BooleanClause.Occur.MUST));
                qs.add(bq);
            }
            break;
        case EXACT:
            qs.add(new TermQuery(newPathTerm(path)));
            break;
        case PARENT:
            if (denotesRoot(path)) {
                // there's no parent of the root node
                // we add a path that can not possibly occur because there
                // is no way to say "match no documents" in Lucene
                qs.add(new TermQuery(new Term(FieldNames.PATH, "///")));
            } else {
                qs.add(new TermQuery(newPathTerm(getParentPath(path))));
            }
            break;
        case NO_RESTRICTION:
            break;
    }
    for (PropertyRestriction pr : filter.getPropertyRestrictions()) {
        String name = pr.propertyName;
        if (QueryImpl.REP_EXCERPT.equals(name) || QueryImpl.OAK_SCORE_EXPLANATION.equals(name) || QueryImpl.REP_FACET.equals(name)) {
            continue;
        }
        if (QueryConstants.RESTRICTION_LOCAL_NAME.equals(name)) {
            if (planResult.evaluateNodeNameRestriction()) {
                Query q = createNodeNameQuery(pr);
                if (q != null) {
                    qs.add(q);
                }
            }
            continue;
        }
        if (pr.first != null && pr.first.equals(pr.last) && pr.firstIncluding && pr.lastIncluding) {
            String first = pr.first.getValue(STRING);
            first = first.replace("\\", "");
            if (JCR_PATH.equals(name)) {
                qs.add(new TermQuery(newPathTerm(first)));
                continue;
            } else if ("*".equals(name)) {
                //TODO Revisit reference constraint. For performant impl
                //references need to be indexed in a different manner
                addReferenceConstraint(first, qs, reader);
                continue;
            }
        }
        PropertyDefinition pd = planResult.getPropDefn(pr);
        if (pd == null) {
            continue;
        }
        Query q = createQuery(pr, pd);
        if (q != null) {
            qs.add(q);
        }
    }
}
Also used : BooleanClause(org.apache.lucene.search.BooleanClause) TermQuery(org.apache.lucene.search.TermQuery) BooleanQuery(org.apache.lucene.search.BooleanQuery) PropertyRestriction(org.apache.jackrabbit.oak.spi.query.Filter.PropertyRestriction) PlanResult(org.apache.jackrabbit.oak.plugins.index.lucene.IndexPlanner.PlanResult) Query(org.apache.lucene.search.Query) MatchAllDocsQuery(org.apache.lucene.search.MatchAllDocsQuery) WildcardQuery(org.apache.lucene.search.WildcardQuery) NumericRangeQuery(org.apache.lucene.search.NumericRangeQuery) CustomScoreQuery(org.apache.lucene.queries.CustomScoreQuery) PrefixQuery(org.apache.lucene.search.PrefixQuery) TermQuery(org.apache.lucene.search.TermQuery) BooleanQuery(org.apache.lucene.search.BooleanQuery) TermRangeQuery(org.apache.lucene.search.TermRangeQuery) Filter(org.apache.jackrabbit.oak.spi.query.Filter) Term(org.apache.lucene.index.Term) TermFactory.newAncestorTerm(org.apache.jackrabbit.oak.plugins.index.lucene.TermFactory.newAncestorTerm) TermFactory.newPathTerm(org.apache.jackrabbit.oak.plugins.index.lucene.TermFactory.newPathTerm) FullTextTerm(org.apache.jackrabbit.oak.query.fulltext.FullTextTerm)
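
Example 8 translates filter path restrictions into Lucene term queries: ALL_CHILDREN becomes a single ancestor term, DIRECT_CHILDREN combines an ancestor term with a depth clause, and EXACT and PARENT become path terms. Below is a simplified, self-contained sketch of the DIRECT_CHILDREN combination using stock Lucene 4.x classes; the field names ":ancestors" and ":depth" are assumptions standing in for Oak's TermFactory and FieldNames helpers, and the depth field is assumed to be indexed as an integer.

import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.NumericRangeQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;

/**
 * Simplified sketch of the DIRECT_CHILDREN branch above: match documents whose
 * ancestor field contains the parent path AND whose depth equals parentDepth + 1.
 * The ":ancestors" and ":depth" field names are illustrative assumptions.
 */
final class PathQuerySketch {

    static Query directChildren(String parentPath) {
        int childDepth = depthOf(parentPath) + 1;
        BooleanQuery bq = new BooleanQuery();
        bq.add(new BooleanClause(
                new TermQuery(new Term(":ancestors", parentPath)),
                BooleanClause.Occur.MUST));
        // exact-depth clause, analogous to newDepthQuery(path) in the example;
        // assumes the depth was indexed as a numeric (int) field
        bq.add(new BooleanClause(
                NumericRangeQuery.newIntRange(":depth", childDepth, childDepth, true, true),
                BooleanClause.Occur.MUST));
        return bq;
    }

    private static int depthOf(String path) {
        // "/" has depth 0; "/a/b" has depth 2
        return "/".equals(path) ? 0 : path.split("/").length - 1;
    }
}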

Example 9 with PropertyRestriction

Use of org.apache.jackrabbit.oak.spi.query.Filter.PropertyRestriction in the jackrabbit-oak project by Apache.

From class LucenePropertyIndex, method query().

@Override
public Cursor query(final IndexPlan plan, NodeState rootState) {
    final Filter filter = plan.getFilter();
    final Sort sort = getSort(plan);
    final PlanResult pr = getPlanResult(plan);
    QueryEngineSettings settings = filter.getQueryEngineSettings();
    Iterator<LuceneResultRow> itr = new AbstractIterator<LuceneResultRow>() {

        private final Deque<LuceneResultRow> queue = Queues.newArrayDeque();

        private final Set<String> seenPaths = Sets.newHashSet();

        private ScoreDoc lastDoc;

        private int nextBatchSize = LUCENE_QUERY_BATCH_SIZE;

        private boolean noDocs = false;

        private IndexSearcher indexSearcher;

        private int indexNodeId = -1;

        @Override
        protected LuceneResultRow computeNext() {
            while (!queue.isEmpty() || loadDocs()) {
                return queue.remove();
            }
            releaseSearcher();
            return endOfData();
        }

        private LuceneResultRow convertToRow(ScoreDoc doc, IndexSearcher searcher, String excerpt, Facets facets, String explanation) throws IOException {
            IndexReader reader = searcher.getIndexReader();
            //TODO Look into usage of field cache for retrieving the path
            //instead of reading via reader if no of docs in index are limited
            PathStoredFieldVisitor visitor = new PathStoredFieldVisitor();
            reader.document(doc.doc, visitor);
            String path = visitor.getPath();
            if (path != null) {
                if ("".equals(path)) {
                    path = "/";
                }
                if (pr.isPathTransformed()) {
                    String originalPath = path;
                    path = pr.transformPath(path);
                    if (path == null) {
                        LOG.trace("Ignoring path {} : Transformation returned null", originalPath);
                        return null;
                    }
                    // avoid duplicate entries
                    if (seenPaths.contains(path)) {
                        LOG.trace("Ignoring path {} : Duplicate post transformation", originalPath);
                        return null;
                    }
                    seenPaths.add(path);
                }
                LOG.trace("Matched path {}", path);
                return new LuceneResultRow(path, doc.score, excerpt, facets, explanation);
            }
            return null;
        }

        /**
         * Loads the Lucene documents in batches.
         * @return true if any document is loaded
         */
        private boolean loadDocs() {
            if (noDocs) {
                return false;
            }
            ScoreDoc lastDocToRecord = null;
            final IndexNode indexNode = acquireIndexNode(plan);
            checkState(indexNode != null);
            try {
                IndexSearcher searcher = getCurrentSearcher(indexNode);
                LuceneRequestFacade luceneRequestFacade = getLuceneRequest(plan, augmentorFactory, searcher.getIndexReader());
                if (luceneRequestFacade.getLuceneRequest() instanceof Query) {
                    Query query = (Query) luceneRequestFacade.getLuceneRequest();
                    CustomScoreQuery customScoreQuery = getCustomScoreQuery(plan, query);
                    if (customScoreQuery != null) {
                        query = customScoreQuery;
                    }
                    TopDocs docs;
                    long start = PERF_LOGGER.start();
                    while (true) {
                        if (lastDoc != null) {
                            LOG.debug("loading the next {} entries for query {}", nextBatchSize, query);
                            if (sort == null) {
                                docs = searcher.searchAfter(lastDoc, query, nextBatchSize);
                            } else {
                                docs = searcher.searchAfter(lastDoc, query, nextBatchSize, sort);
                            }
                        } else {
                            LOG.debug("loading the first {} entries for query {}", nextBatchSize, query);
                            if (sort == null) {
                                docs = searcher.search(query, nextBatchSize);
                            } else {
                                docs = searcher.search(query, nextBatchSize, sort);
                            }
                        }
                        PERF_LOGGER.end(start, -1, "{} ...", docs.scoreDocs.length);
                        nextBatchSize = (int) Math.min(nextBatchSize * 2L, 100000);
                        long f = PERF_LOGGER.start();
                        Facets facets = FacetHelper.getFacets(searcher, query, docs, plan, indexNode.getDefinition().isSecureFacets());
                        PERF_LOGGER.end(f, -1, "facets retrieved");
                        PropertyRestriction restriction = filter.getPropertyRestriction(QueryImpl.REP_EXCERPT);
                        boolean addExcerpt = restriction != null && restriction.isNotNullRestriction();
                        restriction = filter.getPropertyRestriction(QueryImpl.OAK_SCORE_EXPLANATION);
                        boolean addExplain = restriction != null && restriction.isNotNullRestriction();
                        Analyzer analyzer = indexNode.getDefinition().getAnalyzer();
                        FieldInfos mergedFieldInfos = null;
                        if (addExcerpt) {
                            // setup highlighter
                            QueryScorer scorer = new QueryScorer(query);
                            scorer.setExpandMultiTermQuery(true);
                            highlighter.setFragmentScorer(scorer);
                            mergedFieldInfos = MultiFields.getMergedFieldInfos(searcher.getIndexReader());
                        }
                        for (ScoreDoc doc : docs.scoreDocs) {
                            String excerpt = null;
                            if (addExcerpt) {
                                excerpt = getExcerpt(query, analyzer, searcher, doc, mergedFieldInfos);
                            }
                            String explanation = null;
                            if (addExplain) {
                                explanation = searcher.explain(query, doc.doc).toString();
                            }
                            LuceneResultRow row = convertToRow(doc, searcher, excerpt, facets, explanation);
                            if (row != null) {
                                queue.add(row);
                            }
                            lastDocToRecord = doc;
                        }
                        if (queue.isEmpty() && docs.scoreDocs.length > 0) {
                            //queue is still empty but more results can be fetched
                            //from Lucene so still continue
                            lastDoc = lastDocToRecord;
                        } else {
                            break;
                        }
                    }
                } else if (luceneRequestFacade.getLuceneRequest() instanceof SpellcheckHelper.SpellcheckQuery) {
                    String aclCheckField = indexNode.getDefinition().isFullTextEnabled() ? FieldNames.FULLTEXT : FieldNames.SPELLCHECK;
                    noDocs = true;
                    SpellcheckHelper.SpellcheckQuery spellcheckQuery = (SpellcheckHelper.SpellcheckQuery) luceneRequestFacade.getLuceneRequest();
                    SuggestWord[] suggestWords = SpellcheckHelper.getSpellcheck(spellcheckQuery);
                    // ACL filter spellchecks
                    QueryParser qp = new QueryParser(Version.LUCENE_47, aclCheckField, indexNode.getDefinition().getAnalyzer());
                    for (SuggestWord suggestion : suggestWords) {
                        Query query = qp.createPhraseQuery(aclCheckField, QueryParserBase.escape(suggestion.string));
                        query = addDescendantClauseIfRequired(query, plan);
                        TopDocs topDocs = searcher.search(query, 100);
                        if (topDocs.totalHits > 0) {
                            for (ScoreDoc doc : topDocs.scoreDocs) {
                                Document retrievedDoc = searcher.doc(doc.doc);
                                String prefix = filter.getPath();
                                if (prefix.length() == 1) {
                                    prefix = "";
                                }
                                if (filter.isAccessible(prefix + retrievedDoc.get(FieldNames.PATH))) {
                                    queue.add(new LuceneResultRow(suggestion.string));
                                    break;
                                }
                            }
                        }
                    }
                } else if (luceneRequestFacade.getLuceneRequest() instanceof SuggestHelper.SuggestQuery) {
                    SuggestHelper.SuggestQuery suggestQuery = (SuggestHelper.SuggestQuery) luceneRequestFacade.getLuceneRequest();
                    noDocs = true;
                    List<Lookup.LookupResult> lookupResults = SuggestHelper.getSuggestions(indexNode.getLookup(), suggestQuery);
                    QueryParser qp = new QueryParser(Version.LUCENE_47, FieldNames.SUGGEST, indexNode.getDefinition().isSuggestAnalyzed() ? indexNode.getDefinition().getAnalyzer() : SuggestHelper.getAnalyzer());
                    // ACL filter suggestions
                    for (Lookup.LookupResult suggestion : lookupResults) {
                        Query query = qp.parse("\"" + QueryParserBase.escape(suggestion.key.toString()) + "\"");
                        query = addDescendantClauseIfRequired(query, plan);
                        TopDocs topDocs = searcher.search(query, 100);
                        if (topDocs.totalHits > 0) {
                            for (ScoreDoc doc : topDocs.scoreDocs) {
                                Document retrievedDoc = searcher.doc(doc.doc);
                                String prefix = filter.getPath();
                                if (prefix.length() == 1) {
                                    prefix = "";
                                }
                                if (filter.isAccessible(prefix + retrievedDoc.get(FieldNames.PATH))) {
                                    queue.add(new LuceneResultRow(suggestion.key.toString(), suggestion.value));
                                    break;
                                }
                            }
                        }
                    }
                }
            } catch (Exception e) {
                LOG.warn("query via {} failed.", LucenePropertyIndex.this, e);
            } finally {
                indexNode.release();
            }
            if (lastDocToRecord != null) {
                this.lastDoc = lastDocToRecord;
            }
            return !queue.isEmpty();
        }

        private IndexSearcher getCurrentSearcher(IndexNode indexNode) {
            //the searcher would be refreshed as done earlier
            if (indexNodeId != indexNode.getIndexNodeId()) {
                //if already initialized then log about change
                if (indexNodeId > 0) {
                    LOG.debug("Change in index version detected. Query would be performed without offset");
                }
                //TODO Add testcase for this scenario
                indexSearcher = indexNode.getSearcher();
                indexNodeId = indexNode.getIndexNodeId();
                lastDoc = null;
            }
            return indexSearcher;
        }

        private void releaseSearcher() {
            //For now nullifying it.
            indexSearcher = null;
        }
    };
    SizeEstimator sizeEstimator = new SizeEstimator() {

        @Override
        public long getSize() {
            IndexNode indexNode = acquireIndexNode(plan);
            checkState(indexNode != null);
            try {
                IndexSearcher searcher = indexNode.getSearcher();
                LuceneRequestFacade luceneRequestFacade = getLuceneRequest(plan, augmentorFactory, searcher.getIndexReader());
                if (luceneRequestFacade.getLuceneRequest() instanceof Query) {
                    Query query = (Query) luceneRequestFacade.getLuceneRequest();
                    TotalHitCountCollector collector = new TotalHitCountCollector();
                    searcher.search(query, collector);
                    int totalHits = collector.getTotalHits();
                    LOG.debug("Estimated size for query {} is {}", query, totalHits);
                    return totalHits;
                }
                LOG.debug("estimate size: not a Query: {}", luceneRequestFacade.getLuceneRequest());
            } catch (IOException e) {
                LOG.warn("query via {} failed.", LucenePropertyIndex.this, e);
            } finally {
                indexNode.release();
            }
            return -1;
        }
    };
    return new LucenePathCursor(itr, plan, settings, sizeEstimator);
}
Also used : IndexSearcher(org.apache.lucene.search.IndexSearcher) PlanResult(org.apache.jackrabbit.oak.plugins.index.lucene.IndexPlanner.PlanResult) Set(java.util.Set) Facets(org.apache.lucene.facet.Facets) Query(org.apache.lucene.search.Query) MatchAllDocsQuery(org.apache.lucene.search.MatchAllDocsQuery) WildcardQuery(org.apache.lucene.search.WildcardQuery) NumericRangeQuery(org.apache.lucene.search.NumericRangeQuery) CustomScoreQuery(org.apache.lucene.queries.CustomScoreQuery) PrefixQuery(org.apache.lucene.search.PrefixQuery) TermQuery(org.apache.lucene.search.TermQuery) BooleanQuery(org.apache.lucene.search.BooleanQuery) TermRangeQuery(org.apache.lucene.search.TermRangeQuery) QueryEngineSettings(org.apache.jackrabbit.oak.query.QueryEngineSettings) Analyzer(org.apache.lucene.analysis.Analyzer) Document(org.apache.lucene.document.Document) ScoreDoc(org.apache.lucene.search.ScoreDoc) TopDocs(org.apache.lucene.search.TopDocs) CustomScoreQuery(org.apache.lucene.queries.CustomScoreQuery) Sort(org.apache.lucene.search.Sort) Lookup(org.apache.lucene.search.suggest.Lookup) TotalHitCountCollector(org.apache.lucene.search.TotalHitCountCollector) AbstractIterator(com.google.common.collect.AbstractIterator) PropertyRestriction(org.apache.jackrabbit.oak.spi.query.Filter.PropertyRestriction) QueryScorer(org.apache.lucene.search.highlight.QueryScorer) SuggestHelper(org.apache.jackrabbit.oak.plugins.index.lucene.util.SuggestHelper) IOException(java.io.IOException) Deque(java.util.Deque) QueryNodeException(org.apache.lucene.queryparser.flexible.core.QueryNodeException) ParseException(org.apache.lucene.queryparser.classic.ParseException) IOException(java.io.IOException) InvalidTokenOffsetsException(org.apache.lucene.search.highlight.InvalidTokenOffsetsException) FieldInfos(org.apache.lucene.index.FieldInfos) StandardQueryParser(org.apache.lucene.queryparser.flexible.standard.StandardQueryParser) QueryParser(org.apache.lucene.queryparser.classic.QueryParser) Filter(org.apache.jackrabbit.oak.spi.query.Filter) IndexReader(org.apache.lucene.index.IndexReader) SuggestWord(org.apache.lucene.search.spell.SuggestWord) SpellcheckHelper(org.apache.jackrabbit.oak.plugins.index.lucene.util.SpellcheckHelper)
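
The SizeEstimator used in Examples 6 and 9 does not page through results at all; it asks Lucene for a bare hit count via TotalHitCountCollector, which neither scores nor collects documents, so the estimate stays cheap even for large result sets. A minimal sketch of that counting pattern, with the IndexNode acquire/release bookkeeping left out:

import java.io.IOException;

import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TotalHitCountCollector;

/**
 * Minimal sketch of the size-estimation pattern from the SizeEstimator in
 * Examples 6 and 9. Error handling mirrors the examples: -1 signals "unknown".
 */
final class HitCountSketch {

    static long countHits(IndexSearcher searcher, Query query) {
        TotalHitCountCollector collector = new TotalHitCountCollector();
        try {
            // counts matches without loading or scoring documents
            searcher.search(query, collector);
            return collector.getTotalHits();
        } catch (IOException e) {
            return -1;
        }
    }
}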

Aggregations

PropertyRestriction (org.apache.jackrabbit.oak.spi.query.Filter.PropertyRestriction): 9 usages
PrefixQuery (org.apache.lucene.search.PrefixQuery): 6 usages
TermQuery (org.apache.lucene.search.TermQuery): 6 usages
WildcardQuery (org.apache.lucene.search.WildcardQuery): 6 usages
BooleanQuery (org.apache.lucene.search.BooleanQuery): 5 usages
MatchAllDocsQuery (org.apache.lucene.search.MatchAllDocsQuery): 5 usages
Query (org.apache.lucene.search.Query): 5 usages
TermRangeQuery (org.apache.lucene.search.TermRangeQuery): 5 usages
FullTextExpression (org.apache.jackrabbit.oak.query.fulltext.FullTextExpression): 4 usages
Filter (org.apache.jackrabbit.oak.spi.query.Filter): 4 usages
Analyzer (org.apache.lucene.analysis.Analyzer): 4 usages
QueryParser (org.apache.lucene.queryparser.classic.QueryParser): 4 usages
ArrayList (java.util.ArrayList): 3 usages
PlanResult (org.apache.jackrabbit.oak.plugins.index.lucene.IndexPlanner.PlanResult): 3 usages
SpellcheckHelper (org.apache.jackrabbit.oak.plugins.index.lucene.util.SpellcheckHelper): 3 usages
SuggestHelper (org.apache.jackrabbit.oak.plugins.index.lucene.util.SuggestHelper): 3 usages
CustomScoreQuery (org.apache.lucene.queries.CustomScoreQuery): 3 usages
ParseException (org.apache.lucene.queryparser.classic.ParseException): 3 usages
NumericRangeQuery (org.apache.lucene.search.NumericRangeQuery): 3 usages
AbstractIterator (com.google.common.collect.AbstractIterator): 2 usages