Use of org.opensearch.search.internal.ContextIndexSearcher in project OpenSearch by opensearch-project.
The class AbstractStringTermsAggregator, method buildEmptySignificantTermsAggregation.
protected SignificantStringTerms buildEmptySignificantTermsAggregation(long subsetSize, SignificanceHeuristic significanceHeuristic) {
    // We need to account for the significance of a miss in our global stats - provide corpus size as context
    ContextIndexSearcher searcher = context.searcher();
    IndexReader topReader = searcher.getIndexReader();
    int supersetSize = topReader.numDocs();
    return new SignificantStringTerms(name, bucketCountThresholds.getRequiredSize(), bucketCountThresholds.getMinDocCount(),
        metadata(), format, subsetSize, supersetSize, significanceHeuristic, emptyList());
}
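For context, the supersetSize computed above (topReader.numDocs()) is the corpus-wide document count that a significance heuristic compares each bucket's subset counts against. The following is a minimal sketch of that comparison, not code from the project: the counts are made-up placeholders, and it assumes the usual heuristic API of org.opensearch.search.aggregations.bucket.terms.heuristic.JLHScore with getScore(subsetFreq, subsetSize, supersetFreq, supersetSize).

// Illustrative sketch only: placeholder counts, not values read from a real index.
long subsetFreq = 42;        // docs containing the term inside the bucket
long subsetSize = 500;       // total docs in the bucket (the subsetSize argument above)
long supersetFreq = 1_000;   // docs containing the term anywhere in the index
long supersetSize = 100_000; // total docs in the index, i.e. topReader.numDocs()
double score = new JLHScore().getScore(subsetFreq, subsetSize, supersetFreq, supersetSize);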
Use of org.opensearch.search.internal.ContextIndexSearcher in project OpenSearch by opensearch-project.
The class SearchCancellationTests, method testCancellableCollector.
public void testCancellableCollector() throws IOException {
    TotalHitCountCollector collector1 = new TotalHitCountCollector();
    Runnable cancellation = () -> {
        throw new TaskCancelledException("cancelled");
    };
    ContextIndexSearcher searcher = new ContextIndexSearcher(reader, IndexSearcher.getDefaultSimilarity(),
        IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy(), true);

    searcher.search(new MatchAllDocsQuery(), collector1);
    assertThat(collector1.getTotalHits(), equalTo(reader.numDocs()));

    searcher.addQueryCancellation(cancellation);
    expectThrows(TaskCancelledException.class, () -> searcher.search(new MatchAllDocsQuery(), collector1));

    searcher.removeQueryCancellation(cancellation);
    TotalHitCountCollector collector2 = new TotalHitCountCollector();
    searcher.search(new MatchAllDocsQuery(), collector2);
    assertThat(collector2.getTotalHits(), equalTo(reader.numDocs()));
}
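The same addQueryCancellation/removeQueryCancellation hooks drive timeout handling in production (see executeInternal below). A minimal sketch of that pattern against the searcher from the test above; the 5-second budget and the reuse of TaskCancelledException for a timeout are illustrative choices, not the project's exact types:

// Register a deadline-based cancellation check, run a query, then remove the check
// so later work on the same searcher is not affected by it.
long deadlineMillis = System.currentTimeMillis() + 5_000; // illustrative 5-second budget
Runnable timeoutCheck = () -> {
    if (System.currentTimeMillis() > deadlineMillis) {
        throw new TaskCancelledException("query timed out");
    }
};
searcher.addQueryCancellation(timeoutCheck);
try {
    searcher.search(new MatchAllDocsQuery(), new TotalHitCountCollector());
} finally {
    searcher.removeQueryCancellation(timeoutCheck);
}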
Use of org.opensearch.search.internal.ContextIndexSearcher in project OpenSearch by opensearch-project.
The class QueryPhaseTests, method newEarlyTerminationContextSearcher.
private static ContextIndexSearcher newEarlyTerminationContextSearcher(IndexReader reader, int size) throws IOException {
    return new ContextIndexSearcher(reader, IndexSearcher.getDefaultSimilarity(),
        IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy(), true) {

        @Override
        public void search(List<LeafReaderContext> leaves, Weight weight, Collector collector) throws IOException {
            final Collector in = new AssertingEarlyTerminationFilterCollector(collector, size);
            super.search(leaves, weight, in);
        }
    };
}
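The countTestCase method further down also calls a plain newContextSearcher(reader) helper that is not shown on this page. A plausible sketch of it, mirroring the constructor call used in the snippets above:

// Sketch of the companion test helper: a ContextIndexSearcher with default
// similarity, query cache, and caching policy, and a wrapped reader.
private static ContextIndexSearcher newContextSearcher(IndexReader reader) throws IOException {
    return new ContextIndexSearcher(reader, IndexSearcher.getDefaultSimilarity(),
        IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy(), true);
}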
Use of org.opensearch.search.internal.ContextIndexSearcher in project OpenSearch by opensearch-project.
The class QueryPhase, method executeInternal.
/**
 * In a package-private method so that it can be tested without having to
 * wire everything (mapperService, etc.)
 *
 * @return whether the rescoring phase should be executed
 */
static boolean executeInternal(SearchContext searchContext) throws QueryPhaseExecutionException {
    final ContextIndexSearcher searcher = searchContext.searcher();
    final IndexReader reader = searcher.getIndexReader();
    QuerySearchResult queryResult = searchContext.queryResult();
    queryResult.searchTimedOut(false);
    try {
        queryResult.from(searchContext.from());
        queryResult.size(searchContext.size());
        Query query = searchContext.query();
        // already rewritten
        assert query == searcher.rewrite(query);

        final ScrollContext scrollContext = searchContext.scrollContext();
        if (scrollContext != null) {
            if (scrollContext.totalHits == null) {
                // first round
                assert scrollContext.lastEmittedDoc == null;
                // there is not much that we can optimize here since we want to collect all
                // documents in order to get the total number of hits
            } else {
                final ScoreDoc after = scrollContext.lastEmittedDoc;
                if (returnsDocsInOrder(query, searchContext.sort())) {
                    // skip to the desired doc
                    if (after != null) {
                        query = new BooleanQuery.Builder().add(query, BooleanClause.Occur.MUST)
                            .add(new MinDocQuery(after.doc + 1), BooleanClause.Occur.FILTER)
                            .build();
                    }
                    // ... and stop collecting after ${size} matches
                    searchContext.terminateAfter(searchContext.size());
                } else if (canEarlyTerminate(reader, searchContext.sort())) {
                    // skip to the desired doc
                    if (after != null) {
                        query = new BooleanQuery.Builder().add(query, BooleanClause.Occur.MUST)
                            .add(new SearchAfterSortedDocQuery(searchContext.sort().sort, (FieldDoc) after), BooleanClause.Occur.FILTER)
                            .build();
                    }
                }
            }
        }

        final LinkedList<QueryCollectorContext> collectors = new LinkedList<>();
        // whether the chain contains a collector that filters documents
        boolean hasFilterCollector = false;
        if (searchContext.terminateAfter() != SearchContext.DEFAULT_TERMINATE_AFTER) {
            // add terminate_after before the filter collectors
            // it will only be applied on documents accepted by these filter collectors
            collectors.add(createEarlyTerminationCollectorContext(searchContext.terminateAfter()));
            // this collector can filter documents during the collection
            hasFilterCollector = true;
        }
        if (searchContext.parsedPostFilter() != null) {
            // add post filters before aggregations
            // it will only be applied to top hits
            collectors.add(createFilteredCollectorContext(searcher, searchContext.parsedPostFilter().query()));
            // this collector can filter documents during the collection
            hasFilterCollector = true;
        }
        if (searchContext.queryCollectors().isEmpty() == false) {
            // plug in additional collectors, like aggregations
            collectors.add(createMultiCollectorContext(searchContext.queryCollectors().values()));
        }
        if (searchContext.minimumScore() != null) {
            // apply the minimum score after multi collector so we filter aggs as well
            collectors.add(createMinScoreCollectorContext(searchContext.minimumScore()));
            // this collector can filter documents during the collection
            hasFilterCollector = true;
        }

        // optimizing sort on Numerics (long and date)
        if ((searchContext.sort() != null) && SYS_PROP_REWRITE_SORT) {
            enhanceSortOnNumeric(searchContext, searcher.getIndexReader());
        }

        boolean timeoutSet = scrollContext == null
            && searchContext.timeout() != null
            && searchContext.timeout().equals(SearchService.NO_TIMEOUT) == false;
        final Runnable timeoutRunnable;
        if (timeoutSet) {
            final long startTime = searchContext.getRelativeTimeInMillis();
            final long timeout = searchContext.timeout().millis();
            final long maxTime = startTime + timeout;
            timeoutRunnable = searcher.addQueryCancellation(() -> {
                final long time = searchContext.getRelativeTimeInMillis();
                if (time > maxTime) {
                    throw new TimeExceededException();
                }
            });
        } else {
            timeoutRunnable = null;
        }

        if (searchContext.lowLevelCancellation()) {
            searcher.addQueryCancellation(() -> {
                SearchShardTask task = searchContext.getTask();
                if (task != null && task.isCancelled()) {
                    throw new TaskCancelledException("cancelled task with reason: " + task.getReasonCancelled());
                }
            });
        }

        try {
            boolean shouldRescore = searchWithCollector(searchContext, searcher, query, collectors, hasFilterCollector, timeoutSet);
            ExecutorService executor = searchContext.indexShard().getThreadPool().executor(ThreadPool.Names.SEARCH);
            if (executor instanceof QueueResizingOpenSearchThreadPoolExecutor) {
                QueueResizingOpenSearchThreadPoolExecutor rExecutor = (QueueResizingOpenSearchThreadPoolExecutor) executor;
                queryResult.nodeQueueSize(rExecutor.getCurrentQueueSize());
                queryResult.serviceTimeEWMA((long) rExecutor.getTaskExecutionEWMA());
            }
            return shouldRescore;
        } finally {
            // the search phase has finished, so the timeout check is no longer needed;
            // otherwise the aggregation phase might get cancelled.
            if (timeoutRunnable != null) {
                searcher.removeQueryCancellation(timeoutRunnable);
            }
        }
    } catch (Exception e) {
        throw new QueryPhaseExecutionException(searchContext.shardTarget(), "Failed to execute main query", e);
    }
}
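Two of the helpers called above decide whether collection can be cut short for scroll requests: returnsDocsInOrder checks whether the top hits will come back in doc-id order (so the scroll can resume with MinDocQuery), and canEarlyTerminate checks that the requested sort is a prefix of the index sort on every segment. A minimal sketch of the latter idea, written against the plain Lucene API rather than the project's helper (the method name is illustrative, and the real helper takes the SortAndFormats wrapper rather than a bare Sort):

// Sketch: early termination on a sorted index is only safe when every segment is
// index-sorted and the requested sort is a prefix of that index sort.
static boolean isPrefixOfIndexSort(Sort requestSort, IndexReader reader) {
    for (LeafReaderContext ctx : reader.leaves()) {
        Sort indexSort = ctx.reader().getMetaData().getSort();
        if (indexSort == null) {
            return false; // segment is not index-sorted
        }
        SortField[] indexFields = indexSort.getSort();
        SortField[] requestFields = requestSort.getSort();
        if (requestFields.length > indexFields.length) {
            return false;
        }
        for (int i = 0; i < requestFields.length; i++) {
            if (requestFields[i].equals(indexFields[i]) == false) {
                return false;
            }
        }
    }
    return true;
}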
Use of org.opensearch.search.internal.ContextIndexSearcher in project OpenSearch by opensearch-project.
The class QueryPhaseTests, method countTestCase.
private void countTestCase(Query query, IndexReader reader, boolean shouldCollectSearch, boolean shouldCollectCount) throws Exception {
    ContextIndexSearcher searcher = shouldCollectSearch ? newContextSearcher(reader) : newEarlyTerminationContextSearcher(reader, 0);
    TestSearchContext context = new TestSearchContext(null, indexShard, searcher);
    context.parsedQuery(new ParsedQuery(query));
    context.setSize(0);
    context.setTask(new SearchShardTask(123L, "", "", "", null, Collections.emptyMap()));

    final boolean rescore = QueryPhase.executeInternal(context);
    assertFalse(rescore);

    ContextIndexSearcher countSearcher = shouldCollectCount ? newContextSearcher(reader) : newEarlyTerminationContextSearcher(reader, 0);
    assertEquals(countSearcher.count(query), context.queryResult().topDocs().topDocs.totalHits.value);
}
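A hypothetical invocation of this helper (the field name and term are placeholders): whether either pass is expected to collect documents depends on the query and on whether the reader contains deleted documents, since the hit count of a simple term query can be read from index statistics only when nothing has been deleted.

// Hypothetical usage sketch: with no deletions, both the search pass and the count
// pass can skip collecting, so both flags are expected to be false.
boolean hasDeletions = reader.hasDeletions();
countTestCase(new TermQuery(new Term("field", "value")), reader, hasDeletions, hasDeletions);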