Use of org.opensearch.search.sort.SortAndFormats in project OpenSearch by opensearch-project.
From the class InnerHitContextBuilder, method setupInnerHitsContext:
protected void setupInnerHitsContext(QueryShardContext queryShardContext, InnerHitsContext.InnerHitSubContext innerHitsContext)
    throws IOException {
    innerHitsContext.from(innerHitBuilder.getFrom());
    innerHitsContext.size(innerHitBuilder.getSize());
    innerHitsContext.explain(innerHitBuilder.isExplain());
    innerHitsContext.version(innerHitBuilder.isVersion());
    innerHitsContext.seqNoAndPrimaryTerm(innerHitBuilder.isSeqNoAndPrimaryTerm());
    innerHitsContext.trackScores(innerHitBuilder.isTrackScores());
    if (innerHitBuilder.getStoredFieldsContext() != null) {
        innerHitsContext.storedFieldsContext(innerHitBuilder.getStoredFieldsContext());
    }
    if (innerHitBuilder.getDocValueFields() != null) {
        FetchDocValuesContext docValuesContext = FetchDocValuesContext.create(
            queryShardContext.getMapperService(),
            innerHitBuilder.getDocValueFields()
        );
        innerHitsContext.docValuesContext(docValuesContext);
    }
    if (innerHitBuilder.getFetchFields() != null) {
        String indexName = queryShardContext.index().getName();
        FetchFieldsContext fieldsContext = new FetchFieldsContext(innerHitBuilder.getFetchFields());
        innerHitsContext.fetchFieldsContext(fieldsContext);
    }
    if (innerHitBuilder.getScriptFields() != null) {
        for (SearchSourceBuilder.ScriptField field : innerHitBuilder.getScriptFields()) {
            QueryShardContext innerContext = innerHitsContext.getQueryShardContext();
            FieldScript.Factory factory = innerContext.compile(field.script(), FieldScript.CONTEXT);
            FieldScript.LeafFactory fieldScript = factory.newFactory(field.script().getParams(), innerContext.lookup());
            innerHitsContext.scriptFields()
                .add(new org.opensearch.search.fetch.subphase.ScriptFieldsContext.ScriptField(field.fieldName(), fieldScript, field.ignoreFailure()));
        }
    }
    if (innerHitBuilder.getFetchSourceContext() != null) {
        innerHitsContext.fetchSourceContext(innerHitBuilder.getFetchSourceContext());
    }
    if (innerHitBuilder.getSorts() != null) {
        Optional<SortAndFormats> optionalSort = SortBuilder.buildSort(innerHitBuilder.getSorts(), queryShardContext);
        if (optionalSort.isPresent()) {
            innerHitsContext.sort(optionalSort.get());
        }
    }
    if (innerHitBuilder.getHighlightBuilder() != null) {
        innerHitsContext.highlight(innerHitBuilder.getHighlightBuilder().build(queryShardContext));
    }
    ParsedQuery parsedQuery = new ParsedQuery(query.toQuery(queryShardContext), queryShardContext.copyNamedQueries());
    innerHitsContext.parsedQuery(parsedQuery);
    Map<String, InnerHitsContext.InnerHitSubContext> baseChildren = buildChildInnerHits(innerHitsContext.parentSearchContext(), children);
    innerHitsContext.setChildInnerHits(baseChildren);
}
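This snippet shows the pattern that recurs throughout these examples: the request's sort builders are resolved into a SortAndFormats via SortBuilder.buildSort, and the sort is installed only when one is present. A minimal sketch of that pattern, assuming an in-scope queryShardContext and innerHitsContext; "timestamp" is a hypothetical field name:

import java.util.List;
import java.util.Optional;
import org.opensearch.search.sort.SortAndFormats;
import org.opensearch.search.sort.SortBuilder;
import org.opensearch.search.sort.SortBuilders;
import org.opensearch.search.sort.SortOrder;

// Resolve sort builders into a SortAndFormats ("timestamp" is a hypothetical field).
List<SortBuilder<?>> sorts = List.of(SortBuilders.fieldSort("timestamp").order(SortOrder.DESC));
Optional<SortAndFormats> optionalSort = SortBuilder.buildSort(sorts, queryShardContext);
// Install the sort only when one was actually built.
optionalSort.ifPresent(innerHitsContext::sort);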
Use of org.opensearch.search.sort.SortAndFormats in project OpenSearch by opensearch-project.
From the class TopHitsAggregationBuilder, method doBuild:
@Override
protected TopHitsAggregatorFactory doBuild(QueryShardContext queryShardContext, AggregatorFactory parent, Builder subfactoriesBuilder)
    throws IOException {
    long innerResultWindow = from() + size();
    int maxInnerResultWindow = queryShardContext.getMapperService().getIndexSettings().getMaxInnerResultWindow();
    if (innerResultWindow > maxInnerResultWindow) {
        throw new IllegalArgumentException(
            "Top hits result window is too large, the top hits aggregator [" + name + "]'s from + size must be less "
                + "than or equal to: [" + maxInnerResultWindow + "] but was [" + innerResultWindow
                + "]. This limit can be set by changing the [" + IndexSettings.MAX_INNER_RESULT_WINDOW_SETTING.getKey()
                + "] index level setting."
        );
    }
    List<ScriptFieldsContext.ScriptField> scriptFields = new ArrayList<>();
    if (this.scriptFields != null) {
        for (ScriptField field : this.scriptFields) {
            FieldScript.Factory factory = queryShardContext.compile(field.script(), FieldScript.CONTEXT);
            FieldScript.LeafFactory searchScript = factory.newFactory(field.script().getParams(), queryShardContext.lookup());
            scriptFields.add(
                new org.opensearch.search.fetch.subphase.ScriptFieldsContext.ScriptField(field.fieldName(), searchScript, field.ignoreFailure())
            );
        }
    }
    final Optional<SortAndFormats> optionalSort;
    if (sorts == null) {
        optionalSort = Optional.empty();
    } else {
        optionalSort = SortBuilder.buildSort(sorts, queryShardContext);
    }
    return new TopHitsAggregatorFactory(
        name, from, size, explain, version, seqNoAndPrimaryTerm, trackScores, optionalSort,
        highlightBuilder, storedFieldsContext, docValueFields, fetchFields, scriptFields,
        fetchSourceContext, queryShardContext, parent, subfactoriesBuilder, metadata
    );
}
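For reference, SortAndFormats itself is a small value class that pairs a Lucene Sort with one DocValueFormat per sort field; the test at the bottom of this page constructs one the same way. A hedged sketch of building one directly:

import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField;
import org.opensearch.search.DocValueFormat;
import org.opensearch.search.sort.SortAndFormats;

// One DocValueFormat per SortField; DocValueFormat.RAW leaves sort values unformatted.
Sort sort = new Sort(new SortField("price", SortField.Type.DOUBLE));
SortAndFormats sortAndFormats = new SortAndFormats(sort, new DocValueFormat[] { DocValueFormat.RAW });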
Use of org.opensearch.search.sort.SortAndFormats in project OpenSearch by opensearch-project.
From the class SearchService, method parseSource:
private void parseSource(DefaultSearchContext context, SearchSourceBuilder source, boolean includeAggregations) {
    // nothing to parse...
    if (source == null) {
        return;
    }
    SearchShardTarget shardTarget = context.shardTarget();
    QueryShardContext queryShardContext = context.getQueryShardContext();
    context.from(source.from());
    context.size(source.size());
    Map<String, InnerHitContextBuilder> innerHitBuilders = new HashMap<>();
    if (source.query() != null) {
        InnerHitContextBuilder.extractInnerHits(source.query(), innerHitBuilders);
        context.parsedQuery(queryShardContext.toQuery(source.query()));
    }
    if (source.postFilter() != null) {
        InnerHitContextBuilder.extractInnerHits(source.postFilter(), innerHitBuilders);
        context.parsedPostFilter(queryShardContext.toQuery(source.postFilter()));
    }
    if (innerHitBuilders.size() > 0) {
        for (Map.Entry<String, InnerHitContextBuilder> entry : innerHitBuilders.entrySet()) {
            try {
                entry.getValue().build(context, context.innerHits());
            } catch (IOException e) {
                throw new SearchException(shardTarget, "failed to build inner_hits", e);
            }
        }
    }
    if (source.sorts() != null) {
        try {
            Optional<SortAndFormats> optionalSort = SortBuilder.buildSort(source.sorts(), context.getQueryShardContext());
            if (optionalSort.isPresent()) {
                context.sort(optionalSort.get());
            }
        } catch (IOException e) {
            throw new SearchException(shardTarget, "failed to create sort elements", e);
        }
    }
    context.trackScores(source.trackScores());
    if (source.trackTotalHitsUpTo() != null
        && source.trackTotalHitsUpTo() != SearchContext.TRACK_TOTAL_HITS_ACCURATE
        && context.scrollContext() != null) {
        throw new SearchException(shardTarget, "disabling [track_total_hits] is not allowed in a scroll context");
    }
    if (source.trackTotalHitsUpTo() != null) {
        context.trackTotalHitsUpTo(source.trackTotalHitsUpTo());
    }
    if (source.minScore() != null) {
        context.minimumScore(source.minScore());
    }
    if (source.profile()) {
        context.setProfilers(new Profilers(context.searcher()));
    }
    if (source.timeout() != null) {
        context.timeout(source.timeout());
    }
    context.terminateAfter(source.terminateAfter());
    if (source.aggregations() != null && includeAggregations) {
        try {
            AggregatorFactories factories = source.aggregations().build(queryShardContext, null);
            context.aggregations(new SearchContextAggregations(factories, multiBucketConsumerService.create()));
        } catch (IOException e) {
            throw new AggregationInitializationException("Failed to create aggregators", e);
        }
    }
    if (source.suggest() != null) {
        try {
            context.suggest(source.suggest().build(queryShardContext));
        } catch (IOException e) {
            throw new SearchException(shardTarget, "failed to create SuggestionSearchContext", e);
        }
    }
    if (source.rescores() != null) {
        try {
            for (RescorerBuilder<?> rescore : source.rescores()) {
                context.addRescore(rescore.buildContext(queryShardContext));
            }
        } catch (IOException e) {
            throw new SearchException(shardTarget, "failed to create RescoreSearchContext", e);
        }
    }
    if (source.explain() != null) {
        context.explain(source.explain());
    }
    if (source.fetchSource() != null) {
        context.fetchSourceContext(source.fetchSource());
    }
    if (source.docValueFields() != null) {
        FetchDocValuesContext docValuesContext = FetchDocValuesContext.create(context.mapperService(), source.docValueFields());
        context.docValuesContext(docValuesContext);
    }
    if (source.fetchFields() != null) {
        FetchFieldsContext fetchFieldsContext = new FetchFieldsContext(source.fetchFields());
        context.fetchFieldsContext(fetchFieldsContext);
    }
    if (source.highlighter() != null) {
        HighlightBuilder highlightBuilder = source.highlighter();
        try {
            context.highlight(highlightBuilder.build(queryShardContext));
        } catch (IOException e) {
            throw new SearchException(shardTarget, "failed to create SearchContextHighlighter", e);
        }
    }
    if (source.scriptFields() != null && source.size() != 0) {
        int maxAllowedScriptFields = context.mapperService().getIndexSettings().getMaxScriptFields();
        if (source.scriptFields().size() > maxAllowedScriptFields) {
            throw new IllegalArgumentException(
                "Trying to retrieve too many script_fields. Must be less than or equal to: [" + maxAllowedScriptFields
                    + "] but was [" + source.scriptFields().size() + "]. This limit can be set by changing the ["
                    + IndexSettings.MAX_SCRIPT_FIELDS_SETTING.getKey() + "] index level setting."
            );
        }
        for (org.opensearch.search.builder.SearchSourceBuilder.ScriptField field : source.scriptFields()) {
            FieldScript.Factory factory = scriptService.compile(field.script(), FieldScript.CONTEXT);
            SearchLookup lookup = context.getQueryShardContext().lookup();
            FieldScript.LeafFactory searchScript = factory.newFactory(field.script().getParams(), lookup);
            context.scriptFields().add(new ScriptField(field.fieldName(), searchScript, field.ignoreFailure()));
        }
    }
    if (source.ext() != null) {
        for (SearchExtBuilder searchExtBuilder : source.ext()) {
            context.addSearchExt(searchExtBuilder);
        }
    }
    if (source.version() != null) {
        context.version(source.version());
    }
    if (source.seqNoAndPrimaryTerm() != null) {
        context.seqNoAndPrimaryTerm(source.seqNoAndPrimaryTerm());
    }
    if (source.stats() != null) {
        context.groupStats(source.stats());
    }
    if (CollectionUtils.isEmpty(source.searchAfter()) == false) {
        if (context.scrollContext() != null) {
            throw new SearchException(shardTarget, "`search_after` cannot be used in a scroll context.");
        }
        if (context.from() > 0) {
            throw new SearchException(shardTarget, "`from` parameter must be set to 0 when `search_after` is used.");
        }
        FieldDoc fieldDoc = SearchAfterBuilder.buildFieldDoc(context.sort(), source.searchAfter());
        context.searchAfter(fieldDoc);
    }
    if (source.slice() != null) {
        if (context.scrollContext() == null) {
            throw new SearchException(shardTarget, "`slice` cannot be used outside of a scroll context");
        }
        context.sliceBuilder(source.slice());
    }
    if (source.storedFields() != null) {
        if (source.storedFields().fetchFields() == false) {
            if (context.sourceRequested()) {
                throw new SearchException(shardTarget, "[stored_fields] cannot be disabled if [_source] is requested");
            }
            if (context.fetchFieldsContext() != null) {
                throw new SearchException(shardTarget, "[stored_fields] cannot be disabled when using the [fields] option");
            }
        }
        context.storedFieldsContext(source.storedFields());
    }
    if (source.collapse() != null) {
        if (context.scrollContext() != null) {
            throw new SearchException(shardTarget, "cannot use `collapse` in a scroll context");
        }
        if (context.searchAfter() != null) {
            throw new SearchException(shardTarget, "cannot use `collapse` in conjunction with `search_after`");
        }
        if (context.rescore() != null && context.rescore().isEmpty() == false) {
            throw new SearchException(shardTarget, "cannot use `collapse` in conjunction with `rescore`");
        }
        final CollapseContext collapseContext = source.collapse().build(queryShardContext);
        context.collapse(collapseContext);
    }
}
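Every caller on this page guards buildSort with isPresent() rather than assuming a Sort was built. The likely reason, assuming the behavior here matches other versions of this code, is that SortBuilder.buildSort returns an empty Optional when the requested sort reduces to the default relevance sort (_score descending), so no explicit Lucene Sort is needed. A sketch of that assumption, given an in-scope queryShardContext and a method that declares IOException:

import java.util.List;
import java.util.Optional;
import org.opensearch.search.sort.SortAndFormats;
import org.opensearch.search.sort.SortBuilder;
import org.opensearch.search.sort.SortBuilders;

// Sorting by _score descending is the default, so buildSort is expected to return
// Optional.empty() here (an assumption, not verified against every version).
List<SortBuilder<?>> scoreSorts = List.of(SortBuilders.scoreSort());
Optional<SortAndFormats> optionalSort = SortBuilder.buildSort(scoreSorts, queryShardContext);
assert optionalSort.isPresent() == false;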
Use of org.opensearch.search.sort.SortAndFormats in project OpenSearch by opensearch-project.
From the class TopHitsAggregator, method getLeafCollector:
@Override
public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, LeafBucketCollector sub) throws IOException {
    // Create leaf collectors here instead of at the aggregator level. Otherwise, if this collector is invoked
    // during post-collection, the leaf readers at the aggregator level have already been replaced with the next
    // leaf readers, and post-collection would push doc ids of the previous segment, causing assertions to trip
    // or incorrect top docs to be computed.
    final LongObjectHashMap<LeafCollector> leafCollectors = new LongObjectHashMap<>(1);
    return new LeafBucketCollectorBase(sub, null) {

        Scorable scorer;

        @Override
        public void setScorer(Scorable scorer) throws IOException {
            this.scorer = scorer;
            super.setScorer(scorer);
            for (ObjectCursor<LeafCollector> cursor : leafCollectors.values()) {
                cursor.value.setScorer(scorer);
            }
        }

        @Override
        public void collect(int docId, long bucket) throws IOException {
            Collectors collectors = topDocsCollectors.get(bucket);
            if (collectors == null) {
                SortAndFormats sort = subSearchContext.sort();
                int topN = subSearchContext.from() + subSearchContext.size();
                if (sort == null) {
                    for (RescoreContext rescoreContext : context.rescore()) {
                        topN = Math.max(rescoreContext.getWindowSize(), topN);
                    }
                }
                // In the QueryPhase we don't need this protection because it is built into the IndexSearcher,
                // but here we create collectors ourselves and need to prevent OOM caused by a crazy offset and size.
                topN = Math.min(topN, subSearchContext.searcher().getIndexReader().maxDoc());
                if (sort == null) {
                    collectors = new Collectors(TopScoreDocCollector.create(topN, Integer.MAX_VALUE), null);
                } else {
                    // TODO: can we pass trackTotalHits=subSearchContext.trackTotalHits()?
                    // Note that this would require catching CollectionTerminatedException.
                    collectors = new Collectors(
                        TopFieldCollector.create(sort.sort, topN, Integer.MAX_VALUE),
                        subSearchContext.trackScores() ? new MaxScoreCollector() : null
                    );
                }
                topDocsCollectors.put(bucket, collectors);
            }
            final LeafCollector leafCollector;
            final int key = leafCollectors.indexOf(bucket);
            if (key < 0) {
                leafCollector = collectors.collector.getLeafCollector(ctx);
                if (scorer != null) {
                    leafCollector.setScorer(scorer);
                }
                leafCollectors.indexInsert(key, bucket, leafCollector);
            } else {
                leafCollector = leafCollectors.indexGet(key);
            }
            leafCollector.collect(docId);
        }
    };
}
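In this aggregator, the presence or absence of a SortAndFormats decides which Lucene collector gathers a bucket's top hits. A simplified sketch of just that decision, outside the aggregator, assuming sortAndFormats and topN are in scope:

import org.apache.lucene.search.TopDocsCollector;
import org.apache.lucene.search.TopFieldCollector;
import org.apache.lucene.search.TopScoreDocCollector;
import org.opensearch.search.sort.SortAndFormats;

// With no explicit sort, collect hits by score; otherwise collect by the Lucene Sort
// wrapped inside SortAndFormats (its public "sort" field, as used above).
TopDocsCollector<?> collector;
if (sortAndFormats == null) {
    collector = TopScoreDocCollector.create(topN, Integer.MAX_VALUE);
} else {
    collector = TopFieldCollector.create(sortAndFormats.sort, topN, Integer.MAX_VALUE);
}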
Use of org.opensearch.search.sort.SortAndFormats in project OpenSearch by opensearch-project.
From the class DefaultSearchContextTests, method testPreProcess:
public void testPreProcess() throws Exception {
    TimeValue timeout = new TimeValue(randomIntBetween(1, 100));
    ShardSearchRequest shardSearchRequest = mock(ShardSearchRequest.class);
    when(shardSearchRequest.searchType()).thenReturn(SearchType.DEFAULT);
    ShardId shardId = new ShardId("index", UUID.randomUUID().toString(), 1);
    when(shardSearchRequest.shardId()).thenReturn(shardId);
    ThreadPool threadPool = new TestThreadPool(this.getClass().getName());
    IndexShard indexShard = mock(IndexShard.class);
    QueryCachingPolicy queryCachingPolicy = mock(QueryCachingPolicy.class);
    when(indexShard.getQueryCachingPolicy()).thenReturn(queryCachingPolicy);
    when(indexShard.getThreadPool()).thenReturn(threadPool);
    int maxResultWindow = randomIntBetween(50, 100);
    int maxRescoreWindow = randomIntBetween(50, 100);
    int maxSlicesPerScroll = randomIntBetween(50, 100);
    Settings settings = Settings.builder()
        .put("index.max_result_window", maxResultWindow)
        .put("index.max_slices_per_scroll", maxSlicesPerScroll)
        .put("index.max_rescore_window", maxRescoreWindow)
        .put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)
        .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 1)
        .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 2)
        .build();
    IndexService indexService = mock(IndexService.class);
    IndexCache indexCache = mock(IndexCache.class);
    QueryCache queryCache = mock(QueryCache.class);
    when(indexCache.query()).thenReturn(queryCache);
    when(indexService.cache()).thenReturn(indexCache);
    QueryShardContext queryShardContext = mock(QueryShardContext.class);
    when(indexService.newQueryShardContext(eq(shardId.id()), any(), any(), nullable(String.class))).thenReturn(queryShardContext);
    MapperService mapperService = mock(MapperService.class);
    when(mapperService.hasNested()).thenReturn(randomBoolean());
    when(indexService.mapperService()).thenReturn(mapperService);
    IndexMetadata indexMetadata = IndexMetadata.builder("index").settings(settings).build();
    IndexSettings indexSettings = new IndexSettings(indexMetadata, Settings.EMPTY);
    when(indexService.getIndexSettings()).thenReturn(indexSettings);
    when(mapperService.getIndexSettings()).thenReturn(indexSettings);
    BigArrays bigArrays = new MockBigArrays(new MockPageCacheRecycler(Settings.EMPTY), new NoneCircuitBreakerService());
    try (Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter(random(), dir)) {
        final Supplier<Engine.SearcherSupplier> searcherSupplier = () -> new Engine.SearcherSupplier(Function.identity()) {
            @Override
            protected void doClose() {}

            @Override
            protected Engine.Searcher acquireSearcherInternal(String source) {
                try {
                    IndexReader reader = w.getReader();
                    return new Engine.Searcher(
                        "test",
                        reader,
                        IndexSearcher.getDefaultSimilarity(),
                        IndexSearcher.getDefaultQueryCache(),
                        IndexSearcher.getDefaultQueryCachingPolicy(),
                        reader
                    );
                } catch (IOException exc) {
                    throw new AssertionError(exc);
                }
            }
        };
        SearchShardTarget target = new SearchShardTarget("node", shardId, null, OriginalIndices.NONE);
        ReaderContext readerWithoutScroll = new ReaderContext(
            newContextId(), indexService, indexShard, searcherSupplier.get(), randomNonNegativeLong(), false
        );
        DefaultSearchContext contextWithoutScroll = new DefaultSearchContext(
            readerWithoutScroll, shardSearchRequest, target, null, bigArrays, null, timeout, null, false, Version.CURRENT
        );
        contextWithoutScroll.from(300);
        contextWithoutScroll.close();
        // resultWindow greater than maxResultWindow and scrollContext is null
        IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> contextWithoutScroll.preProcess(false));
        assertThat(
            exception.getMessage(),
            equalTo(
                "Result window is too large, from + size must be less than or equal to: [" + maxResultWindow
                    + "] but was [310]. See the scroll api for a more efficient way to request large data sets. "
                    + "This limit can be set by changing the [" + IndexSettings.MAX_RESULT_WINDOW_SETTING.getKey()
                    + "] index level setting."
            )
        );
        // resultWindow greater than maxResultWindow and scrollContext isn't null
        when(shardSearchRequest.scroll()).thenReturn(new Scroll(TimeValue.timeValueMillis(randomInt(1000))));
        ReaderContext readerContext = new LegacyReaderContext(
            newContextId(), indexService, indexShard, searcherSupplier.get(), shardSearchRequest, randomNonNegativeLong()
        );
        DefaultSearchContext context1 = new DefaultSearchContext(
            readerContext, shardSearchRequest, target, null, bigArrays, null, timeout, null, false, Version.CURRENT
        );
        context1.from(300);
        exception = expectThrows(IllegalArgumentException.class, () -> context1.preProcess(false));
        assertThat(
            exception.getMessage(),
            equalTo(
                "Batch size is too large, size must be less than or equal to: [" + maxResultWindow
                    + "] but was [310]. Scroll batch sizes cost as much memory as result windows so they are "
                    + "controlled by the [" + IndexSettings.MAX_RESULT_WINDOW_SETTING.getKey() + "] index level setting."
            )
        );
        // resultWindow not greater than maxResultWindow and both rescore and sort are not null
        context1.from(0);
        DocValueFormat docValueFormat = mock(DocValueFormat.class);
        SortAndFormats sortAndFormats = new SortAndFormats(new Sort(), new DocValueFormat[] { docValueFormat });
        context1.sort(sortAndFormats);
        RescoreContext rescoreContext = mock(RescoreContext.class);
        when(rescoreContext.getWindowSize()).thenReturn(500);
        context1.addRescore(rescoreContext);
        exception = expectThrows(IllegalArgumentException.class, () -> context1.preProcess(false));
        assertThat(exception.getMessage(), equalTo("Cannot use [sort] option in conjunction with [rescore]."));
        // sort is null but rescore is not null and rescoreContext.getWindowSize() exceeds maxRescoreWindow
        context1.sort(null);
        exception = expectThrows(IllegalArgumentException.class, () -> context1.preProcess(false));
        assertThat(
            exception.getMessage(),
            equalTo(
                "Rescore window [" + rescoreContext.getWindowSize() + "] is too large. "
                    + "It must be less than [" + maxRescoreWindow + "]. This prevents allocating massive heaps for storing the results "
                    + "to be rescored. This limit can be set by changing the [" + IndexSettings.MAX_RESCORE_WINDOW_SETTING.getKey()
                    + "] index level setting."
            )
        );
        readerContext.close();
        readerContext = new ReaderContext(
            newContextId(), indexService, indexShard, searcherSupplier.get(), randomNonNegativeLong(), false
        );
        // sliceBuilder is not null and requests more slices than maxSlicesPerScroll
        DefaultSearchContext context2 = new DefaultSearchContext(
            readerContext, shardSearchRequest, target, null, bigArrays, null, timeout, null, false, Version.CURRENT
        );
        SliceBuilder sliceBuilder = mock(SliceBuilder.class);
        int numSlices = maxSlicesPerScroll + randomIntBetween(1, 100);
        when(sliceBuilder.getMax()).thenReturn(numSlices);
        context2.sliceBuilder(sliceBuilder);
        exception = expectThrows(IllegalArgumentException.class, () -> context2.preProcess(false));
        assertThat(
            exception.getMessage(),
            equalTo(
                "The number of slices [" + numSlices + "] is too large. It must "
                    + "be less than [" + maxSlicesPerScroll + "]. This limit can be set by changing the ["
                    + IndexSettings.MAX_SLICES_PER_SCROLL.getKey() + "] index level setting."
            )
        );
        // No exceptions should be thrown
        when(shardSearchRequest.getAliasFilter()).thenReturn(AliasFilter.EMPTY);
        when(shardSearchRequest.indexBoost()).thenReturn(AbstractQueryBuilder.DEFAULT_BOOST);
        DefaultSearchContext context3 = new DefaultSearchContext(
            readerContext, shardSearchRequest, target, null, bigArrays, null, timeout, null, false, Version.CURRENT
        );
        ParsedQuery parsedQuery = ParsedQuery.parsedMatchAllQuery();
        context3.sliceBuilder(null).parsedQuery(parsedQuery).preProcess(false);
        assertEquals(context3.query(), context3.buildFilteredQuery(parsedQuery.query()));
        when(queryShardContext.getIndexSettings()).thenReturn(indexSettings);
        when(queryShardContext.fieldMapper(anyString())).thenReturn(mock(MappedFieldType.class));
        when(shardSearchRequest.indexRoutings()).thenReturn(new String[0]);
        readerContext.close();
        readerContext = new ReaderContext(
            newContextId(), indexService, indexShard, searcherSupplier.get(), randomNonNegativeLong(), false
        );
        DefaultSearchContext context4 = new DefaultSearchContext(
            readerContext, shardSearchRequest, target, null, bigArrays, null, timeout, null, false, Version.CURRENT
        );
        context4.sliceBuilder(new SliceBuilder(1, 2)).parsedQuery(parsedQuery).preProcess(false);
        Query query1 = context4.query();
        context4.sliceBuilder(new SliceBuilder(0, 2)).parsedQuery(parsedQuery).preProcess(false);
        Query query2 = context4.query();
        assertTrue(query1 instanceof MatchNoDocsQuery || query2 instanceof MatchNoDocsQuery);
        readerContext.close();
        threadPool.shutdown();
    }
}
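The sort-plus-rescore case in this test exercises a validation in DefaultSearchContext.preProcess: rescoring re-orders hits by score, which would silently override an explicit field sort. A hedged sketch of that check, paraphrased from the error message asserted above rather than copied from the actual source:

// Rescoring reorders hits by score, so it cannot honor an explicit sort.
if (sortAndFormats != null && rescoreContexts.isEmpty() == false) {
    throw new IllegalArgumentException("Cannot use [sort] option in conjunction with [rescore].");
}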