Use of org.opensearch.index.query.QueryShardContext in the OpenSearch project (opensearch-project).
From the class PercolatorQuerySearchTests, method testRangeQueriesWithNow:
public void testRangeQueriesWithNow() throws Exception {
    IndexService indexService = createIndex(
        "test",
        Settings.builder().put("index.number_of_shards", 1).build(),
        "_doc",
        "field1", "type=keyword",
        "field2", "type=date",
        "query", "type=percolator"
    );
    client().prepareIndex("test")
        .setId("1")
        .setSource(jsonBuilder().startObject().field("query", rangeQuery("field2").from("now-1h").to("now+1h")).endObject())
        .get();
    client().prepareIndex("test")
        .setId("2")
        .setSource(
            jsonBuilder().startObject()
                .field("query", boolQuery().filter(termQuery("field1", "value")).filter(rangeQuery("field2").from("now-1h").to("now+1h")))
                .endObject()
        )
        .get();
    Script script = new Script(ScriptType.INLINE, MockScriptPlugin.NAME, "1==1", Collections.emptyMap());
    client().prepareIndex("test")
        .setId("3")
        .setSource(
            jsonBuilder().startObject()
                .field("query", boolQuery().filter(scriptQuery(script)).filter(rangeQuery("field2").from("now-1h").to("now+1h")))
                .endObject()
        )
        .get();
    client().admin().indices().prepareRefresh().get();
    try (Engine.Searcher searcher = indexService.getShard(0).acquireSearcher("test")) {
        long[] currentTime = new long[] { System.currentTimeMillis() };
        QueryShardContext queryShardContext = indexService.newQueryShardContext(0, searcher, () -> currentTime[0], null);
        BytesReference source = BytesReference.bytes(
            jsonBuilder().startObject().field("field1", "value").field("field2", currentTime[0]).endObject()
        );
        QueryBuilder queryBuilder = new PercolateQueryBuilder("query", source, XContentType.JSON);
        Query query = queryBuilder.toQuery(queryShardContext);
        assertThat(searcher.count(query), equalTo(3));
        // + 3 hours
        currentTime[0] = currentTime[0] + 10800000;
        source = BytesReference.bytes(
            jsonBuilder().startObject().field("field1", "value").field("field2", currentTime[0]).endObject()
        );
        queryBuilder = new PercolateQueryBuilder("query", source, XContentType.JSON);
        query = queryBuilder.toQuery(queryShardContext);
        assertThat(searcher.count(query), equalTo(3));
    }
}
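The pattern worth noting above is that indexService.newQueryShardContext(...) takes the "now" supplier as an argument, so date math such as "now-1h".."now+1h" resolves against a clock the test controls rather than the wall clock. Below is a minimal sketch of that pattern, assuming the same imports and test fixtures as the snippet above; the helper name is illustrative, not part of the OpenSearch API.

// Builds a percolate query against a context whose notion of "now" is frozen at fixedNowMillis.
// The IndexService, the Engine.Searcher and the percolator field name "query" come from the test setup above.
private static Query percolateAtFixedNow(IndexService indexService, Engine.Searcher searcher,
                                         BytesReference document, long fixedNowMillis) throws IOException {
    // shard 0, the supplied searcher, a frozen clock, no cluster alias
    QueryShardContext context = indexService.newQueryShardContext(0, searcher, () -> fixedNowMillis, null);
    QueryBuilder builder = new PercolateQueryBuilder("query", document, XContentType.JSON);
    return builder.toQuery(context);
}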
Use of org.opensearch.index.query.QueryShardContext in the OpenSearch project (opensearch-project).
From the class SearchAsYouTypeFieldMapperTests, method testMultiMatchBoolPrefix:
public void testMultiMatchBoolPrefix() throws IOException {
    QueryShardContext queryShardContext = createQueryShardContext(
        createMapperService(fieldMapping(b -> b.field("type", "search_as_you_type").field("max_shingle_size", 4)))
    );
    MultiMatchQueryBuilder builder = new MultiMatchQueryBuilder(
        "quick brown fox jump lazy dog",
        "field",
        "field._2gram",
        "field._3gram",
        "field._4gram"
    );
    builder.type(MultiMatchQueryBuilder.Type.BOOL_PREFIX);
    final Query actual = builder.toQuery(queryShardContext);
    assertThat(actual, instanceOf(DisjunctionMaxQuery.class));
    final DisjunctionMaxQuery disMaxQuery = (DisjunctionMaxQuery) actual;
    assertThat(disMaxQuery.getDisjuncts(), hasSize(4));
    assertThat(
        disMaxQuery.getDisjuncts(),
        containsInAnyOrder(
            buildBoolPrefixQuery("field", "field._index_prefix", asList("quick", "brown", "fox", "jump", "lazy", "dog")),
            buildBoolPrefixQuery("field._2gram", "field._index_prefix", asList("quick brown", "brown fox", "fox jump", "jump lazy", "lazy dog")),
            buildBoolPrefixQuery("field._3gram", "field._index_prefix", asList("quick brown fox", "brown fox jump", "fox jump lazy", "jump lazy dog")),
            buildBoolPrefixQuery("field._4gram", "field._index_prefix", asList("quick brown fox jump", "brown fox jump lazy", "fox jump lazy dog"))
        )
    );
}
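The same conversion step drives these assertions: any QueryBuilder can be lowered to a Lucene Query through a QueryShardContext built from the mapping under test. A hedged sketch of that shape follows, assuming the fixture methods used above (createMapperService, createQueryShardContext, fieldMapping) from the mapper test base class; the helper name is hypothetical.

// Lowers an arbitrary QueryBuilder to a Lucene Query against a single-field mapping of the given type.
private Query toLuceneQuery(QueryBuilder builder, String fieldType) throws IOException {
    QueryShardContext context = createQueryShardContext(
        createMapperService(fieldMapping(b -> b.field("type", fieldType)))
    );
    return builder.toQuery(context);
}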
Use of org.opensearch.index.query.QueryShardContext in the OpenSearch project (opensearch-project).
From the class SearchService, method createSearchContext:
private DefaultSearchContext createSearchContext(ReaderContext reader, ShardSearchRequest request, TimeValue timeout) throws IOException {
    boolean success = false;
    DefaultSearchContext searchContext = null;
    try {
        SearchShardTarget shardTarget = new SearchShardTarget(
            clusterService.localNode().getId(),
            reader.indexShard().shardId(),
            request.getClusterAlias(),
            OriginalIndices.NONE
        );
        searchContext = new DefaultSearchContext(
            reader,
            request,
            shardTarget,
            clusterService,
            bigArrays,
            threadPool::relativeTimeInMillis,
            timeout,
            fetchPhase,
            lowLevelCancellation,
            clusterService.state().nodes().getMinNodeVersion()
        );
        // we clone the query shard context here just for rewriting; otherwise we might end up
        // with incorrect state, since we use now() or script services during rewrite and
        // normalize / evaluate templates etc.
        QueryShardContext context = new QueryShardContext(searchContext.getQueryShardContext());
        Rewriteable.rewrite(request.getRewriteable(), context, true);
        assert searchContext.getQueryShardContext().isCacheable();
        success = true;
    } finally {
        if (success == false) {
            // we handle the case where `IndicesService#indexServiceSafe` or `IndexService#getShard`, or the
            // DefaultSearchContext constructor throws an exception, since we would otherwise leak a searcher
            // and this can have severe implications (unable to obtain shard lock exceptions).
            IOUtils.closeWhileHandlingException(searchContext);
        }
    }
    return searchContext;
}
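The comment above is the key point: rewriting may consult now() or script services, which would otherwise leave per-request state (and cacheability) in the context used for the actual search. A condensed sketch of that clone-before-rewrite idea is shown below, with an illustrative helper name; it only restates the three relevant lines of createSearchContext.

// Rewrite against a throwaway copy so the search context's own QueryShardContext stays cacheable.
private static void rewriteAgainstCopy(ShardSearchRequest request, DefaultSearchContext searchContext) throws IOException {
    QueryShardContext rewriteContext = new QueryShardContext(searchContext.getQueryShardContext());
    Rewriteable.rewrite(request.getRewriteable(), rewriteContext, true);
    assert searchContext.getQueryShardContext().isCacheable();
}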
Use of org.opensearch.index.query.QueryShardContext in the OpenSearch project (opensearch-project).
From the class SearchService, method parseSource:
private void parseSource(DefaultSearchContext context, SearchSourceBuilder source, boolean includeAggregations) {
    // nothing to parse...
    if (source == null) {
        return;
    }
    SearchShardTarget shardTarget = context.shardTarget();
    QueryShardContext queryShardContext = context.getQueryShardContext();
    context.from(source.from());
    context.size(source.size());
    Map<String, InnerHitContextBuilder> innerHitBuilders = new HashMap<>();
    if (source.query() != null) {
        InnerHitContextBuilder.extractInnerHits(source.query(), innerHitBuilders);
        context.parsedQuery(queryShardContext.toQuery(source.query()));
    }
    if (source.postFilter() != null) {
        InnerHitContextBuilder.extractInnerHits(source.postFilter(), innerHitBuilders);
        context.parsedPostFilter(queryShardContext.toQuery(source.postFilter()));
    }
    if (innerHitBuilders.size() > 0) {
        for (Map.Entry<String, InnerHitContextBuilder> entry : innerHitBuilders.entrySet()) {
            try {
                entry.getValue().build(context, context.innerHits());
            } catch (IOException e) {
                throw new SearchException(shardTarget, "failed to build inner_hits", e);
            }
        }
    }
    if (source.sorts() != null) {
        try {
            Optional<SortAndFormats> optionalSort = SortBuilder.buildSort(source.sorts(), context.getQueryShardContext());
            if (optionalSort.isPresent()) {
                context.sort(optionalSort.get());
            }
        } catch (IOException e) {
            throw new SearchException(shardTarget, "failed to create sort elements", e);
        }
    }
    context.trackScores(source.trackScores());
    if (source.trackTotalHitsUpTo() != null
        && source.trackTotalHitsUpTo() != SearchContext.TRACK_TOTAL_HITS_ACCURATE
        && context.scrollContext() != null) {
        throw new SearchException(shardTarget, "disabling [track_total_hits] is not allowed in a scroll context");
    }
    if (source.trackTotalHitsUpTo() != null) {
        context.trackTotalHitsUpTo(source.trackTotalHitsUpTo());
    }
    if (source.minScore() != null) {
        context.minimumScore(source.minScore());
    }
    if (source.profile()) {
        context.setProfilers(new Profilers(context.searcher()));
    }
    if (source.timeout() != null) {
        context.timeout(source.timeout());
    }
    context.terminateAfter(source.terminateAfter());
    if (source.aggregations() != null && includeAggregations) {
        try {
            AggregatorFactories factories = source.aggregations().build(queryShardContext, null);
            context.aggregations(new SearchContextAggregations(factories, multiBucketConsumerService.create()));
        } catch (IOException e) {
            throw new AggregationInitializationException("Failed to create aggregators", e);
        }
    }
    if (source.suggest() != null) {
        try {
            context.suggest(source.suggest().build(queryShardContext));
        } catch (IOException e) {
            throw new SearchException(shardTarget, "failed to create SuggestionSearchContext", e);
        }
    }
    if (source.rescores() != null) {
        try {
            for (RescorerBuilder<?> rescore : source.rescores()) {
                context.addRescore(rescore.buildContext(queryShardContext));
            }
        } catch (IOException e) {
            throw new SearchException(shardTarget, "failed to create RescoreSearchContext", e);
        }
    }
    if (source.explain() != null) {
        context.explain(source.explain());
    }
    if (source.fetchSource() != null) {
        context.fetchSourceContext(source.fetchSource());
    }
    if (source.docValueFields() != null) {
        FetchDocValuesContext docValuesContext = FetchDocValuesContext.create(context.mapperService(), source.docValueFields());
        context.docValuesContext(docValuesContext);
    }
    if (source.fetchFields() != null) {
        FetchFieldsContext fetchFieldsContext = new FetchFieldsContext(source.fetchFields());
        context.fetchFieldsContext(fetchFieldsContext);
    }
    if (source.highlighter() != null) {
        HighlightBuilder highlightBuilder = source.highlighter();
        try {
            context.highlight(highlightBuilder.build(queryShardContext));
        } catch (IOException e) {
            throw new SearchException(shardTarget, "failed to create SearchContextHighlighter", e);
        }
    }
    if (source.scriptFields() != null && source.size() != 0) {
        int maxAllowedScriptFields = context.mapperService().getIndexSettings().getMaxScriptFields();
        if (source.scriptFields().size() > maxAllowedScriptFields) {
            throw new IllegalArgumentException(
                "Trying to retrieve too many script_fields. Must be less than or equal to: ["
                    + maxAllowedScriptFields
                    + "] but was ["
                    + source.scriptFields().size()
                    + "]. This limit can be set by changing the ["
                    + IndexSettings.MAX_SCRIPT_FIELDS_SETTING.getKey()
                    + "] index level setting."
            );
        }
        for (org.opensearch.search.builder.SearchSourceBuilder.ScriptField field : source.scriptFields()) {
            FieldScript.Factory factory = scriptService.compile(field.script(), FieldScript.CONTEXT);
            SearchLookup lookup = context.getQueryShardContext().lookup();
            FieldScript.LeafFactory searchScript = factory.newFactory(field.script().getParams(), lookup);
            context.scriptFields().add(new ScriptField(field.fieldName(), searchScript, field.ignoreFailure()));
        }
    }
    if (source.ext() != null) {
        for (SearchExtBuilder searchExtBuilder : source.ext()) {
            context.addSearchExt(searchExtBuilder);
        }
    }
    if (source.version() != null) {
        context.version(source.version());
    }
    if (source.seqNoAndPrimaryTerm() != null) {
        context.seqNoAndPrimaryTerm(source.seqNoAndPrimaryTerm());
    }
    if (source.stats() != null) {
        context.groupStats(source.stats());
    }
    if (CollectionUtils.isEmpty(source.searchAfter()) == false) {
        if (context.scrollContext() != null) {
            throw new SearchException(shardTarget, "`search_after` cannot be used in a scroll context.");
        }
        if (context.from() > 0) {
            throw new SearchException(shardTarget, "`from` parameter must be set to 0 when `search_after` is used.");
        }
        FieldDoc fieldDoc = SearchAfterBuilder.buildFieldDoc(context.sort(), source.searchAfter());
        context.searchAfter(fieldDoc);
    }
    if (source.slice() != null) {
        if (context.scrollContext() == null) {
            throw new SearchException(shardTarget, "`slice` cannot be used outside of a scroll context");
        }
        context.sliceBuilder(source.slice());
    }
    if (source.storedFields() != null) {
        if (source.storedFields().fetchFields() == false) {
            if (context.sourceRequested()) {
                throw new SearchException(shardTarget, "[stored_fields] cannot be disabled if [_source] is requested");
            }
            if (context.fetchFieldsContext() != null) {
                throw new SearchException(shardTarget, "[stored_fields] cannot be disabled when using the [fields] option");
            }
        }
        context.storedFieldsContext(source.storedFields());
    }
    if (source.collapse() != null) {
        if (context.scrollContext() != null) {
            throw new SearchException(shardTarget, "cannot use `collapse` in a scroll context");
        }
        if (context.searchAfter() != null) {
            throw new SearchException(shardTarget, "cannot use `collapse` in conjunction with `search_after`");
        }
        if (context.rescore() != null && context.rescore().isEmpty() == false) {
            throw new SearchException(shardTarget, "cannot use `collapse` in conjunction with `rescore`");
        }
        final CollapseContext collapseContext = source.collapse().build(queryShardContext);
        context.collapse(collapseContext);
    }
}
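Most of parseSource funnels through a single conversion: QueryShardContext.toQuery(QueryBuilder) turns the request's builder trees into ParsedQuery objects that the DefaultSearchContext then executes. A minimal sketch of that core, as a hypothetical helper extracted from the method above:

// Converts the top-level query and post_filter of a request into parsed queries on the context.
private static void applyQueryAndPostFilter(DefaultSearchContext context, SearchSourceBuilder source) {
    QueryShardContext queryShardContext = context.getQueryShardContext();
    if (source.query() != null) {
        context.parsedQuery(queryShardContext.toQuery(source.query()));
    }
    if (source.postFilter() != null) {
        context.parsedPostFilter(queryShardContext.toQuery(source.postFilter()));
    }
}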
Use of org.opensearch.index.query.QueryShardContext in the OpenSearch project (opensearch-project).
From the class SearchService, method canMatch:
private CanMatchResponse canMatch(ShardSearchRequest request, boolean checkRefreshPending) throws IOException {
    assert request.searchType() == SearchType.QUERY_THEN_FETCH : "unexpected search type: " + request.searchType();
    final ReaderContext readerContext = request.readerId() != null ? findReaderContext(request.readerId(), request) : null;
    final Releasable markAsUsed = readerContext != null ? readerContext.markAsUsed(getKeepAlive(request)) : () -> {};
    try (Releasable ignored = markAsUsed) {
        final IndexService indexService;
        final Engine.Searcher canMatchSearcher;
        final boolean hasRefreshPending;
        if (readerContext != null) {
            indexService = readerContext.indexService();
            canMatchSearcher = readerContext.acquireSearcher(Engine.CAN_MATCH_SEARCH_SOURCE);
            hasRefreshPending = false;
        } else {
            indexService = indicesService.indexServiceSafe(request.shardId().getIndex());
            IndexShard indexShard = indexService.getShard(request.shardId().getId());
            hasRefreshPending = indexShard.hasRefreshPending() && checkRefreshPending;
            canMatchSearcher = indexShard.acquireSearcher(Engine.CAN_MATCH_SEARCH_SOURCE);
        }
        try (Releasable ignored2 = canMatchSearcher) {
            QueryShardContext context = indexService.newQueryShardContext(
                request.shardId().id(),
                canMatchSearcher,
                request::nowInMillis,
                request.getClusterAlias()
            );
            Rewriteable.rewrite(request.getRewriteable(), context, false);
            final boolean aliasFilterCanMatch = request.getAliasFilter().getQueryBuilder() instanceof MatchNoneQueryBuilder == false;
            FieldSortBuilder sortBuilder = FieldSortBuilder.getPrimaryFieldSortOrNull(request.source());
            MinAndMax<?> minMax = sortBuilder != null ? FieldSortBuilder.getMinMaxOrNull(context, sortBuilder) : null;
            final boolean canMatch;
            if (canRewriteToMatchNone(request.source())) {
                QueryBuilder queryBuilder = request.source().query();
                canMatch = aliasFilterCanMatch && queryBuilder instanceof MatchNoneQueryBuilder == false;
            } else {
                // null query means match_all
                canMatch = aliasFilterCanMatch;
            }
            return new CanMatchResponse(canMatch || hasRefreshPending, minMax);
        }
    }
}
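The decision at the heart of canMatch is small: after rewriting the request against a shard-level QueryShardContext, the shard can only be skipped when the alias filter or the request query has collapsed to MatchNoneQueryBuilder. The sketch below is a condensed, illustrative restatement of just that check; it deliberately omits the refresh-pending override, the canRewriteToMatchNone guard, and the primary-sort min/max extraction shown above.

// Returns false only when the rewritten request provably matches nothing on this shard.
private static boolean shardCanMatch(ShardSearchRequest request, QueryShardContext context) throws IOException {
    Rewriteable.rewrite(request.getRewriteable(), context, false);
    boolean aliasFilterCanMatch = request.getAliasFilter().getQueryBuilder() instanceof MatchNoneQueryBuilder == false;
    QueryBuilder queryBuilder = request.source() != null ? request.source().query() : null;
    // a null query means match_all, so only an explicit match_none rules the shard out
    return aliasFilterCanMatch && queryBuilder instanceof MatchNoneQueryBuilder == false;
}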