Use of org.apache.lucene.search.Explanation in project elasticsearch by elastic.
The class ExplainFetchSubPhase, method hitExecute:
@Override
public void hitExecute(SearchContext context, HitContext hitContext) {
    if (context.explain() == false) {
        return;
    }
    try {
        final int topLevelDocId = hitContext.hit().docId();
        Explanation explanation = context.searcher().explain(context.query(), topLevelDocId);
        for (RescoreSearchContext rescore : context.rescore()) {
            explanation = rescore.rescorer().explain(topLevelDocId, context, rescore, explanation);
        }
        // we use the top level doc id, since we work with the top level searcher
        hitContext.hit().explanation(explanation);
    } catch (IOException e) {
        throw new FetchPhaseExecutionException(context,
            "Failed to explain doc [" + hitContext.hit().getType() + "#" + hitContext.hit().getId() + "]", e);
    } finally {
        context.clearReleasables(SearchContext.Lifetime.COLLECTION);
    }
}
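The sub-phase above ultimately rests on a single Lucene call, IndexSearcher.explain(query, docId), issued against the top-level searcher. A minimal sketch of that call in plain Lucene, outside any Elasticsearch context (the directory, field name, and doc id are illustrative assumptions, not taken from the project):

import java.io.IOException;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.store.Directory;

// Sketch only: 'directory' is assumed to already contain an index.
static void printExplanation(Directory directory) throws IOException {
    try (DirectoryReader reader = DirectoryReader.open(directory)) {
        IndexSearcher searcher = new IndexSearcher(reader);
        TermQuery query = new TermQuery(new Term("field", "fox"));
        // Explain how doc 0 scored (or why it did not match) against the query.
        Explanation explanation = searcher.explain(query, 0);
        System.out.println(explanation); // prints the nested value/description tree
    }
}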
Use of org.apache.lucene.search.Explanation in project elasticsearch by elastic.
The class TransportExplainAction, method shardOperation:
@Override
protected ExplainResponse shardOperation(ExplainRequest request, ShardId shardId) throws IOException {
    ShardSearchLocalRequest shardSearchLocalRequest = new ShardSearchLocalRequest(shardId,
        new String[] { request.type() }, request.nowInMillis, request.filteringAlias());
    SearchContext context = searchService.createSearchContext(shardSearchLocalRequest, SearchService.NO_TIMEOUT, null);
    Term uidTerm = new Term(UidFieldMapper.NAME, Uid.createUidAsBytes(request.type(), request.id()));
    Engine.GetResult result = null;
    try {
        result = context.indexShard().get(new Engine.Get(false, uidTerm));
        if (!result.exists()) {
            return new ExplainResponse(shardId.getIndexName(), request.type(), request.id(), false);
        }
        context.parsedQuery(context.getQueryShardContext().toQuery(request.query()));
        context.preProcess(true);
        int topLevelDocId = result.docIdAndVersion().docId + result.docIdAndVersion().context.docBase;
        Explanation explanation = context.searcher().explain(context.query(), topLevelDocId);
        for (RescoreSearchContext ctx : context.rescore()) {
            Rescorer rescorer = ctx.rescorer();
            explanation = rescorer.explain(topLevelDocId, context, ctx, explanation);
        }
        if (request.storedFields() != null
                || (request.fetchSourceContext() != null && request.fetchSourceContext().fetchSource())) {
            // Advantage is that we're not opening a second searcher to retrieve the _source. Also
            // because we are working in the same searcher in engineGetResult we can be sure that a
            // doc isn't deleted between the initial get and this call.
            GetResult getResult = context.indexShard().getService().get(result, request.id(), request.type(),
                request.storedFields(), request.fetchSourceContext());
            return new ExplainResponse(shardId.getIndexName(), request.type(), request.id(), true, explanation, getResult);
        } else {
            return new ExplainResponse(shardId.getIndexName(), request.type(), request.id(), true, explanation);
        }
    } catch (IOException e) {
        throw new ElasticsearchException("Could not explain", e);
    } finally {
        Releasables.close(result, context);
    }
}
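The line that adds result.docIdAndVersion().context.docBase is what turns a segment-relative doc id into the top-level doc id that IndexSearcher.explain expects. A hedged sketch of the same mapping in plain Lucene (the method name and parameters are illustrative, not from the project):

import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReaderContext;

// Sketch only: maps a doc id that is relative to one segment (leaf) onto the
// top-level doc id space used by IndexSearcher.search and IndexSearcher.explain.
static int toTopLevelDocId(IndexReader reader, int leafOrdinal, int segmentLocalDocId) {
    LeafReaderContext leaf = reader.leaves().get(leafOrdinal);
    return leaf.docBase + segmentLocalDocId; // same arithmetic as docId + context.docBase above
}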
Use of org.apache.lucene.search.Explanation in project elasticsearch by elastic.
The class RestExplainAction, method prepareRequest:
@Override
public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException {
    final ExplainRequest explainRequest = new ExplainRequest(request.param("index"), request.param("type"), request.param("id"));
    explainRequest.parent(request.param("parent"));
    explainRequest.routing(request.param("routing"));
    explainRequest.preference(request.param("preference"));
    String queryString = request.param("q");
    request.withContentOrSourceParamParserOrNull(parser -> {
        if (parser != null) {
            explainRequest.query(RestActions.getQueryContent(parser));
        } else if (queryString != null) {
            QueryBuilder query = RestActions.urlParamsToQueryBuilder(request);
            explainRequest.query(query);
        }
    });
    if (request.param("fields") != null) {
        throw new IllegalArgumentException("The parameter [fields] is no longer supported, "
            + "please use [stored_fields] to retrieve stored fields");
    }
    String sField = request.param("stored_fields");
    if (sField != null) {
        String[] sFields = Strings.splitStringByCommaToArray(sField);
        if (sFields != null) {
            explainRequest.storedFields(sFields);
        }
    }
    explainRequest.fetchSourceContext(FetchSourceContext.parseFromRestRequest(request));
    return channel -> client.explain(explainRequest, new RestBuilderListener<ExplainResponse>(channel) {

        @Override
        public RestResponse buildResponse(ExplainResponse response, XContentBuilder builder) throws Exception {
            builder.startObject();
            builder.field(Fields._INDEX, response.getIndex())
                .field(Fields._TYPE, response.getType())
                .field(Fields._ID, response.getId())
                .field(Fields.MATCHED, response.isMatch());
            if (response.hasExplanation()) {
                builder.startObject(Fields.EXPLANATION);
                buildExplanation(builder, response.getExplanation());
                builder.endObject();
            }
            GetResult getResult = response.getGetResult();
            if (getResult != null) {
                builder.startObject(Fields.GET);
                response.getGetResult().toXContentEmbedded(builder, request);
                builder.endObject();
            }
            builder.endObject();
            return new BytesRestResponse(response.isExists() ? OK : NOT_FOUND, builder);
        }

        private void buildExplanation(XContentBuilder builder, Explanation explanation) throws IOException {
            builder.field(Fields.VALUE, explanation.getValue());
            builder.field(Fields.DESCRIPTION, explanation.getDescription());
            Explanation[] innerExps = explanation.getDetails();
            if (innerExps != null) {
                builder.startArray(Fields.DETAILS);
                for (Explanation exp : innerExps) {
                    builder.startObject();
                    buildExplanation(builder, exp);
                    builder.endObject();
                }
                builder.endArray();
            }
        }
    });
}
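buildExplanation above walks the Explanation tree recursively through getDetails(). For reference, a nested Explanation of the shape it renders can be built by hand with Lucene's factory methods; the values and descriptions below are made up purely for illustration:

import org.apache.lucene.search.Explanation;

// Sketch only: a two-level explanation tree similar in shape to what the
// recursive buildExplanation method above serializes to XContent.
Explanation leaf1 = Explanation.match(1.2f, "weight(field:fox) [termFreq=1]");
Explanation leaf2 = Explanation.match(0.8f, "fieldNorm");
Explanation root = Explanation.match(2.0f, "sum of:", leaf1, leaf2);

// getValue(), getDescription() and getDetails() are exactly the accessors
// buildExplanation reads when emitting value, description and details.
assert root.getDetails().length == 2;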
Use of org.apache.lucene.search.Explanation in project elasticsearch by elastic.
The class OldIndexBackwardsCompatibilityIT, method assertAllSearchWorks:
void assertAllSearchWorks(String indexName) {
    logger.info("--> testing _all search");
    SearchResponse searchRsp = client().prepareSearch(indexName).get();
    ElasticsearchAssertions.assertNoFailures(searchRsp);
    assertThat(searchRsp.getHits().getTotalHits(), greaterThanOrEqualTo(1L));
    SearchHit bestHit = searchRsp.getHits().getAt(0);
    // Make sure there are payloads and they are taken into account for the score
    // the 'string' field has a boost of 4 in the mappings so it should get a payload boost
    String stringValue = (String) bestHit.getSourceAsMap().get("string");
    assertNotNull(stringValue);
    Explanation explanation = client().prepareExplain(indexName, bestHit.getType(), bestHit.getId())
        .setQuery(QueryBuilders.matchQuery("_all", stringValue)).get().getExplanation();
    assertTrue("Could not find payload boost in explanation\n" + explanation, findPayloadBoostInExplanation(explanation));
    // Make sure the query can run on the whole index
    searchRsp = client().prepareSearch(indexName).setQuery(QueryBuilders.matchQuery("_all", stringValue))
        .setExplain(true).get();
    ElasticsearchAssertions.assertNoFailures(searchRsp);
    assertThat(searchRsp.getHits().getTotalHits(), greaterThanOrEqualTo(1L));
}
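The helper findPayloadBoostInExplanation is not included in this snippet; it recursively scans the explanation tree for a payload-boost contribution. A hypothetical sketch of such a walk follows (the exact description string the real test checks may differ):

import org.apache.lucene.search.Explanation;

// Hypothetical sketch: recursively look for a detail whose description mentions
// a payload boost and whose value differs from the neutral 1.0f.
static boolean findPayloadBoostInExplanation(Explanation expl) {
    if (expl.getDescription().contains("payloadBoost") && expl.getValue() != 1f) {
        return true;
    }
    for (Explanation sub : expl.getDetails()) {
        if (findPayloadBoostInExplanation(sub)) {
            return true;
        }
    }
    return false;
}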
Use of org.apache.lucene.search.Explanation in project elasticsearch by elastic.
The class PercolateQueryTests, method testPercolateQuery:
public void testPercolateQuery() throws Exception {
    List<Iterable<? extends IndexableField>> docs = new ArrayList<>();
    List<Query> queries = new ArrayList<>();
    PercolateQuery.QueryStore queryStore = ctx -> queries::get;

    queries.add(new TermQuery(new Term("field", "fox")));
    docs.add(Collections.singleton(new StringField("select", "a", Field.Store.NO)));

    SpanNearQuery.Builder snp = new SpanNearQuery.Builder("field", true);
    snp.addClause(new SpanTermQuery(new Term("field", "jumps")));
    snp.addClause(new SpanTermQuery(new Term("field", "lazy")));
    snp.addClause(new SpanTermQuery(new Term("field", "dog")));
    snp.setSlop(2);
    queries.add(snp.build());
    docs.add(Collections.singleton(new StringField("select", "b", Field.Store.NO)));

    PhraseQuery.Builder pq1 = new PhraseQuery.Builder();
    pq1.add(new Term("field", "quick"));
    pq1.add(new Term("field", "brown"));
    pq1.add(new Term("field", "jumps"));
    pq1.setSlop(1);
    queries.add(pq1.build());
    docs.add(Collections.singleton(new StringField("select", "b", Field.Store.NO)));

    BooleanQuery.Builder bq1 = new BooleanQuery.Builder();
    bq1.add(new TermQuery(new Term("field", "quick")), BooleanClause.Occur.MUST);
    bq1.add(new TermQuery(new Term("field", "brown")), BooleanClause.Occur.MUST);
    bq1.add(new TermQuery(new Term("field", "fox")), BooleanClause.Occur.MUST);
    queries.add(bq1.build());
    docs.add(Collections.singleton(new StringField("select", "b", Field.Store.NO)));

    indexWriter.addDocuments(docs);
    indexWriter.close();
    directoryReader = DirectoryReader.open(directory);
    IndexSearcher shardSearcher = newSearcher(directoryReader);

    MemoryIndex memoryIndex = new MemoryIndex();
    memoryIndex.addField("field", "the quick brown fox jumps over the lazy dog", new WhitespaceAnalyzer());
    IndexSearcher percolateSearcher = memoryIndex.createSearcher();

    // no scoring, wrapping it in a constant score query:
    Query query = new ConstantScoreQuery(new PercolateQuery("type", queryStore, new BytesArray("a"),
        new TermQuery(new Term("select", "a")), percolateSearcher, new MatchNoDocsQuery("")));
    TopDocs topDocs = shardSearcher.search(query, 10);
    assertThat(topDocs.totalHits, equalTo(1));
    assertThat(topDocs.scoreDocs.length, equalTo(1));
    assertThat(topDocs.scoreDocs[0].doc, equalTo(0));
    Explanation explanation = shardSearcher.explain(query, 0);
    assertThat(explanation.isMatch(), is(true));
    assertThat(explanation.getValue(), equalTo(topDocs.scoreDocs[0].score));

    query = new ConstantScoreQuery(new PercolateQuery("type", queryStore, new BytesArray("b"),
        new TermQuery(new Term("select", "b")), percolateSearcher, new MatchNoDocsQuery("")));
    topDocs = shardSearcher.search(query, 10);
    assertThat(topDocs.totalHits, equalTo(3));
    assertThat(topDocs.scoreDocs.length, equalTo(3));

    assertThat(topDocs.scoreDocs[0].doc, equalTo(1));
    explanation = shardSearcher.explain(query, 1);
    assertThat(explanation.isMatch(), is(true));
    assertThat(explanation.getValue(), equalTo(topDocs.scoreDocs[0].score));

    assertThat(topDocs.scoreDocs[1].doc, equalTo(2));
    explanation = shardSearcher.explain(query, 2);
    assertThat(explanation.isMatch(), is(true));
    assertThat(explanation.getValue(), equalTo(topDocs.scoreDocs[1].score));

    assertThat(topDocs.scoreDocs[2].doc, equalTo(3));
    explanation = shardSearcher.explain(query, 3);
    assertThat(explanation.isMatch(), is(true));
    assertThat(explanation.getValue(), equalTo(topDocs.scoreDocs[2].score));

    query = new ConstantScoreQuery(new PercolateQuery("type", queryStore, new BytesArray("c"),
        new MatchAllDocsQuery(), percolateSearcher, new MatchAllDocsQuery()));
    topDocs = shardSearcher.search(query, 10);
    assertThat(topDocs.totalHits, equalTo(4));

    query = new PercolateQuery("type", queryStore, new BytesArray("{}"),
        new TermQuery(new Term("select", "b")), percolateSearcher, new MatchNoDocsQuery(""));
    topDocs = shardSearcher.search(query, 10);
    assertThat(topDocs.totalHits, equalTo(3));
    assertThat(topDocs.scoreDocs.length, equalTo(3));

    assertThat(topDocs.scoreDocs[0].doc, equalTo(3));
    explanation = shardSearcher.explain(query, 3);
    assertThat(explanation.isMatch(), is(true));
    assertThat(explanation.getValue(), equalTo(topDocs.scoreDocs[0].score));
    assertThat(explanation.getDetails(), arrayWithSize(1));

    assertThat(topDocs.scoreDocs[1].doc, equalTo(2));
    explanation = shardSearcher.explain(query, 2);
    assertThat(explanation.isMatch(), is(true));
    assertThat(explanation.getValue(), equalTo(topDocs.scoreDocs[1].score));
    assertThat(explanation.getDetails(), arrayWithSize(1));

    assertThat(topDocs.scoreDocs[2].doc, equalTo(1));
    explanation = shardSearcher.explain(query, 1);
    assertThat(explanation.isMatch(), is(true));
    assertThat(explanation.getValue(), equalTo(topDocs.scoreDocs[2].score));
    assertThat(explanation.getDetails(), arrayWithSize(1));
}
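Each block of assertions above pairs a scoreDocs entry with a shardSearcher.explain call on the same doc id and checks that the explained value equals the collected score. That pattern could be factored into a small helper; the sketch below is an illustrative refactoring under that assumption, not part of the actual test:

import java.io.IOException;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TopDocs;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

// Sketch only: verify that every collected hit explains as a match and that the
// explained value equals the score reported in TopDocs.
static void assertExplanationsMatchScores(IndexSearcher searcher, Query query, TopDocs topDocs) throws IOException {
    for (ScoreDoc scoreDoc : topDocs.scoreDocs) {
        Explanation explanation = searcher.explain(query, scoreDoc.doc);
        assertTrue(explanation.isMatch());
        assertEquals(scoreDoc.score, explanation.getValue(), 0.0f);
    }
}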