Use of org.apache.lucene.search.Explanation in project elasticsearch by elastic.
The class SearchHit, method buildExplanation:
private void buildExplanation(XContentBuilder builder, Explanation explanation) throws IOException {
    builder.startObject();
    builder.field(Fields.VALUE, explanation.getValue());
    builder.field(Fields.DESCRIPTION, explanation.getDescription());
    Explanation[] innerExps = explanation.getDetails();
    if (innerExps != null) {
        builder.startArray(Fields.DETAILS);
        for (Explanation exp : innerExps) {
            buildExplanation(builder, exp);
        }
        builder.endArray();
    }
    builder.endObject();
}
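The recursion maps each Explanation node to a JSON object with a value, a description, and a details array of nested explanations. As a standalone illustration (not Elasticsearch code), the sketch below builds a small Explanation tree by hand and walks it with the same recursion, printing instead of writing XContent; the class name and the numbers are invented for the example.

import org.apache.lucene.search.Explanation;

public class ExplanationTreeSketch {

    public static void main(String[] args) {
        // Hand-built tree for illustration; in SearchHit the tree comes from
        // IndexSearcher.explain(query, docId) when explain(true) is requested.
        Explanation tf = Explanation.match(2.0f, "termFreq=2.0");
        Explanation idf = Explanation.match(1.75f, "idf");
        Explanation root = Explanation.match(3.5f, "weight(text:foo), product of:", tf, idf);
        print(root, "");
    }

    // Same shape as buildExplanation above: emit value and description,
    // then recurse into getDetails() for the nested details array.
    private static void print(Explanation explanation, String indent) {
        System.out.println(indent + explanation.getValue() + " : " + explanation.getDescription());
        for (Explanation detail : explanation.getDetails()) {
            print(detail, indent + "  ");
        }
    }
}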
Use of org.apache.lucene.search.Explanation in project elasticsearch by elastic.
The class QueryRescorer, method explain:
@Override
public Explanation explain(int topLevelDocId, SearchContext context, RescoreSearchContext rescoreContext,
        Explanation sourceExplanation) throws IOException {
    QueryRescoreContext rescore = (QueryRescoreContext) rescoreContext;
    ContextIndexSearcher searcher = context.searcher();
    if (sourceExplanation == null) {
        // this should not happen but just in case
        return Explanation.noMatch("nothing matched");
    }
    // TODO: this isn't right? I.e., we are incorrectly pretending all first pass hits were rescored? If the requested docID was
    // beyond the top rescoreContext.window() in the first pass hits, we don't rescore it now?
    Explanation rescoreExplain = searcher.explain(rescore.query(), topLevelDocId);
    float primaryWeight = rescore.queryWeight();
    Explanation prim;
    if (sourceExplanation.isMatch()) {
        prim = Explanation.match(sourceExplanation.getValue() * primaryWeight, "product of:",
                sourceExplanation, Explanation.match(primaryWeight, "primaryWeight"));
    } else {
        prim = Explanation.noMatch("First pass did not match", sourceExplanation);
    }
    // we should add QueryRescorer.explainCombine to Lucene?
    if (rescoreExplain != null && rescoreExplain.isMatch()) {
        float secondaryWeight = rescore.rescoreQueryWeight();
        Explanation sec = Explanation.match(rescoreExplain.getValue() * secondaryWeight, "product of:",
                rescoreExplain, Explanation.match(secondaryWeight, "secondaryWeight"));
        QueryRescoreMode scoreMode = rescore.scoreMode();
        return Explanation.match(scoreMode.combine(prim.getValue(), sec.getValue()), scoreMode + " of:", prim, sec);
    } else {
        return prim;
    }
}
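The method weights each explanation (first-pass score times queryWeight, rescore score times rescoreQueryWeight) and then merges the two values according to the rescore score mode. The hedged sketch below reproduces that arithmetic with plain Lucene Explanation objects outside Elasticsearch; combineTotal is a hypothetical stand-in for the "total" score mode's combine step, and the weights and scores are invented.

import org.apache.lucene.search.Explanation;

public class RescoreExplainSketch {

    // Hypothetical stand-in for the "total" score mode: sum of the two weighted scores.
    // The other modes (multiply, avg, max, min) would differ only in this expression.
    static float combineTotal(float primary, float secondary) {
        return primary + secondary;
    }

    public static void main(String[] args) {
        float primaryWeight = 0.5f;      // plays the role of rescore.queryWeight()
        float secondaryWeight = 2.0f;    // plays the role of rescore.rescoreQueryWeight()

        Explanation source = Explanation.match(1.2f, "first pass score");
        Explanation rescore = Explanation.match(3.0f, "rescore query score");

        Explanation prim = Explanation.match(source.getValue() * primaryWeight, "product of:",
                source, Explanation.match(primaryWeight, "primaryWeight"));
        Explanation sec = Explanation.match(rescore.getValue() * secondaryWeight, "product of:",
                rescore, Explanation.match(secondaryWeight, "secondaryWeight"));

        Explanation combined = Explanation.match(combineTotal(prim.getValue(), sec.getValue()),
                "sum of:", prim, sec);
        System.out.println(combined);
    }
}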
Use of org.apache.lucene.search.Explanation in project elasticsearch by elastic.
The class TopHitsIT, method testFetchFeatures:
public void testFetchFeatures() {
    SearchResponse response = client().prepareSearch("idx")
            .setTypes("type")
            .setQuery(matchQuery("text", "text").queryName("test"))
            .addAggregation(terms("terms")
                    .executionHint(randomExecutionHint())
                    .field(TERMS_AGGS_FIELD)
                    .subAggregation(topHits("hits")
                            .size(1)
                            .highlighter(new HighlightBuilder().field("text"))
                            .explain(true)
                            .storedField("text")
                            .fieldDataField("field1")
                            .scriptField("script",
                                    new Script(ScriptType.INLINE, MockScriptEngine.NAME, "5", Collections.emptyMap()))
                            .fetchSource("text", null)
                            .version(true)))
            .get();
    assertSearchResponse(response);
    Terms terms = response.getAggregations().get("terms");
    assertThat(terms, notNullValue());
    assertThat(terms.getName(), equalTo("terms"));
    assertThat(terms.getBuckets().size(), equalTo(5));
    for (Terms.Bucket bucket : terms.getBuckets()) {
        TopHits topHits = bucket.getAggregations().get("hits");
        SearchHits hits = topHits.getHits();
        assertThat(hits.getTotalHits(), equalTo(10L));
        assertThat(hits.getHits().length, equalTo(1));
        SearchHit hit = hits.getAt(0);
        HighlightField highlightField = hit.getHighlightFields().get("text");
        assertThat(highlightField.getFragments().length, equalTo(1));
        assertThat(highlightField.getFragments()[0].string(), equalTo("some <em>text</em> to entertain"));
        Explanation explanation = hit.getExplanation();
        assertThat(explanation.toString(), containsString("text:text"));
        long version = hit.getVersion();
        assertThat(version, equalTo(1L));
        assertThat(hit.getMatchedQueries()[0], equalTo("test"));
        SearchHitField field = hit.field("field1");
        assertThat(field.getValue().toString(), equalTo("5"));
        assertThat(hit.getSourceAsMap().get("text").toString(), equalTo("some text to entertain"));
        field = hit.field("script");
        assertThat(field.getValue().toString(), equalTo("5"));
        assertThat(hit.getSourceAsMap().size(), equalTo(1));
        assertThat(hit.getSourceAsMap().get("text").toString(), equalTo("some text to entertain"));
    }
}
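The part of the request that makes hit.getExplanation() non-null is the top_hits sub-aggregation with explain(true). The sketch below assembles just that builder in isolation, without a client or a running cluster; the import package paths assume the Elasticsearch 5.x source layout and the script field from the test is omitted.

import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.metrics.tophits.TopHitsAggregationBuilder;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;

public class TopHitsExplainSketch {

    public static void main(String[] args) {
        // Same fetch features as the test, minus the script field: explain(true)
        // asks for a Lucene Explanation to be attached to each returned hit.
        TopHitsAggregationBuilder hits = AggregationBuilders.topHits("hits")
                .size(1)
                .highlighter(new HighlightBuilder().field("text"))
                .explain(true)
                .storedField("text")
                .fetchSource("text", null)
                .version(true);

        // In the test this builder is attached as:
        // terms("terms").field(TERMS_AGGS_FIELD).subAggregation(hits)
        System.out.println(hits.getName());
    }
}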
Use of org.apache.lucene.search.Explanation in project elasticsearch by elastic.
The class CandidateQueryTests, method duelRun:
private void duelRun(PercolateQuery.QueryStore queryStore, MemoryIndex memoryIndex, IndexSearcher shardSearcher) throws IOException {
    boolean requireScore = randomBoolean();
    IndexSearcher percolateSearcher = memoryIndex.createSearcher();
    Query percolateQuery = fieldType.percolateQuery("type", queryStore, new BytesArray("{}"), percolateSearcher);
    Query query = requireScore ? percolateQuery : new ConstantScoreQuery(percolateQuery);
    TopDocs topDocs = shardSearcher.search(query, 10);
    Query controlQuery = new ControlQuery(memoryIndex, queryStore);
    controlQuery = requireScore ? controlQuery : new ConstantScoreQuery(controlQuery);
    TopDocs controlTopDocs = shardSearcher.search(controlQuery, 10);
    assertThat(topDocs.totalHits, equalTo(controlTopDocs.totalHits));
    assertThat(topDocs.scoreDocs.length, equalTo(controlTopDocs.scoreDocs.length));
    for (int j = 0; j < topDocs.scoreDocs.length; j++) {
        assertThat(topDocs.scoreDocs[j].doc, equalTo(controlTopDocs.scoreDocs[j].doc));
        assertThat(topDocs.scoreDocs[j].score, equalTo(controlTopDocs.scoreDocs[j].score));
        if (requireScore) {
            Explanation explain1 = shardSearcher.explain(query, topDocs.scoreDocs[j].doc);
            Explanation explain2 = shardSearcher.explain(controlQuery, controlTopDocs.scoreDocs[j].doc);
            assertThat(explain1.isMatch(), equalTo(explain2.isMatch()));
            assertThat(explain1.getValue(), equalTo(explain2.getValue()));
        }
    }
}
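The duel asserts that two queries expected to be equivalent return the same top docs and, when scoring, the same Explanation match flag and value. As a self-contained illustration of that explain-and-compare pattern with plain Lucene (no percolator involved), the sketch below duels a TermQuery against a single-clause BooleanQuery, which Lucene rewrites to the same term query, so the two explanations should agree; the field and text are invented.

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.memory.MemoryIndex;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;

public class ExplainDuelSketch {

    public static void main(String[] args) throws Exception {
        // Single in-memory document, the same trick CandidateQueryTests uses
        // for the document being percolated.
        MemoryIndex memoryIndex = new MemoryIndex();
        memoryIndex.addField("field", "the quick brown fox", new StandardAnalyzer());
        IndexSearcher searcher = memoryIndex.createSearcher();

        Query query = new TermQuery(new Term("field", "fox"));
        Query controlQuery = new BooleanQuery.Builder()
                .add(new TermQuery(new Term("field", "fox")), Occur.SHOULD)
                .build();

        Explanation explain1 = searcher.explain(query, 0);
        Explanation explain2 = searcher.explain(controlQuery, 0);

        // Mirrors the assertions in duelRun: same match flag, same score.
        System.out.println(explain1.isMatch() + " == " + explain2.isMatch());
        System.out.println(explain1.getValue() + " == " + explain2.getValue());
    }
}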
Use of org.apache.lucene.search.Explanation in project elasticsearch by elastic.
The class AllTermQuery, method createWeight:
@Override
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
    if (needsScores == false) {
        return new TermQuery(term).createWeight(searcher, needsScores);
    }
    final TermContext termStates = TermContext.build(searcher.getTopReaderContext(), term);
    final CollectionStatistics collectionStats = searcher.collectionStatistics(term.field());
    final TermStatistics termStats = searcher.termStatistics(term, termStates);
    final Similarity similarity = searcher.getSimilarity(needsScores);
    final SimWeight stats = similarity.computeWeight(collectionStats, termStats);
    return new Weight(this) {

        @Override
        public float getValueForNormalization() throws IOException {
            return stats.getValueForNormalization();
        }

        @Override
        public void normalize(float norm, float topLevelBoost) {
            stats.normalize(norm, topLevelBoost);
        }

        @Override
        public void extractTerms(Set<Term> terms) {
            terms.add(term);
        }

        @Override
        public Explanation explain(LeafReaderContext context, int doc) throws IOException {
            AllTermScorer scorer = scorer(context);
            if (scorer != null) {
                int newDoc = scorer.iterator().advance(doc);
                if (newDoc == doc) {
                    float score = scorer.score();
                    float freq = scorer.freq();
                    SimScorer docScorer = similarity.simScorer(stats, context);
                    Explanation freqExplanation = Explanation.match(freq, "termFreq=" + freq);
                    Explanation termScoreExplanation = docScorer.explain(doc, freqExplanation);
                    Explanation payloadBoostExplanation =
                            Explanation.match(scorer.payloadBoost(), "payloadBoost=" + scorer.payloadBoost());
                    return Explanation.match(score,
                            "weight(" + getQuery() + " in " + doc + ") ["
                                    + similarity.getClass().getSimpleName() + "], product of:",
                            termScoreExplanation, payloadBoostExplanation);
                }
            }
            return Explanation.noMatch("no matching term");
        }

        @Override
        public AllTermScorer scorer(LeafReaderContext context) throws IOException {
            final Terms terms = context.reader().terms(term.field());
            if (terms == null) {
                return null;
            }
            final TermsEnum termsEnum = terms.iterator();
            if (termsEnum == null) {
                return null;
            }
            final TermState state = termStates.get(context.ord);
            if (state == null) {
                // Term does not exist in this segment
                return null;
            }
            termsEnum.seekExact(term.bytes(), state);
            PostingsEnum docs = termsEnum.postings(null, PostingsEnum.PAYLOADS);
            assert docs != null;
            return new AllTermScorer(this, docs, similarity.simScorer(stats, context));
        }
    };
}
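Weight.explain is reached through IndexSearcher.explain: the searcher builds the Weight, finds the leaf that contains the document, and the Weight advances its scorer to that document and assembles the Explanation (here, the similarity's term score multiplied by the payload boost). AllTermQuery itself is internal to Elasticsearch, so the sketch below shows the same plumbing with a plain TermQuery against a small in-memory index; the field name and text are invented, and the payload boost factor specific to AllTermQuery does not appear in the output.

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field.Store;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.store.RAMDirectory;

public class WeightExplainSketch {

    public static void main(String[] args) throws Exception {
        RAMDirectory directory = new RAMDirectory();
        try (IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(new StandardAnalyzer()))) {
            Document doc = new Document();
            doc.add(new TextField("_all", "some text to entertain", Store.NO));
            writer.addDocument(doc);
        }
        try (DirectoryReader reader = DirectoryReader.open(directory)) {
            IndexSearcher searcher = new IndexSearcher(reader);
            // IndexSearcher.explain creates the Weight and calls Weight.explain
            // for the leaf holding doc 0, just as it would for AllTermQuery.
            Explanation explanation = searcher.explain(new TermQuery(new Term("_all", "text")), 0);
            System.out.println(explanation);
        }
    }
}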