Use of org.apache.lucene.search.Explanation in project lucene-solr by apache.
In the class TestBM25Similarity, method testLengthEncodingBackwardCompatibility:
public void testLengthEncodingBackwardCompatibility() throws IOException {
  Similarity similarity = new BM25Similarity();
  for (int indexCreatedVersionMajor : new int[] { Version.LUCENE_6_0_0.major, Version.LATEST.major }) {
    for (int length : new int[] { 1, 2, 4 }) {
      // these length values are encoded accurately in both cases
      Directory dir = newDirectory();
      // record the index-created version on the directory
      new SegmentInfos(indexCreatedVersionMajor).commit(dir);
      IndexWriter w = new IndexWriter(dir, newIndexWriterConfig().setSimilarity(similarity));
      Document doc = new Document();
      String value = IntStream.range(0, length).mapToObj(i -> "b").collect(Collectors.joining(" "));
      doc.add(new TextField("foo", value, Store.NO));
      w.addDocument(doc);
      IndexReader reader = DirectoryReader.open(w);
      IndexSearcher searcher = newSearcher(reader);
      searcher.setSimilarity(similarity);
      Explanation expl = searcher.explain(new TermQuery(new Term("foo", "b")), 0);
      Explanation docLen = findExplanation(expl, "fieldLength");
      assertNotNull(docLen);
      assertEquals(docLen.toString(), length, (int) docLen.getValue());
      w.close();
      reader.close();
      dir.close();
    }
  }
}
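The assertions above rely on a findExplanation helper that is defined elsewhere in TestBM25Similarity and not shown in this excerpt. A minimal sketch of such a helper, assuming it matches sub-explanations by the start of their description, could look like this:

// Sketch only: recursively search an Explanation tree for the first node
// whose description starts with the given text; returns null if none matches.
private static Explanation findExplanation(Explanation expl, String text) {
  if (expl.getDescription().startsWith(text)) {
    return expl;
  }
  for (Explanation sub : expl.getDetails()) {
    Explanation match = findExplanation(sub, text);
    if (match != null) {
      return match;
    }
  }
  return null;
}

Searching by description keeps the assertion independent of where the fieldLength detail happens to sit inside the BM25 score breakdown.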
Use of org.apache.lucene.search.Explanation in project lucene-solr by apache.
In the class TestRangeFacetCounts, method testCustomDoubleValuesSource:
public void testCustomDoubleValuesSource() throws Exception {
  Directory dir = newDirectory();
  RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
  Document doc = new Document();
  writer.addDocument(doc);
  writer.addDocument(doc);
  writer.addDocument(doc);
  // Test wants 3 docs in one segment:
  writer.forceMerge(1);
  final DoubleValuesSource vs = new DoubleValuesSource() {

    @Override
    public DoubleValues getValues(LeafReaderContext ctx, DoubleValues scores) throws IOException {
      return new DoubleValues() {
        int doc = -1;

        @Override
        public double doubleValue() throws IOException {
          return doc + 1;
        }

        @Override
        public boolean advanceExact(int doc) throws IOException {
          this.doc = doc;
          return true;
        }
      };
    }

    @Override
    public boolean needsScores() {
      return false;
    }

    @Override
    public Explanation explain(LeafReaderContext ctx, int docId, Explanation scoreExplanation) throws IOException {
      return Explanation.match(docId + 1, "");
    }
  };
  FacetsConfig config = new FacetsConfig();
  FacetsCollector fc = new FacetsCollector();
  IndexReader r = writer.getReader();
  IndexSearcher s = newSearcher(r);
  s.search(new MatchAllDocsQuery(), fc);
  final DoubleRange[] ranges = new DoubleRange[] {
      new DoubleRange("< 1", 0.0, true, 1.0, false),
      new DoubleRange("< 2", 0.0, true, 2.0, false),
      new DoubleRange("< 5", 0.0, true, 5.0, false),
      new DoubleRange("< 10", 0.0, true, 10.0, false),
      new DoubleRange("< 20", 0.0, true, 20.0, false),
      new DoubleRange("< 50", 0.0, true, 50.0, false) };
  final Query fastMatchFilter;
  final AtomicBoolean filterWasUsed = new AtomicBoolean();
  if (random().nextBoolean()) {
    // Sort of silly:
    final Query in = new MatchAllDocsQuery();
    fastMatchFilter = new UsedQuery(in, filterWasUsed);
  } else {
    fastMatchFilter = null;
  }
  if (VERBOSE) {
    System.out.println("TEST: fastMatchFilter=" + fastMatchFilter);
  }
  Facets facets = new DoubleRangeFacetCounts("field", vs, fc, fastMatchFilter, ranges);
  assertEquals("dim=field path=[] value=3 childCount=6\n < 1 (0)\n < 2 (1)\n < 5 (3)\n < 10 (3)\n < 20 (3)\n < 50 (3)\n", facets.getTopChildren(10, "field").toString());
  assertTrue(fastMatchFilter == null || filterWasUsed.get());
  DrillDownQuery ddq = new DrillDownQuery(config);
  ddq.add("field", ranges[1].getQuery(fastMatchFilter, vs));
  // Test simple drill-down:
  assertEquals(1, s.search(ddq, 10).totalHits);
  // Test drill-sideways after drill-down
  DrillSideways ds = new DrillSideways(s, config, (TaxonomyReader) null) {

    @Override
    protected Facets buildFacetsResult(FacetsCollector drillDowns, FacetsCollector[] drillSideways, String[] drillSidewaysDims) throws IOException {
      assert drillSideways.length == 1;
      return new DoubleRangeFacetCounts("field", vs, drillSideways[0], fastMatchFilter, ranges);
    }

    @Override
    protected boolean scoreSubDocsAtOnce() {
      return random().nextBoolean();
    }
  };
  DrillSidewaysResult dsr = ds.search(ddq, 10);
  assertEquals(1, dsr.hits.totalHits);
  assertEquals("dim=field path=[] value=3 childCount=6\n < 1 (0)\n < 2 (1)\n < 5 (3)\n < 10 (3)\n < 20 (3)\n < 50 (3)\n", dsr.facets.getTopChildren(10, "field").toString());
  writer.close();
  IOUtils.close(r, dir);
}
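The only Explanation-specific piece of this test is the overridden explain method, which returns Explanation.match(docId + 1, "") with an empty description. As a hedged illustration (not part of the test), a value source could also attach the inputs it used as nested details, since Explanation.match accepts child explanations:

// Illustration only: Explanation.match builds a "match" node whose details
// form a tree; toString() renders one indented line per sub-explanation.
Explanation valueExpl = Explanation.match(3.0f, "docId + 1, computed from:",
    Explanation.match(2.0f, "docId"),
    Explanation.match(1.0f, "constant offset"));
System.out.println(valueExpl);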
Use of org.apache.lucene.search.Explanation in project lucene-solr by apache.
In the class TestCustomScoreExplanations, method testSubExplanations:
public void testSubExplanations() throws IOException {
  Query query = new FunctionQuery(new ConstValueSource(5));
  IndexSearcher searcher = newSearcher(BaseExplanationTestCase.searcher.getIndexReader());
  searcher.setSimilarity(new BM25Similarity());
  Explanation expl = searcher.explain(query, 0);
  assertEquals(2, expl.getDetails().length);
  // function
  assertEquals(5f, expl.getDetails()[0].getValue(), 0f);
  // boost
  assertEquals("boost", expl.getDetails()[1].getDescription());
  assertEquals(1f, expl.getDetails()[1].getValue(), 0f);
  query = new BoostQuery(query, 2);
  expl = searcher.explain(query, 0);
  assertEquals(2, expl.getDetails().length);
  // function
  assertEquals(5f, expl.getDetails()[0].getValue(), 0f);
  // boost
  assertEquals("boost", expl.getDetails()[1].getDescription());
  assertEquals(2f, expl.getDetails()[1].getValue(), 0f);
  // in order to have a queryNorm != 1
  searcher.setSimilarity(new ClassicSimilarity());
  expl = searcher.explain(query, 0);
  assertEquals(2, expl.getDetails().length);
  // function
  assertEquals(5f, expl.getDetails()[0].getValue(), 0f);
  // boost
  assertEquals("boost", expl.getDetails()[1].getDescription());
  assertEquals(2f, expl.getDetails()[1].getValue(), 0f);
}
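These checks index into getDetails() positionally, which couples the test to the exact order in which the sub-explanations are emitted. As an illustrative alternative (not in the original test), the final block of assertions could locate the boost detail by description instead, reusing a lookup in the style of the findExplanation sketch shown earlier:

// Illustrative: locate the boost detail by its description instead of by position.
Explanation boostExpl = findExplanation(expl, "boost");
assertNotNull(boostExpl);
assertEquals(2f, boostExpl.getValue(), 0f);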
Use of org.apache.lucene.search.Explanation in project lucene-solr by apache.
In the class TestFunctionScoreExplanations, method testSubExplanations:
public void testSubExplanations() throws IOException {
  Query query = new FunctionScoreQuery(new MatchAllDocsQuery(), DoubleValuesSource.constant(5));
  IndexSearcher searcher = newSearcher(BaseExplanationTestCase.searcher.getIndexReader());
  searcher.setSimilarity(new BM25Similarity());
  Explanation expl = searcher.explain(query, 0);
  assertEquals("constant(5.0)", expl.getDescription());
  assertEquals(0, expl.getDetails().length);
  query = new BoostQuery(query, 2);
  expl = searcher.explain(query, 0);
  assertEquals(2, expl.getDetails().length);
  // function
  assertEquals(5f, expl.getDetails()[1].getValue(), 0f);
  // boost
  assertEquals("boost", expl.getDetails()[0].getDescription());
  assertEquals(2f, expl.getDetails()[0].getValue(), 0f);
  // in order to have a queryNorm != 1
  searcher.setSimilarity(new ClassicSimilarity());
  expl = searcher.explain(query, 0);
  assertEquals(2, expl.getDetails().length);
  // function
  assertEquals(5f, expl.getDetails()[1].getValue(), 0f);
  // boost
  assertEquals("boost", expl.getDetails()[0].getDescription());
  assertEquals(2f, expl.getDetails()[0].getValue(), 0f);
}
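Note that here the boost detail sits at index 0 and the function value at index 1, the reverse of the order asserted in the FunctionQuery test above. As an illustrative, order-independent variant of the final boost check (not in the original test), the details could simply be scanned by description:

// Illustrative: scan the details for the boost entry instead of assuming its position.
float boostValue = Float.NaN;
for (Explanation detail : expl.getDetails()) {
  if ("boost".equals(detail.getDescription())) {
    boostValue = detail.getValue();
  }
}
assertEquals(2f, boostValue, 0f);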
Use of org.apache.lucene.search.Explanation in project lucene-solr by apache.
In the class TestFunctionRangeQuery, method testExplain:
@Test
public void testExplain() throws IOException {
  Query rangeQuery = new FunctionRangeQuery(INT_VALUESOURCE, 2, 2, true, true);
  ScoreDoc[] scoreDocs = indexSearcher.search(rangeQuery, N_DOCS).scoreDocs;
  Explanation explain = indexSearcher.explain(rangeQuery, scoreDocs[0].doc);
  // Just validate it looks reasonable
  assertEquals("2.0 = frange(int(" + INT_FIELD + ")):[2 TO 2]\n" + " 2.0 = int(" + INT_FIELD + ")=2\n", explain.toString());
}
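The assertion above compares against Explanation.toString(), whose rendered format ("value = description" with indented sub-explanations) is what the expected string encodes. An illustrative, structure-based variant of the same check (not in the original test) might read:

// Illustrative alternative: assert on the Explanation tree rather than its rendered string.
assertTrue(explain.isMatch());
assertEquals(2.0f, explain.getValue(), 0.0f);
assertEquals(1, explain.getDetails().length);
assertEquals(2.0f, explain.getDetails()[0].getValue(), 0.0f);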