Use of org.apache.lucene.facet.FacetResult in project lucene-solr by apache.
From the class TestRangeFacetCounts, method testRandomDoubles:
public void testRandomDoubles() throws Exception {
  Directory dir = newDirectory();
  RandomIndexWriter w = new RandomIndexWriter(random(), dir);
  int numDocs = atLeast(1000);
  double[] values = new double[numDocs];
  double minValue = Double.POSITIVE_INFINITY;
  double maxValue = Double.NEGATIVE_INFINITY;
  for (int i = 0; i < numDocs; i++) {
    Document doc = new Document();
    double v = random().nextDouble();
    values[i] = v;
    doc.add(new DoubleDocValuesField("field", v));
    doc.add(new DoublePoint("field", v));
    w.addDocument(doc);
    minValue = Math.min(minValue, v);
    maxValue = Math.max(maxValue, v);
  }
  IndexReader r = w.getReader();
  IndexSearcher s = newSearcher(r, false);
  FacetsConfig config = new FacetsConfig();
  int numIters = atLeast(10);
  for (int iter = 0; iter < numIters; iter++) {
    if (VERBOSE) {
      System.out.println("TEST: iter=" + iter);
    }
    int numRange = TestUtil.nextInt(random(), 1, 5);
    DoubleRange[] ranges = new DoubleRange[numRange];
    int[] expectedCounts = new int[numRange];
    double minAcceptedValue = Double.POSITIVE_INFINITY;
    double maxAcceptedValue = Double.NEGATIVE_INFINITY;
    for (int rangeID = 0; rangeID < numRange; rangeID++) {
      double min;
      if (rangeID > 0 && random().nextInt(10) == 7) {
        // Use an existing boundary:
        DoubleRange prevRange = ranges[random().nextInt(rangeID)];
        if (random().nextBoolean()) {
          min = prevRange.min;
        } else {
          min = prevRange.max;
        }
      } else {
        min = random().nextDouble();
      }
      double max;
      if (rangeID > 0 && random().nextInt(10) == 7) {
        // Use an existing boundary:
        DoubleRange prevRange = ranges[random().nextInt(rangeID)];
        if (random().nextBoolean()) {
          max = prevRange.min;
        } else {
          max = prevRange.max;
        }
      } else {
        max = random().nextDouble();
      }
      if (min > max) {
        double x = min;
        min = max;
        max = x;
      }
      boolean minIncl;
      boolean maxIncl;
      long minAsLong = NumericUtils.doubleToSortableLong(min);
      long maxAsLong = NumericUtils.doubleToSortableLong(max);
      // NOTE: maxAsLong - minAsLong >= 0 is here to handle the common overflow case!
      if (maxAsLong - minAsLong >= 0 && maxAsLong - minAsLong < 2) {
        minIncl = true;
        maxIncl = true;
      } else {
        minIncl = random().nextBoolean();
        maxIncl = random().nextBoolean();
      }
      ranges[rangeID] = new DoubleRange("r" + rangeID, min, minIncl, max, maxIncl);
      // expected count:
      for (int i = 0; i < numDocs; i++) {
        boolean accept = true;
        if (minIncl) {
          accept &= values[i] >= min;
        } else {
          accept &= values[i] > min;
        }
        if (maxIncl) {
          accept &= values[i] <= max;
        } else {
          accept &= values[i] < max;
        }
        if (accept) {
          expectedCounts[rangeID]++;
          minAcceptedValue = Math.min(minAcceptedValue, values[i]);
          maxAcceptedValue = Math.max(maxAcceptedValue, values[i]);
        }
      }
    }
    FacetsCollector sfc = new FacetsCollector();
    s.search(new MatchAllDocsQuery(), sfc);
    Query fastMatchFilter;
    if (random().nextBoolean()) {
      if (random().nextBoolean()) {
        fastMatchFilter = DoublePoint.newRangeQuery("field", minValue, maxValue);
      } else {
        fastMatchFilter = DoublePoint.newRangeQuery("field", minAcceptedValue, maxAcceptedValue);
      }
    } else {
      fastMatchFilter = null;
    }
    DoubleValuesSource vs = DoubleValuesSource.fromDoubleField("field");
    Facets facets = new DoubleRangeFacetCounts("field", vs, sfc, fastMatchFilter, ranges);
    FacetResult result = facets.getTopChildren(10, "field");
    assertEquals(numRange, result.labelValues.length);
    for (int rangeID = 0; rangeID < numRange; rangeID++) {
      if (VERBOSE) {
        System.out.println(" range " + rangeID + " expectedCount=" + expectedCounts[rangeID]);
      }
      LabelAndValue subNode = result.labelValues[rangeID];
      assertEquals("r" + rangeID, subNode.label);
      assertEquals(expectedCounts[rangeID], subNode.value.intValue());
      DoubleRange range = ranges[rangeID];
      // Test drill-down:
      DrillDownQuery ddq = new DrillDownQuery(config);
      if (random().nextBoolean()) {
        ddq.add("field", DoublePoint.newRangeQuery("field", range.min, range.max));
      } else {
        ddq.add("field", range.getQuery(fastMatchFilter, vs));
      }
      assertEquals(expectedCounts[rangeID], s.search(ddq, 10).totalHits);
    }
  }
  w.close();
  IOUtils.close(r, dir);
}
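For reference, here is a minimal, self-contained sketch of the same DoubleRangeFacetCounts pattern that the test randomizes above. It is not part of the test: the field name "price", the range labels, and the in-memory RAMDirectory are illustrative choices only.

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.DoubleDocValuesField;
import org.apache.lucene.facet.FacetResult;
import org.apache.lucene.facet.Facets;
import org.apache.lucene.facet.FacetsCollector;
import org.apache.lucene.facet.LabelAndValue;
import org.apache.lucene.facet.range.DoubleRange;
import org.apache.lucene.facet.range.DoubleRangeFacetCounts;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.search.DoubleValuesSource;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;

public class DoubleRangeFacetExample {
  public static void main(String[] args) throws Exception {
    Directory dir = new RAMDirectory();
    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(new StandardAnalyzer()));
    for (double v : new double[] {0.05, 0.3, 0.75}) {
      Document doc = new Document();
      // Range faceting reads the per-document value from doc values:
      doc.add(new DoubleDocValuesField("price", v));
      writer.addDocument(doc);
    }
    writer.close();

    DirectoryReader reader = DirectoryReader.open(dir);
    IndexSearcher searcher = new IndexSearcher(reader);

    // Collect the matching docs, then count them into the requested ranges:
    FacetsCollector fc = new FacetsCollector();
    searcher.search(new MatchAllDocsQuery(), fc);
    Facets facets = new DoubleRangeFacetCounts("price",
        DoubleValuesSource.fromDoubleField("price"), fc, null /* no fast-match query */,
        new DoubleRange("cheap", 0.0, true, 0.5, false),
        new DoubleRange("expensive", 0.5, true, 1.0, true));

    // FacetResult exposes one LabelAndValue per range, in the order the ranges were given:
    FacetResult result = facets.getTopChildren(10, "price");
    for (LabelAndValue lv : result.labelValues) {
      System.out.println(lv.label + " -> " + lv.value);
    }
    reader.close();
    dir.close();
  }
}

As in the test, the optional fast-match query argument can prune documents before the per-range checks; passing null simply disables that optimization.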
Use of org.apache.lucene.facet.FacetResult in project lucene-solr by apache.
From the class TestTaxonomyFacetSumValueSource, method testWrongIndexFieldName:
public void testWrongIndexFieldName() throws Exception {
  Directory dir = newDirectory();
  Directory taxoDir = newDirectory();
  // Writes facet ords to a separate directory from the
  // main index:
  DirectoryTaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir, IndexWriterConfig.OpenMode.CREATE);
  FacetsConfig config = new FacetsConfig();
  config.setIndexFieldName("a", "$facets2");
  RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
  Document doc = new Document();
  doc.add(new NumericDocValuesField("num", 10));
  doc.add(new FacetField("a", "foo1"));
  writer.addDocument(config.build(taxoWriter, doc));
  // NRT open
  IndexSearcher searcher = newSearcher(writer.getReader());
  writer.close();
  // NRT open
  TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoWriter);
  taxoWriter.close();
  FacetsCollector c = new FacetsCollector();
  searcher.search(new MatchAllDocsQuery(), c);
  TaxonomyFacetSumValueSource facets = new TaxonomyFacetSumValueSource(taxoReader, config, c, DoubleValuesSource.fromIntField("num"));
  // Ask for top 10 labels for any dims that have counts:
  List<FacetResult> results = facets.getAllDims(10);
  assertTrue(results.isEmpty());
  expectThrows(IllegalArgumentException.class, () -> {
    facets.getSpecificValue("a");
  });
  expectThrows(IllegalArgumentException.class, () -> {
    facets.getTopChildren(10, "a");
  });
  IOUtils.close(searcher.getIndexReader(), taxoReader, dir, taxoDir);
}
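A hedged sketch of the positive counterpart to this negative test: when the facet ordinals for dim "a" really live in the custom "$facets2" field, a DocValuesOrdinalsReader can point the value-source facets at that field instead of the default one. The helper class and method names below are hypothetical, and the OrdinalsReader-based constructor is assumed from the Lucene 7.x facet module.

import java.io.IOException;
import java.util.List;
import org.apache.lucene.facet.FacetResult;
import org.apache.lucene.facet.Facets;
import org.apache.lucene.facet.FacetsCollector;
import org.apache.lucene.facet.FacetsConfig;
import org.apache.lucene.facet.taxonomy.DocValuesOrdinalsReader;
import org.apache.lucene.facet.taxonomy.OrdinalsReader;
import org.apache.lucene.facet.taxonomy.TaxonomyFacetSumValueSource;
import org.apache.lucene.facet.taxonomy.TaxonomyReader;
import org.apache.lucene.search.DoubleValuesSource;

public final class CustomFieldFacetSums {
  private CustomFieldFacetSums() {}

  /** Sums "num" over facet ordinals that were written to the custom "$facets2" index field. */
  public static List<FacetResult> sumOverCustomField(TaxonomyReader taxoReader,
      FacetsConfig config, FacetsCollector hits) throws IOException {
    // The plain constructor reads the default "$facets" field, which is why the test
    // above sees no results for dim "a"; an OrdinalsReader selects the field explicitly.
    OrdinalsReader ords = new DocValuesOrdinalsReader("$facets2");
    Facets facets = new TaxonomyFacetSumValueSource(ords, taxoReader, config, hits,
        DoubleValuesSource.fromIntField("num"));
    return facets.getAllDims(10);
  }
}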
Use of org.apache.lucene.facet.FacetResult in project lucene-solr by apache.
From the class TestTaxonomyFacetCounts2, method testNoParents:
@Test
public void testNoParents() throws Exception {
  DirectoryReader indexReader = DirectoryReader.open(indexDir);
  TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoDir);
  IndexSearcher searcher = newSearcher(indexReader);
  FacetsCollector sfc = new FacetsCollector();
  searcher.search(new MatchAllDocsQuery(), sfc);
  Facets facets = getTaxonomyFacetCounts(taxoReader, getConfig(), sfc);
  FacetResult result = facets.getTopChildren(NUM_CHILDREN_CP_C, CP_C);
  assertEquals(allExpectedCounts.get(CP_C), result.value);
  for (LabelAndValue labelValue : result.labelValues) {
    assertEquals(allExpectedCounts.get(CP_C + "/" + labelValue.label), labelValue.value);
  }
  result = facets.getTopChildren(NUM_CHILDREN_CP_D, CP_D);
  assertEquals(allExpectedCounts.get(CP_D), result.value);
  for (LabelAndValue labelValue : result.labelValues) {
    assertEquals(allExpectedCounts.get(CP_D + "/" + labelValue.label), labelValue.value);
  }
  IOUtils.close(indexReader, taxoReader);
}
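The assertions above read a FacetResult's children through its label/value pairs. The small hypothetical helper below captures that access pattern by copying a result into an ordered label-to-count map; it is a convenience sketch, not part of the Lucene test.

import java.util.LinkedHashMap;
import java.util.Map;
import org.apache.lucene.facet.FacetResult;
import org.apache.lucene.facet.LabelAndValue;

public final class FacetResults {
  private FacetResults() {}

  /** Copies the children of a FacetResult into an insertion-ordered label -> count map. */
  public static Map<String, Number> toMap(FacetResult result) {
    Map<String, Number> counts = new LinkedHashMap<>();
    if (result == null) {
      // Taxonomy-based getTopChildren may return null when the dim was never indexed.
      return counts;
    }
    for (LabelAndValue lv : result.labelValues) {
      counts.put(lv.label, lv.value);
    }
    return counts;
  }
}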
Use of org.apache.lucene.facet.FacetResult in project lucene-solr by apache.
From the class TestSearcherTaxonomyManager, method testNRT:
public void testNRT() throws Exception {
  Directory dir = newDirectory();
  Directory taxoDir = newDirectory();
  IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random()));
  // Don't allow tiny maxBufferedDocs; it can make this
  // test too slow:
  iwc.setMaxBufferedDocs(Math.max(500, iwc.getMaxBufferedDocs()));
  // MockRandom/AlcoholicMergePolicy are too slow:
  TieredMergePolicy tmp = new TieredMergePolicy();
  tmp.setFloorSegmentMB(.001);
  iwc.setMergePolicy(tmp);
  final IndexWriter w = new IndexWriter(dir, iwc);
  final DirectoryTaxonomyWriter tw = new DirectoryTaxonomyWriter(taxoDir);
  final FacetsConfig config = new FacetsConfig();
  config.setMultiValued("field", true);
  final AtomicBoolean stop = new AtomicBoolean();
  // How many unique facets to index before stopping:
  final int ordLimit = TEST_NIGHTLY ? 100000 : 6000;
  Thread indexer = new IndexerThread(w, config, tw, null, ordLimit, stop);
  final SearcherTaxonomyManager mgr = new SearcherTaxonomyManager(w, true, null, tw);
  Thread reopener = new Thread() {
    @Override
    public void run() {
      while (!stop.get()) {
        try {
          // Sleep for up to 20 msec:
          Thread.sleep(random().nextInt(20));
          if (VERBOSE) {
            System.out.println("TEST: reopen");
          }
          mgr.maybeRefresh();
          if (VERBOSE) {
            System.out.println("TEST: reopen done");
          }
        } catch (Exception ioe) {
          throw new RuntimeException(ioe);
        }
      }
    }
  };
  reopener.setName("reopener");
  reopener.start();
  indexer.setName("indexer");
  indexer.start();
  try {
    while (!stop.get()) {
      SearcherAndTaxonomy pair = mgr.acquire();
      try {
        //System.out.println("search maxOrd=" + pair.taxonomyReader.getSize());
        FacetsCollector sfc = new FacetsCollector();
        pair.searcher.search(new MatchAllDocsQuery(), sfc);
        Facets facets = getTaxonomyFacetCounts(pair.taxonomyReader, config, sfc);
        FacetResult result = facets.getTopChildren(10, "field");
        if (pair.searcher.getIndexReader().numDocs() > 0) {
          //System.out.println(pair.taxonomyReader.getSize());
          assertTrue(result.childCount > 0);
          assertTrue(result.labelValues.length > 0);
        }
        //if (VERBOSE) {
        //  System.out.println("TEST: facets=" + FacetTestUtils.toString(results.get(0)));
        //}
      } finally {
        mgr.release(pair);
      }
    }
  } finally {
    indexer.join();
    reopener.join();
  }
  if (VERBOSE) {
    System.out.println("TEST: now stop");
  }
  w.close();
  IOUtils.close(mgr, tw, taxoDir, dir);
}
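A condensed sketch of the acquire/try/finally/release cycle that the search loop above performs on every pass. FastTaxonomyFacetCounts stands in here for the test's getTaxonomyFacetCounts helper, and the class, method, and dim names are illustrative only.

import java.io.IOException;
import org.apache.lucene.facet.FacetResult;
import org.apache.lucene.facet.Facets;
import org.apache.lucene.facet.FacetsCollector;
import org.apache.lucene.facet.FacetsConfig;
import org.apache.lucene.facet.taxonomy.FastTaxonomyFacetCounts;
import org.apache.lucene.facet.taxonomy.SearcherTaxonomyManager;
import org.apache.lucene.facet.taxonomy.SearcherTaxonomyManager.SearcherAndTaxonomy;
import org.apache.lucene.search.MatchAllDocsQuery;

public final class NrtFacetSearch {
  private NrtFacetSearch() {}

  /** Runs one search-and-facet pass over whatever point-in-time view the manager currently exposes. */
  public static FacetResult topFieldFacets(SearcherTaxonomyManager mgr, FacetsConfig config)
      throws IOException {
    // acquire() pins a consistent searcher + taxonomy-reader pair; release() must always follow.
    SearcherAndTaxonomy pair = mgr.acquire();
    try {
      FacetsCollector fc = new FacetsCollector();
      pair.searcher.search(new MatchAllDocsQuery(), fc);
      Facets facets = new FastTaxonomyFacetCounts(pair.taxonomyReader, config, fc);
      return facets.getTopChildren(10, "field");
    } finally {
      mgr.release(pair);
    }
  }
}

A background thread calling mgr.maybeRefresh(), as the test's reopener does, is what makes newly indexed documents and taxonomy ordinals visible to subsequent acquire() calls.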
Use of org.apache.lucene.facet.FacetResult in project lucene-solr by apache.
From the class TestTaxonomyFacetCounts, method testLabelWithDelimiter:
public void testLabelWithDelimiter() throws Exception {
  Directory dir = newDirectory();
  Directory taxoDir = newDirectory();
  RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
  DirectoryTaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir, IndexWriterConfig.OpenMode.CREATE);
  FacetsConfig config = new FacetsConfig();
  config.setMultiValued("dim", true);
  Document doc = new Document();
  doc.add(newTextField("field", "text", Field.Store.NO));
  doc.add(new FacetField("dim", "test\u001Fone"));
  doc.add(new FacetField("dim", "test\u001Ftwo"));
  writer.addDocument(config.build(taxoWriter, doc));
  // NRT open
  IndexSearcher searcher = newSearcher(writer.getReader());
  // NRT open
  TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoWriter);
  Facets facets = getAllFacets(FacetsConfig.DEFAULT_INDEX_FIELD_NAME, searcher, taxoReader, config);
  assertEquals(1, facets.getSpecificValue("dim", "test\u001Fone"));
  assertEquals(1, facets.getSpecificValue("dim", "test\u001Ftwo"));
  FacetResult result = facets.getTopChildren(10, "dim");
  assertEquals("dim=dim path=[] value=-1 childCount=2\n  test\u001Fone (1)\n  test\u001Ftwo (1)\n", result.toString());
  writer.close();
  IOUtils.close(taxoWriter, searcher.getIndexReader(), taxoReader, dir, taxoDir);
}
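A self-contained sketch of the same round-trip outside the test harness: a facet label containing U+001F (the character FacetsConfig uses internally as its path delimiter) is indexed and then read back through getSpecificValue and getTopChildren. RAMDirectory, FastTaxonomyFacetCounts, and the class name are convenience assumptions, not the test's own code.

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.facet.FacetField;
import org.apache.lucene.facet.FacetResult;
import org.apache.lucene.facet.Facets;
import org.apache.lucene.facet.FacetsCollector;
import org.apache.lucene.facet.FacetsConfig;
import org.apache.lucene.facet.taxonomy.FastTaxonomyFacetCounts;
import org.apache.lucene.facet.taxonomy.TaxonomyReader;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;

public class DelimiterLabelExample {
  public static void main(String[] args) throws Exception {
    Directory dir = new RAMDirectory();
    Directory taxoDir = new RAMDirectory();
    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(new StandardAnalyzer()));
    DirectoryTaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir);
    FacetsConfig config = new FacetsConfig();
    config.setMultiValued("dim", true);

    Document doc = new Document();
    // The label deliberately contains the internal delimiter character:
    doc.add(new FacetField("dim", "test\u001Fone"));
    writer.addDocument(config.build(taxoWriter, doc));
    writer.close();
    taxoWriter.close();

    IndexSearcher searcher = new IndexSearcher(DirectoryReader.open(dir));
    TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoDir);
    FacetsCollector fc = new FacetsCollector();
    searcher.search(new MatchAllDocsQuery(), fc);
    Facets facets = new FastTaxonomyFacetCounts(taxoReader, config, fc);

    // The raw label round-trips; getSpecificValue and FacetResult both report it intact:
    System.out.println(facets.getSpecificValue("dim", "test\u001Fone"));
    FacetResult result = facets.getTopChildren(10, "dim");
    System.out.println(result);

    taxoReader.close();
    searcher.getIndexReader().close();
    dir.close();
    taxoDir.close();
  }
}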