Example 36 with FacetResult

Use of org.apache.lucene.facet.FacetResult in project lucene-solr by apache.

Class TestRangeFacetCounts, method testRandomDoubles:

public void testRandomDoubles() throws Exception {
    Directory dir = newDirectory();
    RandomIndexWriter w = new RandomIndexWriter(random(), dir);
    int numDocs = atLeast(1000);
    double[] values = new double[numDocs];
    double minValue = Double.POSITIVE_INFINITY;
    double maxValue = Double.NEGATIVE_INFINITY;
    for (int i = 0; i < numDocs; i++) {
        Document doc = new Document();
        double v = random().nextDouble();
        values[i] = v;
        doc.add(new DoubleDocValuesField("field", v));
        doc.add(new DoublePoint("field", v));
        w.addDocument(doc);
        minValue = Math.min(minValue, v);
        maxValue = Math.max(maxValue, v);
    }
    IndexReader r = w.getReader();
    IndexSearcher s = newSearcher(r, false);
    FacetsConfig config = new FacetsConfig();
    int numIters = atLeast(10);
    for (int iter = 0; iter < numIters; iter++) {
        if (VERBOSE) {
            System.out.println("TEST: iter=" + iter);
        }
        int numRange = TestUtil.nextInt(random(), 1, 5);
        DoubleRange[] ranges = new DoubleRange[numRange];
        int[] expectedCounts = new int[numRange];
        double minAcceptedValue = Double.POSITIVE_INFINITY;
        double maxAcceptedValue = Double.NEGATIVE_INFINITY;
        for (int rangeID = 0; rangeID < numRange; rangeID++) {
            double min;
            if (rangeID > 0 && random().nextInt(10) == 7) {
                // Use an existing boundary:
                DoubleRange prevRange = ranges[random().nextInt(rangeID)];
                if (random().nextBoolean()) {
                    min = prevRange.min;
                } else {
                    min = prevRange.max;
                }
            } else {
                min = random().nextDouble();
            }
            double max;
            if (rangeID > 0 && random().nextInt(10) == 7) {
                // Use an existing boundary:
                DoubleRange prevRange = ranges[random().nextInt(rangeID)];
                if (random().nextBoolean()) {
                    max = prevRange.min;
                } else {
                    max = prevRange.max;
                }
            } else {
                max = random().nextDouble();
            }
            if (min > max) {
                double x = min;
                min = max;
                max = x;
            }
            boolean minIncl;
            boolean maxIncl;
            long minAsLong = NumericUtils.doubleToSortableLong(min);
            long maxAsLong = NumericUtils.doubleToSortableLong(max);
            // NOTE: maxAsLong - minAsLong >= 0 is here to handle the common overflow case!
            if (maxAsLong - minAsLong >= 0 && maxAsLong - minAsLong < 2) {
                minIncl = true;
                maxIncl = true;
            } else {
                minIncl = random().nextBoolean();
                maxIncl = random().nextBoolean();
            }
            ranges[rangeID] = new DoubleRange("r" + rangeID, min, minIncl, max, maxIncl);
            // expected count:
            for (int i = 0; i < numDocs; i++) {
                boolean accept = true;
                if (minIncl) {
                    accept &= values[i] >= min;
                } else {
                    accept &= values[i] > min;
                }
                if (maxIncl) {
                    accept &= values[i] <= max;
                } else {
                    accept &= values[i] < max;
                }
                if (accept) {
                    expectedCounts[rangeID]++;
                    minAcceptedValue = Math.min(minAcceptedValue, values[i]);
                    maxAcceptedValue = Math.max(maxAcceptedValue, values[i]);
                }
            }
        }
        FacetsCollector sfc = new FacetsCollector();
        s.search(new MatchAllDocsQuery(), sfc);
        Query fastMatchFilter;
        if (random().nextBoolean()) {
            if (random().nextBoolean()) {
                fastMatchFilter = DoublePoint.newRangeQuery("field", minValue, maxValue);
            } else {
                fastMatchFilter = DoublePoint.newRangeQuery("field", minAcceptedValue, maxAcceptedValue);
            }
        } else {
            fastMatchFilter = null;
        }
        DoubleValuesSource vs = DoubleValuesSource.fromDoubleField("field");
        Facets facets = new DoubleRangeFacetCounts("field", vs, sfc, fastMatchFilter, ranges);
        FacetResult result = facets.getTopChildren(10, "field");
        assertEquals(numRange, result.labelValues.length);
        for (int rangeID = 0; rangeID < numRange; rangeID++) {
            if (VERBOSE) {
                System.out.println("  range " + rangeID + " expectedCount=" + expectedCounts[rangeID]);
            }
            LabelAndValue subNode = result.labelValues[rangeID];
            assertEquals("r" + rangeID, subNode.label);
            assertEquals(expectedCounts[rangeID], subNode.value.intValue());
            DoubleRange range = ranges[rangeID];
            // Test drill-down:
            DrillDownQuery ddq = new DrillDownQuery(config);
            if (random().nextBoolean()) {
                ddq.add("field", DoublePoint.newRangeQuery("field", range.min, range.max));
            } else {
                ddq.add("field", range.getQuery(fastMatchFilter, vs));
            }
            assertEquals(expectedCounts[rangeID], s.search(ddq, 10).totalHits);
        }
    }
    w.close();
    IOUtils.close(r, dir);
}
Also used : IndexSearcher(org.apache.lucene.search.IndexSearcher) Query(org.apache.lucene.search.Query) DrillDownQuery(org.apache.lucene.facet.DrillDownQuery) MatchAllDocsQuery(org.apache.lucene.search.MatchAllDocsQuery) MultiFacets(org.apache.lucene.facet.MultiFacets) Facets(org.apache.lucene.facet.Facets) Document(org.apache.lucene.document.Document) LabelAndValue(org.apache.lucene.facet.LabelAndValue) DoubleValuesSource(org.apache.lucene.search.DoubleValuesSource) DoubleDocValuesField(org.apache.lucene.document.DoubleDocValuesField) Directory(org.apache.lucene.store.Directory) FacetsConfig(org.apache.lucene.facet.FacetsConfig) LongPoint(org.apache.lucene.document.LongPoint) DoublePoint(org.apache.lucene.document.DoublePoint) FacetsCollector(org.apache.lucene.facet.FacetsCollector) IndexReader(org.apache.lucene.index.IndexReader) FacetResult(org.apache.lucene.facet.FacetResult) RandomIndexWriter(org.apache.lucene.index.RandomIndexWriter)
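Stripped of the test harness, the pattern Example 36 exercises is: collect hits with a FacetsCollector, hand the collector and the ranges to DoubleRangeFacetCounts, and read the counts back from the returned FacetResult. A minimal sketch, assuming the same imports as above and an already-open IndexSearcher named searcher over documents carrying a DoubleDocValuesField; the field name "price" and the range boundaries are illustrative, not taken from the test:

FacetsCollector fc = new FacetsCollector();
searcher.search(new MatchAllDocsQuery(), fc);

DoubleRange[] ranges = new DoubleRange[] {
    new DoubleRange("cheap", 0.0, true, 10.0, false),
    new DoubleRange("mid", 10.0, true, 100.0, false),
    new DoubleRange("expensive", 100.0, true, Double.POSITIVE_INFINITY, true)
};

// Counts each collected document into every range its doc-values number falls into.
Facets facets = new DoubleRangeFacetCounts("price", fc, ranges);
FacetResult result = facets.getTopChildren(10, "price");
for (LabelAndValue lv : result.labelValues) {
    System.out.println(lv.label + " -> " + lv.value);
}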

Example 37 with FacetResult

Use of org.apache.lucene.facet.FacetResult in project lucene-solr by apache.

Class TestTaxonomyFacetSumValueSource, method testWrongIndexFieldName:

public void testWrongIndexFieldName() throws Exception {
    Directory dir = newDirectory();
    Directory taxoDir = newDirectory();
    // Writes facet ords to a separate directory from the
    // main index:
    DirectoryTaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir, IndexWriterConfig.OpenMode.CREATE);
    FacetsConfig config = new FacetsConfig();
    config.setIndexFieldName("a", "$facets2");
    RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
    Document doc = new Document();
    doc.add(new NumericDocValuesField("num", 10));
    doc.add(new FacetField("a", "foo1"));
    writer.addDocument(config.build(taxoWriter, doc));
    // NRT open
    IndexSearcher searcher = newSearcher(writer.getReader());
    writer.close();
    // NRT open
    TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoWriter);
    taxoWriter.close();
    FacetsCollector c = new FacetsCollector();
    searcher.search(new MatchAllDocsQuery(), c);
    TaxonomyFacetSumValueSource facets = new TaxonomyFacetSumValueSource(taxoReader, config, c, DoubleValuesSource.fromIntField("num"));
    // Ask for top 10 labels for any dims that have counts:
    List<FacetResult> results = facets.getAllDims(10);
    assertTrue(results.isEmpty());
    expectThrows(IllegalArgumentException.class, () -> {
        facets.getSpecificValue("a");
    });
    expectThrows(IllegalArgumentException.class, () -> {
        facets.getTopChildren(10, "a");
    });
    IOUtils.close(searcher.getIndexReader(), taxoReader, dir, taxoDir);
}
Also used : IndexSearcher(org.apache.lucene.search.IndexSearcher) FacetsConfig(org.apache.lucene.facet.FacetsConfig) DirectoryTaxonomyReader(org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader) FacetField(org.apache.lucene.facet.FacetField) Document(org.apache.lucene.document.Document) MatchAllDocsQuery(org.apache.lucene.search.MatchAllDocsQuery) FacetsCollector(org.apache.lucene.facet.FacetsCollector) DirectoryTaxonomyWriter(org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter) NumericDocValuesField(org.apache.lucene.document.NumericDocValuesField) FacetResult(org.apache.lucene.facet.FacetResult) RandomIndexWriter(org.apache.lucene.index.RandomIndexWriter) Directory(org.apache.lucene.store.Directory)
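The assertions in Example 37 pass because the facet ordinals for dimension "a" were written to the custom index field "$facets2" (via config.setIndexFieldName), while TaxonomyFacetSumValueSource by default reads from FacetsConfig.DEFAULT_INDEX_FIELD_NAME ("$facets"), so the dimension is simply invisible to it. To aggregate a dimension stored under a custom index field, point the facets implementation at that field. A sketch using the same taxoReader, config, and collector c as above; it assumes the OrdinalsReader-taking constructor and DocValuesOrdinalsReader available in this Lucene line, so verify them against your release:

// Assumption: read facet ordinals from the custom "$facets2" field instead of the default "$facets".
OrdinalsReader ordsReader = new DocValuesOrdinalsReader("$facets2");
Facets facets2 = new TaxonomyFacetSumValueSource(
    ordsReader, taxoReader, config, c, DoubleValuesSource.fromIntField("num"));
FacetResult result = facets2.getTopChildren(10, "a"); // now the "a" dimension is visible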

Example 38 with FacetResult

Use of org.apache.lucene.facet.FacetResult in project lucene-solr by apache.

Class TestTaxonomyFacetCounts2, method testNoParents:

@Test
public void testNoParents() throws Exception {
    DirectoryReader indexReader = DirectoryReader.open(indexDir);
    TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoDir);
    IndexSearcher searcher = newSearcher(indexReader);
    FacetsCollector sfc = new FacetsCollector();
    searcher.search(new MatchAllDocsQuery(), sfc);
    Facets facets = getTaxonomyFacetCounts(taxoReader, getConfig(), sfc);
    FacetResult result = facets.getTopChildren(NUM_CHILDREN_CP_C, CP_C);
    assertEquals(allExpectedCounts.get(CP_C), result.value);
    for (LabelAndValue labelValue : result.labelValues) {
        assertEquals(allExpectedCounts.get(CP_C + "/" + labelValue.label), labelValue.value);
    }
    result = facets.getTopChildren(NUM_CHILDREN_CP_D, CP_D);
    assertEquals(allExpectedCounts.get(CP_D), result.value);
    for (LabelAndValue labelValue : result.labelValues) {
        assertEquals(allExpectedCounts.get(CP_D + "/" + labelValue.label), labelValue.value);
    }
    IOUtils.close(indexReader, taxoReader);
}
Also used : IndexSearcher(org.apache.lucene.search.IndexSearcher) Facets(org.apache.lucene.facet.Facets) DirectoryReader(org.apache.lucene.index.DirectoryReader) DirectoryTaxonomyReader(org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader) FacetResult(org.apache.lucene.facet.FacetResult) MatchAllDocsQuery(org.apache.lucene.search.MatchAllDocsQuery) LabelAndValue(org.apache.lucene.facet.LabelAndValue) FacetsCollector(org.apache.lucene.facet.FacetsCollector) Test(org.junit.Test)

Example 39 with FacetResult

Use of org.apache.lucene.facet.FacetResult in project lucene-solr by apache.

Class TestSearcherTaxonomyManager, method testNRT:

public void testNRT() throws Exception {
    Directory dir = newDirectory();
    Directory taxoDir = newDirectory();
    IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random()));
    // Don't allow tiny maxBufferedDocs; it can make this
    // test too slow:
    iwc.setMaxBufferedDocs(Math.max(500, iwc.getMaxBufferedDocs()));
    // MockRandom/AlcoholicMergePolicy are too slow:
    TieredMergePolicy tmp = new TieredMergePolicy();
    tmp.setFloorSegmentMB(.001);
    iwc.setMergePolicy(tmp);
    final IndexWriter w = new IndexWriter(dir, iwc);
    final DirectoryTaxonomyWriter tw = new DirectoryTaxonomyWriter(taxoDir);
    final FacetsConfig config = new FacetsConfig();
    config.setMultiValued("field", true);
    final AtomicBoolean stop = new AtomicBoolean();
    // How many unique facets to index before stopping:
    final int ordLimit = TEST_NIGHTLY ? 100000 : 6000;
    Thread indexer = new IndexerThread(w, config, tw, null, ordLimit, stop);
    final SearcherTaxonomyManager mgr = new SearcherTaxonomyManager(w, true, null, tw);
    Thread reopener = new Thread() {

        @Override
        public void run() {
            while (!stop.get()) {
                try {
                    // Sleep for up to 20 msec:
                    Thread.sleep(random().nextInt(20));
                    if (VERBOSE) {
                        System.out.println("TEST: reopen");
                    }
                    mgr.maybeRefresh();
                    if (VERBOSE) {
                        System.out.println("TEST: reopen done");
                    }
                } catch (Exception ioe) {
                    throw new RuntimeException(ioe);
                }
            }
        }
    };
    reopener.setName("reopener");
    reopener.start();
    indexer.setName("indexer");
    indexer.start();
    try {
        while (!stop.get()) {
            SearcherAndTaxonomy pair = mgr.acquire();
            try {
                //System.out.println("search maxOrd=" + pair.taxonomyReader.getSize());
                FacetsCollector sfc = new FacetsCollector();
                pair.searcher.search(new MatchAllDocsQuery(), sfc);
                Facets facets = getTaxonomyFacetCounts(pair.taxonomyReader, config, sfc);
                FacetResult result = facets.getTopChildren(10, "field");
                if (pair.searcher.getIndexReader().numDocs() > 0) {
                    //System.out.println(pair.taxonomyReader.getSize());
                    assertTrue(result.childCount > 0);
                    assertTrue(result.labelValues.length > 0);
                }
            //if (VERBOSE) {
            //System.out.println("TEST: facets=" + FacetTestUtils.toString(results.get(0)));
            //}
            } finally {
                mgr.release(pair);
            }
        }
    } finally {
        indexer.join();
        reopener.join();
    }
    if (VERBOSE) {
        System.out.println("TEST: now stop");
    }
    w.close();
    IOUtils.close(mgr, tw, taxoDir, dir);
}
Also used : FacetsConfig(org.apache.lucene.facet.FacetsConfig) Facets(org.apache.lucene.facet.Facets) MatchAllDocsQuery(org.apache.lucene.search.MatchAllDocsQuery) IOException(java.io.IOException) FacetsCollector(org.apache.lucene.facet.FacetsCollector) TieredMergePolicy(org.apache.lucene.index.TieredMergePolicy) DirectoryTaxonomyWriter(org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter) AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) MockAnalyzer(org.apache.lucene.analysis.MockAnalyzer) IndexWriter(org.apache.lucene.index.IndexWriter) FacetResult(org.apache.lucene.facet.FacetResult) SearcherAndTaxonomy(org.apache.lucene.facet.taxonomy.SearcherTaxonomyManager.SearcherAndTaxonomy) Directory(org.apache.lucene.store.Directory) IndexWriterConfig(org.apache.lucene.index.IndexWriterConfig)
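The part of Example 39 worth lifting out is the acquire/release discipline around SearcherTaxonomyManager: the searcher and taxonomy reader must be used as a matched pair and released in a finally block, otherwise a concurrent maybeRefresh() can swap readers out from under the search. A minimal sketch of that idiom, using the dimension name "field" from the test; FastTaxonomyFacetCounts stands in for whatever taxonomy-based Facets implementation the test helper picks:

SearcherAndTaxonomy pair = mgr.acquire();
try {
    FacetsCollector fc = new FacetsCollector();
    pair.searcher.search(new MatchAllDocsQuery(), fc);
    // The searcher and taxonomyReader in the pair are guaranteed to be consistent with each other.
    Facets facets = new FastTaxonomyFacetCounts(pair.taxonomyReader, config, fc);
    FacetResult result = facets.getTopChildren(10, "field");
} finally {
    mgr.release(pair); // never hold the pair across a refresh
}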

Example 40 with FacetResult

Use of org.apache.lucene.facet.FacetResult in project lucene-solr by apache.

Class TestTaxonomyFacetCounts, method testLabelWithDelimiter:

public void testLabelWithDelimiter() throws Exception {
    Directory dir = newDirectory();
    Directory taxoDir = newDirectory();
    RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
    DirectoryTaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir, IndexWriterConfig.OpenMode.CREATE);
    FacetsConfig config = new FacetsConfig();
    config.setMultiValued("dim", true);
    Document doc = new Document();
    doc.add(newTextField("field", "text", Field.Store.NO));
    doc.add(new FacetField("dim", "testone"));
    doc.add(new FacetField("dim", "testtwo"));
    writer.addDocument(config.build(taxoWriter, doc));
    // NRT open
    IndexSearcher searcher = newSearcher(writer.getReader());
    // NRT open
    TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoWriter);
    Facets facets = getAllFacets(FacetsConfig.DEFAULT_INDEX_FIELD_NAME, searcher, taxoReader, config);
    assertEquals(1, facets.getSpecificValue("dim", "testone"));
    assertEquals(1, facets.getSpecificValue("dim", "testtwo"));
    FacetResult result = facets.getTopChildren(10, "dim");
    assertEquals("dim=dim path=[] value=-1 childCount=2\n  testone (1)\n  testtwo (1)\n", result.toString());
    writer.close();
    IOUtils.close(taxoWriter, searcher.getIndexReader(), taxoReader, dir, taxoDir);
}
Also used : DirectoryTaxonomyWriter(org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter) IndexSearcher(org.apache.lucene.search.IndexSearcher) FacetsConfig(org.apache.lucene.facet.FacetsConfig) Facets(org.apache.lucene.facet.Facets) DirectoryTaxonomyReader(org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader) FacetField(org.apache.lucene.facet.FacetField) FacetResult(org.apache.lucene.facet.FacetResult) Document(org.apache.lucene.document.Document) RandomIndexWriter(org.apache.lucene.index.RandomIndexWriter) Directory(org.apache.lucene.store.Directory)
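For reference, the end-to-end shape that Examples 37 through 40 share: add FacetField values at index time, build each document through FacetsConfig so the ordinals land in the taxonomy, then count at search time and walk the FacetResult. A compressed sketch under the same imports; getAllFacets in Example 40 is a test helper, and FastTaxonomyFacetCounts is shown here only as one concrete counting implementation:

// Index time: FacetsConfig.build() rewrites the document so the facet ordinals are stored.
Document doc = new Document();
doc.add(new FacetField("dim", "testone"));
writer.addDocument(config.build(taxoWriter, doc));

// Search time: collect matches, count against the taxonomy, read the FacetResult.
FacetsCollector fc = new FacetsCollector();
searcher.search(new MatchAllDocsQuery(), fc);
Facets facets = new FastTaxonomyFacetCounts(taxoReader, config, fc);
FacetResult result = facets.getTopChildren(10, "dim");
System.out.println(result.dim + " childCount=" + result.childCount);
for (LabelAndValue lv : result.labelValues) {
    System.out.println("  " + lv.label + " (" + lv.value + ")");
}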

Aggregations

FacetResult (org.apache.lucene.facet.FacetResult): 68
Facets (org.apache.lucene.facet.Facets): 47
FacetsCollector (org.apache.lucene.facet.FacetsCollector): 42
IndexSearcher (org.apache.lucene.search.IndexSearcher): 36
DirectoryTaxonomyReader (org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader): 29
LabelAndValue (org.apache.lucene.facet.LabelAndValue): 28
MatchAllDocsQuery (org.apache.lucene.search.MatchAllDocsQuery): 23
DirectoryReader (org.apache.lucene.index.DirectoryReader): 22
ArrayList (java.util.ArrayList): 21
Directory (org.apache.lucene.store.Directory): 21
FacetsConfig (org.apache.lucene.facet.FacetsConfig): 19
RandomIndexWriter (org.apache.lucene.index.RandomIndexWriter): 19
Document (org.apache.lucene.document.Document): 18
DirectoryTaxonomyWriter (org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter): 14
DefaultSortedSetDocValuesReaderState (org.apache.lucene.facet.sortedset.DefaultSortedSetDocValuesReaderState): 13
SortedSetDocValuesFacetCounts (org.apache.lucene.facet.sortedset.SortedSetDocValuesFacetCounts): 13
SortedSetDocValuesReaderState (org.apache.lucene.facet.sortedset.SortedSetDocValuesReaderState): 13
IOException (java.io.IOException): 12
TaxonomyReader (org.apache.lucene.facet.taxonomy.TaxonomyReader): 12
FacetField (org.apache.lucene.facet.FacetField): 11